repo_name stringlengths 5 100 | path stringlengths 4 375 | copies stringclasses 991 values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15 values |
|---|---|---|---|---|---|
pombredanne/puke-1 | puke/Tools.py | 2 | 13617 | #!/usr/bin/env python
# -*- coding: utf8 -*-
import os, os.path, shutil, logging, sys, filecmp, stat, re, time
from puke.FileList import *
from puke.Console import *
from scss import Scss
from puke.FileSystem import *
from puke.Compress import *
from puke.Std import *
from puke.ToolsExec import *
from puke.Cache import *
import System
CSS_COMPRESSOR = sys.argv[0] + '.css.compress'
JS_COMPRESSOR = sys.argv[0] + '.js.compress'
JS_RUNNER = sys.argv[0] + '.js.runner'
def combine(in_files, out_file, verbose=False, replace=None):
    """Concatenate *in_files* into *out_file*.

    If any input has a css/scss extension, the combined payload is
    prefixed with "@option compress: no;" and compiled through SCSS
    before being written.  When *replace* is given, each file's content
    is run through __replace() first.
    """
    in_files = FileList.check(in_files)
    builddir = os.path.dirname(out_file)
    makedir(builddir)
    temp_file = os.path.join(builddir, '.temp')
    console.header("- Combining files :")
    # A single stylesheet input switches the whole batch to CSS mode.
    isCSS = any(__get_ext(name) in ['css', 'scss'] for name in in_files)
    combined = "@option compress: no;" if isCSS else ""
    for name in in_files:
        fh = open(name)
        data = fh.read() + '\n'
        if replace:
            data = __replace(data, replace)
        fh.close()
        console.info(' + %s ' % (__pretty(name)))
        combined += data
    if isCSS:
        combined = __parse_scss(combined)
    console.confirm(" Generating %s" % out_file)
    writefile(out_file, combined)
def minify(in_file, out_file=None, verbose=False, strict=True, ecma3=False):
    """Minify a single JS or CSS file (chosen by extension).

    *out_file* defaults to overwriting *in_file* in place.  The
    compressor writes to a '.tmp' sibling first, which is then copied
    over the destination and deleted.  Fails hard when the compressed
    result is empty.
    """
    System.check_package('java')
    if not isinstance(in_file, str):
        raise Exception("Minify : single file only")
    if not out_file:
        out_file = in_file
    in_type = __get_ext(out_file)
    org_size = os.path.getsize(in_file)
    console.header('- Minifying %s (%.2f kB)' % (__pretty(in_file), org_size / 1024.0))
    tmp = out_file + '.tmp'
    if in_type == 'js':
        __minify_js(in_file, tmp, verbose, strict, ecma3)
    else:
        __minify_css(in_file, tmp, verbose)
    copyfile(tmp, out_file)
    os.remove(tmp)
    new_size = os.path.getsize(out_file)
    # An empty result means the compressor blew up.
    if not new_size:
        console.fail('Compression fail')
    console.info(' ~ Original: %.2f kB' % (org_size / 1024.0))
    console.info(' ~ Compressed: %.2f kB' % (new_size / 1024.0))
    # Avoid division by zero when the source file itself was empty.
    if not org_size:
        return
    console.confirm(' %s ( Reduction : %.1f%% )' % (out_file, (float(org_size - new_size) / org_size * 100)))
def jslint(files, fix=False, relax=False, fail=True):
    """Run gjslint over *files*, or fixjsstyle when *fix* is True.

    *relax* filters "Line too long" complaints out of the report.
    When *fail* is set and the linter reports errors, the build aborts.
    """
    System.check_package('java')
    files = FileList.check(files)
    options = [
        '--jslint_error=optional_type_marker',
        '--jslint_error=blank_lines_at_top_level',
        '--jslint_error=indentation',
    ]
    # This covers jsdoctoolkit 2
    tags = '--custom_jsdoc_tags=homepage,version,ignore,returns,example,function,requires,name,namespace,property,static,constant,default,location,copyright,memberOf,lends,fileOverview'
    # This covers jsdoc3 as well
    tags += ',module,abstract,file,kind,summary,description,event,exception,exports,fires,global,inner,instance,member,var,memberof,mixes,mixin,arg,argument,readonly,since,todo,public'
    options.append(tags)
    if fix == True:
        header = "Fix JS lint"
        tool = "fixjsstyle"
    else:
        header = "JS lint"
        tool = "gjslint"
    command = "%s %s %s " % (tool, ' '.join(options), ' '.join(files))
    if relax == True:
        command += ' | grep -v "Line too long"'
    result = sh(command, header="%s (%s files)" % (header, len(files)))
    error = re.search('Found\s([0-9]+)\serrors', result)
    if fail and error:
        console.fail(' :puke:\n' + error.group())
def jsdoc3(files, destination, template=None, fail=True):
    """Generate JSDoc 3 documentation for *files*.

    With the default "gristaupe" template the doc is emitted as JSON on
    stdout and written into *destination* by hand; any other template
    writes straight to *destination*.  Returns the raw jsdoc stdout.
    """
    System.check_package('java')
    files = FileList.check(files)
    redirect = ''
    if not template:
        template = "templates/gristaupe"
    gristaupe = template == "templates/gristaupe"
    if gristaupe:
        # The gristaupe template prints JSON on stdout; capture it ourselves.
        redirect = destination
        destination = "console"
    jsdoc = os.path.join(__get_datas_path(), 'jsdoc3')
    out = Std()
    output = sh('cd "%s"; java -classpath lib/js.jar org.mozilla.javascript.tools.shell.Main -debug -modules nodejs_modules -modules rhino_modules -modules . jsdoc.js\
 --destination "%s" --template "%s" %s' % (jsdoc, destination, template, '"' + '" "'.join(files) + '"'), header="Generating js doc v3", output=False, std=out)
    if fail and out.code:
        console.fail(out.err)
    if gristaupe:
        writefile(redirect, out.out)
        console.confirm(' JSON Doc generated in "%s"' % redirect)
    else:
        console.confirm(' JSON Doc generated in "%s"' % destination)
    return out.out
def jsdoc(files, folder, template=None, fail=True):
    """Generate jsdoc-toolkit (v2) documentation for *files* into *folder*.

    Bug fix: the default *template* string interpolated the local
    ``jsdoc`` path before it was assigned, raising UnboundLocalError
    whenever the caller omitted *template*.  The path is now computed
    first.
    """
    System.check_package('java')
    files = FileList.check(files)
    jsdoc = os.path.join(__get_datas_path(), 'jsdoc-toolkit')
    if not template:
        template = "%s/templates/gris_taupe" % jsdoc
    output = sh("java -jar %s/jsrun.jar %s/app/run.js -d=%s -t=%s -a %s" % (jsdoc, jsdoc, folder, template, ' '.join(files)), header = "Generating js doc", output = True)
    if fail and output:
        console.fail(output)
    console.confirm(' Doc generated in "%s"' % folder)
def patch(dir, patch, p=0):
    """Apply *patch* inside *dir* with ``patch -pN -N``.

    Per-line output from the patch tool is echoed with an appropriate
    severity.  Rejected hunks go to pukepatch.rej, which is removed
    afterwards (best effort).  Returns False when the patch target
    cannot be found.
    """
    output = sh(['cd %s' % dir, 'patch -p%s -N -r pukepatch.rej < %s' % (p, patch)], output=False)
    console.header(' - Patching %s with %s' % (dir, patch))
    for line in output.split('\n'):
        if 'can\'t find file to patch' in line:
            console.fail('Path : can\'t find file to patch')
            return False
        if line.startswith('patching'):
            console.confirm(' ' + line)
        elif 'ignored' in line:
            console.warn(' ' + line)
        else:
            console.info(' ' + line)
    try:
        remove(join(dir, 'pukepatch.rej'))
    except:
        # The reject file only exists when hunks failed; ignore its absence.
        pass
def prompt(message, default=''):
    """Show *message* and read one line of user input from the shell.

    Returns the stripped answer, or *default* when nothing was typed.
    """
    console.warn(message)
    answer = sh('read toto && echo $toto', header=None, output=False).strip()
    return answer or default
def deepcopy(file_list, folder, replace=None):
    """Copy *file_list* into *folder*, preserving paths relative to each
    entry's base path.

    When *replace* is given, its substitutions are applied to every
    copied file.  A cache entry keyed on the file-list signature records
    the last replacement signature, so a changed substitution set forces
    every file to be rewritten even when the sources are untouched.
    """
    file_list = FileList.check(file_list, True)
    updated = 0
    console.header("- copy to %s (%s files)" % (folder, len(file_list)))
    # Decide whether the replacement rules changed since the last run.
    force_refresh = False
    if replace:
        files_id = FileList.getSignature(file_list)
        sed_id = replace.getSignature()
        history = Cache.read("sed-%s" % files_id)
        sed_changed = True
        if history:
            last_sed = history.split('\n')[-1].split(':')[0]
            if last_sed == sed_id:
                sed_changed = False
            history += "\n"
        else:
            history = ""
        if sed_changed:
            force_refresh = True
            Cache.write("sed-%s" % (files_id), "%s%s:%s" % (history, sed_id, int(time.time())))
    for (src, basepath) in file_list:
        # Rebuild the destination path relative to the entry's base path.
        if basepath:
            dst = __pretty(src).replace(basepath, '').strip(os.sep)
            dst = os.path.join(folder, dst)
        else:
            dst = os.path.join(folder, os.path.basename(__pretty(src)))
        if not force_refresh:
            res = copyfile(src, dst, force=True)
        else:
            copyfile(src, dst)
            res = True
        if res and replace:
            fh = open(dst)
            data = __replace(fh.read(), replace)
            fh.close()
            writefile(dst, data)
        if res:
            console.info(' + %s' % __pretty(src))
            updated += 1
    console.confirm(" %s files updated" % (updated))
def stats(file_list, title=""):
    """Print file count, total line count and total size for *file_list*.

    Returns (file_count, lines, size_in_bytes), False for an empty list,
    or 0 when the `du` output cannot be parsed as an integer.
    """
    file_list = FileList.check(file_list)
    if not title:
        title = "Stats on files"
    console.header(" - %s :" % title)
    if len(file_list) == 0:
        console.info(" No files ")
        return False
    safelist = '"' + '" "'.join(file_list) + '"'
    # Sum the per-file kilobyte counts that du reports.
    size = __exec("du -k %s | cut -f1 |(tr '\n' '+'; echo 0) | bc" % safelist)
    try:
        size = int(size)
    except Exception:
        return 0
    size = size * 1024
    # The trailing line of `wc -l` holds the grand total.
    raw = __exec("wc -l %s | tail -1" % safelist)
    numbers = re.findall(r'\d+(?:\.\d+)?', raw)
    lines = 0
    if numbers:
        try:
            lines = int(numbers[0])
        except:
            lines = 0
    console.info(" ~ Files : %s" % len(file_list))
    console.info(" ~ Lines : %s (%s per file)" % (lines, (lines / len(file_list))))
    console.info(" ~ Size : %s (%s per file)" % (hsizeof(size), hsizeof((size / len(file_list)))))
    console.info("")
    return (len(file_list), lines, size)
def pack(file_list, output):
    """Archive every file in *file_list* into the compressed file *output*."""
    file_list = FileList.check(file_list)
    console.header("- Packing files to %s (%s files)" % (output, len(file_list)))
    archive = Compress.open(output, "w")
    for entry in file_list:
        console.info(' * %s' % __pretty(entry))
        archive.add(entry)
    archive.close()
    console.confirm(" %s packed" % output)
def unpack(pack_file, output, extract=None, verbose=True):
    """Extract *pack_file* into the *output* directory.

    When *extract* is None the whole archive is extracted in one fast
    pass (returns True).  Otherwise only members whose name contains
    *extract* are written; entries already on disk with a matching
    mtime are skipped.  Raises PukeError when *pack_file* is not a
    valid archive.

    Fix: removed an unreachable ``return`` statement that followed the
    PukeError raise.
    """
    console.header("- Unpacking %s to %s " % (pack_file, output))
    if not Compress.check(pack_file):
        console.error(" %s is not a valid pack" % output)
        raise PukeError('%s is not a valid pack' % output)
    comp = Compress.open(pack_file, "r")
    if not extract:
        comp.extractall(output)
        comp.close()
        console.confirm(" %s fast unpacked in %s" % (pack_file, output))
        return True
    count = 0
    for fname in comp:
        if extract and extract not in fname:
            continue
        (data, infos) = comp.extract(fname)
        # Directories come back as a None payload; nothing to write.
        if data == None:
            continue
        output_file = os.path.join(output, fname)
        # Skip entries already extracted with a matching timestamp.
        if exists(output_file) and os.path.getmtime(output_file) == infos.mtime:
            continue
        if verbose == True:
            console.info(' + %s' % __pretty(output_file))
        writefile(output_file, data, mtime=infos.mtime)
        if infos.mode:
            chmod(output_file, infos.mode)
        count += 1
    comp.close()
    console.confirm(" %s unpacked in %s (%s files)" % (pack_file, output, count))
### WIP
"""
def __jasmine(files):
System.check_package('java')
files = [files]
envjs = os.path.join(__get_datas_path(), 'envjs')
output = sh("cd %s && java -jar %s -opt -1 %s/envjs.bootstrap.js %s" % (envjs, JS_RUNNER, envjs, " ".join(files)) , header = None, output = False)
lines = output.split('\n')
hasFailed = False
for line in lines:
if "Envjs" in line:
continue
if "Loading" in line:
console.info(' * ' + line)
elif "FAILED" in line:
hasFailed = True
console.error(' ' + line + ' ')
elif "Suite" in line:
console.log(' • ' + line)
elif "Spec" in line or "Error" in line:
console.info(' ' + line)
elif "Passed:" in line:
console.log(' ' + '-'*40)
console.log(' '+ line)
elif "Failed:" in line:
console.log(' ' + line)
elif "Total" in line:
console.log(' ' + line)
console.log(' ' + '-'*40)
else:
console.info(' ' + line)
if not hasFailed:
console.confirm(' Tests success')
else:
console.fail(" Tests failure")
"""
def __replace(data, replace):
    """Apply every pattern -> replacement pair in *replace* to *data* via re.sub."""
    for pattern in replace.keys():
        data = re.sub(pattern, replace.get(pattern), data)
    return data
def __pretty(filename):
    """Return a readable name, resolving cache-file paths back to their origin."""
    if filename.startswith('.pukecache'):
        return Cache.getInfo(filename.split(os.sep)[-1])
    return filename
def __minify_css(in_file, out_file, verbose):
    """Compress a CSS file with the bundled compressor jar."""
    options = ['-o "%s"' % out_file, '--type css']
    if verbose:
        options.append('-v')
    os.system('java -jar "%s" %s %s' % (CSS_COMPRESSOR, ' '.join(options), in_file))
def __get_ext(filename):
    """Return the text after the last dot (the whole name when there is none)."""
    return filename.rsplit('.', 1)[-1]
def __parse_scss(payload):
    """Compile an SCSS payload down to plain CSS."""
    return Scss().compile(payload)
def __minify_js(in_file, out_file, verbose, strict=True, ecma3=False):
    """Compress a JS file with the Closure compiler jar.

    *ecma3* takes precedence over *strict* when both are set.
    """
    if ecma3 == True:
        language = 'ECMASCRIPT3'
    elif strict == True:
        language = 'ECMASCRIPT5_STRICT'
    else:
        language = 'ECMASCRIPT5'
    options = ['--js %s' % in_file,
               '--js_output_file %s' % out_file,
               '--warning_level QUIET',
               '--language_in %s' % language]
    os.system('java -jar "%s" %s' % (JS_COMPRESSOR, ' '.join(options)))
def __get_datas_path():
    """Absolute path of the bundled 'datas' directory next to this module."""
    return os.path.join(os.path.dirname(__file__), 'datas')
def __exec(command):
    """Run *command* through a shell and return its raw stdout.

    Bug fix: the previous implementation appended an extra newline to
    every line even though readlines() already keeps the terminators,
    which doubled every newline in the captured output.
    """
    p = os.popen(command)
    try:
        return p.read()
    finally:
        p.close()
def hsizeof(num):
    """Format a byte count as a human-readable string, e.g. ``'2.0KB'``.

    Returns 0 when *num* cannot be converted to an int.

    Bug fix: values of a petabyte or more used to fall off the end of
    the unit loop and silently return None; they are now reported in PB.
    """
    try:
        num = int(num)
    except Exception:
        return 0
    for unit in ['bytes', 'KB', 'MB', 'GB', 'TB']:
        if num < 1024.0:
            return "%3.1f%s" % (num, unit)
        num /= 1024.0
    return "%3.1f%s" % (num, 'PB')
| mit |
Frenzie/youtube-dl | youtube_dl/extractor/godtube.py | 177 | 1785 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
parse_duration,
parse_iso8601,
)
class GodTubeIE(InfoExtractor):
    """Extractor for godtube.com watch pages."""
    _VALID_URL = r'https?://(?:www\.)?godtube\.com/watch/\?v=(?P<id>[\da-zA-Z]+)'
    _TESTS = [
        {
            'url': 'https://www.godtube.com/watch/?v=0C0CNNNU',
            'md5': '77108c1e4ab58f48031101a1a2119789',
            'info_dict': {
                'id': '0C0CNNNU',
                'ext': 'mp4',
                'title': 'Woman at the well.',
                'duration': 159,
                'timestamp': 1205712000,
                'uploader': 'beverlybmusic',
                'upload_date': '20080317',
                'thumbnail': 're:^https?://.*\.jpg$',
            },
        },
    ]

    def _real_extract(self, url):
        video_id = re.match(self._VALID_URL, url).group('id')

        # The player config XML carries the stream URL plus most metadata.
        config = self._download_xml(
            'http://www.godtube.com/resource/mediaplayer/%s.xml' % video_id.lower(),
            video_id, 'Downloading player config XML')

        # A second XML document holds the human-readable title.
        media = self._download_xml(
            'http://www.godtube.com/media/xml/?v=%s' % video_id,
            video_id, 'Downloading media XML')

        return {
            'id': video_id,
            'url': config.find('file').text,
            'title': media.find('title').text,
            'thumbnail': config.find('image').text,
            'timestamp': parse_iso8601(config.find('date').text),
            'uploader': config.find('author').text,
            'duration': parse_duration(config.find('duration').text),
        }
| unlicense |
en0/Supybot_sasl | plugins/Dunno/test.py | 21 | 3402 | ###
# Copyright (c) 2003-2005, Daniel DiPaolo
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
from supybot.test import *
class DunnoTestCase(ChannelPluginTestCase):
    """Exercises the Dunno plugin: add/remove/search/get/change and the
    $command / $who substitutions in stored replies."""
    plugins = ('Dunno', 'User')

    def setUp(self):
        # Register a test user so dunno commands are authorized.
        PluginTestCase.setUp(self)
        self.prefix = 'foo!bar@baz'
        self.assertNotError('register tester moo', private=True)

    def testDunnoAdd(self):
        # An unknown command should trigger the stored dunno reply.
        self.assertNotError('dunno add moo')
        self.assertResponse('asdfagagfosdfk', 'moo')

    def testDunnoRemove(self):
        self.assertNotError('dunno add moo')
        self.assertNotError('dunno remove 1')

    def testDunnoSearch(self):
        self.assertNotError('dunno add foo')
        self.assertRegexp('dunno search moo', 'No.*dunnos.*found')
        # Test searching using just the getopts
        self.assertRegexp('dunno search --regexp m/foo/', r'1 found')
        self.assertNotError('dunno add moo')
        self.assertRegexp('dunno search moo', r'1 found')
        self.assertRegexp('dunno search m', r'1 found')
        # Test multiple adds
        for i in range(5):
            self.assertNotError('dunno add moo%s' % i)
        self.assertRegexp('dunno search moo', r'6 found')

    def testDunnoGet(self):
        self.assertNotError('dunno add moo')
        self.assertRegexp('dunno get 1', r'#1.*moo')
        # $who must be stored literally, not expanded at add time.
        self.assertNotError('dunno add $who')
        self.assertRegexp('dunno get 2', r'#2.*\$who')
        self.assertError('dunno get 3')
        self.assertError('dunno get a')

    def testDunnoChange(self):
        self.assertNotError('dunno add moo')
        self.assertNotError('dunno change 1 s/moo/bar/')
        self.assertRegexp('dunno get 1', '.*?: [\'"]bar[\'"]')

    def testDollarCommand(self):
        # $command expands to the command word that triggered the reply.
        self.assertNotError("dunno add I can't $command.")
        self.assertResponse('asdf', "I can't asdf.")
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
| bsd-3-clause |
kwlzn/pants | tests/python/pants_test/scm/test_git.py | 1 | 22315 | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
import subprocess
import types
import unittest
from contextlib import contextmanager
from textwrap import dedent
from unittest import skipIf
from pants.scm.git import Git
from pants.scm.scm import Scm
from pants.util.contextutil import environment_as, pushd, temporary_dir
from pants.util.dirutil import chmod_plus_x, safe_mkdir, safe_mkdtemp, safe_open, safe_rmtree, touch
from pants_test.testutils.git_util import MIN_REQUIRED_GIT_VERSION, git_version
@skipIf(git_version() < MIN_REQUIRED_GIT_VERSION,
'The GitTest requires git >= {}.'.format(MIN_REQUIRED_GIT_VERSION))
class GitTest(unittest.TestCase):
  @staticmethod
  def init_repo(remote_name, remote):
    # Initializes a fresh git repo in the current working directory with a
    # deterministic identity and wires up *remote* under *remote_name*.
    # TODO (peiyu) clean this up, use `git_util.initialize_repo`.
    subprocess.check_call(['git', 'init'])
    subprocess.check_call(['git', 'config', 'user.email', 'you@example.com'])
    subprocess.check_call(['git', 'config', 'user.name', 'Your Name'])
    subprocess.check_call(['git', 'remote', 'add', remote_name, remote])
def setUp(self):
self.origin = safe_mkdtemp()
with pushd(self.origin):
subprocess.check_call(['git', 'init', '--bare'])
self.gitdir = safe_mkdtemp()
self.worktree = safe_mkdtemp()
self.readme_file = os.path.join(self.worktree, 'README')
with environment_as(GIT_DIR=self.gitdir, GIT_WORK_TREE=self.worktree):
self.init_repo('depot', self.origin)
touch(self.readme_file)
subprocess.check_call(['git', 'add', 'README'])
safe_mkdir(os.path.join(self.worktree, 'dir'))
with open(os.path.join(self.worktree, 'dir', 'f'), 'w') as f:
f.write("file in subdir")
# Make some symlinks
os.symlink('f', os.path.join(self.worktree, 'dir', 'relative-symlink'))
os.symlink('no-such-file', os.path.join(self.worktree, 'dir', 'relative-nonexistent'))
os.symlink('dir/f', os.path.join(self.worktree, 'dir', 'not-absolute\u2764'))
os.symlink('../README', os.path.join(self.worktree, 'dir', 'relative-dotdot'))
os.symlink('dir', os.path.join(self.worktree, 'link-to-dir'))
os.symlink('README/f', os.path.join(self.worktree, 'not-a-dir'))
os.symlink('loop1', os.path.join(self.worktree, 'loop2'))
os.symlink('loop2', os.path.join(self.worktree, 'loop1'))
subprocess.check_call(['git', 'add', 'README', 'dir', 'loop1', 'loop2',
'link-to-dir', 'not-a-dir'])
subprocess.check_call(['git', 'commit', '-am', 'initial commit with decode -> \x81b'])
self.initial_rev = subprocess.check_output(['git', 'rev-parse', 'HEAD']).strip()
subprocess.check_call(['git', 'tag', 'first'])
subprocess.check_call(['git', 'push', '--tags', 'depot', 'master'])
subprocess.check_call(['git', 'branch', '--set-upstream', 'master', 'depot/master'])
with safe_open(self.readme_file, 'w') as readme:
readme.write('Hello World.\u2764'.encode('utf-8'))
subprocess.check_call(['git', 'commit', '-am', 'Update README.'])
self.current_rev = subprocess.check_output(['git', 'rev-parse', 'HEAD']).strip()
self.clone2 = safe_mkdtemp()
with pushd(self.clone2):
self.init_repo('origin', self.origin)
subprocess.check_call(['git', 'pull', '--tags', 'origin', 'master:master'])
with safe_open(os.path.realpath('README'), 'a') as readme:
readme.write('--')
subprocess.check_call(['git', 'commit', '-am', 'Update README 2.'])
subprocess.check_call(['git', 'push', '--tags', 'origin', 'master'])
self.git = Git(gitdir=self.gitdir, worktree=self.worktree)
@contextmanager
def mkremote(self, remote_name):
with temporary_dir() as remote_uri:
subprocess.check_call(['git', 'remote', 'add', remote_name, remote_uri])
try:
yield remote_uri
finally:
subprocess.check_call(['git', 'remote', 'remove', remote_name])
def tearDown(self):
safe_rmtree(self.origin)
safe_rmtree(self.gitdir)
safe_rmtree(self.worktree)
safe_rmtree(self.clone2)
def test_listdir(self):
reader = self.git.repo_reader(self.initial_rev)
for dirname in '.', './.':
results = reader.listdir(dirname)
self.assertEquals(['README',
'dir',
'link-to-dir',
'loop1',
'loop2',
'not-a-dir'],
sorted(results))
for dirname in 'dir', './dir':
results = reader.listdir('dir')
self.assertEquals(['f',
'not-absolute\u2764'.encode('utf-8'),
'relative-dotdot',
'relative-nonexistent',
'relative-symlink'],
sorted(results))
results = reader.listdir('link-to-dir')
self.assertEquals(['f',
'not-absolute\u2764'.encode('utf-8'),
'relative-dotdot',
'relative-nonexistent',
'relative-symlink'],
sorted(results))
with self.assertRaises(reader.MissingFileException):
with reader.listdir('bogus'):
pass
def test_lstat(self):
reader = self.git.repo_reader(self.initial_rev)
def lstat(*components):
return type(reader.lstat(os.path.join(*components)))
self.assertEquals(reader.Symlink, lstat('dir', 'relative-symlink'))
self.assertEquals(reader.Symlink, lstat('not-a-dir'))
self.assertEquals(reader.File, lstat('README'))
self.assertEquals(reader.Dir, lstat('dir'))
self.assertEquals(types.NoneType, lstat('nope-not-here'))
def test_readlink(self):
reader = self.git.repo_reader(self.initial_rev)
def readlink(*components):
return reader.readlink(os.path.join(*components))
self.assertEquals('dir/f', readlink('dir', 'relative-symlink'))
self.assertEquals(None, readlink('not-a-dir'))
self.assertEquals(None, readlink('README'))
self.assertEquals(None, readlink('dir'))
self.assertEquals(None, readlink('nope-not-here'))
def test_open(self):
reader = self.git.repo_reader(self.initial_rev)
with reader.open('README') as f:
self.assertEquals('', f.read())
with reader.open('dir/f') as f:
self.assertEquals('file in subdir', f.read())
with self.assertRaises(reader.MissingFileException):
with reader.open('no-such-file') as f:
self.assertEquals('', f.read())
with self.assertRaises(reader.MissingFileException):
with reader.open('dir/no-such-file') as f:
pass
with self.assertRaises(reader.IsDirException):
with reader.open('dir') as f:
self.assertEquals('', f.read())
current_reader = self.git.repo_reader(self.current_rev)
with current_reader.open('README') as f:
self.assertEquals('Hello World.\u2764'.encode('utf-8'), f.read())
with current_reader.open('link-to-dir/f') as f:
self.assertEquals('file in subdir', f.read())
with current_reader.open('dir/relative-symlink') as f:
self.assertEquals('file in subdir', f.read())
with self.assertRaises(current_reader.SymlinkLoopException):
with current_reader.open('loop1') as f:
pass
with self.assertRaises(current_reader.MissingFileException):
with current_reader.open('dir/relative-nonexistent') as f:
pass
with self.assertRaises(current_reader.NotADirException):
with current_reader.open('not-a-dir') as f:
pass
with self.assertRaises(current_reader.MissingFileException):
with current_reader.open('dir/not-absolute\u2764') as f:
pass
with self.assertRaises(current_reader.MissingFileException):
with current_reader.open('dir/relative-nonexistent') as f:
pass
with current_reader.open('dir/relative-dotdot') as f:
self.assertEquals('Hello World.\u2764'.encode('utf-8'), f.read())
def test_integration(self):
self.assertEqual(set(), self.git.changed_files())
self.assertEqual({'README'}, self.git.changed_files(from_commit='HEAD^'))
tip_sha = self.git.commit_id
self.assertTrue(tip_sha)
self.assertTrue(tip_sha in self.git.changelog())
merge_base = self.git.merge_base()
self.assertTrue(merge_base)
self.assertTrue(merge_base in self.git.changelog())
with self.assertRaises(Scm.LocalException):
self.git.server_url
with environment_as(GIT_DIR=self.gitdir, GIT_WORK_TREE=self.worktree):
with self.mkremote('origin') as origin_uri:
# We shouldn't be fooled by remotes with origin in their name.
with self.mkremote('temp_origin'):
origin_url = self.git.server_url
self.assertEqual(origin_url, origin_uri)
self.assertTrue(self.git.tag_name.startswith('first-'), msg='un-annotated tags should be found')
self.assertEqual('master', self.git.branch_name)
def edit_readme():
with open(self.readme_file, 'a') as fp:
fp.write('More data.')
edit_readme()
with open(os.path.join(self.worktree, 'INSTALL'), 'w') as untracked:
untracked.write('make install')
self.assertEqual({'README'}, self.git.changed_files())
self.assertEqual({'README', 'INSTALL'}, self.git.changed_files(include_untracked=True))
# confirm that files outside of a given relative_to path are ignored
self.assertEqual(set(), self.git.changed_files(relative_to='non-existent'))
self.git.commit('API Changes.')
try:
# These changes should be rejected because our branch point from origin is 1 commit behind
# the changes pushed there in clone 2.
self.git.push()
except Scm.RemoteException:
with environment_as(GIT_DIR=self.gitdir, GIT_WORK_TREE=self.worktree):
subprocess.check_call(['git', 'reset', '--hard', 'depot/master'])
self.git.refresh()
edit_readme()
self.git.commit('''API '"' " Changes.''')
self.git.push()
# HEAD is merged into master
self.assertEqual(self.git.commit_date(self.git.merge_base()), self.git.commit_date('HEAD'))
self.assertEqual(self.git.commit_date('HEAD'), self.git.commit_date('HEAD'))
self.git.tag('second', message='''Tagged ' " Changes''')
with temporary_dir() as clone:
with pushd(clone):
self.init_repo('origin', self.origin)
subprocess.check_call(['git', 'pull', '--tags', 'origin', 'master:master'])
with open(os.path.realpath('README')) as readme:
self.assertEqual('--More data.', readme.read())
git = Git()
# Check that we can pick up committed and uncommitted changes.
with safe_open(os.path.realpath('CHANGES'), 'w') as changes:
changes.write('none')
subprocess.check_call(['git', 'add', 'CHANGES'])
self.assertEqual({'README', 'CHANGES'}, git.changed_files(from_commit='first'))
self.assertEqual('master', git.branch_name)
self.assertEqual('second', git.tag_name, msg='annotated tags should be found')
def test_detect_worktree(self):
with temporary_dir() as _clone:
with pushd(_clone):
clone = os.path.realpath(_clone)
self.init_repo('origin', self.origin)
subprocess.check_call(['git', 'pull', '--tags', 'origin', 'master:master'])
def worktree_relative_to(cwd, expected):
# Given a cwd relative to the worktree, tests that the worktree is detected as 'expected'.
orig_cwd = os.getcwd()
try:
abs_cwd = os.path.join(clone, cwd)
if not os.path.isdir(abs_cwd):
os.mkdir(abs_cwd)
os.chdir(abs_cwd)
actual = Git.detect_worktree()
self.assertEqual(expected, actual)
finally:
os.chdir(orig_cwd)
worktree_relative_to('..', None)
worktree_relative_to('.', clone)
worktree_relative_to('is', clone)
worktree_relative_to('is/a', clone)
worktree_relative_to('is/a/dir', clone)
def test_detect_worktree_no_cwd(self):
with temporary_dir() as _clone:
with pushd(_clone):
clone = os.path.realpath(_clone)
self.init_repo('origin', self.origin)
subprocess.check_call(['git', 'pull', '--tags', 'origin', 'master:master'])
def worktree_relative_to(some_dir, expected):
# Given a directory relative to the worktree, tests that the worktree is detected as 'expected'.
subdir = os.path.join(clone, some_dir)
if not os.path.isdir(subdir):
os.mkdir(subdir)
actual = Git.detect_worktree(subdir=subdir)
self.assertEqual(expected, actual)
worktree_relative_to('..', None)
worktree_relative_to('.', clone)
worktree_relative_to('is', clone)
worktree_relative_to('is/a', clone)
worktree_relative_to('is/a/dir', clone)
@property
def test_changes_in(self):
"""Test finding changes in a diffspecs
To some extent this is just testing functionality of git not pants, since all pants says
is that it will pass the diffspec to git diff-tree, but this should serve to at least document
the functionality we belive works.
"""
with environment_as(GIT_DIR=self.gitdir, GIT_WORK_TREE=self.worktree):
def commit_contents_to_files(content, *files):
for path in files:
with safe_open(os.path.join(self.worktree, path), 'w') as fp:
fp.write(content)
subprocess.check_call(['git', 'add', '.'])
subprocess.check_call(['git', 'commit', '-m', 'change {}'.format(files)])
return subprocess.check_output(['git', 'rev-parse', 'HEAD']).strip()
# We can get changes in HEAD or by SHA
c1 = commit_contents_to_files('1', 'foo')
self.assertEqual({'foo'}, self.git.changes_in('HEAD'))
self.assertEqual({'foo'}, self.git.changes_in(c1))
# Changes in new HEAD, from old-to-new HEAD, in old HEAD, or from old-old-head to new.
commit_contents_to_files('2', 'bar')
self.assertEqual({'bar'}, self.git.changes_in('HEAD'))
self.assertEqual({'bar'}, self.git.changes_in('HEAD^..HEAD'))
self.assertEqual({'foo'}, self.git.changes_in('HEAD^'))
self.assertEqual({'foo'}, self.git.changes_in('HEAD~1'))
self.assertEqual({'foo', 'bar'}, self.git.changes_in('HEAD^^..HEAD'))
# New commit doesn't change results-by-sha
self.assertEqual({'foo'}, self.git.changes_in(c1))
# Files changed in multiple diffs within a range
c3 = commit_contents_to_files('3', 'foo')
self.assertEqual({'foo', 'bar'}, self.git.changes_in('{}..{}'.format(c1, c3)))
# Changes in a tag
subprocess.check_call(['git', 'tag', 'v1'])
self.assertEqual({'foo'}, self.git.changes_in('v1'))
# Introduce a new filename
c4 = commit_contents_to_files('4', 'baz')
self.assertEqual({'baz'}, self.git.changes_in('HEAD'))
# Tag-to-sha
self.assertEqual({'baz'}, self.git.changes_in('{}..{}'.format('v1', c4)))
# We can get multiple changes from one ref
commit_contents_to_files('5', 'foo', 'bar')
self.assertEqual({'foo', 'bar'}, self.git.changes_in('HEAD'))
self.assertEqual({'foo', 'bar', 'baz'}, self.git.changes_in('HEAD~4..HEAD'))
self.assertEqual({'foo', 'bar', 'baz'}, self.git.changes_in('{}..HEAD'.format(c1)))
self.assertEqual({'foo', 'bar', 'baz'}, self.git.changes_in('{}..{}'.format(c1, c4)))
def test_changelog_utf8(self):
with environment_as(GIT_DIR=self.gitdir, GIT_WORK_TREE=self.worktree):
def commit_contents_to_files(message, encoding, content, *files):
for path in files:
with safe_open(os.path.join(self.worktree, path), 'w') as fp:
fp.write(content)
subprocess.check_call(['git', 'add', '.'])
subprocess.check_call(['git', 'config', '--local', '--add', 'i18n.commitencoding',
encoding])
try:
subprocess.check_call(['git', 'commit', '-m', message.encode(encoding)])
finally:
subprocess.check_call(['git', 'config', '--local', '--unset-all', 'i18n.commitencoding'])
return subprocess.check_output(['git', 'rev-parse', 'HEAD']).strip()
# Mix in a non-UTF-8 author to all commits to exercise the corner described here does not
# adversely impact the ability to render the changelog (even if rendering for certain
# characters is incorrect): http://comments.gmane.org/gmane.comp.version-control.git/262685
# NB: This method of override requires we include `user.name` and `user.email` even though we
# only use `user.name` to exercise non-UTF-8. Without `user.email`, it will be unset and
# commits can then fail on machines without a proper hostname setup for git to fall back to
# when concocting a last-ditch `user.email`.
non_utf8_config = dedent("""
[user]
name = Noralf Trønnes
email = noralf@example.com
""").encode('iso-8859-1')
with open(os.path.join(self.gitdir, 'config'), 'wb') as fp:
fp.write(non_utf8_config)
# Note the copyright symbol is used as the non-ascii character in the next 3 commits
commit_contents_to_files('START1 © END', 'iso-8859-1', '1', 'foo')
commit_contents_to_files('START2 © END', 'latin1', '1', 'bar')
commit_contents_to_files('START3 © END', 'utf-8', '1', 'baz')
commit_contents_to_files('START4 ~ END', 'us-ascii', '1', 'bip')
# Prove our non-utf-8 encodings were stored in the commit metadata.
log = subprocess.check_output(['git', 'log', '--format=%e'])
self.assertEqual(['us-ascii', 'latin1', 'iso-8859-1'], filter(None, log.strip().splitlines()))
# And show that the git log successfully transcodes all the commits none-the-less to utf-8
changelog = self.git.changelog()
# The ascii commit should combine with the iso-8859-1 author an fail to transcode the
# o-with-stroke character, and so it should be replaced with the utf-8 replacement character
# \uFFF or �.
self.assertIn('Noralf Tr�nnes', changelog)
self.assertIn('Noralf Tr\uFFFDnnes', changelog)
# For the other 3 commits, each of iso-8859-1, latin1 and utf-8 have an encoding for the
# o-with-stroke character - \u00F8 or ø - so we should find it;
self.assertIn('Noralf Trønnes', changelog)
self.assertIn('Noralf Tr\u00F8nnes', changelog)
self.assertIn('START1 © END', changelog)
self.assertIn('START2 © END', changelog)
self.assertIn('START3 © END', changelog)
self.assertIn('START4 ~ END', changelog)
def test_refresh_with_conflict(self):
    """A refresh over conflicting local changes must raise; leave_clean controls cleanup."""
    with environment_as(GIT_DIR=self.gitdir, GIT_WORK_TREE=self.worktree):
        self.assertEqual(set(), self.git.changed_files())
        self.assertEqual({'README'}, self.git.changed_files(from_commit='HEAD^'))
        self.assertEqual({'README'}, self.git.changes_in('HEAD'))

        # Create a change on this branch that is incompatible with the change to master.
        with open(self.readme_file, 'w') as readme:
            readme.write('Conflict')

        subprocess.check_call(['git', 'commit', '-am', 'Conflict'])

        # Use assertEqual throughout: assertEquals is a deprecated alias and the
        # assertions above already use the canonical spelling.
        self.assertEqual(set(), self.git.changed_files(include_untracked=True, from_commit='HEAD'))
        with self.assertRaises(Scm.LocalException):
            self.git.refresh(leave_clean=False)
        # The repo is dirty.
        self.assertEqual({'README'},
                         self.git.changed_files(include_untracked=True, from_commit='HEAD'))

        with environment_as(GIT_DIR=self.gitdir, GIT_WORK_TREE=self.worktree):
            subprocess.check_call(['git', 'reset', '--hard', 'HEAD'])

        # Now try with leave_clean.
        with self.assertRaises(Scm.LocalException):
            self.git.refresh(leave_clean=True)
        # The repo is clean.
        self.assertEqual(set(), self.git.changed_files(include_untracked=True, from_commit='HEAD'))
def test_commit_with_new_untracked_file_adds_file(self):
    """Committing after an explicit add should sweep up a previously untracked file."""
    untracked_path = os.path.join(self.worktree, 'untracked_file')
    touch(untracked_path)
    # Visible only when untracked files are included.
    self.assertEqual({'untracked_file'}, self.git.changed_files(include_untracked=True))
    self.git.add(untracked_path)
    # Once staged, the file shows up as a tracked change.
    self.assertEqual({'untracked_file'}, self.git.changed_files())
    self.git.commit('API Changes.')
    # After the commit the worktree is clean again.
    self.assertEqual(set(), self.git.changed_files(include_untracked=True))
class DetectWorktreeFakeGitTest(unittest.TestCase):
    """Exercises Git.detect_worktree() against fake `git` binaries placed on an otherwise empty PATH."""

    @contextmanager
    def empty_path(self):
        # PATH contains only an empty temp dir, so no real `git` can be found.
        with temporary_dir() as path:
            with environment_as(PATH=path):
                yield path

    @contextmanager
    def unexecutable_git(self):
        # A `git` file exists on PATH but lacks the executable bit.
        with self.empty_path() as path:
            git = os.path.join(path, 'git')
            touch(git)
            yield git

    @contextmanager
    def executable_git(self):
        # An executable (but initially empty) `git` on PATH.
        with self.unexecutable_git() as git:
            chmod_plus_x(git)
            yield git

    def test_detect_worktree_no_git(self):
        # No git binary at all: detection reports no worktree.
        with self.empty_path():
            self.assertIsNone(Git.detect_worktree())

    def test_detect_worktree_unexectuable_git(self):
        # Present but non-executable git must be ignored.
        with self.unexecutable_git() as git:
            self.assertIsNone(Git.detect_worktree())
            self.assertIsNone(Git.detect_worktree(binary=git))

    def test_detect_worktree_invalid_executable_git(self):
        # Executable but empty file: running it cannot yield a worktree.
        with self.executable_git() as git:
            self.assertIsNone(Git.detect_worktree())
            self.assertIsNone(Git.detect_worktree(binary=git))

    def test_detect_worktree_failing_git(self):
        # A git that exits non-zero must be treated as "no worktree".
        with self.executable_git() as git:
            with open(git, 'w') as fp:
                fp.write('#!/bin/sh\n')
                fp.write('exit 1')
            self.assertIsNone(Git.detect_worktree())
            self.assertIsNone(Git.detect_worktree(git))

    def test_detect_worktree_working_git(self):
        # A git that prints a path should have that path reported as the worktree.
        expected_worktree_dir = '/a/fake/worktree/dir'
        with self.executable_git() as git:
            with open(git, 'w') as fp:
                fp.write('#!/bin/sh\n')
                fp.write('echo ' + expected_worktree_dir)
            self.assertEqual(expected_worktree_dir, Git.detect_worktree())
            self.assertEqual(expected_worktree_dir, Git.detect_worktree(binary=git))
| apache-2.0 |
mtesseracttech/CustomEngine | lib/bullet/src/examples/pybullet/vr_kuka_setup.py | 1 | 4440 | import pybullet as p
# Attach to a physics server that is already running (e.g. the VR demo app)
# via shared memory, then rebuild the whole scene from scratch.
p.connect(p.SHARED_MEMORY)
p.resetSimulation()

# Static environment: ground plane and room mesh.
objects = [p.loadURDF("plane.urdf", 0.000000,0.000000,0.000000,0.000000,0.000000,0.000000,1.000000)]
objects = [p.loadURDF("samurai.urdf", 0.000000,0.000000,0.000000,0.000000,0.000000,0.000000,1.000000)]

# Free-floating PR2 gripper, held in place by a fixed constraint so it can be
# moved around (e.g. by a VR controller driving pr2_cid).
objects = [p.loadURDF("pr2_gripper.urdf", 0.500000,0.300006,0.700000,-0.000000,-0.000000,-0.000031,1.000000)]
pr2_gripper = objects[0]
print ("pr2_gripper=")
print (pr2_gripper)

# Open the gripper fingers to a start pose.
jointPositions=[ 0.550569, 0.000000, 0.549657, 0.000000 ]
for jointIndex in range (p.getNumJoints(pr2_gripper)):
    p.resetJointState(pr2_gripper,jointIndex,jointPositions[jointIndex])

pr2_cid = p.createConstraint(pr2_gripper,-1,-1,-1,p.JOINT_FIXED,[0,0,0],[0.2,0,0],[0.500000,0.300006,0.700000])
print ("pr2_cid")
print (pr2_cid)

# KUKA iiwa arm with VR-friendly joint limits, posed into a start configuration.
objects = [p.loadURDF("kuka_iiwa/model_vr_limits.urdf", 1.400000,-0.200000,0.600000,0.000000,0.000000,0.000000,1.000000)]
kuka = objects[0]
jointPositions=[ -0.000000, -0.000000, 0.000000, 1.570793, 0.000000, -1.036725, 0.000001 ]
for jointIndex in range (p.getNumJoints(kuka)):
    p.resetJointState(kuka,jointIndex,jointPositions[jointIndex])

# A vertical stack of lego bricks for grasping practice.
objects = [p.loadURDF("lego/lego.urdf", 1.000000,-0.200000,0.700000,0.000000,0.000000,0.000000,1.000000)]
objects = [p.loadURDF("lego/lego.urdf", 1.000000,-0.200000,0.800000,0.000000,0.000000,0.000000,1.000000)]
objects = [p.loadURDF("lego/lego.urdf", 1.000000,-0.200000,0.900000,0.000000,0.000000,0.000000,1.000000)]

# WSG50 gripper, posed and then rigidly attached to the KUKA's last link (link 6).
objects = p.loadSDF("gripper/wsg50_one_motor_gripper_new_free_base.sdf")
kuka_gripper = objects[0]
print ("kuka gripper=")
print(kuka_gripper)

p.resetBasePositionAndOrientation(kuka_gripper,[0.923103,-0.200000,1.250036],[-0.000000,0.964531,-0.000002,-0.263970])
jointPositions=[ 0.000000, -0.011130, -0.206421, 0.205143, -0.009999, 0.000000, -0.010055, 0.000000 ]
for jointIndex in range (p.getNumJoints(kuka_gripper)):
    p.resetJointState(kuka_gripper,jointIndex,jointPositions[jointIndex])

kuka_cid = p.createConstraint(kuka, 6, kuka_gripper,0,p.JOINT_FIXED, [0,0,0], [0,0,0.05],[0,0,0])

# A row of jenga blocks, a work table and assorted small props.
objects = [p.loadURDF("jenga/jenga.urdf", 1.300000,-0.700000,0.750000,0.000000,0.707107,0.000000,0.707107)]
objects = [p.loadURDF("jenga/jenga.urdf", 1.200000,-0.700000,0.750000,0.000000,0.707107,0.000000,0.707107)]
objects = [p.loadURDF("jenga/jenga.urdf", 1.100000,-0.700000,0.750000,0.000000,0.707107,0.000000,0.707107)]
objects = [p.loadURDF("jenga/jenga.urdf", 1.000000,-0.700000,0.750000,0.000000,0.707107,0.000000,0.707107)]
objects = [p.loadURDF("jenga/jenga.urdf", 0.900000,-0.700000,0.750000,0.000000,0.707107,0.000000,0.707107)]
objects = [p.loadURDF("jenga/jenga.urdf", 0.800000,-0.700000,0.750000,0.000000,0.707107,0.000000,0.707107)]
objects = [p.loadURDF("table/table.urdf", 1.000000,-0.200000,0.000000,0.000000,0.000000,0.707107,0.707107)]
objects = [p.loadURDF("teddy_vhacd.urdf", 1.050000,-0.500000,0.700000,0.000000,0.000000,0.707107,0.707107)]
objects = [p.loadURDF("cube_small.urdf", 0.950000,-0.100000,0.700000,0.000000,0.000000,0.707107,0.707107)]
objects = [p.loadURDF("sphere_small.urdf", 0.850000,-0.400000,0.700000,0.000000,0.000000,0.707107,0.707107)]
objects = [p.loadURDF("duck_vhacd.urdf", 0.850000,-0.400000,0.900000,0.000000,0.000000,0.707107,0.707107)]

# Kiva shelf plus more props placed on and around it.
objects = p.loadSDF("kiva_shelf/model.sdf")
ob = objects[0]
p.resetBasePositionAndOrientation(ob,[0.000000,1.000000,1.204500],[0.000000,0.000000,0.000000,1.000000])
objects = [p.loadURDF("teddy_vhacd.urdf", -0.100000,0.600000,0.850000,0.000000,0.000000,0.000000,1.000000)]
objects = [p.loadURDF("sphere_small.urdf", -0.100000,0.955006,1.169706,0.633232,-0.000000,-0.000000,0.773962)]
objects = [p.loadURDF("cube_small.urdf", 0.300000,0.600000,0.850000,0.000000,0.000000,0.000000,1.000000)]

objects = [p.loadURDF("table_square/table_square.urdf", -1.000000,0.000000,0.000000,0.000000,0.000000,0.000000,1.000000)]
ob = objects[0]
jointPositions=[ 0.000000 ]
for jointIndex in range (p.getNumJoints(ob)):
    p.resetJointState(ob,jointIndex,jointPositions[jointIndex])

# Husky mobile robot parked away from the work area, wheels zeroed.
objects = [p.loadURDF("husky/husky.urdf", 2.000000,-5.000000,1.000000,0.000000,0.000000,0.000000,1.000000)]
ob = objects[0]
jointPositions=[ 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000 ]
for jointIndex in range (p.getNumJoints(ob)):
    p.resetJointState(ob,jointIndex,jointPositions[jointIndex])

# NOTE(review): the first setGravity call is immediately overridden by the
# second; only gravity (0, 0, -10) is in effect.
p.setGravity(0.000000,0.000000,0.000000)
p.setGravity(0,0,-10)
p.stepSimulation()
p.disconnect()
| apache-2.0 |
elkingtonmcb/django | django/core/handlers/wsgi.py | 339 | 9181 | from __future__ import unicode_literals
import cgi
import codecs
import logging
import sys
from io import BytesIO
from threading import Lock
from django import http
from django.conf import settings
from django.core import signals
from django.core.handlers import base
from django.core.urlresolvers import set_script_prefix
from django.utils import six
from django.utils.encoding import force_str, force_text
from django.utils.functional import cached_property
logger = logging.getLogger('django.request')
# encode() and decode() expect the charset to be a native string.
ISO_8859_1, UTF_8 = str('iso-8859-1'), str('utf-8')
class LimitedStream(object):
    """
    Wrap a stream so that no more than ``limit`` bytes can ever be read
    from it, keeping a small look-ahead buffer for ``readline()``.
    """

    def __init__(self, stream, limit, buf_size=64 * 1024 * 1024):
        self.stream = stream
        self.remaining = limit
        self.buffer = b''
        self.buf_size = buf_size

    def _read_limited(self, size=None):
        # Clamp the request to the number of bytes still allowed.
        if size is None or size > self.remaining:
            size = self.remaining
        if size == 0:
            return b''
        chunk = self.stream.read(size)
        self.remaining -= len(chunk)
        return chunk

    def read(self, size=None):
        """Read up to ``size`` bytes (everything remaining when size is None)."""
        if size is None:
            data, self.buffer = self.buffer + self._read_limited(), b''
        elif size < len(self.buffer):
            # Serve entirely from the buffer.
            data, self.buffer = self.buffer[:size], self.buffer[size:]
        else:
            # Buffer is exhausted first, then the underlying stream.
            wanted = size - len(self.buffer)
            data, self.buffer = self.buffer + self._read_limited(wanted), b''
        return data

    def readline(self, size=None):
        """Read one line (up to ``size`` bytes when given), honoring the limit."""
        # Pull chunks until a newline is buffered or the size cap is reached.
        while b'\n' not in self.buffer and (size is None or len(self.buffer) < size):
            chunk = self._read_limited(size - len(self.buffer)) if size else self._read_limited()
            if not chunk:
                break
            self.buffer += chunk
        sio = BytesIO(self.buffer)
        line = sio.readline(size) if size else sio.readline()
        # Whatever followed the newline stays buffered for the next call.
        self.buffer = sio.read()
        return line
class WSGIRequest(http.HttpRequest):
    """HttpRequest subclass populated lazily from a WSGI ``environ`` mapping."""

    def __init__(self, environ):
        script_name = get_script_name(environ)
        path_info = get_path_info(environ)
        if not path_info:
            # Sometimes PATH_INFO exists, but is empty (e.g. accessing
            # the SCRIPT_NAME URL without a trailing slash). We really need to
            # operate as if they'd requested '/'. Not amazingly nice to force
            # the path like this, but should be harmless.
            path_info = '/'
        self.environ = environ
        self.path_info = path_info
        # be careful to only replace the first slash in the path because of
        # http://test/something and http://test//something being different as
        # stated in http://www.ietf.org/rfc/rfc2396.txt
        self.path = '%s/%s' % (script_name.rstrip('/'),
                               path_info.replace('/', '', 1))
        self.META = environ
        self.META['PATH_INFO'] = path_info
        self.META['SCRIPT_NAME'] = script_name
        self.method = environ['REQUEST_METHOD'].upper()
        # Honor an explicit request charset, but only if Python knows the codec.
        _, content_params = cgi.parse_header(environ.get('CONTENT_TYPE', ''))
        if 'charset' in content_params:
            try:
                codecs.lookup(content_params['charset'])
            except LookupError:
                pass
            else:
                self.encoding = content_params['charset']
        self._post_parse_error = False
        try:
            content_length = int(environ.get('CONTENT_LENGTH'))
        except (ValueError, TypeError):
            # Missing or malformed CONTENT_LENGTH: treat the body as empty.
            content_length = 0
        # Never read past the declared body length, even if the socket has more.
        self._stream = LimitedStream(self.environ['wsgi.input'], content_length)
        self._read_started = False
        self.resolver_match = None

    def _get_scheme(self):
        # 'http' or 'https', as reported by the WSGI server.
        return self.environ.get('wsgi.url_scheme')

    @cached_property
    def GET(self):
        # The WSGI spec says 'QUERY_STRING' may be absent.
        raw_query_string = get_bytes_from_wsgi(self.environ, 'QUERY_STRING', '')
        return http.QueryDict(raw_query_string, encoding=self._encoding)

    def _get_post(self):
        # Parse the request body on first access only.
        if not hasattr(self, '_post'):
            self._load_post_and_files()
        return self._post

    def _set_post(self, post):
        self._post = post

    @cached_property
    def COOKIES(self):
        raw_cookie = get_str_from_wsgi(self.environ, 'HTTP_COOKIE', '')
        return http.parse_cookie(raw_cookie)

    def _get_files(self):
        # Files are parsed together with POST data, on demand.
        if not hasattr(self, '_files'):
            self._load_post_and_files()
        return self._files

    POST = property(_get_post, _set_post)
    FILES = property(_get_files)
class WSGIHandler(base.BaseHandler):
    """WSGI application entry point: turns a WSGI environ into a Django response."""

    # Guards the one-time, lazy middleware initialization across threads.
    initLock = Lock()
    request_class = WSGIRequest

    def __call__(self, environ, start_response):
        # Set up middleware if needed. We couldn't do this earlier, because
        # settings weren't available.
        if self._request_middleware is None:
            with self.initLock:
                try:
                    # Check that middleware is still uninitialized
                    # (another thread may have won the race to the lock).
                    if self._request_middleware is None:
                        self.load_middleware()
                except:
                    # Unload whatever middleware we got, so the next request
                    # retries initialization from scratch, then re-raise.
                    self._request_middleware = None
                    raise
        set_script_prefix(get_script_name(environ))
        signals.request_started.send(sender=self.__class__, environ=environ)
        try:
            request = self.request_class(environ)
        except UnicodeDecodeError:
            # Malformed bytes in the request metadata: answer 400 instead of crashing.
            logger.warning('Bad Request (UnicodeDecodeError)',
                exc_info=sys.exc_info(),
                extra={
                    'status_code': 400,
                }
            )
            response = http.HttpResponseBadRequest()
        else:
            response = self.get_response(request)

        response._handler_class = self.__class__

        status = '%s %s' % (response.status_code, response.reason_phrase)
        response_headers = [(str(k), str(v)) for k, v in response.items()]
        for c in response.cookies.values():
            response_headers.append((str('Set-Cookie'), str(c.output(header=''))))
        start_response(force_str(status), response_headers)
        # Let the server stream file responses with its own (sendfile-style) wrapper.
        if getattr(response, 'file_to_stream', None) is not None and environ.get('wsgi.file_wrapper'):
            response = environ['wsgi.file_wrapper'](response.file_to_stream)
        return response
def get_path_info(environ):
    """Return the HTTP request's PATH_INFO as a unicode string."""
    # WSGI hands PATH_INFO over latin-1-decoded; recover the raw bytes and
    # decode them as UTF-8.
    raw_path = get_bytes_from_wsgi(environ, 'PATH_INFO', '/')
    return raw_path.decode(UTF_8)
def get_script_name(environ):
    """
    Returns the equivalent of the HTTP request's SCRIPT_NAME environment
    variable. If Apache mod_rewrite has been used, returns what would have been
    the script name prior to any rewriting (so it's the script name as seen
    from the client's perspective), unless the FORCE_SCRIPT_NAME setting is
    set (to anything).
    """
    if settings.FORCE_SCRIPT_NAME is not None:
        return force_text(settings.FORCE_SCRIPT_NAME)

    # If Apache's mod_rewrite had a whack at the URL, Apache set either
    # SCRIPT_URL or REDIRECT_URL to the full resource URL before applying any
    # rewrites. Unfortunately not every Web server (lighttpd!) passes this
    # information through all the time, so FORCE_SCRIPT_NAME, above, is still
    # needed.
    script_url = get_bytes_from_wsgi(environ, 'SCRIPT_URL', '')
    if not script_url:
        script_url = get_bytes_from_wsgi(environ, 'REDIRECT_URL', '')

    if script_url:
        # Strip the trailing PATH_INFO to recover the original script name.
        path_info = get_bytes_from_wsgi(environ, 'PATH_INFO', '')
        script_name = script_url[:-len(path_info)] if path_info else script_url
    else:
        script_name = get_bytes_from_wsgi(environ, 'SCRIPT_NAME', '')

    return script_name.decode(UTF_8)
def get_bytes_from_wsgi(environ, key, default):
    """
    Get a value from the WSGI environ dictionary as bytes.

    key and default should be str objects. Under Python 2 they may also be
    unicode objects provided they only contain ASCII characters.
    """
    value = environ.get(str(key), str(default))
    if not six.PY3:
        # Python 2: environ values are already bytestrings.
        return value
    # Under Python 3, non-ASCII values in the WSGI environ are arbitrarily
    # decoded with ISO-8859-1. This is wrong for Django websites where UTF-8
    # is the default. Re-encode to recover the original bytestring.
    return value.encode(ISO_8859_1)
def get_str_from_wsgi(environ, key, default):
    """
    Get a value from the WSGI environ dictionary as str.

    key and default should be str objects. Under Python 2 they may also be
    unicode objects provided they only contain ASCII characters.
    """
    raw = get_bytes_from_wsgi(environ, key, default)
    if six.PY3:
        # Undecodable bytes become U+FFFD rather than raising.
        return raw.decode(UTF_8, errors='replace')
    return raw
| bsd-3-clause |
jjmiranda/edx-platform | common/djangoapps/geoinfo/middleware.py | 194 | 1421 | """
Middleware to identify the country of origin of page requests.
Middleware adds `country_code` in session.
Usage:
# To enable the Geoinfo feature on a per-view basis, use:
decorator `django.utils.decorators.decorator_from_middleware(middleware_class)`
"""
import logging
import pygeoip
from ipware.ip import get_real_ip
from django.conf import settings
log = logging.getLogger(__name__)
class CountryMiddleware(object):
    """
    Identify the country by IP address.

    The resolved two-letter country code and the IP it was derived from are
    cached in the session, so the GeoIP lookup only runs when the client's
    address changes.
    """

    def process_request(self, request):
        """
        Identify the country by IP address.

        Store country code in session.
        """
        new_ip_address = get_real_ip(request)
        old_ip_address = request.session.get('ip_address', None)

        if not new_ip_address and old_ip_address:
            # The client IP is no longer known: drop the stale session data.
            # pop() with a default (rather than del) avoids a KeyError if the
            # two keys ever get out of sync.
            request.session.pop('ip_address', None)
            request.session.pop('country_code', None)
        elif new_ip_address != old_ip_address:
            # A ':' indicates an IPv6 address, which needs the IPv6 database.
            if new_ip_address.find(':') >= 0:
                country_code = pygeoip.GeoIP(settings.GEOIPV6_PATH).country_code_by_addr(new_ip_address)
            else:
                country_code = pygeoip.GeoIP(settings.GEOIP_PATH).country_code_by_addr(new_ip_address)
            request.session['country_code'] = country_code
            request.session['ip_address'] = new_ip_address
            log.debug('Country code for IP: %s is set to %s', new_ip_address, country_code)
| agpl-3.0 |
elenaoat/AutobahnPython | examples/websocket/multiproto/server2.py | 19 | 2348 | ###############################################################################
##
## Copyright 2013 Tavendo GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
import sys
from twisted.internet import reactor
from twisted.python import log
from twisted.web.server import Site
from twisted.web.static import Data
from autobahn.websocket import WebSocketServerFactory, \
WebSocketServerProtocol
from autobahn.resource import WebSocketResource
class Echo1ServerProtocol(WebSocketServerProtocol):
    """Echoes every message back, prefixed to identify endpoint 1."""

    def onMessage(self, msg, binary):
        reply = "Echo 1 - " + msg
        self.sendMessage(reply)
class Echo2ServerProtocol(WebSocketServerProtocol):
    """Echoes every message back, prefixed to identify endpoint 2."""

    def onMessage(self, msg, binary):
        reply = "Echo 2 - " + msg
        self.sendMessage(reply)
if __name__ == '__main__':

   # Pass 'debug' on the command line to enable verbose Twisted logging.
   if len(sys.argv) > 1 and sys.argv[1] == 'debug':
      log.startLogging(sys.stdout)
      debug = True
   else:
      debug = False

   # Two independent WebSocket factories share one TCP port; clients pick one
   # by connecting to different URL paths below.
   factory1 = WebSocketServerFactory("ws://localhost:9000",
                                     debug = debug,
                                     debugCodePaths = debug)
   factory1.protocol = Echo1ServerProtocol
   resource1 = WebSocketResource(factory1)

   factory2 = WebSocketServerFactory("ws://localhost:9000",
                                     debug = debug,
                                     debugCodePaths = debug)
   factory2.protocol = Echo2ServerProtocol
   resource2 = WebSocketResource(factory2)

   ## Establish a dummy root resource
   root = Data("", "text/plain")

   ## and our WebSocket servers under different paths ..
   root.putChild("echo1", resource1)
   root.putChild("echo2", resource2)

   ## both under one Twisted Web Site
   site = Site(root)
   reactor.listenTCP(9000, site)

   reactor.run()
| apache-2.0 |
RO-ny9/python-for-android | python-modules/twisted/twisted/conch/error.py | 60 | 2677 | # Copyright (c) 2001-2008 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
An error to represent bad things happening in Conch.
Maintainer: Paul Swartz
"""
from twisted.cred.error import UnauthorizedLogin
class ConchError(Exception):
    """
    General Conch failure, wrapping an error ``value`` plus optional
    auxiliary ``data``.
    """

    def __init__(self, value, data=None):
        super(ConchError, self).__init__(value, data)
        self.value = value
        self.data = data
class NotEnoughAuthentication(Exception):
    """
    Raised when an authentication step succeeded but does not by itself prove
    the user's identity: the same authentication type should not be retried,
    another one should be attempted instead.
    """
class ValidPublicKey(UnauthorizedLogin):
    """
    Raised by public key checkers when the supplied public key credentials
    carry no signature at all but are acceptable in every other way (for
    example, the key matches an entry in the user's authorized_keys file).

    Protocol code (eg
    L{SSHUserAuthServer<twisted.conch.ssh.userauth.SSHUserAuthServer>}) which
    attempts to log in using
    L{ISSHPrivateKey<twisted.cred.credentials.ISSHPrivateKey>} credentials
    should be prepared to handle a failure of this type by telling the user to
    re-authenticate using the same key and to include a signature with the new
    attempt.

    See U{http://www.ietf.org/rfc/rfc4252.txt} section 7 for more details.
    """
class IgnoreAuthentication(Exception):
    """
    Raised to signal the UserAuthServer that this authentication attempt
    needs no further handling on its part.
    """
class MissingKeyStoreError(Exception):
    """
    Raised when an SSHAgentServer starts receiving data but its factory never
    provided a keys dict on which to read/write key data.
    """
class UserRejectedKey(Exception):
    """
    Raised when the user interactively declined to accept a key.
    """
class InvalidEntry(Exception):
    """
    Raised when a line in a known_hosts file cannot be interpreted as a
    valid entry.
    """
class HostKeyChanged(Exception):
    """
    The host key of a remote host has changed.

    @ivar offendingEntry: The entry which contains the persistent host key
        that disagrees with the given host key.
    @type offendingEntry: L{twisted.conch.interfaces.IKnownHostEntry}

    @ivar path: a reference to the known_hosts file that the offending entry
        was loaded from
    @type path: L{twisted.python.filepath.FilePath}

    @ivar lineno: The line number of the offending entry in the given path.
    @type lineno: L{int}
    """

    def __init__(self, offendingEntry, path, lineno):
        super(HostKeyChanged, self).__init__()
        self.offendingEntry = offendingEntry
        self.path = path
        self.lineno = lineno
| apache-2.0 |
jsvelasquezv/agroind-mobile | platforms/browser/www/node_modules/node-ninja/gyp/PRESUBMIT.py | 1369 | 3662 | # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Top-level presubmit script for GYP.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into gcl.
"""
# Files excluded from pylint checking.
PYLINT_BLACKLIST = [
  # TODO: fix me.
  # From SCons, not done in google style.
  'test/lib/TestCmd.py',
  'test/lib/TestCommon.py',
  'test/lib/TestGyp.py',
]


# Pylint message IDs suppressed repo-wide; see each inline note for the reason.
PYLINT_DISABLED_WARNINGS = [
  # TODO: fix me.
  # Many tests include modules they don't use.
  'W0611',
  # Possible unbalanced tuple unpacking with sequence.
  'W0632',
  # Attempting to unpack a non-sequence.
  'W0633',
  # Include order doesn't properly include local files?
  'F0401',
  # Some use of built-in names.
  'W0622',
  # Some unused variables.
  'W0612',
  # Operator not preceded/followed by space.
  'C0323',
  'C0322',
  # Unnecessary semicolon.
  'W0301',
  # Unused argument.
  'W0613',
  # String has no effect (docstring in wrong place).
  'W0105',
  # map/filter on lambda could be replaced by comprehension.
  'W0110',
  # Use of eval.
  'W0123',
  # Comma not followed by space.
  'C0324',
  # Access to a protected member.
  'W0212',
  # Bad indent.
  'W0311',
  # Line too long.
  'C0301',
  # Undefined variable.
  'E0602',
  # Not exception type specified.
  'W0702',
  # No member of that name.
  'E1101',
  # Dangerous default {}.
  'W0102',
  # Cyclic import.
  'R0401',
  # Others, too many to sort.
  'W0201', 'W0232', 'E1103', 'W0621', 'W0108', 'W0223', 'W0231',
  'R0201', 'E0101', 'C0321',
  # ************* Module copy
  # W0104:427,12:_test.odict.__setitem__: Statement seems to have no effect
  'W0104',
]
def CheckChangeOnUpload(input_api, output_api):
  """Presubmit checks run when a change is uploaded for review."""
  return list(input_api.canned_checks.PanProjectChecks(
      input_api, output_api))
def CheckChangeOnCommit(input_api, output_api):
  """Presubmit checks run at commit time: license header, tree status, pylint."""
  report = []

  # Accept any year number from 2009 to the current year.
  current_year = int(input_api.time.strftime('%Y'))
  allowed_years = (str(s) for s in reversed(xrange(2009, current_year + 1)))
  years_re = '(' + '|'.join(allowed_years) + ')'

  # The (c) is deprecated, but tolerate it until it's removed from all files.
  license = (
      r'.*? Copyright (\(c\) )?%(year)s Google Inc\. All rights reserved\.\n'
      r'.*? Use of this source code is governed by a BSD-style license that '
      r'can be\n'
      r'.*? found in the LICENSE file\.\n'
  ) % {
      'year': years_re,
  }

  report.extend(input_api.canned_checks.PanProjectChecks(
      input_api, output_api, license_header=license))
  report.extend(input_api.canned_checks.CheckTreeIsOpen(
      input_api, output_api,
      'http://gyp-status.appspot.com/status',
      'http://gyp-status.appspot.com/current'))

  import os
  import sys
  old_sys_path = sys.path
  try:
    # Make the bundled pylib/test libraries importable for pylint, restoring
    # sys.path afterwards no matter what.
    sys.path = ['pylib', 'test/lib'] + sys.path
    blacklist = PYLINT_BLACKLIST
    if sys.platform == 'win32':
      # Pylint on Windows needs normalized, escaped backslashes.
      blacklist = [os.path.normpath(x).replace('\\', '\\\\')
                   for x in PYLINT_BLACKLIST]
    report.extend(input_api.canned_checks.RunPylint(
        input_api,
        output_api,
        black_list=blacklist,
        disabled_warnings=PYLINT_DISABLED_WARNINGS))
  finally:
    sys.path = old_sys_path

  return report
TRYBOTS = [
  'linux_try',
  'mac_try',
  'win_try',
]


def GetPreferredTryMasters(_, change):
  """Map the client.gyp try master to the default test set on every trybot."""
  bot_tests = {}
  for bot in TRYBOTS:
    bot_tests[bot] = set(['defaulttests'])
  return {'client.gyp': bot_tests}
| apache-2.0 |
mikalstill/nova | nova/scheduler/filters/type_filter.py | 5 | 1509 | # Copyright (c) 2012 The Cloudscaling Group, Inc.
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.scheduler import filters
from nova.scheduler.filters import utils
class AggregateTypeAffinityFilter(filters.BaseHostFilter):
    """Restrict hosts to flavors named in their aggregates' metadata.

    A host passes when none of its aggregates define an 'instance_type'
    key, or when any aggregate's 'instance_type' value (a comma-separated
    list of flavor names) contains the requested flavor's name.
    """

    # Aggregate data does not change within a request.
    run_filter_once_per_request = True

    RUN_ON_REBUILD = False

    def host_passes(self, host_state, spec_obj):
        """Return True if `host_state` is allowed to run the requested flavor."""
        flavor_name = spec_obj.flavor.name
        metadata_values = utils.aggregate_values_from_key(
            host_state, 'instance_type')
        for raw_value in metadata_values:
            allowed_names = [name.strip() for name in raw_value.split(',')]
            if flavor_name in allowed_names:
                return True
        # No aggregate defines 'instance_type' -> host is unrestricted.
        return not metadata_values
| apache-2.0 |
tum-pbs/PhiFlow | phi/math/_nd.py | 1 | 21446 | # Because division is different in Python 2 and 3
from __future__ import division
from typing import Tuple
import numpy as np
from . import _ops as math
from . import extrapolation as extrapolation
from ._config import GLOBAL_AXIS_ORDER
from ._ops import stack
from ._shape import Shape, channel, batch, spatial
from ._tensors import Tensor, TensorLike, variable_values
from ._tensors import wrap
from .extrapolation import Extrapolation
def spatial_sum(value: Tensor):
    """Sum `value` over all of its spatial dimensions."""
    spatial_dims = value.shape.spatial.names
    return math.sum_(value, dim=spatial_dims)
def vec_abs(vec: Tensor):
    """Euclidean length of `vec`, reducing over its channel dimensions."""
    squared_sum = math.sum_(vec ** 2, dim=vec.shape.channel.names)
    return math.sqrt(squared_sum)
def vec_squared(vec: Tensor):
    """Squared length of `vec`, reducing over its 'vector' channel dimension."""
    squared_components = vec ** 2
    return math.sum_(squared_components, dim=channel('vector'))
def cross_product(vec1: Tensor, vec2: Tensor):
    """
    Cross product of two vector fields.

    In 2D this reduces to the scalar (curl-style) product; the sign depends on
    the global axis order (x-first vs. x-last). 3D is not implemented yet.
    """
    vec1 = math.tensor(vec1)
    vec2 = math.tensor(vec2)
    # Infer the spatial rank from whichever argument carries a 'vector' dim.
    spatial_rank = vec1.vector.size if 'vector' in vec1.shape else vec2.vector.size
    if spatial_rank == 2:  # Curl in 2D
        assert vec2.vector.exists
        if vec1.vector.exists:
            # Both arguments are vectors: result is the scalar v1 x v2.
            v1_x, v1_y = vec1.vector.unstack()
            v2_x, v2_y = vec2.vector.unstack()
            if GLOBAL_AXIS_ORDER.is_x_first:
                return v1_x * v2_y - v1_y * v2_x
            else:
                return - v1_x * v2_y + v1_y * v2_x
        else:
            # vec1 is a scalar: result is vec1 times the 90°-rotated vec2.
            v2_x, v2_y = vec2.vector.unstack()
            if GLOBAL_AXIS_ORDER.is_x_first:
                return vec1 * math.stack([-v2_y, v2_x], channel('vector'))
            else:
                return vec1 * math.stack([v2_y, -v2_x], channel('vector'))
    elif spatial_rank == 3:  # Curl in 3D
        raise NotImplementedError(f'spatial_rank={spatial_rank} not yet implemented')
    else:
        raise AssertionError(f'dims = {spatial_rank}. Vector product not available in > 3 dimensions')
def normalize_to(target: Tensor, source: Tensor, epsilon=1e-5):
    """
    Multiplies the target so that its total content matches the source.

    Args:
        target: a tensor
        source: a tensor or number
        epsilon: small number to prevent division by zero, or None to disable
            the safeguard. (Default value = 1e-5)

    Returns:
        normalized tensor of the same shape as target
    """
    current_total = math.sum_(target, dim=target.shape.non_batch.names)
    if epsilon is None:
        denominator = current_total
    else:
        # Clamp from below so an (almost) empty target cannot divide by zero.
        denominator = math.maximum(current_total, epsilon)
    desired_total = math.sum_(source, dim=source.shape.non_batch.names)
    return target * (desired_total / denominator)
def l1_loss(x) -> Tensor:
    """
    Computes *∑<sub>i</sub> ||x<sub>i</sub>||<sub>1</sub>*, summing over all non-batch dimensions.

    Args:
        x: `Tensor` or `TensorLike`.
            For `TensorLike` objects, only the sum over all value attributes is computed.

    Returns:
        loss: `Tensor`
    """
    if isinstance(x, Tensor):
        return math.sum_(abs(x), x.shape.non_batch)
    if isinstance(x, TensorLike):
        # Recurse into every variable value attribute and add the pieces up.
        attribute_losses = [l1_loss(getattr(x, a)) for a in variable_values(x)]
        return sum(attribute_losses)
    raise ValueError(x)
def l2_loss(x) -> Tensor:
    """
    Computes *∑<sub>i</sub> ||x<sub>i</sub>||<sub>2</sub><sup>2</sup> / 2*, summing over all non-batch dimensions.

    Args:
        x: `Tensor` or `TensorLike`.
            For `TensorLike` objects, only the sum over all value attributes is computed.

    Returns:
        loss: `Tensor`
    """
    if isinstance(x, Tensor):
        # Complex values contribute |x|^2; real values are squared directly.
        values = abs(x) if x.dtype.kind == complex else x
        return math.sum_(values ** 2, x.shape.non_batch) * 0.5
    if isinstance(x, TensorLike):
        return sum(l2_loss(getattr(x, a)) for a in variable_values(x))
    raise ValueError(x)
def frequency_loss(x,
                   frequency_falloff: float = 100,
                   threshold=1e-5,
                   ignore_mean=False) -> Tensor:
    """
    Penalizes the squared `values` in frequency (Fourier) space.
    Lower frequencies are weighted more strongly then higher frequencies, depending on `frequency_falloff`.

    Args:
        x: `Tensor` or `TensorLike` Values to penalize, typically `actual - target`.
        frequency_falloff: Large values put more emphasis on lower frequencies, 1.0 weights all frequencies equally.
            *Note*: The total loss is not normalized. Varying the value will result in losses of different magnitudes.
        threshold: Frequency amplitudes below this value are ignored.
            Setting this to zero may cause infinities or NaN values during backpropagation.
        ignore_mean: If `True`, does not penalize the mean value (frequency=0 component).

    Returns:
        Scalar loss value
    """
    if isinstance(x, Tensor):
        if ignore_mean:
            # Removing the mean zeroes out the frequency=0 component.
            x -= math.mean(x, x.shape.non_batch)
        k_squared = vec_squared(math.fftfreq(x.shape.spatial))
        # Gaussian weighting: high frequencies are suppressed exponentially.
        weights = math.exp(-0.5 * k_squared * frequency_falloff ** 2)
        diff_fft = abs_square(math.fft(x) * weights)
        # Clamp before sqrt so the gradient stays finite at zero amplitudes.
        diff_fft = math.sqrt(math.maximum(diff_fft, threshold))
        return l2_loss(diff_fft)
    elif isinstance(x, TensorLike):
        return sum([frequency_loss(getattr(x, a), frequency_falloff, threshold, ignore_mean) for a in variable_values(x)])
    else:
        raise ValueError(x)
def abs_square(complex_values: Tensor) -> Tensor:
    """
    Squared magnitude of complex values.

    Args:
        complex_values: complex `Tensor`

    Returns:
        Tensor: real valued magnitude squared
    """
    imag_part = math.imag(complex_values)
    real_part = math.real(complex_values)
    return imag_part ** 2 + real_part ** 2
# Divergence
# def divergence(tensor, dx=1, difference='central', padding='constant', dimensions=None):
# """
# Computes the spatial divergence of a vector channel from finite differences.
#
# :param tensor: vector field; tensor of shape (batch size, spatial dimensions..., spatial rank)
# :param dx: distance between adjacent grid points (default 1)
# :param difference: type of difference, one of ('forward', 'central') (default 'forward')
# :return: tensor of shape (batch size, spatial dimensions..., 1)
# """
# assert difference in ('central', 'forward', 'backward'), difference
# rank = spatial_rank(tensor)
# if difference == 'forward':
# return _divergence_nd(tensor, padding, (0, 1), dims) / dx ** rank # TODO why dx^rank?
# elif difference == 'backward':
# return _divergence_nd(tensor, padding, (-1, 0), dims) / dx ** rank
# else:
# return _divergence_nd(tensor, padding, (-1, 1), dims) / (2 * dx) ** rank
#
#
# def _divergence_nd(x_, padding, relative_shifts, dims=None):
# x = tensor(x_)
# assert x.shape.channel.rank == 1
# dims = dims if dims is not None else x.shape.spatial.names
# x = math.pad(x, {axis: (-relative_shifts[0], relative_shifts[1]) for axis in dims}, mode=padding)
# components = []
# for dimension in dims:
# dim_index_in_spatial = x.shape.spatial.reset_indices().index(dimension)
# lower, upper = _multi_roll(x, dimension, relative_shifts, diminish_others=(-relative_shifts[0], relative_shifts[1]), names=dims, base_selection={0: rank - dimension - 1})
# components.append(upper - lower)
# return math.sum_(components, 0)
def shift(x: Tensor,
          offsets: tuple,
          dims: tuple or None = None,
          padding: Extrapolation or None = extrapolation.BOUNDARY,
          stack_dim: Shape or None = channel('shift')) -> list:
    """
    shift Tensor by a fixed offset and abiding by extrapolation

    Args:
        x: Input data
        offsets: Shift size
        dims: Dimensions along which to shift, defaults to None
        padding: padding to be performed at the boundary, defaults to extrapolation.BOUNDARY
        stack_dim: dimensions to be stacked, defaults to 'shift'

    Returns:
        list: offset_tensor
    """
    if stack_dim is None:
        # Without a stack dimension the per-dimension components cannot be
        # combined, so only a single shift dimension is supported.
        assert len(dims) == 1
    x = wrap(x)
    dims = dims if dims is not None else x.shape.spatial.names
    # Pad by the largest negative / positive offset so every shifted slice
    # below stays within bounds.
    pad_lower = max(0, -min(offsets))
    pad_upper = max(0, max(offsets))
    if padding:
        x = math.pad(x, {axis: (pad_lower, pad_upper) for axis in dims}, mode=padding)
    offset_tensors = []
    for offset in offsets:
        components = []
        for dimension in dims:
            if padding:
                # Shifted window along `dimension`; other dims drop the padding.
                slices = {dim: slice(pad_lower + offset, (-pad_upper + offset) or None) if dim == dimension else slice(pad_lower, -pad_upper or None) for dim in dims}
            else:
                # No padding was added: shifted slice along `dimension`, full range elsewhere.
                slices = {dim: slice(pad_lower + offset, (-pad_upper + offset) or None) if dim == dimension else slice(None, None) for dim in dims}
            components.append(x[slices])
        offset_tensors.append(stack(components, stack_dim) if stack_dim is not None else components[0])
    return offset_tensors
def extrapolate_valid_values(values: Tensor, valid: Tensor, distance_cells: int = 1) -> Tuple[Tensor, Tensor]:
    """
    Extrapolates the values of `values` which are marked by the nonzero values of `valid` for `distance_cells` steps in all spatial directions.
    Overlapping extrapolated values get averaged. Extrapolation also includes diagonals.

    Examples (1-step extrapolation), x marks the values for extrapolation:
        200  000  111       004  00x  044       102  000  144
        010 + 0x0 => 111    000 + 000 => 234    004 + 00x => 234
        040  000  111       200  x00  220       200  x00  234

    Args:
        values: Tensor which holds the values for extrapolation
        valid: Tensor with same size as `x` marking the values for extrapolation with nonzero values
        distance_cells: Number of extrapolation steps

    Returns:
        values: Extrapolation result
        valid: mask marking all valid values after extrapolation
    """
    def binarize(x):
        # Maps nonzero -> 1 and zero -> 0 (divide_no_nan defines 0/0 as 0).
        return math.divide_no_nan(x, x)
    # Extrapolating farther than the grid extent is pointless.
    distance_cells = min(distance_cells, max(values.shape.sizes))
    for _ in range(distance_cells):
        valid = binarize(valid)
        valid_values = valid * values  # zero out invalid entries so they do not contaminate the sums
        overlap = valid  # counts how many valid neighbours contribute to each cell
        for dim in values.shape.spatial.names:
            values_l, values_r = shift(valid_values, (-1, 1), dims=dim, padding=extrapolation.ZERO)
            valid_values = math.sum_(values_l + values_r + valid_values, dim='shift')
            mask_l, mask_r = shift(overlap, (-1, 1), dims=dim, padding=extrapolation.ZERO)
            overlap = math.sum_(mask_l + mask_r + overlap, dim='shift')
        extp = math.divide_no_nan(valid_values, overlap)  # take mean where extrapolated values overlap
        # Keep original values where they were already valid; fill newly
        # reached cells with the averaged extrapolation.
        values = math.where(valid, values, math.where(binarize(overlap), extp, values))
        valid = overlap
    return values, binarize(valid)
# Gradient
def spatial_gradient(grid: Tensor,
                     dx: float or int = 1,
                     difference: str = 'central',
                     padding: Extrapolation or None = extrapolation.BOUNDARY,
                     dims: tuple or None = None,
                     stack_dim: Shape = channel('gradient')):
    """
    Calculates the spatial_gradient of a scalar channel from finite differences.
    The spatial_gradient vectors are in reverse order, lowest dimension first.

    Args:
        grid: grid values
        dims: (optional) sequence of dimension names
        dx: physical distance between grid points (default 1)
        difference: type of difference, one of ('forward', 'backward', 'central') (default 'central')
        padding: tensor padding mode
        stack_dim: name of the new vector dimension listing the spatial_gradient w.r.t. the various axes

    Returns:
        tensor of shape (batch_size, spatial_dimensions..., spatial rank)

    Raises:
        ValueError: if `difference` is not one of the supported modes.
    """
    grid = wrap(grid)
    if difference.lower() == 'central':
        # (f(x+dx) - f(x-dx)) / (2*dx)
        left, right = shift(grid, (-1, 1), dims, padding, stack_dim=stack_dim)
        return (right - left) / (dx * 2)
    elif difference.lower() == 'forward':
        left, right = shift(grid, (0, 1), dims, padding, stack_dim=stack_dim)
        return (right - left) / dx
    elif difference.lower() == 'backward':
        left, right = shift(grid, (-1, 0), dims, padding, stack_dim=stack_dim)
        return (right - left) / dx
    else:
        # BUGFIX: the message previously omitted BACKWARD even though it is supported.
        raise ValueError('Invalid difference type: {}. Can be CENTRAL, FORWARD or BACKWARD'.format(difference))
# Laplace
def laplace(x: Tensor,
            dx: Tensor or float = 1,
            padding: Extrapolation = extrapolation.BOUNDARY,
            dims: tuple or None = None):
    """
    Spatial Laplace operator as defined for scalar fields.
    If a vector field is passed, the laplace is computed component-wise.

    Args:
        x: n-dimensional field of shape (batch, spacial dimensions..., components)
        dx: scalar or 1d tensor
        padding: extrapolation
        dims: The second derivative along these dimensions is summed over

    Returns:
        `phi.math.Tensor` of same shape as `x`
    """
    if not isinstance(dx, (int, float)):
        dx = wrap(dx, batch('_laplace'))
    if isinstance(x, Extrapolation):
        # Extrapolations define their own spatial gradient; delegate to it.
        return x.spatial_gradient()
    left, center, right = shift(wrap(x), (-1, 0, 1), dims, padding, stack_dim=batch('_laplace'))
    # Second-order central difference per dimension, then summed over all dims.
    result = (left + right - 2 * center) / dx
    result = math.sum_(result, '_laplace')
    return result
def fourier_laplace(grid: Tensor,
                    dx: Tensor or Shape or float or list or tuple,
                    times: int = 1):
    """
    Applies the spatial laplace operator to the given tensor with periodic boundary conditions.

    *Note:* The results of `fourier_laplace` and `laplace` are close but not identical.

    This implementation computes the laplace operator in Fourier space.
    The result for periodic fields is exact, i.e. no numerical instabilities can occur, even for higher-order derivatives.

    Args:
        grid: tensor, assumed to have periodic boundary conditions
        dx: distance between grid points, tensor-like, scalar or vector
        times: number of times the laplace operator is applied. The computational cost is independent of this parameter.

    Returns:
        tensor of same shape as `grid`
    """
    frequencies = math.fft(math.to_complex(grid))
    # Fourier symbol of the Laplacian: -(2*pi)^2 * |k|^2, summed over axes.
    k_squared = math.sum_(math.fftfreq(grid.shape) ** 2, 'vector')
    fft_laplace = -(2 * np.pi) ** 2 * k_squared
    # Raising the symbol to `times` applies the operator repeatedly at no extra cost.
    result = math.real(math.ifft(frequencies * fft_laplace ** times))
    return math.cast(result / wrap(dx) ** 2, grid.dtype)
def fourier_poisson(grid: Tensor,
                    dx: Tensor or Shape or float or list or tuple,
                    times: int = 1):
    """
    Inverse operation to `fourier_laplace`.

    Args:
        grid: tensor, assumed to have periodic boundary conditions
        dx: distance between grid points, tensor-like, scalar or vector
        times: number of times the inverse laplace operator is applied

    Returns:
        tensor of same shape as `grid`
    """
    frequencies = math.fft(math.to_complex(grid))
    k_squared = math.sum_(math.fftfreq(grid.shape) ** 2, 'vector')
    fft_laplace = -(2 * np.pi) ** 2 * k_squared
    # fft_laplace.tensor[(0,) * math.ndims(k_squared)] = math.inf  # assume NumPy array to edit
    # divide_no_nan zeroes out the k=0 mode (where fft_laplace == 0),
    # replacing the explicit inf assignment of the commented line above.
    result = math.real(math.ifft(math.divide_no_nan(frequencies, math.to_complex(fft_laplace ** times))))
    return math.cast(result * wrap(dx) ** 2, grid.dtype)
# Downsample / Upsample
def downsample2x(grid: Tensor,
                 padding: Extrapolation = extrapolation.BOUNDARY,
                 dims: tuple or None = None) -> Tensor:
    """
    Resamples a regular grid to half the number of spatial sample points per dimension.
    The grid values at the new points are determined via mean (linear interpolation).

    Args:
        grid: full size grid
        padding: grid extrapolation. Used to insert an additional value for odd spatial dims
        dims: dims along which down-sampling is applied. If None, down-sample along all spatial dims.

    Returns:
        half-size grid
    """
    dims = grid.shape.spatial.only(dims).names
    odd_dimensions = [dim for dim in dims if grid.shape.get_size(dim) % 2 != 0]
    # Odd-sized dims get one extra padded value so each dim can be halved cleanly.
    grid = math.pad(grid, {dim: (0, 1) for dim in odd_dimensions}, padding)
    for dim in dims:
        # Average each pair of adjacent samples along `dim`.
        grid = (grid[{dim: slice(1, None, 2)}] + grid[{dim: slice(0, None, 2)}]) / 2
    return grid
def upsample2x(grid: Tensor,
               padding: Extrapolation = extrapolation.BOUNDARY,
               dims: tuple or None = None) -> Tensor:
    """
    Resamples a regular grid to double the number of spatial sample points per dimension.
    The grid values at the new points are determined via linear interpolation.

    Args:
        grid: half-size grid
        padding: grid extrapolation
        dims: dims along which up-sampling is applied. If None, up-sample along all spatial dims.

    Returns:
        double-size grid
    """
    for i, dim in enumerate(grid.shape.spatial.only(dims)):
        left, center, right = shift(grid, (-1, 0, 1), dim.names, padding, None)
        # Each cell produces two new samples, linearly interpolated from its
        # neighbours with weights 1/4 and 3/4.
        interp_left = 0.25 * left + 0.75 * center
        interp_right = 0.75 * center + 0.25 * right
        stacked = math.stack([interp_left, interp_right], spatial('_interleave'))
        # Fold the interleave dim back into the spatial dim -> doubled resolution.
        grid = math.join_dimensions(stacked, (dim.name, '_interleave'), dim)
    return grid
def sample_subgrid(grid: Tensor, start: Tensor, size: Shape) -> Tensor:
    """
    Samples a sub-grid from `grid` with equal distance between sampling points.
    The values at the new sample points are determined via linear interpolation.

    Args:
        grid: `Tensor` to be resampled. Values are assumed to be sampled at cell centers.
        start: Origin point of sub-grid within `grid`, measured in number of cells.
            Must have a single dimension called `vector`.
            Example: `start=(1, 0.5)` would slice off the first grid point in dim 1 and take the mean of neighbouring points in dim 2.
            The order of dims must be equal to `size` and `grid.shape.spatial`.
        size: Resolution of the sub-grid. Must not be larger than the resolution of `grid`.
            The order of dims must be equal to `start` and `grid.shape.spatial`.

    Returns:
        Sub-grid as `Tensor`
    """
    assert start.shape.names == ('vector',)
    assert grid.shape.spatial.names == size.names
    assert math.all_available(start), "Cannot perform sample_subgrid() during tracing, 'start' must be known."
    # Crop to the integer cell range covering the sub-grid; a fractional start
    # needs one extra cell for the interpolation below.
    discard = {}
    for dim, d_start, d_size in zip(grid.shape.spatial.names, start, size.sizes):
        discard[dim] = slice(int(d_start), int(d_start) + d_size + (1 if d_start != 0 else 0))
    grid = grid[discard]
    # Fractional part of the start determines the interpolation weights.
    upper_weight = start % 1
    lower_weight = 1 - upper_weight
    for i, dim in enumerate(grid.shape.spatial.names):
        if upper_weight[i].native() not in (0, 1):
            # Blend each cell with its upper neighbour along `dim`.
            lower, upper = shift(grid, (0, 1), [dim], padding=None, stack_dim=None)
            grid = upper * upper_weight[i] + lower * lower_weight[i]
    return grid
# Poisson Brackets
def poisson_bracket(grid1, grid2):
    """
    Poisson bracket [grid1, grid2].

    Currently only implemented via the Arakawa scheme, which requires both
    grids to be 2D, periodic and uniformly spaced with identical dx.

    Args:
        grid1: first field (grid with `.rank`, `.boundary`, `.dx`, `.values`)
        grid2: second field, same layout as `grid1`

    Returns:
        Tensor holding the Poisson bracket values.

    Raises:
        NotImplementedError: if the grids do not satisfy the Arakawa preconditions.
    """
    if all([grid1.rank == grid2.rank == 2,
            grid1.boundary == grid2.boundary == extrapolation.PERIODIC,
            len(set(list(grid1.dx) + list(grid2.dx))) == 1]):
        return _periodic_2d_arakawa_poisson_bracket(grid1.values, grid2.values, grid1.dx)
    else:
        raise NotImplementedError("\n".join([
            # BUGFIX: a missing comma here caused implicit string concatenation,
            # fusing the header with the first detail line of the message.
            "Not implemented for:",
            f"ranks ({grid1.rank}, {grid2.rank}) != 2",
            f"boundary ({grid1.boundary}, {grid2.boundary}) != {extrapolation.PERIODIC}",
            f"dx uniform ({grid1.dx}, {grid2.dx})"
        ]))
def _periodic_2d_arakawa_poisson_bracket(tensor1: Tensor, tensor2: Tensor, dx: float):
    """
    Solves the poisson bracket using the Arakawa Scheme [tensor1, tensor2]

    Only works in 2D, with equal spaced grids, and periodic boundary conditions

    Args:
        tensor1(Tensor): first field in the poisson bracket
        tensor2(Tensor): second field in the poisson bracket
        dx(float): Grid size (equal in x-y)

    Returns:
        Tensor with the Arakawa discretization of the Poisson bracket,
        same spatial extent as the (unpadded) inputs.
    """
    # Pad one ghost cell on each side so the 9-point stencil below can be
    # expressed with plain slicing under periodic boundaries.
    zeta = math.pad(value=tensor1, widths={'x': (1, 1), 'y': (1, 1)}, mode=extrapolation.PERIODIC)
    psi = math.pad(value=tensor2, widths={'x': (1, 1), 'y': (1, 1)}, mode=extrapolation.PERIODIC)
    # 9-point Arakawa stencil; the 1/(12*dx^2) factor normalizes the scheme.
    return (zeta.x[2:].y[1:-1] * (psi.x[1:-1].y[2:] - psi.x[1:-1].y[0:-2] + psi.x[2:].y[2:] - psi.x[2:].y[0:-2])
            - zeta.x[0:-2].y[1:-1] * (psi.x[1:-1].y[2:] - psi.x[1:-1].y[0:-2] + psi.x[0:-2].y[2:] - psi.x[0:-2].y[0:-2])
            - zeta.x[1:-1].y[2:] * (psi.x[2:].y[1:-1] - psi.x[0:-2].y[1:-1] + psi.x[2:].y[2:] - psi.x[0:-2].y[2:])
            + zeta.x[1:-1].y[0:-2] * (psi.x[2:].y[1:-1] - psi.x[0:-2].y[1:-1] + psi.x[2:].y[0:-2] - psi.x[0:-2].y[0:-2])
            + zeta.x[2:].y[0:-2] * (psi.x[2:].y[1:-1] - psi.x[1:-1].y[0:-2])
            + zeta.x[2:].y[2:] * (psi.x[1:-1].y[2:] - psi.x[2:].y[1:-1])
            - zeta.x[0:-2].y[2:] * (psi.x[1:-1].y[2:] - psi.x[0:-2].y[1:-1])
            - zeta.x[0:-2].y[0:-2] * (psi.x[0:-2].y[1:-1] - psi.x[1:-1].y[0:-2])) / (12 * dx ** 2)
| mit |
lamblin/pylearn2 | pylearn2/sandbox/cuda_convnet/tests/profile_probabilistic_max_pooling.py | 44 | 2414 | from __future__ import print_function
import theano.tensor as T
import numpy as np
from theano.compat.six.moves import xrange
from theano import config
from theano import function
import time
from pylearn2.utils import sharedX
from pylearn2.sandbox.cuda_convnet.probabilistic_max_pooling import \
prob_max_pool_c01b
from pylearn2.expr.probabilistic_max_pooling import max_pool_c01b
def profile(f):
    """Benchmark the forward pass of pooling implementation ``f``.

    Builds a random c01b input, compiles a Theano function that writes the
    pooled outputs into shared variables, warms up, then reports per-trial
    and mean wall-clock times for batches of 10 calls.
    """
    print('profiling ', f)
    rng = np.random.RandomState([2012, 7, 19])
    batch_size = 128
    rows, cols, channels = 30, 30, 16
    pool_rows, pool_cols = 3, 3
    zv = rng.randn(channels, rows, cols, batch_size).astype(config.floatX)
    # Shared variables hold inputs and outputs on the device so GPU transfer
    # time is excluded from the measurement.
    p_shared = sharedX(zv[:, 0:rows:pool_rows, 0:cols:pool_cols, :])
    h_shared = sharedX(zv)
    z_shared = sharedX(zv)
    p_th, h_th = f(z_shared, (pool_rows, pool_cols))
    func = function([], updates={p_shared: p_th, h_shared: h_th})
    print('warming up')
    for _ in xrange(10):
        func()
    trials = 10
    results = []
    for _ in xrange(trials):
        start = time.time()
        for _ in xrange(10):
            func()
        elapsed = time.time() - start
        print(elapsed)
        results.append(elapsed)
    print('final: ', sum(results) / float(trials))
def profile_grad(f):
    """Benchmark the gradient of pooling implementation ``f`` w.r.t. its input.

    Same protocol as `profile`, but the compiled function computes
    T.grad(p.sum() + h.sum(), z) and the grid is smaller (9x9) because the
    gradient is more expensive.
    """
    print('profiling gradient of ', f)
    rng = np.random.RandomState([2012, 7, 19])
    batch_size = 128
    rows, cols, channels = 9, 9, 16
    pool_rows, pool_cols = 3, 3
    zv = rng.randn(channels, rows, cols, batch_size).astype(config.floatX)
    # Shared variables hold inputs and outputs on the device so GPU transfer
    # time is excluded from the measurement.
    grad_shared = sharedX(zv)
    z_shared = sharedX(zv)
    p_th, h_th = f(z_shared, (pool_rows, pool_cols))
    func = function([], updates={grad_shared: T.grad(p_th.sum() + h_th.sum(), z_shared)})
    print('warming up')
    for _ in xrange(10):
        func()
    trials = 10
    results = []
    for _ in xrange(trials):
        start = time.time()
        for _ in xrange(10):
            func()
        elapsed = time.time() - start
        print(elapsed)
        results.append(elapsed)
    print('final: ', sum(results) / float(trials))
if __name__ == '__main__':
    # Profile both pooling implementations (cuda_convnet vs. reference),
    # forward pass first, then gradients.
    profile(prob_max_pool_c01b)
    profile(max_pool_c01b)
    profile_grad(prob_max_pool_c01b)
    profile_grad(max_pool_c01b)
| bsd-3-clause |
RoboErik/RUBIK | Rubik/RubikSolver/SDL_BM017CS/SDL_PC_BM017CS.py | 1 | 4650 | #!/usr/bin/python
import time
import smbus
from Adafruit_I2C import Adafruit_I2C
# ===========================================================================
# SDL_BM017 / TCS34725 color sensor i2c driver Class
# SwitchDoc Labs / Project Curacao
# originally from Project Curacao
# Version 1.1
# 2/14/14
#
# ===========================================================================
class SDL_BM017:
    """I2C driver for the SDL_BM017 breakout (TAOS TCS34725 RGB color sensor).

    Register constants below already include the TCS34725 command bit, so they
    can be passed directly to `write8` / `readU8`.
    """

    i2c = None

    # i2C Addresses
    __SDL_BM017_SensorAddress = 0x29
    __SDL_BM017_EnableAddress = 0xa0  # register address + command bits
    __SDL_BM017_ATimeAddress = 0xa1  # register address + command bits
    __SDL_BM017_ControlAddress = 0xaf  # register address + command bits
    __SDL_BM017_IDAddress = 0xb2  # register address + command bits
    __SDL_BM017_ColorAddress = 0xb4  # register address + command bits
    __SDL_BM017_StatusAddress = 0xb3  # register address + command bits
    __SDL_BM017_ClearInterrupts = 0x66

    # bit definitions
    TCS34725_ENABLE_AIEN = (0x10)  # RGBC Interrupt Enable
    TCS34725_ENABLE_WEN = (0x08)  # Wait enable - Writing 1 activates the wait timer
    TCS34725_ENABLE_AEN = (0x02)  # RGBC Enable - Writing 1 actives the ADC, 0 disables it
    TCS34725_ENABLE_PON = (0x01)  # Power on - Writing 1 activates the internal oscillator, 0 disables it

    # color results: raw 16-bit channel values from the last getColors() call
    clear_color = 0
    red_color = 0
    green_color = 0
    blue_color = 0

    __SDL_BM017_IntegrationTime = 0xF6
    __SDL_BM017_Gain = 0x00

    debug = False

    def __init__(self, debug=False):
        """Open the I2C device, program integration time / gain and power on."""
        self.i2c = Adafruit_I2C(self.__SDL_BM017_SensorAddress)
        self.i2c.write8(self.__SDL_BM017_ATimeAddress, self.__SDL_BM017_IntegrationTime)
        self.i2c.write8(self.__SDL_BM017_ControlAddress, self.__SDL_BM017_Gain)
        self.i2c.write8(self.__SDL_BM017_EnableAddress, 0x03)  # PON | AEN
        time.sleep(0.700)  # let the first integration cycle complete
        self.debug = debug
        if (self.debug):
            print("SDL_BM017 initialized")

    def isSDL_BM017There(self):
        """Return 1 if the sensor answers with the expected ID (0x44), else 0."""
        device = self.i2c.readU8(self.__SDL_BM017_IDAddress)
        if (device == 0x44):
            if (self.debug):
                print ("SDL_BM017 / TCS34725 is present")
            return 1
        else:
            if (self.debug):
                print ("SDL_BM017 / TCS34725 is NOT present")
            return 0

    def readStatus(self):
        """Read and return the raw status register."""
        status = self.i2c.readU8(self.__SDL_BM017_StatusAddress)
        if (self.debug):
            print("SDL_BM017 Status=", status)
        return status

    def getColors(self):
        """Read all four color channels and cache them on the instance.

        Returns the raw 8-byte register list; the decoded little-endian 16-bit
        values are stored in clear_color / red_color / green_color / blue_color.
        """
        colorList = self.i2c.readList(self.__SDL_BM017_ColorAddress, 8)
        if (self.debug):
            print("ColorList = ", colorList)
        # Each channel is two bytes, low byte first.
        self.clear_color = (colorList[1] << 8) + (colorList[0])
        self.red_color = (colorList[3] << 8) + (colorList[2])
        self.green_color = (colorList[5] << 8) + (colorList[4])
        self.blue_color = (colorList[7] << 8) + (colorList[6])
        if (self.debug):
            print("clear_color= ", self.clear_color)
            print("red_color= ", self.red_color)
            print("green_color= ", self.green_color)
            print("blue_color= ", self.blue_color)
        return colorList

    def setIntegrationTimeAndGain(self, IT, Gain):
        """Program new integration time and gain, then re-enable the sensor."""
        self.i2c.write8(self.__SDL_BM017_ATimeAddress, IT)
        self.i2c.write8(self.__SDL_BM017_ControlAddress, Gain)
        self.i2c.write8(self.__SDL_BM017_EnableAddress, 0x03)
        time.sleep(0.700)
        if (self.debug):
            print("IT set to:", IT)
            print("Gain set to:", Gain)
        # BUGFIX: these previously assigned local variables, so the cached
        # settings on the instance were never updated.
        self.__SDL_BM017_IntegrationTime = IT
        self.__SDL_BM017_Gain = Gain

    def disableDevice(self):
        """Clear PON and AEN in the enable register, powering the sensor down."""
        reg = self.i2c.readU8(self.__SDL_BM017_EnableAddress)
        self.i2c.write8(self.__SDL_BM017_EnableAddress, reg & ~(self.TCS34725_ENABLE_PON | self.TCS34725_ENABLE_AEN));

    # This can be used to trigger the LED. Connect the INT pin to LEDON pin and connecting the VDD_LED pin to 3.3V
    def setInterrupt(self, state):
        """Enable (state truthy) or disable the RGBC interrupt output."""
        reg = self.i2c.readU8(self.__SDL_BM017_EnableAddress)
        if (state):
            reg |= self.TCS34725_ENABLE_AIEN
            if (self.debug):
                print("Interrupt On")
        else:
            reg &= ~self.TCS34725_ENABLE_AIEN
            if (self.debug):
                print("Interrupt Off")
        self.i2c.write8(self.__SDL_BM017_EnableAddress, reg)

    def clearInterrupt(self):
        """Send the clear-interrupt special command."""
        # Use the declared constant instead of a duplicated magic number.
        self.i2c.write8(self.__SDL_BM017_ClearInterrupts, 0x00)

    def setInterruptLimits(self, low, high):
        """Program the 16-bit clear-channel interrupt thresholds.

        Registers 0x04/0x05 hold the low threshold (low/high byte),
        0x06/0x07 hold the high threshold.
        """
        self.i2c.write8(0x04, low & 0xFF)
        self.i2c.write8(0x05, low >> 8)
        self.i2c.write8(0x06, high & 0xFF)
        # BUGFIX: was `low >> 8` - the upper byte of the high threshold must
        # come from `high`, not `low`.
        self.i2c.write8(0x07, high >> 8)
| apache-2.0 |
criteo-forks/graphite-web | webapp/tests/test_dashboard.py | 4 | 22800 | import copy
import errno
import mock
import os
from . import TEST_CONF_DIR
from django.conf import settings
try:
from django.urls import reverse
except ImportError: # Django < 1.10
from django.core.urlresolvers import reverse
from django.http import HttpResponse
from .base import TestCase
from django.test.utils import override_settings
from graphite.util import json
try:
from django.contrib.auth import get_user_model
User = get_user_model()
except ImportError:
from django.contrib.auth.models import User
class DashboardTest(TestCase):
    # Set config to the test config file
    settings.DASHBOARD_CONF = os.path.join(TEST_CONF_DIR, 'dashboard.conf')
    # Define a testtemplate
    testtemplate = {"state": '{"graphs": [[ "target=a.b.c.*.__VALUE__.d", { "from":"-2days", "target":[ "a.b.c.*.__VALUE__.d" ], "until":"now" }, "/render?width=400&from=-2days&until=now&height=250&target=a.b.c.*.__VALUE__.d&_uniq=0.6526056618895382&title=a.b.c.*.__VALUE__.d" ]]}'}

    # The dashboard view must render (HTTP 200) even when the conf file is absent.
    @override_settings(DASHBOARD_CONF=os.path.join(TEST_CONF_DIR, 'dashboard.conf.missing'))
    def test_dashboard_missing_conf(self):
        url = reverse('dashboard')
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)

    # Same as above for the template view with an unknown template name.
    @override_settings(DASHBOARD_CONF=os.path.join(TEST_CONF_DIR, 'dashboard.conf.missing'))
    def test_dashboard_template_missing_template(self):
        url = reverse('dashboard_template', args=['bogustemplate', 'testkey'])
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)

    # A permission error while reading the conf should propagate as an exception.
    @mock.patch('graphite.dashboard.views.DashboardConfig.check')
    def test_dashboard_conf_read_failure(self, check):
        check.side_effect = OSError(errno.EPERM, 'Operation not permitted')
        url = reverse('dashboard')
        with self.assertRaises(Exception):
            _ = self.client.get(url)

    @mock.patch('graphite.dashboard.views.DashboardConfig.check')
    def test_dashboard_template_conf_read_failure(self, check):
        check.side_effect = OSError(errno.EPERM, 'Operation not permitted')
        url = reverse('dashboard_template', args=['bogustemplate', 'testkey'])
        with self.assertRaises(Exception):
            _ = self.client.get(url)

    # Conf files with missing or invalid sections must still render successfully.
    @override_settings(DASHBOARD_CONF=os.path.join(TEST_CONF_DIR, 'dashboard.conf.missing_ui'))
    def test_dashboard_conf_missing_ui(self):
        url = reverse('dashboard')
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)

    @override_settings(DASHBOARD_CONF=os.path.join(TEST_CONF_DIR, 'dashboard.conf.missing_ui'))
    def test_dashboard_template_missing_ui(self):
        url = reverse('dashboard_template', args=['bogustemplate', 'testkey'])
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)

    @override_settings(DASHBOARD_CONF=os.path.join(TEST_CONF_DIR, 'dashboard.conf.missing_keyboard-shortcuts'))
    def test_dashboard_conf_missing_keyboard_shortcuts(self):
        url = reverse('dashboard')
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)

    @override_settings(DASHBOARD_CONF=os.path.join(TEST_CONF_DIR, 'dashboard.conf.missing_keyboard-shortcuts'))
    def test_dashboard_template_missing_keyboard_shortcuts(self):
        url = reverse('dashboard_template', args=['bogustemplate', 'testkey'])
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)

    @override_settings(DASHBOARD_CONF=os.path.join(TEST_CONF_DIR, 'dashboard.conf.invalid_theme'))
    def test_dashboard_conf_invalid_theme(self):
        url = reverse('dashboard')
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)

    @override_settings(DASHBOARD_CONF=os.path.join(TEST_CONF_DIR, 'dashboard.conf.invalid_theme'))
    def test_dashboard_template_invalid_theme(self):
        url = reverse('dashboard_template', args=['bogustemplate', 'testkey'])
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
    # Basic GET of the dashboard view.
    def test_dashboard(self):
        url = reverse('dashboard')
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)

    # POSTing with an empty user name must not fail.
    def test_dashboard_no_user(self):
        url = reverse('dashboard')
        request = {"user": '', "state": '{}'}
        response = self.client.post(url, request)
        self.assertEqual(response.status_code, 200)

    # Save a dashboard, then load it by name.
    def test_dashboard_pass_valid(self):
        url = reverse('dashboard_save', args=['testdashboard'])
        request = {"state": '{}'}
        response = self.client.post(url, request)
        self.assertEqual(response.status_code, 200)
        url = reverse('dashboard', args=['testdashboard'])
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)

    # An unknown dashboard name still renders (no 404).
    def test_dashboard_pass_invalid_name(self):
        url = reverse('dashboard', args=['bogusdashboard'])
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)

    # Searching with no saved dashboards yields an empty result set.
    def test_dashboard_find_empty(self):
        url = reverse('dashboard_find')
        request = {"query": ""}
        response = self.client.get(url, request)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'{"dashboards": []}')
    # Saving an empty dashboard state succeeds.
    def test_dashboard_save_empty(self):
        url = reverse('dashboard_save', args=['testdashboard'])
        request = {"state": '{}'}
        response = self.client.post(url, request)
        self.assertEqual(response.status_code, 200)

    # Saving twice under the same name overwrites silently.
    def test_dashboard_save_overwrite(self):
        url = reverse('dashboard_save', args=['testdashboard'])
        request = {"state": '{}'}
        response = self.client.post(url, request)
        self.assertEqual(response.status_code, 200)
        response = self.client.post(url, request)
        self.assertEqual(response.status_code, 200)

    # A prefix query matches the saved dashboard.
    def test_dashboard_find_existing(self):
        url = reverse('dashboard_save', args=['testdashboard'])
        request = {"state": '{}'}
        response = self.client.post(url, request)
        self.assertEqual(response.status_code, 200)
        url = reverse('dashboard_find')
        request = {"query": "test"}
        response = self.client.get(url, request)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'{"dashboards": [{"name": "testdashboard"}]}')

    # A non-matching query returns an empty list.
    def test_dashboard_find_not_existing(self):
        url = reverse('dashboard_save', args=['testdashboard'])
        request = {"state": '{}'}
        response = self.client.post(url, request)
        self.assertEqual(response.status_code, 200)
        url = reverse('dashboard_find')
        request = {"query": "not here"}
        response = self.client.get(url, request)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'{"dashboards": []}')

    # Loading an unknown dashboard returns a JSON error payload, not a 404.
    def test_dashboard_load_not_existing(self):
        url = reverse('dashboard_load', args=['bogusdashboard'])
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'{"error": "Dashboard \'bogusdashboard\' does not exist. "}')

    # Loading a saved dashboard returns its state.
    def test_dashboard_load_existing(self):
        url = reverse('dashboard_save', args=['testdashboard'])
        request = {"state": '{}'}
        response = self.client.post(url, request)
        self.assertEqual(response.status_code, 200)
        url = reverse('dashboard_load', args=['testdashboard'])
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'{"state": {}}')
    # Deleting an unknown dashboard returns a JSON error payload.
    def test_dashboard_delete_nonexisting(self):
        url = reverse('dashboard_delete', args=['bogusdashboard'])
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'{"error": "Dashboard \'bogusdashboard\' does not exist. "}')

    def test_dashboard_delete_existing(self):
        # Create a dashboard entry
        url = reverse('dashboard_save', args=['testdashboard'])
        request = {"state": '{}'}
        response = self.client.post(url, request)
        self.assertEqual(response.status_code, 200)
        # Delete it
        url = reverse('dashboard_delete', args=['testdashboard'])
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'{"success": true}')
        # Confirm it was deleted
        url = reverse('dashboard_find')
        request = {"query": ""}
        response = self.client.get(url, request)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'{"dashboards": []}')

    # Temporary dashboards get sequential names and are excluded from find().
    def test_dashboard_create_temporary(self):
        url = reverse('dashboard_create_temporary')
        request = {"state": '{}'}
        response = self.client.post(url, request)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'{"name": "temporary-0"}')
        response = self.client.post(url, request)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'{"name": "temporary-1"}')
        url = reverse('dashboard_find')
        request = {"query": ""}
        response = self.client.get(url, request)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'{"dashboards": []}')
    # An unknown template name still renders the template view.
    def test_dashboard_template_pass_invalid(self):
        url = reverse('dashboard_template', args=['bogustemplate', 'testkey'])
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)

    # Save a template, then render it with a key.
    def test_dashboard_template_pass_valid(self):
        url = reverse('dashboard_save_template', args=['testtemplate', 'testkey'])
        request = copy.deepcopy(self.testtemplate)
        response = self.client.post(url, request)
        self.assertEqual(response.status_code, 200)
        url = reverse('dashboard_template', args=['testtemplate', 'testkey'])
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)

    # Searching templates with none saved yields an empty list.
    def test_dashboard_find_template_empty(self):
        url = reverse('dashboard_find_template')
        request = {"query": ""}
        response = self.client.get(url, request)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'{"templates": []}')

    def test_dashboard_save_template(self):
        url = reverse('dashboard_save_template', args=['testtemplate', 'testkey'])
        request = copy.deepcopy(self.testtemplate)
        response = self.client.post(url, request)
        self.assertEqual(response.status_code, 200)

    # Save again after it now exists
    def test_dashboard_save_template_overwrite(self):
        url = reverse('dashboard_save_template', args=['testtemplate', 'testkey'])
        request = copy.deepcopy(self.testtemplate)
        response = self.client.post(url, request)
        self.assertEqual(response.status_code, 200)
        url = reverse('dashboard_save_template', args=['testtemplate', 'testkey'])
        request = copy.deepcopy(self.testtemplate)
        response = self.client.post(url, request)
        self.assertEqual(response.status_code, 200)

    # A prefix query matches the saved template.
    def test_dashboard_find_template(self):
        url = reverse('dashboard_save_template', args=['testtemplate', 'testkey'])
        request = copy.deepcopy(self.testtemplate)
        response = self.client.post(url, request)
        self.assertEqual(response.status_code, 200)
        url = reverse('dashboard_find_template')
        request = {"query": "test"}
        response = self.client.get(url, request)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'{"templates": [{"name": "testtemplate"}]}')

    # A non-matching query returns an empty list.
    def test_dashboard_find_template_nonexistent(self):
        url = reverse('dashboard_save_template', args=['testtemplate', 'testkey'])
        request = copy.deepcopy(self.testtemplate)
        response = self.client.post(url, request)
        self.assertEqual(response.status_code, 200)
        url = reverse('dashboard_find_template')
        request = {"query": "not here"}
        response = self.client.get(url, request)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'{"templates": []}')
    # Loading an unknown template returns a JSON error payload.
    def test_dashboard_load_template_nonexistent(self):
        url = reverse('dashboard_save_template', args=['testtemplate', 'testkey'])
        request = copy.deepcopy(self.testtemplate)
        response = self.client.post(url, request)
        self.assertEqual(response.status_code, 200)
        url = reverse('dashboard_load_template', args=['bogustemplate', 'testkey'])
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'{"error": "Template \'bogustemplate\' does not exist. "}')

    # Loading a template substitutes the key for __VALUE__ in the stored state.
    def test_dashboard_load_template_existing(self):
        url = reverse('dashboard_save_template', args=['testtemplate', 'testkey'])
        request = copy.deepcopy(self.testtemplate)
        response = self.client.post(url, request)
        self.assertEqual(response.status_code, 200)
        url = reverse('dashboard_load_template', args=['testtemplate', 'testkey'])
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        data = json.loads(response.content)
        # Expected state: the template with __VALUE__ replaced by the key.
        graph_data = json.loads(self.testtemplate["state"].replace('__VALUE__', 'testkey'))
        self.assertEqual(data, json.loads('{"state": {"name": "testtemplate/testkey", "graphs": ' + json.dumps(graph_data['graphs']) + '}}'))

    def test_dashboard_delete_template_nonexisting(self):
        # Delete nonexistent template
        url = reverse('dashboard_delete_template', args=['bogustemplate'])
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'{"error": "Template \'bogustemplate\' does not exist. "}')

    # Save, delete, then confirm the template list is empty again.
    def test_dashboard_delete_template_existing(self):
        url = reverse('dashboard_save_template', args=['testtemplate', 'testkey'])
        request = copy.deepcopy(self.testtemplate)
        response = self.client.post(url, request)
        self.assertEqual(response.status_code, 200)
        url = reverse('dashboard_delete_template', args=['testtemplate'])
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'{"success": true}')
        url = reverse('dashboard_find_template')
        request = {"query": ""}
        response = self.client.get(url, request)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'{"templates": []}')
    # The help page renders.
    def test_dashboard_help(self):
        url = reverse('dashboard_help')
        request = {}
        response = self.client.get(url, request)
        self.assertEqual(response.status_code, 200)

    # Sending a graph by email (renders the graph for real).
    def test_dashboard_email(self):
        url = reverse('dashboard_email')
        request = {"sender": "noreply@localhost",
                   "recipients": "noreply@localhost",
                   "subject": "Test email",
                   "message": "Here is the test graph",
                   "graph_params": '{"target":["sumSeries(a.b.c.d)"],"title":"Test","width":"500","from":"-55minutes","until":"now","height":"400"}'}
        response = self.client.post(url, request)
        self.assertEqual(response.content, b'{"success": true}')

    # Same flow with renderView mocked out so no rendering happens.
    @mock.patch('graphite.dashboard.views.renderView')
    def test_dashboard_email_mock_renderView(self, rv):
        url = reverse('dashboard_email')
        request = {"sender": "noreply@localhost",
                   "recipients": "noreply@localhost",
                   "subject": "Test email",
                   "message": "Here is the test graph",
                   "graph_params": '{"target":["sumSeries(a.b.c.d)"],"title":"Test","width":"500","from":"-55minutes","until":"now","height":"400"}'}
        responseObject = HttpResponse()
        responseObject.content = ''
        rv.return_value = responseObject
        response = self.client.post(url, request)
        self.assertEqual(response.content, b'{"success": true}')
def test_dashboard_login_invalid_authenticate(self):
    """Bad credentials yield a failure payload with a reason."""
    credentials = {"username": "testuser", "password": "testpassword"}
    response = self.client.post(reverse('dashboard_login'), credentials)
    self.assertEqual(response.status_code, 200)
    expected = ('{"errors": {"reason": "Username and/or password invalid."}, '
                '"success": false, "text": {}, "permissions": []}')
    self.assertEqual(json.loads(response.content), json.loads(expected))
@mock.patch('graphite.dashboard.views.authenticate')
def test_dashboard_login_valid_authenticate(self, authenticate):
    """A valid, active user logs in and receives edit permissions."""
    authenticated_user = User.objects.create(email='testuser@test.com')
    authenticated_user.backend = ''
    authenticate.return_value = authenticated_user
    credentials = {"username": "testuser", "password": "testpassword"}
    response = self.client.post(reverse('dashboard_login'), credentials)
    self.assertEqual(response.status_code, 200)
    expected = ('{"permissions": ["change", "delete"], "success": true, '
                '"text": {}, "errors": {}}')
    self.assertEqual(json.loads(response.content), json.loads(expected))
@mock.patch('graphite.dashboard.views.authenticate')
def test_dashboard_login_valid_authenticate_not_active(self, authenticate):
    """A disabled account authenticates but is refused login."""
    inactive_user = User.objects.create(email='testuser@test.com')
    inactive_user.backend = ''
    inactive_user.is_active = False
    authenticate.return_value = inactive_user
    credentials = {"username": "testuser", "password": "testpassword"}
    response = self.client.post(reverse('dashboard_login'), credentials)
    self.assertEqual(response.status_code, 200)
    expected = ('{"permissions": [], "success": false, '
                '"errors": {"reason": "Account disabled."}, "text": {}}')
    self.assertEqual(json.loads(response.content), json.loads(expected))
def test_dashboard_logout(self):
    """Logging out always succeeds with an empty error set."""
    response = self.client.post(reverse('dashboard_logout'),
                                {"username": "testuser"})
    self.assertEqual(response.status_code, 200)
    self.assertEqual(json.loads(response.content),
                     json.loads('{"errors": {}, "success": true, "text": {}}'))
@mock.patch('graphite.dashboard.views.getPermissions')
def test_dashboard_save_no_permissions(self, gp):
    """Saving a dashboard without permissions is rejected."""
    gp.return_value = [None]
    response = self.client.post(
        reverse('dashboard_save', args=['testdashboard']), {"state": '{}'})
    self.assertEqual(response.status_code, 200)
    self.assertEqual(
        response.content,
        b'{"error": "Must be logged in with appropriate permissions to save"}')
@mock.patch('graphite.dashboard.views.getPermissions')
def test_dashboard_delete_no_permissions(self, gp):
    """Deleting a dashboard without permissions is rejected."""
    gp.return_value = [None]
    response = self.client.get(
        reverse('dashboard_delete', args=['testdashboard']))
    self.assertEqual(response.status_code, 200)
    self.assertEqual(
        response.content,
        b'{"error": "Must be logged in with appropriate permissions to delete"}')
@mock.patch('graphite.dashboard.views.getPermissions')
def test_dashboard_save_template_no_permissions(self, gp):
    """Saving a template without permissions is rejected."""
    gp.return_value = [None]
    payload = copy.deepcopy(self.testtemplate)
    response = self.client.post(
        reverse('dashboard_save_template', args=['testtemplate', 'testkey']),
        payload)
    self.assertEqual(response.status_code, 200)
    self.assertEqual(
        response.content,
        b'{"error": "Must be logged in with appropriate permissions to save the template"}')
@mock.patch('graphite.dashboard.views.getPermissions')
def test_dashboard_delete_template_no_permissions(self, gp):
    """Deleting a template without permissions is rejected."""
    gp.return_value = [None]
    response = self.client.get(
        reverse('dashboard_delete_template', args=['testtemplate']))
    self.assertEqual(response.status_code, 200)
    self.assertEqual(
        response.content,
        b'{"error": "Must be logged in with appropriate permissions to delete the template"}')
def test_getPermissions_no_user(self):
    """Anonymous access gets full permissions when nothing is required."""
    from graphite.dashboard.views import getPermissions
    # NOTE(review): these flags are mutated globally and never restored;
    # consider addCleanup to undo them.
    settings.DASHBOARD_REQUIRE_EDIT_GROUP = False
    settings.DASHBOARD_REQUIRE_PERMISSIONS = False
    settings.DASHBOARD_REQUIRE_AUTHENTICATION = False
    self.assertEqual(getPermissions(False), ['change', 'delete'])
def test_getPermissions_no_user_require_auth(self):
    """Anonymous access gets no permissions when authentication is required."""
    from graphite.dashboard.views import getPermissions
    settings.DASHBOARD_REQUIRE_EDIT_GROUP = False
    settings.DASHBOARD_REQUIRE_PERMISSIONS = False
    settings.DASHBOARD_REQUIRE_AUTHENTICATION = True
    self.assertEqual(getPermissions(False), [])
def test_getPermissions_valid_user(self):
    """An authenticated user gets full permissions when none are required."""
    from graphite.dashboard.views import getPermissions
    settings.DASHBOARD_REQUIRE_EDIT_GROUP = False
    settings.DASHBOARD_REQUIRE_PERMISSIONS = False
    settings.DASHBOARD_REQUIRE_AUTHENTICATION = True
    user = User.objects.create(email='testuser@test.com')
    user.backend = ''
    self.assertEqual(getPermissions(user), ['change', 'delete'])
def test_getPermissions_valid_user_require_perm(self):
    """A user without explicit permissions gets none when they are required."""
    from graphite.dashboard.views import getPermissions
    settings.DASHBOARD_REQUIRE_EDIT_GROUP = False
    settings.DASHBOARD_REQUIRE_PERMISSIONS = True
    settings.DASHBOARD_REQUIRE_AUTHENTICATION = True
    user = User.objects.create(email='testuser@test.com')
    user.backend = ''
    self.assertEqual(getPermissions(user), [])
def test_getPermissions_valid_user_edit_group(self):
    """A user outside the edit group gets no permissions when it is required."""
    from graphite.dashboard.views import getPermissions
    settings.DASHBOARD_REQUIRE_EDIT_GROUP = True
    settings.DASHBOARD_REQUIRE_PERMISSIONS = False
    settings.DASHBOARD_REQUIRE_AUTHENTICATION = True
    user = User.objects.create(email='testuser@test.com')
    user.backend = ''
    self.assertEqual(getPermissions(user), [])
def test_getPermissions_valid_user_require_perms_edit_group(self):
    """With both permissions and edit group required, a plain user gets none."""
    from graphite.dashboard.views import getPermissions
    settings.DASHBOARD_REQUIRE_EDIT_GROUP = True
    settings.DASHBOARD_REQUIRE_PERMISSIONS = True
    settings.DASHBOARD_REQUIRE_AUTHENTICATION = True
    user = User.objects.create(email='testuser@test.com')
    user.backend = ''
    self.assertEqual(getPermissions(user), [])
| apache-2.0 |
yeyanchao/calibre | src/calibre/gui2/catalog/catalog_tab_template_ui.py | 1 | 1116 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '/home/yc/code/calibre/calibre/src/calibre/gui2/catalog/catalog_tab_template.ui'
#
# Created: Thu Oct 25 16:54:55 2012
# by: PyQt4 UI code generator 4.8.5
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
# QString.fromUtf8 only exists under PyQt4's API v1; fall back to the
# identity function when it is missing (API v2 / Python 3 use plain unicode).
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    def _fromUtf8(s):
        return s
class Ui_Form(object):
    # Auto-generated by pyuic4 from catalog_tab_template.ui -- do not hand-edit
    # beyond comments; regeneration will discard changes (see file header).
    def setupUi(self, Form):
        """Build the widget tree: a caption label and a line edit on a 650x575 form."""
        Form.setObjectName(_fromUtf8("Form"))
        Form.resize(650, 575)
        Form.setWindowTitle(_("Form"))
        self.label = QtGui.QLabel(Form)
        self.label.setGeometry(QtCore.QRect(20, 12, 181, 17))
        self.label.setText(_("Tab template for catalog.ui"))
        self.label.setObjectName(_fromUtf8("label"))
        self.lineEdit = QtGui.QLineEdit(Form)
        self.lineEdit.setGeometry(QtCore.QRect(407, 10, 161, 22))
        self.lineEdit.setObjectName(_fromUtf8("lineEdit"))
        self.retranslateUi(Form)
        # Auto-connect on_<object>_<signal> slots by naming convention.
        QtCore.QMetaObject.connectSlotsByName(Form)
    def retranslateUi(self, Form):
        # All user-visible strings are already set via _() in setupUi.
        pass
| gpl-3.0 |
xbezdick/tempest | tempest/api/network/base_security_groups.py | 17 | 2072 | # Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.network import base
from tempest.common.utils import data_utils
class BaseSecGroupTest(base.BaseNetworkTest):
    """Shared helpers for security-group API tests."""

    def _create_security_group(self):
        """Create a randomly named security group and schedule its deletion.

        Returns a tuple of (create-response body, generated group name).
        """
        name = data_utils.rand_name('secgroup-')
        group_create_body = self.client.create_security_group(name=name)
        self.addCleanup(self._delete_security_group,
                        group_create_body['security_group']['id'])
        self.assertEqual(group_create_body['security_group']['name'], name)
        return group_create_body, name

    def _delete_security_group(self, secgroup_id):
        """Delete a security group and assert it is gone from the listing."""
        self.client.delete_security_group(secgroup_id)
        list_body = self.client.list_security_groups()
        # Idiomatic comprehension instead of the manual list()/append loop.
        secgroup_ids = [secgroup['id']
                        for secgroup in list_body['security_groups']]
        self.assertNotIn(secgroup_id, secgroup_ids)

    def _delete_security_group_rule(self, rule_id):
        """Delete a security group rule and assert it is gone from the listing."""
        self.client.delete_security_group_rule(rule_id)
        list_body = self.client.list_security_group_rules()
        rule_ids = [rule['id'] for rule in list_body['security_group_rules']]
        self.assertNotIn(rule_id, rule_ids)
| apache-2.0 |
wdv4758h/ZipPy | edu.uci.python.benchmark/src/benchmarks/pymaging/pymaging/exceptions.py | 1 | 1691 | # -*- coding: utf-8 -*-
# Copyright (c) 2012, Jonas Obrist
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Jonas Obrist nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL JONAS OBRIST BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
class PymagingException(Exception):
    """Root of the pymaging exception hierarchy."""
class FormatNotSupported(PymagingException):
    """Raised when an image format cannot be read or written."""
class InvalidColor(PymagingException):
    """Raised when a color value cannot be interpreted."""
| bsd-3-clause |
joeghodsi/interview-questions | cracking-the-coding-interview/ch8-object-oriented-design/8.2-call-center.py | 1 | 3065 | '''
Problem: design and implement a call center which has 3 types of employees: respondents, managers,
directors. A call is answered by the first available respondent. If the respondent can't handle
the call, they must escalate to a manager. If the manager can't handle it or is not free, they
must escalate to a director. Implement a method which assigns a call to the first available
employee
total time: 50min
'''
from collections import deque
from queue import Queue
class CallCenter:
    """Dispatches incoming calls to the first available employee.

    Respondents take calls first; a busy or unequipped respondent escalates
    to their manager, and a manager escalates to a director (this sketch
    assumes directors are always free).
    """

    def __init__(self):
        self.active_calls = []           # calls currently connected
        # BUG FIX: the original used queue.Queue but called enqueue/append/
        # pop/len on it, none of which queue.Queue provides; deque supports
        # the FIFO operations actually needed here.
        self.waiting_calls = deque()     # FIFO of calls awaiting a respondent
        self.respondents = []
        self.free_respondents = deque()  # FIFO of idle respondents
        self.managers = []
        self.directors = []

    def dispatch_call(self, call):
        """Dispatch a new call, queueing it when no respondent is free."""
        if not self.free_respondents:
            self.waiting_calls.append(call)
            return  # all respondents are currently busy, please wait
        self._dispatch_call(call)

    def escalate(self, call):
        """Escalate a call to the next employee level.

        Used both when the current employee is busy and when they are not
        equipped to handle the call.
        """
        current_employee = call.employee
        next_employee = current_employee.boss
        if not next_employee.free:
            next_employee = next_employee.boss  # simplification: director is free
        call.employee = next_employee
        next_employee.free = False
        current_employee.free = True
        if current_employee.role == Role.respondent:
            self.free_respondents.append(current_employee)

    def call_end_receiver(self, call):
        """Signal handler: a call has ended; free its employee."""
        self.active_calls.remove(call)
        call.employee.free = True

    def employee_free_receiver(self, employee):
        """Signal handler: an employee became free; serve the next waiting call."""
        self.free_respondents.append(employee)
        # Guard against an empty queue instead of popping unconditionally.
        if self.waiting_calls:
            self._dispatch_call(self.waiting_calls.popleft())

    def _dispatch_call(self, call):
        """Connect *call* to the longest-idle free respondent."""
        if call.employee:
            return  # the call is already dispatched
        free_respondent = self.free_respondents.popleft()
        call.employee = free_respondent
        free_respondent.free = False
        call.start()
        self.active_calls.append(call)
class Call:
    """A single phone call and its bookkeeping metadata."""

    def __init__(self):
        # Seconds the caller spent on hold before being connected.
        self.waited = 0
        # Seconds the call lasted once connected.
        self.call_length = 0
        # Caller-supplied metadata (credit card, phone number, etc.).
        self.caller_info = None
        # The employee handling this call (one-to-one), if any.
        self.employee = None

    def start(self):
        """Hook invoked when the call is connected; a no-op in this sketch."""
        pass
class Employee:
    """A call-center worker: respondent, manager, or director."""

    def __init__(self, role):
        self.info = None  # metadata: name, address, phone, etc.
        # Falsy role (including None) defaults to respondent.
        self.role = role or Role.respondent
        self.free = True  # whether they are free to take a call
        self.boss = None  # the person this employee reports to
class Role:
    """Enumeration of employee levels, ordered by escalation."""
    respondent = 0
    manager = 1
    director = 2
| unlicense |
ZobairAlijan/osf.io | framework/auth/__init__.py | 5 | 2204 | # -*- coding: utf-8 -*-
from datetime import datetime
from framework.sessions import session, create_session, Session
from modularodm import Q
from framework import bcrypt
from framework.auth import signals
from framework.auth.exceptions import DuplicateEmailError
from .core import User, Auth
from .core import get_user
# Names re-exported as the public surface of ``framework.auth``.
__all__ = [
    'get_display_name',
    'Auth',
    'User',
    'get_user',
    'check_password',
    'authenticate',
    'logout',
    'register_unconfirmed',
]
def get_display_name(username):
    """Return the username for the navbar, eliding the middle of long names."""
    if len(username) <= 40:
        return username
    # Keep the first 20 and last 15 characters, trimming stray whitespace.
    return '%s...%s' % (username[:20].strip(), username[-15:].strip())
# check_password(actual_pw_hash, given_password) -> Boolean
# Thin alias over bcrypt's hash comparison.
check_password = bcrypt.check_password_hash
def authenticate(user, access_token, response):
    """Record *user* as logged in on the session and return the response."""
    session_data = session.data if session._get_current_object() else {}
    session_data.update({
        'auth_user_username': user.username,
        'auth_user_id': user._primary_key,
        'auth_user_fullname': user.fullname,
        'auth_user_access_token': access_token,
    })
    user.date_last_login = datetime.utcnow()
    user.save()
    return create_session(response, data=session_data)
def logout():
    """Drop all auth keys from the current session and remove it from storage."""
    auth_keys = ('auth_user_username', 'auth_user_id',
                 'auth_user_fullname', 'auth_user_access_token')
    for key in auth_keys:
        # pop with a default tolerates keys that were never set,
        # matching the original try/del/except KeyError behavior.
        session.data.pop(key, None)
    Session.remove(Q('_id', 'eq', session._id))
    return True
def register_unconfirmed(username, password, fullname):
    """Create (or refresh) an unconfirmed user record.

    Raises DuplicateEmailError when a fully registered user already owns
    the email address.
    """
    user = get_user(email=username)
    if not user:
        user = User.create_unconfirmed(
            username=username,
            password=password,
            fullname=fullname,
        )
        user.save()
        signals.unconfirmed_user_created.send(user)
        return user
    if not user.is_registered:
        # User is in the db but never finished registration; refresh their
        # pending credentials and profile.
        user.add_unconfirmed_email(username)
        user.set_password(password)
        user.fullname = fullname
        user.update_guessed_names()
        user.save()
        return user
    raise DuplicateEmailError('User {0!r} already exists'.format(username))
iambibhas/django | django/test/client.py | 9 | 26065 | from __future__ import unicode_literals
import sys
import os
import re
import mimetypes
from copy import copy
from importlib import import_module
from io import BytesIO
from django.apps import apps
from django.conf import settings
from django.core import urlresolvers
from django.core.handlers.base import BaseHandler
from django.core.handlers.wsgi import WSGIRequest, ISO_8859_1, UTF_8
from django.core.signals import (request_started, request_finished,
got_request_exception)
from django.db import close_old_connections
from django.http import SimpleCookie, HttpRequest, QueryDict
from django.template import TemplateDoesNotExist
from django.test import signals
from django.utils.functional import curry, SimpleLazyObject
from django.utils.encoding import force_bytes, force_str, uri_to_iri
from django.utils.http import urlencode
from django.utils.itercompat import is_iterable
from django.utils import six
from django.utils.six.moves.urllib.parse import urlparse, urlsplit
from django.test.utils import ContextList
__all__ = ('Client', 'RedirectCycleError', 'RequestFactory', 'encode_file', 'encode_multipart')
BOUNDARY = 'BoUnDaRyStRiNg'
MULTIPART_CONTENT = 'multipart/form-data; boundary=%s' % BOUNDARY
CONTENT_TYPE_RE = re.compile('.*; charset=([\w\d-]+);?')
class RedirectCycleError(Exception):
    """Raised when the test client is asked to follow a redirect loop."""

    def __init__(self, message, last_response):
        super(RedirectCycleError, self).__init__(message)
        # Keep the final response and its redirect chain for inspection.
        self.last_response = last_response
        self.redirect_chain = last_response.redirect_chain
class FakePayload(object):
    """
    A wrapper around BytesIO that restricts what can be read since data from
    the network can't be seeked and cannot be read outside of its content
    length. This makes sure that views can't do anything under the test client
    that wouldn't work in Real Life.
    """
    def __init__(self, content=None):
        self.__content = BytesIO()
        self.__len = 0  # bytes written and not yet read
        self.read_started = False  # once True, further writes are forbidden
        if content is not None:
            self.write(content)

    def __len__(self):
        return self.__len

    def read(self, num_bytes=None):
        # The first read rewinds to the start of the buffer and locks out
        # any further writes.
        if not self.read_started:
            self.__content.seek(0)
            self.read_started = True
        if num_bytes is None:
            num_bytes = self.__len or 0
        assert self.__len >= num_bytes, "Cannot read more than the available bytes from the HTTP incoming data."
        content = self.__content.read(num_bytes)
        self.__len -= num_bytes
        return content

    def write(self, content):
        if self.read_started:
            raise ValueError("Unable to write a payload after he's been read")
        content = force_bytes(content)
        self.__content.write(content)
        self.__len += len(content)
def closing_iterator_wrapper(iterable, close):
    """Yield from *iterable*, then call close() exactly once when exhausted.

    close_old_connections is temporarily disconnected so that close() (which
    sends request_finished) does not tear down database connections that
    tests may still rely on.
    """
    try:
        for item in iterable:
            yield item
    finally:
        request_finished.disconnect(close_old_connections)
        close()  # will fire request_finished
        request_finished.connect(close_old_connections)
class ClientHandler(BaseHandler):
    """
    A HTTP Handler that can be used for testing purposes. Uses the WSGI
    interface to compose requests, but returns the raw HttpResponse object with
    the originating WSGIRequest attached to its ``wsgi_request`` attribute.
    """
    def __init__(self, enforce_csrf_checks=True, *args, **kwargs):
        # Default True; the test Client passes False to bypass CSRF checks.
        self.enforce_csrf_checks = enforce_csrf_checks
        super(ClientHandler, self).__init__(*args, **kwargs)

    def __call__(self, environ):
        # Set up middleware if needed. We couldn't do this earlier, because
        # settings weren't available.
        if self._request_middleware is None:
            self.load_middleware()

        # Send request_started without letting it close DB connections,
        # which would break tests running inside a transaction.
        request_started.disconnect(close_old_connections)
        request_started.send(sender=self.__class__, environ=environ)
        request_started.connect(close_old_connections)
        request = WSGIRequest(environ)
        # sneaky little hack so that we can easily get round
        # CsrfViewMiddleware. This makes life easier, and is probably
        # required for backwards compatibility with external tests against
        # admin views.
        request._dont_enforce_csrf_checks = not self.enforce_csrf_checks

        # Request goes through middleware.
        response = self.get_response(request)
        # Attach the originating request to the response so that it could be
        # later retrieved.
        response.wsgi_request = request

        # We're emulating a WSGI server; we must call the close method
        # on completion.
        if response.streaming:
            response.streaming_content = closing_iterator_wrapper(
                response.streaming_content, response.close)
        else:
            request_finished.disconnect(close_old_connections)
            response.close()  # will fire request_finished
            request_finished.connect(close_old_connections)

        return response
def store_rendered_templates(store, signal, sender, template, context, **kwargs):
    """
    Stores templates and contexts that are rendered.

    The context is copied so that it is an accurate representation at the time
    of rendering.
    """
    if 'templates' not in store:
        store['templates'] = []
    store['templates'].append(template)
    if 'context' not in store:
        store['context'] = ContextList()
    store['context'].append(copy(context))
def encode_multipart(boundary, data):
    """
    Encodes multipart POST data from a dictionary of form values.

    The key will be used as the form data name; the value will be transmitted
    as content. If the value is a file, the contents of the file will be sent
    as an application/octet-stream; otherwise, str(value) will be sent.
    """
    lines = []

    # Named lambdas replaced with def (idiom); behavior is unchanged.
    def to_bytes(s):
        return force_bytes(s, settings.DEFAULT_CHARSET)

    def is_file(thing):
        # Not by any means perfect, but good enough for our purposes.
        return hasattr(thing, "read") and callable(thing.read)

    def encode_form_value(key, value):
        # Shared encoding for a plain (non-file) form field; this block was
        # duplicated verbatim in the original.
        return [to_bytes(val) for val in [
            '--%s' % boundary,
            'Content-Disposition: form-data; name="%s"' % key,
            '',
            value,
        ]]

    # Each bit of the multipart form data could be either a form value or a
    # file, or a *list* of form values and/or files. Remember that HTTP field
    # names can be duplicated!
    for (key, value) in data.items():
        if is_file(value):
            lines.extend(encode_file(boundary, key, value))
        elif not isinstance(value, six.string_types) and is_iterable(value):
            for item in value:
                if is_file(item):
                    lines.extend(encode_file(boundary, key, item))
                else:
                    lines.extend(encode_form_value(key, item))
        else:
            lines.extend(encode_form_value(key, value))

    lines.extend([
        to_bytes('--%s--' % boundary),
        b'',
    ])
    return b'\r\n'.join(lines)
def encode_file(boundary, key, file):
    """Return the multipart body lines encoding *file* under form field *key*."""
    # Named lambda replaced with def (idiom); behavior is unchanged.
    def to_bytes(s):
        return force_bytes(s, settings.DEFAULT_CHARSET)

    filename = os.path.basename(file.name) if hasattr(file, 'name') else ''

    # Prefer an explicit content_type attribute, then guess from the
    # filename, and finally fall back to a generic binary type.
    if hasattr(file, 'content_type'):
        content_type = file.content_type
    elif filename:
        content_type = mimetypes.guess_type(filename)[0]
    else:
        content_type = None

    if content_type is None:
        content_type = 'application/octet-stream'
    if not filename:
        filename = key
    return [
        to_bytes('--%s' % boundary),
        to_bytes('Content-Disposition: form-data; name="%s"; filename="%s"'
                 % (key, filename)),
        to_bytes('Content-Type: %s' % content_type),
        b'',
        to_bytes(file.read())
    ]
class RequestFactory(object):
    """
    Class that lets you create mock Request objects for use in testing.

    Usage:

    rf = RequestFactory()
    get_request = rf.get('/hello/')
    post_request = rf.post('/submit/', {'foo': 'bar'})

    Once you have a request object you can pass it to any view function,
    just as if that view had been hooked up using a URLconf.
    """
    def __init__(self, **defaults):
        self.defaults = defaults  # extra WSGI environ entries for every request
        self.cookies = SimpleCookie()
        self.errors = BytesIO()   # destination for wsgi.errors output

    def _base_environ(self, **request):
        """
        The base environment for a request.
        """
        # This is a minimal valid WSGI environ dictionary, plus:
        # - HTTP_COOKIE: for cookie support,
        # - REMOTE_ADDR: often useful, see #8551.
        # See http://www.python.org/dev/peps/pep-3333/#environ-variables
        environ = {
            'HTTP_COOKIE': self.cookies.output(header='', sep='; '),
            'PATH_INFO': str('/'),
            'REMOTE_ADDR': str('127.0.0.1'),
            'REQUEST_METHOD': str('GET'),
            'SCRIPT_NAME': str(''),
            'SERVER_NAME': str('testserver'),
            'SERVER_PORT': str('80'),
            'SERVER_PROTOCOL': str('HTTP/1.1'),
            'wsgi.version': (1, 0),
            'wsgi.url_scheme': str('http'),
            'wsgi.input': FakePayload(b''),
            'wsgi.errors': self.errors,
            'wsgi.multiprocess': True,
            'wsgi.multithread': False,
            'wsgi.run_once': False,
        }
        environ.update(self.defaults)
        environ.update(request)
        return environ

    def request(self, **request):
        "Construct a generic request object."
        return WSGIRequest(self._base_environ(**request))

    def _encode_data(self, data, content_type):
        # Multipart data gets the fixed test boundary; anything else is
        # encoded with the charset from the Content-Type (or the default).
        if content_type is MULTIPART_CONTENT:
            return encode_multipart(BOUNDARY, data)
        else:
            # Encode the content so that the byte representation is correct.
            match = CONTENT_TYPE_RE.match(content_type)
            if match:
                charset = match.group(1)
            else:
                charset = settings.DEFAULT_CHARSET
            return force_bytes(data, encoding=charset)

    def _get_path(self, parsed):
        path = force_str(parsed[2])
        # If there are parameters, add them
        if parsed[3]:
            path += str(";") + force_str(parsed[3])
        path = uri_to_iri(path).encode(UTF_8)
        # Under Python 3, non-ASCII values in the WSGI environ are arbitrarily
        # decoded with ISO-8859-1. We replicate this behavior here.
        # Refs comment in `get_bytes_from_wsgi()`.
        return path.decode(ISO_8859_1) if six.PY3 else path

    def get(self, path, data=None, secure=False, **extra):
        "Construct a GET request."
        data = {} if data is None else data
        r = {
            'QUERY_STRING': urlencode(data, doseq=True),
        }
        r.update(extra)
        return self.generic('GET', path, secure=secure, **r)

    def post(self, path, data=None, content_type=MULTIPART_CONTENT,
             secure=False, **extra):
        "Construct a POST request."
        data = {} if data is None else data
        post_data = self._encode_data(data, content_type)
        return self.generic('POST', path, post_data, content_type,
                            secure=secure, **extra)

    def head(self, path, data=None, secure=False, **extra):
        "Construct a HEAD request."
        data = {} if data is None else data
        r = {
            'QUERY_STRING': urlencode(data, doseq=True),
        }
        r.update(extra)
        return self.generic('HEAD', path, secure=secure, **r)

    def trace(self, path, secure=False, **extra):
        "Construct a TRACE request."
        return self.generic('TRACE', path, secure=secure, **extra)

    def options(self, path, data='', content_type='application/octet-stream',
                secure=False, **extra):
        "Construct an OPTIONS request."
        return self.generic('OPTIONS', path, data, content_type,
                            secure=secure, **extra)

    def put(self, path, data='', content_type='application/octet-stream',
            secure=False, **extra):
        "Construct a PUT request."
        return self.generic('PUT', path, data, content_type,
                            secure=secure, **extra)

    def patch(self, path, data='', content_type='application/octet-stream',
              secure=False, **extra):
        "Construct a PATCH request."
        return self.generic('PATCH', path, data, content_type,
                            secure=secure, **extra)

    def delete(self, path, data='', content_type='application/octet-stream',
               secure=False, **extra):
        "Construct a DELETE request."
        return self.generic('DELETE', path, data, content_type,
                            secure=secure, **extra)

    def generic(self, method, path, data='',
                content_type='application/octet-stream', secure=False,
                **extra):
        """Constructs an arbitrary HTTP request."""
        parsed = urlparse(path)
        data = force_bytes(data, settings.DEFAULT_CHARSET)
        r = {
            'PATH_INFO': self._get_path(parsed),
            'REQUEST_METHOD': str(method),
            'SERVER_PORT': str('443') if secure else str('80'),
            'wsgi.url_scheme': str('https') if secure else str('http'),
        }
        if data:
            r.update({
                'CONTENT_LENGTH': len(data),
                'CONTENT_TYPE': str(content_type),
                'wsgi.input': FakePayload(data),
            })
        r.update(extra)
        # If QUERY_STRING is absent or empty, we want to extract it from the URL.
        if not r.get('QUERY_STRING'):
            query_string = force_bytes(parsed[4])
            # WSGI requires latin-1 encoded strings. See get_path_info().
            if six.PY3:
                query_string = query_string.decode('iso-8859-1')
            r['QUERY_STRING'] = query_string
        return self.request(**r)
class Client(RequestFactory):
"""
A class that can act as a client for testing purposes.
It allows the user to compose GET and POST requests, and
obtain the response that the server gave to those requests.
The server Response objects are annotated with the details
of the contexts and templates that were rendered during the
process of serving the request.
Client objects are stateful - they will retain cookie (and
thus session) details for the lifetime of the Client instance.
This is not intended as a replacement for Twill/Selenium or
the like - it is here to allow testing against the
contexts and templates produced by a view, rather than the
HTML rendered to the end-user.
"""
def __init__(self, enforce_csrf_checks=False, **defaults):
    # Unlike ClientHandler, CSRF checks default to off for the test client.
    super(Client, self).__init__(**defaults)
    self.handler = ClientHandler(enforce_csrf_checks)
    self.exc_info = None  # sys.exc_info() captured from a failing view, if any
def store_exc_info(self, **kwargs):
    """
    Stores exceptions when they are generated by a view.
    """
    # Connected to got_request_exception; request() re-raises this later.
    self.exc_info = sys.exc_info()
def _session(self):
    """
    Obtains the current session variables.
    """
    if apps.is_installed('django.contrib.sessions'):
        engine = import_module(settings.SESSION_ENGINE)
        cookie = self.cookies.get(settings.SESSION_COOKIE_NAME, None)
        if cookie:
            # Resume the session referenced by the stored cookie.
            return engine.SessionStore(cookie.value)
        else:
            # No session yet: create one and remember its key as a cookie.
            s = engine.SessionStore()
            s.save()
            self.cookies[settings.SESSION_COOKIE_NAME] = s.session_key
            return s
    # Sessions app not installed: behave like an empty session.
    return {}
# Expose the current session as a read-only property.
session = property(_session)
def request(self, **request):
    """
    The master request method. Composes the environment dictionary
    and passes to the handler, returning the result of the handler.
    Assumes defaults for the query environment, which can be overridden
    using the arguments to the request.
    """
    environ = self._base_environ(**request)

    # Curry a data dictionary into an instance of the template renderer
    # callback function.
    data = {}
    on_template_render = curry(store_rendered_templates, data)
    signal_uid = "template-render-%s" % id(request)
    signals.template_rendered.connect(on_template_render, dispatch_uid=signal_uid)
    # Capture exceptions created by the handler.
    got_request_exception.connect(self.store_exc_info, dispatch_uid="request-exception")
    try:
        try:
            response = self.handler(environ)
        except TemplateDoesNotExist as e:
            # If the view raises an exception, Django will attempt to show
            # the 500.html template. If that template is not available,
            # we should ignore the error in favor of re-raising the
            # underlying exception that caused the 500 error. Any other
            # template found to be missing during view error handling
            # should be reported as-is.
            if e.args != ('500.html',):
                raise

        # Look for a signalled exception, clear the current context
        # exception data, then re-raise the signalled exception.
        # Also make sure that the signalled exception is cleared from
        # the local cache!
        if self.exc_info:
            exc_info = self.exc_info
            self.exc_info = None
            six.reraise(*exc_info)

        # Save the client and request that stimulated the response.
        response.client = self
        response.request = request

        # Add any rendered template detail to the response.
        response.templates = data.get("templates", [])
        response.context = data.get("context")

        # Attach the ResolverMatch instance to the response
        response.resolver_match = SimpleLazyObject(
            lambda: urlresolvers.resolve(request['PATH_INFO']))

        # Flatten a single context. Not really necessary anymore thanks to
        # the __getattr__ flattening in ContextList, but has some edge-case
        # backwards-compatibility implications.
        if response.context and len(response.context) == 1:
            response.context = response.context[0]

        # Update persistent cookie data.
        if response.cookies:
            self.cookies.update(response.cookies)

        return response
    finally:
        # Always detach the per-request signal handlers.
        signals.template_rendered.disconnect(dispatch_uid=signal_uid)
        got_request_exception.disconnect(dispatch_uid="request-exception")
def get(self, path, data=None, follow=False, secure=False, **extra):
    """
    Requests a response from the server using GET.
    """
    response = super(Client, self).get(path, data=data, secure=secure, **extra)
    if not follow:
        return response
    return self._handle_redirects(response, **extra)
def post(self, path, data=None, content_type=MULTIPART_CONTENT,
         follow=False, secure=False, **extra):
    """
    Requests a response from the server using POST.
    """
    response = super(Client, self).post(
        path, data=data, content_type=content_type, secure=secure, **extra)
    if not follow:
        return response
    return self._handle_redirects(response, **extra)
def head(self, path, data=None, follow=False, secure=False, **extra):
    """
    Request a response from the server using HEAD.
    """
    response = super(Client, self).head(path, data=data, secure=secure, **extra)
    if not follow:
        return response
    return self._handle_redirects(response, **extra)
def options(self, path, data='', content_type='application/octet-stream',
            follow=False, secure=False, **extra):
    """
    Request a response from the server using OPTIONS.
    """
    response = super(Client, self).options(
        path, data=data, content_type=content_type, secure=secure, **extra)
    if not follow:
        return response
    return self._handle_redirects(response, **extra)
def put(self, path, data='', content_type='application/octet-stream',
        follow=False, secure=False, **extra):
    """Send a resource to the server using PUT.

    ``data`` is sent as the raw request body with the given
    ``content_type``. Redirects are followed when ``follow`` is true.
    """
    resp = super(Client, self).put(
        path, data=data, content_type=content_type, secure=secure, **extra)
    return self._handle_redirects(resp, **extra) if follow else resp
def patch(self, path, data='', content_type='application/octet-stream',
          follow=False, secure=False, **extra):
    """Send a resource to the server using PATCH.

    ``data`` is sent as the raw request body with the given
    ``content_type``. Redirects are followed when ``follow`` is true.
    """
    resp = super(Client, self).patch(
        path, data=data, content_type=content_type, secure=secure, **extra)
    return self._handle_redirects(resp, **extra) if follow else resp
def delete(self, path, data='', content_type='application/octet-stream',
           follow=False, secure=False, **extra):
    """Send a DELETE request to the server.

    ``data`` is sent as the raw request body with the given
    ``content_type``. Redirects are followed when ``follow`` is true.
    """
    resp = super(Client, self).delete(
        path, data=data, content_type=content_type, secure=secure, **extra)
    return self._handle_redirects(resp, **extra) if follow else resp
def trace(self, path, data='', follow=False, secure=False, **extra):
    """Send a TRACE request to the server.

    Redirects are followed when ``follow`` is true.
    """
    resp = super(Client, self).trace(path, data=data, secure=secure, **extra)
    return self._handle_redirects(resp, **extra) if follow else resp
def login(self, **credentials):
    """
    Sets the Factory to appear as if it has successfully logged into a site.

    Returns True if login is possible; False if the provided credentials
    are incorrect, or the user is inactive, or if the sessions framework is
    not available.

    On success a real session is created via the configured session
    engine and its key is stored in ``self.cookies`` so that subsequent
    requests made through this client are authenticated.
    """
    # Imported lazily — presumably to avoid triggering app loading /
    # circular imports at module import time; confirm before moving.
    from django.contrib.auth import authenticate, login
    user = authenticate(**credentials)
    if (user and user.is_active and
            apps.is_installed('django.contrib.sessions')):
        engine = import_module(settings.SESSION_ENGINE)
        # Create a fake request to store login details.
        request = HttpRequest()
        if self.session:
            # Reuse the client's existing session if one already exists.
            request.session = self.session
        else:
            request.session = engine.SessionStore()
        login(request, user)
        # Save the session values.
        request.session.save()
        # Set the cookie to represent the session.
        session_cookie = settings.SESSION_COOKIE_NAME
        self.cookies[session_cookie] = request.session.session_key
        # Mirror the cookie attributes the real session middleware would
        # set, so the stored cookie matches settings-driven behaviour.
        cookie_data = {
            'max-age': None,
            'path': '/',
            'domain': settings.SESSION_COOKIE_DOMAIN,
            'secure': settings.SESSION_COOKIE_SECURE or None,
            'expires': None,
        }
        self.cookies[session_cookie].update(cookie_data)
        return True
    else:
        return False
def logout(self):
    """
    Removes the authenticated user's cookies and session object.

    Causes the authenticated user to be logged out.

    Runs the real ``django.contrib.auth.logout`` against a fake request
    carrying the client's current session, then clears all cookies held
    by this client.
    """
    # Lazy import, matching login() above.
    from django.contrib.auth import get_user, logout
    request = HttpRequest()
    engine = import_module(settings.SESSION_ENGINE)
    if self.session:
        request.session = self.session
        # logout() inspects request.user, so populate it from the session.
        request.user = get_user(request)
    else:
        # No active session: give logout() an empty store to flush.
        request.session = engine.SessionStore()
    logout(request)
    # Drop every cookie (session key included) held by this client.
    self.cookies = SimpleCookie()
def _handle_redirects(self, response, **extra):
    """Follow any redirects by requesting responses from the server using GET.

    Each hop is appended to ``response.redirect_chain`` as a
    ``(url, status_code)`` tuple. Raises ``RedirectCycleError`` when a
    loop is detected or when more than 20 redirects are followed.
    """
    response.redirect_chain = []
    while response.status_code in (301, 302, 303, 307):
        response_url = response.url
        redirect_chain = response.redirect_chain
        redirect_chain.append((response_url, response.status_code))
        url = urlsplit(response_url)
        # Propagate scheme/host/port from the redirect target into the
        # WSGI environ so the next request is made against that target.
        if url.scheme:
            extra['wsgi.url_scheme'] = url.scheme
        if url.hostname:
            extra['SERVER_NAME'] = url.hostname
        if url.port:
            extra['SERVER_PORT'] = str(url.port)
        # follow=False here: this loop itself performs the following.
        response = self.get(url.path, QueryDict(url.query), follow=False, **extra)
        # Carry the accumulated chain onto the newest response object.
        response.redirect_chain = redirect_chain
        if redirect_chain[-1] in redirect_chain[:-1]:
            # Check that we're not redirecting to somewhere we've already
            # been to, to prevent loops.
            raise RedirectCycleError("Redirect loop detected.", last_response=response)
        if len(redirect_chain) > 20:
            # Such a lengthy chain likely also means a loop, but one with
            # a growing path, changing view, or changing query argument;
            # 20 is the value of "network.http.redirection-limit" from Firefox.
            raise RedirectCycleError("Too many redirects.", last_response=response)
    return response
| bsd-3-clause |
daemonmaker/pylearn2 | pylearn2/expr/stochastic_pool.py | 44 | 6909 | """
An implementation of stochastic max-pooling, based on
Stochastic Pooling for Regularization of Deep Convolutional Neural Networks
Matthew D. Zeiler, Rob Fergus, ICLR 2013
"""
__authors__ = "Mehdi Mirza"
__copyright__ = "Copyright 2010-2012, Universite de Montreal"
__credits__ = ["Mehdi Mirza", "Ian Goodfellow"]
__license__ = "3-clause BSD"
__maintainer__ = "Mehdi Mirza"
__email__ = "mirzamom@iro"
import numpy
from theano.compat.six.moves import xrange
import theano
from theano import tensor
from theano.gof.op import get_debug_values
from pylearn2.utils.rng import make_theano_rng
from pylearn2.utils import contains_inf
def stochastic_max_pool_bc01(bc01, pool_shape, pool_stride, image_shape, rng = None):
    """
    Stochastic max pooling for training as defined in:

    Stochastic Pooling for Regularization of Deep Convolutional Neural Networks
    Matthew D. Zeiler, Rob Fergus

    For each pooling window, one activation is sampled with probability
    proportional to its value (multinomial over the normalized window)
    and returned as the pooled output.

    Parameters
    ----------
    bc01 : theano 4-tensor
        in format (batch size, channels, rows, cols),
        IMPORTANT: All values should be positive
    pool_shape : tuple
        shape of the pool region (rows, cols)
    pool_stride : tuple
        strides between pooling regions (row stride, col stride)
    image_shape : tuple
        (rows, cols) of the input image; passed so some arithmetic can be
        done in Python rather than in the theano graph
    rng : theano random stream, optional
        source of randomness for the multinomial draw; a default stream
        is created when None

    Returns
    -------
    theano 4-tensor
        pooled output of shape (batch, channels, pooled rows, pooled cols),
        cast to ``theano.config.floatX``
    """
    r, c = image_shape
    pr, pc = pool_shape
    rs, cs = pool_stride
    # Symbolic (graph-time) batch/channel sizes.
    batch = bc01.shape[0]
    channel = bc01.shape[1]
    rng = make_theano_rng(rng, 2022, which_method='multinomial')
    # Compute index in pooled space of last needed pool
    # (needed = each input pixel must appear in at least one pool)
    def last_pool(im_shp, p_shp, p_strd):
        rval = int(numpy.ceil(float(im_shp - p_shp) / p_strd))
        assert p_strd * rval + p_shp >= im_shp
        assert p_strd * (rval - 1) + p_shp < im_shp
        return rval
    # Compute starting row of the last pool
    last_pool_r = last_pool(image_shape[0] ,pool_shape[0], pool_stride[0]) * pool_stride[0]
    # Compute number of rows needed in image for all indexes to work out
    required_r = last_pool_r + pr
    last_pool_c = last_pool(image_shape[1] ,pool_shape[1], pool_stride[1]) * pool_stride[1]
    required_c = last_pool_c + pc
    # final result shape
    res_r = int(numpy.floor(last_pool_r/rs)) + 1
    res_c = int(numpy.floor(last_pool_c/cs)) + 1
    # Debug-mode sanity checks on any concrete values attached to bc01.
    for bc01v in get_debug_values(bc01):
        assert not contains_inf(bc01v)
        assert bc01v.shape[2] == image_shape[0]
        assert bc01v.shape[3] == image_shape[1]
    # padding: zero-pad the image up to (required_r, required_c) so every
    # pixel falls in at least one pooling window
    padded = tensor.alloc(0.0, batch, channel, required_r, required_c)
    name = bc01.name
    if name is None:
        name = 'anon_bc01'
    bc01 = tensor.set_subtensor(padded[:,:, 0:r, 0:c], bc01)
    bc01.name = 'zero_padded_' + name
    # unraveling: gather each pooling window's pr x pc cells into the two
    # trailing axes of a 6-d tensor
    window = tensor.alloc(0.0, batch, channel, res_r, res_c, pr, pc)
    window.name = 'unravlled_winodows_' + name
    for row_within_pool in xrange(pool_shape[0]):
        row_stop = last_pool_r + row_within_pool + 1
        for col_within_pool in xrange(pool_shape[1]):
            col_stop = last_pool_c + col_within_pool + 1
            win_cell = bc01[:,:,row_within_pool:row_stop:rs, col_within_pool:col_stop:cs]
            window = tensor.set_subtensor(window[:,:,:,:, row_within_pool, col_within_pool], win_cell)
    # find the norm (per-window sum; all-zero windows get norm 1 to avoid
    # division by zero — their probabilities then stay 0)
    norm = window.sum(axis = [4, 5])
    norm = tensor.switch(tensor.eq(norm, 0.0), 1.0, norm)
    # NOTE: `norm` is reused here to hold the normalized window
    # (probabilities), not the sum anymore.
    norm = window / norm.dimshuffle(0, 1, 2, 3, 'x', 'x')
    # get prob: one-hot multinomial sample per window, flattened so each
    # row is one window's probability vector
    prob = rng.multinomial(pvals = norm.reshape((batch * channel * res_r * res_c, pr * pc)), dtype='float32')
    # select: the max over the window picks out the single sampled value
    res = (window * prob.reshape((batch, channel, res_r, res_c, pr, pc))).max(axis=5).max(axis=4)
    res.name = 'pooled_' + name
    return tensor.cast(res, theano.config.floatX)
def weighted_max_pool_bc01(bc01, pool_shape, pool_stride, image_shape, rng = None):
    """
    This implements test time probability weighted pooling defined in:

    Stochastic Pooling for Regularization of Deep Convolutional Neural Networks
    Matthew D. Zeiler, Rob Fergus

    Instead of sampling one activation per window (as done at training
    time), each window's output is the expectation: the sum of the
    activations weighted by their within-window probabilities.

    Parameters
    ----------
    bc01 : theano 4-tensor
        minibatch in format (batch size, channels, rows, cols),
        IMPORTANT: All values should be positive
    pool_shape : tuple
        shape of the pool region (rows, cols)
    pool_stride : tuple
        strides between pooling regions (row stride, col stride)
    image_shape : tuple
        (rows, cols) of the input image; passed so some arithmetic can be
        done in Python rather than in the theano graph
    rng : theano random stream, optional
        unused in the weighted (deterministic) computation; kept for
        signature parity with ``stochastic_max_pool_bc01``

    Returns
    -------
    theano 4-tensor
        pooled output of shape (batch, channels, pooled rows, pooled cols)
    """
    r, c = image_shape
    pr, pc = pool_shape
    rs, cs = pool_stride
    # Symbolic (graph-time) batch/channel sizes.
    batch = bc01.shape[0]
    channel = bc01.shape[1]
    rng = make_theano_rng(rng, 2022, which_method='multinomial')
    # Compute index in pooled space of last needed pool
    # (needed = each input pixel must appear in at least one pool)
    def last_pool(im_shp, p_shp, p_strd):
        rval = int(numpy.ceil(float(im_shp - p_shp) / p_strd))
        assert p_strd * rval + p_shp >= im_shp
        assert p_strd * (rval - 1) + p_shp < im_shp
        return rval
    # Compute starting row of the last pool
    last_pool_r = last_pool(image_shape[0] ,pool_shape[0], pool_stride[0]) * pool_stride[0]
    # Compute number of rows needed in image for all indexes to work out
    required_r = last_pool_r + pr
    last_pool_c = last_pool(image_shape[1] ,pool_shape[1], pool_stride[1]) * pool_stride[1]
    required_c = last_pool_c + pc
    # final result shape
    res_r = int(numpy.floor(last_pool_r/rs)) + 1
    res_c = int(numpy.floor(last_pool_c/cs)) + 1
    # Debug-mode sanity checks on any concrete values attached to bc01.
    for bc01v in get_debug_values(bc01):
        assert not contains_inf(bc01v)
        assert bc01v.shape[2] == image_shape[0]
        assert bc01v.shape[3] == image_shape[1]
    # padding: zero-pad so every pixel falls in at least one window
    padded = tensor.alloc(0.0, batch, channel, required_r, required_c)
    name = bc01.name
    if name is None:
        name = 'anon_bc01'
    bc01 = tensor.set_subtensor(padded[:,:, 0:r, 0:c], bc01)
    bc01.name = 'zero_padded_' + name
    # unraveling: gather each window's pr x pc cells into trailing axes
    window = tensor.alloc(0.0, batch, channel, res_r, res_c, pr, pc)
    window.name = 'unravlled_winodows_' + name
    for row_within_pool in xrange(pool_shape[0]):
        row_stop = last_pool_r + row_within_pool + 1
        for col_within_pool in xrange(pool_shape[1]):
            col_stop = last_pool_c + col_within_pool + 1
            win_cell = bc01[:,:,row_within_pool:row_stop:rs, col_within_pool:col_stop:cs]
            window = tensor.set_subtensor(window[:,:,:,:, row_within_pool, col_within_pool], win_cell)
    # find the norm (per-window sum; all-zero windows get norm 1 to avoid
    # division by zero)
    norm = window.sum(axis = [4, 5])
    norm = tensor.switch(tensor.eq(norm, 0.0), 1.0, norm)
    # NOTE: `norm` is reused here to hold the normalized window
    # (probabilities), not the sum anymore.
    norm = window / norm.dimshuffle(0, 1, 2, 3, 'x', 'x')
    # average: expectation = sum of activations weighted by probabilities
    res = (window * norm).sum(axis=[4,5])
    res.name = 'pooled_' + name
    return res.reshape((batch, channel, res_r, res_c))
| bsd-3-clause |
KMUX-Project/KMUX | kmux/modules/root/action/root.py | 1 | 3287 | #!/usr/bin/python3
'''
KMUX - a free and open source small business server.
Copyright (C) 2015, Julian Thomé <julian.thome.de@gmail.com>
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
'''
import sys
from util.json import Json
import os
import jinja2
import json
import glob
class Root():
    """Base class for a KMUX module.

    A module lives under ``modules/<name>`` and carries JSON config
    templates (``ini.json``, ``dep.json``) plus Jinja2 file templates
    under ``templates/``. This class resolves those paths, assigns each
    module a unique ordering number, and renders configs/templates.
    """

    # Class-wide counter used to give each module a unique, increasing
    # number; the first instantiated module receives 10.
    number = 10

    def __init__(self, name):
        """Resolve the module's paths and register its unique number.

        :param name: module directory name under ``modules/``
        """
        self.dir = 'modules/' + name
        self.inifile = self.dir + '/config/ini.json'
        self.depfile = self.dir + '/config/dep.json'
        self.genfile = self.dir + '/templates/gen.kmux'
        self.instfile = self.dir + '/templates/inst.kmux'
        # Take the next free number and advance the shared counter.
        self.number = Root.number
        Root.number = Root.number + 1
        # Basic template variables every module config can rely on.
        self.basicconf = {}
        self.basicconf['nr'] = self.number
        self.basicconf['name'] = name
        self.inidict = {}

    def __renderConfig(self, tfile, vars):
        """Render the Jinja2 template *tfile* with *vars* and parse the
        result as JSON."""
        temploader = jinja2.FileSystemLoader(".")
        env = jinja2.Environment(loader=temploader)
        template = env.get_template(tfile)
        return json.loads(template.render(vars))

    def __renderTemplates(self, tfiles, vars, outpath):
        """Render each template in *tfiles* with *vars* into *outpath*.

        The directory structure of the templates is recreated below
        *outpath*; the ``.tmpl`` suffix is stripped from output names.
        """
        if not tfiles:
            return
        if not os.path.isdir(outpath):
            print("Could not find " + outpath)
            return
        env = jinja2.Environment(loader=jinja2.FileSystemLoader("."))
        for tfile in tfiles:
            out = env.get_template(tfile).render(vars)
            outdir = os.path.abspath(outpath + "/" + os.path.dirname(tfile))
            if not os.path.exists(outdir):
                os.makedirs(outdir)
            # Drop the template extension (e.g. foo.conf.tmpl -> foo.conf).
            outfile = outdir + "/" + \
                os.path.splitext(os.path.basename(tfile))[0]
            print("write to " + outfile)
            # 'with' guarantees the handle is closed even on write errors
            # (the original leaked the handle on failure).
            with open(outfile, "w") as ofile:
                ofile.write(out)

    def __findTemplates(self):
        """Return all ``*.tmpl`` files below this module's templates dir."""
        return list(glob.iglob(self.dir + '/templates/**/*.tmpl'))

    def getNumber(self):
        """Return this module's unique ordering number."""
        return self.number

    def getDir(self):
        """Return this module's base directory (``modules/<name>``)."""
        return self.dir

    def genIni(self, globconf):
        """Render ``ini.json`` with the global config merged over the
        module's basic variables; returns the parsed JSON."""
        # take the parameters from the global configuration file
        self.basicconf.update(globconf)
        return self.__renderConfig(self.inifile, self.basicconf)

    def getIniFile(self):
        """Return the cached ini dict (empty until populated externally)."""
        return self.inidict

    def genTemplates(self, globconf, outpath):
        """Render every module template into *outpath* using *globconf*."""
        tfiles = self.__findTemplates()
        self.__renderTemplates(tfiles, globconf, outpath)

    def getDependencies(self):
        """Return the parsed contents of the module's ``dep.json``."""
        return Json.readJSONFile(self.depfile)

    def getDepFile(self):
        """Return the path of the module's ``dep.json``."""
        return self.depfile
| gpl-2.0 |
foglamp/FogLAMP | python/foglamp/services/core/api/service.py | 1 | 23354 | # -*- coding: utf-8 -*-
# FOGLAMP_BEGIN
# See: http://foglamp.readthedocs.io/
# FOGLAMP_END
import asyncio
import os
import datetime
import uuid
import platform
import json
from aiohttp import web
from typing import Dict, List
from foglamp.common import utils
from foglamp.common import logger
from foglamp.common.service_record import ServiceRecord
from foglamp.common.storage_client.payload_builder import PayloadBuilder
from foglamp.common.storage_client.exceptions import StorageServerError
from foglamp.common.configuration_manager import ConfigurationManager
from foglamp.services.core import server
from foglamp.services.core import connect
from foglamp.services.core.api import utils as apiutils
from foglamp.services.core.scheduler.entities import StartUpSchedule
from foglamp.services.core.service_registry.service_registry import ServiceRegistry
from foglamp.services.core.service_registry import exceptions as service_registry_exceptions
from foglamp.common.common import _FOGLAMP_ROOT
from foglamp.services.core.api.plugins import common
from foglamp.services.core.api.plugins import install
from foglamp.services.core.api.plugins.exceptions import *
from foglamp.common.audit_logger import AuditLogger
__author__ = "Mark Riddoch, Ashwin Gopalakrishnan, Amarendra K Sinha"
__copyright__ = "Copyright (c) 2018 OSIsoft, LLC"
__license__ = "Apache 2.0"
__version__ = "${VERSION}"
_help = """
-------------------------------------------------------------------------------
| GET POST | /foglamp/service |
| GET | /foglamp/service/available |
| GET | /foglamp/service/installed |
| PUT | /foglamp/service/{type}/{name}/update |
-------------------------------------------------------------------------------
"""
_logger = logger.setup()
#################################
# Service
#################################
def get_service_records():
    """Build a JSON-serializable snapshot of every service currently in
    the ServiceRegistry.

    :return: dict of the form ``{'services': [...]}``
    """
    records = [
        {
            'name': rec._name,
            'type': rec._type,
            'address': rec._address,
            'management_port': rec._management_port,
            'service_port': rec._port,
            'protocol': rec._protocol,
            'status': ServiceRecord.Status(int(rec._status)).name.lower()
        }
        for rec in ServiceRegistry.all()
    ]
    return {'services': records}
def get_service_installed() -> List:
    """List installed FogLAMP service executables.

    Scans ``$FOGLAMP_ROOT/services`` for files named
    ``foglamp.services.<name>`` and returns the ``<name>`` parts.
    """
    prefix = 'foglamp.services.'
    found = []
    for _root, _dirs, files in os.walk(_FOGLAMP_ROOT + "/" + "services"):
        found.extend(f.split(prefix)[-1] for f in files if f.startswith(prefix))
    return found
async def get_health(request):
    """Return the health of all registered services.

    :Example:
            curl -X GET http://localhost:8081/foglamp/service
    """
    return web.json_response(get_service_records())
async def delete_service(request):
    """ Delete an existing service

    Teardown order matters: disable the schedule (stops the service),
    yield to the event loop, delete configuration, drop the registry
    entry, and finally delete the schedule itself.

    :Example:
            curl -X DELETE http://localhost:8081/foglamp/service/<svc name>
    """
    try:
        svc = request.match_info.get('service_name', None)
        storage = connect.get_storage_async()
        result = await get_schedule(storage, svc)
        if result['count'] == 0:
            # NOTE(review): the error response is returned, not raised —
            # presumably aiohttp handles both; confirm this is intended.
            return web.HTTPNotFound(reason='{} service does not exist.'.format(svc))
        config_mgr = ConfigurationManager(storage)
        # In case of notification service, if notifications exists, then deletion is not allowed
        if 'notification' in result['rows'][0]['process_name']:
            notf_children = await config_mgr.get_category_child(category_name="Notifications")
            children = [x['key'] for x in notf_children]
            if len(notf_children) > 0:
                return web.HTTPBadRequest(reason='Notification service `{}` can not be deleted, as {} notification instances exist.'.format(svc, children))
        # First disable the schedule
        svc_schedule = result['rows'][0]
        sch_id = uuid.UUID(svc_schedule['id'])
        if svc_schedule['enabled'].lower() == 't':
            await server.Server.scheduler.disable_schedule(sch_id)
        # return control to event loop
        await asyncio.sleep(1)
        # Delete all configuration for the service name
        await config_mgr.delete_category_and_children_recursively(svc)
        # Remove from registry as it has been already shutdown via disable_schedule() and since
        # we intend to delete the schedule also, there is no use of its Service registry entry
        try:
            services = ServiceRegistry.get(name=svc)
            ServiceRegistry.remove_from_registry(services[0]._id)
        except service_registry_exceptions.DoesNotExist:
            # Service was not registered (e.g. never started) — nothing to remove.
            pass
        # Delete schedule
        await server.Server.scheduler.delete_schedule(sch_id)
    except Exception as ex:
        raise web.HTTPInternalServerError(reason=str(ex))
    else:
        return web.json_response({'result': 'Service {} deleted successfully.'.format(svc)})
async def add_service(request):
    """
    Create a new service to run a specific plugin

    Flow: validate payload -> optionally install a service package
    (``?action=install``) -> resolve the plugin (python, C, or hybrid) ->
    create configuration categories -> create the scheduled process and
    a startup schedule for the new service.

    :Example:
             curl -X POST http://localhost:8081/foglamp/service -d '{"name": "DHT 11", "plugin": "dht11", "type": "south", "enabled": true}'

             curl -sX POST http://localhost:8081/foglamp/service -d '{"name": "Sine", "plugin": "sinusoid", "type": "south", "enabled": true, "config": {"dataPointsPerSec": {"value": "10"}}}' | jq

             curl -X POST http://localhost:8081/foglamp/service -d '{"name": "NotificationServer", "type": "notification", "enabled": true}' | jq

             curl -sX POST http://localhost:8081/foglamp/service?action=install -d '{"format":"repository", "name": "foglamp-service-notification"}'

             curl -sX POST http://localhost:8081/foglamp/service?action=install -d '{"format":"repository", "name": "foglamp-service-notification", "version":"1.6.0"}'
    """
    try:
        data = await request.json()
        if not isinstance(data, dict):
            raise ValueError('Data payload must be a valid JSON')

        name = data.get('name', None)
        plugin = data.get('plugin', None)
        service_type = data.get('type', None)
        enabled = data.get('enabled', None)
        config = data.get('config', None)

        if name is None:
            raise web.HTTPBadRequest(reason='Missing name property in payload.')

        # Package-install path: ?action=install downloads/installs the
        # service package instead of creating a service instance.
        if 'action' in request.query and request.query['action'] != '':
            if request.query['action'] == 'install':
                file_format = data.get('format', None)
                if file_format is None:
                    raise ValueError("format param is required")
                if file_format not in ["repository"]:
                    raise ValueError("Invalid format. Must be 'repository'")
                version = data.get('version', None)
                if version:
                    delimiter = '.'
                    # Expect exactly X.Y.Z (two dots).
                    if str(version).count(delimiter) != 2:
                        raise ValueError('Service semantic version is incorrect; it should be like X.Y.Z')
                # KeyError below maps to HTTP 404 in the handler's except clause.
                services, log_path = await common.fetch_available_packages("service")
                if name not in services:
                    raise KeyError('{} service is not available for the given repository or already installed'.format(name))

                _platform = platform.platform()
                pkg_mgt = 'yum' if 'centos' in _platform or 'redhat' in _platform else 'apt'
                code, link, msg = await install.install_package_from_repo(name, pkg_mgt, version)
                if code != 0:
                    raise PackageError(link)
                message = "{} is successfully {}".format(name, msg)
                # Record the package install/update in the audit log.
                storage = connect.get_storage_async()
                audit = AuditLogger(storage)
                audit_detail = {'packageName': name}
                log_code = 'PKGUP' if msg == 'updated' else 'PKGIN'
                await audit.information(log_code, audit_detail)
                return web.json_response({'message': message, "link": link})
            else:
                raise web.HTTPBadRequest(reason='{} is not a valid action'.format(request.query['action']))

        if utils.check_reserved(name) is False:
            raise web.HTTPBadRequest(reason='Invalid name property in payload.')
        if utils.check_foglamp_reserved(name) is False:
            raise web.HTTPBadRequest(reason="'{}' is reserved for FogLAMP and can not be used as service name!".format(name))
        if service_type is None:
            raise web.HTTPBadRequest(reason='Missing type property in payload.')

        service_type = str(service_type).lower()
        if service_type == 'north':
            raise web.HTTPNotAcceptable(reason='north type is not supported for the time being.')
        if service_type not in ['south', 'notification']:
            raise web.HTTPBadRequest(reason='Only south and notification type are supported.')
        if plugin is None and service_type == 'south':
            raise web.HTTPBadRequest(reason='Missing plugin property for type south in payload.')
        if plugin and utils.check_reserved(plugin) is False:
            raise web.HTTPBadRequest(reason='Invalid plugin property in payload.')

        if enabled is not None:
            if enabled not in ['true', 'false', True, False]:
                raise web.HTTPBadRequest(reason='Only "true", "false", true, false'
                                                ' are allowed for value of enabled.')
        is_enabled = True if ((type(enabled) is str and enabled.lower() in ['true']) or (
            (type(enabled) is bool and enabled is True))) else False

        # Check if a valid plugin has been provided
        plugin_module_path, plugin_config, process_name, script = "", {}, "", ""
        if service_type == 'south':
            # "plugin_module_path" is fixed by design. It is MANDATORY to keep the plugin in the exactly similar named
            # folder, within the plugin_module_path.
            # if multiple plugin with same name are found, then python plugin import will be tried first
            plugin_module_path = "{}/python/foglamp/plugins/{}/{}".format(_FOGLAMP_ROOT, service_type, plugin)
            try:
                plugin_info = common.load_and_fetch_python_plugin_info(plugin_module_path, plugin, service_type)
                plugin_config = plugin_info['config']
                if not plugin_config:
                    _logger.exception("Plugin %s import problem from path %s", plugin, plugin_module_path)
                    raise web.HTTPNotFound(reason='Plugin "{}" import problem from path "{}".'.format(plugin, plugin_module_path))
                process_name = 'south_c'
                script = '["services/south_c"]'
            except FileNotFoundError as ex:
                # Checking for C-type plugins
                plugin_config = load_c_plugin(plugin, service_type)
                if not plugin_config:
                    _logger.exception("Plugin %s import problem from path %s. %s", plugin, plugin_module_path, str(ex))
                    raise web.HTTPNotFound(reason='Plugin "{}" import problem from path "{}".'.format(plugin, plugin_module_path))
                process_name = 'south_c'
                script = '["services/south_c"]'
            except TypeError as ex:
                _logger.exception(str(ex))
                raise web.HTTPBadRequest(reason=str(ex))
            except Exception as ex:
                _logger.exception("Failed to fetch plugin configuration. %s", str(ex))
                raise web.HTTPInternalServerError(reason='Failed to fetch plugin configuration')
        elif service_type == 'notification':
            # Notification services have no plugin; the process is fixed.
            process_name = 'notification_c'
            script = '["services/notification_c"]'

        storage = connect.get_storage_async()
        config_mgr = ConfigurationManager(storage)

        # Check whether category name already exists
        category_info = await config_mgr.get_category_all_items(category_name=name)
        if category_info is not None:
            raise web.HTTPBadRequest(reason="The '{}' category already exists".format(name))

        # Check that the schedule name is not already registered
        count = await check_schedules(storage, name)
        if count != 0:
            raise web.HTTPBadRequest(reason='A service with this name already exists.')

        # Check that the process name is not already registered
        count = await check_scheduled_processes(storage, process_name)
        if count == 0:
            # Now first create the scheduled process entry for the new service
            payload = PayloadBuilder().INSERT(name=process_name, script=script).payload()
            try:
                res = await storage.insert_into_tbl("scheduled_processes", payload)
            except StorageServerError as ex:
                _logger.exception("Failed to create scheduled process. %s", ex.error)
                raise web.HTTPInternalServerError(reason='Failed to create service.')
            except Exception as ex:
                _logger.exception("Failed to create scheduled process. %s", str(ex))
                raise web.HTTPInternalServerError(reason='Failed to create service.')

        # check that notification service is not already registered, right now notification service LIMIT to 1
        if service_type == 'notification':
            res = await check_notification_schedule(storage)
            for ps in res['rows']:
                if 'notification_c' in ps['process_name']:
                    raise web.HTTPBadRequest(reason='A Notification service schedule already exists.')
        elif service_type == 'south':
            try:
                # Create a configuration category from the configuration defined in the plugin
                category_desc = plugin_config['plugin']['description']
                await config_mgr.create_category(category_name=name,
                                                 category_description=category_desc,
                                                 category_value=plugin_config,
                                                 keep_original_items=True)
                # Create the parent category for all South services
                await config_mgr.create_category("South", {}, "South microservices", True)
                await config_mgr.create_child_category("South", [name])

                # If config is in POST data, then update the value for each config item
                if config is not None:
                    if not isinstance(config, dict):
                        raise ValueError('Config must be a JSON object')
                    for k, v in config.items():
                        await config_mgr.set_category_item_value_entry(name, k, v['value'])
            except Exception as ex:
                # Roll back the partially-created category tree before failing.
                await config_mgr.delete_category_and_children_recursively(name)
                _logger.exception("Failed to create plugin configuration. %s", str(ex))
                raise web.HTTPInternalServerError(reason='Failed to create plugin configuration. {}'.format(ex))

        # If all successful then lastly add a schedule to run the new service at startup
        try:
            schedule = StartUpSchedule()
            schedule.name = name
            schedule.process_name = process_name
            schedule.repeat = datetime.timedelta(0)
            schedule.exclusive = True
            # if "enabled" is supplied, it gets activated in save_schedule() via is_enabled flag
            schedule.enabled = False

            # Save schedule
            await server.Server.scheduler.save_schedule(schedule, is_enabled)
            schedule = await server.Server.scheduler.get_schedule_by_name(name)
        except StorageServerError as ex:
            # Roll back configuration if schedule creation fails.
            await config_mgr.delete_category_and_children_recursively(name)
            _logger.exception("Failed to create schedule. %s", ex.error)
            raise web.HTTPInternalServerError(reason='Failed to create service.')
        except Exception as ex:
            await config_mgr.delete_category_and_children_recursively(name)
            _logger.exception("Failed to create service. %s", str(ex))
            raise web.HTTPInternalServerError(reason='Failed to create service.')
    except PackageError as e:
        msg = "Service installation request failed"
        raise web.HTTPBadRequest(body=json.dumps({"message": msg, "link": str(e)}), reason=msg)
    except ValueError as e:
        raise web.HTTPBadRequest(reason=str(e))
    except KeyError as ex:
        raise web.HTTPNotFound(reason=str(ex))
    else:
        return web.json_response({'name': name, 'id': str(schedule.schedule_id)})
def load_c_plugin(plugin: str, service_type: str) -> Dict:
    """Fetch the configuration of a C (or C hybrid) plugin.

    First asks the C plugin utility for the plugin's info; if that fails
    (or the plugin type does not match), falls back to looking for a
    hybrid plugin.

    :param plugin: plugin name
    :param service_type: expected plugin type (e.g. 'south')
    :return: the plugin's config dict, or {} when the plugin cannot be
             resolved (callers treat a falsy result as "not found")
    """
    # Fix: initialize so the fallback path cannot leave plugin_config
    # unbound (previously an UnboundLocalError when the C lookup raised
    # and no hybrid plugin existed).
    plugin_config = {}
    try:
        plugin_info = apiutils.get_plugin_info(plugin, dir=service_type)
        if plugin_info['type'] != service_type:
            msg = "Plugin of {} type is not supported".format(plugin_info['type'])
            raise TypeError(msg)
        plugin_config = plugin_info['config']
    except Exception:
        # Now looking for hybrid plugins if exists
        plugin_info = common.load_and_fetch_c_hybrid_plugin_info(plugin, True)
        if plugin_info:
            plugin_config = plugin_info['config']
    return plugin_config
async def check_scheduled_processes(storage, process_name):
    """Return how many rows in scheduled_processes match *process_name*."""
    query = PayloadBuilder().SELECT("name").WHERE(['name', '=', process_name]).payload()
    res = await storage.query_tbl_with_payload('scheduled_processes', query)
    return res['count']
async def check_schedules(storage, schedule_name):
    """Return how many rows in schedules match *schedule_name*."""
    query = PayloadBuilder().SELECT("schedule_name").WHERE(['schedule_name', '=', schedule_name]).payload()
    res = await storage.query_tbl_with_payload('schedules', query)
    return res['count']
async def get_schedule(storage, schedule_name):
    """Fetch id and enabled flag of the schedule named *schedule_name*."""
    query = PayloadBuilder().SELECT(["id", "enabled"]).WHERE(['schedule_name', '=', schedule_name]).payload()
    return await storage.query_tbl_with_payload('schedules', query)
async def check_notification_schedule(storage):
    """Fetch the process_name of every schedule (callers scan for a
    notification entry)."""
    query = PayloadBuilder().SELECT("process_name").payload()
    return await storage.query_tbl_with_payload('schedules', query)
async def get_available(request: web.Request) -> web.Response:
    """ get list of a available services via package management i.e apt or yum

    :Example:
        curl -X GET http://localhost:8081/foglamp/service/available
    """
    try:
        services, log_path = await common.fetch_available_packages("service")
    except PackageError as err:
        msg = "Fetch available service package request failed"
        raise web.HTTPBadRequest(body=json.dumps({"message": msg, "link": str(err)}), reason=msg)
    except Exception as err:
        raise web.HTTPInternalServerError(reason=str(err))
    else:
        return web.json_response({"services": services, "link": log_path})
async def get_installed(request: web.Request) -> web.Response:
    """ get list of a installed services

    :Example:
        curl -X GET http://localhost:8081/foglamp/service/installed
    """
    return web.json_response({"services": get_service_installed()})
async def update_service(request: web.Request) -> web.Response:
    """ update service

    Disables the service's schedule (if enabled) and hands the actual
    package update to do_update() as a deferred background task; the
    HTTP response returns immediately.

    :Example:
        curl -sX PUT http://localhost:8081/foglamp/service/notification/notification/update
    """
    _type = request.match_info.get('type', None)
    name = request.match_info.get('name', None)
    try:
        _type = _type.lower()
        # Only the notification service supports in-place update here.
        if _type != 'notification':
            raise ValueError("Invalid service type. Must be 'notification'")

        # Check requested service name is installed or not
        installed_services = get_service_installed()
        if name not in installed_services:
            raise KeyError("{} service is not installed yet. Hence update is not possible.".format(name))

        storage_client = connect.get_storage_async()
        # TODO: process_name ends with "_c" suffix
        payload = PayloadBuilder().SELECT("id", "enabled", "schedule_name").WHERE(['process_name', '=', '{}_c'.format(
            _type)]).payload()
        result = await storage_client.query_tbl_with_payload('schedules', payload)
        sch_info = result['rows']
        sch_list = []
        # Stop the running service by disabling its schedule; remember it
        # so do_update() can re-enable it after the package update.
        if sch_info and sch_info[0]['enabled'] == 't':
            status, reason = await server.Server.scheduler.disable_schedule(uuid.UUID(sch_info[0]['id']))
            if status:
                _logger.warning("Schedule is disabled for {}, as {} service of type {} is being updated...".format(
                    sch_info[0]['schedule_name'], name, _type))
                # TODO: SCHCH Audit log entry
                sch_list.append(sch_info[0]['id'])

        # service update is running as a background task
        loop = request.loop
        # Stash the parameters on the request object for do_update().
        request._type = _type
        request._name = name
        request._sch_list = sch_list
        loop.call_later(1, do_update, request)
    except KeyError as ex:
        raise web.HTTPNotFound(reason=str(ex))
    except ValueError as ex:
        raise web.HTTPBadRequest(reason=str(ex))
    except Exception as ex:
        raise web.HTTPInternalServerError(reason=str(ex))

    return web.json_response({"message": "{} service update in process. Wait for few minutes to complete.".format(
        name)})
def do_update(request):
    """ Run the package-manager update for the requested service.

    Executed via loop.call_later from update_service(); reads the _name and
    _sch_list attributes that update_service() stashed on the request.
    Writes apt/yum output to a per-update log file and, on success, records
    a PKGUP audit entry and re-enables the previously disabled schedule(s).
    """
    _logger.info("{} service update started...".format(request._name))
    name = "foglamp-service-{}".format(request._name.lower())
    _platform = platform.platform()
    stdout_file_path = common.create_log_file("update", name)
    pkg_mgt = 'apt'
    cmd = "sudo {} -y update > {} 2>&1".format(pkg_mgt, stdout_file_path)
    if 'centos' in _platform or 'redhat' in _platform:
        pkg_mgt = 'yum'
        cmd = "sudo {} check-update > {} 2>&1".format(pkg_mgt, stdout_file_path)
    # NOTE(review): cmd is interpolated into a shell string; the name is
    # validated against the installed-services list upstream, but confirm it
    # can never contain shell metacharacters.
    ret_code = os.system(cmd)
    # sudo apt/yum -y install only happens when update is without any error
    if ret_code == 0:
        cmd = "sudo {} -y install {} >> {} 2>&1".format(pkg_mgt, name, stdout_file_path)
        ret_code = os.system(cmd)

    # relative log file link
    link = "log/" + stdout_file_path.split("/")[-1]
    if ret_code != 0:
        _logger.error("{} service update failed. Logs available at {}".format(request._name, link))
    else:
        _logger.info("{} service update completed. Logs available at {}".format(request._name, link))
        # PKGUP audit log entry
        storage_client = connect.get_storage_async()
        audit = AuditLogger(storage_client)
        audit_detail = {'packageName': name}
        asyncio.ensure_future(audit.information('PKGUP', audit_detail))

    # Restart the service which was disabled before update
    for s in request._sch_list:
        asyncio.ensure_future(server.Server.scheduler.enable_schedule(uuid.UUID(s)))
| apache-2.0 |
ruibarreira/linuxtrail | usr/lib/python2.7/dist-packages/cupshelpers/__init__.py | 4 | 1813 | ## system-config-printer
## Copyright (C) 2008, 2011 Red Hat, Inc.
## Authors:
## Tim Waugh <twaugh@redhat.com>
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
# Public API of the cupshelpers package; the names are re-exported from the
# submodule imports at the bottom of this file.
__all__ = ['set_debugprint_fn',
           'Device', 'Printer', 'activateNewPrinter',
           'copyPPDOptions', 'getDevices', 'getPrinters',
           'missingPackagesAndExecutables', 'missingExecutables',
           'parseDeviceID',
           'setPPDPageSize',
           'ppds',
           'openprinting']
def _no_debug (x):
return
# Currently installed debug hook; replaced by set_debugprint_fn().
_debugprint_fn = _no_debug

def _debugprint (x):
    # Forward the message to whichever hook is currently installed.
    _debugprint_fn (x)
def set_debugprint_fn (debugprint):
    """
    Install the debugging hook.

    @param debugprint: callable invoked with each debug message
    @type debugprint: fn (str) -> None
    """
    global _debugprint_fn
    _debugprint_fn = debugprint
from cupshelpers import \
Device, \
Printer, \
activateNewPrinter, \
copyPPDOptions, \
getDevices, \
getPrinters, \
missingPackagesAndExecutables, \
missingExecutables, \
parseDeviceID, \
setPPDPageSize
import ppds
import openprinting
| gpl-3.0 |
Nitaco/ansible | lib/ansible/modules/cloud/google/gcp_compute_instance_group_manager.py | 8 | 21997 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2017 Google
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# ----------------------------------------------------------------------------
#
# *** AUTO GENERATED CODE *** AUTO GENERATED CODE ***
#
# ----------------------------------------------------------------------------
#
# This file is automatically generated by Magic Modules and manual
# changes will be clobbered when the file is regenerated.
#
# Please read more about how to change this file at
# https://www.github.com/GoogleCloudPlatform/magic-modules
#
# ----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function
__metaclass__ = type
################################################################################
# Documentation
################################################################################
# Standard Ansible module metadata consumed by ansible-doc tooling.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ["preview"],
                    'supported_by': 'community'}
DOCUMENTATION = '''
---
module: gcp_compute_instance_group_manager
description:
- Creates a managed instance group using the information that you specify in the request.
After the group is created, it schedules an action to create instances in the group
using the specified instance template. This operation is marked as DONE when the
group is created even if the instances in the group have not yet been created. You
must separately verify the status of the individual instances.
- A managed instance group can have up to 1000 VM instances per group.
short_description: Creates a GCP InstanceGroupManager
version_added: 2.6
author: Google Inc. (@googlecloudplatform)
requirements:
- python >= 2.6
- requests >= 2.18.4
- google-auth >= 1.3.0
options:
state:
description:
- Whether the given object should exist in GCP
choices: ['present', 'absent']
default: 'present'
base_instance_name:
description:
- The base instance name to use for instances in this group. The value must be 1-58
characters long. Instances are named by appending a hyphen and a random four-character
string to the base instance name.
- The base instance name must comply with RFC1035.
required: true
description:
description:
- An optional description of this resource. Provide this property when you create
the resource.
required: false
instance_template:
description:
- A reference to InstanceTemplate resource.
required: true
name:
description:
- The name of the managed instance group. The name must be 1-63 characters long, and
comply with RFC1035.
required: true
named_ports:
description:
- Named ports configured for the Instance Groups complementary to this Instance Group
Manager.
required: false
suboptions:
name:
description:
- The name for this named port. The name must be 1-63 characters long, and comply
with RFC1035.
required: false
port:
description:
- The port number, which can be a value between 1 and 65535.
required: false
target_pools:
description:
- TargetPool resources to which instances in the instanceGroup field are added. The
target pools automatically apply to all of the instances in the managed instance
group.
required: false
target_size:
description:
- The target number of running instances for this managed instance group. Deleting
or abandoning instances reduces this number. Resizing the group changes this number.
required: false
zone:
description:
- A reference to Zone resource.
required: true
extends_documentation_fragment: gcp
'''
EXAMPLES = '''
- name: create a network
gcp_compute_network:
name: 'network-instancetemplate'
project: "{{ gcp_project }}"
auth_kind: "{{ gcp_cred_kind }}"
service_account_file: "{{ gcp_cred_file }}"
scopes:
- https://www.googleapis.com/auth/compute
state: present
register: network
- name: create a address
gcp_compute_address:
name: 'address-instancetemplate'
region: 'us-west1'
project: "{{ gcp_project }}"
auth_kind: "{{ gcp_cred_kind }}"
service_account_file: "{{ gcp_cred_file }}"
scopes:
- https://www.googleapis.com/auth/compute
state: present
register: address
- name: create a instance template
gcp_compute_instance_template:
name: "{{ resource_name }}"
properties:
disks:
- auto_delete: true
boot: true
initialize_params:
source_image: 'projects/ubuntu-os-cloud/global/images/family/ubuntu-1604-lts'
machine_type: n1-standard-1
network_interfaces:
- network: "{{ network }}"
access_configs:
- name: 'test-config'
type: 'ONE_TO_ONE_NAT'
nat_ip: "{{ address }}"
project: "{{ gcp_project }}"
auth_kind: "{{ gcp_cred_kind }}"
service_account_file: "{{ gcp_cred_file }}"
scopes:
- https://www.googleapis.com/auth/compute
state: present
register: instancetemplate
- name: create a instance group manager
gcp_compute_instance_group_manager:
name: testObject
base_instance_name: 'test1-child'
instance_template: "{{ instancetemplate }}"
target_size: 3
zone: 'us-west1-a'
project: testProject
auth_kind: service_account
service_account_file: /tmp/auth.pem
scopes:
- https://www.googleapis.com/auth/compute
state: present
'''
RETURN = '''
base_instance_name:
description:
- The base instance name to use for instances in this group. The value must be 1-58
characters long. Instances are named by appending a hyphen and a random four-character
string to the base instance name.
- The base instance name must comply with RFC1035.
returned: success
type: str
creation_timestamp:
description:
- The creation timestamp for this managed instance group in RFC3339 text format.
returned: success
type: str
current_actions:
description:
- The list of instance actions and the number of instances in this managed instance
group that are scheduled for each of those actions.
returned: success
type: complex
contains:
abandoning:
description:
- The total number of instances in the managed instance group that are scheduled to
be abandoned. Abandoning an instance removes it from the managed instance group
without deleting it.
returned: success
type: int
creating:
description:
- The number of instances in the managed instance group that are scheduled to be created
or are currently being created. If the group fails to create any of these instances,
it tries again until it creates the instance successfully.
- If you have disabled creation retries, this field will not be populated; instead,
the creatingWithoutRetries field will be populated.
returned: success
type: int
creating_without_retries:
description:
- The number of instances that the managed instance group will attempt to create.
The group attempts to create each instance only once. If the group fails to create
any of these instances, it decreases the group's targetSize value accordingly.
returned: success
type: int
deleting:
description:
- The number of instances in the managed instance group that are scheduled to be deleted
or are currently being deleted.
returned: success
type: int
none:
description:
- The number of instances in the managed instance group that are running and have
no scheduled actions.
returned: success
type: int
recreating:
description:
- The number of instances in the managed instance group that are scheduled to be recreated
or are currently being recreated.
- Recreating an instance deletes the existing root persistent disk and creates a new
disk from the image that is defined in the instance template.
returned: success
type: int
refreshing:
description:
- The number of instances in the managed instance group that are being reconfigured
with properties that do not require a restart or a recreate action. For example,
setting or removing target pools for the instance.
returned: success
type: int
restarting:
description:
- The number of instances in the managed instance group that are scheduled to be restarted
or are currently being restarted.
returned: success
type: int
description:
description:
- An optional description of this resource. Provide this property when you create
the resource.
returned: success
type: str
id:
description:
- A unique identifier for this resource.
returned: success
type: int
instance_group:
description:
- A reference to InstanceGroup resource.
returned: success
type: dict
instance_template:
description:
- A reference to InstanceTemplate resource.
returned: success
type: dict
name:
description:
- The name of the managed instance group. The name must be 1-63 characters long, and
comply with RFC1035.
returned: success
type: str
named_ports:
description:
- Named ports configured for the Instance Groups complementary to this Instance Group
Manager.
returned: success
type: complex
contains:
name:
description:
- The name for this named port. The name must be 1-63 characters long, and comply
with RFC1035.
returned: success
type: str
port:
description:
- The port number, which can be a value between 1 and 65535.
returned: success
type: int
region:
description:
- A reference to Region resource.
returned: success
type: str
target_pools:
description:
- TargetPool resources to which instances in the instanceGroup field are added. The
target pools automatically apply to all of the instances in the managed instance
group.
returned: success
type: list
target_size:
description:
- The target number of running instances for this managed instance group. Deleting
or abandoning instances reduces this number. Resizing the group changes this number.
returned: success
type: int
zone:
description:
- A reference to Zone resource.
returned: success
type: str
'''
################################################################################
# Imports
################################################################################
from ansible.module_utils.gcp_utils import navigate_hash, GcpSession, GcpModule, GcpRequest, remove_nones_from_dict, replace_resource_dict
import json
import re
import time
################################################################################
# Main
################################################################################
def main():
    """Main function: reconcile the GCP InstanceGroupManager with the
    desired state declared in the module parameters."""
    # Declarative argument spec mirroring the InstanceGroupManager resource.
    module = GcpModule(
        argument_spec=dict(
            state=dict(default='present', choices=['present', 'absent'], type='str'),
            base_instance_name=dict(required=True, type='str'),
            description=dict(type='str'),
            instance_template=dict(required=True, type='dict'),
            name=dict(required=True, type='str'),
            named_ports=dict(type='list', elements='dict', options=dict(
                name=dict(type='str'),
                port=dict(type='int')
            )),
            target_pools=dict(type='list', elements='dict'),
            target_size=dict(type='int'),
            zone=dict(required=True, type='str')
        )
    )

    state = module.params['state']
    kind = 'compute#instanceGroupManager'

    # Fetch current remote state (None if the resource does not exist).
    fetch = fetch_resource(module, self_link(module), kind)
    changed = False

    # Standard create / update / delete reconciliation.
    if fetch:
        if state == 'present':
            if is_different(module, fetch):
                fetch = update(module, self_link(module), kind)
                changed = True
        else:
            delete(module, self_link(module), kind)
            fetch = {}
            changed = True
    else:
        if state == 'present':
            fetch = create(module, collection(module), kind)
            changed = True
        else:
            fetch = {}

    fetch.update({'changed': changed})

    module.exit_json(**fetch)
def create(module, link, kind):
    """POST the desired resource and block until the async operation finishes."""
    session = GcpSession(module, 'compute')
    response = session.post(link, resource_to_request(module))
    return wait_for_operation(module, response)
def update(module, link, kind):
    """PUT the desired resource and block until the async operation finishes."""
    session = GcpSession(module, 'compute')
    response = session.put(link, resource_to_request(module))
    return wait_for_operation(module, response)
def delete(module, link, kind):
    """DELETE the resource and block until the async operation finishes."""
    session = GcpSession(module, 'compute')
    response = session.delete(link)
    return wait_for_operation(module, response)
def resource_to_request(module):
    """Build the API request body from module params, dropping empty fields."""
    params = module.params
    request = {
        u'kind': 'compute#instanceGroupManager',
        u'baseInstanceName': params.get('base_instance_name'),
        u'description': params.get('description'),
        u'instanceTemplate': replace_resource_dict(params.get(u'instance_template', {}), 'selfLink'),
        u'name': params.get('name'),
        u'namedPorts': InstGrouManaNamePortArray(params.get('named_ports', []), module).to_request(),
        u'targetPools': replace_resource_dict(params.get('target_pools', []), 'selfLink'),
        u'targetSize': params.get('target_size')
    }
    # Strip keys whose values are falsy (None, '', [], 0) before sending.
    return {key: value for key, value in request.items() if value}
def fetch_resource(module, link, kind):
    """GET the resource at link and return its parsed body (or None)."""
    session = GcpSession(module, 'compute')
    return return_if_object(module, session.get(link), kind)
def self_link(module):
    """Canonical URL of this instance group manager resource."""
    template = ("https://www.googleapis.com/compute/v1/projects/{project}"
                "/zones/{zone}/instanceGroupManagers/{name}")
    return template.format(**module.params)
def collection(module):
    """URL of the zonal instanceGroupManagers collection."""
    template = ("https://www.googleapis.com/compute/v1/projects/{project}"
                "/zones/{zone}/instanceGroupManagers")
    return template.format(**module.params)
def return_if_object(module, response, kind):
    """Parse a GCP REST response and return its JSON body.

    Returns None for 404 (absent) and 204 (no content).  Fails the module on
    HTTP errors, invalid JSON, an embedded error object, or a kind mismatch.
    """
    # If not found, return nothing.
    if response.status_code == 404:
        return None

    # If no content, return nothing.
    if response.status_code == 204:
        return None

    try:
        module.raise_for_status(response)
        result = response.json()
    except getattr(json.decoder, 'JSONDecodeError', ValueError) as inst:
        # getattr fallback keeps Python 2 compatibility, where json.decoder
        # has no JSONDecodeError and json raises ValueError instead.
        module.fail_json(msg="Invalid JSON response with error: %s" % inst)

    if navigate_hash(result, ['error', 'errors']):
        module.fail_json(msg=navigate_hash(result, ['error', 'errors']))
    if result['kind'] != kind:
        module.fail_json(msg="Incorrect result: {kind}".format(**result))

    return result
def is_different(module, response):
    """Return True when the desired request differs from the live resource."""
    request = resource_to_request(module)
    response = response_to_hash(module, response)

    # Compare only the keys both sides share, so output-only fields in the
    # response and unset fields in the request are ignored.
    response_vals = {key: val for key, val in response.items() if key in request}
    request_vals = {key: val for key, val in request.items() if key in response}

    return GcpRequest(request_vals) != GcpRequest(response_vals)
# Remove unnecessary properties from the response.
# This is for doing comparisons with Ansible's current parameters.
def response_to_hash(module, response):
    # 'description' is deliberately taken from module params rather than the
    # response; everything else is read from the live resource.
    return {
        u'baseInstanceName': response.get(u'baseInstanceName'),
        u'creationTimestamp': response.get(u'creationTimestamp'),
        u'currentActions': InstGrouManaCurrActi(response.get(u'currentActions', {}), module).from_response(),
        u'description': module.params.get('description'),
        u'id': response.get(u'id'),
        u'instanceGroup': response.get(u'instanceGroup'),
        u'instanceTemplate': response.get(u'instanceTemplate'),
        u'name': response.get(u'name'),
        u'namedPorts': InstGrouManaNamePortArray(response.get(u'namedPorts', []), module).from_response(),
        u'region': response.get(u'region'),
        u'targetPools': response.get(u'targetPools'),
        u'targetSize': response.get(u'targetSize')
    }
def region_selflink(name, params):
    """Expand a bare region name into a full compute API self-link.

    Returns None for None, the name unchanged if it is already a self-link,
    otherwise the project-qualified regions URL for the given name.
    """
    if name is None:
        return
    # BUG FIX: the character class previously read [a-z1-9\-], which excluded
    # the digit 0 from region names; use [a-z0-9-] so any valid region matches.
    url = r"https://www.googleapis.com/compute/v1/projects/.*/regions/[a-z0-9-]*"
    if not re.match(url, name):
        name = "https://www.googleapis.com/compute/v1/projects/{project}/regions/%s".format(**params) % name
    return name
def async_op_url(module, extra_data=None):
    """URL of the zonal operation named by extra_data['op_id']."""
    merged = dict(extra_data or {})
    # module params take precedence over any overlapping extra_data keys,
    # matching the original copy()/update() order.
    merged.update(module.params)
    return ("https://www.googleapis.com/compute/v1/projects/{project}"
            "/zones/{zone}/operations/{op_id}").format(**merged)
def wait_for_operation(module, response):
    """Block until the async compute operation completes, then fetch and
    return the resulting InstanceGroupManager resource."""
    op_result = return_if_object(module, response, 'compute#operation')
    if op_result is None:
        return None

    status = navigate_hash(op_result, ['status'])
    wait_done = wait_for_completion(status, op_result, module)
    # The finished operation's targetLink points at the affected resource.
    return fetch_resource(module, navigate_hash(wait_done, ['targetLink']), 'compute#instanceGroupManager')
def wait_for_completion(status, op_result, module):
    """Poll the operation every second until its status is DONE.

    Fails the module if the operation carries errors or reports an
    unrecognized status; returns the final operation payload.
    """
    op_id = navigate_hash(op_result, ['name'])
    op_uri = async_op_url(module, {'op_id': op_id})
    while status != 'DONE':
        # BUG FIX: the string 'message' was previously passed where
        # raise_if_errors() expects the module object, so a failed operation
        # crashed with AttributeError instead of reporting its errors.
        raise_if_errors(op_result, ['error', 'errors'], module)
        time.sleep(1.0)
        if status not in ['PENDING', 'RUNNING', 'DONE']:
            module.fail_json(msg="Invalid result %s" % status)
        op_result = fetch_resource(module, op_uri, 'compute#operation')
        status = navigate_hash(op_result, ['status'])
    return op_result
def raise_if_errors(response, err_path, module):
    """Fail the module if the operation payload contains errors at err_path."""
    errors = navigate_hash(response, err_path)
    if errors is not None:
        module.fail_json(msg=errors)
class InstGrouManaCurrActi(object):
    """(De)serializer for the InstanceGroupManager currentActions object."""

    def __init__(self, request, module):
        self.module = module
        # Normalize a missing/None payload to an empty dict.
        if request:
            self.request = request
        else:
            self.request = {}

    def to_request(self):
        # NOTE(review): this reads snake_case 'creating_without_retries'
        # while from_response() reads camelCase; currentActions is documented
        # as output-only, so to_request() appears unused — confirm before
        # relying on it.
        return remove_nones_from_dict({
            u'abandoning': self.request.get('abandoning'),
            u'creating': self.request.get('creating'),
            u'creatingWithoutRetries': self.request.get('creating_without_retries'),
            u'deleting': self.request.get('deleting'),
            u'none': self.request.get('none'),
            u'recreating': self.request.get('recreating'),
            u'refreshing': self.request.get('refreshing'),
            u'restarting': self.request.get('restarting')
        })

    def from_response(self):
        # Pass API response keys straight through, dropping None values.
        return remove_nones_from_dict({
            u'abandoning': self.request.get(u'abandoning'),
            u'creating': self.request.get(u'creating'),
            u'creatingWithoutRetries': self.request.get(u'creatingWithoutRetries'),
            u'deleting': self.request.get(u'deleting'),
            u'none': self.request.get(u'none'),
            u'recreating': self.request.get(u'recreating'),
            u'refreshing': self.request.get(u'refreshing'),
            u'restarting': self.request.get(u'restarting')
        })
class InstGrouManaNamePortArray(object):
    """(De)serializer for the list of namedPorts sub-objects."""

    def __init__(self, request, module):
        self.module = module
        # Normalize a missing/None payload to an empty list.
        if request:
            self.request = request
        else:
            self.request = []

    def to_request(self):
        # Serialize each named port for the outgoing API request.
        items = []
        for item in self.request:
            items.append(self._request_for_item(item))
        return items

    def from_response(self):
        # Deserialize each named port from the API response.
        items = []
        for item in self.request:
            items.append(self._response_from_item(item))
        return items

    def _request_for_item(self, item):
        return remove_nones_from_dict({
            u'name': item.get('name'),
            u'port': item.get('port')
        })

    def _response_from_item(self, item):
        return remove_nones_from_dict({
            u'name': item.get(u'name'),
            u'port': item.get(u'port')
        })
if __name__ == '__main__':
main()
| gpl-3.0 |
ilexius/odoo | addons/account_test/report/account_test_report.py | 18 | 2791 | # -*- coding: utf-8 -*-
import datetime
from openerp import api, models, _
from openerp.tools.safe_eval import safe_eval as eval
#
# Use period and Journal for selection or resources
#
class ReportAssertAccount(models.AbstractModel):
    """QWeb report model that runs the code snippets stored on
    accounting.assert.test records and renders their results."""
    _name = 'report.account_test.report_accounttest'

    @api.model
    def execute_code(self, code_exec):
        """Execute one test snippet in a restricted environment and return
        its result as a list of display strings."""
        def reconciled_inv():
            """
            returns the list of invoices that are set as reconciled = True
            """
            return self.env['account.invoice'].search([('reconciled', '=', True)]).ids

        def order_columns(item, cols=None):
            """
            This function is used to display a dictionary as a string, with its columns in the order chosen.

            :param item: dict
            :param cols: list of field names
            :returns: a list of tuples (fieldname: value) in a similar way that would dict.items() do except that the
                returned values are following the order given by cols
            :rtype: [(key, value)]
            """
            if cols is None:
                cols = item.keys()
            return [(col, item.get(col)) for col in cols if col in item.keys()]

        # Execution context exposed to the stored snippet; the snippet is
        # expected to write its outcome into 'result' (and optionally
        # 'column_order').
        localdict = {
            'cr': self.env.cr,
            'uid': self.env.uid,
            'reconciled_inv': reconciled_inv,  # specific function used in different tests
            'result': None,  # used to store the result of the test
            'column_order': None,  # used to choose the display order of columns (in case you are returning a list of dict)
        }
        # 'eval' here is openerp.tools.safe_eval (aliased at the top of the
        # file), so the snippet runs sandboxed rather than through builtin eval.
        eval(code_exec, localdict, mode="exec", nocopy=True)
        result = localdict['result']
        column_order = localdict.get('column_order', None)

        # An empty result means every assertion in the snippet held.
        if not isinstance(result, (tuple, list, set)):
            result = [result]
        if not result:
            result = [_('The test was passed successfully')]
        else:
            def _format(item):
                # Dicts are flattened to "key: value" pairs in column_order.
                if isinstance(item, dict):
                    return ', '.join(["%s: %s" % (tup[0], tup[1]) for tup in order_columns(item, column_order)])
                else:
                    return item
            result = [_(_format(rec)) for rec in result]

        return result

    @api.multi
    def render_html(self, data=None):
        """Render the accounttest QWeb report for the selected records."""
        Report = self.env['report']
        report = Report._get_report_from_name('account_test.report_accounttest')
        records = self.env['accounting.assert.test'].browse(self.ids)
        docargs = {
            'doc_ids': self._ids,
            'doc_model': report.model,
            'docs': records,
            'data': data,
            'execute_code': self.execute_code,
            'datetime': datetime
        }
        return Report.render('account_test.report_accounttest', docargs)
| gpl-3.0 |
RuthAngus/LSST-max | code/clusters.py | 1 | 7180 | # coding: utf-8
# # Recovering rotation periods in simulated LSST data
from __future__ import print_function
import numpy as np
import matplotlib.pyplot as plt
from gatspy.periodic import LombScargle
from toy_simulator import simulate_LSST
import simple_gyro as sg
import pandas as pd
import sys
def find_nearest(array, value):
    """
    Match a period to a bin.
    array: array of bin heights.
    value: the period of the star.
    Returns the value and index of the bin.
    """
    distances = np.abs(array - value)
    mask = distances == distances.min()
    return array[mask], mask
def assign_amps(ps, log10P, log10R, stdR):
    """
    Take periods and bin values and return an array of amplitudes.
    """
    # Nearest period bin for each star: rows of (bin value, boolean mask).
    npi = np.array([find_nearest(10**log10P, p) for p in ps])
    nearest_ps, inds = npi[:, 0], npi[:, 1]
    # Mean and spread of log10(amplitude range) of each star's bin.
    log_ranges = np.array([log10R[i] for i in inds])[:, 0]
    std_ranges = np.array([stdR[i] for i in inds])[:, 0]
    # One log-amplitude per star, drawn from N(log_range, std_range).
    return np.random.randn(len(ps))*std_ranges + log_ranges
def make_arrays(data, temp_bin, ps, teff, rmag):
    """
    Amplitude arrays for each temperature bin
    """
    P, R, std = np.array(data["log10P"]), np.array(data["log10R"]), \
        np.array(data["stdR"])

    # Select the stars falling in this temperature bin; the coolest (3500)
    # and hottest (6000) bins are open-ended, the rest span +/- 250 K.
    if temp_bin == 3500:
        m = teff < 3750
    elif temp_bin == 6000:
        m = teff > 6000
    else:
        m = (temp_bin - 250 < teff) * (teff < temp_bin + 250)
    periods, teffs, rmags = ps[m], teff[m], rmag[m]
    amplitudes = assign_amps(periods, P, R, std)
    return periods, amplitudes, teffs, rmags
def LSST_sig(m):
    """
    Approximate the noise in figure 2 of arxiv:1603.06638 from the apparent
    r-mag.
    Returns the noise in magnitudes and ppm.
    """
    # Stars brighter than r = 19 sit at the photometric noise floor.
    if m < 19:
        return .005
    mag_grid = np.array([19, 20, 21, 22, 23, 24, 25])
    sig_grid = np.array([.005, .007, .01, .02, .03, .1, .2])
    closeness = np.abs(mag_grid - m)
    return sig_grid[closeness == closeness.min()][0]
def pgram(N, years, fname):
    """
    Compute Lomb-Scargle periodograms for N simulated LSST light curves and
    record the period of each star's highest periodogram peak.

    N: number of light curves (ids 0..N-1).
    years: only observations within this many years are used.
    fname: simulation directory name (reads simulations/<fname>/<id>.txt).

    Returns the array of recovered periods in days (0 where no peak was
    found) and writes per-star and combined result files.
    """
    ps = np.linspace(2, 100, 1000)  # the period array (in days)

    print("Computing periodograms")
    # Now compute LS pgrams for a set of LSST light curves & save highest peak
    ids = np.arange(N)
    # BUG FIX: np.zeros_like(ids) inherited the integer dtype of ids, which
    # silently truncated every recovered period to an integer.
    periods = np.zeros(len(ids))
    for i, id in enumerate(ids):
        sid = str(int(id)).zfill(4)
        x, y, yerr = np.genfromtxt("simulations/{0}/{1}.txt".format(fname,
                                                                    sid)).T
        m = x < years * 365.25
        # BUG FIX: yerr[m][m] applied the boolean mask twice, which raises an
        # IndexError whenever the mask removes points; apply it once as for
        # x and y.
        xt, yt, yerrt = x[m], y[m], yerr[m]
        model = LombScargle().fit(xt, yt, yerrt)  # compute pgram
        pgram = model.periodogram(ps)

        # find peaks (local maxima of the periodogram)
        peaks = np.array([j for j in range(1, len(ps)-1) if pgram[j-1]
                          < pgram[j] and pgram[j+1] < pgram[j]])
        if len(peaks):
            period = ps[pgram == max(pgram[peaks])][0]
        else:
            period = 0

        periods[i] = period
        np.savetxt("results/{0}/{1}_{2}yr_result.txt".format(fname, sid,
                                                             years), [period])
    np.savetxt("{0}_{1}yr_results.txt".format(fname, years), periods.T)
    return periods
def inject(fname):
    """
    Simulate rotation periods for LSST targets and attempt to recover those
    rotation periods.
    Saves an array of injected periods (days), recovered periods (days), Teff,
    rmag, injected amplitudes (ppm) and noise (ppm).
    'true_ps, periods, logamps, teffs, rmags, true_as, noises_ppm'
    """
    print("Loading Cluster file...")
    # Randomly select targets from a TRILEGAL output.
    logAges, bvs, logTeff, rmag = np.genfromtxt("{0}.dat".format(fname)).T
    teff = 10**logTeff

    # Calculate periods from ages and colours for cool stars
    m = bvs > .4  # select only cool stars
    cool_ages = 10**logAges[m] * 1e-9
    cool_ps = sg.period(cool_ages, bvs[m])
    cool_teffs = teff[m]
    cool_rmags = rmag[m]

    # Draw from a sum of two Gaussians (modelled in another notebook) that
    # describes the period distribution for hot stars. Approximations:
    # I have lumped all stars with colour < 0.4 in together AND I actually
    # used teff = 6250, not B-V = 0.4 in the other notebook.
    hot_ages = 10**logAges[~m] * 1e-9  # select hot stars
    hot_teffs = teff[~m]
    hot_rmags = rmag[~m]

    # copy parameters for two Gaussians from hot_stars ipython notebook
    A1, A2, mu1, mu2, sig1, sig2 = 254.11651209, 49.8149765, 3.00751724, \
        3.73399554, 2.26525979, 8.31739725
    hot_ps = np.zeros_like(hot_ages)
    # Mixture draw: a fraction (1 - A2/A1) from the first Gaussian, A2/A1
    # from the second; any remainder left by integer truncation is refilled
    # from the second Gaussian.
    hot_ps1 = np.random.randn(int(len(hot_ages)*(1 - A2/A1)))*sig1 + mu1
    hot_ps2 = np.random.randn(int(len(hot_ages)*(A2/A1)))*sig2 + mu2
    hot_ps[:len(hot_ps1)] = hot_ps1
    hot_ps[len(hot_ps1):len(hot_ps1) + len(hot_ps2)] = hot_ps2
    tot = len(hot_ps1) + len(hot_ps2)
    hot_ps[tot:] = np.random.randn(len(hot_ps)-tot)*sig2 + mu2

    # combine the modes
    age = np.concatenate((cool_ages, hot_ages))
    ps = np.concatenate((cool_ps, hot_ps))
    teff = np.concatenate((cool_teffs, hot_teffs))
    rmag = np.concatenate((cool_rmags, hot_rmags))

    print("Calculating amplitudes...")
    # Use Derek's results to calculate amplitudes
    # Column headings: log10P, log10R, stdR, Nbin
    d35 = pd.read_csv("data/rot_v_act3500.txt")
    d40 = pd.read_csv("data/rot_v_act4000.txt")
    d45 = pd.read_csv("data/rot_v_act4500.txt")
    d50 = pd.read_csv("data/rot_v_act5000.txt")
    d55 = pd.read_csv("data/rot_v_act5500.txt")
    d60 = pd.read_csv("data/rot_v_act6000.txt")

    # Assign amplitudes
    # NOTE(review): the 6000 K bin (d60) is loaded but left out of the
    # concatenation (see the commented line below) — confirm this is intended.
    pers, logamps, teffs, rmags = \
        np.concatenate((make_arrays(d35, 3500, ps, teff, rmag),
                        make_arrays(d40, 4000, ps, teff, rmag),
                        make_arrays(d45, 4500, ps, teff, rmag),
                        make_arrays(d50, 5000, ps, teff, rmag),
                        make_arrays(d55, 5500, ps, teff, rmag)),
                       axis=1)
    # make_arrays(d60, 6000, ps, teff, rmag)),
    amps = 10**logamps  # parts per million
    noises_mag = np.array([LSST_sig(mag) for mag in rmags])
    # Convert magnitude scatter to fractional flux scatter in ppm.
    noises_ppm = (1 - 10**(-noises_mag/2.5)) * 1e6

    # Simulate light curves
    print("Simulating light curves...")
    path = "simulations/{0}".format(fname)  # where to save the lcs
    [simulate_LSST(i, pers[i], amps[i], path, noises_ppm[i]) for i in
     range(len(pers))]

    # save the true values
    ids = np.arange(len(pers))
    data = np.vstack((ids, pers, amps))
    np.savetxt("{0}/truth.txt".format(path), data.T)

    print("Saving results")
    data = np.vstack((pers, amps, teffs, rmags, noises_ppm))
    np.savetxt("parameters_{0}.txt".format(fname), data.T)
    return pers, amps, teffs, rmags, noises_ppm
if __name__ == "__main__":
    # Cluster file name is taken from the command line.
    fname = "{0}".format(sys.argv[1])

    # Run simulations
    pers, amps, teffs, rmags, noises_ppm = inject("{0}".format(fname))

    # recover periods
    pers, amps, teffs, rmags, noises_ppm = \
        np.genfromtxt("parameters_{0}.txt".format(fname)).T
    N = len(pers)
    years = [1, 5, 10]
    # Recover periods from 1-, 5- and 10-year baselines and save each set.
    for year in years:
        periods = pgram(N, year, fname)
        data = np.vstack((pers, periods, np.log(amps), teffs, rmags, amps,
                          noises_ppm))
        np.savetxt("{0}yr_results{1}.txt".format(year, fname), data.T)
| mit |
blockstack/blockstack-server | integration_tests/blockstack_integration_tests/scenarios/name_pre_reg_up_xfer_delays.py | 1 | 6755 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Blockstack
~~~~~
copyright: (c) 2014-2015 by Halfmoon Labs, Inc.
copyright: (c) 2016 by Blockstack.org
This file is part of Blockstack
Blockstack is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Blockstack is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Blockstack. If not, see <http://www.gnu.org/licenses/>.
"""
import testlib
import virtualchain
import json
# Pre-funded test wallets (private key, initial balance in satoshis) used by
# the integration-test framework; these keys are public test fixtures only.
wallets = [
    testlib.Wallet( "5JesPiN68qt44Hc2nT8qmyZ1JDwHebfoh9KQ52Lazb1m1LaKNj9", 100000000000 ),
    testlib.Wallet( "5KHqsiU9qa77frZb6hQy9ocV7Sus9RWJcQGYYBJJBb2Efj1o77e", 100000000000 ),
    testlib.Wallet( "5Kg5kJbQHvk1B64rJniEmgbD83FpZpbw2RjdAZEzTefs9ihN3Bz", 100000000000 ),
    testlib.Wallet( "5JuVsoS9NauksSkqEjbUZxWwgGDQbMwPsEfoRBSpLpgDX1RtLX7", 100000000000 ),
    testlib.Wallet( "5KEpiSRr1BrT8vRD7LKGCEmudokTh1iMHbiThMQpLdwBwhDJB1T", 100000000000 )
]

debug = False  # set True to dump each transaction response as JSON
# NOTE(review): fixed consensus hash constant — presumably the expected value
# checked by the test framework; confirm against testlib's usage.
consensus = "17ac43c1d8549c3181b200f1bf97eb7d"
def scenario( wallets, **kw ):
global debug
resp = testlib.blockstack_namespace_preorder( "test", wallets[1].addr, wallets[0].privkey )
if debug or 'error' in resp:
print json.dumps( resp, indent=4 )
testlib.next_block( **kw )
consensus_hash = testlib.get_consensus_at( testlib.get_current_block( **kw ), **kw )
testlib.next_block( **kw )
testlib.next_block( **kw )
resp = testlib.blockstack_namespace_reveal( "test", wallets[1].addr, 52595, 250, 4, [6,5,4,3,2,1,0,0,0,0,0,0,0,0,0,0], 10, 10, wallets[0].privkey )
if debug or 'error' in resp:
print json.dumps( resp, indent=4 )
testlib.next_block( **kw )
consensus_hash = testlib.get_consensus_at( testlib.get_current_block( **kw ), **kw )
testlib.next_block( **kw )
testlib.next_block( **kw )
resp = testlib.blockstack_namespace_ready( "test", wallets[1].privkey )
if debug or 'error' in resp:
print json.dumps( resp, indent=4 )
testlib.next_block( **kw )
consensus_hash = testlib.get_consensus_at( testlib.get_current_block( **kw ), **kw )
testlib.next_block( **kw )
testlib.next_block( **kw )
resp = testlib.blockstack_name_preorder( "foo.test", wallets[2].privkey, wallets[3].addr, consensus_hash=consensus_hash )
if debug or 'error' in resp:
print json.dumps( resp, indent=4 )
testlib.next_block( **kw )
consensus_hash = testlib.get_consensus_at( testlib.get_current_block( **kw ), **kw )
testlib.next_block( **kw )
testlib.next_block( **kw )
resp = testlib.blockstack_name_register( "foo.test", wallets[2].privkey, wallets[3].addr )
if debug or 'error' in resp:
print json.dumps( resp, indent=4 )
testlib.next_block( **kw )
consensus_hash = testlib.get_consensus_at( testlib.get_current_block( **kw ), **kw )
testlib.next_block( **kw )
testlib.next_block( **kw )
resp = testlib.blockstack_name_update( "foo.test", "11" * 20, wallets[3].privkey, consensus_hash=consensus_hash )
if debug or 'error' in resp:
print json.dumps( resp, indent=4 )
testlib.next_block( **kw )
consensus_hash = testlib.get_consensus_at( testlib.get_current_block( **kw ), **kw )
testlib.next_block( **kw )
testlib.next_block( **kw )
resp = testlib.blockstack_name_transfer( "foo.test", wallets[4].addr, True, wallets[3].privkey, consensus_hash=consensus_hash )
if debug or 'error' in resp:
print json.dumps( resp, indent=4 )
testlib.next_block( **kw )
consensus_hash = testlib.get_consensus_at( testlib.get_current_block( **kw ), **kw )
testlib.next_block( **kw )
testlib.next_block( **kw )
resp = testlib.blockstack_name_update( "foo.test", "22" * 20, wallets[4].privkey, consensus_hash=consensus_hash )
if debug or 'error' in resp:
print json.dumps( resp, indent=4 )
testlib.next_block( **kw )
consensus_hash = testlib.get_consensus_at( testlib.get_current_block( **kw ), **kw )
testlib.next_block( **kw )
testlib.next_block( **kw )
resp = testlib.blockstack_name_transfer( "foo.test", wallets[3].addr, False, wallets[4].privkey, consensus_hash=consensus_hash )
if debug or 'error' in resp:
print json.dumps( resp, indent=4 )
testlib.next_block( **kw )
consensus_hash = testlib.get_consensus_at( testlib.get_current_block( **kw ), **kw )
testlib.next_block( **kw )
testlib.next_block( **kw )
resp = testlib.blockstack_name_update( "foo.test", "33" * 20, wallets[3].privkey, consensus_hash=consensus_hash )
if debug or 'error' in resp:
print json.dumps( resp, indent=4 )
testlib.next_block( **kw )
consensus_hash = testlib.get_consensus_at( testlib.get_current_block( **kw ), **kw )
testlib.next_block( **kw )
testlib.next_block( **kw )
resp = testlib.blockstack_name_transfer( "foo.test", wallets[4].addr, True, wallets[3].privkey, consensus_hash=consensus_hash )
if debug or 'error' in resp:
print json.dumps( resp, indent=4 )
testlib.next_block( **kw )
testlib.next_block( **kw )
testlib.next_block( **kw )
def check( state_engine ):
# not revealed, but ready
ns = state_engine.get_namespace_reveal( "test" )
if ns is not None:
print "'test' not revealed"
return False
ns = state_engine.get_namespace( "test" )
if ns is None:
print "'test' not found"
return False
if ns['namespace_id'] != 'test':
print "'test' not returned"
return False
# not preordered
preorder = state_engine.get_name_preorder( "foo.test", virtualchain.make_payment_script(wallets[2].addr), wallets[3].addr )
if preorder is not None:
print "'foo.test' still preordered"
return False
# registered
name_rec = state_engine.get_name( "foo.test" )
if name_rec is None:
print "'foo.test' not registered"
return False
# updated, and data is preserved
if name_rec['value_hash'] != '33' * 20:
print "'foo.test' invalid value hash"
return False
# transferred
if name_rec['address'] != wallets[4].addr or name_rec['sender'] != virtualchain.make_payment_script(wallets[4].addr):
print "'foo.test' invalid owner"
return False
return True
| gpl-3.0 |
michelts/lettuce | tests/integration/lib/Django-1.3/django/core/mail/backends/smtp.py | 202 | 3878 | """SMTP email backend class."""
import smtplib
import socket
import threading
from django.conf import settings
from django.core.mail.backends.base import BaseEmailBackend
from django.core.mail.utils import DNS_NAME
from django.core.mail.message import sanitize_address
class EmailBackend(BaseEmailBackend):
    """
    A wrapper that manages the SMTP network connection.

    The connection is opened lazily by open(), shared across messages within
    one send_messages() call, and serialized with an RLock so one backend
    instance can be shared between threads.
    """
    def __init__(self, host=None, port=None, username=None, password=None,
                 use_tls=None, fail_silently=False, **kwargs):
        super(EmailBackend, self).__init__(fail_silently=fail_silently)
        # Any parameter left unset (None/empty) falls back to settings.
        self.host = host or settings.EMAIL_HOST
        self.port = port or settings.EMAIL_PORT
        self.username = username or settings.EMAIL_HOST_USER
        self.password = password or settings.EMAIL_HOST_PASSWORD
        # use_tls may legitimately be False, so test against None instead of
        # relying on truthiness like the settings above.
        if use_tls is None:
            self.use_tls = settings.EMAIL_USE_TLS
        else:
            self.use_tls = use_tls
        # Lazily-opened smtplib.SMTP connection; None when closed.
        self.connection = None
        # Serializes send_messages() calls sharing this backend instance.
        self._lock = threading.RLock()

    def open(self):
        """
        Ensures we have a connection to the email server. Returns whether or
        not a new connection was required (True or False).

        NOTE(review): on a swallowed failure (fail_silently=True) this
        returns None (falsy), which send_messages() relies on to bail out.
        """
        if self.connection:
            # Nothing to do if the connection is already open.
            return False
        try:
            # If local_hostname is not specified, socket.getfqdn() gets used.
            # For performance, we use the cached FQDN for local_hostname.
            self.connection = smtplib.SMTP(self.host, self.port,
                                           local_hostname=DNS_NAME.get_fqdn())
            if self.use_tls:
                # EHLO, upgrade to TLS, then EHLO again to re-identify the
                # client over the now-encrypted channel.
                self.connection.ehlo()
                self.connection.starttls()
                self.connection.ehlo()
            if self.username and self.password:
                self.connection.login(self.username, self.password)
            return True
        except:
            if not self.fail_silently:
                raise

    def close(self):
        """Closes the connection to the email server."""
        try:
            try:
                self.connection.quit()
            except socket.sslerror:
                # This happens when calling quit() on a TLS connection
                # sometimes.
                # NOTE(review): socket.sslerror is a Python 2 attribute; on
                # Python 3 this except clause itself fails -- confirm the
                # supported runtime.
                self.connection.close()
            except:
                if self.fail_silently:
                    return
                raise
        finally:
            # Always forget the connection so the next send reopens it.
            self.connection = None

    def send_messages(self, email_messages):
        """
        Sends one or more EmailMessage objects and returns the number of email
        messages sent.
        """
        if not email_messages:
            return
        # Hold the lock for the whole batch so a shared connection is never
        # used concurrently from two threads.
        self._lock.acquire()
        try:
            new_conn_created = self.open()
            if not self.connection:
                # We failed silently on open().
                # Trying to send would be pointless.
                return
            num_sent = 0
            for message in email_messages:
                sent = self._send(message)
                if sent:
                    num_sent += 1
            if new_conn_created:
                # Only close connections we opened ourselves; a connection
                # that pre-existed this call stays open for the caller.
                self.close()
        finally:
            self._lock.release()
        return num_sent

    def _send(self, email_message):
        """A helper method that does the actual sending."""
        if not email_message.recipients():
            return False
        # Sanitize addresses per the message's encoding before handing them
        # to smtplib.
        from_email = sanitize_address(email_message.from_email, email_message.encoding)
        recipients = [sanitize_address(addr, email_message.encoding)
                      for addr in email_message.recipients()]
        try:
            self.connection.sendmail(from_email, recipients,
                                     email_message.message().as_string())
        except:
            if not self.fail_silently:
                raise
            return False
        return True
| gpl-3.0 |
xin3liang/platform_prebuilts_gcc_darwin-x86_arm_arm-linux-androideabi-4.9 | share/gdb/python/gdb/command/type_printers.py | 137 | 4424 | # Type printer commands.
# Copyright (C) 2010-2013 Free Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import copy
import gdb
"""GDB commands for working with type-printers."""
class InfoTypePrinter(gdb.Command):
    """GDB command to list all registered type-printers.

    Usage: info type-printers
    """

    def __init__ (self):
        super(InfoTypePrinter, self).__init__("info type-printers",
                                              gdb.COMMAND_DATA)

    def list_type_printers(self, type_printers):
        """Print a list of type printers."""
        # A potential enhancement is to provide an option to list printers in
        # "lookup order" (i.e. unsorted).
        # sorted() already returns a new list, so the previous copy.copy()
        # of type_printers was redundant and has been dropped.
        sorted_type_printers = sorted(type_printers, key=lambda x: x.name)
        for printer in sorted_type_printers:
            enabled = '' if printer.enabled else " [disabled]"
            print (" %s%s" % (printer.name, enabled))

    def invoke(self, arg, from_tty):
        """GDB calls this to perform the command."""
        # 'sep' inserts a blank line between sections after the first one.
        sep = ''
        for objfile in gdb.objfiles():
            if objfile.type_printers:
                print ("%sType printers for %s:" % (sep, objfile.name))
                self.list_type_printers(objfile.type_printers)
                sep = '\n'
        if gdb.current_progspace().type_printers:
            print ("%sType printers for program space:" % sep)
            self.list_type_printers(gdb.current_progspace().type_printers)
            sep = '\n'
        if gdb.type_printers:
            print ("%sGlobal type printers:" % sep)
            self.list_type_printers(gdb.type_printers)
class _EnableOrDisableCommand(gdb.Command):
    """Shared implementation behind 'enable type-printer' and
    'disable type-printer'.  'setting' is the boolean written into each
    matching printer's 'enabled' attribute."""

    def __init__(self, setting, name):
        super(_EnableOrDisableCommand, self).__init__(name, gdb.COMMAND_DATA)
        self.setting = setting

    def set_some(self, name, printers):
        """Apply self.setting to every printer called *name*.

        Returns True if at least one printer matched."""
        matched = False
        for printer in printers:
            if printer.name == name:
                printer.enabled = self.setting
                matched = True
        return matched

    def invoke(self, arg, from_tty):
        """GDB calls this to perform the command."""
        for name in arg.split():
            # Visit every source of printers unconditionally -- a name may
            # be registered in more than one place.
            hits = [self.set_some(name, objfile.type_printers)
                    for objfile in gdb.objfiles()]
            hits.append(
                self.set_some(name, gdb.current_progspace().type_printers))
            hits.append(self.set_some(name, gdb.type_printers))
            if not any(hits):
                print ("No type printer named '%s'" % name)

    def add_some(self, result, word, printers):
        # Collect completion candidates whose names start with 'word'.
        result.extend(p.name for p in printers if p.name.startswith(word))

    def complete(self, text, word):
        candidates = []
        for objfile in gdb.objfiles():
            self.add_some(candidates, word, objfile.type_printers)
        self.add_some(candidates, word, gdb.current_progspace().type_printers)
        self.add_some(candidates, word, gdb.type_printers)
        return candidates
class EnableTypePrinter(_EnableOrDisableCommand):
    """GDB command to enable the specified type printer.

    Usage: enable type-printer NAME

    NAME is the name of the type-printer.
    """

    def __init__(self):
        # setting=True: matching printers get enabled.
        super(EnableTypePrinter, self).__init__(True, "enable type-printer")
class DisableTypePrinter(_EnableOrDisableCommand):
    """GDB command to disable the specified type-printer.

    Usage: disable type-printer NAME

    NAME is the name of the type-printer.
    """

    def __init__(self):
        # setting=False: matching printers get disabled.
        super(DisableTypePrinter, self).__init__(False, "disable type-printer")
# Instantiating each command registers it with GDB as a module side effect.
InfoTypePrinter()
EnableTypePrinter()
DisableTypePrinter()
| gpl-2.0 |
matsprea/omim | 3party/protobuf/python/google/protobuf/internal/cpp_message.py | 80 | 23543 | # Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Contains helper functions used to create protocol message classes from
Descriptor objects at runtime backed by the protocol buffer C++ API.
"""
__author__ = 'petar@google.com (Petar Petrov)'
import copy_reg
import operator
from google.protobuf.internal import _net_proto2___python
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import message
# Local aliases for the C extension's field label/type enum values, so the
# rest of this module does not repeatedly reach into the extension module.
_LABEL_REPEATED = _net_proto2___python.LABEL_REPEATED
_LABEL_OPTIONAL = _net_proto2___python.LABEL_OPTIONAL
_CPPTYPE_MESSAGE = _net_proto2___python.CPPTYPE_MESSAGE
_TYPE_MESSAGE = _net_proto2___python.TYPE_MESSAGE
def GetDescriptorPool():
  """Creates a new DescriptorPool C++ object."""
  return _net_proto2___python.NewCDescriptorPool()


# Module-wide descriptor pool shared by the lookup helpers below.
_pool = GetDescriptorPool()


def GetFieldDescriptor(full_field_name):
  """Searches for a field descriptor given a full field name."""
  return _pool.FindFieldByName(full_field_name)


def BuildFile(content):
  """Registers a new proto file in the underlying C++ descriptor pool."""
  _net_proto2___python.BuildFile(content)


def GetExtensionDescriptor(full_extension_name):
  """Searches for extension descriptor given a full field name."""
  return _pool.FindExtensionByName(full_extension_name)


def NewCMessage(full_message_name):
  """Creates a new C++ protocol message by its name."""
  return _net_proto2___python.NewCMessage(full_message_name)
def ScalarProperty(cdescriptor):
  """Returns a scalar property for the given descriptor."""
  def _get(self):
    # Read straight from the wrapped C message.
    return self._cmsg.GetScalar(cdescriptor)

  def _set(self, value):
    # Write straight through to the wrapped C message.
    self._cmsg.SetScalar(cdescriptor, value)

  return property(_get, _set)
def CompositeProperty(cdescriptor, message_type):
  """Returns a read-only property wrapping a singular composite field."""
  def _get(self):
    # Reuse the cached Python wrapper if this message already built one;
    # otherwise materialize the C sub-message and wrap it.
    wrapper = self._composite_fields.get(cdescriptor.name, None)
    if wrapper is not None:
      return wrapper
    cmessage = self._cmsg.NewSubMessage(cdescriptor)
    wrapper = message_type._concrete_class(__cmessage=cmessage)
    self._composite_fields[cdescriptor.name] = wrapper
    return wrapper

  return property(_get)
class RepeatedScalarContainer(object):
  """Container for repeated scalar fields.

  All state lives in the wrapped C message; this object is a thin list-like
  view that translates Python sequence operations into C-level calls.
  """

  __slots__ = ['_message', '_cfield_descriptor', '_cmsg']

  def __init__(self, msg, cfield_descriptor):
    self._message = msg
    self._cmsg = msg._cmsg
    self._cfield_descriptor = cfield_descriptor

  def _assign_all(self, values):
    # Replace the entire repeated field with the given Python sequence.
    self._cmsg.AssignRepeatedScalar(self._cfield_descriptor, values)

  def append(self, value):
    self._cmsg.AddRepeatedScalar(self._cfield_descriptor, value)

  def extend(self, sequence):
    for item in sequence:
      self.append(item)

  def insert(self, key, value):
    # Mutate a full snapshot in Python, then write it back wholesale.
    snapshot = self[:]
    snapshot.insert(key, value)
    self._assign_all(snapshot)

  def remove(self, value):
    snapshot = self[:]
    snapshot.remove(value)
    self._assign_all(snapshot)

  def __setitem__(self, key, value):
    snapshot = self[:]
    snapshot[key] = value
    self._assign_all(snapshot)

  def __getitem__(self, key):
    return self._cmsg.GetRepeatedScalar(self._cfield_descriptor, key)

  def __delitem__(self, key):
    self._cmsg.DeleteRepeatedField(self._cfield_descriptor, key)

  def __len__(self):
    return len(self[:])

  def __eq__(self, other):
    if self is other:
      return True
    if not operator.isSequenceType(other):
      raise TypeError(
          'Can only compare repeated scalar fields against sequences.')
    # We are presumably comparing against some other sequence type;
    # compare as a plain Python list.
    return other == self[:]

  def __ne__(self, other):
    return not self == other

  def __hash__(self):
    raise TypeError('unhashable object')

  def sort(self, *args, **kwargs):
    # Older callers passed the comparator as 'sort_function'; map it onto
    # the list.sort() 'cmp' keyword for compatibility.
    if 'sort_function' in kwargs:
      kwargs['cmp'] = kwargs.pop('sort_function')
    self._assign_all(sorted(self, *args, **kwargs))
def RepeatedScalarProperty(cdescriptor):
  """Returns a Python property the given repeated scalar field."""
  def _get(self):
    # One container per message instance, created lazily and memoized.
    container = self._composite_fields.get(cdescriptor.name, None)
    if container is None:
      container = RepeatedScalarContainer(self, cdescriptor)
      self._composite_fields[cdescriptor.name] = container
    return container

  def _set(self, unused_value):
    # Repeated fields can only be mutated through their container.
    raise AttributeError('Assignment not allowed to repeated field '
                         '"%s" in protocol message object.' % cdescriptor.name)

  doc = 'Magic attribute generated for "%s" proto field.' % cdescriptor.name
  return property(_get, _set, doc=doc)
class RepeatedCompositeContainer(object):
  """Container for repeated composite fields.

  Wraps the repeated message field of the underlying C message.  Python
  message objects handed out are views constructed from 'subclass' and carry
  an __owner reference back to the parent message to keep it alive.
  """

  __slots__ = ['_message', '_subclass', '_cfield_descriptor', '_cmsg']

  def __init__(self, msg, cfield_descriptor, subclass):
    # msg: owning Python message; subclass: the concrete Python message
    # class used to wrap each C sub-message.
    self._message = msg
    self._cmsg = msg._cmsg
    self._subclass = subclass
    self._cfield_descriptor = cfield_descriptor

  def add(self, **kwargs):
    # Append a fresh C sub-message and wrap it; kwargs are forwarded to the
    # wrapper's constructor to initialize its fields.
    cmessage = self._cmsg.AddMessage(self._cfield_descriptor)
    return self._subclass(__cmessage=cmessage, __owner=self._message, **kwargs)

  def extend(self, elem_seq):
    """Extends by appending the given sequence of elements of the same type
    as this one, copying each individual message.
    """
    for message in elem_seq:
      self.add().MergeFrom(message)

  def remove(self, value):
    # TODO(protocol-devel): This is inefficient as it needs to generate a
    # message pointer for each message only to do index(). Move this to a C++
    # extension function.
    self.__delitem__(self[slice(None, None, None)].index(value))

  def MergeFrom(self, other):
    for message in other[:]:
      self.add().MergeFrom(message)

  def __getitem__(self, key):
    # The C layer returns either a single sub-message or a list of them,
    # depending on whether 'key' is an index or a slice.
    cmessages = self._cmsg.GetRepeatedMessage(
        self._cfield_descriptor, key)
    subclass = self._subclass
    if not isinstance(cmessages, list):
      return subclass(__cmessage=cmessages, __owner=self._message)
    return [subclass(__cmessage=m, __owner=self._message) for m in cmessages]

  def __delitem__(self, key):
    self._cmsg.DeleteRepeatedField(
        self._cfield_descriptor, key)

  def __len__(self):
    return self._cmsg.FieldLength(self._cfield_descriptor)

  def __eq__(self, other):
    """Compares the current instance with another one."""
    if self is other:
      return True
    if not isinstance(other, self.__class__):
      raise TypeError('Can only compare repeated composite fields against '
                      'other repeated composite fields.')
    # Compare the fully materialized element lists.
    messages = self[slice(None, None, None)]
    other_messages = other[slice(None, None, None)]
    return messages == other_messages

  def __hash__(self):
    raise TypeError('unhashable object')

  def sort(self, cmp=None, key=None, reverse=False, **kwargs):
    # Maintain compatibility with the old interface.
    if cmp is None and 'sort_function' in kwargs:
      cmp = kwargs.pop('sort_function')

    # The cmp function, if provided, is passed the results of the key function,
    # so we only need to wrap one of them.
    if key is None:
      index_key = self.__getitem__
    else:
      index_key = lambda i: key(self[i])

    # Sort the list of current indexes by the underlying object.
    # (Python 2: range() returns a list, so .sort() with 'cmp' works here.)
    indexes = range(len(self))
    indexes.sort(cmp=cmp, key=index_key, reverse=reverse)

    # Apply the transposition: swap elements in the C message until each
    # position holds the element its sorted index demands.
    for dest, src in enumerate(indexes):
      if dest == src:
        continue
      self._cmsg.SwapRepeatedFieldElements(self._cfield_descriptor, dest, src)
      # Don't swap the same value twice.
      indexes[src] = src
def RepeatedCompositeProperty(cdescriptor, message_type):
  """Returns a Python property for the given repeated composite field."""
  def _get(self):
    # One container per message instance, created lazily and memoized.
    container = self._composite_fields.get(cdescriptor.name, None)
    if container is None:
      container = RepeatedCompositeContainer(
          self, cdescriptor, message_type._concrete_class)
      self._composite_fields[cdescriptor.name] = container
    return container

  def _set(self, unused_value):
    # Repeated fields can only be mutated through their container.
    raise AttributeError('Assignment not allowed to repeated field '
                         '"%s" in protocol message object.' % cdescriptor.name)

  doc = 'Magic attribute generated for "%s" proto field.' % cdescriptor.name
  return property(_get, _set, doc=doc)
class ExtensionDict(object):
  """Extension dictionary added to each protocol message.

  Maps FieldDescriptor -> value for the message's extensions, writing
  through to the wrapped C message.  Python wrapper objects for composite
  and repeated extensions are cached in self._values.
  """

  def __init__(self, msg):
    self._message = msg
    self._cmsg = msg._cmsg
    self._values = {}

  def __setitem__(self, extension, value):
    # Local import; presumably avoids a circular import with the descriptor
    # module (same pattern as the lazy text_format import) -- TODO confirm.
    from google.protobuf import descriptor
    if not isinstance(extension, descriptor.FieldDescriptor):
      raise KeyError('Bad extension %r.' % (extension,))
    cdescriptor = extension._cdescriptor
    # Only optional scalar extensions can be assigned directly; repeated or
    # message-typed extensions must be mutated through their containers.
    if (cdescriptor.label != _LABEL_OPTIONAL or
        cdescriptor.cpp_type == _CPPTYPE_MESSAGE):
      raise TypeError('Extension %r is repeated and/or a composite type.' % (
          extension.full_name,))
    self._cmsg.SetScalar(cdescriptor, value)
    self._values[extension] = value

  def __getitem__(self, extension):
    from google.protobuf import descriptor
    if not isinstance(extension, descriptor.FieldDescriptor):
      raise KeyError('Bad extension %r.' % (extension,))
    cdescriptor = extension._cdescriptor
    # Optional scalars are read straight from the C message; anything else
    # returns a (cached) Python wrapper object.
    if (cdescriptor.label != _LABEL_REPEATED and
        cdescriptor.cpp_type != _CPPTYPE_MESSAGE):
      return self._cmsg.GetScalar(cdescriptor)
    ext = self._values.get(extension, None)
    if ext is not None:
      return ext
    ext = self._CreateNewHandle(extension)
    self._values[extension] = ext
    return ext

  def ClearExtension(self, extension):
    from google.protobuf import descriptor
    if not isinstance(extension, descriptor.FieldDescriptor):
      raise KeyError('Bad extension %r.' % (extension,))
    self._cmsg.ClearFieldByDescriptor(extension._cdescriptor)
    # Drop any cached wrapper so a later read rebuilds it fresh.
    if extension in self._values:
      del self._values[extension]

  def HasExtension(self, extension):
    from google.protobuf import descriptor
    if not isinstance(extension, descriptor.FieldDescriptor):
      raise KeyError('Bad extension %r.' % (extension,))
    return self._cmsg.HasFieldByDescriptor(extension._cdescriptor)

  def _FindExtensionByName(self, name):
    """Tries to find a known extension with the specified name.

    Args:
      name: Extension full name.

    Returns:
      Extension field descriptor.
    """
    return self._message._extensions_by_name.get(name, None)

  def _CreateNewHandle(self, extension):
    # Build the appropriate Python wrapper for a composite or repeated
    # extension.  Optional scalars never reach here (handled inline above).
    cdescriptor = extension._cdescriptor
    if (cdescriptor.label != _LABEL_REPEATED and
        cdescriptor.cpp_type == _CPPTYPE_MESSAGE):
      cmessage = self._cmsg.NewSubMessage(cdescriptor)
      return extension.message_type._concrete_class(__cmessage=cmessage)

    if cdescriptor.label == _LABEL_REPEATED:
      if cdescriptor.cpp_type == _CPPTYPE_MESSAGE:
        return RepeatedCompositeContainer(
            self._message, cdescriptor, extension.message_type._concrete_class)
      else:
        return RepeatedScalarContainer(self._message, cdescriptor)
    # This shouldn't happen!
    assert False
    return None
def NewMessage(bases, message_descriptor, dictionary):
  """Creates a new protocol message *class*."""
  # Populate the class dictionary before the metaclass builds the class:
  # nested extension descriptors, enum constants, and the cached C field
  # descriptors plus __slots__.
  _AddClassAttributesForNestedExtensions(message_descriptor, dictionary)
  _AddEnumValues(message_descriptor, dictionary)
  _AddDescriptors(message_descriptor, dictionary)
  return bases
def InitMessage(message_descriptor, cls):
  """Constructs a new message instance (called before instance's __init__)."""
  cls._extensions_by_name = {}
  _AddInitMethod(message_descriptor, cls)
  _AddMessageMethods(message_descriptor, cls)
  _AddPropertiesForExtensions(message_descriptor, cls)
  # Register pickle support: instances are rebuilt via cls() and restored
  # from the state returned by __getstate__().
  copy_reg.pickle(cls, lambda obj: (cls, (), obj.__getstate__()))
def _AddDescriptors(message_descriptor, dictionary):
  """Sets up a new protocol message class dictionary.

  Args:
    message_descriptor: A Descriptor instance describing this message type.
    dictionary: Class dictionary to which we'll add a '__slots__' entry.
  """
  # Resolve and cache the C field descriptor for every field up front.
  cdescriptors = {}
  for field in message_descriptor.fields:
    cdescriptors[field.name] = GetFieldDescriptor(field.full_name)
  dictionary['__descriptors'] = cdescriptors

  # One slot per field, plus the bookkeeping slots every message carries.
  dictionary['__slots__'] = list(cdescriptors.iterkeys()) + [
      '_cmsg', '_owner', '_composite_fields', 'Extensions', '_HACK_REFCOUNTS']
def _AddEnumValues(message_descriptor, dictionary):
  """Sets class-level attributes for all enum fields defined in this message.

  Args:
    message_descriptor: Descriptor object for this message type.
    dictionary: Class dictionary that should be populated.
  """
  for enum in message_descriptor.enum_types:
    # Expose the enum type itself, plus each value as a plain int constant.
    dictionary[enum.name] = enum_type_wrapper.EnumTypeWrapper(enum)
    for value in enum.values:
      dictionary[value.name] = value.number
def _AddClassAttributesForNestedExtensions(message_descriptor, dictionary):
"""Adds class attributes for the nested extensions."""
extension_dict = message_descriptor.extensions_by_name
for extension_name, extension_field in extension_dict.iteritems():
assert extension_name not in dictionary
dictionary[extension_name] = extension_field
def _AddInitMethod(message_descriptor, cls):
  """Adds an __init__ method to cls."""
  # Create and attach message field properties to the message class.
  # This can be done just once per message class, since property setters and
  # getters are passed the message instance.
  # This makes message instantiation extremely fast, and at the same time it
  # doesn't require the creation of property objects for each message instance,
  # which saves a lot of memory.
  for field in message_descriptor.fields:
    field_cdescriptor = cls.__descriptors[field.name]
    # Pick the property flavor matching the field's label and C++ type.
    if field.label == _LABEL_REPEATED:
      if field.cpp_type == _CPPTYPE_MESSAGE:
        value = RepeatedCompositeProperty(field_cdescriptor, field.message_type)
      else:
        value = RepeatedScalarProperty(field_cdescriptor)
    elif field.cpp_type == _CPPTYPE_MESSAGE:
      value = CompositeProperty(field_cdescriptor, field.message_type)
    else:
      value = ScalarProperty(field_cdescriptor)
    setattr(cls, field.name, value)

    # Attach a constant with the field number.
    constant_name = field.name.upper() + '_FIELD_NUMBER'
    setattr(cls, constant_name, field.number)

  def Init(self, **kwargs):
    """Message constructor."""
    # '__cmessage' wraps an existing C message; otherwise create a new one.
    cmessage = kwargs.pop('__cmessage', None)
    if cmessage:
      self._cmsg = cmessage
    else:
      self._cmsg = NewCMessage(message_descriptor.full_name)

    # Keep a reference to the owner, as the owner keeps a reference to the
    # underlying protocol buffer message.
    owner = kwargs.pop('__owner', None)
    if owner:
      self._owner = owner

    if message_descriptor.is_extendable:
      self.Extensions = ExtensionDict(self)
    else:
      # Reference counting in the C++ code is broken and depends on
      # the Extensions reference to keep this object alive during unit
      # tests (see b/4856052).  Remove this once b/4945904 is fixed.
      self._HACK_REFCOUNTS = self
    self._composite_fields = {}

    # Remaining kwargs are field initializers, dispatched per field kind.
    for field_name, field_value in kwargs.iteritems():
      field_cdescriptor = self.__descriptors.get(field_name, None)
      if not field_cdescriptor:
        raise ValueError('Protocol message has no "%s" field.' % field_name)
      if field_cdescriptor.label == _LABEL_REPEATED:
        if field_cdescriptor.cpp_type == _CPPTYPE_MESSAGE:
          field_name = getattr(self, field_name)
          for val in field_value:
            field_name.add().MergeFrom(val)
        else:
          getattr(self, field_name).extend(field_value)
      elif field_cdescriptor.cpp_type == _CPPTYPE_MESSAGE:
        getattr(self, field_name).MergeFrom(field_value)
      else:
        setattr(self, field_name, field_value)

  # Scrub the generated constructor's metadata; presumably so it does not
  # advertise this module as its origin -- TODO confirm.
  Init.__module__ = None
  Init.__doc__ = None
  cls.__init__ = Init
def _IsMessageSetExtension(field):
  """Checks if a field is a message set extension."""
  if not field.is_extension:
    return False
  parent = field.containing_type
  # Same conjunction as before, short-circuiting in the same order.
  return (parent.has_options and
          parent.GetOptions().message_set_wire_format and
          field.type == _TYPE_MESSAGE and
          field.message_type == field.extension_scope and
          field.label == _LABEL_OPTIONAL)
def _AddMessageMethods(message_descriptor, cls):
"""Adds the methods to a protocol message class."""
if message_descriptor.is_extendable:
def ClearExtension(self, extension):
self.Extensions.ClearExtension(extension)
def HasExtension(self, extension):
return self.Extensions.HasExtension(extension)
def HasField(self, field_name):
return self._cmsg.HasField(field_name)
def ClearField(self, field_name):
child_cmessage = None
if field_name in self._composite_fields:
child_field = self._composite_fields[field_name]
del self._composite_fields[field_name]
child_cdescriptor = self.__descriptors[field_name]
# TODO(anuraag): Support clearing repeated message fields as well.
if (child_cdescriptor.label != _LABEL_REPEATED and
child_cdescriptor.cpp_type == _CPPTYPE_MESSAGE):
child_field._owner = None
child_cmessage = child_field._cmsg
if child_cmessage is not None:
self._cmsg.ClearField(field_name, child_cmessage)
else:
self._cmsg.ClearField(field_name)
def Clear(self):
cmessages_to_release = []
for field_name, child_field in self._composite_fields.iteritems():
child_cdescriptor = self.__descriptors[field_name]
# TODO(anuraag): Support clearing repeated message fields as well.
if (child_cdescriptor.label != _LABEL_REPEATED and
child_cdescriptor.cpp_type == _CPPTYPE_MESSAGE):
child_field._owner = None
cmessages_to_release.append((child_cdescriptor, child_field._cmsg))
self._composite_fields.clear()
self._cmsg.Clear(cmessages_to_release)
def IsInitialized(self, errors=None):
if self._cmsg.IsInitialized():
return True
if errors is not None:
errors.extend(self.FindInitializationErrors());
return False
def SerializeToString(self):
if not self.IsInitialized():
raise message.EncodeError(
'Message %s is missing required fields: %s' % (
self._cmsg.full_name, ','.join(self.FindInitializationErrors())))
return self._cmsg.SerializeToString()
def SerializePartialToString(self):
return self._cmsg.SerializePartialToString()
def ParseFromString(self, serialized):
self.Clear()
self.MergeFromString(serialized)
def MergeFromString(self, serialized):
byte_size = self._cmsg.MergeFromString(serialized)
if byte_size < 0:
raise message.DecodeError('Unable to merge from string.')
return byte_size
def MergeFrom(self, msg):
if not isinstance(msg, cls):
raise TypeError(
"Parameter to MergeFrom() must be instance of same class: "
"expected %s got %s." % (cls.__name__, type(msg).__name__))
self._cmsg.MergeFrom(msg._cmsg)
def CopyFrom(self, msg):
self._cmsg.CopyFrom(msg._cmsg)
def ByteSize(self):
return self._cmsg.ByteSize()
def SetInParent(self):
return self._cmsg.SetInParent()
def ListFields(self):
all_fields = []
field_list = self._cmsg.ListFields()
fields_by_name = cls.DESCRIPTOR.fields_by_name
for is_extension, field_name in field_list:
if is_extension:
extension = cls._extensions_by_name[field_name]
all_fields.append((extension, self.Extensions[extension]))
else:
field_descriptor = fields_by_name[field_name]
all_fields.append(
(field_descriptor, getattr(self, field_name)))
all_fields.sort(key=lambda item: item[0].number)
return all_fields
def FindInitializationErrors(self):
return self._cmsg.FindInitializationErrors()
def __str__(self):
return str(self._cmsg)
def __eq__(self, other):
if self is other:
return True
if not isinstance(other, self.__class__):
return False
return self.ListFields() == other.ListFields()
def __ne__(self, other):
return not self == other
def __hash__(self):
raise TypeError('unhashable object')
def __unicode__(self):
# Lazy import to prevent circular import when text_format imports this file.
from google.protobuf import text_format
return text_format.MessageToString(self, as_utf8=True).decode('utf-8')
# Attach the local methods to the message class.
for key, value in locals().copy().iteritems():
if key not in ('key', 'value', '__builtins__', '__name__', '__doc__'):
setattr(cls, key, value)
# Static methods:
def RegisterExtension(extension_handle):
  """Register `extension_handle` as an extension of this message type."""
  extension_handle.containing_type = cls.DESCRIPTOR
  cls._extensions_by_name[extension_handle.full_name] = extension_handle
  if _IsMessageSetExtension(extension_handle):
    # MessageSet extension. Also register under type name.
    cls._extensions_by_name[
        extension_handle.message_type.full_name] = extension_handle
cls.RegisterExtension = staticmethod(RegisterExtension)
def FromString(string):
  """Static factory: parse `string` into a new message instance."""
  msg = cls()
  msg.MergeFromString(string)
  return msg
cls.FromString = staticmethod(FromString)
def _AddPropertiesForExtensions(message_descriptor, cls):
  """Adds properties for all fields in this protocol message type."""
  # Expose one <NAME>_FIELD_NUMBER integer constant per declared extension
  # on the generated class (Python 2 codebase, hence iteritems()).
  extension_dict = message_descriptor.extensions_by_name
  for extension_name, extension_field in extension_dict.iteritems():
    constant_name = extension_name.upper() + '_FIELD_NUMBER'
    setattr(cls, constant_name, extension_field.number)
| apache-2.0 |
bgris/ODL_bgris | lib/python3.5/site-packages/scipy/optimize/_numdiff.py | 51 | 21209 | """Routines for numerical differentiation."""
from __future__ import division
import numpy as np
from ..sparse import issparse, csc_matrix, csr_matrix, coo_matrix, find
from ._group_columns import group_dense, group_sparse
EPS = np.finfo(np.float64).eps
def _adjust_scheme_to_bounds(x0, h, num_steps, scheme, lb, ub):
    """Adjust final difference scheme to the presence of bounds.

    Parameters
    ----------
    x0 : ndarray, shape (n,)
        Point at which we wish to estimate derivative.
    h : ndarray, shape (n,)
        Desired finite difference steps.
    num_steps : int
        Number of `h` steps in one direction required to implement finite
        difference scheme. For example, 2 means that we need to evaluate
        f(x0 + 2 * h) or f(x0 - 2 * h)
    scheme : {'1-sided', '2-sided'}
        Whether steps in one or both directions are required. In other
        words '1-sided' applies to forward and backward schemes, '2-sided'
        applies to center schemes.
    lb : ndarray, shape (n,)
        Lower bounds on independent variables.
    ub : ndarray, shape (n,)
        Upper bounds on independent variables.

    Returns
    -------
    h_adjusted : ndarray, shape (n,)
        Adjusted step sizes. Step size decreases only if a sign flip or
        switching to one-sided scheme doesn't allow to take a full step.
    use_one_sided : ndarray of bool, shape (n,)
        Whether to switch to one-sided scheme. Informative only for
        ``scheme='2-sided'``.
    """
    if scheme == '1-sided':
        use_one_sided = np.ones_like(h, dtype=bool)
    elif scheme == '2-sided':
        h = np.abs(h)
        use_one_sided = np.zeros_like(h, dtype=bool)
    else:
        raise ValueError("`scheme` must be '1-sided' or '2-sided'.")
    # Fast path: with no finite bounds, every requested step fits as-is.
    if np.all((lb == -np.inf) & (ub == np.inf)):
        return h, use_one_sided
    h_total = h * num_steps
    h_adjusted = h.copy()
    lower_dist = x0 - lb
    upper_dist = ub - x0
    if scheme == '1-sided':
        x = x0 + h_total
        violated = (x < lb) | (x > ub)
        fitting = np.abs(h_total) <= np.maximum(lower_dist, upper_dist)
        # Where flipping the step sign keeps the evaluation point feasible,
        # do only that; otherwise shrink the step toward the wider side.
        h_adjusted[violated & fitting] *= -1
        forward = (upper_dist >= lower_dist) & ~fitting
        h_adjusted[forward] = upper_dist[forward] / num_steps
        backward = (upper_dist < lower_dist) & ~fitting
        h_adjusted[backward] = -lower_dist[backward] / num_steps
    elif scheme == '2-sided':
        central = (lower_dist >= h_total) & (upper_dist >= h_total)
        # Too close to a bound for a central scheme: switch to one-sided,
        # shrinking h so both x0 + h and x0 + 2h remain within bounds.
        forward = (upper_dist >= lower_dist) & ~central
        h_adjusted[forward] = np.minimum(
            h[forward], 0.5 * upper_dist[forward] / num_steps)
        use_one_sided[forward] = True
        backward = (upper_dist < lower_dist) & ~central
        h_adjusted[backward] = -np.minimum(
            h[backward], 0.5 * lower_dist[backward] / num_steps)
        use_one_sided[backward] = True
        # If the shrunken one-sided step is no larger than the distance to
        # the nearer bound, a (smaller) central scheme fits after all.
        min_dist = np.minimum(upper_dist, lower_dist) / num_steps
        adjusted_central = (~central & (np.abs(h_adjusted) <= min_dist))
        h_adjusted[adjusted_central] = min_dist[adjusted_central]
        use_one_sided[adjusted_central] = False
    return h_adjusted, use_one_sided
def _compute_absolute_step(rel_step, x0, method):
    """Compute absolute finite-difference steps from a relative step.

    The absolute step is ``rel_step * sign(x0) * max(1, abs(x0))``;
    stepping away from zero in the direction of ``x0``'s sign.

    Parameters
    ----------
    rel_step : None or array_like
        Relative step size.  If None, a method-dependent near-optimal
        power of machine epsilon is used (see Notes of approx_derivative).
    x0 : ndarray
        Point at which the derivative is estimated.
    method : {'2-point', '3-point', 'cs'}
        Finite difference method.

    Returns
    -------
    ndarray
        Absolute step for each element of `x0`.
    """
    if rel_step is None:
        # EPS**(1/2) balances truncation/round-off error for first-order
        # schemes, EPS**(1/3) for the second-order 3-point scheme.
        if method == '2-point':
            rel_step = EPS**0.5
        elif method == '3-point':
            rel_step = EPS**(1 / 3)
        elif method == 'cs':
            rel_step = EPS**(0.5)
        else:
            # BUG FIX: the message previously omitted the supported
            # 'cs' (complex-step) method.
            raise ValueError(
                "`method` must be '2-point', '3-point' or 'cs'.")
    # Treat sign(0) as +1 so the step is never zero.
    sign_x0 = (x0 >= 0).astype(float) * 2 - 1
    return rel_step * sign_x0 * np.maximum(1.0, np.abs(x0))
def _prepare_bounds(bounds, x0):
lb, ub = [np.asarray(b, dtype=float) for b in bounds]
if lb.ndim == 0:
lb = np.resize(lb, x0.shape)
if ub.ndim == 0:
ub = np.resize(ub, x0.shape)
return lb, ub
def group_columns(A, order=0):
    """Group columns of a 2-d matrix for sparse finite differencing [1]_.

    Two columns are in the same group if in each row at least one of them
    has zero. A greedy sequential algorithm is used to construct groups.

    Parameters
    ----------
    A : array_like or sparse matrix, shape (m, n)
        Matrix of which to group columns.
    order : int, iterable of int with shape (n,) or None
        Permutation array which defines the order of columns enumeration.
        If int or None, a random permutation is used with `order` used as
        a random seed. Default is 0, that is use a random permutation but
        guarantee repeatability.

    Returns
    -------
    groups : ndarray of int, shape (n,)
        Contains values from 0 to n_groups-1, where n_groups is the number
        of found groups. Each value ``groups[i]`` is an index of a group to
        which i-th column assigned. The procedure was helpful only if
        n_groups is significantly less than n.

    References
    ----------
    .. [1] A. Curtis, M. J. D. Powell, and J. Reid, "On the estimation of
           sparse Jacobian matrices", Journal of the Institute of Mathematics
           and its Applications, 13 (1974), pp. 117-120.
    """
    if issparse(A):
        A = csc_matrix(A)
    else:
        A = np.atleast_2d(A)
        # Only the sparsity pattern matters, not the actual values.
        A = (A != 0).astype(np.int32)
    if A.ndim != 2:
        raise ValueError("`A` must be 2-dimensional.")
    m, n = A.shape
    if order is None or np.isscalar(order):
        # Scalar `order` acts as a random seed for a reproducible shuffle.
        rng = np.random.RandomState(order)
        order = rng.permutation(n)
    else:
        order = np.asarray(order)
        if order.shape != (n,):
            raise ValueError("`order` has incorrect shape.")
    # Enumerate columns in permuted order for the greedy algorithm.
    A = A[:, order]
    if issparse(A):
        groups = group_sparse(m, n, A.indices, A.indptr)
    else:
        groups = group_dense(m, n, A)
    # Undo the permutation so group indices align with original columns.
    groups[order] = groups.copy()
    return groups
def approx_derivative(fun, x0, method='3-point', rel_step=None, f0=None,
                      bounds=(-np.inf, np.inf), sparsity=None, args=(),
                      kwargs={}):
    """Compute finite difference approximation of the derivatives of a
    vector-valued function.

    If a function maps from R^n to R^m, its derivatives form m-by-n matrix
    called the Jacobian, where an element (i, j) is a partial derivative of
    f[i] with respect to x[j].

    Parameters
    ----------
    fun : callable
        Function of which to estimate the derivatives. The argument x
        passed to this function is ndarray of shape (n,) (never a scalar
        even if n=1). It must return 1-d array_like of shape (m,) or a scalar.
    x0 : array_like of shape (n,) or float
        Point at which to estimate the derivatives. Float will be converted
        to a 1-d array.
    method : {'3-point', '2-point'}, optional
        Finite difference method to use:
            - '2-point' - use the fist order accuracy forward or backward
                          difference.
            - '3-point' - use central difference in interior points and the
                          second order accuracy forward or backward difference
                          near the boundary.
            - 'cs' - use a complex-step finite difference scheme. This assumes
                     that the user function is real-valued and can be
                     analytically continued to the complex plane. Otherwise,
                     produces bogus results.
    rel_step : None or array_like, optional
        Relative step size to use. The absolute step size is computed as
        ``h = rel_step * sign(x0) * max(1, abs(x0))``, possibly adjusted to
        fit into the bounds. For ``method='3-point'`` the sign of `h` is
        ignored. If None (default) then step is selected automatically,
        see Notes.
    f0 : None or array_like, optional
        If not None it is assumed to be equal to ``fun(x0)``, in this case
        the ``fun(x0)`` is not called. Default is None.
    bounds : tuple of array_like, optional
        Lower and upper bounds on independent variables. Defaults to no bounds.
        Each bound must match the size of `x0` or be a scalar, in the latter
        case the bound will be the same for all variables. Use it to limit the
        range of function evaluation.
    sparsity : {None, array_like, sparse matrix, 2-tuple}, optional
        Defines a sparsity structure of the Jacobian matrix. If the Jacobian
        matrix is known to have only few non-zero elements in each row, then
        it's possible to estimate its several columns by a single function
        evaluation [3]_. To perform such economic computations two ingredients
        are required:

        * structure : array_like or sparse matrix of shape (m, n). A zero
          element means that a corresponding element of the Jacobian
          identically equals to zero.
        * groups : array_like of shape (n,). A column grouping for a given
          sparsity structure, use `group_columns` to obtain it.

        A single array or a sparse matrix is interpreted as a sparsity
        structure, and groups are computed inside the function. A tuple is
        interpreted as (structure, groups). If None (default), a standard
        dense differencing will be used.

        Note, that sparse differencing makes sense only for large Jacobian
        matrices where each row contains few non-zero elements.
    args, kwargs : tuple and dict, optional
        Additional arguments passed to `fun`. Both empty by default.
        The calling signature is ``fun(x, *args, **kwargs)``.

    Returns
    -------
    J : ndarray or csr_matrix
        Finite difference approximation of the Jacobian matrix. If `sparsity`
        is None then ndarray with shape (m, n) is returned. Although if m=1 it
        is returned as a gradient with shape (n,). If `sparsity` is not None,
        csr_matrix with shape (m, n) is returned.

    See Also
    --------
    check_derivative : Check correctness of a function computing derivatives.

    Notes
    -----
    If `rel_step` is not provided, it assigned to ``EPS**(1/s)``, where EPS is
    machine epsilon for float64 numbers, s=2 for '2-point' method and s=3 for
    '3-point' method. Such relative step approximately minimizes a sum of
    truncation and round-off errors, see [1]_.

    A finite difference scheme for '3-point' method is selected automatically.
    The well-known central difference scheme is used for points sufficiently
    far from the boundary, and 3-point forward or backward scheme is used for
    points near the boundary. Both schemes have the second-order accuracy in
    terms of Taylor expansion. Refer to [2]_ for the formulas of 3-point
    forward and backward difference schemes.

    For dense differencing when m=1 Jacobian is returned with a shape (n,),
    on the other hand when n=1 Jacobian is returned with a shape (m, 1).
    Our motivation is the following: a) It handles a case of gradient
    computation (m=1) in a conventional way. b) It clearly separates these two
    different cases. b) In all cases np.atleast_2d can be called to get 2-d
    Jacobian with correct dimensions.

    References
    ----------
    .. [1] W. H. Press et. al. "Numerical Recipes. The Art of Scientific
           Computing. 3rd edition", sec. 5.7.
    .. [2] A. Curtis, M. J. D. Powell, and J. Reid, "On the estimation of
           sparse Jacobian matrices", Journal of the Institute of Mathematics
           and its Applications, 13 (1974), pp. 117-120.
    .. [3] B. Fornberg, "Generation of Finite Difference Formulas on
           Arbitrarily Spaced Grids", Mathematics of Computation 51, 1988.

    Examples
    --------
    >>> import numpy as np
    >>> from scipy.optimize import approx_derivative
    >>>
    >>> def f(x, c1, c2):
    ...     return np.array([x[0] * np.sin(c1 * x[1]),
    ...                      x[0] * np.cos(c2 * x[1])])
    ...
    >>> x0 = np.array([1.0, 0.5 * np.pi])
    >>> approx_derivative(f, x0, args=(1, 2))
    array([[ 1.,  0.],
           [-1.,  0.]])

    Bounds can be used to limit the region of function evaluation.
    In the example below we compute left and right derivative at point 1.0.

    >>> def g(x):
    ...     return x**2 if x >= 1 else x
    ...
    >>> x0 = 1.0
    >>> approx_derivative(g, x0, bounds=(-np.inf, 1.0))
    array([ 1.])
    >>> approx_derivative(g, x0, bounds=(1.0, np.inf))
    array([ 2.])
    """
    if method not in ['2-point', '3-point', 'cs']:
        raise ValueError("Unknown method '%s'. " % method)
    x0 = np.atleast_1d(x0)
    if x0.ndim > 1:
        raise ValueError("`x0` must have at most 1 dimension.")
    lb, ub = _prepare_bounds(bounds, x0)
    if lb.shape != x0.shape or ub.shape != x0.shape:
        raise ValueError("Inconsistent shapes between bounds and `x0`.")

    def fun_wrapped(x):
        # Normalize user-function output to a 1-d array and validate it.
        f = np.atleast_1d(fun(x, *args, **kwargs))
        if f.ndim > 1:
            raise RuntimeError(("`fun` return value has "
                                "more than 1 dimension."))
        return f

    if f0 is None:
        f0 = fun_wrapped(x0)
    else:
        f0 = np.atleast_1d(f0)
        if f0.ndim > 1:
            raise ValueError("`f0` passed has more than 1 dimension.")
    if np.any((x0 < lb) | (x0 > ub)):
        raise ValueError("`x0` violates bound constraints.")
    # Select steps, then bend the scheme so evaluation points stay feasible.
    h = _compute_absolute_step(rel_step, x0, method)
    if method == '2-point':
        h, use_one_sided = _adjust_scheme_to_bounds(
            x0, h, 1, '1-sided', lb, ub)
    elif method == '3-point':
        h, use_one_sided = _adjust_scheme_to_bounds(
            x0, h, 1, '2-sided', lb, ub)
    elif method == 'cs':
        # Complex steps never leave the real bounds, no adjustment needed.
        use_one_sided = False
    if sparsity is None:
        return _dense_difference(fun_wrapped, x0, f0, h, use_one_sided, method)
    else:
        # `sparsity` is either a bare structure or a (structure, groups) pair.
        if not issparse(sparsity) and len(sparsity) == 2:
            structure, groups = sparsity
        else:
            structure = sparsity
            groups = group_columns(sparsity)
        if issparse(structure):
            structure = csc_matrix(structure)
        else:
            structure = np.atleast_2d(structure)
        groups = np.atleast_1d(groups)
        return _sparse_difference(fun_wrapped, x0, f0, h, use_one_sided,
                                  structure, groups, method)
def _dense_difference(fun, x0, f0, h, use_one_sided, method):
    """Estimate the full Jacobian, one column per function evaluation pass.

    Perturbs each variable independently; `f0` is the (precomputed) value
    of `fun(x0)`.  Returns shape (m, n), flattened to (n,) when m == 1.
    """
    m = f0.size
    n = x0.size
    J_transposed = np.empty((n, m))
    # Row i of h_vecs perturbs only variable i.
    h_vecs = np.diag(h)
    for i in range(h.size):
        if method == '2-point':
            x = x0 + h_vecs[i]
            dx = x[i] - x0[i]  # Recompute dx as exactly representable number.
            df = fun(x) - f0
        elif method == '3-point' and use_one_sided[i]:
            # Second-order one-sided scheme near a bound.
            x1 = x0 + h_vecs[i]
            x2 = x0 + 2 * h_vecs[i]
            dx = x2[i] - x0[i]
            f1 = fun(x1)
            f2 = fun(x2)
            df = -3.0 * f0 + 4 * f1 - f2
        elif method == '3-point' and not use_one_sided[i]:
            # Standard central difference in the interior.
            x1 = x0 - h_vecs[i]
            x2 = x0 + h_vecs[i]
            dx = x2[i] - x1[i]
            f1 = fun(x1)
            f2 = fun(x2)
            df = f2 - f1
        elif method == 'cs':
            # Complex step: derivative from the imaginary part, no
            # subtractive cancellation.
            f1 = fun(x0 + h_vecs[i]*1.j)
            df = f1.imag
            dx = h_vecs[i, i]
        else:
            raise RuntimeError("Never be here.")
        J_transposed[i] = df / dx
    if m == 1:
        # Gradient case: return shape (n,) by convention.
        J_transposed = np.ravel(J_transposed)
    return J_transposed.T
def _sparse_difference(fun, x0, f0, h, use_one_sided,
                       structure, groups, method):
    """Estimate a sparse Jacobian, one function evaluation pass per group.

    Variables in the same group (see `group_columns`) never share a
    non-zero row in `structure`, so they can be perturbed simultaneously.
    Returns a csr_matrix of shape (m, n).
    """
    m = f0.size
    n = x0.size
    row_indices = []
    col_indices = []
    fractions = []
    n_groups = np.max(groups) + 1
    for group in range(n_groups):
        # Perturb variables which are in the same group simultaneously.
        e = np.equal(group, groups)
        h_vec = h * e
        if method == '2-point':
            x = x0 + h_vec
            dx = x - x0
            df = fun(x) - f0
            # The result is written to columns which correspond to perturbed
            # variables.
            cols, = np.nonzero(e)
            # Find all non-zero elements in selected columns of Jacobian.
            i, j, _ = find(structure[:, cols])
            # Restore column indices in the full array.
            j = cols[j]
        elif method == '3-point':
            # Here we do conceptually the same but separate one-sided
            # and two-sided schemes.
            x1 = x0.copy()
            x2 = x0.copy()
            mask_1 = use_one_sided & e
            x1[mask_1] += h_vec[mask_1]
            x2[mask_1] += 2 * h_vec[mask_1]
            mask_2 = ~use_one_sided & e
            x1[mask_2] -= h_vec[mask_2]
            x2[mask_2] += h_vec[mask_2]
            dx = np.zeros(n)
            dx[mask_1] = x2[mask_1] - x0[mask_1]
            dx[mask_2] = x2[mask_2] - x1[mask_2]
            f1 = fun(x1)
            f2 = fun(x2)
            cols, = np.nonzero(e)
            i, j, _ = find(structure[:, cols])
            j = cols[j]
            # Each non-zero row uses the formula matching its column's scheme.
            mask = use_one_sided[j]
            df = np.empty(m)
            rows = i[mask]
            df[rows] = -3 * f0[rows] + 4 * f1[rows] - f2[rows]
            rows = i[~mask]
            df[rows] = f2[rows] - f1[rows]
        elif method == 'cs':
            f1 = fun(x0 + h_vec*1.j)
            df = f1.imag
            dx = h_vec
            cols, = np.nonzero(e)
            i, j, _ = find(structure[:, cols])
            j = cols[j]
        else:
            raise ValueError("Never be here.")
        # All that's left is to compute the fraction. We store i, j and
        # fractions as separate arrays and later construct coo_matrix.
        row_indices.append(i)
        col_indices.append(j)
        fractions.append(df[i] / dx[j])
    row_indices = np.hstack(row_indices)
    col_indices = np.hstack(col_indices)
    fractions = np.hstack(fractions)
    J = coo_matrix((fractions, (row_indices, col_indices)), shape=(m, n))
    return csr_matrix(J)
def check_derivative(fun, jac, x0, bounds=(-np.inf, np.inf), args=(),
                     kwargs={}):
    """Check correctness of a function computing derivatives (Jacobian or
    gradient) by comparison with a finite difference approximation.

    Parameters
    ----------
    fun : callable
        Function of which to estimate the derivatives. The argument x
        passed to this function is ndarray of shape (n,) (never a scalar
        even if n=1). It must return 1-d array_like of shape (m,) or a scalar.
    jac : callable
        Function which computes Jacobian matrix of `fun`. It must work with
        argument x the same way as `fun`. The return value must be array_like
        or sparse matrix with an appropriate shape.
    x0 : array_like of shape (n,) or float
        Point at which to estimate the derivatives. Float will be converted
        to 1-d array.
    bounds : 2-tuple of array_like, optional
        Lower and upper bounds on independent variables. Defaults to no bounds.
        Each bound must match the size of `x0` or be a scalar, in the latter
        case the bound will be the same for all variables. Use it to limit the
        range of function evaluation.
    args, kwargs : tuple and dict, optional
        Additional arguments passed to `fun` and `jac`. Both empty by default.
        The calling signature is ``fun(x, *args, **kwargs)`` and the same
        for `jac`.

    Returns
    -------
    accuracy : float
        The maximum among all relative errors for elements with absolute values
        higher than 1 and absolute errors for elements with absolute values
        less or equal than 1. If `accuracy` is on the order of 1e-6 or lower,
        then it is likely that your `jac` implementation is correct.

    See Also
    --------
    approx_derivative : Compute finite difference approximation of derivative.

    Examples
    --------
    >>> import numpy as np
    >>> from scipy.optimize import check_derivative
    >>>
    >>>
    >>> def f(x, c1, c2):
    ...     return np.array([x[0] * np.sin(c1 * x[1]),
    ...                      x[0] * np.cos(c2 * x[1])])
    ...
    >>> def jac(x, c1, c2):
    ...     return np.array([
    ...         [np.sin(c1 * x[1]),  c1 * x[0] * np.cos(c1 * x[1])],
    ...         [np.cos(c2 * x[1]), -c2 * x[0] * np.sin(c2 * x[1])]
    ...     ])
    ...
    >>>
    >>> x0 = np.array([1.0, 0.5 * np.pi])
    >>> check_derivative(f, jac, x0, args=(1, 2))
    2.4492935982947064e-16
    """
    J_to_test = jac(x0, *args, **kwargs)
    if issparse(J_to_test):
        # Reuse the candidate Jacobian's own pattern as the sparsity
        # structure, so only its claimed non-zeros are compared.
        J_diff = approx_derivative(fun, x0, bounds=bounds, sparsity=J_to_test,
                                   args=args, kwargs=kwargs)
        J_to_test = csr_matrix(J_to_test)
        abs_err = J_to_test - J_diff
        i, j, abs_err_data = find(abs_err)
        J_diff_data = np.asarray(J_diff[i, j]).ravel()
        # Relative error for large elements, absolute error otherwise.
        return np.max(np.abs(abs_err_data) /
                      np.maximum(1, np.abs(J_diff_data)))
    else:
        J_diff = approx_derivative(fun, x0, bounds=bounds,
                                   args=args, kwargs=kwargs)
        abs_err = np.abs(J_to_test - J_diff)
        return np.max(abs_err / np.maximum(1, np.abs(J_diff)))
| gpl-3.0 |
debsankha/networkx | examples/algorithms/blockmodel.py | 32 | 3009 | #!/usr/bin/env python
# encoding: utf-8
"""
Example of creating a block model using the blockmodel function in NX. Data used is the Hartford, CT drug users network:
@article{,
title = {Social Networks of Drug Users in {High-Risk} Sites: Finding the Connections},
volume = {6},
shorttitle = {Social Networks of Drug Users in {High-Risk} Sites},
url = {http://dx.doi.org/10.1023/A:1015457400897},
doi = {10.1023/A:1015457400897},
number = {2},
journal = {{AIDS} and Behavior},
author = {Margaret R. Weeks and Scott Clair and Stephen P. Borgatti and Kim Radda and Jean J. Schensul},
month = jun,
year = {2002},
pages = {193--206}
}
"""
__author__ = """\n""".join(['Drew Conway <drew.conway@nyu.edu>',
'Aric Hagberg <hagberg@lanl.gov>'])
from collections import defaultdict
import networkx as nx
import numpy
from scipy.cluster import hierarchy
from scipy.spatial import distance
import matplotlib.pyplot as plt
def create_hc(G):
    """Creates hierarchical cluster of graph G from distance matrix.

    Distances are shortest-path lengths between all node pairs.  Returns
    a list of node lists, one per cluster, suitable for nx.blockmodel.
    """
    # NOTE(review): assumes nodes are consecutively labeled integers
    # 0..len(G)-1 (the caller runs convert_node_labels_to_integers first)
    # — confirm before reusing elsewhere.
    path_length=nx.all_pairs_shortest_path_length(G)
    distances=numpy.zeros((len(G),len(G)))
    for u,p in path_length.items():
        for v,d in p.items():
            distances[u][v]=d
    # Create hierarchical cluster
    Y=distance.squareform(distances)
    Z=hierarchy.complete(Y) # Creates HC using farthest point linkage
    # This partition selection is arbitrary, for illustrive purposes
    membership=list(hierarchy.fcluster(Z,t=1.15))
    # Create collection of lists for blockmodel
    partition=defaultdict(list)
    for n,p in zip(list(range(len(G))),membership):
        partition[p].append(n)
    return list(partition.values())
if __name__ == '__main__':
    # Load the drug-user network and reduce it to its largest component.
    G=nx.read_edgelist("hartford_drug.edgelist")
    # Extract largest connected component into graph H
    H=nx.connected_component_subgraphs(G)[0]
    # Makes life easier to have consecutively labeled integer nodes
    H=nx.convert_node_labels_to_integers(H)
    # Create parititions with hierarchical clustering
    partitions=create_hc(H)
    # Build blockmodel graph
    BM=nx.blockmodel(H,partitions)
    # Draw original graph
    pos=nx.spring_layout(H,iterations=100)
    fig=plt.figure(1,figsize=(6,10))
    ax=fig.add_subplot(211)
    nx.draw(H,pos,with_labels=False,node_size=10)
    plt.xlim(0,1)
    plt.ylim(0,1)
    # Draw block model with weighted edges and nodes sized by number of internal nodes
    node_size=[BM.node[x]['nnodes']*10 for x in BM.nodes()]
    edge_width=[(2*d['weight']) for (u,v,d) in BM.edges(data=True)]
    # Set positions to mean of positions of internal nodes from original graph
    posBM={}
    for n in BM:
        xy=numpy.array([pos[u] for u in BM.node[n]['graph']])
        posBM[n]=xy.mean(axis=0)
    ax=fig.add_subplot(212)
    nx.draw(BM,posBM,node_size=node_size,width=edge_width,with_labels=False)
    plt.xlim(0,1)
    plt.ylim(0,1)
    plt.axis('off')
    # Save the two-panel figure (original on top, block model below).
    plt.savefig('hartford_drug_block_model.png')
nasfarley88/tomato-timer-tg-bot | tomatotimertgbot/__init__.py | 1 | 7810 | #!/usr/bin/env python3
import asyncio
import telepot
import yaml
import datetime
import random
import math
import dataset
from . import help, task
def format_seconds_as_mm_ss(seconds):
    """Render a duration in seconds as ``M:SS`` (minutes not zero-padded)."""
    minutes, secs = divmod(math.floor(seconds), 60)
    return "{}:{:02}".format(minutes, secs)
class Tomato(telepot.helper.ChatHandler):
    """Per-chat Telegram handler implementing a pomodoro-style task timer.

    One instance exists per chat; it tracks at most one running task at a
    time (``self._current_task``) and responds to slash commands for
    starting, cancelling, inspecting and listing tasks, plus break timers.
    """

    def __init__(self, seed_tuple, timeout):
        super(Tomato, self).__init__(seed_tuple, timeout)
        self._current_task = None  # the running task.Task, or None when idle
        # self.chat_id = seed_tuple[1]['chat']['id']
        print(self.chat_id)
        print(self)
        print(self.__dict__)

    @asyncio.coroutine
    def on_message(self, msg):
        """Dispatch an incoming chat message to the matching command handler."""
        # TODO make sure that thing like /tsq is not matched as /ts
        if "/help" == msg["text"]:
            yield from self.sender.sendMessage(help.text)
        # make this auto start task with set goal
        elif ("/starttaskquick" == msg["text"] or
              "/q" == msg["text"]):
            yield from self.task_begin(msg, delay=10, goal="NOTHING")
        elif ("/starttask" == msg["text"] or
              "/st" == msg["text"]):
            yield from self.task_begin(msg)
        elif ("/timeleft" == msg["text"] or
              "/tl" == msg["text"]):
            yield from self.task_time_left(msg)
        elif ("/canceltask" == msg["text"] or
              "/ct" == msg["text"]):
            yield from self.task_cancel(msg)
        elif ("/displaytask" == msg["text"] or
              "/dt" == msg["text"]):
            yield from self.task_current_goal(msg)
        elif "/compliment" == msg["text"]:
            yield from self.compliment(msg)
        elif ("/tomato" == msg["text"] or
              "/t" == msg["text"]):
            yield from self.sender.sendMessage("I AM A TOMATO!!!")
        elif "/alltasks" == msg["text"]:
            yield from self.send_all_tasks_for_user(msg)
        elif ("/tt" == msg["text"] or
              "/taskstoday" == msg["text"]):
            yield from self.tasks_today_for_user(msg)
        elif("/shortbreak" == msg["text"] or
             "/sb" == msg["text"]):
            yield from self.break_time(msg, delay=60*5)
        elif("/longbreak" == msg["text"] or
             "/lb" == msg["text"]):
            yield from self.break_time(msg, delay=60*20)
        elif("/tinybreak" == msg["text"]):
            yield from self.break_time(msg, delay=1)

    @asyncio.coroutine
    def task_current_goal(self, msg):
        """Send the current task's goal, or a placeholder when idle."""
        try:
            # _current_task is None when idle -> AttributeError on .goal.
            yield from self.sender.sendMessage(self._current_task.goal)
        except AttributeError:
            yield from self.sender.sendMessage("NO GOAL.")

    # TODO get this to confirm task needs cancelling
    @asyncio.coroutine
    def task_cancel(self, msg):
        """Cancel the running task, if any, and tell the user."""
        if self._current_task:
            yield from self.sender.sendMessage("TASK CANCELLED.")
            yield from self._current_task.cancel()
            self._current_task = None
        else:
            yield from self.sender.sendMessage("TASK NOT STARTED.")

    @asyncio.coroutine
    def compliment(self, msg):
        """Send one randomly chosen canned compliment."""
        compliments = [
            "YOU ARE NOT A TOMATO.",
            "GOOD JOB.",
            "YOU WORK HARD.",
            "MAYBE YOU FAIL. MAYBE NOT.",
            "I AM CONFUSED.",
            "YOUR COMMAND OF LANGUAGE IS ... EXISTENT.",
            "GO HUMAN GO.",
            "YOUR HAIR IS INTERESTING.",
            "GET BACK TO WORK." if self._current_task else "BREAK OVER.",
        ]
        yield from self.sender.sendMessage(random.choice(compliments))

    # NOTE(review): no @asyncio.coroutine decorator here, unlike sibling
    # handlers — works as a plain generator coroutine, but confirm this
    # asymmetry is intended.
    def task_time_left(self, msg):
        """Report remaining time (mm:ss) on the current task."""
        if self._current_task:
            yield from self.sender.sendMessage(
                "{} LEFT TO ACHIEVE ".format(
                    format_seconds_as_mm_ss(
                        self._current_task.time_remaining()))+self._current_task.goal)
        else:
            yield from self.sender.sendMessage(
                "TASK NOT STARTED.")

    @asyncio.coroutine
    def task_end(self, msg):
        """Callback fired when the task timer expires; asks for completion."""
        yield from self.sender.sendMessage("TASK END!")
        reply_keyboard = {'keyboard': [['Yes', 'No']]}
        yield from self.sender.sendMessage(
            "YOU ACHIEVE "+self._current_task.goal+"?", reply_markup=reply_keyboard)
        try:
            print("Starting to listen...")
            l = self._bot.create_listener()
            l.set_options(timeout=30)
            l.capture(chat__id=self.chat_id)
            answer = yield from l.wait()
            # TODO check the answer is in the affirmative with a reply keyboard
            if answer['text'] == 'Yes':
                self._current_task.complete()
                yield from self.sender.sendMessage("GOOD JOB.", reply_markup={'hide_keyboard':True})
            else:
                yield from self.sender.sendMessage("I HAVE BEEN TOLD NOT TO JUDGE YOU.", reply_markup={'hide_keyboard':True})
        except telepot.helper.WaitTooLong as e:
            yield from self.sender.sendMessage("TOO SLOW.", reply_markup={'hide_keyboard':True})
            raise(e)
        # There is now no current task, let the old one fall into
        # garbage collection and die a natural death
        self._current_task = None

    @asyncio.coroutine
    def request_goal(self, msg):
        """Ask the user for a goal and return their reply text (30s timeout)."""
        yield from self.sender.sendMessage("YOUR GOAL:")
        self.listener.capture(chat__id=self.chat_id)
        self.listener.set_options(timeout=30)
        try:
            current_goal_msg = yield from self.listener.wait()
        except telepot.helper.WaitTooLong as e:
            yield from self.sender.sendMessage("TOO SLOW.")
            raise(e)
        except Exception as e:
            yield from self.sender.sendMessage("MALFUNCTION.")
            raise(e)
        # Restore the listener's default (no timeout) before returning.
        self.listener.set_options(timeout=None)
        # Strip the bot @mention if the user addressed it explicitly.
        current_goal = current_goal_msg["text"].replace("@tomato_task_bot ", "")
        return current_goal

    @asyncio.coroutine
    def break_time(self, msg, delay=60*5):
        """A break function."""
        yield from self.sender.sendMessage("BREAK {:d} MIN STARTED".format(int(delay/60)))
        yield from asyncio.sleep(delay)
        yield from self.sender.sendMessage("BREAK OVER")

    @asyncio.coroutine
    def task_begin(self, msg, delay=60*25, goal=None):
        """Start a new timed task; prompt for a goal unless one is given."""
        if goal:
            current_goal = goal
        else:
            current_goal = yield from self.request_goal(msg)
        # task_end is scheduled as the timer-expiry callback.
        h = task.Task(
            self.chat_id,
            current_goal,
            lambda: asyncio.async(self.task_end(msg)),
            delay=delay,
        )
        self._current_task = h
        yield from self.sender.sendMessage("TASK BEGIN!")

    @asyncio.coroutine
    def send_all_tasks_for_user(self, msg):
        """Dump every recorded task row for this chat from the database."""
        with dataset.connect("sqlite:///tasks.db") as d:
            all_tasks_for_user = str(list(d['tasks'].find(chat_id=self.chat_id)))
            yield from self.sender.sendMessage(all_tasks_for_user)

    @asyncio.coroutine
    def tasks_today_for_user(self, msg):
        """List tasks this chat completed within the last 24 hours."""
        with dataset.connect("sqlite:///tasks.db") as d:
            print(d['tasks'].find(chat_id=self.chat_id))
            for x in d['tasks'].find(chat_id=self.chat_id):
                print(x)
            # "Today" actually means a rolling 24-hour window.
            all_tasks_for_user = [x for x in d['tasks'].find(chat_id=self.chat_id) if x["start_time"] > datetime.datetime.now()-datetime.timedelta(days=1)]
            print("all tasks for user")
            print(all_tasks_for_user)
            if len(all_tasks_for_user) == 0:
                yield from self.sender.sendMessage("No tasks completed today.")
            else:
                string_list = ["Tasks completed today"]
                for x in all_tasks_for_user:
                    print(x)
                    string_list.append("{:%H:%M} {goal}".format(
                        x['start_time'], goal=x['goal']))
                yield from self.sender.sendMessage("\n".join(string_list))
| cc0-1.0 |
ztane/tet | tet/config/__init__.py | 2 | 8526 | from typing import Callable, Any
import sys
from collections import ChainMap
from collections.abc import Mapping
from functools import wraps
from pyramid.config import *
from pyramid.config import Configurator
from tet.decorators import deprecated
from tet.i18n import configure_i18n
from tet.util.collections import flatten
from tet.util.path import caller_package
class TetAppFactory(object):
    """
    This method is deprecated in favour of procedural configuration /
    pyramid_zcml with create_configurator. See `application_factory`
    decorator for more details.

    Subclasses override the class attributes and configure_* hooks; calling
    the class itself (``TetAppFactory(global_config, **settings)``) builds
    and returns the WSGI application.
    """
    # Package/module spec passed to config.scan(), or None to skip scanning.
    scan = None
    # Extra config.include() targets applied before configuration hooks.
    includes = []
    # Names filtered out of `default_includes`.
    excludes = []
    # Whether to set up i18n via tet.i18n.
    i18n = True
    default_i18n_domain = None
    settings = {}
    global_config = None
    # :type config: Configurator
    config = None
    default_includes = [
        'tet.services',
        'tet.renderers.json'
    ]

    @deprecated
    def __new__(cls, global_config, **settings_kw):
        # __new__ doubles as the application factory: it returns the WSGI
        # app rather than an instance.
        instance = cls.instantiate()
        instance.init_app_factory(global_config, settings_kw)
        return instance.construct_app()

    @classmethod
    def instantiate(cls):
        # Bypass the app-building __new__ above to get a plain instance.
        return super(TetAppFactory, cls).__new__(cls)

    def __init__(self, *args, **kwargs):
        super(TetAppFactory, self).__init__()

    def _dummy(self, config: Configurator):
        """No-op default for optional configuration hooks."""
        pass

    def init_app_factory(self, global_config, settings):
        """Store settings and build the Configurator with default includes."""
        self.settings = settings
        self.global_config = global_config
        self.config = self.make_configurator()
        self.do_default_includes()

    def do_default_includes(self):
        """Apply `default_includes` minus anything listed in `excludes`."""
        excludes = set(self.excludes)

        def conditional_include(item):
            if item not in excludes:
                self.config.include(item)

        for item in self.default_includes:
            conditional_include(item)

    def prepare_i18n(self):
        if self.i18n:
            configure_i18n(self.config, self.default_i18n_domain)

    def make_configurator(self) -> Configurator:
        """Hook: build the Configurator; override to customize creation."""
        return Configurator(settings=self.settings)

    # Optional hooks, no-ops unless overridden by a subclass.
    pre_configure_app = _dummy
    configure_db = _dummy

    def configure_app(self, config: Configurator) -> None:
        self.configure_db(config)
        self.configure_routes(config)

    def configure_routes(self, config: Configurator) -> None:
        """Hook: declare routes; default does nothing."""
        pass

    def post_configure_app(self, config: Configurator) -> None:
        pass

    def do_scan(self) -> None:
        self.config.scan(self.scan)

    def do_include(self) -> None:
        for i in self.includes:
            self.config.include(i)

    def construct_app(self) -> None:
        """Run the full hook pipeline and return the (wrapped) WSGI app."""
        if self.includes:
            self.do_include()
        self.prepare_i18n()
        self.pre_configure_app(self.config)
        self.configure_app(self.config)
        self.post_configure_app(self.config)
        if self.scan:
            self.do_scan()
        return self.wrap_app(self.config.make_wsgi_app())

    def wrap_app(self, app) -> None:
        """Hook: wrap the WSGI app (e.g. with middleware); identity by default."""
        return app

    @classmethod
    @deprecated
    def main(cls, global_config, **settings):
        """Paster-style entry point; equivalent to calling the class."""
        return cls(global_config, **settings)
# Feature module names (resolved as 'tet.<name>') that create_configurator
# may include automatically.
ALL_FEATURES = [
    'services',
    'i18n',
    'renderers.json',
    'renderers.tonnikala',
    'renderers.tonnikala.i18n',
    'security.authorization',
    'security.csrf'
]

# Baseline: no automatically included features.
MINIMAL_FEATURES = []
def create_configurator(*,
                        global_config=None,
                        settings=None,
                        merge_global_config=True,
                        configurator_class=Configurator,
                        included_features=(),
                        excluded_features=(),
                        package=None,
                        **kw) -> Configurator:
    """Create and partially configure a Pyramid Configurator for Tet.

    Parameters
    ----------
    global_config : mapping or None
        Paster global config; merged behind `settings` when
        `merge_global_config` is true and it is a Mapping.
    settings : mapping or None
        Application settings; takes precedence over `global_config`.
    merge_global_config : bool
        Whether to layer `settings` over `global_config` with a ChainMap.
    configurator_class : type
        Configurator class to instantiate.
    included_features, excluded_features : iterables (possibly nested)
        Tet feature names to include / exclude; flattened before use.
    package : module, str or None
        Package given to the Configurator; defaults to the caller's package.
    kw : dict
        Extra keyword arguments for the configurator; `default_i18n_domain`
        is extracted into settings instead.

    Returns
    -------
    Configurator
        The configurator with the selected tet features included.
    """
    defaults = {}
    # BUG FIX: `settings` defaults to None; ChainMap(None, ...) would raise
    # TypeError on the first lookup, so normalize to an empty dict first.
    if settings is None:
        settings = {}
    if merge_global_config and isinstance(global_config, Mapping):
        settings = ChainMap(settings, global_config, defaults)
    extracted_settings = {}
    if package is None:
        package = caller_package(ignored_modules=[__name__])
    # Settings-like keywords are moved from **kw into the settings mapping.
    for name in ['default_i18n_domain']:
        if name in kw:
            extracted_settings[name] = kw.pop(name)
    if hasattr(package, '__name__'):
        package_name = package.__name__
    else:
        package_name = package
    # `defaults` is already chained into `settings`, so this is visible there.
    defaults['default_i18n_domain'] = package_name
    config = configurator_class(settings=settings,
                                package=package,
                                **kw)
    config.add_settings(extracted_settings)
    included_features = list(flatten(included_features))
    excluded_features = set(flatten(excluded_features))
    feature_set = set(included_features) - set(excluded_features)
    config.registry.tet_features = feature_set
    for feature_name in included_features:
        if feature_name in feature_set:
            try:
                config.include('tet.' + feature_name)
            except Exception as e:
                # Report which feature failed before propagating.
                print('Unable to include feature {}: {}'.format(
                    feature_name,
                    e
                ), file=sys.stderr)
                raise
    return config
def application_factory(factory_function: Callable[[Configurator], Any]=None,
                        configure_only=False,
                        included_features=ALL_FEATURES,
                        excluded_features=(),
                        package=None,
                        **extra_parameters):
    """
    A decorator for main method / application configurator for Tet. The
    wrapped function must accept a single argument - the Configurator. The
    wrapper itself accepts arguments (global_config, **settings) like an
    ordinary Pyramid/Paster application entry point does.

    If configure_only=False (the default), then the return value is a
    WSGI application created from the configurator.

    `included_features` contains an iterable of features that should be
    automatically included in the application. By default all standard Tet
    features are included. For maximal future compatibility you can specify the
    included feature names here.

    `excluded_features` should be an iterable of features that shouldn't be
    automatically included - this serves as a fast way to get all standard
    features except a named few.

    `package` should be the package passed to the Configurator object;
    otherwise the package of the caller is assumed.

    :param factory_function: The actual wrapped factory function that
        accepts parameter (config: Configurator)
    :param configure_only: True if no WSGI application is to be made, false
        to actually create the WSGI application as the return value
    :param included_features: The iterable of included features. This can
        in turn contain other iterables; they are flattened by the wrapper into
        a list of strings.
    :param excluded_features: The iterable of excluded features. This can
        in turn contain other iterables; they are flattened by the wrapper into
        a list of strings.
    :param extra_parameters: extra parameters that will be passed as-is to
        the actual configurator generation.
    :return: the WSGI app if `configure_only` is `False`; `config`, if
        `configure_only` is `True`.
    """
    # Resolve the caller's package here, at decoration time, not inside
    # the wrapper (where the caller would be the WSGI server).
    if package is None:
        package = caller_package(ignored_modules=[__name__])

    def decorator(function):
        @wraps(function)
        def wrapper(*a, **kw):
            # Entry points are invoked as (global_config, **settings).
            if len(a) > 1:
                raise TypeError('application_factory wrapped function '
                                'called with more than 1 positional argument')

            global_config = a[0] if a else None
            settings = kw
            config = create_configurator(global_config=global_config,
                                         settings=settings,
                                         included_features=included_features,
                                         excluded_features=excluded_features,
                                         package=package,
                                         **extra_parameters)

            returned = function(config)
            # The factory may return a replacement Configurator to use.
            if isinstance(returned, Configurator):
                config = returned
            if not configure_only:
                return config.make_wsgi_app()
            else:
                # NOTE(review): the docstring says `config` is returned when
                # configure_only=True, but this returns the factory's raw
                # return value (possibly None) — confirm intended behavior.
                return returned

        return wrapper

    # Support both bare @application_factory and @application_factory(...).
    if factory_function is not None:
        if not callable(factory_function):
            raise TypeError("Factory function was specified but not callable")
        else:
            return decorator(factory_function)
    else:
        return decorator
| bsd-3-clause |
Samuel789/MediPi | MedManagementWeb/env/lib/python3.5/site-packages/Crypto/SelfTest/Cipher/test_OCB.py | 4 | 25001 | # ===================================================================
#
# Copyright (c) 2014, Legrandin <helderijs@gmail.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ===================================================================
import os
import re
import unittest
from binascii import hexlify
from Crypto.Util.py3compat import b, tobytes, bchr, unhexlify
from Crypto.Util.strxor import strxor_c
from Crypto.Util.number import long_to_bytes
from Crypto.SelfTest.st_common import list_test_cases
from Crypto.Cipher import AES
from Crypto.Hash import SHAKE128
def get_tag_random(tag, length):
    """Return ``length`` deterministic pseudo-random bytes derived from ``tag``.

    Same tag always yields the same byte stream (SHAKE128 of the tag).
    """
    xof = SHAKE128.new(data=tobytes(tag))
    return xof.read(length)
class OcbTests(unittest.TestCase):
    """Functional tests for AES-OCB: round-trips, parameter validation,
    MAC handling and incremental (chunked) operation."""

    key_128 = get_tag_random("key_128", 16)
    nonce_96 = get_tag_random("nonce_128", 12)
    data_128 = get_tag_random("data_128", 16)

    def test_loopback_128(self):
        # Encrypt-then-decrypt round trip must recover the plaintext.
        cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
        pt = get_tag_random("plaintext", 16 * 100)
        ct, mac = cipher.encrypt_and_digest(pt)

        cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
        pt2 = cipher.decrypt_and_verify(ct, mac)
        self.assertEqual(pt, pt2)

    def test_nonce(self):
        # Nonce is optional
        AES.new(self.key_128, AES.MODE_OCB)

        # Positional and keyword nonce must behave identically.
        cipher = AES.new(self.key_128, AES.MODE_OCB, self.nonce_96)
        ct = cipher.encrypt(self.data_128)

        cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
        self.assertEqual(ct, cipher.encrypt(self.data_128))

    def test_nonce_must_be_bytes(self):
        self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_OCB,
                          nonce='test12345678')

    def test_nonce_length(self):
        # nonce cannot be empty
        self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_OCB,
                          nonce=b(""))

        # nonce can be up to 15 bytes long
        for length in range(1, 16):
            AES.new(self.key_128, AES.MODE_OCB, nonce=self.data_128[:length])

        # 16 bytes (data_128) is one byte too long.
        self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_OCB,
                          nonce=self.data_128)

    def test_block_size_128(self):
        cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
        self.assertEqual(cipher.block_size, AES.block_size)

        # By default, a 15 bytes long nonce is randomly generated
        nonce1 = AES.new(self.key_128, AES.MODE_OCB).nonce
        nonce2 = AES.new(self.key_128, AES.MODE_OCB).nonce
        self.assertEqual(len(nonce1), 15)
        self.assertNotEqual(nonce1, nonce2)

    def test_nonce_attribute(self):
        cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
        self.assertEqual(cipher.nonce, self.nonce_96)

        # By default, a 15 bytes long nonce is randomly generated
        nonce1 = AES.new(self.key_128, AES.MODE_OCB).nonce
        nonce2 = AES.new(self.key_128, AES.MODE_OCB).nonce
        self.assertEqual(len(nonce1), 15)
        self.assertNotEqual(nonce1, nonce2)

    def test_unknown_parameters(self):
        # Unexpected positional or keyword parameters are rejected.
        self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_OCB,
                          self.nonce_96, 7)
        self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_OCB,
                          nonce=self.nonce_96, unknown=7)

        # But some are only known by the base cipher
        # (e.g. use_aesni consumed by the AES module)
        AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96,
                use_aesni=False)

    def test_null_encryption_decryption(self):
        # Empty input produces empty output for both directions.
        for func in "encrypt", "decrypt":
            cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
            result = getattr(cipher, func)(b(""))
            self.assertEqual(result, b(""))

    def test_either_encrypt_or_decrypt(self):
        # A cipher object is single-direction: after one operation is
        # used, the other must raise.
        cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
        cipher.encrypt(b("xyz"))
        self.assertRaises(TypeError, cipher.decrypt, b("xyz"))

        cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
        cipher.decrypt(b("xyz"))
        self.assertRaises(TypeError, cipher.encrypt, b("xyz"))

    def test_data_must_be_bytes(self):
        cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
        self.assertRaises(TypeError, cipher.encrypt, 'test1234567890-*')

        cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
        self.assertRaises(TypeError, cipher.decrypt, 'test1234567890-*')

    def test_mac_len(self):
        # Invalid MAC length
        self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_OCB,
                          nonce=self.nonce_96, mac_len=7)
        self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_OCB,
                          nonce=self.nonce_96, mac_len=16+1)

        # Valid MAC length
        for mac_len in range(8, 16 + 1):
            cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96,
                             mac_len=mac_len)
            _, mac = cipher.encrypt_and_digest(self.data_128)
            self.assertEqual(len(mac), mac_len)

        # Default MAC length
        cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
        _, mac = cipher.encrypt_and_digest(self.data_128)
        self.assertEqual(len(mac), 16)

    def test_invalid_mac(self):
        from Crypto.Util.strxor import strxor_c
        cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
        ct, mac = cipher.encrypt_and_digest(self.data_128)

        # Flipping one bit of the tag must make verification fail.
        invalid_mac = strxor_c(mac, 0x01)

        cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
        self.assertRaises(ValueError, cipher.decrypt_and_verify, ct,
                          invalid_mac)

    def test_hex_mac(self):
        cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
        mac_hex = cipher.hexdigest()
        self.assertEqual(cipher.digest(), unhexlify(mac_hex))

        cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
        cipher.hexverify(mac_hex)

    def test_message_chunks(self):
        # Validate that both associated data and plaintext/ciphertext
        # can be broken up in chunks of arbitrary length

        auth_data = get_tag_random("authenticated data", 127)
        plaintext = get_tag_random("plaintext", 127)

        cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
        cipher.update(auth_data)
        ciphertext, ref_mac = cipher.encrypt_and_digest(plaintext)

        def break_up(data, chunk_length):
            return [data[i:i+chunk_length] for i in range(0, len(data),
                    chunk_length)]

        # Chunked decryption (original comment said "Encryption" but the
        # loop decrypts the reference ciphertext)
        for chunk_length in 1, 2, 3, 7, 10, 13, 16, 40, 80, 128:
            cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)

            for chunk in break_up(auth_data, chunk_length):
                cipher.update(chunk)
            pt2 = b("")
            for chunk in break_up(ciphertext, chunk_length):
                pt2 += cipher.decrypt(chunk)
            pt2 += cipher.decrypt()
            self.assertEqual(plaintext, pt2)
            cipher.verify(ref_mac)

        # Chunked encryption (original comment said "Decryption" but the
        # loop encrypts the plaintext)
        for chunk_length in 1, 2, 3, 7, 10, 13, 16, 40, 80, 128:
            cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)

            for chunk in break_up(auth_data, chunk_length):
                cipher.update(chunk)
            ct2 = b("")
            for chunk in break_up(plaintext, chunk_length):
                ct2 += cipher.encrypt(chunk)
            ct2 += cipher.encrypt()
            self.assertEqual(ciphertext, ct2)
            self.assertEqual(cipher.digest(), ref_mac)
class OcbFSMTests(unittest.TestCase):
    """State-machine tests: which sequences of update/encrypt/decrypt/
    digest/verify calls are legal on an OCB cipher object."""

    key_128 = get_tag_random("key_128", 16)
    nonce_96 = get_tag_random("nonce_128", 12)
    data_128 = get_tag_random("data_128", 16)

    def test_valid_init_encrypt_decrypt_digest_verify(self):
        # No authenticated data, fixed plaintext
        # Verify path INIT->ENCRYPT->ENCRYPT(NONE)->DIGEST
        cipher = AES.new(self.key_128, AES.MODE_OCB,
                         nonce=self.nonce_96)
        ct = cipher.encrypt(self.data_128)
        ct += cipher.encrypt()
        mac = cipher.digest()

        # Verify path INIT->DECRYPT->DECRYPT(NONCE)->VERIFY
        cipher = AES.new(self.key_128, AES.MODE_OCB,
                         nonce=self.nonce_96)
        cipher.decrypt(ct)
        cipher.decrypt()
        cipher.verify(mac)

    def test_invalid_init_encrypt_decrypt_digest_verify(self):
        # No authenticated data, fixed plaintext
        # Verify path INIT->ENCRYPT->DIGEST
        # (digest without the finalizing encrypt() call must fail)
        cipher = AES.new(self.key_128, AES.MODE_OCB,
                         nonce=self.nonce_96)
        ct = cipher.encrypt(self.data_128)
        self.assertRaises(TypeError, cipher.digest)

        # Verify path INIT->DECRYPT->VERIFY
        cipher = AES.new(self.key_128, AES.MODE_OCB,
                         nonce=self.nonce_96)
        cipher.decrypt(ct)
        self.assertRaises(TypeError, cipher.verify)

    def test_valid_init_update_digest_verify(self):
        # No plaintext, fixed authenticated data
        # Verify path INIT->UPDATE->DIGEST
        cipher = AES.new(self.key_128, AES.MODE_OCB,
                         nonce=self.nonce_96)
        cipher.update(self.data_128)
        mac = cipher.digest()

        # Verify path INIT->UPDATE->VERIFY
        cipher = AES.new(self.key_128, AES.MODE_OCB,
                         nonce=self.nonce_96)
        cipher.update(self.data_128)
        cipher.verify(mac)

    def test_valid_full_path(self):
        # Fixed authenticated data, fixed plaintext
        # Verify path INIT->UPDATE->ENCRYPT->ENCRYPT(NONE)->DIGEST
        cipher = AES.new(self.key_128, AES.MODE_OCB,
                         nonce=self.nonce_96)
        cipher.update(self.data_128)
        ct = cipher.encrypt(self.data_128)
        ct += cipher.encrypt()
        mac = cipher.digest()

        # Verify path INIT->UPDATE->DECRYPT->DECRYPT(NONE)->VERIFY
        cipher = AES.new(self.key_128, AES.MODE_OCB,
                         nonce=self.nonce_96)
        cipher.update(self.data_128)
        cipher.decrypt(ct)
        cipher.decrypt()
        cipher.verify(mac)

    def test_invalid_encrypt_after_final(self):
        # encrypt() with no argument finalizes; further data must fail.
        cipher = AES.new(self.key_128, AES.MODE_OCB,
                         nonce=self.nonce_96)
        cipher.update(self.data_128)
        cipher.encrypt(self.data_128)
        cipher.encrypt()
        self.assertRaises(TypeError, cipher.encrypt, self.data_128)

    def test_invalid_decrypt_after_final(self):
        # decrypt() with no argument finalizes; further data must fail.
        cipher = AES.new(self.key_128, AES.MODE_OCB,
                         nonce=self.nonce_96)
        cipher.update(self.data_128)
        cipher.decrypt(self.data_128)
        cipher.decrypt()
        self.assertRaises(TypeError, cipher.decrypt, self.data_128)

    def test_valid_init_digest(self):
        # Verify path INIT->DIGEST
        cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
        cipher.digest()

    def test_valid_init_verify(self):
        # Verify path INIT->VERIFY
        cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
        mac = cipher.digest()

        cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
        cipher.verify(mac)

    def test_valid_multiple_encrypt_or_decrypt(self):
        # Several data calls may follow one another before finalization,
        # with or without associated data of various lengths.
        for method_name in "encrypt", "decrypt":
            for auth_data in (None, b("333"), self.data_128,
                              self.data_128 + b("3")):
                if auth_data is None:
                    assoc_len = None
                else:
                    assoc_len = len(auth_data)  # computed but unused here
                cipher = AES.new(self.key_128, AES.MODE_OCB,
                                 nonce=self.nonce_96)
                if auth_data is not None:
                    cipher.update(auth_data)
                method = getattr(cipher, method_name)
                method(self.data_128)
                method(self.data_128)
                method(self.data_128)
                method(self.data_128)
                method()

    def test_valid_multiple_digest_or_verify(self):
        # Multiple calls to digest
        cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
        cipher.update(self.data_128)
        first_mac = cipher.digest()
        for x in range(4):
            self.assertEqual(first_mac, cipher.digest())

        # Multiple calls to verify
        cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
        cipher.update(self.data_128)
        for x in range(5):
            cipher.verify(first_mac)

    def test_valid_encrypt_and_digest_decrypt_and_verify(self):
        # encrypt_and_digest
        cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
        cipher.update(self.data_128)
        ct, mac = cipher.encrypt_and_digest(self.data_128)

        # decrypt_and_verify
        cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
        cipher.update(self.data_128)
        pt = cipher.decrypt_and_verify(ct, mac)
        self.assertEqual(self.data_128, pt)

    def test_invalid_mixing_encrypt_decrypt(self):
        # Once per method, with or without assoc. data
        for method1_name, method2_name in (("encrypt", "decrypt"),
                                           ("decrypt", "encrypt")):
            for assoc_data_present in (True, False):
                cipher = AES.new(self.key_128, AES.MODE_OCB,
                                 nonce=self.nonce_96)
                if assoc_data_present:
                    cipher.update(self.data_128)
                getattr(cipher, method1_name)(self.data_128)
                self.assertRaises(TypeError, getattr(cipher, method2_name),
                                  self.data_128)

    def test_invalid_encrypt_or_update_after_digest(self):
        # After digest(), neither more data nor more assoc. data is legal.
        for method_name in "encrypt", "update":
            cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
            cipher.encrypt(self.data_128)
            cipher.encrypt()
            cipher.digest()
            self.assertRaises(TypeError, getattr(cipher, method_name),
                              self.data_128)

            cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
            cipher.encrypt_and_digest(self.data_128)

    def test_invalid_decrypt_or_update_after_verify(self):
        cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
        ct = cipher.encrypt(self.data_128)
        ct += cipher.encrypt()
        mac = cipher.digest()

        # After verify(), neither more data nor more assoc. data is legal.
        for method_name in "decrypt", "update":
            cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
            cipher.decrypt(ct)
            cipher.decrypt()
            cipher.verify(mac)
            self.assertRaises(TypeError, getattr(cipher, method_name),
                              self.data_128)

            cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
            cipher.decrypt_and_verify(ct, mac)
            self.assertRaises(TypeError, getattr(cipher, method_name),
                              self.data_128)
class OcbRfc7253Test(unittest.TestCase):
    """Known-answer tests against the AES-OCB vectors of RFC 7253
    (Appendix A), including the iterative wide test."""

    # Tuple with
    # - nonce
    # - authenticated data
    # - plaintext
    # - ciphertext and 16 byte MAC tag
    tv1_key = "000102030405060708090A0B0C0D0E0F"
    tv1 = (
        (
            "BBAA99887766554433221100",
            "",
            "",
            "785407BFFFC8AD9EDCC5520AC9111EE6"
        ),
        (
            "BBAA99887766554433221101",
            "0001020304050607",
            "0001020304050607",
            "6820B3657B6F615A5725BDA0D3B4EB3A257C9AF1F8F03009"
        ),
        (
            "BBAA99887766554433221102",
            "0001020304050607",
            "",
            "81017F8203F081277152FADE694A0A00"
        ),
        (
            "BBAA99887766554433221103",
            "",
            "0001020304050607",
            "45DD69F8F5AAE72414054CD1F35D82760B2CD00D2F99BFA9"
        ),
        (
            "BBAA99887766554433221104",
            "000102030405060708090A0B0C0D0E0F",
            "000102030405060708090A0B0C0D0E0F",
            "571D535B60B277188BE5147170A9A22C3AD7A4FF3835B8C5"
            "701C1CCEC8FC3358"
        ),
        (
            "BBAA99887766554433221105",
            "000102030405060708090A0B0C0D0E0F",
            "",
            "8CF761B6902EF764462AD86498CA6B97"
        ),
        (
            "BBAA99887766554433221106",
            "",
            "000102030405060708090A0B0C0D0E0F",
            "5CE88EC2E0692706A915C00AEB8B2396F40E1C743F52436B"
            "DF06D8FA1ECA343D"
        ),
        (
            "BBAA99887766554433221107",
            "000102030405060708090A0B0C0D0E0F1011121314151617",
            "000102030405060708090A0B0C0D0E0F1011121314151617",
            "1CA2207308C87C010756104D8840CE1952F09673A448A122"
            "C92C62241051F57356D7F3C90BB0E07F"
        ),
        (
            "BBAA99887766554433221108",
            "000102030405060708090A0B0C0D0E0F1011121314151617",
            "",
            "6DC225A071FC1B9F7C69F93B0F1E10DE"
        ),
        (
            "BBAA99887766554433221109",
            "",
            "000102030405060708090A0B0C0D0E0F1011121314151617",
            "221BD0DE7FA6FE993ECCD769460A0AF2D6CDED0C395B1C3C"
            "E725F32494B9F914D85C0B1EB38357FF"
        ),
        (
            "BBAA9988776655443322110A",
            "000102030405060708090A0B0C0D0E0F1011121314151617"
            "18191A1B1C1D1E1F",
            "000102030405060708090A0B0C0D0E0F1011121314151617"
            "18191A1B1C1D1E1F",
            "BD6F6C496201C69296C11EFD138A467ABD3C707924B964DE"
            "AFFC40319AF5A48540FBBA186C5553C68AD9F592A79A4240"
        ),
        (
            "BBAA9988776655443322110B",
            "000102030405060708090A0B0C0D0E0F1011121314151617"
            "18191A1B1C1D1E1F",
            "",
            "FE80690BEE8A485D11F32965BC9D2A32"
        ),
        (
            "BBAA9988776655443322110C",
            "",
            "000102030405060708090A0B0C0D0E0F1011121314151617"
            "18191A1B1C1D1E1F",
            "2942BFC773BDA23CABC6ACFD9BFD5835BD300F0973792EF4"
            "6040C53F1432BCDFB5E1DDE3BC18A5F840B52E653444D5DF"
        ),
        (
            "BBAA9988776655443322110D",
            "000102030405060708090A0B0C0D0E0F1011121314151617"
            "18191A1B1C1D1E1F2021222324252627",
            "000102030405060708090A0B0C0D0E0F1011121314151617"
            "18191A1B1C1D1E1F2021222324252627",
            "D5CA91748410C1751FF8A2F618255B68A0A12E093FF45460"
            "6E59F9C1D0DDC54B65E8628E568BAD7AED07BA06A4A69483"
            "A7035490C5769E60"
        ),
        (
            "BBAA9988776655443322110E",
            "000102030405060708090A0B0C0D0E0F1011121314151617"
            "18191A1B1C1D1E1F2021222324252627",
            "",
            "C5CD9D1850C141E358649994EE701B68"
        ),
        (
            "BBAA9988776655443322110F",
            "",
            "000102030405060708090A0B0C0D0E0F1011121314151617"
            "18191A1B1C1D1E1F2021222324252627",
            "4412923493C57D5DE0D700F753CCE0D1D2D95060122E9F15"
            "A5DDBFC5787E50B5CC55EE507BCB084E479AD363AC366B95"
            "A98CA5F3000B1479"
        )
    )

    # Tuple with
    # - key
    # - nonce
    # - authenticated data
    # - plaintext
    # - ciphertext and 12 byte MAC tag
    tv2 = (
        "0F0E0D0C0B0A09080706050403020100",
        "BBAA9988776655443322110D",
        "000102030405060708090A0B0C0D0E0F1011121314151617"
        "18191A1B1C1D1E1F2021222324252627",
        "000102030405060708090A0B0C0D0E0F1011121314151617"
        "18191A1B1C1D1E1F2021222324252627",
        "1792A4E31E0755FB03E31B22116E6C2DDF9EFD6E33D536F1"
        "A0124B0A55BAE884ED93481529C76B6AD0C515F4D1CDD4FD"
        "AC4F02AA"
    )

    # Tuple with
    # - key length
    # - MAC tag length
    # - Expected output
    tv3 = (
        (128, 128, "67E944D23256C5E0B6C61FA22FDF1EA2"),
        (192, 128, "F673F2C3E7174AAE7BAE986CA9F29E17"),
        (256, 128, "D90EB8E9C977C88B79DD793D7FFA161C"),
        (128, 96, "77A3D8E73589158D25D01209"),
        (192, 96, "05D56EAD2752C86BE6932C5E"),
        (256, 96, "5458359AC23B0CBA9E6330DD"),
        (128, 64, "192C9B7BD90BA06A"),
        (192, 64, "0066BC6E0EF34E24"),
        (256, 64, "7D4EA5D445501CBE"),
    )

    def test1(self):
        # Fixed-key vectors: check ciphertext, tag, and the decrypt path.
        key = unhexlify(b(self.tv1_key))
        for tv in self.tv1:
            nonce, aad, pt, ct = [ unhexlify(b(x)) for x in tv ]
            ct, mac_tag = ct[:-16], ct[-16:]

            cipher = AES.new(key, AES.MODE_OCB, nonce=nonce)
            cipher.update(aad)
            ct2 = cipher.encrypt(pt) + cipher.encrypt()
            self.assertEqual(ct, ct2)
            self.assertEqual(mac_tag, cipher.digest())

            cipher = AES.new(key, AES.MODE_OCB, nonce=nonce)
            cipher.update(aad)
            pt2 = cipher.decrypt(ct) + cipher.decrypt()
            self.assertEqual(pt, pt2)
            cipher.verify(mac_tag)

    def test2(self):
        # Single vector with a truncated (96-bit) tag.
        key, nonce, aad, pt, ct = [ unhexlify(b(x)) for x in self.tv2 ]
        ct, mac_tag = ct[:-12], ct[-12:]

        cipher = AES.new(key, AES.MODE_OCB, nonce=nonce, mac_len=12)
        cipher.update(aad)
        ct2 = cipher.encrypt(pt) + cipher.encrypt()
        self.assertEqual(ct, ct2)
        self.assertEqual(mac_tag, cipher.digest())

        cipher = AES.new(key, AES.MODE_OCB, nonce=nonce, mac_len=12)
        cipher.update(aad)
        pt2 = cipher.decrypt(ct) + cipher.decrypt()
        self.assertEqual(pt, pt2)
        cipher.verify(mac_tag)

    def test3(self):
        # Iterative "wide" test from RFC 7253 Appendix A: accumulate many
        # encryptions into C, then authenticate C itself.
        for keylen, taglen, result in self.tv3:

            key = bchr(0) * (keylen // 8 - 1) + bchr(taglen)
            C = b("")

            for i in range(128):
                S = bchr(0) * i

                N = long_to_bytes(3 * i + 1, 12)
                cipher = AES.new(key, AES.MODE_OCB, nonce=N, mac_len=taglen // 8)
                cipher.update(S)
                C += cipher.encrypt(S) + cipher.encrypt() + cipher.digest()

                N = long_to_bytes(3 * i + 2, 12)
                cipher = AES.new(key, AES.MODE_OCB, nonce=N, mac_len=taglen // 8)
                C += cipher.encrypt(S) + cipher.encrypt() + cipher.digest()

                N = long_to_bytes(3 * i + 3, 12)
                cipher = AES.new(key, AES.MODE_OCB, nonce=N, mac_len=taglen // 8)
                cipher.update(S)
                C += cipher.encrypt() + cipher.digest()

            N = long_to_bytes(385, 12)
            cipher = AES.new(key, AES.MODE_OCB, nonce=N, mac_len=taglen // 8)
            cipher.update(C)
            result2 = cipher.encrypt() + cipher.digest()
            self.assertEqual(unhexlify(b(result)), result2)
def get_tests(config={}):
    """Collect all OCB test cases into a flat list (``config`` is unused,
    kept for the self-test framework's calling convention)."""
    tests = []
    for case_class in (OcbTests, OcbFSMTests, OcbRfc7253Test):
        tests.extend(list_test_cases(case_class))
    return tests
if __name__ == '__main__':
    # Allow running this module directly as a test script.
    import unittest
    suite = lambda: unittest.TestSuite(get_tests())
    unittest.main(defaultTest='suite')
| apache-2.0 |
tsdmgz/ansible | lib/ansible/modules/network/avi/avi_useraccountprofile.py | 27 | 4667 | #!/usr/bin/python
#
# Created on Aug 25, 2016
# @author: Gaurav Rastogi (grastogi@avinetworks.com)
# Eric Anderson (eanderson@avinetworks.com)
# module_check: supported
# Avi Version: 17.1.1
#
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: avi_useraccountprofile
author: Gaurav Rastogi (grastogi@avinetworks.com)
short_description: Module for setup of UserAccountProfile Avi RESTful Object
description:
- This module is used to configure UserAccountProfile object
- more examples at U(https://github.com/avinetworks/devops)
requirements: [ avisdk ]
version_added: "2.4"
options:
state:
description:
- The state that should be applied on the entity.
default: present
choices: ["absent","present"]
account_lock_timeout:
description:
- Lock timeout period (in minutes).
- Default is 30 minutes.
- Default value when not specified in API or module is interpreted by Avi Controller as 30.
credentials_timeout_threshold:
description:
- The time period after which credentials expire.
- Default is 180 days.
- Default value when not specified in API or module is interpreted by Avi Controller as 180.
max_concurrent_sessions:
description:
- Maximum number of concurrent sessions allowed.
- There are unlimited sessions by default.
- Default value when not specified in API or module is interpreted by Avi Controller as 0.
max_login_failure_count:
description:
- Number of login attempts before lockout.
- Default is 3 attempts.
- Default value when not specified in API or module is interpreted by Avi Controller as 3.
max_password_history_count:
description:
- Maximum number of passwords to be maintained in the password history.
- Default is 4 passwords.
- Default value when not specified in API or module is interpreted by Avi Controller as 4.
name:
description:
- Name of the object.
required: true
url:
description:
- Avi controller URL of the object.
uuid:
description:
- Unique object identifier of the object.
extends_documentation_fragment:
- avi
'''
EXAMPLES = """
- name: Example to create UserAccountProfile object
avi_useraccountprofile:
controller: 10.10.25.42
username: admin
password: something
state: present
name: sample_useraccountprofile
"""
RETURN = '''
obj:
description: UserAccountProfile (api/useraccountprofile) object
returned: success, changed
type: dict
'''
from ansible.module_utils.basic import AnsibleModule
try:
from ansible.module_utils.avi import (
avi_common_argument_spec, HAS_AVI, avi_ansible_api)
except ImportError:
HAS_AVI = False
def main():
    """Ansible entry point: declare the module's argument spec and hand
    the actual Avi API interaction to ``avi_ansible_api``."""
    argument_specs = {
        'state': {'default': 'present', 'choices': ['absent', 'present']},
        'account_lock_timeout': {'type': 'int'},
        'credentials_timeout_threshold': {'type': 'int'},
        'max_concurrent_sessions': {'type': 'int'},
        'max_login_failure_count': {'type': 'int'},
        'max_password_history_count': {'type': 'int'},
        'name': {'type': 'str', 'required': True},
        'url': {'type': 'str'},
        'uuid': {'type': 'str'},
    }
    # Common Avi connection options (controller, username, password, ...).
    argument_specs.update(avi_common_argument_spec())
    module = AnsibleModule(argument_spec=argument_specs,
                           supports_check_mode=True)
    if not HAS_AVI:
        return module.fail_json(msg=(
            'Avi python API SDK (avisdk>=17.1) is not installed. '
            'For more details visit https://github.com/avinetworks/sdk.'))
    return avi_ansible_api(module, 'useraccountprofile',
                           set([]))
# Standard script guard — Ansible executes modules as standalone scripts.
if __name__ == '__main__':
    main()
| gpl-3.0 |
qinjian623/emacs-config | .python-environments/default/lib/python2.7/site-packages/pip/_vendor/requests/packages/charade/euctwfreq.py | 3133 | 34872 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# EUCTW frequency table
# Converted from big5 work
# by Taiwan's Mandarin Promotion Council
# <http://www.edu.tw:81/mandr/>
# 128 --> 0.42261
# 256 --> 0.57851
# 512 --> 0.74851
# 1024 --> 0.89384
# 2048 --> 0.97583
#
# Ideal Distribution Ratio = 0.74851/(1-0.74851) =2.98
# Random Distribution Ratio = 512/(5401-512)=0.105
#
# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR
# Threshold used by charade's character-distribution analysis; per the
# header comments above, ~25% of the ideal ratio (2.98) still clearly
# exceeds the random-distribution ratio (0.105).
EUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75
# Number of meaningful entries in the char-to-frequency-order table below.
# NOTE(review): consumers of this constant live elsewhere in charade — confirm.
EUCTW_TABLE_SIZE = 8102
EUCTWCharToFreqOrder = (
1,1800,1506, 255,1431, 198, 9, 82, 6,7310, 177, 202,3615,1256,2808, 110, # 2742
3735, 33,3241, 261, 76, 44,2113, 16,2931,2184,1176, 659,3868, 26,3404,2643, # 2758
1198,3869,3313,4060, 410,2211, 302, 590, 361,1963, 8, 204, 58,4296,7311,1931, # 2774
63,7312,7313, 317,1614, 75, 222, 159,4061,2412,1480,7314,3500,3068, 224,2809, # 2790
3616, 3, 10,3870,1471, 29,2774,1135,2852,1939, 873, 130,3242,1123, 312,7315, # 2806
4297,2051, 507, 252, 682,7316, 142,1914, 124, 206,2932, 34,3501,3173, 64, 604, # 2822
7317,2494,1976,1977, 155,1990, 645, 641,1606,7318,3405, 337, 72, 406,7319, 80, # 2838
630, 238,3174,1509, 263, 939,1092,2644, 756,1440,1094,3406, 449, 69,2969, 591, # 2854
179,2095, 471, 115,2034,1843, 60, 50,2970, 134, 806,1868, 734,2035,3407, 180, # 2870
995,1607, 156, 537,2893, 688,7320, 319,1305, 779,2144, 514,2374, 298,4298, 359, # 2886
2495, 90,2707,1338, 663, 11, 906,1099,2545, 20,2436, 182, 532,1716,7321, 732, # 2902
1376,4062,1311,1420,3175, 25,2312,1056, 113, 399, 382,1949, 242,3408,2467, 529, # 2918
3243, 475,1447,3617,7322, 117, 21, 656, 810,1297,2295,2329,3502,7323, 126,4063, # 2934
706, 456, 150, 613,4299, 71,1118,2036,4064, 145,3069, 85, 835, 486,2114,1246, # 2950
1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,7324,2127,2354, 347,3736, 221, # 2966
3503,3110,7325,1955,1153,4065, 83, 296,1199,3070, 192, 624, 93,7326, 822,1897, # 2982
2810,3111, 795,2064, 991,1554,1542,1592, 27, 43,2853, 859, 139,1456, 860,4300, # 2998
437, 712,3871, 164,2392,3112, 695, 211,3017,2096, 195,3872,1608,3504,3505,3618, # 3014
3873, 234, 811,2971,2097,3874,2229,1441,3506,1615,2375, 668,2076,1638, 305, 228, # 3030
1664,4301, 467, 415,7327, 262,2098,1593, 239, 108, 300, 200,1033, 512,1247,2077, # 3046
7328,7329,2173,3176,3619,2673, 593, 845,1062,3244, 88,1723,2037,3875,1950, 212, # 3062
266, 152, 149, 468,1898,4066,4302, 77, 187,7330,3018, 37, 5,2972,7331,3876, # 3078
7332,7333, 39,2517,4303,2894,3177,2078, 55, 148, 74,4304, 545, 483,1474,1029, # 3094
1665, 217,1869,1531,3113,1104,2645,4067, 24, 172,3507, 900,3877,3508,3509,4305, # 3110
32,1408,2811,1312, 329, 487,2355,2247,2708, 784,2674, 4,3019,3314,1427,1788, # 3126
188, 109, 499,7334,3620,1717,1789, 888,1217,3020,4306,7335,3510,7336,3315,1520, # 3142
3621,3878, 196,1034, 775,7337,7338, 929,1815, 249, 439, 38,7339,1063,7340, 794, # 3158
3879,1435,2296, 46, 178,3245,2065,7341,2376,7342, 214,1709,4307, 804, 35, 707, # 3174
324,3622,1601,2546, 140, 459,4068,7343,7344,1365, 839, 272, 978,2257,2572,3409, # 3190
2128,1363,3623,1423, 697, 100,3071, 48, 70,1231, 495,3114,2193,7345,1294,7346, # 3206
2079, 462, 586,1042,3246, 853, 256, 988, 185,2377,3410,1698, 434,1084,7347,3411, # 3222
314,2615,2775,4308,2330,2331, 569,2280, 637,1816,2518, 757,1162,1878,1616,3412, # 3238
287,1577,2115, 768,4309,1671,2854,3511,2519,1321,3737, 909,2413,7348,4069, 933, # 3254
3738,7349,2052,2356,1222,4310, 765,2414,1322, 786,4311,7350,1919,1462,1677,2895, # 3270
1699,7351,4312,1424,2437,3115,3624,2590,3316,1774,1940,3413,3880,4070, 309,1369, # 3286
1130,2812, 364,2230,1653,1299,3881,3512,3882,3883,2646, 525,1085,3021, 902,2000, # 3302
1475, 964,4313, 421,1844,1415,1057,2281, 940,1364,3116, 376,4314,4315,1381, 7, # 3318
2520, 983,2378, 336,1710,2675,1845, 321,3414, 559,1131,3022,2742,1808,1132,1313, # 3334
265,1481,1857,7352, 352,1203,2813,3247, 167,1089, 420,2814, 776, 792,1724,3513, # 3350
4071,2438,3248,7353,4072,7354, 446, 229, 333,2743, 901,3739,1200,1557,4316,2647, # 3366
1920, 395,2744,2676,3740,4073,1835, 125, 916,3178,2616,4317,7355,7356,3741,7357, # 3382
7358,7359,4318,3117,3625,1133,2547,1757,3415,1510,2313,1409,3514,7360,2145, 438, # 3398
2591,2896,2379,3317,1068, 958,3023, 461, 311,2855,2677,4074,1915,3179,4075,1978, # 3414
383, 750,2745,2617,4076, 274, 539, 385,1278,1442,7361,1154,1964, 384, 561, 210, # 3430
98,1295,2548,3515,7362,1711,2415,1482,3416,3884,2897,1257, 129,7363,3742, 642, # 3446
523,2776,2777,2648,7364, 141,2231,1333, 68, 176, 441, 876, 907,4077, 603,2592, # 3462
710, 171,3417, 404, 549, 18,3118,2393,1410,3626,1666,7365,3516,4319,2898,4320, # 3478
7366,2973, 368,7367, 146, 366, 99, 871,3627,1543, 748, 807,1586,1185, 22,2258, # 3494
379,3743,3180,7368,3181, 505,1941,2618,1991,1382,2314,7369, 380,2357, 218, 702, # 3510
1817,1248,3418,3024,3517,3318,3249,7370,2974,3628, 930,3250,3744,7371, 59,7372, # 3526
585, 601,4078, 497,3419,1112,1314,4321,1801,7373,1223,1472,2174,7374, 749,1836, # 3542
690,1899,3745,1772,3885,1476, 429,1043,1790,2232,2116, 917,4079, 447,1086,1629, # 3558
7375, 556,7376,7377,2020,1654, 844,1090, 105, 550, 966,1758,2815,1008,1782, 686, # 3574
1095,7378,2282, 793,1602,7379,3518,2593,4322,4080,2933,2297,4323,3746, 980,2496, # 3590
544, 353, 527,4324, 908,2678,2899,7380, 381,2619,1942,1348,7381,1341,1252, 560, # 3606
3072,7382,3420,2856,7383,2053, 973, 886,2080, 143,4325,7384,7385, 157,3886, 496, # 3622
4081, 57, 840, 540,2038,4326,4327,3421,2117,1445, 970,2259,1748,1965,2081,4082, # 3638
3119,1234,1775,3251,2816,3629, 773,1206,2129,1066,2039,1326,3887,1738,1725,4083, # 3654
279,3120, 51,1544,2594, 423,1578,2130,2066, 173,4328,1879,7386,7387,1583, 264, # 3670
610,3630,4329,2439, 280, 154,7388,7389,7390,1739, 338,1282,3073, 693,2857,1411, # 3686
1074,3747,2440,7391,4330,7392,7393,1240, 952,2394,7394,2900,1538,2679, 685,1483, # 3702
4084,2468,1436, 953,4085,2054,4331, 671,2395, 79,4086,2441,3252, 608, 567,2680, # 3718
3422,4087,4088,1691, 393,1261,1791,2396,7395,4332,7396,7397,7398,7399,1383,1672, # 3734
3748,3182,1464, 522,1119, 661,1150, 216, 675,4333,3888,1432,3519, 609,4334,2681, # 3750
2397,7400,7401,7402,4089,3025, 0,7403,2469, 315, 231,2442, 301,3319,4335,2380, # 3766
7404, 233,4090,3631,1818,4336,4337,7405, 96,1776,1315,2082,7406, 257,7407,1809, # 3782
3632,2709,1139,1819,4091,2021,1124,2163,2778,1777,2649,7408,3074, 363,1655,3183, # 3798
7409,2975,7410,7411,7412,3889,1567,3890, 718, 103,3184, 849,1443, 341,3320,2934, # 3814
1484,7413,1712, 127, 67, 339,4092,2398, 679,1412, 821,7414,7415, 834, 738, 351, # 3830
2976,2146, 846, 235,1497,1880, 418,1992,3749,2710, 186,1100,2147,2746,3520,1545, # 3846
1355,2935,2858,1377, 583,3891,4093,2573,2977,7416,1298,3633,1078,2549,3634,2358, # 3862
78,3750,3751, 267,1289,2099,2001,1594,4094, 348, 369,1274,2194,2175,1837,4338, # 3878
1820,2817,3635,2747,2283,2002,4339,2936,2748, 144,3321, 882,4340,3892,2749,3423, # 3894
4341,2901,7417,4095,1726, 320,7418,3893,3026, 788,2978,7419,2818,1773,1327,2859, # 3910
3894,2819,7420,1306,4342,2003,1700,3752,3521,2359,2650, 787,2022, 506, 824,3636, # 3926
534, 323,4343,1044,3322,2023,1900, 946,3424,7421,1778,1500,1678,7422,1881,4344, # 3942
165, 243,4345,3637,2521, 123, 683,4096, 764,4346, 36,3895,1792, 589,2902, 816, # 3958
626,1667,3027,2233,1639,1555,1622,3753,3896,7423,3897,2860,1370,1228,1932, 891, # 3974
2083,2903, 304,4097,7424, 292,2979,2711,3522, 691,2100,4098,1115,4347, 118, 662, # 3990
7425, 611,1156, 854,2381,1316,2861, 2, 386, 515,2904,7426,7427,3253, 868,2234, # 4006
1486, 855,2651, 785,2212,3028,7428,1040,3185,3523,7429,3121, 448,7430,1525,7431, # 4022
2164,4348,7432,3754,7433,4099,2820,3524,3122, 503, 818,3898,3123,1568, 814, 676, # 4038
1444, 306,1749,7434,3755,1416,1030, 197,1428, 805,2821,1501,4349,7435,7436,7437, # 4054
1993,7438,4350,7439,7440,2195, 13,2779,3638,2980,3124,1229,1916,7441,3756,2131, # 4070
7442,4100,4351,2399,3525,7443,2213,1511,1727,1120,7444,7445, 646,3757,2443, 307, # 4086
7446,7447,1595,3186,7448,7449,7450,3639,1113,1356,3899,1465,2522,2523,7451, 519, # 4102
7452, 128,2132, 92,2284,1979,7453,3900,1512, 342,3125,2196,7454,2780,2214,1980, # 4118
3323,7455, 290,1656,1317, 789, 827,2360,7456,3758,4352, 562, 581,3901,7457, 401, # 4134
4353,2248, 94,4354,1399,2781,7458,1463,2024,4355,3187,1943,7459, 828,1105,4101, # 4150
1262,1394,7460,4102, 605,4356,7461,1783,2862,7462,2822, 819,2101, 578,2197,2937, # 4166
7463,1502, 436,3254,4103,3255,2823,3902,2905,3425,3426,7464,2712,2315,7465,7466, # 4182
2332,2067, 23,4357, 193, 826,3759,2102, 699,1630,4104,3075, 390,1793,1064,3526, # 4198
7467,1579,3076,3077,1400,7468,4105,1838,1640,2863,7469,4358,4359, 137,4106, 598, # 4214
3078,1966, 780, 104, 974,2938,7470, 278, 899, 253, 402, 572, 504, 493,1339,7471, # 4230
3903,1275,4360,2574,2550,7472,3640,3029,3079,2249, 565,1334,2713, 863, 41,7473, # 4246
7474,4361,7475,1657,2333, 19, 463,2750,4107, 606,7476,2981,3256,1087,2084,1323, # 4262
2652,2982,7477,1631,1623,1750,4108,2682,7478,2864, 791,2714,2653,2334, 232,2416, # 4278
7479,2983,1498,7480,2654,2620, 755,1366,3641,3257,3126,2025,1609, 119,1917,3427, # 4294
862,1026,4109,7481,3904,3760,4362,3905,4363,2260,1951,2470,7482,1125, 817,4110, # 4310
4111,3906,1513,1766,2040,1487,4112,3030,3258,2824,3761,3127,7483,7484,1507,7485, # 4326
2683, 733, 40,1632,1106,2865, 345,4113, 841,2524, 230,4364,2984,1846,3259,3428, # 4342
7486,1263, 986,3429,7487, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562,3907, # 4358
3908,2939, 967,2751,2655,1349, 592,2133,1692,3324,2985,1994,4114,1679,3909,1901, # 4374
2185,7488, 739,3642,2715,1296,1290,7489,4115,2198,2199,1921,1563,2595,2551,1870, # 4390
2752,2986,7490, 435,7491, 343,1108, 596, 17,1751,4365,2235,3430,3643,7492,4366, # 4406
294,3527,2940,1693, 477, 979, 281,2041,3528, 643,2042,3644,2621,2782,2261,1031, # 4422
2335,2134,2298,3529,4367, 367,1249,2552,7493,3530,7494,4368,1283,3325,2004, 240, # 4438
1762,3326,4369,4370, 836,1069,3128, 474,7495,2148,2525, 268,3531,7496,3188,1521, # 4454
1284,7497,1658,1546,4116,7498,3532,3533,7499,4117,3327,2684,1685,4118, 961,1673, # 4470
2622, 190,2005,2200,3762,4371,4372,7500, 570,2497,3645,1490,7501,4373,2623,3260, # 4486
1956,4374, 584,1514, 396,1045,1944,7502,4375,1967,2444,7503,7504,4376,3910, 619, # 4502
7505,3129,3261, 215,2006,2783,2553,3189,4377,3190,4378, 763,4119,3763,4379,7506, # 4518
7507,1957,1767,2941,3328,3646,1174, 452,1477,4380,3329,3130,7508,2825,1253,2382, # 4534
2186,1091,2285,4120, 492,7509, 638,1169,1824,2135,1752,3911, 648, 926,1021,1324, # 4550
4381, 520,4382, 997, 847,1007, 892,4383,3764,2262,1871,3647,7510,2400,1784,4384, # 4566
1952,2942,3080,3191,1728,4121,2043,3648,4385,2007,1701,3131,1551, 30,2263,4122, # 4582
7511,2026,4386,3534,7512, 501,7513,4123, 594,3431,2165,1821,3535,3432,3536,3192, # 4598
829,2826,4124,7514,1680,3132,1225,4125,7515,3262,4387,4126,3133,2336,7516,4388, # 4614
4127,7517,3912,3913,7518,1847,2383,2596,3330,7519,4389, 374,3914, 652,4128,4129, # 4630
375,1140, 798,7520,7521,7522,2361,4390,2264, 546,1659, 138,3031,2445,4391,7523, # 4646
2250, 612,1848, 910, 796,3765,1740,1371, 825,3766,3767,7524,2906,2554,7525, 692, # 4662
444,3032,2624, 801,4392,4130,7526,1491, 244,1053,3033,4131,4132, 340,7527,3915, # 4678
1041,2987, 293,1168, 87,1357,7528,1539, 959,7529,2236, 721, 694,4133,3768, 219, # 4694
1478, 644,1417,3331,2656,1413,1401,1335,1389,3916,7530,7531,2988,2362,3134,1825, # 4710
730,1515, 184,2827, 66,4393,7532,1660,2943, 246,3332, 378,1457, 226,3433, 975, # 4726
3917,2944,1264,3537, 674, 696,7533, 163,7534,1141,2417,2166, 713,3538,3333,4394, # 4742
3918,7535,7536,1186, 15,7537,1079,1070,7538,1522,3193,3539, 276,1050,2716, 758, # 4758
1126, 653,2945,3263,7539,2337, 889,3540,3919,3081,2989, 903,1250,4395,3920,3434, # 4774
3541,1342,1681,1718, 766,3264, 286, 89,2946,3649,7540,1713,7541,2597,3334,2990, # 4790
7542,2947,2215,3194,2866,7543,4396,2498,2526, 181, 387,1075,3921, 731,2187,3335, # 4806
7544,3265, 310, 313,3435,2299, 770,4134, 54,3034, 189,4397,3082,3769,3922,7545, # 4822
1230,1617,1849, 355,3542,4135,4398,3336, 111,4136,3650,1350,3135,3436,3035,4137, # 4838
2149,3266,3543,7546,2784,3923,3924,2991, 722,2008,7547,1071, 247,1207,2338,2471, # 4854
1378,4399,2009, 864,1437,1214,4400, 373,3770,1142,2216, 667,4401, 442,2753,2555, # 4870
3771,3925,1968,4138,3267,1839, 837, 170,1107, 934,1336,1882,7548,7549,2118,4139, # 4886
2828, 743,1569,7550,4402,4140, 582,2384,1418,3437,7551,1802,7552, 357,1395,1729, # 4902
3651,3268,2418,1564,2237,7553,3083,3772,1633,4403,1114,2085,4141,1532,7554, 482, # 4918
2446,4404,7555,7556,1492, 833,1466,7557,2717,3544,1641,2829,7558,1526,1272,3652, # 4934
4142,1686,1794, 416,2556,1902,1953,1803,7559,3773,2785,3774,1159,2316,7560,2867, # 4950
4405,1610,1584,3036,2419,2754, 443,3269,1163,3136,7561,7562,3926,7563,4143,2499, # 4966
3037,4406,3927,3137,2103,1647,3545,2010,1872,4144,7564,4145, 431,3438,7565, 250, # 4982
97, 81,4146,7566,1648,1850,1558, 160, 848,7567, 866, 740,1694,7568,2201,2830, # 4998
3195,4147,4407,3653,1687, 950,2472, 426, 469,3196,3654,3655,3928,7569,7570,1188, # 5014
424,1995, 861,3546,4148,3775,2202,2685, 168,1235,3547,4149,7571,2086,1674,4408, # 5030
3337,3270, 220,2557,1009,7572,3776, 670,2992, 332,1208, 717,7573,7574,3548,2447, # 5046
3929,3338,7575, 513,7576,1209,2868,3339,3138,4409,1080,7577,7578,7579,7580,2527, # 5062
3656,3549, 815,1587,3930,3931,7581,3550,3439,3777,1254,4410,1328,3038,1390,3932, # 5078
1741,3933,3778,3934,7582, 236,3779,2448,3271,7583,7584,3657,3780,1273,3781,4411, # 5094
7585, 308,7586,4412, 245,4413,1851,2473,1307,2575, 430, 715,2136,2449,7587, 270, # 5110
199,2869,3935,7588,3551,2718,1753, 761,1754, 725,1661,1840,4414,3440,3658,7589, # 5126
7590, 587, 14,3272, 227,2598, 326, 480,2265, 943,2755,3552, 291, 650,1883,7591, # 5142
1702,1226, 102,1547, 62,3441, 904,4415,3442,1164,4150,7592,7593,1224,1548,2756, # 5158
391, 498,1493,7594,1386,1419,7595,2055,1177,4416, 813, 880,1081,2363, 566,1145, # 5174
4417,2286,1001,1035,2558,2599,2238, 394,1286,7596,7597,2068,7598, 86,1494,1730, # 5190
3936, 491,1588, 745, 897,2948, 843,3340,3937,2757,2870,3273,1768, 998,2217,2069, # 5206
397,1826,1195,1969,3659,2993,3341, 284,7599,3782,2500,2137,2119,1903,7600,3938, # 5222
2150,3939,4151,1036,3443,1904, 114,2559,4152, 209,1527,7601,7602,2949,2831,2625, # 5238
2385,2719,3139, 812,2560,7603,3274,7604,1559, 737,1884,3660,1210, 885, 28,2686, # 5254
3553,3783,7605,4153,1004,1779,4418,7606, 346,1981,2218,2687,4419,3784,1742, 797, # 5270
1642,3940,1933,1072,1384,2151, 896,3941,3275,3661,3197,2871,3554,7607,2561,1958, # 5286
4420,2450,1785,7608,7609,7610,3942,4154,1005,1308,3662,4155,2720,4421,4422,1528, # 5302
2600, 161,1178,4156,1982, 987,4423,1101,4157, 631,3943,1157,3198,2420,1343,1241, # 5318
1016,2239,2562, 372, 877,2339,2501,1160, 555,1934, 911,3944,7611, 466,1170, 169, # 5334
1051,2907,2688,3663,2474,2994,1182,2011,2563,1251,2626,7612, 992,2340,3444,1540, # 5350
2721,1201,2070,2401,1996,2475,7613,4424, 528,1922,2188,1503,1873,1570,2364,3342, # 5366
3276,7614, 557,1073,7615,1827,3445,2087,2266,3140,3039,3084, 767,3085,2786,4425, # 5382
1006,4158,4426,2341,1267,2176,3664,3199, 778,3945,3200,2722,1597,2657,7616,4427, # 5398
7617,3446,7618,7619,7620,3277,2689,1433,3278, 131, 95,1504,3946, 723,4159,3141, # 5414
1841,3555,2758,2189,3947,2027,2104,3665,7621,2995,3948,1218,7622,3343,3201,3949, # 5430
4160,2576, 248,1634,3785, 912,7623,2832,3666,3040,3786, 654, 53,7624,2996,7625, # 5446
1688,4428, 777,3447,1032,3950,1425,7626, 191, 820,2120,2833, 971,4429, 931,3202, # 5462
135, 664, 783,3787,1997, 772,2908,1935,3951,3788,4430,2909,3203, 282,2723, 640, # 5478
1372,3448,1127, 922, 325,3344,7627,7628, 711,2044,7629,7630,3952,2219,2787,1936, # 5494
3953,3345,2220,2251,3789,2300,7631,4431,3790,1258,3279,3954,3204,2138,2950,3955, # 5510
3956,7632,2221, 258,3205,4432, 101,1227,7633,3280,1755,7634,1391,3281,7635,2910, # 5526
2056, 893,7636,7637,7638,1402,4161,2342,7639,7640,3206,3556,7641,7642, 878,1325, # 5542
1780,2788,4433, 259,1385,2577, 744,1183,2267,4434,7643,3957,2502,7644, 684,1024, # 5558
4162,7645, 472,3557,3449,1165,3282,3958,3959, 322,2152, 881, 455,1695,1152,1340, # 5574
660, 554,2153,4435,1058,4436,4163, 830,1065,3346,3960,4437,1923,7646,1703,1918, # 5590
7647, 932,2268, 122,7648,4438, 947, 677,7649,3791,2627, 297,1905,1924,2269,4439, # 5606
2317,3283,7650,7651,4164,7652,4165, 84,4166, 112, 989,7653, 547,1059,3961, 701, # 5622
3558,1019,7654,4167,7655,3450, 942, 639, 457,2301,2451, 993,2951, 407, 851, 494, # 5638
4440,3347, 927,7656,1237,7657,2421,3348, 573,4168, 680, 921,2911,1279,1874, 285, # 5654
790,1448,1983, 719,2167,7658,7659,4441,3962,3963,1649,7660,1541, 563,7661,1077, # 5670
7662,3349,3041,3451, 511,2997,3964,3965,3667,3966,1268,2564,3350,3207,4442,4443, # 5686
7663, 535,1048,1276,1189,2912,2028,3142,1438,1373,2834,2952,1134,2012,7664,4169, # 5702
1238,2578,3086,1259,7665, 700,7666,2953,3143,3668,4170,7667,4171,1146,1875,1906, # 5718
4444,2601,3967, 781,2422, 132,1589, 203, 147, 273,2789,2402, 898,1786,2154,3968, # 5734
3969,7668,3792,2790,7669,7670,4445,4446,7671,3208,7672,1635,3793, 965,7673,1804, # 5750
2690,1516,3559,1121,1082,1329,3284,3970,1449,3794, 65,1128,2835,2913,2759,1590, # 5766
3795,7674,7675, 12,2658, 45, 976,2579,3144,4447, 517,2528,1013,1037,3209,7676, # 5782
3796,2836,7677,3797,7678,3452,7679,2602, 614,1998,2318,3798,3087,2724,2628,7680, # 5798
2580,4172, 599,1269,7681,1810,3669,7682,2691,3088, 759,1060, 489,1805,3351,3285, # 5814
1358,7683,7684,2386,1387,1215,2629,2252, 490,7685,7686,4173,1759,2387,2343,7687, # 5830
4448,3799,1907,3971,2630,1806,3210,4449,3453,3286,2760,2344, 874,7688,7689,3454, # 5846
3670,1858, 91,2914,3671,3042,3800,4450,7690,3145,3972,2659,7691,3455,1202,1403, # 5862
3801,2954,2529,1517,2503,4451,3456,2504,7692,4452,7693,2692,1885,1495,1731,3973, # 5878
2365,4453,7694,2029,7695,7696,3974,2693,1216, 237,2581,4174,2319,3975,3802,4454, # 5894
4455,2694,3560,3457, 445,4456,7697,7698,7699,7700,2761, 61,3976,3672,1822,3977, # 5910
7701, 687,2045, 935, 925, 405,2660, 703,1096,1859,2725,4457,3978,1876,1367,2695, # 5926
3352, 918,2105,1781,2476, 334,3287,1611,1093,4458, 564,3146,3458,3673,3353, 945, # 5942
2631,2057,4459,7702,1925, 872,4175,7703,3459,2696,3089, 349,4176,3674,3979,4460, # 5958
3803,4177,3675,2155,3980,4461,4462,4178,4463,2403,2046, 782,3981, 400, 251,4179, # 5974
1624,7704,7705, 277,3676, 299,1265, 476,1191,3804,2121,4180,4181,1109, 205,7706, # 5990
2582,1000,2156,3561,1860,7707,7708,7709,4464,7710,4465,2565, 107,2477,2157,3982, # 6006
3460,3147,7711,1533, 541,1301, 158, 753,4182,2872,3562,7712,1696, 370,1088,4183, # 6022
4466,3563, 579, 327, 440, 162,2240, 269,1937,1374,3461, 968,3043, 56,1396,3090, # 6038
2106,3288,3354,7713,1926,2158,4467,2998,7714,3564,7715,7716,3677,4468,2478,7717, # 6054
2791,7718,1650,4469,7719,2603,7720,7721,3983,2661,3355,1149,3356,3984,3805,3985, # 6070
7722,1076, 49,7723, 951,3211,3289,3290, 450,2837, 920,7724,1811,2792,2366,4184, # 6086
1908,1138,2367,3806,3462,7725,3212,4470,1909,1147,1518,2423,4471,3807,7726,4472, # 6102
2388,2604, 260,1795,3213,7727,7728,3808,3291, 708,7729,3565,1704,7730,3566,1351, # 6118
1618,3357,2999,1886, 944,4185,3358,4186,3044,3359,4187,7731,3678, 422, 413,1714, # 6134
3292, 500,2058,2345,4188,2479,7732,1344,1910, 954,7733,1668,7734,7735,3986,2404, # 6150
4189,3567,3809,4190,7736,2302,1318,2505,3091, 133,3092,2873,4473, 629, 31,2838, # 6166
2697,3810,4474, 850, 949,4475,3987,2955,1732,2088,4191,1496,1852,7737,3988, 620, # 6182
3214, 981,1242,3679,3360,1619,3680,1643,3293,2139,2452,1970,1719,3463,2168,7738, # 6198
3215,7739,7740,3361,1828,7741,1277,4476,1565,2047,7742,1636,3568,3093,7743, 869, # 6214
2839, 655,3811,3812,3094,3989,3000,3813,1310,3569,4477,7744,7745,7746,1733, 558, # 6230
4478,3681, 335,1549,3045,1756,4192,3682,1945,3464,1829,1291,1192, 470,2726,2107, # 6246
2793, 913,1054,3990,7747,1027,7748,3046,3991,4479, 982,2662,3362,3148,3465,3216, # 6262
3217,1946,2794,7749, 571,4480,7750,1830,7751,3570,2583,1523,2424,7752,2089, 984, # 6278
4481,3683,1959,7753,3684, 852, 923,2795,3466,3685, 969,1519, 999,2048,2320,1705, # 6294
7754,3095, 615,1662, 151, 597,3992,2405,2321,1049, 275,4482,3686,4193, 568,3687, # 6310
3571,2480,4194,3688,7755,2425,2270, 409,3218,7756,1566,2874,3467,1002, 769,2840, # 6326
194,2090,3149,3689,2222,3294,4195, 628,1505,7757,7758,1763,2177,3001,3993, 521, # 6342
1161,2584,1787,2203,2406,4483,3994,1625,4196,4197, 412, 42,3096, 464,7759,2632, # 6358
4484,3363,1760,1571,2875,3468,2530,1219,2204,3814,2633,2140,2368,4485,4486,3295, # 6374
1651,3364,3572,7760,7761,3573,2481,3469,7762,3690,7763,7764,2271,2091, 460,7765, # 6390
4487,7766,3002, 962, 588,3574, 289,3219,2634,1116, 52,7767,3047,1796,7768,7769, # 6406
7770,1467,7771,1598,1143,3691,4198,1984,1734,1067,4488,1280,3365, 465,4489,1572, # 6422
510,7772,1927,2241,1812,1644,3575,7773,4490,3692,7774,7775,2663,1573,1534,7776, # 6438
7777,4199, 536,1807,1761,3470,3815,3150,2635,7778,7779,7780,4491,3471,2915,1911, # 6454
2796,7781,3296,1122, 377,3220,7782, 360,7783,7784,4200,1529, 551,7785,2059,3693, # 6470
1769,2426,7786,2916,4201,3297,3097,2322,2108,2030,4492,1404, 136,1468,1479, 672, # 6486
1171,3221,2303, 271,3151,7787,2762,7788,2049, 678,2727, 865,1947,4493,7789,2013, # 6502
3995,2956,7790,2728,2223,1397,3048,3694,4494,4495,1735,2917,3366,3576,7791,3816, # 6518
509,2841,2453,2876,3817,7792,7793,3152,3153,4496,4202,2531,4497,2304,1166,1010, # 6534
552, 681,1887,7794,7795,2957,2958,3996,1287,1596,1861,3154, 358, 453, 736, 175, # 6550
478,1117, 905,1167,1097,7796,1853,1530,7797,1706,7798,2178,3472,2287,3695,3473, # 6566
3577,4203,2092,4204,7799,3367,1193,2482,4205,1458,2190,2205,1862,1888,1421,3298, # 6582
2918,3049,2179,3474, 595,2122,7800,3997,7801,7802,4206,1707,2636, 223,3696,1359, # 6598
751,3098, 183,3475,7803,2797,3003, 419,2369, 633, 704,3818,2389, 241,7804,7805, # 6614
7806, 838,3004,3697,2272,2763,2454,3819,1938,2050,3998,1309,3099,2242,1181,7807, # 6630
1136,2206,3820,2370,1446,4207,2305,4498,7808,7809,4208,1055,2605, 484,3698,7810, # 6646
3999, 625,4209,2273,3368,1499,4210,4000,7811,4001,4211,3222,2274,2275,3476,7812, # 6662
7813,2764, 808,2606,3699,3369,4002,4212,3100,2532, 526,3370,3821,4213, 955,7814, # 6678
1620,4214,2637,2427,7815,1429,3700,1669,1831, 994, 928,7816,3578,1260,7817,7818, # 6694
7819,1948,2288, 741,2919,1626,4215,2729,2455, 867,1184, 362,3371,1392,7820,7821, # 6710
4003,4216,1770,1736,3223,2920,4499,4500,1928,2698,1459,1158,7822,3050,3372,2877, # 6726
1292,1929,2506,2842,3701,1985,1187,2071,2014,2607,4217,7823,2566,2507,2169,3702, # 6742
2483,3299,7824,3703,4501,7825,7826, 666,1003,3005,1022,3579,4218,7827,4502,1813, # 6758
2253, 574,3822,1603, 295,1535, 705,3823,4219, 283, 858, 417,7828,7829,3224,4503, # 6774
4504,3051,1220,1889,1046,2276,2456,4004,1393,1599, 689,2567, 388,4220,7830,2484, # 6790
802,7831,2798,3824,2060,1405,2254,7832,4505,3825,2109,1052,1345,3225,1585,7833, # 6806
809,7834,7835,7836, 575,2730,3477, 956,1552,1469,1144,2323,7837,2324,1560,2457, # 6822
3580,3226,4005, 616,2207,3155,2180,2289,7838,1832,7839,3478,4506,7840,1319,3704, # 6838
3705,1211,3581,1023,3227,1293,2799,7841,7842,7843,3826, 607,2306,3827, 762,2878, # 6854
1439,4221,1360,7844,1485,3052,7845,4507,1038,4222,1450,2061,2638,4223,1379,4508, # 6870
2585,7846,7847,4224,1352,1414,2325,2921,1172,7848,7849,3828,3829,7850,1797,1451, # 6886
7851,7852,7853,7854,2922,4006,4007,2485,2346, 411,4008,4009,3582,3300,3101,4509, # 6902
1561,2664,1452,4010,1375,7855,7856, 47,2959, 316,7857,1406,1591,2923,3156,7858, # 6918
1025,2141,3102,3157, 354,2731, 884,2224,4225,2407, 508,3706, 726,3583, 996,2428, # 6934
3584, 729,7859, 392,2191,1453,4011,4510,3707,7860,7861,2458,3585,2608,1675,2800, # 6950
919,2347,2960,2348,1270,4511,4012, 73,7862,7863, 647,7864,3228,2843,2255,1550, # 6966
1346,3006,7865,1332, 883,3479,7866,7867,7868,7869,3301,2765,7870,1212, 831,1347, # 6982
4226,4512,2326,3830,1863,3053, 720,3831,4513,4514,3832,7871,4227,7872,7873,4515, # 6998
7874,7875,1798,4516,3708,2609,4517,3586,1645,2371,7876,7877,2924, 669,2208,2665, # 7014
2429,7878,2879,7879,7880,1028,3229,7881,4228,2408,7882,2256,1353,7883,7884,4518, # 7030
3158, 518,7885,4013,7886,4229,1960,7887,2142,4230,7888,7889,3007,2349,2350,3833, # 7046
516,1833,1454,4014,2699,4231,4519,2225,2610,1971,1129,3587,7890,2766,7891,2961, # 7062
1422, 577,1470,3008,1524,3373,7892,7893, 432,4232,3054,3480,7894,2586,1455,2508, # 7078
2226,1972,1175,7895,1020,2732,4015,3481,4520,7896,2733,7897,1743,1361,3055,3482, # 7094
2639,4016,4233,4521,2290, 895, 924,4234,2170, 331,2243,3056, 166,1627,3057,1098, # 7110
7898,1232,2880,2227,3374,4522, 657, 403,1196,2372, 542,3709,3375,1600,4235,3483, # 7126
7899,4523,2767,3230, 576, 530,1362,7900,4524,2533,2666,3710,4017,7901, 842,3834, # 7142
7902,2801,2031,1014,4018, 213,2700,3376, 665, 621,4236,7903,3711,2925,2430,7904, # 7158
2431,3302,3588,3377,7905,4237,2534,4238,4525,3589,1682,4239,3484,1380,7906, 724, # 7174
2277, 600,1670,7907,1337,1233,4526,3103,2244,7908,1621,4527,7909, 651,4240,7910, # 7190
1612,4241,2611,7911,2844,7912,2734,2307,3058,7913, 716,2459,3059, 174,1255,2701, # 7206
4019,3590, 548,1320,1398, 728,4020,1574,7914,1890,1197,3060,4021,7915,3061,3062, # 7222
3712,3591,3713, 747,7916, 635,4242,4528,7917,7918,7919,4243,7920,7921,4529,7922, # 7238
3378,4530,2432, 451,7923,3714,2535,2072,4244,2735,4245,4022,7924,1764,4531,7925, # 7254
4246, 350,7926,2278,2390,2486,7927,4247,4023,2245,1434,4024, 488,4532, 458,4248, # 7270
4025,3715, 771,1330,2391,3835,2568,3159,2159,2409,1553,2667,3160,4249,7928,2487, # 7286
2881,2612,1720,2702,4250,3379,4533,7929,2536,4251,7930,3231,4252,2768,7931,2015, # 7302
2736,7932,1155,1017,3716,3836,7933,3303,2308, 201,1864,4253,1430,7934,4026,7935, # 7318
7936,7937,7938,7939,4254,1604,7940, 414,1865, 371,2587,4534,4535,3485,2016,3104, # 7334
4536,1708, 960,4255, 887, 389,2171,1536,1663,1721,7941,2228,4027,2351,2926,1580, # 7350
7942,7943,7944,1744,7945,2537,4537,4538,7946,4539,7947,2073,7948,7949,3592,3380, # 7366
2882,4256,7950,4257,2640,3381,2802, 673,2703,2460, 709,3486,4028,3593,4258,7951, # 7382
1148, 502, 634,7952,7953,1204,4540,3594,1575,4541,2613,3717,7954,3718,3105, 948, # 7398
3232, 121,1745,3837,1110,7955,4259,3063,2509,3009,4029,3719,1151,1771,3838,1488, # 7414
4030,1986,7956,2433,3487,7957,7958,2093,7959,4260,3839,1213,1407,2803, 531,2737, # 7430
2538,3233,1011,1537,7960,2769,4261,3106,1061,7961,3720,3721,1866,2883,7962,2017, # 7446
120,4262,4263,2062,3595,3234,2309,3840,2668,3382,1954,4542,7963,7964,3488,1047, # 7462
2704,1266,7965,1368,4543,2845, 649,3383,3841,2539,2738,1102,2846,2669,7966,7967, # 7478
1999,7968,1111,3596,2962,7969,2488,3842,3597,2804,1854,3384,3722,7970,7971,3385, # 7494
2410,2884,3304,3235,3598,7972,2569,7973,3599,2805,4031,1460, 856,7974,3600,7975, # 7510
2885,2963,7976,2886,3843,7977,4264, 632,2510, 875,3844,1697,3845,2291,7978,7979, # 7526
4544,3010,1239, 580,4545,4265,7980, 914, 936,2074,1190,4032,1039,2123,7981,7982, # 7542
7983,3386,1473,7984,1354,4266,3846,7985,2172,3064,4033, 915,3305,4267,4268,3306, # 7558
1605,1834,7986,2739, 398,3601,4269,3847,4034, 328,1912,2847,4035,3848,1331,4270, # 7574
3011, 937,4271,7987,3602,4036,4037,3387,2160,4546,3388, 524, 742, 538,3065,1012, # 7590
7988,7989,3849,2461,7990, 658,1103, 225,3850,7991,7992,4547,7993,4548,7994,3236, # 7606
1243,7995,4038, 963,2246,4549,7996,2705,3603,3161,7997,7998,2588,2327,7999,4550, # 7622
8000,8001,8002,3489,3307, 957,3389,2540,2032,1930,2927,2462, 870,2018,3604,1746, # 7638
2770,2771,2434,2463,8003,3851,8004,3723,3107,3724,3490,3390,3725,8005,1179,3066, # 7654
8006,3162,2373,4272,3726,2541,3163,3108,2740,4039,8007,3391,1556,2542,2292, 977, # 7670
2887,2033,4040,1205,3392,8008,1765,3393,3164,2124,1271,1689, 714,4551,3491,8009, # 7686
2328,3852, 533,4273,3605,2181, 617,8010,2464,3308,3492,2310,8011,8012,3165,8013, # 7702
8014,3853,1987, 618, 427,2641,3493,3394,8015,8016,1244,1690,8017,2806,4274,4552, # 7718
8018,3494,8019,8020,2279,1576, 473,3606,4275,3395, 972,8021,3607,8022,3067,8023, # 7734
8024,4553,4554,8025,3727,4041,4042,8026, 153,4555, 356,8027,1891,2888,4276,2143, # 7750
408, 803,2352,8028,3854,8029,4277,1646,2570,2511,4556,4557,3855,8030,3856,4278, # 7766
8031,2411,3396, 752,8032,8033,1961,2964,8034, 746,3012,2465,8035,4279,3728, 698, # 7782
4558,1892,4280,3608,2543,4559,3609,3857,8036,3166,3397,8037,1823,1302,4043,2706, # 7798
3858,1973,4281,8038,4282,3167, 823,1303,1288,1236,2848,3495,4044,3398, 774,3859, # 7814
8039,1581,4560,1304,2849,3860,4561,8040,2435,2161,1083,3237,4283,4045,4284, 344, # 7830
1173, 288,2311, 454,1683,8041,8042,1461,4562,4046,2589,8043,8044,4563, 985, 894, # 7846
8045,3399,3168,8046,1913,2928,3729,1988,8047,2110,1974,8048,4047,8049,2571,1194, # 7862
425,8050,4564,3169,1245,3730,4285,8051,8052,2850,8053, 636,4565,1855,3861, 760, # 7878
1799,8054,4286,2209,1508,4566,4048,1893,1684,2293,8055,8056,8057,4287,4288,2210, # 7894
479,8058,8059, 832,8060,4049,2489,8061,2965,2490,3731, 990,3109, 627,1814,2642, # 7910
4289,1582,4290,2125,2111,3496,4567,8062, 799,4291,3170,8063,4568,2112,1737,3013, # 7926
1018, 543, 754,4292,3309,1676,4569,4570,4050,8064,1489,8065,3497,8066,2614,2889, # 7942
4051,8067,8068,2966,8069,8070,8071,8072,3171,4571,4572,2182,1722,8073,3238,3239, # 7958
1842,3610,1715, 481, 365,1975,1856,8074,8075,1962,2491,4573,8076,2126,3611,3240, # 7974
433,1894,2063,2075,8077, 602,2741,8078,8079,8080,8081,8082,3014,1628,3400,8083, # 7990
3172,4574,4052,2890,4575,2512,8084,2544,2772,8085,8086,8087,3310,4576,2891,8088, # 8006
4577,8089,2851,4578,4579,1221,2967,4053,2513,8090,8091,8092,1867,1989,8093,8094, # 8022
8095,1895,8096,8097,4580,1896,4054, 318,8098,2094,4055,4293,8099,8100, 485,8101, # 8038
938,3862, 553,2670, 116,8102,3863,3612,8103,3498,2671,2773,3401,3311,2807,8104, # 8054
3613,2929,4056,1747,2930,2968,8105,8106, 207,8107,8108,2672,4581,2514,8109,3015, # 8070
890,3614,3864,8110,1877,3732,3402,8111,2183,2353,3403,1652,8112,8113,8114, 941, # 8086
2294, 208,3499,4057,2019, 330,4294,3865,2892,2492,3733,4295,8115,8116,8117,8118, # 8102
#Everything below is of no interest for detection purpose
2515,1613,4582,8119,3312,3866,2516,8120,4058,8121,1637,4059,2466,4583,3867,8122, # 8118
2493,3016,3734,8123,8124,2192,8125,8126,2162,8127,8128,8129,8130,8131,8132,8133, # 8134
8134,8135,8136,8137,8138,8139,8140,8141,8142,8143,8144,8145,8146,8147,8148,8149, # 8150
8150,8151,8152,8153,8154,8155,8156,8157,8158,8159,8160,8161,8162,8163,8164,8165, # 8166
8166,8167,8168,8169,8170,8171,8172,8173,8174,8175,8176,8177,8178,8179,8180,8181, # 8182
8182,8183,8184,8185,8186,8187,8188,8189,8190,8191,8192,8193,8194,8195,8196,8197, # 8198
8198,8199,8200,8201,8202,8203,8204,8205,8206,8207,8208,8209,8210,8211,8212,8213, # 8214
8214,8215,8216,8217,8218,8219,8220,8221,8222,8223,8224,8225,8226,8227,8228,8229, # 8230
8230,8231,8232,8233,8234,8235,8236,8237,8238,8239,8240,8241,8242,8243,8244,8245, # 8246
8246,8247,8248,8249,8250,8251,8252,8253,8254,8255,8256,8257,8258,8259,8260,8261, # 8262
8262,8263,8264,8265,8266,8267,8268,8269,8270,8271,8272,8273,8274,8275,8276,8277, # 8278
8278,8279,8280,8281,8282,8283,8284,8285,8286,8287,8288,8289,8290,8291,8292,8293, # 8294
8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,8304,8305,8306,8307,8308,8309, # 8310
8310,8311,8312,8313,8314,8315,8316,8317,8318,8319,8320,8321,8322,8323,8324,8325, # 8326
8326,8327,8328,8329,8330,8331,8332,8333,8334,8335,8336,8337,8338,8339,8340,8341, # 8342
8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,8352,8353,8354,8355,8356,8357, # 8358
8358,8359,8360,8361,8362,8363,8364,8365,8366,8367,8368,8369,8370,8371,8372,8373, # 8374
8374,8375,8376,8377,8378,8379,8380,8381,8382,8383,8384,8385,8386,8387,8388,8389, # 8390
8390,8391,8392,8393,8394,8395,8396,8397,8398,8399,8400,8401,8402,8403,8404,8405, # 8406
8406,8407,8408,8409,8410,8411,8412,8413,8414,8415,8416,8417,8418,8419,8420,8421, # 8422
8422,8423,8424,8425,8426,8427,8428,8429,8430,8431,8432,8433,8434,8435,8436,8437, # 8438
8438,8439,8440,8441,8442,8443,8444,8445,8446,8447,8448,8449,8450,8451,8452,8453, # 8454
8454,8455,8456,8457,8458,8459,8460,8461,8462,8463,8464,8465,8466,8467,8468,8469, # 8470
8470,8471,8472,8473,8474,8475,8476,8477,8478,8479,8480,8481,8482,8483,8484,8485, # 8486
8486,8487,8488,8489,8490,8491,8492,8493,8494,8495,8496,8497,8498,8499,8500,8501, # 8502
8502,8503,8504,8505,8506,8507,8508,8509,8510,8511,8512,8513,8514,8515,8516,8517, # 8518
8518,8519,8520,8521,8522,8523,8524,8525,8526,8527,8528,8529,8530,8531,8532,8533, # 8534
8534,8535,8536,8537,8538,8539,8540,8541,8542,8543,8544,8545,8546,8547,8548,8549, # 8550
8550,8551,8552,8553,8554,8555,8556,8557,8558,8559,8560,8561,8562,8563,8564,8565, # 8566
8566,8567,8568,8569,8570,8571,8572,8573,8574,8575,8576,8577,8578,8579,8580,8581, # 8582
8582,8583,8584,8585,8586,8587,8588,8589,8590,8591,8592,8593,8594,8595,8596,8597, # 8598
8598,8599,8600,8601,8602,8603,8604,8605,8606,8607,8608,8609,8610,8611,8612,8613, # 8614
8614,8615,8616,8617,8618,8619,8620,8621,8622,8623,8624,8625,8626,8627,8628,8629, # 8630
8630,8631,8632,8633,8634,8635,8636,8637,8638,8639,8640,8641,8642,8643,8644,8645, # 8646
8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,8656,8657,8658,8659,8660,8661, # 8662
8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,8672,8673,8674,8675,8676,8677, # 8678
8678,8679,8680,8681,8682,8683,8684,8685,8686,8687,8688,8689,8690,8691,8692,8693, # 8694
8694,8695,8696,8697,8698,8699,8700,8701,8702,8703,8704,8705,8706,8707,8708,8709, # 8710
8710,8711,8712,8713,8714,8715,8716,8717,8718,8719,8720,8721,8722,8723,8724,8725, # 8726
8726,8727,8728,8729,8730,8731,8732,8733,8734,8735,8736,8737,8738,8739,8740,8741) # 8742
# flake8: noqa
| gpl-3.0 |
kennethlove/django | django/contrib/gis/geoip/tests.py | 6 | 4706 | from __future__ import unicode_literals
import os
from django.conf import settings
from django.contrib.gis.geos import GEOSGeometry
from django.contrib.gis.geoip import GeoIP, GeoIPException
from django.utils import unittest
# Note: Requires use of both the GeoIP country and city datasets.
# The GEOIP_DATA path should be the only setting set (the directory
# should contain links or the actual database files 'GeoIP.dat' and
# 'GeoLiteCity.dat'.
class GeoIPTest(unittest.TestCase):
    # NOTE(review): these tests need the MaxMind datasets 'GeoIP.dat' and
    # 'GeoLiteCity.dat' under settings.GEOIP_PATH (see module header) and
    # working DNS for the hostname-based queries ('www.google.com', etc.).

    def test01_init(self):
        "Testing GeoIP initialization."
        g1 = GeoIP() # Everything inferred from GeoIP path
        path = settings.GEOIP_PATH
        g2 = GeoIP(path, 0) # Passing in data path explicitly.
        g3 = GeoIP.open(path, 0) # MaxMind Python API syntax.

        # All three construction styles must load both databases.
        for g in (g1, g2, g3):
            self.assertEqual(True, bool(g._country))
            self.assertEqual(True, bool(g._city))

        # Only passing in the location of one database.
        city = os.path.join(path, 'GeoLiteCity.dat')
        cntry = os.path.join(path, 'GeoIP.dat')
        g4 = GeoIP(city, country='')
        self.assertEqual(None, g4._country)
        g5 = GeoIP(cntry, city='')
        self.assertEqual(None, g5._city)

        # Improper parameters.
        bad_params = (23, 'foo', 15.23)
        for bad in bad_params:
            self.assertRaises(GeoIPException, GeoIP, cache=bad)
            # NOTE(review): 'basestring' exists only on Python 2.
            if isinstance(bad, basestring):
                e = GeoIPException
            else:
                e = TypeError
            self.assertRaises(e, GeoIP, bad, 0)

    def test02_bad_query(self):
        "Testing GeoIP query parameter checking."
        cntry_g = GeoIP(city='<foo>')
        # No city database available, these calls should fail.
        self.assertRaises(GeoIPException, cntry_g.city, 'google.com')
        self.assertRaises(GeoIPException, cntry_g.coords, 'yahoo.com')
        # Non-string query should raise TypeError
        self.assertRaises(TypeError, cntry_g.country_code, 17)
        self.assertRaises(TypeError, cntry_g.country_name, GeoIP)

    def test03_country(self):
        "Testing GeoIP country querying methods."
        g = GeoIP(city='<foo>')

        fqdn = 'www.google.com'
        addr = '12.215.42.19'
        # Hostname and raw-address queries must agree.
        for query in (fqdn, addr):
            for func in (g.country_code, g.country_code_by_addr, g.country_code_by_name):
                self.assertEqual('US', func(query))
            for func in (g.country_name, g.country_name_by_addr, g.country_name_by_name):
                self.assertEqual('United States', func(query))
            self.assertEqual({'country_code' : 'US', 'country_name' : 'United States'},
                             g.country(query))

    def test04_city(self):
        "Testing GeoIP city querying methods."
        g = GeoIP(country='<foo>')

        addr = '128.249.1.1'
        fqdn = 'tmc.edu'
        for query in (fqdn, addr):
            # Country queries should still work.
            for func in (g.country_code, g.country_code_by_addr, g.country_code_by_name):
                self.assertEqual('US', func(query))
            for func in (g.country_name, g.country_name_by_addr, g.country_name_by_name):
                self.assertEqual('United States', func(query))
            self.assertEqual({'country_code' : 'US', 'country_name' : 'United States'},
                             g.country(query))

            # City information dictionary.
            d = g.city(query)
            self.assertEqual('USA', d['country_code3'])
            self.assertEqual('Houston', d['city'])
            self.assertEqual('TX', d['region'])
            self.assertEqual(713, d['area_code'])
            geom = g.geos(query)
            self.assertTrue(isinstance(geom, GEOSGeometry))
            lon, lat = (-95.4010, 29.7079)
            lat_lon = g.lat_lon(query)
            # lat_lon() returns (lat, lon); swap to compare against (lon, lat).
            lat_lon = (lat_lon[1], lat_lon[0])
            for tup in (geom.tuple, g.coords(query), g.lon_lat(query), lat_lon):
                self.assertAlmostEqual(lon, tup[0], 4)
                self.assertAlmostEqual(lat, tup[1], 4)

    def test05_unicode_response(self):
        "Testing that GeoIP strings are properly encoded, see #16553."
        g = GeoIP()
        d = g.city('62.224.93.23')
        self.assertEqual('Schümberg', d['city'])

    def test06_unicode_query(self):
        "Testing that GeoIP accepts unicode string queries, see #17059."
        g = GeoIP()
        d = g.country('whitehouse.gov')
        self.assertEqual('US', d['country_code'])
def suite():
    """Return a TestSuite holding every GeoIPTest case."""
    tests = unittest.TestSuite()
    tests.addTest(unittest.makeSuite(GeoIPTest))
    return tests
def run(verbosity=1):
    """Execute the GeoIP test suite with a text runner at *verbosity*."""
    runner = unittest.TextTestRunner(verbosity=verbosity)
    runner.run(suite())
| bsd-3-clause |
levilucio/SyVOLT | t_core/Mutex/HTakeRulePivotRHS.py | 1 | 7540 |
from core.himesis import Himesis, HimesisPostConditionPattern
import cPickle as pickle
class HTakeRulePivotRHS(HimesisPostConditionPattern):
    # Auto-generated T-Core/AToM3 rule right-hand side: a 3-node himesis
    # pattern graph (held_by, Process, Resource) plus the rewrite (execute)
    # and the user post-action hook (action). The pickled payloads below are
    # generated data; their column-0 layout inside the triple-quoted strings
    # is part of the pickle stream and must not be re-indented.

    def __init__(self):
        """
        Creates the himesis graph representing the AToM3 model HTakeRulePivotRHS.
        """
        # Create the himesis graph
        EDGE_LIST = [(2, 0), (0, 1)]

        super(HTakeRulePivotRHS, self).__init__(name='HTakeRulePivotRHS', num_nodes=3, edges=EDGE_LIST)
        self.is_compiled = True # now this instance has been compiled

        # Set the graph attributes
        self["mm__"] = pickle.loads("""(lp1
S'MT_post__Mutex'
p2
aS'MoTifRule'
p3
a.""")
        # '@n' placeholders are swapped back to real newlines after unpickling.
        self["MT_action__"] = pickle.loads("""S"#===============================================================================@n# This code is executed after the rule has been applied.@n# You can access a node labelled n matched by this rule by: PostNode('n').@n# To access attribute x of node n, use: PostNode('n')['x'].@n#===============================================================================@n@npass@n"
p1
.""").replace("@n", "\n")
        self["name"] = pickle.loads("""S''
.""")
        self["GUID__"] = pickle.loads("""ccopy_reg
_reconstructor
p1
(cuuid
UUID
p2
c__builtin__
object
p3
NtRp4
(dp5
S'int'
p6
L177184248977566067407519061715160582110L
sb.""")

        # Set the node attributes
        # Node 0: the 'held_by' association created by the rule (label 5).
        self.vs[0]["mm__"] = pickle.loads("""S'MT_post__held_by'
p1
.""")
        self.vs[0]["MT_label__"] = pickle.loads("""S'5'
.""")
        self.vs[0]["GUID__"] = pickle.loads("""ccopy_reg
_reconstructor
p1
(cuuid
UUID
p2
c__builtin__
object
p3
NtRp4
(dp5
S'int'
p6
L327468026380999584779519446649229225705L
sb.""")
        # Node 1: the Process (label 1); exported as pivot 'p'.
        self.vs[1]["mm__"] = pickle.loads("""S'MT_post__Process'
p1
.""")
        self.vs[1]["MT_label__"] = pickle.loads("""S'1'
.""")
        self.vs[1]["MT_pivotOut__"] = pickle.loads("""S'p'
.""")
        self.vs[1]["MT_post__name"] = pickle.loads("""S"@n#===============================================================================@n# You can access the value of the current node's attribute value by: attr_value.@n# You can access a matched node labelled n by: PreNode('n').@n# To access attribute x of node n, use: PreNode('n')['x'].@n# Note that the attribute values are those before the match is rewritten.@n# The order in which this code is executed depends on the label value of the encapsulating node.@n# The given action must return the new value of the attribute.@n#===============================================================================@n@nreturn attr_value@n"
p1
.""").replace("@n", "\n")
        self.vs[1]["GUID__"] = pickle.loads("""ccopy_reg
_reconstructor
p1
(cuuid
UUID
p2
c__builtin__
object
p3
NtRp4
(dp5
S'int'
p6
L307306096358356756191155922592500254709L
sb.""")
        # Node 2: the Resource (label 2).
        self.vs[2]["mm__"] = pickle.loads("""S'MT_post__Resource'
p1
.""")
        self.vs[2]["MT_label__"] = pickle.loads("""S'2'
.""")
        self.vs[2]["MT_post__name"] = pickle.loads("""S"@n#===============================================================================@n# You can access the value of the current node's attribute value by: attr_value.@n# You can access a matched node labelled n by: PreNode('n').@n# To access attribute x of node n, use: PreNode('n')['x'].@n# Note that the attribute values are those before the match is rewritten.@n# The order in which this code is executed depends on the label value of the encapsulating node.@n# The given action must return the new value of the attribute.@n#===============================================================================@n@nreturn attr_value@n"
p1
.""").replace("@n", "\n")
        self.vs[2]["GUID__"] = pickle.loads("""ccopy_reg
_reconstructor
p1
(cuuid
UUID
p2
c__builtin__
object
p3
NtRp4
(dp5
S'int'
p6
L155582527659668895370133264659473414121L
sb.""")

        # Attach the matching left-hand side pattern.
        from HTakeRulePivotLHS import HTakeRulePivotLHS
        self.pre = HTakeRulePivotLHS()

    def action(self, PostNode, graph):
        """
        Executable constraint code.
        @param PostNode: Function taking an integer as parameter
        and returns the node corresponding to that label.
        """
        #===============================================================================
        # This code is executed after the rule has been applied.
        # You can access a node labelled n matched by this rule by: PostNode('n').
        # To access attribute x of node n, use: PostNode('n')['x'].
        #===============================================================================

        pass

    def execute(self, packet, match):
        """
        Transforms the current match of the packet according to the rule %s.
        Pivots are also assigned, if any.
        @param packet: The input packet.
        @param match: The match to rewrite.
        """
        graph = packet.graph

        # Build a dictionary {label: node index} mapping each label of the pattern to a node in the graph to rewrite.
        # Because of the uniqueness property of labels in a rule, we can store all LHS labels
        # and subsequently add the labels corresponding to the nodes to be created.
        labels = match.copy()

        #===============================================================================
        # Update attribute values
        #===============================================================================

        #===============================================================================
        # Create new nodes
        #===============================================================================
        # MT_post__held_by5
        new_node = graph.add_node()
        labels[5] = new_node
        graph.vs[new_node][Himesis.Constants.META_MODEL] = 'held_by'

        #===============================================================================
        # Create new edges
        #===============================================================================
        # MT_post__Resource2 -> MT_post__held_by5
        graph.add_edges((labels[2], labels[5]))
        # MT_post__held_by5 -> MT_post__Process1
        graph.add_edges((labels[5], labels[1]))

        #===============================================================================
        # Delete nodes (this will automatically delete the adjacent edges)
        #===============================================================================
        # MT_pre__toke3, MT_pre__request4
        graph.delete_nodes([labels[3], labels[4]])

        #===============================================================================
        # Set the output pivots
        #===============================================================================
        # MT_post__Process1
        packet.global_pivots['p'] = graph.vs[labels[1]][Himesis.Constants.GUID]

        #===============================================================================
        # Finally, perform the post-action
        #===============================================================================
        try:
            self.action(lambda i: graph.vs[labels[i]], graph)
        # Python 2 'except' syntax; consistent with the cPickle import above.
        except Exception, e:
            raise Exception('An error has occurred while applying the post-action', e)
| mit |
usakhelo/FreeCAD | src/Mod/OpenSCAD/OpenSCADCommands.py | 2 | 20612 | #***************************************************************************
#* *
#* Copyright (c) 2012 Sebastian Hoogen <github@sebastianhoogen.de> *
#* *
#* This program is free software; you can redistribute it and/or modify *
#* it under the terms of the GNU Lesser General Public License (LGPL) *
#* as published by the Free Software Foundation; either version 2 of *
#* the License, or (at your option) any later version. *
#* for detail see the LICENCE text file. *
#* *
#* This program is distributed in the hope that it will be useful, *
#* but WITHOUT ANY WARRANTY; without even the implied warranty of *
#* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
#* GNU Library General Public License for more details. *
#* *
#* You should have received a copy of the GNU Library General Public *
#* License along with this program; if not, write to the Free Software *
#* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
#* USA *
#* *
#***************************************************************************
__title__="FreeCAD OpenSCAD Workbench - GUI Commands"
__author__ = "Sebastian Hoogen"
__url__ = ["http://www.freecadweb.org"]
'''
This Script includes the GUI Commands of the OpenSCAD module
'''
import FreeCAD,FreeCADGui
from PySide import QtCore, QtGui
# Qt translation helper: older PySide exposes QApplication.UnicodeUTF8;
# bindings that removed it raise AttributeError here, so fall back to the
# 3-argument translate() signature.
try:
    _encoding = QtGui.QApplication.UnicodeUTF8
    def translate(context, text):
        "convenience function for Qt translator"
        return QtGui.QApplication.translate(context, text, None, _encoding)
except AttributeError:
    def translate(context, text):
        "convenience function for Qt translator"
        return QtGui.QApplication.translate(context, text, None)
def utf8(unio):
    # Coerce to unicode then encode as UTF-8 bytes.
    # NOTE(review): 'unicode' exists only on Python 2.
    return unicode(unio).encode('UTF8')
class ExplodeGroup:
    "Ungroup Objects"
    # Command: dissolve selected fusions/compounds, pushing the parent
    # placement down onto the children and recoloring default-colored ones.

    def IsActive(self):
        # Enabled while at least one Part feature is selected.
        return FreeCADGui.Selection.countObjectsOfType('Part::Feature') > 0

    def Activated(self):
        def isdefault(shapecolor):
            # True if every face color equals the user's DefaultShapeColor.
            def comparefloat(f1,f2):
                # Relative float comparison (tolerance 2**-24); exact for zero.
                if f1 == 0.0:
                    return f1 == f2
                else:
                    return abs((f1-f2)/f1) < 2**-24
            scol=FreeCAD.ParamGet("User parameter:BaseApp/Preferences/View")\
                .GetUnsigned('DefaultShapeColor',0xccccccff)
            # Unpack 0xRRGGBBAA into an (r, g, b, transparency) float tuple.
            defaultcolor = (((scol >> 24) & 0xff) / 255.0,\
                ((scol >> 16) & 0xff) / 255.0,\
                ((scol >> 8) & 0xff) / 255.0, 0.0)
            return all(all(comparefloat(fcc,dcc) for fcc,dcc in \
                zip(facecolor,defaultcolor)) for facecolor in shapecolor)

        def isgrey(shapecolor):
            # True if every face has the hard-coded 0.8 grey default.
            defaultcolor=(float.fromhex('0x1.99999ap-1'),float.fromhex(\
                '0x1.99999ap-1'),float.fromhex('0x1.99999ap-1'),0.0)
            return all(facecolor == defaultcolor for facecolor in shapecolor)

        def randomcolor(transp=0.0):
            import random
            return (random.random(),random.random(),random.random(),transp)

        def explode(obj,color=True):
            # Remove a fusion/compound and re-parent its children, applying
            # the removed object's placement to each child.
            if obj.isDerivedFrom('Part::Fuse') or \
                    obj.isDerivedFrom('Part::MultiFuse') or \
                    obj.isDerivedFrom('Part::Compound'):
                plm = obj.Placement
                outlist = obj.OutList[:]
                # Only safe when children are referenced nowhere else.
                if plm.isNull() or all((len(oo.InList)==1 and \
                        not oo.isDerivedFrom('PartDesign::Feature')) \
                        for oo in obj.OutList):
                    obj.Document.removeObject(obj.Name)
                    for oo in outlist:
                        if not plm.isNull():
                            oo.Placement=plm.multiply(oo.Placement)
                        if FreeCAD.GuiUp:
                            import FreeCADGui
                            oo.ViewObject.show()
                            if color and isdefault(oo.ViewObject.DiffuseColor):
                                if color == True:
                                    oo.ViewObject.DiffuseColor=randomcolor()
                                else:
                                    oo.ViewObject.DiffuseColor=color
            else:
                FreeCAD.Console.PrintError(unicode(translate('OpenSCAD',\
                    'Unable to explode %s')) % obj.Name +u'\n')

        for obj in FreeCADGui.Selection.getSelection():
            if len(obj.InList) == 0: # allowed only for for top level objects
                explode(obj)

    def GetResources(self):
        return {'Pixmap' : 'OpenSCAD_Explode_Group', 'MenuText': \
            QtCore.QT_TRANSLATE_NOOP('OpenSCAD_ExplodeGroup',\
            'Explode Group'), 'ToolTip': \
            QtCore.QT_TRANSLATE_NOOP('OpenSCAD_ExplodeGroup',\
            'remove fusion, apply placement to children and color randomly')}
class ColorCodeShape:
    """Recolor the selected shapes (or all shapes when nothing is selected)
    based on their validity and type."""

    def Activated(self):
        import colorcodeshapes
        selected = FreeCADGui.Selection.getSelectionEx()
        if selected:
            targets = [sel.Object for sel in selected]
        else:
            # Empty selection: operate on every object in the document.
            targets = FreeCAD.ActiveDocument.Objects
        colorcodeshapes.colorcodeshapes(targets)

    def GetResources(self):
        menu_text = QtCore.QT_TRANSLATE_NOOP('OpenSCAD_ColorCodeShape', 'Color Shapes')
        tool_tip = QtCore.QT_TRANSLATE_NOOP('OpenSCAD_ColorCodeShape', 'Color Shapes by validity and type')
        return {'Pixmap': 'OpenSCAD_ColorCodeShape', 'MenuText': menu_text, 'ToolTip': tool_tip}
class Edgestofaces:
    # Command: gather all edges of the selected objects and build faces
    # from them via OpenSCAD2Dgeom, hiding the originals afterwards.

    def IsActive(self):
        return FreeCADGui.Selection.countObjectsOfType('Part::Feature') > 0

    def Activated(self):
        from OpenSCAD2Dgeom import edgestofaces,Overlappingfaces
        selection=FreeCADGui.Selection.getSelectionEx()
        edges=[]
        for selobj in selection:
            edges.extend(selobj.Object.Shape.Edges)
        Overlappingfaces(edgestofaces(edges,None)).makefeatures(FreeCAD.ActiveDocument)
        for selobj in selection:
            selobj.Object.ViewObject.hide()
        FreeCAD.ActiveDocument.recompute()

    def GetResources(self):
        return {'Pixmap' : 'python', 'MenuText': QtCore.QT_TRANSLATE_NOOP(\
            'OpenSCAD_Edgestofaces','Convert Edges To Faces'),
            'ToolTip': QtCore.QT_TRANSLATE_NOOP('OpenSCAD',\
            'Convert Edges to Faces')}
class RefineShapeFeature:
    # Command: wrap each selected Part feature in a parametric 'refine'
    # feature (OpenSCADFeatures.RefineShape) and hide the original.

    def IsActive(self):
        return FreeCADGui.Selection.countObjectsOfType('Part::Feature') > 0

    def Activated(self):
        import Part,OpenSCADFeatures
        selection=FreeCADGui.Selection.getSelectionEx()
        for selobj in selection:
            newobj=selobj.Document.addObject("Part::FeaturePython",'refine')
            OpenSCADFeatures.RefineShape(newobj,selobj.Object)
            OpenSCADFeatures.ViewProviderTree(newobj.ViewObject)
            newobj.Label='refine_%s' % selobj.Object.Label
            selobj.Object.ViewObject.hide()
        FreeCAD.ActiveDocument.recompute()

    def GetResources(self):
        return {'Pixmap' : 'OpenSCAD_RefineShapeFeature', 'MenuText': \
            QtCore.QT_TRANSLATE_NOOP('OpenSCAD_RefineShapeFeature',\
            'Refine Shape Feature'), 'ToolTip': \
            QtCore.QT_TRANSLATE_NOOP('OpenSCAD_RefineShapeFeature',\
            'Create Refine Shape Feature')}
class IncreaseToleranceFeature:
    """Wrap each selected Part feature in a tolerance-increasing feature
    and hide the original object."""

    def IsActive(self):
        # Enabled whenever at least one Part feature is selected.
        return FreeCADGui.Selection.countObjectsOfType('Part::Feature') > 0

    def Activated(self):
        import Part, OpenSCADFeatures
        for sel in FreeCADGui.Selection.getSelectionEx():
            source = sel.Object
            wrapper = sel.Document.addObject("Part::FeaturePython", 'tolerance')
            OpenSCADFeatures.IncreaseTolerance(wrapper, source)
            OpenSCADFeatures.ViewProviderTree(wrapper.ViewObject)
            wrapper.Label = 'tolerance_%s' % source.Label
            source.ViewObject.hide()
        FreeCAD.ActiveDocument.recompute()

    def GetResources(self):
        return {
            'Pixmap': 'OpenSCAD_IncreaseToleranceFeature',
            'MenuText': QtCore.QT_TRANSLATE_NOOP('OpenSCAD_IncreaseToleranceFeature', 'Increase Tolerance Feature'),
            'ToolTip': QtCore.QT_TRANSLATE_NOOP('OpenSCAD_IncreaseToleranceFeature', 'Create Feature that allows to increase the tolerance'),
        }
class ExpandPlacements:
    '''This should aid interactive repair in the future
    but currently it breaks extrusions, as axis, base and so on have to be
    recalculated'''

    def IsActive(self):
        return FreeCADGui.Selection.countObjectsOfType('Part::Feature') > 0

    def Activated(self):
        import expandplacements
        # Push an identity placement down through each selected subtree.
        for selobj in FreeCADGui.Selection.getSelectionEx():
            expandplacements.expandplacements(selobj.Object,FreeCAD.Placement())
        FreeCAD.ActiveDocument.recompute()

    def GetResources(self):
        return {'Pixmap' : 'python', 'MenuText': QtCore.QT_TRANSLATE_NOOP(\
            'OpenSCAD_ExpandPlacements','Expand Placements'), 'ToolTip': \
            QtCore.QT_TRANSLATE_NOOP('OpenSCAD_ExpandPlacements',\
            'Expand all placements downwards the FeatureTree')}
class ReplaceObject:
    # Command: replace an object inside the feature tree. Expects either
    # three selected objects (old, new, parent) or two where exactly one
    # is a top-level object.

    def IsActive(self):
        nobj = FreeCADGui.Selection.countObjectsOfType('Part::Feature')
        if nobj == 3: return True
        elif nobj == 2: return tuple((len(obj.InList)) for obj in \
                FreeCADGui.Selection.getSelection()) in ((0,1),(1,0))
        # Falls through returning None (falsy) for any other count.
        #else: return False

    def Activated(self):
        import replaceobj
        objs=FreeCADGui.Selection.getSelection()
        if len(objs)==3 or \
                tuple((len(obj.InList)) for obj in objs) in ((0,1),(1,0)):
            replaceobj.replaceobjfromselection(objs)
        else:
            FreeCAD.Console.PrintError(unicode(translate('OpenSCAD',\
                'Please select 3 objects first'))+u'\n')

    def GetResources(self):
        return {'Pixmap' : 'OpenSCAD_ReplaceObject', 'MenuText': \
            QtCore.QT_TRANSLATE_NOOP('OpenSCAD_ReplaceObject',\
            'Replace Object'), 'ToolTip': \
            QtCore.QT_TRANSLATE_NOOP('OpenSCAD_ReplaceObject',\
            'Replace an object in the Feature Tree. Please select old, new and parent object')}
class RemoveSubtree:
    """Remove the selected objects together with all children that are not
    referenced from other objects."""

    def IsActive(self):
        return FreeCADGui.Selection.countObjectsOfType('Part::Feature') > 0

    def Activated(self):
        import OpenSCADUtils
        import FreeCADGui
        selection = FreeCADGui.Selection.getSelection()
        OpenSCADUtils.removesubtree(selection)

    def GetResources(self):
        resources = {
            'Pixmap': 'OpenSCAD_RemoveSubtree',
            'MenuText': QtCore.QT_TRANSLATE_NOOP('OpenSCAD_RemoveSubtree', 'Remove Objects and their Children'),
            'ToolTip': QtCore.QT_TRANSLATE_NOOP('OpenSCAD_RemoveSubtree', 'Removes the selected objects and all children that are not referenced from other objects'),
        }
        return resources
class AddSCADWidget(QtGui.QWidget):
    # Widget for the 'Add OpenSCAD Element' task panel: a code editor, an
    # Add/Clear button row and an 'as Mesh' toggle.

    def __init__(self,*args):
        QtGui.QWidget.__init__(self,*args)
        self.textEdit=QtGui.QTextEdit()
        self.buttonadd = QtGui.QPushButton(translate('OpenSCAD','Add'))
        self.buttonclear = QtGui.QPushButton(translate('OpenSCAD','Clear'))
        self.checkboxmesh = QtGui.QCheckBox(translate('OpenSCAD','as Mesh'))
        # Buttons side by side, everything else stacked vertically.
        layouth=QtGui.QHBoxLayout()
        layouth.addWidget(self.buttonadd)
        layouth.addWidget(self.buttonclear)
        layout= QtGui.QVBoxLayout()
        layout.addLayout(layouth)
        layout.addWidget(self.checkboxmesh)
        layout.addWidget(self.textEdit)
        self.setLayout(layout)
        self.setWindowTitle(translate('OpenSCAD','Add OpenSCAD Element'))
        # Seed the editor with a minimal valid OpenSCAD snippet.
        self.textEdit.setText(u'cube();')
        self.buttonclear.clicked.connect(self.textEdit.clear)

    def retranslateUi(self, widget=None):
        # Re-apply translated captions (called on language change).
        self.buttonadd.setText(translate('OpenSCAD','Add'))
        self.buttonclear.setText(translate('OpenSCAD','Clear'))
        self.checkboxmesh.setText(translate('OpenSCAD','as Mesh'))
        self.setWindowTitle(translate('OpenSCAD','Add OpenSCAD Element'))
class AddSCADTask:
    # Task panel controller: runs the entered OpenSCAD code through the
    # OpenSCAD binary and imports the result as CSG or mesh.

    def __init__(self):
        self.form = AddSCADWidget()
        self.form.buttonadd.clicked.connect(self.addelement)

    def getStandardButtons(self):
        # Only a Close button; adding is done via the widget's own button.
        return int(QtGui.QDialogButtonBox.Close)

    def isAllowedAlterSelection(self):
        return True

    def isAllowedAlterView(self):
        return True

    def isAllowedAlterDocument(self):
        return True

    def addelement(self):
        # NOTE(review): 'unicode' is Python 2 only.
        scadstr=unicode(self.form.textEdit.toPlainText()).encode('utf8')
        asmesh=self.form.checkboxmesh.checkState()
        import OpenSCADUtils, os
        # OpenSCAD emits STL for meshes, CSG otherwise.
        extension= 'stl' if asmesh else 'csg'
        try:
            tmpfilename=OpenSCADUtils.callopenscadstring(scadstr,extension)
            doc=FreeCAD.activeDocument() or FreeCAD.newDocument()
            if asmesh:
                import Mesh
                Mesh.insert(tmpfilename,doc.Name)
            else:
                import importCSG
                importCSG.insert(tmpfilename,doc.Name)
            # Best-effort cleanup of the temporary output file.
            try:
                os.unlink(tmpfilename)
            except OSError:
                pass
        except OpenSCADUtils.OpenSCADError as e:
            FreeCAD.Console.PrintError(e.value)
class OpenSCADMeshBooleanWidget(QtGui.QWidget):
    # Widget for the mesh-boolean task panel: a Perform button plus a
    # radio-button group selecting the boolean operation.

    def __init__(self,*args):
        QtGui.QWidget.__init__(self,*args)
        #self.textEdit=QtGui.QTextEdit()
        self.buttonadd = QtGui.QPushButton(translate('OpenSCAD','Perform'))
        self.rb_group = QtGui.QButtonGroup()
        self.rb_group_box = QtGui.QGroupBox()
        self.rb_group_box_layout = QtGui.QVBoxLayout()
        self.rb_group_box.setLayout(self.rb_group_box_layout)
        # One exclusive radio button per supported operation.
        self.rb_union = QtGui.QRadioButton("Union")
        self.rb_group.addButton(self.rb_union)
        self.rb_group_box_layout.addWidget(self.rb_union)
        self.rb_intersection = QtGui.QRadioButton("Intersection")
        self.rb_group.addButton(self.rb_intersection)
        self.rb_group_box_layout.addWidget(self.rb_intersection)
        self.rb_difference = QtGui.QRadioButton("Difference")
        self.rb_group.addButton(self.rb_difference)
        self.rb_group_box_layout.addWidget(self.rb_difference)
        self.rb_hull = QtGui.QRadioButton("Hull")
        self.rb_group.addButton(self.rb_hull)
        self.rb_group_box_layout.addWidget(self.rb_hull)
        self.rb_minkowski = QtGui.QRadioButton("Minkowski")
        self.rb_group.addButton(self.rb_minkowski)
        self.rb_group_box_layout.addWidget(self.rb_minkowski)
        layouth=QtGui.QHBoxLayout()
        layouth.addWidget(self.buttonadd)
        layout= QtGui.QVBoxLayout()
        layout.addLayout(layouth)
        layout.addWidget(self.rb_group_box)
        self.setLayout(layout)
        self.setWindowTitle(translate('OpenSCAD','Mesh Boolean'))

    def retranslateUi(self, widget=None):
        # Re-apply translated captions (called on language change).
        self.buttonadd.setText(translate('OpenSCAD','Perform'))
        self.setWindowTitle(translate('OpenSCAD','Mesh Boolean'))
class OpenSCADMeshBooleanTask:
    """Task panel controller that performs an OpenSCAD boolean operation
    on the currently selected objects, exported as meshes."""

    def __init__(self):
        # (dropped a leftover no-op 'pass' that preceded the real body)
        self.form = OpenSCADMeshBooleanWidget()
        self.form.buttonadd.clicked.connect(self.doboolean)

    def getStandardButtons(self):
        # Only a Close button; the operation runs via the widget's button.
        return int(QtGui.QDialogButtonBox.Close)

    def isAllowedAlterSelection(self):
        return False

    def isAllowedAlterView(self):
        return False

    def isAllowedAlterDocument(self):
        return True

    def doboolean(self):
        """Run the radio-selected boolean op on the selection and add the
        resulting mesh as a new Mesh::Feature, hiding the inputs."""
        from OpenSCADUtils import meshoponobjs
        if self.form.rb_intersection.isChecked(): opname = 'intersection'
        elif self.form.rb_difference.isChecked(): opname = 'difference'
        elif self.form.rb_hull.isChecked(): opname = 'hull'
        elif self.form.rb_minkowski.isChecked(): opname = 'minkowski'
        else: opname = 'union'  # default when no radio button is checked
        newmesh,objsused = meshoponobjs(opname,FreeCADGui.Selection.getSelection())
        if len(objsused) > 0:
            newmeshobj = FreeCAD.activeDocument().addObject('Mesh::Feature',opname) #create a Feature for the result
            newmeshobj.Mesh = newmesh #assign the result to the new Feature
            for obj in objsused:
                obj.ViewObject.hide() #hide the selected Features
class AddOpenSCADElement:
    """Command that opens the 'Add OpenSCAD Element' task panel."""

    def IsActive(self):
        # Only one task dialog can be shown at a time.
        return not FreeCADGui.Control.activeDialog()

    def Activated(self):
        FreeCADGui.Control.showDialog(AddSCADTask())

    def GetResources(self):
        return {
            'Pixmap': 'OpenSCAD_AddOpenSCADElement',
            'MenuText': QtCore.QT_TRANSLATE_NOOP('OpenSCAD_AddOpenSCADElement', 'Add OpenSCAD Element...'),
            'ToolTip': QtCore.QT_TRANSLATE_NOOP('OpenSCAD_AddOpenSCADElement', 'Add an OpenSCAD element by entering OpenSCAD code and executing the OpenSCAD binary'),
        }
class OpenSCADMeshBoolean:
    # Command: open the mesh-boolean task panel for the current selection.

    def IsActive(self):
        # Needs no other task dialog open and at least one object selected.
        return not FreeCADGui.Control.activeDialog() and \
            len(FreeCADGui.Selection.getSelection()) >= 1

    def Activated(self):
        panel = OpenSCADMeshBooleanTask()
        FreeCADGui.Control.showDialog(panel)

    def GetResources(self):
        return {'Pixmap' : 'OpenSCAD_MeshBooleans', 'MenuText': \
            QtCore.QT_TRANSLATE_NOOP('OpenSCAD_MeshBoolean',\
            'Mesh Boolean...'), 'ToolTip': \
            QtCore.QT_TRANSLATE_NOOP('OpenSCAD_MeshBoolean',\
            'Export objects as meshes and use OpenSCAD to perform a boolean operation.')}
class Hull:
    # Command: compute the convex hull of the selected objects through
    # the OpenSCAD binary and import the result.

    def IsActive(self):
        return len(FreeCADGui.Selection.getSelection()) >= 2

    def Activated(self):
        import Part,OpenSCADFeatures
        import importCSG
        selection=FreeCADGui.Selection.getSelectionEx()
        objList = []
        for selobj in selection:
            objList.append(selobj.Object)
            selobj.Object.ViewObject.hide()
        importCSG.process_ObjectsViaOpenSCAD(FreeCAD.activeDocument(),objList,"hull")
        FreeCAD.ActiveDocument.recompute()

    def GetResources(self):
        return {'Pixmap' : 'OpenSCAD_Hull', 'MenuText': \
            QtCore.QT_TRANSLATE_NOOP('OpenSCAD_Hull',\
            'Hull'), 'ToolTip': \
            QtCore.QT_TRANSLATE_NOOP('OpenSCAD_Hull',\
            'Perform Hull')}
class Minkowski:
    """Command: compute the Minkowski sum of the selected objects through
    the OpenSCAD binary and import the result."""

    def IsActive(self):
        return len(FreeCADGui.Selection.getSelection()) >= 2

    def Activated(self):
        import Part, OpenSCADFeatures
        import importCSG
        picked = [sel.Object for sel in FreeCADGui.Selection.getSelectionEx()]
        for obj in picked:
            obj.ViewObject.hide()
        importCSG.process_ObjectsViaOpenSCAD(FreeCAD.activeDocument(), picked, "minkowski")
        FreeCAD.ActiveDocument.recompute()

    def GetResources(self):
        return {
            'Pixmap': 'OpenSCAD_Minkowski',
            'MenuText': QtCore.QT_TRANSLATE_NOOP('OpenSCAD_Minkowski', 'Minkowski'),
            'ToolTip': QtCore.QT_TRANSLATE_NOOP('OpenSCAD_Minkowski', 'Perform Minkowski'),
        }
# Register every command class with the FreeCAD GUI so the workbench can
# place them in menus and toolbars; the string names must match the
# workbench's command lists.
FreeCADGui.addCommand('OpenSCAD_ColorCodeShape',ColorCodeShape())
FreeCADGui.addCommand('OpenSCAD_ExplodeGroup',ExplodeGroup())
FreeCADGui.addCommand('OpenSCAD_Edgestofaces',Edgestofaces())
FreeCADGui.addCommand('OpenSCAD_RefineShapeFeature',RefineShapeFeature())
FreeCADGui.addCommand('OpenSCAD_IncreaseToleranceFeature',IncreaseToleranceFeature())
FreeCADGui.addCommand('OpenSCAD_ExpandPlacements',ExpandPlacements())
FreeCADGui.addCommand('OpenSCAD_ReplaceObject',ReplaceObject())
FreeCADGui.addCommand('OpenSCAD_RemoveSubtree',RemoveSubtree())
FreeCADGui.addCommand('OpenSCAD_AddOpenSCADElement',AddOpenSCADElement())
FreeCADGui.addCommand('OpenSCAD_MeshBoolean',OpenSCADMeshBoolean())
FreeCADGui.addCommand('OpenSCAD_Hull',Hull())
FreeCADGui.addCommand('OpenSCAD_Minkowski',Minkowski())
| lgpl-2.1 |
LTMana/code | Python/Django/homework/backend/firstapp/migrations/0001_initial.py | 1 | 1554 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-11-05 04:53
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Initial schema for the 'firstapp' application: creates Article and
    # Comment; Comment.belong_to is a nullable cascade FK to Article with
    # related_name 'under_comments'.

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Article',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=500)),
                ('img', models.CharField(max_length=250)),
                ('content', models.TextField(blank=True, null=True)),
                ('views', models.IntegerField()),
                ('favs', models.IntegerField()),
                ('createtime', models.DateField()),
            ],
        ),
        migrations.CreateModel(
            name='Comment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=500)),
                ('avatar', models.CharField(default='static/images/default.png', max_length=250)),
                ('content', models.TextField(blank=True, null=True)),
                ('createtime', models.DateField(auto_now=True)),
                ('belong_to', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='under_comments', to='firstapp.Article')),
            ],
        ),
    ]
| mit |
acshan/odoo | addons/mrp/__init__.py | 437 | 1165 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import mrp
import stock
import product
import wizard
import report
import company
import procurement
import res_config
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
jolyonb/edx-platform | lms/djangoapps/grades/tests/base.py | 5 | 4430 | """
Base file for Grades tests
"""
from crum import set_current_request
from capa.tests.response_xml_factory import MultipleChoiceResponseXMLFactory
from lms.djangoapps.course_blocks.api import get_course_blocks
from openedx.core.djangolib.testing.utils import get_mock_request
from student.models import CourseEnrollment
from student.tests.factories import UserFactory
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from ..course_data import CourseData
from ..subsection_grade_factory import SubsectionGradeFactory
class GradeTestBase(SharedModuleStoreTestCase):
"""
Base class for some Grades tests.
"""
@classmethod
def setUpClass(cls):
super(GradeTestBase, cls).setUpClass()
cls.course = CourseFactory.create()
with cls.store.bulk_operations(cls.course.id):
cls.chapter = ItemFactory.create(
parent=cls.course,
category="chapter",
display_name="Test Chapter"
)
cls.sequence = ItemFactory.create(
parent=cls.chapter,
category='sequential',
display_name="Test Sequential X",
graded=True,
format="Homework"
)
cls.vertical = ItemFactory.create(
parent=cls.sequence,
category='vertical',
display_name='Test Vertical 1'
)
problem_xml = MultipleChoiceResponseXMLFactory().build_xml(
question_text='The correct answer is Choice 3',
choices=[False, False, True, False],
choice_names=['choice_0', 'choice_1', 'choice_2', 'choice_3']
)
cls.problem = ItemFactory.create(
parent=cls.vertical,
category="problem",
display_name="Test Problem",
data=problem_xml
)
cls.sequence2 = ItemFactory.create(
parent=cls.chapter,
category='sequential',
display_name="Test Sequential A",
graded=True,
format="Homework"
)
cls.problem2 = ItemFactory.create(
parent=cls.sequence2,
category="problem",
display_name="Test Problem",
data=problem_xml
)
# AED 2017-06-19: make cls.sequence belong to multiple parents,
# so we can test that DAGs with this shape are handled correctly.
cls.chapter_2 = ItemFactory.create(
parent=cls.course,
category='chapter',
display_name='Test Chapter 2'
)
cls.chapter_2.children.append(cls.sequence.location)
cls.store.update_item(cls.chapter_2, UserFactory().id)
def setUp(self):
super(GradeTestBase, self).setUp()
self.addCleanup(set_current_request, None)
self.request = get_mock_request(UserFactory())
self.client.login(username=self.request.user.username, password="test")
self._set_grading_policy()
self.course_structure = get_course_blocks(self.request.user, self.course.location)
self.course_data = CourseData(self.request.user, structure=self.course_structure)
self.subsection_grade_factory = SubsectionGradeFactory(self.request.user, self.course, self.course_structure)
CourseEnrollment.enroll(self.request.user, self.course.id)
def _set_grading_policy(self, passing=0.5):
"""
Updates the course's grading policy.
"""
self.grading_policy = {
"GRADER": [
{
"type": "Homework",
"min_count": 1,
"drop_count": 0,
"short_label": "HW",
"weight": 1.0,
},
{
"type": "NoCredit",
"min_count": 0,
"drop_count": 0,
"short_label": "NC",
"weight": 0.0,
},
],
"GRADE_CUTOFFS": {
"Pass": passing,
},
}
self.course.set_grading_policy(self.grading_policy)
self.store.update_item(self.course, 0)
| agpl-3.0 |
techaddict/spark | python/run-tests.py | 7 | 13052 | #!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
import logging
from argparse import ArgumentParser
import os
import re
import shutil
import subprocess
import sys
import tempfile
from threading import Thread, Lock
import time
import uuid
if sys.version < '3':
import Queue
else:
import queue as Queue
from multiprocessing import Manager
# Append `SPARK_HOME/dev` to the Python path so that we can import the sparktestsupport module
sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), "../dev/"))
from sparktestsupport import SPARK_HOME # noqa (suppress pep8 warnings)
from sparktestsupport.shellutils import which, subprocess_check_output # noqa
from sparktestsupport.modules import all_modules, pyspark_sql # noqa
python_modules = dict((m.name, m) for m in all_modules if m.python_test_goals if m.name != 'root')
def print_red(text):
    """Print *text* to stdout wrapped in ANSI red escape codes."""
    red, reset = '\033[31m', '\033[0m'
    print(red + text + reset)
SKIPPED_TESTS = Manager().dict()
LOG_FILE = os.path.join(SPARK_HOME, "python/unit-tests.log")
FAILURE_REPORTING_LOCK = Lock()
LOGGER = logging.getLogger()
# Find out where the assembly jars are located.
# TODO: revisit for Scala 2.13
for scala in ["2.12"]:
build_dir = os.path.join(SPARK_HOME, "assembly", "target", "scala-" + scala)
if os.path.isdir(build_dir):
SPARK_DIST_CLASSPATH = os.path.join(build_dir, "jars", "*")
break
else:
raise Exception("Cannot find assembly build directory, please build Spark first.")
def run_individual_python_test(target_dir, test_name, pyspark_python):
env = dict(os.environ)
env.update({
'SPARK_DIST_CLASSPATH': SPARK_DIST_CLASSPATH,
'SPARK_TESTING': '1',
'SPARK_PREPEND_CLASSES': '1',
'PYSPARK_PYTHON': which(pyspark_python),
'PYSPARK_DRIVER_PYTHON': which(pyspark_python)
})
# Create a unique temp directory under 'target/' for each run. The TMPDIR variable is
# recognized by the tempfile module to override the default system temp directory.
tmp_dir = os.path.join(target_dir, str(uuid.uuid4()))
while os.path.isdir(tmp_dir):
tmp_dir = os.path.join(target_dir, str(uuid.uuid4()))
os.mkdir(tmp_dir)
env["TMPDIR"] = tmp_dir
# Also override the JVM's temp directory by setting driver and executor options.
spark_args = [
"--conf", "spark.driver.extraJavaOptions=-Djava.io.tmpdir={0}".format(tmp_dir),
"--conf", "spark.executor.extraJavaOptions=-Djava.io.tmpdir={0}".format(tmp_dir),
"pyspark-shell"
]
env["PYSPARK_SUBMIT_ARGS"] = " ".join(spark_args)
LOGGER.info("Starting test(%s): %s", pyspark_python, test_name)
start_time = time.time()
try:
per_test_output = tempfile.TemporaryFile()
retcode = subprocess.Popen(
[os.path.join(SPARK_HOME, "bin/pyspark")] + test_name.split(),
stderr=per_test_output, stdout=per_test_output, env=env).wait()
shutil.rmtree(tmp_dir, ignore_errors=True)
except:
LOGGER.exception("Got exception while running %s with %s", test_name, pyspark_python)
# Here, we use os._exit() instead of sys.exit() in order to force Python to exit even if
# this code is invoked from a thread other than the main thread.
os._exit(1)
duration = time.time() - start_time
# Exit on the first failure.
if retcode != 0:
try:
with FAILURE_REPORTING_LOCK:
with open(LOG_FILE, 'ab') as log_file:
per_test_output.seek(0)
log_file.writelines(per_test_output)
per_test_output.seek(0)
for line in per_test_output:
decoded_line = line.decode()
if not re.match('[0-9]+', decoded_line):
print(decoded_line, end='')
per_test_output.close()
except:
LOGGER.exception("Got an exception while trying to print failed test output")
finally:
print_red("\nHad test failures in %s with %s; see logs." % (test_name, pyspark_python))
# Here, we use os._exit() instead of sys.exit() in order to force Python to exit even if
# this code is invoked from a thread other than the main thread.
os._exit(-1)
else:
skipped_counts = 0
try:
per_test_output.seek(0)
# Here expects skipped test output from unittest when verbosity level is
# 2 (or --verbose option is enabled).
decoded_lines = map(lambda line: line.decode(), iter(per_test_output))
skipped_tests = list(filter(
lambda line: re.search(r'test_.* \(pyspark\..*\) ... (skip|SKIP)', line),
decoded_lines))
skipped_counts = len(skipped_tests)
if skipped_counts > 0:
key = (pyspark_python, test_name)
SKIPPED_TESTS[key] = skipped_tests
per_test_output.close()
except:
import traceback
print_red("\nGot an exception while trying to store "
"skipped test output:\n%s" % traceback.format_exc())
# Here, we use os._exit() instead of sys.exit() in order to force Python to exit even if
# this code is invoked from a thread other than the main thread.
os._exit(-1)
if skipped_counts != 0:
LOGGER.info(
"Finished test(%s): %s (%is) ... %s tests were skipped", pyspark_python, test_name,
duration, skipped_counts)
else:
LOGGER.info(
"Finished test(%s): %s (%is)", pyspark_python, test_name, duration)
def get_default_python_executables():
    """Return the default list of Python interpreters to test against.

    Only interpreters actually present on PATH are kept; when ``python2.7``
    is missing, plain ``python`` is prepended as a fallback.
    """
    candidates = ("python2.7", "python3.6", "pypy")
    python_execs = [exe for exe in candidates if which(exe)]
    if "python2.7" not in python_execs:
        LOGGER.warning("Not testing against `python2.7` because it could not be found; falling"
                       " back to `python` instead")
        python_execs.insert(0, "python")
    return python_execs
def parse_opts():
    """Build and parse the command-line options for the PySpark test runner.

    Returns the parsed argparse namespace; exits via ``parser.error`` on
    unknown arguments or a parallelism value below 1.
    """
    parser = ArgumentParser(
        prog="run-tests"
    )
    # Defaults below are computed at call time from the current environment.
    parser.add_argument(
        "--python-executables", type=str, default=','.join(get_default_python_executables()),
        help="A comma-separated list of Python executables to test against (default: %(default)s)"
    )
    parser.add_argument(
        "--modules", type=str,
        default=",".join(sorted(python_modules.keys())),
        help="A comma-separated list of Python modules to test (default: %(default)s)"
    )
    parser.add_argument(
        "-p", "--parallelism", type=int, default=4,
        help="The number of suites to test in parallel (default %(default)d)"
    )
    parser.add_argument(
        "--verbose", action="store_true",
        help="Enable additional debug logging"
    )
    group = parser.add_argument_group("Developer Options")
    group.add_argument(
        "--testnames", type=str,
        default=None,
        help=(
            "A comma-separated list of specific modules, classes and functions of doctest "
            "or unittest to test. "
            "For example, 'pyspark.sql.foo' to run the module as unittests or doctests, "
            "'pyspark.sql.tests FooTests' to run the specific class of unittests, "
            "'pyspark.sql.tests FooTests.test_foo' to run the specific unittest in the class. "
            "'--modules' option is ignored if they are given.")
    )
    # parse_known_args so all unsupported arguments can be reported at once
    args, unknown = parser.parse_known_args()
    if unknown:
        parser.error("Unsupported arguments: %s" % ' '.join(unknown))
    if args.parallelism < 1:
        parser.error("Parallelism cannot be less than 1")
    return args
def _check_coverage(python_exec):
    """Exit the whole run when *python_exec* cannot import ``coverage``."""
    # Make sure if coverage is installed.
    try:
        subprocess_check_output(
            [python_exec, "-c", "import coverage"],
            stderr=open(os.devnull, 'w'))
    except:
        # NOTE(review): bare except also catches OSError for a missing
        # interpreter, in which case the message below is misleading --
        # confirm that is acceptable.
        print_red("Coverage is not installed in Python executable '%s' "
                  "but 'COVERAGE_PROCESS_START' environment variable is set, "
                  "exiting." % python_exec)
        sys.exit(-1)
def main():
opts = parse_opts()
if opts.verbose:
log_level = logging.DEBUG
else:
log_level = logging.INFO
should_test_modules = opts.testnames is None
logging.basicConfig(stream=sys.stdout, level=log_level, format="%(message)s")
LOGGER.info("Running PySpark tests. Output is in %s", LOG_FILE)
if os.path.exists(LOG_FILE):
os.remove(LOG_FILE)
python_execs = opts.python_executables.split(',')
LOGGER.info("Will test against the following Python executables: %s", python_execs)
if should_test_modules:
modules_to_test = []
for module_name in opts.modules.split(','):
if module_name in python_modules:
modules_to_test.append(python_modules[module_name])
else:
print("Error: unrecognized module '%s'. Supported modules: %s" %
(module_name, ", ".join(python_modules)))
sys.exit(-1)
LOGGER.info("Will test the following Python modules: %s", [x.name for x in modules_to_test])
else:
testnames_to_test = opts.testnames.split(',')
LOGGER.info("Will test the following Python tests: %s", testnames_to_test)
task_queue = Queue.PriorityQueue()
for python_exec in python_execs:
# Check if the python executable has coverage installed when 'COVERAGE_PROCESS_START'
# environmental variable is set.
if "COVERAGE_PROCESS_START" in os.environ:
_check_coverage(python_exec)
python_implementation = subprocess_check_output(
[python_exec, "-c", "import platform; print(platform.python_implementation())"],
universal_newlines=True).strip()
LOGGER.debug("%s python_implementation is %s", python_exec, python_implementation)
LOGGER.debug("%s version is: %s", python_exec, subprocess_check_output(
[python_exec, "--version"], stderr=subprocess.STDOUT, universal_newlines=True).strip())
if should_test_modules:
for module in modules_to_test:
if python_implementation not in module.blacklisted_python_implementations:
for test_goal in module.python_test_goals:
heavy_tests = ['pyspark.streaming.tests', 'pyspark.mllib.tests',
'pyspark.tests', 'pyspark.sql.tests', 'pyspark.ml.tests']
if any(map(lambda prefix: test_goal.startswith(prefix), heavy_tests)):
priority = 0
else:
priority = 100
task_queue.put((priority, (python_exec, test_goal)))
else:
for test_goal in testnames_to_test:
task_queue.put((0, (python_exec, test_goal)))
# Create the target directory before starting tasks to avoid races.
target_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), 'target'))
if not os.path.isdir(target_dir):
os.mkdir(target_dir)
def process_queue(task_queue):
while True:
try:
(priority, (python_exec, test_goal)) = task_queue.get_nowait()
except Queue.Empty:
break
try:
run_individual_python_test(target_dir, test_goal, python_exec)
finally:
task_queue.task_done()
start_time = time.time()
for _ in range(opts.parallelism):
worker = Thread(target=process_queue, args=(task_queue,))
worker.daemon = True
worker.start()
try:
task_queue.join()
except (KeyboardInterrupt, SystemExit):
print_red("Exiting due to interrupt")
sys.exit(-1)
total_duration = time.time() - start_time
LOGGER.info("Tests passed in %i seconds", total_duration)
for key, lines in sorted(SKIPPED_TESTS.items()):
pyspark_python, test_name = key
LOGGER.info("\nSkipped tests in %s with %s:" % (test_name, pyspark_python))
for line in lines:
LOGGER.info(" %s" % line.rstrip())
if __name__ == "__main__":
main()
| apache-2.0 |
MackZxh/OCA-Choice | knowledge/attachment_preview/model/ir_attachment.py | 9 | 2843 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# This module copyright (C) 2014 Therp BV (<http://therp.nl>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import collections
import os.path
import mimetypes
import base64
from openerp.osv.orm import Model
class IrAttachment(Model):
    """ir.attachment extension that reports the file extension of a binary
    field, used by attachment_preview to choose the right viewer."""
    _inherit = 'ir.attachment'

    def get_binary_extension(
            self, cr, uid, model, ids, binary_field, filename_field=None,
            context=None):
        """Return the lowercase extension (without dot) of the binary data.

        :param model: name of the model holding the binary field
        :param ids: a single id or an iterable of ids
        :param binary_field: name of the base64-encoded binary field
        :param filename_field: optional field with the original file name;
            when set and non-empty its extension is trusted first
        :return: dict mapping id -> extension string (or False when the
            record has no data); a bare value instead of a dict when *ids*
            is not iterable
        """
        result = {}
        for this in self.pool[model].browse(
                cr, uid,
                ids if isinstance(ids, collections.Iterable) else [ids],
                context=context):
            if not this.id:
                result[this.id] = False
                continue
            extension = ''
            if filename_field and this[filename_field]:
                filename, extension = os.path.splitext(this[filename_field])
            if not this[binary_field]:
                result[this.id] = False
                continue
            if not extension:
                mimetype = None
                try:
                    # python-magic sniffs the actual content and gives a much
                    # better guess than mimetypes, but it is optional
                    import magic
                    ms = magic.open(
                        hasattr(magic, 'MAGIC_MIME_TYPE') and
                        magic.MAGIC_MIME_TYPE or magic.MAGIC_MIME)
                    ms.load()
                    mimetype = ms.buffer(
                        base64.b64decode(this[binary_field]))
                except ImportError:
                    (mimetype, encoding) = mimetypes.guess_type(
                        'data:;base64,' + this[binary_field], strict=False)
                # Both detectors may fail and yield None; the previous code
                # crashed with AttributeError on mimetype.split() here.
                if mimetype:
                    extension = mimetypes.guess_extension(
                        mimetype.split(';')[0], strict=False)
            # guess_extension may return None; normalize to '' before lstrip
            result[this.id] = (extension or '').lstrip('.').lower()
        return result if isinstance(ids, collections.Iterable) else result[ids]

    def get_attachment_extension(self, cr, uid, ids, context=None):
        """Extension(s) of the attachment's own ``datas`` binary field."""
        return self.get_binary_extension(
            cr, uid, self._name, ids, 'datas', 'datas_fname', context=context)
| lgpl-3.0 |
AOSPU/external_chromium_org | build/linux/install-arm-sysroot.py | 15 | 2718 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Script to install ARM root image for cross building of ARM chrome on linux.
This script can be run manually but is more often run as part of gclient
hooks. When run from hooks this script should be a no-op on non-linux
platforms.
The sysroot image could be constructed from scratch based on the current
state or precise/arm but for consistency we currently use a pre-built root
image which was originally designed for building trusted NaCl code. The image
will normally need to be rebuilt every time chrome's build dependancies are
changed.
Steps to rebuild the arm sysroot image:
- cd $SRC/native_client
- ./tools/trusted_cross_toolchains/trusted-toolchain-creator.armel.precise.sh \
UpdatePackageLists
- ./tools/trusted_cross_toolchains/trusted-toolchain-creator.armel.precise.sh \
BuildJail $SRC/out/arm-sysroot.tar.gz
- gsutil cp -a public-read $SRC/out/arm-sysroot.tar.gz \
nativeclient-archive2/toolchain/$NACL_REV/sysroot-arm-trusted.tgz
"""
import os
import shutil
import subprocess
import sys
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
URL_PREFIX = 'https://storage.googleapis.com'
URL_PATH = 'nativeclient-archive2/toolchain'
REVISION = 13035
TARBALL = 'sysroot-arm-trusted.tgz'
def main(args):
  """Download and unpack the ARM sysroot tarball when it is out of date.

  Returns 0 on success or when the install is skipped (non-Linux host or
  GYP_DEFINES without target_arch=arm).
  """
  if '--linux-only' in args:
    # This argument is passed when run from the gclient hooks.
    # In this case we return early on non-linux platforms
    # or if GYP_DEFINES doesn't include target_arch=arm
    if not sys.platform.startswith('linux'):
      return 0
    if "target_arch=arm" not in os.environ.get('GYP_DEFINES', ''):
      return 0
  src_root = os.path.dirname(os.path.dirname(SCRIPT_DIR))
  sysroot = os.path.join(src_root, 'arm-sysroot')
  url = "%s/%s/%s/%s" % (URL_PREFIX, URL_PATH, REVISION, TARBALL)
  # The stamp file records the URL of the last installed image; a match
  # means the sysroot is already current and nothing needs to be done.
  stamp = os.path.join(sysroot, ".stamp")
  if os.path.exists(stamp):
    with open(stamp) as s:
      if s.read() == url:
        print "ARM root image already up-to-date: %s" % sysroot
        return 0
  print "Installing ARM root image: %s" % sysroot
  if os.path.isdir(sysroot):
    shutil.rmtree(sysroot)
  os.mkdir(sysroot)
  tarball = os.path.join(sysroot, TARBALL)
  curl = ['curl', '--fail', '-L', url, '-o', tarball]
  # Show a progress bar only when attached to a terminal.
  if os.isatty(sys.stdout.fileno()):
    curl.append('--progress')
  else:
    curl.append('--silent')
  subprocess.check_call(curl)
  subprocess.check_call(['tar', 'xf', tarball, '-C', sysroot])
  os.remove(tarball)
  # Write the stamp last so a failed install is retried on the next run.
  with open(stamp, 'w') as s:
    s.write(url)
  return 0
sys.exit(main(sys.argv[1:]))
| bsd-3-clause |
ltucker/melkman | tests/helpers.py | 1 | 6931 | from melkman.green import green_init
green_init()
from datetime import datetime, timedelta
from eventlet.green import socket
from eventlet.support.greenlets import GreenletExit
from eventlet.wsgi import server as wsgi_server
import os
import time
from urlparse import urlsplit
from urllib import quote_plus
from webob import Request, Response
from melk.util.dibject import Dibject, dibjectify
from melk.util.hash import melk_id
from melk.util.nonce import nonce_str
from melkman.context import Context
__all__ = ['make_db', 'fresh_context', 'data_path', 'test_yaml_file', 'random_id', 'rfc3339_date', 'melk_ids_in', 'random_atom_feed',
'make_atom_feed', 'dummy_atom_entries', 'make_atom_entry', 'dummy_news_item', 'epeq_datetime',
'append_param', 'no_micro', 'TestHTTPServer', 'FileServer', 'contextual']
def data_path():
    """Absolute path of the test data directory next to this module."""
    this_dir = os.path.dirname(os.path.abspath(__file__))
    return os.path.join(this_dir, 'data')
def test_yaml_file():
    """Absolute path of the test.yaml configuration next to this module."""
    this_dir = os.path.dirname(os.path.abspath(__file__))
    return os.path.join(this_dir, 'test.yaml')
def make_db():
    """Bootstrap a fresh test context and return its database handle."""
    return fresh_context().db
def fresh_context():
    """Build a Context from test.yaml and bootstrap it from scratch."""
    ctx = Context.from_yaml(test_yaml_file())
    # purge=True wipes any state left over from previous test runs
    ctx.bootstrap(purge=True)
    return ctx
def random_id():
    """Return a fresh melk id derived from a random nonce."""
    nonce = nonce_str()
    return melk_id(nonce)
def rfc3339_date(timestamp):
    """Format a ``datetime`` as an RFC 3339 UTC timestamp string.

    Accepts a datetime and returns e.g. ``2009-03-02T13:10:05Z``.  The
    value is formatted as-is; no timezone conversion is performed.
    """
    parts = timestamp.timetuple()
    return time.strftime('%Y-%m-%dT%H:%M:%SZ', parts)
def melk_ids_in(content, url):
    """Parse feed *content* (as fetched from *url*) and return the melk id
    of every entry in it."""
    # NOTE(review): imported inside the function, presumably to avoid a
    # circular import at module load time -- confirm before hoisting.
    from melkman.parse import parse_feed
    fp = parse_feed(content, url)
    return [x.melk_id for x in fp.entries]
def random_atom_feed(feed_id, nentries, base_timestamp=None, **kw):
    """Build an atom feed document holding *nentries* random dummy entries.

    Extra keyword arguments are forwarded to make_atom_feed.
    """
    if base_timestamp is None:
        base_timestamp = datetime.utcnow()
    entries = dummy_atom_entries(nentries, base_timestamp)
    # stamp the feed itself just after its newest entry
    feed_time = base_timestamp + timedelta(seconds=nentries)
    return make_atom_feed(feed_id, entries, timestamp=feed_time, **kw)
def make_atom_feed(feed_id, entries,
                   title='Some Dummy Feed',
                   timestamp=None,
                   link='http://example.org/feed',
                   author='Jane Dough',
                   hub_urls=None):
    """Assemble an atom feed document from pre-rendered entry fragments.

    *entries* is an iterable of <entry> XML strings (see make_atom_entry);
    *hub_urls*, when given, adds one rel="hub" link per URL for PubSubHubbub.
    """
    if timestamp is None:
        timestamp = datetime.utcnow()
    updated_str = rfc3339_date(timestamp)
    doc = """<?xml version="1.0" encoding="utf-8"?>
    <feed xmlns="http://www.w3.org/2005/Atom">
    <id>%s</id>
    <title>%s</title>
    <link rel="self" href="%s"/>
    <updated>%s</updated>
    <author>
      <name>%s</name>
    </author>
    """ % (feed_id, title, link, updated_str, author)
    if hub_urls is not None:
        for hub_url in hub_urls:
            doc += '<link rel="hub" href="%s" />' % hub_url
    for entry in entries:
        doc += entry
    doc += "</feed>"
    return doc
def dummy_atom_entries(n, base_timestamp=None):
    """Create *n* dummy atom <entry> fragments, newest timestamp first."""
    if base_timestamp is None:
        base_timestamp = datetime.utcnow()
    entries = [
        make_atom_entry(random_id(),
                        timestamp=base_timestamp + timedelta(seconds=offset))
        for offset in range(n)
    ]
    # newest (largest offset) first, matching typical feed ordering
    return entries[::-1]
def make_atom_entry(id, title='This is the title',
                    author='Jane Dough',
                    link='http://example.com/link',
                    timestamp=None,
                    summary='Some Text.'):
    """Render a single atom <entry> XML fragment with the given fields."""
    if timestamp is None:
        timestamp = datetime.utcnow()
    updated_str = rfc3339_date(timestamp)
    return """<entry>
    <id>%s</id>
    <title>%s</title>
    <link rel="alternate" href="%s"/>
    <author><name>%s</name></author>
    <updated>%s</updated>
    <summary>%s</summary>
    </entry>
    """ % (id, title, link, author, updated_str, summary)
class DummyItem(Dibject):
    """Dibject standing in for a news item document in tests."""
    def load_full_item(self, db):
        # a dummy item is already "full": ignore db and return itself
        return self
def dummy_news_item(d):
    """Build a DummyItem from dict *d*, filling defaults for any missing
    news-item fields."""
    di = DummyItem(dibjectify(d))
    # values in *d* win; everything absent gets a plausible placeholder
    di.setdefault('author', 'Whoever T. Merriweather')
    di.setdefault('item_id', random_id())
    di.setdefault('timestamp', datetime.utcnow())
    di.setdefault('title', 'The News Title')
    di.setdefault('link', 'http://example.org/blagosphere?id=12')
    di.setdefault('source_title', 'The Blags')
    di.setdefault('source_url', 'http://example.org/blagosphere')
    di.setdefault('summary', 'abaraljsrs sjrkja rsj klrjewori ew rwa riojweroiwer iowr wre')
    di.setdefault('details', Dibject())
    return di
class TestHTTPServer(object):
    """Minimal green-thread WSGI server for tests; answers 404 to everything.

    Subclasses override __call__ to serve real responses.
    """
    def __init__(self, port=9291):
        # localhost port to bind
        self.port = port
    def run(self):
        """Serve until the running greenlet is killed."""
        try:
            server = socket.socket()
            # allow quick rebinding of the port between test runs
            server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            server.bind(('127.0.0.1', self.port))
            server.listen(50)
            wsgi_server(server, self)
        except GreenletExit:
            # normal shutdown path: the test kills the serving greenlet
            pass
    def __call__(self, environ, start_response):
        # WSGI entry point; base implementation always replies 404
        res = Response()
        res.status = 404
        return res(environ, start_response)
class FileServer(TestHTTPServer):
    """
    little file server for testing
    """
    def __init__(self, www_dir, port=9292):
        TestHTTPServer.__init__(self, port)
        # count of handled requests, inspected by tests
        self.requests = 0
        self.www_dir = os.path.abspath(www_dir)
    def url_for(self, path):
        """Return the local URL under which *path* is served."""
        return 'http://localhost:%d/%s' % (self.port, path)
    def __call__(self, environ, start_response):
        self.requests += 1
        req = Request(environ)
        res = Response()
        filename = req.path_info.lstrip('/')
        filename = os.path.abspath(os.path.join(self.www_dir, filename))
        # the startswith check stops '..' requests escaping www_dir
        if filename.startswith(self.www_dir) and os.path.isfile(filename):
            res.status = 200
            res.body = open(filename).read()
        else:
            res.status = 404
        return res(environ, start_response)
def epeq_datetime(t1, t2):
    """True when *t1* and *t2* differ by strictly less than one second."""
    delta = t1 - t2
    return abs(delta) < timedelta(seconds=1)
def no_micro(dt):
    """Return a copy of *dt* with its microsecond component zeroed."""
    truncated = dt.replace(microsecond=0)
    return truncated
def append_param(url, k, v):
    """Append query parameter ``k=v`` to *url*, URL-encoding both parts and
    choosing '?' or '&' depending on whether *url* already has a query."""
    sep = '&' if urlsplit(url)[3] else '?'
    return '%s%s%s=%s' % (url, sep, quote_plus(k), quote_plus(v))
def contextual(t):
from eventlet import sleep
from greenamqp.client_0_8 import connection
connection.DEBUG_LEAKS = True
def inner():
start_connections = connection.connection_count
ctx = fresh_context()
with ctx:
rc = t(ctx)
sleep(0)
assert len(ctx._locals_by_greenlet) == 0, 'Leaked %d greenlet storages' % len(ctx._locals_by_greenlet)
assert ctx._broker is None, 'Broker connection was not closed.'
end_connections = connection.connection_count
assert start_connections == end_connections, 'Leaked %d amqp connections (%d leaked in total)' % (end_connections - start_connections, end_connections)
return rc
inner.__name__ = t.__name__
return inner | gpl-2.0 |
joopert/home-assistant | tests/components/zwave/test_light.py | 4 | 15115 | """Test Z-Wave lights."""
from unittest.mock import patch, MagicMock
from homeassistant.components import zwave
from homeassistant.components.zwave import const, light
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_COLOR_TEMP,
ATTR_HS_COLOR,
ATTR_TRANSITION,
SUPPORT_BRIGHTNESS,
SUPPORT_TRANSITION,
SUPPORT_COLOR,
ATTR_WHITE_VALUE,
SUPPORT_COLOR_TEMP,
SUPPORT_WHITE_VALUE,
)
from tests.mock.zwave import MockNode, MockValue, MockEntityValues, value_changed
class MockLightValues(MockEntityValues):
    """Mock Z-Wave light values."""
    def __init__(self, **kwargs):
        """Initialize the mock zwave values."""
        # optional light-specific values default to "not present on node"
        self.dimming_duration = None
        self.color = None
        self.color_channels = None
        super().__init__(**kwargs)
def test_get_device_detects_dimmer(mock_openzwave):
    """Test get_device returns a normal dimmer."""
    # a node without COMMAND_CLASS_SWITCH_COLOR maps to the plain dimmer
    node = MockNode()
    value = MockValue(data=0, node=node)
    values = MockLightValues(primary=value)
    device = light.get_device(node=node, values=values, node_config={})
    assert isinstance(device, light.ZwaveDimmer)
    assert device.supported_features == SUPPORT_BRIGHTNESS
def test_get_device_detects_colorlight(mock_openzwave):
    """Test get_device returns a color light."""
    # advertising COMMAND_CLASS_SWITCH_COLOR upgrades the device to a
    # ZwaveColorLight with color support
    node = MockNode(command_classes=[const.COMMAND_CLASS_SWITCH_COLOR])
    value = MockValue(data=0, node=node)
    values = MockLightValues(primary=value)
    device = light.get_device(node=node, values=values, node_config={})
    assert isinstance(device, light.ZwaveColorLight)
    assert device.supported_features == SUPPORT_BRIGHTNESS | SUPPORT_COLOR
def test_get_device_detects_zw098(mock_openzwave):
"""Test get_device returns a zw098 color light."""
node = MockNode(
manufacturer_id="0086",
product_id="0062",
command_classes=[const.COMMAND_CLASS_SWITCH_COLOR],
)
value = MockValue(data=0, node=node)
values = MockLightValues(primary=value)
device = light.get_device(node=node, values=values, node_config={})
assert isinstance(device, light.ZwaveColorLight)
assert device.supported_features == (
SUPPORT_BRIGHTNESS | SUPPORT_COLOR | SUPPORT_COLOR_TEMP
)
def test_get_device_detects_rgbw_light(mock_openzwave):
"""Test get_device returns a color light."""
node = MockNode(command_classes=[const.COMMAND_CLASS_SWITCH_COLOR])
value = MockValue(data=0, node=node)
color = MockValue(data="#0000000000", node=node)
color_channels = MockValue(data=0x1D, node=node)
values = MockLightValues(primary=value, color=color, color_channels=color_channels)
device = light.get_device(node=node, values=values, node_config={})
device.value_added()
assert isinstance(device, light.ZwaveColorLight)
assert device.supported_features == (
SUPPORT_BRIGHTNESS | SUPPORT_COLOR | SUPPORT_WHITE_VALUE
)
def test_dimmer_turn_on(mock_openzwave):
"""Test turning on a dimmable Z-Wave light."""
node = MockNode()
value = MockValue(data=0, node=node)
values = MockLightValues(primary=value)
device = light.get_device(node=node, values=values, node_config={})
device.turn_on()
assert node.set_dimmer.called
value_id, brightness = node.set_dimmer.mock_calls[0][1]
assert value_id == value.value_id
assert brightness == 255
node.reset_mock()
device.turn_on(**{ATTR_BRIGHTNESS: 120})
assert node.set_dimmer.called
value_id, brightness = node.set_dimmer.mock_calls[0][1]
assert value_id == value.value_id
assert brightness == 46 # int(120 / 255 * 99)
with patch.object(light, "_LOGGER", MagicMock()) as mock_logger:
device.turn_on(**{ATTR_TRANSITION: 35})
assert mock_logger.debug.called
assert node.set_dimmer.called
msg, entity_id = mock_logger.debug.mock_calls[0][1]
assert entity_id == device.entity_id
def test_dimmer_min_brightness(mock_openzwave):
"""Test turning on a dimmable Z-Wave light to its minimum brightness."""
node = MockNode()
value = MockValue(data=0, node=node)
values = MockLightValues(primary=value)
device = light.get_device(node=node, values=values, node_config={})
assert not device.is_on
device.turn_on(**{ATTR_BRIGHTNESS: 1})
assert device.is_on
assert device.brightness == 1
device.turn_on(**{ATTR_BRIGHTNESS: 0})
assert device.is_on
assert device.brightness == 0
def test_dimmer_transitions(mock_openzwave):
"""Test dimming transition on a dimmable Z-Wave light."""
node = MockNode()
value = MockValue(data=0, node=node)
duration = MockValue(data=0, node=node)
values = MockLightValues(primary=value, dimming_duration=duration)
device = light.get_device(node=node, values=values, node_config={})
assert device.supported_features == SUPPORT_BRIGHTNESS | SUPPORT_TRANSITION
# Test turn_on
# Factory Default
device.turn_on()
assert duration.data == 0xFF
# Seconds transition
device.turn_on(**{ATTR_TRANSITION: 45})
assert duration.data == 45
# Minutes transition
device.turn_on(**{ATTR_TRANSITION: 245})
assert duration.data == 0x83
# Clipped transition
device.turn_on(**{ATTR_TRANSITION: 10000})
assert duration.data == 0xFE
# Test turn_off
# Factory Default
device.turn_off()
assert duration.data == 0xFF
# Seconds transition
device.turn_off(**{ATTR_TRANSITION: 45})
assert duration.data == 45
# Minutes transition
device.turn_off(**{ATTR_TRANSITION: 245})
assert duration.data == 0x83
# Clipped transition
device.turn_off(**{ATTR_TRANSITION: 10000})
assert duration.data == 0xFE
def test_dimmer_turn_off(mock_openzwave):
    """Test turning off a dimmable Z-Wave light."""
    node = MockNode()
    value = MockValue(data=46, node=node)
    values = MockLightValues(primary=value)
    device = light.get_device(node=node, values=values, node_config={})
    device.turn_off()
    assert node.set_dimmer.called
    value_id, brightness = node.set_dimmer.mock_calls[0][1]
    assert value_id == value.value_id
    # turning off is modelled as dimming to level 0
    assert brightness == 0
def test_dimmer_value_changed(mock_openzwave):
"""Test value changed for dimmer lights."""
node = MockNode()
value = MockValue(data=0, node=node)
values = MockLightValues(primary=value)
device = light.get_device(node=node, values=values, node_config={})
assert not device.is_on
value.data = 46
value_changed(value)
assert device.is_on
assert device.brightness == 118
def test_dimmer_refresh_value(mock_openzwave):
"""Test value changed for dimmer lights."""
node = MockNode()
value = MockValue(data=0, node=node)
values = MockLightValues(primary=value)
device = light.get_device(
node=node,
values=values,
node_config={zwave.CONF_REFRESH_VALUE: True, zwave.CONF_REFRESH_DELAY: 5},
)
assert not device.is_on
with patch.object(light, "Timer", MagicMock()) as mock_timer:
value.data = 46
value_changed(value)
assert not device.is_on
assert mock_timer.called
assert len(mock_timer.mock_calls) == 2
timeout, callback = mock_timer.mock_calls[0][1][:2]
assert timeout == 5
assert mock_timer().start.called
assert len(mock_timer().start.mock_calls) == 1
with patch.object(light, "Timer", MagicMock()) as mock_timer_2:
value_changed(value)
assert not device.is_on
assert mock_timer().cancel.called
assert len(mock_timer_2.mock_calls) == 2
timeout, callback = mock_timer_2.mock_calls[0][1][:2]
assert timeout == 5
assert mock_timer_2().start.called
assert len(mock_timer_2().start.mock_calls) == 1
callback()
assert device.is_on
assert device.brightness == 118
def test_set_hs_color(mock_openzwave):
    """Test setting zwave light color."""
    mock_node = MockNode(command_classes=[const.COMMAND_CLASS_SWITCH_COLOR])
    primary = MockValue(data=0, node=mock_node)
    color = MockValue(data="#0000000000", node=mock_node)
    # Channel mask 0x1C: supports RGB only.
    channels = MockValue(data=0x1C, node=mock_node)
    device = light.get_device(
        node=mock_node,
        values=MockLightValues(primary=primary, color=color, color_channels=channels),
        node_config={},
    )

    assert color.data == "#0000000000"
    device.turn_on(**{ATTR_HS_COLOR: (30, 50)})
    assert color.data == "#ffbf7f0000"
def test_set_white_value(mock_openzwave):
    """Test setting zwave light color."""
    mock_node = MockNode(command_classes=[const.COMMAND_CLASS_SWITCH_COLOR])
    primary = MockValue(data=0, node=mock_node)
    color = MockValue(data="#0000000000", node=mock_node)
    # Channel mask 0x1D: supports RGBW.
    channels = MockValue(data=0x1D, node=mock_node)
    device = light.get_device(
        node=mock_node,
        values=MockLightValues(primary=primary, color=color, color_channels=channels),
        node_config={},
    )

    assert color.data == "#0000000000"
    device.turn_on(**{ATTR_WHITE_VALUE: 200})
    assert color.data == "#ffffffc800"
def test_disable_white_if_set_color(mock_openzwave):
    """
    Test that _white is set to 0 if turn_on with ATTR_HS_COLOR.

    See Issue #13930 - many RGBW ZWave bulbs will only activate the RGB LED to
    produce color if _white is set to zero.
    """
    mock_node = MockNode(command_classes=[const.COMMAND_CLASS_SWITCH_COLOR])
    primary = MockValue(data=0, node=mock_node)
    color = MockValue(data="#0000000000", node=mock_node)
    # Channel mask 0x1C: supports RGB only.
    channels = MockValue(data=0x1C, node=mock_node)
    device = light.get_device(
        node=mock_node,
        values=MockLightValues(primary=primary, color=color, color_channels=channels),
        node_config={},
    )
    device._white = 234

    assert color.data == "#0000000000"
    assert device.white_value == 234

    device.turn_on(**{ATTR_HS_COLOR: (30, 50)})

    # Setting a color must zero the white channel so the RGB LED engages.
    assert device.white_value == 0
    assert color.data == "#ffbf7f0000"
def test_zw098_set_color_temp(mock_openzwave):
    """Test setting zwave light color."""
    mock_node = MockNode(
        manufacturer_id="0086",
        product_id="0062",
        command_classes=[const.COMMAND_CLASS_SWITCH_COLOR],
    )
    primary = MockValue(data=0, node=mock_node)
    color = MockValue(data="#0000000000", node=mock_node)
    # Channel mask 0x1F: supports RGB, warm white and cold white.
    channels = MockValue(data=0x1F, node=mock_node)
    device = light.get_device(
        node=mock_node,
        values=MockLightValues(primary=primary, color=color, color_channels=channels),
        node_config={},
    )

    assert color.data == "#0000000000"
    # A cold color temperature drives the cold-white byte to full ...
    device.turn_on(**{ATTR_COLOR_TEMP: 200})
    assert color.data == "#00000000ff"
    # ... while a warm one drives the warm-white byte to full.
    device.turn_on(**{ATTR_COLOR_TEMP: 400})
    assert color.data == "#000000ff00"
def test_rgb_not_supported(mock_openzwave):
    """Test value changed for rgb lights."""
    mock_node = MockNode(command_classes=[const.COMMAND_CLASS_SWITCH_COLOR])
    primary = MockValue(data=0, node=mock_node)
    color = MockValue(data="#0000000000", node=mock_node)
    # Channel mask 0x01: color temperature only, no RGB.
    channels = MockValue(data=0x01, node=mock_node)
    device = light.get_device(
        node=mock_node,
        values=MockLightValues(primary=primary, color=color, color_channels=channels),
        node_config={},
    )

    assert device.hs_color is None
def test_no_color_value(mock_openzwave):
    """Test value changed for rgb lights."""
    mock_node = MockNode(command_classes=[const.COMMAND_CLASS_SWITCH_COLOR])
    primary = MockValue(data=0, node=mock_node)
    device = light.get_device(
        node=mock_node, values=MockLightValues(primary=primary), node_config={}
    )

    # Without a color value the device reports no color.
    assert device.hs_color is None
def test_no_color_channels_value(mock_openzwave):
    """Test value changed for rgb lights."""
    mock_node = MockNode(command_classes=[const.COMMAND_CLASS_SWITCH_COLOR])
    primary = MockValue(data=0, node=mock_node)
    color = MockValue(data="#0000000000", node=mock_node)
    device = light.get_device(
        node=mock_node,
        values=MockLightValues(primary=primary, color=color),
        node_config={},
    )

    # A color value without a color-channels value yields no color.
    assert device.hs_color is None
def test_rgb_value_changed(mock_openzwave):
    """Test value changed for rgb lights."""
    mock_node = MockNode(command_classes=[const.COMMAND_CLASS_SWITCH_COLOR])
    primary = MockValue(data=0, node=mock_node)
    color = MockValue(data="#0000000000", node=mock_node)
    # Channel mask 0x1C: supports RGB only.
    channels = MockValue(data=0x1C, node=mock_node)
    device = light.get_device(
        node=mock_node,
        values=MockLightValues(primary=primary, color=color, color_channels=channels),
        node_config={},
    )

    assert device.hs_color == (0, 0)
    color.data = "#ffbf800000"
    value_changed(color)
    assert device.hs_color == (29.764, 49.804)
def test_rgbww_value_changed(mock_openzwave):
    """Test value changed for rgb lights."""
    mock_node = MockNode(command_classes=[const.COMMAND_CLASS_SWITCH_COLOR])
    primary = MockValue(data=0, node=mock_node)
    color = MockValue(data="#0000000000", node=mock_node)
    # Channel mask 0x1D: supports RGB plus warm white.
    channels = MockValue(data=0x1D, node=mock_node)
    device = light.get_device(
        node=mock_node,
        values=MockLightValues(primary=primary, color=color, color_channels=channels),
        node_config={},
    )

    assert device.hs_color == (0, 0)
    assert device.white_value == 0
    color.data = "#c86400c800"
    value_changed(color)
    assert device.hs_color == (30, 100)
    assert device.white_value == 200
def test_rgbcw_value_changed(mock_openzwave):
    """Test value changed for rgb lights."""
    mock_node = MockNode(command_classes=[const.COMMAND_CLASS_SWITCH_COLOR])
    primary = MockValue(data=0, node=mock_node)
    color = MockValue(data="#0000000000", node=mock_node)
    # Channel mask 0x1E: supports RGB plus cold white.
    channels = MockValue(data=0x1E, node=mock_node)
    device = light.get_device(
        node=mock_node,
        values=MockLightValues(primary=primary, color=color, color_channels=channels),
        node_config={},
    )

    assert device.hs_color == (0, 0)
    assert device.white_value == 0
    color.data = "#c86400c800"
    value_changed(color)
    assert device.hs_color == (30, 100)
    assert device.white_value == 200
def test_ct_value_changed(mock_openzwave):
    """Test value changed for zw098 lights."""
    mock_node = MockNode(
        manufacturer_id="0086",
        product_id="0062",
        command_classes=[const.COMMAND_CLASS_SWITCH_COLOR],
    )
    primary = MockValue(data=0, node=mock_node)
    color = MockValue(data="#0000000000", node=mock_node)
    # Channel mask 0x1F: supports RGB, warm white and cold white.
    channels = MockValue(data=0x1F, node=mock_node)
    device = light.get_device(
        node=mock_node,
        values=MockLightValues(primary=primary, color=color, color_channels=channels),
        node_config={},
    )

    assert device.color_temp == light.TEMP_MID_HASS
    # A full warm-white byte maps to the warm end of the HA scale ...
    color.data = "#000000ff00"
    value_changed(color)
    assert device.color_temp == light.TEMP_WARM_HASS
    # ... and a full cold-white byte maps to the cold end.
    color.data = "#00000000ff"
    value_changed(color)
    assert device.color_temp == light.TEMP_COLD_HASS
| apache-2.0 |
facebook/mysql-5.6 | xtrabackup/test/kewpie/percona_tests/xtrabackup_disabled/bug810269_test.py | 24 | 6930 | #! /usr/bin/env python
# -*- mode: python; indent-tabs-mode: nil; -*-
# vim:expandtab:shiftwidth=2:tabstop=2:smarttab:
#
# Copyright (C) 2011 Patrick Crews
#
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import os
import shutil
import tarfile
from lib.util.mysqlBaseTestCase import mysqlBaseTestCase
# One master server, started with strict per-table Barracuda-format InnoDB
# files so the test can create a ROW_FORMAT=compressed table.
server_requirements = [["--innodb_strict_mode --innodb_file_per_table --innodb_file_format=Barracuda"]]
# These module-level handles are populated by the test harness before setUp.
servers = []
server_manager = None
test_executor = None
# we explicitly use the --no-timestamp option
# here. We will be using a generic / vanilla backup dir
backup_path = None
def skip_checks(system_manager):
    """Return (skip, reason): skip the test unless the build has XtraDB/InnoDB."""
    has_innodb = bool(system_manager.code_manager.test_tree.innodb_version)
    if has_innodb:
        return False, ''
    return True, "Test requires XtraDB or Innodb plugin."
class basicTest(mysqlBaseTestCase):
    """Round-trips a ROW_FORMAT=compressed InnoDB table through a streamed
    (tar) xtrabackup backup, prepare and restore, then verifies the data
    survived via CHECKSUM TABLE (bugs #665210 / #810269)."""

    def setUp(self):
        # Ensure every run starts from a clean backup directory.
        master_server = servers[0] # assumption that this is 'master'
        backup_path = os.path.join(master_server.vardir, '_xtrabackup')
        # remove backup paths
        for del_path in [backup_path]:
            if os.path.exists(del_path):
                shutil.rmtree(del_path)

    def load_table(self, table_name, row_count, server):
        # Populate table_name with row_count rows of (i, row_count).
        queries = []
        for i in range(row_count):
            queries.append("INSERT INTO %s VALUES (%d, %d)" %(table_name,i, row_count))
        retcode, result = self.execute_queries(queries, server)
        self.assertEqual(retcode, 0, msg=result)

    def test_bug810269(self):
        """ Bug #665210: tar4ibd does not support innodb row_format=compressed
            Bug #810269: tar4ibd does not check for doublewrite buffer pages
        """
        self.servers = servers
        master_server = servers[0]
        logging = test_executor.logging
        innobackupex = test_executor.system_manager.innobackupex_path
        xtrabackup = test_executor.system_manager.xtrabackup_path
        backup_path = os.path.join(master_server.vardir, '_xtrabackup')
        tar_file_path = os.path.join(backup_path,'out.tar')
        output_path = os.path.join(master_server.vardir, 'innobackupex.out')
        exec_path = os.path.dirname(innobackupex)
        table_name = "t1"

        # populate our server with a test bed
        queries = ["DROP TABLE IF EXISTS %s" %(table_name)
                  ,("CREATE TABLE %s "
                    "(`a` int(11) DEFAULT NULL, "
                    "`number` int(11) DEFAULT NULL) "
                    " ENGINE=InnoDB DEFAULT CHARSET=latin1"
                    %(table_name)
                   )
                  # compress tables
                  ,("ALTER TABLE %s ENGINE=InnoDB "
                    "ROW_FORMAT=compressed KEY_BLOCK_SIZE=4"
                    %(table_name)
                   )
                  ]
        retcode, result = self.execute_queries(queries, master_server)
        self.assertEqual(retcode, 0, msg = result)
        row_count = 10000
        self.load_table(table_name, row_count, master_server)

        # get a checksum that we'll compare against post-restore
        query = "CHECKSUM TABLE %s" %table_name
        retcode, orig_checksum = self.execute_query(query, master_server)
        self.assertEqual(retcode, 0, orig_checksum)

        # take a backup
        try:
            os.mkdir(backup_path)
        except OSError:
            # the directory may already exist; that is fine
            pass
        # Stream the backup through tar into a single archive file.
        cmd = [ innobackupex
              , "--defaults-file=%s" %master_server.cnf_file
              , "--stream=tar"
              , "--user=root"
              , "--port=%d" %master_server.master_port
              , "--host=127.0.0.1"
              , "--no-timestamp"
              , "--ibbackup=%s" %xtrabackup
              , "%s > %s" %(backup_path,tar_file_path)
              ]
        cmd = " ".join(cmd)
        retcode, output = self.execute_cmd(cmd, output_path, exec_path, True)
        self.assertTrue(retcode==0,output)

        # stop the server
        master_server.stop()

        # extract our backup tarball
        cmd = "tar -ivxf %s" %tar_file_path
        retcode, output = self.execute_cmd(cmd, output_path, backup_path, True)
        self.assertEqual(retcode,0,output)
        # Check for Bug 723318 - seems quicker than separate test case
        self.assertTrue('xtrabackup_binary' in os.listdir(backup_path)
                       , msg = "Bug723318: xtrabackup_binary not included in tar archive when streaming")

        # do prepare on backup
        cmd = [ innobackupex
              , "--apply-log"
              , "--no-timestamp"
              , "--use-memory=500M"
              , "--ibbackup=%s" %xtrabackup
              , backup_path
              ]
        cmd = " ".join(cmd)
        retcode, output = self.execute_cmd(cmd, output_path, exec_path, True)
        self.assertEqual(retcode,0,output)

        # remove old datadir
        shutil.rmtree(master_server.datadir)
        os.mkdir(master_server.datadir)

        # restore from backup
        cmd = [ innobackupex
              , "--defaults-file=%s" %master_server.cnf_file
              , "--copy-back"
              , "--ibbackup=%s" %(xtrabackup)
              , backup_path
              ]
        cmd = " ".join(cmd)
        retcode, output = self.execute_cmd(cmd, output_path, exec_path, True)
        self.assertEqual(retcode,0, output)

        # restart server (and ensure it doesn't crash)
        master_server.start()
        self.assertEqual(master_server.status,1, 'Server failed restart from restored datadir...')

        # Check the server is ok
        # get a checksum that we'll compare against pre-restore
        query = "CHECKSUM TABLE %s" %table_name
        retcode, restored_checksum = self.execute_query(query, master_server)
        self.assertEqual(retcode, 0, restored_checksum)
        self.assertEqual(orig_checksum, restored_checksum, "%s || %s" %(orig_checksum, restored_checksum))
| gpl-2.0 |
VarchukVladimir/gizer | gizer/opinsert.py | 2 | 3720 | #!/usr/bin/env python
""" Handler of timestamps which operation type is insert op=insert """
__author__ = "Yaroslav Litvinov"
__copyright__ = "Copyright 2016, Rackspace Inc."
__email__ = "yaroslav.litvinov@rackspace.com"
# Escape modes accepted by escape_val(): backslash-escape non-ASCII and encode
# to UTF-8, encode to UTF-8 only, or pass the value through untouched.
ENCODE_ESCAPE = 1
ENCODE_ONLY = 0
NO_ENCODE_NO_ESCAPE = None
def format_string_insert_query(table, psql_schema_name, table_prefix):
    """ format string to be used with execute
    @param table object schema_engine.SqlTable
    @param psql_schema_name optional schema qualifier (may be empty)
    @param table_prefix optional prefix prepended to the table name
    """
    schema_part = psql_schema_name + '.' if len(psql_schema_name) else psql_schema_name
    qualified_name = table.table_name
    if len(table_prefix):
        qualified_name = table_prefix + qualified_name
    quoted_columns = ', '.join('"' + name + '"' for name in table.sql_column_names)
    placeholders = ', '.join('%s' for _ in table.sql_column_names)
    return 'INSERT INTO %s"%s" (%s) VALUES(%s);' \
        % (schema_part, qualified_name, quoted_columns, placeholders)
def escape_val(val, escape):
    """ @param escape if True then escape special character"""
    # Only text values are transformed; any other type passes through as-is.
    # NOTE(review): the bare `unicode` builtin makes this Python-2-only code
    # (consistent with the xrange usage elsewhere in this module).
    if type(val) is str or type(val) is unicode:
        if escape == ENCODE_ESCAPE:
            # Backslash-escape non-ASCII characters, then encode to UTF-8.
            return val.encode('unicode-escape').encode('utf-8')
        elif escape == ENCODE_ONLY:
            # Encode to UTF-8 without escaping.
            return val.encode('utf-8')
        else:
            # NO_ENCODE_NO_ESCAPE
            return val
    else:
        return val
def index_columns_as_dict(table):
    """ get dict with index columns, value is column index in row
    @param table object schema_engine.SqlTable"""
    res = {}
    # enumerate() replaces the Python-2-only xrange/len indexing and works
    # identically on both interpreter lines.
    for col_i, column_name in enumerate(table.sql_column_names):
        col = table.sql_columns[column_name]
        if col.index_key():
            res[col.index_key()] = col_i
    return res
def apply_indexes_to_table_rows(rows, index_keys, initial_indexes=None):
    """ get list of rows, every row is values list; rows are adjusted in place
    and also returned.
    @param index_keys {'index_name': 'column index'}
    @param initial_indexes dict of indexes from db tables"""
    # Default to a fresh dict: a mutable default argument ({}) is shared
    # across calls and is a classic Python pitfall.
    if initial_indexes is None:
        initial_indexes = {}
    for index_key in index_keys:
        if index_key and index_key in initial_indexes:
            col_i = index_keys[index_key]
            # adjust all column's indexes by the stored base offset
            for row in rows:
                row[col_i] = initial_indexes[index_key] + row[col_i]
    return rows
def table_rows_list(table, escape, null_value=None):
    """ get list of rows, every row is values list
    @param table object schema_engine.SqlTable
    @param escape escape mode forwarded to escape_val()
    @param null_value value stored in place of None column values"""
    res = []
    # The record count is taken from the first column; all columns are
    # expected to hold the same number of values.
    firstcolname = table.sql_column_names[0]
    reccount = len(table.sql_columns[firstcolname].values)
    # range() replaces the Python-2-only xrange and behaves the same here.
    for val_i in range(reccount):
        values = []
        for column_name in table.sql_column_names:
            col = table.sql_columns[column_name]
            if col.values[val_i] is not None:
                val = escape_val(col.values[val_i], escape)
            else:
                val = null_value
            values.append(val)
        res.append(values)
    return res
def generate_insert_queries(table, psql_schema_name, table_prefix,
                            initial_indexes=None):
    """ get insert queries as
    tuple: (format string, [(list,of,values,as,tuples)])
    @param table object schema_engine.SqlTable
    @param initial_indexes dict of indexes from db tables"""
    # Avoid the shared mutable-default-argument pitfall ({} as default).
    if initial_indexes is None:
        initial_indexes = {}
    fmt_string = format_string_insert_query(table,
                                            psql_schema_name, table_prefix)
    index_keys = index_columns_as_dict(table)
    rows = apply_indexes_to_table_rows(
        table_rows_list(table, NO_ENCODE_NO_ESCAPE),
        index_keys,
        initial_indexes)
    # Each row becomes a parameter tuple matching the format string.
    queries = [tuple(row) for row in rows]
    return (fmt_string, queries)
| apache-2.0 |
borisroman/vdsm | lib/vdsm/exception.py | 3 | 1193 | #
# Copyright 2012 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
from __future__ import absolute_import
class VdsmException(Exception):
    """Base Vdsm error carrying a numeric code and a human-readable message."""

    # Class-level defaults mirror the constructor defaults.
    code = 0
    message = "Vdsm Exception"

    def __init__(self, code=0, message='Vdsm Exception'):
        self.code = code
        self.message = message

    def __str__(self):
        return self.message

    def response(self):
        """Return the error formatted as a verb-response status dict."""
        status = {'code': self.code, 'message': str(self)}
        return {'status': status}
| gpl-2.0 |
antonio-fr/bitcoin | qa/rpc-tests/walletbackup.py | 132 | 7263 | #!/usr/bin/env python2
# Copyright (c) 2014 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""
Exercise the wallet backup code. Ported from walletbackup.sh.
Test case is:
4 nodes. 1 2 and 3 send transactions between each other,
fourth node is a miner.
1 2 3 each mine a block to start, then
Miner creates 100 blocks so 1 2 3 each have 50 mature
coins to spend.
Then 5 iterations of 1/2/3 sending coins amongst
themselves to get transactions in the wallets,
and the miner mining one block.
Wallets are backed up using dumpwallet/backupwallet.
Then 5 more iterations of transactions and mining a block.
Miner then generates 101 more blocks, so any
transaction fees paid mature.
Sanity check:
Sum(1,2,3,4 balances) == 114*50
1/2/3 are shutdown, and their wallets erased.
Then restore using wallet.dat backup. And
confirm 1/2/3/4 balances are same as before.
Shutdown again, restore using importwallet,
and confirm again balances are correct.
"""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from random import randint
import logging
# Show INFO-level progress messages for the whole test run.
logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO)
class WalletBackupTest(BitcoinTestFramework):
    """Exercise wallet backup/restore via backupwallet and dumpwallet
    (see the module docstring for the full scenario)."""

    def setup_chain(self):
        logging.info("Initializing test directory "+self.options.tmpdir)
        initialize_chain_clean(self.options.tmpdir, 4)

    # This mirrors how the network was setup in the bash test
    def setup_network(self, split=False):
        # nodes 1, 2,3 are spenders, let's give them a keypool=100
        extra_args = [["-keypool=100"], ["-keypool=100"], ["-keypool=100"], []]
        self.nodes = start_nodes(4, self.options.tmpdir, extra_args)
        connect_nodes(self.nodes[0], 3)
        connect_nodes(self.nodes[1], 3)
        connect_nodes(self.nodes[2], 3)
        connect_nodes(self.nodes[2], 0)
        self.is_network_split=False
        self.sync_all()

    def one_send(self, from_node, to_address):
        # 50% chance to send a random amount between 0.1 and 1.0.
        if (randint(1,2) == 1):
            amount = Decimal(randint(1,10)) / Decimal(10)
            self.nodes[from_node].sendtoaddress(to_address, amount)

    def do_one_round(self):
        # Each spender may pay each of the other two spenders, then the
        # miner confirms everything in one block.
        a0 = self.nodes[0].getnewaddress()
        a1 = self.nodes[1].getnewaddress()
        a2 = self.nodes[2].getnewaddress()
        self.one_send(0, a1)
        self.one_send(0, a2)
        self.one_send(1, a0)
        self.one_send(1, a2)
        self.one_send(2, a0)
        self.one_send(2, a1)
        # Have the miner (node3) mine a block.
        # Must sync mempools before mining.
        sync_mempools(self.nodes)
        self.nodes[3].generate(1)

    # As above, this mirrors the original bash test.
    def start_three(self):
        self.nodes[0] = start_node(0, self.options.tmpdir)
        self.nodes[1] = start_node(1, self.options.tmpdir)
        self.nodes[2] = start_node(2, self.options.tmpdir)
        connect_nodes(self.nodes[0], 3)
        connect_nodes(self.nodes[1], 3)
        connect_nodes(self.nodes[2], 3)
        connect_nodes(self.nodes[2], 0)

    def stop_three(self):
        stop_node(self.nodes[0], 0)
        stop_node(self.nodes[1], 1)
        stop_node(self.nodes[2], 2)

    def erase_three(self):
        # Delete the spender wallets so the restore paths are exercised.
        os.remove(self.options.tmpdir + "/node0/regtest/wallet.dat")
        os.remove(self.options.tmpdir + "/node1/regtest/wallet.dat")
        os.remove(self.options.tmpdir + "/node2/regtest/wallet.dat")

    def run_test(self):
        logging.info("Generating initial blockchain")
        # One block per spender, then 100 miner blocks so the spenders'
        # coinbases mature.
        self.nodes[0].generate(1)
        sync_blocks(self.nodes)
        self.nodes[1].generate(1)
        sync_blocks(self.nodes)
        self.nodes[2].generate(1)
        sync_blocks(self.nodes)
        self.nodes[3].generate(100)
        sync_blocks(self.nodes)

        assert_equal(self.nodes[0].getbalance(), 50)
        assert_equal(self.nodes[1].getbalance(), 50)
        assert_equal(self.nodes[2].getbalance(), 50)
        assert_equal(self.nodes[3].getbalance(), 0)

        logging.info("Creating transactions")
        # Five rounds of sending each other transactions.
        for i in range(5):
            self.do_one_round()

        logging.info("Backing up")
        tmpdir = self.options.tmpdir
        self.nodes[0].backupwallet(tmpdir + "/node0/wallet.bak")
        self.nodes[0].dumpwallet(tmpdir + "/node0/wallet.dump")
        self.nodes[1].backupwallet(tmpdir + "/node1/wallet.bak")
        self.nodes[1].dumpwallet(tmpdir + "/node1/wallet.dump")
        self.nodes[2].backupwallet(tmpdir + "/node2/wallet.bak")
        self.nodes[2].dumpwallet(tmpdir + "/node2/wallet.dump")

        logging.info("More transactions")
        # These happen after the backup, so the restored wallets must still
        # find them via rescan.
        for i in range(5):
            self.do_one_round()

        # Generate 101 more blocks, so any fees paid mature
        self.nodes[3].generate(101)
        self.sync_all()

        balance0 = self.nodes[0].getbalance()
        balance1 = self.nodes[1].getbalance()
        balance2 = self.nodes[2].getbalance()
        balance3 = self.nodes[3].getbalance()
        total = balance0 + balance1 + balance2 + balance3

        # At this point, there are 214 blocks (103 for setup, then 10 rounds, then 101.)
        # 114 are mature, so the sum of all wallets should be 114 * 50 = 5700.
        assert_equal(total, 5700)

        ##
        # Test restoring spender wallets from backups
        ##
        logging.info("Restoring using wallet.dat")
        self.stop_three()
        self.erase_three()

        # Start node2 with no chain
        shutil.rmtree(self.options.tmpdir + "/node2/regtest/blocks")
        shutil.rmtree(self.options.tmpdir + "/node2/regtest/chainstate")

        # Restore wallets from backup
        shutil.copyfile(tmpdir + "/node0/wallet.bak", tmpdir + "/node0/regtest/wallet.dat")
        shutil.copyfile(tmpdir + "/node1/wallet.bak", tmpdir + "/node1/regtest/wallet.dat")
        shutil.copyfile(tmpdir + "/node2/wallet.bak", tmpdir + "/node2/regtest/wallet.dat")

        logging.info("Re-starting nodes")
        self.start_three()
        sync_blocks(self.nodes)

        assert_equal(self.nodes[0].getbalance(), balance0)
        assert_equal(self.nodes[1].getbalance(), balance1)
        assert_equal(self.nodes[2].getbalance(), balance2)

        logging.info("Restoring using dumped wallet")
        self.stop_three()
        self.erase_three()

        #start node2 with no chain
        shutil.rmtree(self.options.tmpdir + "/node2/regtest/blocks")
        shutil.rmtree(self.options.tmpdir + "/node2/regtest/chainstate")

        self.start_three()

        assert_equal(self.nodes[0].getbalance(), 0)
        assert_equal(self.nodes[1].getbalance(), 0)
        assert_equal(self.nodes[2].getbalance(), 0)

        self.nodes[0].importwallet(tmpdir + "/node0/wallet.dump")
        self.nodes[1].importwallet(tmpdir + "/node1/wallet.dump")
        self.nodes[2].importwallet(tmpdir + "/node2/wallet.dump")

        sync_blocks(self.nodes)

        assert_equal(self.nodes[0].getbalance(), balance0)
        assert_equal(self.nodes[1].getbalance(), balance1)
        assert_equal(self.nodes[2].getbalance(), balance2)
if __name__ == '__main__':
    # Allow running the scenario directly from the command line.
    WalletBackupTest().main()
| mit |
Qalthos/ansible | lib/ansible/modules/monitoring/bigpanda.py | 92 | 5714 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: bigpanda
author: "Hagai Kariti (@hkariti)"
short_description: Notify BigPanda about deployments
version_added: "1.8"
description:
- Notify BigPanda when deployments start and end (successfully or not). Returns a deployment object containing all the parameters for future module calls.
options:
component:
description:
- "The name of the component being deployed. Ex: billing"
required: true
aliases: ['name']
version:
description:
- The deployment version.
required: true
token:
description:
- API token.
required: true
state:
description:
- State of the deployment.
required: true
choices: ['started', 'finished', 'failed']
hosts:
description:
- Name of affected host name. Can be a list.
required: false
default: machine's hostname
aliases: ['host']
env:
description:
- The environment name, typically 'production', 'staging', etc.
required: false
owner:
description:
- The person responsible for the deployment.
required: false
description:
description:
- Free text description of the deployment.
required: false
url:
description:
- Base URL of the API server.
required: False
default: https://api.bigpanda.io
validate_certs:
description:
- If C(no), SSL certificates for the target url will not be validated. This should only be used
on personally controlled sites using self-signed certificates.
required: false
default: 'yes'
type: bool
# informational: requirements for nodes
requirements: [ ]
'''
EXAMPLES = '''
- bigpanda:
component: myapp
version: '1.3'
token: '{{ bigpanda_token }}'
state: started
- bigpanda:
component: myapp
version: '1.3'
token: '{{ bigpanda_token }}'
state: finished
# If outside servers aren't reachable from your machine, use delegate_to and override hosts:
- bigpanda:
component: myapp
version: '1.3'
token: '{{ bigpanda_token }}'
hosts: '{{ ansible_hostname }}'
state: started
delegate_to: localhost
register: deployment
- bigpanda:
component: '{{ deployment.component }}'
version: '{{ deployment.version }}'
token: '{{ deployment.token }}'
state: finished
delegate_to: localhost
'''
# ===========================================
# Module execution.
#
import json
import socket
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
from ansible.module_utils.urls import fetch_url
def main():
    """Entry point: build the BigPanda deployment event and POST it."""
    # Declare the module interface; AnsibleModule handles parsing/validation.
    module = AnsibleModule(
        argument_spec=dict(
            component=dict(required=True, aliases=['name']),
            version=dict(required=True),
            token=dict(required=True, no_log=True),
            state=dict(required=True, choices=['started', 'finished', 'failed']),
            hosts=dict(required=False, default=[socket.gethostname()], aliases=['host']),
            env=dict(required=False),
            owner=dict(required=False),
            description=dict(required=False),
            message=dict(required=False),
            source_system=dict(required=False, default='ansible'),
            validate_certs=dict(default='yes', type='bool'),
            url=dict(required=False, default='https://api.bigpanda.io'),
        ),
        supports_check_mode=True,
    )

    token = module.params['token']
    state = module.params['state']
    url = module.params['url']

    # Build the common request body
    body = dict()
    for k in ('component', 'version', 'hosts'):
        v = module.params[k]
        if v is not None:
            body[k] = v

    # Normalize the single-host alias into a list.
    if not isinstance(body['hosts'], list):
        body['hosts'] = [body['hosts']]

    # Insert state-specific attributes to body
    if state == 'started':
        for k in ('source_system', 'env', 'owner', 'description'):
            v = module.params[k]
            if v is not None:
                body[k] = v
        request_url = url + '/data/events/deployments/start'
    else:
        message = module.params['message']
        if message is not None:
            body['errorMessage'] = message

        # 'finished' reports success, 'failed' reports failure.
        if state == 'finished':
            body['status'] = 'success'
        else:
            body['status'] = 'failure'

        request_url = url + '/data/events/deployments/end'

    # Build the deployment object we return
    deployment = dict(token=token, url=url)
    deployment.update(body)
    # Expose the API's errorMessage back to the playbook as 'message'.
    if 'errorMessage' in deployment:
        message = deployment.pop('errorMessage')
        deployment['message'] = message

    # If we're in check mode, just exit pretending like we succeeded
    if module.check_mode:
        module.exit_json(changed=True, **deployment)

    # Send the data to bigpanda
    data = json.dumps(body)
    headers = {'Authorization': 'Bearer %s' % token, 'Content-Type': 'application/json'}
    try:
        response, info = fetch_url(module, request_url, data=data, headers=headers)
        if info['status'] == 200:
            module.exit_json(changed=True, **deployment)
        else:
            module.fail_json(msg=json.dumps(info))
    except Exception as e:
        module.fail_json(msg=to_native(e), exception=traceback.format_exc())


if __name__ == '__main__':
    main()
| gpl-3.0 |
rdobson/python-hwinfo | hwinfo/host/cpuinfo.py | 1 | 1269 | """Module for parsing the output of /proc/cpuinfo"""
from hwinfo.util import CommandParser
# Template for one "<label>   : <value>" line; the value is captured into a
# named group.
REGEX_TEMPLATE = r'%s([\ \t])+\:\ (?P<%s>.*)'


class CPUInfoParser(CommandParser):
    """Parser for /proc/cpuinfo output."""

    # Processor stanzas in /proc/cpuinfo are separated by a blank line.
    ITEM_SEPERATOR = "\n\n"

    # One named-group regex per field; built from (label, group-name) pairs.
    ITEM_REGEXS = [
        REGEX_TEMPLATE % (label, group)
        for label, group in (
            ('processor', 'processor'),
            ('vendor_id', 'vendor_id'),
            (r'cpu\ family', 'cpu_family'),
            ('model', 'model'),
            (r'model\ name', 'model_name'),
            ('stepping', 'stepping'),
            ('microcode', 'microcode'),
            (r'cpu\ MHz', 'cpu_mhz'),
            (r'cache\ size', 'cache_size'),
            (r'fpu', 'fpu'),
            (r'fpu_exception', 'fpu_exception'),
            (r'cpuid\ level', 'cpuid_level'),
            (r'wp', 'wp'),
            (r'flags', 'flags'),
            (r'bogomips', 'bogomips'),
            (r'clflush\ size', 'clflush_size'),
            (r'cache_alignment', 'cache_alignment'),
            (r'address\ sizes', 'address_sizes'),
            (r'power\ management', 'power_management'),
        )
    ]
bigmlcom/bigmler | bigmler/tests/test_03_unicode_command.py | 1 | 3461 | # -*- coding: utf-8 -*-
#!/usr/bin/env python
#
# Copyright 2015-2021 BigML
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
""" Testing dataset creation with unicode characters
"""
from bigmler.tests.world import (world, common_setup_module,
common_teardown_module, teardown_class)
import bigmler.tests.dataset_advanced_steps as dataset_adv
import bigmler.tests.basic_tst_prediction_steps as test_pred
def setup_module():
    """Setup for the module
    """
    # Delegates to the shared BigMLer test-world setup.
    common_setup_module()
def teardown_module():
    """Teardown for the module
    """
    # Delegates to the shared BigMLer test-world teardown.
    common_teardown_module()
class TestUnicode(object):
    """Dataset/prediction scenario whose paths, objective field and model
    fields deliberately contain non-ASCII characters."""

    def teardown(self):
        """Calling generic teardown for every method
        """
        print("\nEnd of tests in: %s\n-------------------\n" % __name__)
        teardown_class()

    def setup(self):
        """
        Debug information
        """
        print("\n-------------------\nTests in: %s\n" % __name__)

    def test_scenario1(self):
        """
        Scenario: Successfully building test predictions from dataset specifying objective field and model fields
            Given I create a BigML dataset from "<data>" and store logs in "<output_dir>"
            And I check that the source has been created
            And I check that the dataset has been created
            And I create BigML resources using dataset, objective field <objective> and model fields <fields> to test "<test>" and log predictions in "<output>"
            And I check that the model has been created
            And I check that the predictions are ready
            Then the local prediction file is like "<predictions_file>"

            Examples:
            |data | output_dir | test | output |predictions_file | objective | fields |
            | ../data/iris_2fb.csv| ./scénario1 | ../data/test_iris2fb.csv | ./scénario1/predictions.csv | ./check_files/predictions_iris_2fb.csv | spécies | "pétal width" |
        """
        print(self.test_scenario1.__doc__)
        # Accented names ('scénario1', 'spécies', '"pétal width"') exercise
        # unicode handling end to end.
        examples = [
            ['data/iris_2fb.csv', 'scénario1', 'data/test_iris2fb.csv', 'scénario1/predictions.csv', 'check_files/predictions_iris_2fb.csv', 'spécies', '"pétal width"']]
        for example in examples:
            print("\nTesting with:\n", example)
            dataset_adv.i_create_dataset(self, data=example[0], output_dir=example[1])
            test_pred.i_check_create_source(self)
            test_pred.i_check_create_dataset(self, suffix=None)
            test_pred.i_create_resources_from_dataset_objective_model(self, objective=example[5], fields=example[6], test=example[2], output=example[3])
            test_pred.i_check_create_model(self)
            test_pred.i_check_create_predictions(self)
            test_pred.i_check_predictions(self, example[4])
| apache-2.0 |
shaistaansari/django | tests/gis_tests/layermap/models.py | 67 | 2507 | from django.utils.encoding import python_2_unicode_compatible
from ..models import models
@python_2_unicode_compatible
class NamedModel(models.Model):
    # Abstract base for the layermap test models: a short display name plus
    # the GIS-aware default manager.
    name = models.CharField(max_length=25)
    objects = models.GeoManager()

    class Meta:
        abstract = True
        required_db_features = ['gis_enabled']

    def __str__(self):
        return self.name
class State(NamedModel):
    # Only carries the inherited `name` field.
    pass
class County(NamedModel):
    # Each county belongs to exactly one state.
    state = models.ForeignKey(State)
    mpoly = models.MultiPolygonField(srid=4269)  # Multipolygon in NAD83
class CountyFeat(NamedModel):
    # Single-polygon variant of County, same NAD83 SRID.
    poly = models.PolygonField(srid=4269)
class City(NamedModel):
    # Extra text/char name columns alongside the inherited `name`.
    name_txt = models.TextField(default='')
    name_short = models.CharField(max_length=5)
    population = models.IntegerField()
    density = models.DecimalField(max_digits=7, decimal_places=1)
    dt = models.DateField()
    point = models.PointField()

    class Meta:
        app_label = 'layermap'
        required_db_features = ['gis_enabled']
class Interstate(NamedModel):
    # A named highway with its length and line geometry.
    length = models.DecimalField(max_digits=6, decimal_places=2)
    path = models.LineStringField()

    class Meta:
        app_label = 'layermap'
        required_db_features = ['gis_enabled']
# Same as `City` above, but for testing model inheritance.
class CityBase(NamedModel):
    population = models.IntegerField()
    density = models.DecimalField(max_digits=7, decimal_places=1)
    point = models.PointField()
class ICity1(CityBase):
    # First level of inheritance: adds a date field.
    dt = models.DateField()

    class Meta(CityBase.Meta):
        pass
class ICity2(ICity1):
    # Second level of inheritance: adds an auto-updated timestamp.
    dt_time = models.DateTimeField(auto_now=True)

    class Meta(ICity1.Meta):
        pass
class Invalid(models.Model):
    # Deliberately minimal model (no NamedModel base) used for negative tests.
    point = models.PointField()

    class Meta:
        required_db_features = ['gis_enabled']
# Mapping dictionaries for the models above.
co_mapping = dict(
    name='Name',
    # ForeignKey's use another mapping dictionary for the _related_ Model (State in this case).
    state={'name': 'State'},
    # Will convert POLYGON features into MULTIPOLYGONS.
    mpoly='MULTIPOLYGON',
)

cofeat_mapping = dict(name='Name', poly='POLYGON')

city_mapping = dict(
    name='Name',
    population='Population',
    density='Density',
    dt='Created',
    point='POINT',
)

inter_mapping = dict(name='Name', length='Length', path='LINESTRING')
| bsd-3-clause |
saifrahmed/bokeh | bokeh/models/axes.py | 33 | 5946 | """ Guide renderers for various kinds of axes that can be added to
Bokeh plots
"""
from __future__ import absolute_import
from ..properties import Int, Float, String, Enum, Bool, Datetime, Auto, Instance, Tuple, Either, Include
from ..mixins import LineProps, TextProps
from ..enums import Location
from .renderers import GuideRenderer
from .tickers import Ticker, BasicTicker, LogTicker, CategoricalTicker, DatetimeTicker
from .formatters import (TickFormatter, BasicTickFormatter, LogTickFormatter,
CategoricalTickFormatter, DatetimeTickFormatter)
class Axis(GuideRenderer):
    """ A base class that defines common properties for all axis types.

    ``Axis`` is not generally useful to instantiate on its own.

    """

    visible = Bool(True, help="""
    Ability to hide the entire axis from the plot.
    """)

    location = Either(Auto, Enum(Location), help="""
    Where should labels and ticks be located in relation to the axis rule.
    """)

    bounds = Either(Auto, Tuple(Float, Float), Tuple(Datetime, Datetime), help="""
    Bounds for the rendered axis. If unset, the axis will span the
    entire plot in the given dimension.
    """)

    x_range_name = String('default', help="""
    A particular (named) x-range to use for computing screen
    locations when rendering an axis on the plot. If unset, use the
    default x-range.
    """)

    y_range_name = String('default', help="""
    A particular (named) y-range to use for computing screen
    locations when rendering an axis on the plot. If unset, use the
    default y-range.
    """)

    ticker = Instance(Ticker, help="""
    A Ticker to use for computing locations of axis components.
    """)

    formatter = Instance(TickFormatter, help="""
    A TickFormatter to use for formatting the visual appearance
    of ticks.
    """)

    axis_label = String(help="""
    A text label for the axis, displayed parallel to the axis rule.

    .. note::
        LaTeX notation is not currently supported; please see
        :bokeh-issue:`647` to track progress or contribute.

    """)

    axis_label_standoff = Int(help="""
    The distance in pixels that the axis labels should be offset
    from the tick labels.
    """)

    axis_label_props = Include(TextProps, help="""
    The %s of the axis label.
    """)

    major_label_standoff = Int(help="""
    The distance in pixels that the major tick labels should be
    offset from the associated ticks.
    """)

    # Fixed garbled help text: "If a i number" -> "If a number".
    major_label_orientation = Either(Enum("horizontal", "vertical"), Float, help="""
    What direction the major label text should be oriented. If a
    number is supplied, the angle of the text is measured from horizontal.
    """)

    major_label_props = Include(TextProps, help="""
    The %s of the major tick labels.
    """)

    axis_props = Include(LineProps, help="""
    The %s of the axis line.
    """)

    major_tick_props = Include(LineProps, help="""
    The %s of the major ticks.
    """)

    major_tick_in = Int(help="""
    The distance in pixels that major ticks should extend into the
    main plot area.
    """)

    major_tick_out = Int(help="""
    The distance in pixels that major ticks should extend out of the
    main plot area.
    """)

    minor_tick_props = Include(LineProps, help="""
    The %s of the minor ticks.
    """)

    minor_tick_in = Int(help="""
    The distance in pixels that minor ticks should extend into the
    main plot area.
    """)

    # Fixed copy-paste error: this property concerns *minor* ticks,
    # but the help text previously said "major ticks".
    minor_tick_out = Int(help="""
    The distance in pixels that minor ticks should extend out of the
    main plot area.
    """)
class ContinuousAxis(Axis):
    """ A base class for all numeric, non-categorical axes types.

    ``ContinuousAxis`` is not generally useful to instantiate on its own.

    """
    pass
class LinearAxis(ContinuousAxis):
    """ An axis that picks nice numbers for tick locations on a
    linear scale. Configured with a ``BasicTickFormatter`` by default.

    """
    def __init__(self, ticker=None, formatter=None, **kwargs):
        # Substitute linear-scale defaults for any component not supplied.
        ticker = BasicTicker() if ticker is None else ticker
        formatter = BasicTickFormatter() if formatter is None else formatter
        super(LinearAxis, self).__init__(ticker=ticker, formatter=formatter, **kwargs)
class LogAxis(ContinuousAxis):
    """ An axis that picks nice numbers for tick locations on a
    log scale. Configured with a ``LogTickFormatter`` by default.

    """
    def __init__(self, ticker=None, formatter=None, **kwargs):
        # Log-scale defaults; the formatter is tied to the chosen ticker.
        ticker = LogTicker(num_minor_ticks=10) if ticker is None else ticker
        formatter = LogTickFormatter(ticker=ticker) if formatter is None else formatter
        super(LogAxis, self).__init__(ticker=ticker, formatter=formatter, **kwargs)
class CategoricalAxis(Axis):
    """ An axis that picks evenly spaced tick locations for a
    collection of categories/factors.

    """
    def __init__(self, ticker=None, formatter=None, **kwargs):
        # Substitute categorical defaults for any component not supplied.
        ticker = CategoricalTicker() if ticker is None else ticker
        formatter = CategoricalTickFormatter() if formatter is None else formatter
        super(CategoricalAxis, self).__init__(ticker=ticker, formatter=formatter, **kwargs)
class DatetimeAxis(LinearAxis):
    """ A LinearAxis that picks nice numbers for tick locations on
    a datetime scale. Configured with a ``DatetimeTickFormatter`` by
    default.

    """

    axis_label = String("date", help="""
    DateTime ``axis_label`` defaults to "date".
    """)

    # TODO: (bev) this should be an Enum, if it is exposed at all
    scale = String("time")

    # NOTE(review): presumably layout heuristics for how many labels fit
    # along the axis — confirm against the client-side renderer.
    num_labels = Int(8)
    char_width = Int(10)
    fill_ratio = Float(0.3)

    def __init__(self, ticker=None, formatter=None, **kwargs):
        # Default to datetime-aware ticker/formatter when none are supplied.
        if ticker is None:
            ticker = DatetimeTicker()
        if formatter is None:
            formatter = DatetimeTickFormatter()
        super(DatetimeAxis, self).__init__(ticker=ticker, formatter=formatter, **kwargs)
| bsd-3-clause |
open-craft/xblock-dalite | dalite_xblock/utils.py | 1 | 1894 | # -*- coding: utf-8 -*-
"""Dalite XBlock utils."""
from lazy.lazy import lazy
def _(text): # pylint: disable=invalid-name
"""
Make '_' a no-op so we can scrape strings.
:return text
"""
return text
# pylint: disable=protected-access
class FieldValuesContextManager(object):
    """
    Temporarily install a bound method as an XBlock field's values provider.

    XBlock field ``values`` may be callable, but such a callable must take no
    parameters, while producing a values list requires the current XBlock.
    This context manager works around that by swapping in a callback that
    already carries the block, then restoring the original on exit.
    """

    def __init__(self, block, field_name, field_values_callback):
        """
        Initialize FieldValuesContextManager.

        :param XBlock block: XBlock containing the field to wrap
        :param string field_name: name of the target field
        :param () -> list[Any] field_values_callback: values provider callback (bound or unbound method)
        """
        self._block = block
        self._field_name = field_name
        self._callback = field_values_callback
        self._saved_values = None

    @lazy
    def field(self):
        """
        The field descriptor being wrapped.

        :rtype: xblock.fields.Field
        """
        return self._block.fields[self._field_name]

    def __enter__(self):
        """Remember the original values provider and install the callback."""
        self._saved_values = self.field.values
        self.field._values = self._callback

    def __exit__(self, exc_type, exc_val, exc_tb):
        """
        Restore the original values provider on section exit.

        :param type|None exc_type: type of exception thrown, or None
        :param Exception|None exc_val: exception thrown, or None
        :param exc_tb: exception traceback, or None
        :rtype: bool
        :returns: False always, so exceptions are never suppressed
        """
        self.field._values = self._saved_values
        return False
| agpl-3.0 |
Eric89GXL/scipy | scipy/optimize/_trustregion_constr/equality_constrained_sqp.py | 10 | 8676 | """Byrd-Omojokun Trust-Region SQP method."""
from __future__ import division, print_function, absolute_import
from scipy.sparse import eye as speye
from .projections import projections
from .qp_subproblem import modified_dogleg, projected_cg, box_intersections
import numpy as np
from numpy.linalg import norm
__all__ = ['equality_constrained_sqp']
def default_scaling(x):
    """Default (identity) scaling: a sparse n-x-n identity for an n-vector ``x``."""
    dimension, = np.shape(x)
    return speye(dimension)
def equality_constrained_sqp(fun_and_constr, grad_and_jac, lagr_hess,
                             x0, fun0, grad0, constr0,
                             jac0, stop_criteria,
                             state,
                             initial_penalty,
                             initial_trust_radius,
                             factorization_method,
                             trust_lb=None,
                             trust_ub=None,
                             scaling=default_scaling):
    """Solve nonlinear equality-constrained problem using trust-region SQP.

    Solve optimization problem:

        minimize fun(x)
        subject to: constr(x) = 0

    using Byrd-Omojokun Trust-Region SQP method described in [1]_. Several
    implementation details are based on [2]_ and [3]_, p. 549.

    Parameters
    ----------
    fun_and_constr : callable
        ``fun_and_constr(x) -> (f, b)``: objective value and constraint
        values at ``x``.
    grad_and_jac : callable
        ``grad_and_jac(x) -> (c, A)``: objective gradient and constraint
        Jacobian at ``x``.
    lagr_hess : callable
        ``lagr_hess(x, v) -> H``: Hessian of the Lagrangian for Lagrange
        multipliers ``v``.
    x0, fun0, grad0, constr0, jac0 : array_like
        Starting point and the objective/gradient/constraint/Jacobian
        values already evaluated there (avoids one extra evaluation).
    stop_criteria : callable
        Called once per iteration with ``(state, x, last_iteration_failed,
        optimality, constr_violation, trust_radius, penalty, cg_info)``;
        iteration stops when it returns a truthy value.
    state : object
        Opaque bookkeeping object passed through to ``stop_criteria``.
    initial_penalty, initial_trust_radius : float
        Starting merit-function penalty and trust-region radius.
    factorization_method : str or None
        Forwarded to ``projections`` to choose the factorization used.
    trust_lb, trust_ub : array_like, optional
        Box bounds for the trust-region subproblems; default to +/-inf.
    scaling : callable, optional
        ``scaling(x)`` returns the matrix ``S`` used to scale steps
        (``x_next = x + S.dot(d)``); identity by default.

    Returns
    -------
    x : ndarray
        Final iterate.
    state : object
        The (possibly updated) ``state`` object.
    """
    PENALTY_FACTOR = 0.3  # Rho from formula (3.51), reference [2]_, p.891.
    LARGE_REDUCTION_RATIO = 0.9
    INTERMEDIARY_REDUCTION_RATIO = 0.3
    SUFFICIENT_REDUCTION_RATIO = 1e-8  # Eta from reference [2]_, p.892.
    TRUST_ENLARGEMENT_FACTOR_L = 7.0
    TRUST_ENLARGEMENT_FACTOR_S = 2.0
    MAX_TRUST_REDUCTION = 0.5
    MIN_TRUST_REDUCTION = 0.1
    SOC_THRESHOLD = 0.1
    TR_FACTOR = 0.8  # Zeta from formula (3.21), reference [2]_, p.885.
    BOX_FACTOR = 0.5

    n, = np.shape(x0)  # Number of parameters

    # Set default lower and upper bounds.
    if trust_lb is None:
        trust_lb = np.full(n, -np.inf)
    if trust_ub is None:
        trust_ub = np.full(n, np.inf)

    # Initial values
    x = np.copy(x0)
    trust_radius = initial_trust_radius
    penalty = initial_penalty
    # Compute Values
    f = fun0
    c = grad0
    b = constr0
    A = jac0
    S = scaling(x)
    # Get projections
    Z, LS, Y = projections(A, factorization_method)
    # Compute least-square lagrange multipliers
    v = -LS.dot(c)
    # Compute Hessian
    H = lagr_hess(x, v)

    # Update state parameters
    optimality = norm(c + A.T.dot(v), np.inf)
    constr_violation = norm(b, np.inf) if len(b) > 0 else 0
    cg_info = {'niter': 0, 'stop_cond': 0,
               'hits_boundary': False}

    last_iteration_failed = False
    while not stop_criteria(state, x, last_iteration_failed,
                            optimality, constr_violation,
                            trust_radius, penalty, cg_info):
        # Normal Step - `dn`
        # minimize 1/2*||A dn + b||^2
        # subject to:
        # ||dn|| <= TR_FACTOR * trust_radius
        # BOX_FACTOR * lb <= dn <= BOX_FACTOR * ub.
        dn = modified_dogleg(A, Y, b,
                             TR_FACTOR*trust_radius,
                             BOX_FACTOR*trust_lb,
                             BOX_FACTOR*trust_ub)

        # Tangential Step - `dt`
        # Solve the QP problem:
        # minimize 1/2 dt.T H dt + dt.T (H dn + c)
        # subject to:
        # A dt = 0
        # ||dt|| <= sqrt(trust_radius**2 - ||dn||**2)
        # lb - dn <= dt <= ub - dn
        c_t = H.dot(dn) + c
        b_t = np.zeros_like(b)
        trust_radius_t = np.sqrt(trust_radius**2 - np.linalg.norm(dn)**2)
        lb_t = trust_lb - dn
        ub_t = trust_ub - dn
        dt, cg_info = projected_cg(H, c_t, Z, Y, b_t,
                                   trust_radius_t,
                                   lb_t, ub_t)

        # Compute update (normal + tangential steps).
        d = dn + dt

        # Compute second order model: 1/2 d H d + c.T d + f.
        quadratic_model = 1/2*(H.dot(d)).dot(d) + c.T.dot(d)
        # Compute linearized constraint: l = A d + b.
        linearized_constr = A.dot(d)+b
        # Compute new penalty parameter according to formula (3.52),
        # reference [2]_, p.891.
        vpred = norm(b) - norm(linearized_constr)
        # Guarantee `vpred` always positive,
        # regardless of roundoff errors.
        vpred = max(1e-16, vpred)
        previous_penalty = penalty
        if quadratic_model > 0:
            new_penalty = quadratic_model / ((1-PENALTY_FACTOR)*vpred)
            penalty = max(penalty, new_penalty)
        # Compute predicted reduction according to formula (3.52),
        # reference [2]_, p.891.
        predicted_reduction = -quadratic_model + penalty*vpred

        # Compute merit function at current point
        merit_function = f + penalty*norm(b)
        # Evaluate function and constraints at trial point
        x_next = x + S.dot(d)
        f_next, b_next = fun_and_constr(x_next)
        # Compute merit function at trial point
        merit_function_next = f_next + penalty*norm(b_next)
        # Compute actual reduction according to formula (3.54),
        # reference [2]_, p.892.
        actual_reduction = merit_function - merit_function_next
        # Compute reduction ratio
        reduction_ratio = actual_reduction / predicted_reduction

        # Second order correction (SOC), reference [2]_, p.892.
        if reduction_ratio < SUFFICIENT_REDUCTION_RATIO and \
                norm(dn) <= SOC_THRESHOLD * norm(dt):
            # Compute second order correction
            y = -Y.dot(b_next)
            # Make sure increment is inside box constraints
            _, t, intersect = box_intersections(d, y, trust_lb, trust_ub)
            # Compute tentative point
            x_soc = x + S.dot(d + t*y)
            f_soc, b_soc = fun_and_constr(x_soc)
            # Recompute actual reduction
            merit_function_soc = f_soc + penalty*norm(b_soc)
            actual_reduction_soc = merit_function - merit_function_soc
            # Recompute reduction ratio
            reduction_ratio_soc = actual_reduction_soc / predicted_reduction
            if intersect and reduction_ratio_soc >= SUFFICIENT_REDUCTION_RATIO:
                x_next = x_soc
                f_next = f_soc
                b_next = b_soc
                reduction_ratio = reduction_ratio_soc

        # Readjust trust region step, formula (3.55), reference [2]_, p.892.
        if reduction_ratio >= LARGE_REDUCTION_RATIO:
            trust_radius = max(TRUST_ENLARGEMENT_FACTOR_L * norm(d),
                               trust_radius)
        elif reduction_ratio >= INTERMEDIARY_REDUCTION_RATIO:
            trust_radius = max(TRUST_ENLARGEMENT_FACTOR_S * norm(d),
                               trust_radius)
        # Reduce trust region step, according to reference [3]_, p.696.
        elif reduction_ratio < SUFFICIENT_REDUCTION_RATIO:
            trust_reduction \
                = (1-SUFFICIENT_REDUCTION_RATIO)/(1-reduction_ratio)
            new_trust_radius = trust_reduction * norm(d)
            if new_trust_radius >= MAX_TRUST_REDUCTION * trust_radius:
                trust_radius *= MAX_TRUST_REDUCTION
            elif new_trust_radius >= MIN_TRUST_REDUCTION * trust_radius:
                trust_radius = new_trust_radius
            else:
                trust_radius *= MIN_TRUST_REDUCTION

        # Update iteration
        if reduction_ratio >= SUFFICIENT_REDUCTION_RATIO:
            # Step accepted: advance the iterate and refresh derivatives.
            x = x_next
            f, b = f_next, b_next
            c, A = grad_and_jac(x)
            S = scaling(x)
            # Get projections
            Z, LS, Y = projections(A, factorization_method)
            # Compute least-square lagrange multipliers
            v = -LS.dot(c)
            # Compute Hessian
            H = lagr_hess(x, v)
            # Set Flag
            last_iteration_failed = False
            # Optimality values
            optimality = norm(c + A.T.dot(v), np.inf)
            constr_violation = norm(b, np.inf) if len(b) > 0 else 0
        else:
            # Step rejected: keep the previous penalty and flag the failure.
            penalty = previous_penalty
            last_iteration_failed = True

    return x, state
| bsd-3-clause |
falbassini/Samples | python/v2.2/create_floodlight_activity_group.py | 3 | 2300 | #!/usr/bin/python
#
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example creates a new activity group for a floodlight configuration.
To get a floodlight configuration ID, run get_advertisers.py.
"""
import argparse
import sys
from apiclient import sample_tools
from oauth2client import client
# Declare command-line flags.
argparser = argparse.ArgumentParser(add_help=False)
for _flag, _flag_help in (
        ('profile_id', 'The ID of the profile to add a placement for'),
        ('floodlight_config_id',
         'The ID of the floodlight config to create a group for')):
    argparser.add_argument(_flag, type=int, help=_flag_help)
def main(argv):
    """Create a 'COUNTER' floodlight activity group for the given config."""
    # Authenticate and construct the DFA Reporting service handle.
    service, flags = sample_tools.init(
        argv, 'dfareporting', 'v2.2', __doc__, __file__, parents=[argparser],
        scope=['https://www.googleapis.com/auth/dfareporting',
               'https://www.googleapis.com/auth/dfatrafficking'])

    try:
        # Construct the floodlight activity group payload and insert it.
        group_body = {
            'name': 'Test Floodlight Activity Group',
            'floodlightConfigurationId': flags.floodlight_config_id,
            'type': 'COUNTER'
        }
        response = service.floodlightActivityGroups().insert(
            profileId=flags.profile_id, body=group_body).execute()
        print ('Created floodlight activity group with ID %s and name "%s".'
               % (response['id'], response['name']))
    except client.AccessTokenRefreshError:
        print ('The credentials have been revoked or expired, please re-run the '
               'application to re-authorize')
# Script entry point: forward raw argv to main().
if __name__ == '__main__':
    main(sys.argv)
| apache-2.0 |
jordiblasco/easybuild-easyblocks | easybuild/easyblocks/generic/__init__.py | 24 | 1376 | ##
# Copyright 2009-2015 Ghent University
# Copyright 2009-2015 Stijn De Weirdt
# Copyright 2010 Dries Verdegem
# Copyright 2010-2015 Kenneth Hoste
# Copyright 2011 Pieter De Baets
# Copyright 2011-2015 Jens Timmerman
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
from pkgutil import extend_path

# This package shares its namespace with other distributions: extend the
# module search path so siblings can contribute modules under the same name.
__path__ = extend_path(__path__, __name__)  # @ReservedAssignment
| gpl-2.0 |
Nitaco/ansible | lib/ansible/modules/network/f5/bigip_gtm_pool.py | 6 | 37181 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: bigip_gtm_pool
short_description: Manages F5 BIG-IP GTM pools
description:
- Manages F5 BIG-IP GTM pools.
version_added: 2.4
options:
state:
description:
- Pool state. When C(present), ensures that the pool is created and enabled.
When C(absent), ensures that the pool is removed from the system. When
C(enabled) or C(disabled), ensures that the pool is enabled or disabled
(respectively) on the remote device.
choices:
- present
- absent
- enabled
- disabled
default: present
preferred_lb_method:
description:
- The load balancing mode that the system tries first.
choices:
- round-robin
- return-to-dns
- ratio
- topology
- static-persistence
- global-availability
- virtual-server-capacity
- least-connections
- lowest-round-trip-time
- fewest-hops
- packet-rate
- cpu
- completion-rate
- quality-of-service
- kilobytes-per-second
- drop-packet
- fallback-ip
- virtual-server-score
alternate_lb_method:
description:
- The load balancing mode that the system tries if the
C(preferred_lb_method) is unsuccessful in picking a pool.
choices:
- round-robin
- return-to-dns
- none
- ratio
- topology
- static-persistence
- global-availability
- virtual-server-capacity
- packet-rate
- drop-packet
- fallback-ip
- virtual-server-score
fallback_lb_method:
description:
- The load balancing mode that the system tries if both the
C(preferred_lb_method) and C(alternate_lb_method)s are unsuccessful
in picking a pool.
choices:
- round-robin
- return-to-dns
- ratio
- topology
- static-persistence
- global-availability
- virtual-server-capacity
- least-connections
- lowest-round-trip-time
- fewest-hops
- packet-rate
- cpu
- completion-rate
- quality-of-service
- kilobytes-per-second
- drop-packet
- fallback-ip
- virtual-server-score
- none
fallback_ip:
description:
- Specifies the IPv4, or IPv6 address of the server to which the system
directs requests when it cannot use one of its pools to do so.
Note that the system uses the fallback IP only if you select the
C(fallback_ip) load balancing method.
type:
description:
- The type of GTM pool that you want to create. On BIG-IP releases
prior to version 12, this parameter is not required. On later versions
of BIG-IP, this is a required parameter.
choices:
- a
- aaaa
- cname
- mx
- naptr
- srv
name:
description:
- Name of the GTM pool.
required: True
partition:
description:
- Device partition to manage resources on.
default: Common
version_added: 2.5
members:
description:
- Members to assign to the pool.
- The order of the members in this list is the order that they will be listed in the pool.
suboptions:
server:
description:
- Name of the server which the pool member is a part of.
required: True
virtual_server:
description:
- Name of the virtual server, associated with the server, that the pool member is a part of.
required: True
version_added: 2.6
monitors:
description:
- Specifies the health monitors that the system currently uses to monitor this resource.
- When C(availability_requirements.type) is C(require), you may only have a single monitor in the
C(monitors) list.
version_added: 2.6
availability_requirements:
description:
- Specifies, if you activate more than one health monitor, the number of health
monitors that must receive successful responses in order for the link to be
considered available.
suboptions:
type:
description:
- Monitor rule type when C(monitors) is specified.
- When creating a new pool, if this value is not specified, the default of 'all' will be used.
choices: ['all', 'at_least', 'require']
at_least:
description:
- Specifies the minimum number of active health monitors that must be successful
before the link is considered up.
- This parameter is only relevant when a C(type) of C(at_least) is used.
- This parameter will be ignored if a type of either C(all) or C(require) is used.
number_of_probes:
description:
- Specifies the minimum number of probes that must succeed for this server to be declared up.
- When creating a new virtual server, if this parameter is specified, then the C(number_of_probers)
parameter must also be specified.
- The value of this parameter should always be B(lower) than, or B(equal to), the value of C(number_of_probers).
- This parameter is only relevant when a C(type) of C(require) is used.
- This parameter will be ignored if a type of either C(all) or C(at_least) is used.
number_of_probers:
description:
- Specifies the number of probers that should be used when running probes.
- When creating a new virtual server, if this parameter is specified, then the C(number_of_probes)
parameter must also be specified.
- The value of this parameter should always be B(higher) than, or B(equal to), the value of C(number_of_probers).
- This parameter is only relevant when a C(type) of C(require) is used.
- This parameter will be ignored if a type of either C(all) or C(at_least) is used.
version_added: 2.6
notes:
- Requires the netaddr Python package on the host. This is as easy as
pip install netaddr.
extends_documentation_fragment: f5
requirements:
- netaddr
author:
- Tim Rupp (@caphrim007)
'''
RETURN = r'''
preferred_lb_method:
description: New preferred load balancing method for the pool.
returned: changed
type: string
sample: topology
alternate_lb_method:
description: New alternate load balancing method for the pool.
returned: changed
type: string
sample: drop-packet
fallback_lb_method:
description: New fallback load balancing method for the pool.
returned: changed
type: string
sample: fewest-hops
fallback_ip:
description: New fallback IP used when load balacing using the C(fallback_ip) method.
returned: changed
type: string
sample: 10.10.10.10
monitors:
description: The new list of monitors for the resource.
returned: changed
type: list
sample: ['/Common/monitor1', '/Common/monitor2']
members:
description: List of members in the pool.
returned: changed
type: complex
contains:
server:
description: The name of the server portion of the member.
returned: changed
type: string
virtual_server:
description: The name of the virtual server portion of the member.
returned: changed
type: string
'''
EXAMPLES = r'''
- name: Create a GTM pool
bigip_gtm_pool:
server: lb.mydomain.com
user: admin
password: secret
name: my_pool
delegate_to: localhost
- name: Disable pool
bigip_gtm_pool:
server: lb.mydomain.com
user: admin
password: secret
state: disabled
name: my_pool
delegate_to: localhost
'''
import copy
import re
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import env_fallback
from distutils.version import LooseVersion
try:
from library.module_utils.network.f5.bigip import HAS_F5SDK
from library.module_utils.network.f5.bigip import F5Client
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import cleanup_tokens
from library.module_utils.network.f5.common import fq_name
from library.module_utils.network.f5.common import f5_argument_spec
try:
from library.module_utils.network.f5.common import iControlUnexpectedHTTPError
from f5.sdk_exception import LazyAttributesRequired
except ImportError:
HAS_F5SDK = False
except ImportError:
from ansible.module_utils.network.f5.bigip import HAS_F5SDK
from ansible.module_utils.network.f5.bigip import F5Client
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import cleanup_tokens
from ansible.module_utils.network.f5.common import fq_name
from ansible.module_utils.network.f5.common import f5_argument_spec
try:
from ansible.module_utils.network.f5.common import iControlUnexpectedHTTPError
from f5.sdk_exception import LazyAttributesRequired
except ImportError:
HAS_F5SDK = False
try:
from netaddr import IPAddress, AddrFormatError
HAS_NETADDR = True
except ImportError:
HAS_NETADDR = False
class Parameters(AnsibleF5Parameters):
    """Shared parameter handling for the GTM pool module."""

    # Maps BIG-IP REST attribute names onto this module's parameter names.
    # NOTE(review): fallbackIp/fallbackIpv4/fallbackIpv6 all collapse onto
    # the single 'fallback_ip' parameter — presumably the attribute name
    # differs across BIG-IP versions; confirm.
    api_map = {
        'loadBalancingMode': 'preferred_lb_method',
        'alternateMode': 'alternate_lb_method',
        'fallbackMode': 'fallback_lb_method',
        'verifyMemberAvailability': 'verify_member_availability',
        'fallbackIpv4': 'fallback_ip',
        'fallbackIpv6': 'fallback_ip',
        'fallbackIp': 'fallback_ip',
        'membersReference': 'members',
        'monitor': 'monitors'
    }

    # Parameters compared to decide whether the device needs an update.
    updatables = [
        'alternate_lb_method',
        'fallback_ip',
        'fallback_lb_method',
        'members',
        'monitors',
        'preferred_lb_method',
        'state',
    ]

    # Parameters reported back in the module result (see to_return below).
    returnables = [
        'alternate_lb_method',
        'fallback_ip',
        'fallback_lb_method',
        'members',
        'monitors',
        'preferred_lb_method',
        'enabled',
        'disabled'
    ]

    # Attribute names that may be written to the BIG-IP API.
    api_attributes = [
        'alternateMode',
        'disabled',
        'enabled',
        'fallbackIp',
        'fallbackIpv4',
        'fallbackIpv6',
        'fallbackMode',
        'loadBalancingMode',
        'members',
        'verifyMemberAvailability',

        # The monitor attribute is not included here, because it can break the
        # API calls to the device. If this bug is ever fixed, uncomment this code.
        #
        # monitor
    ]

    def to_return(self):
        """Collect the returnable parameters, filtering out unset values."""
        result = {}
        for returnable in self.returnables:
            result[returnable] = getattr(self, returnable)
        result = self._filter_params(result)
        return result

    @property
    def collection(self):
        """Map the pool ``type`` to its REST collection name (e.g. 'a' -> 'a_s')."""
        type_map = dict(
            a='a_s',
            aaaa='aaaas',
            cname='cnames',
            mx='mxs',
            naptr='naptrs',
            srv='srvs'
        )
        if self._values['type'] is None:
            return None
        wideip_type = self._values['type']
        return type_map[wideip_type]

    @property
    def type(self):
        # Normalize the pool type to a plain str (or None when unset).
        if self._values['type'] is None:
            return None
        return str(self._values['type'])

    @property
    def verify_member_availability(self):
        # API expects the strings 'enabled'/'disabled', not a boolean.
        if self._values['verify_member_availability'] is None:
            return None
        elif self._values['verify_member_availability']:
            return 'enabled'
        else:
            return 'disabled'

    @property
    def fallback_ip(self):
        """Validate and normalize the fallback address; 'any'/'any6' pass through."""
        if self._values['fallback_ip'] is None:
            return None
        if self._values['fallback_ip'] == 'any':
            return 'any'
        if self._values['fallback_ip'] == 'any6':
            return 'any6'
        try:
            address = IPAddress(self._values['fallback_ip'])
            # Both IPv4 and IPv6 return the normalized address string.
            if address.version == 4:
                return str(address.ip)
            elif address.version == 6:
                return str(address.ip)
            return None
        except AddrFormatError:
            # NOTE(review): IPv6 input is accepted above, but this message
            # only mentions IPv4 — consider rewording.
            raise F5ModuleError(
                'The provided fallback address is not a valid IPv4 address'
            )

    @property
    def state(self):
        # 'enabled' implies existence, so treat it as 'present' here.
        if self._values['state'] == 'enabled':
            return 'present'
        return self._values['state']

    @property
    def enabled(self):
        if self._values['enabled'] is None:
            return None
        return True

    @property
    def disabled(self):
        if self._values['disabled'] is None:
            return None
        return True
class ApiParameters(Parameters):
    """Parameters as read back from the BIG-IP REST API."""

    @property
    def members(self):
        """Return member fullPaths sorted by their 'memberOrder' value."""
        result = []
        if self._values['members'] is None or 'items' not in self._values['members']:
            return []
        for item in self._values['members']['items']:
            result.append(dict(item=item['fullPath'], order=item['memberOrder']))
        result = [x['item'] for x in sorted(result, key=lambda k: k['order'])]
        return result

    @property
    def availability_requirement_type(self):
        # Infer the requirement type from the monitor string syntax:
        # 'min N of { ... }' -> at_least, 'require N from M { ... }' -> require,
        # anything else -> all.
        if self._values['monitors'] is None:
            return None
        if 'min ' in self._values['monitors']:
            return 'at_least'
        elif 'require ' in self._values['monitors']:
            return 'require'
        else:
            return 'all'

    @property
    def monitors_list(self):
        """Return the sorted monitor fullPaths parsed out of the monitor string."""
        if self._values['monitors'] is None:
            return []
        try:
            # fullPaths look like '/Partition/name'; stop at whitespace or '}'.
            result = re.findall(r'/\w+/[^\s}]+', self._values['monitors'])
            result.sort()
            return result
        except Exception:
            # Fall back to the raw value if parsing fails for any reason.
            return self._values['monitors']

    @property
    def monitors(self):
        """Rebuild the canonical monitor string from the parsed monitor list."""
        if self._values['monitors'] is None:
            return None
        if self._values['monitors'] == 'default':
            return 'default'
        monitors = [fq_name(self.partition, x) for x in self.monitors_list]
        if self.availability_requirement_type == 'at_least':
            monitors = ' '.join(monitors)
            result = 'min {0} of {{ {1} }}'.format(self.at_least, monitors)
        elif self.availability_requirement_type == 'require':
            monitors = ' '.join(monitors)
            result = 'require {0} from {1} {{ {2} }}'.format(self.number_of_probes, self.number_of_probers, monitors)
        else:
            result = ' and '.join(monitors).strip()

        return result

    @property
    def number_of_probes(self):
        """Return the probes value from the monitor string.

        The monitor string for a Require monitor looks like this.

            require 1 from 2 { /Common/tcp }

        This method parses out the first of the numeric values. This value
        represents the "probes" value that can be updated in the module.

        Returns:
            The probes value (as a string) if found. None otherwise.
        """
        if self._values['monitors'] is None:
            return None
        pattern = r'require\s+(?P<probes>\d+)\s+from'
        matches = re.search(pattern, self._values['monitors'])
        if matches is None:
            return None
        return matches.group('probes')

    @property
    def number_of_probers(self):
        """Return the probers value from the monitor string.

        The monitor string for a Require monitor looks like this.

            require 1 from 2 { /Common/tcp }

        This method parses out the second of the numeric values. This value
        represents the "probers" value that can be updated in the module.

        Returns:
            The probers value (as a string) if found. None otherwise.
        """
        if self._values['monitors'] is None:
            return None
        pattern = r'require\s+\d+\s+from\s+(?P<probers>\d+)\s+'
        matches = re.search(pattern, self._values['monitors'])
        if matches is None:
            return None
        return matches.group('probers')

    @property
    def at_least(self):
        """Return the 'at least' value from the monitor string.

        The monitor string for an "at least" monitor looks like this.

            min 1 of { /Common/gateway_icmp }

        This method parses out the numeric value. This value represents
        the "at_least" value that can be updated in the module.

        Returns:
            The at_least value (as a string) if found. None otherwise.
        """
        if self._values['monitors'] is None:
            return None
        pattern = r'min\s+(?P<least>\d+)\s+of\s+'
        matches = re.search(pattern, self._values['monitors'])
        if matches is None:
            return None
        return matches.group('least')
class ModuleParameters(Parameters):
    """Parameters as supplied by the user to the Ansible module."""
    def _get_availability_value(self, type):
        # Shared accessor for the integer sub-options of
        # 'availability_requirements'; None when not supplied.
        if self._values['availability_requirements'] is None:
            return None
        if self._values['availability_requirements'][type] is None:
            return None
        return int(self._values['availability_requirements'][type])
    @property
    def members(self):
        # Validate the per-member sub-options and collapse each member into a
        # fully-qualified '<server>:<virtual_server>' name, de-duplicated
        # while preserving the user-supplied order.
        if self._values['members'] is None:
            return None
        if len(self._values['members']) == 1 and self._values['members'][0] == '':
            # A single empty string means "remove all members".
            return []
        result = []
        for member in self._values['members']:
            if 'server' not in member:
                raise F5ModuleError(
                    "One of the provided members is missing a 'server' sub-option."
                )
            if 'virtual_server' not in member:
                raise F5ModuleError(
                    "One of the provided members is missing a 'virtual_server' sub-option."
                )
            name = '{0}:{1}'.format(member['server'], member['virtual_server'])
            name = fq_name(self.partition, name)
            if name in result:
                continue
            result.append(name)
        result = list(result)
        return result
    @property
    def monitors_list(self):
        # Extract fully-qualified monitor names ('/Partition/name'), sorted
        # for stable comparison against the API representation.
        if self._values['monitors'] is None:
            return []
        try:
            result = re.findall(r'/\w+/[^\s}]+', self._values['monitors'])
            result.sort()
            return result
        except Exception:
            # Non-string/unparseable value; return it untouched.
            return self._values['monitors']
    @property
    def monitors(self):
        # Build the tmsh-style monitor expression from the user's monitor
        # list and availability requirements, validating the counts.
        if self._values['monitors'] is None:
            return None
        if len(self._values['monitors']) == 1 and self._values['monitors'][0] == '':
            # A single empty string means "reset to the default monitor".
            return 'default'
        monitors = [fq_name(self.partition, x) for x in self.monitors_list]
        if self.availability_requirement_type == 'at_least':
            if self.at_least > len(self.monitors_list):
                raise F5ModuleError(
                    "The 'at_least' value must not exceed the number of 'monitors'."
                )
            monitors = ' '.join(monitors)
            result = 'min {0} of {{ {1} }}'.format(self.at_least, monitors)
        elif self.availability_requirement_type == 'require':
            monitors = ' '.join(monitors)
            if self.number_of_probes > self.number_of_probers:
                raise F5ModuleError(
                    "The 'number_of_probes' must not exceed the 'number_of_probers'."
                )
            result = 'require {0} from {1} {{ {2} }}'.format(self.number_of_probes, self.number_of_probers, monitors)
        else:
            result = ' and '.join(monitors).strip()
        return result
    @property
    def availability_requirement_type(self):
        if self._values['availability_requirements'] is None:
            return None
        return self._values['availability_requirements']['type']
    @property
    def number_of_probes(self):
        return self._get_availability_value('number_of_probes')
    @property
    def number_of_probers(self):
        return self._get_availability_value('number_of_probers')
    @property
    def at_least(self):
        return self._get_availability_value('at_least')
class Changes(Parameters):
    """Marker base for the computed change sets (see Usable/ReportableChanges)."""
    pass
class UsableChanges(Changes):
    """Changes massaged into the format the BIG-IP API expects."""
    @property
    def monitors(self):
        if self._values['monitors'] is None:
            return None
        return self._values['monitors']
    @property
    def members(self):
        # The API wants each member as a dict with an explicit ordering,
        # derived from the member's position in the user-supplied list.
        results = []
        if self._values['members'] is None:
            return None
        for idx, member in enumerate(self._values['members']):
            result = dict(
                name=member,
                memberOrder=idx
            )
            results.append(result)
        return results
class ReportableChanges(Changes):
    """Changes translated back into the module's input format for reporting."""
    @property
    def members(self):
        results = []
        if self._values['members'] is None:
            return None
        for member in self._values['members']:
            # Members are stored as '<server>:<virtual_server>'; split them
            # back into the separate sub-options the user supplied.
            # NOTE(review): parts[0] should already be fully qualified here,
            # so the extra fq_name() is presumably a no-op — verify.
            parts = member.split(':')
            results.append(dict(
                server=fq_name(self.partition, parts[0]),
                virtual_server=fq_name(self.partition, parts[1])
            ))
        return results
class Difference(object):
    """Computes, property by property, what differs between the desired
    ('want') and current ('have') parameters.
    """
    def __init__(self, want, have=None):
        self.want = want
        self.have = have

    def compare(self, param):
        """Return the changed value for ``param``, or None when unchanged.

        Specialized properties on this class win; everything else falls
        back to a plain inequality check.
        """
        try:
            return getattr(self, param)
        except AttributeError:
            return self.__default(param)

    def __default(self, param):
        # Generic comparison: report the wanted value whenever it differs
        # from -- or is missing on -- the current configuration.
        wanted = getattr(self.want, param)
        try:
            current = getattr(self.have, param)
        except AttributeError:
            return wanted
        return wanted if wanted != current else None

    @property
    def state(self):
        # Enabling/disabling is driven through dedicated API flags rather
        # than the 'state' value itself.
        if self.want.state == 'disabled' and self.have.enabled:
            return dict(disabled=True)
        if self.want.state in ['present', 'enabled'] and self.have.disabled:
            return dict(enabled=True)

    @property
    def monitors(self):
        wanted = self.want.monitors
        current = self.have.monitors
        if wanted is None:
            return None
        if wanted == 'default':
            # Resetting to the default monitor only counts as a change when
            # something non-default is currently configured.
            if current == 'default' or current is None:
                return None
            if len(current) > 0:
                return 'default'
        if current is None or current != wanted:
            return wanted
class ModuleManager(object):
    """Top-level dispatcher: picks the Typed or Untyped manager by TMOS version."""
    def __init__(self, *args, **kwargs):
        self.kwargs = kwargs
        self.client = kwargs.get('client', None)
    def exec_module(self):
        if not self.gtm_provisioned():
            raise F5ModuleError(
                "GTM must be provisioned to use this module."
            )
        # BIG-IP 12.x split GTM pools into typed collections (a, aaaa, ...);
        # older versions expose a single untyped pool collection.
        if self.version_is_less_than_12():
            manager = self.get_manager('untyped')
        else:
            manager = self.get_manager('typed')
        return manager.exec_module()
    def get_manager(self, type):
        if type == 'typed':
            return TypedManager(**self.kwargs)
        elif type == 'untyped':
            return UntypedManager(**self.kwargs)
    def version_is_less_than_12(self):
        version = self.client.api.tmos_version
        if LooseVersion(version) < LooseVersion('12.0.0'):
            return True
        else:
            return False
    def gtm_provisioned(self):
        # The db key 'provisioned.cpu.gtm' is non-zero iff GTM is provisioned.
        resource = self.client.api.tm.sys.dbs.db.load(
            name='provisioned.cpu.gtm'
        )
        if int(resource.value) == 0:
            return False
        return True
class BaseManager(object):
    """Shared create/update/delete state machine for GTM pools.

    Subclasses (TypedManager/UntypedManager) provide the version-specific
    API accessors: exists, create_on_device, update_on_device,
    read_current_from_device and remove_from_device.
    """
    def __init__(self, *args, **kwargs):
        self.module = kwargs.get('module', None)
        self.client = kwargs.get('client', None)
        self.have = None
        self.want = ModuleParameters(params=self.module.params)
        self.changes = UsableChanges()
    def _set_changed_options(self):
        # On create: every explicitly-supplied option counts as a change.
        changed = {}
        for key in Parameters.returnables:
            if getattr(self.want, key) is not None:
                changed[key] = getattr(self.want, key)
        if changed:
            self.changes = UsableChanges(params=changed)
    def _update_changed_options(self):
        # On update: diff want vs have and record only what differs.
        # Returns True when anything changed.
        diff = Difference(self.want, self.have)
        updatables = Parameters.updatables
        changed = dict()
        for k in updatables:
            change = diff.compare(k)
            if change is None:
                continue
            else:
                if isinstance(change, dict):
                    # e.g. the 'state' diff yields {'enabled': True} directly.
                    changed.update(change)
                else:
                    changed[k] = change
        if changed:
            self.changes = UsableChanges(params=changed)
            return True
        return False
    def exec_module(self):
        """Run the requested state transition and return the Ansible result dict."""
        changed = False
        result = dict()
        state = self.want.state
        try:
            if state in ["present", "disabled"]:
                changed = self.present()
            elif state == "absent":
                changed = self.absent()
        except iControlUnexpectedHTTPError as e:
            raise F5ModuleError(str(e))
        reportable = ReportableChanges(params=self.changes.to_return())
        changes = reportable.to_return()
        result.update(**changes)
        result.update(dict(changed=changed))
        self._announce_deprecations(result)
        return result
    def _announce_deprecations(self, result):
        # Surface any deprecation warnings collected during parameter handling.
        warnings = result.pop('__warnings', [])
        for warning in warnings:
            self.module.deprecate(
                msg=warning['msg'],
                version=warning['version']
            )
    def present(self):
        if self.exists():
            return self.update()
        else:
            return self.create()
    def absent(self):
        if self.exists():
            return self.remove()
        return False
    def should_update(self):
        result = self._update_changed_options()
        if result:
            return True
        return False
    def update(self):
        self.have = self.read_current_from_device()
        if not self.should_update():
            return False
        if self.module.check_mode:
            # Report the pending change without touching the device.
            return True
        self.update_on_device()
        return True
    def create(self):
        # Translate the requested state into the enabled/disabled API flags.
        if self.want.state == 'disabled':
            self.want.update({'disabled': True})
        elif self.want.state in ['present', 'enabled']:
            self.want.update({'enabled': True})
        self._set_changed_options()
        if self.want.availability_requirement_type == 'require' and len(self.want.monitors_list) > 1:
            raise F5ModuleError(
                "Only one monitor may be specified when using an availability_requirement type of 'require'"
            )
        if self.module.check_mode:
            return True
        self.create_on_device()
        if self.exists():
            return True
        else:
            raise F5ModuleError("Failed to create the GTM pool")
    def remove(self):
        if self.module.check_mode:
            return True
        self.remove_from_device()
        if self.exists():
            raise F5ModuleError("Failed to delete the GTM pool")
        return True
    def update_monitors_on_device(self):
        """Updates the monitors string on a virtual server
        There is a long-standing bug in GTM virtual servers where the monitor value
        is a string that includes braces. These braces cause the REST API to panic and
        fail to update or create any resources that have an "at_least" or "require"
        set of availability_requirements.
        This method exists to do a tmsh command to cause the update to take place on
        the device.
        Preferably, this method can be removed and the bug be fixed. The API should
        be working, obviously, but the more concerning issue is if tmsh commands change
        over time, breaking this method.
        """
        # NOTE(review): this uses self.want.type, which the UntypedManager
        # (pre-12.x, type optional) may not have set -- verify on old TMOS.
        command = 'tmsh modify gtm pool {0} /{1}/{2} monitor {3}'.format(
            self.want.type, self.want.partition, self.want.name, self.want.monitors
        )
        output = self.client.api.tm.util.bash.exec_cmd(
            'run',
            utilCmdArgs='-c "{0}"'.format(command)
        )
        try:
            # Any output from tmsh indicates an error message.
            if hasattr(output, 'commandResult'):
                if len(output.commandResult.strip()) > 0:
                    raise F5ModuleError(output.commandResult)
        except (AttributeError, NameError, LazyAttributesRequired):
            pass
        return True
class TypedManager(BaseManager):
    """Manager for BIG-IP >= 12.x, where GTM pools live in per-type collections."""
    def __init__(self, *args, **kwargs):
        super(TypedManager, self).__init__(**kwargs)
        if self.want.type is None:
            raise F5ModuleError(
                "The 'type' option is required for BIG-IP instances "
                "greater than or equal to 12.x"
            )
    def present(self):
        # Validate the pool type before delegating to the shared state machine.
        types = [
            'a', 'aaaa', 'cname', 'mx', 'naptr', 'srv'
        ]
        if self.want.type is None:
            raise F5ModuleError(
                "A pool 'type' must be specified"
            )
        elif self.want.type not in types:
            raise F5ModuleError(
                "The specified pool type is invalid"
            )
        return super(TypedManager, self).present()
    def exists(self):
        # self.want.collection maps the pool type to its REST sub-collection
        # (defined on the Parameters class, outside this view).
        pools = self.client.api.tm.gtm.pools
        collection = getattr(pools, self.want.collection)
        resource = getattr(collection, self.want.type)
        result = resource.exists(
            name=self.want.name,
            partition=self.want.partition
        )
        return result
    def update_on_device(self):
        params = self.changes.api_params()
        pools = self.client.api.tm.gtm.pools
        collection = getattr(pools, self.want.collection)
        resource = getattr(collection, self.want.type)
        result = resource.load(
            name=self.want.name,
            partition=self.want.partition
        )
        if params:
            result.modify(**params)
        if self.want.monitors:
            # Monitors must go through tmsh; see update_monitors_on_device.
            self.update_monitors_on_device()
    def read_current_from_device(self):
        pools = self.client.api.tm.gtm.pools
        collection = getattr(pools, self.want.collection)
        resource = getattr(collection, self.want.type)
        result = resource.load(
            name=self.want.name,
            partition=self.want.partition,
            requests_params=dict(
                params=dict(
                    expandSubcollections='true'
                )
            )
        )
        result = result.attrs
        return ApiParameters(params=result)
    def create_on_device(self):
        params = self.changes.api_params()
        pools = self.client.api.tm.gtm.pools
        collection = getattr(pools, self.want.collection)
        resource = getattr(collection, self.want.type)
        resource.create(
            name=self.want.name,
            partition=self.want.partition,
            **params
        )
        if self.want.monitors:
            # Monitors must go through tmsh; see update_monitors_on_device.
            self.update_monitors_on_device()
    def remove_from_device(self):
        pools = self.client.api.tm.gtm.pools
        collection = getattr(pools, self.want.collection)
        resource = getattr(collection, self.want.type)
        resource = resource.load(
            name=self.want.name,
            partition=self.want.partition
        )
        if resource:
            resource.delete()
class UntypedManager(BaseManager):
    """Manager for BIG-IP < 12.x, where all GTM pools share one collection."""
    def exists(self):
        result = self.client.api.tm.gtm.pools.pool.exists(
            name=self.want.name,
            partition=self.want.partition
        )
        return result
    def update_on_device(self):
        params = self.changes.api_params()
        resource = self.client.api.tm.gtm.pools.pool.load(
            name=self.want.name,
            partition=self.want.partition
        )
        resource.modify(**params)
        if self.want.monitors:
            # Monitors must go through tmsh; see update_monitors_on_device.
            self.update_monitors_on_device()
    def read_current_from_device(self):
        resource = self.client.api.tm.gtm.pools.pool.load(
            name=self.want.name,
            partition=self.want.partition
        )
        result = resource.attrs
        return ApiParameters(params=result)
    def create_on_device(self):
        params = self.changes.api_params()
        self.client.api.tm.gtm.pools.pool.create(
            name=self.want.name,
            partition=self.want.partition,
            **params
        )
        if self.want.monitors:
            # Monitors must go through tmsh; see update_monitors_on_device.
            self.update_monitors_on_device()
    def remove_from_device(self):
        resource = self.client.api.tm.gtm.pools.pool.load(
            name=self.want.name,
            partition=self.want.partition
        )
        resource.delete()
class ArgumentSpec(object):
    """Builds the AnsibleModule argument spec for the GTM pool module."""
    def __init__(self):
        self.states = ['absent', 'present', 'enabled', 'disabled']
        self.preferred_lb_methods = [
            'round-robin', 'return-to-dns', 'ratio', 'topology',
            'static-persistence', 'global-availability',
            'virtual-server-capacity', 'least-connections',
            'lowest-round-trip-time', 'fewest-hops', 'packet-rate', 'cpu',
            'completion-rate', 'quality-of-service', 'kilobytes-per-second',
            'drop-packet', 'fallback-ip', 'virtual-server-score'
        ]
        self.alternate_lb_methods = [
            'round-robin', 'return-to-dns', 'none', 'ratio', 'topology',
            'static-persistence', 'global-availability',
            'virtual-server-capacity', 'packet-rate', 'drop-packet',
            'fallback-ip', 'virtual-server-score'
        ]
        # Fallback accepts everything 'preferred' does, plus 'none'.
        self.fallback_lb_methods = copy.copy(self.preferred_lb_methods)
        self.fallback_lb_methods.append('none')
        self.types = [
            'a', 'aaaa', 'cname', 'mx', 'naptr', 'srv'
        ]
        self.supports_check_mode = True
        argument_spec = dict(
            name=dict(required=True),
            state=dict(
                default='present',
                choices=self.states,
            ),
            preferred_lb_method=dict(
                choices=self.preferred_lb_methods,
            ),
            fallback_lb_method=dict(
                choices=self.fallback_lb_methods,
            ),
            alternate_lb_method=dict(
                choices=self.alternate_lb_methods,
            ),
            fallback_ip=dict(),
            type=dict(
                choices=self.types
            ),
            partition=dict(
                default='Common',
                fallback=(env_fallback, ['F5_PARTITION'])
            ),
            members=dict(
                type='list',
                options=dict(
                    server=dict(required=True),
                    virtual_server=dict(required=True)
                )
            ),
            availability_requirements=dict(
                type='dict',
                options=dict(
                    type=dict(
                        choices=['all', 'at_least', 'require'],
                        required=True
                    ),
                    at_least=dict(type='int'),
                    number_of_probes=dict(type='int'),
                    number_of_probers=dict(type='int')
                ),
                mutually_exclusive=[
                    ['at_least', 'number_of_probes'],
                    ['at_least', 'number_of_probers'],
                ],
                required_if=[
                    ['type', 'at_least', ['at_least']],
                    ['type', 'require', ['number_of_probes', 'number_of_probers']]
                ]
            ),
            monitors=dict(type='list'),
        )
        # Merge the module-specific spec on top of the shared F5 arguments.
        self.argument_spec = {}
        self.argument_spec.update(f5_argument_spec)
        self.argument_spec.update(argument_spec)
        # A 'fallback-ip' lb method in any slot requires fallback_ip.
        self.required_if = [
            ['preferred_lb_method', 'fallback-ip', ['fallback_ip']],
            ['fallback_lb_method', 'fallback-ip', ['fallback_ip']],
            ['alternate_lb_method', 'fallback-ip', ['fallback_ip']]
        ]
def main():
    """Module entry point: parse arguments, run the manager, report the result."""
    spec = ArgumentSpec()
    module = AnsibleModule(
        argument_spec=spec.argument_spec,
        supports_check_mode=spec.supports_check_mode,
        required_if=spec.required_if
    )
    if not HAS_F5SDK:
        module.fail_json(msg="The python f5-sdk module is required")
    if not HAS_NETADDR:
        module.fail_json(msg="The python netaddr module is required")

    # Pre-bind so the except block can safely test whether a client was
    # created; previously, an F5ModuleError raised by F5Client(...) caused a
    # NameError on 'client' here, masking the real error.
    client = None
    try:
        client = F5Client(**module.params)
        mm = ModuleManager(module=module, client=client)
        results = mm.exec_module()
        cleanup_tokens(client)
        module.exit_json(**results)
    except F5ModuleError as ex:
        if client is not None:
            cleanup_tokens(client)
        module.fail_json(msg=str(ex))


if __name__ == '__main__':
    main()
| gpl-3.0 |
TaskEvolution/Task-Coach-Evolution | taskcoach/taskcoachlib/tools/anonymize.py | 1 | 2444 | '''
Task Coach - Your friendly task manager
Copyright (C) 2011 Task Coach developers <developers@taskcoach.org>
Task Coach is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Task Coach is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
from xml.etree import ElementTree as ET
import os
def anonymize(filename):
    ''' Anonymize the file specified by the filename by reading its contents,
        replacing the contents with X's and saving the anonymized contents to
        a copy of the file. Returns the name of the anonymized copy. '''

    def anonymize_string(string):
        ''' Return an anonymized version of the string. '''
        return u'X' * len(string)

    def anonymize_text(text):
        ''' Return an anonymized version of the text, keeping the line
            breaks. '''
        return '\n'.join([anonymize_string(line) for line in text.split('\n')])

    def anonymize_node(node):
        ''' Recursively anonymize the node and all of its child nodes. '''
        for child in node:
            anonymize_node(child)
        if 'subject' in node.attrib:
            node.attrib['subject'] = anonymize_string(node.attrib['subject'])
        if node.tag in ('description', 'data') and node.text:
            node.text = anonymize_text(node.text)
        if node.tag == 'data':
            node.attrib['extension'] = \
                anonymize_string(node.attrib['extension'])
        if node.tag == 'property' and 'name' in node.attrib and \
           node.attrib['name'] == 'username':
            node.text = 'XXX'  # pylint: disable=W0511
        if node.tag == 'attachment' and 'location' in node.attrib:
            node.attrib['location'] = anonymize_string(node.attrib['location'])

    # ET.parse accepts a filename and closes the file itself. The previous
    # code used the Python-2-only ``file`` builtin and never closed the
    # handle it opened.
    tree = ET.parse(filename)
    anonymize_node(tree.getroot())
    name, ext = os.path.splitext(filename)
    anonymized_filename = name + '.anonymized' + ext
    tree.write(anonymized_filename)
    return anonymized_filename
| gpl-3.0 |
twalthr/flink | flink-python/pyflink/common/typeinfo.py | 6 | 36879 | ################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
import calendar
import datetime
import time
from enum import Enum
from typing import List, Union
from py4j.java_gateway import JavaClass, JavaObject
from pyflink.java_gateway import get_gateway
__all__ = ['TypeInformation', 'Types']
class TypeInformation(object):
    """
    TypeInformation is the core class of Flink's type system. Flink requires a type information
    for all types that are used as input or return type of a user function. This type information
    class acts as the tool to generate serializers and comparators, and to perform semantic checks
    such as whether the fields that are used as join/grouping keys actually exist.
    The type information also bridges between the programming languages object model and a logical
    flat schema. It maps fields from the types to columns (fields) in a flat schema. Not all fields
    from a type are mapped to a separate fields in the flat schema and often, entire types are
    mapped to one field. It is important to notice that the schema must hold for all instances of a
    type. For that reason, elements in lists and arrays are not assigned to individual fields, but
    the lists and arrays are considered to be one field in total, to account for different lengths
    in the arrays.
    a) Basic types are indivisible and are considered as a single field.
    b) Arrays and collections are one field.
    c) Tuples represents as many fields as the class has fields.
    To represent this properly, each type has an arity (the number of fields it contains directly),
    and a total number of fields (number of fields in the entire schema of this type, including
    nested types).
    """
    def __init__(self):
        # Cached py4j handle to the Java-side TypeInformation; resolved lazily
        # by subclasses in get_java_type_info().
        self._j_typeinfo = None
    def get_java_type_info(self) -> JavaObject:
        """Return the corresponding Java TypeInformation object (subclass hook)."""
        pass
    def need_conversion(self):
        """
        Does this type need to conversion between Python object and internal Wrapper object.
        """
        return False
    def to_internal_type(self, obj):
        """
        Converts a Python object into an internal object.
        """
        return obj
    def from_internal_type(self, obj):
        """
        Converts an internal object into a native Python object.
        """
        return obj
class BasicType(Enum):
    """Enumeration of the basic types; values are the Java-side type names."""
    STRING = "String"
    BYTE = "Byte"
    BOOLEAN = "Boolean"
    SHORT = "Short"
    INT = "Integer"
    LONG = "Long"
    FLOAT = "Float"
    DOUBLE = "Double"
    CHAR = "Char"
    BIG_INT = "BigInteger"
    BIG_DEC = "BigDecimal"
class BasicTypeInfo(TypeInformation):
    """
    Type information for primitive types (int, long, double, byte, ...), String, BigInteger,
    and BigDecimal.
    """
    def __init__(self, basic_type: BasicType):
        self._basic_type = basic_type
        super(BasicTypeInfo, self).__init__()
    def get_java_type_info(self) -> JavaObject:
        # Lazily resolve and cache the matching Java BasicTypeInfo constant.
        if not self._j_typeinfo:
            j_basic_type_info = \
                get_gateway().jvm.org.apache.flink.api.common.typeinfo.BasicTypeInfo
            constant_names = {
                BasicType.STRING: 'STRING_TYPE_INFO',
                BasicType.BYTE: 'BYTE_TYPE_INFO',
                BasicType.BOOLEAN: 'BOOLEAN_TYPE_INFO',
                BasicType.SHORT: 'SHORT_TYPE_INFO',
                BasicType.INT: 'INT_TYPE_INFO',
                BasicType.LONG: 'LONG_TYPE_INFO',
                BasicType.FLOAT: 'FLOAT_TYPE_INFO',
                BasicType.DOUBLE: 'DOUBLE_TYPE_INFO',
                BasicType.CHAR: 'CHAR_TYPE_INFO',
                BasicType.BIG_INT: 'BIG_INT_TYPE_INFO',
                BasicType.BIG_DEC: 'BIG_DEC_TYPE_INFO',
            }
            if self._basic_type not in constant_names:
                raise TypeError("Invalid BasicType %s." % self._basic_type)
            self._j_typeinfo = getattr(j_basic_type_info, constant_names[self._basic_type])
        return self._j_typeinfo
    def __eq__(self, o) -> bool:
        # Two BasicTypeInfos are equal iff they wrap the same BasicType.
        return isinstance(o, BasicTypeInfo) and self._basic_type == o._basic_type
    def __repr__(self):
        return self._basic_type.value
    @staticmethod
    def STRING_TYPE_INFO():
        return BasicTypeInfo(BasicType.STRING)
    @staticmethod
    def BOOLEAN_TYPE_INFO():
        return BasicTypeInfo(BasicType.BOOLEAN)
    @staticmethod
    def BYTE_TYPE_INFO():
        return BasicTypeInfo(BasicType.BYTE)
    @staticmethod
    def SHORT_TYPE_INFO():
        return BasicTypeInfo(BasicType.SHORT)
    @staticmethod
    def INT_TYPE_INFO():
        return BasicTypeInfo(BasicType.INT)
    @staticmethod
    def LONG_TYPE_INFO():
        return BasicTypeInfo(BasicType.LONG)
    @staticmethod
    def FLOAT_TYPE_INFO():
        return BasicTypeInfo(BasicType.FLOAT)
    @staticmethod
    def DOUBLE_TYPE_INFO():
        return BasicTypeInfo(BasicType.DOUBLE)
    @staticmethod
    def CHAR_TYPE_INFO():
        return BasicTypeInfo(BasicType.CHAR)
    @staticmethod
    def BIG_INT_TYPE_INFO():
        return BasicTypeInfo(BasicType.BIG_INT)
    @staticmethod
    def BIG_DEC_TYPE_INFO():
        return BasicTypeInfo(BasicType.BIG_DEC)
class SqlTimeTypeInfo(TypeInformation):
    """
    SqlTimeTypeInfo enables users to get Sql Time TypeInfo.
    """
    # Factories for the SQL date/time type informations; the concrete
    # Date/Time/TimestampTypeInfo classes are defined later in this module.
    @staticmethod
    def DATE():
        return DateTypeInfo()
    @staticmethod
    def TIME():
        return TimeTypeInfo()
    @staticmethod
    def TIMESTAMP():
        return TimestampTypeInfo()
class PrimitiveArrayTypeInfo(TypeInformation):
    """
    A TypeInformation for arrays of primitive types (int, long, double, ...).
    Supports the creation of dedicated efficient serializers for these types.
    """
    def __init__(self, element_type: TypeInformation):
        # element_type: TypeInformation of the (unboxed) array elements.
        self._element_type = element_type
        super(PrimitiveArrayTypeInfo, self).__init__()
    def get_java_type_info(self) -> JavaObject:
        # Lazily resolve and cache the matching Java PrimitiveArrayTypeInfo
        # constant; only the fixed set of primitive element types is allowed.
        if not self._j_typeinfo:
            JPrimitiveArrayTypeInfo = get_gateway().jvm.org.apache.flink.api.common.typeinfo \
                .PrimitiveArrayTypeInfo
            if self._element_type == Types.BOOLEAN():
                self._j_typeinfo = JPrimitiveArrayTypeInfo.BOOLEAN_PRIMITIVE_ARRAY_TYPE_INFO
            elif self._element_type == Types.BYTE():
                self._j_typeinfo = JPrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO
            elif self._element_type == Types.SHORT():
                self._j_typeinfo = JPrimitiveArrayTypeInfo.SHORT_PRIMITIVE_ARRAY_TYPE_INFO
            elif self._element_type == Types.INT():
                self._j_typeinfo = JPrimitiveArrayTypeInfo.INT_PRIMITIVE_ARRAY_TYPE_INFO
            elif self._element_type == Types.LONG():
                self._j_typeinfo = JPrimitiveArrayTypeInfo.LONG_PRIMITIVE_ARRAY_TYPE_INFO
            elif self._element_type == Types.FLOAT():
                self._j_typeinfo = JPrimitiveArrayTypeInfo.FLOAT_PRIMITIVE_ARRAY_TYPE_INFO
            elif self._element_type == Types.DOUBLE():
                self._j_typeinfo = JPrimitiveArrayTypeInfo.DOUBLE_PRIMITIVE_ARRAY_TYPE_INFO
            elif self._element_type == Types.CHAR():
                self._j_typeinfo = JPrimitiveArrayTypeInfo.CHAR_PRIMITIVE_ARRAY_TYPE_INFO
            else:
                raise TypeError("Invalid element type for a primitive array.")
        return self._j_typeinfo
    def __eq__(self, o) -> bool:
        if isinstance(o, PrimitiveArrayTypeInfo):
            return self._element_type == o._element_type
        return False
    def __repr__(self) -> str:
        return "PrimitiveArrayTypeInfo<%s>" % self._element_type
class BasicArrayTypeInfo(TypeInformation):
    """
    A TypeInformation for arrays of boxed primitive types (Integer, Long, Double, ...).
    Supports the creation of dedicated efficient serializers for these types.
    """
    def __init__(self, element_type: TypeInformation):
        # element_type: TypeInformation of the (boxed) array elements.
        self._element_type = element_type
        super(BasicArrayTypeInfo, self).__init__()
    def get_java_type_info(self) -> JavaObject:
        # Lazily resolve and cache the matching Java BasicArrayTypeInfo
        # constant; only the fixed set of basic element types is allowed.
        if not self._j_typeinfo:
            JBasicArrayTypeInfo = get_gateway().jvm.org.apache.flink.api.common.typeinfo \
                .BasicArrayTypeInfo
            if self._element_type == Types.BOOLEAN():
                self._j_typeinfo = JBasicArrayTypeInfo.BOOLEAN_ARRAY_TYPE_INFO
            elif self._element_type == Types.BYTE():
                self._j_typeinfo = JBasicArrayTypeInfo.BYTE_ARRAY_TYPE_INFO
            elif self._element_type == Types.SHORT():
                self._j_typeinfo = JBasicArrayTypeInfo.SHORT_ARRAY_TYPE_INFO
            elif self._element_type == Types.INT():
                self._j_typeinfo = JBasicArrayTypeInfo.INT_ARRAY_TYPE_INFO
            elif self._element_type == Types.LONG():
                self._j_typeinfo = JBasicArrayTypeInfo.LONG_ARRAY_TYPE_INFO
            elif self._element_type == Types.FLOAT():
                self._j_typeinfo = JBasicArrayTypeInfo.FLOAT_ARRAY_TYPE_INFO
            elif self._element_type == Types.DOUBLE():
                self._j_typeinfo = JBasicArrayTypeInfo.DOUBLE_ARRAY_TYPE_INFO
            elif self._element_type == Types.CHAR():
                self._j_typeinfo = JBasicArrayTypeInfo.CHAR_ARRAY_TYPE_INFO
            elif self._element_type == Types.STRING():
                self._j_typeinfo = JBasicArrayTypeInfo.STRING_ARRAY_TYPE_INFO
            else:
                # Fixed: the previous message said "primitive array", a
                # copy-paste from PrimitiveArrayTypeInfo; this is a basic array.
                raise TypeError("Invalid element type for a basic array.")
        return self._j_typeinfo
    def __eq__(self, o) -> bool:
        if isinstance(o, BasicArrayTypeInfo):
            return self._element_type == o._element_type
        return False
    def __repr__(self):
        return "BasicArrayTypeInfo<%s>" % self._element_type
class ObjectArrayTypeInfo(TypeInformation):
    """
    A TypeInformation for arrays of non-primitive types.
    """
    def __init__(self, element_type: TypeInformation):
        # element_type: TypeInformation of the array elements.
        self._element_type = element_type
        super(ObjectArrayTypeInfo, self).__init__()
    def get_java_type_info(self) -> JavaObject:
        # Lazily build and cache the Java-side object array type via
        # Types.OBJECT_ARRAY, forwarding the element's Java TypeInformation.
        if not self._j_typeinfo:
            JTypes = get_gateway().jvm.org.apache.flink.api.common.typeinfo.Types
            self._j_typeinfo = JTypes.OBJECT_ARRAY(self._element_type.get_java_type_info())
        return self._j_typeinfo
    def __eq__(self, o) -> bool:
        if isinstance(o, ObjectArrayTypeInfo):
            return self._element_type == o._element_type
        return False
    def __repr__(self):
        return "ObjectArrayTypeInfo<%s>" % self._element_type
class PickledBytesTypeInfo(TypeInformation):
    """
    A PickledBytesTypeInfo indicates the data is a primitive byte array generated by pickle
    serializer.
    """
    def get_java_type_info(self) -> JavaObject:
        # Lazily fetch and cache the singleton Java-side PickledByteArrayTypeInfo.
        if not self._j_typeinfo:
            self._j_typeinfo = get_gateway().jvm.org.apache.flink.streaming.api.typeinfo.python\
                .PickledByteArrayTypeInfo.PICKLED_BYTE_ARRAY_TYPE_INFO
        return self._j_typeinfo
    def __eq__(self, o: object) -> bool:
        # All instances are interchangeable; equality is by type only.
        return isinstance(o, PickledBytesTypeInfo)
    def __repr__(self):
        return "PickledByteArrayTypeInfo"
class RowTypeInfo(TypeInformation):
"""
TypeInformation for Row.
"""
def __init__(self, field_types: List[TypeInformation], field_names: List[str] = None):
self._field_types = field_types
self._field_names = field_names
self._need_conversion = [f.need_conversion() if isinstance(f, TypeInformation) else None
for f in self._field_types]
self._need_serialize_any_field = any(self._need_conversion)
super(RowTypeInfo, self).__init__()
def get_field_names(self) -> List[str]:
if not self._field_names:
j_field_names = self.get_java_type_info().getFieldNames()
self._field_names = [name for name in j_field_names]
return self._field_names
def get_field_index(self, field_name: str) -> int:
if self._field_names:
return self._field_names.index(field_name)
return -1
def get_field_types(self) -> List[TypeInformation]:
return self._field_types
def get_java_type_info(self) -> JavaObject:
if not self._j_typeinfo:
j_types_array = get_gateway()\
.new_array(get_gateway().jvm.org.apache.flink.api.common.typeinfo.TypeInformation,
len(self._field_types))
for i in range(len(self._field_types)):
field_type = self._field_types[i]
if isinstance(field_type, TypeInformation):
j_types_array[i] = field_type.get_java_type_info()
if self._field_names is None:
self._j_typeinfo = get_gateway().jvm\
.org.apache.flink.api.java.typeutils.RowTypeInfo(j_types_array)
else:
j_names_array = get_gateway().new_array(get_gateway().jvm.java.lang.String,
len(self._field_names))
for i in range(len(self._field_names)):
j_names_array[i] = self._field_names[i]
self._j_typeinfo = get_gateway().jvm\
.org.apache.flink.api.java.typeutils.RowTypeInfo(j_types_array, j_names_array)
return self._j_typeinfo
def __eq__(self, other) -> bool:
if isinstance(other, RowTypeInfo):
return self._field_types == other._field_types
return False
def __repr__(self) -> str:
if self._field_names:
return "RowTypeInfo(%s)" % ', '.join([field_name + ': ' + str(field_type)
for field_name, field_type in
zip(self.get_field_names(),
self.get_field_types())])
else:
return "RowTypeInfo(%s)" % ', '.join(
[str(field_type) for field_type in self._field_types])
def need_conversion(self):
return True
def to_internal_type(self, obj):
    """
    Convert a row-like value (dict, tuple/list, or object with ``__dict__``)
    into the internal tuple representation.

    Returns None for None input; raises ValueError for unsupported inputs.
    """
    if obj is None:
        return
    if self._need_serialize_any_field:
        # Only calling to_internal_type function for fields that need conversion
        if isinstance(obj, dict):
            # Fields are looked up by name; missing keys become None.
            return tuple(f.to_internal_type(obj.get(n)) if c else obj.get(n)
                         for n, f, c in zip(self.get_field_names(), self._field_types,
                                            self._need_conversion))
        elif isinstance(obj, (tuple, list)):
            # Positional values are paired with field types in order.
            return tuple(f.to_internal_type(v) if c else v
                         for f, v, c in zip(self._field_types, obj, self._need_conversion))
        elif hasattr(obj, "__dict__"):
            d = obj.__dict__
            return tuple(f.to_internal_type(d.get(n)) if c else d.get(n)
                         for n, f, c in zip(self.get_field_names(), self._field_types,
                                            self._need_conversion))
        else:
            raise ValueError("Unexpected tuple %r with RowTypeInfo" % obj)
    else:
        # No field needs conversion: just normalize the container to a tuple.
        if isinstance(obj, dict):
            return tuple(obj.get(n) for n in self.get_field_names())
        elif isinstance(obj, (list, tuple)):
            return tuple(obj)
        elif hasattr(obj, "__dict__"):
            d = obj.__dict__
            return tuple(d.get(n) for n in self.get_field_names())
        else:
            raise ValueError("Unexpected tuple %r with RowTypeInfo" % obj)
def from_internal_type(self, obj):
    """
    Convert an internal row value back into a tuple of external field values.

    Returns None for None input. tuple/list inputs are assumed to already be
    converted by the pickler and are returned unchanged.
    """
    if obj is None:
        return
    if isinstance(obj, (tuple, list)):
        # it's already converted by pickler
        return obj
    if self._need_serialize_any_field:
        # Only calling from_internal_type function for fields that need conversion
        values = [f.from_internal_type(v) if c else v
                  for f, v, c in zip(self._field_types, obj, self._need_conversion)]
    else:
        values = obj
    return tuple(values)
class TupleTypeInfo(TypeInformation):
    """
    TypeInformation for Tuple.
    """
    def __init__(self, field_types: List[TypeInformation]):
        # Types of the tuple fields, in positional order.
        self._field_types = field_types
        super(TupleTypeInfo, self).__init__()
    def get_field_types(self) -> List[TypeInformation]:
        """Return the list of field types in positional order."""
        return self._field_types
    def get_java_type_info(self) -> JavaObject:
        """Build (once) and cache the Java ``TupleTypeInfo`` mirroring this object."""
        if not self._j_typeinfo:
            j_types_array = get_gateway().new_array(
                get_gateway().jvm.org.apache.flink.api.common.typeinfo.TypeInformation,
                len(self._field_types))
            for i in range(len(self._field_types)):
                field_type = self._field_types[i]
                # Entries that are not TypeInformation are left as null in the Java array.
                if isinstance(field_type, TypeInformation):
                    j_types_array[i] = field_type.get_java_type_info()
            self._j_typeinfo = get_gateway().jvm \
                .org.apache.flink.api.java.typeutils.TupleTypeInfo(j_types_array)
        return self._j_typeinfo
    def __eq__(self, other) -> bool:
        if isinstance(other, TupleTypeInfo):
            return self._field_types == other._field_types
        return False
    def __repr__(self) -> str:
        return "TupleTypeInfo(%s)" % ', '.join(
            [str(field_type) for field_type in self._field_types])
class DateTypeInfo(TypeInformation):
    """
    TypeInformation for Date.

    Internally a date is represented as the number of days since the Unix
    epoch (1970-01-01).
    """
    def __init__(self):
        super(DateTypeInfo, self).__init__()
    # Ordinal of the Unix epoch; subtracted so internal value 0 == 1970-01-01.
    EPOCH_ORDINAL = datetime.datetime(1970, 1, 1).toordinal()
    def need_conversion(self):
        """Dates are converted to/from an integer day offset."""
        return True
    def to_internal_type(self, d):
        """Convert a date to days since epoch; None passes through."""
        if d is not None:
            return d.toordinal() - self.EPOCH_ORDINAL
    def from_internal_type(self, v):
        """Convert days since epoch back to a ``datetime.date``; None passes through."""
        if v is not None:
            return datetime.date.fromordinal(v + self.EPOCH_ORDINAL)
    def get_java_type_info(self) -> JavaObject:
        if not self._j_typeinfo:
            self._j_typeinfo = get_gateway().jvm\
                .org.apache.flink.api.common.typeinfo.SqlTimeTypeInfo.DATE
        return self._j_typeinfo
    def __eq__(self, o: object) -> bool:
        return isinstance(o, DateTypeInfo)
    def __repr__(self):
        return "DateTypeInfo"
class TimeTypeInfo(TypeInformation):
    """
    TypeInformation for Time.

    Internally a time is represented as microseconds since midnight, adjusted
    by the local UTC offset so the value is timezone-normalized.
    """
    # Local timezone offset of the epoch, in microseconds.
    EPOCH_ORDINAL = calendar.timegm(time.localtime(0)) * 10 ** 6
    def need_conversion(self):
        """Times are converted to/from a microsecond offset."""
        return True
    def to_internal_type(self, t):
        """Convert a ``datetime.time`` to normalized microseconds; None passes through."""
        if t is not None:
            if t.tzinfo is not None:
                # Timezone-aware: subtract the time's own UTC offset.
                offset = t.utcoffset()
                offset = offset if offset else datetime.timedelta()
                offset_microseconds =\
                    (offset.days * 86400 + offset.seconds) * 10 ** 6 + offset.microseconds
            else:
                # Naive: assume local time and subtract the local epoch offset.
                offset_microseconds = self.EPOCH_ORDINAL
            minutes = t.hour * 60 + t.minute
            seconds = minutes * 60 + t.second
            return seconds * 10 ** 6 + t.microsecond - offset_microseconds
    def from_internal_type(self, t):
        """Convert normalized microseconds back to a naive ``datetime.time``."""
        if t is not None:
            seconds, microseconds = divmod(t + self.EPOCH_ORDINAL, 10 ** 6)
            minutes, seconds = divmod(seconds, 60)
            hours, minutes = divmod(minutes, 60)
            return datetime.time(hours, minutes, seconds, microseconds)
    def get_java_type_info(self) -> JavaObject:
        if not self._j_typeinfo:
            self._j_typeinfo = get_gateway().jvm\
                .org.apache.flink.api.common.typeinfo.SqlTimeTypeInfo.TIME
        return self._j_typeinfo
    def __eq__(self, o: object) -> bool:
        return isinstance(o, TimeTypeInfo)
    def __repr__(self) -> str:
        return "TimeTypeInfo"
class TimestampTypeInfo(TypeInformation):
    """
    TypeInformation for Timestamp.

    Internally a timestamp is represented as microseconds since the Unix epoch.
    """
    def need_conversion(self):
        """Timestamps are converted to/from epoch microseconds."""
        return True
    def to_internal_type(self, dt):
        """Convert a ``datetime.datetime`` to epoch microseconds; None passes through."""
        if dt is not None:
            # Aware datetimes go through UTC; naive ones through local time.
            seconds = (calendar.timegm(dt.utctimetuple()) if dt.tzinfo
                       else time.mktime(dt.timetuple()))
            return int(seconds) * 10 ** 6 + dt.microsecond
    def from_internal_type(self, ts):
        """Convert epoch microseconds back to a local ``datetime.datetime``."""
        if ts is not None:
            return datetime.datetime.fromtimestamp(ts // 10 ** 6).replace(microsecond=ts % 10 ** 6)
    def get_java_type_info(self) -> JavaObject:
        if not self._j_typeinfo:
            self._j_typeinfo = get_gateway().jvm\
                .org.apache.flink.api.common.typeinfo.SqlTimeTypeInfo.TIMESTAMP
        return self._j_typeinfo
    def __eq__(self, o: object) -> bool:
        return isinstance(o, TimestampTypeInfo)
    def __repr__(self):
        return "TimestampTypeInfo"
class ListTypeInfo(TypeInformation):
    """
    A TypeInformation for list types with a homogeneous element type.
    """

    def __init__(self, element_type: TypeInformation):
        self.elem_type = element_type
        super(ListTypeInfo, self).__init__()

    def get_java_type_info(self) -> JavaObject:
        """Build (once) and cache the Java ``Types.LIST`` type information."""
        if not self._j_typeinfo:
            jvm = get_gateway().jvm
            self._j_typeinfo = jvm.org.apache.flink.api.common.typeinfo.Types.LIST(
                self.elem_type.get_java_type_info())
        return self._j_typeinfo

    def __eq__(self, other):
        """Two list type infos are equal iff their element types are equal."""
        if not isinstance(other, ListTypeInfo):
            return False
        return self.elem_type == other.elem_type

    def __repr__(self):
        return "ListTypeInfo<%s>" % self.elem_type
class MapTypeInfo(TypeInformation):
    """
    A TypeInformation for map (dict) types with fixed key and value types.
    """

    def __init__(self, key_type_info: TypeInformation, value_type_info: TypeInformation):
        self._key_type_info = key_type_info
        self._value_type_info = value_type_info
        super(MapTypeInfo, self).__init__()

    def get_java_type_info(self) -> JavaObject:
        """Build (once) and cache the Java ``Types.MAP`` type information."""
        if not self._j_typeinfo:
            self._j_typeinfo = get_gateway().jvm\
                .org.apache.flink.api.common.typeinfo.Types.MAP(
                    self._key_type_info.get_java_type_info(),
                    self._value_type_info.get_java_type_info())
        return self._j_typeinfo

    def __eq__(self, other):
        """Two map type infos are equal iff both key and value types are equal."""
        if isinstance(other, MapTypeInfo):
            return self._key_type_info == other._key_type_info and \
                self._value_type_info == other._value_type_info
        # Bug fix: the original fell off the end here and returned None for
        # non-MapTypeInfo operands instead of False.
        return False

    def __repr__(self) -> str:
        return 'MapTypeInfo<{}, {}>'.format(self._key_type_info, self._value_type_info)
class Types(object):
    """
    This class gives access to the type information of the most common types for which Flink has
    built-in serializers and comparators.
    """
    @staticmethod
    def STRING() -> TypeInformation:
        """
        Returns type information for string. Supports a None value.
        """
        return BasicTypeInfo.STRING_TYPE_INFO()
    @staticmethod
    def BYTE() -> TypeInformation:
        """
        Returns type information for byte. Does not support a None value.
        """
        return BasicTypeInfo.BYTE_TYPE_INFO()
    @staticmethod
    def BOOLEAN() -> TypeInformation:
        """
        Returns type information for bool. Does not support a None value.
        """
        return BasicTypeInfo.BOOLEAN_TYPE_INFO()
    @staticmethod
    def SHORT() -> TypeInformation:
        """
        Returns type information for short. Does not support a None value.
        """
        return BasicTypeInfo.SHORT_TYPE_INFO()
    @staticmethod
    def INT() -> TypeInformation:
        """
        Returns type information for int. Does not support a None value.
        """
        return BasicTypeInfo.INT_TYPE_INFO()
    @staticmethod
    def LONG() -> TypeInformation:
        """
        Returns type information for long. Does not support a None value.
        """
        return BasicTypeInfo.LONG_TYPE_INFO()
    @staticmethod
    def FLOAT() -> TypeInformation:
        """
        Returns type information for float. Does not support a None value.
        """
        return BasicTypeInfo.FLOAT_TYPE_INFO()
    @staticmethod
    def DOUBLE() -> TypeInformation:
        """
        Returns type information for double. Does not support a None value.
        """
        return BasicTypeInfo.DOUBLE_TYPE_INFO()
    @staticmethod
    def CHAR() -> TypeInformation:
        """
        Returns type information for char. Does not support a None value.
        """
        return BasicTypeInfo.CHAR_TYPE_INFO()
    @staticmethod
    def BIG_INT() -> TypeInformation:
        """
        Returns type information for BigInteger. Supports a None value.
        """
        return BasicTypeInfo.BIG_INT_TYPE_INFO()
    @staticmethod
    def BIG_DEC() -> TypeInformation:
        """
        Returns type information for BigDecimal. Supports a None value.
        """
        return BasicTypeInfo.BIG_DEC_TYPE_INFO()
    @staticmethod
    def SQL_DATE() -> TypeInformation:
        """
        Returns type information for Date. Supports a None value.
        """
        return SqlTimeTypeInfo.DATE()
    @staticmethod
    def SQL_TIME() -> TypeInformation:
        """
        Returns type information for Time. Supports a None value.
        """
        return SqlTimeTypeInfo.TIME()
    @staticmethod
    def SQL_TIMESTAMP() -> TypeInformation:
        """
        Returns type information for Timestamp. Supports a None value.
        """
        return SqlTimeTypeInfo.TIMESTAMP()
    @staticmethod
    def PICKLED_BYTE_ARRAY() -> TypeInformation:
        """
        Returns type information which uses pickle for serialization/deserialization.
        """
        return PickledBytesTypeInfo()
    @staticmethod
    def ROW(field_types: List[TypeInformation]):
        """
        Returns type information for Row with fields of the given types. A row itself must not be
        null.
        :param field_types: the types of the row fields, e.g., Types.STRING(), Types.INT()
        """
        return RowTypeInfo(field_types)
    @staticmethod
    def ROW_NAMED(field_names: List[str], field_types: List[TypeInformation]):
        """
        Returns type information for Row with fields of the given types and with given names. A row
        must not be null.
        :param field_names: array of field names.
        :param field_types: array of field types.
        """
        return RowTypeInfo(field_types, field_names)
    @staticmethod
    def TUPLE(field_types: List[TypeInformation]):
        """
        Returns type information for Tuple with fields of the given types. A Tuple itself must not
        be null.
        :param field_types: array of field types.
        """
        return TupleTypeInfo(field_types)
    @staticmethod
    def PRIMITIVE_ARRAY(element_type: TypeInformation):
        """
        Returns type information for arrays of primitive type (such as byte[]). The array must not
        be null.
        :param element_type: element type of the array (e.g. Types.BOOLEAN(), Types.INT(),
                             Types.DOUBLE())
        """
        return PrimitiveArrayTypeInfo(element_type)
    @staticmethod
    def BASIC_ARRAY(element_type: TypeInformation) -> TypeInformation:
        """
        Returns type information for arrays of boxed primitive type (such as Integer[]).
        :param element_type: element type of the array (e.g. Types.BOOLEAN(), Types.INT(),
                             Types.DOUBLE())
        """
        return BasicArrayTypeInfo(element_type)
    @staticmethod
    def OBJECT_ARRAY(element_type: TypeInformation) -> TypeInformation:
        """
        Returns type information for arrays of non-primitive types. The array itself must not be
        None. None values for elements are supported.
        :param element_type: element type of the array
        """
        return ObjectArrayTypeInfo(element_type)
    @staticmethod
    def MAP(key_type_info: TypeInformation, value_type_info: TypeInformation) -> TypeInformation:
        """
        Special TypeInformation used by MapStateDescriptor
        :param key_type_info: Element type of key (e.g. Types.BOOLEAN(), Types.INT(),
                              Types.DOUBLE())
        :param value_type_info: Element type of value (e.g. Types.BOOLEAN(), Types.INT(),
                                Types.DOUBLE())
        """
        return MapTypeInfo(key_type_info, value_type_info)
    @staticmethod
    def LIST(element_type_info: TypeInformation) -> TypeInformation:
        """
        A TypeInformation for the list type.
        :param element_type_info: The type of the elements in the list
        """
        return ListTypeInfo(element_type_info)
def _from_java_type(j_type_info: JavaObject) -> TypeInformation:
    """
    Map a Java TypeInformation object to the corresponding Python TypeInformation.

    Checks each known Java type info family in turn (basic types, SQL time
    types, primitive/basic/object arrays, pickled byte array, Row, Tuple,
    Map, List) and raises TypeError for anything unsupported.
    """
    gateway = get_gateway()
    JBasicTypeInfo = gateway.jvm.org.apache.flink.api.common.typeinfo.BasicTypeInfo
    if _is_instance_of(j_type_info, JBasicTypeInfo.STRING_TYPE_INFO):
        return Types.STRING()
    elif _is_instance_of(j_type_info, JBasicTypeInfo.BOOLEAN_TYPE_INFO):
        return Types.BOOLEAN()
    elif _is_instance_of(j_type_info, JBasicTypeInfo.BYTE_TYPE_INFO):
        return Types.BYTE()
    elif _is_instance_of(j_type_info, JBasicTypeInfo.SHORT_TYPE_INFO):
        return Types.SHORT()
    elif _is_instance_of(j_type_info, JBasicTypeInfo.INT_TYPE_INFO):
        return Types.INT()
    elif _is_instance_of(j_type_info, JBasicTypeInfo.LONG_TYPE_INFO):
        return Types.LONG()
    elif _is_instance_of(j_type_info, JBasicTypeInfo.FLOAT_TYPE_INFO):
        return Types.FLOAT()
    elif _is_instance_of(j_type_info, JBasicTypeInfo.DOUBLE_TYPE_INFO):
        return Types.DOUBLE()
    elif _is_instance_of(j_type_info, JBasicTypeInfo.CHAR_TYPE_INFO):
        return Types.CHAR()
    elif _is_instance_of(j_type_info, JBasicTypeInfo.BIG_INT_TYPE_INFO):
        return Types.BIG_INT()
    elif _is_instance_of(j_type_info, JBasicTypeInfo.BIG_DEC_TYPE_INFO):
        return Types.BIG_DEC()
    JSqlTimeTypeInfo = gateway.jvm.org.apache.flink.api.common.typeinfo.SqlTimeTypeInfo
    if _is_instance_of(j_type_info, JSqlTimeTypeInfo.DATE):
        return Types.SQL_DATE()
    elif _is_instance_of(j_type_info, JSqlTimeTypeInfo.TIME):
        return Types.SQL_TIME()
    elif _is_instance_of(j_type_info, JSqlTimeTypeInfo.TIMESTAMP):
        return Types.SQL_TIMESTAMP()
    JPrimitiveArrayTypeInfo = gateway.jvm.org.apache.flink.api.common.typeinfo \
        .PrimitiveArrayTypeInfo
    if _is_instance_of(j_type_info, JPrimitiveArrayTypeInfo.BOOLEAN_PRIMITIVE_ARRAY_TYPE_INFO):
        return Types.PRIMITIVE_ARRAY(Types.BOOLEAN())
    elif _is_instance_of(j_type_info, JPrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO):
        return Types.PRIMITIVE_ARRAY(Types.BYTE())
    elif _is_instance_of(j_type_info, JPrimitiveArrayTypeInfo.SHORT_PRIMITIVE_ARRAY_TYPE_INFO):
        return Types.PRIMITIVE_ARRAY(Types.SHORT())
    elif _is_instance_of(j_type_info, JPrimitiveArrayTypeInfo.INT_PRIMITIVE_ARRAY_TYPE_INFO):
        return Types.PRIMITIVE_ARRAY(Types.INT())
    elif _is_instance_of(j_type_info, JPrimitiveArrayTypeInfo.LONG_PRIMITIVE_ARRAY_TYPE_INFO):
        return Types.PRIMITIVE_ARRAY(Types.LONG())
    elif _is_instance_of(j_type_info, JPrimitiveArrayTypeInfo.FLOAT_PRIMITIVE_ARRAY_TYPE_INFO):
        return Types.PRIMITIVE_ARRAY(Types.FLOAT())
    elif _is_instance_of(j_type_info, JPrimitiveArrayTypeInfo.DOUBLE_PRIMITIVE_ARRAY_TYPE_INFO):
        return Types.PRIMITIVE_ARRAY(Types.DOUBLE())
    elif _is_instance_of(j_type_info, JPrimitiveArrayTypeInfo.CHAR_PRIMITIVE_ARRAY_TYPE_INFO):
        return Types.PRIMITIVE_ARRAY(Types.CHAR())
    JBasicArrayTypeInfo = gateway.jvm.org.apache.flink.api.common.typeinfo.BasicArrayTypeInfo
    if _is_instance_of(j_type_info, JBasicArrayTypeInfo.BOOLEAN_ARRAY_TYPE_INFO):
        return Types.BASIC_ARRAY(Types.BOOLEAN())
    elif _is_instance_of(j_type_info, JBasicArrayTypeInfo.BYTE_ARRAY_TYPE_INFO):
        return Types.BASIC_ARRAY(Types.BYTE())
    elif _is_instance_of(j_type_info, JBasicArrayTypeInfo.SHORT_ARRAY_TYPE_INFO):
        return Types.BASIC_ARRAY(Types.SHORT())
    elif _is_instance_of(j_type_info, JBasicArrayTypeInfo.INT_ARRAY_TYPE_INFO):
        return Types.BASIC_ARRAY(Types.INT())
    elif _is_instance_of(j_type_info, JBasicArrayTypeInfo.LONG_ARRAY_TYPE_INFO):
        return Types.BASIC_ARRAY(Types.LONG())
    elif _is_instance_of(j_type_info, JBasicArrayTypeInfo.FLOAT_ARRAY_TYPE_INFO):
        return Types.BASIC_ARRAY(Types.FLOAT())
    elif _is_instance_of(j_type_info, JBasicArrayTypeInfo.DOUBLE_ARRAY_TYPE_INFO):
        return Types.BASIC_ARRAY(Types.DOUBLE())
    elif _is_instance_of(j_type_info, JBasicArrayTypeInfo.CHAR_ARRAY_TYPE_INFO):
        return Types.BASIC_ARRAY(Types.CHAR())
    elif _is_instance_of(j_type_info, JBasicArrayTypeInfo.STRING_ARRAY_TYPE_INFO):
        return Types.BASIC_ARRAY(Types.STRING())
    JObjectArrayTypeInfo = gateway.jvm.org.apache.flink.api.java.typeutils.ObjectArrayTypeInfo
    if _is_instance_of(j_type_info, JObjectArrayTypeInfo):
        # Recurse into the component type.
        return Types.OBJECT_ARRAY(_from_java_type(j_type_info.getComponentInfo()))
    JPickledBytesTypeInfo = gateway.jvm \
        .org.apache.flink.streaming.api.typeinfo.python.PickledByteArrayTypeInfo\
        .PICKLED_BYTE_ARRAY_TYPE_INFO
    if _is_instance_of(j_type_info, JPickledBytesTypeInfo):
        return Types.PICKLED_BYTE_ARRAY()
    JRowTypeInfo = gateway.jvm.org.apache.flink.api.java.typeutils.RowTypeInfo
    if _is_instance_of(j_type_info, JRowTypeInfo):
        j_row_field_names = j_type_info.getFieldNames()
        j_row_field_types = j_type_info.getFieldTypes()
        row_field_types = [_from_java_type(j_row_field_type) for j_row_field_type in
                           j_row_field_types]
        row_field_names = [field_name for field_name in j_row_field_names]
        return Types.ROW_NAMED(row_field_names, row_field_types)
    JTupleTypeInfo = gateway.jvm.org.apache.flink.api.java.typeutils.TupleTypeInfo
    if _is_instance_of(j_type_info, JTupleTypeInfo):
        j_field_types = []
        for i in range(j_type_info.getArity()):
            j_field_types.append(j_type_info.getTypeAt(i))
        field_types = [_from_java_type(j_field_type) for j_field_type in j_field_types]
        return TupleTypeInfo(field_types)
    JMapTypeInfo = get_gateway().jvm.org.apache.flink.api.java.typeutils.MapTypeInfo
    if _is_instance_of(j_type_info, JMapTypeInfo):
        j_key_type_info = j_type_info.getKeyTypeInfo()
        j_value_type_info = j_type_info.getValueTypeInfo()
        return MapTypeInfo(_from_java_type(j_key_type_info), _from_java_type(j_value_type_info))
    JListTypeInfo = get_gateway().jvm.org.apache.flink.api.java.typeutils.ListTypeInfo
    if _is_instance_of(j_type_info, JListTypeInfo):
        j_element_type_info = j_type_info.getElementTypeInfo()
        return ListTypeInfo(_from_java_type(j_element_type_info))
    raise TypeError("The java type info: %s is not supported in PyFlink currently." % j_type_info)
def _is_instance_of(java_object: JavaObject, java_type: Union[JavaObject, JavaClass]) -> bool:
    """
    Check whether ``java_object`` matches ``java_type``: via ``equals()`` when
    ``java_type`` is a concrete Java instance, or via class assignability when
    it is a JavaClass. Returns False for anything else.
    """
    if isinstance(java_type, JavaObject):
        return java_object.equals(java_type)
    if isinstance(java_type, JavaClass):
        return java_object.getClass().isAssignableFrom(java_type._java_lang_class)
    return False
| apache-2.0 |
zverevalexei/trex-http-proxy | trex_client/external_libs/scapy-2.3.1/python2/scapy/layers/ipsec.py | 9 | 35424 | #############################################################################
## ipsec.py --- IPSec support for Scapy ##
## ##
## Copyright (C) 2014 6WIND ##
## ##
## This program is free software; you can redistribute it and/or modify it ##
## under the terms of the GNU General Public License version 2 as ##
## published by the Free Software Foundation. ##
## ##
## This program is distributed in the hope that it will be useful, but ##
## WITHOUT ANY WARRANTY; without even the implied warranty of ##
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ##
## General Public License for more details. ##
#############################################################################
"""
IPSec layer
===========
Example of use:
>>> sa = SecurityAssociation(ESP, spi=0xdeadbeef, crypt_algo='AES-CBC',
... crypt_key='sixteenbytes key')
>>> p = IP(src='1.1.1.1', dst='2.2.2.2')
>>> p /= TCP(sport=45012, dport=80)
>>> p /= Raw('testdata')
>>> p = IP(str(p))
>>> p
<IP version=4L ihl=5L tos=0x0 len=48 id=1 flags= frag=0L ttl=64 proto=tcp chksum=0x74c2 src=1.1.1.1 dst=2.2.2.2 options=[] |<TCP sport=45012 dport=http seq=0 ack=0 dataofs=5L reserved=0L flags=S window=8192 chksum=0x1914 urgptr=0 options=[] |<Raw load='testdata' |>>>
>>>
>>> e = sa.encrypt(p)
>>> e
<IP version=4L ihl=5L tos=0x0 len=76 id=1 flags= frag=0L ttl=64 proto=esp chksum=0x747a src=1.1.1.1 dst=2.2.2.2 |<ESP spi=0xdeadbeef seq=1 data='\xf8\xdb\x1e\x83[T\xab\\\xd2\x1b\xed\xd1\xe5\xc8Y\xc2\xa5d\x92\xc1\x05\x17\xa6\x92\x831\xe6\xc1]\x9a\xd6K}W\x8bFfd\xa5B*+\xde\xc8\x89\xbf{\xa9' |>>
>>>
>>> d = sa.decrypt(e)
>>> d
<IP version=4L ihl=5L tos=0x0 len=48 id=1 flags= frag=0L ttl=64 proto=tcp chksum=0x74c2 src=1.1.1.1 dst=2.2.2.2 |<TCP sport=45012 dport=http seq=0 ack=0 dataofs=5L reserved=0L flags=S window=8192 chksum=0x1914 urgptr=0 options=[] |<Raw load='testdata' |>>>
>>>
>>> d == p
True
"""
import socket
import fractions
from scapy.data import IP_PROTOS
from scapy.fields import ByteEnumField, ByteField, StrField, XIntField, IntField, \
ShortField, PacketField
from scapy.packet import Packet, bind_layers, Raw
from scapy.layers.inet import IP, UDP
from scapy.layers.inet6 import IPv6, IPv6ExtHdrHopByHop, IPv6ExtHdrDestOpt, \
IPv6ExtHdrRouting
#------------------------------------------------------------------------------
class AH(Packet):
    """
    Authentication Header
    See https://tools.ietf.org/rfc/rfc4302.txt
    """
    name = 'AH'
    fields_desc = [
        ByteEnumField('nh', None, IP_PROTOS),
        ByteField('payloadlen', None),
        ShortField('reserved', None),
        XIntField('spi', 0x0),
        IntField('seq', 0),
        # ICV length depends on the negotiated algorithm, so it is kept as a
        # raw string field here rather than a fixed-size field.
        StrField('icv', None),
        StrField('padding', None),
    ]
    # Ensure enclosing layers carry the AH protocol number automatically.
    overload_fields = {
        IP: {'proto': socket.IPPROTO_AH},
        IPv6: {'nh': socket.IPPROTO_AH},
        IPv6ExtHdrHopByHop: {'nh': socket.IPPROTO_AH},
        IPv6ExtHdrDestOpt: {'nh': socket.IPPROTO_AH},
        IPv6ExtHdrRouting: {'nh': socket.IPPROTO_AH},
    }
# Register AH dissection under IPv4 and IPv6 by protocol number.
bind_layers(IP, AH, proto=socket.IPPROTO_AH)
bind_layers(IPv6, AH, nh=socket.IPPROTO_AH)
#------------------------------------------------------------------------------
class ESP(Packet):
    """
    Encapsulated Security Payload
    See https://tools.ietf.org/rfc/rfc4303.txt
    """
    name = 'ESP'
    fields_desc = [
        XIntField('spi', 0x0),
        IntField('seq', 0),
        # Everything after the header (IV, ciphertext, trailer, ICV) is opaque
        # until decrypted, so it is a single raw data field.
        StrField('data', None),
    ]
    # Ensure enclosing layers carry the ESP protocol number automatically.
    overload_fields = {
        IP: {'proto': socket.IPPROTO_ESP},
        IPv6: {'nh': socket.IPPROTO_ESP},
        IPv6ExtHdrHopByHop: {'nh': socket.IPPROTO_ESP},
        IPv6ExtHdrDestOpt: {'nh': socket.IPPROTO_ESP},
        IPv6ExtHdrRouting: {'nh': socket.IPPROTO_ESP},
    }
# Register ESP dissection under IPv4/IPv6, plus UDP port 4500 for NAT-T.
bind_layers(IP, ESP, proto=socket.IPPROTO_ESP)
bind_layers(IPv6, ESP, nh=socket.IPPROTO_ESP)
bind_layers(UDP, ESP, dport=4500)  # NAT-Traversal encapsulation
bind_layers(UDP, ESP, sport=4500)  # NAT-Traversal encapsulation
#------------------------------------------------------------------------------
class _ESPPlain(Packet):
    """
    Internal class to represent unencrypted ESP packets.
    """
    name = 'ESP'
    fields_desc = [
        XIntField('spi', 0x0),
        IntField('seq', 0),
        StrField('iv', ''),
        PacketField('data', '', Raw),
        StrField('padding', ''),
        ByteField('padlen', 0),
        ByteEnumField('nh', 0, IP_PROTOS),
        StrField('icv', ''),
    ]
    def data_for_encryption(self):
        # Payload || padding || padlen || next-header, per RFC 4303 trailer
        # layout. Python 2 byte-string semantics (str + chr) are assumed here.
        return str(self.data) + self.padding + chr(self.padlen) + chr(self.nh)
#------------------------------------------------------------------------------
# pycrypto is an optional dependency: when it is missing, every cipher module
# name is set to None so the rest of the module can feature-detect with `if AES:`.
try:
    from Crypto.Cipher import AES
    from Crypto.Cipher import DES
    from Crypto.Cipher import DES3
    from Crypto.Cipher import CAST
    from Crypto.Cipher import Blowfish
    from Crypto.Util import Counter
    from Crypto import Random
except ImportError:
    # no error if pycrypto is not available but encryption won't be supported
    AES = None
    DES = None
    DES3 = None
    CAST = None
    Blowfish = None
    Random = None
#------------------------------------------------------------------------------
def _lcm(a, b):
"""
Least Common Multiple between 2 integers.
"""
if a == 0 or b == 0:
return 0
else:
return abs(a * b) // fractions.gcd(a, b)
class CryptAlgo(object):
    """
    IPSec encryption algorithm
    """
    def __init__(self, name, cipher, mode, block_size=None, iv_size=None, key_size=None):
        """
        @param name: the name of this encryption algorithm
        @param cipher: a Cipher module
        @param mode: the mode used with the cipher module
        @param block_size: the length a block for this algo. Defaults to the
                           `block_size` of the cipher.
        @param iv_size: the length of the initialization vector of this algo.
                        Defaults to the `block_size` of the cipher.
        @param key_size: an integer or list/tuple of integers. If specified,
                         force the secret keys length to one of the values.
                         Defaults to the `key_size` of the cipher.
        """
        self.name = name
        self.cipher = cipher
        self.mode = mode
        if block_size is not None:
            self.block_size = block_size
        elif cipher is not None:
            self.block_size = cipher.block_size
        else:
            # NULL encryption: treat data as a stream of 1-byte blocks.
            self.block_size = 1
        if iv_size is None:
            self.iv_size = self.block_size
        else:
            self.iv_size = iv_size
        if key_size is not None:
            self.key_size = key_size
        elif cipher is not None:
            self.key_size = cipher.key_size
        else:
            self.key_size = None

    def check_key(self, key):
        """
        Check that the key length is valid.

        @param key: a byte string
        @raise TypeError: if the key length does not match `key_size`
        """
        if self.key_size and not (len(key) == self.key_size or len(key) in self.key_size):
            raise TypeError('invalid key size %s, must be %s' %
                            (len(key), self.key_size))

    def generate_iv(self):
        """
        Generate a random initialization vector. If pycrypto is not available,
        return a buffer of the correct length filled with only '\x00'.
        """
        if Random:
            return Random.get_random_bytes(self.iv_size)
        else:
            return chr(0) * self.iv_size

    def new_cipher(self, key, iv):
        """
        @param key: the secret key, a byte string
        @param iv: the initialization vector, a byte string
        @return: an initialized cipher object for this algo
        """
        if (hasattr(self.cipher, 'MODE_CTR') and self.mode == self.cipher.MODE_CTR
            or hasattr(self.cipher, 'MODE_GCM') and self.mode == self.cipher.MODE_GCM):
            # in counter mode, the "iv" must be incremented for each block
            # it is calculated like this:
            # +---------+------------------+---------+
            # |  nonce  |        IV        | counter |
            # +---------+------------------+---------+
            #   m bytes       n bytes        4 bytes
            # <-------------------------------------->
            #               block_size
            nonce_size = self.cipher.block_size - self.iv_size - 4
            # instead of asking for an extra parameter, we extract the last
            # nonce_size bytes of the key and use them as the nonce.
            # +----------------------------+---------+
            # |        cipher key          |  nonce  |
            # +----------------------------+---------+
            #                              <--------->
            #                               nonce_size
            cipher_key, nonce = key[:-nonce_size], key[-nonce_size:]
            return self.cipher.new(cipher_key, self.mode,
                                   counter=Counter.new(4 * 8, prefix=nonce + iv))
        else:
            return self.cipher.new(key, self.mode, iv)

    def pad(self, esp):
        """
        Add the correct amount of padding so that the data to encrypt is
        exactly a multiple of the algorithm's block size.
        Also, make sure that the total ESP packet length is a multiple of 4 or
        8 bytes with IP or IPv6 respectively.

        @param esp: an unencrypted _ESPPlain packet
        @return: the same packet with padding, padlen and alignment applied
        """
        # 2 extra bytes for padlen and nh
        data_len = len(esp.data) + 2
        # according to the RFC4303, section 2.4. Padding (for Encryption)
        # the size of the ESP payload must be a multiple of 32 bits
        align = _lcm(self.block_size, 4)
        # pad for block size
        esp.padlen = -data_len % align
        # padding must be an array of bytes starting from 1 to padlen
        esp.padding = ''
        for b in range(1, esp.padlen + 1):
            esp.padding += chr(b)
        # If the following test fails, it means that this algo does not comply
        # with the RFC
        payload_len = len(esp.iv) + len(esp.data) + len(esp.padding) + 2
        if payload_len % 4 != 0:
            raise ValueError('The size of the ESP data is not aligned to 32 bits after padding.')
        return esp

    def encrypt(self, esp, key):
        """
        Encrypt an ESP packet

        @param esp: an unencrypted _ESPPlain packet with valid padding
        @param key: the secret key used for encryption
        @return: a valid ESP packet encrypted with this algorithm
        """
        data = esp.data_for_encryption()
        if self.cipher:
            self.check_key(key)
            cipher = self.new_cipher(key, esp.iv)
            data = cipher.encrypt(data)
        return ESP(spi=esp.spi, seq=esp.seq, data=esp.iv + data)

    def decrypt(self, esp, key, icv_size=0):
        """
        Decrypt an ESP packet

        @param esp: an encrypted ESP packet
        @param key: the secret key used for encryption
        @param icv_size: the length of the icv used for integrity check
        @return: a valid _ESPPlain packet decrypted with this algorithm
        """
        self.check_key(key)
        iv = esp.data[:self.iv_size]
        data = esp.data[self.iv_size:len(esp.data) - icv_size]
        icv = esp.data[len(esp.data) - icv_size:]
        if self.cipher:
            cipher = self.new_cipher(key, iv)
            data = cipher.decrypt(data)
        # extract padlen and nh from the RFC 4303 trailer
        padlen = ord(data[-2])
        nh = ord(data[-1])
        # Bug fix: extract the padding from the *untruncated* buffer first.
        # The original code truncated `data` before slicing the padding, so
        # the padding slice indexed into the already-shortened payload and
        # returned the wrong bytes.
        padding = data[len(data) - padlen - 2: len(data) - 2]
        data = data[:len(data) - padlen - 2]
        return _ESPPlain(spi=esp.spi,
                         seq=esp.seq,
                         iv=iv,
                         data=data,
                         padding=padding,
                         padlen=padlen,
                         nh=nh,
                         icv=icv)
#------------------------------------------------------------------------------
# The names of the encryption algorithms are the same than in scapy.contrib.ikev2
# see http://www.iana.org/assignments/ikev2-parameters/ikev2-parameters.xhtml
# Each algorithm is registered only when its pycrypto cipher module imported
# successfully above; 'NULL' is always available.
CRYPT_ALGOS = {
    'NULL': CryptAlgo('NULL', cipher=None, mode=None, iv_size=0),
}
if AES:
    CRYPT_ALGOS['AES-CBC'] = CryptAlgo('AES-CBC',
                                       cipher=AES,
                                       mode=AES.MODE_CBC)
    # specific case for counter mode:
    # the last 4 bytes of the key are used to carry the nonce of the counter
    CRYPT_ALGOS['AES-CTR'] = CryptAlgo('AES-CTR',
                                       cipher=AES,
                                       mode=AES.MODE_CTR,
                                       block_size=1,
                                       iv_size=8,
                                       key_size=(16 + 4, 24 + 4, 32 + 4))
if DES:
    CRYPT_ALGOS['DES'] = CryptAlgo('DES',
                                   cipher=DES,
                                   mode=DES.MODE_CBC)
if Blowfish:
    CRYPT_ALGOS['Blowfish'] = CryptAlgo('Blowfish',
                                        cipher=Blowfish,
                                        mode=Blowfish.MODE_CBC)
if DES3:
    CRYPT_ALGOS['3DES'] = CryptAlgo('3DES',
                                    cipher=DES3,
                                    mode=DES3.MODE_CBC)
if CAST:
    CRYPT_ALGOS['CAST'] = CryptAlgo('CAST',
                                    cipher=CAST,
                                    mode=CAST.MODE_CBC)
#------------------------------------------------------------------------------
# pycrypto hash/MAC modules are optional; missing names become None so the
# AUTH_ALGOS registration below can feature-detect with `if SHA256:` etc.
try:
    from Crypto.Hash import HMAC
    from Crypto.Hash import SHA
    from Crypto.Hash import MD5
    from Crypto.Hash import SHA256
    from Crypto.Hash import SHA384
    from Crypto.Hash import SHA512
except ImportError:
    # no error if pycrypto is not available but authentication won't be supported
    HMAC = None
    SHA = None
    MD5 = None
    SHA256 = None
    SHA384 = None
    # Bug fix: SHA512 was not reset here, so `if SHA512:` below raised
    # NameError whenever pycrypto was missing.
    SHA512 = None
try:
    from Crypto.Hash import XCBCMAC
except ImportError:
    XCBCMAC = None
#------------------------------------------------------------------------------
class IPSecIntegrityError(Exception):
    """Raised when the integrity check value (ICV) of a packet does not match."""
class AuthAlgo(object):
    """
    IPSec integrity algorithm
    """
    def __init__(self, name, mac, digestmod, icv_size, key_size=None):
        """
        @param name: the name of this integrity algorithm
        @param mac: a Message Authentication Code module
        @param digestmod: a Hash or Cipher module
        @param icv_size: the length of the integrity check value of this algo
        @param key_size: an integer or list/tuple of integers. If specified,
                         force the secret keys length to one of the values.
                         Defaults to the `key_size` of the cipher.
        """
        self.name = name
        self.mac = mac
        self.digestmod = digestmod
        self.icv_size = icv_size
        self.key_size = key_size
    def check_key(self, key):
        """
        Check that the key length is valid.
        @param key: a byte string
        @raise TypeError: if the key length is not one of `key_size`
        """
        if self.key_size and len(key) not in self.key_size:
            raise TypeError('invalid key size %s, must be one of %s' %
                            (len(key), self.key_size))
    def new_mac(self, key):
        """
        @param key: a byte string
        @return: an initialized mac object for this algo
        """
        if self.mac is XCBCMAC:
            # specific case here, ciphermod instead of digestmod
            return self.mac.new(key, ciphermod=self.digestmod)
        else:
            return self.mac.new(key, digestmod=self.digestmod)
    def sign(self, pkt, key):
        """
        Sign an IPSec (ESP or AH) packet with this algo.
        @param pkt: a packet that contains a valid encrypted ESP or AH layer
        @param key: the authentication key, a byte string
        @return: the signed packet
        """
        if not self.mac:
            return pkt
        self.check_key(key)
        mac = self.new_mac(key)
        if pkt.haslayer(ESP):
            # ESP: the ICV is appended to the encrypted data.
            mac.update(str(pkt[ESP]))
            pkt[ESP].data += mac.digest()[:self.icv_size]
        elif pkt.haslayer(AH):
            # AH: mutable IP fields must be zeroed before computing the ICV.
            # zero_mutable_fields is defined later in this module.
            clone = zero_mutable_fields(pkt.copy(), sending=True)
            mac.update(str(clone))
            pkt[AH].icv = mac.digest()[:self.icv_size]
        return pkt
    def verify(self, pkt, key):
        """
        Check that the integrity check value (icv) of a packet is valid.
        @param pkt: a packet that contains a valid encrypted ESP or AH layer
        @param key: the authentication key, a byte string
        @raise IPSecIntegrityError: if the integrity check fails
        """
        if not self.mac or self.icv_size == 0:
            return
        self.check_key(key)
        mac = self.new_mac(key)
        pkt_icv = 'not found'
        computed_icv = 'not computed'
        if isinstance(pkt, ESP):
            # The ICV is the trailing icv_size bytes of the data field;
            # recompute the MAC over the packet without it.
            pkt_icv = pkt.data[len(pkt.data) - self.icv_size:]
            pkt = pkt.copy()
            pkt.data = pkt.data[:len(pkt.data) - self.icv_size]
            mac.update(str(pkt))
            computed_icv = mac.digest()[:self.icv_size]
        elif pkt.haslayer(AH):
            pkt_icv = pkt[AH].icv[:self.icv_size]
            clone = zero_mutable_fields(pkt.copy(), sending=False)
            mac.update(str(clone))
            computed_icv = mac.digest()[:self.icv_size]
        if pkt_icv != computed_icv:
            raise IPSecIntegrityError('pkt_icv=%r, computed_icv=%r' %
                                      (pkt_icv, computed_icv))
#------------------------------------------------------------------------------
# The names of the integrity algorithms are the same than in scapy.contrib.ikev2
# see http://www.iana.org/assignments/ikev2-parameters/ikev2-parameters.xhtml
#
# Registry of usable integrity algorithms.  Each entry is only added when the
# corresponding crypto module was imported successfully earlier in this file,
# so the available set depends on the installed crypto backend.
AUTH_ALGOS = {
    'NULL': AuthAlgo('NULL', mac=None, digestmod=None, icv_size=0),
}
if HMAC:
    if SHA:
        AUTH_ALGOS['HMAC-SHA1-96'] = AuthAlgo('HMAC-SHA1-96',
                                              mac=HMAC,
                                              digestmod=SHA,
                                              icv_size=12)
    if SHA256:
        AUTH_ALGOS['SHA2-256-128'] = AuthAlgo('SHA2-256-128',
                                              mac=HMAC,
                                              digestmod=SHA256,
                                              icv_size=16)
    if SHA384:
        AUTH_ALGOS['SHA2-384-192'] = AuthAlgo('SHA2-384-192',
                                              mac=HMAC,
                                              digestmod=SHA384,
                                              icv_size=24)
    if SHA512:
        AUTH_ALGOS['SHA2-512-256'] = AuthAlgo('SHA2-512-256',
                                              mac=HMAC,
                                              digestmod=SHA512,
                                              icv_size=32)
    if MD5:
        AUTH_ALGOS['HMAC-MD5-96'] = AuthAlgo('HMAC-MD5-96',
                                             mac=HMAC,
                                             digestmod=MD5,
                                             icv_size=12)
if AES and XCBCMAC:
    # AES-XCBC-96 requires a fixed 16-byte key (RFC 3566)
    AUTH_ALGOS['AES-XCBC-96'] = AuthAlgo('AES-XCBC-96',
                                         mac=XCBCMAC,
                                         digestmod=AES,
                                         icv_size=12,
                                         key_size=(16,))
#------------------------------------------------------------------------------
#------------------------------------------------------------------------------
def split_for_transport(orig_pkt, transport_proto):
    """
    Split an IP(v6) packet in the correct location to insert an ESP or AH
    header.

    @param orig_pkt: the packet to split. Must be an IP or IPv6 packet
    @param transport_proto: the IPSec protocol number that will be inserted
                            at the split position.
    @return: a tuple (header, nh, payload) where nh is the protocol number of
             payload.
    """
    header = orig_pkt.copy()
    next_hdr = header.payload
    nh = None
    if header.version == 4:
        nh = header.proto
        header.proto = transport_proto
        header.remove_payload()
        # delete so that length and checksum get recomputed on rebuild
        del header.chksum
        del header.len
        return header, nh, next_hdr
    else:
        found_rt_hdr = False
        prev = header
        # Since the RFC 4302 is vague about where the ESP/AH headers should be
        # inserted in IPv6, I chose to follow the linux implementation: skip
        # past hop-by-hop, routing and destination-option extension headers,
        # except for a destination-option header that follows a routing
        # header, which stays after the insertion point.
        # (A former no-op "if isinstance(next_hdr, IPv6ExtHdrHopByHop): pass"
        # branch was removed here; hop-by-hop headers simply fall through.)
        while isinstance(next_hdr, (IPv6ExtHdrHopByHop, IPv6ExtHdrRouting, IPv6ExtHdrDestOpt)):
            if isinstance(next_hdr, IPv6ExtHdrRouting):
                found_rt_hdr = True
            elif isinstance(next_hdr, IPv6ExtHdrDestOpt) and found_rt_hdr:
                break
            prev = next_hdr
            next_hdr = next_hdr.payload
        nh = prev.nh
        prev.nh = transport_proto
        prev.remove_payload()
        # delete so that the payload length gets recomputed on rebuild
        del header.plen
        return header, nh, next_hdr
#------------------------------------------------------------------------------
# see RFC 4302 - Appendix A. Mutability of IP Options/Extension Headers
# IPv4 option numbers classified as immutable in transit; any option NOT in
# this tuple is zeroed by zero_mutable_fields() before computing the AH ICV.
IMMUTABLE_IPV4_OPTIONS = (
    0,  # End Of List
    1,  # No OPeration
    2,  # Security
    5,  # Extended Security
    6,  # Commercial Security
    20,  # Router Alert
    21,  # Sender Directed Multi-Destination Delivery
)
def zero_mutable_fields(pkt, sending=False):
    """
    When using AH, all "mutable" fields must be "zeroed" before calculating
    the ICV. See RFC 4302, Section 3.3.3.1. Handling Mutable Fields.

    @param pkt: an IP(v6) packet containing an AH layer.
                NOTE: The packet will be modified
    @param sending: if true, ipv6 routing headers will not be reordered
    @return: the modified packet, ready for ICV computation
    @raise TypeError: if the packet has no AH layer
    """
    if pkt.haslayer(AH):
        pkt[AH].icv = chr(0) * len(pkt[AH].icv)
    else:
        raise TypeError('no AH layer found')
    if pkt.version == 4:
        # the tos field has been replaced by DSCP and ECN
        # Routers may rewrite the DS field as needed to provide a
        # desired local or end-to-end service
        pkt.tos = 0
        # an intermediate router might set the DF bit, even if the source
        # did not select it.
        pkt.flags = 0
        # changed en route as a normal course of processing by routers
        pkt.ttl = 0
        # will change if any of these other fields change
        pkt.chksum = 0
        # keep immutable options, replace mutable ones by zero padding of the
        # same length so offsets in the header are preserved
        immutable_opts = []
        for opt in pkt.options:
            if opt.option in IMMUTABLE_IPV4_OPTIONS:
                immutable_opts.append(opt)
            else:
                immutable_opts.append(Raw(chr(0) * len(opt)))
        pkt.options = immutable_opts
    else:
        # holds DSCP and ECN
        pkt.tc = 0
        # The flow label described in AHv1 was mutable, and in RFC 2460 [DH98]
        # was potentially mutable. To retain compatibility with existing AH
        # implementations, the flow label is not included in the ICV in AHv2.
        pkt.fl = 0
        # same as ttl
        pkt.hlim = 0
        # walk the extension-header chain and zero mutable option data
        next_hdr = pkt.payload
        while isinstance(next_hdr, (IPv6ExtHdrHopByHop, IPv6ExtHdrRouting, IPv6ExtHdrDestOpt)):
            if isinstance(next_hdr, (IPv6ExtHdrHopByHop, IPv6ExtHdrDestOpt)):
                for opt in next_hdr.options:
                    if opt.otype & 0x20:
                        # option data can change en-route and must be zeroed
                        opt.optdata = chr(0) * opt.optlen
            elif isinstance(next_hdr, IPv6ExtHdrRouting) and sending:
                # The sender must order the field so that it appears as it
                # will at the receiver, prior to performing the ICV computation.
                next_hdr.segleft = 0
                if next_hdr.addresses:
                    final = next_hdr.addresses.pop()
                    next_hdr.addresses.insert(0, pkt.dst)
                    pkt.dst = final
            else:
                break
            next_hdr = next_hdr.payload
    return pkt
#------------------------------------------------------------------------------
class SecurityAssociation(object):
    """
    This class is responsible of "encryption" and "decryption" of IPSec packets.
    """
    # outermost layers this SA is able to process
    SUPPORTED_PROTOS = (IP, IPv6)
    def __init__(self, proto, spi, seq_num=1, crypt_algo=None, crypt_key=None,
                 auth_algo=None, auth_key=None, tunnel_header=None, nat_t_header=None):
        """
        @param proto: the IPSec proto to use (ESP or AH)
        @param spi: the Security Parameters Index of this SA
        @param seq_num: the initial value for the sequence number on encrypted
                        packets
        @param crypt_algo: the encryption algorithm name (only used with ESP)
        @param crypt_key: the encryption key (only used with ESP)
        @param auth_algo: the integrity algorithm name
        @param auth_key: the integrity key
        @param tunnel_header: an instance of a IP(v6) header that will be used
                              to encapsulate the encrypted packets.
        @param nat_t_header: an instance of a UDP header that will be used
                             for NAT-Traversal.
        """
        if proto not in (ESP, AH, ESP.name, AH.name):
            raise ValueError("proto must be either ESP or AH")
        if isinstance(proto, basestring):
            # map the layer name back to its class; a dict lookup avoids
            # calling eval() on caller-supplied input
            self.proto = {ESP.name: ESP, AH.name: AH}[proto]
        else:
            self.proto = proto
        self.spi = spi
        self.seq_num = seq_num
        if crypt_algo:
            if crypt_algo not in CRYPT_ALGOS:
                raise TypeError('unsupported encryption algo %r, try %r' %
                                (crypt_algo, CRYPT_ALGOS.keys()))
            self.crypt_algo = CRYPT_ALGOS[crypt_algo]
            self.crypt_algo.check_key(crypt_key)
            self.crypt_key = crypt_key
        else:
            self.crypt_algo = CRYPT_ALGOS['NULL']
            self.crypt_key = None
        if auth_algo:
            if auth_algo not in AUTH_ALGOS:
                raise TypeError('unsupported integrity algo %r, try %r' %
                                (auth_algo, AUTH_ALGOS.keys()))
            self.auth_algo = AUTH_ALGOS[auth_algo]
            self.auth_algo.check_key(auth_key)
            self.auth_key = auth_key
        else:
            self.auth_algo = AUTH_ALGOS['NULL']
            self.auth_key = None
        if tunnel_header and not isinstance(tunnel_header, (IP, IPv6)):
            raise TypeError('tunnel_header must be %s or %s' % (IP.name, IPv6.name))
        self.tunnel_header = tunnel_header
        # BUG FIX: always initialize the attribute. _encrypt_esp() tests
        # self.nat_t_header unconditionally, which previously raised
        # AttributeError whenever no NAT-T header was supplied.
        self.nat_t_header = None
        if nat_t_header:
            if proto is not ESP:
                raise TypeError('nat_t_header is only allowed with ESP')
            if not isinstance(nat_t_header, UDP):
                raise TypeError('nat_t_header must be %s' % UDP.name)
            self.nat_t_header = nat_t_header
    def check_spi(self, pkt):
        """
        Verify that *pkt* carries the SPI of this SA.

        @raise TypeError: on SPI mismatch
        """
        if pkt.spi != self.spi:
            raise TypeError('packet spi=0x%x does not match the SA spi=0x%x' %
                            (pkt.spi, self.spi))
    def _encrypt_esp(self, pkt, seq_num=None, iv=None):
        # Build, pad, encrypt and sign an ESP-encapsulated version of pkt.
        if iv is None:
            iv = self.crypt_algo.generate_iv()
        else:
            if len(iv) != self.crypt_algo.iv_size:
                raise TypeError('iv length must be %s' % self.crypt_algo.iv_size)
        esp = _ESPPlain(spi=self.spi, seq=seq_num or self.seq_num, iv=iv)
        if self.tunnel_header:
            tunnel = self.tunnel_header.copy()
            # delete length/checksum/proto fields so they are recomputed
            # when the tunneled packet is rebuilt below
            if tunnel.version == 4:
                del tunnel.proto
                del tunnel.len
                del tunnel.chksum
            else:
                del tunnel.nh
                del tunnel.plen
            pkt = tunnel.__class__(str(tunnel / pkt))
        ip_header, nh, payload = split_for_transport(pkt, socket.IPPROTO_ESP)
        esp.data = payload
        esp.nh = nh
        esp = self.crypt_algo.pad(esp)
        esp = self.crypt_algo.encrypt(esp, self.crypt_key)
        self.auth_algo.sign(esp, self.auth_key)
        if self.nat_t_header:
            # insert the UDP NAT-Traversal header between IP and ESP
            nat_t_header = self.nat_t_header.copy()
            nat_t_header.chksum = 0
            del nat_t_header.len
            if ip_header.version == 4:
                del ip_header.proto
            else:
                del ip_header.nh
            ip_header /= nat_t_header
        if ip_header.version == 4:
            ip_header.len = len(ip_header) + len(esp)
            del ip_header.chksum
            # rebuild to recompute the checksum
            ip_header = ip_header.__class__(str(ip_header))
        else:
            ip_header.plen = len(ip_header.payload) + len(esp)
        # sequence number must always change, unless specified by the user
        if seq_num is None:
            self.seq_num += 1
        return ip_header / esp
    def _encrypt_ah(self, pkt, seq_num=None):
        # Build and sign an AH-authenticated version of pkt.
        ah = AH(spi=self.spi, seq=seq_num or self.seq_num,
                icv=chr(0) * self.auth_algo.icv_size)
        if self.tunnel_header:
            tunnel = self.tunnel_header.copy()
            if tunnel.version == 4:
                del tunnel.proto
                del tunnel.len
                del tunnel.chksum
            else:
                del tunnel.nh
                del tunnel.plen
            pkt = tunnel.__class__(str(tunnel / pkt))
        ip_header, nh, payload = split_for_transport(pkt, socket.IPPROTO_AH)
        ah.nh = nh
        if ip_header.version == 6 and len(ah) % 8 != 0:
            # For IPv6, the total length of the header must be a multiple of
            # 8-octet units.
            ah.padding = chr(0) * (-len(ah) % 8)
        elif len(ah) % 4 != 0:
            # For IPv4, the total length of the header must be a multiple of
            # 4-octet units.
            ah.padding = chr(0) * (-len(ah) % 4)
        # RFC 4302 - Section 2.2. Payload Length
        # This 8-bit field specifies the length of AH in 32-bit words (4-byte
        # units), minus "2".  Floor division keeps the result an integer on
        # both Python 2 and Python 3.
        ah.payloadlen = len(ah) // 4 - 2
        if ip_header.version == 4:
            ip_header.len = len(ip_header) + len(ah) + len(payload)
            del ip_header.chksum
            # rebuild to recompute the checksum
            ip_header = ip_header.__class__(str(ip_header))
        else:
            ip_header.plen = len(ip_header.payload) + len(ah) + len(payload)
        signed_pkt = self.auth_algo.sign(ip_header / ah / payload, self.auth_key)
        # sequence number must always change, unless specified by the user
        if seq_num is None:
            self.seq_num += 1
        return signed_pkt
    def encrypt(self, pkt, seq_num=None, iv=None):
        """
        Encrypt (and encapsulate) an IP(v6) packet with ESP or AH according
        to this SecurityAssociation.

        @param pkt: the packet to encrypt
        @param seq_num: if specified, use this sequence number instead of the
                        generated one
        @param iv: if specified, use this initialization vector for
                   encryption instead of a random one.
        @return: the encrypted/encapsulated packet
        """
        if not isinstance(pkt, self.SUPPORTED_PROTOS):
            raise TypeError('cannot encrypt %s, supported protos are %s'
                            % (pkt.__class__, self.SUPPORTED_PROTOS))
        if self.proto is ESP:
            return self._encrypt_esp(pkt, seq_num=seq_num, iv=iv)
        else:
            return self._encrypt_ah(pkt, seq_num=seq_num)
    def _decrypt_esp(self, pkt, verify=True):
        # Verify, decrypt and decapsulate an ESP packet.
        encrypted = pkt[ESP]
        if verify:
            self.check_spi(pkt)
            self.auth_algo.verify(encrypted, self.auth_key)
        esp = self.crypt_algo.decrypt(encrypted, self.crypt_key,
                                      self.auth_algo.icv_size)
        if self.tunnel_header:
            # drop the tunnel header and return the payload untouched
            pkt.remove_payload()
            if pkt.version == 4:
                pkt.proto = esp.nh
            else:
                pkt.nh = esp.nh
            cls = pkt.guess_payload_class(esp.data)
            return cls(esp.data)
        else:
            ip_header = pkt
            if ip_header.version == 4:
                ip_header.proto = esp.nh
                del ip_header.chksum
                ip_header.remove_payload()
                ip_header.len = len(ip_header) + len(esp.data)
                # recompute checksum
                ip_header = ip_header.__class__(str(ip_header))
            else:
                encrypted.underlayer.nh = esp.nh
                encrypted.underlayer.remove_payload()
                ip_header.plen = len(ip_header.payload) + len(esp.data)
            cls = ip_header.guess_payload_class(esp.data)
            # reassemble the ip_header with the ESP payload
            return ip_header / cls(esp.data)
    def _decrypt_ah(self, pkt, verify=True):
        # Verify and decapsulate an AH packet.
        if verify:
            self.check_spi(pkt)
            self.auth_algo.verify(pkt, self.auth_key)
        ah = pkt[AH]
        payload = ah.payload
        payload.remove_underlayer(None)  # useless argument...
        if self.tunnel_header:
            # drop the tunnel header and return the payload untouched
            return payload
        else:
            ip_header = pkt
            if ip_header.version == 4:
                ip_header.proto = ah.nh
                del ip_header.chksum
                ip_header.remove_payload()
                ip_header.len = len(ip_header) + len(payload)
                # recompute checksum
                ip_header = ip_header.__class__(str(ip_header))
            else:
                ah.underlayer.nh = ah.nh
                ah.underlayer.remove_payload()
                ip_header.plen = len(ip_header.payload) + len(payload)
            # reassemble the ip_header with the AH payload
            return ip_header / payload
    def decrypt(self, pkt, verify=True):
        """
        Decrypt (and decapsulate) an IP(v6) packet containing ESP or AH.

        @param pkt: the packet to decrypt
        @param verify: if False, do not perform the integrity check
        @return: the decrypted/decapsulated packet
        @raise IPSecIntegrityError: if the integrity check fails
        """
        if not isinstance(pkt, self.SUPPORTED_PROTOS):
            raise TypeError('cannot decrypt %s, supported protos are %s'
                            % (pkt.__class__, self.SUPPORTED_PROTOS))
        if self.proto is ESP and pkt.haslayer(ESP):
            return self._decrypt_esp(pkt, verify=verify)
        elif self.proto is AH and pkt.haslayer(AH):
            return self._decrypt_ah(pkt, verify=verify)
        else:
            raise TypeError('%s has no %s layer' % (pkt, self.proto.name))
| mit |
b-jesch/service.fritzbox.callmonitor | resources/lib/PhoneBooks/pyicloud/vendorlibs/requests/packages/urllib3/util/ssl_.py | 152 | 11624 | from __future__ import absolute_import
import errno
import warnings
import hmac
from binascii import hexlify, unhexlify
from hashlib import md5, sha1, sha256
from ..exceptions import SSLError, InsecurePlatformWarning, SNIMissingWarning
# Placeholders: overridden below if the interpreter's ssl module provides the
# real objects (and possibly again by the pyOpenSSL injection module).
SSLContext = None
HAS_SNI = False
create_default_context = None
IS_PYOPENSSL = False
# Maps the length of a digest to a possible hash function producing this digest
# (hex-digit counts: md5=32, sha1=40, sha256=64)
HASHFUNC_MAP = {
    32: md5,
    40: sha1,
    64: sha256,
}
def _const_compare_digest_backport(a, b):
"""
Compare two digests of equal length in constant time.
The digests must be of type str/bytes.
Returns True if the digests match, and False otherwise.
"""
result = abs(len(a) - len(b))
for l, r in zip(bytearray(a), bytearray(b)):
result |= l ^ r
return result == 0
# Prefer the stdlib C implementation (Python 2.7.7+/3.3+); fall back to the
# pure-Python constant-time comparison above on older interpreters.
_const_compare_digest = getattr(hmac, 'compare_digest',
                                _const_compare_digest_backport)
try:  # Test for SSL features
    import ssl
    from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23
    from ssl import HAS_SNI  # Has SNI?
except ImportError:
    # No ssl module at all: leave the module-level placeholders in place.
    pass
try:
    from ssl import OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_COMPRESSION
except ImportError:
    # Older Pythons lack these constants; fall back to the raw OpenSSL
    # option bit values.
    OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000
    OP_NO_COMPRESSION = 0x20000
# A secure default.
# Sources for more information on TLS ciphers:
#
# - https://wiki.mozilla.org/Security/Server_Side_TLS
# - https://www.ssllabs.com/projects/best-practices/index.html
# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
#
# The general intent is:
# - Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE),
# - prefer ECDHE over DHE for better performance,
# - prefer any AES-GCM over any AES-CBC for better performance and security,
# - use 3DES as fallback which is secure but slow,
# - disable NULL authentication, MD5 MACs and DSS for security reasons.
#
# The value is an OpenSSL cipher-list string, passed verbatim to
# SSLContext.set_ciphers() in create_urllib3_context() below.
DEFAULT_CIPHERS = (
    'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:'
    'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:!aNULL:'
    '!eNULL:!MD5'
)
try:
    from ssl import SSLContext  # Modern SSL?
except ImportError:
    import sys
    class SSLContext(object):  # Platform-specific: Python 2 & 3.1
        # Minimal shim mimicking the stdlib SSLContext API for interpreters
        # that predate it: it only records the configuration and replays it
        # through ssl.wrap_socket() when wrap_socket() is called.
        supports_set_ciphers = ((2, 7) <= sys.version_info < (3,) or
                                (3, 2) <= sys.version_info)
        def __init__(self, protocol_version):
            self.protocol = protocol_version
            # Use default values from a real SSLContext
            self.check_hostname = False
            self.verify_mode = ssl.CERT_NONE
            self.ca_certs = None
            self.options = 0
            self.certfile = None
            self.keyfile = None
            self.ciphers = None
        def load_cert_chain(self, certfile, keyfile):
            # stored for the deferred wrap_socket() call
            self.certfile = certfile
            self.keyfile = keyfile
        def load_verify_locations(self, cafile=None, capath=None):
            self.ca_certs = cafile
            if capath is not None:
                # ssl.wrap_socket() has no capath equivalent
                raise SSLError("CA directories not supported in older Pythons")
        def set_ciphers(self, cipher_suite):
            if not self.supports_set_ciphers:
                raise TypeError(
                    'Your version of Python does not support setting '
                    'a custom cipher suite. Please upgrade to Python '
                    '2.7, 3.2, or later if you need this functionality.'
                )
            self.ciphers = cipher_suite
        def wrap_socket(self, socket, server_hostname=None, server_side=False):
            # warn on every connection: without a real SSLContext we cannot
            # configure SNI or modern options
            warnings.warn(
                'A true SSLContext object is not available. This prevents '
                'urllib3 from configuring SSL appropriately and may cause '
                'certain SSL connections to fail. You can upgrade to a newer '
                'version of Python to solve this. For more information, see '
                'https://urllib3.readthedocs.org/en/latest/security.html'
                '#insecureplatformwarning.',
                InsecurePlatformWarning
            )
            kwargs = {
                'keyfile': self.keyfile,
                'certfile': self.certfile,
                'ca_certs': self.ca_certs,
                'cert_reqs': self.verify_mode,
                'ssl_version': self.protocol,
                'server_side': server_side,
            }
            if self.supports_set_ciphers:  # Platform-specific: Python 2.7+
                return wrap_socket(socket, ciphers=self.ciphers, **kwargs)
            else:  # Platform-specific: Python 2.6
                return wrap_socket(socket, **kwargs)
def assert_fingerprint(cert, fingerprint):
    """
    Checks if given fingerprint matches the supplied certificate.

    :param cert:
        Certificate as bytes object.
    :param fingerprint:
        Fingerprint as string of hexdigits, can be interspersed by colons.
    """
    # Normalize: strip colon separators and lowercase the hex digits.
    normalized = fingerprint.replace(':', '').lower()
    # The digest length identifies the hash function that produced it.
    hashfunc = HASHFUNC_MAP.get(len(normalized))
    if hashfunc is None:
        raise SSLError(
            'Fingerprint of invalid length: {0}'.format(normalized))
    # We need encode() here for py32; works on py2 and p33.
    expected = unhexlify(normalized.encode())
    actual = hashfunc(cert).digest()
    if not _const_compare_digest(actual, expected):
        raise SSLError('Fingerprints did not match. Expected "{0}", got "{1}".'
                       .format(normalized, hexlify(actual)))
def resolve_cert_reqs(candidate):
    """
    Resolve *candidate* to one of the numeric ``ssl.CERT_*`` constants
    accepted by the wrap_socket function/method from the ssl module.

    ``None`` maps to :data:`ssl.CERT_NONE`.  A string is looked up in the
    :mod:`ssl` module, either verbatim or with a ``CERT_`` prefix (so both
    ``'CERT_REQUIRED'`` and the abbreviation ``'REQUIRED'`` work).  Any
    other value is assumed to already be a numeric constant and is returned
    unchanged.
    """
    if candidate is None:
        return CERT_NONE
    if not isinstance(candidate, str):
        # already a numeric constant
        return candidate
    resolved = getattr(ssl, candidate, None)
    if resolved is None:
        # retry with the CERT_ prefix; raises AttributeError if unknown
        resolved = getattr(ssl, 'CERT_' + candidate)
    return resolved
def resolve_ssl_version(candidate):
    """
    Resolve *candidate* to a numeric ``ssl.PROTOCOL_*`` constant; same
    lookup rules as :func:`resolve_cert_reqs`, with ``None`` mapping to
    :data:`ssl.PROTOCOL_SSLv23`.
    """
    if candidate is None:
        return PROTOCOL_SSLv23
    if not isinstance(candidate, str):
        # already a numeric constant
        return candidate
    resolved = getattr(ssl, candidate, None)
    if resolved is None:
        # retry with the PROTOCOL_ prefix; raises AttributeError if unknown
        resolved = getattr(ssl, 'PROTOCOL_' + candidate)
    return resolved
def create_urllib3_context(ssl_version=None, cert_reqs=None,
                           options=None, ciphers=None):
    """All arguments have the same meaning as ``ssl_wrap_socket``.

    Does much of what ``ssl.create_default_context`` does on Python 3.4+:
    disables SSLv2, SSLv3 and compression by default and restricts the
    server cipher suites.  To re-enable SSLv3 (or ``COMPRESSION``)::

        from urllib3.util import ssl_
        context = ssl_.create_urllib3_context()
        context.options &= ~ssl_.OP_NO_SSLv3

    :param ssl_version:
        Desired protocol version; defaults to PROTOCOL_SSLv23, which
        negotiates the highest version both sides support.
    :param cert_reqs:
        Whether to require certificate verification; defaults to
        ``ssl.CERT_REQUIRED``.
    :param options:
        Specific OpenSSL options; default is
        ``OP_NO_SSLv2 | OP_NO_SSLv3 | OP_NO_COMPRESSION``.
    :param ciphers:
        Which cipher suites to allow the server to select.
    :returns:
        Constructed SSLContext object with specified options
    :rtype: SSLContext
    """
    context = SSLContext(ssl_version or ssl.PROTOCOL_SSLv23)
    if cert_reqs is None:
        # Setting the default here, as we may have no ssl module on import
        cert_reqs = ssl.CERT_REQUIRED
    if options is None:
        # SSLv2 is easily broken, SSLv3 has several problems, and
        # compression enables the CRIME attack (issue #309) — disable all
        # three by default.
        options = OP_NO_SSLv2 | OP_NO_SSLv3 | OP_NO_COMPRESSION
    context.options |= options
    # Platform-specific: the Python 2.6 fallback context cannot set ciphers
    if getattr(context, 'supports_set_ciphers', True):
        context.set_ciphers(ciphers or DEFAULT_CIPHERS)
    context.verify_mode = cert_reqs
    # Platform-specific: Python 3.2+. We do our own verification, including
    # fingerprints and alternative hostnames, so disable the builtin check.
    if getattr(context, 'check_hostname', None) is not None:
        context.check_hostname = False
    return context
def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
                    ca_certs=None, server_hostname=None,
                    ssl_version=None, ciphers=None, ssl_context=None,
                    ca_cert_dir=None):
    """
    All arguments except for server_hostname, ssl_context, and ca_cert_dir have
    the same meaning as they do when using :func:`ssl.wrap_socket`.

    :param server_hostname:
        When SNI is supported, the expected hostname of the certificate
    :param ssl_context:
        A pre-made :class:`SSLContext` object. If none is provided, one will
        be created using :func:`create_urllib3_context`.
    :param ciphers:
        A string of ciphers we wish the client to support. This is not
        supported on Python 2.6 as the ssl module does not support it.
    :param ca_cert_dir:
        A directory containing CA certificates in multiple separate files, as
        supported by OpenSSL's -CApath flag or the capath argument to
        SSLContext.load_verify_locations().
    :return: the wrapped :class:`ssl.SSLSocket`
    """
    context = ssl_context
    if context is None:
        # note: keyfile/certfile are applied below, not passed through here
        context = create_urllib3_context(ssl_version, cert_reqs,
                                         ciphers=ciphers)
    if ca_certs or ca_cert_dir:
        try:
            context.load_verify_locations(ca_certs, ca_cert_dir)
        except IOError as e:  # Platform-specific: Python 2.6, 2.7, 3.2
            raise SSLError(e)
        # Py33 raises FileNotFoundError which subclasses OSError
        # These are not equivalent unless we check the errno attribute
        except OSError as e:  # Platform-specific: Python 3.3 and beyond
            if e.errno == errno.ENOENT:
                raise SSLError(e)
            raise
    if certfile:
        context.load_cert_chain(certfile, keyfile)
    if HAS_SNI:  # Platform-specific: OpenSSL with enabled SNI
        return context.wrap_socket(sock, server_hostname=server_hostname)
    # No SNI available: warn, since the server may present the wrong
    # certificate, then wrap without a server_hostname.
    warnings.warn(
        'An HTTPS request has been made, but the SNI (Subject Name '
        'Indication) extension to TLS is not available on this platform. '
        'This may cause the server to present an incorrect TLS '
        'certificate, which can cause validation failures. You can upgrade to '
        'a newer version of Python to solve this. For more information, see '
        'https://urllib3.readthedocs.org/en/latest/security.html'
        '#snimissingwarning.',
        SNIMissingWarning
    )
    return context.wrap_socket(sock)
| gpl-2.0 |
mraspaud/mpop | mpop/satin/fy3_virr.py | 2 | 6378 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015, 2016 Adam.Dybbroe
# Author(s):
# Adam.Dybbroe <adam.dybbroe@smhi.se>
# Katerina.Melnik <kmelnik@scanex.ru>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""A VIRR reader for FY3-B and maybe A....
"""
import numpy as np
import os
import logging
from datetime import datetime
from ConfigParser import ConfigParser
from mpop import CONFIG_PATH
import h5py
from pyspectral.blackbody import blackbody_wn_rad2temp as rad2temp
LOGGER = logging.getLogger('virr')
def load(satscene, *args, **kwargs):
    """Read data from file and load it into *satscene*.

    A possible *calibrate* keyword argument is passed to the AAPP reader.
    Should be 0 for off (counts), 1 for default (brightness temperatures and
    reflectances), and 2 for radiances only.

    If *use_extern_calib* keyword argument is set True, use external
    calibration data.
    """
    del args
    # read the per-satellite configuration file for the reader options
    conf = ConfigParser()
    conf.read(os.path.join(CONFIG_PATH, satscene.fullname + ".cfg"))
    options = {}
    for option, value in conf.items(satscene.instrument_name + "-level2",
                                    raw=True):
        options[option] = value
    # keyword arguments override the configuration file
    if kwargs.get("filename") is not None:
        options["full_filename"] = kwargs["filename"]
    if kwargs.get("calibrate") is not None:
        options["calibrate"] = kwargs["calibrate"]
    else:
        # default: calibrated (Tb / reflectances)
        options["calibrate"] = True
    LOGGER.debug("Calibrate = " + str(options["calibrate"]))
    LOGGER.info("Loading instrument '%s'", satscene.instrument_name)
    try:
        # dispatch to the instrument-specific loader (see CASES below)
        CASES[satscene.instrument_name](satscene, options)
    except KeyError:
        raise KeyError("Unknown instrument '%s'" % satscene.instrument_name)
def load_virr(satscene, options):
    """Read the VIRR hdf5 file and fill *satscene* with the requested bands.

    Loads geolocation, solar zenith angles and calibration coefficients from
    the HDF5 file, then reads the emissive (IR) and reflective (VIS/NIR)
    datasets and calibrates them when ``options['calibrate']`` is true.
    """
    if "filename" not in options:
        raise IOError("No 1km virr filename given, cannot load")
    # values substituted into the configured dir/filename patterns
    values = {"orbit": satscene.orbit,
              "satname": satscene.satname,
              "instrument": satscene.instrument_name,
              "satellite": satscene.fullname
              }
    filename = \
        os.path.join(satscene.time_slot.strftime(options["dir"]) % values,
                     satscene.time_slot.strftime(
                         options["filename"])
                     % values)
    LOGGER.debug("Filename= %s", filename)
    datasets = ['EV_Emissive',
                'EV_RefSB']
    calibrate = options['calibrate']
    LOGGER.debug("Calibrate = " + str(calibrate))
    h5f = h5py.File(filename, 'r')
    # Get geolocation information
    lons = h5f['Longitude'][:]
    lats = h5f['Latitude'][:]
    # Mask out unrealistic values:
    mask = np.logical_or(lats > 90., lons > 90.)
    lons = np.ma.masked_array(lons, mask=mask)
    lats = np.ma.masked_array(lats, mask=mask)
    # Solar zenith angles, scaled to degrees via the dataset attributes
    sunz = h5f['SolarZenith'][:]
    slope = h5f['SolarZenith'].attrs['Slope'][0]
    intercept = h5f['SolarZenith'].attrs['Intercept'][0]
    sunz = sunz * slope + intercept
    # clip near the terminator so the cos(sunz) normalisation below
    # does not blow up
    sunz = np.where(np.greater(sunz, 85.0), 85.0, sunz)
    # Get the calibration information
    # Emissive radiance coefficients:
    emis_offs = h5f['Emissive_Radiance_Offsets'][:]
    emis_scales = h5f['Emissive_Radiance_Scales'][:]
    # Central wave number (unit = cm-1) for the three IR bands
    # It is ordered according to decreasing wave number (increasing wavelength):
    # 3.7 micron, 10.8 micron, 12 micron
    emiss_centroid_wn = h5f.attrs['Emmisive_Centroid_Wave_Number']
    # VIS/NIR calibration stuff:
    # coefficients are stored as interleaved (scale, offset) pairs
    refsb_cal_coeff = h5f.attrs['RefSB_Cal_Coefficients']
    visnir_scales = refsb_cal_coeff[0::2]
    visnir_offs = refsb_cal_coeff[1::2]
    refsb_effective_wl = h5f.attrs['RefSB_Effective_Wavelength']
    # Read the band data:
    for dset in datasets:
        band_data = h5f[dset]
        valid_range = band_data.attrs['valid_range']
        LOGGER.debug("valid-range = " + str(valid_range))
        fillvalue = band_data.attrs['_FillValue']
        band_names = band_data.attrs['band_name'].split(',')
        slope = band_data.attrs['Slope']
        intercept = band_data.attrs['Intercept']
        units = band_data.attrs['units']
        long_name = band_data.attrs['long_name']
        LOGGER.debug('band names = ' + str(band_names))
        for (i, band) in enumerate(band_names):
            if band not in satscene.channels_to_load:
                continue
            LOGGER.debug("Reading channel %s, i=%d", band, i)
            data = band_data[i]
            # mask counts outside the dataset's valid range
            bandmask = np.logical_or(np.less(data, valid_range[0]),
                                     np.greater(data, valid_range[1]))
            if calibrate:
                if dset in ['EV_Emissive']:
                    # counts -> radiance, per scanline (offsets/scales are
                    # per-line arrays, hence the transpose)
                    data = (np.array([emis_offs[:, i]]).transpose() +
                            data * np.array([emis_scales[:, i]]).transpose())
                    # Radiance to Tb conversion.
                    # Pyspectral wants SI units,
                    # but radiance data are in mW/m^2/str/cm^-1 and wavenumbers are in cm^-1
                    # Therefore multply wavenumber by 100 and radiances by
                    # 10^-5
                    data = rad2temp(emiss_centroid_wn[i] * 100., data * 1e-5)
                    LOGGER.debug("IR data calibrated")
                if dset in ['EV_RefSB']:
                    # counts -> sun-normalised reflectance
                    data = (visnir_offs[i] +
                            data * visnir_scales[i]) / np.cos(np.deg2rad(sunz))
            satscene[band] = np.ma.masked_array(data,
                                                mask=bandmask,
                                                copy=False)
    # attach the swath geometry for later resampling
    from pyresample import geometry
    satscene.area = geometry.SwathDefinition(lons=lons, lats=lats)
    h5f.close()
# instrument name -> loader function dispatch table used by load()
CASES = {
    "virr": load_virr,
}
| gpl-3.0 |
bitmovin/bitmovin-python | bitmovin/resources/models/encodings/inputstreams/h264_picture_timing_trimming_input_stream.py | 1 | 1556 | from .abstract_trimming_input_stream import AbstractTrimmingInputStream
class H264PictureTimingTrimmingInputStream(AbstractTrimmingInputStream):
    """Trimming input stream whose in/out points are H.264 picture-timing
    SEI values (``startPicTiming`` / ``endPicTiming``)."""

    def __init__(self, input_stream_id, start_pic_timing=None, end_pic_timing=None, id_=None, custom_data=None,
                 name=None, description=None):
        super().__init__(id_=id_, custom_data=custom_data, name=name, description=description,
                         input_stream_id=input_stream_id)
        self.inputStreamId = input_stream_id
        self.startPicTiming = start_pic_timing
        self.endPicTiming = end_pic_timing

    @classmethod
    def parse_from_json_object(cls, json_object):
        """Build an instance from an API JSON payload, delegating the shared
        trimming fields to the abstract base parser."""
        base = AbstractTrimmingInputStream.parse_from_json_object(
            json_object=json_object)
        return H264PictureTimingTrimmingInputStream(
            input_stream_id=base.inputStreamId,
            start_pic_timing=json_object.get('startPicTiming'),
            end_pic_timing=json_object.get('endPicTiming'),
            id_=base.id,
            custom_data=base.customData,
            name=base.name,
            description=base.description
        )
| unlicense |
HewlettPackard/oneview-redfish-toolkit | oneview_redfish_toolkit/tests/api/test_ethernet_interface.py | 1 | 2896 | # -*- coding: utf-8 -*-
# Copyright (2018) Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
from oneview_redfish_toolkit.api.ethernet_interface import \
EthernetInterface
from oneview_redfish_toolkit.tests.base_test import BaseTest
class TestEthernetInterface(BaseTest):
    """Tests for EthernetInterface class"""
    def setUp(self):
        """Tests preparation"""
        # server profile mockup shared by every test; each test picks one of
        # its "connectionSettings" connections
        with open(
            'oneview_redfish_toolkit/mockups/oneview/ServerProfile.json'
        ) as f:
            self.server_profile = json.load(f)
    def test_serialize_when_it_has_only_one_vlan(self):
        # Tests the serialize function result against known result for
        # a ethernet interface with only one vlan
        with open(
            'oneview_redfish_toolkit/mockups/oneview/'
            'NetworkForEthernetInterface.json'
        ) as f:
            network = json.load(f)
        with open(
            'oneview_redfish_toolkit/mockups/redfish/'
            'EthernetInterfaceWithOnlyOneVlan.json'
        ) as f:
            ethernet_interface_mockup = json.load(f)
        # first connection of the profile maps to the single-VLAN network
        conn_id_1 = self.server_profile["connectionSettings"]["connections"][0]
        ethernet_interface = \
            EthernetInterface.build(self.server_profile, conn_id_1, network)
        result = json.loads(ethernet_interface.serialize())
        self.assertEqualMockup(ethernet_interface_mockup, result)
    def test_serialize_when_it_has_a_list_of_vlans(self):
        # Tests the serialize function result against known result for
        # a ethernet interface with list of vlans
        with open(
            'oneview_redfish_toolkit/mockups/oneview/'
            'NetworkSetForEthernetInterface.json'
        ) as f:
            network_set = json.load(f)
        with open(
            'oneview_redfish_toolkit/mockups/redfish/'
            'EthernetInterfaceWithListOfVlans.json'
        ) as f:
            ethernet_interface_mockup = json.load(f)
        # second connection of the profile maps to the network set
        conn_id_2 = self.server_profile["connectionSettings"]["connections"][1]
        ethernet_interface = \
            EthernetInterface.build(self.server_profile,
                                    conn_id_2,
                                    network_set)
        result = json.loads(ethernet_interface.serialize())
        self.assertEqualMockup(ethernet_interface_mockup, result)
| apache-2.0 |
jernsthausen/datesplitter | lib/python2.7/site-packages/pip/_vendor/progress/bar.py | 404 | 2707 | # -*- coding: utf-8 -*-
# Copyright (c) 2012 Giorgos Verigakis <verigak@gmail.com>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from __future__ import unicode_literals
from . import Progress
from .helpers import WritelnMixin
class Bar(WritelnMixin, Progress):
    """Classic progress bar: ``message |####    | suffix``."""

    width = 32
    message = ''
    suffix = '%(index)d/%(max)d'
    bar_prefix = ' |'
    bar_suffix = '| '
    empty_fill = ' '
    fill = '#'
    hide_cursor = True

    def update(self):
        # Number of cells already filled; the remainder stays empty.
        done = int(self.width * self.progress)
        remaining = self.width - done
        parts = [
            self.message % self,
            self.bar_prefix,
            self.fill * done,
            self.empty_fill * remaining,
            self.bar_suffix,
            self.suffix % self,
        ]
        self.writeln(''.join(parts))
class ChargingBar(Bar):
    # Bar variant without pipe delimiters; the suffix shows a percentage
    # and the cells use dot/full-block glyphs.
    suffix = '%(percent)d%%'
    bar_prefix = ' '
    bar_suffix = ' '
    empty_fill = '∙'
    fill = '█'
class FillingSquaresBar(ChargingBar):
    # ChargingBar drawn with empty/filled square glyphs.
    empty_fill = '▢'
    fill = '▣'
class FillingCirclesBar(ChargingBar):
    # ChargingBar drawn with empty/filled circle glyphs.
    empty_fill = '◯'
    fill = '◉'
class IncrementalBar(Bar):
    """Bar whose in-progress cell is drawn with sub-cell precision
    using a tuple of partial-fill glyphs (phases)."""

    phases = (' ', '▏', '▎', '▍', '▌', '▋', '▊', '▉', '█')

    def update(self):
        nphases = len(self.phases)
        # Progress measured in sub-cell units: nphases units per cell.
        expanded = int(nphases * self.width * self.progress)
        filled = int(self.width * self.progress)
        # Phase index for the partially-filled cell, 0 meaning "none".
        phase = expanded - filled * nphases
        partial = self.phases[phase] if phase > 0 else ''
        blanks = max(0, (self.width - filled) - len(partial))
        self.writeln(''.join([
            self.message % self,
            self.bar_prefix,
            self.phases[-1] * filled,
            partial,
            self.empty_fill * blanks,
            self.bar_suffix,
            self.suffix % self,
        ]))
class ShadyBar(IncrementalBar):
    # IncrementalBar drawn with shaded-block glyphs.
    phases = (' ', '░', '▒', '▓', '█')
| mit |
pilliq/mongo-web-shell | webapps/server/crontab.py | 5 | 1707 | # Copyright 2013 10gen Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime, timedelta
from apscheduler.scheduler import Scheduler
from webapps.lib.db import get_db
from webapps.lib.util import get_internal_coll_name
EXPIRE_SESSION_EVERY = 600
EXPIRE_SESSION_DURATION = 1800
def run_scheduler(app):
    """Start the background scheduler that periodically expires sessions.

    :param app: the Flask application; captured in the job callback so
        expire_sessions can push an application context.
    """
    scheduler = Scheduler()
    # APScheduler calls the job with no arguments, so bind the app here.
    expire_wrapper = lambda: expire_sessions(app)
    scheduler.add_interval_job(expire_wrapper, seconds=EXPIRE_SESSION_EVERY)
    scheduler.start()
    # Parenthesized so the statement behaves identically on Python 2 and
    # parses on Python 3 (the original bare `print` is Py2-only syntax).
    print("APScheduler started successfully")
def expire_sessions(app):
    """Drop client sessions older than EXPIRE_SESSION_DURATION seconds,
    along with their per-session collections."""
    with app.app_context():
        db = get_db(MWSExceptions=False)
        exp = datetime.now() - timedelta(seconds=EXPIRE_SESSION_DURATION)
        # Every session whose timestamp predates the cutoff is stale.
        for sess in db.clients.find({'timestamp': {'$lt': exp}}):
            db.clients.remove(sess)
            # Todo: Only remove collections if no one else is using this res_id
            res_id = sess['res_id']
            for coll in sess['collections']:
                db.drop_collection(get_internal_coll_name(res_id, coll))
        app.logger.info('Timed out expired sessions dead before %s' % exp)
| apache-2.0 |
pawelmhm/AutobahnPython | examples/twisted/websocket/echo_variants/client_with_params.py | 18 | 1921 | ###############################################################################
##
## Copyright (C) 2011-2013 Tavendo GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
import sys
from twisted.internet import reactor
from autobahn.twisted.websocket import WebSocketClientFactory, \
WebSocketClientProtocol, \
connectWS
class EchoClientProtocol(WebSocketClientProtocol):
    """Sends a configurable message on open, then re-sends it one second
    after every text reply from the server."""

    def __init__(self, message):
        # Outgoing payloads are UTF-8 encoded bytes.
        self.message = message.encode('utf8')

    def sendHello(self):
        self.sendMessage(self.message)

    def onOpen(self):
        self.sendHello()

    def onMessage(self, payload, isBinary):
        # Binary frames are ignored; only text replies trigger an echo.
        if isBinary:
            return
        print("Text message received: {}".format(payload.decode('utf8')))
        reactor.callLater(1, self.sendHello)
class EchoClientFactory(WebSocketClientFactory):
    """Factory that hands its configured message to each new protocol
    instance and links the protocol back to the factory."""

    def buildProtocol(self, addr):
        protocol = EchoClientProtocol(self.message)
        protocol.factory = self
        return protocol
if __name__ == '__main__':

    if len(sys.argv) < 2:
        # Parenthesized print is valid on both Python 2 and 3; the rest of
        # this file already uses the function-call form (see onMessage).
        print("Need the WebSocket server address, i.e. ws://localhost:9000")
        sys.exit(1)

    factory = EchoClientFactory(sys.argv[1])
    # A second CLI argument optionally overrides the echoed payload.
    factory.message = sys.argv[2] if len(sys.argv) > 2 else "My configurable message"
    connectWS(factory)

    reactor.run()
| apache-2.0 |
skbly7/serc | website/wiki/plugins/images/settings.py | 16 | 1323 | from __future__ import absolute_import
from __future__ import unicode_literals
from django import VERSION
from django.conf import settings as django_settings
from wiki.conf import settings as wiki_settings
# Plugin slug used to register/identify the images plugin.
SLUG = 'images'

# This is deprecated in django 1.7+
APP_LABEL = 'images' if VERSION < (1, 7) else None

# Where to store images (relative to the storage backend's root).
IMAGE_PATH = getattr(django_settings, 'WIKI_IMAGES_PATH', "wiki/images/%aid/")

# Storage backend to use, default is to use the same as the rest of the
# wiki, which is set in WIKI_STORAGE_BACKEND, but you can override it
# with WIKI_IMAGES_STORAGE_BACKEND
STORAGE_BACKEND = getattr(
    django_settings,
    'WIKI_IMAGES_STORAGE_BACKEND',
    wiki_settings.STORAGE_BACKEND)

# Should the upload path be obscurified? If so, a random hash will be added
# to the path such that someone can not guess the location of files (useful
# if you have restricted permissions but the files are still located within
# the web server's served tree).
IMAGE_PATH_OBSCURIFY = getattr(
    django_settings,
    'WIKI_IMAGES_PATH_OBSCURIFY',
    True)

# Allow anonymous users upload access (not nice on an open network)
# WIKI_IMAGES_ANONYMOUS can override this, otherwise the default
# in wiki.conf.settings is used.
ANONYMOUS = getattr(
    django_settings,
    'WIKI_IMAGES_ANONYMOUS',
    wiki_settings.ANONYMOUS_UPLOAD)
| mit |
throwable-one/lettuce | tests/integration/lib/Django-1.2.5/django/core/cache/backends/locmem.py | 46 | 4062 | "Thread-safe in-memory cache backend."
import time
try:
import cPickle as pickle
except ImportError:
import pickle
from django.core.cache.backends.base import BaseCache
from django.utils.synch import RWLock
class CacheClass(BaseCache):
    """Thread-safe, process-local in-memory cache backend.

    Values are stored pickled so that mutable objects handed to ``set()``
    cannot be mutated behind the cache's back.  All access is serialized
    through a single reader/writer lock (``RWLock``): reads take the reader
    side, any mutation takes the writer side.
    """

    def __init__(self, _, params):
        # The first positional argument (the backend location string) is
        # unused for the in-memory backend.
        BaseCache.__init__(self, params)
        self._cache = {}          # key -> pickled value
        self._expire_info = {}    # key -> absolute expiry timestamp
        max_entries = params.get('max_entries', 300)
        try:
            self._max_entries = int(max_entries)
        except (ValueError, TypeError):
            self._max_entries = 300
        cull_frequency = params.get('cull_frequency', 3)
        try:
            self._cull_frequency = int(cull_frequency)
        except (ValueError, TypeError):
            self._cull_frequency = 3
        self._lock = RWLock()

    def add(self, key, value, timeout=None):
        """Store ``value`` only if ``key`` is absent or already expired.

        Returns True if the value was stored, False otherwise (including
        when the value cannot be pickled).
        """
        self.validate_key(key)
        self._lock.writer_enters()
        try:
            exp = self._expire_info.get(key)
            if exp is None or exp <= time.time():
                try:
                    self._set(key, pickle.dumps(value), timeout)
                    return True
                except pickle.PickleError:
                    # Unpicklable values are silently not cached.
                    pass
            return False
        finally:
            self._lock.writer_leaves()

    def get(self, key, default=None):
        """Return the cached value for ``key`` or ``default``.

        A key found to be expired is deleted on the way out (under the
        writer lock, after the reader lock has been released).
        """
        self.validate_key(key)
        self._lock.reader_enters()
        try:
            exp = self._expire_info.get(key)
            if exp is None:
                return default
            elif exp > time.time():
                try:
                    return pickle.loads(self._cache[key])
                except pickle.PickleError:
                    return default
        finally:
            self._lock.reader_leaves()
        # Only reached when the key exists but has expired: upgrade to the
        # writer lock and remove the stale entry.
        self._lock.writer_enters()
        try:
            try:
                del self._cache[key]
                del self._expire_info[key]
            except KeyError:
                pass
            return default
        finally:
            self._lock.writer_leaves()

    def _set(self, key, value, timeout=None):
        # Caller must hold the writer lock; ``value`` is already pickled.
        if len(self._cache) >= self._max_entries:
            self._cull()
        if timeout is None:
            timeout = self.default_timeout
        self._cache[key] = value
        self._expire_info[key] = time.time() + timeout

    def set(self, key, value, timeout=None):
        """Unconditionally store ``value`` under ``key``."""
        self.validate_key(key)
        self._lock.writer_enters()
        # Python 2.4 doesn't allow combined try-except-finally blocks.
        try:
            try:
                self._set(key, pickle.dumps(value), timeout)
            except pickle.PickleError:
                # Unpicklable values are silently dropped.
                pass
        finally:
            self._lock.writer_leaves()

    def has_key(self, key):
        """Return True if ``key`` exists and has not expired.

        Mirrors ``get()``: an expired key is removed under the writer lock.
        """
        self.validate_key(key)
        self._lock.reader_enters()
        try:
            exp = self._expire_info.get(key)
            if exp is None:
                return False
            elif exp > time.time():
                return True
        finally:
            self._lock.reader_leaves()
        # Key exists but expired: delete it and report absence.
        self._lock.writer_enters()
        try:
            try:
                del self._cache[key]
                del self._expire_info[key]
            except KeyError:
                pass
            return False
        finally:
            self._lock.writer_leaves()

    def _cull(self):
        # Caller must hold the writer lock.  cull_frequency == 0 means
        # "wipe everything"; otherwise evict every Nth key.
        if self._cull_frequency == 0:
            self.clear()
        else:
            doomed = [k for (i, k) in enumerate(self._cache) if i % self._cull_frequency == 0]
            for k in doomed:
                self._delete(k)

    def _delete(self, key):
        # Caller must hold the writer lock; missing keys are ignored.
        try:
            del self._cache[key]
        except KeyError:
            pass
        try:
            del self._expire_info[key]
        except KeyError:
            pass

    def delete(self, key):
        """Remove ``key`` from the cache if present."""
        self.validate_key(key)
        self._lock.writer_enters()
        try:
            self._delete(key)
        finally:
            self._lock.writer_leaves()

    def clear(self):
        # NOTE(review): not taking the writer lock here matches the
        # original code; dict.clear() is atomic under the GIL.
        self._cache.clear()
        self._expire_info.clear()
| gpl-3.0 |
raulanatol/awsebcli | botocore_eb/vendored/requests/models.py | 14 | 23710 | # -*- coding: utf-8 -*-
"""
requests.models
~~~~~~~~~~~~~~~
This module contains the primary objects that power Requests.
"""
import collections
import logging
import datetime
from io import BytesIO, UnsupportedOperation
from .hooks import default_hooks
from .structures import CaseInsensitiveDict
from .auth import HTTPBasicAuth
from .cookies import cookiejar_from_dict, get_cookie_header
from .packages.urllib3.fields import RequestField
from .packages.urllib3.filepost import encode_multipart_formdata
from .packages.urllib3.util import parse_url
from .exceptions import (
HTTPError, RequestException, MissingSchema, InvalidURL,
ChunkedEncodingError)
from .utils import (
guess_filename, get_auth_from_url, requote_uri,
stream_decode_response_unicode, to_key_val_list, parse_header_links,
iter_slices, guess_json_utf, super_len, to_native_string)
from .compat import (
cookielib, urlunparse, urlsplit, urlencode, str, bytes, StringIO,
is_py2, chardet, json, builtin_str, basestring, IncompleteRead)
CONTENT_CHUNK_SIZE = 10 * 1024
ITER_CHUNK_SIZE = 512
log = logging.getLogger(__name__)
class RequestEncodingMixin(object):
    """Mixin providing URL-path and body encoding helpers for requests."""

    @property
    def path_url(self):
        """Build the path URL to use."""

        url = []

        p = urlsplit(self.url)

        path = p.path
        if not path:
            # An empty path component must still be sent as "/".
            path = '/'

        url.append(path)

        query = p.query
        if query:
            url.append('?')
            url.append(query)

        return ''.join(url)

    @staticmethod
    def _encode_params(data):
        """Encode parameters in a piece of data.

        Will successfully encode parameters when passed as a dict or a list of
        2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
        if parameters are supplied as a dict.
        """

        if isinstance(data, (str, bytes)):
            # Pre-encoded (or raw) data passes through untouched.
            return data
        elif hasattr(data, 'read'):
            # File-like objects are streamed as-is.
            return data
        elif hasattr(data, '__iter__'):
            result = []
            for k, vs in to_key_val_list(data):
                if isinstance(vs, basestring) or not hasattr(vs, '__iter__'):
                    # Normalize a scalar value to a one-element list so a
                    # key can map to multiple values uniformly.
                    vs = [vs]
                for v in vs:
                    if v is not None:
                        result.append(
                            (k.encode('utf-8') if isinstance(k, str) else k,
                             v.encode('utf-8') if isinstance(v, str) else v))
            return urlencode(result, doseq=True)
        else:
            return data

    @staticmethod
    def _encode_files(files, data):
        """Build the body for a multipart/form-data request.

        Will successfully encode files when passed as a dict or a list of
        2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
        if parameters are supplied as a dict.

        :returns: a ``(body, content_type)`` tuple.
        """
        if (not files):
            raise ValueError("Files must be provided.")
        elif isinstance(data, basestring):
            raise ValueError("Data must not be a string.")

        new_fields = []
        fields = to_key_val_list(data or {})
        files = to_key_val_list(files or {})

        # Plain form fields come first, then the file parts.
        for field, val in fields:
            if isinstance(val, basestring) or not hasattr(val, '__iter__'):
                val = [val]
            for v in val:
                if v is not None:
                    # Don't call str() on bytestrings: in Py3 it all goes wrong.
                    if not isinstance(v, bytes):
                        v = str(v)

                    new_fields.append(
                        (field.decode('utf-8') if isinstance(field, bytes) else field,
                         v.encode('utf-8') if isinstance(v, str) else v))

        for (k, v) in files:
            # support for explicit filename: a 2/3/4-tuple unpacks to
            # (filename, fileobj[, content_type[, headers]]).
            ft = None
            fh = None
            if isinstance(v, (tuple, list)):
                if len(v) == 2:
                    fn, fp = v
                elif len(v) == 3:
                    fn, fp, ft = v
                else:
                    fn, fp, ft, fh = v
            else:
                fn = guess_filename(v) or k
                fp = v
            if isinstance(fp, str):
                fp = StringIO(fp)
            if isinstance(fp, bytes):
                fp = BytesIO(fp)

            rf = RequestField(name=k, data=fp.read(),
                              filename=fn, headers=fh)
            rf.make_multipart(content_type=ft)
            new_fields.append(rf)

        body, content_type = encode_multipart_formdata(new_fields)

        return body, content_type
class RequestHooksMixin(object):
    """Mixin providing registration/removal of per-event callback hooks.

    Hooks live in ``self.hooks``, a dict mapping event name -> list of
    callables, which the consuming class must initialize (see
    ``default_hooks()``).
    """

    def register_hook(self, event, hook):
        """Properly register a hook.

        ``hook`` may be a single callable or an iterable of callables;
        non-callables in an iterable are silently skipped.

        :raises ValueError: if ``event`` is not a known hook event.
        """
        if event not in self.hooks:
            raise ValueError('Unsupported event specified, with event name "%s"' % (event))

        # Use the builtin callable() instead of isinstance(hook,
        # collections.Callable): the bare ``collections`` alias was removed
        # in Python 3.10, and callable() is equivalent and works on both
        # Python 2 and 3.
        if callable(hook):
            self.hooks[event].append(hook)
        elif hasattr(hook, '__iter__'):
            self.hooks[event].extend(h for h in hook if callable(h))

    def deregister_hook(self, event, hook):
        """Deregister a previously registered hook.
        Returns True if the hook existed, False if not.
        """

        try:
            self.hooks[event].remove(hook)
            return True
        except ValueError:
            return False
class Request(RequestHooksMixin):
    """A user-created :class:`Request <Request>` object.

    Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is
    sent to the server.

    :param method: HTTP method to use.
    :param url: URL to send.
    :param headers: dictionary of headers to send.
    :param files: dictionary of {filename: fileobject} files to multipart upload.
    :param data: the body to attach the request. If a dictionary is provided, form-encoding will take place.
    :param params: dictionary of URL parameters to append to the URL.
    :param auth: Auth handler or (user, pass) tuple.
    :param cookies: dictionary or CookieJar of cookies to attach to this request.
    :param hooks: dictionary of callback hooks, for internal usage.

    Usage::

      >>> import requests
      >>> req = requests.Request('GET', 'http://httpbin.org/get')
      >>> req.prepare()
      <PreparedRequest [GET]>
    """

    def __init__(self, method=None, url=None, headers=None, files=None,
                 data=None, params=None, auth=None, cookies=None,
                 hooks=None):
        self.method = method
        self.url = url
        # Fall back to empty containers so later attribute access is safe.
        self.headers = {} if headers is None else headers
        self.files = [] if files is None else files
        self.data = [] if data is None else data
        self.params = {} if params is None else params
        self.auth = auth
        self.cookies = cookies

        # Route user-supplied hooks through register_hook so only valid
        # events and callables end up registered.
        self.hooks = default_hooks()
        if hooks:
            for event, hook in list(hooks.items()):
                self.register_hook(event=event, hook=hook)

    def __repr__(self):
        return '<Request [%s]>' % (self.method)

    def prepare(self):
        """Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it."""
        prepared = PreparedRequest()
        prepared.prepare(
            method=self.method,
            url=self.url,
            headers=self.headers,
            files=self.files,
            data=self.data,
            params=self.params,
            auth=self.auth,
            cookies=self.cookies,
            hooks=self.hooks,
        )
        return prepared
class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
    """The fully mutable :class:`PreparedRequest <PreparedRequest>` object,
    containing the exact bytes that will be sent to the server.

    Generated from either a :class:`Request <Request>` object or manually.

    Usage::

      >>> import requests
      >>> req = requests.Request('GET', 'http://httpbin.org/get')
      >>> r = req.prepare()
      <PreparedRequest [GET]>

      >>> s = requests.Session()
      >>> s.send(r)
      <Response [200]>
    """

    def __init__(self):
        #: HTTP verb to send to the server.
        self.method = None
        #: HTTP URL to send the request to.
        self.url = None
        #: dictionary of HTTP headers.
        self.headers = None
        #: request body to send to the server.
        self.body = None
        #: dictionary of callback hooks, for internal usage.
        self.hooks = default_hooks()

    def prepare(self, method=None, url=None, headers=None, files=None,
                data=None, params=None, auth=None, cookies=None, hooks=None):
        """Prepares the entire request with the given parameters.

        The call order below is significant: body must be prepared after
        headers/cookies, and auth after body (see comments).
        """
        self.prepare_method(method)
        self.prepare_url(url, params)
        self.prepare_headers(headers)
        self.prepare_cookies(cookies)
        self.prepare_body(data, files)
        self.prepare_auth(auth, url)
        # Note that prepare_auth must be last to enable authentication schemes
        # such as OAuth to work on a fully prepared request.

        # This MUST go after prepare_auth. Authenticators could add a hook
        self.prepare_hooks(hooks)

    def __repr__(self):
        return '<PreparedRequest [%s]>' % (self.method)

    def copy(self):
        """Return a shallow copy (headers dict is copied, body/hooks shared)."""
        p = PreparedRequest()
        p.method = self.method
        p.url = self.url
        p.headers = self.headers.copy()
        p.body = self.body
        p.hooks = self.hooks
        return p

    def prepare_method(self, method):
        """Prepares the given HTTP method."""
        self.method = method
        if self.method is not None:
            # HTTP method names are conventionally upper-case on the wire.
            self.method = self.method.upper()

    def prepare_url(self, url, params):
        """Prepares the given HTTP URL.

        Handles IDNA host encoding, missing path normalization, byte
        coercion on Python 2 and merging of extra query parameters.
        """
        #: Accept objects that have string representations.
        try:
            url = unicode(url)
        except NameError:
            # We're on Python 3.
            url = str(url)
        except UnicodeDecodeError:
            pass

        # Support for unicode domain names and paths.
        scheme, auth, host, port, path, query, fragment = parse_url(url)

        if not scheme:
            raise MissingSchema("Invalid URL %r: No schema supplied" % url)

        if not host:
            raise InvalidURL("Invalid URL %r: No host supplied" % url)

        # Only want to apply IDNA to the hostname
        try:
            host = host.encode('idna').decode('utf-8')
        except UnicodeError:
            raise InvalidURL('URL has an invalid label.')

        # Carefully reconstruct the network location
        netloc = auth or ''
        if netloc:
            netloc += '@'
        netloc += host
        if port:
            netloc += ':' + str(port)

        # Bare domains aren't valid URLs.
        if not path:
            path = '/'

        if is_py2:
            # On Python 2 urlunparse mixes str/unicode badly, so coerce
            # every component to UTF-8 bytes first.
            if isinstance(scheme, str):
                scheme = scheme.encode('utf-8')
            if isinstance(netloc, str):
                netloc = netloc.encode('utf-8')
            if isinstance(path, str):
                path = path.encode('utf-8')
            if isinstance(query, str):
                query = query.encode('utf-8')
            if isinstance(fragment, str):
                fragment = fragment.encode('utf-8')

        enc_params = self._encode_params(params)
        if enc_params:
            # Append extra params to any query string already in the URL.
            if query:
                query = '%s&%s' % (query, enc_params)
            else:
                query = enc_params

        url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))
        self.url = url

    def prepare_headers(self, headers):
        """Prepares the given HTTP headers."""

        if headers:
            self.headers = CaseInsensitiveDict((to_native_string(name), value) for name, value in headers.items())
        else:
            self.headers = CaseInsensitiveDict()

    def prepare_body(self, data, files):
        """Prepares the given HTTP body data.

        Selects between a streamed (chunked or Content-Length) body, a
        multipart upload and a form/raw-encoded body, and sets the
        matching headers.
        """

        # Check if file, fo, generator, iterator.
        # If not, run through normal process.

        # Nottin' on you.
        body = None
        content_type = None
        length = None

        # A non-container iterable (generator/iterator) means streaming.
        is_stream = all([
            hasattr(data, '__iter__'),
            not isinstance(data, basestring),
            not isinstance(data, list),
            not isinstance(data, dict)
        ])

        try:
            length = super_len(data)
        except (TypeError, AttributeError, UnsupportedOperation):
            length = None

        if is_stream:
            body = data

            if files:
                raise NotImplementedError('Streamed bodies and files are mutually exclusive.')

            if length is not None:
                self.headers['Content-Length'] = str(length)
            else:
                # Unknown length forces chunked transfer encoding.
                self.headers['Transfer-Encoding'] = 'chunked'
        else:
            # Multi-part file uploads.
            if files:
                (body, content_type) = self._encode_files(files, data)
            else:
                if data:
                    body = self._encode_params(data)
                    if isinstance(data, str) or isinstance(data, builtin_str) or hasattr(data, 'read'):
                        # Raw string/file body: don't guess a content type.
                        content_type = None
                    else:
                        content_type = 'application/x-www-form-urlencoded'

            self.prepare_content_length(body)

            # Add content-type if it wasn't explicitly provided.
            if (content_type) and (not 'content-type' in self.headers):
                self.headers['Content-Type'] = content_type

        self.body = body

    def prepare_content_length(self, body):
        """Set the Content-Length header from the body's length."""
        if hasattr(body, 'seek') and hasattr(body, 'tell'):
            # Seekable file-like body: measure by seeking to the end and
            # rewinding to the start.
            body.seek(0, 2)
            self.headers['Content-Length'] = str(body.tell())
            body.seek(0, 0)
        elif body is not None:
            l = super_len(body)
            if l:
                self.headers['Content-Length'] = str(l)
        elif self.method not in ('GET', 'HEAD'):
            # Bodyless non-GET/HEAD requests advertise an explicit zero.
            self.headers['Content-Length'] = '0'

    def prepare_auth(self, auth, url=''):
        """Prepares the given HTTP auth data."""

        # If no Auth is explicitly provided, extract it from the URL first.
        if auth is None:
            url_auth = get_auth_from_url(self.url)
            auth = url_auth if any(url_auth) else None

        if auth:
            if isinstance(auth, tuple) and len(auth) == 2:
                # special-case basic HTTP auth
                auth = HTTPBasicAuth(*auth)

            # Allow auth to make its changes.
            r = auth(self)

            # Update self to reflect the auth changes.
            self.__dict__.update(r.__dict__)

            # Recompute Content-Length
            self.prepare_content_length(self.body)

    def prepare_cookies(self, cookies):
        """Prepares the given HTTP cookie data."""

        if isinstance(cookies, cookielib.CookieJar):
            cookies = cookies
        else:
            cookies = cookiejar_from_dict(cookies)

        # Only set the header if the caller hasn't supplied one already.
        if 'cookie' not in self.headers:
            cookie_header = get_cookie_header(cookies, self)
            if cookie_header is not None:
                self.headers['Cookie'] = cookie_header

    def prepare_hooks(self, hooks):
        """Prepares the given hooks."""
        for event in hooks:
            self.register_hook(event, hooks[event])
class Response(object):
    """The :class:`Response <Response>` object, which contains a
    server's response to an HTTP request.
    """

    def __init__(self):
        super(Response, self).__init__()

        # _content is False until the body has been read; then it holds
        # the bytes (or None on failure).
        self._content = False
        self._content_consumed = False

        #: Integer Code of responded HTTP Status.
        self.status_code = None

        #: Case-insensitive Dictionary of Response Headers.
        #: For example, ``headers['content-encoding']`` will return the
        #: value of a ``'Content-Encoding'`` response header.
        self.headers = CaseInsensitiveDict()

        #: File-like object representation of response (for advanced usage).
        #: Requires that ``stream=True`` on the request.
        # This requirement does not apply for use internally to Requests.
        self.raw = None

        #: Final URL location of Response.
        self.url = None

        #: Encoding to decode with when accessing r.text.
        self.encoding = None

        #: A list of :class:`Response <Response>` objects from
        #: the history of the Request. Any redirect responses will end
        #: up here. The list is sorted from the oldest to the most recent request.
        self.history = []

        #: Textual reason of responded HTTP Status, e.g. "Not Found".
        self.reason = None

        #: A CookieJar of Cookies the server sent back.
        self.cookies = cookiejar_from_dict({})

        #: The amount of time elapsed between sending the request
        #: and the arrival of the response (as a timedelta)
        self.elapsed = datetime.timedelta(0)

    def __repr__(self):
        return '<Response [%s]>' % (self.status_code)

    def __bool__(self):
        """Returns true if :attr:`status_code` is 'OK'."""
        return self.ok

    def __nonzero__(self):
        """Returns true if :attr:`status_code` is 'OK'."""
        # Python 2 spelling of __bool__.
        return self.ok

    def __iter__(self):
        """Allows you to use a response as an iterator."""
        return self.iter_content(128)

    @property
    def ok(self):
        # True unless raise_for_status() would raise (i.e. 4xx/5xx).
        try:
            self.raise_for_status()
        except RequestException:
            return False
        return True

    @property
    def apparent_encoding(self):
        """The apparent encoding, provided by the lovely Charade library
        (Thanks, Ian!)."""
        return chardet.detect(self.content)['encoding']

    def iter_content(self, chunk_size=1, decode_unicode=False):
        """Iterates over the response data.  When stream=True is set on the
        request, this avoids reading the content at once into memory for
        large responses.  The chunk size is the number of bytes it should
        read into memory.  This is not necessarily the length of each item
        returned as decoding can take place.
        """
        if self._content_consumed:
            # simulate reading small chunks of the content
            return iter_slices(self._content, chunk_size)

        def generate():
            try:
                # Special case for urllib3.
                try:
                    for chunk in self.raw.stream(chunk_size,
                                                 decode_content=True):
                        yield chunk
                except IncompleteRead as e:
                    raise ChunkedEncodingError(e)
            except AttributeError:
                # Standard file-like object.
                while True:
                    chunk = self.raw.read(chunk_size)
                    if not chunk:
                        break
                    yield chunk

            self._content_consumed = True

        gen = generate()

        if decode_unicode:
            gen = stream_decode_response_unicode(gen, self)

        return gen

    def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=None):
        """Iterates over the response data, one line at a time.  When
        stream=True is set on the request, this avoids reading the
        content at once into memory for large responses.
        """

        pending = None

        for chunk in self.iter_content(chunk_size=chunk_size,
                                       decode_unicode=decode_unicode):

            if pending is not None:
                chunk = pending + chunk
            lines = chunk.splitlines()

            # If the chunk did not end at a line boundary, hold the last
            # partial line over to be joined with the next chunk.
            if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:
                pending = lines.pop()
            else:
                pending = None

            for line in lines:
                yield line

        if pending is not None:
            yield pending

    @property
    def content(self):
        """Content of the response, in bytes."""

        if self._content is False:
            # Read the contents.
            try:
                if self._content_consumed:
                    raise RuntimeError(
                        'The content for this response was already consumed')

                if self.status_code == 0:
                    self._content = None
                else:
                    self._content = bytes().join(self.iter_content(CONTENT_CHUNK_SIZE)) or bytes()

            except AttributeError:
                self._content = None

        self._content_consumed = True
        # don't need to release the connection; that's been handled by urllib3
        # since we exhausted the data.
        return self._content

    @property
    def text(self):
        """Content of the response, in unicode.

        if Response.encoding is None and chardet module is available, encoding
        will be guessed.
        """

        # Try charset from content-type
        content = None
        encoding = self.encoding

        if not self.content:
            return str('')

        # Fallback to auto-detected encoding.
        if self.encoding is None:
            encoding = self.apparent_encoding

        # Decode unicode from given encoding.
        try:
            content = str(self.content, encoding, errors='replace')
        except (LookupError, TypeError):
            # A LookupError is raised if the encoding was not found which could
            # indicate a misspelling or similar mistake.
            #
            # A TypeError can be raised if encoding is None
            #
            # So we try blindly encoding.
            content = str(self.content, errors='replace')

        return content

    def json(self, **kwargs):
        """Returns the json-encoded content of a response, if any.

        :param \*\*kwargs: Optional arguments that ``json.loads`` takes.
        """

        if not self.encoding and len(self.content) > 3:
            # No encoding set. JSON RFC 4627 section 3 states we should expect
            # UTF-8, -16 or -32. Detect which one to use; If the detection or
            # decoding fails, fall back to `self.text` (using chardet to make
            # a best guess).
            encoding = guess_json_utf(self.content)
            if encoding is not None:
                return json.loads(self.content.decode(encoding), **kwargs)
        return json.loads(self.text, **kwargs)

    @property
    def links(self):
        """Returns the parsed header links of the response, if any."""

        header = self.headers.get('link')

        # l = MultiDict()
        l = {}

        if header:
            links = parse_header_links(header)

            for link in links:
                # Index by 'rel' when present, falling back to the URL.
                key = link.get('rel') or link.get('url')
                l[key] = link

        return l

    def raise_for_status(self):
        """Raises stored :class:`HTTPError`, if one occurred."""

        http_error_msg = ''

        if 400 <= self.status_code < 500:
            http_error_msg = '%s Client Error: %s' % (self.status_code, self.reason)

        elif 500 <= self.status_code < 600:
            http_error_msg = '%s Server Error: %s' % (self.status_code, self.reason)

        if http_error_msg:
            raise HTTPError(http_error_msg, response=self)

    def close(self):
        """Closes the underlying file descriptor and releases the connection
        back to the pool.

        *Note: Should not normally need to be called explicitly.*
        """
        return self.raw.release_conn()
| apache-2.0 |
r0balo/pelisalacarta | python/main-classic/channels/gaypornshare.py | 5 | 5897 | # -*- coding: utf-8 -*-
#------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# Canal para gaypornshare.com
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
#------------------------------------------------------------
import os
import re
from core import config
from core import logger
from core import scrapertools
from core import servertools
from core.item import Item
DEBUG = config.get_setting("debug")
IMAGES_PATH = os.path.join( config.get_runtime_path(), 'resources' , 'images' , 'gaypornshare' )
def strip_tags(value):
    """Return *value* with every HTML/XML tag (``<...>``) removed."""
    # Non-greedy body so consecutive tags are stripped individually.
    tag_pattern = re.compile(r'<[^>]*?>')
    return tag_pattern.sub('', value)
def mainlist(item):
    """Build the channel root menu: full movie list plus a search entry."""
    logger.info("[gaypornshare.py] mainlist")
    itemlist = [
        Item(channel=item.channel, action="lista", title="Todas las Películas", url="http://gaypornshare.org/page/1/", thumbnail="http://t1.pixhost.org/thumbs/3282/12031567_a152063_xlb.jpg"),
        Item(channel=item.channel, title="Buscar", action="search"),
    ]
    return itemlist
def lista(item):
    """List the movies found on one index page, appending pagination and
    a back-to-start entry."""
    logger.info("[gaypornshare.py] lista")
    itemlist = []

    # Download the page
    data = scrapertools.downloadpageGzip(item.url)
    #logger.info(data)

    # Extract the entries (folders)
    #<h2 class="posttitle"><a href='http://gaypornshare.org/workshop-bears/' class='entry-title' rel='bookmark' title='Workshop Bears' >Workshop Bears</a></h2>
    patronvideos ="<a href='([^']+)' class='entry-title'.*?>([^']+)</a></h2>"+'.*?<img src="([^"]+)'
    matches = re.compile(patronvideos,re.DOTALL).findall(data)

    for match in matches:
        scrapedtitle = match[1]
        # Replace mis-encoded en-dash / right-quote byte sequences that
        # show up in scraped titles with plain ASCII equivalents.
        scrapedtitle = scrapedtitle.replace("–","-")
        scrapedtitle = scrapedtitle.replace("’","'")
        scrapedurl = match[0]
        scrapedthumbnail = match[2]
        imagen = ""
        scrapedplot = match[0]
        tipo = match[1]
        if (DEBUG): logger.info("title=["+scrapedtitle+"], url=["+scrapedurl+"], thumbnail=["+scrapedthumbnail+"]")
        scrapedplot=strip_tags(scrapedplot)
        itemlist.append( Item(channel=item.channel, action="detail", title=scrapedtitle , url=scrapedurl , thumbnail=scrapedthumbnail , plot=scrapedplot , folder=True) )

    # Extract the next-page marker
    #<span class='current'>8</span><a class="page larger" href="http://gaypornshare.org/page/9/">9</a>
    patronvideos ="<span class='current'.*?</span>"+'<a.*?href="([^"]+)".*?>([^<]+)</a>'
    matches2 = re.compile(patronvideos,re.DOTALL).findall(data)

    for match2 in matches2:
        scrapedtitle = ">> página "+match2[1]
        scrapedurl = match2[0]
        scrapedthumbnail = ""
        imagen = ""
        scrapedplot = match2[0]
        if (DEBUG): logger.info("title=["+scrapedtitle+"], url=["+scrapedurl+"], thumbnail=["+scrapedthumbnail+"]")
        itemlist.append( Item(channel=item.channel, action="lista", title=scrapedtitle , url=scrapedurl , thumbnail=scrapedthumbnail , plot=scrapedplot , folder=True) )

    itemlist.append( Item(channel=item.channel, action="mainlist", title="<< volver al inicio", folder=True) )

    return itemlist
def search(item,texto):
    # Searches the site for `texto` and builds the result listing.
    # Mirrors lista(): one "detail" entry per result, plus pagination and a
    # "back to start" entry.
    logger.info("[gaypornshare.py] search")
    itemlist = []
    # Download the results page
    data=scrapertools.downloadpageGzip("http://gaypornshare.org/?s="+texto)
    # Extract the entries (folders); sample markup:
    #<h2 class="posttitle"><a href='http://gaypornshare.org/workshop-bears/' class='entry-title' rel='bookmark' title='Workshop Bears' >Workshop Bears</a></h2>
    patronvideos ="<a href='([^']+)' class='entry-title'.*?>([^']+)</a></h2>"+'.*?<img src="([^"]+)'
    matches = re.compile(patronvideos,re.DOTALL).findall(data)
    for match in matches:
        scrapedtitle = match[1]
        # Normalise typographic punctuation in titles
        scrapedtitle = scrapedtitle.replace("–","-")
        scrapedtitle = scrapedtitle.replace("’","'")
        scrapedurl = match[0]
        scrapedthumbnail = match[2]
        imagen = ""
        scrapedplot = match[0]
        tipo = match[1]
        if (DEBUG): logger.info("title=["+scrapedtitle+"], url=["+scrapedurl+"], thumbnail=["+scrapedthumbnail+"]")
        scrapedplot=strip_tags(scrapedplot)
        itemlist.append( Item(channel=item.channel, action="detail", title=scrapedtitle , url=scrapedurl , thumbnail=scrapedthumbnail , plot=scrapedplot , folder=True) )
    # Extract the "next page" marker; sample markup:
    #<span class='current'>8</span><a class="page larger" href="http://gaypornshare.org/page/9/">9</a>
    patronvideos ="<span class='current'.*?</span>"+'<a.*?href="([^"]+)".*?>([^<]+)</a>'
    matches2 = re.compile(patronvideos,re.DOTALL).findall(data)
    for match2 in matches2:
        # Fix: added the missing space after "página" so the pagination
        # label matches the one produced by lista().
        scrapedtitle = ">> página "+match2[1]
        scrapedurl = match2[0]
        scrapedthumbnail = ""
        imagen = ""
        scrapedplot = match2[0]
        if (DEBUG): logger.info("title=["+scrapedtitle+"], url=["+scrapedurl+"], thumbnail=["+scrapedthumbnail+"]")
        itemlist.append( Item(channel=item.channel, action="lista", title=scrapedtitle , url=scrapedurl , thumbnail=scrapedthumbnail , plot=scrapedplot , folder=True) )
    itemlist.append( Item(channel=item.channel, action="mainlist", title="<< volver al inicio", folder=True) )
    return itemlist
def detail(item):
    # Resolves playable video links found on a movie's detail page and
    # returns one "play" item per recognised hoster link.
    logger.info("[gaypornshare.py] detail")
    # Fetch the page and let servertools locate any known video hosters.
    data = scrapertools.downloadpageGzip(item.url)
    found = servertools.find_video_items(data=data)
    itemlist = []
    for video in found:
        itemlist.append(Item(channel=item.channel,
                             action="play",
                             server=video.server,
                             title=item.title + video.title,
                             url=video.url,
                             thumbnail=item.thumbnail,
                             plot=item.plot,
                             folder=False))
    return itemlist
| gpl-3.0 |
tlakshman26/cinder-new-branch | cinder/api/views/scheduler_stats.py | 33 | 1719 | # Copyright (C) 2014 eBay Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from cinder.api import common
class ViewBuilder(common.ViewBuilder):
    """Model scheduler-stats API responses as a python dictionary."""

    _collection_name = "scheduler-stats"

    def __init__(self):
        """Initialize view builder."""
        super(ViewBuilder, self).__init__()

    def summary(self, request, pool):
        """Summary view of a single pool (name only).

        Fix: the original docstring was a copy-paste of detail()'s
        ("Detailed view of a single pool.").
        """
        return {
            'pool': {
                'name': pool.get('name'),
            }
        }

    def detail(self, request, pool):
        """Detailed view of a single pool (name and capabilities)."""
        return {
            'pool': {
                'name': pool.get('name'),
                'capabilities': pool.get('capabilities'),
            }
        }

    def pools(self, request, pools, detail):
        """View of a list of pools seen by the scheduler.

        :param detail: when true, render each pool with the detailed view,
                       otherwise with the summary view.
        """
        if detail:
            plist = [self.detail(request, pool)['pool'] for pool in pools]
        else:
            plist = [self.summary(request, pool)['pool'] for pool in pools]
        return dict(pools=plist)
| apache-2.0 |
sshnaidm/ru | script.video.F4mProxy/lib/flvlib/scripts/index_flv.py | 98 | 13650 | import os
import sys
import shutil
import logging
import tempfile
from optparse import OptionParser
from flvlib import __versionstr__
from flvlib.constants import TAG_TYPE_AUDIO, TAG_TYPE_VIDEO, TAG_TYPE_SCRIPT
from flvlib.constants import FRAME_TYPE_KEYFRAME
from flvlib.astypes import MalformedFLV, FLVObject
from flvlib.tags import FLV, EndOfFile, AudioTag, VideoTag, ScriptTag
from flvlib.tags import create_script_tag, create_flv_header
from flvlib.helpers import force_remove
log = logging.getLogger('flvlib.index-flv')
class IndexingAudioTag(AudioTag):
    """AudioTag that records audio seekpoint info on the parent FLV.

    For audio-only files, every SEEKPOINT_DENSITY-th tag's offset and
    timestamp are stored so the metadata can expose a pseudo "keyframes"
    seek table (see the comment in IndexingFLV.__init__).

    Fix: removed the redundant __init__ that only forwarded to
    AudioTag.__init__ with the same arguments.
    """
    SEEKPOINT_DENSITY = 10

    def parse(self):
        parent = self.parent_flv
        AudioTag.parse(self)
        # Remember where media content starts (used when rewriting the file).
        if not parent.first_media_tag_offset:
            parent.first_media_tag_offset = self.offset
        # If the FLV has video, we're done. No need to store audio seekpoint
        # information anymore.
        if not parent.no_video:
            return
        # We haven't seen any video tag yet. Store every SEEKPOINT_DENSITY tag
        # offset and timestamp (timestamps are ms; stored as seconds).
        parent.audio_tag_number += 1
        if (parent.audio_tag_number % self.SEEKPOINT_DENSITY == 0):
            parent.audio_seekpoints.filepositions.append(self.offset)
            parent.audio_seekpoints.times.append(self.timestamp / 1000.0)
class IndexingVideoTag(VideoTag):
    # VideoTag that records keyframe offsets/timestamps on the parent FLV.
    def parse(self):
        parent = self.parent_flv
        VideoTag.parse(self)
        # Seeing any video tag switches the parent out of audio-only mode.
        parent.no_video = False
        if not parent.first_media_tag_offset:
            parent.first_media_tag_offset = self.offset
        # Only keyframes are seekable; store offset and timestamp (seconds).
        if self.frame_type == FRAME_TYPE_KEYFRAME:
            parent.keyframes.filepositions.append(self.offset)
            parent.keyframes.times.append(self.timestamp / 1000.0)
class IndexingScriptTag(ScriptTag):
    # ScriptTag that captures the onMetaData payload and its byte range.
    def parse(self):
        parent = self.parent_flv
        ScriptTag.parse(self)
        if self.name == 'onMetaData':
            parent.metadata = self.variable
            # Byte range of the existing metadata tag; used later to compute
            # by how much the rewritten metadata grows the file.
            parent.metadata_tag_start = self.offset
            parent.metadata_tag_end = self.f.tell()
# Maps FLV tag type constants to the indexing tag subclasses above.
tag_to_class = {
    TAG_TYPE_AUDIO: IndexingAudioTag,
    TAG_TYPE_VIDEO: IndexingVideoTag,
    TAG_TYPE_SCRIPT: IndexingScriptTag
}
class IndexingFLV(FLV):
    # FLV reader that accumulates, while tags are parsed, everything the
    # indexer needs: keyframe/seekpoint tables, the existing onMetaData
    # tag's byte range, and the offset of the first media tag.
    def __init__(self, f):
        FLV.__init__(self, f)
        self.metadata = None
        self.keyframes = FLVObject()
        self.keyframes.filepositions = []
        self.keyframes.times = []
        self.no_video = True
        # If the FLV file has no video, there are no keyframes. We want to put
        # some info in the metadata anyway -- Flash players use keyframe
        # information as a seek table. In audio-only FLV files you can usually
        # seek to the beginning of any tag (this is not entirely true for AAC).
        # Most players still work if you just provide "keyframe" info that's
        # really a table of every Nth audio tag, even with AAC.
        # Because of that, until we see a video tag we make every Nth
        # IndexingAudioTag store its offset and timestamp.
        self.audio_tag_number = 0
        self.audio_seekpoints = FLVObject()
        self.audio_seekpoints.filepositions = []
        self.audio_seekpoints.times = []
        self.metadata_tag_start = None
        self.metadata_tag_end = None
        self.first_media_tag_offset = None

    def tag_type_to_class(self, tag_type):
        # Route each tag type to its indexing subclass; an unknown type
        # means the file is corrupt.
        try:
            return tag_to_class[tag_type]
        except KeyError:
            raise MalformedFLV("Invalid tag type: %d", tag_type)
def filepositions_difference(metadata, original_metadata_size):
    """Serialize *metadata* as an onMetaData tag and return the pair
    (payload, growth of the metadata tag in bytes)."""
    # Render the tag once so we know its exact serialized size.
    test_payload = create_script_tag('onMetaData', metadata)
    return test_payload, len(test_payload) - original_metadata_size
def retimestamp_and_index_file(inpath, outpath=None, retimestamp=None):
    """Optionally retimestamp *inpath*, then index it.

    *retimestamp* is None (no retimestamping), 'inplace' (rewrite the input
    file directly) or 'atomic' (retimestamp into a temporary file first so
    the input is never left half-written).  Returns True on success.
    """
    # no retimestamping needed
    if retimestamp is None:
        return index_file(inpath, outpath)
    # retimestamp the input in place and index
    elif retimestamp == 'inplace':
        from flvlib.scripts.retimestamp_flv import retimestamp_file_inplace
        log.debug("Retimestamping file `%s' in place", inpath)
        # retimestamp the file inplace
        if not retimestamp_file_inplace(inpath):
            log.error("Failed to retimestamp `%s' in place", inpath)
            return False
        return index_file(inpath, outpath)
    # retimestamp the input into a temporary file
    elif retimestamp == 'atomic':
        from flvlib.scripts.retimestamp_flv import retimestamp_file_atomically
        log.debug("Retimestamping file `%s' atomically", inpath)
        try:
            fd, temppath = tempfile.mkstemp()
            os.close(fd)
            # preserve the permission bits
            shutil.copymode(inpath, temppath)
        except EnvironmentError, (errno, strerror):
            log.error("Failed to create temporary file: %s", strerror)
            return False
        if not retimestamp_file_atomically(inpath, temppath):
            log.error("Failed to retimestamp `%s' atomically", inpath)
            # remove the temporary files
            force_remove(temppath)
            return False
        # index the temporary file
        if not index_file(temppath, outpath):
            force_remove(temppath)
            return False
        if not outpath:
            # If we were not writing directly to the output file
            # we need to overwrite the original
            try:
                shutil.move(temppath, inpath)
            except EnvironmentError, (errno, strerror):
                log.error("Failed to overwrite the original file with the "
                          "retimestamped and indexed version: %s", strerror)
                return False
        else:
            # if we were writing directly to the output file we need to remove
            # the retimestamped temporary file
            force_remove(temppath)
        return True
def index_file(inpath, outpath=None):
    """Index the FLV file *inpath*.

    Scans the whole file, then rewrites the onMetaData tag with the
    duration, a keyframe seek table (or audio seekpoints for audio-only
    files) and a metadatacreator marker.  Writes to *outpath* when given,
    otherwise overwrites *inpath* via a temporary file.  Returns True on
    success, False on any failure (which is logged).
    """
    out_text = (outpath and ("into file `%s'" % outpath)) or "and overwriting"
    log.debug("Indexing file `%s' %s", inpath, out_text)
    try:
        f = open(inpath, 'rb')
    except IOError, (errno, strerror):
        log.error("Failed to open `%s': %s", inpath, strerror)
        return False
    flv = IndexingFLV(f)
    tag_iterator = flv.iter_tags()
    last_tag = None
    try:
        while True:
            tag = tag_iterator.next()
            # some buggy software, like gstreamer's flvmux, puts a metadata tag
            # at the end of the file with timestamp 0, and we don't want to
            # base our duration computation on that
            if tag.timestamp != 0:
                last_tag = tag
    except MalformedFLV, e:
        message = e[0] % e[1:]
        log.error("The file `%s' is not a valid FLV file: %s", inpath, message)
        return False
    except EndOfFile:
        log.error("Unexpected end of file on file `%s'", inpath)
        return False
    except StopIteration:
        pass
    if not flv.first_media_tag_offset:
        log.error("The file `%s' does not have any media content", inpath)
        return False
    if not last_tag:
        log.error("The file `%s' does not have any content with a "
                  "non-zero timestamp", inpath)
        return False
    metadata = flv.metadata or {}
    if flv.metadata_tag_start:
        original_metadata_size = flv.metadata_tag_end - flv.metadata_tag_start
    else:
        log.debug("The file `%s' has no metadata", inpath)
        original_metadata_size = 0
    keyframes = flv.keyframes
    if flv.no_video:
        log.info("The file `%s' has no video, using audio seekpoints info",
                 inpath)
        keyframes = flv.audio_seekpoints
    duration = metadata.get('duration')
    if not duration:
        # A duration of 0 is nonsensical, yet some tools put it like that. In
        # that case (or when there is no such field) update the duration value.
        duration = last_tag.timestamp / 1000.0
        metadata['duration'] = duration
    metadata['keyframes'] = keyframes
    metadata['metadatacreator'] = 'flvlib %s' % __versionstr__
    # we're going to write new metadata, so we need to shift the
    # filepositions by the amount of bytes that we're going to add to
    # the metadata tag
    test_payload, difference = filepositions_difference(metadata,
                                                        original_metadata_size)
    if difference:
        new_filepositions = [pos + difference
                             for pos in keyframes.filepositions]
        metadata['keyframes'].filepositions = new_filepositions
        payload = create_script_tag('onMetaData', metadata)
    else:
        log.debug("The file `%s' metadata size did not change.", inpath)
        payload = test_payload
    # Open the destination: either the explicit outpath or a temporary
    # file that will later replace the input.
    if outpath:
        try:
            fo = open(outpath, 'wb')
        except IOError, (errno, strerror):
            log.error("Failed to open `%s': %s", outpath, strerror)
            return False
    else:
        try:
            fd, temppath = tempfile.mkstemp()
            # preserve the permission bits
            shutil.copymode(inpath, temppath)
            fo = os.fdopen(fd, 'wb')
        except EnvironmentError, (errno, strerror):
            log.error("Failed to create temporary file: %s", strerror)
            return False
    log.debug("Creating the output file")
    try:
        fo.write(create_flv_header(has_audio=flv.has_audio,
                                   has_video=flv.has_video))
        fo.write(payload)
        # Copy everything from the first media tag onwards unchanged.
        f.seek(flv.first_media_tag_offset)
        shutil.copyfileobj(f, fo)
    except IOError, (errno, strerror):
        log.error("Failed to create the indexed file: %s", strerror)
        if not outpath:
            # remove the temporary file
            force_remove(temppath)
        return False
    f.close()
    fo.close()
    if not outpath:
        # If we were not writing directly to the output file
        # we need to overwrite the original
        try:
            shutil.move(temppath, inpath)
        except EnvironmentError, (errno, strerror):
            log.error("Failed to overwrite the original file "
                      "with the indexed version: %s", strerror)
            return False
    return True
def process_options():
    """Parse command-line options and validate argument counts.

    Returns (options, args) where args[0] is the program name and file
    paths start at args[1].  Also configures the module logger's level
    from the -v count.
    """
    usage = "%prog [-U] file [outfile|file2 file3 ...]"
    description = ("Finds keyframe timestamps and file offsets "
                   "in FLV files and updates the onMetaData "
                   "script tag with that information. "
                   "With the -U (update) option operates on all parameters, "
                   "overwriting the original file. Without the -U "
                   "option accepts one input and one output file path.")
    version = "%%prog flvlib %s" % __versionstr__
    parser = OptionParser(usage=usage, description=description,
                          version=version)
    parser.add_option("-U", "--update", action="store_true",
                      help=("update mode, overwrites the given files "
                            "instead of writing to outfile"))
    parser.add_option("-r", "--retimestamp", action="store_true",
                      help=("rewrite timestamps in the files before indexing, "
                            "identical to running retimestamp-flv first"))
    parser.add_option("-R", "--retimestamp-inplace", action="store_true",
                      help=("same as -r but avoid creating temporary files at "
                            "the risk of corrupting the input files in case "
                            "of errors"))
    parser.add_option("-v", "--verbose", action="count",
                      default=0, dest="verbosity",
                      help="be more verbose, each -v increases verbosity")
    options, args = parser.parse_args(sys.argv)
    if len(args) < 2:
        parser.error("You have to provide at least one file path")
    if not options.update and len(args) != 3:
        parser.error("You need to provide one infile and one outfile "
                     "when not using the update mode")
    if options.retimestamp and options.retimestamp_inplace:
        parser.error("You cannot provide both -r and -R")
    # Clamp verbosity to the highest supported level and map it to a
    # logging level (0 -> ERROR ... 3 -> DEBUG).
    if options.verbosity > 3:
        options.verbosity = 3
    log.setLevel({0: logging.ERROR, 1: logging.WARNING,
                  2: logging.INFO, 3: logging.DEBUG}[options.verbosity])
    return options, args
def index_files():
    """Index every file given on the command line.

    Returns True when all files were processed successfully, False if any
    of them failed.
    """
    options, args = process_options()

    # Translate the mutually-exclusive -r/-R flags into a mode string.
    if options.retimestamp:
        retimestamp_mode = 'atomic'
    elif options.retimestamp_inplace:
        retimestamp_mode = 'inplace'
    else:
        retimestamp_mode = None

    if not options.update:
        # infile -> outfile mode: exactly one input and one output path.
        return retimestamp_and_index_file(args[1], args[2],
                                          retimestamp=retimestamp_mode)

    # Update mode: process every given file, overwriting it in place.
    # Keep going after a failure so each file gets a chance.
    all_ok = True
    for filename in args[1:]:
        ok = retimestamp_and_index_file(filename,
                                        retimestamp=retimestamp_mode)
        if not ok:
            all_ok = False
    return all_ok
def main():
    """Console entry point: run the indexer and translate the outcome
    into a process exit status (0 success, 1 failure, 2 I/O error)."""
    try:
        outcome = index_files()
    except KeyboardInterrupt:
        # give the right exit status, 128 + signal number
        # signal.SIGINT = 2
        sys.exit(128 + 2)
    except EnvironmentError, (errno, strerror):
        try:
            print >>sys.stderr, strerror
        except StandardError:
            pass
        sys.exit(2)
    if outcome:
        sys.exit(0)
    else:
        sys.exit(1)
| gpl-2.0 |
yarikoptic/pystatsmodels | statsmodels/datasets/randhie/data.py | 3 | 2604 | """RAND Health Insurance Experiment Data"""
# Module-level dataset description constants consumed by the statsmodels
# datasets machinery.  Fixes two typos in the documentation strings:
# "amd" -> "and" and "coninsurance" -> "coinsurance".
__docformat__ = 'restructuredtext'

COPYRIGHT = """This is in the public domain."""
TITLE = __doc__
SOURCE = """
The data was collected by the RAND corporation as part of the Health
Insurance Experiment (HIE).

http://www.rand.org/health/projects/hie/

This data was used in::

    Cameron, A.C. and Trivedi, P.K. 2005.  `Microeconometrics: Methods
        and Applications,` Cambridge: New York.

And was obtained from: <http://cameron.econ.ucdavis.edu/mmabook/mmadata.html>

See randhie/src for the original data and description.  The data included
here contains only a subset of the original data.  The data varies slightly
compared to that reported in Cameron and Trivedi.
"""

DESCRSHORT = """The RAND Co. Health Insurance Experiment Data"""

DESCRLONG = """"""

NOTE = """
Number of observations - 20,190
Number of variables - 10
Variable name definitions::

    mdvis   - Number of outpatient visits to an MD
    lncoins - ln(coinsurance + 1), 0 <= coinsurance <= 100
    idp     - 1 if individual deductible plan, 0 otherwise
    lpi     - ln(max(1, annual participation incentive payment))
    fmde    - 0 if idp = 1; ln(max(1, MDE/(0.01 coinsurance))) otherwise
    physlm  - 1 if the person has a physical limitation
    disea   - number of chronic diseases
    hlthg   - 1 if self-rated health is good
    hlthf   - 1 if self-rated health is fair
    hlthp   - 1 if self-rated health is poor
    (Omitted category is excellent self-rated health)
"""
from numpy import recfromtxt, column_stack, array
from statsmodels.datasets import utils as du
from os.path import dirname, abspath
PATH = '%s/%s' % (dirname(abspath(__file__)), 'randhie.csv')
def load():
    """
    Load the RAND HIE data and return a Dataset class.

    Returns
    -------
    Load instance:
        a class of the data with array attributes 'endog' (response
        variable, mdvis) and 'exog' (design)
    """
    data = _get_data()
    return du.process_recarray(data, endog_idx=0, dtype=float)
def load_pandas():
    """
    Load the RAND HIE data and return a Dataset class (pandas-backed).

    Returns
    -------
    Load instance:
        a class of the data with array attributes 'endog' (response
        variable, mdvis) and 'exog' (design)
    """
    from pandas import read_csv
    data = read_csv(PATH)
    return du.process_recarray_pandas(data, endog_idx=0)
def _get_data():
    """Read randhie.csv into a numpy record array (all fields as float).

    Fixes: the original computed an unused ``filepath`` local and never
    closed the file handle it opened; both are corrected here.
    """
    with open(PATH, "rb") as f:
        return recfromtxt(f, delimiter=",", names=True, dtype=float)
| bsd-3-clause |
mstriemer/app-validator | tests/js/test_features.py | 1 | 3970 | from functools import wraps
from nose.tools import eq_
from js_helper import TestCase
def uses_feature(name):
    """Decorator factory for test methods.

    The wrapped test runs normally; afterwards the test case must report
    that the feature *name* was detected (via assert_has_feature).
    """
    def decorator(func):
        @wraps(func)
        def wrapper(self, *args, **kwargs):
            func(self, *args, **kwargs)
            self.assert_has_feature(name)
        return wrapper
    return decorator
class FeatureTester(TestCase):
    # Base class for feature-detection tests.  Subclasses define TESTS, a
    # list of (feature name, JS snippet) pairs; each snippet is run through
    # the validator and the named feature must be detected.
    def test_all(self):
        # Generator test (nose-style): yields one sub-test per TESTS entry.
        def _test(feature, script):
            # Re-initialise validator state for every generated case.
            self.setUp()
            self.setup_err()
            self.run_script(script)
            self.assert_has_feature(feature)
        for feature, script in self.TESTS:
            yield _test, feature, script
class TestWindowFeatures(FeatureTester):
    """Tests for feature APIs in the global context."""
    # (feature name, JS snippet expected to trigger its detection)
    TESTS = [
        ("ACTIVITY", "var x = new MozActivity();"),
        ("LIGHT_EVENTS", "window.ondevicelight = function() {};"),
        ("ARCHIVE", "var x = new ArchiveReader();"),
        ("INDEXEDDB", "var x = new mozIndexedDB();"),
        ("PROXIMITY", "window.ondeviceproximity = function() {};"),
        ("ORIENTATION", "window.ondeviceorientation = function() {};"),
        ("TOUCH", "window.ontouchstart = function() {};"),
        ("AUDIO", "var audio = new Audio(); audio.src = 'asdf';"),
        ("WEBAUDIO", "var x = new mozAudioContext();"),
        ("QUOTA", "var x = new mozPersistentStorage();"),
        ("QUOTA", "var x = new StorageInfo();"),
        ("WEBRTC_MEDIA", "var x = new MediaStream();"),
        ("WEBRTC_DATA", "var x = new DataChannel();"),
        ("WEBRTC_PEER", "var x = new RTCPeerConnection();"),
        ("SPEECH_SYN", "var x = speechSynthesis.foo();"),
        ("SPEECH_REC", "var x = new SpeechRecognition();"),
        ("POINTER_LOCK", "document.documentElement.requestPointerLock()"),
        ("THIRDPARTY_KEYBOARD_SUPPORT", "var x = navigator.mozInputMethod.foo()"),
    ]
class TestNavigatorFeatures(FeatureTester):
    """Tests for feature APIs in the navigator.* object."""
    # (feature name, JS snippet expected to trigger its detection)
    TESTS = [
        ("APPS", "navigator.mozApps.install('foo/bar.webapp');"),
        ("APPS", "navigator.apps.install('foo/bar.webapp');"),
        ("PACKAGED_APPS", "navigator.apps.installPackage('foo/bar.webapp');"),
        ("PAY", "navigator.mozPay.foo();"),
        ("BATTERY", "navigator.battery.foo();"),
        ("BLUETOOTH", "navigator.bluetooth.foo();"),
        ("CONTACTS", "navigator.mozContacts.foo();"),
        ("DEVICE_STORAGE", "navigator.getDeviceStorage();"),
        ("GEOLOCATION", "navigator.getCurrentPosition();"),
        ("IDLE", "navigator.addIdleObserver();"),
        ("NETWORK_INFO", "navigator.connection.foo();"),
        ("NETWORK_STATS", "navigator.networkStats.foo();"),
        ("PUSH", "navigator.mozPush.foo();"),
        ("TIME_CLOCK", "navigator.mozTime.foo();"),
        ("VIBRATE", "navigator.vibrate.foo();"),
        ("FM", "navigator.mozFM();"),
        ("FM", "navigator.mozFMRadio();"),
        ("SMS", "navigator.mozSms.foo();"),
        ("GAMEPAD", "navigator.getGamepad();"),
        ("NOTIFICATION", "navigator.mozNotification.foo();"),
        ("ALARM", "navigator.mozAlarms.foo();"),
        ("TCPSOCKET", "var x = new navigator.mozTCPSocket();"),
        ("TCPSOCKET", "var x = new navigator.mozTCPServerSocket();"),
    ]
class TestInstMembersFeatures(FeatureTester):
    """Tests for feature APIs in instance properties."""
    # (feature name, JS snippet expected to trigger its detection)
    TESTS = [
        ("TOUCH",
         "document.getElementById('foo').ontouchstart = function() {};"),
        ("FULLSCREEN",
         "document.getElementById('foo').requestFullScreen();"),
    ]
class TestGUMFeatures(FeatureTester):
    """Tests for getUserMedia-related feature APIs."""
    # (feature name, JS snippet expected to trigger its detection)
    TESTS = [
        ("CAMERA", "navigator.getUserMedia({video:true})"),
        ("CAMERA", "navigator.getUserMedia({picture:true})"),
        ("MIC", "navigator.getUserMedia({audio:true})"),
        ("SCREEN_CAPTURE",
         "navigator.getUserMedia({video:{mandatory:"
         "{chromeMediaSource:'screen'}}})"),
    ]
| bsd-3-clause |
MCMic/Sick-Beard | lib/imdb/parser/http/characterParser.py | 143 | 8139 | """
parser.http.characterParser module (imdb package).
This module provides the classes (and the instances), used to parse
the IMDb pages on the akas.imdb.com server about a character.
E.g., for "Jesse James" the referred pages would be:
main details: http://www.imdb.com/character/ch0000001/
biography: http://www.imdb.com/character/ch0000001/bio
...and so on...
Copyright 2007-2009 Davide Alberani <da@erlug.linux.it>
2008 H. Turgut Uyar <uyar@tekir.org>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
import re
from utils import Attribute, Extractor, DOMParserBase, build_movie, \
analyze_imdbid
from personParser import DOMHTMLMaindetailsParser
from imdb.Movie import Movie
# Extracts 7-digit person IDs from /name/nmXXXXXXX links (used as roleID).
_personIDs = re.compile(r'/name/nm([0-9]{7})')
class DOMHTMLCharacterMaindetailsParser(DOMHTMLMaindetailsParser):
    """Parser for the "filmography" page of a given character.
    The page should be provided as a string, as taken from
    the akas.imdb.com server.  The final result will be a
    dictionary, with a key for every relevant section.

    Example:
        bparser = DOMHTMLCharacterMaindetailsParser()
        result = bparser.parse(character_biography_html_string)
    """
    _containsObjects = True

    # Shared attribute spec for filmography entries: each <li> yields a
    # Movie built from its link/title/status, plus the person IDs playing
    # the role (pulled out of the roleID href with _personIDs).
    _film_attrs = [Attribute(key=None,
                             multi=True,
                             path={
                                 'link': "./a[1]/@href",
                                 'title': ".//text()",
                                 'status': "./i/a//text()",
                                 'roleID': "./a/@href"
                                 },
                             postprocess=lambda x:
                                 build_movie(x.get('title') or u'',
                                     movieID=analyze_imdbid(x.get('link') or u''),
                                     roleID=_personIDs.findall(x.get('roleID') or u''),
                                     status=x.get('status') or None,
                                     _parsingCharacter=True))]

    extractors = [
            # Character name from the page title, with boilerplate removed.
            Extractor(label='title',
                        path="//title",
                        attrs=Attribute(key='name',
                            path="./text()",
                            postprocess=lambda x: \
                                x.replace(' (Character)', '').replace(
                                    '- Filmography by type', '').strip())),

            Extractor(label='headshot',
                        path="//a[@name='headshot']",
                        attrs=Attribute(key='headshot',
                            path="./img/@src")),

            # Alternate names are slash-separated in a single div.
            Extractor(label='akas',
                        path="//div[h5='Alternate Names:']",
                        attrs=Attribute(key='akas',
                            path="./div//text()",
                            postprocess=lambda x: x.strip().split(' / '))),

            # Filmography entries outside any titled section.
            Extractor(label='filmography',
                        path="//div[@class='filmo'][not(h5)]/ol/li",
                        attrs=_film_attrs),

            # Filmography entries grouped by section (keyed by the
            # lowercased section header, trailing colon stripped).
            Extractor(label='filmography sections',
                        group="//div[@class='filmo'][h5]",
                        group_key="./h5/a/text()",
                        group_key_normalize=lambda x: x.lower()[:-1],
                        path="./ol/li",
                        attrs=_film_attrs),
            ]

    preprocessors = [
        # Check that this doesn't cut "status"...
        (re.compile(r'<br>(\.\.\.|    ).+?</li>', re.I | re.M), '</li>')]
class DOMHTMLCharacterBioParser(DOMParserBase):
    """Parser for the "biography" page of a given character.
    The page should be provided as a string, as taken from
    the akas.imdb.com server.  The final result will be a
    dictionary, with a key for every relevant section.

    Example:
        bparser = DOMHTMLCharacterBioParser()
        result = bparser.parse(character_biography_html_string)
    """
    _defGetRefs = True

    extractors = [
            # Free text preceding the history section (the preprocessors
            # below wrap it in a div with id="_intro").
            Extractor(label='introduction',
                        path="//div[@id='_intro']",
                        attrs=Attribute(key='introduction',
                            path=".//text()",
                            postprocess=lambda x: x.strip())),

            # One "Header: text" string per biography section; '||' markers
            # (inserted by the preprocessors) become paragraph breaks.
            Extractor(label='biography',
                        path="//span[@class='_biography']",
                        attrs=Attribute(key='biography',
                            multi=True,
                            path={
                                'info': "./preceding-sibling::h4[1]//text()",
                                'text': ".//text()"
                            },
                            postprocess=lambda x: u'%s: %s' % (
                                x.get('info').strip(),
                                x.get('text').replace('\n',
                                    ' ').replace('||', '\n\n').strip()))),
    ]

    # Rewrites the raw HTML so the extractors above can address
    # well-delimited sections (adds the _intro div and _biography spans).
    preprocessors = [
        (re.compile('(<div id="swiki.2.3.1">)', re.I), r'\1<div id="_intro">'),
        (re.compile('(<a name="history">)\s*(<table .*?</table>)',
                    re.I | re.DOTALL),
         r'</div>\2\1</a>'),
        (re.compile('(<a name="[^"]+">)(<h4>)', re.I), r'</span>\1</a>\2'),
        (re.compile('(</h4>)</a>', re.I), r'\1<span class="_biography">'),
        (re.compile('<br/><br/>', re.I), r'||'),
        (re.compile('\|\|\n', re.I), r'</span>'),
    ]
class DOMHTMLCharacterQuotesParser(DOMParserBase):
    """Parser for the "quotes" page of a given character.
    The page should be provided as a string, as taken from
    the akas.imdb.com server.  The final result will be a
    dictionary, with a key for every relevant section.

    Example:
        qparser = DOMHTMLCharacterQuotesParser()
        result = qparser.parse(character_quotes_html_string)
    """
    _defGetRefs = True

    extractors = [
        # Quotes grouped per movie title (the h5/a text); each group maps
        # to (movieID, list of quote strings split on the '||' markers
        # inserted by the preprocessors).
        # NOTE(review): the two .replace(': ', ': ') calls below look like
        # no-ops -- possibly whitespace inside the literals was collapsed
        # when this file was copied; verify against upstream before editing.
        Extractor(label='charquotes',
                    group="//h5",
                    group_key="./a/text()",
                    path="./following-sibling::div[1]",
                    attrs=Attribute(key=None,
                        path={'txt': ".//text()",
                              'movieID': ".//a[1]/@href"},
                        postprocess=lambda x: (analyze_imdbid(x['movieID']),
                                    x['txt'].strip().replace(': ',
                                    ': ').replace(': ', ': ').split('||'))))
    ]

    # Normalise the markup: each quotes block becomes one div, with '||'
    # separating quotes and '::' separating lines within a quote.
    preprocessors = [
        (re.compile('(</h5>)', re.I), r'\1<div>'),
        (re.compile('\s*<br/><br/>\s*', re.I), r'||'),
        (re.compile('\|\|\s*(<hr/>)', re.I), r'</div>\1'),
        (re.compile('\s*<br/>\s*', re.I), r'::')
    ]

    def postprocess_data(self, data):
        # Re-key the result by Movie objects (or by the bare title when no
        # movieID could be extracted) and split quotes into their lines.
        if not data:
            return {}
        newData = {}
        for title in data:
            movieID, quotes = data[title]
            if movieID is None:
                movie = title
            else:
                movie = Movie(title=title, movieID=movieID,
                              accessSystem=self._as, modFunct=self._modFunct)
            newData[movie] = [quote.split('::') for quote in quotes]
        return {'quotes': newData}
from personParser import DOMHTMLSeriesParser
# Parser registry consumed by the http data access system: maps parser
# names to ((parser classes,), constructor keyword arguments).
_OBJECTS = {
    'character_main_parser': ((DOMHTMLCharacterMaindetailsParser,),
                                {'kind': 'character'}),
    'character_series_parser': ((DOMHTMLSeriesParser,), None),
    'character_bio_parser': ((DOMHTMLCharacterBioParser,), None),
    'character_quotes_parser': ((DOMHTMLCharacterQuotesParser,), None)
}
| gpl-3.0 |
mvaled/OpenUpgrade | addons/website_event_sale/controllers/main.py | 233 | 3374 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013-Today OpenERP SA (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import SUPERUSER_ID
from openerp.addons.web import http
from openerp.addons.web.http import request
from openerp.addons.website_event.controllers.main import website_event
from openerp.addons.website_sale.controllers.main import get_pricelist
from openerp.tools.translate import _
class website_event(website_event):
    # Extends the base website_event controller with ticket-sale behaviour
    # (pricelist-aware event page, cart updates, default ticket on create).

    @http.route(['/event/<model("event.event"):event>/register'], type='http', auth="public", website=True)
    def event_register(self, event, **post):
        # Render the event page with prices taken from the visitor's
        # pricelist (propagated via the browse context).
        pricelist_id = int(get_pricelist())
        values = {
            'event': event.with_context(pricelist=pricelist_id),
            'main_object': event.with_context(pricelist=pricelist_id),
            'range': range,
        }
        return request.website.render("website_event.event_description_full", values)

    @http.route(['/event/cart/update'], type='http', auth="public", methods=['POST'], website=True)
    def cart_update(self, event_id, **post):
        # Add the posted ticket quantities to the current sale order (cart).
        # POST keys look like "ticket-<ticket_id>"; zero/empty quantities
        # are skipped.  Redirects back to the event page when nothing was
        # added, to checkout otherwise.
        cr, uid, context = request.cr, request.uid, request.context
        ticket_obj = request.registry.get('event.event.ticket')

        sale = False
        for key, value in post.items():
            quantity = int(value or "0")
            if not quantity:
                continue
            sale = True
            # key format: "ticket-<ticket_id>"
            ticket_id = key.split("-")[0] == 'ticket' and int(key.split("-")[1]) or None
            ticket = ticket_obj.browse(cr, SUPERUSER_ID, ticket_id, context=context)
            order = request.website.sale_get_order(force_create=1)
            order.with_context(event_ticket_id=ticket.id)._cart_update(product_id=ticket.product_id.id, add_qty=quantity)

        if not sale:
            return request.redirect("/event/%s" % event_id)
        return request.redirect("/shop/checkout")

    def _add_event(self, event_name="New Event", context={}, **kwargs):
        # Pre-fill a default "Subscription" ticket on newly created events
        # when the event_sale demo product reference exists; otherwise fall
        # through silently (ValueError from get_object_reference).
        # NOTE(review): the mutable default argument `context={}` is shared
        # across calls and is mutated below -- kept as-is for interface
        # compatibility, but worth confirming upstream intent.
        try:
            dummy, res_id = request.registry.get('ir.model.data').get_object_reference(request.cr, request.uid, 'event_sale', 'product_product_event')
            context['default_event_ticket_ids'] = [[0,0,{
                'name': _('Subscription'),
                'product_id': res_id,
                'deadline' : False,
                'seats_max': 1000,
                'price': 0,
            }]]
        except ValueError:
            pass
        return super(website_event, self)._add_event(event_name, context, **kwargs)
| agpl-3.0 |
EdgarSun/Django-Demo | django/contrib/auth/tests/forms.py | 97 | 9436 | from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm, AuthenticationForm, PasswordChangeForm, SetPasswordForm, UserChangeForm, PasswordResetForm
from django.db import connection
from django.test import TestCase
from django.utils import unittest
class UserCreationFormTest(TestCase):
    # Validation tests for django.contrib.auth's UserCreationForm.
    fixtures = ['authtestdata.json']

    def test_user_already_exists(self):
        # Username clashes with a user from the fixture.
        data = {
            'username': 'testclient',
            'password1': 'test123',
            'password2': 'test123',
            }
        form = UserCreationForm(data)
        self.assertFalse(form.is_valid())
        self.assertEqual(form["username"].errors,
                         [u'A user with that username already exists.'])

    def test_invalid_data(self):
        # Username contains a character outside the allowed set.
        data = {
            'username': 'jsmith!',
            'password1': 'test123',
            'password2': 'test123',
            }
        form = UserCreationForm(data)
        self.assertFalse(form.is_valid())
        self.assertEqual(form["username"].errors,
                         [u'This value may contain only letters, numbers and @/./+/-/_ characters.'])

    def test_password_verification(self):
        # The verification password is incorrect.
        data = {
            'username': 'jsmith',
            'password1': 'test123',
            'password2': 'test',
            }
        form = UserCreationForm(data)
        self.assertFalse(form.is_valid())
        self.assertEqual(form["password2"].errors,
                         [u"The two password fields didn't match."])

    def test_both_passwords(self):
        # One (or both) passwords weren't given
        data = {'username': 'jsmith'}
        form = UserCreationForm(data)
        self.assertFalse(form.is_valid())
        self.assertEqual(form['password1'].errors,
                         [u'This field is required.'])
        self.assertEqual(form['password2'].errors,
                         [u'This field is required.'])

        data['password2'] = 'test123'
        form = UserCreationForm(data)
        self.assertFalse(form.is_valid())
        self.assertEqual(form['password1'].errors,
                         [u'This field is required.'])

    def test_success(self):
        # The success case.
        data = {
            'username': 'jsmith@example.com',
            'password1': 'test123',
            'password2': 'test123',
            }
        form = UserCreationForm(data)
        self.assertTrue(form.is_valid())
        u = form.save()
        self.assertEqual(repr(u), '<User: jsmith@example.com>')
class AuthenticationFormTest(TestCase):
    # Validation tests for django.contrib.auth's AuthenticationForm.
    fixtures = ['authtestdata.json']

    def test_invalid_username(self):
        # The user submits an invalid username.
        data = {
            'username': 'jsmith_does_not_exist',
            'password': 'test123',
            }
        form = AuthenticationForm(None, data)
        self.assertFalse(form.is_valid())
        self.assertEqual(form.non_field_errors(),
                         [u'Please enter a correct username and password. Note that both fields are case-sensitive.'])

    def test_inactive_user(self):
        # The user is inactive.
        data = {
            'username': 'inactive',
            'password': 'password',
            }
        form = AuthenticationForm(None, data)
        self.assertFalse(form.is_valid())
        self.assertEqual(form.non_field_errors(),
                         [u'This account is inactive.'])

    def test_success(self):
        # The success case
        data = {
            'username': 'testclient',
            'password': 'password',
            }
        form = AuthenticationForm(None, data)
        self.assertTrue(form.is_valid())
        self.assertEqual(form.non_field_errors(), [])
class SetPasswordFormTest(TestCase):
    """Exercise ``SetPasswordForm`` password confirmation handling."""

    fixtures = ['authtestdata.json']

    def test_password_verification(self):
        """Non-matching new passwords error on the confirmation field."""
        account = User.objects.get(username='testclient')
        form = SetPasswordForm(account, {
            'new_password1': 'abc123',
            'new_password2': 'abc',
        })
        self.assertFalse(form.is_valid())
        self.assertEqual(form["new_password2"].errors,
            [u"The two password fields didn't match."])

    def test_success(self):
        """Matching new passwords validate."""
        account = User.objects.get(username='testclient')
        form = SetPasswordForm(account, {
            'new_password1': 'abc123',
            'new_password2': 'abc123',
        })
        self.assertTrue(form.is_valid())
class PasswordChangeFormTest(TestCase):
    """Exercise ``PasswordChangeForm`` old/new password validation."""

    fixtures = ['authtestdata.json']

    def test_incorrect_password(self):
        """A wrong current password is reported on ``old_password``."""
        account = User.objects.get(username='testclient')
        form = PasswordChangeForm(account, {
            'old_password': 'test',
            'new_password1': 'abc123',
            'new_password2': 'abc123',
        })
        self.assertFalse(form.is_valid())
        self.assertEqual(form["old_password"].errors,
            [u'Your old password was entered incorrectly. Please enter it again.'])

    def test_password_verification(self):
        """Non-matching new passwords error on the confirmation field."""
        account = User.objects.get(username='testclient')
        form = PasswordChangeForm(account, {
            'old_password': 'password',
            'new_password1': 'abc123',
            'new_password2': 'abc',
        })
        self.assertFalse(form.is_valid())
        self.assertEqual(form["new_password2"].errors,
            [u"The two password fields didn't match."])

    def test_success(self):
        """Correct old password plus matching new passwords validate."""
        account = User.objects.get(username='testclient')
        form = PasswordChangeForm(account, {
            'old_password': 'password',
            'new_password1': 'abc123',
            'new_password2': 'abc123',
        })
        self.assertTrue(form.is_valid())

    def test_field_order(self):
        """Regression: ``old_password`` must come before the new-password pair."""
        account = User.objects.get(username='testclient')
        self.assertEqual(PasswordChangeForm(account, {}).fields.keys(),
            ['old_password', 'new_password1', 'new_password2'])
class UserChangeFormTest(TestCase):
    """Exercise ``UserChangeForm`` validation and subclassing."""

    fixtures = ['authtestdata.json']

    @unittest.skipIf(not connection.features.supports_joins, 'Requires JOIN support')
    def test_username_validity(self):
        """An invalid username on an existing user produces a field error."""
        account = User.objects.get(username='testclient')
        form = UserChangeForm({'username': 'not valid'}, instance=account)
        self.assertFalse(form.is_valid())
        self.assertEqual(form['username'].errors,
            [u'This value may contain only letters, numbers and @/./+/-/_ characters.'])

    def test_bug_14242(self):
        """Regression: subclasses restricting Meta.fields must still construct."""
        class MyUserForm(UserChangeForm):
            def __init__(self, *args, **kwargs):
                super(MyUserForm, self).__init__(*args, **kwargs)
                self.fields['groups'].help_text = 'These groups give users different permissions'

            class Meta(UserChangeForm.Meta):
                fields = ('groups',)

        # Instantiation alone is the assertion here; it must not raise.
        form = MyUserForm({})
class PasswordResetFormTest(TestCase):
    """Exercise ``PasswordResetForm`` email validation and lookup."""

    fixtures = ['authtestdata.json']

    def create_dummy_user(self):
        """creates a user and returns a tuple
        (user_object, username, email)
        """
        username = 'jsmith'
        email = 'jsmith@example.com'
        return (User.objects.create_user(username, email, 'test123'),
                username, email)

    def test_invalid_email(self):
        """A malformed address fails basic email validation."""
        form = PasswordResetForm({'email': 'not valid'})
        self.assertFalse(form.is_valid())
        self.assertEqual(form['email'].errors,
            [u'Enter a valid e-mail address.'])

    def test_nonexistant_email(self):
        """A well-formed address with no matching account is rejected."""
        form = PasswordResetForm({'email': 'foo@bar.com'})
        self.assertFalse(form.is_valid())
        self.assertEqual(form.errors,
            {'email': [u"That e-mail address doesn't have an associated user account. Are you sure you've registered?"]})

    def test_cleaned_data(self):
        """Regression: the validated address is exposed via ``cleaned_data``."""
        (account, username, email) = self.create_dummy_user()
        form = PasswordResetForm({'email': email})
        self.assertTrue(form.is_valid())
        self.assertEqual(form.cleaned_data['email'], email)

    def test_bug_5605(self):
        # bug #5605: the local part (before the @) of the email address
        # must keep its case; only the domain is lowercased.
        account = User.objects.create_user('forms_test2', 'tesT@EXAMple.com', 'test')
        self.assertEqual(account.email, 'tesT@example.com')
        account = User.objects.create_user('forms_test3', 'tesT', 'test')
        self.assertEqual(account.email, 'tesT')

    def test_inactive_user(self):
        """An inactive user must not receive a password reset email."""
        (account, username, email) = self.create_dummy_user()
        account.is_active = False
        account.save()
        form = PasswordResetForm({'email': email})
        self.assertFalse(form.is_valid())
| mit |
flgiordano/netcash | +/google-cloud-sdk/lib/surface/components/list.py | 1 | 3566 | # Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The command to list installed/available gcloud components."""
from googlecloudsdk.calliope import base
from googlecloudsdk.core import log
from googlecloudsdk.core.resource import resource_printer_base
class List(base.SilentCommand):
  """List the status of all Cloud SDK components.

  List all components in the Cloud SDK and provide information such as whether
  the component is installed on the local workstation, whether a newer version
  is available, the size of the component, and the ID used to refer to the
  component in commands.
  """

  detailed_help = {
      'DESCRIPTION': """\
          This command lists all the available components in the Cloud SDK. For
          each component, the command lists the following information:

          * Status on your local workstation: not installed, installed (and
            up to date), and update available (installed, but not up to date)
          * Name of the component (a description)
          * ID of the component (used to refer to the component in other
            [{parent_command}] commands)
          * Size of the component

          In addition, if the `--show-versions` flag is specified, the command
          lists the currently installed version (if any) and the latest
          available version of each individual component.
      """,
      'EXAMPLES': """\
            $ {command}
            $ {command} --show-versions
      """,
  }

  @staticmethod
  def Args(parser):
    """Registers the command's flags on *parser*."""
    parser.add_argument(
        '--show-versions', required=False, action='store_true',
        help='Show installed and available versions of all components.')

  def Format(self, args):
    """Builds the resource-printer table format string.

    Args:
      args: The parsed argument namespace; only ``show_versions`` is read.

    Returns:
      A ``table[...](...)`` format string with version columns included
      when --show-versions was given.
    """
    attributes = [
        'box',
        'legend=" "',
        'empty-legend="No updates."',
        'title="Components"'
        ]
    columns = [
        'state.name:label=Status',
        'name:label=Name',
        ]
    if args.show_versions:
      columns.extend([
          'current_version_string:label=Installed:align=right',
          'latest_version_string:label=Latest:align=right',
          ])
    columns.extend([
        'id:label=ID',
        'size.size(zero="",min=1048576):label=Size:align=right',
        ])
    return 'table[{attributes}]({columns})'.format(
        attributes=','.join(attributes), columns=','.join(columns))

  def Run(self, args):
    """Runs the list command.

    Yields each component snapshot followed by a printer finish marker,
    then writes install/update hints to the status stream.
    """
    result = self.group.update_manager.List()
    (to_print, current_version, latest_version) = result
    if not to_print:
      # BUGFIX: this previously did `raise StopIteration`, which inside a
      # generator becomes a RuntimeError under PEP 479 (Python 3.7+).
      # A bare `return` ends the generator identically on every version.
      return
    for c in to_print:
      yield c
    yield resource_printer_base.FinishMarker()
    log.status.write("""\
To install or remove components at your current SDK version [{current}], run:
  $ gcloud components install COMPONENT_ID
  $ gcloud components remove COMPONENT_ID

To update your SDK installation to the latest version [{latest}], run:
  $ gcloud components update

""".format(current=current_version, latest=latest_version))
    log.status.flush()
| bsd-3-clause |
marcoserafini/h-store | third_party/cpp/protobuf/python/ez_setup.py | 55 | 9816 | #!python
# This file was obtained from:
# http://peak.telecommunity.com/dist/ez_setup.py
# on 2009/4/17.
"""Bootstrap setuptools installation
If you want to use setuptools in your package's setup.py, just include this
file in the same directory with it, and add this to the top of your setup.py::
from ez_setup import use_setuptools
use_setuptools()
If you want to require a specific version of setuptools, set a download
mirror, or use an alternate download directory, you can do so by supplying
the appropriate options to ``use_setuptools()``.
This file can also be run as a script to install or upgrade setuptools.
"""
import sys
DEFAULT_VERSION = "0.6c9"
DEFAULT_URL = "http://pypi.python.org/packages/%s/s/setuptools/" % sys.version[:3]
md5_data = {
'setuptools-0.6b1-py2.3.egg': '8822caf901250d848b996b7f25c6e6ca',
'setuptools-0.6b1-py2.4.egg': 'b79a8a403e4502fbb85ee3f1941735cb',
'setuptools-0.6b2-py2.3.egg': '5657759d8a6d8fc44070a9d07272d99b',
'setuptools-0.6b2-py2.4.egg': '4996a8d169d2be661fa32a6e52e4f82a',
'setuptools-0.6b3-py2.3.egg': 'bb31c0fc7399a63579975cad9f5a0618',
'setuptools-0.6b3-py2.4.egg': '38a8c6b3d6ecd22247f179f7da669fac',
'setuptools-0.6b4-py2.3.egg': '62045a24ed4e1ebc77fe039aa4e6f7e5',
'setuptools-0.6b4-py2.4.egg': '4cb2a185d228dacffb2d17f103b3b1c4',
'setuptools-0.6c1-py2.3.egg': 'b3f2b5539d65cb7f74ad79127f1a908c',
'setuptools-0.6c1-py2.4.egg': 'b45adeda0667d2d2ffe14009364f2a4b',
'setuptools-0.6c2-py2.3.egg': 'f0064bf6aa2b7d0f3ba0b43f20817c27',
'setuptools-0.6c2-py2.4.egg': '616192eec35f47e8ea16cd6a122b7277',
'setuptools-0.6c3-py2.3.egg': 'f181fa125dfe85a259c9cd6f1d7b78fa',
'setuptools-0.6c3-py2.4.egg': 'e0ed74682c998bfb73bf803a50e7b71e',
'setuptools-0.6c3-py2.5.egg': 'abef16fdd61955514841c7c6bd98965e',
'setuptools-0.6c4-py2.3.egg': 'b0b9131acab32022bfac7f44c5d7971f',
'setuptools-0.6c4-py2.4.egg': '2a1f9656d4fbf3c97bf946c0a124e6e2',
'setuptools-0.6c4-py2.5.egg': '8f5a052e32cdb9c72bcf4b5526f28afc',
'setuptools-0.6c5-py2.3.egg': 'ee9fd80965da04f2f3e6b3576e9d8167',
'setuptools-0.6c5-py2.4.egg': 'afe2adf1c01701ee841761f5bcd8aa64',
'setuptools-0.6c5-py2.5.egg': 'a8d3f61494ccaa8714dfed37bccd3d5d',
'setuptools-0.6c6-py2.3.egg': '35686b78116a668847237b69d549ec20',
'setuptools-0.6c6-py2.4.egg': '3c56af57be3225019260a644430065ab',
'setuptools-0.6c6-py2.5.egg': 'b2f8a7520709a5b34f80946de5f02f53',
'setuptools-0.6c7-py2.3.egg': '209fdf9adc3a615e5115b725658e13e2',
'setuptools-0.6c7-py2.4.egg': '5a8f954807d46a0fb67cf1f26c55a82e',
'setuptools-0.6c7-py2.5.egg': '45d2ad28f9750e7434111fde831e8372',
'setuptools-0.6c8-py2.3.egg': '50759d29b349db8cfd807ba8303f1902',
'setuptools-0.6c8-py2.4.egg': 'cba38d74f7d483c06e9daa6070cce6de',
'setuptools-0.6c8-py2.5.egg': '1721747ee329dc150590a58b3e1ac95b',
'setuptools-0.6c9-py2.3.egg': 'a83c4020414807b496e4cfbe08507c03',
'setuptools-0.6c9-py2.4.egg': '260a2be2e5388d66bdaee06abec6342a',
'setuptools-0.6c9-py2.5.egg': 'fe67c3e5a17b12c0e7c541b7ea43a8e6',
'setuptools-0.6c9-py2.6.egg': 'ca37b1ff16fa2ede6e19383e7b59245a',
}
import sys, os
try: from hashlib import md5
except ImportError: from md5 import md5
def _validate_md5(egg_name, data):
    """Verify downloaded egg bytes against the built-in MD5 registry.

    If *egg_name* is known in ``md5_data`` and the digest of *data* does
    not match, print an error and abort the process (exit code 2).
    Unknown egg names are accepted unchecked.  Returns *data* unchanged.
    (Python 2 source: uses ``print >>`` statement syntax.)
    """
    if egg_name in md5_data:
        digest = md5(data).hexdigest()
        if digest != md5_data[egg_name]:
            print >>sys.stderr, (
                "md5 validation of %s failed! (Possible download problem?)"
                % egg_name
            )
            sys.exit(2)
    return data
def use_setuptools(
    version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir,
    download_delay=15
):
    """Automatically find/download setuptools and make it available on sys.path

    `version` should be a valid setuptools version number that is available
    as an egg for download under the `download_base` URL (which should end with
    a '/'). `to_dir` is the directory where setuptools will be downloaded, if
    it is not already available.  If `download_delay` is specified, it should
    be the number of seconds that will be paused before initiating a download,
    should one be required.  If an older version of setuptools is installed,
    this routine will print a message to ``sys.stderr`` and raise SystemExit in
    an attempt to abort the calling script.
    """
    # Remember whether setuptools machinery was already imported: if so, a
    # downloaded egg cannot safely replace it in this process.
    was_imported = 'pkg_resources' in sys.modules or 'setuptools' in sys.modules
    def do_download():
        # Fetch the egg, put it first on sys.path, and tell setuptools to
        # bootstrap-install itself from that egg later.
        egg = download_setuptools(version, download_base, to_dir, download_delay)
        sys.path.insert(0, egg)
        import setuptools; setuptools.bootstrap_install_from = egg
    try:
        import pkg_resources
    except ImportError:
        # No setuptools at all: download is the only option.
        return do_download()
    try:
        pkg_resources.require("setuptools>="+version); return
    except pkg_resources.VersionConflict, e:  # Python 2 except syntax
        if was_imported:
            # Too late to swap implementations in-process; ask the user to
            # upgrade manually and abort.
            print >>sys.stderr, (
            "The required version of setuptools (>=%s) is not available, and\n"
            "can't be installed while this script is running. Please install\n"
            " a more recent version first, using 'easy_install -U setuptools'."
            "\n\n(Currently using %r)"
            ) % (version, e.args[0])
            sys.exit(2)
        else:
            # Older version present but not yet used: drop it and download.
            del pkg_resources, sys.modules['pkg_resources'] # reload ok
            return do_download()
    except pkg_resources.DistributionNotFound:
        return do_download()
def download_setuptools(
    version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir,
    delay = 15
):
    """Download setuptools from a specified location and return its filename

    `version` should be a valid setuptools version number that is available
    as an egg for download under the `download_base` URL (which should end
    with a '/'). `to_dir` is the directory where the egg will be downloaded.
    `delay` is the number of seconds to pause before an actual download attempt.
    """
    import urllib2, shutil
    egg_name = "setuptools-%s-py%s.egg" % (version,sys.version[:3])
    url = download_base + egg_name
    saveto = os.path.join(to_dir, egg_name)
    src = dst = None
    if not os.path.exists(saveto): # Avoid repeated downloads
        try:
            from distutils import log
            if delay:
                # Warn the user and give them `delay` seconds to abort
                # before any network access happens.
                log.warn("""
---------------------------------------------------------------------------
This script requires setuptools version %s to run (even to display
help). I will attempt to download it for you (from
%s), but
you may need to enable firewall access for this script first.
I will start the download in %d seconds.
(Note: if this machine does not have network access, please obtain the file
   %s
and place it in this directory before rerunning this script.)
---------------------------------------------------------------------------""",
                    version, download_base, delay, url
                ); from time import sleep; sleep(delay)
            log.warn("Downloading %s", url)
            src = urllib2.urlopen(url)
            # Read/write all in one block, so we don't create a corrupt file
            # if the download is interrupted.
            data = _validate_md5(egg_name, src.read())
            dst = open(saveto,"wb"); dst.write(data)
        finally:
            # Close handles even if download/validation failed part-way.
            if src: src.close()
            if dst: dst.close()
    return os.path.realpath(saveto)
def main(argv, version=DEFAULT_VERSION):
    """Install or upgrade setuptools and EasyInstall"""
    try:
        import setuptools
    except ImportError:
        # No setuptools present: bootstrap from a freshly downloaded egg,
        # run easy_install on that egg, then remove the temporary file.
        egg = None
        try:
            egg = download_setuptools(version, delay=0)
            sys.path.insert(0,egg)
            from setuptools.command.easy_install import main
            return main(list(argv)+[egg])   # we're done here
        finally:
            if egg and os.path.exists(egg):
                os.unlink(egg)
    else:
        # '0.0.1' marks a broken placeholder install that must be removed
        # by hand before this script can proceed.
        if setuptools.__version__ == '0.0.1':
            print >>sys.stderr, (
            "You have an obsolete version of setuptools installed. Please\n"
            "remove it from your system entirely before rerunning this script."
            )
            sys.exit(2)
    req = "setuptools>="+version
    import pkg_resources
    try:
        pkg_resources.require(req)
    except pkg_resources.VersionConflict:
        # An older setuptools is active: use whichever easy_install we can
        # import to upgrade from a fresh download, then exit immediately.
        try:
            from setuptools.command.easy_install import main
        except ImportError:
            from easy_install import main
        main(list(argv)+[download_setuptools(delay=0)])
        sys.exit(0) # try to force an exit
    else:
        if argv:
            from setuptools.command.easy_install import main
            main(argv)
        else:
            # No arguments: just report that the requirement is satisfied.
            print "Setuptools version",version,"or greater has been installed."
            print '(Run "ez_setup.py -U setuptools" to reinstall or upgrade.)'
def update_md5(filenames):
    """Update our built-in md5 registry

    Hashes each file in *filenames*, merges the results into ``md5_data``,
    then rewrites the ``md5_data = {...}`` literal in this script's own
    source file in place.  Aborts with exit code 2 if the literal cannot
    be located.
    """
    import re

    for name in filenames:
        base = os.path.basename(name)
        f = open(name,'rb')
        md5_data[base] = md5(f.read()).hexdigest()
        f.close()

    # Render the updated table body, sorted, one "name: digest," per line.
    data = ["    %r: %r,\n" % it for it in md5_data.items()]
    data.sort()
    repl = "".join(data)

    import inspect
    srcfile = inspect.getsourcefile(sys.modules[__name__])
    f = open(srcfile, 'rb'); src = f.read(); f.close()

    # Locate the existing md5_data dict literal; group 1 is its body.
    match = re.search("\nmd5_data = {\n([^}]+)}", src)
    if not match:
        print >>sys.stderr, "Internal error!"
        sys.exit(2)

    # Splice the regenerated body into the source and write it back.
    src = src[:match.start(1)] + repl + src[match.end(1):]
    f = open(srcfile,'w')
    f.write(src)
    f.close()
# Script entry point: `--md5update FILE...` refreshes the embedded hash
# registry; any other invocation installs or upgrades setuptools.
if __name__=='__main__':
    if len(sys.argv)>2 and sys.argv[1]=='--md5update':
        update_md5(sys.argv[2:])
    else:
        main(sys.argv[1:])
| gpl-3.0 |
isandlaTech/cohorte-3rdparty | sleekxmpp/src/main/python/sleekxmpp/plugins/xep_0071/stanza.py | 11 | 2862 | """
SleekXMPP: The Sleek XMPP Library
Copyright (C) 2012 Nathanael C. Fritz
This file is part of SleekXMPP.
See the file LICENSE for copying permission.
"""
from sleekxmpp.stanza import Message
from sleekxmpp.util import unicode
from sleekxmpp.thirdparty import OrderedDict
from sleekxmpp.xmlstream import ElementBase, ET, register_stanza_plugin, tostring
XHTML_NS = 'http://www.w3.org/1999/xhtml'
class XHTML_IM(ElementBase):
    """XEP-0071 (XHTML-IM) <html/> payload stanza plugin.

    Wraps one or more xml:lang-tagged <body xmlns="http://www.w3.org/1999/xhtml">
    children; the 'body' interface reads/writes their serialized XHTML content.
    """

    # Stanza plugin wiring for the sleekxmpp registry.
    namespace = 'http://jabber.org/protocol/xhtml-im'
    name = 'html'
    interfaces = set(['body'])
    lang_interfaces = set(['body'])
    plugin_attrib = name

    def set_body(self, content, lang=None):
        """Replace the <body> for *lang* with *content*.

        *content* may be an ElementTree element or a string of XHTML
        markup.  With lang == '*', *content* is a mapping of language
        code -> content and each entry is set in turn.
        """
        if lang is None:
            lang = self.get_lang()
        # Drop any existing body for this language before adding the new one.
        self.del_body(lang)
        if lang == '*':
            for sublang, subcontent in content.items():
                self.set_body(subcontent, sublang)
        else:
            # type(ET.Element('test')) is the concrete Element class; this is
            # the portable way to detect an ElementTree element instance.
            if isinstance(content, type(ET.Element('test'))):
                content = unicode(ET.tostring(content))
            else:
                content = unicode(content)
            # Wrap the markup in a namespaced (and optionally lang-tagged)
            # <body> so it parses as a proper XHTML-IM child.
            header = '<body xmlns="%s"' % XHTML_NS
            if lang:
                header = '%s xml:lang="%s"' % (header, lang)
            content = '%s>%s</body>' % (header, content)
            xhtml = ET.fromstring(content)
            self.xml.append(xhtml)

    def get_body(self, lang=None):
        """Return the contents of the HTML body.

        With lang == '*', returns an OrderedDict mapping each body's
        language to its serialized content; otherwise returns the
        serialized content of the first body matching *lang* (or '' if
        none matches).
        """
        if lang is None:
            lang = self.get_lang()
        bodies = self.xml.findall('{%s}body' % XHTML_NS)
        if lang == '*':
            result = OrderedDict()
            for body in bodies:
                body_lang = body.attrib.get('{%s}lang' % self.xml_ns, '')
                # Serialize text + children + tail to reconstruct the
                # body's inner XHTML as a string.
                body_result = []
                body_result.append(body.text if body.text else '')
                for child in body:
                    body_result.append(tostring(child, xmlns=XHTML_NS))
                body_result.append(body.tail if body.tail else '')
                result[body_lang] = ''.join(body_result)
            return result
        else:
            for body in bodies:
                # A body with no xml:lang attribute defaults to the
                # stanza's own language.
                if body.attrib.get('{%s}lang' % self.xml_ns, self.get_lang()) == lang:
                    result = []
                    result.append(body.text if body.text else '')
                    for child in body:
                        result.append(tostring(child, xmlns=XHTML_NS))
                    result.append(body.tail if body.tail else '')
                    return ''.join(result)
            return ''

    def del_body(self, lang=None):
        """Remove the first <body> whose language matches *lang* (if any)."""
        if lang is None:
            lang = self.get_lang()
        bodies = self.xml.findall('{%s}body' % XHTML_NS)
        for body in bodies:
            if body.attrib.get('{%s}lang' % self.xml_ns, self.get_lang()) == lang:
                self.xml.remove(body)
                # Only the first matching body is removed.
                return
| apache-2.0 |
gnowledge/OTM2 | opentreemap/treemap/migrations/0082_rename_species_fields.py | 3 | 23085 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
db.rename_column(u'treemap_species', 'other', 'other_part_of_name')
db.rename_column(u'treemap_species', 'native_status', 'is_native')
db.rename_column(u'treemap_species', 'bloom_period', 'flowering_period')
db.rename_column(u'treemap_species', 'fruit_period', 'fruit_or_nut_period')
db.rename_column(u'treemap_species', 'wildlife_value', 'has_wildlife_value')
db.rename_column(u'treemap_species', 'fact_sheet', 'fact_sheet_url')
db.rename_column(u'treemap_species', 'plant_guide', 'plant_guide_url')
db.rename_column(u'treemap_species', 'max_dbh', 'max_diameter')
def backwards(self, orm):
db.rename_column(u'treemap_species', 'other_part_of_name', 'other')
db.rename_column(u'treemap_species', 'is_native', 'native_status')
db.rename_column(u'treemap_species', 'flowering_period', 'bloom_period')
db.rename_column(u'treemap_species', 'fruit_or_nut_period', 'fruit_period')
db.rename_column(u'treemap_species', 'has_wildlife_value', 'wildlife_value')
db.rename_column(u'treemap_species', 'fact_sheet_url', 'fact_sheet')
db.rename_column(u'treemap_species', 'plant_guide_url', 'plant_guide')
db.rename_column(u'treemap_species', 'max_diameter', 'max_dbh')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'treemap.audit': {
'Meta': {'object_name': 'Audit'},
'action': ('django.db.models.fields.IntegerField', [], {}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'current_value': ('django.db.models.fields.TextField', [], {'null': 'True', 'db_index': 'True'}),
'field': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['treemap.Instance']", 'null': 'True', 'blank': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'db_index': 'True'}),
'model_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_index': 'True'}),
'previous_value': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'ref': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['treemap.Audit']", 'null': 'True'}),
'requires_auth': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['treemap.User']"})
},
u'treemap.benefitcurrencyconversion': {
'Meta': {'object_name': 'BenefitCurrencyConversion'},
'co2_lb_to_currency': ('django.db.models.fields.FloatField', [], {}),
'currency_symbol': ('django.db.models.fields.CharField', [], {'max_length': '5'}),
'electricity_kwh_to_currency': ('django.db.models.fields.FloatField', [], {}),
'h20_gal_to_currency': ('django.db.models.fields.FloatField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'natural_gas_kbtu_to_currency': ('django.db.models.fields.FloatField', [], {}),
'nox_lb_to_currency': ('django.db.models.fields.FloatField', [], {}),
'o3_lb_to_currency': ('django.db.models.fields.FloatField', [], {}),
'pm10_lb_to_currency': ('django.db.models.fields.FloatField', [], {}),
'sox_lb_to_currency': ('django.db.models.fields.FloatField', [], {}),
'voc_lb_to_currency': ('django.db.models.fields.FloatField', [], {})
},
u'treemap.boundary': {
'Meta': {'object_name': 'Boundary'},
'category': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'geom': ('django.contrib.gis.db.models.fields.MultiPolygonField', [], {'srid': '3857', 'db_column': "u'the_geom_webmercator'"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'sort_order': ('django.db.models.fields.IntegerField', [], {})
},
u'treemap.fieldpermission': {
'Meta': {'unique_together': "((u'model_name', u'field_name', u'role', u'instance'),)", 'object_name': 'FieldPermission'},
'field_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['treemap.Instance']"}),
'model_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'permission_level': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'role': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['treemap.Role']"})
},
u'treemap.instance': {
'Meta': {'object_name': 'Instance'},
'adjuncts_timestamp': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'basemap_data': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'basemap_type': ('django.db.models.fields.CharField', [], {'default': "u'google'", 'max_length': '255'}),
'boundaries': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['treemap.Boundary']", 'null': 'True', 'blank': 'True'}),
'bounds': ('django.contrib.gis.db.models.fields.MultiPolygonField', [], {'srid': '3857'}),
'center_override': ('django.contrib.gis.db.models.fields.PointField', [], {'srid': '3857', 'null': 'True', 'blank': 'True'}),
'config': ('treemap.json_field.JSONField', [], {'blank': 'True'}),
'default_role': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'default_role'", 'to': u"orm['treemap.Role']"}),
'eco_benefits_conversion': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['treemap.BenefitCurrencyConversion']", 'null': 'True', 'blank': 'True'}),
'geo_rev': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'itree_region_default': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'logo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'url_name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['treemap.User']", 'null': 'True', 'through': u"orm['treemap.InstanceUser']", 'blank': 'True'})
},
u'treemap.instanceuser': {
'Meta': {'unique_together': "((u'instance', u'user'),)", 'object_name': 'InstanceUser'},
'admin': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['treemap.Instance']"}),
'reputation': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'role': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['treemap.Role']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['treemap.User']"})
},
u'treemap.itreecodeoverride': {
'Meta': {'unique_together': "((u'instance_species', u'region'),)", 'object_name': 'ITreeCodeOverride'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance_species': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['treemap.Species']"}),
'itree_code': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'region': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['treemap.ITreeRegion']"})
},
u'treemap.itreeregion': {
'Meta': {'object_name': 'ITreeRegion'},
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40'}),
'geometry': ('django.contrib.gis.db.models.fields.MultiPolygonField', [], {'srid': '3857'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'treemap.mapfeature': {
'Meta': {'object_name': 'MapFeature'},
'address_city': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'address_street': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'address_zip': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'feature_type': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'geom': ('django.contrib.gis.db.models.fields.PointField', [], {'srid': '3857', 'db_column': "u'the_geom_webmercator'"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['treemap.Instance']"}),
'readonly': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'udfs': (u'treemap.udf.UDFField', [], {'db_index': 'True', 'blank': 'True'})
},
u'treemap.mapfeaturephoto': {
'Meta': {'object_name': 'MapFeaturePhoto'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'instance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['treemap.Instance']"}),
'map_feature': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['treemap.MapFeature']"}),
'thumbnail': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'})
},
u'treemap.plot': {
'Meta': {'object_name': 'Plot', '_ormbases': [u'treemap.MapFeature']},
'length': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
u'mapfeature_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['treemap.MapFeature']", 'unique': 'True', 'primary_key': 'True'}),
'owner_orig_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'width': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'})
},
u'treemap.reputationmetric': {
'Meta': {'object_name': 'ReputationMetric'},
'action': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'approval_score': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'denial_score': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'direct_write_score': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['treemap.Instance']"}),
'model_name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'treemap.role': {
'Meta': {'object_name': 'Role'},
'default_permission': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['treemap.Instance']", 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'rep_thresh': ('django.db.models.fields.IntegerField', [], {})
},
u'treemap.species': {
'Meta': {'object_name': 'Species'},
'common_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'cultivar': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'fact_sheet_url': ('django.db.models.fields.URLField', [], {'max_length': '255', 'blank': 'True'}),
'fall_conspicuous': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'flower_conspicuous': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'flowering_period': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'fruit_or_nut_period': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'genus': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'has_wildlife_value': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['treemap.Instance']"}),
'is_native': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'max_diameter': ('django.db.models.fields.IntegerField', [], {'default': '200'}),
'max_height': ('django.db.models.fields.IntegerField', [], {'default': '800'}),
'other_part_of_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'otm_code': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'palatable_human': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'plant_guide_url': ('django.db.models.fields.URLField', [], {'max_length': '255', 'blank': 'True'}),
'species': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'udfs': (u'treemap.udf.UDFField', [], {'db_index': 'True', 'blank': 'True'})
},
u'treemap.staticpage': {
'Meta': {'object_name': 'StaticPage'},
'content': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['treemap.Instance']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'treemap.tree': {
'Meta': {'object_name': 'Tree'},
'canopy_height': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'date_planted': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'date_removed': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'diameter': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'height': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['treemap.Instance']"}),
'plot': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['treemap.Plot']"}),
'readonly': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'species': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['treemap.Species']", 'null': 'True', 'blank': 'True'}),
'udfs': (u'treemap.udf.UDFField', [], {'db_index': 'True', 'blank': 'True'})
},
u'treemap.treefavorite': {
'Meta': {'unique_together': "((u'user', u'tree'),)", 'object_name': 'TreeFavorite'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'tree': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['treemap.Tree']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['treemap.User']"})
},
u'treemap.treephoto': {
'Meta': {'object_name': 'TreePhoto', '_ormbases': [u'treemap.MapFeaturePhoto']},
u'mapfeaturephoto_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['treemap.MapFeaturePhoto']", 'unique': 'True', 'primary_key': 'True'}),
'tree': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['treemap.Tree']"})
},
u'treemap.user': {
'Meta': {'object_name': 'User'},
'allow_email_contact': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '30', 'blank': 'True'}),
'make_info_public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'organization': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'photo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'thumbnail': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'treemap.userdefinedcollectionvalue': {
'Meta': {'object_name': 'UserDefinedCollectionValue'},
'data': (u'django_hstore.fields.DictionaryField', [], {}),
'field_definition': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['treemap.UserDefinedFieldDefinition']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model_id': ('django.db.models.fields.IntegerField', [], {})
},
u'treemap.userdefinedfielddefinition': {
'Meta': {'object_name': 'UserDefinedFieldDefinition'},
'datatype': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['treemap.Instance']"}),
'iscollection': ('django.db.models.fields.BooleanField', [], {}),
'model_type': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
}
}
complete_apps = ['treemap'] | gpl-3.0 |
tomchristie/django | tests/postgres_tests/test_indexes.py | 25 | 10150 | from django.contrib.postgres.indexes import BrinIndex, GinIndex, GistIndex
from django.db import connection
from django.test import skipUnlessDBFeature
from . import PostgreSQLTestCase
from .models import CharFieldModel, DateTimeArrayModel, IntegerArrayModel
@skipUnlessDBFeature('has_brin_index_support')
class BrinIndexTests(PostgreSQLTestCase):
    """Unit tests for BrinIndex: suffix, equality, name generation, deconstruction."""

    def test_suffix(self):
        self.assertEqual(BrinIndex.suffix, 'brin')

    def test_not_eq(self):
        # Indexes that differ only in pages_per_range must not compare equal.
        index = BrinIndex(fields=['title'])
        index_with_page_range = BrinIndex(fields=['title'], pages_per_range=16)
        self.assertNotEqual(index, index_with_page_range)

    def test_name_auto_generation(self):
        """
        A name longer than 30 characters (since len(BrinIndex.suffix) is 4
        rather than usual limit of 3) is okay for PostgreSQL. For this test,
        the name of the field ('datetimes') must be at least 7 characters to
        generate a name longer than 30 characters.
        """
        index = BrinIndex(fields=['datetimes'])
        index.set_name_with_model(DateTimeArrayModel)
        self.assertEqual(index.name, 'postgres_te_datetim_abf104_brin')

    def test_deconstruction(self):
        # Fields and name must round-trip through deconstruct() for migrations.
        index = BrinIndex(fields=['title'], name='test_title_brin')
        path, args, kwargs = index.deconstruct()
        self.assertEqual(path, 'django.contrib.postgres.indexes.BrinIndex')
        self.assertEqual(args, ())
        self.assertEqual(kwargs, {'fields': ['title'], 'name': 'test_title_brin'})

    def test_deconstruction_with_pages_per_range(self):
        # A non-default pages_per_range must also survive deconstruction.
        index = BrinIndex(fields=['title'], name='test_title_brin', pages_per_range=16)
        path, args, kwargs = index.deconstruct()
        self.assertEqual(path, 'django.contrib.postgres.indexes.BrinIndex')
        self.assertEqual(args, ())
        self.assertEqual(kwargs, {'fields': ['title'], 'name': 'test_title_brin', 'pages_per_range': 16})

    def test_invalid_pages_per_range(self):
        # pages_per_range=0 is rejected at construction time.
        with self.assertRaisesMessage(ValueError, 'pages_per_range must be None or a positive integer'):
            BrinIndex(fields=['title'], name='test_title_brin', pages_per_range=0)
class GinIndexTests(PostgreSQLTestCase):
    """Unit tests for GinIndex: suffix, equality, name generation, deconstruction."""

    def test_suffix(self):
        self.assertEqual(GinIndex.suffix, 'gin')

    def test_eq(self):
        # Equality is based on the index definition, not object identity.
        index = GinIndex(fields=['title'])
        same_index = GinIndex(fields=['title'])
        another_index = GinIndex(fields=['author'])
        self.assertEqual(index, same_index)
        self.assertNotEqual(index, another_index)

    def test_name_auto_generation(self):
        index = GinIndex(fields=['field'])
        index.set_name_with_model(IntegerArrayModel)
        self.assertEqual(index.name, 'postgres_te_field_def2f8_gin')

    def test_deconstruction(self):
        # All custom options must round-trip through deconstruct() for migrations.
        index = GinIndex(
            fields=['title'],
            name='test_title_gin',
            fastupdate=True,
            gin_pending_list_limit=128,
        )
        path, args, kwargs = index.deconstruct()
        self.assertEqual(path, 'django.contrib.postgres.indexes.GinIndex')
        self.assertEqual(args, ())
        self.assertEqual(
            kwargs,
            {
                'fields': ['title'],
                'name': 'test_title_gin',
                'fastupdate': True,
                'gin_pending_list_limit': 128,
            }
        )

    def test_deconstruct_no_args(self):
        # Default option values are omitted from the deconstructed kwargs.
        index = GinIndex(fields=['title'], name='test_title_gin')
        path, args, kwargs = index.deconstruct()
        self.assertEqual(path, 'django.contrib.postgres.indexes.GinIndex')
        self.assertEqual(args, ())
        self.assertEqual(kwargs, {'fields': ['title'], 'name': 'test_title_gin'})
class GistIndexTests(PostgreSQLTestCase):
    """Unit tests for GistIndex: suffix, equality, name generation, deconstruction."""

    def test_suffix(self):
        self.assertEqual(GistIndex.suffix, 'gist')

    def test_eq(self):
        # Equality compares the full index definition, including options.
        index = GistIndex(fields=['title'], fillfactor=64)
        same_index = GistIndex(fields=['title'], fillfactor=64)
        another_index = GistIndex(fields=['author'], buffering=True)
        self.assertEqual(index, same_index)
        self.assertNotEqual(index, another_index)

    def test_name_auto_generation(self):
        index = GistIndex(fields=['field'])
        index.set_name_with_model(CharFieldModel)
        self.assertEqual(index.name, 'postgres_te_field_1e0206_gist')

    def test_deconstruction(self):
        # Custom options must round-trip through deconstruct() for migrations.
        index = GistIndex(fields=['title'], name='test_title_gist', buffering=False, fillfactor=80)
        path, args, kwargs = index.deconstruct()
        self.assertEqual(path, 'django.contrib.postgres.indexes.GistIndex')
        self.assertEqual(args, ())
        self.assertEqual(
            kwargs,
            {
                'fields': ['title'],
                'name': 'test_title_gist',
                'buffering': False,
                'fillfactor': 80,
            }
        )

    def test_deconstruction_no_customization(self):
        # Default option values are omitted from the deconstructed kwargs.
        index = GistIndex(fields=['title'], name='test_title_gist')
        path, args, kwargs = index.deconstruct()
        self.assertEqual(path, 'django.contrib.postgres.indexes.GistIndex')
        self.assertEqual(args, ())
        self.assertEqual(kwargs, {'fields': ['title'], 'name': 'test_title_gist'})
class SchemaTests(PostgreSQLTestCase):
    """Integration tests: create and drop the custom indexes against a real database."""

    def get_constraints(self, table):
        """
        Get the indexes on the table using a new cursor.
        """
        with connection.cursor() as cursor:
            return connection.introspection.get_constraints(cursor, table)

    def test_gin_index(self):
        # Ensure the table is there and doesn't have an index.
        self.assertNotIn('field', self.get_constraints(IntegerArrayModel._meta.db_table))
        # Add the index
        index_name = 'integer_array_model_field_gin'
        index = GinIndex(fields=['field'], name=index_name)
        with connection.schema_editor() as editor:
            editor.add_index(IntegerArrayModel, index)
        constraints = self.get_constraints(IntegerArrayModel._meta.db_table)
        # Check gin index was added
        self.assertEqual(constraints[index_name]['type'], GinIndex.suffix)
        # Drop the index
        with connection.schema_editor() as editor:
            editor.remove_index(IntegerArrayModel, index)
        self.assertNotIn(index_name, self.get_constraints(IntegerArrayModel._meta.db_table))

    def test_gin_fastupdate(self):
        # fastupdate=False must show up as a storage option on the created index.
        index_name = 'integer_array_gin_fastupdate'
        index = GinIndex(fields=['field'], name=index_name, fastupdate=False)
        with connection.schema_editor() as editor:
            editor.add_index(IntegerArrayModel, index)
        constraints = self.get_constraints(IntegerArrayModel._meta.db_table)
        self.assertEqual(constraints[index_name]['type'], 'gin')
        self.assertEqual(constraints[index_name]['options'], ['fastupdate=off'])
        with connection.schema_editor() as editor:
            editor.remove_index(IntegerArrayModel, index)
        self.assertNotIn(index_name, self.get_constraints(IntegerArrayModel._meta.db_table))

    @skipUnlessDBFeature('has_gin_pending_list_limit')
    def test_gin_parameters(self):
        # Both GIN storage parameters must be applied to the created index.
        index_name = 'integer_array_gin_params'
        index = GinIndex(fields=['field'], name=index_name, fastupdate=True, gin_pending_list_limit=64)
        with connection.schema_editor() as editor:
            editor.add_index(IntegerArrayModel, index)
        constraints = self.get_constraints(IntegerArrayModel._meta.db_table)
        self.assertEqual(constraints[index_name]['type'], 'gin')
        self.assertEqual(constraints[index_name]['options'], ['gin_pending_list_limit=64', 'fastupdate=on'])
        with connection.schema_editor() as editor:
            editor.remove_index(IntegerArrayModel, index)
        self.assertNotIn(index_name, self.get_constraints(IntegerArrayModel._meta.db_table))

    @skipUnlessDBFeature('has_brin_index_support')
    def test_brin_index(self):
        # pages_per_range must be applied as a storage option on the BRIN index.
        index_name = 'char_field_model_field_brin'
        index = BrinIndex(fields=['field'], name=index_name, pages_per_range=4)
        with connection.schema_editor() as editor:
            editor.add_index(CharFieldModel, index)
        constraints = self.get_constraints(CharFieldModel._meta.db_table)
        self.assertEqual(constraints[index_name]['type'], BrinIndex.suffix)
        self.assertEqual(constraints[index_name]['options'], ['pages_per_range=4'])
        with connection.schema_editor() as editor:
            editor.remove_index(CharFieldModel, index)
        self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))

    def test_gist_index(self):
        # Ensure the table is there and doesn't have an index.
        self.assertNotIn('field', self.get_constraints(CharFieldModel._meta.db_table))
        # Add the index.
        index_name = 'char_field_model_field_gist'
        index = GistIndex(fields=['field'], name=index_name)
        with connection.schema_editor() as editor:
            editor.add_index(CharFieldModel, index)
        constraints = self.get_constraints(CharFieldModel._meta.db_table)
        # The index was added.
        self.assertEqual(constraints[index_name]['type'], GistIndex.suffix)
        # Drop the index.
        with connection.schema_editor() as editor:
            editor.remove_index(CharFieldModel, index)
        self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))

    def test_gist_parameters(self):
        # buffering/fillfactor must be applied as storage options on the index.
        index_name = 'integer_array_gist_buffering'
        index = GistIndex(fields=['field'], name=index_name, buffering=True, fillfactor=80)
        with connection.schema_editor() as editor:
            editor.add_index(CharFieldModel, index)
        constraints = self.get_constraints(CharFieldModel._meta.db_table)
        self.assertEqual(constraints[index_name]['type'], GistIndex.suffix)
        self.assertEqual(constraints[index_name]['options'], ['buffering=on', 'fillfactor=80'])
        with connection.schema_editor() as editor:
            editor.remove_index(CharFieldModel, index)
        self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))
| bsd-3-clause |
cntnboys/410Lab6 | v1/lib/python2.7/site-packages/django/template/defaulttags.py | 49 | 50815 | """Default tags used by the template system, available to all templates."""
from __future__ import unicode_literals
import os
import sys
import re
from datetime import datetime
from itertools import groupby, cycle as itertools_cycle
import warnings
from django.conf import settings
from django.template.base import (Node, NodeList, Template, Context, Library,
TemplateSyntaxError, VariableDoesNotExist, InvalidTemplateLibrary,
BLOCK_TAG_START, BLOCK_TAG_END, VARIABLE_TAG_START, VARIABLE_TAG_END,
SINGLE_BRACE_START, SINGLE_BRACE_END, COMMENT_TAG_START, COMMENT_TAG_END,
VARIABLE_ATTRIBUTE_SEPARATOR, get_library, token_kwargs, kwarg_re,
render_value_in_context)
from django.template.smartif import IfParser, Literal
from django.template.defaultfilters import date
from django.utils.deprecation import RemovedInDjango18Warning
from django.utils.encoding import force_text, smart_text
from django.utils.safestring import mark_safe
from django.utils.html import format_html
from django.utils import six
from django.utils import timezone
register = Library()
class AutoEscapeControlNode(Node):
    """Implements the actions of the autoescape tag."""

    def __init__(self, setting, nodelist):
        self.setting = setting
        self.nodelist = nodelist

    def render(self, context):
        # Render the body under the requested autoescape mode, then restore
        # whatever mode was in effect before this block.
        previous = context.autoescape
        context.autoescape = self.setting
        rendered = self.nodelist.render(context)
        context.autoescape = previous
        # With autoescape on, the body was escaped during rendering, so the
        # result is safe to emit without further escaping.
        return mark_safe(rendered) if self.setting else rendered
class CommentNode(Node):
    """Renders nothing; the body of {% comment %} is discarded entirely."""

    def render(self, context):
        return ''
class CsrfTokenNode(Node):
    """Renders the hidden CSRF input for the {% csrf_token %} tag."""

    def render(self, context):
        csrf_token = context.get('csrf_token', None)
        if not csrf_token:
            # It's very probable that the token is missing because of
            # misconfiguration, so we raise a warning
            if settings.DEBUG:
                warnings.warn("A {% csrf_token %} was used in a template, but the context did not provide the value.  This is usually caused by not using RequestContext.")
            return ''
        if csrf_token == 'NOTPROVIDED':
            # CSRF deliberately disabled for this response: emit nothing.
            return format_html("")
        return format_html("<input type='hidden' name='csrfmiddlewaretoken' value='{0}' />", csrf_token)
class CycleNode(Node):
    """Cycles among the given values each time this node is rendered."""

    def __init__(self, cyclevars, variable_name=None, silent=False, escape=False):
        self.cyclevars = cyclevars
        self.variable_name = variable_name  # set by "{% cycle ... as name %}"
        self.silent = silent
        self.escape = escape  # only while the "future" version exists

    def render(self, context):
        if self not in context.render_context:
            # First time the node is rendered in template
            # (state is keyed on the node object itself, so the iterator
            # persists across renders of the same template).
            context.render_context[self] = itertools_cycle(self.cyclevars)
        cycle_iter = context.render_context[self]
        value = next(cycle_iter).resolve(context)
        if self.variable_name:
            # Expose the current value under the "as" name.
            context[self.variable_name] = value
        if self.silent:
            return ''
        if not self.escape:
            # Legacy behavior: mark output safe unless loaded from `future`.
            value = mark_safe(value)
        return render_value_in_context(value, context)
class DebugNode(Node):
    """Dumps the full template context and sys.modules for {% debug %}."""

    def render(self, context):
        from pprint import pformat
        pieces = [force_text(pformat(frame)) for frame in context]
        pieces.append('\n\n')
        pieces.append(pformat(sys.modules))
        return ''.join(pieces)
class FilterNode(Node):
    """Renders the wrapped nodelist, then pipes the output through the tag's filters."""

    def __init__(self, filter_expr, nodelist):
        self.filter_expr = filter_expr
        self.nodelist = nodelist

    def render(self, context):
        rendered = self.nodelist.render(context)
        # Expose the rendered body as `var` so the filter expression can read it.
        with context.push(var=rendered):
            return self.filter_expr.resolve(context)
class FirstOfNode(Node):
    """Outputs the first of the given variables that resolves to a truthy value."""

    def __init__(self, variables, escape=False):
        self.vars = variables
        self.escape = escape  # only while the "future" version exists

    def render(self, context):
        for candidate in self.vars:
            resolved = candidate.resolve(context, True)
            if not resolved:
                continue
            if not self.escape:
                # Legacy behavior: mark output safe unless loaded from `future`.
                resolved = mark_safe(resolved)
            return render_value_in_context(resolved, context)
        # Nothing was truthy: render nothing.
        return ''
class ForNode(Node):
    """Implements {% for %}: renders nodelist_loop per item, or nodelist_empty."""

    child_nodelists = ('nodelist_loop', 'nodelist_empty')

    def __init__(self, loopvars, sequence, is_reversed, nodelist_loop, nodelist_empty=None):
        self.loopvars, self.sequence = loopvars, sequence
        self.is_reversed = is_reversed
        self.nodelist_loop = nodelist_loop
        if nodelist_empty is None:
            self.nodelist_empty = NodeList()
        else:
            self.nodelist_empty = nodelist_empty

    def __repr__(self):
        reversed_text = ' reversed' if self.is_reversed else ''
        return "<For Node: for %s in %s, tail_len: %d%s>" % \
            (', '.join(self.loopvars), self.sequence, len(self.nodelist_loop),
            reversed_text)

    def __iter__(self):
        # Yield children from both branches so node-walking utilities see them all.
        for node in self.nodelist_loop:
            yield node
        for node in self.nodelist_empty:
            yield node

    def render(self, context):
        if 'forloop' in context:
            # Remember the enclosing loop's state so nested loops can expose
            # it as forloop.parentloop.
            parentloop = context['forloop']
        else:
            parentloop = {}
        with context.push():
            try:
                values = self.sequence.resolve(context, True)
            except VariableDoesNotExist:
                values = []
            if values is None:
                values = []
            if not hasattr(values, '__len__'):
                # Materialize iterators/generators so len() and reversed() work.
                values = list(values)
            len_values = len(values)
            if len_values < 1:
                # No items: render the {% empty %} branch instead.
                return self.nodelist_empty.render(context)
            nodelist = []
            if self.is_reversed:
                values = reversed(values)
            unpack = len(self.loopvars) > 1
            # Create a forloop value in the context.  We'll update counters on each
            # iteration just below.
            loop_dict = context['forloop'] = {'parentloop': parentloop}
            for i, item in enumerate(values):
                # Shortcuts for current loop iteration number.
                loop_dict['counter0'] = i
                loop_dict['counter'] = i + 1
                # Reverse counter iteration numbers.
                loop_dict['revcounter'] = len_values - i
                loop_dict['revcounter0'] = len_values - i - 1
                # Boolean values designating first and last times through loop.
                loop_dict['first'] = (i == 0)
                loop_dict['last'] = (i == len_values - 1)

                pop_context = False
                if unpack:
                    # If there are multiple loop variables, unpack the item into
                    # them.
                    try:
                        unpacked_vars = dict(zip(self.loopvars, item))
                    except TypeError:
                        # Item isn't iterable: leave the loop vars unassigned
                        # (silent failure, matching template-language policy).
                        pass
                    else:
                        pop_context = True
                        context.update(unpacked_vars)
                else:
                    context[self.loopvars[0]] = item
                # In TEMPLATE_DEBUG mode provide source of the node which
                # actually raised the exception
                if settings.TEMPLATE_DEBUG:
                    for node in self.nodelist_loop:
                        try:
                            nodelist.append(node.render(context))
                        except Exception as e:
                            if not hasattr(e, 'django_template_source'):
                                e.django_template_source = node.source
                            raise
                else:
                    for node in self.nodelist_loop:
                        nodelist.append(node.render(context))
                if pop_context:
                    # The loop variables were pushed on to the context so pop them
                    # off again. This is necessary because the tag lets the length
                    # of loopvars differ to the length of each set of items and we
                    # don't want to leave any vars from the previous loop on the
                    # context.
                    context.pop()
            return mark_safe(''.join(force_text(n) for n in nodelist))
class IfChangedNode(Node):
    """Implements {% ifchanged %}: renders only when the watched value changes."""

    child_nodelists = ('nodelist_true', 'nodelist_false')

    def __init__(self, nodelist_true, nodelist_false, *varlist):
        self.nodelist_true, self.nodelist_false = nodelist_true, nodelist_false
        self._varlist = varlist  # optional variables to watch instead of the output

    def render(self, context):
        # Init state storage
        state_frame = self._get_context_stack_frame(context)
        if self not in state_frame:
            state_frame[self] = None

        nodelist_true_output = None
        try:
            if self._varlist:
                # Consider multiple parameters.  This automatically behaves
                # like an OR evaluation of the multiple variables.
                compare_to = [var.resolve(context, True) for var in self._varlist]
            else:
                # The "{% ifchanged %}" syntax (without any variables) compares the rendered output.
                compare_to = nodelist_true_output = self.nodelist_true.render(context)
        except VariableDoesNotExist:
            compare_to = None

        if compare_to != state_frame[self]:
            state_frame[self] = compare_to
            return nodelist_true_output or self.nodelist_true.render(context)  # render true block if not already rendered
        elif self.nodelist_false:
            return self.nodelist_false.render(context)
        return ''

    def _get_context_stack_frame(self, context):
        # The Context object behaves like a stack where each template tag can create a new scope.
        # Find the place where to store the state to detect changes.
        if 'forloop' in context:
            # Ifchanged is bound to the local for loop.
            # When there is a loop-in-loop, the state is bound to the inner loop,
            # so it resets when the outer loop continues.
            return context['forloop']
        else:
            # Using ifchanged outside loops. Effectively this is a no-op because the state is associated with 'self'.
            return context.render_context
class IfEqualNode(Node):
    """Renders one of two nodelists depending on whether two values are (un)equal."""

    child_nodelists = ('nodelist_true', 'nodelist_false')

    def __init__(self, var1, var2, nodelist_true, nodelist_false, negate):
        self.var1 = var1
        self.var2 = var2
        self.nodelist_true = nodelist_true
        self.nodelist_false = nodelist_false
        self.negate = negate  # True for {% ifnotequal %}

    def __repr__(self):
        return "<IfEqualNode>"

    def render(self, context):
        first = self.var1.resolve(context, True)
        second = self.var2.resolve(context, True)
        matched = (first != second) if self.negate else (first == second)
        branch = self.nodelist_true if matched else self.nodelist_false
        return branch.render(context)
class IfNode(Node):
    """Renders the first {% if %}/{% elif %}/{% else %} branch whose condition holds."""

    def __init__(self, conditions_nodelists):
        # List of (condition, nodelist) pairs; condition is None for {% else %}.
        self.conditions_nodelists = conditions_nodelists

    def __repr__(self):
        return "<IfNode>"

    def __iter__(self):
        for _, nodelist in self.conditions_nodelists:
            for node in nodelist:
                yield node

    @property
    def nodelist(self):
        # Flattened view over all branches (used by node-walking utilities).
        return NodeList(node for _, nodelist in self.conditions_nodelists for node in nodelist)

    def render(self, context):
        for condition, nodelist in self.conditions_nodelists:

            if condition is not None:           # if / elif clause
                try:
                    match = condition.eval(context)
                except VariableDoesNotExist:
                    # Missing variables evaluate as falsy rather than erroring.
                    match = None
            else:                               # else clause
                match = True

            if match:
                return nodelist.render(context)

        return ''
class RegroupNode(Node):
    """Groups a list of objects by a common expression for {% regroup %}."""

    def __init__(self, target, expression, var_name):
        self.target = target
        self.expression = expression
        self.var_name = var_name

    def resolve_expression(self, obj, context):
        # This method is called for each object in self.target. See regroup()
        # for the reason why we temporarily put the object in the context.
        context[self.var_name] = obj
        return self.expression.resolve(context, True)

    def render(self, context):
        obj_list = self.target.resolve(context, True)
        if obj_list is None:
            # target variable wasn't found in context; fail silently.
            context[self.var_name] = []
            return ''
        # Build [{'grouper': key, 'list': [...]}, ...] preserving input order;
        # groupby() only merges *adjacent* items with equal keys.
        keyfunc = lambda obj: self.resolve_expression(obj, context)
        context[self.var_name] = [
            {'grouper': key, 'list': list(members)}
            for key, members in groupby(obj_list, keyfunc)
        ]
        return ''
def include_is_allowed(filepath):
    """Return True if filepath lives under one of settings.ALLOWED_INCLUDE_ROOTS."""
    absolute = os.path.abspath(filepath)
    return any(absolute.startswith(root) for root in settings.ALLOWED_INCLUDE_ROOTS)
class SsiNode(Node):
    """Implements {% ssi %}: inlines a file from disk, optionally parsed as a template."""

    def __init__(self, filepath, parsed):
        self.filepath = filepath
        self.parsed = parsed  # True when the "parsed" argument was given

    def render(self, context):
        filepath = self.filepath.resolve(context)

        if not include_is_allowed(filepath):
            if settings.DEBUG:
                return "[Didn't have permission to include file]"
            return ''  # Fail silently for invalid includes.

        try:
            with open(filepath, 'r') as fp:
                output = fp.read()
        except IOError:
            output = ''

        if not self.parsed:
            # Raw inclusion: emit the file contents as-is.
            return output

        try:
            t = Template(output, name=filepath)
            return t.render(context)
        except TemplateSyntaxError as e:
            if settings.DEBUG:
                return "[Included template had syntax error: %s]" % e
            return ''  # Fail silently for invalid included templates.
class LoadNode(Node):
    """Renders nothing; {% load %} only has side effects at parse time."""

    def render(self, context):
        return ''
class NowNode(Node):
    """Outputs the current date/time formatted per the {% now %} format string."""

    def __init__(self, format_string):
        self.format_string = format_string

    def render(self, context):
        # Use the active timezone only when USE_TZ is enabled; otherwise
        # produce a naive local datetime, matching Django's convention.
        if settings.USE_TZ:
            tzinfo = timezone.get_current_timezone()
        else:
            tzinfo = None
        return date(datetime.now(tz=tzinfo), self.format_string)
class SpacelessNode(Node):
    """Strips whitespace between HTML tags in the rendered body of {% spaceless %}."""

    def __init__(self, nodelist):
        self.nodelist = nodelist

    def render(self, context):
        from django.utils.html import strip_spaces_between_tags
        rendered = self.nodelist.render(context)
        return strip_spaces_between_tags(rendered.strip())
class TemplateTagNode(Node):
    """Outputs a raw template-syntax literal for the {% templatetag %} tag."""

    # Maps the tag argument to the literal template-syntax string it renders.
    mapping = {'openblock': BLOCK_TAG_START,
               'closeblock': BLOCK_TAG_END,
               'openvariable': VARIABLE_TAG_START,
               'closevariable': VARIABLE_TAG_END,
               'openbrace': SINGLE_BRACE_START,
               'closebrace': SINGLE_BRACE_END,
               'opencomment': COMMENT_TAG_START,
               'closecomment': COMMENT_TAG_END,
               }

    def __init__(self, tagtype):
        self.tagtype = tagtype

    def render(self, context):
        # Unknown tagtype silently renders as the empty string.
        return self.mapping.get(self.tagtype, '')
class URLNode(Node):
    """Implements {% url %}: reverses a view name (with args/kwargs) into a URL."""

    def __init__(self, view_name, args, kwargs, asvar):
        self.view_name = view_name
        self.args = args
        self.kwargs = kwargs
        # asvar is the variable name from "{% url ... as var %}", or None.
        self.asvar = asvar

    def render(self, context):
        from django.core.urlresolvers import reverse, NoReverseMatch
        args = [arg.resolve(context) for arg in self.args]
        kwargs = dict((smart_text(k, 'ascii'), v.resolve(context))
                      for k, v in self.kwargs.items())

        view_name = self.view_name.resolve(context)

        # Try to look up the URL twice: once given the view name, and again
        # relative to what we guess is the "main" app. If they both fail,
        # re-raise the NoReverseMatch unless we're using the
        # {% url ... as var %} construct in which case return nothing.
        url = ''
        try:
            url = reverse(view_name, args=args, kwargs=kwargs, current_app=context.current_app)
        except NoReverseMatch:
            # Capture the original traceback so the eventual error points at
            # the first (un-prefixed) lookup, which gives the clearer message.
            exc_info = sys.exc_info()
            if settings.SETTINGS_MODULE:
                project_name = settings.SETTINGS_MODULE.split('.')[0]
                try:
                    url = reverse(project_name + '.' + view_name,
                                  args=args, kwargs=kwargs,
                                  current_app=context.current_app)
                except NoReverseMatch:
                    if self.asvar is None:
                        # Re-raise the original exception, not the one with
                        # the path relative to the project. This makes a
                        # better error message.
                        six.reraise(*exc_info)
            else:
                if self.asvar is None:
                    raise

        if self.asvar:
            # "as var" form: store the URL (or '') and render nothing.
            context[self.asvar] = url
            return ''
        else:
            return url
class VerbatimNode(Node):
    """Outputs its captured content untouched for the {% verbatim %} tag."""

    def __init__(self, content):
        self.content = content

    def render(self, context):
        return self.content
class WidthRatioNode(Node):
    """Implements {% widthratio %}: scales value/max_value onto 0..max_width."""

    def __init__(self, val_expr, max_expr, max_width, asvar=None):
        self.val_expr = val_expr
        self.max_expr = max_expr
        self.max_width = max_width
        self.asvar = asvar  # "as var" target, or None to output directly

    def render(self, context):
        try:
            value = self.val_expr.resolve(context)
            max_value = self.max_expr.resolve(context)
            max_width = int(self.max_width.resolve(context))
        except VariableDoesNotExist:
            # Missing variables fail silently, per template-language policy.
            return ''
        except (ValueError, TypeError):
            raise TemplateSyntaxError("widthratio final argument must be a number")

        try:
            ratio = (float(value) / float(max_value)) * max_width
            result = str(int(round(ratio)))
        except ZeroDivisionError:
            return '0'
        except (ValueError, TypeError, OverflowError):
            # Non-numeric operands fail silently.
            return ''

        if not self.asvar:
            return result
        context[self.asvar] = result
        return ''
class WithNode(Node):
    """Implements {% with %}: renders the body with extra context variables."""

    def __init__(self, var, name, nodelist, extra_context=None):
        self.nodelist = nodelist
        # var and name are legacy attributes, being left in case they are used
        # by third-party subclasses of this Node.
        self.extra_context = extra_context or {}
        if name:
            self.extra_context[name] = var

    def __repr__(self):
        return "<WithNode>"

    def render(self, context):
        resolved = {
            key: expr.resolve(context)
            for key, expr in six.iteritems(self.extra_context)
        }
        with context.push(**resolved):
            return self.nodelist.render(context)
@register.tag
def autoescape(parser, token):
    """
    Force autoescape behavior for this block.
    """
    # token.split_contents() isn't useful here because this tag doesn't accept variable as arguments
    args = token.contents.split()
    if len(args) != 2:
        raise TemplateSyntaxError("'autoescape' tag requires exactly one argument.")
    mode = args[1]
    if mode not in ('on', 'off'):
        raise TemplateSyntaxError("'autoescape' argument should be 'on' or 'off'")
    body = parser.parse(('endautoescape',))
    parser.delete_first_token()
    return AutoEscapeControlNode(mode == 'on', body)
@register.tag
def comment(parser, token):
    """
    Ignores everything between ``{% comment %}`` and ``{% endcomment %}``.
    """
    # Skip the body at the lexer level; nothing inside is ever parsed.
    parser.skip_past('endcomment')
    return CommentNode()
@register.tag
def cycle(parser, token, escape=False):
    """
    Cycles among the given strings each time this tag is encountered.
    Within a loop, cycles among the given strings each time through
    the loop::
        {% for o in some_list %}
            <tr class="{% cycle 'row1' 'row2' %}">
                ...
            </tr>
        {% endfor %}
    Outside of a loop, give the values a unique name the first time you call
    it, then use that name each successive time through::
            <tr class="{% cycle 'row1' 'row2' 'row3' as rowcolors %}">...</tr>
            <tr class="{% cycle rowcolors %}">...</tr>
            <tr class="{% cycle rowcolors %}">...</tr>
    You can use any number of values, separated by spaces. Commas can also
    be used to separate values; if a comma is used, the cycle values are
    interpreted as literal strings.
    The optional flag "silent" can be used to prevent the cycle declaration
    from returning any value::
        {% for o in some_list %}
            {% cycle 'row1' 'row2' as rowcolors silent %}
            <tr class="{{ rowcolors }}">{% include "subtemplate.html " %}</tr>
        {% endfor %}
    """
    if not escape:
        # NOTE(review): the deprecation message below starts with a stray
        # apostrophe ("'The ...") — looks like a typo worth fixing upstream.
        warnings.warn(
            "'The `cycle` template tag is changing to escape its arguments; "
            "the non-autoescaping version is deprecated. Load it "
            "from the `future` tag library to start using the new behavior.",
            RemovedInDjango18Warning, stacklevel=2)
    # Note: This returns the exact same node on each {% cycle name %} call;
    # that is, the node object returned from {% cycle a b c as name %} and the
    # one returned from {% cycle name %} are the exact same object. This
    # shouldn't cause problems (heh), but if it does, now you know.
    #
    # Ugly hack warning: This stuffs the named template dict into parser so
    # that names are only unique within each template (as opposed to using
    # a global variable, which would make cycle names have to be unique across
    # *all* templates.
    args = token.split_contents()
    if len(args) < 2:
        raise TemplateSyntaxError("'cycle' tag requires at least two arguments")
    if ',' in args[1]:
        # Backwards compatibility: {% cycle a,b %} or {% cycle a,b as foo %}
        # case.
        args[1:2] = ['"%s"' % arg for arg in args[1].split(",")]
    if len(args) == 2:
        # {% cycle foo %} case: look up a previously declared named cycle.
        name = args[1]
        if not hasattr(parser, '_namedCycleNodes'):
            raise TemplateSyntaxError("No named cycles in template. '%s' is not defined" % name)
        if name not in parser._namedCycleNodes:
            raise TemplateSyntaxError("Named cycle '%s' does not exist" % name)
        return parser._namedCycleNodes[name]
    as_form = False
    if len(args) > 4:
        # {% cycle ... as foo [silent] %} case.
        if args[-3] == "as":
            if args[-1] != "silent":
                raise TemplateSyntaxError("Only 'silent' flag is allowed after cycle's name, not '%s'." % args[-1])
            as_form = True
            silent = True
            args = args[:-1]
        elif args[-2] == "as":
            as_form = True
            silent = False
    if as_form:
        # Register the node under its name so {% cycle name %} can find it.
        name = args[-1]
        values = [parser.compile_filter(arg) for arg in args[1:-2]]
        node = CycleNode(values, name, silent=silent, escape=escape)
        if not hasattr(parser, '_namedCycleNodes'):
            parser._namedCycleNodes = {}
        parser._namedCycleNodes[name] = node
    else:
        values = [parser.compile_filter(arg) for arg in args[1:]]
        node = CycleNode(values, escape=escape)
    return node
@register.tag
def csrf_token(parser, token):
    """Insert a CSRF token node; all rendering logic lives in CsrfTokenNode."""
    return CsrfTokenNode()
@register.tag
def debug(parser, token):
    """
    Outputs a whole load of debugging information, including the current
    context and imported modules.
    Sample usage::
        <pre>
            {% debug %}
        </pre>
    """
    # No arguments to parse; DebugNode does all the work at render time.
    return DebugNode()
@register.tag('filter')
def do_filter(parser, token):
    """
    Filters the contents of the block through variable filters.

    Filters can also be piped through each other, and they can have
    arguments -- just like in variable syntax.

    Sample usage::

        {% filter force_escape|lower %}
            This text will be HTML-escaped, and will appear in lowercase.
        {% endfilter %}

    Note that the ``escape`` and ``safe`` filters are not acceptable arguments.
    Instead, use the ``autoescape`` tag to manage autoescaping for blocks of
    template code.
    """
    # split_contents() is not useful here: everything after the tag name is a
    # single raw filter expression, not a series of arguments.
    _, filter_spec = token.contents.split(None, 1)
    # Compile against a placeholder variable so the pipe expression parses.
    filter_expr = parser.compile_filter("var|%s" % filter_spec)
    for filter_func, _filter_args in filter_expr.filters:
        name = getattr(filter_func, '_filter_name', None)
        if name in ('escape', 'safe'):
            raise TemplateSyntaxError('"filter %s" is not permitted. Use the "autoescape" tag instead.' % name)
    nodelist = parser.parse(('endfilter',))
    parser.delete_first_token()
    return FilterNode(filter_expr, nodelist)
@register.tag
def firstof(parser, token, escape=False):
    """
    Outputs the first variable passed that is not False, without escaping.
    Outputs nothing if all the passed variables are False.
    Sample usage::
        {% firstof var1 var2 var3 %}
    This is equivalent to::
        {% if var1 %}
            {{ var1|safe }}
        {% elif var2 %}
            {{ var2|safe }}
        {% elif var3 %}
            {{ var3|safe }}
        {% endif %}
    but obviously much cleaner!
    You can also use a literal string as a fallback value in case all
    passed variables are False::
        {% firstof var1 var2 var3 "fallback value" %}
    If you want to escape the output, use a filter tag::
        {% filter force_escape %}
            {% firstof var1 var2 var3 "fallback value" %}
        {% endfilter %}
    """
    if not escape:
        # Bug fix: the message previously began with a stray apostrophe
        # ("'The `firstof` ..."), which showed up verbatim in the warning.
        warnings.warn(
            "The `firstof` template tag is changing to escape its arguments; "
            "the non-autoescaping version is deprecated. Load it "
            "from the `future` tag library to start using the new behavior.",
            RemovedInDjango18Warning, stacklevel=2)
    bits = token.split_contents()[1:]
    if not bits:
        raise TemplateSyntaxError("'firstof' statement requires at least one argument")
    return FirstOfNode([parser.compile_filter(bit) for bit in bits], escape=escape)
@register.tag('for')
def do_for(parser, token):
    """
    Loops over each item in an array.
    For example, to display a list of athletes given ``athlete_list``::
        <ul>
        {% for athlete in athlete_list %}
            <li>{{ athlete.name }}</li>
        {% endfor %}
        </ul>
    You can loop over a list in reverse by using
    ``{% for obj in list reversed %}``.
    You can also unpack multiple values from a two-dimensional array::
        {% for key,value in dict.items %}
            {{ key }}: {{ value }}
        {% endfor %}
    The ``for`` tag can take an optional ``{% empty %}`` clause that will
    be displayed if the given array is empty or could not be found::
        <ul>
          {% for athlete in athlete_list %}
            <li>{{ athlete.name }}</li>
          {% empty %}
            <li>Sorry, no athletes in this list.</li>
          {% endfor %}
        <ul>
    The above is equivalent to -- but shorter, cleaner, and possibly faster
    than -- the following::
        <ul>
          {% if athlete_list %}
            {% for athlete in athlete_list %}
              <li>{{ athlete.name }}</li>
            {% endfor %}
          {% else %}
            <li>Sorry, no athletes in this list.</li>
          {% endif %}
        </ul>
    The for loop sets a number of variables available within the loop:
        ==========================  ================================================
        Variable                    Description
        ==========================  ================================================
        ``forloop.counter``         The current iteration of the loop (1-indexed)
        ``forloop.counter0``        The current iteration of the loop (0-indexed)
        ``forloop.revcounter``      The number of iterations from the end of the
                                    loop (1-indexed)
        ``forloop.revcounter0``     The number of iterations from the end of the
                                    loop (0-indexed)
        ``forloop.first``           True if this is the first time through the loop
        ``forloop.last``            True if this is the last time through the loop
        ``forloop.parentloop``      For nested loops, this is the loop "above" the
                                    current one
        ==========================  ================================================
    """
    bits = token.split_contents()
    if len(bits) < 4:
        raise TemplateSyntaxError("'for' statements should have at least four"
                                  " words: %s" % token.contents)
    # A trailing 'reversed' keyword shifts where 'in' must appear.
    is_reversed = bits[-1] == 'reversed'
    in_index = -3 if is_reversed else -2
    if bits[in_index] != 'in':
        raise TemplateSyntaxError("'for' statements should use the format"
                                  " 'for x in y': %s" % token.contents)
    # Loop variables may be comma- and/or space-separated: "for a, b in ...".
    loopvars = re.split(r' *, *', ' '.join(bits[1:in_index]))
    for var in loopvars:
        if not var or ' ' in var:
            raise TemplateSyntaxError("'for' tag received an invalid argument:"
                                      " %s" % token.contents)
    sequence = parser.compile_filter(bits[in_index + 1])
    # Body runs until {% empty %} or {% endfor %}; the optional empty clause
    # renders when the sequence has no items.
    nodelist_loop = parser.parse(('empty', 'endfor',))
    token = parser.next_token()
    if token.contents == 'empty':
        nodelist_empty = parser.parse(('endfor',))
        parser.delete_first_token()
    else:
        nodelist_empty = None
    return ForNode(loopvars, sequence, is_reversed, nodelist_loop, nodelist_empty)
def do_ifequal(parser, token, negate):
    """Common implementation of the ``ifequal`` and ``ifnotequal`` tags."""
    bits = list(token.split_contents())
    if len(bits) != 3:
        raise TemplateSyntaxError("%r takes two arguments" % bits[0])
    end_tag = 'end' + bits[0]
    # Body rendered when the comparison succeeds (or fails, if negated).
    true_nodes = parser.parse(('else', end_tag))
    token = parser.next_token()
    # Optional {% else %} branch; defaults to an empty node list.
    false_nodes = NodeList()
    if token.contents == 'else':
        false_nodes = parser.parse((end_tag,))
        parser.delete_first_token()
    left = parser.compile_filter(bits[1])
    right = parser.compile_filter(bits[2])
    return IfEqualNode(left, right, true_nodes, false_nodes, negate)
@register.tag
def ifequal(parser, token):
    """
    Outputs the contents of the block if the two arguments equal each other.
    Examples::
        {% ifequal user.id comment.user_id %}
            ...
        {% endifequal %}
        {% ifnotequal user.id comment.user_id %}
            ...
        {% else %}
            ...
        {% endifnotequal %}
    """
    # Thin wrapper: negate=False means "render when equal".
    return do_ifequal(parser, token, False)
@register.tag
def ifnotequal(parser, token):
    """
    Outputs the contents of the block if the two arguments are not equal.
    See ifequal.
    """
    # Thin wrapper: negate=True means "render when NOT equal".
    return do_ifequal(parser, token, True)
class TemplateLiteral(Literal):
    # Literal wrapper that remembers its source text so the smart-if parser
    # can produce readable error messages.
    def __init__(self, value, text):
        self.value = value
        self.text = text  # for better error messages
    def display(self):
        return self.text
    def eval(self, context):
        # ignore_failures=True: an unresolvable variable evaluates as None
        # rather than raising inside an {% if %} condition.
        return self.value.resolve(context, ignore_failures=True)
class TemplateIfParser(IfParser):
    # Adapts the generic smart-if parser to the template engine: syntax
    # errors become TemplateSyntaxError and operands are compiled through
    # the template parser's filter machinery.
    error_class = TemplateSyntaxError
    def __init__(self, parser, *args, **kwargs):
        self.template_parser = parser
        super(TemplateIfParser, self).__init__(*args, **kwargs)
    def create_var(self, value):
        # Wrap each operand so both its resolved value and original text
        # are available (text is used for error display).
        return TemplateLiteral(self.template_parser.compile_filter(value), value)
@register.tag('if')
def do_if(parser, token):
    """
    The ``{% if %}`` tag evaluates a variable, and if that variable is "true"
    (i.e., exists, is not empty, and is not a false boolean value), the
    contents of the block are output:
    ::
        {% if athlete_list %}
            Number of athletes: {{ athlete_list|count }}
        {% elif athlete_in_locker_room_list %}
            Athletes should be out of the locker room soon!
        {% else %}
            No athletes.
        {% endif %}
    In the above, if ``athlete_list`` is not empty, the number of athletes will
    be displayed by the ``{{ athlete_list|count }}`` variable.
    As you can see, the ``if`` tag may take one or several ``{% elif %}``
    clauses, as well as an ``{% else %}`` clause that will be displayed if all
    previous conditions fail. These clauses are optional.
    ``if`` tags may use ``or``, ``and`` or ``not`` to test a number of
    variables or to negate a given variable::
        {% if not athlete_list %}
            There are no athletes.
        {% endif %}
        {% if athlete_list or coach_list %}
            There are some athletes or some coaches.
        {% endif %}
        {% if athlete_list and coach_list %}
            Both athletes and coaches are available.
        {% endif %}
        {% if not athlete_list or coach_list %}
            There are no athletes, or there are some coaches.
        {% endif %}
        {% if athlete_list and not coach_list %}
            There are some athletes and absolutely no coaches.
        {% endif %}
    Comparison operators are also available, and the use of filters is also
    allowed, for example::
        {% if articles|length >= 5 %}...{% endif %}
    Arguments and operators _must_ have a space between them, so
    ``{% if 1>2 %}`` is not a valid if tag.
    All supported operators are: ``or``, ``and``, ``in``, ``not in``
    ``==`` (or ``=``), ``!=``, ``>``, ``>=``, ``<`` and ``<=``.
    Operator precedence follows Python.
    """
    # {% if ... %} — each (condition, nodelist) pair is collected in order;
    # IfNode renders the first pair whose condition evaluates truthy.
    bits = token.split_contents()[1:]
    condition = TemplateIfParser(parser, bits).parse()
    nodelist = parser.parse(('elif', 'else', 'endif'))
    conditions_nodelists = [(condition, nodelist)]
    token = parser.next_token()
    # {% elif ... %} (repeatable)
    while token.contents.startswith('elif'):
        bits = token.split_contents()[1:]
        condition = TemplateIfParser(parser, bits).parse()
        nodelist = parser.parse(('elif', 'else', 'endif'))
        conditions_nodelists.append((condition, nodelist))
        token = parser.next_token()
    # {% else %} (optional) — stored with a None condition, i.e. always true.
    if token.contents == 'else':
        nodelist = parser.parse(('endif',))
        conditions_nodelists.append((None, nodelist))
        token = parser.next_token()
    # {% endif %}
    assert token.contents == 'endif'
    return IfNode(conditions_nodelists)
@register.tag
def ifchanged(parser, token):
    """
    Checks if a value has changed from the last iteration of a loop.

    The ``{% ifchanged %}`` block tag is used within a loop. It has two
    possible uses.

    1. Checks its own rendered contents against its previous state and only
       displays the content if it has changed. For example, this displays a
       list of days, only displaying the month if it changes::

            <h1>Archive for {{ year }}</h1>
            {% for date in days %}
                {% ifchanged %}<h3>{{ date|date:"F" }}</h3>{% endifchanged %}
                <a href="{{ date|date:"M/d"|lower }}/">{{ date|date:"j" }}</a>
            {% endfor %}

    2. If given one or more variables, check whether any variable has changed.
       For example, the following shows the date every time it changes, while
       showing the hour if either the hour or the date has changed::

            {% for date in days %}
                {% ifchanged date.date %} {{ date.date }} {% endifchanged %}
                {% ifchanged date.hour date.date %}
                    {{ date.hour }}
                {% endifchanged %}
            {% endfor %}
    """
    bits = token.split_contents()
    # Body shown when the watched value(s) differ from the last iteration.
    changed_nodes = parser.parse(('else', 'endifchanged'))
    token = parser.next_token()
    # Optional {% else %} branch shown when nothing changed.
    unchanged_nodes = NodeList()
    if token.contents == 'else':
        unchanged_nodes = parser.parse(('endifchanged',))
        parser.delete_first_token()
    # With no arguments, IfChangedNode compares its own rendered output.
    watched = [parser.compile_filter(bit) for bit in bits[1:]]
    return IfChangedNode(changed_nodes, unchanged_nodes, *watched)
@register.tag
def ssi(parser, token):
    """
    Outputs the contents of a given file into the page.

    Like a simple "include" tag, the ``ssi`` tag includes the contents
    of another file -- which must be specified using an absolute path --
    in the current page::

        {% ssi "/home/html/ljworld.com/includes/right_generic.html" %}

    If the optional "parsed" parameter is given, the contents of the included
    file are evaluated as template code, with the current context::

        {% ssi "/home/html/ljworld.com/includes/right_generic.html" parsed %}
    """
    bits = token.split_contents()
    if len(bits) not in (2, 3):
        raise TemplateSyntaxError("'ssi' tag takes one argument: the path to"
                                  " the file to be included")
    # A third bit, if present, must be the literal keyword 'parsed'.
    parsed = len(bits) == 3
    if parsed and bits[2] != 'parsed':
        raise TemplateSyntaxError("Second (optional) argument to %s tag"
                                  " must be 'parsed'" % bits[0])
    filepath = parser.compile_filter(bits[1])
    return SsiNode(filepath, parsed)
@register.tag
def load(parser, token):
    """
    Loads a custom template tag set.
    For example, to load the template tags in
    ``django/templatetags/news/photos.py``::
        {% load news.photos %}
    Can also be used to load an individual tag/filter from
    a library::
        {% load byline from news %}
    """
    # token.split_contents() isn't useful here because this tag doesn't accept variable as arguments
    bits = token.contents.split()
    if len(bits) >= 4 and bits[-2] == "from":
        # {% load name1 name2 ... from library %}: copy only the named
        # tags/filters into a temporary library.
        try:
            taglib = bits[-1]
            lib = get_library(taglib)
        except InvalidTemplateLibrary as e:
            raise TemplateSyntaxError("'%s' is not a valid tag library: %s" %
                                      (taglib, e))
        else:
            temp_lib = Library()
            for name in bits[1:-2]:
                if name in lib.tags:
                    temp_lib.tags[name] = lib.tags[name]
                    # a name could be a tag *and* a filter, so check for both
                    if name in lib.filters:
                        temp_lib.filters[name] = lib.filters[name]
                elif name in lib.filters:
                    temp_lib.filters[name] = lib.filters[name]
                else:
                    raise TemplateSyntaxError("'%s' is not a valid tag or filter in tag library '%s'" %
                                              (name, taglib))
            parser.add_library(temp_lib)
    else:
        # {% load lib1 lib2 ... %}: register each whole library.
        for taglib in bits[1:]:
            # add the library to the parser
            try:
                lib = get_library(taglib)
                parser.add_library(lib)
            except InvalidTemplateLibrary as e:
                raise TemplateSyntaxError("'%s' is not a valid tag library: %s" %
                                          (taglib, e))
    return LoadNode()
@register.tag
def now(parser, token):
    """
    Displays the date, formatted according to the given string.

    Uses the same format as PHP's ``date()`` function; see http://php.net/date
    for all the possible values.

    Sample usage::

        It is {% now "jS F Y H:i" %}
    """
    args = token.split_contents()
    if len(args) != 2:
        raise TemplateSyntaxError("'now' statement takes one argument")
    # Strip the surrounding quote characters from the format literal.
    return NowNode(args[1][1:-1])
@register.tag
def regroup(parser, token):
    """
    Regroups a list of alike objects by a common attribute.
    This complex tag is best illustrated by use of an example: say that
    ``people`` is a list of ``Person`` objects that have ``first_name``,
    ``last_name``, and ``gender`` attributes, and you'd like to display a list
    that looks like:
        * Male:
            * George Bush
            * Bill Clinton
        * Female:
            * Margaret Thatcher
            * Colendeeza Rice
        * Unknown:
            * Pat Smith
    The following snippet of template code would accomplish this dubious task::
        {% regroup people by gender as grouped %}
        <ul>
        {% for group in grouped %}
            <li>{{ group.grouper }}
            <ul>
                {% for item in group.list %}
                <li>{{ item }}</li>
                {% endfor %}
            </ul>
        {% endfor %}
        </ul>
    As you can see, ``{% regroup %}`` populates a variable with a list of
    objects with ``grouper`` and ``list`` attributes. ``grouper`` contains the
    item that was grouped by; ``list`` contains the list of objects that share
    that ``grouper``. In this case, ``grouper`` would be ``Male``, ``Female``
    and ``Unknown``, and ``list`` is the list of people with those genders.
    Note that ``{% regroup %}`` does not work when the list to be grouped is not
    sorted by the key you are grouping by! This means that if your list of
    people was not sorted by gender, you'd need to make sure it is sorted
    before using it, i.e.::
        {% regroup people|dictsort:"gender" by gender as grouped %}
    """
    # Expected form: {% regroup <target> by <attr> as <var_name> %}
    bits = token.split_contents()
    if len(bits) != 6:
        raise TemplateSyntaxError("'regroup' tag takes five arguments")
    target = parser.compile_filter(bits[1])
    if bits[2] != 'by':
        raise TemplateSyntaxError("second argument to 'regroup' tag must be 'by'")
    if bits[4] != 'as':
        raise TemplateSyntaxError("next-to-last argument to 'regroup' tag must"
                                  " be 'as'")
    var_name = bits[5]
    # RegroupNode will take each item in 'target', put it in the context under
    # 'var_name', evaluate 'var_name'.'expression' in the current context, and
    # group by the resulting value. After all items are processed, it will
    # save the final result in the context under 'var_name', thus clearing the
    # temporary values. This hack is necessary because the template engine
    # doesn't provide a context-aware equivalent of Python's getattr.
    expression = parser.compile_filter(var_name +
                                       VARIABLE_ATTRIBUTE_SEPARATOR +
                                       bits[3])
    return RegroupNode(target, expression, var_name)
@register.tag
def spaceless(parser, token):
    """
    Removes whitespace between HTML tags, including tab and newline characters.
    Example usage::
        {% spaceless %}
            <p>
                <a href="foo/">Foo</a>
            </p>
        {% endspaceless %}
    This example would return this HTML::
        <p><a href="foo/">Foo</a></p>
    Only space between *tags* is normalized -- not space between tags and text.
    In this example, the space around ``Hello`` won't be stripped::
        {% spaceless %}
            <strong>
                Hello
            </strong>
        {% endspaceless %}
    """
    # Collect the block body, drop the {% endspaceless %} token, and let
    # SpacelessNode do the whitespace stripping at render time.
    nodelist = parser.parse(('endspaceless',))
    parser.delete_first_token()
    return SpacelessNode(nodelist)
@register.tag
def templatetag(parser, token):
    """
    Outputs one of the bits used to compose template tags.

    Since the template system has no concept of "escaping", to display one of
    the bits used in template tags, you must use the ``{% templatetag %}`` tag.

    The argument tells which template bit to output:

        ==================  =======
        Argument            Outputs
        ==================  =======
        ``openblock``       ``{%``
        ``closeblock``      ``%}``
        ``openvariable``    ``{{``
        ``closevariable``   ``}}``
        ``openbrace``       ``{``
        ``closebrace``      ``}``
        ``opencomment``     ``{#``
        ``closecomment``    ``#}``
        ==================  =======
    """
    # Plain split: the argument is always a literal keyword, never a variable.
    pieces = token.contents.split()
    if len(pieces) != 2:
        raise TemplateSyntaxError("'templatetag' statement takes one argument")
    bit_name = pieces[1]
    if bit_name not in TemplateTagNode.mapping:
        raise TemplateSyntaxError("Invalid templatetag argument: '%s'."
                                  " Must be one of: %s" %
                                  (bit_name, list(TemplateTagNode.mapping)))
    return TemplateTagNode(bit_name)
@register.tag
def url(parser, token):
    r"""
    Returns an absolute URL matching given view with its parameters.
    This is a way to define links that aren't tied to a particular URL
    configuration::
        {% url "path.to.some_view" arg1 arg2 %}
        or
        {% url "path.to.some_view" name1=value1 name2=value2 %}
    The first argument is a path to a view. It can be an absolute Python path
    or just ``app_name.view_name`` without the project name if the view is
    located inside the project.
    Other arguments are space-separated values that will be filled in place of
    positional and keyword arguments in the URL. Don't mix positional and
    keyword arguments.
    All arguments for the URL should be present.
    For example if you have a view ``app_name.client`` taking client's id and
    the corresponding line in a URLconf looks like this::
        ('^client/(\d+)/$', 'app_name.client')
    and this app's URLconf is included into the project's URLconf under some
    path::
        ('^clients/', include('project_name.app_name.urls'))
    then in a template you can create a link for a certain client like this::
        {% url "app_name.client" client.id %}
    The URL will look like ``/clients/client/123/``.
    The first argument can also be a named URL instead of the Python path to
    the view callable. For example if the URLconf entry looks like this::
        url('^client/(\d+)/$', name='client-detail-view')
    then in the template you can use::
        {% url "client-detail-view" client.id %}
    There is even another possible value type for the first argument. It can be
    the name of a template variable that will be evaluated to obtain the view
    name or the URL name, e.g.::
        {% with view_path="app_name.client" %}
        {% url view_path client.id %}
        {% endwith %}
        or,
        {% with url_name="client-detail-view" %}
        {% url url_name client.id %}
        {% endwith %}
    """
    bits = token.split_contents()
    if len(bits) < 2:
        raise TemplateSyntaxError("'%s' takes at least one argument"
                                  " (path to a view)" % bits[0])
    viewname = parser.compile_filter(bits[1])
    args = []
    kwargs = {}
    asvar = None
    bits = bits[2:]
    # Trailing "as var" captures the URL into a context variable instead of
    # outputting it.
    if len(bits) >= 2 and bits[-2] == 'as':
        asvar = bits[-1]
        bits = bits[:-2]
    if len(bits):
        # Each remaining bit is either "name=value" (keyword) or a bare
        # positional value; kwarg_re distinguishes the two.
        for bit in bits:
            match = kwarg_re.match(bit)
            if not match:
                raise TemplateSyntaxError("Malformed arguments to url tag")
            name, value = match.groups()
            if name:
                kwargs[name] = parser.compile_filter(value)
            else:
                args.append(parser.compile_filter(value))
    return URLNode(viewname, args, kwargs, asvar)
@register.tag
def verbatim(parser, token):
    """
    Stops the template engine from rendering the contents of this block tag.
    Usage::
        {% verbatim %}
            {% don't process this %}
        {% endverbatim %}
    You can also designate a specific closing tag block (allowing the
    unrendered use of ``{% endverbatim %}``)::
        {% verbatim myblock %}
            ...
        {% endverbatim myblock %}
    """
    nodelist = parser.parse(('endverbatim',))
    parser.delete_first_token()
    # The collected nodes are rendered once here, against a fresh empty
    # Context, and the resulting text is frozen into the VerbatimNode.
    return VerbatimNode(nodelist.render(Context()))
@register.tag
def widthratio(parser, token):
    """
    For creating bar charts and such, this tag calculates the ratio of a given
    value to a maximum value, and then applies that ratio to a constant.

    For example::

        <img src="bar.png" alt="Bar"
             height="10" width="{% widthratio this_value max_value max_width %}" />

    If ``this_value`` is 175, ``max_value`` is 200, and ``max_width`` is 100,
    the image in the above example will be 88 pixels wide
    (because 175/200 = .875; .875 * 100 = 87.5 which is rounded up to 88).

    In some cases you might want to capture the result of widthratio in a
    variable. It can be useful for instance in a blocktrans like this::

        {% widthratio this_value max_value max_width as width %}
        {% blocktrans %}The width is: {{ width }}{% endblocktrans %}
    """
    bits = token.split_contents()
    asvar = None
    # Optional trailing "as var": peel it off before validating arity.
    if len(bits) == 6:
        bits, as_kw, asvar = bits[:4], bits[4], bits[5]
        if as_kw != 'as':
            raise TemplateSyntaxError("Invalid syntax in widthratio tag. Expecting 'as' keyword")
    if len(bits) != 4:
        raise TemplateSyntaxError("widthratio takes at least three arguments")
    _, value_expr, max_value_expr, width_expr = bits
    return WidthRatioNode(parser.compile_filter(value_expr),
                          parser.compile_filter(max_value_expr),
                          parser.compile_filter(width_expr),
                          asvar=asvar)
@register.tag('with')
def do_with(parser, token):
    """
    Adds one or more values to the context (inside of this block) for caching
    and easy access.

    For example::

        {% with total=person.some_sql_method %}
            {{ total }} object{{ total|pluralize }}
        {% endwith %}

    Multiple values can be added to the context::

        {% with foo=1 bar=2 %}
            ...
        {% endwith %}

    The legacy format of ``{% with person.some_sql_method as total %}`` is
    still accepted.
    """
    bits = token.split_contents()
    leftover = bits[1:]
    # token_kwargs() consumes the assignments it understands from the front
    # of the list, leaving anything it could not parse behind.
    extra_context = token_kwargs(leftover, parser, support_legacy=True)
    if not extra_context:
        raise TemplateSyntaxError("%r expected at least one variable "
                                  "assignment" % bits[0])
    if leftover:
        raise TemplateSyntaxError("%r received an invalid token: %r" %
                                  (bits[0], leftover[0]))
    body = parser.parse(('endwith',))
    parser.delete_first_token()
    return WithNode(None, None, body, extra_context=extra_context)
| apache-2.0 |
ProfessionalIT/maxigenios-website | sdk/google_appengine/lib/PyAMF-0.7.2/pyamf/tests/remoting/test_remoteobject.py | 6 | 8543 | # -*- coding: utf-8 -*-
#
# Copyright (c) The PyAMF Project.
# See LICENSE.txt for details.
"""
RemoteObject Tests.
@since: 0.1
"""
import unittest
import pyamf
from pyamf import remoting
from pyamf.remoting import amf3, gateway
from pyamf.flex import messaging
class RandomIdGeneratorTestCase(unittest.TestCase):
    """Sanity checks for :func:`amf3.generate_random_id`."""

    def test_generate(self):
        # Five successive ids must all be distinct from one another.
        seen = []
        for _ in range(5):
            new_id = amf3.generate_random_id()
            self.assertTrue(new_id not in seen)
            seen.append(new_id)
class AcknowlegdementGeneratorTestCase(unittest.TestCase):
    """Tests for :func:`amf3.generate_acknowledgement`."""

    def _check_common_fields(self, ack):
        # Every acknowledgement must carry a message id, client id and
        # timestamp, however it was produced.
        self.assertTrue(isinstance(ack, messaging.AcknowledgeMessage))
        self.assertTrue(ack.messageId is not None)
        self.assertTrue(ack.clientId is not None)
        self.assertTrue(ack.timestamp is not None)

    def test_generate(self):
        self._check_common_fields(amf3.generate_acknowledgement())

    def test_request(self):
        # When generated for a request, the ack must echo the request's
        # messageId as its correlationId.
        ack = amf3.generate_acknowledgement(pyamf.ASObject(messageId='123123'))
        self._check_common_fields(ack)
        self.assertEqual(ack.correlationId, '123123')
class RequestProcessorTestCase(unittest.TestCase):
    """
    Tests for amf3.RequestProcessor: dispatching Flex messaging requests
    (ping, remoting call, disconnect, async) against a gateway and wrapping
    the outcome in a remoting.Response.
    """
    def test_create(self):
        rp = amf3.RequestProcessor('xyz')
        self.assertEqual(rp.gateway, 'xyz')
    def test_ping(self):
        # CommandMessage operation 5 is a client ping; no gateway required.
        message = messaging.CommandMessage(operation=5)
        rp = amf3.RequestProcessor(None)
        request = remoting.Request('null', body=[message])
        response = rp(request)
        ack = response.body
        self.assertTrue(isinstance(response, remoting.Response))
        self.assertEqual(response.status, remoting.STATUS_OK)
        self.assertTrue(isinstance(ack, messaging.AcknowledgeMessage))
        self.assertEqual(ack.body, True)
    def test_request(self):
        def echo(x):
            return x
        gw = gateway.BaseGateway({'echo': echo})
        rp = amf3.RequestProcessor(gw)
        message = messaging.RemotingMessage(
            body=['spam.eggs'], operation='echo'
        )
        request = remoting.Request('null', body=[message])
        response = rp(request)
        ack = response.body
        self.assertTrue(isinstance(response, remoting.Response))
        self.assertEqual(response.status, remoting.STATUS_OK)
        self.assertTrue(isinstance(ack, messaging.AcknowledgeMessage))
        self.assertEqual(ack.body, 'spam.eggs')
    def test_error(self):
        # A service raising an exception yields an ErrorMessage; without
        # debug mode, no extended data is expected.
        def echo(x):
            raise TypeError('foo')
        gw = gateway.BaseGateway({'echo': echo})
        rp = amf3.RequestProcessor(gw)
        message = messaging.RemotingMessage(
            body=['spam.eggs'], operation='echo'
        )
        request = remoting.Request('null', body=[message])
        response = rp(request)
        ack = response.body
        self.assertFalse(gw.debug)
        self.assertTrue(isinstance(response, remoting.Response))
        self.assertEqual(response.status, remoting.STATUS_ERROR)
        self.assertTrue(isinstance(ack, messaging.ErrorMessage))
        self.assertEqual(ack.faultCode, 'TypeError')
        self.assertEqual(ack.faultString, 'foo')
    def test_error_debug(self):
        # With debug enabled, the error message carries extendedData
        # (e.g. traceback details).
        def echo(x):
            raise TypeError('foo')
        gw = gateway.BaseGateway({'echo': echo}, debug=True)
        rp = amf3.RequestProcessor(gw)
        message = messaging.RemotingMessage(
            body=['spam.eggs'], operation='echo'
        )
        request = remoting.Request('null', body=[message])
        response = rp(request)
        ack = response.body
        self.assertTrue(gw.debug)
        self.assertTrue(isinstance(response, remoting.Response))
        self.assertEqual(response.status, remoting.STATUS_ERROR)
        self.assertTrue(isinstance(ack, messaging.ErrorMessage))
        self.assertEqual(ack.faultCode, 'TypeError')
        self.assertNotEquals(ack.extendedData, None)
    def test_too_many_args(self):
        # Calling a 1-arg service with 2 args surfaces as a TypeError fault.
        def spam(bar):
            return bar
        gw = gateway.BaseGateway({'spam': spam})
        rp = amf3.RequestProcessor(gw)
        message = messaging.RemotingMessage(
            body=['eggs', 'baz'], operation='spam'
        )
        request = remoting.Request('null', body=[message])
        response = rp(request)
        ack = response.body
        self.assertTrue(isinstance(response, remoting.Response))
        self.assertEqual(response.status, remoting.STATUS_ERROR)
        self.assertTrue(isinstance(ack, messaging.ErrorMessage))
        self.assertEqual(ack.faultCode, 'TypeError')
    def test_preprocess(self):
        def echo(x):
            return x
        self.called = False
        # The preprocessor must receive the ServiceRequest plus the call args.
        def preproc(sr, *args):
            self.called = True
            self.assertEqual(args, ('spam.eggs',))
            self.assertTrue(isinstance(sr, gateway.ServiceRequest))
        gw = gateway.BaseGateway({'echo': echo}, preprocessor=preproc)
        rp = amf3.RequestProcessor(gw)
        message = messaging.RemotingMessage(
            body=['spam.eggs'], operation='echo'
        )
        request = remoting.Request('null', body=[message])
        response = rp(request)
        ack = response.body
        self.assertTrue(isinstance(response, remoting.Response))
        self.assertEqual(response.status, remoting.STATUS_OK)
        self.assertTrue(isinstance(ack, messaging.AcknowledgeMessage))
        self.assertEqual(ack.body, 'spam.eggs')
        self.assertTrue(self.called)
    def test_fail_preprocess(self):
        # A preprocessor that raises must turn the whole call into an error.
        def preproc(sr, *args):
            raise IndexError
        def echo(x):
            return x
        gw = gateway.BaseGateway({'echo': echo}, preprocessor=preproc)
        rp = amf3.RequestProcessor(gw)
        message = messaging.RemotingMessage(
            body=['spam.eggs'], operation='echo'
        )
        request = remoting.Request('null', body=[message])
        response = rp(request)
        ack = response.body
        self.assertTrue(isinstance(response, remoting.Response))
        self.assertEqual(response.status, remoting.STATUS_ERROR)
        self.assertTrue(isinstance(ack, messaging.ErrorMessage))
    def test_destination(self):
        # Services may be addressed as "<destination>.<operation>".
        def echo(x):
            return x
        gw = gateway.BaseGateway({'spam.eggs': echo})
        rp = amf3.RequestProcessor(gw)
        message = messaging.RemotingMessage(
            body=[None], destination='spam', operation='eggs'
        )
        request = remoting.Request('null', body=[message])
        response = rp(request)
        ack = response.body
        self.assertTrue(isinstance(response, remoting.Response))
        self.assertEqual(response.status, remoting.STATUS_OK)
        self.assertTrue(isinstance(ack, messaging.AcknowledgeMessage))
        self.assertEqual(ack.body, None)
    def test_disconnect(self):
        # CommandMessage operation 12 is a disconnect; acknowledged with OK.
        message = messaging.CommandMessage(operation=12)
        rp = amf3.RequestProcessor(None)
        request = remoting.Request('null', body=[message])
        response = rp(request)
        ack = response.body
        self.assertTrue(isinstance(response, remoting.Response))
        self.assertEqual(response.status, remoting.STATUS_OK)
        self.assertTrue(isinstance(ack, messaging.AcknowledgeMessage))
    def test_async(self):
        message = messaging.AsyncMessage()
        rp = amf3.RequestProcessor(None)
        request = remoting.Request('null', body=[message])
        response = rp(request)
        ack = response.body
        self.assertTrue(isinstance(response, remoting.Response))
        self.assertEqual(response.status, remoting.STATUS_OK)
        self.assertTrue(isinstance(ack, messaging.AcknowledgeMessage))
    def test_error_unicode_message(self):
        """
        See #727
        """
        # Non-ASCII exception text must round-trip into the fault string.
        def echo(x):
            raise TypeError(u'ƒøø')
        gw = gateway.BaseGateway({'echo': echo})
        rp = amf3.RequestProcessor(gw)
        message = messaging.RemotingMessage(
            body=['spam.eggs'], operation='echo'
        )
        request = remoting.Request('null', body=[message])
        response = rp(request)
        ack = response.body
        self.assertFalse(gw.debug)
        self.assertTrue(isinstance(response, remoting.Response))
        self.assertEqual(response.status, remoting.STATUS_ERROR)
        self.assertTrue(isinstance(ack, messaging.ErrorMessage))
        self.assertEqual(ack.faultCode, 'TypeError')
        self.assertEqual(ack.faultString, u'ƒøø')
| mit |
aboutsajjad/Bridge | app_packages/youtube_dl/extractor/bleacherreport.py | 39 | 4164 | # coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from .amp import AMPIE
from ..utils import (
ExtractorError,
int_or_none,
parse_iso8601,
)
class BleacherReportIE(InfoExtractor):
    """Extracts the video attached to a bleacherreport.com article and
    delegates to the appropriate hoster-specific extractor."""

    _VALID_URL = r'https?://(?:www\.)?bleacherreport\.com/articles/(?P<id>\d+)'
    _TESTS = [{
        'url': 'http://bleacherreport.com/articles/2496438-fsu-stat-projections-is-jalen-ramsey-best-defensive-player-in-college-football',
        'md5': 'a3ffc3dc73afdbc2010f02d98f990f20',
        'info_dict': {
            'id': '2496438',
            'ext': 'mp4',
            'title': 'FSU Stat Projections: Is Jalen Ramsey Best Defensive Player in College Football?',
            'uploader_id': 3992341,
            'description': 'CFB, ACC, Florida State',
            'timestamp': 1434380212,
            'upload_date': '20150615',
            'uploader': 'Team Stream Now ',
        },
        'add_ie': ['Ooyala'],
    }, {
        'url': 'http://bleacherreport.com/articles/2586817-aussie-golfers-get-fright-of-their-lives-after-being-chased-by-angry-kangaroo',
        'md5': '6a5cd403418c7b01719248ca97fb0692',
        'info_dict': {
            'id': '2586817',
            'ext': 'webm',
            'title': 'Aussie Golfers Get Fright of Their Lives After Being Chased by Angry Kangaroo',
            'timestamp': 1446839961,
            'uploader': 'Sean Fay',
            'description': 'md5:b1601e2314c4d8eec23b6eafe086a757',
            'uploader_id': 6466954,
            'upload_date': '20151011',
        },
        'add_ie': ['Youtube'],
    }]

    def _real_extract(self, url):
        article_id = self._match_id(url)

        article_data = self._download_json(
            'http://api.bleacherreport.com/api/v1/articles/%s' % article_id,
            article_id)['article']

        video = article_data.get('video')
        if not video:
            raise ExtractorError('no video in the article', expected=True)

        primary_photo = article_data.get('primaryPhoto')
        thumbnails = [{
            'url': primary_photo['url'],
            'width': primary_photo.get('width'),
            'height': primary_photo.get('height'),
        }] if primary_photo else []

        # Map the hosting service to the URL its extractor understands;
        # unknown services fall back to plain string concatenation, exactly
        # as the legacy if/elif chain did.
        video_type = video['type']
        resolvers = {
            'cms.bleacherreport.com': lambda vid: 'http://bleacherreport.com/video_embed?id=%s' % vid,
            'ooyala.com': lambda vid: 'ooyala:%s' % vid,
            'youtube.com': lambda vid: vid,
            'vine.co': lambda vid: 'https://vine.co/v/%s' % vid,
        }
        resolve = resolvers.get(video_type, lambda vid: video_type + vid)

        return {
            '_type': 'url_transparent',
            'id': article_id,
            'title': article_data['title'],
            'uploader': article_data.get('author', {}).get('name'),
            'uploader_id': article_data.get('authorId'),
            'timestamp': parse_iso8601(article_data.get('createdAt')),
            'thumbnails': thumbnails,
            'comment_count': int_or_none(article_data.get('commentsCount')),
            'view_count': int_or_none(article_data.get('hitCount')),
            'url': resolve(video['id']),
        }
class BleacherReportCMSIE(AMPIE):
    """Extractor for Bleacher Report CMS embeds, driven by the AMP feed."""

    _VALID_URL = r'https?://(?:www\.)?bleacherreport\.com/video_embed\?id=(?P<id>[0-9a-f-]{36})'
    _TESTS = [{
        'url': 'http://bleacherreport.com/video_embed?id=8fd44c2f-3dc5-4821-9118-2c825a98c0e1',
        'md5': '2e4b0a997f9228ffa31fada5c53d1ed1',
        'info_dict': {
            'id': '8fd44c2f-3dc5-4821-9118-2c825a98c0e1',
            'ext': 'flv',
            'title': 'Cena vs. Rollins Would Expose the Heavyweight Division',
            'description': 'md5:984afb4ade2f9c0db35f3267ed88b36e',
        },
    }]

    def _real_extract(self, url):
        video_id = self._match_id(url)
        # The AMP feed carries all metadata; we only need to pin the id.
        feed_info = self._extract_feed_info(
            'http://cms.bleacherreport.com/media/items/%s/akamai.json' % video_id)
        feed_info['id'] = video_id
        return feed_info
| mit |
sheltowt/PIXLEE-computer-vision-clustering | PCV/tools/imregistration.py | 21 | 3547 | from PIL import Image
from xml.dom import minidom
from numpy import *
from pylab import *
from scipy import ndimage, linalg
from scipy.misc import imsave
import os
def read_points_from_xml(xmlFileName):
    """ Read face-alignment control points from an XML file.

        Returns a dict mapping each image file name to an array of six
        integers (xf, yf, xs, ys, xm, ym). """
    document = minidom.parse(xmlFileName)
    faces = {}
    for face_node in document.getElementsByTagName('face'):
        attrs = face_node.attributes
        coords = [int(attrs[name].value)
                  for name in ('xf', 'yf', 'xs', 'ys', 'xm', 'ym')]
        faces[attrs['file'].value] = array(coords)
    return faces
def write_points_to_xml(faces, xmlFileName):
    """ Write face-alignment control points to an XML file.

        faces:       dict mapping image file name -> sequence of six ints
                     (xf, yf, xs, ys, xm, ym), as produced by
                     read_points_from_xml().
        xmlFileName: destination path. """
    xmldoc = minidom.Document()
    xmlFaces = xmldoc.createElement("faces")

    attr_names = ('xf', 'yf', 'xs', 'ys', 'xm', 'ym')
    for fname in faces.keys():
        xmlFace = xmldoc.createElement("face")
        xmlFace.setAttribute("file", fname)
        for name, value in zip(attr_names, faces[fname]):
            xmlFace.setAttribute(name, "%d" % value)
        xmlFaces.appendChild(xmlFace)

    xmldoc.appendChild(xmlFaces)

    # toprettyxml(encoding=...) returns a *bytes* object, so the file must
    # be opened in binary mode (the original text-mode open breaks on
    # Python 3).  The context manager also guarantees the handle is closed
    # even if the write fails; the original leaked it on error.
    with open(xmlFileName, "wb") as fp:
        fp.write(xmldoc.toprettyxml(encoding='utf-8'))
def compute_rigid_transform(refpoints, points):
    """ Compute the similarity transform (rotation, uniform scale and
        translation) that best maps *points* onto *refpoints*.

        Both arguments are flat sequences [x0, y0, x1, y1, x2, y2] of three
        corresponding control points.  Returns (R, tx, ty), where R is the
        2x2 rotation matrix including scale. """
    rows = []
    targets = []
    # Each point pair contributes two linear equations in the unknowns
    # (a, b, tx, ty) of the transform [[a, -b], [b, a]] plus (tx, ty).
    for i in range(0, 6, 2):
        px, py = points[i], points[i + 1]
        rows.append([px, -py, 1, 0])
        rows.append([py, px, 0, 1])
        targets.append(refpoints[i])
        targets.append(refpoints[i + 1])

    # least-squares solution minimizing ||A x - y||
    a, b, tx, ty = linalg.lstsq(array(rows), array(targets))[0]

    R = array([[a, -b], [b, a]])  # rotation matrix including scale
    return R, tx, ty
def rigid_alignment(faces,path,plotflag=False):
    """ Align images rigidly and save as new images.

        faces:    dict mapping image file name -> six control-point
                  coordinates, as returned by read_points_from_xml().
        path:     directory containing the images; aligned copies are
                  written to its 'aligned/' subdirectory (which must
                  already exist — imsave does not create it).
        plotflag: set plotflag=True to plot the images.

        NOTE(review): uses faces.values()[0] and integer division for the
        border, so this function is Python 2 only. """

    # take the points in the first image as reference points
    # (Python 2: dict.values() returns a list; which image is "first"
    # depends on dict ordering and is therefore arbitrary)
    refpoints = faces.values()[0]

    # warp each image using affine transform
    for face in faces:
        points = faces[face]

        R,tx,ty = compute_rigid_transform(refpoints, points)
        # Swap rows/columns of R because image arrays index as (row, col)
        # i.e. (y, x), while the transform was solved in (x, y) coordinates.
        T = array([[R[1][1], R[1][0]], [R[0][1], R[0][0]]])

        im = array(Image.open(os.path.join(path,face)))
        im2 = zeros(im.shape, 'uint8')

        # warp each color channel
        # (assumes a color image with a channel axis; a 2-D grayscale
        # image would fail on the [:,:,i] indexing — TODO confirm inputs)
        for i in range(len(im.shape)):
            im2[:,:,i] = ndimage.affine_transform(im[:,:,i],linalg.inv(T),offset=[-ty,-tx])

        if plotflag:
            imshow(im2)
            show()

        # crop away border and save aligned images
        h,w = im2.shape[:2]
        border = (w+h)/20  # Python 2 integer division: ~5% of the mean side

        # crop away border
        imsave(os.path.join(path, 'aligned/'+face),im2[border:h-border,border:w-border,:])
| bsd-2-clause |
shepdelacreme/ansible | lib/ansible/modules/network/f5/bigip_snmp.py | 8 | 13167 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2017, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: bigip_snmp
short_description: Manipulate general SNMP settings on a BIG-IP
description:
- Manipulate general SNMP settings on a BIG-IP.
version_added: 2.4
options:
allowed_addresses:
description:
- Configures the IP addresses of the SNMP clients from which the snmpd
daemon accepts requests.
- This value can be hostnames, IP addresses, or IP networks.
- You may specify a single list item of C(default) to set the value back
to the system's default of C(127.0.0.0/8).
- You can remove all allowed addresses by either providing the word C(none), or
by providing the empty string C("").
version_added: 2.6
contact:
description:
- Specifies the name of the person who administers the SNMP
service for this system.
agent_status_traps:
description:
- When C(enabled), ensures that the system sends a trap whenever the
SNMP agent starts running or stops running. This is usually enabled
by default on a BIG-IP.
choices:
- enabled
- disabled
agent_authentication_traps:
description:
- When C(enabled), ensures that the system sends authentication warning
traps to the trap destinations. This is usually disabled by default on
a BIG-IP.
choices:
- enabled
- disabled
device_warning_traps:
description:
- When C(enabled), ensures that the system sends device warning traps
to the trap destinations. This is usually enabled by default on a
BIG-IP.
choices:
- enabled
- disabled
location:
description:
- Specifies the description of this system's physical location.
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
- Wojciech Wypior (@wojtek0806)
'''
EXAMPLES = r'''
- name: Set snmp contact
bigip_snmp:
contact: Joe User
password: secret
server: lb.mydomain.com
user: admin
validate_certs: false
delegate_to: localhost
- name: Set snmp location
bigip_snmp:
location: US West 1
password: secret
server: lb.mydomain.com
user: admin
validate_certs: no
delegate_to: localhost
'''
RETURN = r'''
agent_status_traps:
description: Value that the agent status traps was set to.
returned: changed
type: string
sample: enabled
agent_authentication_traps:
description: Value that the authentication status traps was set to.
returned: changed
type: string
sample: enabled
device_warning_traps:
description: Value that the warning status traps was set to.
returned: changed
type: string
sample: enabled
contact:
description: The new value for the person who administers SNMP on the device.
returned: changed
type: string
sample: Joe User
location:
description: The new value for the system's physical location.
returned: changed
type: string
sample: US West 1a
allowed_addresses:
description: The new allowed addresses for SNMP client connections.
returned: changed
type: list
sample: ['127.0.0.0/8', 'foo.bar.com', '10.10.10.10']
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import string_types
try:
from library.module_utils.network.f5.bigip import F5RestClient
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import cleanup_tokens
from library.module_utils.network.f5.common import f5_argument_spec
from library.module_utils.network.f5.common import transform_name
from library.module_utils.network.f5.common import exit_json
from library.module_utils.network.f5.common import fail_json
from library.module_utils.compat.ipaddress import ip_network
from library.module_utils.network.f5.common import is_valid_hostname
except ImportError:
from ansible.module_utils.network.f5.bigip import F5RestClient
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import cleanup_tokens
from ansible.module_utils.network.f5.common import f5_argument_spec
from ansible.module_utils.network.f5.common import transform_name
from ansible.module_utils.network.f5.common import exit_json
from ansible.module_utils.network.f5.common import fail_json
from ansible.module_utils.compat.ipaddress import ip_network
from ansible.module_utils.network.f5.common import is_valid_hostname
class Parameters(AnsibleF5Parameters):
    # Maps BIG-IP REST API attribute names (camelCase) to this module's
    # option names (snake_case).
    api_map = {
        'agentTrap': 'agent_status_traps',
        'authTrap': 'agent_authentication_traps',
        'bigipTraps': 'device_warning_traps',
        'sysLocation': 'location',
        'sysContact': 'contact',
        'allowedAddresses': 'allowed_addresses',
    }

    # Options whose want-vs-have differences are detected and pushed to
    # the device.
    updatables = [
        'agent_status_traps',
        'agent_authentication_traps',
        'device_warning_traps',
        'location',
        'contact',
        'allowed_addresses',
    ]

    # Options reported back to the user in the module result.
    returnables = [
        'agent_status_traps',
        'agent_authentication_traps',
        'device_warning_traps',
        'location', 'contact',
        'allowed_addresses',
    ]

    # REST attribute names sent in PATCH payloads to the device.
    api_attributes = [
        'agentTrap',
        'authTrap',
        'bigipTraps',
        'sysLocation',
        'sysContact',
        'allowedAddresses',
    ]
class ApiParameters(Parameters):
    """Parameters as read back from the BIG-IP REST API."""

    @property
    def allowed_addresses(self):
        # Normalize to a sorted, de-duplicated list so it compares cleanly
        # against the module-supplied value.
        addresses = self._values['allowed_addresses']
        if addresses is None:
            return None
        return sorted(set(addresses))
class ModuleParameters(Parameters):
    @property
    def allowed_addresses(self):
        """Validate and normalize the user-supplied SNMP client addresses.

        Accepts a string or a list; returns a sorted, de-duplicated list.
        Raises F5ModuleError on entries that are neither valid IPs/networks
        nor reasonably formatted hostnames.
        """
        if self._values['allowed_addresses'] is None:
            return None
        result = []
        addresses = self._values['allowed_addresses']
        if isinstance(addresses, string_types):
            # A bare '' or 'none' string clears the allowed list entirely.
            if addresses in ['', 'none']:
                return []
            else:
                addresses = [addresses]
        # A single 'default' (or empty) entry restores the system default.
        if len(addresses) == 1 and addresses[0] in ['default', '']:
            result = ['127.0.0.0/8']
            return result
        for address in addresses:
            try:
                # Check for valid IPv4 or IPv6 entries
                ip_network(u'%s' % str(address))
                result.append(address)
            except ValueError:
                # else fallback to checking reasonably well formatted hostnames
                if is_valid_hostname(address):
                    result.append(str(address))
                    continue
                raise F5ModuleError(
                    "The provided 'allowed_address' value {0} is not a valid IP or hostname".format(address)
                )
        result = list(set(result))
        result.sort()
        return result
class Changes(Parameters):
    def to_return(self):
        """Collect the returnable attributes into a plain dict.

        NOTE(review): the broad ``except Exception`` means a failing
        property silently yields a partially filled (and unfiltered)
        result; kept as-is because callers rely on this never raising.
        """
        result = {}
        try:
            for returnable in self.returnables:
                result[returnable] = getattr(self, returnable)
            result = self._filter_params(result)
        except Exception:
            pass
        return result
class UsableChanges(Changes):
    # Marker subclass: changes in the form sent to the BIG-IP API.
    pass
class ReportableChanges(Changes):
    # Marker subclass: changes in the form reported back to the user.
    pass
class Difference(object):
    """Computes per-option differences between desired and current state."""

    def __init__(self, want, have=None):
        self.want = want
        self.have = have

    def compare(self, param):
        # Use a specialized comparator (a property on this class) when one
        # exists for the option; otherwise fall back to plain inequality.
        try:
            result = getattr(self, param)
            return result
        except AttributeError:
            return self.__default(param)

    def __default(self, param):
        # Returns the wanted value when it differs from the current one (or
        # when the current state lacks the attribute); falls through and
        # returns None implicitly when the values are equal.
        attr1 = getattr(self.want, param)
        try:
            attr2 = getattr(self.have, param)
            if attr1 != attr2:
                return attr1
        except AttributeError:
            return attr1

    @property
    def allowed_addresses(self):
        # Order-insensitive comparison of the two address lists; returns
        # the sorted wanted list on difference, None (implicitly) otherwise.
        if self.want.allowed_addresses is None:
            return None
        if self.have.allowed_addresses is None:
            if self.want.allowed_addresses:
                return self.want.allowed_addresses
            return None
        want = set(self.want.allowed_addresses)
        have = set(self.have.allowed_addresses)
        if want != have:
            result = list(want)
            result.sort()
            return result
class ModuleManager(object):
    """Drives the read → compare → update cycle against the BIG-IP device."""

    def __init__(self, *args, **kwargs):
        self.module = kwargs.get('module', None)
        self.client = kwargs.get('client', None)
        self.have = ApiParameters()
        self.want = ModuleParameters(params=self.module.params)
        self.changes = UsableChanges()

    def _update_changed_options(self):
        # Populate self.changes with every option that differs between what
        # the user wants and what the device currently has.
        diff = Difference(self.want, self.have)
        updatables = Parameters.updatables
        changed = dict()
        for k in updatables:
            change = diff.compare(k)
            if change is None:
                continue
            else:
                if isinstance(change, dict):
                    changed.update(change)
                else:
                    changed[k] = change
        if changed:
            self.changes = UsableChanges(params=changed)
            return True
        return False

    def exec_module(self):
        """Entry point: run the update and build the Ansible result dict."""
        result = dict()

        changed = self.update()
        reportable = ReportableChanges(params=self.changes.to_return())
        changes = reportable.to_return()
        result.update(**changes)
        result.update(dict(changed=changed))
        self._announce_deprecations(result)
        return result

    def _announce_deprecations(self, result):
        # Surface any deprecation warnings queued under '__warnings'.
        warnings = result.pop('__warnings', [])
        for warning in warnings:
            self.module.deprecate(
                msg=warning['msg'],
                version=warning['version']
            )

    def should_update(self):
        result = self._update_changed_options()
        if result:
            return True
        return False

    def update(self):
        self.have = self.read_current_from_device()
        if not self.should_update():
            return False
        if self.module.check_mode:
            # Check mode: report "would change" without touching the device.
            return True
        self.update_on_device()
        return True

    def read_current_from_device(self):
        """GET the current SNMP settings from the device."""
        uri = "https://{0}:{1}/mgmt/tm/sys/snmp/".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
        )
        resp = self.client.api.get(uri)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] == 400:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)
        return ApiParameters(params=response)

    def update_on_device(self):
        """PATCH only the changed attributes to the device."""
        params = self.changes.api_params()
        uri = "https://{0}:{1}/mgmt/tm/sys/snmp/".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
        )
        resp = self.client.api.patch(uri, json=params)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] == 400:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)
class ArgumentSpec(object):
    """Builds the AnsibleModule argument specification for this module."""

    def __init__(self):
        self.supports_check_mode = True
        self.choices = ['enabled', 'disabled']
        on_off = self.choices

        # Module-specific options; the common F5 options are merged below.
        spec = dict(
            contact=dict(),
            agent_status_traps=dict(choices=on_off),
            agent_authentication_traps=dict(choices=on_off),
            device_warning_traps=dict(choices=on_off),
            location=dict(),
            allowed_addresses=dict(type='raw'),
        )

        self.argument_spec = {}
        self.argument_spec.update(f5_argument_spec)
        self.argument_spec.update(spec)
def main():
    """Module entry point: build the spec, run the manager, exit via Ansible."""
    spec = ArgumentSpec()
    module = AnsibleModule(
        argument_spec=spec.argument_spec,
        supports_check_mode=spec.supports_check_mode
    )
    client = F5RestClient(**module.params)

    try:
        mm = ModuleManager(module=module, client=client)
        results = mm.exec_module()
        cleanup_tokens(client)
        exit_json(module, results, client)
    except F5ModuleError as ex:
        # Always drop the auth token, even on failure.
        cleanup_tokens(client)
        fail_json(module, ex, client)
if __name__ == '__main__':
main()
| gpl-3.0 |
OsirisSPS/osiris-sps | client/share/plugins/AF9A4C281070FDB0F34CF417CDB168AB38C8A388/lib/test/test_random.py | 36 | 22281 | #!/usr/bin/env python
import unittest
import random
import time
import pickle
import warnings
from math import log, exp, pi, fsum, sin
from functools import reduce
from test import test_support
class TestBasicOps(unittest.TestCase):
    # Superclass with tests common to all generators.
    # Subclasses must arrange for self.gen to retrieve the Random instance
    # to be tested.
    # NOTE: this file is Python 2 only (xrange, long literals, test_support).

    def randomlist(self, n):
        """Helper function to make a list of random numbers"""
        return [self.gen.random() for i in xrange(n)]

    def test_autoseed(self):
        # Seeding with no argument derives the seed from the clock/OS, so
        # two seedings separated in time must yield different states.
        self.gen.seed()
        state1 = self.gen.getstate()
        time.sleep(0.1)
        self.gen.seed()      # different seeds at different times
        state2 = self.gen.getstate()
        self.assertNotEqual(state1, state2)

    def test_saverestore(self):
        N = 1000
        self.gen.seed()
        state = self.gen.getstate()
        randseq = self.randomlist(N)
        self.gen.setstate(state)    # should regenerate the same sequence
        self.assertEqual(randseq, self.randomlist(N))

    def test_seedargs(self):
        # Any hashable object is an acceptable seed; unhashables raise.
        for arg in [None, 0, 0L, 1, 1L, -1, -1L, 10**20, -(10**20),
                    3.14, 1+2j, 'a', tuple('abc')]:
            self.gen.seed(arg)
        for arg in [range(3), dict(one=1)]:
            self.assertRaises(TypeError, self.gen.seed, arg)
        self.assertRaises(TypeError, self.gen.seed, 1, 2)
        self.assertRaises(TypeError, type(self.gen), [])

    def test_jumpahead(self):
        self.gen.seed()
        state1 = self.gen.getstate()
        self.gen.jumpahead(100)
        state2 = self.gen.getstate()    # s/b distinct from state1
        self.assertNotEqual(state1, state2)
        self.gen.jumpahead(100)
        state3 = self.gen.getstate()    # s/b distinct from state2
        self.assertNotEqual(state2, state3)

        with test_support.check_py3k_warnings(quiet=True):
            self.assertRaises(TypeError, self.gen.jumpahead)  # needs an arg
            self.assertRaises(TypeError, self.gen.jumpahead, 2, 3)  # too many

    def test_sample(self):
        # For the entire allowable range of 0 <= k <= N, validate that
        # the sample is of the correct length and contains only unique items
        N = 100
        population = xrange(N)
        for k in xrange(N+1):
            s = self.gen.sample(population, k)
            self.assertEqual(len(s), k)
            uniq = set(s)
            self.assertEqual(len(uniq), k)
            self.assertTrue(uniq <= set(population))
        self.assertEqual(self.gen.sample([], 0), [])  # test edge case N==k==0

    def test_sample_distribution(self):
        # For the entire allowable range of 0 <= k <= N, validate that
        # sample generates all possible permutations
        n = 5
        pop = range(n)
        trials = 10000  # large num prevents false negatives without slowing normal case
        def factorial(n):
            return reduce(int.__mul__, xrange(1, n), 1)
        for k in xrange(n):
            expected = factorial(n) // factorial(n-k)
            perms = {}
            for i in xrange(trials):
                perms[tuple(self.gen.sample(pop, k))] = None
                if len(perms) == expected:
                    break
            else:
                self.fail()

    def test_sample_inputs(self):
        # SF bug #801342 -- population can be any iterable defining __len__()
        self.gen.sample(set(range(20)), 2)
        self.gen.sample(range(20), 2)
        self.gen.sample(xrange(20), 2)
        self.gen.sample(str('abcdefghijklmnopqrst'), 2)
        self.gen.sample(tuple('abcdefghijklmnopqrst'), 2)

    def test_sample_on_dicts(self):
        self.gen.sample(dict.fromkeys('abcdefghijklmnopqrst'), 2)

        # SF bug #1460340 -- random.sample can raise KeyError
        a = dict.fromkeys(range(10)+range(10,100,2)+range(100,110))
        self.gen.sample(a, 3)

        # A followup to bug #1460340: sampling from a dict could return
        # a subset of its keys or of its values, depending on the size of
        # the subset requested.
        N = 30
        d = dict((i, complex(i, i)) for i in xrange(N))
        for k in xrange(N+1):
            samp = self.gen.sample(d, k)
            # Verify that we got ints back (keys); the values are complex.
            for x in samp:
                self.assertTrue(type(x) is int)
        # After the loop samp holds the k == N (full) sample.
        samp.sort()
        self.assertEqual(samp, range(N))

    def test_gauss(self):
        # Ensure that the seed() method initializes all the hidden state.  In
        # particular, through 2.2.1 it failed to reset a piece of state used
        # by (and only by) the .gauss() method.
        for seed in 1, 12, 123, 1234, 12345, 123456, 654321:
            self.gen.seed(seed)
            x1 = self.gen.random()
            y1 = self.gen.gauss(0, 1)

            self.gen.seed(seed)
            x2 = self.gen.random()
            y2 = self.gen.gauss(0, 1)

            self.assertEqual(x1, x2)
            self.assertEqual(y1, y2)

    def test_pickling(self):
        state = pickle.dumps(self.gen)
        origseq = [self.gen.random() for i in xrange(10)]
        newgen = pickle.loads(state)
        restoredseq = [newgen.random() for i in xrange(10)]
        self.assertEqual(origseq, restoredseq)

    def test_bug_1727780(self):
        # verify that version-2-pickles can be loaded
        # fine, whether they are created on 32-bit or 64-bit
        # platforms, and that version-3-pickles load fine.
        files = [("randv2_32.pck", 780),
                 ("randv2_64.pck", 866),
                 ("randv3.pck", 343)]
        for file, value in files:
            f = open(test_support.findfile(file),"rb")
            r = pickle.load(f)
            f.close()
            self.assertEqual(r.randrange(1000), value)
class WichmannHill_TestBasicOps(TestBasicOps):
    # Tests for the legacy Wichmann-Hill generator (removed in Python 3).
    gen = random.WichmannHill()

    def test_setstate_first_arg(self):
        self.assertRaises(ValueError, self.gen.setstate, (2, None, None))

    def test_strong_jumpahead(self):
        # tests that jumpahead(n) semantics correspond to n calls to random()
        N = 1000
        s = self.gen.getstate()
        self.gen.jumpahead(N)
        r1 = self.gen.random()
        # now do it the slow way
        self.gen.setstate(s)
        for i in xrange(N):
            self.gen.random()
        r2 = self.gen.random()
        self.assertEqual(r1, r2)

    def test_gauss_with_whseed(self):
        # Ensure that the seed() method initializes all the hidden state.  In
        # particular, through 2.2.1 it failed to reset a piece of state used
        # by (and only by) the .gauss() method.
        for seed in 1, 12, 123, 1234, 12345, 123456, 654321:
            self.gen.whseed(seed)
            x1 = self.gen.random()
            y1 = self.gen.gauss(0, 1)

            self.gen.whseed(seed)
            x2 = self.gen.random()
            y2 = self.gen.gauss(0, 1)

            self.assertEqual(x1, x2)
            self.assertEqual(y1, y2)

    def test_bigrand(self):
        # Verify warnings are raised when randrange is too large for random()
        with warnings.catch_warnings():
            warnings.filterwarnings("error", "Underlying random")
            self.assertRaises(UserWarning, self.gen.randrange, 2**60)
class SystemRandom_TestBasicOps(TestBasicOps):
    # Tests for the OS-entropy-backed generator; state save/restore and
    # reseeding are intentionally unsupported by SystemRandom.
    gen = random.SystemRandom()

    def test_autoseed(self):
        # Doesn't need to do anything except not fail
        self.gen.seed()

    def test_saverestore(self):
        self.assertRaises(NotImplementedError, self.gen.getstate)
        self.assertRaises(NotImplementedError, self.gen.setstate, None)

    def test_seedargs(self):
        # Doesn't need to do anything except not fail
        self.gen.seed(100)

    def test_jumpahead(self):
        # Doesn't need to do anything except not fail
        self.gen.jumpahead(100)

    def test_gauss(self):
        self.gen.gauss_next = None
        self.gen.seed(100)
        self.assertEqual(self.gen.gauss_next, None)

    def test_pickling(self):
        self.assertRaises(NotImplementedError, pickle.dumps, self.gen)

    def test_53_bits_per_float(self):
        # This should pass whenever a C double has 53 bit precision.
        span = 2 ** 53
        cum = 0
        for i in xrange(100):
            cum |= int(self.gen.random() * span)
        self.assertEqual(cum, span-1)

    def test_bigrand(self):
        # The randrange routine should build-up the required number of bits
        # in stages so that all bit positions are active.
        span = 2 ** 500
        cum = 0
        for i in xrange(100):
            r = self.gen.randrange(span)
            self.assertTrue(0 <= r < span)
            cum |= r
        self.assertEqual(cum, span-1)

    def test_bigrand_ranges(self):
        for i in [40,80, 160, 200, 211, 250, 375, 512, 550]:
            start = self.gen.randrange(2 ** i)
            stop = self.gen.randrange(2 ** (i-2))
            if stop <= start:
                # NOTE(review): 'return' silently skips all remaining
                # widths instead of 'continue'-ing; kept as-is.
                return
            self.assertTrue(start <= self.gen.randrange(start, stop) < stop)

    def test_rangelimits(self):
        for start, stop in [(-2,0), (-(2**60)-2,-(2**60)), (2**60,2**60+2)]:
            self.assertEqual(set(range(start,stop)),
                             set([self.gen.randrange(start,stop) for i in xrange(100)]))

    def test_genrandbits(self):
        # Verify ranges
        for k in xrange(1, 1000):
            self.assertTrue(0 <= self.gen.getrandbits(k) < 2**k)

        # Verify all bits active
        getbits = self.gen.getrandbits
        for span in [1, 2, 3, 4, 31, 32, 32, 52, 53, 54, 119, 127, 128, 129]:
            cum = 0
            for i in xrange(100):
                cum |= getbits(span)
            self.assertEqual(cum, 2**span-1)

        # Verify argument checking
        self.assertRaises(TypeError, self.gen.getrandbits)
        self.assertRaises(TypeError, self.gen.getrandbits, 1, 2)
        self.assertRaises(ValueError, self.gen.getrandbits, 0)
        self.assertRaises(ValueError, self.gen.getrandbits, -1)
        self.assertRaises(TypeError, self.gen.getrandbits, 10.1)

    def test_randbelow_logic(self, _log=log, int=int):
        # check bitcount transition points:  2**i and 2**(i+1)-1
        # show that: k = int(1.001 + _log(n, 2))
        # is equal to or one greater than the number of bits in n
        for i in xrange(1, 1000):
            n = 1L << i  # check an exact power of two
            numbits = i+1
            k = int(1.00001 + _log(n, 2))
            self.assertEqual(k, numbits)
            self.assertTrue(n == 2**(k-1))

            n += n - 1  # check 1 below the next power of two
            k = int(1.00001 + _log(n, 2))
            self.assertIn(k, [numbits, numbits+1])
            self.assertTrue(2**k > n > 2**(k-2))

            n -= n >> 15  # check a little farther below the next power of two
            k = int(1.00001 + _log(n, 2))
            self.assertEqual(k, numbits)  # note the stronger assertion
            self.assertTrue(2**k > n > 2**(k-1))  # note the stronger assertion
class MersenneTwister_TestBasicOps(TestBasicOps):
    # Tests for the default Mersenne Twister generator, including exact
    # comparisons against the reference C implementation.
    gen = random.Random()

    def test_setstate_first_arg(self):
        self.assertRaises(ValueError, self.gen.setstate, (1, None, None))

    def test_setstate_middle_arg(self):
        # Wrong type, s/b tuple
        self.assertRaises(TypeError, self.gen.setstate, (2, None, None))
        # Wrong length, s/b 625
        self.assertRaises(ValueError, self.gen.setstate, (2, (1,2,3), None))
        # Wrong type, s/b tuple of 625 ints
        self.assertRaises(TypeError, self.gen.setstate, (2, ('a',)*625, None))
        # Last element s/b an int also
        self.assertRaises(TypeError, self.gen.setstate, (2, (0,)*624+('a',), None))

    def test_referenceImplementation(self):
        # Compare the python implementation with results from the original
        # code.  Create 2000 53-bit precision random floats.  Compare only
        # the last ten entries to show that the independent implementations
        # are tracking.  Here is the main() function needed to create the
        # list of expected random numbers:
        #    void main(void){
        #         int i;
        #         unsigned long init[4]={61731, 24903, 614, 42143}, length=4;
        #         init_by_array(init, length);
        #         for (i=0; i<2000; i++) {
        #           printf("%.15f ", genrand_res53());
        #           if (i%5==4) printf("\n");
        #         }
        #     }
        expected = [0.45839803073713259,
                    0.86057815201978782,
                    0.92848331726782152,
                    0.35932681119782461,
                    0.081823493762449573,
                    0.14332226470169329,
                    0.084297823823520024,
                    0.53814864671831453,
                    0.089215024911993401,
                    0.78486196105372907]
        self.gen.seed(61731L + (24903L<<32) + (614L<<64) + (42143L<<96))
        actual = self.randomlist(2000)[-10:]
        for a, e in zip(actual, expected):
            self.assertAlmostEqual(a,e,places=14)

    def test_strong_reference_implementation(self):
        # Like test_referenceImplementation, but checks for exact bit-level
        # equality.  This should pass on any box where C double contains
        # at least 53 bits of precision (the underlying algorithm suffers
        # no rounding errors -- all results are exact).
        from math import ldexp

        expected = [0x0eab3258d2231fL,
                    0x1b89db315277a5L,
                    0x1db622a5518016L,
                    0x0b7f9af0d575bfL,
                    0x029e4c4db82240L,
                    0x04961892f5d673L,
                    0x02b291598e4589L,
                    0x11388382c15694L,
                    0x02dad977c9e1feL,
                    0x191d96d4d334c6L]
        self.gen.seed(61731L + (24903L<<32) + (614L<<64) + (42143L<<96))
        actual = self.randomlist(2000)[-10:]
        for a, e in zip(actual, expected):
            self.assertEqual(long(ldexp(a, 53)), e)

    def test_long_seed(self):
        # This is most interesting to run in debug mode, just to make sure
        # nothing blows up.  Under the covers, a dynamically resized array
        # is allocated, consuming space proportional to the number of bits
        # in the seed.  Unfortunately, that's a quadratic-time algorithm,
        # so don't make this horribly big.
        seed = (1L << (10000 * 8)) - 1  # about 10K bytes
        self.gen.seed(seed)

    def test_53_bits_per_float(self):
        # This should pass whenever a C double has 53 bit precision.
        span = 2 ** 53
        cum = 0
        for i in xrange(100):
            cum |= int(self.gen.random() * span)
        self.assertEqual(cum, span-1)

    def test_bigrand(self):
        # The randrange routine should build-up the required number of bits
        # in stages so that all bit positions are active.
        span = 2 ** 500
        cum = 0
        for i in xrange(100):
            r = self.gen.randrange(span)
            self.assertTrue(0 <= r < span)
            cum |= r
        self.assertEqual(cum, span-1)

    def test_bigrand_ranges(self):
        for i in [40,80, 160, 200, 211, 250, 375, 512, 550]:
            start = self.gen.randrange(2 ** i)
            stop = self.gen.randrange(2 ** (i-2))
            if stop <= start:
                # NOTE(review): 'return' skips the remaining widths; kept.
                return
            self.assertTrue(start <= self.gen.randrange(start, stop) < stop)

    def test_rangelimits(self):
        for start, stop in [(-2,0), (-(2**60)-2,-(2**60)), (2**60,2**60+2)]:
            self.assertEqual(set(range(start,stop)),
                             set([self.gen.randrange(start,stop) for i in xrange(100)]))

    def test_genrandbits(self):
        # Verify cross-platform repeatability
        self.gen.seed(1234567)
        self.assertEqual(self.gen.getrandbits(100),
                         97904845777343510404718956115L)
        # Verify ranges
        for k in xrange(1, 1000):
            self.assertTrue(0 <= self.gen.getrandbits(k) < 2**k)

        # Verify all bits active
        getbits = self.gen.getrandbits
        for span in [1, 2, 3, 4, 31, 32, 32, 52, 53, 54, 119, 127, 128, 129]:
            cum = 0
            for i in xrange(100):
                cum |= getbits(span)
            self.assertEqual(cum, 2**span-1)

        # Verify argument checking
        self.assertRaises(TypeError, self.gen.getrandbits)
        self.assertRaises(TypeError, self.gen.getrandbits, 'a')
        self.assertRaises(TypeError, self.gen.getrandbits, 1, 2)
        self.assertRaises(ValueError, self.gen.getrandbits, 0)
        self.assertRaises(ValueError, self.gen.getrandbits, -1)

    def test_randbelow_logic(self, _log=log, int=int):
        # check bitcount transition points:  2**i and 2**(i+1)-1
        # show that: k = int(1.001 + _log(n, 2))
        # is equal to or one greater than the number of bits in n
        for i in xrange(1, 1000):
            n = 1L << i  # check an exact power of two
            numbits = i+1
            k = int(1.00001 + _log(n, 2))
            self.assertEqual(k, numbits)
            self.assertTrue(n == 2**(k-1))

            n += n - 1  # check 1 below the next power of two
            k = int(1.00001 + _log(n, 2))
            self.assertIn(k, [numbits, numbits+1])
            self.assertTrue(2**k > n > 2**(k-2))

            n -= n >> 15  # check a little farther below the next power of two
            k = int(1.00001 + _log(n, 2))
            self.assertEqual(k, numbits)  # note the stronger assertion
            self.assertTrue(2**k > n > 2**(k-1))  # note the stronger assertion

    def test_randrange_bug_1590891(self):
        start = 1000000000000
        stop = -100000000000000000000
        step = -200
        x = self.gen.randrange(start, stop, step)
        self.assertTrue(stop < x <= start)
        self.assertEqual((x+stop)%step, 0)
def gamma(z, sqrt2pi=(2.0*pi)**0.5):
    """Gamma function via the Lanczos approximation (g=7, 9 terms).

    Serves as the analytic reference when checking distribution moments
    below.  Uses the reflection formula for arguments left of 0.5.
    """
    # Reflection to the right half of the real line.
    if z < 0.5:
        return pi / sin(pi*z) / gamma(1.0-z)
    # Lanczos series with g=7; fsum keeps the partial sums exact.
    az = z + (7.0 - 0.5)
    series = [
        0.9999999999995183,
        676.5203681218835 / z,
        -1259.139216722289 / (z+1.0),
        771.3234287757674 / (z+2.0),
        -176.6150291498386 / (z+3.0),
        12.50734324009056 / (z+4.0),
        -0.1385710331296526 / (z+5.0),
        0.9934937113930748e-05 / (z+6.0),
        0.1659470187408462e-06 / (z+7.0),
    ]
    return az ** (z-0.5) / exp(az) * sqrt2pi * fsum(series)
class TestDistributions(unittest.TestCase):
    """Sanity checks for the continuous distribution generators."""

    def test_zeroinputs(self):
        # Verify that distributions can handle a series of zero inputs
        # (guarding against log(0) / division-by-zero inside the
        # generators).  random() is monkey-patched to replay a fixed
        # stream ending in five exact zeros; each distribution gets a
        # fresh copy of the stream via x[:].pop.
        g = random.Random()
        x = [g.random() for i in xrange(50)] + [0.0]*5
        g.random = x[:].pop; g.uniform(1,10)
        g.random = x[:].pop; g.paretovariate(1.0)
        g.random = x[:].pop; g.expovariate(1.0)
        g.random = x[:].pop; g.weibullvariate(1.0, 1.0)
        g.random = x[:].pop; g.normalvariate(0.0, 1.0)
        g.random = x[:].pop; g.gauss(0.0, 1.0)
        g.random = x[:].pop; g.lognormvariate(0.0, 1.0)
        g.random = x[:].pop; g.vonmisesvariate(0.0, 1.0)
        g.random = x[:].pop; g.gammavariate(0.01, 1.0)
        g.random = x[:].pop; g.gammavariate(1.0, 1.0)
        g.random = x[:].pop; g.gammavariate(200.0, 1.0)
        g.random = x[:].pop; g.betavariate(3.0, 3.0)
        g.random = x[:].pop; g.triangular(0.0, 1.0, 1.0/3.0)

    def test_avg_std(self):
        # Use integration to test distribution average and standard deviation.
        # Only works for distributions which do not consume variates in pairs
        g = random.Random()
        N = 5000
        x = [i/float(N) for i in xrange(1,N)]
        for variate, args, mu, sigmasqrd in [
                (g.uniform, (1.0,10.0), (10.0+1.0)/2, (10.0-1.0)**2/12),
                (g.triangular, (0.0, 1.0, 1.0/3.0), 4.0/9.0, 7.0/9.0/18.0),
                (g.expovariate, (1.5,), 1/1.5, 1/1.5**2),
                (g.paretovariate, (5.0,), 5.0/(5.0-1),
                                  5.0/((5.0-1)**2*(5.0-2))),
                (g.weibullvariate, (1.0, 3.0), gamma(1+1/3.0),
                                  gamma(1+2/3.0)-gamma(1+1/3.0)**2) ]:
            # Replay the evenly spaced grid as the "random" stream; an
            # IndexError just means the replayed stream ran out.
            g.random = x[:].pop
            y = []
            for i in xrange(len(x)):
                try:
                    y.append(variate(*args))
                except IndexError:
                    pass
            # Compare sample mean and (Bessel-corrected) variance of the
            # generated values against the analytic moments, to 2 places.
            s1 = s2 = 0
            for e in y:
                s1 += e
                s2 += (e - mu) ** 2
            N = len(y)
            self.assertAlmostEqual(s1/N, mu, 2)
            self.assertAlmostEqual(s2/(N-1), sigmasqrd, 2)
class TestModule(unittest.TestCase):
    """Module-level invariants of the random module."""

    def testMagicConstants(self):
        # The precomputed constants the generator algorithms rely on.
        for name, expected in [('NV_MAGICCONST', 1.71552776992141),
                               ('TWOPI', 6.28318530718),
                               ('LOG4', 1.38629436111989),
                               ('SG_MAGICCONST', 2.50407739677627)]:
            self.assertAlmostEqual(getattr(random, name), expected)

    def test__all__(self):
        # tests validity but not completeness of the __all__ list
        exported = set(random.__all__)
        self.assertTrue(exported <= set(dir(random)))

    def test_random_subclass_with_kwargs(self):
        # SF bug #1486663 -- this used to erroneously raise a TypeError
        class Subclass(random.Random):
            def __init__(self, newarg=None):
                random.Random.__init__(self)
        Subclass(newarg=1)
def test_main(verbose=None):
    """Run the whole suite; in verbose mode, loop it to watch refcounts."""
    testclasses = [WichmannHill_TestBasicOps,
                   MersenneTwister_TestBasicOps,
                   TestDistributions,
                   TestModule]

    # SystemRandom is only testable where os.urandom() is implemented;
    # probing with a single call keeps the suite portable.
    try:
        random.SystemRandom().random()
    except NotImplementedError:
        pass
    else:
        testclasses.append(SystemRandom_TestBasicOps)

    test_support.run_unittest(*testclasses)

    # verify reference counting (gettotalrefcount only exists on
    # --with-pydebug builds; a steadily climbing series suggests a leak)
    import sys
    if verbose and hasattr(sys, "gettotalrefcount"):
        counts = [None] * 5
        for i in xrange(len(counts)):
            test_support.run_unittest(*testclasses)
            counts[i] = sys.gettotalrefcount()
        print counts
if __name__ == "__main__":
    # Direct invocation exercises the verbose (refcount-checking) path.
    test_main(verbose=True)
| gpl-3.0 |
MrSenko/Nitrate | tcms/core/contrib/auth/views.py | 1 | 4087 | # -*- coding: utf-8 -*-
from django.conf import settings
from django.contrib import auth
from django.core.urlresolvers import reverse
from django.http import HttpResponse
from django.shortcuts import redirect, render_to_response
from django.template import RequestContext
from django.views.decorators.http import require_GET
from django.views.decorators.http import require_http_methods
from tcms.core.contrib.auth import get_using_backend
from tcms.core.contrib.auth.forms import RegistrationForm
from tcms.core.contrib.auth.models import UserActivateKey
from tcms.core.views import Prompt
@require_GET
def logout(request):
    """Log the current session's user out, then redirect.

    NOTE(review): ``next`` is taken straight from the query string and
    used as a redirect target -- an open-redirect risk; consider
    validating it against allowed hosts.
    """
    auth.logout(request)
    target = request.GET.get('next', reverse('tcms.core.views.index'))
    return redirect(target)
@require_http_methods(['GET', 'POST'])
def register(request, template_name='registration/registration_form.html'):
    """Register method of account.

    GET renders the registration form.  POST validates it, creates the
    user and either mails a confirmation link (when a mail server is
    configured) or shows a manual-activation notice listing the admins.
    """
    request_data = request.GET or request.POST

    # Check that registration is allowed by backend
    backend = get_using_backend()
    # NOTE(review): getattr without a default raises AttributeError if a
    # backend lacks `can_register` -- presumably every backend defines
    # it; confirm.
    cr = getattr(backend, 'can_register')  # can register
    if not cr:
        return HttpResponse(Prompt.render(
            request=request,
            info_type=Prompt.Alert,
            info='The backend is not allowed to register.',
            next=request_data.get('next', reverse('tcms.core.views.index'))
        ))

    if request.method == 'POST':
        form = RegistrationForm(data=request.POST, files=request.FILES)
        if form.is_valid():
            # Order matters: create the user first, then its activation
            # key, then (maybe) the confirmation mail referencing it.
            form.save()
            ak = form.set_active_key()

            # Send email to user if mail server is available.
            if form.cleaned_data['email'] and settings.EMAIL_HOST:
                form.send_confirm_mail(request=request, active_key=ak)
                msg = 'Your account has been created, please check your ' \
                      'mailbox for confirmation.'
            else:
                # No mail server: build an HTML notice instead, listing
                # the site admins as manual-activation contacts.
                msg = [
                    '<p>Your account has been created, but you need to contact '
                    'an administrator to active your account.</p>',
                ]
                # If can not send email, prompt to user.
                if settings.ADMINS:
                    msg.append('<p>Following is the admin list</p><ul>')
                    for name, email in settings.ADMINS:
                        msg.append('<li><a href="mailto:{}">{}</a></li>'.format(email, name))
                    msg.append('</ul>')
                msg = ''.join(msg)

            return HttpResponse(Prompt.render(
                request=request,
                info_type=Prompt.Info,
                info=msg,
                next=request.POST.get('next', reverse('tcms.core.views.index'))
            ))
    else:
        form = RegistrationForm()

    context_data = {
        'form': form,
    }
    return render_to_response(template_name, context_data,
                              context_instance=RequestContext(request))
@require_GET
def confirm(request, activation_key):
    """Activate the account that matches ``activation_key``.

    Looks up the one-shot key, flips the owning user to active, deletes
    the key, and shows a prompt pointing the user at login.
    """
    key_qs = UserActivateKey.objects.select_related('user')
    try:
        ak = key_qs.get(activation_key=activation_key)
    except UserActivateKey.DoesNotExist:
        # Unknown or already-consumed key.
        return HttpResponse(Prompt.render(
            request=request,
            info_type=Prompt.Info,
            info='This key no longer exist in the database.',
            next=request.GET.get('next', reverse('tcms.core.views.index'))
        ))

    # Activate the user, then discard the consumed key.
    user = ak.user
    user.is_active = True
    user.save(update_fields=['is_active'])
    ak.delete()
    # login(request, user)

    # Tell the browser activation worked and where to re-login.
    return HttpResponse(Prompt.render(
        request=request,
        info_type=Prompt.Info,
        info='Your account has been activated successfully, click next '
             'link to re-login.',
        next=request.GET.get('next', reverse(
            'tcms.profiles.views.redirect_to_profile'))
    ))
| gpl-2.0 |
IV-GII/Django_Traduccion | allauth/socialaccount/providers/angellist/views.py | 80 | 1056 | import requests
from allauth.socialaccount.providers.oauth2.views import (OAuth2Adapter,
OAuth2LoginView,
OAuth2CallbackView)
from .provider import AngelListProvider
class AngelListOAuth2Adapter(OAuth2Adapter):
    """OAuth2 endpoints and profile retrieval for AngelList."""

    provider_id = AngelListProvider.id
    access_token_url = 'https://angel.co/api/oauth/token/'
    authorize_url = 'https://angel.co/api/oauth/authorize/'
    profile_url = 'https://api.angel.co/1/me/'
    supports_state = False

    def complete_login(self, request, app, token, **kwargs):
        # Fetch the user's profile with the freshly issued access token
        # and hand the JSON payload to the provider for login creation.
        response = requests.get(self.profile_url,
                                params={'access_token': token.token})
        profile_data = response.json()
        provider = self.get_provider()
        return provider.sociallogin_from_response(request, profile_data)
# Ready-to-route view callables generated from the adapter.
oauth2_login = OAuth2LoginView.adapter_view(AngelListOAuth2Adapter)
oauth2_callback = OAuth2CallbackView.adapter_view(AngelListOAuth2Adapter)
| gpl-3.0 |
agdsn/sipa | sipa/utils/graph_utils.py | 2 | 3274 | # -*- coding: utf-8 -*-
from typing import List
import pygal
from flask_babel import gettext
from pygal import Graph
from pygal.colors import hsl_to_rgb
from pygal.style import Style
from sipa.units import (format_as_traffic, max_divisions,
reduce_by_base)
from sipa.utils.babel_utils import get_weekday
def rgb_string(r, g, b):
    """Return the ``#RRGGBB`` (uppercase hex) notation for the channels."""
    channels = (int(r), int(g), int(b))
    return "#" + "".join("{:02X}".format(c) for c in channels)
def hsl(h, s, l):
    """Convert an HSL triple to a ``#RRGGBB`` string (via pygal's helper)."""
    r, g, b = hsl_to_rgb(h, s, l)
    return rgb_string(r, g, b)
# Shared pygal style for all traffic charts: transparent background,
# slightly see-through series that darken on hover, and one fixed colour
# per series (the three entries match the three series added in
# generate_traffic_chart).
traffic_style = Style(
    background='transparent',
    opacity='.6',
    opacity_hover='.9',
    transition='200ms ease-in',
    colors=(hsl(130, 80, 60), hsl(70, 80, 60), hsl(190, 80, 60)),
    font_family='default'
)
def default_chart(chart_type, title, inline=True, **kwargs):
    """Instantiate ``chart_type`` with the site-wide default options.

    :param chart_type: a pygal chart class, e.g. ``pygal.Bar``
    :param title: the chart title
    :param inline: when True, omit the XML declaration from ``render()``
        output so the SVG can be embedded directly into HTML
    :param kwargs: extra chart options, passed through verbatim
    """
    return chart_type(
        title=title,
        height=350,
        fill=True,
        show_y_guides=True,
        human_readable=False,
        label_font_size=12,
        major_label_font_size=12,
        style=traffic_style,
        # omit the xml declaration header for direct html embedding
        disable_xml_declaration=inline,
        # an empty js list keeps pygal from fetching scripts from github
        js=[],
        **kwargs,
    )
def generate_traffic_chart(traffic_data: List[dict], inline: bool = True) -> Graph:
    """Create a graph object from the input traffic data with pygal.

    If inline is set, the chart is being passed the option to not add an xml
    declaration header to the beginning of the `render()` output, so it can
    be directly included in HTML code (wrapped by a `<figure>`)

    :param traffic_data: The traffic data as given by `user.traffic_history`
    :param inline: Determines the option `disable_xml_declaration`
    :return: The graph object
    """
    # choose unit according to maximum of `throughput`
    # (an empty history falls back to 0 divisions, i.e. raw values)
    divisions = (max_divisions(max(day['throughput'] for day in traffic_data))
                 if traffic_data else 0)

    # Scale the byte-valued fields down to the chosen unit; any other
    # keys (e.g. 'day') pass through untouched.
    traffic_data = [{key: (reduce_by_base(val, divisions=divisions)
                           if key in ['input', 'output', 'throughput']
                           else val)
                     for key, val in entry.items()
                     }
                    for entry in traffic_data]

    traffic_chart = default_chart(
        pygal.Bar,
        gettext("Traffic (MiB)"),
        inline,
        # don't divide, since the raw values already have been prepared.
        # `divide=False` effectively just appends the according unit.
        value_formatter=lambda value: format_as_traffic(value, divisions, divide=False),
    )

    # One x label per day, as localized weekday names.
    traffic_chart.x_labels = (get_weekday(day['day']) for day in traffic_data)
    # Dashed outline for the in/out series, a thicker line for the total.
    traffic_chart.add(gettext("Eingehend"),
                      [day['input'] for day in traffic_data],
                      stroke_style={'dasharray': '5'})
    traffic_chart.add(gettext("Ausgehend"),
                      [day['output'] for day in traffic_data],
                      stroke_style={'dasharray': '5'})
    traffic_chart.add(gettext("Gesamt"),
                      [day['throughput'] for day in traffic_data],
                      stroke_style={'width': '2'})

    return traffic_chart
def provide_render_function(generator):
    """Wrap ``generator`` so building and rendering happen in one call.

    :param generator: callable returning an object with a ``render()``
        method (e.g. a chart factory like ``generate_traffic_chart``)
    :return: a function mapping ``(data, **kwargs)`` to
        ``generator(data, **kwargs).render()``
    """
    def renderer(data, **kwargs):
        chart = generator(data, **kwargs)
        return chart.render()
    return renderer
| mit |
nuuuboo/odoo | openerp/addons/base/tests/test_mail_examples.py | 302 | 57129 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
MISC_HTML_SOURCE = """
<font size="2" style="color: rgb(31, 31, 31); font-family: monospace; font-variant: normal; line-height: normal; ">test1</font>
<div style="color: rgb(31, 31, 31); font-family: monospace; font-variant: normal; line-height: normal; font-size: 12px; font-style: normal; ">
<b>test2</b></div><div style="color: rgb(31, 31, 31); font-family: monospace; font-variant: normal; line-height: normal; font-size: 12px; ">
<i>test3</i></div><div style="color: rgb(31, 31, 31); font-family: monospace; font-variant: normal; line-height: normal; font-size: 12px; ">
<u>test4</u></div><div style="color: rgb(31, 31, 31); font-family: monospace; font-variant: normal; line-height: normal; font-size: 12px; ">
<strike>test5</strike></div><div style="color: rgb(31, 31, 31); font-family: monospace; font-variant: normal; line-height: normal; ">
<font size="5">test6</font></div><div><ul><li><font color="#1f1f1f" face="monospace" size="2">test7</font></li><li>
<font color="#1f1f1f" face="monospace" size="2">test8</font></li></ul><div><ol><li><font color="#1f1f1f" face="monospace" size="2">test9</font>
</li><li><font color="#1f1f1f" face="monospace" size="2">test10</font></li></ol></div></div>
<blockquote style="margin: 0 0 0 40px; border: none; padding: 0px;"><div><div><div><font color="#1f1f1f" face="monospace" size="2">
test11</font></div></div></div></blockquote><blockquote style="margin: 0 0 0 40px; border: none; padding: 0px;">
<blockquote style="margin: 0 0 0 40px; border: none; padding: 0px;"><div><font color="#1f1f1f" face="monospace" size="2">
test12</font></div><div><font color="#1f1f1f" face="monospace" size="2"><br></font></div></blockquote></blockquote>
<font color="#1f1f1f" face="monospace" size="2"><a href="http://google.com">google</a></font>
<a href="javascript:alert('malicious code')">test link</a>
"""
EDI_LIKE_HTML_SOURCE = """<div style="font-family: 'Lucica Grande', Ubuntu, Arial, Verdana, sans-serif; font-size: 12px; color: rgb(34, 34, 34); background-color: #FFF; ">
<p>Hello ${object.partner_id.name},</p>
<p>A new invoice is available for you: </p>
<p style="border-left: 1px solid #8e0000; margin-left: 30px;">
<strong>REFERENCES</strong><br />
Invoice number: <strong>${object.number}</strong><br />
Invoice total: <strong>${object.amount_total} ${object.currency_id.name}</strong><br />
Invoice date: ${object.date_invoice}<br />
Order reference: ${object.origin}<br />
Your contact: <a href="mailto:${object.user_id.email or ''}?subject=Invoice%20${object.number}">${object.user_id.name}</a>
</p>
<br/>
<p>It is also possible to directly pay with Paypal:</p>
<a style="margin-left: 120px;" href="${object.paypal_url}">
<img class="oe_edi_paypal_button" src="https://www.paypal.com/en_US/i/btn/btn_paynowCC_LG.gif"/>
</a>
<br/>
<p>If you have any question, do not hesitate to contact us.</p>
<p>Thank you for choosing ${object.company_id.name or 'us'}!</p>
<br/>
<br/>
<div style="width: 375px; margin: 0px; padding: 0px; background-color: #8E0000; border-top-left-radius: 5px 5px; border-top-right-radius: 5px 5px; background-repeat: repeat no-repeat;">
<h3 style="margin: 0px; padding: 2px 14px; font-size: 12px; color: #DDD;">
<strong style="text-transform:uppercase;">${object.company_id.name}</strong></h3>
</div>
<div style="width: 347px; margin: 0px; padding: 5px 14px; line-height: 16px; background-color: #F2F2F2;">
<span style="color: #222; margin-bottom: 5px; display: block; ">
${object.company_id.street}<br/>
${object.company_id.street2}<br/>
${object.company_id.zip} ${object.company_id.city}<br/>
${object.company_id.state_id and ('%s, ' % object.company_id.state_id.name) or ''} ${object.company_id.country_id.name or ''}<br/>
</span>
<div style="margin-top: 0px; margin-right: 0px; margin-bottom: 0px; margin-left: 0px; padding-top: 0px; padding-right: 0px; padding-bottom: 0px; padding-left: 0px; ">
Phone: ${object.company_id.phone}
</div>
<div>
Web : <a href="${object.company_id.website}">${object.company_id.website}</a>
</div>
</div>
</div></body></html>"""
OERP_WEBSITE_HTML_1 = """
<div>
<div class="container">
<div class="row">
<div class="col-md-12 text-center mt16 mb16" data-snippet-id="colmd">
<h2>OpenERP HR Features</h2>
<h3 class="text-muted">Manage your company most important asset: People</h3>
</div>
<div class="col-md-4" data-snippet-id="colmd">
<img class="img-rounded img-responsive" src="/website/static/src/img/china_thumb.jpg">
<h4 class="mt16">Streamline Recruitments</h4>
<p>Post job offers and keep track of each application received. Follow applicants in your recruitment process with the smart kanban view.</p>
<p>Save time by automating some communications with email templates. Resumes are indexed automatically, allowing you to easily find for specific profiles.</p>
</div>
<div class="col-md-4" data-snippet-id="colmd">
<img class="img-rounded img-responsive" src="/website/static/src/img/desert_thumb.jpg">
<h4 class="mt16">Enterprise Social Network</h4>
<p>Break down information silos. Share knowledge and best practices amongst all employees. Follow specific people or documents and join groups of interests to share expertise and documents.</p>
<p>Interact with your collegues in real time with live chat.</p>
</div>
<div class="col-md-4" data-snippet-id="colmd">
<img class="img-rounded img-responsive" src="/website/static/src/img/deers_thumb.jpg">
<h4 class="mt16">Leaves Management</h4>
<p>Keep track of the vacation days accrued by each employee. Employees enter their requests (paid holidays, sick leave, etc), for managers to approve and validate. It's all done in just a few clicks. The agenda of each employee is updated accordingly.</p>
</div>
</div>
</div>
</div>"""
# Presumably the fragments of OERP_WEBSITE_HTML_1 expected to be kept
# (*_IN) versus stripped (*_OUT) by the sanitizer under test -- confirm
# against the consuming mail test module.
OERP_WEBSITE_HTML_1_IN = [
    'Manage your company most important asset: People',
    'img class="img-rounded img-responsive" src="/website/static/src/img/china_thumb.jpg"',
]
OERP_WEBSITE_HTML_1_OUT = [
    'Break down information silos.',
    'Keep track of the vacation days accrued by each employee',
    'img class="img-rounded img-responsive" src="/website/static/src/img/deers_thumb.jpg',
]
OERP_WEBSITE_HTML_2 = """
<div class="mt16 cke_widget_editable cke_widget_element oe_editable oe_dirty" data-oe-model="blog.post" data-oe-id="6" data-oe-field="content" data-oe-type="html" data-oe-translate="0" data-oe-expression="blog_post.content" data-cke-widget-data="{}" data-cke-widget-keep-attr="0" data-widget="oeref" contenteditable="true" data-cke-widget-editable="text">
<section class="mt16 mb16" data-snippet-id="text-block">
<div class="container">
<div class="row">
<div class="col-md-12 text-center mt16 mb32" data-snippet-id="colmd">
<h2>
OpenERP Project Management
</h2>
<h3 class="text-muted">Infinitely flexible. Incredibly easy to use.</h3>
</div>
<div class="col-md-12 mb16 mt16" data-snippet-id="colmd">
<p>
OpenERP's <b>collaborative and realtime</b> project
management helps your team get work done. Keep
track of everything, from the big picture to the
minute details, from the customer contract to the
billing.
</p><p>
Organize projects around <b>your own processes</b>. Work
on tasks and issues using the kanban view, schedule
tasks using the gantt chart and control deadlines
in the calendar view. Every project may have it's
own stages allowing teams to optimize their job.
</p>
</div>
</div>
</div>
</section>
<section class="" data-snippet-id="image-text">
<div class="container">
<div class="row">
<div class="col-md-6 mt16 mb16" data-snippet-id="colmd">
<img class="img-responsive shadow" src="/website/static/src/img/image_text.jpg">
</div>
<div class="col-md-6 mt32" data-snippet-id="colmd">
<h3>Manage Your Shops</h3>
<p>
OpenERP's Point of Sale introduces a super clean
interface with no installation required that runs
online and offline on modern hardwares.
</p><p>
It's full integration with the company inventory
and accounting, gives you real time statistics and
consolidations amongst all shops without the hassle
of integrating several applications.
</p>
</div>
</div>
</div>
</section>
<section class="" data-snippet-id="text-image">
<div class="container">
<div class="row">
<div class="col-md-6 mt32" data-snippet-id="colmd">
<h3>Enterprise Social Network</h3>
<p>
Make every employee feel more connected and engaged
with twitter-like features for your own company. Follow
people, share best practices, 'like' top ideas, etc.
</p><p>
Connect with experts, follow what interests you, share
documents and promote best practices with OpenERP
Social application. Get work done with effective
collaboration across departments, geographies
and business applications.
</p>
</div>
<div class="col-md-6 mt16 mb16" data-snippet-id="colmd">
<img class="img-responsive shadow" src="/website/static/src/img/text_image.png">
</div>
</div>
</div>
</section><section class="" data-snippet-id="portfolio">
<div class="container">
<div class="row">
<div class="col-md-12 text-center mt16 mb32" data-snippet-id="colmd">
<h2>Our Porfolio</h2>
<h4 class="text-muted">More than 500 successful projects</h4>
</div>
<div class="col-md-4" data-snippet-id="colmd">
<img class="img-thumbnail img-responsive" src="/website/static/src/img/deers.jpg">
<img class="img-thumbnail img-responsive" src="/website/static/src/img/desert.jpg">
<img class="img-thumbnail img-responsive" src="/website/static/src/img/china.jpg">
</div>
<div class="col-md-4" data-snippet-id="colmd">
<img class="img-thumbnail img-responsive" src="/website/static/src/img/desert.jpg">
<img class="img-thumbnail img-responsive" src="/website/static/src/img/china.jpg">
<img class="img-thumbnail img-responsive" src="/website/static/src/img/deers.jpg">
</div>
<div class="col-md-4" data-snippet-id="colmd">
<img class="img-thumbnail img-responsive" src="/website/static/src/img/landscape.jpg">
<img class="img-thumbnail img-responsive" src="/website/static/src/img/china.jpg">
<img class="img-thumbnail img-responsive" src="/website/static/src/img/desert.jpg">
</div>
</div>
</div>
</section>
</div>
"""
# Presumably kept (*_IN) versus stripped (*_OUT) fragments of
# OERP_WEBSITE_HTML_2 -- confirm against the consuming mail tests.
OERP_WEBSITE_HTML_2_IN = [
    'management helps your team get work done',
]
OERP_WEBSITE_HTML_2_OUT = [
    'Make every employee feel more connected',
    'img class="img-responsive shadow" src="/website/static/src/img/text_image.png',
]
TEXT_1 = """I contact you about our meeting tomorrow. Here is the schedule I propose:
9 AM: brainstorming about our new amazing business app
9.45 AM: summary
10 AM: meeting with Ignasse to present our app
Is everything ok for you ?
--
MySignature"""
TEXT_1_IN = ["""I contact you about our meeting tomorrow. Here is the schedule I propose:
9 AM: brainstorming about our new amazing business app
9.45 AM: summary
10 AM: meeting with Ignasse to present our app
Is everything ok for you ?"""]
TEXT_1_OUT = ["""--
MySignature"""]
TEXT_2 = """Salut Raoul!
Le 28 oct. 2012 à 00:02, Raoul Grosbedon a écrit :
> I contact you about our meeting tomorrow. Here is the schedule I propose: (quote)
Of course. This seems viable.
> 2012/10/27 Bert Tartopoils :
>> blahblahblah (quote)?
>>
>> blahblahblah (quote)
>>
>> Bert TARTOPOILS
>> bert.tartopoils@miam.miam
>>
>
>
> --
> RaoulSignature
Bert TARTOPOILS
bert.tartopoils@miam.miam
"""
TEXT_2_IN = ["Salut Raoul!", "Of course. This seems viable."]
TEXT_2_OUT = ["I contact you about our meeting tomorrow. Here is the schedule I propose: (quote)",
"""> 2012/10/27 Bert Tartopoils :
>> blahblahblah (quote)?
>>
>> blahblahblah (quote)
>>
>> Bert TARTOPOILS
>> bert.tartopoils@miam.miam
>>
>
>
> --
> RaoulSignature"""]
HTML_1 = """<p>I contact you about our meeting for tomorrow. Here is the schedule I propose: (keep)
9 AM: brainstorming about our new amazing business app
9.45 AM: summary
10 AM: meeting with Ignasse to present our app
Is everything ok for you ?
--
MySignature</p>"""
HTML_1_IN = ["""I contact you about our meeting for tomorrow. Here is the schedule I propose: (keep)
9 AM: brainstorming about our new amazing business app
9.45 AM: summary
10 AM: meeting with Ignasse to present our app
Is everything ok for you ?"""]
HTML_1_OUT = ["""--
MySignature"""]
HTML_2 = """<div>
<font><span>I contact you about our meeting for tomorrow. Here is the schedule I propose:</span></font>
</div>
<div>
<ul>
<li><span>9 AM: brainstorming about our new amazing business app</span></li>
<li><span>9.45 AM: summary</span></li>
<li><span>10 AM: meeting with Fabien to present our app</span></li>
</ul>
</div>
<div>
<font><span>Is everything ok for you ?</span></font>
</div>"""
HTML_2_IN = ["<font><span>I contact you about our meeting for tomorrow. Here is the schedule I propose:</span></font>",
"<li><span>9 AM: brainstorming about our new amazing business app</span></li>",
"<li><span>9.45 AM: summary</span></li>",
"<li><span>10 AM: meeting with Fabien to present our app</span></li>",
"<font><span>Is everything ok for you ?</span></font>"]
HTML_2_OUT = []
HTML_3 = """<div><pre>This is an answer.
Regards,
XXXXXX
----- Mail original -----</pre>
<pre>Hi,
My CRM-related question.
Regards,
XXXX</pre></div>"""
HTML_3_IN = ["""<div><pre>This is an answer.
Regards,
XXXXXX
----- Mail original -----</pre>"""]
HTML_3_OUT = ["Hi,", "My CRM-related question.",
"Regards,"]
HTML_4 = """
<div>
<div>Hi Nicholas,</div>
<br>
<div>I'm free now. 00447710085916.</div>
<br>
<div>Regards,</div>
<div>Nicholas</div>
<br>
<span id="OLK_SRC_BODY_SECTION">
<div style="font-family:Calibri; font-size:11pt; text-align:left; color:black; BORDER-BOTTOM: medium none; BORDER-LEFT: medium none; PADDING-BOTTOM: 0in; PADDING-LEFT: 0in; PADDING-RIGHT: 0in; BORDER-TOP: #b5c4df 1pt solid; BORDER-RIGHT: medium none; PADDING-TOP: 3pt">
<span style="font-weight:bold">From: </span>OpenERP Enterprise <<a href="mailto:sales@openerp.com">sales@openerp.com</a>><br><span style="font-weight:bold">Reply-To: </span><<a href="mailto:sales@openerp.com">sales@openerp.com</a>><br><span style="font-weight:bold">Date: </span>Wed, 17 Apr 2013 13:30:47 +0000<br><span style="font-weight:bold">To: </span>Microsoft Office User <<a href="mailto:n.saxlund@babydino.com">n.saxlund@babydino.com</a>><br><span style="font-weight:bold">Subject: </span>Re: your OpenERP.com registration<br>
</div>
<br>
<div>
<p>Hello Nicholas Saxlund, </p>
<p>I noticed you recently registered to our OpenERP Online solution. </p>
<p>You indicated that you wish to use OpenERP in your own company. We would like to know more about your your business needs and requirements, and see how we can help you. When would you be available to discuss your project ?
</p>
<p>Best regards, </p>
<pre><a href="http://openerp.com">http://openerp.com</a>
Belgium: +32.81.81.37.00
U.S.: +1 (650) 307-6736
India: +91 (79) 40 500 100
</pre>
</div>
</span>
</div>"""
HTML_5 = """<div><pre>Hi,
I have downloaded OpenERP installer 7.0 and successfully installed the postgresql server and the OpenERP.
I created a database and started to install module by log in as administrator.
However, I was not able to install any module due to "OpenERP Server Error" as shown in the attachement.
Could you please let me know how could I fix this problem?
Regards,
Goh Sin Yih
________________________________
From: OpenERP Enterprise <sales@openerp.com>
To: sinyih_goh@yahoo.com
Sent: Friday, February 8, 2013 12:46 AM
Subject: Feedback From Your OpenERP Trial
Hello Goh Sin Yih,
Thank you for having tested OpenERP Online.
I noticed you started a trial of OpenERP Online (gsy) but you did not decide to keep using it.
So, I just wanted to get in touch with you to get your feedback. Can you tell me what kind of application you were you looking for and why you didn't decide to continue with OpenERP?
Thanks in advance for providing your feedback,
Do not hesitate to contact me if you have any questions,
Thanks,
</pre>"""
GMAIL_1 = """Hello,<div><br></div><div>Ok for me. I am replying directly in gmail, without signature.</div><div><br></div><div>Kind regards,</div><div><br></div><div>Demo.<br><br><div>On Thu, Nov 8, 2012 at 5:29 PM, <span><<a href="mailto:dummy@example.com">dummy@example.com</a>></span> wrote:<br><blockquote><div>I contact you about our meeting for tomorrow. Here is the schedule I propose:</div><div><ul><li>9 AM: brainstorming about our new amazing business app</span></li></li>
<li>9.45 AM: summary</li><li>10 AM: meeting with Fabien to present our app</li></ul></div><div>Is everything ok for you ?</div>
<div><p>--<br>Administrator</p></div>
<div><p>Log in our portal at: <a href="http://localhost:8069#action=login&db=mail_1&login=demo">http://localhost:8069#action=login&db=mail_1&login=demo</a></p></div>
</blockquote></div><br></div>"""
# Presumably kept (*_IN) versus stripped (*_OUT) fragments of GMAIL_1 --
# confirm against the consuming mail tests.
GMAIL_1_IN = ['Ok for me. I am replying directly in gmail, without signature.']
GMAIL_1_OUT = ['Administrator', 'Log in our portal at:']
THUNDERBIRD_1 = """<div>On 11/08/2012 05:29 PM,
<a href="mailto:dummy@example.com">dummy@example.com</a> wrote:<br></div>
<blockquote>
<div>I contact you about our meeting for tomorrow. Here is the
schedule I propose:</div>
<div>
<ul><li>9 AM: brainstorming about our new amazing business
app</span></li></li>
<li>9.45 AM: summary</li>
<li>10 AM: meeting with Fabien to present our app</li>
</ul></div>
<div>Is everything ok for you ?</div>
<div>
<p>--<br>
Administrator</p>
</div>
<div>
<p>Log in our portal at:
<a href="http://localhost:8069#action=login&db=mail_1&token=rHdWcUART5PhEnJRaXjH">http://localhost:8069#action=login&db=mail_1&token=rHdWcUART5PhEnJRaXjH</a></p>
</div>
</blockquote>
Ok for me. I am replying directly below your mail, using Thunderbird, with a signature.<br><br>
Did you receive my email about my new laptop, by the way ?<br><br>
Raoul.<br><pre>--
Raoul Grosbedonnée
</pre>"""
# Presumably kept (*_IN) versus stripped (*_OUT) fragments of
# THUNDERBIRD_1 -- confirm against the consuming mail tests.
THUNDERBIRD_1_IN = ['Ok for me. I am replying directly below your mail, using Thunderbird, with a signature.']
THUNDERBIRD_1_OUT = ['I contact you about our meeting for tomorrow.', 'Raoul Grosbedon']
HOTMAIL_1 = """<div>
<div dir="ltr"><br>
I have an amazing company, i'm learning OpenERP, it is a small company yet, but plannig to grow up quickly.
<br> <br>Kindest regards,<br>xxx<br>
<div>
<div id="SkyDrivePlaceholder">
</div>
<hr id="stopSpelling">
Subject: Re: your OpenERP.com registration<br>From: xxx@xxx.xxx<br>To: xxx@xxx.xxx<br>Date: Wed, 27 Mar 2013 17:12:12 +0000
<br><br>
Hello xxx,
<br>
I noticed you recently created an OpenERP.com account to access OpenERP Apps.
<br>
You indicated that you wish to use OpenERP in your own company.
We would like to know more about your your business needs and requirements, and see how
we can help you. When would you be available to discuss your project ?<br>
Best regards,<br>
<pre>
<a href="http://openerp.com" target="_blank">http://openerp.com</a>
Belgium: +32.81.81.37.00
U.S.: +1 (650) 307-6736
India: +91 (79) 40 500 100
</pre>
</div>
</div>
</div>"""
# Presumably kept (*_IN) versus stripped (*_OUT) fragments of HOTMAIL_1
# -- confirm against the consuming mail tests.  (Typos such as
# "plannig" are part of the fixture and intentional.)
HOTMAIL_1_IN = ["I have an amazing company, i'm learning OpenERP, it is a small company yet, but plannig to grow up quickly."]
HOTMAIL_1_OUT = ["Subject: Re: your OpenERP.com registration", " I noticed you recently created an OpenERP.com account to access OpenERP Apps.",
                 "We would like to know more about your your business needs and requirements", "Belgium: +32.81.81.37.00"]
MSOFFICE_1 = """
<div>
<div class="WordSection1">
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">
Our requirements are simple. Just looking to replace some spreadsheets for tracking quotes and possibly using the timecard module.
We are a company of 25 engineers providing product design services to clients.
</span>
</p>
<p></p>
<p></p>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">
I’ll install on a windows server and run a very limited trial to see how it works.
If we adopt OpenERP we will probably move to Linux or look for a hosted SaaS option.
</span>
</p>
<p></p>
<p></p>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">
<br>
I am also evaluating Adempiere and maybe others.
</span>
</p>
<p></p>
<p></p>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">
</span>
</p>
<p> </p>
<p></p>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">
I expect the trial will take 2-3 months as this is not a high priority for us.
</span>
</p>
<p></p>
<p></p>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">
</span>
</p>
<p> </p>
<p></p>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">
Alan
</span>
</p>
<p></p>
<p></p>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">
</span>
</p>
<p> </p>
<p></p>
<div>
<div style="border:none;border-top:solid #B5C4DF 1.0pt;padding:3.0pt 0in 0in 0in">
<p class="MsoNormal">
<b><span style="font-size:10.0pt;font-family:"Tahoma","sans-serif"">
From:
</span></b>
<span style="font-size:10.0pt;font-family:"Tahoma","sans-serif"">
OpenERP Enterprise [mailto:sales@openerp.com]
<br><b>Sent:</b> Monday, 11 March, 2013 14:47<br><b>To:</b> Alan Widmer<br><b>Subject:</b> Re: your OpenERP.com registration
</span>
</p>
<p></p>
<p></p>
</div>
</div>
<p class="MsoNormal"></p>
<p> </p>
<p>Hello Alan Widmer, </p>
<p></p>
<p>I noticed you recently downloaded OpenERP. </p>
<p></p>
<p>
Uou mentioned you wish to use OpenERP in your own company. Please let me more about your
business needs and requirements? When will you be available to discuss about your project?
</p>
<p></p>
<p>Thanks for your interest in OpenERP, </p>
<p></p>
<p>Feel free to contact me if you have any questions, </p>
<p></p>
<p>Looking forward to hear from you soon. </p>
<p></p>
<pre><p> </p></pre>
<pre>--<p></p></pre>
<pre>Nicolas<p></p></pre>
<pre><a href="http://openerp.com">http://openerp.com</a><p></p></pre>
<pre>Belgium: +32.81.81.37.00<p></p></pre>
<pre>U.S.: +1 (650) 307-6736<p></p></pre>
<pre>India: +91 (79) 40 500 100<p></p></pre>
<pre> <p></p></pre>
</div>
</div>"""
# NOTE(review): by the *_IN / *_OUT naming convention, IN lists appear to be
# fragments that must remain after mail-body cleanup and OUT fragments that must
# be stripped (quoted reply + signature) — confirm against the consuming test.
MSOFFICE_1_IN = ['Our requirements are simple. Just looking to replace some spreadsheets for tracking quotes and possibly using the timecard module.']
# 'Uou mentioned' is not a typo here: it reproduces the exact text present in
# the MSOFFICE_1 fixture body, so it must stay byte-identical.
MSOFFICE_1_OUT = ['I noticed you recently downloaded OpenERP.', 'Uou mentioned you wish to use OpenERP in your own company.', 'Belgium: +32.81.81.37.00']
MSOFFICE_2 = """
<div>
<div class="WordSection1">
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">Nicolas,</span></p><p></p>
<p></p>
<p class="MsoNormal" style="text-indent:.5in">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">We are currently investigating the possibility of moving away from our current ERP </span></p><p></p>
<p></p>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">
</span></p><p> </p>
<p></p>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">Thank You</span></p><p></p>
<p></p>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">Matt</span></p><p></p>
<p></p>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">
</span></p><p> </p>
<p></p>
<div>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">Raoul Petitpoil</span></p><p></p>
<p></p>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">Poil Industries</span></p><p></p>
<p></p>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">Information Technology</span></p><p></p>
<p></p>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">920 Super Street</span></p><p></p>
<p></p>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">Sanchez, Pa 17046 USA</span></p><p></p>
<p></p>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">Tel: xxx.xxx</span></p><p></p>
<p></p>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">Fax: xxx.xxx</span></p><p></p>
<p></p>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">Email: </span>
<a href="mailto:raoul@petitpoil.com">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:blue">raoul@petitpoil.com</span>
</a>
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">
</span></p><p></p>
<p></p>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">www.poilindustries.com</span></p><p></p>
<p></p>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">www.superproducts.com</span></p><p></p>
<p></p>
</div>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">
</span></p><p> </p>
<p></p>
<div>
<div style="border:none;border-top:solid #B5C4DF 1.0pt;padding:3.0pt 0in 0in 0in">
<p class="MsoNormal">
<b>
<span style="font-size:10.0pt;font-family:"Tahoma","sans-serif"">From:</span>
</b>
<span style="font-size:10.0pt;font-family:"Tahoma","sans-serif""> OpenERP Enterprise [mailto:sales@openerp.com] <br><b>Sent:</b> Wednesday, April 17, 2013 1:31 PM<br><b>To:</b> Matt Witters<br><b>Subject:</b> Re: your OpenERP.com registration</span></p><p></p>
<p></p>
</div>
</div>
<p class="MsoNormal"></p>
<p> </p>
<p>Hello Raoul Petitpoil, </p>
<p></p>
<p>I noticed you recently downloaded OpenERP. </p>
<p></p>
<p>You indicated that you wish to use OpenERP in your own company. We would like to know more about your your business needs and requirements, and see how we can help you. When would you be available to discuss your project ? </p>
<p></p>
<p>Best regards, </p>
<p></p>
<pre> <p> </p>
</pre>
<pre>--<p></p></pre>
<pre>Nicolas<p></p></pre>
<pre> <a href="http://openerp.com">http://openerp.com</a>
<p></p>
</pre>
<pre>Belgium: +32.81.81.37.00<p></p></pre>
<pre>U.S.: +1 (650) 307-6736<p></p></pre>
<pre>India: +91 (79) 40 500 100<p></p></pre>
<pre> <p></p></pre>
</div>
</div>"""
# Fragment from the new (top-posted) reply — presumably expected to survive
# cleanup of MSOFFICE_2; verify against the consuming test.
MSOFFICE_2_IN = ['We are currently investigating the possibility']
# Fragments from the quoted original message and signature — presumably
# expected to be removed.
MSOFFICE_2_OUT = ['I noticed you recently downloaded OpenERP.', 'You indicated that you wish', 'Belgium: +32.81.81.37.00']
MSOFFICE_3 = """<div>
<div class="WordSection1">
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">Hi Nicolas !</span></p><p></p>
<p></p>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">
</span></p><p> </p>
<p></p>
<p class="MsoNormal">
<span lang="EN-US" style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">Yes I’d be glad to hear about your offers as we struggle every year with the planning/approving of LOA. </span></p><p></p>
<p></p>
<p class="MsoNormal">
<span lang="EN-US" style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">I saw your boss yesterday on tv and immediately wanted to test the interface. </span></p><p></p>
<p></p>
<p class="MsoNormal">
<span lang="EN-US" style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">
</span></p><p> </p>
<p></p>
<div>
<p class="MsoNormal">
<b>
<span lang="NL-BE" style="font-size:10.0pt;font-family:"Trebuchet MS","sans-serif";color:gray">Bien à vous, </span></b></p><p></p><b>
</b>
<p></p>
<p class="MsoNormal">
<b>
<span lang="NL-BE" style="font-size:10.0pt;font-family:"Trebuchet MS","sans-serif";color:gray">Met vriendelijke groeten, </span></b></p><p></p><b>
</b>
<p></p>
<p class="MsoNormal">
<b>
<span lang="EN-GB" style="font-size:10.0pt;font-family:"Trebuchet MS","sans-serif";color:gray">Best regards,</span></b></p><p></p><b>
</b>
<p></p>
<p class="MsoNormal">
<b>
<span lang="EN-GB" style="font-size:10.0pt;font-family:"Trebuchet MS","sans-serif";color:gray">
</span></b></p><p><b> </b></p><b>
</b>
<p></p>
<p class="MsoNormal">
<b>
<span lang="EN-GB" style="font-size:10.0pt;font-family:"Trebuchet MS","sans-serif";color:gray">R. Petitpoil <br></span>
</b>
<span lang="EN-GB" style="font-size:10.0pt;font-family:"Trebuchet MS","sans-serif";color:gray">Human Resource Manager<b><br><br>Field Resource s.a n.v. <i> <br></i></b>Hermesstraat 6A <br>1930 Zaventem</span>
<span lang="EN-GB" style="font-size:8.0pt;font-family:"Tahoma","sans-serif";color:gray"><br></span>
<b>
<span lang="FR" style="font-size:10.0pt;font-family:Wingdings;color:#1F497D">(</span>
</b>
<b>
<span lang="FR" style="font-size:9.0pt;font-family:Wingdings;color:#1F497D"> </span>
</b>
<b>
<span lang="EN-GB" style="font-size:8.0pt;font-family:"Trebuchet MS","sans-serif";color:gray">xxx.xxx </span>
</b>
<b>
<span lang="EN-GB" style="font-size:9.0pt;font-family:"Trebuchet MS","sans-serif";color:gray"><br></span>
</b>
<b>
<span lang="FR" style="font-size:10.0pt;font-family:"Wingdings 2";color:#1F497D">7</span>
</b>
<b>
<span lang="FR" style="font-size:9.0pt;font-family:"Wingdings 2";color:#1F497D"> </span>
</b>
<b>
<span lang="EN-GB" style="font-size:8.0pt;font-family:"Trebuchet MS","sans-serif";color:gray">+32 2 727.05.91<br></span>
</b>
<span lang="EN-GB" style="font-size:24.0pt;font-family:Webdings;color:green">P</span>
<span lang="EN-GB" style="font-size:8.0pt;font-family:"Tahoma","sans-serif";color:green"> <b> </b></span>
<b>
<span lang="EN-GB" style="font-size:9.0pt;font-family:"Trebuchet MS","sans-serif";color:green">Please consider the environment before printing this email.</span>
</b>
<span lang="EN-GB" style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:navy"> </span>
<span lang="EN-GB" style="font-family:"Calibri","sans-serif";color:navy">
</span></p><p></p>
<p></p>
</div>
<p class="MsoNormal">
<span lang="EN-US" style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">
</span></p><p> </p>
<p></p>
<div>
<div style="border:none;border-top:solid #B5C4DF 1.0pt;padding:3.0pt 0cm 0cm 0cm">
<p class="MsoNormal">
<b>
<span lang="FR" style="font-size:10.0pt;font-family:"Tahoma","sans-serif"">De :</span>
</b>
<span lang="FR" style="font-size:10.0pt;font-family:"Tahoma","sans-serif""> OpenERP Enterprise [mailto:sales@openerp.com] <br><b>Envoyé :</b> jeudi 18 avril 2013 11:31<br><b>À :</b> Paul Richard<br><b>Objet :</b> Re: your OpenERP.com registration</span></p><p></p>
<p></p>
</div>
</div>
<p class="MsoNormal"></p>
<p> </p>
<p>Hello Raoul PETITPOIL, </p>
<p></p>
<p>I noticed you recently registered to our OpenERP Online solution. </p>
<p></p>
<p>You indicated that you wish to use OpenERP in your own company. We would like to know more about your your business needs and requirements, and see how we can help you. When would you be available to discuss your project ? </p>
<p></p>
<p>Best regards, </p>
<p></p>
<pre> <p> </p>
</pre>
<pre>--<p></p></pre>
<pre>Nicolas<p></p></pre>
<pre> <a href="http://openerp.com">http://openerp.com</a>
<p></p>
</pre>
<pre>Belgium: +32.81.81.37.00<p></p></pre>
<pre>U.S.: +1 (650) 307-6736<p></p></pre>
<pre>India: +91 (79) 40 500 100<p></p></pre>
<pre> <p></p></pre>
</div>
</div>"""
# NOTE(review): the first MSOFFICE_3_OUT entry ('I noticed you recently
# downloaded OpenERP.') never occurs in MSOFFICE_3 — its quoted mail says
# "I noticed you recently registered to our OpenERP Online solution." — so that
# "must be removed" entry passes vacuously. Likely copy-pasted from
# MSOFFICE_2_OUT; confirm intent before tightening.
MSOFFICE_3_IN = ['I saw your boss yesterday']
MSOFFICE_3_OUT = ['I noticed you recently downloaded OpenERP.', 'You indicated that you wish', 'Belgium: +32.81.81.37.00']
# ------------------------------------------------------------
# Test cases coming from bugs
# ------------------------------------------------------------
# bug: read more not apparent, strange message in read more span
# Plain-text mail wrapped in <pre>: three long paragraphs followed by a
# "--"-delimited signature. Used to reproduce the "read more" rendering bug.
BUG1 = """<pre>Hi Migration Team,
Paragraph 1, blah blah blah blah blah blah blah blah blah blah blah blah
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
blah blah blah blah blah blah blah blah.
Paragraph 2, blah blah blah blah blah blah blah blah blah blah blah blah
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
blah blah blah blah blah blah blah blah.
Paragraph 3, blah blah blah blah blah blah blah blah blah blah blah blah
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
blah blah blah blah blah blah blah blah.
Thanks.
Regards,
--
Olivier Laurent
Migration Manager
OpenERP SA
Chaussée de Namur, 40
B-1367 Gérompont
Tel: +32.81.81.37.00
Web: http://www.openerp.com</pre>"""
# Body fragments presumably expected to remain visible after cleanup — confirm
# against the consuming test.
BUG_1_IN = [
'Hi Migration Team',
'Paragraph 1'
]
# Signature fragments presumably expected to be stripped/hidden.
BUG_1_OUT = [
'Olivier Laurent',
'Chaussée de Namur',
'81.81.37.00',
'openerp.com',
]
BUG2 = """
<div>
<br>
<div class="moz-forward-container"><br>
<br>
-------- Original Message --------
<table class="moz-email-headers-table" border="0" cellpadding="0" cellspacing="0">
<tbody>
<tr>
<th nowrap="" valign="BASELINE" align="RIGHT">Subject:
</th>
<td>Fwd: TR: OpenERP S.A. Payment Reminder</td>
</tr>
<tr>
<th nowrap="" valign="BASELINE" align="RIGHT">Date: </th>
<td>Wed, 16 Oct 2013 14:11:13 +0200</td>
</tr>
<tr>
<th nowrap="" valign="BASELINE" align="RIGHT">From: </th>
<td>Christine Herrmann <a class="moz-txt-link-rfc2396E" href="mailto:che@openerp.com"><che@openerp.com></a></td>
</tr>
<tr>
<th nowrap="" valign="BASELINE" align="RIGHT">To: </th>
<td><a class="moz-txt-link-abbreviated" href="mailto:online@openerp.com">online@openerp.com</a></td>
</tr>
</tbody>
</table>
<br>
<br>
<br>
<div class="moz-forward-container"><br>
<br>
-------- Message original --------
<table class="moz-email-headers-table" border="0" cellpadding="0" cellspacing="0">
<tbody>
<tr>
<th nowrap="" valign="BASELINE" align="RIGHT">Sujet:
</th>
<td>TR: OpenERP S.A. Payment Reminder</td>
</tr>
<tr>
<th nowrap="" valign="BASELINE" align="RIGHT">Date :
</th>
<td>Wed, 16 Oct 2013 10:34:45 -0000</td>
</tr>
<tr>
<th nowrap="" valign="BASELINE" align="RIGHT">De : </th>
<td>Ida Siwatala <a class="moz-txt-link-rfc2396E" href="mailto:infos@inzoservices.com"><infos@inzoservices.com></a></td>
</tr>
<tr>
<th nowrap="" valign="BASELINE" align="RIGHT">Répondre
à : </th>
<td><a class="moz-txt-link-abbreviated" href="mailto:catchall@mail.odoo.com">catchall@mail.odoo.com</a></td>
</tr>
<tr>
<th nowrap="" valign="BASELINE" align="RIGHT">Pour :
</th>
<td>Christine Herrmann (che) <a class="moz-txt-link-rfc2396E" href="mailto:che@openerp.com"><che@openerp.com></a></td>
</tr>
</tbody>
</table>
<br>
<br>
<div>
<div class="WordSection1">
<p class="MsoNormal"><span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">Bonjour,</span></p>
<p class="MsoNormal"><span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D"></span></p>
<p> </p>
<p class="MsoNormal"><span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">Pourriez-vous
me faire un retour sur ce point.</span></p>
<p class="MsoNormal"><span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D"></span></p>
<p> </p>
<p class="MsoNormal"><span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">Cordialement</span></p>
<p class="MsoNormal"><span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D"></span></p>
<p> </p>
<div>
<div style="border:none;border-top:solid #B5C4DF
1.0pt;padding:3.0pt 0cm 0cm 0cm">
<p class="MsoNormal"><b><span style="font-size:10.0pt;font-family:"Tahoma","sans-serif"">De :</span></b><span style="font-size:10.0pt;font-family:"Tahoma","sans-serif"">
Ida Siwatala [<a class="moz-txt-link-freetext" href="mailto:infos@inzoservices.com">mailto:infos@inzoservices.com</a>]
<br>
<b>Envoyé :</b> vendredi 4 octobre 2013 20:03<br>
<b>À :</b> 'Followers of
INZO-services-8-all-e-Maxime-Lisbonne-77176-Savigny-le-temple-France'<br>
<b>Objet :</b> RE: OpenERP S.A. Payment Reminder</span></p>
</div>
</div>
<p> </p>
<p class="MsoNormal"><span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">Bonsoir,</span></p>
<p class="MsoNormal"><span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D"></span></p>
<p> </p>
<p class="MsoNormal"><span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">Je
me permets de revenir vers vous par écrit , car j’ai
fait 2 appels vers votre service en exposant mon
problème, mais je n’ai pas eu de retour.</span></p>
<p class="MsoNormal"><span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">Cela
fait un mois que j’ai fait la souscription de votre
produit, mais je me rends compte qu’il est pas adapté à
ma situation ( fonctionnalité manquante et surtout je
n’ai pas beaucoup de temps à passer à résoudre des
bugs). </span></p>
<p class="MsoNormal"><span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">C’est
pourquoi , j’ai demandé qu’un accord soit trouvé avec
vous pour annuler le contrat (tout en vous payant le
mois d’utilisation de septembre).</span></p>
<p class="MsoNormal"><span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D"></span></p>
<p> </p>
<p class="MsoNormal"><span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">Pourriez-vous
me faire un retour sur ce point.</span></p>
<p class="MsoNormal"><span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D"></span></p>
<p> </p>
<p class="MsoNormal"><span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">Cordialement,</span></p>
<p class="MsoNormal"><span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D"></span></p>
<p> </p>
<p class="MsoNormal"><span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">Ida
Siwatala</span></p>
<p class="MsoNormal"><span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D"></span></p>
<p> </p>
<p class="MsoNormal"><b><span style="font-size:10.0pt;font-family:"Tahoma","sans-serif"">De :</span></b><span style="font-size:10.0pt;font-family:"Tahoma","sans-serif"">
<a href="mailto:che@openerp.com">che@openerp.com</a>
[<a href="mailto:che@openerp.com">mailto:che@openerp.com</a>]
<br>
<b>Envoyé :</b> vendredi 4 octobre 2013 17:41<br>
<b>À :</b> <a href="mailto:infos@inzoservices.com">infos@inzoservices.com</a><br>
<b>Objet :</b> OpenERP S.A. Payment Reminder</span></p>
<p> </p>
<div>
<p style="background:white"><span style="font-size:9.0pt;font-family:"Arial","sans-serif";color:#222222">Dear
INZO services,</span></p>
<p style="background:white"><span style="font-size:9.0pt;font-family:"Arial","sans-serif";color:#222222">Exception
made if there was a mistake of ours, it seems that the
following amount stays unpaid. Please, take
appropriate measures in order to carry out this
payment in the next 8 days. </span></p>
<p class="MsoNormal" style="background:white"><span style="font-size:9.0pt;font-family:"Arial","sans-serif";color:#222222"></span></p>
<p> </p>
<table class="MsoNormalTable" style="width:100.0%;border:outset 1.5pt" width="100%" border="1" cellpadding="0">
<tbody>
<tr>
<td style="padding:.75pt .75pt .75pt .75pt">
<p class="MsoNormal">Date de facturation</p>
</td>
<td style="padding:.75pt .75pt .75pt .75pt">
<p class="MsoNormal">Description</p>
</td>
<td style="padding:.75pt .75pt .75pt .75pt">
<p class="MsoNormal">Reference</p>
</td>
<td style="padding:.75pt .75pt .75pt .75pt">
<p class="MsoNormal">Due Date</p>
</td>
<td style="padding:.75pt .75pt .75pt .75pt">
<p class="MsoNormal">Amount (€)</p>
</td>
<td style="padding:.75pt .75pt .75pt .75pt">
<p class="MsoNormal">Lit.</p>
</td>
</tr>
<tr>
<td style="padding:.75pt .75pt .75pt .75pt">
<p class="MsoNormal"><b>2013-09-24</b></p>
</td>
<td style="padding:.75pt .75pt .75pt .75pt">
<p class="MsoNormal"><b>2013/1121</b></p>
</td>
<td style="padding:.75pt .75pt .75pt .75pt">
<p class="MsoNormal"><b>Enterprise - Inzo Services
- Juillet 2013</b></p>
</td>
<td style="padding:.75pt .75pt .75pt .75pt">
<p class="MsoNormal"><b>2013-09-24</b></p>
</td>
<td style="padding:.75pt .75pt .75pt .75pt">
<p class="MsoNormal"><b>420.0</b></p>
</td>
<td style="padding:.75pt .75pt .75pt .75pt"><br>
</td>
</tr>
<tr>
<td style="padding:.75pt .75pt .75pt .75pt"><br>
</td>
<td style="border:none;padding:.75pt .75pt .75pt
.75pt"><br>
</td>
<td style="border:none;padding:.75pt .75pt .75pt
.75pt"><br>
</td>
<td style="border:none;padding:.75pt .75pt .75pt
.75pt"><br>
</td>
<td style="border:none;padding:.75pt .75pt .75pt
.75pt"><br>
</td>
<td style="border:none;padding:.75pt .75pt .75pt
.75pt"><br>
</td>
</tr>
</tbody>
</table>
<p class="MsoNormal" style="text-align:center;background:white" align="center"><span style="font-size:9.0pt;font-family:"Arial","sans-serif";color:#222222">Amount
due : 420.00 € </span></p>
<p style="background:white"><span style="font-size:9.0pt;font-family:"Arial","sans-serif";color:#222222">Would
your payment have been carried out after this mail was
sent, please ignore this message. Do not hesitate to
contact our accounting department. </span></p>
<p class="MsoNormal" style="background:white"><span style="font-size:9.0pt;font-family:"Arial","sans-serif";color:#222222"><br>
Best Regards, <br>
Aurore Lesage <br>
OpenERP<br>
Chaussée de Namur, 40 <br>
B-1367 Grand Rosières <br>
Tel: +32.81.81.37.00 - Fax: +32.81.73.35.01 <br>
E-mail : <a href="mailto:ale@openerp.com">ale@openerp.com</a> <br>
Web: <a href="http://www.openerp.com">http://www.openerp.com</a></span></p>
</div>
</div>
</div>
--<br>
INZO services <small>Sent by <a style="color:inherit" href="http://www.openerp.com">OpenERP
S.A.</a> using <a style="color:inherit" href="https://www.openerp.com/">OpenERP</a>.</small>
<small>Access your messages and documents <a style="color:inherit" href="https://accounts.openerp.com?db=openerp#action=mail.action_mail_redirect&login=che&message_id=5750830">in
OpenERP</a></small> <br>
<pre class="moz-signature" cols="72">--
Christine Herrmann
OpenERP
Chaussée de Namur, 40
B-1367 Grand Rosières
Tel: +32.81.81.37.00 - Fax: +32.81.73.35.01
Web: <a class="moz-txt-link-freetext" href="http://www.openerp.com">http://www.openerp.com</a> </pre>
<br>
</div>
<br>
<br>
</div>
<br>
</div>"""
# NOTE(review): by the *_IN / *_OUT naming convention, BUG_2_IN lists fragments
# expected in the cleaned output (the "read more" link must appear) and
# BUG_2_OUT fragments expected to be collapsed/removed — confirm against the
# consuming test.
BUG_2_IN = [
'read more',
'...',
]
# Bug fix: the original list was ['Fwd: TR: OpenERP S.A' 'fait un mois'] — the
# missing comma made Python implicitly concatenate the adjacent literals into
# the single string 'Fwd: TR: OpenERP S.Afait un mois', which never occurs in
# BUG2, so the corresponding "must not appear" check could never fail.
# Restored as two separate fragments (both occur verbatim in BUG2).
BUG_2_OUT = [
'Fwd: TR: OpenERP S.A',
'fait un mois',
]
# BUG 20/08/2014: READ MORE NOT APPEARING
# HTML release-notes body already wrapped in the 'oe_msg_body_long' container
# (with a 'read less' link), reproducing the 2014-08-20 "read more not
# appearing" bug.
BUG3 = """<div class="oe_msg_body_long" style="/* display: none; */"><p>OpenERP has been upgraded to version 8.0.</p>
<h2>What's new in this upgrade?</h2>
<div class="document">
<ul>
<li><p class="first">New Warehouse Management System:</p>
<blockquote>
<p>Schedule your picking, packing, receptions and internal moves automatically with Odoo using
your own routing rules. Define push and pull rules to organize a warehouse or to manage
product moves between several warehouses. Track in detail all stock moves, not only in your
warehouse but wherever else it's taken as well (customers, suppliers or manufacturing
locations).</p>
</blockquote>
</li>
<li><p class="first">New Product Configurator</p>
</li>
<li><p class="first">Documentation generation from website forum:</p>
<blockquote>
<p>New module to generate a documentation from questions and responses from your forum.
The documentation manager can define a table of content and any user, depending their karma,
can link a question to an entry of this TOC.</p>
</blockquote>
</li>
<li><p class="first">New kanban view of documents (resumes and letters in recruitement, project documents...)</p>
</li>
<li><p class="first">E-Commerce:</p>
<blockquote>
<ul class="simple">
<li>Manage TIN in contact form for B2B.</li>
<li>Dedicated salesteam to easily manage leads and orders.</li>
</ul>
</blockquote>
</li>
<li><p class="first">Better Instant Messaging.</p>
</li>
<li><p class="first">Faster and Improved Search view: Search drawer now appears on top of the results, and is open
by default in reporting views</p>
</li>
<li><p class="first">Improved User Interface:</p>
<blockquote>
<ul class="simple">
<li>Popups has changed to be more responsive on tablets and smartphones.</li>
<li>New Stat Buttons: Forms views have now dynamic buttons showing some statistics abouts linked models.</li>
<li>Color code to check in one look availability of components in an MRP order.</li>
<li>Unified menu bar allows you to switch easily between the frontend (website) and backend</li>
<li>Results panel is now scrollable independently of the menu bars, keeping the navigation,
search bar and view switcher always within reach.</li>
</ul>
</blockquote>
</li>
<li><p class="first">User signature is now in HTML.</p>
</li>
<li><p class="first">New development API.</p>
</li>
<li><p class="first">Remove support for Outlook and Thunderbird plugins</p>
</li>
</ul>
</div>
<p>Enjoy the new OpenERP Online!</p><span class="oe_mail_reduce"><a href="#">read less</a></span></div>"""
# Fragments presumably expected in the processed output (the read-more control)
# — confirm against the consuming test.
BUG_3_IN = [
'read more',
'...',
]
# Fragment from the long body — presumably expected to be collapsed/hidden.
BUG_3_OUT = [
'New kanban view of documents'
]
| agpl-3.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.