repo_name stringlengths 5 100 | path stringlengths 4 375 | copies stringclasses 991 values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15 values |
|---|---|---|---|---|---|
HPDCS/hijacker | parse/parse.py | 1 | 2910 | #!/bin/python
import sys
class AddressInfo():
    """Per-address access counters collected from a partial and a full trace."""

    def __init__(self, address):
        self.foo = {}  # NOTE(review): never read anywhere visible -- candidate for removal
        self.full_access = 0     # access count observed in the full trace
        self.partial_access = 0  # access count observed in the partial trace
        self.address = address

    def addPartial(self, acc):
        """Record the partial-trace access count for this address."""
        self.partial_access = acc

    def addFull(self, acc):
        """Record the full-trace access count for this address."""
        self.full_access = acc

    def __str__(self):
        # BUG FIX: the original read `if self.full_access = 0:` -- an
        # assignment where an equality test was intended (a SyntaxError).
        if self.full_access == 0:
            return ""
        else:
            return "addr: " + hex(self.address) + "\t full access: " + str(self.full_access) + "\t partial access: " + str(self.partial_access) + "\n"
class BlockInfo():
    """Aggregates AddressInfo records for one instrumented basic block."""

    def __init__(self, block):
        self.block = block   # block identifier, as parsed from the trace
        self.addresses = {}  # address -> AddressInfo

    def _get_or_create(self, address):
        # Fetch the AddressInfo for `address`, creating it on first sight.
        if address not in self.addresses:
            self.addresses[address] = AddressInfo(address)
        return self.addresses[address]

    def setPartialInfo(self, address, count):
        """Record the partial-trace count for `address` within this block."""
        self._get_or_create(address).addPartial(count)

    def setFullInfo(self, address, count):
        """Record the full-trace count for `address` within this block."""
        self._get_or_create(address).addFull(count)

    def dump(self):
        """Return one TSV line: block id, fraction of addresses seen in the
        partial trace, and partial/full access-count ratio."""
        taken = 0
        all_count = 0
        full_measure = 0
        part_measure = 0
        for the_addr in self.addresses:
            addr = self.addresses[the_addr]
            if addr.partial_access > 0:
                taken += 1
            all_count += 1
            full_measure += addr.full_access
            part_measure += addr.partial_access
        # ROBUSTNESS FIX: a block with no addresses (or no full-trace
        # accesses) previously crashed with ZeroDivisionError.
        taken_ratio = float(taken) / float(all_count) if all_count else 0.0
        access_ratio = float(part_measure) / float(full_measure) if full_measure else 0.0
        return str(self.block) + "\t" + str(taken_ratio) + "\t" + str(access_ratio) + "\n"

    def __str__(self):
        represent = "Block: " + str(self.block)
        for the_addr in self.addresses:
            represent += "\t" + str(self.addresses[the_addr])
        represent += "\n"
        return represent

    def __repr__(self):
        return self.__str__()
# ---------------------------------------------------------------------------
# Command-line driver (Python 2: print statement, long()).
# Usage: parse.py <partial trace> <full trace> <outfile>
# Each trace line is: "<hex address> <access count> <block id>".
# ---------------------------------------------------------------------------
if len(sys.argv) < 4:
    print "Usage: " + sys.argv[0] + " <partial trace> <full trace> <outfile>"
    sys.exit()

# block_id -> BlockInfo
blocks = {}

# Read both traces fully into memory, one stripped line per list element.
partial_f = [line.rstrip('\n') for line in open(sys.argv[1])]
full_f = [line.rstrip('\n') for line in open(sys.argv[2])]

# First pass: record per-address partial-trace counts, grouped by block.
for part_elem in partial_f:
    line = part_elem.split()
    address = long(line[0], 16) # hex value
    count = long(line[1])
    block_id = long(line[2])
    if block_id in blocks:
        block = blocks[block_id]
    else:
        blocks[block_id] = BlockInfo(block_id)
        block = blocks[block_id]
    block.setPartialInfo(address, count)

# Second pass: same structure, filling in the full-trace counts.
for part_elem in full_f:
    line = part_elem.split()
    address = long(line[0], 16) # hex value
    count = long(line[1])
    block_id = long(line[2])
    if block_id in blocks:
        block = blocks[block_id]
    else:
        blocks[block_id] = BlockInfo(block_id)
        block = blocks[block_id]
    block.setFullInfo(address, count)

# This is just for debugging
print blocks

# Dump data: one summary line per block (see BlockInfo.dump()).
f = open(sys.argv[3],'w')
for block_id in blocks:
    block = blocks[block_id]
    f.write(block.dump())
f.close()
| gpl-3.0 |
asridharan/dcos | pkgpanda/util.py | 9 | 16082 | import hashlib
import http.server
import json
import logging
import os
import re
import shutil
import socketserver
import subprocess
from contextlib import contextmanager, ExitStack
from itertools import chain
from multiprocessing import Process
from shutil import rmtree, which
from subprocess import check_call
from typing import List
import requests
import teamcity
import yaml
from teamcity.messages import TeamcityServiceMessages
from pkgpanda.exceptions import FetchError, ValidationError
# Keyword arguments shared by write_json() and json_prettyprint() so that all
# JSON emitted by this module is formatted identically (stable and diffable).
json_prettyprint_args = {
    "sort_keys": True,
    "indent": 2,
    "separators": (',', ':')
}
def variant_str(variant):
    """Return a string representation of variant (None maps to '')."""
    return '' if variant is None else variant
def variant_object(variant_str):
    """Return a variant object from its string representation ('' maps to None)."""
    return None if variant_str == '' else variant_str
def variant_name(variant):
    """Return a human-readable name for variant (None shows as '<default>')."""
    return '<default>' if variant is None else variant
def variant_prefix(variant):
    """Return a filename prefix for variant ('' for the default variant)."""
    return '' if variant is None else variant + '.'
def variant_suffix(variant, delim='.'):
    """Return a delimited filename suffix for variant ('' for the default)."""
    return '' if variant is None else delim + variant
def download(out_filename, url, work_dir, rm_on_error=True):
    """Fetch url into out_filename.

    file:// URLs are handled as a plain copy (relative paths resolved against
    work_dir); everything else goes through `requests`.  On any failure the
    partially-written output is removed (unless rm_on_error is False) and a
    FetchError is raised, chained to the original exception.
    """
    assert os.path.isabs(out_filename)
    assert os.path.isabs(work_dir)
    work_dir = work_dir.rstrip('/')

    # Strip off whitespace to make it so scheme matching doesn't fail because
    # of simple user whitespace.
    url = url.strip()

    # Handle file:// urls specially since requests doesn't know about them.
    try:
        if url.startswith('file://'):
            src_filename = url[len('file://'):]
            if not os.path.isabs(src_filename):
                src_filename = work_dir + '/' + src_filename
            shutil.copyfile(src_filename, out_filename)
        else:
            # Download the file.
            with open(out_filename, "w+b") as f:
                r = requests.get(url, stream=True)
                # NOTE(review): requests follows redirects by default, so an
                # observed 301 here is unexpected -- presumably a guard
                # against misbehaving mirrors; confirm the intent.
                if r.status_code == 301:
                    raise Exception("got a 301")
                r.raise_for_status()
                for chunk in r.iter_content(chunk_size=4096):
                    f.write(chunk)
    except Exception as fetch_exception:
        if rm_on_error:
            rm_passed = False

            # try / except so if remove fails we don't get an exception during an exception.
            # Sets rm_passed to true so if this fails we can include a special error message in the
            # FetchError
            try:
                os.remove(out_filename)
                rm_passed = True
            except Exception:
                pass
        else:
            rm_passed = True

        raise FetchError(url, out_filename, fetch_exception, rm_passed) from fetch_exception
def download_atomic(out_filename, url, work_dir):
    """Download url to out_filename without ever exposing a partial file.

    The payload is fetched into a '.tmp' sibling first and only renamed into
    place once the download fully succeeded, so readers of out_filename never
    observe a half-written file.  FetchError from the underlying download is
    re-raised after best-effort cleanup of the temp file.
    """
    assert os.path.isabs(out_filename)
    tmp_filename = out_filename + '.tmp'
    try:
        download(tmp_filename, url, work_dir)
        os.rename(tmp_filename, out_filename)
    except FetchError:
        # FIX: narrowed the original bare `except:` around the cleanup so
        # that only filesystem errors are swallowed (a bare except would also
        # hide KeyboardInterrupt/SystemExit raised during the remove).
        try:
            os.remove(tmp_filename)
        except OSError:
            pass
        raise
def extract_tarball(path, target):
    """Extract the tarball into target.

    If there are any errors, delete the folder being extracted to.
    """
    # TODO(cmaloney): Validate extraction will pass before unpacking as much as possible.
    # TODO(cmaloney): Unpack into a temporary directory then move into place to
    # prevent partial extraction from ever laying around on the filesystem.
    try:
        assert os.path.exists(path), "Path doesn't exist but should: {}".format(path)
        check_call(['mkdir', '-p', target])
        check_call(['tar', '-xf', path, '-C', target])
    except:  # noqa: E722 -- deliberately catches everything: clean up, then re-raise.
        # If there are errors, we can't really cope since we are already in an error state.
        rmtree(target, ignore_errors=True)
        raise
def load_json(filename):
    """Parse filename as JSON, annotating parse errors with the file name."""
    try:
        with open(filename) as source:
            contents = source.read()
        return json.loads(contents)
    except ValueError as ex:
        raise ValueError("Invalid JSON in {0}: {1}".format(filename, ex)) from ex
class YamlParseError(Exception):
    """Raised by load_yaml() when a file contains syntactically invalid YAML."""
    pass
def load_yaml(filename):
    """Parse filename as YAML, wrapping parse errors in YamlParseError."""
    try:
        with open(filename) as source:
            return yaml.safe_load(source)
    except yaml.YAMLError as ex:
        raise YamlParseError("Invalid YAML in {}: {}".format(filename, ex)) from ex
def write_yaml(filename, data, **kwargs):
    """Serialize data as YAML into filename (kwargs pass through to safe_dump)."""
    with open(filename, "w+") as f:
        return yaml.safe_dump(data, f, **kwargs)
def make_file(name):
    """Ensure a file exists at name (touch; existing contents are untouched)."""
    open(name, 'a').close()
def write_json(filename, data):
    """Serialize data into filename using the module's canonical JSON style."""
    with open(filename, "w+") as f:
        return json.dump(data, f, **json_prettyprint_args)
def write_string(filename, data):
    """Write data to filename, truncating any existing contents.

    Returns the number of characters written (the file object's write result).
    """
    with open(filename, "w+") as out:
        return out.write(data)
def load_string(filename):
    """Read filename and return its contents with surrounding whitespace stripped."""
    with open(filename) as source:
        contents = source.read()
    return contents.strip()
def json_prettyprint(data):
    """Return data serialized with the module's canonical JSON formatting."""
    return json.dumps(data, **json_prettyprint_args)
def if_exists(fn, *args, **kwargs):
    """Call fn, returning None instead of raising if the target file is missing."""
    try:
        result = fn(*args, **kwargs)
    except FileNotFoundError:
        return None
    return result
def sha1(filename):
    """Return the hex SHA-1 digest of the file's contents, read in 4KB chunks."""
    digest = hashlib.sha1()
    with open(filename, 'rb') as source:
        for chunk in iter(lambda: source.read(4096), b''):
            digest.update(chunk)
    return digest.hexdigest()
def expect_folder(path, files):
    """Assert that path contains exactly the entries named in files."""
    assert set(os.listdir(path)) == set(files)
def expect_fs(folder, contents):
    """Recursively assert that folder matches the expected layout.

    contents is either a list of entry names, or a dict mapping entry names
    to nested expectations (a value of None means "don't descend").
    """
    if isinstance(contents, dict):
        expect_folder(folder, contents.keys())
        for name, subcontents in contents.items():
            if subcontents is not None:
                expect_fs(os.path.join(folder, name), subcontents)
    elif isinstance(contents, list):
        expect_folder(folder, contents)
    else:
        raise ValueError("Invalid type {0} passed to expect_fs".format(type(contents)))
def make_tar(result_filename, change_folder):
    """Create an xz-compressed tarball of change_folder's contents.

    Ownership is normalized to numeric uid/gid 0 so archives don't vary by
    build user.  Uses pxz (parallel xz) when available, otherwise falls back
    to tar's built-in -J compression.
    """
    tar_cmd = ["tar", "--numeric-owner", "--owner=0", "--group=0"]
    if which("pxz"):
        tar_cmd += ["--use-compress-program=pxz", "-cf"]
    else:
        tar_cmd += ["-cJf"]
    tar_cmd += [result_filename, "-C", change_folder, "."]
    check_call(tar_cmd)
def rewrite_symlinks(root, old_prefix, new_prefix):
    """Re-point symlinks under root whose targets start with old_prefix.

    Matching links are recreated to point inside new_prefix; all other
    symlinks are left alone.
    """
    # Find the symlinks and rewrite them from old_prefix to new_prefix
    # All symlinks not beginning with old_prefix are ignored because
    # packages may contain arbitrary symlinks.
    for root_dir, dirs, files in os.walk(root):
        for name in chain(files, dirs):
            full_path = os.path.join(root_dir, name)
            if os.path.islink(full_path):
                # Rewrite old_prefix to new_prefix if present.
                target = os.readlink(full_path)
                if target.startswith(old_prefix):
                    # len(old_prefix) + 1 also drops the path separator that
                    # follows the prefix; lstrip guards any extra slashes.
                    new_target = os.path.join(new_prefix, target[len(old_prefix) + 1:].lstrip('/'))
                    # Remove the old link and write a new one.
                    os.remove(full_path)
                    os.symlink(new_target, full_path)
def check_forbidden_services(path, services):
    """Check if package contains systemd services that may break DC/OS

    This function checks the contents of systemd's unit file dirs and
    throws the exception if there are reserved services inside.

    Args:
        path: path where the package contents are
        services: list of reserved services to look for

    Raises:
        ValidationError: Reserved service names were found inside the package
    """
    services_dir_regexp = re.compile(r'dcos.target.wants(?:_.+)?')
    forbidden_srv_set = set(services)
    pkg_srv_set = set()

    # Collect every unit name shipped under any dcos.target.wants* directory.
    for direntry in os.listdir(path):
        if not services_dir_regexp.match(direntry):
            continue
        pkg_srv_set.update(set(os.listdir(os.path.join(path, direntry))))

    found_units = forbidden_srv_set.intersection(pkg_srv_set)
    if found_units:
        # FIX: corrected typo in the user-visible message ("Reverved").
        msg = "Reserved unit names found: " + ','.join(found_units)
        raise ValidationError(msg)
def run(cmd, *args, **kwargs):
    """Run cmd, echo its stdout/stderr, and return decoded stdout.

    Raises subprocess.CalledProcessError on a non-zero exit code.  Also
    asserts that stderr was empty -- any stderr output from these tools is
    treated as a bug by the callers.
    """
    proc = subprocess.Popen(cmd, *args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs)
    stdout, stderr = proc.communicate()
    print("STDOUT: ", stdout.decode('utf-8'))
    print("STDERR: ", stderr.decode('utf-8'))

    if proc.returncode != 0:
        raise subprocess.CalledProcessError(proc.returncode, cmd)

    assert len(stderr) == 0
    return stdout.decode('utf-8')
def launch_server(directory):
    """Serve `directory` over HTTP on port 8000 (blocks forever).

    Intended to run inside a child process (see TestRepo).
    """
    # BUG FIX: the original ignored its `directory` argument and always
    # chdir'd to the hard-coded "resources/repo", even though TestRepo
    # passes an explicit repo directory.
    os.chdir(directory)
    httpd = socketserver.TCPServer(
        ("", 8000),
        http.server.SimpleHTTPRequestHandler)
    httpd.serve_forever()
class TestRepo:
    """Context manager that serves repo_dir over HTTP in a child process."""

    def __init__(self, repo_dir):
        self.__dir = repo_dir

    def __enter__(self):
        # BUG FIX: `args` must be a tuple.  The original passed `(self.__dir)`,
        # which is just a parenthesized string -- Process would then spread the
        # string's characters as individual arguments to launch_server.
        self.__server = Process(target=launch_server, args=(self.__dir,))
        self.__server.start()

    def __exit__(self, exc_type, exc_value, traceback):
        self.__server.join()
def resources_test_dir(path):
    """Map a relative path into the pkgpanda test-resources tree."""
    assert not path.startswith('/')
    return "pkgpanda/test_resources/" + path
class MessageLogger:
    """Abstraction over TeamCity Build Messages

    When pkgpanda is ran in a TeamCity environment additional meta-messages will be output to stdout
    such that TeamCity can provide improved status reporting, log line highlighting, and failure
    reporting. When pkgpanda is ran in an environment other than TeamCity all meta-messages will
    silently be omitted.

    TeamCity docs: https://confluence.jetbrains.com/display/TCD10/Build+Script+Interaction+with+TeamCity
    """

    def __init__(self):
        # Exactly one backend is registered: TeamCity service messages under
        # CI, otherwise a plain-print fallback with the same interface.
        self.loggers = []
        if teamcity.is_running_under_teamcity():
            self.loggers.append(TeamcityServiceMessages())
        else:
            self.loggers.append(PrintLogger())

    def _custom_message(self, text, status, error_details='', flow_id=None):
        # Fan a single message out to every registered backend.
        for log in self.loggers:
            log.customMessage(text, status, errorDetails=error_details, flowId=flow_id)

    @contextmanager
    def _block(self, log, name, flow_id):
        # Bracket a scope with blockOpened/blockClosed markers on one backend.
        log.blockOpened(name, flowId=flow_id)
        log.progressMessage(name)
        yield
        log.blockClosed(name, flowId=flow_id)

    @contextmanager
    def scope(self, name, flow_id=None):
        """
        Creates a new scope for TeamCity messages. This method is intended to be called in a ``with`` statement

        :param name: The name of the scope
        :param flow_id: Optional flow id that can be used if ``name`` can be non-unique
        """
        # ExitStack opens one _block per backend and unwinds all on exit.
        with ExitStack() as stack:
            for log in self.loggers:
                stack.enter_context(self._block(log, name, flow_id))
            yield

    def normal(self, text, flow_id=None):
        self._custom_message(text=text, status='NORMAL', flow_id=flow_id)

    def warning(self, text, flow_id=None):
        self._custom_message(text=text, status='WARNING', flow_id=flow_id)

    def error(self, text, flow_id=None, error_details=''):
        self._custom_message(text=text, status='ERROR', flow_id=flow_id, error_details=error_details)

    def failure(self, text, flow_id=None):
        self._custom_message(text=text, status='FAILURE', flow_id=flow_id)
class PrintLogger:
    """Plain-stdout stand-in for TeamcityServiceMessages outside TeamCity.

    Mirrors the subset of the TeamCity message interface that MessageLogger
    uses, hence the camelCase method names (noqa'd below).
    """

    def customMessage(self, text, status, errorDetails='', flowId=None):  # noqa: N802, N803
        print("%s: %s %s" % (status, text, errorDetails))

    def progressMessage(self, message):  # noqa: N802, N803
        pass

    def blockOpened(self, name, flowId=None):  # noqa: N802, N803
        print("starting: %s" % (name,))

    def blockClosed(self, name, flowId=None):  # noqa: N802, N803
        print("completed: %s" % (name,))
# Module-wide logger instance; the backend is chosen once at import time.
logger = MessageLogger()
def hash_str(s: str) -> str:
    """Return the hex SHA-1 digest of a string's UTF-8 encoding."""
    return hashlib.sha1(s.encode('utf-8')).hexdigest()
def hash_int(i: int) -> str:
    """Hash an integer via its decimal string form."""
    return hash_str(repr(i))
def hash_dict(d: dict) -> str:
    """Hash a dict as a sorted, order-independent "key=hash(value)" listing."""
    parts = []
    for key in sorted(d.keys()):
        assert isinstance(key, str)
        parts.append("{0}={1}".format(key, hash_checkout(d[key])))
    return hash_str(",".join(parts))
def hash_list(l: List[str]) -> str:
    """Hash a list order-independently (elements are sorted before hashing)."""
    return hash_str(",".join(hash_checkout(item) for item in sorted(l)))
def hash_checkout(item) -> str:
    """Dispatch to the appropriate hash_* helper based on item's type.

    Sets are hashed as (order-independent) lists; unsupported types raise
    NotImplementedError.
    """
    if isinstance(item, (str, bytes)):
        return hash_str(item)
    if isinstance(item, dict):
        return hash_dict(item)
    if isinstance(item, list):
        return hash_list(item)
    if isinstance(item, int):
        return hash_int(item)
    if isinstance(item, set):
        return hash_list(list(item))
    raise NotImplementedError("{} of type {}".format(item, type(item)))
def split_by_token(token_prefix, token_suffix, string_, strip_token_decoration=False):
    """Yield a sequence of (substring, is_token) pairs comprising the string.

    The string is split by token boundary, where a token is a substring that
    begins with the token prefix and ends with the token suffix. is_token is
    True if the substring is a token. If strip_token_decoration is True, tokens
    are yielded without their prefix and suffix. Each token prefix must have a
    matching suffix, and vice versa. Tokens may not be nested.

    >>> list(split_by_token('{', '}', 'some text {token} some more text'))
    [('some text ', False), ('{token}', True), (' some more text', False)]
    >>> list(split_by_token('{', '}', 'some text {token} some more text', strip_token_decoration=True))
    [('some text ', False), ('token', True), (' some more text', False)]
    """
    def _next_substring(superstring, substring, start):
        # Return (start_idx, end_idx) of the next occurrence, or None.
        idx = superstring.find(substring, start)
        if idx < 0:
            return None
        return idx, idx + len(substring)

    def _raise_exception_if_suffix_in(substring):
        # A suffix appearing outside any token means the input is malformed.
        if token_suffix in substring:
            logging.debug("Token suffix found without matching prefix in string: {}".format(repr(string_)))
            raise Exception("Token suffix found without matching prefix")

    if len(token_prefix) == 0:
        raise ValueError('Token prefix must be a nonzero length string')
    if len(token_suffix) == 0:
        raise ValueError('Token suffix must be a nonzero length string')

    if string_ == '':
        yield string_, False

    num_chars_consumed = 0
    while num_chars_consumed < len(string_):
        # Find the next token.
        token_start = _next_substring(string_, token_prefix, num_chars_consumed)

        if not token_start:
            # No token found. Yield the rest of the string and return.
            remainder = string_[num_chars_consumed:]
            _raise_exception_if_suffix_in(remainder)
            yield remainder, False
            return

        # Yield the string preceding the token, if any.
        if token_start[0] > num_chars_consumed:
            preceding_string = string_[num_chars_consumed:token_start[0]]
            _raise_exception_if_suffix_in(preceding_string)
            yield preceding_string, False

        # Find the end of the token.
        token_end = _next_substring(string_, token_suffix, token_start[1])
        if not token_end or token_prefix in string_[token_start[1]:token_end[0]]:
            # Can't find a closing suffix, or found two consecutive prefixes without a suffix between them.
            logging.debug("Token prefix found without matching suffix in string: {}".format(repr(string_)))
            raise Exception("Token prefix found without matching suffix")

        # Yield the token.
        if strip_token_decoration:
            # Omit the token's prefix and suffix.
            yield string_[token_start[1]:token_end[0]], True
        else:
            # Yield the entire token.
            yield string_[token_start[0]:token_end[1]], True

        # Update the chars consumed count for the next iteration.
        num_chars_consumed = token_end[1]
| apache-2.0 |
pluser/nikola | nikola/data/themes/base/messages/messages_fr.py | 6 | 1486 | # -*- encoding:utf-8 -*-
from __future__ import unicode_literals
# Nikola UI strings translated to French.  Keys are the canonical English
# messages; an empty value falls back to the default language at runtime.
MESSAGES = {
    "%d min remaining to read": "Il reste encore %d min. de lecture",
    "(active)": "(actif)",
    "Also available in:": "Également disponible en:",
    "Archive": "Archives",
    "Categories": "Catégories",
    "Comments": "Commentaires",
    "LANGUAGE": "Français",
    "Languages:": "Langues:",
    "More posts about %s": "Plus d'articles sur %s",
    "Newer posts": "Articles récents",
    "Next post": "Article suivant",
    "No posts found.": "Pas d'articles.",
    "Nothing found.": "Pas de résultats.",
    "Older posts": "Anciens articles",
    "Original site": "Site d'origine",
    "Posted:": "Publié:",
    "Posts about %s": "Articles sur %s",
    "Posts for year %s": "Articles de l'année %s",
    "Posts for {month} {day}, {year}": "Articles du {day} {month} {year}",
    "Posts for {month} {year}": "Articles de {month} {year}",
    "Previous post": "Article précédent",
    "Publication date": "Date de publication",
    "RSS feed": "Flux RSS",
    "Read in English": "Lire en français",
    "Read more": "Lire la suite",
    "Skip to main content": "Aller au contenu principal",
    "Source": "Source",
    "Subcategories:": "Sous-catégories",
    "Tags and Categories": "Étiquettes et catégories",
    "Tags": "Étiquettes",
    "Write your page here.": "",
    "Write your post here.": "",
    "old posts, page %d": "anciens articles, page %d",
    "page %d": "page %d",
}
| mit |
poornimakshirsagar/sos | sos/plugins/openstack_horizon.py | 2 | 2876 | # Copyright (C) 2009 Red Hat, Inc., Joey Boggs <jboggs@redhat.com>
# Copyright (C) 2012 Rackspace US, Inc.,
# Justin Shepherd <jshepher@rackspace.com>
# Copyright (C) 2013 Red Hat, Inc., Jeremy Agee <jagee@redhat.com>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
from sos.plugins import Plugin, RedHatPlugin, DebianPlugin, UbuntuPlugin
class OpenStackHorizon(Plugin):
    """OpenStack Horizon
    """
    plugin_name = "openstack_horizon"
    profiles = ('openstack', 'openstack_controller')
    option_list = [("log", "gathers openstack horizon logs", "slow", True)]

    def setup(self):
        # Always collect the dashboard configuration; logs only on request.
        self.add_copy_spec("/etc/openstack-dashboard/")
        if self.get_option("log"):
            self.add_copy_spec("/var/log/horizon/")

    def postproc(self):
        # Scrub secret values from collected configuration files.
        protect_keys = [
            "SECRET_KEY", "EMAIL_HOST_PASSWORD"
        ]
        regexp = r"((?m)^\s*(%s)\s*=\s*)(.*)" % "|".join(protect_keys)

        # FIX: use a raw string for the path pattern -- "\." in a plain
        # string only works by accident and is a DeprecationWarning (later a
        # SyntaxWarning) on modern Python.
        self.do_path_regex_sub(r"/etc/openstack-dashboard/.*\.json",
                               regexp, r"\1*********")
        self.do_path_regex_sub("/etc/openstack-dashboard/local_settings",
                               regexp, r"\1*********")
class DebianHorizon(OpenStackHorizon, DebianPlugin):
    """Debian-specific collection for the Horizon dashboard."""
    packages = (
        'python-django-horizon',
        'openstack-dashboard',
        'openstack-dashboard-apache'
    )

    def setup(self):
        # BUG FIX: the original called super(DebianOpenStackHorizon, self) --
        # a name that does not exist anywhere (NameError as soon as setup()
        # runs).  The class is named DebianHorizon.
        super(DebianHorizon, self).setup()
        self.add_copy_spec("/etc/apache2/sites-available/")
class UbuntuHorizon(OpenStackHorizon, UbuntuPlugin):
    """Ubuntu-specific collection for the Horizon dashboard."""
    packages = (
        'python-django-horizon',
        'openstack-dashboard',
        'openstack-dashboard-ubuntu-theme'
    )

    def setup(self):
        # BUG FIX: super() referenced the nonexistent name
        # "UbuntuOpenStackHorizon" (NameError when setup() runs).
        super(UbuntuHorizon, self).setup()
        self.add_copy_spec("/etc/apache2/conf.d/openstack-dashboard.conf")
class RedHatHorizon(OpenStackHorizon, RedHatPlugin):
    """Red Hat-specific collection for the Horizon dashboard."""
    packages = (
        'python-django-horizon',
        'openstack-dashboard'
    )

    def setup(self):
        # BUG FIX: super() referenced the nonexistent name
        # "RedHatOpenStackHorizon" (NameError when setup() runs).
        super(RedHatHorizon, self).setup()
        self.add_copy_spec("/etc/httpd/conf.d/openstack-dashboard.conf")
        if self.get_option("log"):
            self.add_copy_spec("/var/log/httpd/")
# vim: set et ts=4 sw=4 :
| gpl-2.0 |
duramato/SickRage | lib/hachoir_parser/archive/lzx.py | 74 | 13204 | """LZX data stream parser.
Also includes a decompression function (slow!!) which can decompress
LZX data stored in a Hachoir stream.
Author: Robert Xiao
Creation date: July 18, 2007
"""
from hachoir_parser import Parser
from hachoir_core.field import (FieldSet,
UInt32, Bit, Bits, PaddingBits,
RawBytes, ParserError)
from hachoir_core.endian import MIDDLE_ENDIAN, LITTLE_ENDIAN
from hachoir_core.tools import paddingSize, alignValue
from hachoir_parser.archive.zlib import build_tree, HuffmanCode, extend_data
from hachoir_core.bits import str2long
import new # for instancemethod
class LZXPreTreeEncodedTree(FieldSet):
    """A Huffman code-length table, itself encoded via a 20-symbol pre-tree.

    Code lengths persist across blocks (cached on the parser root), so each
    entry decoded here is a delta against the previous block's length for the
    same element, taken modulo 17.
    """
    def __init__(self, parent, name, num_elements, *args, **kwargs):
        FieldSet.__init__(self, parent, name, *args, **kwargs)
        self.num_elements = num_elements

    def createFields(self):
        # Twenty 4-bit lengths define the pre-tree used to decode the table.
        for i in xrange(20):
            yield Bits(self, "pretree_lengths[]", 4)
        pre_tree = build_tree([self['pretree_lengths[%d]'%x].value for x in xrange(20)])

        # Lengths carry over between blocks: keep them on the parser root,
        # keyed by this tree's name, initialized to zero on first use.
        if not hasattr(self.root, "lzx_tree_lengths_"+self.name):
            self.lengths = [0] * self.num_elements
            setattr(self.root, "lzx_tree_lengths_"+self.name, self.lengths)
        else:
            self.lengths = getattr(self.root, "lzx_tree_lengths_"+self.name)

        i = 0
        while i < self.num_elements:
            field = HuffmanCode(self, "tree_code[]", pre_tree)
            if field.realvalue <= 16:
                # Codes 0-16: a single delta-encoded length for element i.
                self.lengths[i] = (self.lengths[i] - field.realvalue) % 17
                field._description = "Literal tree delta length %i (new length value %i for element %i)" % (
                        field.realvalue, self.lengths[i], i)
                i += 1
                yield field
            elif field.realvalue == 17:
                # Code 17: run of 4-19 zero lengths (4 + a 4-bit extra count).
                field._description = "Tree Code 17: Zeros for 4-19 elements"
                yield field
                extra = Bits(self, "extra[]", 4)
                zeros = 4 + extra.value
                extra._description = "Extra bits: zeros for %i elements (elements %i through %i)" % (zeros, i, i+zeros-1)
                yield extra
                self.lengths[i:i+zeros] = [0] * zeros
                i += zeros
            elif field.realvalue == 18:
                # Code 18: run of 20-51 zero lengths (20 + a 5-bit extra count).
                field._description = "Tree Code 18: Zeros for 20-51 elements"
                yield field
                extra = Bits(self, "extra[]", 5)
                zeros = 20 + extra.value
                extra._description = "Extra bits: zeros for %i elements (elements %i through %i)" % (zeros, i, i+zeros-1)
                yield extra
                self.lengths[i:i+zeros] = [0] * zeros
                i += zeros
            elif field.realvalue == 19:
                # Code 19: the next delta code repeats for 4-5 elements.
                field._description = "Tree Code 19: Same code for 4-5 elements"
                yield field
                extra = Bits(self, "extra[]", 1)
                run = 4 + extra.value
                extra._description = "Extra bits: run for %i elements (elements %i through %i)" % (run, i, i+run-1)
                yield extra
                newfield = HuffmanCode(self, "tree_code[]", pre_tree)
                assert newfield.realvalue <= 16
                newfield._description = "Literal tree delta length %i (new length value %i for elements %i through %i)" % (
                        newfield.realvalue, self.lengths[i], i, i+run-1)
                self.lengths[i:i+run] = [(self.lengths[i] - newfield.realvalue) % 17] * run
                i += run
                yield newfield
class LZXBlock(FieldSet):
    """One LZX compressed block (verbatim, aligned-offset, or uncompressed).

    Decoding appends to parent.uncompressed_data and maintains the three
    most-recently-used match offsets (parent.r0/r1/r2) across blocks.
    """
    # Window-size exponent (15..21) -> element-count multiplier for the main
    # tree's position part (used below as WINDOW_SIZE[level] * 8).
    WINDOW_SIZE = {15:30,
                   16:32,
                   17:34,
                   18:36,
                   19:38,
                   20:42,
                   21:50}

    # Position slot -> (min, max, extra_bits).  min/max are "formatted"
    # offsets, converted to real match positions via info[0] - 2 below.
    POSITION_SLOTS = {0:(0,0,0),
                      1:(1,1,0),
                      2:(2,2,0),
                      3:(3,3,0),
                      4:(4,5,1),
                      5:(6,7,1),
                      6:(8,11,2),
                      7:(12,15,2),
                      8:(16,23,3),
                      9:(24,31,3),
                      10:(32,47,4),
                      11:(48,63,4),
                      12:(64,95,5),
                      13:(96,127,5),
                      14:(128,191,6),
                      15:(192,255,6),
                      16:(256,383,7),
                      17:(384,511,7),
                      18:(512,767,8),
                      19:(768,1023,8),
                      20:(1024,1535,9),
                      21:(1536,2047,9),
                      22:(2048,3071,10),
                      23:(3072,4095,10),
                      24:(4096,6143,11),
                      25:(6144,8191,11),
                      26:(8192,12287,12),
                      27:(12288,16383,12),
                      28:(16384,24575,13),
                      29:(24576,32767,13),
                      30:(32768,49151,14),
                      31:(49152,65535,14),
                      32:(65536,98303,15),
                      33:(98304,131071,15),
                      34:(131072,196607,16),
                      35:(196608,262143,16),
                      36:(262144,393215,17),
                      37:(393216,524287,17),
                      38:(524288,655359,17),
                      39:(655360,786431,17),
                      40:(786432,917503,17),
                      41:(917504,1048575,17),
                      42:(1048576,1179647,17),
                      43:(1179648,1310719,17),
                      44:(1310720,1441791,17),
                      45:(1441792,1572863,17),
                      46:(1572864,1703935,17),
                      47:(1703936,1835007,17),
                      48:(1835008,1966079,17),
                      49:(1966080,2097151,17),
                      }

    def createFields(self):
        yield Bits(self, "block_type", 3)
        yield Bits(self, "block_size", 24)
        self.uncompressed_size = self["block_size"].value
        self.compression_level = self.root.compr_level
        self.window_size = self.WINDOW_SIZE[self.compression_level]
        self.block_type = self["block_type"].value
        curlen = len(self.parent.uncompressed_data)
        if self.block_type in (1, 2): # Verbatim or aligned offset block
            if self.block_type == 2:
                # Aligned-offset blocks carry an extra 8-entry aligned tree.
                for i in xrange(8):
                    yield Bits(self, "aligned_len[]", 3)
                aligned_tree = build_tree([self['aligned_len[%d]'%i].value for i in xrange(8)])
            # Main tree: 256 literal codes plus window_size*8 match codes.
            yield LZXPreTreeEncodedTree(self, "main_tree_start", 256)
            yield LZXPreTreeEncodedTree(self, "main_tree_rest", self.window_size * 8)
            main_tree = build_tree(self["main_tree_start"].lengths + self["main_tree_rest"].lengths)
            yield LZXPreTreeEncodedTree(self, "length_tree", 249)
            length_tree = build_tree(self["length_tree"].lengths)
            current_decoded_size = 0
            while current_decoded_size < self.uncompressed_size:
                # Re-align the bitstream at every 32KB boundary of output.
                if (curlen+current_decoded_size) % 32768 == 0 and (curlen+current_decoded_size) != 0:
                    padding = paddingSize(self.address + self.current_size, 16)
                    if padding:
                        yield PaddingBits(self, "padding[]", padding)
                field = HuffmanCode(self, "main_code[]", main_tree)
                if field.realvalue < 256:
                    # Codes 0-255 are literal bytes.
                    field._description = "Literal value %r" % chr(field.realvalue)
                    current_decoded_size += 1
                    self.parent.uncompressed_data += chr(field.realvalue)
                    yield field
                    continue
                # Codes >= 256 encode a match: low 3 bits select the length
                # header, the rest the position slot.
                position_header, length_header = divmod(field.realvalue - 256, 8)
                info = self.POSITION_SLOTS[position_header]
                if info[2] == 0:
                    # Slots without extra bits: repeated offsets R0-R2 or a
                    # small fixed position; update the LRU offset registers.
                    if info[0] == 0:
                        position = self.parent.r0
                        field._description = "Position Slot %i, Position [R0] (%i)" % (position_header, position)
                    elif info[0] == 1:
                        position = self.parent.r1
                        self.parent.r1 = self.parent.r0
                        self.parent.r0 = position
                        field._description = "Position Slot %i, Position [R1] (%i)" % (position_header, position)
                    elif info[0] == 2:
                        position = self.parent.r2
                        self.parent.r2 = self.parent.r0
                        self.parent.r0 = position
                        field._description = "Position Slot %i, Position [R2] (%i)" % (position_header, position)
                    else:
                        position = info[0] - 2
                        self.parent.r2 = self.parent.r1
                        self.parent.r1 = self.parent.r0
                        self.parent.r0 = position
                        field._description = "Position Slot %i, Position %i" % (position_header, position)
                else:
                    field._description = "Position Slot %i, Positions %i to %i" % (position_header, info[0] - 2, info[1] - 2)
                if length_header == 7:
                    # Long match: the actual length (9+) comes from the
                    # separate length tree.
                    field._description += ", Length Values 9 and up"
                    yield field
                    length_field = HuffmanCode(self, "length_code[]", length_tree)
                    length = length_field.realvalue + 9
                    length_field._description = "Length Code %i, total length %i" % (length_field.realvalue, length)
                    yield length_field
                else:
                    field._description += ", Length Value %i (Huffman Code %i)"%(length_header + 2, field.value)
                    yield field
                    length = length_header + 2
                if info[2]:
                    # Slots with extra bits refine the position.
                    if self.block_type == 1 or info[2] < 3: # verbatim
                        extrafield = Bits(self, "position_extra[%s" % field.name.split('[')[1], info[2])
                        position = extrafield.value + info[0] - 2
                        extrafield._description = "Position Extra Bits (%i), total position %i"%(extrafield.value, position)
                        yield extrafield
                    else: # aligned offset
                        # Aligned blocks: verbatim bits (if any) supply the
                        # high part, the aligned tree the low 3 bits.
                        position = info[0] - 2
                        if info[2] > 3:
                            extrafield = Bits(self, "position_verbatim[%s" % field.name.split('[')[1], info[2]-3)
                            position += extrafield.value*8
                            extrafield._description = "Position Verbatim Bits (%i), added position %i"%(extrafield.value, extrafield.value*8)
                            yield extrafield
                        if info[2] >= 3:
                            extrafield = HuffmanCode(self, "position_aligned[%s" % field.name.split('[')[1], aligned_tree)
                            position += extrafield.realvalue
                            extrafield._description = "Position Aligned Bits (%i), total position %i"%(extrafield.realvalue, position)
                            yield extrafield
                # The new offset becomes R0; earlier ones shift down the LRU.
                self.parent.r2 = self.parent.r1
                self.parent.r1 = self.parent.r0
                self.parent.r0 = position
                self.parent.uncompressed_data = extend_data(self.parent.uncompressed_data, length, position)
                current_decoded_size += length
        elif self.block_type == 3: # Uncompressed block
            padding = paddingSize(self.address + self.current_size, 16)
            if padding:
                yield PaddingBits(self, "padding[]", padding)
            else:
                yield PaddingBits(self, "padding[]", 16)
            # Uncompressed payload is little-endian and starts with the new
            # R0-R2 offset values.
            self.endian = LITTLE_ENDIAN
            yield UInt32(self, "r[]", "New value of R0")
            yield UInt32(self, "r[]", "New value of R1")
            yield UInt32(self, "r[]", "New value of R2")
            self.parent.r0 = self["r[0]"].value
            self.parent.r1 = self["r[1]"].value
            self.parent.r2 = self["r[2]"].value
            yield RawBytes(self, "data", self.uncompressed_size)
            self.parent.uncompressed_data+=self["data"].value
            if self["block_size"].value % 2:
                yield PaddingBits(self, "padding", 8)
        else:
            raise ParserError("Unknown block type %d!"%self.block_type)
class LZXStream(Parser):
    """Top-level parser for a raw LZX data stream.

    Callers must set .compr_level (the window-size exponent, 15-21) before
    iterating; see lzx_decompress().
    """
    endian = MIDDLE_ENDIAN

    def createFields(self):
        # Decompression state shared with the child LZXBlock fields.
        self.uncompressed_data = ""
        self.r0 = 1  # three most-recently-used match offsets (LRU order)
        self.r1 = 1
        self.r2 = 1
        yield Bit(self, "filesize_indicator")
        if self["filesize_indicator"].value:
            yield UInt32(self, "filesize")
        while self.current_size < self.size:
            block = LZXBlock(self, "block[]")
            yield block
            if self.size - self.current_size < 16:
                # Fewer than 16 bits remain: emit trailing padding and stop.
                padding = paddingSize(self.address + self.current_size, 16)
                if padding:
                    yield PaddingBits(self, "padding[]", padding)
                break
def lzx_decompress(stream, window_bits):
    """Decompress a Hachoir LZX stream (slow, pure-Python).

    window_bits is the LZX window-size exponent required by LZXStream.
    """
    parser = LZXStream(stream)
    parser.compr_level = window_bits
    # Walking every field forces the full parse, and thus the decompression.
    for _field in parser:
        pass
    return parser.uncompressed_data
| gpl-3.0 |
j4/horizon | openstack_dashboard/dashboards/project/volumes/backups/forms.py | 57 | 4363 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Views for managing backups.
"""
import operator
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import forms
from horizon import messages
from openstack_dashboard import api
from openstack_dashboard.dashboards.project.containers \
import forms as containers_forms
class CreateBackupForm(forms.SelfHandlingForm):
    """Form that creates a Cinder backup of a volume in a Swift container."""
    name = forms.CharField(max_length=255, label=_("Backup Name"))
    description = forms.CharField(widget=forms.Textarea(attrs={'rows': 4}),
                                  label=_("Description"),
                                  required=False)
    container_name = forms.CharField(
        max_length=255,
        label=_("Container Name"),
        validators=[containers_forms.no_slash_validator],
        required=False)
    # The target volume is fixed by the calling view, not user-editable.
    volume_id = forms.CharField(widget=forms.HiddenInput())

    def handle(self, request, data):
        """Create the backup; returns the backup object or redirects on error."""
        # Create a container for the user if no input is given
        if not data['container_name']:
            data['container_name'] = 'volumebackups'

        try:
            backup = api.cinder.volume_backup_create(request,
                                                     data['volume_id'],
                                                     data['container_name'],
                                                     data['name'],
                                                     data['description'])
            message = _('Creating volume backup "%s"') % data['name']
            messages.success(request, message)
            return backup
        except Exception:
            # Any API failure funnels into a generic error and a redirect
            # back to the volumes index.
            redirect = reverse('horizon:project:volumes:index')
            exceptions.handle(request,
                              _('Unable to create volume backup.'),
                              redirect=redirect)
class RestoreBackupForm(forms.SelfHandlingForm):
    """Form that restores a Cinder backup into an existing or new volume."""
    volume_id = forms.ChoiceField(label=_('Select Volume'), required=False)
    # Backup identity is fixed by the calling view, not user-editable.
    backup_id = forms.CharField(widget=forms.HiddenInput())
    backup_name = forms.CharField(widget=forms.HiddenInput())

    def __init__(self, request, *args, **kwargs):
        super(RestoreBackupForm, self).__init__(request, *args, **kwargs)
        try:
            volumes = api.cinder.volume_list(request)
        except Exception:
            msg = _('Unable to lookup volume or backup information.')
            redirect = reverse('horizon:project:volumes:index')
            exceptions.handle(request, msg, redirect=redirect)
            raise exceptions.Http302(redirect)

        # Offer existing volumes (sorted by name then age) plus the option
        # to restore into a brand-new volume (the empty-string choice).
        volumes.sort(key=operator.attrgetter('name', 'created_at'))
        choices = [('', _('Create a New Volume'))]
        choices.extend((volume.id, volume.name) for volume in volumes)
        self.fields['volume_id'].choices = choices

    def handle(self, request, data):
        """Kick off the restore; returns the restore object or redirects on error."""
        backup_id = data['backup_id']
        backup_name = data['backup_name'] or None
        volume_id = data['volume_id'] or None

        try:
            restore = api.cinder.volume_backup_restore(request,
                                                       backup_id,
                                                       volume_id)
            # Needed for cases when a new volume is created.
            volume_id = restore.volume_id
            message = _('Successfully restored backup %(backup_name)s '
                        'to volume with id: %(volume_id)s')
            messages.success(request, message % {'backup_name': backup_name,
                                                 'volume_id': volume_id})
            return restore
        except Exception:
            msg = _('Unable to restore backup.')
            redirect = reverse('horizon:project:volumes:index')
            exceptions.handle(request, msg, redirect=redirect)
| apache-2.0 |
WilliamYi96/Machine-Learning | Deep-Learning-Specialization/Convolutional-Neural-Networks/CNN-Applications.py | 1 | 1996 | import math
import numpy as np
import h5py
import matplotlib.pyplot as plt
from PIL import Image
from scipy import ndimage
import tensorflow as tf
from tensorflow.python.framework import ops
from cnn_utils import *
np.random.seed(1)
# Loading the data (signs)
X_train_orig, Y_train_orig, X_test_orig, Y_test_orig, classes = load_dataset()
# Example of a picture
index = 5
plt.imshow(X_train_orig[index])
# print(Y_train_orig.shape, np.squeeze(Y_train_orig).shape)
print('y = ' + str(np.squeeze(Y_train_orig)[index]))
# print('y = ' + str(np.squeeze(Y_train_orig[:,index])))
plt.show()
X_train = X_train_orig / 255.
X_test = X_test_orig / 255.
Y_train = convert_to_one_hot(Y_train_orig, 6).T
Y_test = convert_to_one_hot(Y_test_orig, 6).T
print ("number of training examples = " + str(X_train.shape[0]))
print ("number of test examples = " + str(X_test.shape[0]))
print ("X_train shape: " + str(X_train.shape))
print ("Y_train shape: " + str(Y_train.shape))
print ("X_test shape: " + str(X_test.shape))
print ("Y_test shape: " + str(Y_test.shape))
conv_layers = {}
def create_placeholders(n_H0, n_W0, n_C0, n_y):
    """Build the input/label placeholders for a TensorFlow (v1) session.

    Arguments:
    n_H0 -- scalar, height of an input image
    n_W0 -- scalar, width of an input image
    n_C0 -- scalar, number of channels of the input
    n_y -- scalar, number of classes

    Returns:
    X -- placeholder for the data input, of shape [None, n_H0, n_W0, n_C0] and dtype 'float'
    Y -- placeholder for the input labels, of shape [None, n_y] and dtype 'float'
    """
    # The leading None dimension leaves the batch size flexible.
    images_ph = tf.placeholder(tf.float32, shape=[None, n_H0, n_W0, n_C0])
    labels_ph = tf.placeholder(tf.float32, shape=[None, n_y])
    return images_ph, labels_ph
X, Y = create_placeholders(64, 64, 3, 6)
print('X = {}'.format(X))
print('Y = {}'.format(Y))
# Continue from Initialize parameters.
# https://walzqyuibvvdjprisbmedy.coursera-apps.org/notebooks/week1/Convolution_model_Application_v1a.ipynb#1.2---Initialize-parameters | apache-2.0 |
ttm/indicadores-participativos | puxaTweetsAA.py | 1 | 5595 | #-*- coding: utf8 -*-
import pymongo, time as T, sys
from twython import TwythonStreamer
from twython import Twython
#HTAG="#arenaNETmundial"
HTAG="#aao0"
HTAG_=HTAG.replace("#","H")
#client=pymongo.MongoClient()
#db = client['mytest']
#C = db['twitter'] #collection
#foo=C.find()
#tweets=[ff for ff in foo if "arenaNETmundial" in ff.keys()][0]["arenaNETmundial"]
#
from maccess import tw2 as tw
TWITTER_API_KEY = tw.tak
TWITTER_API_KEY_SECRET = tw.taks
TWITTER_ACCESS_TOKEN = tw.tat
TWITTER_ACCESS_TOKEN_SECRET = tw.tats
#TWITTER_API_KEY = 'AI2A7Ts772TesPoK38njA' #supply the appropriate value
#TWITTER_API_KEY_SECRET = 'dmrMuSkX78WyKrvp2s9VUTWKcRbnGa46uREcWjfzad4'
#TWITTER_ACCESS_TOKEN = '18882547-ylXgGFI1FfqR4XpsA3HASBnMLDEbUKxUM1IXb7sD2'
#TWITTER_ACCESS_TOKEN_SECRET = 'gZYe00t5UCTFukHTgtchZECiFN8W5Easho5u4dB5EoPEm'
print 1
t = Twython(app_key=TWITTER_API_KEY,
app_secret=TWITTER_API_KEY_SECRET,
oauth_token=TWITTER_ACCESS_TOKEN,
oauth_token_secret=TWITTER_ACCESS_TOKEN_SECRET)
print 1
#search = t.search(q=HTAG,count=150,max_id=tweets[-1]['id']-1)
search = t.search(q=HTAG,count=150)
from maccess import mdc
client=pymongo.MongoClient(mdc.u1)
db = client['sna']
C = db[HTAG_] #collection
foo=C.find()#twitterArena
print 2
ss=[]
if not foo.count(): # collection n existe
print "colecao n existe"
# fazer busca geral
# e retroativa, tentar util
# https://dev.twitter.com/docs/api/1.1/get/search/tweets
# include_entities para true
search = t.search(q=HTAG,count=100,result_type="recent")
while search["statuses"]:
print len(search["statuses"])
print "batelada de status"
ss+=search["statuses"]
T.sleep(70)
search = t.search(q=HTAG,count=150,max_id=ss[-1]['id']-1,result_type="recent")
ss=ss[::-1]
# collection já existe, adicionar tweets mais reecntes
# inverter se realmente precisar
search = t.search(q=HTAG,count=150,since_id=ss[-1]['id'],result_type="recent")
while search["statuses"]:
print len(search["statuses"])
print "batelada de status mais recente"
ss+=search["statuses"][::-1]
T.sleep(70)
search = t.search(q=HTAG,count=150,since_id=ss[-1]['id'],result_type="recent")
C.insert(ss)
# else: se já existe, pegar os limites inferiores e superiores do BD
else:
print "colecao jah existe"
quantos=foo.count()
primeira=foo[0]["id"]
dprimeira=foo[0]["created_at"]
ultima= foo[quantos-1]["id"]
dultima=foo[0]["created_at"]
search = t.search(q=HTAG,count=100,max_id=primeira-1,result_type="recent")
ss=[]
asd=[]
ANTES=0
while len(search["statuses"]):
ANTES=1
print len(search["statuses"])
asd.append(search["statuses"])
print "batelada de status"
ss+=search["statuses"]
T.sleep(70)
search = t.search(q=HTAG,count=100,max_id=ss[-1]['id']-1,result_type="recent")
asd.append(search["statuses"])
ss=ss[::-1]
antes=[i for i in C.find()]
agora=ss+antes
if ss:
ss=agora
oid=agora[-1]["id"]
search = t.search(q=HTAG,count=150,since_id=oid,result_type="recent")
while search["statuses"]:
print len(search["statuses"])
asd.append(search["statuses"])
print "batelada de status mais recente"
ss+=search["statuses"][::-1]
T.sleep(70)
search = t.search(q=HTAG,count=150,since_id=ss[-1]['id'],result_type="recent")
asd.append(search["statuses"])
if ss:
if ANTES:
C.remove()
else:
C.insert(ss)
# ativar interface de streaming
class MyStreamer(TwythonStreamer):
    """Streaming listener that persists each matching tweet to MongoDB.

    Uses the module-level collection ``C``; only events carrying a 'text'
    key (i.e. actual tweets) are stored and echoed to stdout.
    """
    def on_success(self, data):
        # Non-tweet stream events (limits, deletes, ...) lack 'text'.
        if 'text' in data:
            C.insert(data)
            print data['text'].encode('utf-8')
    def on_error(self, status_code, data):
        # Just report the HTTP status; Twython keeps the stream running.
        print status_code
print "iniciando streaming"
stream=MyStreamer(tw.tak,tw.taks,tw.tat,tw.tats)
stream.statuses.filter(track=HTAG)
sys.exit()
tweets=[ff for ff in foo]
print 1
#since_id
#search = t.search(q='#arenaNETmundial', count=150,since_id="444663164026638336")
#search = t.search(q='#arenaNETmundial', max_id="445939520354406401",result_type="mixed")
#search = t.search(q='#arenaNETmundial', max_id="446756730140385280",result_type="recent")
#search = t.search(q='#arenaNETmundial', since_id="444663164026638336",max_id="445564745635348480",result_type="recent",count=150)
#
while 1:
search = t.search(q='#arenaNETmundial',count=150,max_id=tweets[-1]['id']-1)
i=0
while len(search['statuses'])>0:
tweets +=search['statuses']
#search = t.search(q='#arenaNETmundial', count=150, max_id=tweets[-1]['id']-1)
print "older", i, len(tweets),search['statuses']; i+=1
search2 = t.search(q='#arenaNETmundial',count=150,since_id=tweets[0]['id'])
i=0
while len(search2['statuses'])>0:
tweets =search2['statuses']+tweets
#search = t.search(q='#arenaNETmundial', count=150, since_id=tweets[0]['id'])
print "newer", i, len(tweets),search2['statuses']; i+=1
#db.twitter.remove()
if search['statuses'] or search2['statuses']:
print "tweets"
db.sna.remove()
#C = db['twitter'] #collection
#C.insert({"arenaNETmundial":tweets})
db.sna.insert((i for i in tweets))
#db2.sna.insert((i for i in db.sna.find()))
print("atualizado")
T.sleep(60*60) # atualizar BD de 2 em 2 minutos
stream=Twython.TwythonStreamer(tw.tak,tw.taks,tw.tat,tw.tats)
stream.statuses.filter(track=HTAG)
| lgpl-3.0 |
EduPepperPDTesting/pepper2013-testing | lms/envs/devgroups/courses.py | 68 | 1305 |
# We intentionally define lots of variables that aren't used, and
# want to import all variables from base settings files
# pylint: disable=W0401, W0614
from ..dev import *
CLASSES_TO_DBS = {
'BerkeleyX/CS169.1x/2012_Fall': "cs169.db",
'BerkeleyX/CS188.1x/2012_Fall': "cs188_1.db",
'HarvardX/CS50x/2012': "cs50.db",
'HarvardX/PH207x/2012_Fall': "ph207.db",
'MITx/3.091x/2012_Fall': "3091.db",
'MITx/6.002x/2012_Fall': "6002.db",
'MITx/6.00x/2012_Fall': "600.db",
}
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': '127.0.0.1:11211',
'KEY_FUNCTION': 'util.memcache.safe_key',
},
'general': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': '127.0.0.1:11211',
'KEY_PREFIX': 'general',
'VERSION': 5,
'KEY_FUNCTION': 'util.memcache.safe_key',
}
}
SESSION_ENGINE = 'django.contrib.sessions.backends.cache'
def path_for_db(db_name):
    """Return the full path of the sqlite file named *db_name*.

    ENV_ROOT comes from the base settings and supports '/' path joining
    (a path object) -- TODO confirm its exact type against the base module.
    """
    return ENV_ROOT / "db" / db_name
def course_db_for(course_id):
    """Return a Django DATABASES dict for the sqlite file mapped to *course_id*.

    Raises KeyError if the course is not listed in CLASSES_TO_DBS.
    """
    sqlite_file = CLASSES_TO_DBS[course_id]
    return {
        'default': {
            'ENGINE': 'django.db.backends.sqlite3',
            'NAME': path_for_db(sqlite_file),
        }
    }
| agpl-3.0 |
sanger-pathogens/ariba | ariba/tasks/micplot.py | 2 | 1239 | import argparse
import ariba
def run(options):
    """Entry point for the ``ariba micplot`` task.

    Builds a MicPlotter from the parsed argparse ``options`` -- every keyword
    below is forwarded verbatim from the identically named command-line
    option -- and immediately runs it.  See MicPlotter for each option's
    meaning.
    """
    plotter = ariba.mic_plotter.MicPlotter(
      options.prepareref_dir,
      options.antibiotic,
      options.mic_file,
      options.summary_file,
      options.outprefix,
      use_hets=options.use_hets,
      main_title=options.main_title,
      plot_height=options.plot_height,
      plot_width=options.plot_width,
      log_y=options.log_y,
      plot_types=options.plot_types,
      jitter_width=options.jitter_width,
      no_combinations=options.no_combinations,
      hlines=options.hlines,
      point_size=options.point_size,
      point_scale=options.point_scale,
      dot_size=options.dot_size,
      dot_outline=options.dot_outline,
      dot_y_text_size=options.dot_y_text_size,
      panel_heights=options.panel_heights,
      panel_widths=options.panel_widths,
      colourmap=options.colourmap,
      number_of_colours=options.number_of_colours,
      colour_skip=options.colour_skip,
      interrupted=options.interrupted,
      violin_width=options.violin_width,
      xkcd=options.xkcd,
      min_samples=options.min_samples,
      count_legend_x=options.count_legend_x,
      out_format=options.out_format,
      p_cutoff=options.p_cutoff
    )
    plotter.run()
| gpl-3.0 |
WillisXChen/django-oscar | oscar/lib/python2.7/site-packages/IPython/lib/tests/test_display.py | 8 | 5762 | """Tests for IPython.lib.display.
"""
#-----------------------------------------------------------------------------
# Copyright (c) 2012, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from __future__ import print_function
from tempfile import NamedTemporaryFile, mkdtemp
from os.path import split, join as pjoin, dirname
# Third-party imports
import nose.tools as nt
# Our own imports
from IPython.lib import display
from IPython.testing.decorators import skipif_not_numpy
#-----------------------------------------------------------------------------
# Classes and functions
#-----------------------------------------------------------------------------
#--------------------------
# FileLink tests
#--------------------------
def test_instantiation_FileLink():
    """FileLink: Test class can be instantiated"""
    display.FileLink('example.txt')
def test_warning_on_non_existant_path_FileLink():
    """FileLink: Calling _repr_html_ on non-existant files returns a warning
    """
    link = display.FileLink('example.txt')
    html = link._repr_html_()
    nt.assert_true(html.startswith('Path (<tt>example.txt</tt>)'))
def test_existing_path_FileLink():
    """FileLink: Calling _repr_html_ functions as expected on existing filepath
    """
    tmp = NamedTemporaryFile()
    link = display.FileLink(tmp.name)
    expected = "<a href='%s' target='_blank'>%s</a><br>" % (tmp.name, tmp.name)
    nt.assert_equal(link._repr_html_(), expected)
def test_existing_path_FileLink_repr():
    """FileLink: Calling repr() functions as expected on existing filepath
    """
    tmp = NamedTemporaryFile()
    link = display.FileLink(tmp.name)
    # repr() of a FileLink is simply the path it wraps.
    nt.assert_equal(repr(link), tmp.name)
def test_error_on_directory_to_FileLink():
    """FileLink: Raises error when passed directory
    """
    tmpdir = mkdtemp()
    nt.assert_raises(ValueError, display.FileLink, tmpdir)
#--------------------------
# FileLinks tests
#--------------------------
def test_instantiation_FileLinks():
    """FileLinks: Test class can be instantiated
    """
    display.FileLinks('example')
def test_warning_on_non_existant_path_FileLinks():
    """FileLinks: Calling _repr_html_ on non-existant files returns a warning
    """
    links = display.FileLinks('example')
    html = links._repr_html_()
    nt.assert_true(html.startswith('Path (<tt>example</tt>)'))
def test_existing_path_FileLinks():
    """FileLinks: Calling _repr_html_ functions as expected on existing dir
    """
    td = mkdtemp()
    # NamedTemporaryFile keeps the files alive (and on disk) until the
    # objects are garbage collected, i.e. for the duration of this test.
    tf1 = NamedTemporaryFile(dir=td)
    tf2 = NamedTemporaryFile(dir=td)
    fl = display.FileLinks(td)
    actual = fl._repr_html_()
    actual = actual.split('\n')
    actual.sort()
    # the links should always have forward slashes, even on windows, so replace
    # backslashes with forward slashes here
    expected = ["%s/<br>" % td,
                " <a href='%s' target='_blank'>%s</a><br>" %\
                 (tf2.name.replace("\\","/"),split(tf2.name)[1]),
                " <a href='%s' target='_blank'>%s</a><br>" %\
                 (tf1.name.replace("\\","/"),split(tf1.name)[1])]
    expected.sort()
    # We compare the sorted list of links here as that's more reliable
    nt.assert_equal(actual,expected)
def test_existing_path_FileLinks_alt_formatter():
    """FileLinks: Calling _repr_html_ functions as expected w/ an alt formatter
    """
    td = mkdtemp()
    tf1 = NamedTemporaryFile(dir=td)
    tf2 = NamedTemporaryFile(dir=td)
    def fake_formatter(dirname,fnames,included_suffixes):
        # Ignore the real directory contents; emit fixed lines to assert on.
        return ["hello","world"]
    fl = display.FileLinks(td,notebook_display_formatter=fake_formatter)
    actual = fl._repr_html_()
    actual = actual.split('\n')
    actual.sort()
    expected = ["hello","world"]
    expected.sort()
    # We compare the sorted list of links here as that's more reliable
    nt.assert_equal(actual,expected)
def test_existing_path_FileLinks_repr():
    """FileLinks: Calling repr() functions as expected on existing directory """
    td = mkdtemp()
    tf1 = NamedTemporaryFile(dir=td)
    tf2 = NamedTemporaryFile(dir=td)
    fl = display.FileLinks(td)
    actual = repr(fl)
    actual = actual.split('\n')
    actual.sort()
    # repr lists the directory followed by its (indented) entries.
    expected = ['%s/' % td, ' %s' % split(tf1.name)[1],' %s' % split(tf2.name)[1]]
    expected.sort()
    # We compare the sorted list of links here as that's more reliable
    nt.assert_equal(actual,expected)
def test_existing_path_FileLinks_repr_alt_formatter():
    """FileLinks: Calling repr() functions as expected w/ alt formatter
    """
    td = mkdtemp()
    tf1 = NamedTemporaryFile(dir=td)
    tf2 = NamedTemporaryFile(dir=td)
    def fake_formatter(dirname,fnames,included_suffixes):
        # Ignore the real directory contents; emit fixed lines to assert on.
        return ["hello","world"]
    fl = display.FileLinks(td,terminal_display_formatter=fake_formatter)
    actual = repr(fl)
    actual = actual.split('\n')
    actual.sort()
    expected = ["hello","world"]
    expected.sort()
    # We compare the sorted list of links here as that's more reliable
    nt.assert_equal(actual,expected)
def test_error_on_file_to_FileLinks():
    """FileLinks: Raises error when passed file
    """
    tmpdir = mkdtemp()
    tmpfile = NamedTemporaryFile(dir=tmpdir)
    nt.assert_raises(ValueError, display.FileLinks, tmpfile.name)
@skipif_not_numpy
def test_audio_from_file():
    """Audio: instantiating from an on-disk wav file should not raise."""
    wav_path = pjoin(dirname(__file__), 'test.wav')
    display.Audio(filename=wav_path)
zhujiangang/shadowsocks | shadowsocks/crypto/sodium.py | 1032 | 3778 | #!/usr/bin/env python
#
# Copyright 2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
from ctypes import c_char_p, c_int, c_ulonglong, byref, \
create_string_buffer, c_void_p
from shadowsocks.crypto import util
__all__ = ['ciphers']
libsodium = None
loaded = False
buf_size = 2048
# for salsa20 and chacha20
BLOCK_SIZE = 64
def load_libsodium():
    """Locate libsodium via ctypes and declare the stream-cipher signatures.

    Sets the module-level ``libsodium`` handle, allocates the shared output
    buffer ``buf`` and flips ``loaded`` so this runs at most once.
    Raises if the shared library cannot be found.
    """
    global loaded, libsodium, buf
    libsodium = util.find_library('sodium', 'crypto_stream_salsa20_xor_ic',
                                  'libsodium')
    if libsodium is None:
        raise Exception('libsodium not found')
    # C signature: int crypto_stream_*_xor_ic(c, m, mlen, nonce, ic, key)
    libsodium.crypto_stream_salsa20_xor_ic.restype = c_int
    libsodium.crypto_stream_salsa20_xor_ic.argtypes = (c_void_p, c_char_p,
                                                       c_ulonglong,
                                                       c_char_p, c_ulonglong,
                                                       c_char_p)
    libsodium.crypto_stream_chacha20_xor_ic.restype = c_int
    libsodium.crypto_stream_chacha20_xor_ic.argtypes = (c_void_p, c_char_p,
                                                        c_ulonglong,
                                                        c_char_p, c_ulonglong,
                                                        c_char_p)
    buf = create_string_buffer(buf_size)
    loaded = True
class SodiumCrypto(object):
    """Stream cipher (salsa20/chacha20) backed by libsodium.

    The same code path serves encryption and decryption; ``op`` is accepted
    for interface compatibility with the other crypto backends but is not
    used, because XOR stream ciphers are symmetric.
    """
    def __init__(self, cipher_name, key, iv, op):
        if not loaded:
            load_libsodium()
        self.key = key
        self.iv = iv
        self.key_ptr = c_char_p(key)
        self.iv_ptr = c_char_p(iv)
        if cipher_name == 'salsa20':
            self.cipher = libsodium.crypto_stream_salsa20_xor_ic
        elif cipher_name == 'chacha20':
            self.cipher = libsodium.crypto_stream_chacha20_xor_ic
        else:
            raise Exception('Unknown cipher')
        # byte counter, not block counter
        self.counter = 0
    def update(self, data):
        """Encrypt/decrypt ``data`` and return the transformed bytes.

        libsodium's *_xor_ic functions start from a 64-byte block index, so
        when the running byte counter sits mid-block we prepend zero padding
        up to the block boundary, cipher from the containing block, then
        strip the padding from the result.
        """
        global buf_size, buf
        l = len(data)
        # we can only prepend some padding to make the encryption align to
        # blocks
        padding = self.counter % BLOCK_SIZE
        if buf_size < padding + l:
            # grow the shared output buffer geometrically
            buf_size = (padding + l) * 2
            buf = create_string_buffer(buf_size)
        if padding:
            data = (b'\0' * padding) + data
        self.cipher(byref(buf), c_char_p(data), padding + l,
                    self.iv_ptr, int(self.counter / BLOCK_SIZE), self.key_ptr)
        self.counter += l
        # buf is copied to a str object when we access buf.raw
        # strip off the padding
        return buf.raw[padding:padding + l]
ciphers = {
'salsa20': (32, 8, SodiumCrypto),
'chacha20': (32, 8, SodiumCrypto),
}
def test_salsa20():
    """Round-trip a payload through a salsa20 encrypt/decrypt pair."""
    encryptor = SodiumCrypto('salsa20', b'k' * 32, b'i' * 16, 1)
    decryptor = SodiumCrypto('salsa20', b'k' * 32, b'i' * 16, 0)
    util.run_cipher(encryptor, decryptor)
def test_chacha20():
    """Round-trip a payload through a chacha20 encrypt/decrypt pair."""
    encryptor = SodiumCrypto('chacha20', b'k' * 32, b'i' * 16, 1)
    decryptor = SodiumCrypto('chacha20', b'k' * 32, b'i' * 16, 0)
    util.run_cipher(encryptor, decryptor)
if __name__ == '__main__':
test_chacha20()
test_salsa20()
| apache-2.0 |
harshilasu/LinkurApp | y/google-cloud-sdk/platform/gcutil/lib/google_api_python_client/apiclient/mimeparse.py | 5 | 6459 | # Copyright (C) 2007 Joe Gregorio
#
# Licensed under the MIT License
"""MIME-Type Parser
This module provides basic functions for handling mime-types. It can handle
matching mime-types against a list of media-ranges. See section 14.1 of the
HTTP specification [RFC 2616] for a complete explanation.
http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.1
Contents:
- parse_mime_type(): Parses a mime-type into its component parts.
- parse_media_range(): Media-ranges are mime-types with wild-cards and a 'q'
quality parameter.
- quality(): Determines the quality ('q') of a mime-type when
compared against a list of media-ranges.
- quality_parsed(): Just like quality() except the second parameter must be
pre-parsed.
- best_match(): Choose the mime-type with the highest quality ('q')
from a list of candidates.
"""
__version__ = '0.1.3'
__email__ = 'joe@bitworking.org'
__license__ = 'MIT License'
__credits__ = ''
def parse_mime_type(mime_type):
    """Parses a mime-type into its component parts.

    Carves up a mime-type and returns a tuple of the (type, subtype, params)
    where 'params' is a dictionary of all the parameters for the media range.
    For example, the media range 'application/xhtml;q=0.5' would get parsed
    into:

       ('application', 'xhtml', {'q': '0.5'})
    """
    parts = mime_type.split(';')
    # Each parameter is 'key=value'; split only on the first '=' so values
    # may themselves contain '='.
    params = dict([tuple([s.strip() for s in param.split('=', 1)])
                   for param in parts[1:]])
    full_type = parts[0].strip()
    # Java URLConnection class sends an Accept header that includes a
    # single '*'. Turn it into a legal wildcard.
    if full_type == '*':
        full_type = '*/*'
    (type, subtype) = full_type.split('/')
    return (type.strip(), subtype.strip(), params)


def parse_media_range(range):
    """Parse a media-range into its component parts.

    Carves up a media range and returns a tuple of the (type, subtype,
    params) where 'params' is a dictionary of all the parameters for the media
    range.  For example, the media range 'application/*;q=0.5' would get parsed
    into:

       ('application', '*', {'q': '0.5'})

    In addition this function also guarantees that there is a value for 'q'
    in the params dictionary, filling it in with a proper default if
    necessary.
    """
    (type, subtype, params) = parse_mime_type(range)
    # 'q' must be present and parse to a float in [0, 1].  Note an explicit
    # q=0 is also normalized to '1' here, preserving this module's
    # historical behaviour.  ('has_key' was replaced with 'in' so the code
    # runs on both Python 2 and Python 3.)
    if 'q' not in params or not params['q'] or \
            not float(params['q']) or float(params['q']) > 1 \
            or float(params['q']) < 0:
        params['q'] = '1'
    return (type, subtype, params)
def fitness_and_quality_parsed(mime_type, parsed_ranges):
    """Find the best match for a mime-type amongst parsed media-ranges.

    Find the best match for a given mime-type against a list of media_ranges
    that have already been parsed by parse_media_range(). Returns a tuple of
    the fitness value and the value of the 'q' quality parameter of the best
    match, or (-1, 0) if no match was found. Just as for quality_parsed(),
    'parsed_ranges' must be a list of parsed media ranges.
    """
    best_fitness = -1
    best_fit_q = 0
    (target_type, target_subtype, target_params) = parse_media_range(mime_type)
    for (type, subtype, params) in parsed_ranges:
        type_match = (type == target_type or
                      type == '*' or
                      target_type == '*')
        subtype_match = (subtype == target_subtype or
                         subtype == '*' or
                         target_subtype == '*')
        if type_match and subtype_match:
            # Count the (non-'q') parameters the target shares with this
            # range.  The Python-2-only 'iteritems'/builtin-'reduce' pair was
            # replaced with '.items()' + a generator expression, which works
            # identically on Python 2 and Python 3.
            param_matches = sum(1 for (key, value) in target_params.items()
                                if key != 'q' and
                                key in params and value == params[key])
            # Exact type is worth 100 points, exact subtype 10, plus one
            # point per matching parameter.
            fitness = 100 if type == target_type else 0
            fitness += 10 if subtype == target_subtype else 0
            fitness += param_matches
            if fitness > best_fitness:
                best_fitness = fitness
                best_fit_q = params['q']
    return best_fitness, float(best_fit_q)
def quality_parsed(mime_type, parsed_ranges):
    """Find the best match for a mime-type amongst parsed media-ranges.

    Find the best match for a given mime-type against a list of media_ranges
    that have already been parsed by parse_media_range(). Returns the 'q'
    quality parameter of the best match, 0 if no match was found. This function
    bahaves the same as quality() except that 'parsed_ranges' must be a list of
    parsed media ranges.
    """
    (_fitness, q) = fitness_and_quality_parsed(mime_type, parsed_ranges)
    return q
def quality(mime_type, ranges):
    """Return the quality ('q') of a mime-type against a list of media-ranges.

    Returns the quality 'q' of a mime-type when compared against the
    media-ranges in ranges. For example:

    >>> quality('text/html','text/*;q=0.3, text/html;q=0.7,
                  text/html;level=1, text/html;level=2;q=0.4, */*;q=0.5')
    0.7
    """
    parsed = [parse_media_range(media_range)
              for media_range in ranges.split(',')]
    return quality_parsed(mime_type, parsed)
def best_match(supported, header):
    """Return mime-type with the highest quality ('q') from list of candidates.

    Takes a list of supported mime-types and finds the best match for all the
    media-ranges listed in header. The value of header must be a string that
    conforms to the format of the HTTP Accept: header. The value of 'supported'
    is a list of mime-types. The list of supported mime-types should be sorted
    in order of increasing desirability, in case of a situation where there is
    a tie.

    >>> best_match(['application/xbel+xml', 'text/xml'],
                   'text/*;q=0.5,*/*; q=0.1')
    'text/xml'
    """
    parsed_header = [parse_media_range(r)
                     for r in _filter_blank(header.split(','))]
    # Score every candidate; the list position breaks fitness/quality ties
    # in favour of later (more desirable) entries.
    weighted_matches = []
    for pos, mime_type in enumerate(supported):
        score = fitness_and_quality_parsed(mime_type, parsed_header)
        weighted_matches.append((score, pos, mime_type))
    weighted_matches.sort()
    (best_score, _pos, best_type) = weighted_matches[-1]
    # A winning quality of 0 means nothing acceptable was found.
    return best_type if best_score[1] else ''
def _filter_blank(i):
for s in i:
if s.strip():
yield s
| gpl-3.0 |
softeg/symfony2 | vendor/doctrine/orm/docs/en/conf.py | 2448 | 6497 | # -*- coding: utf-8 -*-
#
# Doctrine 2 ORM documentation build configuration file, created by
# sphinx-quickstart on Fri Dec 3 18:10:24 2010.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.append(os.path.abspath('_exts'))
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['configurationblock']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Doctrine 2 ORM'
copyright = u'2010-12, Doctrine Project Team'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '2'
# The full version, including alpha/beta/rc tags.
release = '2'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
language = 'en'
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
show_authors = True
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'doctrine'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['_theme']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'Doctrine2ORMdoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'Doctrine2ORM.tex', u'Doctrine 2 ORM Documentation',
u'Doctrine Project Team', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
primary_domain = "dcorm"
def linkcode_resolve(domain, info):
    """Sphinx linkcode hook: map a documented object to a source URL.

    Only the custom 'dcorm' domain gets a link (currently a placeholder
    URL); every other domain yields None, i.e. no source link.
    """
    if domain != 'dcorm':
        return None
    return 'http://'
| mit |
tedder/ansible | lib/ansible/modules/network/f5/bigip_log_destination.py | 14 | 58575 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2017, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Standard metadata block consumed by Ansible tooling (ansible-doc, CI).
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: bigip_log_destination
short_description: Manages log destinations on a BIG-IP.
description:
- Manages log destinations on a BIG-IP.
version_added: 2.6
options:
name:
description:
- Specifies the name of the log destination.
required: True
type:
description:
- Specifies the type of log destination.
- Once created, this parameter cannot be changed.
choices:
- remote-high-speed-log
- remote-syslog
- arcsight
- splunk
- management-port
- ipfix
required: True
description:
description:
- The description of the log destination.
pool_settings:
description:
- This parameter is only available when C(type) is C(remote-high-speed-log).
- Deprecated. Use the equivalent top-level parameters instead.
suboptions:
pool:
description:
- Specifies the existing pool of remote high-speed log servers where logs will be sent.
- When creating a new destination (and C(type) is C(remote-high-speed-log)), this parameter
is required.
protocol:
description:
- Specifies the protocol for the system to use to send logs to the pool of remote high-speed
log servers, where the logs are stored.
- When creating a new log destination (and C(type) is C(remote-high-speed-log)), if this
parameter is not specified, the default is C(tcp).
choices:
- tcp
- udp
distribution:
description:
- Specifies the distribution method used by the Remote High Speed Log destination to send
messages to pool members.
- When C(adaptive), connections to pool members will be added as required to provide enough
logging bandwidth. This can have the undesirable effect of logs accumulating on only one
pool member when it provides sufficient logging bandwidth on its own.
- When C(balanced), sends each successive log to a new pool member, balancing the logs among
them according to the pool's load balancing method.
- When C(replicated), replicates each log to all pool members, for redundancy.
- When creating a new log destination (and C(type) is C(remote-high-speed-log)), if this
parameter is not specified, the default is C(adaptive).
choices:
- adaptive
- balanced
- replicated
syslog_settings:
description:
- This parameter is only available when C(type) is C(remote-syslog).
- Deprecated. Use the equivalent top-level parameters instead.
suboptions:
syslog_format:
description:
- Specifies the method to use to format the logs associated with the remote Syslog log destination.
- When creating a new log destination (and C(type) is C(remote-syslog)), if this parameter is
not specified, the default is C(bsd-syslog).
- The C(syslog) and C(rfc5424) choices are two ways of saying the same thing.
- The C(bsd-syslog) and C(rfc3164) choices are two ways of saying the same thing.
choices:
- bsd-syslog
- syslog
- legacy-bigip
- rfc5424
- rfc3164
forward_to:
description:
- Specifies the management port log destination, which will be used to forward the logs to a
single log server, or a remote high-speed log destination, which will be used to forward the
logs to a pool of remote log servers.
- When creating a new log destination (and C(type) is C(remote-syslog)), this parameter is required.
syslog_format:
description:
- Specifies the method to use to format the logs associated with the remote Syslog log destination.
- When creating a new log destination (and C(type) is C(remote-syslog)), if this parameter is
not specified, the default is C(bsd-syslog).
- The C(syslog) and C(rfc5424) choices are two ways of saying the same thing.
- The C(bsd-syslog) and C(rfc3164) choices are two ways of saying the same thing.
choices:
- bsd-syslog
- syslog
- legacy-bigip
- rfc5424
- rfc3164
version_added: 2.8
forward_to:
description:
- When C(type) is C(remote-syslog), specifies the management port log destination, which will
be used to forward the logs to a single log server, or a remote high-speed log destination,
which will be used to forward the logs to a pool of remote log servers.
- When C(type) is C(splunk) or C(arcsight), specifies the log destination to which logs are
forwarded. This log destination may be a management port destination, a remote high-speed
log destination, or a remote Syslog destination which is configured to send logs to an
ArcSight or Splunk server.
- When creating a new log destination and C(type) is C(remote-syslog), C(splunk), or C(arcsight),
this parameter is required.
version_added: 2.8
pool:
description:
- When C(type) is C(remote-high-speed-log), specifies the existing pool of remote high-speed
log servers where logs will be sent.
- When C(type) is C(ipfix), specifies the existing LTM pool of remote IPFIX collectors. Any
BIG-IP application that uses this log destination sends its IP-traffic logs to this pool
of collectors.
- When creating a new destination and C(type) is C(remote-high-speed-log) or C(ipfix), this
parameter is required.
version_added: 2.8
protocol:
description:
- When C(type) is C(remote-high-speed-log), specifies the protocol for the system to use to
send logs to the pool of remote high-speed log servers, where the logs are stored.
- When C(type) is C(ipfix), can be IPFIX or Netflow v9, depending on the type of collectors
you have in the pool that you specify.
- When C(type) is C(management-port), specifies the protocol used to send messages to the
specified location.
- When C(type) is C(management-port), only C(tcp) and C(udp) are valid values.
choices:
- tcp
- udp
- ipfix
- netflow-9
version_added: 2.8
distribution:
description:
- Specifies the distribution method used by the Remote High Speed Log destination to send
messages to pool members.
- When C(adaptive), connections to pool members will be added as required to provide enough
logging bandwidth. This can have the undesirable effect of logs accumulating on only one
pool member when it provides sufficient logging bandwidth on its own.
- When C(balanced), sends each successive log to a new pool member, balancing the logs among
them according to the pool's load balancing method.
- When C(replicated), replicates each log to all pool members, for redundancy.
- When creating a new log destination and C(type) is C(remote-high-speed-log), if this
parameter is not specified, the default is C(adaptive).
choices:
- adaptive
- balanced
- replicated
version_added: 2.8
address:
description:
- Specifies the IP address that will receive messages from the specified local Log Destination.
- This parameter is only available when C(type) is C(management-port).
- When creating a new log destination and C(type) is C(management-port), this parameter
is required.
version_added: 2.8
port:
description:
- Specifies the port of the IP address that will receive messages from the specified local
Log Destination.
- This parameter is only available when C(type) is C(management-port).
- When creating a new log destination and C(type) is C(management-port), this parameter
is required.
version_added: 2.8
transport_profile:
description:
- Is a transport profile based on either TCP or UDP.
- This profile defines the TCP or UDP options used to send IP-traffic logs
to the pool of collectors.
- This parameter is only available when C(type) is C(ipfix).
version_added: 2.8
server_ssl_profile:
description:
- If the C(transport_profile) is a TCP profile, you can use this field to
choose a Secure Socket Layer (SSL) profile for sending logs to the IPFIX
collectors.
- An SSL server profile defines how to communicate securely over SSL or
Transport Layer Security (TLS).
- This parameter is only available when C(type) is C(ipfix).
version_added: 2.8
template_retransmit_interval:
description:
- Enter the time (in seconds) between each transmission of IPFIX templates
to the pool of IPFIX collectors.
- The logging destination periodically retransmits all of its IPFIX templates
at the interval you set in this field. These retransmissions are helpful
for UDP, a lossy transport mechanism.
- This parameter is only available when C(type) is C(ipfix).
version_added: 2.8
template_delete_delay:
description:
- Enter the time (in seconds) that the BIG-IP device should pause between
deleting an obsolete IPFIX template and reusing its template ID.
- This feature is useful for systems where you use iRules to create
customized IPFIX templates.
version_added: 2.8
partition:
description:
- Device partition to manage resources on.
default: Common
state:
description:
- When C(present), ensures that the resource exists.
- When C(absent), ensures the resource is removed.
default: present
choices:
- present
- absent
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
'''
EXAMPLES = r'''
- name: Create a high-speed logging destination
bigip_log_destination:
name: foo
type: remote-high-speed-log
pool: my-ltm-pool
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
- name: Create a remote-syslog logging destination
bigip_log_destination:
name: foo
type: remote-syslog
syslog_format: rfc5424
forward_to: my-destination
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
'''
RETURN = r'''
forward_to:
description: The new Forward To value.
returned: changed
type: str
sample: /Common/dest1
pool:
description: The new Pool value.
returned: changed
type: str
sample: /Common/pool1
distribution:
description: The new Distribution Method value.
returned: changed
type: str
sample: balanced
protocol:
description: The new Protocol value.
returned: changed
type: str
sample: tcp
syslog_format:
description: The new Syslog format value.
returned: changed
type: str
sample: syslog
address:
description: The new Address value.
returned: changed
type: str
sample: 1.2.3.2
port:
description: The new Port value.
returned: changed
type: int
sample: 2020
template_delete_delay:
description: The new Template Delete Delay value.
returned: changed
type: int
sample: 20
template_retransmit_interval:
description: The new Template Retransmit Interval value.
returned: changed
type: int
sample: 200
transport_profile:
description: The new Transport Profile value.
returned: changed
type: str
sample: /Common/tcp
server_ssl_profile:
description: The new Server SSL Profile value.
returned: changed
type: str
sample: /Common/serverssl
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import env_fallback
try:
from library.module_utils.network.f5.bigip import F5RestClient
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import cleanup_tokens
from library.module_utils.network.f5.common import fq_name
from library.module_utils.network.f5.common import f5_argument_spec
from library.module_utils.network.f5.common import transform_name
from library.module_utils.network.f5.common import exit_json
from library.module_utils.network.f5.common import fail_json
from library.module_utils.network.f5.compare import cmp_str_with_none
except ImportError:
from ansible.module_utils.network.f5.bigip import F5RestClient
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import cleanup_tokens
from ansible.module_utils.network.f5.common import fq_name
from ansible.module_utils.network.f5.common import f5_argument_spec
from ansible.module_utils.network.f5.common import transform_name
from ansible.module_utils.network.f5.common import exit_json
from ansible.module_utils.network.f5.common import fail_json
from ansible.module_utils.network.f5.compare import cmp_str_with_none
class V1Parameters(AnsibleF5Parameters):
    """Base Parameters for the ``remote-syslog`` destination type."""
    # Maps REST attribute names to module option names.
    api_map = {
        'remoteHighSpeedLog': 'forward_to',
        'format': 'syslog_format'
    }
    # Attribute names sent to / read from the device API.
    api_attributes = [
        'remoteHighSpeedLog',
        'format'
    ]
    # Values reported back to the user in the module result.
    returnables = [
        'forward_to',
        'syslog_format'
    ]
    # Values compared by Difference when deciding whether to update.
    updatables = [
        'forward_to',
        'syslog_format',
        'type'
    ]
# TODO(Remove in 2.12)
class V1ModuleParameters(V1Parameters):
    """Module-side V1 parameters.

    Prefers the top-level options and falls back to the deprecated
    ``syslog_settings`` suboptions when the top-level value is unset.
    """
    @property
    def forward_to(self):
        """Fully-qualified forward_to, from either parameter location."""
        result = self._values['forward_to']
        if not result:
            settings = self._values['syslog_settings']
            if settings is None:
                return None
            result = settings.get('forward_to', None)
        if result:
            result = fq_name(self.partition, result)
        return result
    @property
    def syslog_format(self):
        """Syslog format with the RFC aliases normalized to canonical names."""
        result = self._values['syslog_format']
        if not result:
            settings = self._values['syslog_settings']
            if settings is None:
                return None
            result = settings.get('syslog_format', None)
        # 'syslog'/'rfc5424' and 'bsd-syslog'/'rfc3164' are synonyms.
        aliases = {'syslog': 'rfc5424', 'bsd-syslog': 'rfc3164'}
        return aliases.get(result, result)
# TODO(Remove in 2.12)
class V1ApiParameters(V1Parameters):
    @property
    def type(self):
        # Every resource managed by this class is a remote-syslog
        # destination; pin the type so Difference can reject changes.
        return 'remote-syslog'
# TODO(Remove in 2.12)
class V1Changes(V1Parameters):
    """Filters V1 parameter values down to the returnable subset."""
    def to_return(self):
        """Return a dict of returnable values; best-effort on errors."""
        collected = {}
        try:
            for key in self.returnables:
                collected[key] = getattr(self, key)
            collected = self._filter_params(collected)
        except Exception:
            # Reporting must never fail the module; return what we have.
            pass
        return collected
# TODO(Remove in 2.12)
class V1UsableChanges(V1Changes):
    """V1 changes in the form written to the device API."""
    pass
# TODO(Remove in 2.12)
class V1ReportableChanges(V1Changes):
    """V1 changes in the form reported back to the user."""
    pass
# TODO(Remove in 2.12)
class V2Parameters(AnsibleF5Parameters):
    """Base Parameters for the ``remote-high-speed-log`` destination type."""
    # Maps REST attribute names to module option names.
    api_map = {
        'poolName': 'pool'
    }
    # Attribute names sent to / read from the device API.
    api_attributes = [
        'distribution',
        'poolName',
        'protocol'
    ]
    # Values reported back to the user in the module result.
    returnables = [
        'pool',
        'distribution',
        'protocol'
    ]
    # Values compared by Difference when deciding whether to update.
    updatables = [
        'pool',
        'distribution',
        'protocol',
        'type'
    ]
# TODO(Remove in 2.12)
class V2ModuleParameters(V2Parameters):
    """Module-side V2 parameters.

    Prefers the top-level options and falls back to the deprecated
    ``pool_settings`` suboptions when the top-level value is unset.
    """
    @property
    def pool(self):
        """Fully-qualified pool name, from either parameter location."""
        result = self._values['pool']
        if not result:
            settings = self._values['pool_settings']
            if settings is None:
                return None
            result = settings.get('pool', None)
        if result:
            result = fq_name(self.partition, result)
        return result
    @property
    def protocol(self):
        """Protocol, preferring the top-level parameter."""
        if self._values['protocol']:
            return self._values['protocol']
        settings = self._values['pool_settings']
        if settings is None:
            return None
        return settings.get('protocol', None)
    @property
    def distribution(self):
        """Distribution method, preferring the top-level parameter."""
        if self._values['distribution']:
            return self._values['distribution']
        settings = self._values['pool_settings']
        if settings is None:
            return None
        return settings.get('distribution', None)
# TODO(Remove in 2.12)
class V2ApiParameters(V2Parameters):
    @property
    def type(self):
        # Every resource managed by this class is a remote-high-speed-log
        # destination; pin the type so Difference can reject changes.
        return 'remote-high-speed-log'
# TODO(Remove in 2.12)
class V2Changes(V2Parameters):
    """Filters V2 parameter values down to the returnable subset."""
    def to_return(self):
        """Return a dict of returnable values; best-effort on errors."""
        collected = {}
        try:
            for key in self.returnables:
                collected[key] = getattr(self, key)
            collected = self._filter_params(collected)
        except Exception:
            # Reporting must never fail the module; return what we have.
            pass
        return collected
# TODO(Remove in 2.12)
class V2UsableChanges(V2Changes):
    """V2 changes in the form written to the device API."""
    pass
# TODO(Remove in 2.12)
class V2ReportableChanges(V2Changes):
    """V2 changes in the form reported back to the user."""
    pass
class V3Parameters(AnsibleF5Parameters):
    """Base Parameters shared by the arcsight, splunk, management-port
    and ipfix destination types.
    """
    # Maps REST attribute names to module option names. Two different
    # REST attributes ('forwardTo' and 'remoteHighSpeedLog') both
    # surface as the single module option 'forward_to'.
    api_map = {
        'forwardTo': 'forward_to',
        'poolName': 'pool',
        'remoteHighSpeedLog': 'forward_to',
        'format': 'syslog_format',
        'ipAddress': 'address',
        'protocolVersion': 'protocol',
        'templateDeleteDelay': 'template_delete_delay',
        'templateRetransmitInterval': 'template_retransmit_interval',
        'transportProfile': 'transport_profile',
        'serversslProfile': 'server_ssl_profile',
    }
    # Attribute names sent to / read from the device API.
    api_attributes = [
        'forwardTo',
        'distribution',
        'poolName',
        'protocol',
        'remoteHighSpeedLog',
        'format',
        'ipAddress',
        'port',
        'serversslProfile',
        'transportProfile',
        'templateRetransmitInterval',
        'templateDeleteDelay',
        'protocolVersion',
    ]
    # Values reported back to the user in the module result.
    returnables = [
        'forward_to',
        'pool',
        'distribution',
        'protocol',
        'syslog_format',
        'address',
        'port',
        'template_delete_delay',
        'template_retransmit_interval',
        'transport_profile',
        'server_ssl_profile',
    ]
    # Values compared by Difference when deciding whether to update.
    # Fix: 'type' was previously listed twice, which made Difference
    # run the (exception-raising) type comparison redundantly on every
    # update check.
    updatables = [
        'forward_to',
        'type',
        'pool',
        'distribution',
        'protocol',
        'syslog_format',
        'address',
        'port',
        'template_delete_delay',
        'template_retransmit_interval',
        'transport_profile',
        'server_ssl_profile',
    ]
class V3ModuleParameters(V3Parameters):
    """Module-side V3 parameters.

    Qualifies object names with the partition and normalizes the
    syslog-format aliases.
    """
    @property
    def forward_to(self):
        """Fully-qualified forward_to destination name."""
        value = self._values['forward_to']
        return None if value is None else fq_name(self.partition, value)
    @property
    def pool(self):
        """Fully-qualified pool name."""
        value = self._values['pool']
        return None if value is None else fq_name(self.partition, value)
    @property
    def syslog_format(self):
        """Syslog format with the RFC aliases normalized to canonical names."""
        value = self._values['syslog_format']
        if value is None:
            return None
        # 'syslog'/'rfc5424' and 'bsd-syslog'/'rfc3164' are synonyms.
        aliases = {'syslog': 'rfc5424', 'bsd-syslog': 'rfc3164'}
        return aliases.get(value, value)
    @property
    def server_ssl_profile(self):
        """SSL profile name; empty string disables, otherwise fully qualified."""
        value = self._values['server_ssl_profile']
        if value is None:
            return None
        if value in ['', 'none']:
            return ''
        return fq_name(self.partition, value)
    @property
    def transport_profile(self):
        """Fully-qualified transport profile name."""
        value = self._values['transport_profile']
        return None if value is None else fq_name(self.partition, value)
class V3ApiParameters(V3Parameters):
    """Device-side V3 parameters; no extra normalization is required."""
    pass
class V3Changes(V3Parameters):
    """Filters V3 parameter values down to the returnable subset."""
    def to_return(self):
        """Return a dict of returnable values; best-effort on errors."""
        collected = {}
        try:
            for key in self.returnables:
                collected[key] = getattr(self, key)
            collected = self._filter_params(collected)
        except Exception:
            # Reporting must never fail the module; return what we have.
            pass
        return collected
class V3UsableChanges(V3Changes):
    """V3 changes in the form written to the device API."""
    pass
class V3ReportableChanges(V3Changes):
    """V3 changes in the form reported back to the user."""
    pass
class Difference(object):
    """Computes which attributes differ between desired and current state.

    ``compare`` first looks for a dedicated comparison property on this
    class; when none exists it falls back to a plain equality check
    between the ``want`` and ``have`` parameter objects.
    """
    def __init__(self, want, have=None):
        self.want = want
        self.have = have
    def compare(self, param):
        """Return the desired value when it differs, else ``None``."""
        try:
            return getattr(self, param)
        except AttributeError:
            return self.__default(param)
    def __default(self, param):
        # Plain comparison: report the wanted value when it differs or
        # when the current state does not expose the attribute at all.
        want_value = getattr(self.want, param)
        try:
            if want_value != getattr(self.have, param):
                return want_value
        except AttributeError:
            return want_value
    @property
    def type(self):
        # The destination type is immutable on the device.
        if self.want.type != self.have.type:
            raise F5ModuleError(
                "'type' cannot be changed once it is set."
            )
    @property
    def server_ssl_profile(self):
        return cmp_str_with_none(self.want.server_ssl_profile, self.have.server_ssl_profile)
    @property
    def transport_profile(self):
        return cmp_str_with_none(self.want.transport_profile, self.have.transport_profile)
class BaseManager(object):
    """Shared state-machine logic for every log-destination manager.

    Subclasses provide the type-specific Parameter classes (via the
    ``get_*`` hooks) and the REST calls; this base class drives the
    present/absent workflow, diffing and check-mode handling.
    """
    def __init__(self, *args, **kwargs):
        self.module = kwargs.get('module', None)
        self.client = kwargs.get('client', None)

    def _set_changed_options(self):
        # Seed ``self.changes`` with every option the user supplied.
        changed = {
            key: getattr(self.want, key)
            for key in self.get_returnables()
            if getattr(self.want, key) is not None
        }
        if changed:
            self.changes = self.get_usable_changes(params=changed)

    def _update_changed_options(self):
        """Diff want vs. have; record changes and report whether any exist."""
        diff = Difference(self.want, self.have)
        changed = dict()
        for param in self.get_updatables():
            change = diff.compare(param)
            if change is None:
                continue
            if isinstance(change, dict):
                changed.update(change)
            else:
                changed[param] = change
        if changed:
            self.changes = self.get_usable_changes(params=changed)
            return True
        return False

    def should_update(self):
        return self._update_changed_options()

    def exec_module(self):
        """Entry point: run the requested state transition and build the result."""
        changed = False
        result = dict()
        if self.want.state == "present":
            changed = self.present()
        elif self.want.state == "absent":
            changed = self.absent()
        reportable = self.get_reportable_changes(params=self.changes.to_return())
        result.update(**reportable.to_return())
        result.update(dict(changed=changed))
        self._announce_deprecations(result)
        return result

    def _announce_deprecations(self, result):
        # NOTE(review): deprecations are routed through self.client.module —
        # confirm the client object exposes the AnsibleModule here.
        for warning in result.pop('__warnings', []):
            self.client.module.deprecate(
                msg=warning['msg'],
                version=warning['version']
            )

    def _validate_creation_parameters(self):
        # Hook for subclasses; no common validation at the base level.
        pass

    def present(self):
        if self.exists():
            return self.update()
        return self.create()

    def update(self):
        self.have = self.read_current_from_device()
        if not self.should_update():
            return False
        if self.module.check_mode:
            return True
        self.update_on_device()
        return True

    def remove(self):
        if self.module.check_mode:
            return True
        self.remove_from_device()
        if self.exists():
            raise F5ModuleError("Failed to delete the resource.")
        return True

    def create(self):
        self._validate_creation_parameters()
        self._set_changed_options()
        if self.module.check_mode:
            return True
        self.create_on_device()
        return True

    def absent(self):
        if self.exists():
            return self.remove()
        return False
class V1Manager(BaseManager):
    """Manages ``remote-syslog`` log destinations.
    Reads module input through the V1 parameter classes, which still
    honor the deprecated ``syslog_settings`` suboptions.
    """
    def __init__(self, *args, **kwargs):
        super(V1Manager, self).__init__(*args, **kwargs)
        self.want = self.get_module_params(params=self.module.params)
        self.have = self.get_api_params()
        self.changes = self.get_usable_changes()
    def _validate_creation_parameters(self):
        """Apply creation-time defaults and required-parameter checks."""
        # bsd-syslog is the documented default format for new destinations.
        if self.want.syslog_format is None:
            self.want.update({'syslog_format': 'bsd-syslog'})
        if self.want.forward_to is None:
            raise F5ModuleError(
                "'forward_to' is required when creating a new remote-syslog destination."
            )
    # TODO(In 2.12, these get_* methods should no longer be needed)
    def get_reportable_changes(self, params=None):
        """Wrap ``params`` in the V1 reportable-changes class."""
        if params:
            return V1ReportableChanges(params=params)
        return V1ReportableChanges()
    def get_usable_changes(self, params=None):
        """Wrap ``params`` in the V1 usable-changes class."""
        if params:
            return V1UsableChanges(params=params)
        return V1UsableChanges()
    def get_returnables(self):
        return V1ApiParameters.returnables
    def get_updatables(self):
        return V1ApiParameters.updatables
    def get_module_params(self, params=None):
        """Wrap ``params`` in the V1 module-parameters class."""
        if params:
            return V1ModuleParameters(params=params)
        return V1ModuleParameters()
    def get_api_params(self, params=None):
        """Wrap ``params`` in the V1 API-parameters class."""
        if params:
            return V1ApiParameters(params=params)
        return V1ApiParameters()
    def exists(self):
        """Return True when the remote-syslog destination exists on the device."""
        uri = "https://{0}:{1}/mgmt/tm/sys/log-config/destination/remote-syslog/{2}".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            transform_name(self.want.partition, self.want.name)
        )
        resp = self.client.api.get(uri)
        try:
            response = resp.json()
        except ValueError:
            # Non-JSON payload; treat the resource as absent.
            return False
        if resp.status == 404 or 'code' in response and response['code'] == 404:
            return False
        return True
    def create_on_device(self):
        """POST the new destination to the device; raise F5ModuleError on 400/403."""
        params = self.changes.api_params()
        params['name'] = self.want.name
        params['partition'] = self.want.partition
        uri = "https://{0}:{1}/mgmt/tm/sys/log-config/destination/remote-syslog/".format(
            self.client.provider['server'],
            self.client.provider['server_port']
        )
        resp = self.client.api.post(uri, json=params)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] in [400, 403]:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)
    def update_on_device(self):
        """PATCH only the changed attributes onto the existing destination."""
        params = self.changes.api_params()
        uri = "https://{0}:{1}/mgmt/tm/sys/log-config/destination/remote-syslog/{2}".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            transform_name(self.want.partition, self.want.name)
        )
        resp = self.client.api.patch(uri, json=params)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] == 400:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)
    def remove_from_device(self):
        """DELETE the destination; returns True on HTTP 200."""
        uri = "https://{0}:{1}/mgmt/tm/sys/log-config/destination/remote-syslog/{2}".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            transform_name(self.want.partition, self.want.name)
        )
        resp = self.client.api.delete(uri)
        if resp.status == 200:
            return True
    def read_current_from_device(self):
        """GET the current config and wrap it in V1ApiParameters."""
        uri = "https://{0}:{1}/mgmt/tm/sys/log-config/destination/remote-syslog/{2}".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            transform_name(self.want.partition, self.want.name)
        )
        resp = self.client.api.get(uri)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] == 400:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)
        # The REST payload carries no type field; inject it for diffing.
        response['type'] = 'remote-syslog'
        return V1ApiParameters(params=response)
class V2Manager(BaseManager):
    """Manages ``remote-high-speed-log`` log destinations.
    Reads module input through the V2 parameter classes, which still
    honor the deprecated ``pool_settings`` suboptions.
    """
    def __init__(self, *args, **kwargs):
        super(V2Manager, self).__init__(*args, **kwargs)
        self.want = self.get_module_params(params=self.module.params)
        self.have = self.get_api_params()
        self.changes = self.get_usable_changes()
    def get_reportable_changes(self, params=None):
        """Wrap ``params`` in the V2 reportable-changes class."""
        if params:
            return V2ReportableChanges(params=params)
        return V2ReportableChanges()
    def get_usable_changes(self, params=None):
        """Wrap ``params`` in the V2 usable-changes class."""
        if params:
            return V2UsableChanges(params=params)
        return V2UsableChanges()
    def _validate_creation_parameters(self):
        """Apply creation-time defaults and required-parameter checks."""
        # tcp / adaptive are the documented defaults for new destinations.
        if self.want.protocol is None:
            self.want.update({'protocol': 'tcp'})
        if self.want.distribution is None:
            self.want.update({'distribution': 'adaptive'})
        if self.want.pool is None:
            raise F5ModuleError(
                "'pool' is required when creating a new remote-high-speed-log destination."
            )
    def get_returnables(self):
        return V2ApiParameters.returnables
    def get_updatables(self):
        return V2ApiParameters.updatables
    def get_module_params(self, params=None):
        """Wrap ``params`` in the V2 module-parameters class."""
        if params:
            return V2ModuleParameters(params=params)
        return V2ModuleParameters()
    def get_api_params(self, params=None):
        """Wrap ``params`` in the V2 API-parameters class."""
        if params:
            return V2ApiParameters(params=params)
        return V2ApiParameters()
    def exists(self):
        """Return True when the remote-high-speed-log destination exists."""
        uri = "https://{0}:{1}/mgmt/tm/sys/log-config/destination/remote-high-speed-log/{2}".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            transform_name(self.want.partition, self.want.name)
        )
        resp = self.client.api.get(uri)
        try:
            response = resp.json()
        except ValueError:
            # Non-JSON payload; treat the resource as absent.
            return False
        if resp.status == 404 or 'code' in response and response['code'] == 404:
            return False
        return True
    def create_on_device(self):
        """POST the new destination to the device; raise F5ModuleError on 400/403."""
        params = self.changes.api_params()
        params['name'] = self.want.name
        params['partition'] = self.want.partition
        uri = "https://{0}:{1}/mgmt/tm/sys/log-config/destination/remote-high-speed-log/".format(
            self.client.provider['server'],
            self.client.provider['server_port']
        )
        resp = self.client.api.post(uri, json=params)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] in [400, 403]:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)
    def update_on_device(self):
        """PATCH only the changed attributes onto the existing destination."""
        params = self.changes.api_params()
        uri = "https://{0}:{1}/mgmt/tm/sys/log-config/destination/remote-high-speed-log/{2}".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            transform_name(self.want.partition, self.want.name)
        )
        resp = self.client.api.patch(uri, json=params)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] == 400:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)
    def remove_from_device(self):
        """DELETE the destination; returns True on HTTP 200."""
        uri = "https://{0}:{1}/mgmt/tm/sys/log-config/destination/remote-high-speed-log/{2}".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            transform_name(self.want.partition, self.want.name)
        )
        resp = self.client.api.delete(uri)
        if resp.status == 200:
            return True
    def read_current_from_device(self):
        """GET the current config and wrap it in V2ApiParameters."""
        uri = "https://{0}:{1}/mgmt/tm/sys/log-config/destination/remote-high-speed-log/{2}".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            transform_name(self.want.partition, self.want.name)
        )
        resp = self.client.api.get(uri)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] == 400:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)
        # The REST payload carries no type field; inject it for diffing.
        response['type'] = 'remote-high-speed-log'
        return V2ApiParameters(params=response)
class V3Manager(BaseManager):
    """Manages ``arcsight`` log destinations using the V3 parameter classes."""
    def __init__(self, *args, **kwargs):
        super(V3Manager, self).__init__(*args, **kwargs)
        self.want = self.get_module_params(params=self.module.params)
        self.have = self.get_api_params()
        self.changes = self.get_usable_changes()
    def get_reportable_changes(self, params=None):
        """Wrap ``params`` in the V3 reportable-changes class."""
        if params:
            return V3ReportableChanges(params=params)
        return V3ReportableChanges()
    def get_usable_changes(self, params=None):
        """Wrap ``params`` in the V3 usable-changes class."""
        if params:
            return V3UsableChanges(params=params)
        return V3UsableChanges()
    def _validate_creation_parameters(self):
        """Require 'forward_to' when creating an arcsight destination."""
        if self.want.forward_to is None:
            raise F5ModuleError(
                "'forward_to' is required when creating a new arcsight destination."
            )
    def get_returnables(self):
        return V3ApiParameters.returnables
    def get_updatables(self):
        return V3ApiParameters.updatables
    def get_module_params(self, params=None):
        """Wrap ``params`` in the V3 module-parameters class."""
        if params:
            return V3ModuleParameters(params=params)
        return V3ModuleParameters()
    def get_api_params(self, params=None):
        """Wrap ``params`` in the V3 API-parameters class."""
        if params:
            return V3ApiParameters(params=params)
        return V3ApiParameters()
    def exists(self):
        """Return True when the arcsight destination exists on the device."""
        uri = "https://{0}:{1}/mgmt/tm/sys/log-config/destination/arcsight/{2}".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            transform_name(self.want.partition, self.want.name)
        )
        resp = self.client.api.get(uri)
        try:
            response = resp.json()
        except ValueError:
            # Non-JSON payload; treat the resource as absent.
            return False
        if resp.status == 404 or 'code' in response and response['code'] == 404:
            return False
        return True
    def create_on_device(self):
        """POST the new destination to the device; raise F5ModuleError on 400/403."""
        params = self.changes.api_params()
        params['name'] = self.want.name
        params['partition'] = self.want.partition
        uri = "https://{0}:{1}/mgmt/tm/sys/log-config/destination/arcsight/".format(
            self.client.provider['server'],
            self.client.provider['server_port']
        )
        resp = self.client.api.post(uri, json=params)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] in [400, 403]:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)
    def update_on_device(self):
        """PATCH only the changed attributes onto the existing destination."""
        params = self.changes.api_params()
        uri = "https://{0}:{1}/mgmt/tm/sys/log-config/destination/arcsight/{2}".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            transform_name(self.want.partition, self.want.name)
        )
        resp = self.client.api.patch(uri, json=params)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] == 400:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)
    def remove_from_device(self):
        """DELETE the destination; returns True on HTTP 200."""
        uri = "https://{0}:{1}/mgmt/tm/sys/log-config/destination/arcsight/{2}".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            transform_name(self.want.partition, self.want.name)
        )
        resp = self.client.api.delete(uri)
        if resp.status == 200:
            return True
    def read_current_from_device(self):
        """GET the current config and wrap it in V3ApiParameters."""
        uri = "https://{0}:{1}/mgmt/tm/sys/log-config/destination/arcsight/{2}".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            transform_name(self.want.partition, self.want.name)
        )
        resp = self.client.api.get(uri)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] == 400:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)
        # The REST payload carries no type field; inject it for diffing.
        response['type'] = 'arcsight'
        return V3ApiParameters(params=response)
class V4Manager(BaseManager):
"""Manager for Splunk
Do not worry about the usage of V3 classes in this V4 manager.
In Ansible 2.12, the Parameter classes will undergo a rename
because of parameters being deprecated.
The correct Parameter classes to use in this class are the
V3 Parameter classes.
"""
def __init__(self, *args, **kwargs):
super(V4Manager, self).__init__(*args, **kwargs)
self.want = self.get_module_params(params=self.module.params)
self.have = self.get_api_params()
self.changes = self.get_usable_changes()
def get_reportable_changes(self, params=None):
if params:
return V3ReportableChanges(params=params)
return V3ReportableChanges()
def get_usable_changes(self, params=None):
if params:
return V3UsableChanges(params=params)
return V3UsableChanges()
def _validate_creation_parameters(self):
if self.want.forward_to is None:
raise F5ModuleError(
"'forward_to' is required when creating a new splunk destination."
)
def get_returnables(self):
return V3ApiParameters.returnables
def get_updatables(self):
return V3ApiParameters.updatables
def get_module_params(self, params=None):
if params:
return V3ModuleParameters(params=params)
return V3ModuleParameters()
def get_api_params(self, params=None):
if params:
return V3ApiParameters(params=params)
return V3ApiParameters()
def exists(self):
uri = "https://{0}:{1}/mgmt/tm/sys/log-config/destination/splunk/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError:
return False
if resp.status == 404 or 'code' in response and response['code'] == 404:
return False
return True
def create_on_device(self):
params = self.changes.api_params()
params['name'] = self.want.name
params['partition'] = self.want.partition
uri = "https://{0}:{1}/mgmt/tm/sys/log-config/destination/splunk/".format(
self.client.provider['server'],
self.client.provider['server_port']
)
resp = self.client.api.post(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] in [400, 403]:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
def update_on_device(self):
params = self.changes.api_params()
uri = "https://{0}:{1}/mgmt/tm/sys/log-config/destination/splunk/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.patch(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
def remove_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/sys/log-config/destination/splunk/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.delete(uri)
if resp.status == 200:
return True
def read_current_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/sys/log-config/destination/splunk/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
response['type'] = 'splunk'
return V3ApiParameters(params=response)
class V5Manager(BaseManager):
"""Manager for Management Port
Do not worry about the usage of V3 classes in this V5 manager.
In Ansible 2.12, the Parameter classes will undergo a rename
because of parameters being deprecated.
The correct Parameter classes to use in this class are the
V3 Parameter classes.
"""
def __init__(self, *args, **kwargs):
super(V5Manager, self).__init__(*args, **kwargs)
self.want = self.get_module_params(params=self.module.params)
self.have = self.get_api_params()
self.changes = self.get_usable_changes()
def get_reportable_changes(self, params=None):
if params:
return V3ReportableChanges(params=params)
return V3ReportableChanges()
def get_usable_changes(self, params=None):
if params:
return V3UsableChanges(params=params)
return V3UsableChanges()
def _validate_creation_parameters(self):
if self.want.address is None:
raise F5ModuleError(
"'address' is required when creating a new management-port destination."
)
if self.want.port is None:
raise F5ModuleError(
"'port' is required when creating a new management-port destination."
)
def get_returnables(self):
return V3ApiParameters.returnables
def get_updatables(self):
return V3ApiParameters.updatables
def get_module_params(self, params=None):
if params:
return V3ModuleParameters(params=params)
return V3ModuleParameters()
def get_api_params(self, params=None):
if params:
return V3ApiParameters(params=params)
return V3ApiParameters()
def exists(self):
uri = "https://{0}:{1}/mgmt/tm/sys/log-config/destination/management-port/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError:
return False
if resp.status == 404 or 'code' in response and response['code'] == 404:
return False
return True
def create_on_device(self):
params = self.changes.api_params()
params['name'] = self.want.name
params['partition'] = self.want.partition
uri = "https://{0}:{1}/mgmt/tm/sys/log-config/destination/management-port/".format(
self.client.provider['server'],
self.client.provider['server_port']
)
resp = self.client.api.post(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] in [400, 403]:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
def update_on_device(self):
params = self.changes.api_params()
uri = "https://{0}:{1}/mgmt/tm/sys/log-config/destination/management-port/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.patch(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
def remove_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/sys/log-config/destination/management-port/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.delete(uri)
if resp.status == 200:
return True
def read_current_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/sys/log-config/destination/management-port/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
response['type'] = 'management-port'
return V3ApiParameters(params=response)
class V6Manager(BaseManager):
"""Manager for IPFIX
Do not worry about the usage of V3 classes in this V6 manager.
In Ansible 2.12, the Parameter classes will undergo a rename
because of parameters being deprecated.
The correct Parameter classes to use in this class are the
V3 Parameter classes.
"""
def __init__(self, *args, **kwargs):
super(V6Manager, self).__init__(*args, **kwargs)
self.want = self.get_module_params(params=self.module.params)
self.have = self.get_api_params()
self.changes = self.get_usable_changes()
def get_reportable_changes(self, params=None):
if params:
return V3ReportableChanges(params=params)
return V3ReportableChanges()
def get_usable_changes(self, params=None):
if params:
return V3UsableChanges(params=params)
return V3UsableChanges()
def _validate_creation_parameters(self):
if self.want.protocol is None:
raise F5ModuleError(
"'protocol' is required when creating a new ipfix destination."
)
if self.want.pool is None:
raise F5ModuleError(
"'port' is required when creating a new ipfix destination."
)
if self.want.transport_profile is None:
raise F5ModuleError(
"'transport_profile' is required when creating a new ipfix destination."
)
def get_returnables(self):
return V3ApiParameters.returnables
def get_updatables(self):
return V3ApiParameters.updatables
def get_module_params(self, params=None):
if params:
return V3ModuleParameters(params=params)
return V3ModuleParameters()
def get_api_params(self, params=None):
if params:
return V3ApiParameters(params=params)
return V3ApiParameters()
def exists(self):
uri = "https://{0}:{1}/mgmt/tm/sys/log-config/destination/ipfix/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError:
return False
if resp.status == 404 or 'code' in response and response['code'] == 404:
return False
return True
def create_on_device(self):
params = self.changes.api_params()
params['name'] = self.want.name
params['partition'] = self.want.partition
uri = "https://{0}:{1}/mgmt/tm/sys/log-config/destination/ipfix/".format(
self.client.provider['server'],
self.client.provider['server_port']
)
resp = self.client.api.post(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] in [400, 403]:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
def update_on_device(self):
params = self.changes.api_params()
uri = "https://{0}:{1}/mgmt/tm/sys/log-config/destination/ipfix/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.patch(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
def remove_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/sys/log-config/destination/ipfix/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.delete(uri)
if resp.status == 200:
return True
def read_current_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/sys/log-config/destination/ipfix/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
response['type'] = 'ipfix'
return V3ApiParameters(params=response)
class ModuleManager(object):
def __init__(self, *args, **kwargs):
self.kwargs = kwargs
self.module = kwargs.get('module', None)
def exec_module(self):
if self.module.params['type'] == 'remote-syslog':
manager = self.get_manager('v1')
elif self.module.params['type'] == 'remote-high-speed-log':
manager = self.get_manager('v2')
elif self.module.params['type'] == 'arcsight':
manager = self.get_manager('v3')
elif self.module.params['type'] == 'splunk':
manager = self.get_manager('v4')
elif self.module.params['type'] == 'management-port':
manager = self.get_manager('v5')
elif self.module.params['type'] == 'ipfix':
manager = self.get_manager('v6')
else:
raise F5ModuleError(
"Unknown type specified."
)
result = manager.exec_module()
return result
def get_manager(self, type):
if type == 'v1':
return V1Manager(**self.kwargs)
elif type == 'v2':
return V2Manager(**self.kwargs)
elif type == 'v3':
return V3Manager(**self.kwargs)
elif type == 'v4':
return V4Manager(**self.kwargs)
elif type == 'v5':
return V5Manager(**self.kwargs)
elif type == 'v6':
return V6Manager(**self.kwargs)
class ArgumentSpec(object):
def __init__(self):
self.supports_check_mode = True
argument_spec = dict(
name=dict(required=True),
type=dict(
required=True,
choices=[
'arcsight',
'remote-high-speed-log',
'remote-syslog',
'splunk',
'management-port',
'ipfix',
]
),
description=dict(),
syslog_format=dict(
choices=[
'bsd-syslog',
'syslog',
'legacy-bigip',
'rfc5424',
'rfc3164'
]
),
forward_to=dict(),
pool=dict(),
protocol=dict(
choices=['tcp', 'udp', 'ipfix', 'netflow-9']
),
distribution=dict(
choices=[
'adaptive',
'balanced',
'replicated',
]
),
state=dict(
default='present',
choices=['present', 'absent']
),
partition=dict(
default='Common',
fallback=(env_fallback, ['F5_PARTITION'])
),
address=dict(),
port=dict(type='int'),
transport_profile=dict(),
server_ssl_profile=dict(),
template_retransmit_interval=dict(type='int'),
template_delete_delay=dict(type='int'),
# Deprecated settings
pool_settings=dict(
type='dict',
suboptions=dict(
pool=dict(),
protocol=dict(
choices=['tcp', 'udp']
),
distribution=dict(
choices=[
'adaptive',
'balanced',
'replicated',
]
)
),
removed_in_version=2.12,
),
syslog_settings=dict(
type='dict',
suboptions=dict(
syslog_format=dict(
choices=[
'bsd-syslog',
'syslog',
'legacy-bigip',
'rfc5424',
'rfc3164'
]
),
forward_to=dict()
),
removed_in_version=2.12,
),
)
self.argument_spec = {}
self.argument_spec.update(f5_argument_spec)
self.argument_spec.update(argument_spec)
self.mutually_exclusive = [
['syslog_settings', 'syslog_format'],
['syslog_settings', 'forward_to'],
['pool_settings', 'pool'],
['pool_settings', 'protocol'],
['pool_settings', 'distribution'],
]
def main():
spec = ArgumentSpec()
module = AnsibleModule(
argument_spec=spec.argument_spec,
supports_check_mode=spec.supports_check_mode
)
client = F5RestClient(**module.params)
try:
mm = ModuleManager(module=module, client=client)
results = mm.exec_module()
cleanup_tokens(client)
exit_json(module, results, client)
except F5ModuleError as ex:
cleanup_tokens(client)
fail_json(module, ex, client)
if __name__ == '__main__':
main()
| gpl-3.0 |
MalloyPower/parsing-python | front-end/testsuite-python-lib/Python-2.4.3/Lib/test/test_weakref.py | 7 | 37110 | import gc
import sys
import unittest
import UserList
import weakref
from test import test_support
class C:
def method(self):
pass
class Callable:
bar = None
def __call__(self, x):
self.bar = x
def create_function():
def f(): pass
return f
def create_bound_method():
return C().method
def create_unbound_method():
return C.method
class TestBase(unittest.TestCase):
def setUp(self):
self.cbcalled = 0
def callback(self, ref):
self.cbcalled += 1
class ReferencesTestCase(TestBase):
def test_basic_ref(self):
self.check_basic_ref(C)
self.check_basic_ref(create_function)
self.check_basic_ref(create_bound_method)
self.check_basic_ref(create_unbound_method)
# Just make sure the tp_repr handler doesn't raise an exception.
# Live reference:
o = C()
wr = weakref.ref(o)
`wr`
# Dead reference:
del o
`wr`
def test_basic_callback(self):
self.check_basic_callback(C)
self.check_basic_callback(create_function)
self.check_basic_callback(create_bound_method)
self.check_basic_callback(create_unbound_method)
def test_multiple_callbacks(self):
o = C()
ref1 = weakref.ref(o, self.callback)
ref2 = weakref.ref(o, self.callback)
del o
self.assert_(ref1() is None,
"expected reference to be invalidated")
self.assert_(ref2() is None,
"expected reference to be invalidated")
self.assert_(self.cbcalled == 2,
"callback not called the right number of times")
def test_multiple_selfref_callbacks(self):
# Make sure all references are invalidated before callbacks are called
#
# What's important here is that we're using the first
# reference in the callback invoked on the second reference
# (the most recently created ref is cleaned up first). This
# tests that all references to the object are invalidated
# before any of the callbacks are invoked, so that we only
# have one invocation of _weakref.c:cleanup_helper() active
# for a particular object at a time.
#
def callback(object, self=self):
self.ref()
c = C()
self.ref = weakref.ref(c, callback)
ref1 = weakref.ref(c, callback)
del c
def test_proxy_ref(self):
o = C()
o.bar = 1
ref1 = weakref.proxy(o, self.callback)
ref2 = weakref.proxy(o, self.callback)
del o
def check(proxy):
proxy.bar
self.assertRaises(weakref.ReferenceError, check, ref1)
self.assertRaises(weakref.ReferenceError, check, ref2)
self.assertRaises(weakref.ReferenceError, bool, weakref.proxy(C()))
self.assert_(self.cbcalled == 2)
def check_basic_ref(self, factory):
o = factory()
ref = weakref.ref(o)
self.assert_(ref() is not None,
"weak reference to live object should be live")
o2 = ref()
self.assert_(o is o2,
"<ref>() should return original object if live")
def check_basic_callback(self, factory):
self.cbcalled = 0
o = factory()
ref = weakref.ref(o, self.callback)
del o
self.assert_(self.cbcalled == 1,
"callback did not properly set 'cbcalled'")
self.assert_(ref() is None,
"ref2 should be dead after deleting object reference")
def test_ref_reuse(self):
o = C()
ref1 = weakref.ref(o)
# create a proxy to make sure that there's an intervening creation
# between these two; it should make no difference
proxy = weakref.proxy(o)
ref2 = weakref.ref(o)
self.assert_(ref1 is ref2,
"reference object w/out callback should be re-used")
o = C()
proxy = weakref.proxy(o)
ref1 = weakref.ref(o)
ref2 = weakref.ref(o)
self.assert_(ref1 is ref2,
"reference object w/out callback should be re-used")
self.assert_(weakref.getweakrefcount(o) == 2,
"wrong weak ref count for object")
del proxy
self.assert_(weakref.getweakrefcount(o) == 1,
"wrong weak ref count for object after deleting proxy")
def test_proxy_reuse(self):
o = C()
proxy1 = weakref.proxy(o)
ref = weakref.ref(o)
proxy2 = weakref.proxy(o)
self.assert_(proxy1 is proxy2,
"proxy object w/out callback should have been re-used")
def test_basic_proxy(self):
o = C()
self.check_proxy(o, weakref.proxy(o))
L = UserList.UserList()
p = weakref.proxy(L)
self.failIf(p, "proxy for empty UserList should be false")
p.append(12)
self.assertEqual(len(L), 1)
self.failUnless(p, "proxy for non-empty UserList should be true")
p[:] = [2, 3]
self.assertEqual(len(L), 2)
self.assertEqual(len(p), 2)
self.failUnless(3 in p,
"proxy didn't support __contains__() properly")
p[1] = 5
self.assertEqual(L[1], 5)
self.assertEqual(p[1], 5)
L2 = UserList.UserList(L)
p2 = weakref.proxy(L2)
self.assertEqual(p, p2)
## self.assertEqual(repr(L2), repr(p2))
L3 = UserList.UserList(range(10))
p3 = weakref.proxy(L3)
self.assertEqual(L3[:], p3[:])
self.assertEqual(L3[5:], p3[5:])
self.assertEqual(L3[:5], p3[:5])
self.assertEqual(L3[2:5], p3[2:5])
# The PyWeakref_* C API is documented as allowing either NULL or
# None as the value for the callback, where either means "no
# callback". The "no callback" ref and proxy objects are supposed
# to be shared so long as they exist by all callers so long as
# they are active. In Python 2.3.3 and earlier, this guaranttee
# was not honored, and was broken in different ways for
# PyWeakref_NewRef() and PyWeakref_NewProxy(). (Two tests.)
def test_shared_ref_without_callback(self):
self.check_shared_without_callback(weakref.ref)
def test_shared_proxy_without_callback(self):
self.check_shared_without_callback(weakref.proxy)
def check_shared_without_callback(self, makeref):
o = Object(1)
p1 = makeref(o, None)
p2 = makeref(o, None)
self.assert_(p1 is p2, "both callbacks were None in the C API")
del p1, p2
p1 = makeref(o)
p2 = makeref(o, None)
self.assert_(p1 is p2, "callbacks were NULL, None in the C API")
del p1, p2
p1 = makeref(o)
p2 = makeref(o)
self.assert_(p1 is p2, "both callbacks were NULL in the C API")
del p1, p2
p1 = makeref(o, None)
p2 = makeref(o)
self.assert_(p1 is p2, "callbacks were None, NULL in the C API")
def test_callable_proxy(self):
o = Callable()
ref1 = weakref.proxy(o)
self.check_proxy(o, ref1)
self.assert_(type(ref1) is weakref.CallableProxyType,
"proxy is not of callable type")
ref1('twinkies!')
self.assert_(o.bar == 'twinkies!',
"call through proxy not passed through to original")
ref1(x='Splat.')
self.assert_(o.bar == 'Splat.',
"call through proxy not passed through to original")
# expect due to too few args
self.assertRaises(TypeError, ref1)
# expect due to too many args
self.assertRaises(TypeError, ref1, 1, 2, 3)
def check_proxy(self, o, proxy):
o.foo = 1
self.assert_(proxy.foo == 1,
"proxy does not reflect attribute addition")
o.foo = 2
self.assert_(proxy.foo == 2,
"proxy does not reflect attribute modification")
del o.foo
self.assert_(not hasattr(proxy, 'foo'),
"proxy does not reflect attribute removal")
proxy.foo = 1
self.assert_(o.foo == 1,
"object does not reflect attribute addition via proxy")
proxy.foo = 2
self.assert_(
o.foo == 2,
"object does not reflect attribute modification via proxy")
del proxy.foo
self.assert_(not hasattr(o, 'foo'),
"object does not reflect attribute removal via proxy")
def test_proxy_deletion(self):
# Test clearing of SF bug #762891
class Foo:
result = None
def __delitem__(self, accessor):
self.result = accessor
g = Foo()
f = weakref.proxy(g)
del f[0]
self.assertEqual(f.result, 0)
def test_proxy_bool(self):
# Test clearing of SF bug #1170766
class List(list): pass
lyst = List()
self.assertEqual(bool(weakref.proxy(lyst)), bool(lyst))
def test_getweakrefcount(self):
o = C()
ref1 = weakref.ref(o)
ref2 = weakref.ref(o, self.callback)
self.assert_(weakref.getweakrefcount(o) == 2,
"got wrong number of weak reference objects")
proxy1 = weakref.proxy(o)
proxy2 = weakref.proxy(o, self.callback)
self.assert_(weakref.getweakrefcount(o) == 4,
"got wrong number of weak reference objects")
del ref1, ref2, proxy1, proxy2
self.assert_(weakref.getweakrefcount(o) == 0,
"weak reference objects not unlinked from"
" referent when discarded.")
# assumes ints do not support weakrefs
self.assert_(weakref.getweakrefcount(1) == 0,
"got wrong number of weak reference objects for int")
def test_getweakrefs(self):
o = C()
ref1 = weakref.ref(o, self.callback)
ref2 = weakref.ref(o, self.callback)
del ref1
self.assert_(weakref.getweakrefs(o) == [ref2],
"list of refs does not match")
o = C()
ref1 = weakref.ref(o, self.callback)
ref2 = weakref.ref(o, self.callback)
del ref2
self.assert_(weakref.getweakrefs(o) == [ref1],
"list of refs does not match")
del ref1
self.assert_(weakref.getweakrefs(o) == [],
"list of refs not cleared")
# assumes ints do not support weakrefs
self.assert_(weakref.getweakrefs(1) == [],
"list of refs does not match for int")
def test_newstyle_number_ops(self):
class F(float):
pass
f = F(2.0)
p = weakref.proxy(f)
self.assert_(p + 1.0 == 3.0)
self.assert_(1.0 + p == 3.0) # this used to SEGV
def test_callbacks_protected(self):
# Callbacks protected from already-set exceptions?
# Regression test for SF bug #478534.
class BogusError(Exception):
pass
data = {}
def remove(k):
del data[k]
def encapsulate():
f = lambda : ()
data[weakref.ref(f, remove)] = None
raise BogusError
try:
encapsulate()
except BogusError:
pass
else:
self.fail("exception not properly restored")
try:
encapsulate()
except BogusError:
pass
else:
self.fail("exception not properly restored")
def test_sf_bug_840829(self):
# "weakref callbacks and gc corrupt memory"
# subtype_dealloc erroneously exposed a new-style instance
# already in the process of getting deallocated to gc,
# causing double-deallocation if the instance had a weakref
# callback that triggered gc.
# If the bug exists, there probably won't be an obvious symptom
# in a release build. In a debug build, a segfault will occur
# when the second attempt to remove the instance from the "list
# of all objects" occurs.
import gc
class C(object):
pass
c = C()
wr = weakref.ref(c, lambda ignore: gc.collect())
del c
# There endeth the first part. It gets worse.
del wr
c1 = C()
c1.i = C()
wr = weakref.ref(c1.i, lambda ignore: gc.collect())
c2 = C()
c2.c1 = c1
del c1 # still alive because c2 points to it
# Now when subtype_dealloc gets called on c2, it's not enough just
# that c2 is immune from gc while the weakref callbacks associated
# with c2 execute (there are none in this 2nd half of the test, btw).
# subtype_dealloc goes on to call the base classes' deallocs too,
# so any gc triggered by weakref callbacks associated with anything
# torn down by a base class dealloc can also trigger double
# deallocation of c2.
del c2
def test_callback_in_cycle_1(self):
import gc
class J(object):
pass
class II(object):
def acallback(self, ignore):
self.J
I = II()
I.J = J
I.wr = weakref.ref(J, I.acallback)
# Now J and II are each in a self-cycle (as all new-style class
# objects are, since their __mro__ points back to them). I holds
# both a weak reference (I.wr) and a strong reference (I.J) to class
# J. I is also in a cycle (I.wr points to a weakref that references
# I.acallback). When we del these three, they all become trash, but
# the cycles prevent any of them from getting cleaned up immediately.
# Instead they have to wait for cyclic gc to deduce that they're
# trash.
#
# gc used to call tp_clear on all of them, and the order in which
# it does that is pretty accidental. The exact order in which we
# built up these things manages to provoke gc into running tp_clear
# in just the right order (I last). Calling tp_clear on II leaves
# behind an insane class object (its __mro__ becomes NULL). Calling
# tp_clear on J breaks its self-cycle, but J doesn't get deleted
# just then because of the strong reference from I.J. Calling
# tp_clear on I starts to clear I's __dict__, and just happens to
# clear I.J first -- I.wr is still intact. That removes the last
# reference to J, which triggers the weakref callback. The callback
# tries to do "self.J", and instances of new-style classes look up
# attributes ("J") in the class dict first. The class (II) wants to
# search II.__mro__, but that's NULL. The result was a segfault in
# a release build, and an assert failure in a debug build.
del I, J, II
gc.collect()
def test_callback_in_cycle_2(self):
import gc
# This is just like test_callback_in_cycle_1, except that II is an
# old-style class. The symptom is different then: an instance of an
# old-style class looks in its own __dict__ first. 'J' happens to
# get cleared from I.__dict__ before 'wr', and 'J' was never in II's
# __dict__, so the attribute isn't found. The difference is that
# the old-style II doesn't have a NULL __mro__ (it doesn't have any
# __mro__), so no segfault occurs. Instead it got:
# test_callback_in_cycle_2 (__main__.ReferencesTestCase) ...
# Exception exceptions.AttributeError:
# "II instance has no attribute 'J'" in <bound method II.acallback
# of <?.II instance at 0x00B9B4B8>> ignored
class J(object):
pass
class II:
def acallback(self, ignore):
self.J
I = II()
I.J = J
I.wr = weakref.ref(J, I.acallback)
del I, J, II
gc.collect()
def test_callback_in_cycle_3(self):
import gc
# This one broke the first patch that fixed the last two. In this
# case, the objects reachable from the callback aren't also reachable
# from the object (c1) *triggering* the callback: you can get to
# c1 from c2, but not vice-versa. The result was that c2's __dict__
# got tp_clear'ed by the time the c2.cb callback got invoked.
class C:
def cb(self, ignore):
self.me
self.c1
self.wr
c1, c2 = C(), C()
c2.me = c2
c2.c1 = c1
c2.wr = weakref.ref(c1, c2.cb)
del c1, c2
gc.collect()
def test_callback_in_cycle_4(self):
import gc
# Like test_callback_in_cycle_3, except c2 and c1 have different
# classes. c2's class (C) isn't reachable from c1 then, so protecting
# objects reachable from the dying object (c1) isn't enough to stop
# c2's class (C) from getting tp_clear'ed before c2.cb is invoked.
# The result was a segfault (C.__mro__ was NULL when the callback
# tried to look up self.me).
class C(object):
def cb(self, ignore):
self.me
self.c1
self.wr
class D:
pass
c1, c2 = D(), C()
c2.me = c2
c2.c1 = c1
c2.wr = weakref.ref(c1, c2.cb)
del c1, c2, C, D
gc.collect()
def test_callback_in_cycle_resurrection(self):
import gc
# Do something nasty in a weakref callback: resurrect objects
# from dead cycles. For this to be attempted, the weakref and
# its callback must also be part of the cyclic trash (else the
# objects reachable via the callback couldn't be in cyclic trash
# to begin with -- the callback would act like an external root).
# But gc clears trash weakrefs with callbacks early now, which
# disables the callbacks, so the callbacks shouldn't get called
# at all (and so nothing actually gets resurrected).
alist = []
class C(object):
def __init__(self, value):
self.attribute = value
def acallback(self, ignore):
alist.append(self.c)
c1, c2 = C(1), C(2)
c1.c = c2
c2.c = c1
c1.wr = weakref.ref(c2, c1.acallback)
c2.wr = weakref.ref(c1, c2.acallback)
def C_went_away(ignore):
alist.append("C went away")
wr = weakref.ref(C, C_went_away)
del c1, c2, C # make them all trash
self.assertEqual(alist, []) # del isn't enough to reclaim anything
gc.collect()
# c1.wr and c2.wr were part of the cyclic trash, so should have
# been cleared without their callbacks executing. OTOH, the weakref
# to C is bound to a function local (wr), and wasn't trash, so that
# callback should have been invoked when C went away.
self.assertEqual(alist, ["C went away"])
# The remaining weakref should be dead now (its callback ran).
self.assertEqual(wr(), None)
del alist[:]
gc.collect()
self.assertEqual(alist, [])
def test_callbacks_on_callback(self):
import gc
# Set up weakref callbacks *on* weakref callbacks.
alist = []
def safe_callback(ignore):
alist.append("safe_callback called")
class C(object):
def cb(self, ignore):
alist.append("cb called")
c, d = C(), C()
c.other = d
d.other = c
callback = c.cb
c.wr = weakref.ref(d, callback) # this won't trigger
d.wr = weakref.ref(callback, d.cb) # ditto
external_wr = weakref.ref(callback, safe_callback) # but this will
self.assert_(external_wr() is callback)
# The weakrefs attached to c and d should get cleared, so that
# C.cb is never called. But external_wr isn't part of the cyclic
# trash, and no cyclic trash is reachable from it, so safe_callback
# should get invoked when the bound method object callback (c.cb)
# -- which is itself a callback, and also part of the cyclic trash --
# gets reclaimed at the end of gc.
del callback, c, d, C
self.assertEqual(alist, []) # del isn't enough to clean up cycles
gc.collect()
self.assertEqual(alist, ["safe_callback called"])
self.assertEqual(external_wr(), None)
del alist[:]
gc.collect()
self.assertEqual(alist, [])
def test_gc_during_ref_creation(self):
self.check_gc_during_creation(weakref.ref)
def test_gc_during_proxy_creation(self):
self.check_gc_during_creation(weakref.proxy)
def check_gc_during_creation(self, makeref):
thresholds = gc.get_threshold()
gc.set_threshold(1, 1, 1)
gc.collect()
class A:
pass
def callback(*args):
pass
referenced = A()
a = A()
a.a = a
a.wr = makeref(referenced)
try:
# now make sure the object and the ref get labeled as
# cyclic trash:
a = A()
weakref.ref(referenced, callback)
finally:
gc.set_threshold(*thresholds)
class SubclassableWeakrefTestCase(unittest.TestCase):
    # Tests for subclasses of weakref.ref: carrying extra state,
    # overriding __call__, coexistence with plain refs, and __slots__.

    def test_subclass_refs(self):
        # A ref subclass can hold extra attributes and hook __call__.
        class MyRef(weakref.ref):
            def __init__(self, ob, callback=None, value=42):
                self.value = value
                super(MyRef, self).__init__(ob, callback)
            def __call__(self):
                self.called = True
                return super(MyRef, self).__call__()
        o = Object("foo")
        mr = MyRef(o, value=24)
        self.assert_(mr() is o)
        self.assert_(mr.called)
        self.assertEqual(mr.value, 24)
        del o
        self.assert_(mr() is None)
        self.assert_(mr.called)

    def test_subclass_refs_dont_replace_standard_refs(self):
        # A subclass ref and a plain ref to the same object are distinct;
        # getweakrefs() reports all of them (plain ref first).
        class MyRef(weakref.ref):
            pass
        o = Object(42)
        r1 = MyRef(o)
        r2 = weakref.ref(o)
        self.assert_(r1 is not r2)
        self.assertEqual(weakref.getweakrefs(o), [r2, r1])
        self.assertEqual(weakref.getweakrefcount(o), 2)
        r3 = MyRef(o)
        self.assertEqual(weakref.getweakrefcount(o), 3)
        refs = weakref.getweakrefs(o)
        self.assertEqual(len(refs), 3)
        self.assert_(r2 is refs[0])
        self.assert_(r1 in refs[1:])
        self.assert_(r3 in refs[1:])

    def test_subclass_refs_dont_conflate_callbacks(self):
        # Two subclass refs with different callbacks must stay separate
        # objects (no sharing of the callback slot).
        class MyRef(weakref.ref):
            pass
        o = Object(42)
        r1 = MyRef(o, id)
        r2 = MyRef(o, str)
        self.assert_(r1 is not r2)
        refs = weakref.getweakrefs(o)
        self.assert_(r1 in refs)
        self.assert_(r2 in refs)

    def test_subclass_refs_with_slots(self):
        # __slots__ subclasses work and must not grow a __dict__.
        class MyRef(weakref.ref):
            __slots__ = "slot1", "slot2"
            def __new__(type, ob, callback, slot1, slot2):
                return weakref.ref.__new__(type, ob, callback)
            def __init__(self, ob, callback, slot1, slot2):
                self.slot1 = slot1
                self.slot2 = slot2
            def meth(self):
                return self.slot1 + self.slot2
        o = Object(42)
        r = MyRef(o, None, "abc", "def")
        self.assertEqual(r.slot1, "abc")
        self.assertEqual(r.slot2, "def")
        self.assertEqual(r.meth(), "abcdef")
        self.failIf(hasattr(r, "__dict__"))
class Object:
    # Minimal weakly-referenceable test fixture; `arg` identifies the
    # instance and is echoed by repr().
    def __init__(self, arg):
        self.arg = arg
    def __repr__(self):
        return "<Object %r>" % self.arg
class MappingTestCase(TestBase):
    # Tests for WeakValueDictionary and WeakKeyDictionary: mapping
    # protocol behavior plus automatic entry removal when referents die.
    # NOTE: several tests deliberately shadow the builtin `dict` with a
    # local weak dictionary.

    # Number of fixture objects created by the make_weak_*_dict helpers.
    COUNT = 10

    def test_weak_values(self):
        #
        # This exercises d.copy(), d.items(), d[], del d[], len(d).
        #
        dict, objects = self.make_weak_valued_dict()
        for o in objects:
            self.assert_(weakref.getweakrefcount(o) == 1,
                         "wrong number of weak references to %r!" % o)
            self.assert_(o is dict[o.arg],
                         "wrong object returned by weak dict!")
        items1 = dict.items()
        items2 = dict.copy().items()
        items1.sort()
        items2.sort()
        self.assert_(items1 == items2,
                     "cloning of weak-valued dictionary did not work!")
        del items1, items2
        self.assert_(len(dict) == self.COUNT)
        # Dropping the last strong reference to a value must remove its
        # entry from the weak-valued dict.
        del objects[0]
        self.assert_(len(dict) == (self.COUNT - 1),
                     "deleting object did not cause dictionary update")
        del objects, o
        self.assert_(len(dict) == 0,
                     "deleting the values did not clear the dictionary")
        # regression on SF bug #447152:
        dict = weakref.WeakValueDictionary()
        self.assertRaises(KeyError, dict.__getitem__, 1)
        dict[2] = C()
        # The C() above is immediately collectible, so the entry is gone.
        self.assertRaises(KeyError, dict.__getitem__, 2)

    def test_weak_keys(self):
        #
        # This exercises d.copy(), d.items(), d[] = v, d[], del d[],
        # len(d), d.has_key().
        #
        dict, objects = self.make_weak_keyed_dict()
        for o in objects:
            self.assert_(weakref.getweakrefcount(o) == 1,
                         "wrong number of weak references to %r!" % o)
            self.assert_(o.arg is dict[o],
                         "wrong object returned by weak dict!")
        items1 = dict.items()
        items2 = dict.copy().items()
        self.assert_(set(items1) == set(items2),
                     "cloning of weak-keyed dictionary did not work!")
        del items1, items2
        self.assert_(len(dict) == self.COUNT)
        # Dropping the last strong reference to a key must remove its
        # entry from the weak-keyed dict.
        del objects[0]
        self.assert_(len(dict) == (self.COUNT - 1),
                     "deleting object did not cause dictionary update")
        del objects, o
        self.assert_(len(dict) == 0,
                     "deleting the keys did not clear the dictionary")
        o = Object(42)
        dict[o] = "What is the meaning of the universe?"
        self.assert_(dict.has_key(o))
        self.assert_(not dict.has_key(34))

    def test_weak_keyed_iters(self):
        dict, objects = self.make_weak_keyed_dict()
        self.check_iters(dict)

    def test_weak_valued_iters(self):
        dict, objects = self.make_weak_valued_dict()
        self.check_iters(dict)

    def check_iters(self, dict):
        # Each iterator flavor must visit exactly the items/keys/values
        # reported by the corresponding list-returning method.
        # item iterator:
        items = dict.items()
        for item in dict.iteritems():
            items.remove(item)
        self.assert_(len(items) == 0, "iteritems() did not touch all items")
        # key iterator, via __iter__():
        keys = dict.keys()
        for k in dict:
            keys.remove(k)
        self.assert_(len(keys) == 0, "__iter__() did not touch all keys")
        # key iterator, via iterkeys():
        keys = dict.keys()
        for k in dict.iterkeys():
            keys.remove(k)
        self.assert_(len(keys) == 0, "iterkeys() did not touch all keys")
        # value iterator:
        values = dict.values()
        for v in dict.itervalues():
            values.remove(v)
        self.assert_(len(values) == 0,
                     "itervalues() did not touch all values")

    def test_make_weak_keyed_dict_from_dict(self):
        # WeakKeyDictionary can be seeded from a regular dict.
        o = Object(3)
        dict = weakref.WeakKeyDictionary({o:364})
        self.assert_(dict[o] == 364)

    def test_make_weak_keyed_dict_from_weak_keyed_dict(self):
        # ... and from another WeakKeyDictionary.
        o = Object(3)
        dict = weakref.WeakKeyDictionary({o:364})
        dict2 = weakref.WeakKeyDictionary(dict)
        self.assert_(dict[o] == 364)

    def make_weak_keyed_dict(self):
        # Build a weak-keyed dict of COUNT Object keys; the returned
        # `objects` list holds the strong references keeping entries alive.
        dict = weakref.WeakKeyDictionary()
        objects = map(Object, range(self.COUNT))
        for o in objects:
            dict[o] = o.arg
        return dict, objects

    def make_weak_valued_dict(self):
        # Build a weak-valued dict of COUNT Object values; the returned
        # `objects` list holds the strong references keeping entries alive.
        dict = weakref.WeakValueDictionary()
        objects = map(Object, range(self.COUNT))
        for o in objects:
            dict[o.arg] = o
        return dict, objects

    def check_popitem(self, klass, key1, value1, key2, value2):
        # popitem() may yield the two entries in either order, so accept both.
        weakdict = klass()
        weakdict[key1] = value1
        weakdict[key2] = value2
        self.assert_(len(weakdict) == 2)
        k, v = weakdict.popitem()
        self.assert_(len(weakdict) == 1)
        if k is key1:
            self.assert_(v is value1)
        else:
            self.assert_(v is value2)
        k, v = weakdict.popitem()
        self.assert_(len(weakdict) == 0)
        if k is key1:
            self.assert_(v is value1)
        else:
            self.assert_(v is value2)

    def test_weak_valued_dict_popitem(self):
        self.check_popitem(weakref.WeakValueDictionary,
                           "key1", C(), "key2", C())

    def test_weak_keyed_dict_popitem(self):
        self.check_popitem(weakref.WeakKeyDictionary,
                           C(), "value 1", C(), "value 2")

    def check_setdefault(self, klass, key, value1, value2):
        # setdefault() must install value1 on first call and then keep
        # returning it, ignoring value2 on the second call.
        self.assert_(value1 is not value2,
                     "invalid test"
                     " -- value parameters must be distinct objects")
        weakdict = klass()
        o = weakdict.setdefault(key, value1)
        self.assert_(o is value1)
        self.assert_(weakdict.has_key(key))
        self.assert_(weakdict.get(key) is value1)
        self.assert_(weakdict[key] is value1)
        o = weakdict.setdefault(key, value2)
        self.assert_(o is value1)
        self.assert_(weakdict.has_key(key))
        self.assert_(weakdict.get(key) is value1)
        self.assert_(weakdict[key] is value1)

    def test_weak_valued_dict_setdefault(self):
        self.check_setdefault(weakref.WeakValueDictionary,
                              "key", C(), C())

    def test_weak_keyed_dict_setdefault(self):
        self.check_setdefault(weakref.WeakKeyDictionary,
                              C(), "value 1", "value 2")

    def check_update(self, klass, dict):
        #
        # This exercises d.update(), len(d), d.keys(), d.has_key(),
        # d.get(), d[].
        #
        weakdict = klass()
        weakdict.update(dict)
        self.assert_(len(weakdict) == len(dict))
        for k in weakdict.keys():
            self.assert_(dict.has_key(k),
                         "mysterious new key appeared in weak dict")
            v = dict.get(k)
            self.assert_(v is weakdict[k])
            self.assert_(v is weakdict.get(k))
        for k in dict.keys():
            self.assert_(weakdict.has_key(k),
                         "original key disappeared in weak dict")
            v = dict[k]
            self.assert_(v is weakdict[k])
            self.assert_(v is weakdict.get(k))

    def test_weak_valued_dict_update(self):
        self.check_update(weakref.WeakValueDictionary,
                          {1: C(), 'a': C(), C(): C()})

    def test_weak_keyed_dict_update(self):
        self.check_update(weakref.WeakKeyDictionary,
                          {C(): 1, C(): 2, C(): 3})

    def test_weak_keyed_delitem(self):
        d = weakref.WeakKeyDictionary()
        o1 = Object('1')
        o2 = Object('2')
        d[o1] = 'something'
        d[o2] = 'something'
        self.assert_(len(d) == 2)
        del d[o1]
        self.assert_(len(d) == 1)
        self.assert_(d.keys() == [o2])

    def test_weak_valued_delitem(self):
        d = weakref.WeakValueDictionary()
        o1 = Object('1')
        o2 = Object('2')
        d['something'] = o1
        d['something else'] = o2
        self.assert_(len(d) == 2)
        del d['something']
        self.assert_(len(d) == 1)
        self.assert_(d.items() == [('something else', o2)])

    def test_weak_keyed_bad_delitem(self):
        d = weakref.WeakKeyDictionary()
        o = Object('1')
        # An attempt to delete an object that isn't there should raise
        # KeyError.  It didn't before 2.3.
        self.assertRaises(KeyError, d.__delitem__, o)
        self.assertRaises(KeyError, d.__getitem__, o)
        # If a key isn't of a weakly referencable type, __getitem__ and
        # __setitem__ raise TypeError.  __delitem__ should too.
        self.assertRaises(TypeError, d.__delitem__, 13)
        self.assertRaises(TypeError, d.__getitem__, 13)
        self.assertRaises(TypeError, d.__setitem__, 13, 13)

    def test_weak_keyed_cascading_deletes(self):
        # SF bug 742860.  For some reason, before 2.3 __delitem__ iterated
        # over the keys via self.data.iterkeys().  If things vanished from
        # the dict during this (or got added), that caused a RuntimeError.
        d = weakref.WeakKeyDictionary()
        mutate = False

        class C(object):
            def __init__(self, i):
                self.value = i
            def __hash__(self):
                return hash(self.value)
            def __eq__(self, other):
                if mutate:
                    # Side effect that mutates the dict, by removing the
                    # last strong reference to a key.
                    del objs[-1]
                return self.value == other.value

        objs = [C(i) for i in range(4)]
        for o in objs:
            d[o] = o.value
        del o   # now the only strong references to keys are in objs
        # Find the order in which iterkeys sees the keys.
        objs = d.keys()
        # Reverse it, so that the iteration implementation of __delitem__
        # has to keep looping to find the first object we delete.
        objs.reverse()
        # Turn on mutation in C.__eq__.  The first time thru the loop,
        # under the iterkeys() business the first comparison will delete
        # the last item iterkeys() would see, and that causes a
        #     RuntimeError: dictionary changed size during iteration
        # when the iterkeys() loop goes around to try comparing the next
        # key.  After this was fixed, it just deletes the last object *our*
        # "for o in obj" loop would have gotten to.
        mutate = True
        count = 0
        for o in objs:
            count += 1
            del d[o]
        self.assertEqual(len(d), 0)
        self.assertEqual(count, 2)
from test import mapping_tests
class WeakValueDictionaryTestCase(mapping_tests.BasicTestMappingProtocol):
    """Check that WeakValueDictionary conforms to the mapping protocol"""
    # Class-level dict holds strong references to the values so they do
    # not vanish from the weak-valued mapping during the protocol tests.
    __ref = {"key1":Object(1), "key2":Object(2), "key3":Object(3)}
    type2test = weakref.WeakValueDictionary
    def _reference(self):
        # Hand out a fresh copy so individual tests may mutate it freely.
        return self.__ref.copy()
class WeakKeyDictionaryTestCase(mapping_tests.BasicTestMappingProtocol):
    """Check that WeakKeyDictionary conforms to the mapping protocol"""
    # Class-level dict holds strong references to the keys so they do
    # not vanish from the weak-keyed mapping during the protocol tests.
    __ref = {Object("key1"):1, Object("key2"):2, Object("key3"):3}
    type2test = weakref.WeakKeyDictionary
    def _reference(self):
        # Hand out a fresh copy so individual tests may mutate it freely.
        return self.__ref.copy()
libreftest = """ Doctest for examples in the library reference: libweakref.tex
>>> import weakref
>>> class Dict(dict):
... pass
...
>>> obj = Dict(red=1, green=2, blue=3) # this object is weak referencable
>>> r = weakref.ref(obj)
>>> print r()
{'blue': 3, 'green': 2, 'red': 1}
>>> import weakref
>>> class Object:
... pass
...
>>> o = Object()
>>> r = weakref.ref(o)
>>> o2 = r()
>>> o is o2
True
>>> del o, o2
>>> print r()
None
>>> import weakref
>>> class ExtendedRef(weakref.ref):
... def __init__(self, ob, callback=None, **annotations):
... super(ExtendedRef, self).__init__(ob, callback)
... self.__counter = 0
... for k, v in annotations.iteritems():
... setattr(self, k, v)
... def __call__(self):
... '''Return a pair containing the referent and the number of
... times the reference has been called.
... '''
... ob = super(ExtendedRef, self).__call__()
... if ob is not None:
... self.__counter += 1
... ob = (ob, self.__counter)
... return ob
...
>>> class A: # not in docs from here, just testing the ExtendedRef
... pass
...
>>> a = A()
>>> r = ExtendedRef(a, foo=1, bar="baz")
>>> r.foo
1
>>> r.bar
'baz'
>>> r()[1]
1
>>> r()[1]
2
>>> r()[0] is a
True
>>> import weakref
>>> _id2obj_dict = weakref.WeakValueDictionary()
>>> def remember(obj):
... oid = id(obj)
... _id2obj_dict[oid] = obj
... return oid
...
>>> def id2obj(oid):
... return _id2obj_dict[oid]
...
>>> a = A() # from here, just testing
>>> a_id = remember(a)
>>> id2obj(a_id) is a
True
>>> del a
>>> try:
... id2obj(a_id)
... except KeyError:
... print 'OK'
... else:
... print 'WeakValueDictionary error'
OK
"""
__test__ = {'libreftest' : libreftest}
def test_main():
    # Run the unittest-based suites, then the module's doctests
    # (the libreftest examples registered in __test__ above).
    test_support.run_unittest(
        ReferencesTestCase,
        MappingTestCase,
        WeakValueDictionaryTestCase,
        WeakKeyDictionaryTestCase,
        )
    test_support.run_doctest(sys.modules[__name__])
# Standard test-module entry point.
if __name__ == "__main__":
    test_main()
| mit |
camillemonchicourt/Geotrek-rando | rando/wsgi.py | 1 | 1137 | """
WSGI config for rando project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os

# Fall back to the production settings module; a deployment may set
# DJANGO_SETTINGS_MODULE in the environment before this runs.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "rando.settings.prod")

# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()

# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| bsd-2-clause |
Petr-Kovalev/nupic-win32 | py/regions/ImageSensorFilters/CenteredMultipleScales.py | 2 | 2997 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have purchased from
# Numenta, Inc. a separate commercial license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
## @file
"""
from PIL import (Image,
ImageChops)
from nupic.regions.ImageSensorFilters.BaseFilter import BaseFilter
class CenteredMultipleScales(BaseFilter):
  """
  Create scaled versions of the original image, each centered on a
  canvas of the original size.
  """

  def __init__(self, scales=[1], background=0, simultaneous=False):
    """
    @param scales -- List of factors used for scaling. scales = [.5, 1] returns
      two images, one half the size of the original in each dimension, and one
      which is the original image.
    @param background -- Background pixel value used for the canvas border.
    @param simultaneous -- Whether the images should be sent out of the sensor
      simultaneously.
    """
    # NOTE: the mutable default `scales=[1]` is safe here because it is
    # never mutated; kept for interface compatibility.
    BaseFilter.__init__(self)
    self.scales = scales
    self.background = background
    self.simultaneous = simultaneous

  def process(self, image):
    """
    @param image -- The image to process.

    Returns a single image, or a list containing one or more images.
    """
    BaseFilter.process(self, image)

    mode = image.mode
    originalSize = image.size
    sizes = [(int(round(image.size[0]*s)), int(round(image.size[1]*s)))
      for s in self.scales]
    resizedImages = []
    for size in sizes:
      # Tuple comparison is lexicographic; for uniform scale factors this
      # picks the high-quality ANTIALIAS filter when shrinking and
      # BICUBIC when enlarging.
      if size < image.size:
        resizedImage = image.resize(size, Image.ANTIALIAS)
      else:
        resizedImage = image.resize(size, Image.BICUBIC)
      # Use floor division so the paste offsets stay integral under both
      # Python 2 and Python 3 (the original "/" relied on Python 2
      # integer division and would produce floats on Python 3).
      x = (originalSize[0] - size[0]) // 2
      y = (originalSize[1] - size[1]) // 2
      newImage = Image.new(mode, originalSize, self.background)
      newImage.paste(resizedImage, (x, y))
      resizedImages.append(newImage)
    if not self.simultaneous:
      return resizedImages
    else:
      return [resizedImages]

  def getOutputCount(self):
    """
    Return the number of images returned by each call to process().

    If the filter creates multiple simultaneous outputs, return a tuple:
    (numOutputs, numSimultaneousOutputs).
    """
    if not self.simultaneous:
      return len(self.scales)
    else:
      return (1, len(self.scales))
ubgarbage/gae-blog | django/contrib/comments/__init__.py | 423 | 3333 | from django.conf import settings
from django.core import urlresolvers
from django.core.exceptions import ImproperlyConfigured
from django.contrib.comments.models import Comment
from django.contrib.comments.forms import CommentForm
from django.utils.importlib import import_module
DEFAULT_COMMENTS_APP = 'django.contrib.comments'
def get_comment_app():
    """
    Get the comment app (i.e. "django.contrib.comments") as defined in the settings
    """
    # Make sure the app's in INSTALLED_APPS
    comments_app = get_comment_app_name()
    if comments_app not in settings.INSTALLED_APPS:
        # Report the resolved app name: settings.COMMENTS_APP may be
        # unset (the default name is used instead), and reading the
        # attribute directly would raise AttributeError here instead of
        # the intended ImproperlyConfigured.
        raise ImproperlyConfigured("The COMMENTS_APP (%r) "\
                                   "must be in INSTALLED_APPS" % comments_app)

    # Try to import the package
    try:
        package = import_module(comments_app)
    except ImportError:
        raise ImproperlyConfigured("The COMMENTS_APP setting refers to "\
                                   "a non-existing package.")

    return package
def get_comment_app_name():
    """
    Returns the name of the comment app (either the setting value, if it
    exists, or the default).
    """
    # getattr with a default tolerates an unset COMMENTS_APP setting.
    return getattr(settings, 'COMMENTS_APP', DEFAULT_COMMENTS_APP)
def get_model():
    """
    Returns the comment model class: the custom comment app's model when
    one is configured and provides get_model(), otherwise the built-in
    Comment model.
    """
    uses_custom_app = get_comment_app_name() != DEFAULT_COMMENTS_APP
    if uses_custom_app and hasattr(get_comment_app(), "get_model"):
        return get_comment_app().get_model()
    return Comment
def get_form():
    """
    Returns the comment ModelForm class: the custom comment app's form
    when one is configured and provides get_form(), otherwise the
    built-in CommentForm.
    """
    uses_custom_app = get_comment_app_name() != DEFAULT_COMMENTS_APP
    if uses_custom_app and hasattr(get_comment_app(), "get_form"):
        return get_comment_app().get_form()
    return CommentForm
def get_form_target():
    """
    Returns the target URL for the comment form submission view,
    delegating to a custom comment app when it defines get_form_target().
    """
    uses_custom_app = get_comment_app_name() != DEFAULT_COMMENTS_APP
    if uses_custom_app and hasattr(get_comment_app(), "get_form_target"):
        return get_comment_app().get_form_target()
    return urlresolvers.reverse("django.contrib.comments.views.comments.post_comment")
def get_flag_url(comment):
    """
    Get the URL for the "flag this comment" view, delegating to a custom
    comment app when it defines get_flag_url().
    """
    if get_comment_app_name() != DEFAULT_COMMENTS_APP:
        app = get_comment_app()
        if hasattr(app, "get_flag_url"):
            return app.get_flag_url(comment)
    return urlresolvers.reverse("django.contrib.comments.views.moderation.flag",
                                args=(comment.id,))
def get_delete_url(comment):
    """
    Get the URL for the "delete this comment" view, delegating to a
    custom comment app when it defines get_delete_url().
    """
    if get_comment_app_name() != DEFAULT_COMMENTS_APP:
        app = get_comment_app()
        if hasattr(app, "get_delete_url"):
            return app.get_delete_url(comment)
    return urlresolvers.reverse("django.contrib.comments.views.moderation.delete",
                                args=(comment.id,))
def get_approve_url(comment):
    """
    Get the URL for the "approve this comment from moderation" view,
    delegating to a custom comment app when it defines get_approve_url().
    """
    if get_comment_app_name() != DEFAULT_COMMENTS_APP:
        app = get_comment_app()
        if hasattr(app, "get_approve_url"):
            return app.get_approve_url(comment)
    return urlresolvers.reverse("django.contrib.comments.views.moderation.approve",
                                args=(comment.id,))
| bsd-3-clause |
indictranstech/frappe-digitales | frappe/utils/email_lib/receive.py | 35 | 7591 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import time
import poplib
import frappe
from frappe.utils import extract_email_id, convert_utc_to_user_timezone, now, cint
from frappe.utils.scheduler import log
# Raised when a single message is larger than the configured max_email_size.
class EmailSizeExceededError(frappe.ValidationError): pass
# Raised when cumulative POP3 socket time exceeds the configured timeout.
class EmailTimeoutError(frappe.ValidationError): pass
# Raised when the combined size of pulled messages exceeds the session cap.
class TotalSizeExceededError(frappe.ValidationError): pass
class IncomingMail:
	"""
	Single incoming email object. Extracts, text / html and attachments from the email
	"""
	def __init__(self, content):
		# `content` is the raw RFC-2822 message text as fetched from POP3.
		import email, email.utils
		import datetime

		self.mail = email.message_from_string(content)

		self.text_content = ''
		self.html_content = ''
		self.attachments = []

		self.parse()
		self.set_content_and_type()
		self.set_subject()

		self.from_email = extract_email_id(self.mail["From"])
		self.from_real_name = email.utils.parseaddr(self.mail["From"])[0]

		if self.mail["Date"]:
			# Convert the (timezone-aware) Date header to the user's
			# local timezone before formatting.
			utc = email.utils.mktime_tz(email.utils.parsedate_tz(self.mail["Date"]))
			utc_dt = datetime.datetime.utcfromtimestamp(utc)
			self.date = convert_utc_to_user_timezone(utc_dt).strftime('%Y-%m-%d %H:%M:%S')
		else:
			# No Date header: fall back to the current server time.
			self.date = now()

	def parse(self):
		# Walk every MIME part and accumulate text/html/attachments.
		for part in self.mail.walk():
			self.process_part(part)

	def set_subject(self):
		# Decode a possibly RFC-2047-encoded Subject header.
		import email.header
		_subject = email.header.decode_header(self.mail.get("Subject", "No Subject"))
		self.subject = _subject[0][0] or ""
		if _subject[0][1]:
			self.subject = self.subject.decode(_subject[0][1])
		else:
			# assume that the encoding is utf-8
			self.subject = self.subject.decode("utf-8")
		if not self.subject:
			self.subject = "No Subject"

	def set_content_and_type(self):
		# Prefer the plain-text part; fall back to HTML, then a placeholder.
		self.content, self.content_type = '[Blank Email]', 'text/plain'
		if self.text_content:
			self.content, self.content_type = self.text_content, 'text/plain'
		else:
			self.content, self.content_type = self.html_content, 'text/html'

	def process_part(self, part):
		# Route a single MIME part to text, html or attachment handling.
		content_type = part.get_content_type()
		charset = part.get_content_charset()
		if not charset: charset = self.get_charset(part)

		if content_type == 'text/plain':
			self.text_content += self.get_payload(part, charset)

		if content_type == 'text/html':
			self.html_content += self.get_payload(part, charset)

		if part.get_filename():
			self.get_attachment(part, charset)

	def get_text_content(self):
		return self.text_content or self.html_content

	def get_charset(self, part):
		# Guess the charset via chardet when the part declares none.
		charset = part.get_content_charset()
		if not charset:
			import chardet
			charset = chardet.detect(str(part))['encoding']

		return charset

	def get_payload(self, part, charset):
		try:
			return unicode(part.get_payload(decode=True),str(charset),"ignore")
		except LookupError:
			# Unknown charset name: return the payload undecoded.
			return part.get_payload()

	def get_attachment(self, part, charset):
		# Record the attachment's metadata and decoded bytes.
		self.attachments.append({
			'content-type': part.get_content_type(),
			'filename': part.get_filename(),
			'content': part.get_payload(decode=True),
		})

	def save_attachments_in_doc(self, doc):
		# Attach each extracted file to the given document; oversized and
		# duplicate files are deliberately skipped rather than failing.
		from frappe.utils.file_manager import save_file, MaxFileSizeReachedError
		for attachment in self.attachments:
			try:
				fid = save_file(attachment['filename'], attachment['content'],
					doc.doctype, doc.name)
			except MaxFileSizeReachedError:
				# WARNING: bypass max file size exception
				pass
			except frappe.DuplicateEntryError:
				# same file attached twice??
				pass

	def get_thread_id(self):
		# Extract a thread identifier like "[ABC/123]" from the subject.
		import re
		l = re.findall('(?<=\[)[\w/-]+', self.subject)
		return l and l[0] or None
class POP3Mailbox:
	"""Template base class that pulls messages from a POP3 mailbox.

	Subclasses override setup(), check_mails() and process_message().
	Connecting and fetching happens immediately on construction.
	"""
	def __init__(self, args=None):
		self.setup(args)
		self.get_messages()

	def setup(self, args=None):
		# override
		self.settings = args or frappe._dict()

	def check_mails(self):
		# override; return False to skip fetching entirely.
		return True

	def process_message(self, mail):
		# override; receives an IncomingMail instance.
		pass

	def connect(self):
		# Open a (optionally SSL) POP3 connection with a cumulative
		# timeout, then authenticate.
		if cint(self.settings.use_ssl):
			self.pop = Timed_POP3_SSL(self.settings.host, timeout=frappe.conf.get("pop_timeout"))
		else:
			self.pop = Timed_POP3(self.settings.host, timeout=frappe.conf.get("pop_timeout"))

		self.pop.user(self.settings.username)
		self.pop.pass_(self.settings.password)

	def get_messages(self):
		# Fetch up to 20 messages per run, respecting size limits, and
		# always close the POP3 session.
		if not self.check_mails():
			return # nothing to do

		frappe.db.commit()
		self.connect()

		try:
			# track if errors arose
			self.errors = False
			pop_list = self.pop.list()[1]
			num = num_copy = len(pop_list)

			# WARNING: Hard coded max no. of messages to be popped
			if num > 20: num = 20

			# size limits
			self.total_size = 0
			self.max_email_size = cint(frappe.local.conf.get("max_email_size"))
			self.max_total_size = 5 * self.max_email_size

			for i, pop_meta in enumerate(pop_list):
				# do not pull more than NUM emails
				if (i+1) > num:
					break

				try:
					self.retrieve_message(pop_meta, i+1)
				except (TotalSizeExceededError, EmailTimeoutError):
					break

			# WARNING: Mark as read - message number 101 onwards from the pop list
			# This is to avoid having too many messages entering the system
			num = num_copy
			if num > 100 and not self.errors:
				for m in xrange(101, num+1):
					self.pop.dele(m)

		finally:
			# no matter the exception, pop should quit if connected
			self.pop.quit()

	def retrieve_message(self, pop_meta, msg_num):
		# Fetch one message, hand it to process_message() inside a DB
		# transaction, and delete it from the server afterwards (even on
		# processing errors, which are logged instead of raised).
		incoming_mail = None
		try:
			self.validate_pop(pop_meta)
			msg = self.pop.retr(msg_num)
			incoming_mail = IncomingMail(b'\n'.join(msg[1]))

			frappe.db.begin()
			self.process_message(incoming_mail)
			frappe.db.commit()

		except (TotalSizeExceededError, EmailTimeoutError):
			# propagate this error to break the loop
			raise

		except:
			# log performs rollback and logs error in scheduler log
			log("receive.get_messages", self.make_error_msg(msg_num, incoming_mail))
			self.errors = True
			frappe.db.rollback()

			self.pop.dele(msg_num)
		else:
			self.pop.dele(msg_num)

	def validate_pop(self, pop_meta):
		# throttle based on email size
		if not self.max_email_size:
			return

		m, size = pop_meta.split()
		size = cint(size)

		if size < self.max_email_size:
			self.total_size += size
			if self.total_size > self.max_total_size:
				raise TotalSizeExceededError
		else:
			raise EmailSizeExceededError

	def make_error_msg(self, msg_num, incoming_mail):
		# Build a diagnostic string; fetch just the headers if the full
		# message was never retrieved.
		error_msg = "Error in retrieving email."
		if not incoming_mail:
			try:
				# retrieve headers
				incoming_mail = IncomingMail(b'\n'.join(self.pop.top(msg_num, 5)[1]))
			except:
				pass

		if incoming_mail:
			error_msg += "\nDate: {date}\nFrom: {from_email}\nSubject: {subject}\n".format(
				date=incoming_mail.date, from_email=incoming_mail.from_email, subject=incoming_mail.subject)

		return error_msg
class TimerMixin(object):
	"""Mixin adding a cumulative read timeout to a poplib connection.

	The concrete subclass must set ``_super`` to its poplib base class.
	Total time spent in _getline() is tracked; once it exceeds
	``timeout`` an EmailTimeoutError is raised.
	"""
	def __init__(self, *args, **kwargs):
		self.timeout = kwargs.pop('timeout', 0.0)
		self.elapsed_time = 0.0
		self._super.__init__(self, *args, **kwargs)
		if self.timeout:
			# set per operation timeout to one-fifth of total pop timeout
			self.sock.settimeout(self.timeout / 5.0)

	def _getline(self, *args, **kwargs):
		# Accumulate wall-clock time spent reading from the socket.
		start_time = time.time()
		ret = self._super._getline(self, *args, **kwargs)

		self.elapsed_time += time.time() - start_time
		if self.timeout and self.elapsed_time > self.timeout:
			raise EmailTimeoutError

		return ret

	def quit(self, *args, **kwargs):
		# Reset the clock so a reused connection object starts fresh.
		self.elapsed_time = 0.0
		return self._super.quit(self, *args, **kwargs)
# Plain POP3 connection with the cumulative-timeout mixin applied.
class Timed_POP3(TimerMixin, poplib.POP3):
	_super = poplib.POP3
# SSL POP3 connection with the cumulative-timeout mixin applied.
class Timed_POP3_SSL(TimerMixin, poplib.POP3_SSL):
	_super = poplib.POP3_SSL
| mit |
ajaali/django | tests/contenttypes_tests/test_models.py | 249 | 12059 | from __future__ import unicode_literals
import warnings
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes.views import shortcut
from django.contrib.sites.shortcuts import get_current_site
from django.db.utils import IntegrityError, OperationalError, ProgrammingError
from django.http import Http404, HttpRequest
from django.test import TestCase, mock, override_settings
from django.utils import six
from .models import (
ConcreteModel, FooWithBrokenAbsoluteUrl, FooWithoutUrl, FooWithUrl,
ProxyModel,
)
class ContentTypesTests(TestCase):
def setUp(self):
    # Start each test with an empty ContentType lookup cache so
    # query-count assertions are deterministic.
    ContentType.objects.clear_cache()
def tearDown(self):
    # Leave no cached ContentTypes behind for other test cases.
    ContentType.objects.clear_cache()
def test_lookup_cache(self):
    """
    Make sure that the content type cache (see ContentTypeManager)
    works correctly. Lookups for a particular content type -- by model, ID
    or natural key -- should hit the database only on the first lookup.
    """

    # At this point, a lookup for a ContentType should hit the DB
    with self.assertNumQueries(1):
        ContentType.objects.get_for_model(ContentType)

    # A second hit, though, won't hit the DB, nor will a lookup by ID
    # or natural key
    with self.assertNumQueries(0):
        ct = ContentType.objects.get_for_model(ContentType)
    with self.assertNumQueries(0):
        ContentType.objects.get_for_id(ct.id)
    with self.assertNumQueries(0):
        ContentType.objects.get_by_natural_key('contenttypes',
                                               'contenttype')

    # Once we clear the cache, another lookup will again hit the DB
    ContentType.objects.clear_cache()
    with self.assertNumQueries(1):
        ContentType.objects.get_for_model(ContentType)

    # The same should happen with a lookup by natural key
    ContentType.objects.clear_cache()
    with self.assertNumQueries(1):
        ContentType.objects.get_by_natural_key('contenttypes',
                                               'contenttype')
    # And a second hit shouldn't hit the DB
    with self.assertNumQueries(0):
        ContentType.objects.get_by_natural_key('contenttypes',
                                               'contenttype')
def test_get_for_models_empty_cache(self):
    # Empty cache: get_for_models() must resolve both models with a
    # single query and agree with per-model get_for_model().
    with self.assertNumQueries(1):
        cts = ContentType.objects.get_for_models(ContentType, FooWithUrl)
    self.assertEqual(cts, {
        ContentType: ContentType.objects.get_for_model(ContentType),
        FooWithUrl: ContentType.objects.get_for_model(FooWithUrl),
    })
def test_get_for_models_partial_cache(self):
    # Partial cache: one model cached, so only one query for the rest.
    ContentType.objects.get_for_model(ContentType)
    with self.assertNumQueries(1):
        cts = ContentType.objects.get_for_models(ContentType, FooWithUrl)
    self.assertEqual(cts, {
        ContentType: ContentType.objects.get_for_model(ContentType),
        FooWithUrl: ContentType.objects.get_for_model(FooWithUrl),
    })
def test_get_for_models_full_cache(self):
    # Full cache: both models cached, so no queries at all.
    ContentType.objects.get_for_model(ContentType)
    ContentType.objects.get_for_model(FooWithUrl)
    with self.assertNumQueries(0):
        cts = ContentType.objects.get_for_models(ContentType, FooWithUrl)
    self.assertEqual(cts, {
        ContentType: ContentType.objects.get_for_model(ContentType),
        FooWithUrl: ContentType.objects.get_for_model(FooWithUrl),
    })
def test_get_for_concrete_model(self):
    """
    Make sure the `for_concrete_model` kwarg correctly works
    with concrete, proxy and deferred models
    """
    concrete_model_ct = ContentType.objects.get_for_model(ConcreteModel)

    # By default a proxy resolves to its concrete model's ContentType.
    self.assertEqual(concrete_model_ct,
                     ContentType.objects.get_for_model(ProxyModel))

    self.assertEqual(concrete_model_ct,
                     ContentType.objects.get_for_model(ConcreteModel,
                                                       for_concrete_model=False))

    # With for_concrete_model=False a proxy gets its own ContentType.
    proxy_model_ct = ContentType.objects.get_for_model(ProxyModel,
                                                       for_concrete_model=False)
    self.assertNotEqual(concrete_model_ct, proxy_model_ct)

    # Make sure deferred model are correctly handled
    ConcreteModel.objects.create(name="Concrete")
    DeferredConcreteModel = ConcreteModel.objects.only('pk').get().__class__
    DeferredProxyModel = ProxyModel.objects.only('pk').get().__class__

    self.assertEqual(concrete_model_ct,
                     ContentType.objects.get_for_model(DeferredConcreteModel))

    self.assertEqual(concrete_model_ct,
                     ContentType.objects.get_for_model(DeferredConcreteModel,
                                                       for_concrete_model=False))

    self.assertEqual(concrete_model_ct,
                     ContentType.objects.get_for_model(DeferredProxyModel))

    self.assertEqual(proxy_model_ct,
                     ContentType.objects.get_for_model(DeferredProxyModel,
                                                       for_concrete_model=False))
def test_get_for_concrete_models(self):
    """
    Make sure the `for_concrete_models` kwarg correctly works
    with concrete, proxy and deferred models.
    """
    concrete_model_ct = ContentType.objects.get_for_model(ConcreteModel)

    # By default both the proxy and its concrete model map to the
    # concrete model's ContentType.
    cts = ContentType.objects.get_for_models(ConcreteModel, ProxyModel)
    self.assertEqual(cts, {
        ConcreteModel: concrete_model_ct,
        ProxyModel: concrete_model_ct,
    })

    proxy_model_ct = ContentType.objects.get_for_model(ProxyModel,
                                                       for_concrete_model=False)

    # With for_concrete_models=False the proxy keeps its own ContentType.
    cts = ContentType.objects.get_for_models(ConcreteModel, ProxyModel,
                                             for_concrete_models=False)
    self.assertEqual(cts, {
        ConcreteModel: concrete_model_ct,
        ProxyModel: proxy_model_ct,
    })

    # Make sure deferred model are correctly handled
    ConcreteModel.objects.create(name="Concrete")
    DeferredConcreteModel = ConcreteModel.objects.only('pk').get().__class__
    DeferredProxyModel = ProxyModel.objects.only('pk').get().__class__

    cts = ContentType.objects.get_for_models(DeferredConcreteModel,
                                             DeferredProxyModel)
    self.assertEqual(cts, {
        DeferredConcreteModel: concrete_model_ct,
        DeferredProxyModel: concrete_model_ct,
    })

    cts = ContentType.objects.get_for_models(DeferredConcreteModel,
                                             DeferredProxyModel,
                                             for_concrete_models=False)
    self.assertEqual(cts, {
        DeferredConcreteModel: concrete_model_ct,
        DeferredProxyModel: proxy_model_ct,
    })
@override_settings(ALLOWED_HOSTS=['example.com'])
def test_shortcut_view(self):
    """
    Check that the shortcut view (used for the admin "view on site"
    functionality) returns a complete URL regardless of whether the sites
    framework is installed
    """
    request = HttpRequest()
    request.META = {
        "SERVER_NAME": "Example.com",
        "SERVER_PORT": "80",
    }
    user_ct = ContentType.objects.get_for_model(FooWithUrl)
    obj = FooWithUrl.objects.create(name="john")

    # With the sites framework installed, the redirect uses the current
    # Site's domain.
    with self.modify_settings(INSTALLED_APPS={'append': 'django.contrib.sites'}):
        response = shortcut(request, user_ct.id, obj.id)
        self.assertEqual("http://%s/users/john/" % get_current_site(request).domain,
                         response._headers.get("location")[1])

    # Without it, the redirect falls back to the request's host header.
    with self.modify_settings(INSTALLED_APPS={'remove': 'django.contrib.sites'}):
        response = shortcut(request, user_ct.id, obj.id)
        self.assertEqual("http://Example.com/users/john/",
                         response._headers.get("location")[1])
def test_shortcut_view_without_get_absolute_url(self):
    """
    Check that the shortcut view (used for the admin "view on site"
    functionality) returns 404 when get_absolute_url is not defined.
    """
    request = HttpRequest()
    request.META = {
        "SERVER_NAME": "Example.com",
        "SERVER_PORT": "80",
    }
    user_ct = ContentType.objects.get_for_model(FooWithoutUrl)
    obj = FooWithoutUrl.objects.create(name="john")

    self.assertRaises(Http404, shortcut, request, user_ct.id, obj.id)
def test_shortcut_view_with_broken_get_absolute_url(self):
    """
    Check that the shortcut view does not catch an AttributeError raised
    by the model's get_absolute_url method.
    Refs #8997.
    """
    request = HttpRequest()
    request.META = {
        "SERVER_NAME": "Example.com",
        "SERVER_PORT": "80",
    }
    user_ct = ContentType.objects.get_for_model(FooWithBrokenAbsoluteUrl)
    obj = FooWithBrokenAbsoluteUrl.objects.create(name="john")

    # The error must bubble up, not be swallowed into a 404.
    self.assertRaises(AttributeError, shortcut, request, user_ct.id, obj.id)
    def test_missing_model(self):
        """
        Ensures that displaying content types in admin (or anywhere) doesn't
        break on leftover content type records in the DB for which no model
        is defined anymore.
        """
        ct = ContentType.objects.create(
            app_label='contenttypes',
            model='OldModel',
        )
        self.assertEqual(six.text_type(ct), 'OldModel')
        # The model class is gone, so model_class() must degrade to None.
        self.assertIsNone(ct.model_class())

        # Make sure stale ContentTypes can be fetched like any other object.
        # Before Django 1.6 this caused a NoneType error in the caching mechanism.
        # Instead, just return the ContentType object and let the app detect stale states.
        ct_fetched = ContentType.objects.get_for_id(ct.pk)
        self.assertIsNone(ct_fetched.model_class())

    def test_name_deprecation(self):
        """
        ContentType.name has been removed. Test that a warning is emitted when
        creating a ContentType with a `name`, but the creation should not fail.
        """
        with warnings.catch_warnings(record=True) as warns:
            warnings.simplefilter('always')
            ContentType.objects.create(
                name='Name',
                app_label='contenttypes',
                model='OldModel',
            )
        # Exactly one warning, carrying the removal message.
        self.assertEqual(len(warns), 1)
        self.assertEqual(
            str(warns[0].message),
            "ContentType.name field doesn't exist any longer. Please remove it from your code."
        )
        # The row itself must still be created despite the ignored kwarg.
        self.assertTrue(ContentType.objects.filter(model='OldModel').exists())
    @mock.patch('django.contrib.contenttypes.models.ContentTypeManager.get_or_create')
    @mock.patch('django.contrib.contenttypes.models.ContentTypeManager.get')
    def test_message_if_get_for_model_fails(self, mocked_get, mocked_get_or_create):
        """
        Check that `RuntimeError` with nice error message is raised if
        `get_for_model` fails because of database errors.
        """
        def _test_message(mocked_method):
            # Each of the common DB error classes must be translated into
            # the same actionable RuntimeError.
            for ExceptionClass in (IntegrityError, OperationalError, ProgrammingError):
                mocked_method.side_effect = ExceptionClass
                with self.assertRaisesMessage(
                    RuntimeError,
                    "Error creating new content types. Please make sure contenttypes "
                    "is migrated before trying to migrate apps individually."
                ):
                    ContentType.objects.get_for_model(ContentType)

        _test_message(mocked_get)

        # If the initial lookup raises DoesNotExist, the failure moves on
        # to get_or_create; the same friendly error is expected there.
        mocked_get.side_effect = ContentType.DoesNotExist
        _test_message(mocked_get_or_create)
| bsd-3-clause |
Keisuke69/libcloud | libcloud/dns/drivers/zerigo.py | 1 | 17542 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = [
'ZerigoDNSDriver'
]
import copy
import base64
import httplib
from xml.etree import ElementTree as ET
from libcloud.utils import fixxpath, findtext, findattr, findall
from libcloud.utils import merge_valid_keys, get_new_obj
from libcloud.common.base import XmlResponse, ConnectionUserAndKey
from libcloud.common.types import InvalidCredsError, LibcloudError
from libcloud.common.types import MalformedResponseError, LazyList
from libcloud.dns.types import Provider, RecordType
from libcloud.dns.types import ZoneDoesNotExistError, RecordDoesNotExistError
from libcloud.dns.base import DNSDriver, Zone, Record
API_HOST = 'ns.zerigo.com'
API_VERSION = '1.1'
API_ROOT = '/api/%s/' % (API_VERSION)
VALID_ZONE_EXTRA_PARAMS = ['notes', 'tag-list', 'ns1', 'slave-nameservers']
VALID_RECORD_EXTRA_PARAMS = ['notes', 'ttl', 'priority']
# Number of items per page (maximum limit is 1000)
ITEMS_PER_PAGE = 100
RECORD_TYPE_MAP = {
RecordType.A: 'A',
RecordType.AAAA: 'AAAA',
RecordType.CNAME: 'CNAME',
RecordType.MX: 'MX',
RecordType.REDIRECT: 'REDIRECT',
RecordType.TXT: 'TXT',
RecordType.SRV: 'SRV',
RecordType.NAPTR: 'NAPTR',
RecordType.NS: 'NS',
RecordType.PTR: 'PTR',
RecordType.SPF: 'SPF',
}
class ZerigoError(LibcloudError):
    """Error returned by the Zerigo API: carries the HTTP status code and
    the list of error messages parsed from the response body."""

    def __init__(self, code, errors):
        self.code = code
        self.errors = [] if not errors else errors

    def __str__(self):
        joined = ', '.join(self.errors)
        return 'Errors: %s' % (joined)

    def __repr__(self):
        return ('<ZerigoError response code=%s, errors count=%s>'
                % (self.code, len(self.errors)))
class ZerigoDNSResponse(XmlResponse):
    """Response class that maps Zerigo HTTP errors to libcloud exceptions."""

    def success(self):
        # Zerigo returns 200/201/202 on successful calls.
        return self.status in [httplib.OK, httplib.CREATED, httplib.ACCEPTED]

    def parse_error(self):
        """Raise the most specific libcloud exception for this response."""
        status = int(self.status)

        if status == 401:
            # Authentication failure; the body may be empty.
            if not self.body:
                raise InvalidCredsError(str(self.status) + ': ' + self.error)
            else:
                raise InvalidCredsError(self.body)
        elif status == 404:
            # The driver stashes what it was looking up in the connection
            # context, so we can raise a precise "does not exist" error.
            context = self.connection.context
            if context['resource'] == 'zone':
                # NOTE(review): driver=self passes the Response object, not
                # the DNSDriver instance -- confirm against how callers use
                # the .driver attribute of these exceptions.
                raise ZoneDoesNotExistError(value='', driver=self,
                                            zone_id=context['id'])
            elif context['resource'] == 'record':
                raise RecordDoesNotExistError(value='', driver=self,
                                              record_id=context['id'])
        elif status != 503:
            # Any other non-maintenance error: collect the <error> messages
            # from the XML body.
            try:
                body = ET.XML(self.body)
            except:
                # TODO(review): bare except also traps SystemExit and
                # KeyboardInterrupt; narrowing it would be safer.
                raise MalformedResponseError('Failed to parse XML',
                                             body=self.body)
            errors = []
            for error in findall(element=body, xpath='error'):
                errors.append(error.text)
            raise ZerigoError(code=status, errors=errors)

        # 503 (maintenance mode) falls through to returning the raw body.
        return self.body
class ZerigoDNSConnection(ConnectionUserAndKey):
    """Connection that authenticates with HTTP Basic auth and speaks XML."""
    host = API_HOST
    secure = True
    responseCls = ZerigoDNSResponse

    def add_default_headers(self, headers):
        # Zerigo uses HTTP Basic auth of "<account email>:<api key>".
        auth_b64 = base64.b64encode('%s:%s' % (self.user_id, self.key))
        headers['Authorization'] = 'Basic %s' % (auth_b64)
        return headers

    def request(self, action, params=None, data='', headers=None,
                method='GET'):
        """Issue a request, tagging write requests (POST/PUT) with the XML
        content type.  Errors surface through ZerigoDNSResponse."""
        if not headers:
            headers = {}
        if not params:
            params = {}

        if method in ("POST", "PUT"):
            # Bug fix: this previously *replaced* the whole headers dict,
            # silently dropping any caller-supplied headers; now the
            # Content-Type is merged in instead.
            headers['Content-Type'] = 'application/xml; charset=UTF-8'

        return super(ZerigoDNSConnection, self).request(action=action,
                                                        params=params,
                                                        data=data,
                                                        method=method,
                                                        headers=headers)
class ZerigoDNSDriver(DNSDriver):
    """libcloud DNS driver for the Zerigo DNS API (v1.1)."""
    type = Provider.ZERIGO
    name = 'Zerigo DNS'
    connectionCls = ZerigoDNSConnection

    def list_record_types(self):
        # All record types Zerigo supports (the keys of RECORD_TYPE_MAP).
        return RECORD_TYPE_MAP.keys()

    def list_zones(self):
        """Return a lazily-paginated list of all zones (see _get_more)."""
        value_dict = {'type': 'zones'}
        return LazyList(get_more=self._get_more, value_dict=value_dict)

    def list_records(self, zone):
        """Return a lazily-paginated list of the zone's records."""
        value_dict = {'type': 'records', 'zone': zone}
        return LazyList(get_more=self._get_more, value_dict=value_dict)

    def get_zone(self, zone_id):
        """Fetch a single Zone by id.

        Raises ZoneDoesNotExistError (via the response class and the
        context set here) if the API returns 404.
        """
        path = API_ROOT + 'zones/%s.xml' % (zone_id)
        self.connection.set_context({'resource': 'zone', 'id': zone_id})
        data = self.connection.request(path).object
        zone = self._to_zone(elem=data)
        return zone

    def get_record(self, zone_id, record_id):
        """Fetch a single Record by id.

        The zone is fetched first so the returned Record can reference it;
        a missing record raises RecordDoesNotExistError.
        """
        zone = self.get_zone(zone_id=zone_id)
        self.connection.set_context({'resource': 'record', 'id': record_id})
        path = API_ROOT + 'hosts/%s.xml' % (record_id)
        data = self.connection.request(path).object
        record = self._to_record(elem=data, zone=zone)
        return record

    def create_zone(self, domain, type='master', ttl=None, extra=None):
        """
        Create a new zone.

        Provider API docs:
        https://www.zerigo.com/docs/apis/dns/1.1/zones/create
        """
        path = API_ROOT + 'zones.xml'
        zone_elem = self._to_zone_elem(domain=domain, type=type, ttl=ttl,
                                       extra=extra)
        data = self.connection.request(action=path,
                                       data=ET.tostring(zone_elem),
                                       method='POST').object
        zone = self._to_zone(elem=data)
        return zone

    def update_zone(self, zone, domain=None, type=None, ttl=None, extra=None):
        """
        Update an existing zone.

        Provider API docs:
        https://www.zerigo.com/docs/apis/dns/1.1/zones/update

        Raises LibcloudError if a domain change is requested (the API
        does not support renaming a zone).
        """
        if domain:
            raise LibcloudError('Domain cannot be changed', driver=self)

        path = API_ROOT + 'zones/%s.xml' % (zone.id)
        zone_elem = self._to_zone_elem(domain=domain, type=type, ttl=ttl,
                                       extra=extra)
        response = self.connection.request(action=path,
                                           data=ET.tostring(zone_elem),
                                           method='PUT')
        assert response.status == httplib.OK

        # Merge the whitelisted extra attributes into a copy; the input
        # zone object itself is not mutated.
        merged = merge_valid_keys(params=copy.deepcopy(zone.extra),
                                  valid_keys=VALID_ZONE_EXTRA_PARAMS,
                                  extra=extra)
        updated_zone = get_new_obj(obj=zone, klass=Zone,
                                   attributes={'type': type,
                                               'ttl': ttl,
                                               'extra': merged})
        return updated_zone
    def create_record(self, name, zone, type, data, extra=None):
        """
        Create a new record.

        Provider API docs:
        https://www.zerigo.com/docs/apis/dns/1.1/hosts/create
        """
        path = API_ROOT + 'zones/%s/hosts.xml' % (zone.id)
        record_elem = self._to_record_elem(name=name, type=type, data=data,
                                           extra=extra)
        response = self.connection.request(action=path,
                                           data=ET.tostring(record_elem),
                                           method='POST')
        assert response.status == httplib.CREATED
        record = self._to_record(elem=response.object, zone=zone)
        return record

    def update_record(self, record, name=None, type=None, data=None,
                      extra=None):
        """Update an existing record; only the provided fields are sent.

        Returns a new Record object (the input record is not mutated).
        """
        path = API_ROOT + 'hosts/%s.xml' % (record.id)
        record_elem = self._to_record_elem(name=name, type=type, data=data,
                                           extra=extra)
        response = self.connection.request(action=path,
                                           data=ET.tostring(record_elem),
                                           method='PUT')
        assert response.status == httplib.OK

        merged = merge_valid_keys(params=copy.deepcopy(record.extra),
                                  valid_keys=VALID_RECORD_EXTRA_PARAMS,
                                  extra=extra)
        updated_record = get_new_obj(obj=record, klass=Record,
                                     attributes={'type': type,
                                                 'data': data,
                                                 'extra': merged})
        return updated_record

    def delete_zone(self, zone):
        """Delete the zone; returns True on HTTP 200."""
        path = API_ROOT + 'zones/%s.xml' % (zone.id)
        self.connection.set_context({'resource': 'zone', 'id': zone.id})
        response = self.connection.request(action=path, method='DELETE')
        return response.status == httplib.OK

    def delete_record(self, record):
        """Delete the record; returns True on HTTP 200."""
        path = API_ROOT + 'hosts/%s.xml' % (record.id)
        self.connection.set_context({'resource': 'record', 'id': record.id})
        response = self.connection.request(action=path, method='DELETE')
        return response.status == httplib.OK

    def ex_get_zone_by_domain(self, domain):
        """
        Retrieve a zone object by the domain name.

        (The Zerigo API accepts a domain in place of a zone id.)
        """
        path = API_ROOT + 'zones/%s.xml' % (domain)
        self.connection.set_context({'resource': 'zone', 'id': domain})
        data = self.connection.request(path).object
        zone = self._to_zone(elem=data)
        return zone

    def ex_force_slave_axfr(self, zone):
        """
        Force a zone transfer.

        The API acknowledges with 202 Accepted; the transfer itself is
        asynchronous.  Returns the zone unchanged.
        """
        path = API_ROOT + 'zones/%s/force_slave_axfr.xml' % (zone.id)
        self.connection.set_context({'resource': 'zone', 'id': zone.id})
        response = self.connection.request(path, method='POST')
        assert response.status == httplib.ACCEPTED
        return zone
def _to_zone_elem(self, domain=None, type=None, ttl=None, extra=None):
zone_elem = ET.Element('zone', {})
if domain:
domain_elem = ET.SubElement(zone_elem, 'domain')
domain_elem.text = domain
if type:
ns_type_elem = ET.SubElement(zone_elem, 'ns-type')
if type == 'master':
ns_type_elem.text = 'pri_sec'
elif type == 'slave':
if not extra or 'ns1' not in extra:
raise LibcloudError('ns1 extra attribute is required ' +
'when zone type is slave', driver=self)
ns_type_elem.text = 'sec'
ns1_elem = ET.SubElement(zone_elem, 'ns1')
ns1_elem.text = extra['ns1']
elif type == 'std_master':
# TODO: Each driver should provide supported zone types
# Slave name servers are elsewhere
if not extra or 'slave-nameservers' not in extra:
raise LibcloudError('slave-nameservers extra ' +
'attribute is required whenzone ' +
'type is std_master', driver=self)
ns_type_elem.text = 'pri'
slave_nameservers_elem = ET.SubElement(zone_elem,
'slave-nameservers')
slave_nameservers_elem.text = extra['slave-nameservers']
if ttl:
default_ttl_elem = ET.SubElement(zone_elem, 'default-ttl')
default_ttl_elem.text = str(ttl)
if extra and 'tag-list' in extra:
tags = extra['tag-list']
tags_elem = ET.SubElement(zone_elem, 'tag-list')
tags_elem.text = ' '.join(tags)
return zone_elem
    def _to_record_elem(self, name=None, type=None, data=None, extra=None):
        """Serialize record attributes into a <host> XML element.

        Only provided (non-None) attributes are included, so the helper
        serves both create_record and update_record.
        """
        record_elem = ET.Element('host', {})

        if name:
            name_elem = ET.SubElement(record_elem, 'hostname')
            name_elem.text = name

        if type:
            type_elem = ET.SubElement(record_elem, 'host-type')
            type_elem.text = RECORD_TYPE_MAP[type]

        if data:
            data_elem = ET.SubElement(record_elem, 'data')
            data_elem.text = data

        if extra:
            if 'ttl' in extra:
                ttl_elem = ET.SubElement(record_elem, 'ttl',
                                         {'type': 'integer'})
                ttl_elem.text = str(extra['ttl'])

            if 'priority' in extra:
                # Only MX and SRV records support priority
                priority_elem = ET.SubElement(record_elem, 'priority',
                                              {'type': 'integer'})
                priority_elem.text = str(extra['priority'])

            if 'notes' in extra:
                notes_elem = ET.SubElement(record_elem, 'notes')
                notes_elem.text = extra['notes']

        return record_elem

    def _to_zones(self, elem):
        """Parse a <zones> response body into a list of Zone objects."""
        zones = []
        for item in findall(element=elem, xpath='zone'):
            zone = self._to_zone(elem=item)
            zones.append(zone)
        return zones

    def _to_zone(self, elem):
        """Parse a single <zone> element into a Zone object."""
        id = findtext(element=elem, xpath='id')
        domain = findtext(element=elem, xpath='domain')
        type = findtext(element=elem, xpath='ns-type')
        # API 'pri*' types (pri, pri_sec) map to libcloud 'master'.
        type = 'master' if type.find('pri') == 0 else 'slave'
        # NOTE(review): int(ttl) below assumes <default-ttl> is always
        # present in the response; confirm against the API.
        ttl = findtext(element=elem, xpath='default-ttl')

        hostmaster = findtext(element=elem, xpath='hostmaster')
        custom_ns = findtext(element=elem, xpath='custom-ns')
        custom_nameservers = findtext(element=elem, xpath='custom-nameservers')
        notes = findtext(element=elem, xpath='notes')
        nx_ttl = findtext(element=elem, xpath='nx-ttl')
        slave_nameservers = findtext(element=elem, xpath='slave-nameservers')
        tags = findtext(element=elem, xpath='tag-list')
        # Tags come back as one space-separated string.
        tags = tags.split(' ') if tags else []

        extra = {'hostmaster': hostmaster, 'custom-ns': custom_ns,
                 'custom-nameservers': custom_nameservers, 'notes': notes,
                 'nx-ttl': nx_ttl, 'slave-nameservers': slave_nameservers,
                 'tags': tags}
        zone = Zone(id=str(id), domain=domain, type=type, ttl=int(ttl),
                    driver=self, extra=extra)
        return zone

    def _to_records(self, elem, zone):
        """Parse a <hosts> response body into a list of Record objects."""
        records = []
        for item in findall(element=elem, xpath='host'):
            record = self._to_record(elem=item, zone=zone)
            records.append(record)
        return records

    def _to_record(self, elem, zone):
        """Parse a single <host> element into a Record bound to zone."""
        id = findtext(element=elem, xpath='id')
        name = findtext(element=elem, xpath='hostname')
        type = findtext(element=elem, xpath='host-type')
        type = self._string_to_record_type(type)
        data = findtext(element=elem, xpath='data')

        notes = findtext(element=elem, xpath='notes')
        state = findtext(element=elem, xpath='state')
        fqdn = findtext(element=elem, xpath='fqdn')
        priority = findtext(element=elem, xpath='priority')

        extra = {'notes': notes, 'state': state, 'fqdn': fqdn,
                 'priority': priority}
        record = Record(id=id, name=name, type=type, data=data,
                        zone=zone, driver=self, extra=extra)
        return record
def _get_more(self, last_key, value_dict):
# Note: last_key in this case really is a "last_page".
# TODO: Update base driver and change last_key to something more
# generic - e.g. marker
params = {}
params['per_page'] = ITEMS_PER_PAGE
params['page'] = last_key + 1 if last_key else 1
transform_func_kwargs = {}
if value_dict['type'] == 'zones':
path = API_ROOT + 'zones.xml'
response = self.connection.request(path)
transform_func = self._to_zones
elif value_dict['type'] == 'records':
zone = value_dict['zone']
path = API_ROOT + 'zones/%s/hosts.xml' % (zone.id)
self.connection.set_context({'resource': 'zone', 'id': zone.id})
response = self.connection.request(path, params=params)
transform_func = self._to_records
transform_func_kwargs['zone'] = value_dict['zone']
exhausted = False
result_count = int(response.headers.get('x-query-count', 0))
transform_func_kwargs['elem'] = response.object
if (params['page'] * ITEMS_PER_PAGE) >= result_count:
exhausted = True
if response.status == httplib.OK:
items = transform_func(**transform_func_kwargs)
return items, params['page'], exhausted
else:
return [], None, True
| apache-2.0 |
jakeva/bitcoin-pwcheck | qa/rpc-tests/mempool_resurrect_test.py | 150 | 3478 | #!/usr/bin/env python2
# Copyright (c) 2014 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test resurrection of mined transactions when
# the blockchain is re-organized.
#
from test_framework import BitcoinTestFramework
from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
from util import *
import os
import shutil
# Create one-input, one-output, no-fee transaction:
class MempoolCoinbaseTest(BitcoinTestFramework):
    """Test that transactions from disconnected blocks are resurrected
    into the mempool after a re-org (see file header)."""

    def setup_network(self):
        # Just need one node for this test
        args = ["-checkmempool", "-debug=mempool"]
        self.nodes = []
        self.nodes.append(start_node(0, self.options.tmpdir, args))
        self.is_network_split = False

    def create_tx(self, from_txid, to_address, amount):
        """Build and sign a one-input (vout 0), one-output, no-fee
        transaction; returns the signed raw hex."""
        inputs = [{ "txid" : from_txid, "vout" : 0}]
        outputs = { to_address : amount }
        rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
        signresult = self.nodes[0].signrawtransaction(rawtx)
        # Signing must fully complete -- these inputs are our own coins.
        assert_equal(signresult["complete"], True)
        return signresult["hex"]

    def run_test(self):
        node0_address = self.nodes[0].getnewaddress()
        # Spend block 1/2/3's coinbase transactions
        # Mine a block.
        # Create three more transactions, spending the spends
        # Mine another block.
        # ... make sure all the transactions are confirmed
        # Invalidate both blocks
        # ... make sure all the transactions are put back in the mempool
        # Mine a new block
        # ... make sure all the transactions are confirmed again.

        b = [ self.nodes[0].getblockhash(n) for n in range(1, 4) ]
        coinbase_txids = [ self.nodes[0].getblock(h)['tx'][0] for h in b ]
        # First generation: spend the 50-coin coinbases.
        spends1_raw = [ self.create_tx(txid, node0_address, 50) for txid in coinbase_txids ]
        spends1_id = [ self.nodes[0].sendrawtransaction(tx) for tx in spends1_raw ]

        blocks = []
        blocks.extend(self.nodes[0].setgenerate(True, 1))

        # Second generation: spend the first-generation outputs (49.99
        # leaves 0.01 as fee so the txs actually get mined).
        spends2_raw = [ self.create_tx(txid, node0_address, 49.99) for txid in spends1_id ]
        spends2_id = [ self.nodes[0].sendrawtransaction(tx) for tx in spends2_raw ]

        blocks.extend(self.nodes[0].setgenerate(True, 1))

        # mempool should be empty, all txns confirmed
        assert_equal(set(self.nodes[0].getrawmempool()), set())
        for txid in spends1_id+spends2_id:
            tx = self.nodes[0].gettransaction(txid)
            assert(tx["confirmations"] > 0)

        # Use invalidateblock to re-org back; all transactions should
        # end up unconfirmed and back in the mempool
        for node in self.nodes:
            node.invalidateblock(blocks[0])

        # mempool should be empty, all txns confirmed
        assert_equal(set(self.nodes[0].getrawmempool()), set(spends1_id+spends2_id))
        for txid in spends1_id+spends2_id:
            tx = self.nodes[0].gettransaction(txid)
            assert(tx["confirmations"] == 0)

        # Generate another block, they should all get mined
        self.nodes[0].setgenerate(True, 1)
        # mempool should be empty, all txns confirmed
        assert_equal(set(self.nodes[0].getrawmempool()), set())
        for txid in spends1_id+spends2_id:
            tx = self.nodes[0].gettransaction(txid)
            assert(tx["confirmations"] > 0)
if __name__ == '__main__':
MempoolCoinbaseTest().main()
| mit |
SiriusParadox/sys_scripts | sys_cache_ratio.py | 1 | 1894 | #!/usr/bin/env python
from __future__ import print_function
import os
import re
import sys
import six
import threading
__author__ = 'Maurice Green'
__purpose__ = 'System Cahce Hit Ratio'
class Proc(threading.Thread):
    """Small helper for reading per-process information from /proc.

    NOTE(review): subclassing threading.Thread appears vestigial -- the
    thread is never start()ed anywhere in this script; confirm before
    removing the base class.
    """

    def __init__(self):
        threading.Thread.__init__(self)
        uname = os.uname()
        if uname[0] == 'Linux':
            self.proc = "/proc"
        else:
            # No /proc layout we understand; self.proc stays unset, so
            # later calls to paths() would fail with AttributeError.
            print("OS: {} NOT Supported".format(uname[0]))

    def paths(self, *args):
        """Join the given components (stringified) under the /proc root."""
        return os.path.join(self.proc, *(str(x) for x in args))

    def name(self, target_file):
        """Return the first line of target_file (e.g. the process name in
        /proc/<pid>/comm), or None if the file cannot be read."""
        try:
            with open(target_file, 'r') as f:
                return f.readline()
        except (IOError, OSError):
            # The process may exit between listing and read; report "no
            # name" instead of crashing.  (Was a bare `except: pass`,
            # which also swallowed KeyboardInterrupt/SystemExit.)
            return None
proc = Proc()
def getPids():
    """Print a cache-hit report (via getHits) for every numeric /proc
    entry, i.e. every live PID, and return the per-PID results."""
    pids = [inode for inode in os.listdir('/proc') if inode.isdigit()]
    # A list comprehension, not map(): on Python 3 map() is lazy, so the
    # getHits calls would be discarded unevaluated and nothing printed.
    return [getHits(pid) for pid in pids]
def printTable(pc, cchr, mr):
    """Pretty-print one process's cache hit/miss ratios between rules."""
    rule = "=" * 100
    print("{}".format(rule))
    print("Process: {}\n".format(pc))
    print("Cache Hit Ratio: {:.2f}%".format(float(cchr)))
    print("Miss Hit Ratio: {:.2f}%".format(float(mr)))
    print("{}".format(rule))
def getHits(pid):
    """Compute and print cache hit/miss ratios for one PID from its
    /proc/<pid>/io counters (rchar vs read_bytes)."""
    byte_access = dict()
    io_file = proc.paths(pid, 'io')
    pc_name = proc.name(proc.paths(pid, 'comm'))
    with open(io_file, 'r') as ioholder:
        dtx = ioholder.read().split()
    # /proc/<pid>/io is a sequence of "key: value" pairs, so token 2*i is
    # a key (with trailing colon) and token 2*i+1 its value.
    # Bug fix: the previous dtx[idx]-based pairing mis-associated every
    # key after the first (e.g. 'read_bytes:' picked up the syscr count).
    for idx, sts in enumerate(dtx[::2]):
        byte_access[sts] = dtx[2 * idx + 1]
    # read_bytes = actually hit the storage layer; rchar = all read I/O.
    miss_ratio_n = (100 * int(byte_access.get('read_bytes:')))
    miss_ratio_d = (int(byte_access.get('rchar:')))
    try:
        miss_ratio = (float(miss_ratio_n) / float(miss_ratio_d))
        cache_hit_ratio = float(100 - miss_ratio)
        printTable(pc_name, cache_hit_ratio, miss_ratio)
    except ZeroDivisionError:
        # Process has performed no read I/O at all; skip it silently.
        pass
if __name__ == "__main__":
getPids()
| gpl-3.0 |
leansoft/edx-platform | common/test/acceptance/pages/lms/dashboard.py | 41 | 6349 | # -*- coding: utf-8 -*-
"""
Student dashboard page.
"""
from bok_choy.page_object import PageObject
from . import BASE_URL
class DashboardPage(PageObject):
    """
    Student dashboard, where the student can view
    courses she/he has registered for.
    """
    def __init__(self, browser):
        """Initialize the page.

        Arguments:
            browser (Browser): The browser instance.
        """
        super(DashboardPage, self).__init__(browser)
        # NOTE(review): this assigns a *local* variable that is immediately
        # discarded -- the page URL is never stored on the object.  It
        # looks like it was meant to be a class-level `url` attribute;
        # confirm before relying on visit()/browse-to behavior.
        url = "{base}/dashboard".format(base=BASE_URL)

    def is_browser_on_page(self):
        # The "My Courses" section only renders on the dashboard itself.
        return self.q(css='section.my-courses').present

    @property
    def current_courses_text(self):
        """
        This is the title label for the section of the student dashboard that
        shows all the courses that the student is enrolled in.
        The string displayed is defined in lms/templates/dashboard.html.
        """
        # NOTE(review): this selector uses an id ('section#my-courses')
        # while is_browser_on_page uses a class ('section.my-courses');
        # confirm both still match the template.
        text_items = self.q(css='section#my-courses').text
        if len(text_items) > 0:
            return text_items[0]
        else:
            return ""
    @property
    def available_courses(self):
        """
        Return list of the names of available courses (e.g. "999 edX Demonstration Course")
        """
        def _get_course_name(el):
            return el.text

        return self.q(css='h3.course-title > a').map(_get_course_name).results

    @property
    def banner_text(self):
        """
        Return the text of the banner on top of the page, or None if
        the banner is not present.
        """
        message = self.q(css='div.wrapper-msg')
        if message.present:
            return message.text[0]
        return None

    def get_enrollment_mode(self, course_name):
        """Get the enrollment mode for a given course on the dashboard.

        Arguments:
            course_name (str): The name of the course whose mode should be retrieved.

        Returns:
            String, indicating the enrollment mode for the course corresponding to
            the provided course name.

        Raises:
            Exception, if no course with the provided name is found on the dashboard.
        """
        # Filter elements by course name, only returning the relevant course item
        course_listing = self.q(css=".course").filter(lambda el: course_name in el.text).results

        if course_listing:
            # There should only be one course listing for the provided course name.
            # Since 'ENABLE_VERIFIED_CERTIFICATES' is true in the Bok Choy settings, we
            # can expect two classes to be present on <article> elements, one being 'course'
            # and the other being the enrollment mode.
            enrollment_mode = course_listing[0].get_attribute('class').split('course ')[1]
        else:
            raise Exception("No course named {} was found on the dashboard".format(course_name))

        return enrollment_mode
    def upgrade_enrollment(self, course_name, upgrade_page):
        """Interact with the upgrade button for the course with the provided name.

        Arguments:
            course_name (str): The name of the course whose mode should be checked.
            upgrade_page (PageObject): The page to wait on after clicking the upgrade button. Importing
                the definition of PaymentAndVerificationFlow results in a circular dependency.

        Raises:
            Exception, if no enrollment corresponding to the provided course name appears
            on the dashboard.
        """
        # Filter elements by course name, only returning the relevant course item
        course_listing = self.q(css=".course").filter(lambda el: course_name in el.text).results

        if course_listing:
            # There should only be one course listing corresponding to the provided course name.
            el = course_listing[0]

            # Click the upgrade button
            el.find_element_by_css_selector('#upgrade-to-verified').click()

            # Block until the target page renders so callers can proceed.
            upgrade_page.wait_for_page()
        else:
            raise Exception("No enrollment for {} is visible on the dashboard.".format(course_name))

    def view_course(self, course_id):
        """
        Go to the course with `course_id` (e.g. edx/Open_DemoX/edx_demo_course)

        Logs a warning (instead of raising) when the course link is absent.
        """
        link_css = self._link_css(course_id)

        if link_css is not None:
            self.q(css=link_css).first.click()
        else:
            msg = "No links found for course {0}".format(course_id)
            self.warning(msg)
def _link_css(self, course_id):
"""
Return a CSS selector for the link to the course with `course_id`.
"""
# Get the link hrefs for all courses
all_links = self.q(css='a.enter-course').map(lambda el: el.get_attribute('href')).results
# Search for the first link that matches the course id
link_index = None
for index in range(len(all_links)):
if course_id in all_links[index]:
link_index = index
break
if link_index is not None:
return "a.enter-course:nth-of-type({0})".format(link_index + 1)
else:
return None
    def pre_requisite_message_displayed(self):
        """
        Verify if pre-requisite course messages are being displayed.
        """
        return self.q(css='li.prerequisites > .tip').visible

    def get_course_listings(self):
        """Retrieve the list of course DOM elements"""
        return self.q(css='ul.listing-courses')

    def get_course_social_sharing_widget(self, widget_name):
        """ Retrieves the specified social sharing widget by its classification """
        return self.q(css='a.action-{}'.format(widget_name))

    def click_username_dropdown(self):
        """
        Click username dropdown.
        """
        self.q(css='.dropdown').first.click()

    @property
    def username_dropdown_link_text(self):
        """
        Return list username dropdown links.
        """
        return self.q(css='.dropdown-menu li a').text

    def click_my_profile_link(self):
        """
        Click on `Profile` link.

        (Dropdown item indices: 1 == Profile, 2 == Account -- keep in sync
        with the LMS header template.)
        """
        self.q(css='.dropdown-menu li a').nth(1).click()

    def click_account_settings_link(self):
        """
        Click on `Account` link.
        """
        self.q(css='.dropdown-menu li a').nth(2).click()
| agpl-3.0 |
sudhof/politeness | features/vectorizer.py | 3 | 4790 |
import os
import cPickle
import string
import nltk
from itertools import chain
from collections import defaultdict
# local import
from politeness_strategies import get_politeness_strategy_features
# Will need access to local dir
# for support files
LOCAL_DIR = os.path.split(__file__)[0]
def get_unigrams_and_bigrams(document):
    """
    Grabs unigrams and bigrams from document
    sentences. NLTK does the work.

    Returns (unigrams, bigrams).  NOTE(review): both returned values are
    single-pass itertools.chain iterators, not lists -- callers must
    materialize them before iterating more than once.  (Also note this
    relies on Python 2's eager map(); on Python 3 `unigram_lists` would be
    a one-shot iterator and the second chain() would see nothing.)
    """
    # Get unigram list per sentence:
    unigram_lists = map(lambda x: nltk.word_tokenize(x), document['sentences'])
    # Generate bigrams from all sentences:
    bigrams = chain(*map(lambda x: nltk.bigrams(x), unigram_lists))
    # Chain unigram lists
    unigrams = chain(*unigram_lists)
    return unigrams, bigrams
class PolitenessFeatureVectorizer:
    """
    Returns document features based on-

        - unigrams and bigrams
        - politeness strategies
            (inspired by B&L, modeled using dependency parses)
    """

    # Pickled vocabulary lists, produced by generate_bow_features().
    UNIGRAMS_FILENAME = os.path.join(LOCAL_DIR, "featunigrams.p")
    BIGRAMS_FILENAME = os.path.join(LOCAL_DIR, "featbigrams.p")

    def __init__(self):
        """
        Load pickled lists of unigram and bigram features
        These lists can be generated using the training set
        and PolitenessFeatureVectorizer.generate_bow_features
        """
        # NOTE(review): the file objects passed to cPickle.load are never
        # explicitly closed; CPython refcounting closes them, but a
        # `with open(...)` block would be safer.
        self.unigrams = cPickle.load(open(self.UNIGRAMS_FILENAME))
        self.bigrams = cPickle.load(open(self.BIGRAMS_FILENAME))
    def features(self, document):
        """
        document must be a dict of the following format--

            {
                'sentences': ["sentence str"],
                'parses': [[dependency parse list]]
            }

        Returns a dict mapping feature names (UNIGRAM_*, BIGRAM_*,
        feature_politeness_*) to binary values.
        """
        feature_dict = {}
        # Add unigram, bigram features:
        feature_dict.update(self._get_term_features(document))
        # Add politeness strategy features:
        feature_dict.update(get_politeness_strategy_features(document))
        return feature_dict
def _get_term_features(self, document):
# One binary feature per ngram in
# in self.unigrams and self.bigrams
unigrams, bigrams = get_unigrams_and_bigrams(document)
# Add unigrams to document for later use
document['unigrams'] = unigrams
unigrams, bigrams = set(unigrams), set(bigrams)
f = {}
f.update(dict(map(lambda x: ("UNIGRAM_" + str(x), 1 if x in unigrams else 0), self.unigrams)))
f.update(dict(map(lambda x: ("BIGRAM_" + str(x), 1 if x in bigrams else 0), self.bigrams)))
return f
@staticmethod
def generate_bow_features(documents, min_unigram_count=20, min_bigram_count=20):
"""
Given a list of documents, compute and store list of unigrams and bigrams
with a frequency > min_unigram_count and min_bigram_count, respectively.
This method must be called prior to the first vectorizer instantiation.
documents -
each document must be a dict
{
'sentences': ["sentence one string", "sentence two string"],
'parses': [ ["dep(a,b)"], ["dep(b,c)"] ]
}
"""
punctuation = string.punctuation
punctuation = punctuation.replace("?","").replace("!","")
unigram_counts, bigram_counts = defaultdict(int), defaultdict(int)
# Count unigrams and bigrams:
for d in documents:
unigrams, bigrams = get_unigrams_and_bigrams(d)
# Count
for w in unigrams:
unigram_counts[w] += 1
for w in bigrams:
bigram_counts[w] += 1
# Keep only ngrams that pass frequency threshold:
unigram_features = filter(lambda x: unigram_counts[x] > min_unigram_count, unigram_counts.iterkeys())
bigram_features = filter(lambda x: bigram_counts[x] > min_bigram_count, bigram_counts.iterkeys())
# Save results:
cPickle.dump(unigram_features, open(PolitenessFeatureVectorizer.UNIGRAMS_FILENAME, 'w'))
cPickle.dump(bigram_features, open(PolitenessFeatureVectorizer.BIGRAMS_FILENAME, 'w'))
if __name__ == "__main__":
"""
Extract features from test documents
"""
from test_documents import TEST_DOCUMENTS
vectorizer = PolitenessFeatureVectorizer()
for doc in TEST_DOCUMENTS:
f = vectorizer.features(doc)
# Print summary of features that are present
print "\n===================="
print "Text: ", doc['text']
print "\tUnigrams, Bigrams: %d" % len(filter(lambda x: f[x] > 0 and ("UNIGRAM_" in x or "BIGRAM_" in x), f.iterkeys()))
print "\tPoliteness Strategies: \n\t\t%s" % "\n\t\t".join(filter(lambda x: f[x] > 0 and "feature_politeness_" in x, f.iterkeys()))
print "\n"
| apache-2.0 |
gmr/tredis | tests/sortedsets_tests.py | 1 | 6583 | import mock
from tornado import testing
from tredis import exceptions
from . import base
class SortedSetTests(base.AsyncTestCase):
    """Integration tests for the sorted-set commands (ZADD and friends)
    against a live Redis server."""

    @testing.gen_test
    def test_zadd_single(self):
        key, value = self.uuid4(2)
        result = yield self.client.zadd(key, '1', value)
        self.assertEqual(result, 1)

    @testing.gen_test
    def test_zadd_multiple(self):
        key, value1, value2, value3 = self.uuid4(4)
        result = yield self.client.zadd(key, '1', value1, '2', value2,
                                        '3', value3)
        self.assertEqual(result, 3)

    @testing.gen_test
    def test_zadd_dict(self):
        # Score/member pairs may also be supplied as a single mapping.
        key, value1, value2, value3 = self.uuid4(4)
        result = yield self.client.zadd(key, {'1': value1, '2': value2,
                                              '3': value3})
        self.assertEqual(result, 3)

    @testing.gen_test
    def test_zadd_multiple_dupe(self):
        # A duplicated member only counts once toward the "added" total.
        key, value1, value2, value3 = self.uuid4(4)
        result = yield self.client.zadd(key, '1', value1, '2', value2,
                                        '3', value3, '4', value3)
        self.assertEqual(result, 3)

    @testing.gen_test
    def test_zadd_ch(self):
        # With CH, the reply counts *changed* elements, not just added.
        key, value1, value2, value3 = self.uuid4(4)
        result = yield self.client.zadd(key, '1', value1, '2', value2)
        self.assertEqual(result, 2)
        result = yield self.client.zadd(key, '2', value1, '3', value2,
                                        '4', value3, ch=True)
        self.assertEqual(result, 3)

    @testing.gen_test
    def test_zadd_xx(self):
        # XX only updates existing members, so nothing new is added.
        key, value1, value2, value3 = self.uuid4(4)
        result = yield self.client.zadd(key, '1', value1, '2', value2)
        self.assertEqual(result, 2)
        result = yield self.client.zadd(key, '2', value1, '3', value2,
                                        '4', value3, xx=True)
        self.assertEqual(result, 0)

    @testing.gen_test
    def test_zadd_nx(self):
        # NX only adds new members; with CH the reply is that one addition.
        key, value1, value2, value3 = self.uuid4(4)
        result = yield self.client.zadd(key, '1', value1, '2', value2)
        self.assertEqual(result, 2)
        result = yield self.client.zadd(key, '2', value1, '3', value2,
                                        '4', value3, nx=True, ch=True)
        self.assertEqual(result, 1)

    @testing.gen_test
    def test_zadd_incr(self):
        # INCR turns ZADD into ZINCRBY; the reply is the member's new score.
        key, value = self.uuid4(2)
        result = yield self.client.zadd(key, '1', value)
        self.assertEqual(result, 1)
        result = yield self.client.zadd(key, '10', value, incr=True)
        self.assertEqual(result, b'11')

    @testing.gen_test
    def test_zadd_with_error(self):
        # Errors from the connection layer must propagate to the caller.
        key, score, value = self.uuid4(3)
        self._execute_result = exceptions.RedisError('Test Exception')
        with mock.patch.object(self.client, '_execute', self._execute):
            with self.assertRaises(exceptions.RedisError):
                yield self.client.zadd(key, score, value)
    @testing.gen_test
    def test_zcard_with_extant_set(self):
        key, value1, value2, value3 = self.uuid4(4)
        result = yield self.client.zadd(key, '1', value1, '2', value2,
                                        '3', value3)
        self.assertEqual(result, 3)
        result = yield self.client.zcard(key)
        self.assertEqual(result, 3)

    @testing.gen_test
    def test_zcard_with_nonextant_set(self):
        # ZCARD on a missing key reports an empty set, not an error.
        key = self.uuid4()
        result = yield self.client.zcard(key)
        self.assertEqual(result, 0)

    @testing.gen_test
    def test_zrangebyscore(self):
        # Both range endpoints are inclusive by default.
        key, value1, value2, value3 = self.uuid4(4)
        result = yield self.client.zadd(key, '1', value1, '2', value2,
                                        '3', value3)
        self.assertEqual(result, 3)
        result = yield self.client.zrangebyscore(key, '1', '2')
        self.assertListEqual(result, [value1, value2])

    @testing.gen_test
    def test_zrangebyscore_withitems(self):
        # WITHSCORES interleaves member and score in the reply.
        key, value1, value2, value3 = self.uuid4(4)
        result = yield self.client.zadd(key, '1', value1, '2', value2,
                                        '3', value3)
        self.assertEqual(result, 3)
        result = yield self.client.zrangebyscore(key, '1', '2',
                                                 with_scores=True)
        self.assertListEqual(result, [value1, b'1', value2, b'2'])

    @testing.gen_test
    def test_zrangebyscore_offset(self):
        key, value1, value2, value3 = self.uuid4(4)
        result = yield self.client.zadd(key, '1', value1, '2', value2,
                                        '3', value3)
        self.assertEqual(result, 3)
        result = yield self.client.zrangebyscore(key, '1', '2',
                                                 offset=1, count=20)
        self.assertListEqual(result, [value2])

    @testing.gen_test
    def test_zrangebyscore_count(self):
        key, value1, value2, value3 = self.uuid4(4)
        result = yield self.client.zadd(key, '1', value1, '2', value2,
                                        '3', value3)
        self.assertEqual(result, 3)
        result = yield self.client.zrangebyscore(key, '1', '3',
                                                 offset=0, count=1)
        self.assertListEqual(result, [value1])

    @testing.gen_test
    def test_zremrangebyscore(self):
        # Returns the number of members removed from the set.
        key, value1, value2, value3 = self.uuid4(4)
        result = yield self.client.zadd(key, '1', value1, '2', value2,
                                        '3', value3)
        self.assertEqual(result, 3)
        result = yield self.client.zremrangebyscore(key, '1', '2')
        self.assertEqual(result, 2)

    @testing.gen_test
    def test_zremrangebyscore_inf(self):
        # '(1' is an exclusive lower bound; 'inf' is the open upper bound.
        key, value1, value2, value3 = self.uuid4(4)
        result = yield self.client.zadd(key, '1', value1, '2', value2,
                                        '3', value3)
        self.assertEqual(result, 3)
        result = yield self.client.zremrangebyscore(key, '(1', 'inf')
        self.assertEqual(result, 2)
@testing.gen_test
def test_zscore_with_member_of_set(self):
key, value1, value2, value3 = self.uuid4(4)
result = yield self.client.zadd(key, '1', value1, '2', value2,
'3', value3)
self.assertEqual(result, 3)
result = yield self.client.zscore(key, value1)
self.assertEqual(result, b'1')
@testing.gen_test
def test_zscore_with_nonmember_of_set(self):
key, value1 = self.uuid4(2)
result = yield self.client.zscore(key, value1)
self.assertEqual(result, None)
| bsd-3-clause |
leiferikb/bitpop | src/tools/grit/grit/format/policy_templates/writers/xml_writer_base_unittest.py | 37 | 1373 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unittests for grit.format.policy_templates.writers.admx_writer."""
import re
import unittest
class XmlWriterBaseTest(unittest.TestCase):
  '''Base class for XML writer unit-tests.'''

  def GetXMLOfChildren(self, parent):
    '''Returns the XML of all child nodes of the given parent node.

    Args:
      parent: The XML of the children of this node will be returned.

    Return: XML of the children of the parent node.
    '''
    raw_pretty_xml = ''.join(
        child.toprettyxml(indent='  ') for child in parent.childNodes)
    # Python 2.6.5 which is present in Lucid has bug in its pretty print
    # function which produces new lines around string literals. This has been
    # fixed in Precise which has Python 2.7.3 but we have to keep compatibility
    # with both for now.
    # Raw strings keep the regex escapes (\s) out of the hands of the Python
    # string parser; '\s' in a plain literal is an invalid escape on
    # Python 3.6+ and will eventually become a SyntaxError.
    text_re = re.compile(r'>\n\s+([^<>\s].*?)\n\s*</', re.DOTALL)
    return text_re.sub(r'>\g<1></', raw_pretty_xml)

  def AssertXMLEquals(self, output, expected_output):
    '''Asserts if the passed XML arguments are equal.

    Args:
      output: Actual XML text.
      expected_output: Expected XML text.
    '''
    # assertEquals is a deprecated alias that was removed in Python 3.12;
    # use the canonical assertEqual.
    self.assertEqual(output.strip(), expected_output.strip())
| gpl-3.0 |
davidjb/sqlalchemy | test/orm/test_unitofwork.py | 21 | 84951 | # coding: utf-8
"""Tests unitofwork operations."""
from sqlalchemy.testing import eq_, assert_raises, assert_raises_message
import datetime
from sqlalchemy.orm import mapper as orm_mapper
import sqlalchemy as sa
from sqlalchemy.util import u, ue, b
from sqlalchemy import Integer, String, ForeignKey, \
literal_column, event, Boolean
from sqlalchemy.testing import engines
from sqlalchemy import testing
from sqlalchemy.testing.schema import Table
from sqlalchemy.testing.schema import Column
from sqlalchemy.orm import mapper, relationship, create_session, \
column_property, Session, exc as orm_exc
from sqlalchemy.testing import fixtures
from test.orm import _fixtures
from sqlalchemy.testing.assertsql import AllOf, CompiledSQL
class UnitOfWorkTest(object):
    # Empty marker base class; no shared unit-of-work fixtures defined here.
    pass
class HistoryTest(_fixtures.FixtureTest):
    """Round-trips a bidirectional (backref) relationship through a commit."""

    run_inserts = None

    @classmethod
    def setup_classes(cls):
        class User(cls.Comparable):
            pass

        class Address(cls.Comparable):
            pass

    def test_backref(self):
        """Setting the many-to-one side populates the one-to-many backref,
        and the linkage survives a commit/expunge/reload cycle."""
        Address, addresses, users, User = (self.classes.Address,
                                           self.tables.addresses,
                                           self.tables.users,
                                           self.classes.User)

        am = mapper(Address, addresses)
        m = mapper(User, users, properties=dict(
            addresses=relationship(am, backref='user', lazy='joined')))

        session = create_session(autocommit=False)

        u = User(name='u1')
        a = Address(email_address='u1@e')
        # assigning the scalar side should append to the collection side
        a.user = u

        session.add(u)
        eq_(u.addresses, [a])
        session.commit()
        session.expunge_all()

        u = session.query(m).one()
        assert u.addresses[0].user == u
        session.close()
class UnicodeTest(fixtures.MappedTest):
    """Persist and reload non-ASCII unicode values, plain and via a
    relationship whose join column is a unicode type."""

    __requires__ = ('unicode_connections',)

    @classmethod
    def define_tables(cls, metadata):
        # Unicode column; on MySQL use a case-insensitive utf8 collation.
        uni_type = sa.Unicode(50).with_variant(
            sa.Unicode(50, collation="utf8_unicode_ci"), "mysql")

        Table('uni_t1', metadata,
              Column('id', Integer, primary_key=True,
                     test_needs_autoincrement=True),
              Column('txt', uni_type, unique=True))
        Table('uni_t2', metadata,
              Column('id', Integer, primary_key=True,
                     test_needs_autoincrement=True),
              Column('txt', uni_type, ForeignKey('uni_t1')))

    @classmethod
    def setup_classes(cls):
        class Test(cls.Basic):
            pass

        class Test2(cls.Basic):
            pass

    def test_basic(self):
        """A unicode value survives insert and commit unchanged."""
        Test, uni_t1 = self.classes.Test, self.tables.uni_t1

        mapper(Test, uni_t1)

        txt = ue("\u0160\u0110\u0106\u010c\u017d")
        t1 = Test(id=1, txt=txt)
        self.assert_(t1.txt == txt)

        session = create_session(autocommit=False)
        session.add(t1)
        session.commit()

        self.assert_(t1.txt == txt)

    def test_relationship(self):
        """Children joined through a unicode FK column round-trip intact."""
        Test, uni_t2, uni_t1, Test2 = (self.classes.Test,
                                       self.tables.uni_t2,
                                       self.tables.uni_t1,
                                       self.classes.Test2)

        mapper(Test, uni_t1, properties={
            't2s': relationship(Test2)})
        mapper(Test2, uni_t2)

        txt = ue("\u0160\u0110\u0106\u010c\u017d")
        t1 = Test(txt=txt)
        t1.t2s.append(Test2())
        t1.t2s.append(Test2())
        session = create_session(autocommit=False)
        session.add(t1)
        session.commit()
        session.close()

        session = create_session()
        t1 = session.query(Test).filter_by(id=t1.id).one()
        assert len(t1.t2s) == 2
class UnicodeSchemaTest(fixtures.MappedTest):
    """Mappings against tables and columns whose *names* are non-ASCII,
    exercised for both plain relationships and joined inheritance."""

    __requires__ = ('unicode_connections', 'unicode_ddl',)

    run_dispose_bind = 'once'

    @classmethod
    def define_tables(cls, metadata):
        # Column keys ('a', 'b', ...) give ASCII handles for the
        # unicode-named database columns.
        t1 = Table('unitable1', metadata,
                   Column(u('méil'), Integer, primary_key=True, key='a',
                          test_needs_autoincrement=True),
                   Column(ue('\u6e2c\u8a66'), Integer, key='b'),
                   Column('type', String(20)),
                   test_needs_fk=True,
                   test_needs_autoincrement=True)
        t2 = Table(u('Unitéble2'), metadata,
                   Column(u('méil'), Integer, primary_key=True, key="cc",
                          test_needs_autoincrement=True),
                   Column(ue('\u6e2c\u8a66'), Integer,
                          ForeignKey('unitable1.a'), key="d"),
                   Column(ue('\u6e2c\u8a66_2'), Integer, key="e"),
                   test_needs_fk=True,
                   test_needs_autoincrement=True)

        cls.tables['t1'] = t1
        cls.tables['t2'] = t2

    @classmethod
    def setup_class(cls):
        super(UnicodeSchemaTest, cls).setup_class()

    @classmethod
    def teardown_class(cls):
        super(UnicodeSchemaTest, cls).teardown_class()

    @testing.fails_on('mssql+pyodbc',
                      'pyodbc returns a non unicode encoding of the results description.')
    def test_mapping(self):
        """Save/reload a parent with a child through the unicode-named FK,
        querying by table column, eager option, and mapped attribute."""
        t2, t1 = self.tables.t2, self.tables.t1

        class A(fixtures.ComparableEntity):
            pass

        class B(fixtures.ComparableEntity):
            pass

        mapper(A, t1, properties={
            't2s': relationship(B)})
        mapper(B, t2)

        a1 = A()
        b1 = B()
        a1.t2s.append(b1)

        session = create_session()
        session.add(a1)
        session.flush()
        session.expunge_all()

        new_a1 = session.query(A).filter(t1.c.a == a1.a).one()
        assert new_a1.a == a1.a
        assert new_a1.t2s[0].d == b1.d
        session.expunge_all()

        new_a1 = (session.query(A).options(sa.orm.joinedload('t2s')).
                  filter(t1.c.a == a1.a)).one()
        assert new_a1.a == a1.a
        assert new_a1.t2s[0].d == b1.d
        session.expunge_all()

        new_a1 = session.query(A).filter(A.a == a1.a).one()
        assert new_a1.a == a1.a
        assert new_a1.t2s[0].d == b1.d
        session.expunge_all()

    @testing.fails_on('mssql+pyodbc',
                      'pyodbc returns a non unicode encoding of the results description.')
    def test_inheritance_mapping(self):
        """Joined-table inheritance over the unicode-named tables."""
        t2, t1 = self.tables.t2, self.tables.t1

        class A(fixtures.ComparableEntity):
            pass

        class B(A):
            pass

        mapper(A, t1,
               polymorphic_on=t1.c.type,
               polymorphic_identity='a')
        mapper(B, t2,
               inherits=A,
               polymorphic_identity='b')
        a1 = A(b=5)
        b1 = B(e=7)

        session = create_session()
        session.add_all((a1, b1))
        session.flush()
        session.expunge_all()

        eq_([A(b=5), B(e=7)], session.query(A).all())
class BinaryHistTest(fixtures.MappedTest, testing.AssertsExecutionResults):
    """Attribute history for LargeBinary columns: re-assigning an equal
    bytes value must not register as a change or emit an UPDATE."""

    @classmethod
    def define_tables(cls, metadata):
        Table('t1', metadata,
              Column('id', sa.Integer, primary_key=True,
                     test_needs_autoincrement=True),
              Column('data', sa.LargeBinary),
              )

    @classmethod
    def setup_classes(cls):
        class Foo(cls.Basic):
            pass

    def test_binary_equality(self):
        Foo, t1 = self.classes.Foo, self.tables.t1

        data = b("this is some data")

        mapper(Foo, t1)

        s = create_session()

        f1 = Foo(data=data)
        s.add(f1)
        s.flush()
        s.expire_all()
        f1 = s.query(Foo).first()
        assert f1.data == data
        # setting the same value again: history must show no net change
        f1.data = data
        eq_(
            sa.orm.attributes.get_history(f1, "data"),
            ((), [data], ())
        )

        def go():
            s.flush()
        # equal value -> no UPDATE statement should be emitted
        self.assert_sql_count(testing.db, go, 0)
class PKTest(fixtures.MappedTest):
    """Flushing rows with composite primary keys: auto-generated,
    fully manual, and column-key-aliased PK columns."""

    @classmethod
    def define_tables(cls, metadata):
        Table('multipk1', metadata,
              Column('multi_id', Integer, primary_key=True,
                     test_needs_autoincrement=True),
              Column('multi_rev', Integer, primary_key=True),
              Column('name', String(50), nullable=False),
              Column('value', String(100)))

        Table('multipk2', metadata,
              Column('pk_col_1', String(30), primary_key=True),
              Column('pk_col_2', String(30), primary_key=True),
              Column('data', String(30)))

        Table('multipk3', metadata,
              Column('pri_code', String(30), key='primary', primary_key=True),
              Column('sec_code', String(30), key='secondary', primary_key=True),
              Column('date_assigned', sa.Date, key='assigned', primary_key=True),
              Column('data', String(30)))

    @classmethod
    def setup_classes(cls):
        class Entry(cls.Basic):
            pass

    # not supported on sqlite since sqlite's auto-pk generation only works with
    # single column primary keys
    @testing.fails_on('sqlite', 'FIXME: unknown')
    def test_primary_key(self):
        """Composite PK with one autoincrement column: the reloaded row
        carries the same identity key as the original instance."""
        Entry, multipk1 = self.classes.Entry, self.tables.multipk1

        mapper(Entry, multipk1)

        e = Entry(name='entry1', value='this is entry 1', multi_rev=2)

        session = create_session()
        session.add(e)
        session.flush()
        session.expunge_all()

        e2 = session.query(Entry).get((e.multi_id, 2))
        self.assert_(e is not e2)
        state = sa.orm.attributes.instance_state(e)
        state2 = sa.orm.attributes.instance_state(e2)
        eq_(state.key, state2.key)

    # this one works with sqlite since we are manually setting up pk values
    def test_manual_pk(self):
        Entry, multipk2 = self.classes.Entry, self.tables.multipk2

        mapper(Entry, multipk2)

        e = Entry(pk_col_1='pk1', pk_col_2='pk1_related', data='im the data')

        session = create_session()
        session.add(e)
        session.flush()

    def test_key_pks(self):
        """PK columns addressed by their Column key= alias, not db name."""
        Entry, multipk3 = self.classes.Entry, self.tables.multipk3

        mapper(Entry, multipk3)

        e = Entry(primary='pk1', secondary='pk2',
                  assigned=datetime.date.today(), data='some more data')

        session = create_session()
        session.add(e)
        session.flush()
class ForeignPKTest(fixtures.MappedTest):
    """Detection of the relationship direction on PK joins."""

    @classmethod
    def define_tables(cls, metadata):
        Table("people", metadata,
              Column('person', String(10), primary_key=True),
              Column('firstname', String(10)),
              Column('lastname', String(10)))

        Table("peoplesites", metadata,
              Column('person', String(10), ForeignKey("people.person"),
                     primary_key=True),
              Column('site', String(10)))

    @classmethod
    def setup_classes(cls):
        class Person(cls.Basic):
            pass

        class PersonSite(cls.Basic):
            pass

    def test_basic(self):
        """The PK-to-PK FK is recognized as one-to-many parent->child and
        the child row receives the parent's key value on flush."""
        peoplesites, PersonSite, Person, people = (self.tables.peoplesites,
                                                   self.classes.PersonSite,
                                                   self.classes.Person,
                                                   self.tables.people)

        m1 = mapper(PersonSite, peoplesites)
        m2 = mapper(Person, people, properties={
            'sites': relationship(PersonSite)})

        sa.orm.configure_mappers()
        # synchronize_pairs: parent column drives the child column
        eq_(list(m2.get_property('sites').synchronize_pairs),
            [(people.c.person, peoplesites.c.person)])

        p = Person(person='im the key', firstname='asdf')
        ps = PersonSite(site='asdf')
        p.sites.append(ps)

        session = create_session()
        session.add(p)
        session.flush()

        p_count = people.count(people.c.person == 'im the key').scalar()
        eq_(p_count, 1)
        eq_(peoplesites.count(peoplesites.c.person == 'im the key').scalar(), 1)
class ClauseAttributesTest(fixtures.MappedTest):
    """Assigning SQL expressions (not plain values) to mapped attributes:
    the expression is rendered in the UPDATE/INSERT and the attribute is
    refreshed from the database afterwards."""

    @classmethod
    def define_tables(cls, metadata):
        Table('users_t', metadata,
              Column('id', Integer, primary_key=True,
                     test_needs_autoincrement=True),
              Column('name', String(30)),
              Column('counter', Integer, default=1))

        Table('boolean_t', metadata,
              Column('id', Integer, primary_key=True,
                     test_needs_autoincrement=True),
              Column('value', Boolean),
              )

    @classmethod
    def setup_classes(cls):
        class User(cls.Comparable):
            pass

        class HasBoolean(cls.Comparable):
            pass

    @classmethod
    def setup_mappers(cls):
        User, users_t = cls.classes.User, cls.tables.users_t
        HasBoolean, boolean_t = cls.classes.HasBoolean, cls.tables.boolean_t
        mapper(User, users_t)
        mapper(HasBoolean, boolean_t)

    def test_update(self):
        """counter = User.counter + 1 is executed server-side; the Python
        value is fetched back (one SELECT)."""
        User = self.classes.User

        u = User(name='test')

        session = create_session()
        session.add(u)
        session.flush()
        eq_(u.counter, 1)
        u.counter = User.counter + 1
        session.flush()

        def go():
            assert (u.counter == 2) is True  # ensure it's not a ClauseElement
        self.sql_count_(1, go)

    def test_multi_update(self):
        """A plain value and a SQL expression updated in the same flush."""
        User = self.classes.User

        u = User(name='test')

        session = create_session()
        session.add(u)
        session.flush()
        eq_(u.counter, 1)
        u.name = 'test2'
        u.counter = User.counter + 1
        session.flush()

        def go():
            eq_(u.name, 'test2')
            assert (u.counter == 2) is True
        self.sql_count_(1, go)

        session.expunge_all()
        u = session.query(User).get(u.id)
        eq_(u.name, 'test2')
        eq_(u.counter, 2)

    def test_insert(self):
        """A scalar SELECT used as the INSERT value for a column."""
        User = self.classes.User

        u = User(name='test', counter=sa.select([5]))

        session = create_session()
        session.add(u)
        session.flush()

        assert (u.counter == 5) is True

    def test_update_special_comparator(self):
        HasBoolean = self.classes.HasBoolean

        # make sure the comparison we're shooting
        # for is invalid, otherwise we need to
        # test something else here
        assert_raises_message(
            TypeError,
            "Boolean value of this clause is not defined",
            bool, None == sa.false()
        )
        s = create_session()
        hb = HasBoolean(value=None)
        s.add(hb)
        s.flush()

        hb.value = sa.false()

        s.flush()

        # needs to be refreshed
        assert 'value' not in hb.__dict__
        eq_(hb.value, False)
class PassiveDeletesTest(fixtures.MappedTest):
    """passive_deletes=True: the ORM leaves child-row deletion to the
    database's ON DELETE CASCADE instead of emitting its own DELETEs."""

    __requires__ = ('foreign_keys',)

    @classmethod
    def define_tables(cls, metadata):
        Table('mytable', metadata,
              Column('id', Integer, primary_key=True,
                     test_needs_autoincrement=True),
              Column('data', String(30)),
              test_needs_fk=True)

        Table('myothertable', metadata,
              Column('id', Integer, primary_key=True,
                     test_needs_autoincrement=True),
              Column('parent_id', Integer),
              Column('data', String(30)),
              sa.ForeignKeyConstraint(['parent_id'],
                                      ['mytable.id'],
                                      ondelete="CASCADE"),
              test_needs_fk=True)

    @classmethod
    def setup_classes(cls):
        class MyClass(cls.Basic):
            pass

        class MyOtherClass(cls.Basic):
            pass

    def test_basic(self):
        """Deleting the parent removes all children via the FK cascade."""
        myothertable, MyClass, MyOtherClass, mytable = (self.tables.myothertable,
                                                        self.classes.MyClass,
                                                        self.classes.MyOtherClass,
                                                        self.tables.mytable)

        mapper(MyOtherClass, myothertable)
        mapper(MyClass, mytable, properties={
            'children': relationship(MyOtherClass,
                                     passive_deletes=True,
                                     cascade="all")})

        session = create_session()
        mc = MyClass()
        mc.children.append(MyOtherClass())
        mc.children.append(MyOtherClass())
        mc.children.append(MyOtherClass())
        mc.children.append(MyOtherClass())
        session.add(mc)
        session.flush()
        session.expunge_all()

        assert myothertable.count().scalar() == 4
        mc = session.query(MyClass).get(mc.id)
        session.delete(mc)
        session.flush()

        assert mytable.count().scalar() == 0
        assert myothertable.count().scalar() == 0

    @testing.emits_warning(r".*'passive_deletes' is normally configured on one-to-many")
    def test_backwards_pd(self):
        """Test that passive_deletes=True disables a delete from an m2o.

        This is not the usual usage and it now raises a warning, but test
        that it works nonetheless.

        """
        myothertable, MyClass, MyOtherClass, mytable = (self.tables.myothertable,
                                                        self.classes.MyClass,
                                                        self.classes.MyOtherClass,
                                                        self.tables.mytable)

        mapper(MyOtherClass, myothertable, properties={
            'myclass': relationship(MyClass, cascade="all, delete", passive_deletes=True)
        })
        mapper(MyClass, mytable)

        session = create_session()
        mc = MyClass()
        mco = MyOtherClass()
        mco.myclass = mc
        session.add(mco)
        session.flush()

        assert mytable.count().scalar() == 1
        assert myothertable.count().scalar() == 1

        session.expire(mco, ['myclass'])
        session.delete(mco)
        session.flush()

        # mytable wasn't deleted, is the point.
        assert mytable.count().scalar() == 1
        assert myothertable.count().scalar() == 0

    def test_aaa_m2o_emits_warning(self):
        """Configuring passive_deletes on a many-to-one emits a SAWarning."""
        myothertable, MyClass, MyOtherClass, mytable = (self.tables.myothertable,
                                                        self.classes.MyClass,
                                                        self.classes.MyOtherClass,
                                                        self.tables.mytable)

        mapper(MyOtherClass, myothertable, properties={
            'myclass': relationship(MyClass, cascade="all, delete", passive_deletes=True)
        })
        mapper(MyClass, mytable)
        assert_raises(sa.exc.SAWarning, sa.orm.configure_mappers)
class BatchDeleteIgnoresRowcountTest(fixtures.DeclarativeMappedTest):
    """With confirm_deleted_rows=False, a multi-row DELETE whose rowcount
    disagrees (self-referential ON DELETE CASCADE) does not raise."""

    __requires__ = ('foreign_keys',)

    @classmethod
    def setup_classes(cls):
        class A(cls.DeclarativeBasic):
            __tablename__ = 'A'
            __table_args__ = dict(test_needs_fk=True)
            # skip the rowcount sanity check on DELETE
            __mapper_args__ = {"confirm_deleted_rows": False}
            id = Column(Integer, primary_key=True)
            parent_id = Column(Integer, ForeignKey('A.id', ondelete='CASCADE'))

    def test_delete_both(self):
        A = self.classes.A
        session = Session(testing.db)

        a1, a2 = A(id=1), A(id=2, parent_id=1)

        session.add_all([a1, a2])
        session.flush()

        session.delete(a1)
        session.delete(a2)

        # no issue with multi-row count here
        session.flush()
class ExtraPassiveDeletesTest(fixtures.MappedTest):
    """passive_deletes='all': the ORM never touches child rows on parent
    delete, so a plain (non-cascading) FK makes the flush fail at the DB."""

    __requires__ = ('foreign_keys',)

    @classmethod
    def define_tables(cls, metadata):
        Table('mytable', metadata,
              Column('id', Integer, primary_key=True,
                     test_needs_autoincrement=True),
              Column('data', String(30)),
              test_needs_fk=True)

        Table('myothertable', metadata,
              Column('id', Integer, primary_key=True,
                     test_needs_autoincrement=True),
              Column('parent_id', Integer),
              Column('data', String(30)),
              # no CASCADE, the same as ON DELETE RESTRICT
              sa.ForeignKeyConstraint(['parent_id'],
                                      ['mytable.id']),
              test_needs_fk=True)

    @classmethod
    def setup_classes(cls):
        class MyClass(cls.Basic):
            pass

        class MyOtherClass(cls.Basic):
            pass

    def test_assertions(self):
        """passive_deletes='all' combined with delete cascade is rejected
        at mapper-configuration time."""
        myothertable, MyOtherClass = self.tables.myothertable, self.classes.MyOtherClass
        mytable, MyClass = self.tables.mytable, self.classes.MyClass

        mapper(MyClass, mytable, properties={
            'foo': relationship(MyOtherClass,
                                passive_deletes='all',
                                cascade="all")
        })
        mapper(MyOtherClass, myothertable)

        assert_raises_message(
            sa.exc.ArgumentError,
            "On MyClass.foo, can't set passive_deletes='all' in conjunction with 'delete' "
            "or 'delete-orphan' cascade",
            sa.orm.configure_mappers
        )

    def test_extra_passive(self):
        """Children are left untouched, so the FK violation surfaces as a
        DBAPIError on flush."""
        myothertable, MyClass, MyOtherClass, mytable = (
            self.tables.myothertable,
            self.classes.MyClass,
            self.classes.MyOtherClass,
            self.tables.mytable)

        mapper(MyOtherClass, myothertable)
        mapper(MyClass, mytable, properties={
            'children': relationship(MyOtherClass,
                                     passive_deletes='all',
                                     cascade="save-update")})

        session = create_session()
        mc = MyClass()
        mc.children.append(MyOtherClass())
        mc.children.append(MyOtherClass())
        mc.children.append(MyOtherClass())
        mc.children.append(MyOtherClass())
        session.add(mc)
        session.flush()
        session.expunge_all()

        assert myothertable.count().scalar() == 4
        mc = session.query(MyClass).get(mc.id)
        session.delete(mc)
        assert_raises(sa.exc.DBAPIError, session.flush)

    def test_extra_passive_2(self):
        """Same failure even when a child is concurrently modified."""
        myothertable, MyClass, MyOtherClass, mytable = (self.tables.myothertable,
                                                        self.classes.MyClass,
                                                        self.classes.MyOtherClass,
                                                        self.tables.mytable)

        mapper(MyOtherClass, myothertable)
        mapper(MyClass, mytable, properties={
            'children': relationship(MyOtherClass,
                                     passive_deletes='all',
                                     cascade="save-update")})

        session = create_session()
        mc = MyClass()
        mc.children.append(MyOtherClass())
        session.add(mc)
        session.flush()
        session.expunge_all()

        assert myothertable.count().scalar() == 1

        mc = session.query(MyClass).get(mc.id)
        session.delete(mc)
        mc.children[0].data = 'some new data'
        assert_raises(sa.exc.DBAPIError, session.flush)

    def test_dont_emit(self):
        """Deleting a parent with an unloaded collection emits exactly one
        statement -- no SELECT of the children."""
        myothertable, MyClass, MyOtherClass, mytable = (self.tables.myothertable,
                                                        self.classes.MyClass,
                                                        self.classes.MyOtherClass,
                                                        self.tables.mytable)

        mapper(MyOtherClass, myothertable)
        mapper(MyClass, mytable, properties={
            'children': relationship(MyOtherClass,
                                     passive_deletes='all',
                                     cascade="save-update")})
        session = Session()
        mc = MyClass()
        session.add(mc)
        session.commit()
        mc.id

        session.delete(mc)

        # no load for "children" should occur
        self.assert_sql_count(testing.db, session.flush, 1)
class ColumnCollisionTest(fixtures.MappedTest):
    """Ensure the mapper doesn't break bind param naming rules on flush."""

    @classmethod
    def define_tables(cls, metadata):
        # 'book_id' deliberately shadows the table-name prefix of 'id',
        # provoking bind-parameter name collisions.
        Table('book', metadata,
              Column('id', Integer, primary_key=True,
                     test_needs_autoincrement=True),
              Column('book_id', String(50)),
              Column('title', String(50))
              )

    def test_naming(self):
        book = self.tables.book

        class Book(fixtures.ComparableEntity):
            pass

        mapper(Book, book)
        sess = create_session()

        b1 = Book(book_id='abc', title='def')
        sess.add(b1)
        sess.flush()

        b1.title = 'ghi'
        sess.flush()
        sess.close()
        eq_(
            sess.query(Book).first(),
            Book(book_id='abc', title='ghi')
        )
class DefaultTest(fixtures.MappedTest):
    """Exercise mappings on columns with DefaultGenerators.

    Tests that when saving objects whose table contains DefaultGenerators,
    either python-side, preexec or database-side, the newly saved instances
    receive all the default values either through a post-fetch or getting the
    pre-exec'ed defaults back from the engine.

    """

    @classmethod
    def define_tables(cls, metadata):
        # server defaults must be strings on these backends
        use_string_defaults = testing.against('postgresql', 'oracle', 'sqlite', 'mssql')

        if use_string_defaults:
            hohotype = String(30)
            hohoval = "im hoho"
            althohoval = "im different hoho"
        else:
            hohotype = Integer
            hohoval = 9
            althohoval = 15

        cls.other['hohoval'] = hohoval
        cls.other['althohoval'] = althohoval

        dt = Table('default_t', metadata,
                   Column('id', Integer, primary_key=True,
                          test_needs_autoincrement=True),
                   # server-side default
                   Column('hoho', hohotype, server_default=str(hohoval)),
                   # python-side SQL-function default
                   Column('counter', Integer,
                          default=sa.func.char_length("1234567", type_=Integer)),
                   # python-side default + onupdate
                   Column('foober', String(30), default="im foober",
                          onupdate="im the update"),
                   mysql_engine='MyISAM')

        st = Table('secondary_table', metadata,
                   Column('id', Integer, primary_key=True,
                          test_needs_autoincrement=True),
                   Column('data', String(50)),
                   mysql_engine='MyISAM')

        if testing.against('postgresql', 'oracle'):
            dt.append_column(
                Column('secondary_id', Integer, sa.Sequence('sec_id_seq'),
                       unique=True))
            st.append_column(
                Column('fk_val', Integer,
                       ForeignKey('default_t.secondary_id')))
        elif testing.against('mssql'):
            st.append_column(
                Column('fk_val', Integer,
                       ForeignKey('default_t.id')))
        else:
            st.append_column(
                Column('hoho', hohotype, ForeignKey('default_t.hoho')))

    @classmethod
    def setup_classes(cls):
        class Hoho(cls.Comparable):
            pass

        class Secondary(cls.Comparable):
            pass

    @testing.fails_on('firebird', 'Data type unknown on the parameter')
    def test_insert(self):
        """Defaults are applied per-instance and post-fetched lazily."""
        althohoval, hohoval, default_t, Hoho = (self.other.althohoval,
                                                self.other.hohoval,
                                                self.tables.default_t,
                                                self.classes.Hoho)

        mapper(Hoho, default_t)

        h1 = Hoho(hoho=althohoval)
        h2 = Hoho(counter=12)
        h3 = Hoho(hoho=althohoval, counter=12)
        h4 = Hoho()
        h5 = Hoho(foober='im the new foober')

        session = create_session(autocommit=False)
        session.add_all((h1, h2, h3, h4, h5))
        session.commit()

        eq_(h1.hoho, althohoval)
        eq_(h3.hoho, althohoval)

        def go():
            # test deferred load of attributes, one select per instance
            self.assert_(h2.hoho == h4.hoho == h5.hoho == hohoval)
        self.sql_count_(3, go)

        def go():
            self.assert_(h1.counter == h4.counter == h5.counter == 7)
        self.sql_count_(1, go)

        def go():
            self.assert_(h3.counter == h2.counter == 12)
            self.assert_(h2.foober == h3.foober == h4.foober == 'im foober')
            self.assert_(h5.foober == 'im the new foober')
        self.sql_count_(0, go)

        session.expunge_all()

        (h1, h2, h3, h4, h5) = session.query(Hoho).order_by(Hoho.id).all()

        eq_(h1.hoho, althohoval)
        eq_(h3.hoho, althohoval)
        self.assert_(h2.hoho == h4.hoho == h5.hoho == hohoval)
        self.assert_(h3.counter == h2.counter == 12)
        self.assert_(h1.counter == h4.counter == h5.counter == 7)
        self.assert_(h2.foober == h3.foober == h4.foober == 'im foober')
        eq_(h5.foober, 'im the new foober')

    @testing.fails_on('firebird', 'Data type unknown on the parameter')
    @testing.fails_on("oracle+cx_oracle", "seems like a cx_oracle bug")
    def test_eager_defaults(self):
        """eager_defaults=True fetches server defaults within the flush."""
        hohoval, default_t, Hoho = (self.other.hohoval,
                                    self.tables.default_t,
                                    self.classes.Hoho)
        Secondary = self.classes.Secondary

        mapper(Hoho, default_t, eager_defaults=True, properties={
            "sec": relationship(Secondary),
            "syn": sa.orm.synonym(default_t.c.counter)
        })

        mapper(Secondary, self.tables.secondary_table)
        h1 = Hoho()

        session = create_session()
        session.add(h1)

        if testing.db.dialect.implicit_returning:
            self.sql_count_(1, session.flush)
        else:
            self.sql_count_(2, session.flush)

        self.sql_count_(0, lambda: eq_(h1.hoho, hohoval))

        # no actual eager defaults, make sure error isn't raised
        h2 = Hoho(hoho=hohoval, counter=5)
        session.add(h2)
        session.flush()
        eq_(h2.hoho, hohoval)
        eq_(h2.counter, 5)

    def test_insert_nopostfetch(self):
        default_t, Hoho = self.tables.default_t, self.classes.Hoho

        # populates from the FetchValues explicitly so there is no
        # "post-update"
        mapper(Hoho, default_t)

        h1 = Hoho(hoho="15", counter=15)

        session = create_session()
        session.add(h1)
        session.flush()

        def go():
            eq_(h1.hoho, "15")
            eq_(h1.counter, 15)
            eq_(h1.foober, "im foober")
        self.sql_count_(0, go)

    @testing.fails_on('firebird', 'Data type unknown on the parameter')
    def test_update(self):
        """The onupdate default fires on UPDATE, not on INSERT."""
        default_t, Hoho = self.tables.default_t, self.classes.Hoho

        mapper(Hoho, default_t)

        h1 = Hoho()
        session = create_session()
        session.add(h1)
        session.flush()

        eq_(h1.foober, 'im foober')
        h1.counter = 19
        session.flush()
        eq_(h1.foober, 'im the update')

    @testing.fails_on('firebird', 'Data type unknown on the parameter')
    def test_used_in_relationship(self):
        """A server-side default can be used as the target of a foreign key"""

        Hoho, hohoval, default_t, secondary_table, Secondary = (self.classes.Hoho,
                                                                self.other.hohoval,
                                                                self.tables.default_t,
                                                                self.tables.secondary_table,
                                                                self.classes.Secondary)

        mapper(Hoho, default_t, properties={
            'secondaries': relationship(Secondary,
                                        order_by=secondary_table.c.id)})
        mapper(Secondary, secondary_table)

        h1 = Hoho()
        s1 = Secondary(data='s1')
        h1.secondaries.append(s1)

        session = create_session()
        session.add(h1)
        session.flush()
        session.expunge_all()

        eq_(session.query(Hoho).get(h1.id),
            Hoho(hoho=hohoval,
                 secondaries=[
                     Secondary(data='s1')]))

        h1 = session.query(Hoho).get(h1.id)
        h1.secondaries.append(Secondary(data='s2'))
        session.flush()
        session.expunge_all()

        eq_(session.query(Hoho).get(h1.id),
            Hoho(hoho=hohoval,
                 secondaries=[
                     Secondary(data='s1'),
                     Secondary(data='s2')]))
class ColumnPropertyTest(fixtures.MappedTest):
    """SQL-expression column_property attributes: whether they are expired
    (re-fetched) or kept stale on flush, per expire_on_flush."""

    @classmethod
    def define_tables(cls, metadata):
        Table('data', metadata,
              Column('id', Integer, primary_key=True,
                     test_needs_autoincrement=True),
              Column('a', String(50)),
              Column('b', String(50))
              )

        Table('subdata', metadata,
              Column('id', Integer, ForeignKey('data.id'), primary_key=True),
              Column('c', String(50)),
              )

    @classmethod
    def setup_mappers(cls):
        class Data(cls.Basic):
            pass

    def test_refreshes(self):
        Data, data = self.classes.Data, self.tables.data

        mapper(Data, data, properties={
            'aplusb': column_property(data.c.a + literal_column("' '") + data.c.b)
        })
        self._test(True)

    def test_no_refresh(self):
        Data, data = self.classes.Data, self.tables.data

        mapper(Data, data, properties={
            'aplusb': column_property(data.c.a + literal_column("' '") + data.c.b,
                                      expire_on_flush=False)
        })
        self._test(False)

    def test_refreshes_post_init(self):
        Data, data = self.classes.Data, self.tables.data

        m = mapper(Data, data)
        # property added after initial mapper configuration
        m.add_property('aplusb',
                       column_property(data.c.a + literal_column("' '") + data.c.b))
        self._test(True)

    def test_with_inheritance(self):
        subdata, data, Data = (self.tables.subdata,
                               self.tables.data,
                               self.classes.Data)

        class SubData(Data):
            pass
        mapper(Data, data, properties={
            'aplusb': column_property(data.c.a + literal_column("' '") + data.c.b)
        })
        mapper(SubData, subdata, inherits=Data)

        sess = create_session()
        sd1 = SubData(a="hello", b="there", c="hi")
        sess.add(sd1)
        sess.flush()
        eq_(sd1.aplusb, "hello there")

    def _test(self, expect_expiry):
        """Shared driver: mutate a source column and check whether the
        computed attribute refreshes on flush."""
        Data = self.classes.Data

        sess = create_session()

        d1 = Data(a="hello", b="there")
        sess.add(d1)
        sess.flush()

        eq_(d1.aplusb, "hello there")

        d1.b = "bye"
        sess.flush()
        if expect_expiry:
            eq_(d1.aplusb, "hello bye")
        else:
            eq_(d1.aplusb, "hello there")

        d1.b = 'foobar'
        # an explicitly-set value always wins over the expression
        d1.aplusb = 'im setting this explicitly'
        sess.flush()
        eq_(d1.aplusb, "im setting this explicitly")
class OneToManyTest(_fixtures.FixtureTest):
run_inserts = None
    def test_one_to_many_1(self):
        """Basic save of one to many."""

        Address, addresses, users, User = (self.classes.Address,
                                           self.tables.addresses,
                                           self.tables.users,
                                           self.classes.User)

        m = mapper(User, users, properties=dict(
            addresses=relationship(mapper(Address, addresses), lazy='select')
        ))
        u = User(name='one2manytester')
        a = Address(email_address='one2many@test.org')
        u.addresses.append(a)

        a2 = Address(email_address='lala@test.org')
        u.addresses.append(a2)

        session = create_session()
        session.add(u)
        session.flush()

        # verify the raw rows: children received the parent's id
        user_rows = users.select(users.c.id.in_([u.id])).execute().fetchall()
        eq_(list(user_rows[0].values()), [u.id, 'one2manytester'])

        address_rows = addresses.select(
            addresses.c.id.in_([a.id, a2.id]),
            order_by=[addresses.c.email_address]).execute().fetchall()
        eq_(list(address_rows[0].values()), [a2.id, u.id, 'lala@test.org'])
        eq_(list(address_rows[1].values()), [a.id, u.id, 'one2many@test.org'])

        userid = u.id
        addressid = a2.id

        a2.email_address = 'somethingnew@foo.com'

        session.flush()

        address_rows = addresses.select(
            addresses.c.id == addressid).execute().fetchall()
        eq_(list(address_rows[0].values()),
            [addressid, userid, 'somethingnew@foo.com'])
        # flushing the child update must not have changed any identities
        self.assert_(u.id == userid and a2.id == addressid)
    def test_one_to_many_2(self):
        """Modifying the child items of an object."""

        Address, addresses, users, User = (self.classes.Address,
                                           self.tables.addresses,
                                           self.tables.users,
                                           self.classes.User)

        m = mapper(User, users, properties=dict(
            addresses=relationship(mapper(Address, addresses), lazy='select')))

        u1 = User(name='user1')
        u1.addresses = []
        a1 = Address(email_address='emailaddress1')
        u1.addresses.append(a1)

        u2 = User(name='user2')
        u2.addresses = []
        a2 = Address(email_address='emailaddress2')
        u2.addresses.append(a2)

        a3 = Address(email_address='emailaddress3')

        session = create_session()
        session.add_all((u1, u2, a3))
        session.flush()

        # modify user2 directly, append an address to user1.
        # upon commit, user2 should be updated, user1 should not
        # both address1 and address3 should be updated
        u2.name = 'user2modified'
        u1.addresses.append(a3)
        del u1.addresses[0]

        # assert the exact statements (and bind params) of the flush
        self.assert_sql(testing.db, session.flush, [
            ("UPDATE users SET name=:name "
             "WHERE users.id = :users_id",
             {'users_id': u2.id, 'name': 'user2modified'}),
            ("UPDATE addresses SET user_id=:user_id "
             "WHERE addresses.id = :addresses_id",
             [
                 {'user_id': None, 'addresses_id': a1.id},
                 {'user_id': u1.id, 'addresses_id': a3.id}
             ]),
        ])
def test_child_move(self):
    """Moving a child from one parent to another, with a delete.

    Tests that deleting the first parent properly updates the child with
    the new parent. This tests the 'trackparent' option in the attributes
    module.
    """
    Address, addresses, users, User = (self.classes.Address,
                                       self.tables.addresses,
                                       self.tables.users,
                                       self.classes.User)
    m = mapper(User, users, properties=dict(
        addresses = relationship(mapper(Address, addresses), lazy='select')))
    u1 = User(name='user1')
    u2 = User(name='user2')
    a = Address(email_address='address1')
    u1.addresses.append(a)
    session = create_session()
    session.add_all((u1, u2))
    session.flush()
    # reparent the child, then delete the original parent in the same flush
    del u1.addresses[0]
    u2.addresses.append(a)
    session.delete(u1)
    session.flush()
    session.expunge_all()
    # reload from the database: the child must survive on the new parent
    u2 = session.query(User).get(u2.id)
    eq_(len(u2.addresses), 1)
def test_child_move_2(self):
    """Moving a child between parents without deleting either parent.

    Same as test_child_move but both parents remain; verifies the
    foreign key is re-pointed on flush.
    """
    Address, addresses, users, User = (self.classes.Address,
                                       self.tables.addresses,
                                       self.tables.users,
                                       self.classes.User)
    m = mapper(User, users, properties=dict(
        addresses = relationship(mapper(Address, addresses), lazy='select')))
    u1 = User(name='user1')
    u2 = User(name='user2')
    a = Address(email_address='address1')
    u1.addresses.append(a)
    session = create_session()
    session.add_all((u1, u2))
    session.flush()
    # move the child to the other parent
    del u1.addresses[0]
    u2.addresses.append(a)
    session.flush()
    session.expunge_all()
    u2 = session.query(User).get(u2.id)
    eq_(len(u2.addresses), 1)
def test_o2m_delete_parent(self):
    """Deleting the parent of a scalar one-to-many de-associates the child.

    Without a delete cascade, the child row survives with its FK nulled,
    remains in the identity map, while the parent is removed from it.
    """
    Address, addresses, users, User = (self.classes.Address,
                                       self.tables.addresses,
                                       self.tables.users,
                                       self.classes.User)
    m = mapper(User, users, properties=dict(
        address = relationship(mapper(Address, addresses),
                               lazy='select',
                               uselist=False)))
    u = User(name='one2onetester')
    a = Address(email_address='myonlyaddress@foo.com')
    u.address = a
    session = create_session()
    session.add(u)
    session.flush()
    session.delete(u)
    session.flush()
    # child persisted and de-associated, not deleted
    assert a.id is not None
    assert a.user_id is None
    assert sa.orm.attributes.instance_state(a).key in session.identity_map
    assert sa.orm.attributes.instance_state(u).key not in session.identity_map
def test_one_to_one(self):
    """Basic save/update of a uselist=False (one-to-one) relationship.

    Each flush here is expected to succeed; no result assertions, the
    test passes if the unit of work processes the scalar child cleanly.
    """
    Address, addresses, users, User = (self.classes.Address,
                                       self.tables.addresses,
                                       self.tables.users,
                                       self.classes.User)
    m = mapper(User, users, properties=dict(
        address = relationship(mapper(Address, addresses),
                               lazy='select',
                               uselist=False)))
    u = User(name='one2onetester')
    u.address = Address(email_address='myonlyaddress@foo.com')
    session = create_session()
    session.add(u)
    session.flush()
    # update only the parent
    u.name = 'imnew'
    session.flush()
    # update only the child
    u.address.email_address = 'imnew@foo.com'
    session.flush()
def test_bidirectional(self):
    """Save and delete across a many-to-one with a backref collection.

    The Address is never added explicitly; it is flushed via the
    'addresses' backref established by assigning user=u.
    """
    users, Address, addresses, User = (self.tables.users,
                                       self.classes.Address,
                                       self.tables.addresses,
                                       self.classes.User)
    m1 = mapper(User, users)
    m2 = mapper(Address, addresses, properties=dict(
        user = relationship(m1, lazy='joined', backref='addresses')))
    u = User(name='test')
    a = Address(email_address='testaddress', user=u)
    session = create_session()
    session.add(u)
    session.flush()
    session.delete(u)
    session.flush()
def test_double_relationship(self):
    """Two relationships to the same child mapper, distinguished by
    custom primaryjoin criteria (LIKE filters on email_address)."""
    Address, addresses, users, User = (self.classes.Address,
                                       self.tables.addresses,
                                       self.tables.users,
                                       self.classes.User)
    m2 = mapper(Address, addresses)
    m = mapper(User, users, properties={
        'boston_addresses' : relationship(m2, primaryjoin=
                    sa.and_(users.c.id==addresses.c.user_id,
                            addresses.c.email_address.like('%boston%'))),
        'newyork_addresses' : relationship(m2, primaryjoin=
                    sa.and_(users.c.id==addresses.c.user_id,
                            addresses.c.email_address.like('%newyork%')))})
    u = User(name='u1')
    a = Address(email_address='foo@boston.com')
    b = Address(email_address='bar@newyork.com')
    u.boston_addresses.append(a)
    u.newyork_addresses.append(b)
    session = create_session()
    session.add(u)
    # flush succeeding is the assertion; both collections persist
    session.flush()
class SaveTest(_fixtures.FixtureTest):
    """Assorted save/flush behaviors on the standard User/Address fixtures."""

    run_inserts = None  # each test creates its own rows

    def test_basic(self):
        """Save two users, update one, verify identity-map and SQL results."""
        User, users = self.classes.User, self.tables.users
        m = mapper(User, users)
        # save two users
        u = User(name='savetester')
        u2 = User(name='savetester2')
        session = create_session()
        session.add_all((u, u2))
        session.flush()
        # assert the first one retrieves the same from the identity map
        nu = session.query(m).get(u.id)
        assert u is nu
        # clear out the identity map, so next get forces a SELECT
        session.expunge_all()
        # check it again, identity should be different but ids the same
        nu = session.query(m).get(u.id)
        assert u is not nu and u.id == nu.id and nu.name == 'savetester'
        # change first users name and save
        session = create_session()
        session.add(u)
        u.name = 'modifiedname'
        assert u in session.dirty
        session.flush()
        # select both
        userlist = session.query(User).filter(
            users.c.id.in_([u.id, u2.id])).order_by(users.c.name).all()
        eq_(u.id, userlist[0].id)
        eq_(userlist[0].name, 'modifiedname')
        eq_(u2.id, userlist[1].id)
        eq_(userlist[1].name, 'savetester2')

    def test_synonym(self):
        """A synonym routes through the property's getter/setter on both
        assignment and reload."""
        users = self.tables.users

        class SUser(fixtures.BasicEntity):
            def _get_name(self):
                return "User:" + self.name
            def _set_name(self, name):
                self.name = name + ":User"
            syn_name = property(_get_name, _set_name)

        mapper(SUser, users, properties={
            'syn_name': sa.orm.synonym('name')
        })
        u = SUser(syn_name="some name")
        # setter appended ":User", getter prepends "User:"
        eq_(u.syn_name, 'User:some name:User')
        session = create_session()
        session.add(u)
        session.flush()
        session.expunge_all()
        u = session.query(SUser).first()
        eq_(u.syn_name, 'User:some name:User')

    def test_lazyattr_commit(self):
        """Lazily loaded relationships.

        When a lazy-loaded list is unloaded, and a commit occurs, that the
        'passive' call on that list does not blow away its value
        """
        users, Address, addresses, User = (self.tables.users,
                                           self.classes.Address,
                                           self.tables.addresses,
                                           self.classes.User)
        mapper(User, users, properties = {
            'addresses': relationship(mapper(Address, addresses))})
        u = User(name='u1')
        u.addresses.append(Address(email_address='u1@e1'))
        u.addresses.append(Address(email_address='u1@e2'))
        u.addresses.append(Address(email_address='u1@e3'))
        u.addresses.append(Address(email_address='u1@e4'))
        session = create_session()
        session.add(u)
        session.flush()
        session.expunge_all()
        u = session.query(User).one()
        # flush a parent-only change while 'addresses' is still unloaded
        u.name = 'newname'
        session.flush()
        eq_(len(u.addresses), 4)

    def test_inherits(self):
        """a user object that also has the users mailing address."""
        users, addresses, User = (self.tables.users,
                                  self.tables.addresses,
                                  self.classes.User)
        m1 = mapper(User, users)

        class AddressUser(User):
            pass

        # define a mapper for AddressUser that inherits the User.mapper, and
        # joins on the id column
        mapper(AddressUser, addresses, inherits=m1, properties={
            'address_id': addresses.c.id
        })
        au = AddressUser(name='u', email_address='u@e')
        session = create_session()
        session.add(au)
        session.flush()
        session.expunge_all()
        rt = session.query(AddressUser).one()
        eq_(au.user_id, rt.user_id)
        eq_(rt.id, rt.id)

    def test_deferred(self):
        """Deferred column operations"""
        orders, Order = self.tables.orders, self.classes.Order
        mapper(Order, orders, properties={
            'description': sa.orm.deferred(orders.c.description)})
        # don't set deferred attribute, commit session
        o = Order(id=42)
        session = create_session(autocommit=False)
        session.add(o)
        session.commit()
        # assert that changes get picked up
        o.description = 'foo'
        session.commit()
        eq_(list(session.execute(orders.select(), mapper=Order)),
            [(42, None, None, 'foo', None)])
        session.expunge_all()
        # assert that a set operation doesn't trigger a load operation
        o = session.query(Order).filter(Order.description == 'foo').one()
        def go():
            o.description = 'hoho'
        self.sql_count_(0, go)
        session.flush()
        eq_(list(session.execute(orders.select(), mapper=Order)),
            [(42, None, None, 'hoho', None)])
        session.expunge_all()
        # test assigning None to an unloaded deferred also works
        o = session.query(Order).filter(Order.description == 'hoho').one()
        o.description = None
        session.flush()
        eq_(list(session.execute(orders.select(), mapper=Order)),
            [(42, None, None, None, None)])
        session.close()

    # why no support on oracle ? because oracle doesn't save
    # "blank" strings; it saves a single space character.
    @testing.fails_on('oracle', 'FIXME: unknown')
    def test_dont_update_blanks(self):
        """Re-assigning an equal value must not emit an UPDATE."""
        User, users = self.classes.User, self.tables.users
        mapper(User, users)
        u = User(name='')
        session = create_session()
        session.add(u)
        session.flush()
        session.expunge_all()
        u = session.query(User).get(u.id)
        u.name = ''
        self.sql_count_(0, session.flush)

    def test_multi_table_selectable(self):
        """Mapped selectables that span tables.

        Also tests redefinition of the keynames for the column properties.
        """
        addresses, users, User = (self.tables.addresses,
                                  self.tables.users,
                                  self.classes.User)
        usersaddresses = sa.join(users, addresses,
                                 users.c.id == addresses.c.user_id)
        # 'foo_id' maps both users.id and addresses.user_id as one property
        m = mapper(User, usersaddresses,
            properties=dict(
                email = addresses.c.email_address,
                foo_id = [users.c.id, addresses.c.user_id]))
        u = User(name='multitester', email='multi@test.org')
        session = create_session()
        session.add(u)
        session.flush()
        session.expunge_all()
        id = m.primary_key_from_instance(u)
        u = session.query(User).get(id)
        assert u.name == 'multitester'
        user_rows = users.select(users.c.id.in_([u.foo_id])).execute().fetchall()
        eq_(list(user_rows[0].values()), [u.foo_id, 'multitester'])
        address_rows = addresses.select(addresses.c.id.in_([u.id])).execute().fetchall()
        eq_(list(address_rows[0].values()), [u.id, u.foo_id, 'multi@test.org'])
        # an update must write to both underlying tables
        u.email = 'lala@hey.com'
        u.name = 'imnew'
        session.flush()
        user_rows = users.select(users.c.id.in_([u.foo_id])).execute().fetchall()
        eq_(list(user_rows[0].values()), [u.foo_id, 'imnew'])
        address_rows = addresses.select(addresses.c.id.in_([u.id])).execute().fetchall()
        eq_(list(address_rows[0].values()), [u.id, u.foo_id, 'lala@hey.com'])
        session.expunge_all()
        u = session.query(User).get(id)
        assert u.name == 'imnew'

    def test_history_get(self):
        """The history lazy-fetches data when it wasn't otherwise loaded."""
        users, Address, addresses, User = (self.tables.users,
                                           self.classes.Address,
                                           self.tables.addresses,
                                           self.classes.User)
        mapper(User, users, properties={
            'addresses':relationship(Address, cascade="all, delete-orphan")})
        mapper(Address, addresses)
        u = User(name='u1')
        u.addresses.append(Address(email_address='u1@e1'))
        u.addresses.append(Address(email_address='u1@e2'))
        session = create_session()
        session.add(u)
        session.flush()
        session.expunge_all()
        u = session.query(User).get(u.id)
        # delete with 'addresses' unloaded; cascade must load and delete them
        session.delete(u)
        session.flush()
        assert users.count().scalar() == 0
        assert addresses.count().scalar() == 0

    def test_batch_mode(self):
        """The 'batch=False' flag on mapper()"""
        users, User = self.tables.users, self.classes.User
        names = []

        class Events(object):
            def before_insert(self, mapper, connection, instance):
                self.current_instance = instance
                names.append(instance.name)
            def after_insert(self, mapper, connection, instance):
                # batch=False interleaves before/after per row
                assert instance is self.current_instance

        mapper(User, users, batch=False)
        evt = Events()
        event.listen(User, "before_insert", evt.before_insert)
        event.listen(User, "after_insert", evt.after_insert)
        u1 = User(name='user1')
        u2 = User(name='user2')
        session = create_session()
        session.add_all((u1, u2))
        session.flush()
        u3 = User(name='user3')
        u4 = User(name='user4')
        u5 = User(name='user5')
        session.add_all([u4, u5, u3])
        session.flush()
        # test insert ordering is maintained
        assert names == ['user1', 'user2', 'user4', 'user5', 'user3']
        session.expunge_all()
        sa.orm.clear_mappers()
        # default batch=True: after_insert runs after all before_inserts,
        # so the interleave assertion above fails
        m = mapper(User, users)
        evt = Events()
        event.listen(User, "before_insert", evt.before_insert)
        event.listen(User, "after_insert", evt.after_insert)
        u1 = User(name='user1')
        u2 = User(name='user2')
        session.add_all((u1, u2))
        assert_raises(AssertionError, session.flush)
class ManyToOneTest(_fixtures.FixtureTest):
    """Flush behavior of many-to-one relationships (Address -> User)."""

    run_inserts = None

    def test_m2o_one_to_one(self):
        """Scalar many-to-one: inserts, then mixed child/parent updates
        verified against the exact SQL emitted."""
        users, Address, addresses, User = (self.tables.users,
                                           self.classes.Address,
                                           self.tables.addresses,
                                           self.classes.User)
        # TODO: put assertion in here !!!
        m = mapper(Address, addresses, properties=dict(
            user = relationship(mapper(User, users), lazy='select', uselist=False)))
        session = create_session()
        data = [
            {'name': 'thesub' ,  'email_address': 'bar@foo.com'},
            {'name': 'assdkfj' , 'email_address': 'thesdf@asdf.com'},
            {'name': 'n4knd' ,   'email_address': 'asf3@bar.org'},
            {'name': 'v88f4' ,   'email_address': 'adsd5@llala.net'},
            {'name': 'asdf8d' ,  'email_address': 'theater@foo.com'}
        ]
        objects = []
        for elem in data:
            a = Address()
            a.email_address = elem['email_address']
            a.user = User()
            a.user.name = elem['name']
            objects.append(a)
            session.add(a)
        session.flush()
        objects[2].email_address = 'imnew@foo.bar'
        objects[3].user = User()
        objects[3].user.name = 'imnewlyadded'
        self.assert_sql_execution(testing.db,
                                  session.flush,
                                  CompiledSQL("INSERT INTO users (name) VALUES (:name)",
                                              {'name': 'imnewlyadded'} ),
                                  AllOf(
                                      # the two UPDATEs may occur in either order
                                      CompiledSQL("UPDATE addresses SET email_address=:email_address "
                                                  "WHERE addresses.id = :addresses_id",
                                                  lambda ctx: {'email_address': 'imnew@foo.bar',
                                                               'addresses_id': objects[2].id}),
                                      CompiledSQL("UPDATE addresses SET user_id=:user_id "
                                                  "WHERE addresses.id = :addresses_id",
                                                  lambda ctx: {'user_id': objects[3].user.id,
                                                               'addresses_id': objects[3].id})
                                  )
        )
        l = sa.select([users, addresses],
                      sa.and_(users.c.id==addresses.c.user_id,
                              addresses.c.id==a.id)).execute()
        eq_(list(l.first().values()),
            [a.user.id, 'asdf8d', a.id, a.user_id, 'theater@foo.com'])

    def test_many_to_one_1(self):
        """Assigning then clearing a many-to-one reference."""
        users, Address, addresses, User = (self.tables.users,
                                           self.classes.Address,
                                           self.tables.addresses,
                                           self.classes.User)
        m = mapper(Address, addresses, properties=dict(
            user = relationship(mapper(User, users), lazy='select')))
        a1 = Address(email_address='emailaddress1')
        u1 = User(name='user1')
        a1.user = u1
        session = create_session()
        session.add(a1)
        session.flush()
        session.expunge_all()
        a1 = session.query(Address).get(a1.id)
        u1 = session.query(User).get(u1.id)
        assert a1.user is u1
        # de-associate and verify after reload
        a1.user = None
        session.flush()
        session.expunge_all()
        a1 = session.query(Address).get(a1.id)
        u1 = session.query(User).get(u1.id)
        assert a1.user is None

    def test_many_to_one_2(self):
        """One child loses its parent while another gains it, same flush."""
        users, Address, addresses, User = (self.tables.users,
                                           self.classes.Address,
                                           self.tables.addresses,
                                           self.classes.User)
        m = mapper(Address, addresses, properties=dict(
            user = relationship(mapper(User, users), lazy='select')))
        a1 = Address(email_address='emailaddress1')
        a2 = Address(email_address='emailaddress2')
        u1 = User(name='user1')
        a1.user = u1
        session = create_session()
        session.add_all((a1, a2))
        session.flush()
        session.expunge_all()
        a1 = session.query(Address).get(a1.id)
        a2 = session.query(Address).get(a2.id)
        u1 = session.query(User).get(u1.id)
        assert a1.user is u1
        a1.user = None
        a2.user = u1
        session.flush()
        session.expunge_all()
        a1 = session.query(Address).get(a1.id)
        a2 = session.query(Address).get(a2.id)
        u1 = session.query(User).get(u1.id)
        assert a1.user is None
        assert a2.user is u1

    def test_many_to_one_3(self):
        """Re-pointing a child from one parent to another."""
        users, Address, addresses, User = (self.tables.users,
                                           self.classes.Address,
                                           self.tables.addresses,
                                           self.classes.User)
        m = mapper(Address, addresses, properties=dict(
            user = relationship(mapper(User, users), lazy='select')))
        a1 = Address(email_address='emailaddress1')
        u1 = User(name='user1')
        u2 = User(name='user2')
        a1.user = u1
        session = create_session()
        session.add_all((a1, u1, u2))
        session.flush()
        session.expunge_all()
        a1 = session.query(Address).get(a1.id)
        u1 = session.query(User).get(u1.id)
        u2 = session.query(User).get(u2.id)
        assert a1.user is u1
        a1.user = u2
        session.flush()
        session.expunge_all()
        a1 = session.query(Address).get(a1.id)
        u1 = session.query(User).get(u1.id)
        u2 = session.query(User).get(u2.id)
        assert a1.user is u2

    def test_bidirectional_no_load(self):
        """backref with lazy='noload' on the collection side still
        maintains the FK via the many-to-one side."""
        users, Address, addresses, User = (self.tables.users,
                                           self.classes.Address,
                                           self.tables.addresses,
                                           self.classes.User)
        mapper(User, users, properties={
            'addresses':relationship(Address, backref='user', lazy='noload')})
        mapper(Address, addresses)
        # try it on unsaved objects
        u1 = User(name='u1')
        a1 = Address(email_address='e1')
        a1.user = u1
        session = create_session()
        session.add(u1)
        session.flush()
        session.expunge_all()
        a1 = session.query(Address).get(a1.id)
        a1.user = None
        session.flush()
        session.expunge_all()
        assert session.query(Address).get(a1.id).user is None
        # noload collection always reads as empty
        assert session.query(User).get(u1.id).addresses == []
class ManyToManyTest(_fixtures.FixtureTest):
    """Flush behavior of many-to-many relationships (Item <-> Keyword)."""

    run_inserts = None

    def test_many_to_many(self):
        """Full m2m lifecycle: bulk insert, update, association add/remove,
        with the exact emitted SQL asserted."""
        keywords, items, item_keywords, Keyword, Item = (self.tables.keywords,
                                                         self.tables.items,
                                                         self.tables.item_keywords,
                                                         self.classes.Keyword,
                                                         self.classes.Item)
        mapper(Keyword, keywords)
        m = mapper(Item, items, properties=dict(
            keywords=relationship(Keyword,
                                  item_keywords,
                                  lazy='joined',
                                  order_by=keywords.c.name)))
        # data[0] is the entity class; data[1:] are row specs, matching the
        # shape expected by self.assert_result()
        data = [Item,
            {'description': 'mm_item1',
             'keywords' : (Keyword, [{'name': 'big'},
                                     {'name': 'green'},
                                     {'name': 'purple'},
                                     {'name': 'round'}])},
            {'description': 'mm_item2',
             'keywords' : (Keyword, [{'name':'blue'},
                                     {'name':'imnew'},
                                     {'name':'round'},
                                     {'name':'small'}])},
            {'description': 'mm_item3',
             'keywords' : (Keyword, [])},
            {'description': 'mm_item4',
             'keywords' : (Keyword, [{'name':'big'},
                                     {'name':'blue'},])},
            {'description': 'mm_item5',
             'keywords' : (Keyword, [{'name':'big'},
                                     {'name':'exacting'},
                                     {'name':'green'}])},
            {'description': 'mm_item6',
             'keywords' : (Keyword, [{'name':'red'},
                                     {'name':'round'},
                                     {'name':'small'}])}]
        session = create_session()
        objects = []
        # reuse a single Keyword instance per distinct name
        _keywords = dict([(k.name, k) for k in session.query(Keyword)])
        for elem in data[1:]:
            item = Item(description=elem['description'])
            objects.append(item)
            for spec in elem['keywords'][1]:
                keyword_name = spec['name']
                try:
                    kw = _keywords[keyword_name]
                except KeyError:
                    _keywords[keyword_name] = kw = Keyword(name=keyword_name)
                item.keywords.append(kw)
        session.add_all(objects)
        session.flush()
        l = (session.query(Item).
             filter(Item.description.in_([e['description']
                                          for e in data[1:]])).
             order_by(Item.description).all())
        self.assert_result(l, *data)
        objects[4].description = 'item4updated'
        k = Keyword()
        k.name = 'yellow'
        objects[5].keywords.append(k)
        self.assert_sql_execution(
            testing.db,
            session.flush,
            AllOf(
                CompiledSQL("UPDATE items SET description=:description "
                            "WHERE items.id = :items_id",
                            {'description': 'item4updated',
                             'items_id': objects[4].id},
                            ),
                CompiledSQL("INSERT INTO keywords (name) "
                            "VALUES (:name)",
                            {'name': 'yellow'},
                            )
            ),
            # association row inserted after the new keyword exists
            CompiledSQL("INSERT INTO item_keywords (item_id, keyword_id) "
                        "VALUES (:item_id, :keyword_id)",
                        lambda ctx: [{'item_id': objects[5].id,
                                      'keyword_id': k.id}])
        )
        objects[2].keywords.append(k)
        dkid = objects[5].keywords[1].id
        del objects[5].keywords[1]
        self.assert_sql_execution(
            testing.db,
            session.flush,
            CompiledSQL("DELETE FROM item_keywords "
                        "WHERE item_keywords.item_id = :item_id AND "
                        "item_keywords.keyword_id = :keyword_id",
                        [{'item_id': objects[5].id, 'keyword_id': dkid}]),
            CompiledSQL("INSERT INTO item_keywords (item_id, keyword_id) "
                        "VALUES (:item_id, :keyword_id)",
                        lambda ctx: [{'item_id': objects[2].id, 'keyword_id': k.id}]
                        ))
        session.delete(objects[3])
        session.flush()

    def test_many_to_many_remove(self):
        """Setting a collection to empty deletes many-to-many rows.

        Tests that setting a list-based attribute to '[]' properly affects the
        history and allows the many-to-many rows to be deleted
        """
        keywords, items, item_keywords, Keyword, Item = (self.tables.keywords,
                                                         self.tables.items,
                                                         self.tables.item_keywords,
                                                         self.classes.Keyword,
                                                         self.classes.Item)
        mapper(Keyword, keywords)
        mapper(Item, items, properties=dict(
            keywords = relationship(Keyword, item_keywords, lazy='joined'),
            ))
        i = Item(description='i1')
        k1 = Keyword(name='k1')
        k2 = Keyword(name='k2')
        i.keywords.append(k1)
        i.keywords.append(k2)
        session = create_session()
        session.add(i)
        session.flush()
        assert item_keywords.count().scalar() == 2
        i.keywords = []
        session.flush()
        assert item_keywords.count().scalar() == 0

    def test_scalar(self):
        """sa.dependency won't delete an m2m relationship referencing None."""
        keywords, items, item_keywords, Keyword, Item = (self.tables.keywords,
                                                         self.tables.items,
                                                         self.tables.item_keywords,
                                                         self.classes.Keyword,
                                                         self.classes.Item)
        mapper(Keyword, keywords)
        mapper(Item, items, properties=dict(
            keyword=relationship(Keyword, secondary=item_keywords, uselist=False)))
        i = Item(description='x')
        session = create_session()
        session.add(i)
        session.flush()
        # deleting with keyword unset must not emit an m2m DELETE
        session.delete(i)
        session.flush()

    def test_many_to_many_update(self):
        """Assorted history operations on a many to many"""
        keywords, items, item_keywords, Keyword, Item = (self.tables.keywords,
                                                         self.tables.items,
                                                         self.tables.item_keywords,
                                                         self.classes.Keyword,
                                                         self.classes.Item)
        mapper(Keyword, keywords)
        mapper(Item, items, properties=dict(
            keywords=relationship(Keyword,
                                  secondary=item_keywords,
                                  lazy='joined',
                                  order_by=keywords.c.name)))
        k1 = Keyword(name='keyword 1')
        k2 = Keyword(name='keyword 2')
        k3 = Keyword(name='keyword 3')
        item = Item(description='item 1')
        item.keywords.extend([k1, k2, k3])
        session = create_session()
        session.add(item)
        session.flush()
        # clear then re-append a subset; net history is "remove k3"
        item.keywords = []
        item.keywords.append(k1)
        item.keywords.append(k2)
        session.flush()
        session.expunge_all()
        item = session.query(Item).get(item.id)
        assert item.keywords == [k1, k2]

    def test_association(self):
        """Basic test of an association object"""
        keywords, items, item_keywords, Keyword, Item = (self.tables.keywords,
                                                         self.tables.items,
                                                         self.tables.item_keywords,
                                                         self.classes.Keyword,
                                                         self.classes.Item)

        class IKAssociation(fixtures.ComparableEntity):
            pass

        mapper(Keyword, keywords)
        # note that we are breaking a rule here, making a second
        # mapper(Keyword, keywords) the reorganization of mapper construction
        # affected this, but was fixed again
        mapper(IKAssociation, item_keywords,
               primary_key=[item_keywords.c.item_id, item_keywords.c.keyword_id],
               properties=dict(
                   keyword=relationship(mapper(Keyword, keywords, non_primary=True),
                                        lazy='joined',
                                        uselist=False,
                                        order_by=keywords.c.name  # note here is a valid place where order_by can be used
                                        )))  # on a scalar relationship(); to determine eager ordering of
                                             # the parent object within its collection.
        mapper(Item, items, properties=dict(
            keywords=relationship(IKAssociation, lazy='joined')))
        session = create_session()

        def fixture():
            # build (or reuse) keywords by name and wrap each in an
            # association row per item
            _kw = dict([(k.name, k) for k in session.query(Keyword)])
            for n in ('big', 'green', 'purple', 'round', 'huge',
                      'violet', 'yellow', 'blue'):
                if n not in _kw:
                    _kw[n] = Keyword(name=n)
            def assocs(*names):
                return [IKAssociation(keyword=kw)
                        for kw in [_kw[n] for n in names]]
            return [
                Item(description='a_item1',
                     keywords=assocs('big', 'green', 'purple', 'round')),
                Item(description='a_item2',
                     keywords=assocs('huge', 'violet', 'yellow')),
                Item(description='a_item3',
                     keywords=assocs('big', 'blue'))]

        session.add_all(fixture())
        session.flush()
        eq_(fixture(), session.query(Item).order_by(Item.description).all())
class SaveTest2(_fixtures.FixtureTest):
    """Verifies dependency ordering of INSERTs for m2o relationships."""

    run_inserts = None

    def test_m2o_nonmatch(self):
        """Two unrelated Address->User pairs flush as user-then-address,
        pair by pair; asserted against the exact INSERT sequence."""
        users, Address, addresses, User = (self.tables.users,
                                           self.classes.Address,
                                           self.tables.addresses,
                                           self.classes.User)
        mapper(User, users)
        mapper(Address, addresses, properties=dict(
            user = relationship(User, lazy='select', uselist=False)))
        session = create_session()

        def fixture():
            return [
                Address(email_address='a1', user=User(name='u1')),
                Address(email_address='a2', user=User(name='u2'))]

        session.add_all(fixture())
        self.assert_sql_execution(
            testing.db,
            session.flush,
            CompiledSQL("INSERT INTO users (name) VALUES (:name)",
                        {'name': 'u1'}),
            CompiledSQL("INSERT INTO users (name) VALUES (:name)",
                        {'name': 'u2'}),
            CompiledSQL("INSERT INTO addresses (user_id, email_address) "
                        "VALUES (:user_id, :email_address)",
                        {'user_id': 1, 'email_address': 'a1'}),
            CompiledSQL("INSERT INTO addresses (user_id, email_address) "
                        "VALUES (:user_id, :email_address)",
                        {'user_id': 2, 'email_address': 'a2'}),
        )
class SaveTest3(fixtures.MappedTest):
    """m2m secondary table carrying an extra non-FK column."""

    @classmethod
    def define_tables(cls, metadata):
        Table('items', metadata,
              Column('item_id', Integer, primary_key=True,
                     test_needs_autoincrement=True),
              Column('item_name', String(50)))
        Table('keywords', metadata,
              Column('keyword_id', Integer, primary_key=True,
                     test_needs_autoincrement=True),
              Column('name', String(50)))
        # association table with an extra 'foo' column beyond the two FKs
        Table('assoc', metadata,
              Column('item_id', Integer, ForeignKey("items")),
              Column('keyword_id', Integer, ForeignKey("keywords")),
              Column('foo', sa.Boolean, default=True))

    @classmethod
    def setup_classes(cls):
        class Keyword(cls.Basic):
            pass
        class Item(cls.Basic):
            pass

    def test_manytomany_xtracol_delete(self):
        """A many-to-many on a table that has an extra column can properly delete rows from the table without referencing the extra column"""
        keywords, items, assoc, Keyword, Item = (self.tables.keywords,
                                                 self.tables.items,
                                                 self.tables.assoc,
                                                 self.classes.Keyword,
                                                 self.classes.Item)
        mapper(Keyword, keywords)
        mapper(Item, items, properties=dict(
            keywords = relationship(Keyword, secondary=assoc, lazy='joined'),))
        i = Item()
        k1 = Keyword()
        k2 = Keyword()
        i.keywords.append(k1)
        i.keywords.append(k2)
        session = create_session()
        session.add(i)
        session.flush()
        assert assoc.count().scalar() == 2
        # emptying the collection must delete both association rows
        i.keywords = []
        session.flush()
        assert assoc.count().scalar() == 0
class BooleanColTest(fixtures.MappedTest):
    """Round-tripping and filtering on a Boolean column."""

    @classmethod
    def define_tables(cls, metadata):
        Table('t1_t', metadata,
              Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
              Column('name', String(30)),
              Column('value', sa.Boolean))

    def test_boolean(self):
        """Insert, query-filter, and update boolean values, both against
        the identity map and after a full expunge."""
        t1_t = self.tables.t1_t
        # use the regular mapper
        class T(fixtures.ComparableEntity):
            pass
        orm_mapper(T, t1_t, order_by=t1_t.c.id)
        sess = create_session()
        t1 = T(value=True, name="t1")
        t2 = T(value=False, name="t2")
        t3 = T(value=True, name="t3")
        sess.add_all((t1, t2, t3))
        sess.flush()
        # run the same queries twice: once cached, once after expunge
        for clear in (False, True):
            if clear:
                sess.expunge_all()
            eq_(sess.query(T).all(), [T(value=True, name="t1"), T(value=False, name="t2"), T(value=True, name="t3")])
            if clear:
                sess.expunge_all()
            eq_(sess.query(T).filter(T.value==True).all(), [T(value=True, name="t1"),T(value=True, name="t3")])
            if clear:
                sess.expunge_all()
            eq_(sess.query(T).filter(T.value==False).all(), [T(value=False, name="t2")])
        t2 = sess.query(T).get(t2.id)
        t2.value = True
        sess.flush()
        eq_(sess.query(T).filter(T.value==True).all(), [T(value=True, name="t1"), T(value=True, name="t2"), T(value=True, name="t3")])
        t2.value = False
        sess.flush()
        eq_(sess.query(T).filter(T.value==True).all(), [T(value=True, name="t1"),T(value=True, name="t3")])
class RowSwitchTest(fixtures.MappedTest):
    """'Row switch': deleting one instance and adding another with the same
    primary key in one flush becomes a single UPDATE of the existing row."""

    @classmethod
    def define_tables(cls, metadata):
        # parent
        Table('t5', metadata,
              Column('id', Integer, primary_key=True),
              Column('data', String(30), nullable=False))
        # onetomany
        Table('t6', metadata,
              Column('id', Integer, primary_key=True),
              Column('data', String(30), nullable=False),
              Column('t5id', Integer, ForeignKey('t5.id'),nullable=False))
        # associated
        Table('t7', metadata,
              Column('id', Integer, primary_key=True),
              Column('data', String(30), nullable=False))
        #manytomany
        Table('t5t7', metadata,
              Column('t5id', Integer, ForeignKey('t5.id'),nullable=False),
              Column('t7id', Integer, ForeignKey('t7.id'),nullable=False))

    @classmethod
    def setup_classes(cls):
        class T5(cls.Comparable):
            pass
        class T6(cls.Comparable):
            pass
        class T7(cls.Comparable):
            pass

    def test_onetomany(self):
        """Row switch on the parent of a one-to-many with delete-orphan:
        children of the old parent are replaced by the new parent's."""
        t6, T6, t5, T5 = (self.tables.t6,
                          self.classes.T6,
                          self.tables.t5,
                          self.classes.T5)
        mapper(T5, t5, properties={
            't6s':relationship(T6, cascade="all, delete-orphan")
        })
        mapper(T6, t6)
        sess = create_session()
        o5 = T5(data='some t5', id=1)
        o5.t6s.append(T6(data='some t6', id=1))
        o5.t6s.append(T6(data='some other t6', id=2))
        sess.add(o5)
        sess.flush()
        eq_(
            list(sess.execute(t5.select(), mapper=T5)),
            [(1, 'some t5')]
        )
        eq_(
            list(sess.execute(t6.select().order_by(t6.c.id), mapper=T5)),
            [(1, 'some t6', 1), (2, 'some other t6', 1)]
        )
        # new instance reuses o5's primary key -> row switch
        o6 = T5(data='some other t5', id=o5.id, t6s=[
            T6(data='third t6', id=3),
            T6(data='fourth t6', id=4),
            ])
        sess.delete(o5)
        sess.add(o6)
        sess.flush()
        eq_(
            list(sess.execute(t5.select(), mapper=T5)),
            [(1, 'some other t5')]
        )
        eq_(
            list(sess.execute(t6.select().order_by(t6.c.id), mapper=T5)),
            [(3, 'third t6', 1), (4, 'fourth t6', 1)]
        )

    def test_manytomany(self):
        """Row switch on a parent with an m2m collection under cascade='all';
        the delete cascades to the old associated rows."""
        t7, t5, t5t7, T5, T7 = (self.tables.t7,
                                self.tables.t5,
                                self.tables.t5t7,
                                self.classes.T5,
                                self.classes.T7)
        mapper(T5, t5, properties={
            't7s':relationship(T7, secondary=t5t7, cascade="all")
        })
        mapper(T7, t7)
        sess = create_session()
        o5 = T5(data='some t5', id=1)
        o5.t7s.append(T7(data='some t7', id=1))
        o5.t7s.append(T7(data='some other t7', id=2))
        sess.add(o5)
        sess.flush()
        assert list(sess.execute(t5.select(), mapper=T5)) == [(1, 'some t5')]
        assert testing.rowset(sess.execute(t5t7.select(), mapper=T5)) == set([(1,1), (1, 2)])
        assert list(sess.execute(t7.select(), mapper=T5)) == [(1, 'some t7'), (2, 'some other t7')]
        o6 = T5(data='some other t5', id=1, t7s=[
            T7(data='third t7', id=3),
            T7(data='fourth t7', id=4),
            ])
        sess.delete(o5)
        # 'all' cascade pulls the old T7s into the delete
        assert o5 in sess.deleted
        assert o5.t7s[0] in sess.deleted
        assert o5.t7s[1] in sess.deleted
        sess.add(o6)
        sess.flush()
        assert list(sess.execute(t5.select(), mapper=T5)) == [(1, 'some other t5')]
        assert list(sess.execute(t7.select(), mapper=T5)) == [(3, 'third t7'), (4, 'fourth t7')]

    def test_manytoone(self):
        """Row switch on the child of a many-to-one; its new target parent
        replaces the explicitly deleted old one."""
        t6, T6, t5, T5 = (self.tables.t6,
                          self.classes.T6,
                          self.tables.t5,
                          self.classes.T5)
        mapper(T6, t6, properties={
            't5':relationship(T5)
        })
        mapper(T5, t5)
        sess = create_session()
        o5 = T6(data='some t6', id=1)
        o5.t5 = T5(data='some t5', id=1)
        sess.add(o5)
        sess.flush()
        assert list(sess.execute(t5.select(), mapper=T5)) == [(1, 'some t5')]
        assert list(sess.execute(t6.select(), mapper=T5)) == [(1, 'some t6', 1)]
        o6 = T6(data='some other t6', id=1, t5=T5(data='some other t5', id=2))
        sess.delete(o5)
        sess.delete(o5.t5)
        sess.add(o6)
        sess.flush()
        assert list(sess.execute(t5.select(), mapper=T5)) == [(2, 'some other t5')]
        assert list(sess.execute(t6.select(), mapper=T5)) == [(1, 'some other t6', 2)]
class InheritingRowSwitchTest(fixtures.MappedTest):
    """Row switch across a joined-table inheritance hierarchy."""

    @classmethod
    def define_tables(cls, metadata):
        Table('parent', metadata,
              Column('pid', Integer, primary_key=True),
              Column('pdata', String(30))
              )
        Table('child', metadata,
              Column('cid', Integer, primary_key=True),
              Column('pid', Integer, ForeignKey('parent.pid')),
              Column('cdata', String(30))
              )

    @classmethod
    def setup_classes(cls):
        class P(cls.Comparable):
            pass
        class C(P):
            pass

    def test_row_switch_no_child_table(self):
        """Row switch where the replacement has no value for the child
        table; only the parent row's data is updated, plus an FK sync."""
        P, C, parent, child = (self.classes.P,
                               self.classes.C,
                               self.tables.parent,
                               self.tables.child)
        mapper(P, parent)
        mapper(C, child, inherits=P)
        sess = create_session()
        c1 = C(pid=1, cid=1, pdata='c1', cdata='c1')
        sess.add(c1)
        sess.flush()
        # establish a row switch between c1 and c2.
        # c2 has no value for the "child" table
        c2 = C(pid=1, cid=1, pdata='c2')
        sess.add(c2)
        sess.delete(c1)
        self.assert_sql_execution(testing.db, sess.flush,
            CompiledSQL("UPDATE parent SET pdata=:pdata WHERE parent.pid = :parent_pid",
                        {'pdata':'c2', 'parent_pid':1}
                        ),
            # this fires as of [ticket:1362], since we synchronzize
            # PK/FKs on UPDATES. c2 is new so the history shows up as
            # pure added, update occurs. If a future change limits the
            # sync operation during _save_obj().update, this is safe to remove again.
            CompiledSQL("UPDATE child SET pid=:pid WHERE child.cid = :child_cid",
                        {'pid':1, 'child_cid':1}
                        )
        )
class TransactionTest(fixtures.MappedTest):
    """Transaction cleanup when COMMIT itself fails (deferred constraint)."""

    __requires__ = ('deferrable_or_no_constraints',)

    @classmethod
    def define_tables(cls, metadata):
        t1 = Table('t1', metadata,
                   Column('id', Integer, primary_key=True))
        t2 = Table('t2', metadata,
                   Column('id', Integer, primary_key=True),
                   Column('t1_id', Integer,
                          ForeignKey('t1.id', deferrable=True, initially='deferred')
                          ))

    @classmethod
    def setup_classes(cls):
        class T1(cls.Comparable):
            pass
        class T2(cls.Comparable):
            pass

    @classmethod
    def setup_mappers(cls):
        T2, T1, t2, t1 = (cls.classes.T2,
                          cls.classes.T1,
                          cls.tables.t2,
                          cls.tables.t1)
        orm_mapper(T1, t1)
        orm_mapper(T2, t2)

    def test_close_transaction_on_commit_fail(self):
        T2, t1 = self.classes.T2, self.tables.t1
        session = create_session(autocommit=True)
        # with a deferred constraint, this fails at COMMIT time instead
        # of at INSERT time.
        session.add(T2(t1_id=123))
        try:
            session.flush()
            assert False
        except:
            # NOTE(review): bare except is deliberate here — the exact
            # DBAPI error type raised at COMMIT varies by backend.
            # Flush needs to rollback also when commit fails
            assert session.transaction is None
        # todo: on 8.3 at least, the failed commit seems to close the cursor?
        # needs investigation. leaving in the DDL above now to help verify
        # that the new deferrable support on FK isn't involved in this issue.
        if testing.against('postgresql'):
            t1.bind.engine.dispose()
class PartialNullPKTest(fixtures.MappedTest):
    """Tests that flush() refuses to UPDATE/DELETE rows whose primary key
    contains NULL values."""
    # sqlite totally fine with NULLs in pk columns.
    # no other DB is like this.
    __only_on__ = ('sqlite',)
    @classmethod
    def define_tables(cls, metadata):
        # composite primary key where both columns are nullable
        Table('t1', metadata,
            Column('col1', String(10), primary_key=True, nullable=True),
            Column('col2', String(10), primary_key=True, nullable=True),
            Column('col3', String(50))
            )
    @classmethod
    def setup_classes(cls):
        class T1(cls.Basic):
            pass
    @classmethod
    def setup_mappers(cls):
        orm_mapper(cls.classes.T1, cls.tables.t1)
    def test_key_switch(self):
        """Assigning a value to a NULL PK column must fail on UPDATE."""
        T1 = self.classes.T1
        s = Session()
        s.add(T1(col1="1", col2=None))
        t1 = s.query(T1).first()
        t1.col2 = 5
        assert_raises_message(
            orm_exc.FlushError,
            "Can't update table t1 using NULL for primary "
            "key value on column t1.col2",
            s.commit
        )
    def test_plain_update(self):
        """Updating a non-PK column still fails when the PK contains NULL."""
        T1 = self.classes.T1
        s = Session()
        s.add(T1(col1="1", col2=None))
        t1 = s.query(T1).first()
        t1.col3 = 'hi'
        assert_raises_message(
            orm_exc.FlushError,
            "Can't update table t1 using NULL for primary "
            "key value on column t1.col2",
            s.commit
        )
    def test_delete(self):
        """DELETE of a row with a NULL PK value must fail."""
        T1 = self.classes.T1
        s = Session()
        s.add(T1(col1="1", col2=None))
        t1 = s.query(T1).first()
        s.delete(t1)
        assert_raises_message(
            orm_exc.FlushError,
            "Can't delete from table t1 using NULL "
            "for primary key value on column t1.col2",
            s.commit
        )
    def test_total_null(self):
        """An identity key that is entirely NULL is rejected outright."""
        T1 = self.classes.T1
        s = Session()
        s.add(T1(col1=None, col2=None))
        assert_raises_message(
            orm_exc.FlushError,
            r"Instance \<T1 at .+?\> has a NULL "
            "identity key. If this is an auto-generated value, "
            "check that the database table allows generation ",
            s.commit
        )
    def test_dont_complain_if_no_update(self):
        """No error when the flush has no UPDATE to perform (value unchanged)."""
        T1 = self.classes.T1
        s = Session()
        t = T1(col1="1", col2=None)
        s.add(t)
        s.commit()
        t.col1 = "1"
s.commit() | mit |
Asus-T100/kernel | scripts/gdb/linux/symbols.py | 467 | 6343 | #
# gdb helper commands and functions for Linux kernel debugging
#
# load kernel and module symbols
#
# Copyright (c) Siemens AG, 2011-2013
#
# Authors:
# Jan Kiszka <jan.kiszka@siemens.com>
#
# This work is licensed under the terms of the GNU GPL version 2.
#
import gdb
import os
import re
from linux import modules
if hasattr(gdb, 'Breakpoint'):
    class LoadModuleBreakpoint(gdb.Breakpoint):
        """Internal breakpoint hit when the kernel finishes loading a
        module, so module symbols can be (re-)loaded automatically."""
        def __init__(self, spec, gdb_command):
            super(LoadModuleBreakpoint, self).__init__(spec, internal=True)
            self.silent = True
            # back-reference to the owning lx-symbols command instance
            self.gdb_command = gdb_command
        def stop(self):
            # "mod" is the struct module * in the probed kernel function
            module = gdb.parse_and_eval("mod")
            module_name = module['name'].string()
            cmd = self.gdb_command
            # enforce update if object file is not found
            cmd.module_files_updated = False
            # Disable pagination while reporting symbol (re-)loading.
            # The console input is blocked in this context so that we would
            # get stuck waiting for the user to acknowledge paged output.
            show_pagination = gdb.execute("show pagination", to_string=True)
            pagination = show_pagination.endswith("on.\n")
            gdb.execute("set pagination off")
            if module_name in cmd.loaded_modules:
                gdb.write("refreshing all symbols to reload module "
                          "'{0}'\n".format(module_name))
                cmd.load_all_symbols()
            else:
                cmd.load_module_symbols(module)
            # restore pagination state
            gdb.execute("set pagination %s" % ("on" if pagination else "off"))
            # returning False means execution is never actually halted here
            return False
class LxSymbols(gdb.Command):
    """(Re-)load symbols of Linux kernel and currently loaded modules.

The kernel (vmlinux) is taken from the current working directly. Modules (.ko)
are scanned recursively, starting in the same directory. Optionally, the module
search path can be extended by a space separated list of paths passed to the
lx-symbols command."""

    module_paths = []
    module_files = []
    module_files_updated = False
    loaded_modules = []
    breakpoint = None

    def __init__(self):
        super(LxSymbols, self).__init__("lx-symbols", gdb.COMMAND_FILES,
                                        gdb.COMPLETE_FILENAME)

    def _update_module_files(self):
        """Rebuild the list of candidate .ko files found under module_paths."""
        self.module_files = []
        for path in self.module_paths:
            gdb.write("scanning for modules in {0}\n".format(path))
            for root, dirs, files in os.walk(path):
                for name in files:
                    if name.endswith(".ko"):
                        self.module_files.append(root + "/" + name)
        self.module_files_updated = True

    def _get_module_file(self, module_name):
        """Return the on-disk .ko path for module_name, or None.

        In-kernel module names use '_' where file names may use '-',
        so either character is accepted in the match."""
        module_pattern = r".*/{0}\.ko$".format(
            module_name.replace("_", r"[_\-]"))
        for name in self.module_files:
            if re.match(module_pattern, name) and os.path.exists(name):
                return name
        return None

    def _section_arguments(self, module):
        """Build the ' -s <section> <addr>' arguments for add-symbol-file
        from the module's sect_attrs; empty string when unavailable."""
        try:
            sect_attrs = module['sect_attrs'].dereference()
        except gdb.error:
            return ""
        attrs = sect_attrs['attrs']
        section_name_to_address = {
            attrs[n]['name'].string(): attrs[n]['address']
            for n in range(int(sect_attrs['nsections']))}
        args = []
        for section_name in [".data", ".data..read_mostly", ".rodata", ".bss"]:
            address = section_name_to_address.get(section_name)
            if address:
                args.append(" -s {name} {addr}".format(
                    name=section_name, addr=str(address)))
        return "".join(args)

    def load_module_symbols(self, module):
        """Locate the .ko file for one struct module and feed its in-memory
        load address (plus extra section addresses) to add-symbol-file."""
        module_name = module['name'].string()
        module_addr = str(module['core_layout']['base']).split()[0]

        module_file = self._get_module_file(module_name)
        if not module_file and not self.module_files_updated:
            self._update_module_files()
            module_file = self._get_module_file(module_name)

        if module_file:
            # Fixed: report and load the actual module file; both format
            # strings had been corrupted to a literal "(unknown)" while the
            # unused filename= kwarg was still being passed.
            gdb.write("loading @{addr}: {filename}\n".format(
                addr=module_addr, filename=module_file))
            cmdline = "add-symbol-file {filename} {addr}{sections}".format(
                filename=module_file,
                addr=module_addr,
                sections=self._section_arguments(module))
            gdb.execute(cmdline, to_string=True)
            if module_name not in self.loaded_modules:
                self.loaded_modules.append(module_name)
        else:
            gdb.write("no module object found for '{0}'\n".format(module_name))

    def load_all_symbols(self):
        """Reload vmlinux and every loaded module's symbols, preserving
        breakpoint enabled/disabled state across the reload."""
        gdb.write("loading vmlinux\n")
        # Dropping symbols will disable all breakpoints. So save their states
        # and restore them afterward.
        saved_states = []
        if hasattr(gdb, 'breakpoints') and not gdb.breakpoints() is None:
            for bp in gdb.breakpoints():
                saved_states.append({'breakpoint': bp, 'enabled': bp.enabled})
        # drop all current symbols and reload vmlinux
        gdb.execute("symbol-file", to_string=True)
        gdb.execute("symbol-file vmlinux")
        self.loaded_modules = []
        module_list = modules.module_list()
        if not module_list:
            gdb.write("no modules found\n")
        else:
            # plain loop instead of a side-effect list comprehension
            for module in module_list:
                self.load_module_symbols(module)
        for saved_state in saved_states:
            saved_state['breakpoint'].enabled = saved_state['enabled']

    def invoke(self, arg, from_tty):
        """Entry point of the lx-symbols command; arg is an optional
        space-separated list of extra module search paths."""
        self.module_paths = [os.path.expanduser(p) for p in arg.split()]
        self.module_paths.append(os.getcwd())

        # enforce update
        self.module_files = []
        self.module_files_updated = False

        self.load_all_symbols()

        if hasattr(gdb, 'Breakpoint'):
            if self.breakpoint is not None:
                self.breakpoint.delete()
                self.breakpoint = None
            self.breakpoint = LoadModuleBreakpoint(
                "kernel/module.c:do_init_module", self)
        else:
            gdb.write("Note: symbol update on module loading not supported "
                      "with this gdb version\n")
LxSymbols()
| gpl-2.0 |
vrv/tensorflow | tensorflow/contrib/distributions/python/kernel_tests/bijectors/chain_test.py | 6 | 3498 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Chain Tests."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.distributions.python.ops.bijectors.bijector_test_util import assert_scalar_congruency
from tensorflow.contrib.distributions.python.ops.bijectors.chain import Chain
from tensorflow.contrib.distributions.python.ops.bijectors.exp import Exp
from tensorflow.contrib.distributions.python.ops.bijectors.softmax_centered import SoftmaxCentered
from tensorflow.contrib.distributions.python.ops.bijectors.softplus import Softplus
from tensorflow.python.framework import tensor_shape
from tensorflow.python.platform import test
class ChainBijectorTest(test.TestCase):
  """Tests the correctness of the Y = Chain(bij1, bij2, bij3) transformation."""
  def testBijector(self):
    """Forward/inverse and log-det-jacobians of Chain(Exp, Softplus)."""
    with self.test_session():
      chain = Chain((Exp(event_ndims=1), Softplus(event_ndims=1)))
      self.assertEqual("chain_of_exp_of_softplus", chain.name)
      x = np.asarray([[[1., 2.],
                       [2., 3.]]])
      # forward composes softplus then exp: exp(log(1+e^x)) == 1 + e^x
      self.assertAllClose(1. + np.exp(x), chain.forward(x).eval())
      self.assertAllClose(np.log(x - 1.), chain.inverse(x).eval())
      self.assertAllClose(
          -np.sum(np.log(x - 1.), axis=2),
          chain.inverse_log_det_jacobian(x).eval())
      self.assertAllClose(
          np.sum(x, axis=2), chain.forward_log_det_jacobian(x).eval())
  def testBijectorIdentity(self):
    """An empty Chain behaves as the identity bijector."""
    with self.test_session():
      chain = Chain()
      self.assertEqual("identity", chain.name)
      x = np.asarray([[[1., 2.],
                       [2., 3.]]])
      self.assertAllClose(x, chain.forward(x).eval())
      self.assertAllClose(x, chain.inverse(x).eval())
      self.assertAllClose(0., chain.inverse_log_det_jacobian(x).eval())
      self.assertAllClose(0., chain.forward_log_det_jacobian(x).eval())
  def testScalarCongruency(self):
    """Chained scalar bijector passes the generic congruency property check."""
    with self.test_session():
      bijector = Chain((Exp(), Softplus()))
      assert_scalar_congruency(
          bijector, lower_x=1e-3, upper_x=1.5, rtol=0.05)
  def testShapeGetters(self):
    """Event shapes compose across the chained SoftmaxCentered bijectors."""
    with self.test_session():
      bijector = Chain([
          SoftmaxCentered(
              event_ndims=1, validate_args=True),
          SoftmaxCentered(
              event_ndims=0, validate_args=True)
      ])
      # scalar event maps forward to a vector event of length 2 + 1
      x = tensor_shape.TensorShape([])
      y = tensor_shape.TensorShape([2 + 1])
      self.assertAllEqual(y, bijector.forward_event_shape(x))
      self.assertAllEqual(
          y.as_list(),
          bijector.forward_event_shape_tensor(x.as_list()).eval())
      self.assertAllEqual(x, bijector.inverse_event_shape(y))
      self.assertAllEqual(
          x.as_list(),
          bijector.inverse_event_shape_tensor(y.as_list()).eval())
if __name__ == "__main__":
test.main()
| apache-2.0 |
alexrudy/Cauldron | Cauldron/local/dispatcher.py | 1 | 3542 | # -*- coding: utf-8 -*-
"""
Local dispatcher.
The local interface is process-local. It is lightweight, and good for testing environments, but doesn't handle anything that wouldn't normally be process local.
"""
from ..base import DispatcherService, DispatcherKeyword
from ..scheduler import Scheduler
from ..utils.callbacks import Callbacks
from .. import registry
import time
import weakref
import logging
import threading
__all__ = ['Service', 'Keyword']
_registry = weakref.WeakValueDictionary()
@registry.dispatcher.teardown_for("local")
def clear():
    """Shut down every locally-registered service, then empty the registry."""
    for svc in list(_registry.values()):
        svc.shutdown()
    _registry.clear()
class LocalScheduler(Scheduler, threading.Thread):
    """A process-local scheduler thread.

    Runs periodic tasks and one-shot appointments for a single local
    dispatcher service, sleeping between deadlines and waking early on
    demand via :meth:`wake`.
    """
    def __init__(self, name, log=None):
        # fixed typo in the thread name ("Scheudler" -> "Scheduler")
        super(LocalScheduler, self).__init__(name="Scheduler for {0:s}".format(name))
        self.log = log or logging.getLogger("DFW.local.Scheduler.{0:s}".format(name))
        # set to request that the run() loop exit
        self.shutdown = threading.Event()
        # set to wake the run() loop before its timeout expires
        self.waker = threading.Event()
    def wake(self):
        """Wake up the thread."""
        self.waker.set()
    def run(self):
        """Run the task queue thread until shutdown is requested."""
        while not self.shutdown.is_set():
            now = time.time()
            self.run_periods(at=now)
            self.run_appointments(at=now)
            # sleep until the next scheduled deadline, or until woken
            timeout = self.get_timeout()
            self.waker.wait(timeout=timeout)
            self.waker.clear()
    def stop(self):
        """Stop the task-queue thread and wait for it to exit."""
        self.shutdown.set()
        self.waker.set()
        self.join()
        self.log.debug("Closed task queue")
@registry.dispatcher.service_for("local")
class Service(DispatcherService):
    """Dispatcher service for the process-local backend; instances are
    looked up by lowercase name in the module-level weak registry."""
    # per-service scheduler thread, created lazily in _prepare()
    _scheduler = None
    @classmethod
    def get_service(cls, name):
        """Get a dispatcher for a service."""
        #TODO: Support inverse client startup ordering.
        name = str(name).lower()
        return _registry[name]
    def __init__(self, name, config, setup=None, dispatcher=None):
        # service names must be unique within this process
        if str(name).lower() in _registry:
            raise ValueError("Cannot have two services with name '{0}' in local registry.".format(name))
        super(Service, self).__init__(name, config, setup, dispatcher)
    def _prepare(self):
        self._scheduler = LocalScheduler(self.name)
    def _begin(self):
        """Indicate that this service is ready to act, by inserting it into the local registry."""
        _registry[self.name] = self
        self._scheduler.start()
    def shutdown(self):
        """To shutdown this service, delete it."""
        if self._scheduler is not None:
            try:
                self._scheduler.stop()
            except Exception:
                # best-effort: never let scheduler teardown break shutdown
                pass
@registry.dispatcher.keyword_for("local")
class Keyword(DispatcherKeyword):
    """Keyword implementation for the process-local dispatcher backend."""
    def __init__(self, name, service, initial=None, period=None):
        super(Keyword, self).__init__(name, service, initial, period)
        # callback registry used to fan values out to local clients
        self._consumers = Callbacks()
    def _broadcast(self, value):
        """Notify consumers that this value has changed."""
        self._consumers(value)
    def schedule(self, appointment=None, cancel=False):
        """Add (or, with cancel=True, remove) a scheduled appointment."""
        scheduler = self.service._scheduler
        if cancel:
            scheduler.cancel_appointment(appointment, self)
        else:
            scheduler.appointment(appointment, self)
    def period(self, period):
        """Ask the service scheduler to refresh this keyword periodically."""
        self.service._scheduler.period(period, self)
| bsd-3-clause |
Chilledheart/chromium | tools/telemetry/third_party/gsutilz/third_party/boto/tests/unit/vpc/test_vpc.py | 100 | 13752 | # -*- coding: UTF-8 -*-
from tests.unit import unittest
from tests.unit import AWSMockServiceTestCase
from boto.vpc import VPCConnection, VPC
from boto.ec2.securitygroup import SecurityGroup
DESCRIBE_VPCS = b'''<?xml version="1.0" encoding="UTF-8"?>
<DescribeVpcsResponse xmlns="http://ec2.amazonaws.com/doc/2013-02-01/">
<requestId>623040d1-b51c-40bc-8080-93486f38d03d</requestId>
<vpcSet>
<item>
<vpcId>vpc-12345678</vpcId>
<state>available</state>
<cidrBlock>172.16.0.0/16</cidrBlock>
<dhcpOptionsId>dopt-12345678</dhcpOptionsId>
<instanceTenancy>default</instanceTenancy>
<isDefault>false</isDefault>
</item>
</vpcSet>
</DescribeVpcsResponse>'''
class TestDescribeVPCs(AWSMockServiceTestCase):
    """DescribeVpcs: parsing of the mock response into VPC objects."""
    connection_class = VPCConnection
    def default_body(self):
        return DESCRIBE_VPCS
    def test_get_vpcs(self):
        """One VPC record is parsed, with isDefault and instanceTenancy."""
        self.set_http_response(status_code=200)
        api_response = self.service_connection.get_all_vpcs()
        self.assertEqual(len(api_response), 1)
        vpc = api_response[0]
        self.assertFalse(vpc.is_default)
        self.assertEqual(vpc.instance_tenancy, 'default')
class TestCreateVpc(AWSMockServiceTestCase):
    """CreateVpc: request parameters and response attribute parsing."""
    connection_class = VPCConnection
    def default_body(self):
        return b"""
            <CreateVpcResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-01/">
               <requestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</requestId>
               <vpc>
                  <vpcId>vpc-1a2b3c4d</vpcId>
                  <state>pending</state>
                  <cidrBlock>10.0.0.0/16</cidrBlock>
                  <dhcpOptionsId>dopt-1a2b3c4d2</dhcpOptionsId>
                  <instanceTenancy>default</instanceTenancy>
                  <tagSet/>
               </vpc>
            </CreateVpcResponse>
        """
    def test_create_vpc(self):
        """create_vpc sends CidrBlock/InstanceTenancy and parses the VPC."""
        self.set_http_response(status_code=200)
        api_response = self.service_connection.create_vpc('10.0.0.0/16', 'default')
        self.assert_request_parameters({
            'Action': 'CreateVpc',
            'InstanceTenancy': 'default',
            'CidrBlock': '10.0.0.0/16'},
            ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
                                  'SignatureVersion', 'Timestamp',
                                  'Version'])
        self.assertIsInstance(api_response, VPC)
        self.assertEquals(api_response.id, 'vpc-1a2b3c4d')
        self.assertEquals(api_response.state, 'pending')
        self.assertEquals(api_response.cidr_block, '10.0.0.0/16')
        self.assertEquals(api_response.dhcp_options_id, 'dopt-1a2b3c4d2')
        self.assertEquals(api_response.instance_tenancy, 'default')
class TestDeleteVpc(AWSMockServiceTestCase):
    """DeleteVpc: request parameters and boolean response parsing."""
    connection_class = VPCConnection
    def default_body(self):
        return b"""
            <DeleteVpcResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-01/">
               <requestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</requestId>
               <return>true</return>
            </DeleteVpcResponse>
        """
    def test_delete_vpc(self):
        """delete_vpc sends the VpcId and returns True on success."""
        self.set_http_response(status_code=200)
        api_response = self.service_connection.delete_vpc('vpc-1a2b3c4d')
        self.assert_request_parameters({
            'Action': 'DeleteVpc',
            'VpcId': 'vpc-1a2b3c4d'},
            ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
                                  'SignatureVersion', 'Timestamp',
                                  'Version'])
        self.assertEquals(api_response, True)
class TestModifyVpcAttribute(AWSMockServiceTestCase):
    """ModifyVpcAttribute: request parameters for the two DNS toggles."""
    connection_class = VPCConnection
    def default_body(self):
        return b"""
            <ModifyVpcAttributeResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-01/">
               <requestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</requestId>
               <return>true</return>
            </ModifyVpcAttributeResponse>
        """
    def test_modify_vpc_attribute_dns_support(self):
        """enable_dns_support=True becomes EnableDnsSupport.Value=true."""
        self.set_http_response(status_code=200)
        api_response = self.service_connection.modify_vpc_attribute(
            'vpc-1a2b3c4d', enable_dns_support=True)
        self.assert_request_parameters({
            'Action': 'ModifyVpcAttribute',
            'VpcId': 'vpc-1a2b3c4d',
            'EnableDnsSupport.Value': 'true'},
            ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
                                  'SignatureVersion', 'Timestamp',
                                  'Version'])
        self.assertEquals(api_response, True)
    def test_modify_vpc_attribute_dns_hostnames(self):
        """enable_dns_hostnames=True becomes EnableDnsHostnames.Value=true."""
        self.set_http_response(status_code=200)
        api_response = self.service_connection.modify_vpc_attribute(
            'vpc-1a2b3c4d', enable_dns_hostnames=True)
        self.assert_request_parameters({
            'Action': 'ModifyVpcAttribute',
            'VpcId': 'vpc-1a2b3c4d',
            'EnableDnsHostnames.Value': 'true'},
            ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
                                  'SignatureVersion', 'Timestamp',
                                  'Version'])
        self.assertEquals(api_response, True)
class TestGetAllClassicLinkVpc(AWSMockServiceTestCase):
    """DescribeVpcClassicLink: response parsing and request parameters."""
    connection_class = VPCConnection
    def default_body(self):
        # fixed: removed a stray '[' that had crept into the XML fixture
        # after the closing </value> tag.
        return b"""
            <DescribeVpcClassicLinkResponse xmlns="http://ec2.amazonaws.com/doc/2014-09-01/">
                <requestId>2484655d-d669-4950-bf55-7ba559805d36</requestId>
                <vpcSet>
                    <item>
                        <vpcId>vpc-6226ab07</vpcId>
                        <classicLinkEnabled>false</classicLinkEnabled>
                        <tagSet>
                            <item>
                                <key>Name</key>
                                <value>hello</value>
                            </item>
                        </tagSet>
                    </item>
                    <item>
                        <vpcId>vpc-9d24f8f8</vpcId>
                        <classicLinkEnabled>true</classicLinkEnabled>
                        <tagSet/>
                    </item>
                </vpcSet>
            </DescribeVpcClassicLinkResponse>
        """
    def test_get_all_classic_link_vpcs(self):
        """Two VPC records are parsed, including the tags of the first."""
        self.set_http_response(status_code=200)
        response = self.service_connection.get_all_classic_link_vpcs()
        self.assertEqual(len(response), 2)
        vpc = response[0]
        self.assertEqual(vpc.id, 'vpc-6226ab07')
        self.assertEqual(vpc.classic_link_enabled, 'false')
        self.assertEqual(vpc.tags, {'Name': 'hello'})
    def test_get_all_classic_link_vpcs_params(self):
        """VpcId list, filters and DryRun are serialized into the request."""
        self.set_http_response(status_code=200)
        self.service_connection.get_all_classic_link_vpcs(
            vpc_ids=['id1', 'id2'],
            filters={'GroupId': 'sg-9b4343fe'},
            dry_run=True,
        )
        self.assert_request_parameters({
            'Action': 'DescribeVpcClassicLink',
            'VpcId.1': 'id1',
            'VpcId.2': 'id2',
            'Filter.1.Name': 'GroupId',
            'Filter.1.Value.1': 'sg-9b4343fe',
            'DryRun': 'true'},
            ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
                                  'SignatureVersion', 'Timestamp', 'Version'])
class TestVpcClassicLink(AWSMockServiceTestCase):
    """Shared fixture for the ClassicLink tests: a VPC object with a
    known id bound to the mock connection."""
    connection_class = VPCConnection
    def setUp(self):
        super(TestVpcClassicLink, self).setUp()
        self.vpc = VPC(self.service_connection)
        self.vpc_id = 'myid'
        self.vpc.id = self.vpc_id
class TestAttachClassicLinkVpc(TestVpcClassicLink):
    """AttachClassicLinkVpc: security groups given as strings or objects."""
    def default_body(self):
        return b"""
            <AttachClassicLinkVpcResponse xmlns="http://ec2.amazonaws.com/doc/2014-09-01/">
                <requestId>88673bdf-cd16-40bf-87a1-6132fec47257</requestId>
                <return>true</return>
            </AttachClassicLinkVpcResponse>
        """
    def test_attach_classic_link_instance_string_groups(self):
        """Group ids passed as plain strings are serialized in order."""
        groups = ['sg-foo', 'sg-bar']
        self.set_http_response(status_code=200)
        response = self.vpc.attach_classic_instance(
            instance_id='my_instance_id',
            groups=groups,
            dry_run=True
        )
        self.assertTrue(response)
        self.assert_request_parameters({
            'Action': 'AttachClassicLinkVpc',
            'VpcId': self.vpc_id,
            'InstanceId': 'my_instance_id',
            'SecurityGroupId.1': 'sg-foo',
            'SecurityGroupId.2': 'sg-bar',
            'DryRun': 'true'},
            ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
                                  'SignatureVersion', 'Timestamp', 'Version'])
    def test_attach_classic_link_instance_object_groups(self):
        """SecurityGroup objects serialize identically to string ids."""
        sec_group_1 = SecurityGroup()
        sec_group_1.id = 'sg-foo'
        sec_group_2 = SecurityGroup()
        sec_group_2.id = 'sg-bar'
        groups = [sec_group_1, sec_group_2]
        self.set_http_response(status_code=200)
        response = self.vpc.attach_classic_instance(
            instance_id='my_instance_id',
            groups=groups,
            dry_run=True
        )
        self.assertTrue(response)
        self.assert_request_parameters({
            'Action': 'AttachClassicLinkVpc',
            'VpcId': self.vpc_id,
            'InstanceId': 'my_instance_id',
            'SecurityGroupId.1': 'sg-foo',
            'SecurityGroupId.2': 'sg-bar',
            'DryRun': 'true'},
            ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
                                  'SignatureVersion', 'Timestamp', 'Version'])
class TestDetachClassicLinkVpc(TestVpcClassicLink):
    """DetachClassicLinkVpc: request parameters and response parsing."""
    def default_body(self):
        return b"""
            <DetachClassicLinkVpcResponse xmlns="http://ec2.amazonaws.com/doc/2014-09-01/">
                <requestId>5565033d-1321-4eef-b121-6aa46f152ed7</requestId>
                <return>true</return>
            </DetachClassicLinkVpcResponse>
        """
    def test_detach_classic_link_instance(self):
        """detach_classic_instance sends VpcId/InstanceId and returns True."""
        self.set_http_response(status_code=200)
        response = self.vpc.detach_classic_instance(
            instance_id='my_instance_id',
            dry_run=True
        )
        self.assertTrue(response)
        self.assert_request_parameters({
            'Action': 'DetachClassicLinkVpc',
            'VpcId': self.vpc_id,
            'InstanceId': 'my_instance_id',
            'DryRun': 'true'},
            ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
                                  'SignatureVersion', 'Timestamp', 'Version'])
class TestEnableClassicLinkVpc(TestVpcClassicLink):
    """EnableVpcClassicLink: request parameters and response parsing."""
    def default_body(self):
        return b"""
            <EnableVpcClassicLinkResponse xmlns="http://ec2.amazonaws.com/doc/2014-09-01/">
                <requestId>4ab2b2b3-a267-4366-a070-bab853b5927d</requestId>
                <return>true</return>
            </EnableVpcClassicLinkResponse>
        """
    def test_enable_classic_link(self):
        """enable_classic_link sends the VpcId and returns True."""
        self.set_http_response(status_code=200)
        response = self.vpc.enable_classic_link(
            dry_run=True
        )
        self.assertTrue(response)
        self.assert_request_parameters({
            'Action': 'EnableVpcClassicLink',
            'VpcId': self.vpc_id,
            'DryRun': 'true'},
            ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
                                  'SignatureVersion', 'Timestamp', 'Version'])
class TestDisableClassicLinkVpc(TestVpcClassicLink):
    """DisableVpcClassicLink: request parameters and response parsing."""
    def default_body(self):
        return b"""
            <DisableVpcClassicLinkResponse xmlns="http://ec2.amazonaws.com/doc/2014-09-01/">
                <requestId>4ab2b2b3-a267-4366-a070-bab853b5927d</requestId>
                <return>true</return>
            </DisableVpcClassicLinkResponse>
        """
    def test_disable_classic_link(self):
        # renamed from test_enable_classic_link: the old name was a
        # copy-paste from the Enable test class and misdescribed this test.
        self.set_http_response(status_code=200)
        response = self.vpc.disable_classic_link(
            dry_run=True
        )
        self.assertTrue(response)
        self.assert_request_parameters({
            'Action': 'DisableVpcClassicLink',
            'VpcId': self.vpc_id,
            'DryRun': 'true'},
            ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
                                  'SignatureVersion', 'Timestamp', 'Version'])
class TestUpdateClassicLinkVpc(TestVpcClassicLink):
    """update_classic_link_enabled: refreshes the attribute via
    DescribeVpcClassicLink."""
    def default_body(self):
        return b"""
            <DescribeVpcClassicLinkResponse xmlns="http://ec2.amazonaws.com/doc/2014-09-01/">
                <requestId>2484655d-d669-4950-bf55-7ba559805d36</requestId>
                <vpcSet>
                    <item>
                        <vpcId>myid</vpcId>
                        <classicLinkEnabled>true</classicLinkEnabled>
                        <tagSet/>
                    </item>
                </vpcSet>
            </DescribeVpcClassicLinkResponse>
        """
    def test_vpc_update_classic_link_enabled(self):
        """The stale local attribute is replaced by the server's value."""
        self.vpc.classic_link_enabled = False
        self.set_http_response(status_code=200)
        self.vpc.update_classic_link_enabled(
            dry_run=True,
            validate=True
        )
        self.assert_request_parameters({
            'Action': 'DescribeVpcClassicLink',
            'VpcId.1': self.vpc_id,
            'DryRun': 'true'},
            ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
                                  'SignatureVersion', 'Timestamp', 'Version'])
        self.assertEqual(self.vpc.classic_link_enabled, 'true')
if __name__ == '__main__':
unittest.main()
| bsd-3-clause |
chand3040/sree_odoo | openerp/addons/web/tests/test_menu.py | 303 | 1628 | # -*- coding: utf-8 -*-
import unittest2
from ..controllers import main
class ActionMungerTest(unittest2.TestCase):
    """Checks main.fix_view_modes: "tree" view entries become "list" and
    the obsolete "view_type" key is removed."""
    def test_actual_treeview(self):
        # A genuine tree view (view_type == "tree") keeps its modes as-is;
        # only the view_type key is dropped.
        action = {
            "views": [[False, "tree"], [False, "form"],
                      [False, "calendar"]],
            "view_type": "tree",
            "view_id": False,
            "view_mode": "tree,form,calendar"
        }
        expected = action.copy()
        del expected['view_type']
        main.fix_view_modes(action)
        self.assertEqual(action, expected)
    def test_list_view(self):
        # With view_type "form", every "tree" mode is renamed to "list".
        action = {
            "views": [[False, "tree"], [False, "form"],
                      [False, "calendar"]],
            "view_type": "form",
            "view_id": False,
            "view_mode": "tree,form,calendar"
        }
        main.fix_view_modes(action)
        expected = {
            "views": [[False, "list"], [False, "form"],
                      [False, "calendar"]],
            "view_id": False,
            "view_mode": "list,form,calendar"
        }
        self.assertEqual(action, expected)
    def test_redundant_views(self):
        # Duplicate "tree" entries (with explicit ids) are renamed too.
        action = {
            "views": [[False, "tree"], [False, "form"],
                      [False, "calendar"], [42, "tree"]],
            "view_type": "form",
            "view_id": False,
            "view_mode": "tree,form,calendar"
        }
        main.fix_view_modes(action)
        expected = {
            "views": [[False, "list"], [False, "form"],
                      [False, "calendar"], [42, "list"]],
            "view_id": False,
            "view_mode": "list,form,calendar"
        }
        self.assertEqual(action, expected)
| agpl-3.0 |
EtiennePerot/tunnels | src/tunnels/proxy.py | 1 | 7022 | import asyncore as _asyncore
import random as _random
import threading as _threading
import time as _time
from .confsys import Configurable as _Configurable
from .logger import mkInfoFunction as _mkInfoFunction
from .logger import mkWarnFunction as _mkWarnFunction
_proxyInfo = _mkInfoFunction('Proxy')
_proxyWarn = _mkWarnFunction('Proxy')
_definedProxyClasses = {}
class _ProxyMetaclass(type):
	"""Metaclass that records every Proxy subclass by name in
	_definedProxyClasses, rejecting duplicate class names."""
	def __new__(*args, **kwargs):
		# no explicit mcs parameter; the metaclass itself is args[0]
		builtClass = type.__new__(*args, **kwargs)
		if builtClass.__name__ in _definedProxyClasses:
			# NOTE(review): wording says "message classes" but these are
			# proxy classes -- looks copy-pasted; confirm before changing.
			raise SystemError('Cannot define two message classes with the same name.')
		_definedProxyClasses[builtClass.__name__] = builtClass
		return builtClass
class ProxyThread(_threading.Thread):
	"""Base class for a per-connection proxy worker (daemon) thread."""
	def __init__(self, parentProxy, rule, domain, port, incomingSocket):
		self._parentProxy = parentProxy
		self._rule = rule
		self._domain = domain
		self._port = port
		self._incomingSocket = incomingSocket
		self._alive = True
		# optional list of forced destination addresses from the rule;
		# None means "connect to the requested domain directly"
		self._destinations = self._rule.getForcedAddresses()
		_threading.Thread.__init__(self, name='Thread for ' + domain + ':' + str(port))
		self.daemon = True
	def getParentProxy(self):
		return self._parentProxy
	def getRule(self):
		return self._rule
	def getDomain(self):
		"""Return the destination host; if the rule forces addresses,
		one of them is picked (at random when there are several)."""
		if self._destinations is not None:
			if len(self._destinations) == 1:
				return self._destinations[0]
			destination = _random.choice(self._destinations)
			_proxyInfo(self, 'to', self._domain, 'picked final address', destination, 'out of the', len(self._destinations), 'choices')
			return destination
		return self._domain
	def getPort(self):
		return self._port
	def getDestination(self):
		"""Return the (host, port) pair to connect to."""
		return self.getDomain(), self.getPort()
	def getIncomingSocket(self):
		return self._incomingSocket
	def isAlive(self):
		return self._alive
	def close(self): # Overriddable
		# mark dead and let the parent proxy release per-connection state
		self._alive = False
		self._parentProxy.notifyProxyClosed(self)
	def run(self): # Overriddable
		pass
class ForwarderProxyThread(ProxyThread):
	"""Proxy thread that shuttles bytes between the incoming (client)
	socket and an outgoing (upstream) socket via asyncore, keeping one
	pending-data buffer per direction."""
	def __init__(self, *args, **kwargs):
		ProxyThread.__init__(self, *args, **kwargs)
		# bytes read from the client, waiting to be sent upstream
		self._incomingBuffer = b''
		# bytes read from upstream, waiting to be sent to the client
		self._outgoingBuffer = b''
		# private socket map so this thread's asyncore loop is isolated
		self._asyncSockets = {}
		self._asyncIncoming = _asyncore.dispatcher(self.getIncomingSocket(), self._asyncSockets)
		self._asyncIncoming.handle_read = self._incomingRead
		self._asyncIncoming.handle_write = self._incomingWrite
		self._asyncIncoming.writable = self._incomingWritable
		self._asyncIncoming.handle_close = self._handleClose
		self._asyncOutgoing = _asyncore.dispatcher(self._mkOutgoingSocket(), self._asyncSockets)
		self._asyncOutgoing.handle_read = self._outgoingRead
		self._asyncOutgoing.handle_write = self._outgoingWrite
		self._asyncOutgoing.writable = self._outgoingWritable
		self._asyncOutgoing.handle_close = self._handleClose
		self._readSize = self._getReadSize()
		self._buffered = self._isBuffered()
	def _incomingRead(self):
		read = self._asyncIncoming.recv(self._readSize)
		if read:
			self._incomingBuffer += read
		if not self._buffered:
			# NOTE(review): this drains synchronously and would spin if
			# send() keeps returning 0 -- confirm intended for unbuffered mode.
			while self._incomingBuffer:
				self._outgoingWrite()
	def _incomingWrite(self):
		sent = self._asyncIncoming.send(self._outgoingBuffer)
		if sent:
			self._outgoingBuffer = self._outgoingBuffer[sent:]
	def _incomingWritable(self):
		# only ask for write events while upstream data is pending
		return self._outgoingBuffer
	def _outgoingRead(self):
		read = self._asyncOutgoing.recv(self._readSize)
		if read:
			self._outgoingBuffer += read
		if not self._buffered:
			while self._outgoingBuffer:
				self._incomingWrite()
	def _outgoingWrite(self):
		sent = self._asyncOutgoing.send(self._incomingBuffer)
		if sent:
			self._incomingBuffer = self._incomingBuffer[sent:]
	def _outgoingWritable(self):
		return self._incomingBuffer
	def _handleClose(self):
		# tear down both ends; either side closing ends the session
		try:
			self._asyncIncoming.close()
		except:
			pass
		try:
			self._asyncOutgoing.close()
		except:
			pass
		self.close()
	def run(self):
		# blocks until both dispatchers are closed
		_asyncore.loop(map=self._asyncSockets)
	def _getReadSize(self): # Overriddable
		# NOTE(review): 655365 looks like a typo for 65536 (64 KiB) --
		# confirm before changing; harmless as a recv() upper bound.
		return 655365
	def _isBuffered(self): # Overriddable
		return True
	def _mkOutgoingSocket(self): # Overriddable
		"""Create and return the connected upstream socket."""
		raise NotImplementedError()
class Proxy(_Configurable):
	"""Base class for proxy backends.

	Subclasses declare TCP/UDP support and provide thread classes which
	carry individual connections; instances are registered by name via
	the _ProxyMetaclass."""
	__metaclass__ = _ProxyMetaclass
	def __init__(self, name, providedConfig):
		_Configurable.__init__(self, self.__class__.__name__ + u'<' + name + '>', providedConfig, self.__class__._proxyConfig, self.__class__._proxyConfigRequired)
	def supportsTCP(self): # Overriddable
		"""Whether this proxy can relay TCP connections."""
		return True
	def supportsUDP(self): # Overriddable
		"""Whether this proxy can relay UDP connections."""
		return False
	def spawnTCP(self, rule, domain, tcpPort, incomingSocket):
		"""Spawn a TCP proxy thread for the given destination."""
		if not self.supportsTCP():
			raise SystemError(u'Cannot create a TCP connection; ' + str(self) + u' does not support TCP.')
		return self._doSpawnTCP(rule, domain, tcpPort, incomingSocket)
	def spawnUDP(self, rule, domain, udpPort, incomingSocket):
		"""Spawn a UDP proxy thread for the given destination."""
		if not self.supportsUDP():
			raise SystemError(u'Cannot create a UDP connection; ' + str(self) + u' does not support UDP.')
		# Bug fix: previously passed the undefined name `tcpPort` here,
		# raising NameError for any UDP-capable proxy.
		return self._doSpawnUDP(rule, domain, udpPort, incomingSocket)
	def _doSpawnTCP(self, rule, domain, tcpPort, incomingSocket): # Overriddable
		self._getTCPThreadClass()(self, rule, domain, tcpPort, incomingSocket).start()
		return True
	def _doSpawnUDP(self, rule, domain, udpPort, incomingSocket): # Overriddable
		self._getUDPThreadClass()(self, rule, domain, udpPort, incomingSocket).start()
		return True
	def _getTCPThreadClass(self): # Overriddable
		raise NotImplementedError()
	def _getUDPThreadClass(self): # Overriddable
		raise NotImplementedError()
	def onRegister(self): # Overriddable
		"""Hook called when the proxy is registered; default is a no-op."""
		pass
	def notifyProxyClosed(self, proxyThread): # Overriddable
		"""Hook called when one of this proxy's threads closes."""
		pass
class MultiplexingProxy(Proxy):
	"""Proxy that funnels all of its connections through one shared
	upstream socket, transparently reconnecting (with retries) when that
	socket breaks."""
	class Error(Exception):
		"""Raised by _mkSocket implementations on connection failure."""
		pass
	def __init__(self, *args, **kwargs):
		Proxy.__init__(self, *args, **kwargs)
		# guards _socket/_activeCount across proxy threads
		self._lock = _threading.RLock()
		self._socket = None
		self._activeCount = 0
	def _getKeepalivePolicy(self): # Overriddable
		"""Whether the shared socket stays open with no active users."""
		raise NotImplementedError()
	def _mkSocket(self): # Overriddable
		"""Create the upstream socket; may raise MultiplexingProxy.Error."""
		raise NotImplementedError()
	def _disconnectSocket(self): # Overriddable
		self._socket.close()
	def _autoReconnectSleep(self): # Overriddable
		"""Seconds to wait between reconnection attempts."""
		return 5
	def _mkSocketLoop(self):
		"""Retry _mkSocket until it succeeds, sleeping between attempts."""
		socket = None
		while socket is None:
			try:
				socket = self._mkSocket()
			except MultiplexingProxy.Error as e:
				_proxyWarn(e)
			if socket is None:
				_time.sleep(self._autoReconnectSleep())
		return socket
	def acquireSocket(self, countAsActive=True):
		"""Return the shared socket, creating it if needed; optionally
		count the caller as an active user."""
		with self._lock:
			if self._socket is None:
				self._activeCount = 0
				self._socket = self._mkSocketLoop()
			if countAsActive:
				self._activeCount += 1
			return self._socket
	def socketBroken(self):
		"""Discard the broken shared socket; reconnect right away when the
		keepalive policy requires the socket to stay up."""
		with self._lock:
			try:
				self._disconnectSocket()
			except:
				pass
			self._socket = None
			self._activeCount = 0
			if self._getKeepalivePolicy():
				_time.sleep(self._autoReconnectSleep())
				self._socket = self._mkSocketLoop()
	def notifyProxyClosed(self, proxyThread):
		"""Drop one active user; tear the socket down when none remain and
		keepalive is off."""
		Proxy.notifyProxyClosed(self, proxyThread)
		with self._lock:
			self._activeCount -= 1
			if self._activeCount < 1 and not self._getKeepalivePolicy():
				self._disconnectSocket()
				self._socket = None
				self._activeCount = 0
| lgpl-3.0 |
anonymous10101/collusion_resist_codes | programs/gs_con.py | 1 | 6144 | # This program verifies the continuous crowdsensing game model is group strategyproofed.
import random
import math
from copy import deepcopy
# Let there be 100 users.
n = 100
# Let the upper bound of the users' unit costs be 10.
k_max = 10.0
# Let the upper bound of the revenue coefficients be 20.
lambda_max = 20.0
# Baseline reward of the platform is 100.
R = 100
# Platform utility
u_p = R
class SensingUser:
    '''the class for a user participating in sensing.'''
    # NOTE: these are class-level defaults; per-user values are stored on
    # instances (s/k/l in __init__, and t/p/u by the simulation loops).
    k = 0; # the unit cost
    s = 0; # the user strategy
    l = 0; # the revenue coefficient
    t = 0; # sensing time of this user
    p = 0; # payment to this user
    u = 0; # user utility
    def __init__(self):
        # The initial s_i = kappa_i
        # cost drawn uniformly over (0.01, k_max); strategy starts at cost
        self.s = self.k = random.uniform(0.01, k_max)
        self.l = random.uniform(0.01, lambda_max)
# Build the population of n sensing users.
users = [SensingUser() for _ in range(n)]

# Derive every user's sensing time, payment and utility, and accumulate
# the platform utility over all participating users.  A user only
# participates when its strategy is below its revenue coefficient.
for user in users:
    if user.s < user.l:
        user.t = user.l / user.s - 1.0
        user.p = user.l * math.log(user.l / user.s)
        user.u = user.p - user.k * user.t
        u_p += user.l * math.log(1 + user.t) - user.p
    else:
        user.t = user.p = user.u = 0
###############################################################
# Experiment 1: vary the size of the colluding group.
#
# For each collusion size we run n_tests random collusions, measuring
# (a) the fraction of colluders whose utility drops and (b) the loss of
# platform utility; mean and standard deviation of both are written out.
# number of colluded users
n_collusion = 0
# colluded users
colluded_users = []
# Let the maximum perturbation on user strategies when collusion be 1.
max_perturb = 1.0
# There are 1000 test samples.
n_tests = 1000
# new platform utility
new_u_p = R
# The outputs will be stored in the text file ratio_group_con.txt
f = open('ratio_group_con.txt', 'w')
# Floor division keeps the range arguments integral on both Python 2 and
# Python 3 (plain '/' yields floats on Python 3 and range() would fail).
for n_collusion in range(2, 2 * n // 3, (n - 2) // 10):
    # ratio of the loss users
    ratio_loss = 0.0
    # the standard deviation of the ratio above
    dev_ratio = 0.0
    arr_ratio = []
    # loss of platform utility
    u_p_loss = 0.0
    # the standard deviation of the u_p loss above
    dev_u_p = 0.0
    arr_u_p = []
    for i in range(n_tests):
        # deep copy of the user arrays
        copy_users = deepcopy(users)
        new_u_p = R
        # Pick n_collusion distinct colluders by rejection sampling.
        colluded_users = []
        for j in range(n_collusion):
            while True:
                k = random.randint(0, n - 1)
                if k not in colluded_users:
                    break
            colluded_users.append(k)
        for k in colluded_users:
            # give perturbations on the copied user strategies
            copy_users[k].s = copy_users[k].s + random.uniform(0, 2.0 * max_perturb) - max_perturb
            if copy_users[k].s < 0.01:
                copy_users[k].s = 0.01
        # Recompute the sensing times, payments, user utilities and the
        # platform utility under the perturbed strategies.
        for user in copy_users:
            if user.s < user.l:
                user.t = user.l / user.s - 1.0
                user.p = user.l * math.log(user.l / user.s)
                user.u = user.p - user.k * user.t
                new_u_p = new_u_p + user.l * math.log(1 + user.t) - user.p
            else:
                user.t = user.p = user.u = 0
        # Count how many users suffer a utility loss versus the baseline.
        n_loss = 0
        for j in range(n):
            if copy_users[j].u < users[j].u:
                n_loss = n_loss + 1
        # statistics
        ratio_loss = ratio_loss + n_loss * 1.0 / n_collusion
        u_p_loss = u_p_loss - new_u_p + u_p
        # save for later: i.e. variance computation
        arr_ratio.append(n_loss * 1.0 / n_collusion)
        arr_u_p.append(u_p - new_u_p)
    # Accumulate squared deviations from the means (population variance).
    for r in arr_ratio:
        dev_ratio = dev_ratio + (r - ratio_loss / n_tests) ** 2
    for r in arr_u_p:
        dev_u_p = dev_u_p + (r - u_p_loss / n_tests) ** 2
    # Output: size, mean ratio, its std dev, mean u_p loss, its std dev.
    f.write('{0:f} {1:f} {2:f} {3:f} {4:f}\n'.format(n_collusion, ratio_loss / n_tests, (dev_ratio / n_tests) ** 0.5, u_p_loss / n_tests, (dev_u_p / n_tests) ** 0.5))
f.close()
###############################################################
# Experiment 2: fix the collusion size and vary the perturbation bound.
# Let the number of colluded users be half of n.
# Floor division keeps n_collusion an int on both Python 2 and Python 3.
n_collusion = n // 2 + 1
# colluded users
colluded_users = []
# The perturbation sizes on user strategies when collusion.
max_perturb = 1.0
# There are 1000 test samples.
n_tests = 1000
# new platform utility
new_u_p = R
# The outputs will be stored in the text file perturb_group_con.txt
f = open('perturb_group_con.txt', 'w')
for max_perturb in range(1, 11):
    # ratio of the loss users
    ratio_loss = 0.0
    # the standard deviation of the ratio above
    dev_ratio = 0.0
    arr_ratio = []
    # loss of platform utility
    u_p_loss = 0.0
    # the standard deviation of the u_p loss above
    dev_u_p = 0.0
    arr_u_p = []
    for i in range(n_tests):
        # deep copy of the user arrays
        copy_users = deepcopy(users)
        new_u_p = R
        # Pick n_collusion distinct colluders by rejection sampling.
        colluded_users = []
        for j in range(n_collusion):
            while True:
                k = random.randint(0, n - 1)
                if k not in colluded_users:
                    break
            colluded_users.append(k)
        for k in colluded_users:
            # give perturbations on the copied user strategies
            copy_users[k].s = copy_users[k].s + random.uniform(0, 2.0 * max_perturb) - max_perturb
            if copy_users[k].s < 0.01:
                copy_users[k].s = 0.01
        # Recompute the sensing times, payments, user utilities and the
        # platform utility under the perturbed strategies.
        for user in copy_users:
            if user.s < user.l:
                user.t = user.l / user.s - 1.0
                user.p = user.l * math.log(user.l / user.s)
                user.u = user.p - user.k * user.t
                new_u_p = new_u_p + user.l * math.log(1 + user.t) - user.p
            else:
                user.t = user.p = user.u = 0
        # Count how many users suffer a utility loss versus the baseline.
        n_loss = 0
        for j in range(n):
            if copy_users[j].u < users[j].u:
                n_loss = n_loss + 1
        # statistics
        ratio_loss = ratio_loss + n_loss * 1.0 / n_collusion
        u_p_loss = u_p_loss - new_u_p + u_p
        # save for later: i.e. variance computation
        arr_ratio.append(n_loss * 1.0 / n_collusion)
        arr_u_p.append(u_p - new_u_p)
    # Accumulate squared deviations from the means (population variance).
    for r in arr_ratio:
        dev_ratio = dev_ratio + (r - ratio_loss / n_tests) ** 2
    for r in arr_u_p:
        dev_u_p = dev_u_p + (r - u_p_loss / n_tests) ** 2
    # Output: bound, mean ratio, its std dev, mean u_p loss, its std dev.
    f.write('{0:f} {1:f} {2:f} {3:f} {4:f}\n'.format(max_perturb, ratio_loss / n_tests, (dev_ratio / n_tests) ** 0.5, u_p_loss / n_tests, (dev_u_p / n_tests) ** 0.5))
f.close()
Cal-CS-61A-Staff/ok-client | release.py | 1 | 6652 | #!/usr/bin/env python3
from distutils.version import StrictVersion
import os
import re
import requests
import subprocess
import sys
import tempfile
from client.api import assignment
from client.utils import auth
GITHUB_TOKEN_FILE = '.github-token'
GITHUB_REPO = 'okpy/ok-client'
OK_SERVER_URL = 'okpy.org'
def abort(message=None):
    """Print an optional error message to stderr and exit with status 1."""
    if message:
        print('ERROR:', message, file=sys.stderr)
    sys.exit(1)
def shell(command, capture_output=False):
    """Run *command* through the shell, aborting the release on failure.

    With capture_output=True the command's stdout is captured and
    returned stripped; otherwise the command is echoed first and its
    output goes straight to the terminal.
    """
    options = dict(shell=True, check=True)
    if capture_output:
        options['stderr'] = subprocess.PIPE
        options['stdout'] = subprocess.PIPE
    else:
        print('>', command)
    try:
        completed = subprocess.run(command, **options)
    except subprocess.CalledProcessError as err:
        print(str(err), file=sys.stderr)
        if capture_output:
            print(err.stderr.decode('utf-8'), file=sys.stderr)
        abort()
    if capture_output:
        return completed.stdout.decode('utf-8').strip()
def edit(text):
    """Open *text* in the user's $EDITOR (default vi) and return the result.

    Mirrors Mercurial's ui.edit(): the text is written to a temporary
    file, the editor is launched on it, and the file's final contents
    are returned.  Context managers ensure the file handles are closed
    even if writing or reading fails; the temp file is always removed.
    """
    # from Mercurial sources:
    # https://selenic.com/repo/hg-stable/file/2770d03ae49f/mercurial/ui.py#l318
    (fd, name) = tempfile.mkstemp(
        prefix='ok-client-release-', suffix='.txt', text=True)
    try:
        with os.fdopen(fd, 'w') as f:
            f.write(text)
        editor = os.environ.get('EDITOR', 'vi')
        shell('{} "{}"'.format(editor, name))
        with open(name) as f:
            return f.read()
    finally:
        os.unlink(name)
def post_request(url, *args, **kwargs):
    """POST to *url* and return the decoded JSON response body.

    Any HTTP error status or network failure aborts the release.
    """
    try:
        print('POST', url)
        response = requests.post(url, *args, **kwargs)
        response.raise_for_status()
    except Exception as exc:
        abort(str(exc))
    return response.json()
if __name__ == '__main__':
    # Release driver: validates the new version, bumps the version
    # string, publishes a GitHub release with the 'ok' binary, points
    # the ok server at the new build, and finally uploads to PyPI.
    if len(sys.argv) < 2:
        print('Usage: {} NEW_VERSION'.format(sys.argv[0]), file=sys.stderr)
        abort()
    new_release = sys.argv[1]
    # change to directory that script is in; should be root of project
    os.chdir(os.path.dirname(os.path.realpath(__file__)))
    # read GitHub token
    try:
        with open(GITHUB_TOKEN_FILE, 'r') as f:
            github_token = f.read().strip()
    except (OSError, IOError) as e:
        print('No GitHub access token found.', file=sys.stderr)
        print('Generate an access token with the "repo" scope as per', file=sys.stderr)
        print('https://help.github.com/articles/creating-an-access-token-for-command-line-use/', file=sys.stderr)
        print('and paste the token into a file named "{}".'.format(GITHUB_TOKEN_FILE), file=sys.stderr)
        abort()
    # Releases are tagged vX.Y.Z — enforce the 'v' prefix early.
    if new_release[:1] != 'v':
        abort("Version must start with 'v'")
    if shell('git rev-parse --abbrev-ref HEAD', capture_output=True) != 'master':
        abort('You must be on master to release a new version')
    shell('git pull --ff-only --tags')
    # find latest release (most recent reachable tag and its commit)
    latest_release = shell('git describe --tags --abbrev=0', capture_output=True)
    latest_release_commit = shell('git rev-list -n 1 {}'.format(latest_release),
                                  capture_output=True)
    print('Latest version: {} ({})'.format(latest_release, latest_release_commit[:7]))
    print('New version: {}'.format(new_release))
    # uninstall ``okpy`` in virtual env, if installed
    # NOTE(review): 'pip uninstall' without -y prompts for confirmation;
    # presumably the interactive pause is intended here — confirm.
    shell('pip uninstall okpy')
    # run tests
    shell('nosetests tests')
    # check that release numbers are sane (strictly increasing, and the
    # major version may grow by at most one)
    try:
        latest_version = StrictVersion(latest_release[1:])
        new_version = StrictVersion(new_release[1:])
    except ValueError as e:
        abort(str(e))
    if latest_version >= new_version:
        abort('Version numbers must be increasing')
    elif new_version.version[0] > latest_version.version[0] + 1:
        abort('Major version number cannot increase by more than one')
    # edit changelog message (pre-populated with the commit log since
    # the last release; '#' lines are stripped, git-commit style)
    log = shell('git log --pretty=format:"- %s" {}..HEAD'.format(latest_release),
                capture_output=True)
    changelog = edit('\n'.join([
        'Changelog:',
        log,
        '',
        '# Please enter a changelog since the latest release. Lines starting',
        "# with '#' will be ignored, and an empty message aborts the release.",
    ]))
    log_lines = [line for line in changelog.splitlines() if line[:1] != '#']
    changelog = '\n'.join(log_lines).strip()
    if not changelog:
        abort('Empty changelog, aborting')
    # edit client/__init__.py and commit
    init_file = 'client/__init__.py'
    with open(init_file, 'r', encoding='utf-8') as f:
        old_init = f.read()
    new_init = re.sub(
        r"^__version__ = '([a-zA-Z0-9.]+)'",
        "__version__ = '{}'".format(new_release),
        old_init)
    if old_init != new_init:
        print('Editing version string in {}'.format(init_file))
        with open(init_file, 'w', encoding='utf-8') as f:
            # NOTE(review): f.write returns a character count, so the
            # assignment to `init` is unused.
            init = f.write(new_init)
        shell('git add {}'.format(init_file))
        shell('git commit -m "Bump version to {}"'.format(new_release))
        shell('git push')
    print('Uploading release to GitHub...')
    # 'ok-publish' builds the 'ok' zip binary that is attached below.
    shell('python setup.py develop')
    shell('ok-publish')
    github_release = post_request(
        'https://api.github.com/repos/{}/releases'.format(GITHUB_REPO),
        headers={
            'Authorization': 'token ' + github_token,
        },
        json={
            'tag_name': new_release,
            'target_commitish': 'master',
            'name': new_release,
            'body': changelog,
            'draft': False,
            'prerelease': False
        },
    )
    # Attach the built 'ok' binary as a release asset.
    with open('ok', 'rb') as f:
        github_asset = post_request(
            'https://uploads.github.com/repos/{}/releases/{}/assets'.format(
                GITHUB_REPO, github_release['id']),
            params={
                'name': 'ok'
            },
            headers={
                'Authorization': 'token ' + github_token,
                'Content-Type': 'application/octet-stream',
            },
            data=f,
        )
    print('Updating version on {}...'.format(OK_SERVER_URL))
    # Tell the ok server about the new version and its download URL so
    # deployed clients can auto-update.
    args = assignment.Settings(server=OK_SERVER_URL)
    access_token = auth.authenticate(args, force=True)
    post_request('https://{}/api/v3/version/ok-client'.format(OK_SERVER_URL),
                 headers={
                     'Authorization': 'Bearer ' + access_token,
                 },
                 json={
                     'current_version': new_release,
                     'download_link': github_asset['browser_download_url'],
                 },
                 )
    print('Uploading release to PyPI...')
    shell('python setup.py sdist bdist_wheel upload')
    print()
    print('Released okpy=={}'.format(new_release))
    print('Remember to update the requirements.txt file for any repos that depend on okpy.')
| apache-2.0 |
ryanofsky/bitcoin | test/functional/wallet_address_types.py | 8 | 14298 | #!/usr/bin/env python3
# Copyright (c) 2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test that the wallet can send and receive using all combinations of address types.
There are 5 nodes-under-test:
- node0 uses legacy addresses
- node1 uses p2sh/segwit addresses
- node2 uses p2sh/segwit addresses and bech32 addresses for change
- node3 uses bech32 addresses
- node4 uses a p2sh/segwit addresses for change
node5 exists to generate new blocks.
## Multisig address test
Test that adding a multisig address with:
- an uncompressed pubkey always gives a legacy address
- only compressed pubkeys gives the an `-addresstype` address
## Sending to address types test
A series of tests, iterating over node0-node4. In each iteration of the test, one node sends:
- 10/101th of its balance to itself (using getrawchangeaddress for single key addresses)
- 20/101th to the next node
- 30/101th to the node after that
- 40/101th to the remaining node
- 1/101th remains as fee+change
Iterate over each node for single key addresses, and then over each node for
multisig addresses.
Repeat test, but with explicit address_type parameters passed to getnewaddress
and getrawchangeaddress:
- node0 and node3 send to p2sh.
- node1 sends to bech32.
- node2 sends to legacy.
As every node sends coins after receiving, this also
verifies that spending coins sent to all these address types works.
## Change type test
Test that the nodes generate the correct change address type:
- node0 always uses a legacy change address.
- node1 uses a bech32 addresses for change if any destination address is bech32.
- node2 always uses a bech32 address for change
- node3 always uses a bech32 address for change
- node4 always uses p2sh/segwit output for change.
"""
from decimal import Decimal
import itertools
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_greater_than,
assert_raises_rpc_error,
connect_nodes_bi,
sync_blocks,
sync_mempools,
)
class AddressTypeTest(BitcoinTestFramework):
    def set_test_params(self):
        """Five nodes-under-test with different address/change types, plus a miner (node5)."""
        self.num_nodes = 6
        self.extra_args = [
            ["-addresstype=legacy"],
            ["-addresstype=p2sh-segwit"],
            ["-addresstype=p2sh-segwit", "-changetype=bech32"],
            ["-addresstype=bech32"],
            ["-changetype=p2sh-segwit"],
            []
        ]

    def setup_network(self):
        """Connect every pair of nodes directly instead of the default chain topology."""
        self.setup_nodes()
        # Fully mesh-connect nodes for faster mempool sync
        for i, j in itertools.product(range(self.num_nodes), repeat=2):
            if i > j:
                connect_nodes_bi(self.nodes, i, j)
        self.sync_all()

    def get_balances(self, confirmed=True):
        """Return a list of confirmed or unconfirmed balances."""
        if confirmed:
            return [self.nodes[i].getbalance() for i in range(4)]
        else:
            return [self.nodes[i].getunconfirmedbalance() for i in range(4)]

    def test_address(self, node, address, multisig, typ):
        """Run sanity checks on an address."""
        info = self.nodes[node].validateaddress(address)
        assert(info['isvalid'])
        if not multisig and typ == 'legacy':
            # P2PKH
            assert(not info['isscript'])
            assert(not info['iswitness'])
            assert('pubkey' in info)
        elif not multisig and typ == 'p2sh-segwit':
            # P2SH-P2WPKH
            assert(info['isscript'])
            assert(not info['iswitness'])
            assert_equal(info['script'], 'witness_v0_keyhash')
            assert('pubkey' in info)
        elif not multisig and typ == 'bech32':
            # P2WPKH
            assert(not info['isscript'])
            assert(info['iswitness'])
            assert_equal(info['witness_version'], 0)
            assert_equal(len(info['witness_program']), 40)
            assert('pubkey' in info)
        elif typ == 'legacy':
            # P2SH-multisig
            assert(info['isscript'])
            assert_equal(info['script'], 'multisig')
            assert(not info['iswitness'])
            assert('pubkeys' in info)
        elif typ == 'p2sh-segwit':
            # P2SH-P2WSH-multisig
            assert(info['isscript'])
            assert_equal(info['script'], 'witness_v0_scripthash')
            assert(not info['iswitness'])
            assert(info['embedded']['isscript'])
            assert_equal(info['embedded']['script'], 'multisig')
            assert(info['embedded']['iswitness'])
            assert_equal(info['embedded']['witness_version'], 0)
            assert_equal(len(info['embedded']['witness_program']), 64)
            assert('pubkeys' in info['embedded'])
        elif typ == 'bech32':
            # P2WSH-multisig
            assert(info['isscript'])
            assert_equal(info['script'], 'multisig')
            assert(info['iswitness'])
            assert_equal(info['witness_version'], 0)
            assert_equal(len(info['witness_program']), 64)
            assert('pubkeys' in info)
        else:
            # Unknown type
            assert(False)

    def test_change_output_type(self, node_sender, destinations, expected_type):
        """Send to *destinations* and verify the change output's address type."""
        txid = self.nodes[node_sender].sendmany(fromaccount="", amounts=dict.fromkeys(destinations, 0.001))
        raw_tx = self.nodes[node_sender].getrawtransaction(txid)
        tx = self.nodes[node_sender].decoderawtransaction(raw_tx)
        # Make sure the transaction has change:
        assert_equal(len(tx["vout"]), len(destinations) + 1)
        # Make sure the destinations are included, and remove them:
        output_addresses = [vout['scriptPubKey']['addresses'][0] for vout in tx["vout"]]
        change_addresses = [d for d in output_addresses if d not in destinations]
        assert_equal(len(change_addresses), 1)
        self.log.debug("Check if change address " + change_addresses[0] + " is " + expected_type)
        self.test_address(node_sender, change_addresses[0], multisig=False, typ=expected_type)

    def run_test(self):
        """Exercise multisig address types, sends across all types, and change selection."""
        # Mine 101 blocks on node5 to bring nodes out of IBD and make sure that
        # no coinbases are maturing for the nodes-under-test during the test
        self.nodes[5].generate(101)
        sync_blocks(self.nodes)
        uncompressed_1 = "0496b538e853519c726a2c91e61ec11600ae1390813a627c66fb8be7947be63c52da7589379515d4e0a604f8141781e62294721166bf621e73a82cbf2342c858ee"
        uncompressed_2 = "047211a824f55b505228e4c3d5194c1fcfaa15a456abdf37f9b9d97a4040afc073dee6c89064984f03385237d92167c13e236446b417ab79a0fcae412ae3316b77"
        compressed_1 = "0296b538e853519c726a2c91e61ec11600ae1390813a627c66fb8be7947be63c52"
        compressed_2 = "037211a824f55b505228e4c3d5194c1fcfaa15a456abdf37f9b9d97a4040afc073"
        # addmultisigaddress with at least 1 uncompressed key should return a legacy address.
        for node in range(4):
            self.test_address(node, self.nodes[node].addmultisigaddress(2, [uncompressed_1, uncompressed_2])['address'], True, 'legacy')
            self.test_address(node, self.nodes[node].addmultisigaddress(2, [compressed_1, uncompressed_2])['address'], True, 'legacy')
            self.test_address(node, self.nodes[node].addmultisigaddress(2, [uncompressed_1, compressed_2])['address'], True, 'legacy')
        # addmultisigaddress with all compressed keys should return the appropriate address type (even when the keys are not ours).
        self.test_address(0, self.nodes[0].addmultisigaddress(2, [compressed_1, compressed_2])['address'], True, 'legacy')
        self.test_address(1, self.nodes[1].addmultisigaddress(2, [compressed_1, compressed_2])['address'], True, 'p2sh-segwit')
        self.test_address(2, self.nodes[2].addmultisigaddress(2, [compressed_1, compressed_2])['address'], True, 'p2sh-segwit')
        self.test_address(3, self.nodes[3].addmultisigaddress(2, [compressed_1, compressed_2])['address'], True, 'bech32')
        # Iterate over every (explicit/default type, single/multisig, sender) combination.
        for explicit_type, multisig, from_node in itertools.product([False, True], [False, True], range(4)):
            address_type = None
            if explicit_type and not multisig:
                if from_node == 1:
                    address_type = 'bech32'
                elif from_node == 0 or from_node == 3:
                    address_type = 'p2sh-segwit'
                else:
                    address_type = 'legacy'
            self.log.info("Sending from node {} ({}) with{} multisig using {}".format(from_node, self.extra_args[from_node], "" if multisig else "out", "default" if address_type is None else address_type))
            old_balances = self.get_balances()
            self.log.debug("Old balances are {}".format(old_balances))
            # Split the balance 10/20/30/40 out of 101; ~1/101 is left as fee+change.
            to_send = (old_balances[from_node] / 101).quantize(Decimal("0.00000001"))
            sends = {}
            self.log.debug("Prepare sends")
            for n, to_node in enumerate(range(from_node, from_node + 4)):
                to_node %= 4
                change = False
                if not multisig:
                    if from_node == to_node:
                        # When sending non-multisig to self, use getrawchangeaddress
                        address = self.nodes[to_node].getrawchangeaddress(address_type=address_type)
                        change = True
                    else:
                        address = self.nodes[to_node].getnewaddress(address_type=address_type)
                else:
                    addr1 = self.nodes[to_node].getnewaddress()
                    addr2 = self.nodes[to_node].getnewaddress()
                    address = self.nodes[to_node].addmultisigaddress(2, [addr1, addr2])['address']
                # Do some sanity checking on the created address
                if address_type is not None:
                    typ = address_type
                elif to_node == 0:
                    typ = 'legacy'
                elif to_node == 1 or (to_node == 2 and not change):
                    typ = 'p2sh-segwit'
                else:
                    typ = 'bech32'
                self.test_address(to_node, address, multisig, typ)
                # Output entry
                sends[address] = to_send * 10 * (1 + n)
            self.log.debug("Sending: {}".format(sends))
            self.nodes[from_node].sendmany("", sends)
            sync_mempools(self.nodes)
            unconf_balances = self.get_balances(False)
            self.log.debug("Check unconfirmed balances: {}".format(unconf_balances))
            assert_equal(unconf_balances[from_node], 0)
            for n, to_node in enumerate(range(from_node + 1, from_node + 4)):
                to_node %= 4
                assert_equal(unconf_balances[to_node], to_send * 10 * (2 + n))
            # node5 collects fee and block subsidy to keep accounting simple
            self.nodes[5].generate(1)
            sync_blocks(self.nodes)
            new_balances = self.get_balances()
            self.log.debug("Check new balances: {}".format(new_balances))
            # We don't know what fee was set, so we can only check bounds on the balance of the sending node
            assert_greater_than(new_balances[from_node], to_send * 10)
            assert_greater_than(to_send * 11, new_balances[from_node])
            for n, to_node in enumerate(range(from_node + 1, from_node + 4)):
                to_node %= 4
                assert_equal(new_balances[to_node], old_balances[to_node] + to_send * 10 * (2 + n))
        # Get one p2sh/segwit address from node2 and two bech32 addresses from node3:
        to_address_p2sh = self.nodes[2].getnewaddress()
        to_address_bech32_1 = self.nodes[3].getnewaddress()
        to_address_bech32_2 = self.nodes[3].getnewaddress()
        # Fund node 4:
        self.nodes[5].sendtoaddress(self.nodes[4].getnewaddress(), Decimal("1"))
        self.nodes[5].generate(1)
        sync_blocks(self.nodes)
        assert_equal(self.nodes[4].getbalance(), 1)
        self.log.info("Nodes with addresstype=legacy never use a P2WPKH change output")
        self.test_change_output_type(0, [to_address_bech32_1], 'legacy')
        self.log.info("Nodes with addresstype=p2sh-segwit only use a P2WPKH change output if any destination address is bech32:")
        self.test_change_output_type(1, [to_address_p2sh], 'p2sh-segwit')
        self.test_change_output_type(1, [to_address_bech32_1], 'bech32')
        self.test_change_output_type(1, [to_address_p2sh, to_address_bech32_1], 'bech32')
        self.test_change_output_type(1, [to_address_bech32_1, to_address_bech32_2], 'bech32')
        self.log.info("Nodes with change_type=bech32 always use a P2WPKH change output:")
        self.test_change_output_type(2, [to_address_bech32_1], 'bech32')
        self.test_change_output_type(2, [to_address_p2sh], 'bech32')
        self.log.info("Nodes with addresstype=bech32 always use a P2WPKH change output (unless changetype is set otherwise):")
        self.test_change_output_type(3, [to_address_bech32_1], 'bech32')
        self.test_change_output_type(3, [to_address_p2sh], 'bech32')
        self.log.info('getrawchangeaddress defaults to addresstype if -changetype is not set and argument is absent')
        self.test_address(3, self.nodes[3].getrawchangeaddress(), multisig=False, typ='bech32')
        self.log.info('getrawchangeaddress fails with invalid changetype argument')
        assert_raises_rpc_error(-5, "Unknown address type 'bech23'", self.nodes[3].getrawchangeaddress, 'bech23')
        self.log.info("Nodes with changetype=p2sh-segwit never use a P2WPKH change output")
        self.test_change_output_type(4, [to_address_bech32_1], 'p2sh-segwit')
        self.test_address(4, self.nodes[4].getrawchangeaddress(), multisig=False, typ='p2sh-segwit')
        self.log.info("Except for getrawchangeaddress if specified:")
        self.test_address(4, self.nodes[4].getrawchangeaddress(), multisig=False, typ='p2sh-segwit')
        self.test_address(4, self.nodes[4].getrawchangeaddress('bech32'), multisig=False, typ='bech32')
if __name__ == '__main__':
    # Run the functional test when executed directly.
    AddressTypeTest().main()
| mit |
icambridge/hockeystats | hr_coach_crawler.py | 1 | 4898 | #!/usr/bin/env python
# Copyright 2017-present Iain Cambridge.
#
# Licensed under the MIT License(the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# https://opensource.org/licenses/MIT
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import http.client
from lxml import html
from datetime import datetime
import csv
import os
import time
conn = http.client.HTTPSConnection("www.hockey-reference.com")
def writeCsv(coach, years):
    """Write one coach's season rows to ./stats/coach/<coach>.csv.

    *years* is a list of dicts that share the same keys; the keys of the
    first row become the CSV header.  Does nothing (beyond a notice)
    when there are no rows.
    """
    if not years:
        print("No years")
        return
    directory = "./stats/coach"
    filename = "%s/%s.csv" % (directory, coach)
    # exist_ok avoids the check-then-create race of the old code.
    os.makedirs(directory, exist_ok=True)
    with open(filename, 'w', newline='') as csvfile:
        writer = csv.DictWriter(csvfile, fieldnames=list(years[0]))
        writer.writeheader()
        writer.writerows(years)
def buildYears(tree):
    """Turn the #coach stats table of a hockey-reference page into rows.

    Returns one dict per coached season, keyed by the CSV column names
    that writeCsv() emits.  Blank numeric cells become 0 and blank text
    cells become ''.  (td[1], the coach's age, and td[11] exist in the
    table but were never exported, so they are skipped.)
    """
    def cell_texts(column, default):
        # Text of every row's given stat cell, with *default* for blanks.
        cells = tree.xpath('//*[@id="coach"]/tbody/tr/td[%d]' % column)
        return [cell.text if cell.text else default for cell in cells]

    seasons = tree.xpath('//*[@id="coach"]/tbody/tr/th/text()')
    teams = tree.xpath('//*[@id="coach"]/tbody/tr/td[2]/a/text()')
    leagues = tree.xpath('//*[@id="coach"]/tbody/tr/td[3]/a/text()')
    games_played = cell_texts(4, 0)
    wins = cell_texts(5, 0)
    loses = cell_texts(6, 0)
    ties = cell_texts(7, 0)
    ot_loses = cell_texts(8, 0)
    points = cell_texts(9, 0)
    points_percentages = cell_texts(10, 0)
    finishes = cell_texts(12, '')
    playoff_wins = cell_texts(13, 0)
    playoff_loses = cell_texts(14, 0)
    playoff_ties = cell_texts(15, 0)
    playoff_win_lose = cell_texts(16, 0)
    playoff_notes = cell_texts(17, '')

    years = []
    for i in range(len(seasons)):
        years.append({
            "season": seasons[i],
            # "leage" (sic) is kept so existing CSV headers stay stable.
            "leage": leagues[i],
            "team": teams[i],
            "games_played": games_played[i],
            "wins": wins[i],
            "loses": loses[i],
            "ties": ties[i],
            "overtime_loses": ot_loses[i],
            "points": points[i],
            "points_percentage": points_percentages[i],
            "finishing_position": finishes[i],
            "playoff_note": playoff_notes[i],
            "playoff_wins": playoff_wins[i],
            "playoff_loses": playoff_loses[i],
            "playoff_ties": playoff_ties[i],
            "playoff_win_lose": playoff_win_lose[i]
        })
    return years
def getHtml(url):
    """Fetch *url* over the shared HTTPS connection and return the raw body.

    Sleeps two seconds first so we stay polite to hockey-reference.
    """
    time.sleep(2)
    conn.request("GET", url)
    reply = conn.getresponse()
    print(url, reply.status, reply.reason)
    return reply.read()
def crawlCoach(url):
    """Download one coach page and dump its season table to CSV.

    The output file name is derived from the URL, e.g.
    /coaches/bowmasc99c.html -> bowmasc99c.csv.
    """
    page = getHtml(url)
    slug = url.split("/")[2].split(".")[0]
    writeCsv(slug, buildYears(html.fromstring(page)))
# Entry point: scrape the coach index, then crawl every linked page.
tree = html.fromstring(getHtml("/coaches/"))
# Active coaches are rendered in bold, retired ones plain.
unBoldCoachPages = tree.xpath('//*[@id="coaches"]/tbody/tr/th/a/@href')
boldCoachPages = tree.xpath('//*[@id="coaches"]/tbody/tr/th//strong/a/@href')
coachPages = unBoldCoachPages + boldCoachPages
print("Found %s" % (len(coachPages)))
for page in coachPages:
    crawlCoach(page)
| mit |
sw-irou/flasktest | lib/mongoengine/base/fields.py | 3 | 19569 | import operator
import warnings
import weakref
from bson import DBRef, ObjectId, SON
import pymongo
from mongoengine.common import _import_class
from mongoengine.errors import ValidationError
from mongoengine.base.common import ALLOW_INHERITANCE
from mongoengine.base.datastructures import BaseDict, BaseList
__all__ = ("BaseField", "ComplexBaseField", "ObjectIdField", "GeoJsonBaseField")
class BaseField(object):
"""A base class for fields in a MongoDB document. Instances of this class
may be added to subclasses of `Document` to define a document's schema.
.. versionchanged:: 0.5 - added verbose and help text
"""
name = None
_geo_index = False
_auto_gen = False # Call `generate` to generate a value
_auto_dereference = True
# These track each time a Field instance is created. Used to retain order.
# The auto_creation_counter is used for fields that MongoEngine implicitly
# creates, creation_counter is used for all user-specified fields.
creation_counter = 0
auto_creation_counter = -1
def __init__(self, db_field=None, name=None, required=False, default=None,
unique=False, unique_with=None, primary_key=False,
validation=None, choices=None, verbose_name=None,
help_text=None):
"""
:param db_field: The database field to store this field in
(defaults to the name of the field)
:param name: Depreciated - use db_field
:param required: If the field is required. Whether it has to have a
value or not. Defaults to False.
:param default: (optional) The default value for this field if no value
has been set (or if the value has been unset). It Can be a
callable.
:param unique: Is the field value unique or not. Defaults to False.
:param unique_with: (optional) The other field this field should be
unique with.
:param primary_key: Mark this field as the primary key. Defaults to False.
:param validation: (optional) A callable to validate the value of the
field. Generally this is deprecated in favour of the
`FIELD.validate` method
:param choices: (optional) The valid choices
:param verbose_name: (optional) The verbose name for the field.
Designed to be human readable and is often used when generating
model forms from the document model.
:param help_text: (optional) The help text for this field and is often
used when generating model forms from the document model.
"""
self.db_field = (db_field or name) if not primary_key else '_id'
if name:
msg = "Fields' 'name' attribute deprecated in favour of 'db_field'"
warnings.warn(msg, DeprecationWarning)
self.required = required or primary_key
self.default = default
self.unique = bool(unique or unique_with)
self.unique_with = unique_with
self.primary_key = primary_key
self.validation = validation
self.choices = choices
self.verbose_name = verbose_name
self.help_text = help_text
# Adjust the appropriate creation counter, and save our local copy.
if self.db_field == '_id':
self.creation_counter = BaseField.auto_creation_counter
BaseField.auto_creation_counter -= 1
else:
self.creation_counter = BaseField.creation_counter
BaseField.creation_counter += 1
def __get__(self, instance, owner):
"""Descriptor for retrieving a value from a field in a document.
"""
if instance is None:
# Document class being used rather than a document object
return self
# Get value from document instance if available
value = instance._data.get(self.name)
EmbeddedDocument = _import_class('EmbeddedDocument')
if isinstance(value, EmbeddedDocument) and value._instance is None:
value._instance = weakref.proxy(instance)
return value
def __set__(self, instance, value):
"""Descriptor for assigning a value to a field in a document.
"""
# If setting to None and theres a default
# Then set the value to the default value
if value is None and self.default is not None:
value = self.default
if callable(value):
value = value()
if instance._initialised:
try:
if (self.name not in instance._data or
instance._data[self.name] != value):
instance._mark_as_changed(self.name)
except:
# Values cant be compared eg: naive and tz datetimes
# So mark it as changed
instance._mark_as_changed(self.name)
instance._data[self.name] = value
    def error(self, message="", errors=None, field_name=None):
        """Raises a ValidationError.

        :param message: human readable error message
        :param errors: optional nested error dict
        :param field_name: overrides the field name reported in the error
        """
        # Default to this field's own name unless the caller overrides it.
        field_name = field_name if field_name else self.name
        raise ValidationError(message, errors=errors, field_name=field_name)
    def to_python(self, value):
        """Convert a MongoDB-compatible type to a Python type.

        Base implementation is the identity; subclasses override.
        """
        return value
    def to_mongo(self, value):
        """Convert a Python type to a MongoDB-compatible type.

        Base implementation defers to :meth:`to_python`.
        """
        return self.to_python(value)
    def prepare_query_value(self, op, value):
        """Prepare a value that is being used in a query for PyMongo.

        Base implementation passes the value through unchanged.
        """
        return value
    def validate(self, value, clean=True):
        """Perform validation on a value.

        Base implementation accepts any value; subclasses override.
        """
        pass
    def _validate(self, value, **kwargs):
        # Internal validation entry point: enforces ``choices`` and the
        # ``validation`` callable before delegating to self.validate().
        Document = _import_class('Document')
        EmbeddedDocument = _import_class('EmbeddedDocument')
        # check choices
        if self.choices:
            is_cls = isinstance(value, (Document, EmbeddedDocument))
            # Documents are matched against choices by class, not by value.
            value_to_check = value.__class__ if is_cls else value
            err_msg = 'an instance' if is_cls else 'one'
            if isinstance(self.choices[0], (list, tuple)):
                # choices given as (stored_value, display_value) pairs
                option_keys = [k for k, v in self.choices]
                if value_to_check not in option_keys:
                    msg = ('Value must be %s of %s' %
                           (err_msg, unicode(option_keys)))
                    self.error(msg)
            elif value_to_check not in self.choices:
                msg = ('Value must be %s of %s' %
                       (err_msg, unicode(self.choices)))
                self.error(msg)
        # check validation argument
        if self.validation is not None:
            if callable(self.validation):
                if not self.validation(value):
                    self.error('Value does not match custom validation method')
            else:
                raise ValueError('validation argument for "%s" must be a '
                                 'callable.' % self.name)
        self.validate(value, **kwargs)
class ComplexBaseField(BaseField):
    """Handles complex fields, such as lists / dictionaries.

    Allows for nesting of embedded documents inside complex types.
    Handles the lazy dereferencing of a queryset by lazily dereferencing all
    items in a list / dict rather than one at a time.

    .. versionadded:: 0.5
    """
    # Field type used for each contained item, if the container is
    # homogeneous; None means heterogeneous / untyped items.
    field = None
    def __get__(self, instance, owner):
        """Descriptor to automatically dereference references.
        """
        if instance is None:
            # Document class being used rather than a document object
            return self
        ReferenceField = _import_class('ReferenceField')
        GenericReferenceField = _import_class('GenericReferenceField')
        # NOTE(review): this reads self._auto_dereference *before* it is
        # refreshed from instance._fields two statements below -- presumably
        # intentional; confirm the ordering before changing it.
        dereference = (self._auto_dereference and
                       (self.field is None or isinstance(self.field,
                        (GenericReferenceField, ReferenceField))))
        _dereference = _import_class("DeReference")()
        self._auto_dereference = instance._fields[self.name]._auto_dereference
        if instance._initialised and dereference:
            instance._data[self.name] = _dereference(
                instance._data.get(self.name), max_depth=1, instance=instance,
                name=self.name
            )
        value = super(ComplexBaseField, self).__get__(instance, owner)
        # Convert lists / values so we can watch for any changes on them
        if (isinstance(value, (list, tuple)) and
                not isinstance(value, BaseList)):
            value = BaseList(value, instance, self.name)
            instance._data[self.name] = value
        elif isinstance(value, dict) and not isinstance(value, BaseDict):
            value = BaseDict(value, instance, self.name)
            instance._data[self.name] = value
        # Dereference at most once per container (guarded by _dereferenced).
        if (self._auto_dereference and instance._initialised and
                isinstance(value, (BaseList, BaseDict))
                and not value._dereferenced):
            value = _dereference(
                value, max_depth=1, instance=instance, name=self.name
            )
            value._dereferenced = True
            instance._data[self.name] = value
        return value
    def to_python(self, value):
        """Convert a MongoDB-compatible type to a Python type.

        Containers are normalised to a dict keyed by index, converted
        per-item, then turned back into a list when needed.
        """
        Document = _import_class('Document')
        if isinstance(value, basestring):
            return value
        if hasattr(value, 'to_python'):
            return value.to_python()
        is_list = False
        if not hasattr(value, 'items'):
            try:
                is_list = True
                value = dict([(k, v) for k, v in enumerate(value)])
            except TypeError:  # Not iterable return the value
                return value
        if self.field:
            value_dict = dict([(key, self.field.to_python(item))
                               for key, item in value.items()])
        else:
            value_dict = {}
            for k, v in value.items():
                if isinstance(v, Document):
                    # We need the id from the saved object to create the DBRef
                    if v.pk is None:
                        self.error('You can only reference documents once they'
                                   ' have been saved to the database')
                    collection = v._get_collection_name()
                    value_dict[k] = DBRef(collection, v.pk)
                elif hasattr(v, 'to_python'):
                    value_dict[k] = v.to_python()
                else:
                    # Recurse for nested containers / plain values.
                    value_dict[k] = self.to_python(v)
        if is_list:  # Convert back to a list
            return [v for k, v in sorted(value_dict.items(),
                                         key=operator.itemgetter(0))]
        return value_dict
    def to_mongo(self, value):
        """Convert a Python type to a MongoDB-compatible type.

        Mirrors :meth:`to_python` but produces DBRefs / SON suitable for
        storage, adding ``_cls`` markers for embedded documents.
        """
        Document = _import_class("Document")
        EmbeddedDocument = _import_class("EmbeddedDocument")
        GenericReferenceField = _import_class("GenericReferenceField")
        if isinstance(value, basestring):
            return value
        if hasattr(value, 'to_mongo'):
            if isinstance(value, Document):
                return GenericReferenceField().to_mongo(value)
            cls = value.__class__
            val = value.to_mongo()
            # If we its a document thats not inherited add _cls
            if (isinstance(value, EmbeddedDocument)):
                val['_cls'] = cls.__name__
            return val
        is_list = False
        if not hasattr(value, 'items'):
            try:
                is_list = True
                value = dict([(k, v) for k, v in enumerate(value)])
            except TypeError:  # Not iterable return the value
                return value
        if self.field:
            value_dict = dict([(key, self.field.to_mongo(item))
                               for key, item in value.iteritems()])
        else:
            value_dict = {}
            for k, v in value.iteritems():
                if isinstance(v, Document):
                    # We need the id from the saved object to create the DBRef
                    if v.pk is None:
                        self.error('You can only reference documents once they'
                                   ' have been saved to the database')
                    # If its a document that is not inheritable it won't have
                    # any _cls data so make it a generic reference allows
                    # us to dereference
                    meta = getattr(v, '_meta', {})
                    allow_inheritance = (
                        meta.get('allow_inheritance', ALLOW_INHERITANCE)
                        is True)
                    if not allow_inheritance and not self.field:
                        value_dict[k] = GenericReferenceField().to_mongo(v)
                    else:
                        collection = v._get_collection_name()
                        value_dict[k] = DBRef(collection, v.pk)
                elif hasattr(v, 'to_mongo'):
                    cls = v.__class__
                    val = v.to_mongo()
                    # If we its a document thats not inherited add _cls
                    if (isinstance(v, (Document, EmbeddedDocument))):
                        val['_cls'] = cls.__name__
                    value_dict[k] = val
                else:
                    value_dict[k] = self.to_mongo(v)
        if is_list:  # Convert back to a list
            return [v for k, v in sorted(value_dict.items(),
                                         key=operator.itemgetter(0))]
        return value_dict
    def validate(self, value):
        """If field is provided ensure the value is valid.

        Validates each contained item and aggregates per-key errors into a
        single ValidationError.
        """
        errors = {}
        if self.field:
            if hasattr(value, 'iteritems') or hasattr(value, 'items'):
                sequence = value.iteritems()
            else:
                sequence = enumerate(value)
            for k, v in sequence:
                try:
                    self.field._validate(v)
                except ValidationError, error:
                    errors[k] = error.errors or error
                except (ValueError, AssertionError), error:
                    errors[k] = error
            if errors:
                field_class = self.field.__class__.__name__
                self.error('Invalid %s item (%s)' % (field_class, value),
                           errors=errors)
        # Don't allow empty values if required
        if self.required and not value:
            self.error('Field is required and cannot be empty')
    def prepare_query_value(self, op, value):
        # Queries compare against the stored (mongo) representation.
        return self.to_mongo(value)
    def lookup_member(self, member_name):
        # Delegate member lookup to the per-item field, if any.
        if self.field:
            return self.field.lookup_member(member_name)
        return None
    def _set_owner_document(self, owner_document):
        if self.field:
            # Propagate ownership to the contained item field too.
            self.field.owner_document = owner_document
        self._owner_document = owner_document
    def _get_owner_document(self, owner_document):
        # NOTE(review): this "getter" takes an argument and assigns instead
        # of returning self._owner_document, yet it is wired as the fget of
        # the property below.  Looks wrong -- confirm against upstream
        # before changing, as callers may rely on the current behaviour.
        self._owner_document = owner_document
    owner_document = property(_get_owner_document, _set_owner_document)
class ObjectIdField(BaseField):
    """A field wrapper around MongoDB's ObjectIds.
    """
    def to_python(self, value):
        # Coerce raw values (e.g. 24-char hex strings) into ObjectId.
        if not isinstance(value, ObjectId):
            value = ObjectId(value)
        return value
    def to_mongo(self, value):
        if not isinstance(value, ObjectId):
            try:
                return ObjectId(unicode(value))
            except Exception, e:
                # e.message attribute has been deprecated since Python 2.6
                self.error(unicode(e))
        return value
    def prepare_query_value(self, op, value):
        # Queries compare against the stored ObjectId form.
        return self.to_mongo(value)
    def validate(self, value):
        # Valid iff the value round-trips through the ObjectId constructor.
        try:
            ObjectId(unicode(value))
        except:
            self.error('Invalid Object ID')
class GeoJsonBaseField(BaseField):
    """A geo json field storing a geojson style object.

    Subclasses set ``_type`` (e.g. "Point", "LineString", "Polygon") and a
    matching ``_validate_<type>`` method is dispatched to at validation time.

    .. versionadded:: 0.8
    """
    _geo_index = pymongo.GEOSPHERE
    _type = "GeoBase"
    def __init__(self, auto_index=True, *args, **kwargs):
        """
        :param auto_index: Automatically create a "2dsphere" index. Defaults
            to `True`.
        """
        self._name = "%sField" % self._type
        if not auto_index:
            self._geo_index = False
        super(GeoJsonBaseField, self).__init__(*args, **kwargs)
    def validate(self, value):
        """Validate the GeoJson object based on its type
        """
        if isinstance(value, dict):
            # Full GeoJSON dict form: {"type": ..., "coordinates": ...}
            if set(value.keys()) == set(['type', 'coordinates']):
                if value['type'] != self._type:
                    self.error('%s type must be "%s"' % (self._name, self._type))
                return self.validate(value['coordinates'])
            else:
                self.error('%s can only accept a valid GeoJson dictionary'
                           ' or lists of (x, y)' % self._name)
                return
        elif not isinstance(value, (list, tuple)):
            self.error('%s can only accept lists of [x, y]' % self._name)
            return
        # Dispatch to the concrete validator for this geometry type.
        validate = getattr(self, "_validate_%s" % self._type.lower())
        error = validate(value)
        if error:
            self.error(error)
    def _validate_polygon(self, value):
        # Returns an error string, or None if valid.
        if not isinstance(value, (list, tuple)):
            return 'Polygons must contain list of linestrings'
        # Quick and dirty validator
        try:
            value[0][0][0]
        except:
            return "Invalid Polygon must contain at least one valid linestring"
        errors = []
        for val in value:
            error = self._validate_linestring(val, False)
            if not error and val[0] != val[-1]:
                # GeoJSON polygon rings must be closed.
                error = 'LineStrings must start and end at the same point'
            if error and error not in errors:
                errors.append(error)
        if errors:
            return "Invalid Polygon:\n%s" % ", ".join(errors)
    def _validate_linestring(self, value, top_level=True):
        """Validates a linestring"""
        # Returns an error string, or None if valid.  top_level controls
        # whether the message is prefixed for direct reporting.
        if not isinstance(value, (list, tuple)):
            return 'LineStrings must contain list of coordinate pairs'
        # Quick and dirty validator
        try:
            value[0][0]
        except:
            return "Invalid LineString must contain at least one valid point"
        errors = []
        for val in value:
            error = self._validate_point(val)
            if error and error not in errors:
                errors.append(error)
        if errors:
            if top_level:
                return "Invalid LineString:\n%s" % ", ".join(errors)
            else:
                return "%s" % ", ".join(errors)
    def _validate_point(self, value):
        """Validate each set of coords"""
        # Returns an error string, or None if valid.
        if not isinstance(value, (list, tuple)):
            return 'Points must be a list of coordinate pairs'
        elif not len(value) == 2:
            return "Value (%s) must be a two-dimensional point" % repr(value)
        elif (not isinstance(value[0], (float, int)) or
              not isinstance(value[1], (float, int))):
            return "Both values (%s) in point must be float or int" % repr(value)
| bsd-3-clause |
ymollard/apex_playground | ros/apex_playground/src/hardware_controllers/sound.py | 2 | 1760 | #!/usr/bin/env python
import rospy
import pyaudio
import numpy as np
import json
from rospkg import RosPack
from std_msgs.msg import Float32
from os.path import join
class SoundController(object):
    """Plays sine-wave beeps on the default audio output in response to
    Float32 messages on the apex_playground/environment/sound topic."""
    def __init__(self):
        self.rospack = RosPack()
        # Environment parameters (rate, frequency range) and the sensory
        # bounds used to rescale incoming values into frequencies.
        with open(join(self.rospack.get_path('apex_playground'), 'config', 'environment.json')) as f:
            self.params = json.load(f)
        with open(join(self.rospack.get_path('apex_playground'), 'config', 'bounds.json')) as f:
            self.bounds = json.load(f)["sensory"]["sound"][0]
        self.p = pyaudio.PyAudio()
        self.fs = 44100  # sampling rate, Hz, must be integer
        # One beep lasts one message period.
        self.duration = 1./self.params['rate']
        # for paFloat32 sample values must be in range [-1.0, 1.0]
        self.stream = self.p.open(format=pyaudio.paFloat32,
                                  channels=1,
                                  rate=self.fs,
                                  output=True)
    def cb_sound(self, msg):
        # Linearly rescale the sensory value from [bounds[0], bounds[1]]
        # into the configured [freq_min, freq_max]; 0.0 means silence.
        value = msg.data
        if value != 0.:
            f = (value-self.bounds[0])/(self.bounds[1]-self.bounds[0]) *\
                (self.params["sound"]["freq"][1] - self.params["sound"]["freq"][0]) +\
                self.params["sound"]["freq"][0]
            self.beep(f)
    def beep(self, f):
        # Synthesise and play one sine burst of self.duration seconds at
        # frequency f (blocking write).
        samples = (np.sin(2*np.pi*np.arange(self.fs*self.duration)*f/self.fs)).astype(np.float32)
        self.stream.write(samples)
    def run(self):
        rospy.Subscriber("apex_playground/environment/sound", Float32, self.cb_sound)
        rospy.spin()
        # Reached on node shutdown, after spin() returns: release audio.
        self.stream.stop_stream()
        self.stream.close()
        self.p.terminate()
if __name__ == '__main__':
    # NOTE(review): "sound_controlller" has a triple-l typo; it is the
    # runtime node name, so it is left unchanged here -- confirm other
    # tooling does not reference it before renaming.
    rospy.init_node("sound_controlller")
    SoundController().run()
| gpl-3.0 |
UFAL-DSG/pjsip | tests/pjsua/scripts-recvfrom/231_reg_bad_fail_stale_false_nonce_changed.py | 59 | 1644 | # $Id: 231_reg_bad_fail_stale_false_nonce_changed.py 2392 2008-12-22 18:54:58Z bennylp $
import inc_sip as sip
import inc_sdp as sdp
# In this test we simulate broken server, where:
# - it wants to signal that NONCE has change
# - but it sets stale=false
# For this case pjsip will retry authentication until
# PJSIP_MAX_STALE_COUNT is exceeded.
#
# Client under test: register through the loopback test registrar using
# digest credentials in realm "python".
pjsua = "--null-audio --id=sip:CLIENT --registrar sip:127.0.0.1:$PORT " + \
        "--realm=python --user=username --password=password"
# 1) Initial REGISTER carries no credentials; challenge with nonce "1".
req1 = sip.RecvfromTransaction("Initial request", 401,
                               include=["REGISTER sip"],
                               exclude=["Authorization"],
                               resp_hdr=["WWW-Authenticate: Digest realm=\"python\", nonce=\"1\""]
                               )
# 2) Retry authenticates against nonce "1"; server rotates to nonce "2".
req2 = sip.RecvfromTransaction("First retry", 401,
                               include=["REGISTER sip", "Authorization", "nonce=\"1\""],
                               exclude=["Authorization:[\\s\\S]+Authorization:"],
                               resp_hdr=["WWW-Authenticate: Digest realm=\"python\", nonce=\"2\", stale=true"]
                               )
# 3) Same pattern again: nonce "2" answered with a challenge for nonce "3".
req3 = sip.RecvfromTransaction("Second retry retry", 401,
                               include=["REGISTER sip", "Authorization", "nonce=\"2\""],
                               exclude=["Authorization:[\\s\\S]+Authorization:"],
                               resp_hdr=["WWW-Authenticate: Digest realm=\"python\", nonce=\"3\", stale=true"]
                               )
# 4) After repeated stale challenges the client must give up with
# PJSIP_EAUTHSTALECOUNT instead of retrying forever.
req4 = sip.RecvfromTransaction("Third retry", 401,
                               include=["REGISTER sip", "Authorization", "nonce=\"3\""],
                               exclude=["Authorization:[\\s\\S]+Authorization:"],
                               resp_hdr=["WWW-Authenticate: Digest realm=\"python\", nonce=\"4\", stale=true"],
                               expect="PJSIP_EAUTHSTALECOUNT"
                               )
recvfrom_cfg = sip.RecvfromCfg("Failed registration retry (server rejects with stale=true) ",
                               pjsua, [req1, req2, req3, req4])
| gpl-2.0 |
chijure/android_kernel_lge_vee1 | tools/perf/scripts/python/futex-contention.py | 11261 | 1486 | # futex contention
# (c) 2010, Arnaldo Carvalho de Melo <acme@redhat.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Translation of:
#
# http://sourceware.org/systemtap/wiki/WSFutexContention
#
# to perf python scripting.
#
# Measures futex contention
import os, sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + '/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from Util import *
process_names = {}
thread_thislock = {}
thread_blocktime = {}
lock_waits = {}  # long-lived stats on (tid,lock) blockage elapsed time
process_names = {}  # long-lived pid-to-execname mapping
# NOTE(review): process_names is initialised twice (here and a few lines
# above); the first assignment is redundant.
def syscalls__sys_enter_futex(event, ctxt, cpu, s, ns, tid, comm,
    nr, uaddr, op, val, utime, uaddr2, val3):
    # Record the lock address and entry timestamp for FUTEX_WAIT calls.
    cmd = op & FUTEX_CMD_MASK
    if cmd != FUTEX_WAIT:
        return # we don't care about originators of WAKE events
    process_names[tid] = comm
    thread_thislock[tid] = uaddr
    thread_blocktime[tid] = nsecs(s, ns)
def syscalls__sys_exit_futex(event, ctxt, cpu, s, ns, tid, comm,
    nr, ret):
    # Only threads we saw enter FUTEX_WAIT have a recorded block time;
    # fold the elapsed wait into the per-(tid, lock) statistics.
    if thread_blocktime.has_key(tid):
        elapsed = nsecs(s, ns) - thread_blocktime[tid]
        add_stats(lock_waits, (tid, thread_thislock[tid]), elapsed)
        del thread_blocktime[tid]
        del thread_thislock[tid]
def trace_begin():
    print "Press control+C to stop and show the summary"
def trace_end():
    # Summarise contention per (thread, lock address) pair.
    for (tid, lock) in lock_waits:
        min, max, avg, count = lock_waits[tid, lock]
        print "%s[%d] lock %x contended %d times, %d avg ns" % \
              (process_names[tid], tid, lock, count, avg)
| gpl-2.0 |
atugushev/django-password-session | setup.py | 2 | 1482 | import os
import re
from setuptools import setup
# Read the package version out of password_session/__init__.py without
# importing the package (importing could require Django at build time).
# Files are opened with context managers so handles are closed promptly
# (the previous open(...).read() calls leaked file objects).
_here = os.path.dirname(__file__)
with open(os.path.join(_here, 'password_session', '__init__.py')) as _f:
    VERSION = re.search(r"VERSION\s*=\s*['\"](.*)['\"]", _f.read()).group(1)
# Long description rendered on PyPI.
with open(os.path.join(_here, 'README.rst')) as _f:
    README = _f.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
    name='django-password-session',
    version=VERSION,
    packages=['password_session'],
    install_requires=['Django>=1.3,<1.7'],
    include_package_data=True,
    license='MIT License',
    description='A reusable Django app that will invalidate all active sessions after change password.',
    long_description=README,
    url='https://github.com/atugushev/django-password-session',
    author='Albert Tugushev',
    author_email='albert@tugushev.ru',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Web Environment',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Topic :: Internet :: WWW/HTTP',
    ],
)
| mit |
MobinRanjbar/hue | apps/sqoop/src/sqoop/test_client.py | 28 | 3467 | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import shutil
import tempfile
from nose.tools import assert_true, assert_equal, assert_false
from sqoop.conf import SQOOP_CONF_DIR
from sqoop.client.base import SqoopClient
from sqoop.sqoop_properties import reset
def test_security_plain():
    # With SIMPLE authentication configured in sqoop.properties, the
    # client must NOT enable security.
    tmpdir = tempfile.mkdtemp()
    finish = SQOOP_CONF_DIR.set_for_testing(tmpdir)
    try:
        xml = sqoop_properties(authentication='SIMPLE')
        with file(os.path.join(tmpdir, 'sqoop.properties'), 'w') as f:
            f.write(xml)
        reset()
        client = SqoopClient('test.com', 'test')
        assert_false(client._security_enabled)
    finally:
        # Always restore the cached properties and conf dir override.
        reset()
        finish()
        shutil.rmtree(tmpdir)
def test_security_kerberos():
    # With KERBEROS authentication configured, the client must enable
    # security.
    tmpdir = tempfile.mkdtemp()
    finish = SQOOP_CONF_DIR.set_for_testing(tmpdir)
    try:
        xml = sqoop_properties(authentication='KERBEROS')
        with file(os.path.join(tmpdir, 'sqoop.properties'), 'w') as f:
            f.write(xml)
        reset()
        client = SqoopClient('test.com', 'test')
        assert_true(client._security_enabled)
    finally:
        # Always restore the cached properties and conf dir override.
        reset()
        finish()
        shutil.rmtree(tmpdir)
def sqoop_properties(authentication='SIMPLE'):
    """Render a sqoop.properties file body for tests.

    :param authentication: value substituted into the
        ``org.apache.sqoop.security.authentication.type`` property,
        e.g. ``'SIMPLE'`` or ``'KERBEROS'``.
    """
    template = """
org.apache.sqoop.repository.provider=org.apache.sqoop.repository.JdbcRepositoryProvider
org.apache.sqoop.repository.jdbc.transaction.isolation=READ_COMMITTED
org.apache.sqoop.repository.jdbc.maximum.connections=10
org.apache.sqoop.repository.jdbc.handler=org.apache.sqoop.repository.derby.DerbyRepositoryHandler
org.apache.sqoop.repository.jdbc.url=jdbc:derby:/var/lib/sqoop2/repository/db;create=true
org.apache.sqoop.repository.jdbc.driver=org.apache.derby.jdbc.EmbeddedDriver
org.apache.sqoop.repository.jdbc.create.schema=true
org.apache.sqoop.repository.jdbc.user=sa
org.apache.sqoop.repository.jdbc.password=
org.apache.sqoop.repository.sysprop.derby.stream.error.file=/var/log/sqoop2/derbyrepo.log
org.apache.sqoop.submission.engine=org.apache.sqoop.submission.mapreduce.MapreduceSubmissionEngine
org.apache.sqoop.submission.engine.mapreduce.configuration.directory={{CMF_CONF_DIR}}/yarn-conf
org.apache.sqoop.execution.engine=org.apache.sqoop.execution.mapreduce.MapreduceExecutionEngine
org.apache.sqoop.security.authentication.type=%(authentication)s
org.apache.sqoop.security.authentication.handler=org.apache.sqoop.security.KerberosAuthenticationHandler
org.apache.sqoop.security.authentication.kerberos.principal=sqoop2/_HOST@VPC.CLOUDERA.COM
org.apache.sqoop.security.authentication.kerberos.http.principal=HTTP/_HOST@VPC.CLOUDERA.COM
org.apache.sqoop.security.authentication.kerberos.keytab=sqoop.keytab
org.apache.sqoop.security.authentication.kerberos.http.keytab=sqoop.keytab
"""
    return template % dict(authentication=authentication)
| apache-2.0 |
xyzz/vcmi-build | project/jni/python/src/Lib/distutils/versionpredicate.py | 397 | 5095 | """Module for parsing and testing package version predicate strings.
"""
import re
import distutils.version
import operator
# Matches "<dotted package name><rest>"; group 1 is the name, group 2 the
# remainder (usually the parenthesized restriction list).
re_validPackage = re.compile(r"(?i)^\s*([a-z_]\w*(?:\.[a-z_]\w*)*)(.*)")
# (package) (rest)
re_paren = re.compile(r"^\s*\((.*)\)\s*$") # (list) inside of parentheses
# Matches one comparison such as ">= 1.2"; group 1 is the operator,
# group 2 the version string.
re_splitComparison = re.compile(r"^\s*(<=|>=|<|>|!=|==)\s*([^\s,]+)\s*$")
# (comp) (version)
def splitUp(pred):
    """Parse a single version comparison such as ``">= 1.2"``.

    Return a ``(comparison string, StrictVersion)`` pair; raise
    ``ValueError`` if the predicate does not parse.
    """
    match = re_splitComparison.match(pred)
    if match is None:
        raise ValueError("bad package restriction syntax: %r" % pred)
    comparison, version_str = match.groups()
    return (comparison, distutils.version.StrictVersion(version_str))
# Map each comparison-operator string to its operator-module function.
compmap = {"<": operator.lt, "<=": operator.le, "==": operator.eq,
           ">": operator.gt, ">=": operator.ge, "!=": operator.ne}
class VersionPredicate:
    """Parse and test package version predicates.

    >>> v = VersionPredicate('pyepat.abc (>1.0, <3333.3a1, !=1555.1b3)')

    The `name` attribute provides the full dotted name that is given::

    >>> v.name
    'pyepat.abc'

    The str() of a `VersionPredicate` provides a normalized
    human-readable version of the expression::

    >>> print v
    pyepat.abc (> 1.0, < 3333.3a1, != 1555.1b3)

    The `satisfied_by()` method can be used to determine with a given
    version number is included in the set described by the version
    restrictions::

    >>> v.satisfied_by('1.1')
    True
    >>> v.satisfied_by('1.4')
    True
    >>> v.satisfied_by('1.0')
    False
    >>> v.satisfied_by('4444.4')
    False
    >>> v.satisfied_by('1555.1b3')
    False

    `VersionPredicate` is flexible in accepting extra whitespace::

    >>> v = VersionPredicate(' pat( ==  0.1  )  ')
    >>> v.name
    'pat'
    >>> v.satisfied_by('0.1')
    True
    >>> v.satisfied_by('0.2')
    False

    If any version numbers passed in do not conform to the
    restrictions of `StrictVersion`, a `ValueError` is raised::

    >>> v = VersionPredicate('p1.p2.p3.p4(>=1.0, <=1.3a1, !=1.2zb3)')
    Traceback (most recent call last):
    ...
    ValueError: invalid version number '1.2zb3'

    It the module or package name given does not conform to what's
    allowed as a legal module or package name, `ValueError` is
    raised::

    >>> v = VersionPredicate('foo-bar')
    Traceback (most recent call last):
    ...
    ValueError: expected parenthesized list: '-bar'

    >>> v = VersionPredicate('foo bar (12.21)')
    Traceback (most recent call last):
    ...
    ValueError: expected parenthesized list: 'bar (12.21)'
    """
    def __init__(self, versionPredicateStr):
        """Parse a version predicate string.
        """
        # Fields:
        # name: package name
        # pred: list of (comparison string, StrictVersion)
        versionPredicateStr = versionPredicateStr.strip()
        if not versionPredicateStr:
            raise ValueError("empty package restriction")
        match = re_validPackage.match(versionPredicateStr)
        if not match:
            raise ValueError("bad package name in %r" % versionPredicateStr)
        self.name, paren = match.groups()
        paren = paren.strip()
        if paren:
            match = re_paren.match(paren)
            if not match:
                raise ValueError("expected parenthesized list: %r" % paren)
            # NOTE(review): `str` shadows the builtin here; kept as-is.
            str = match.groups()[0]
            self.pred = [splitUp(aPred) for aPred in str.split(",")]
            if not self.pred:
                raise ValueError("empty parenthesized list in %r"
                                 % versionPredicateStr)
        else:
            self.pred = []
    def __str__(self):
        # Normalized form: "name (op ver, op ver, ...)".
        if self.pred:
            seq = [cond + " " + str(ver) for cond, ver in self.pred]
            return self.name + " (" + ", ".join(seq) + ")"
        else:
            return self.name
    def satisfied_by(self, version):
        """True if version is compatible with all the predicates in self.
        The parameter version must be acceptable to the StrictVersion
        constructor.  It may be either a string or StrictVersion.
        """
        # All comparisons must hold: predicates are ANDed together.
        for cond, ver in self.pred:
            if not compmap[cond](version, ver):
                return False
        return True
_provision_rx = None  # compiled lazily on first call to split_provision()
def split_provision(value):
    """Return the name and optional version number of a provision.

    The version number, if given, will be returned as a `StrictVersion`
    instance, otherwise it will be `None`.

    >>> split_provision('mypkg')
    ('mypkg', None)
    >>> split_provision(' mypkg( 1.2 ) ')
    ('mypkg', StrictVersion ('1.2'))
    """
    global _provision_rx
    if _provision_rx is None:
        # Compile on first use so module import stays cheap.
        _provision_rx = re.compile(
            "([a-zA-Z_]\w*(?:\.[a-zA-Z_]\w*)*)(?:\s*\(\s*([^)\s]+)\s*\))?$")
    value = value.strip()
    m = _provision_rx.match(value)
    if not m:
        raise ValueError("illegal provides specification: %r" % value)
    ver = m.group(2) or None
    if ver:
        ver = distutils.version.StrictVersion(ver)
    return m.group(1), ver
| lgpl-2.1 |
liberorbis/libernext | env/lib/python2.7/site-packages/chardet/euckrprober.py | 2931 | 1675 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import EUCKRDistributionAnalysis
from .mbcssm import EUCKRSMModel
class EUCKRProber(MultiByteCharSetProber):
    """Charset prober for the EUC-KR (Korean) encoding: pairs the EUC-KR
    coding state machine with the EUC-KR character distribution analyser."""
    def __init__(self):
        MultiByteCharSetProber.__init__(self)
        self._mCodingSM = CodingStateMachine(EUCKRSMModel)
        self._mDistributionAnalyzer = EUCKRDistributionAnalysis()
        self.reset()
    def get_charset_name(self):
        return "EUC-KR"
| gpl-2.0 |
SantosDevelopers/sborganicos | venv/lib/python3.5/site-packages/pip/_vendor/distlib/wheel.py | 412 | 39115 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2013-2016 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
from __future__ import unicode_literals
import base64
import codecs
import datetime
import distutils.util
from email import message_from_file
import hashlib
import imp
import json
import logging
import os
import posixpath
import re
import shutil
import sys
import tempfile
import zipfile
from . import __version__, DistlibException
from .compat import sysconfig, ZipFile, fsdecode, text_type, filter
from .database import InstalledDistribution
from .metadata import Metadata, METADATA_FILENAME
from .util import (FileOperator, convert_path, CSVReader, CSVWriter, Cache,
cached_property, get_cache_base, read_exports, tempdir)
from .version import NormalizedVersion, UnsupportedVersionError
logger = logging.getLogger(__name__)
cache = None    # created when needed
# Two-letter implementation prefix used in wheel compatibility tags
# (cp=CPython, pp=PyPy, jy=Jython, ip=IronPython).
if hasattr(sys, 'pypy_version_info'):
    IMP_PREFIX = 'pp'
elif sys.platform.startswith('java'):
    IMP_PREFIX = 'jy'
elif sys.platform == 'cli':
    IMP_PREFIX = 'ip'
else:
    IMP_PREFIX = 'cp'
VER_SUFFIX = sysconfig.get_config_var('py_version_nodot')
if not VER_SUFFIX:   # pragma: no cover
    VER_SUFFIX = '%s%s' % sys.version_info[:2]
PYVER = 'py' + VER_SUFFIX
IMPVER = IMP_PREFIX + VER_SUFFIX
ARCH = distutils.util.get_platform().replace('-', '_').replace('.', '_')
ABI = sysconfig.get_config_var('SOABI')
if ABI and ABI.startswith('cpython-'):
    ABI = ABI.replace('cpython-', 'cp')
else:
    def _derive_abi():
        # Reconstruct the CPython ABI tag from build flags when SOABI
        # is unavailable (older Pythons / some platforms).
        parts = ['cp', VER_SUFFIX]
        if sysconfig.get_config_var('Py_DEBUG'):
            parts.append('d')
        if sysconfig.get_config_var('WITH_PYMALLOC'):
            parts.append('m')
        if sysconfig.get_config_var('Py_UNICODE_SIZE') == 4:
            parts.append('u')
        return ''.join(parts)
    ABI = _derive_abi()
    del _derive_abi
# Full wheel filename: name-version(-build)?-pytag-abitag-archtag.whl
FILENAME_RE = re.compile(r'''
(?P<nm>[^-]+)
-(?P<vn>\d+[^-]*)
(-(?P<bn>\d+[^-]*))?
-(?P<py>\w+\d+(\.\w+\d+)*)
-(?P<bi>\w+)
-(?P<ar>\w+(\.\w+)*)
\.whl$
''', re.IGNORECASE | re.VERBOSE)
# Just the name-version(-build)? prefix.
NAME_VERSION_RE = re.compile(r'''
(?P<nm>[^-]+)
-(?P<vn>\d+[^-]*)
(-(?P<bn>\d+[^-]*))?$
''', re.IGNORECASE | re.VERBOSE)
SHEBANG_RE = re.compile(br'\s*#![^\r\n]*')
SHEBANG_DETAIL_RE = re.compile(br'^(\s*#!("[^"]+"|\S+))\s+(.*)$')
SHEBANG_PYTHON = b'#!python'
SHEBANG_PYTHONW = b'#!pythonw'
# Normalise OS paths to the forward-slash form used inside zip archives.
if os.sep == '/':
    to_posix = lambda o: o
else:
    to_posix = lambda o: o.replace(os.sep, '/')
class Mounter(object):
    """Import hook that resolves C extensions contributed by mounted wheels.

    Each mounted wheel registers a set of ``(dotted module name, shared
    library path)`` pairs; ``find_module`` / ``load_module`` implement the
    (pre-PEP 451) import-hook protocol over the merged mapping.
    """
    def __init__(self):
        # pathname -> extension pairs contributed by that wheel
        self.impure_wheels = {}
        # dotted module name -> shared-library path (all wheels merged)
        self.libs = {}
    def add(self, pathname, extensions):
        self.impure_wheels[pathname] = extensions
        self.libs.update(extensions)
    def remove(self, pathname):
        # Drop every extension the given wheel contributed.
        extensions = self.impure_wheels.pop(pathname)
        for k, v in extensions:
            self.libs.pop(k, None)
    def find_module(self, fullname, path=None):
        # We only claim modules we have a shared library for.
        return self if fullname in self.libs else None
    def load_module(self, fullname):
        # Honour the import protocol: reuse an already-imported module.
        if fullname in sys.modules:
            return sys.modules[fullname]
        if fullname not in self.libs:
            raise ImportError('unable to find extension for %s' % fullname)
        result = imp.load_dynamic(fullname, self.libs[fullname])
        result.__loader__ = self
        package, _, _ = fullname.rpartition('.')
        if package:
            result.__package__ = package
        return result
class Wheel(object):
"""
Class to build and install from Wheel files (PEP 427).
"""
wheel_version = (1, 1)
hash_kind = 'sha256'
    def __init__(self, filename=None, sign=False, verify=False):
        """
        Initialise an instance using a (valid) filename.

        ``filename`` may be None (placeholder wheel), a ``name-version``
        string, or a full wheel filename (optionally with a directory).
        """
        self.sign = sign
        self.should_verify = verify
        # Defaults used until (and unless) parsed from the filename.
        self.buildver = ''
        self.pyver = [PYVER]
        self.abi = ['none']
        self.arch = ['any']
        self.dirname = os.getcwd()
        if filename is None:
            self.name = 'dummy'
            self.version = '0.1'
            self._filename = self.filename
        else:
            m = NAME_VERSION_RE.match(filename)
            if m:
                # "name-version(-build)" form: tags keep their defaults.
                info = m.groupdict('')
                self.name = info['nm']
                # Reinstate the local version separator
                self.version = info['vn'].replace('_', '-')
                self.buildver = info['bn']
                self._filename = self.filename
            else:
                # Full wheel filename, possibly with a leading directory.
                dirname, filename = os.path.split(filename)
                m = FILENAME_RE.match(filename)
                if not m:
                    raise DistlibException('Invalid name or '
                                           'filename: %r' % filename)
                if dirname:
                    self.dirname = os.path.abspath(dirname)
                self._filename = filename
                info = m.groupdict('')
                self.name = info['nm']
                self.version = info['vn']
                self.buildver = info['bn']
                # Compound tags are '.'-separated lists.
                self.pyver = info['py'].split('.')
                self.abi = info['bi'].split('.')
                self.arch = info['ar'].split('.')
    @property
    def filename(self):
        """
        Build and return a filename from the various components.
        """
        if self.buildver:
            buildver = '-' + self.buildver
        else:
            buildver = ''
        # Compound tags are rejoined with '.'.
        pyver = '.'.join(self.pyver)
        abi = '.'.join(self.abi)
        arch = '.'.join(self.arch)
        # replace - with _ as a local version separator
        version = self.version.replace('-', '_')
        return '%s-%s%s-%s-%s-%s.whl' % (self.name, version, buildver,
                                         pyver, abi, arch)
    @property
    def exists(self):
        # True if the wheel file is present on disk.
        path = os.path.join(self.dirname, self.filename)
        return os.path.isfile(path)
    @property
    def tags(self):
        # Yield every (pyver, abi, arch) compatibility-tag combination.
        for pyver in self.pyver:
            for abi in self.abi:
                for arch in self.arch:
                    yield pyver, abi, arch
    @cached_property
    def metadata(self):
        # Parse the distribution metadata out of the wheel's .dist-info
        # directory; cached because it re-reads the archive.
        pathname = os.path.join(self.dirname, self.filename)
        name_ver = '%s-%s' % (self.name, self.version)
        info_dir = '%s.dist-info' % name_ver
        wrapper = codecs.getreader('utf-8')
        with ZipFile(pathname, 'r') as zf:
            wheel_metadata = self.get_wheel_metadata(zf)
            wv = wheel_metadata['Wheel-Version'].split('.', 1)
            file_version = tuple([int(i) for i in wv])
            # Wheel-Version < 1.1 stored metadata as METADATA rather than
            # the JSON METADATA_FILENAME.
            if file_version < (1, 1):
                fn = 'METADATA'
            else:
                fn = METADATA_FILENAME
            try:
                metadata_filename = posixpath.join(info_dir, fn)
                with zf.open(metadata_filename) as bf:
                    wf = wrapper(bf)
                    result = Metadata(fileobj=wf)
            except KeyError:
                raise ValueError('Invalid wheel, because %s is '
                                 'missing' % fn)
        return result
    def get_wheel_metadata(self, zf):
        # Parse the WHEEL file (email-header format) into a plain dict.
        name_ver = '%s-%s' % (self.name, self.version)
        info_dir = '%s.dist-info' % name_ver
        metadata_filename = posixpath.join(info_dir, 'WHEEL')
        with zf.open(metadata_filename) as bf:
            wf = codecs.getreader('utf-8')(bf)
            message = message_from_file(wf)
        return dict(message)
    @cached_property
    def info(self):
        # Cached convenience accessor for the parsed WHEEL metadata.
        pathname = os.path.join(self.dirname, self.filename)
        with ZipFile(pathname, 'r') as zf:
            result = self.get_wheel_metadata(zf)
        return result
def process_shebang(self, data):
m = SHEBANG_RE.match(data)
if m:
end = m.end()
shebang, data_after_shebang = data[:end], data[end:]
# Preserve any arguments after the interpreter
if b'pythonw' in shebang.lower():
shebang_python = SHEBANG_PYTHONW
else:
shebang_python = SHEBANG_PYTHON
m = SHEBANG_DETAIL_RE.match(shebang)
if m:
args = b' ' + m.groups()[-1]
else:
args = b''
shebang = shebang_python + args
data = shebang + data_after_shebang
else:
cr = data.find(b'\r')
lf = data.find(b'\n')
if cr < 0 or cr > lf:
term = b'\n'
else:
if data[cr:cr + 2] == b'\r\n':
term = b'\r\n'
else:
term = b'\r'
data = SHEBANG_PYTHON + term + data
return data
def get_hash(self, data, hash_kind=None):
if hash_kind is None:
hash_kind = self.hash_kind
try:
hasher = getattr(hashlib, hash_kind)
except AttributeError:
raise DistlibException('Unsupported hash algorithm: %r' % hash_kind)
result = hasher(data).digest()
result = base64.urlsafe_b64encode(result).rstrip(b'=').decode('ascii')
return hash_kind, result
def write_record(self, records, record_path, base):
records = list(records) # make a copy for sorting
p = to_posix(os.path.relpath(record_path, base))
records.append((p, '', ''))
records.sort()
with CSVWriter(record_path) as writer:
for row in records:
writer.writerow(row)
def write_records(self, info, libdir, archive_paths):
records = []
distinfo, info_dir = info
hasher = getattr(hashlib, self.hash_kind)
for ap, p in archive_paths:
with open(p, 'rb') as f:
data = f.read()
digest = '%s=%s' % self.get_hash(data)
size = os.path.getsize(p)
records.append((ap, digest, size))
p = os.path.join(distinfo, 'RECORD')
self.write_record(records, p, libdir)
ap = to_posix(os.path.join(info_dir, 'RECORD'))
archive_paths.append((ap, p))
def build_zip(self, pathname, archive_paths):
with ZipFile(pathname, 'w', zipfile.ZIP_DEFLATED) as zf:
for ap, p in archive_paths:
logger.debug('Wrote %s to %s in wheel', p, ap)
zf.write(p, ap)
    def build(self, paths, tags=None, wheel_version=None):
        """
        Build a wheel from files in specified paths, and use any specified tags
        when determining the name of the wheel.

        :param paths: mapping of location keys ('purelib' or 'platlib',
                      optionally 'data', 'headers', 'scripts') to directories.
        :param tags: optional mapping overriding the default 'pyver', 'abi'
                     and 'arch' tag lists.
        :param wheel_version: optional (major, minor) tuple written to the
                              WHEEL file; defaults to self.wheel_version.
        :return: the pathname of the wheel that was written.
        """
        if tags is None:
            tags = {}
        # Exactly one of 'purelib'/'platlib' is expected in paths; platlib
        # implies a platform-specific (impure) wheel.
        libkey = list(filter(lambda o: o in paths, ('purelib', 'platlib')))[0]
        if libkey == 'platlib':
            is_pure = 'false'
            default_pyver = [IMPVER]
            default_abi = [ABI]
            default_arch = [ARCH]
        else:
            is_pure = 'true'
            default_pyver = [PYVER]
            default_abi = ['none']
            default_arch = ['any']
        self.pyver = tags.get('pyver', default_pyver)
        self.abi = tags.get('abi', default_abi)
        self.arch = tags.get('arch', default_arch)
        libdir = paths[libkey]
        name_ver = '%s-%s' % (self.name, self.version)
        data_dir = '%s.data' % name_ver
        info_dir = '%s.dist-info' % name_ver
        archive_paths = []
        # First, stuff which is not in site-packages
        for key in ('data', 'headers', 'scripts'):
            if key not in paths:
                continue
            path = paths[key]
            if os.path.isdir(path):
                for root, dirs, files in os.walk(path):
                    for fn in files:
                        p = fsdecode(os.path.join(root, fn))
                        rp = os.path.relpath(p, path)
                        ap = to_posix(os.path.join(data_dir, key, rp))
                        archive_paths.append((ap, p))
                        # Scripts get their shebang rewritten in place
                        # (binary launchers excepted).
                        if key == 'scripts' and not p.endswith('.exe'):
                            with open(p, 'rb') as f:
                                data = f.read()
                            data = self.process_shebang(data)
                            with open(p, 'wb') as f:
                                f.write(data)
        # Now, stuff which is in site-packages, other than the
        # distinfo stuff.
        path = libdir
        distinfo = None
        for root, dirs, files in os.walk(path):
            if root == path:
                # At the top level only, save distinfo for later
                # and skip it for now
                for i, dn in enumerate(dirs):
                    dn = fsdecode(dn)
                    if dn.endswith('.dist-info'):
                        distinfo = os.path.join(root, dn)
                        # Removing it from dirs stops os.walk descending
                        # into it.
                        del dirs[i]
                        break
                assert distinfo, '.dist-info directory expected, not found'
            for fn in files:
                # comment out next suite to leave .pyc files in
                if fsdecode(fn).endswith(('.pyc', '.pyo')):
                    continue
                p = os.path.join(root, fn)
                rp = to_posix(os.path.relpath(p, path))
                archive_paths.append((rp, p))
        # Now distinfo. Assumed to be flat, i.e. os.listdir is enough.
        files = os.listdir(distinfo)
        for fn in files:
            # RECORD/INSTALLER/SHARED/WHEEL are regenerated rather than
            # copied from any existing dist-info.
            if fn not in ('RECORD', 'INSTALLER', 'SHARED', 'WHEEL'):
                p = fsdecode(os.path.join(distinfo, fn))
                ap = to_posix(os.path.join(info_dir, fn))
                archive_paths.append((ap, p))
        wheel_metadata = [
            'Wheel-Version: %d.%d' % (wheel_version or self.wheel_version),
            'Generator: distlib %s' % __version__,
            'Root-Is-Purelib: %s' % is_pure,
        ]
        for pyver, abi, arch in self.tags:
            wheel_metadata.append('Tag: %s-%s-%s' % (pyver, abi, arch))
        p = os.path.join(distinfo, 'WHEEL')
        with open(p, 'w') as f:
            f.write('\n'.join(wheel_metadata))
        ap = to_posix(os.path.join(info_dir, 'WHEEL'))
        archive_paths.append((ap, p))
        # Now, at last, RECORD.
        # Paths in here are archive paths - nothing else makes sense.
        self.write_records((distinfo, info_dir), libdir, archive_paths)
        # Now, ready to build the zip file
        pathname = os.path.join(self.dirname, self.filename)
        self.build_zip(pathname, archive_paths)
        return pathname
    def install(self, paths, maker, **kwargs):
        """
        Install a wheel to the specified paths. If kwarg ``warner`` is
        specified, it should be a callable, which will be called with two
        tuples indicating the wheel version of this software and the wheel
        version in the file, if there is a discrepancy in the versions.
        This can be used to issue any warnings or raise any exceptions.
        If kwarg ``lib_only`` is True, only the purelib/platlib files are
        installed, and the headers, scripts, data and dist-info metadata are
        not written.

        Sizes and digests of extracted files are checked against the wheel's
        RECORD, and any failure rolls back files written so far.

        The return value is a :class:`InstalledDistribution` instance unless
        ``options.lib_only`` is True, in which case the return value is ``None``.
        """
        dry_run = maker.dry_run
        warner = kwargs.get('warner')
        lib_only = kwargs.get('lib_only', False)
        pathname = os.path.join(self.dirname, self.filename)
        name_ver = '%s-%s' % (self.name, self.version)
        data_dir = '%s.data' % name_ver
        info_dir = '%s.dist-info' % name_ver
        metadata_name = posixpath.join(info_dir, METADATA_FILENAME)
        wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
        record_name = posixpath.join(info_dir, 'RECORD')
        wrapper = codecs.getreader('utf-8')
        with ZipFile(pathname, 'r') as zf:
            with zf.open(wheel_metadata_name) as bwf:
                wf = wrapper(bwf)
                message = message_from_file(wf)
            wv = message['Wheel-Version'].split('.', 1)
            file_version = tuple([int(i) for i in wv])
            if (file_version != self.wheel_version) and warner:
                warner(self.wheel_version, file_version)
            if message['Root-Is-Purelib'] == 'true':
                libdir = paths['purelib']
            else:
                libdir = paths['platlib']
            # Index the RECORD rows by archive path for size/digest checks.
            records = {}
            with zf.open(record_name) as bf:
                with CSVReader(stream=bf) as reader:
                    for row in reader:
                        p = row[0]
                        records[p] = row
            data_pfx = posixpath.join(data_dir, '')
            info_pfx = posixpath.join(info_dir, '')
            script_pfx = posixpath.join(data_dir, 'scripts', '')
            # make a new instance rather than a copy of maker's,
            # as we mutate it
            fileop = FileOperator(dry_run=dry_run)
            fileop.record = True    # so we can rollback if needed
            bc = not sys.dont_write_bytecode    # Double negatives. Lovely!
            outfiles = []   # for RECORD writing
            # for script copying/shebang processing
            workdir = tempfile.mkdtemp()
            # set target dir later
            # we default add_launchers to False, as the
            # Python Launcher should be used instead
            maker.source_dir = workdir
            maker.target_dir = None
            try:
                for zinfo in zf.infolist():
                    arcname = zinfo.filename
                    if isinstance(arcname, text_type):
                        u_arcname = arcname
                    else:
                        u_arcname = arcname.decode('utf-8')
                    # The signature file won't be in RECORD,
                    # and we don't currently do anything with it
                    if u_arcname.endswith('/RECORD.jws'):
                        continue
                    row = records[u_arcname]
                    if row[2] and str(zinfo.file_size) != row[2]:
                        raise DistlibException('size mismatch for '
                                               '%s' % u_arcname)
                    if row[1]:
                        kind, value = row[1].split('=', 1)
                        with zf.open(arcname) as bf:
                            data = bf.read()
                        _, digest = self.get_hash(data, kind)
                        if digest != value:
                            raise DistlibException('digest mismatch for '
                                                   '%s' % arcname)
                    if lib_only and u_arcname.startswith((info_pfx, data_pfx)):
                        logger.debug('lib_only: skipping %s', u_arcname)
                        continue
                    is_script = (u_arcname.startswith(script_pfx)
                                 and not u_arcname.endswith('.exe'))
                    if u_arcname.startswith(data_pfx):
                        # e.g. 'name-1.0.data/scripts/foo' -> paths['scripts']
                        _, where, rp = u_arcname.split('/', 2)
                        outfile = os.path.join(paths[where], convert_path(rp))
                    else:
                        # meant for site-packages.
                        if u_arcname in (wheel_metadata_name, record_name):
                            continue
                        outfile = os.path.join(libdir, convert_path(u_arcname))
                    if not is_script:
                        with zf.open(arcname) as bf:
                            fileop.copy_stream(bf, outfile)
                        outfiles.append(outfile)
                        # Double check the digest of the written file
                        if not dry_run and row[1]:
                            with open(outfile, 'rb') as bf:
                                data = bf.read()
                                _, newdigest = self.get_hash(data, kind)
                                if newdigest != digest:
                                    raise DistlibException('digest mismatch '
                                                           'on write for '
                                                           '%s' % outfile)
                        if bc and outfile.endswith('.py'):
                            try:
                                pyc = fileop.byte_compile(outfile)
                                outfiles.append(pyc)
                            except Exception:
                                # Don't give up if byte-compilation fails,
                                # but log it and perhaps warn the user
                                logger.warning('Byte-compilation failed',
                                               exc_info=True)
                    else:
                        # Scripts are copied to a work dir and regenerated
                        # through the maker so shebangs/launchers are correct.
                        fn = os.path.basename(convert_path(arcname))
                        workname = os.path.join(workdir, fn)
                        with zf.open(arcname) as bf:
                            fileop.copy_stream(bf, workname)
                        dn, fn = os.path.split(outfile)
                        maker.target_dir = dn
                        filenames = maker.make(fn)
                        fileop.set_executable_mode(filenames)
                        outfiles.extend(filenames)
                if lib_only:
                    logger.debug('lib_only: returning None')
                    dist = None
                else:
                    # Generate scripts
                    # Try to get pydist.json so we can see if there are
                    # any commands to generate. If this fails (e.g. because
                    # of a legacy wheel), log a warning but don't give up.
                    commands = None
                    file_version = self.info['Wheel-Version']
                    if file_version == '1.0':
                        # Use legacy info
                        ep = posixpath.join(info_dir, 'entry_points.txt')
                        try:
                            with zf.open(ep) as bwf:
                                epdata = read_exports(bwf)
                            commands = {}
                            for key in ('console', 'gui'):
                                k = '%s_scripts' % key
                                if k in epdata:
                                    commands['wrap_%s' % key] = d = {}
                                    for v in epdata[k].values():
                                        s = '%s:%s' % (v.prefix, v.suffix)
                                        if v.flags:
                                            s += ' %s' % v.flags
                                        d[v.name] = s
                        except Exception:
                            logger.warning('Unable to read legacy script '
                                           'metadata, so cannot generate '
                                           'scripts')
                    else:
                        try:
                            with zf.open(metadata_name) as bwf:
                                wf = wrapper(bwf)
                                commands = json.load(wf).get('extensions')
                                if commands:
                                    commands = commands.get('python.commands')
                        except Exception:
                            logger.warning('Unable to read JSON metadata, so '
                                           'cannot generate scripts')
                    if commands:
                        console_scripts = commands.get('wrap_console', {})
                        gui_scripts = commands.get('wrap_gui', {})
                        if console_scripts or gui_scripts:
                            script_dir = paths.get('scripts', '')
                            if not os.path.isdir(script_dir):
                                raise ValueError('Valid script path not '
                                                 'specified')
                            maker.target_dir = script_dir
                            for k, v in console_scripts.items():
                                script = '%s = %s' % (k, v)
                                filenames = maker.make(script)
                                fileop.set_executable_mode(filenames)
                            if gui_scripts:
                                options = {'gui': True }
                                for k, v in gui_scripts.items():
                                    script = '%s = %s' % (k, v)
                                    filenames = maker.make(script, options)
                                    fileop.set_executable_mode(filenames)
                    p = os.path.join(libdir, info_dir)
                    dist = InstalledDistribution(p)
                    # Write SHARED
                    paths = dict(paths)     # don't change passed in dict
                    del paths['purelib']
                    del paths['platlib']
                    paths['lib'] = libdir
                    p = dist.write_shared_locations(paths, dry_run)
                    if p:
                        outfiles.append(p)
                    # Write RECORD
                    dist.write_installed_files(outfiles, paths['prefix'],
                                               dry_run)
                return dist
            except Exception:  # pragma: no cover
                logger.exception('installation failed.')
                fileop.rollback()
                raise
            finally:
                shutil.rmtree(workdir)
def _get_dylib_cache(self):
global cache
if cache is None:
# Use native string to avoid issues on 2.x: see Python #20140.
base = os.path.join(get_cache_base(), str('dylib-cache'),
sys.version[:3])
cache = Cache(base)
return cache
    def _get_extensions(self):
        """
        Return a list of (name, path) pairs for native extension modules
        listed in the wheel's EXTENSIONS file, extracting them into the
        dylib cache when missing or older than the wheel's copy. Returns
        an empty list if the wheel has no EXTENSIONS entry.
        """
        pathname = os.path.join(self.dirname, self.filename)
        name_ver = '%s-%s' % (self.name, self.version)
        info_dir = '%s.dist-info' % name_ver
        arcname = posixpath.join(info_dir, 'EXTENSIONS')
        wrapper = codecs.getreader('utf-8')
        result = []
        with ZipFile(pathname, 'r') as zf:
            try:
                with zf.open(arcname) as bf:
                    wf = wrapper(bf)
                    extensions = json.load(wf)
                    cache = self._get_dylib_cache()
                    prefix = cache.prefix_to_dir(pathname)
                    cache_base = os.path.join(cache.base, prefix)
                    if not os.path.isdir(cache_base):
                        os.makedirs(cache_base)
                    for name, relpath in extensions.items():
                        dest = os.path.join(cache_base, convert_path(relpath))
                        if not os.path.exists(dest):
                            extract = True
                        else:
                            # Re-extract only if the copy inside the wheel
                            # is newer than the cached file on disk.
                            file_time = os.stat(dest).st_mtime
                            file_time = datetime.datetime.fromtimestamp(file_time)
                            info = zf.getinfo(relpath)
                            wheel_time = datetime.datetime(*info.date_time)
                            extract = wheel_time > file_time
                        if extract:
                            zf.extract(relpath, cache_base)
                        result.append((name, dest))
            except KeyError:
                # No EXTENSIONS file in this wheel - nothing to do.
                pass
        return result
    def is_compatible(self):
        """
        Determine if a wheel is compatible with the running system.
        """
        # Delegates to the module-level is_compatible() helper, which checks
        # this wheel's tags against the computed COMPATIBLE_TAGS set.
        return is_compatible(self)
def is_mountable(self):
"""
Determine if a wheel is asserted as mountable by its metadata.
"""
return True # for now - metadata details TBD
def mount(self, append=False):
pathname = os.path.abspath(os.path.join(self.dirname, self.filename))
if not self.is_compatible():
msg = 'Wheel %s not compatible with this Python.' % pathname
raise DistlibException(msg)
if not self.is_mountable():
msg = 'Wheel %s is marked as not mountable.' % pathname
raise DistlibException(msg)
if pathname in sys.path:
logger.debug('%s already in path', pathname)
else:
if append:
sys.path.append(pathname)
else:
sys.path.insert(0, pathname)
extensions = self._get_extensions()
if extensions:
if _hook not in sys.meta_path:
sys.meta_path.append(_hook)
_hook.add(pathname, extensions)
def unmount(self):
pathname = os.path.abspath(os.path.join(self.dirname, self.filename))
if pathname not in sys.path:
logger.debug('%s not in path', pathname)
else:
sys.path.remove(pathname)
if pathname in _hook.impure_wheels:
_hook.remove(pathname)
if not _hook.impure_wheels:
if _hook in sys.meta_path:
sys.meta_path.remove(_hook)
    def verify(self):
        """
        Verify the wheel's contents against its RECORD: every archive entry
        must match its recorded size and digest, and no entry may contain
        '..' path components. Raises DistlibException on any mismatch.
        """
        pathname = os.path.join(self.dirname, self.filename)
        name_ver = '%s-%s' % (self.name, self.version)
        data_dir = '%s.data' % name_ver
        info_dir = '%s.dist-info' % name_ver
        metadata_name = posixpath.join(info_dir, METADATA_FILENAME)
        wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
        record_name = posixpath.join(info_dir, 'RECORD')
        wrapper = codecs.getreader('utf-8')
        with ZipFile(pathname, 'r') as zf:
            with zf.open(wheel_metadata_name) as bwf:
                wf = wrapper(bwf)
                message = message_from_file(wf)
            wv = message['Wheel-Version'].split('.', 1)
            file_version = tuple([int(i) for i in wv])
            # TODO version verification
            # Index RECORD rows by archive path.
            records = {}
            with zf.open(record_name) as bf:
                with CSVReader(stream=bf) as reader:
                    for row in reader:
                        p = row[0]
                        records[p] = row
            for zinfo in zf.infolist():
                arcname = zinfo.filename
                if isinstance(arcname, text_type):
                    u_arcname = arcname
                else:
                    u_arcname = arcname.decode('utf-8')
                # Reject path traversal attempts.
                if '..' in u_arcname:
                    raise DistlibException('invalid entry in '
                                           'wheel: %r' % u_arcname)
                # The signature file won't be in RECORD,
                # and we don't currently do anything with it
                if u_arcname.endswith('/RECORD.jws'):
                    continue
                row = records[u_arcname]
                if row[2] and str(zinfo.file_size) != row[2]:
                    raise DistlibException('size mismatch for '
                                           '%s' % u_arcname)
                if row[1]:
                    kind, value = row[1].split('=', 1)
                    with zf.open(arcname) as bf:
                        data = bf.read()
                    _, digest = self.get_hash(data, kind)
                    if digest != value:
                        raise DistlibException('digest mismatch for '
                                               '%s' % arcname)
    def update(self, modifier, dest_dir=None, **kwargs):
        """
        Update the contents of a wheel in a generic way. The modifier should
        be a callable which expects a dictionary argument: its keys are
        archive-entry paths, and its values are absolute filesystem paths
        where the contents the corresponding archive entries can be found. The
        modifier is free to change the contents of the files pointed to, add
        new entries and remove entries, before returning. This method will
        extract the entire contents of the wheel to a temporary location, call
        the modifier, and then use the passed (and possibly updated)
        dictionary to write a new wheel. If ``dest_dir`` is specified, the new
        wheel is written there -- otherwise, the original wheel is overwritten.

        The modifier should return True if it updated the wheel, else False.
        This method returns the same value the modifier returns.
        """
        def get_version(path_map, info_dir):
            # Look up the distribution version from the extracted metadata
            # (modern METADATA_FILENAME first, legacy PKG-INFO as fallback).
            version = path = None
            key = '%s/%s' % (info_dir, METADATA_FILENAME)
            if key not in path_map:
                key = '%s/PKG-INFO' % info_dir
            if key in path_map:
                path = path_map[key]
                version = Metadata(path=path).version
            return version, path
        def update_version(version, path):
            # Bump (or add) a PEP 440 local version segment to mark the
            # rebuilt wheel as changed; non-compliant versions are left alone.
            updated = None
            try:
                v = NormalizedVersion(version)
                i = version.find('-')
                if i < 0:
                    updated = '%s+1' % version
                else:
                    parts = [int(s) for s in version[i + 1:].split('.')]
                    parts[-1] += 1
                    updated = '%s+%s' % (version[:i],
                                         '.'.join(str(i) for i in parts))
            except UnsupportedVersionError:
                logger.debug('Cannot update non-compliant (PEP-440) '
                             'version %r', version)
            if updated:
                md = Metadata(path=path)
                md.version = updated
                legacy = not path.endswith(METADATA_FILENAME)
                md.write(path=path, legacy=legacy)
                logger.debug('Version updated from %r to %r', version,
                             updated)
        pathname = os.path.join(self.dirname, self.filename)
        name_ver = '%s-%s' % (self.name, self.version)
        info_dir = '%s.dist-info' % name_ver
        record_name = posixpath.join(info_dir, 'RECORD')
        with tempdir() as workdir:
            with ZipFile(pathname, 'r') as zf:
                path_map = {}
                for zinfo in zf.infolist():
                    arcname = zinfo.filename
                    if isinstance(arcname, text_type):
                        u_arcname = arcname
                    else:
                        u_arcname = arcname.decode('utf-8')
                    # RECORD is regenerated on rebuild, so don't extract it.
                    if u_arcname == record_name:
                        continue
                    # Reject path traversal attempts.
                    if '..' in u_arcname:
                        raise DistlibException('invalid entry in '
                                               'wheel: %r' % u_arcname)
                    zf.extract(zinfo, workdir)
                    path = os.path.join(workdir, convert_path(u_arcname))
                    path_map[u_arcname] = path
            # Remember the version.
            original_version, _ = get_version(path_map, info_dir)
            # Files extracted. Call the modifier.
            modified = modifier(path_map, **kwargs)
            if modified:
                # Something changed - need to build a new wheel.
                current_version, path = get_version(path_map, info_dir)
                if current_version and (current_version == original_version):
                    # Add or update local version to signify changes.
                    update_version(current_version, path)
                # Decide where the new wheel goes.
                if dest_dir is None:
                    fd, newpath = tempfile.mkstemp(suffix='.whl',
                                                   prefix='wheel-update-',
                                                   dir=workdir)
                    os.close(fd)
                else:
                    if not os.path.isdir(dest_dir):
                        raise DistlibException('Not a directory: %r' % dest_dir)
                    newpath = os.path.join(dest_dir, self.filename)
                archive_paths = list(path_map.items())
                distinfo = os.path.join(workdir, info_dir)
                info = distinfo, info_dir
                self.write_records(info, workdir, archive_paths)
                self.build_zip(newpath, archive_paths)
                if dest_dir is None:
                    shutil.copyfile(newpath, pathname)
        return modified
def compatible_tags():
    """
    Return (pyver, abi, arch) tuples compatible with this Python.

    :return: a set of tag triples; includes older minor versions of the
             same major release and, on OS X, older fat/universal
             architecture variants.
    """
    versions = [VER_SUFFIX]
    major = VER_SUFFIX[0]
    # Wheels built for older minor versions of this major release are
    # also acceptable.
    for minor in range(sys.version_info[1] - 1, -1, -1):
        versions.append(''.join([major, str(minor)]))
    abis = []
    for suffix, _, _ in imp.get_suffixes():
        if suffix.startswith('.abi'):
            abis.append(suffix.split('.', 2)[1])
    abis.sort()
    if ABI != 'none':
        abis.insert(0, ABI)
    abis.append('none')
    result = []
    arches = [ARCH]
    if sys.platform == 'darwin':
        # Use a raw string for the regex: the previous '(\w+)_...' form
        # contains invalid escape sequences on modern Python.
        m = re.match(r'(\w+)_(\d+)_(\d+)_(\w+)$', ARCH)
        if m:
            name, major, minor, arch = m.groups()
            minor = int(minor)
            matches = [arch]
            if arch in ('i386', 'ppc'):
                matches.append('fat')
            if arch in ('i386', 'ppc', 'x86_64'):
                matches.append('fat3')
            if arch in ('ppc64', 'x86_64'):
                matches.append('fat64')
            if arch in ('i386', 'x86_64'):
                matches.append('intel')
            if arch in ('i386', 'x86_64', 'intel', 'ppc', 'ppc64'):
                matches.append('universal')
            # Older OS X releases of each compatible arch variant also work.
            while minor >= 0:
                for match in matches:
                    s = '%s_%s_%s_%s' % (name, major, minor, match)
                    if s != ARCH:   # already there
                        arches.append(s)
                minor -= 1
    # Most specific - our Python version, ABI and arch
    for abi in abis:
        for arch in arches:
            result.append((''.join((IMP_PREFIX, versions[0])), abi, arch))
    # where no ABI / arch dependency, but IMP_PREFIX dependency
    for i, version in enumerate(versions):
        result.append((''.join((IMP_PREFIX, version)), 'none', 'any'))
        if i == 0:
            result.append((''.join((IMP_PREFIX, version[0])), 'none', 'any'))
    # no IMP_PREFIX, ABI or arch dependency
    for i, version in enumerate(versions):
        result.append((''.join(('py', version)), 'none', 'any'))
        if i == 0:
            result.append((''.join(('py', version[0])), 'none', 'any'))
    return set(result)
# Compute the compatibility tag set once at import time, then drop the
# helper so only the cached result remains.
COMPATIBLE_TAGS = compatible_tags()
del compatible_tags
def is_compatible(wheel, tags=None):
    """
    Return True if *wheel* (a Wheel instance or a wheel filename) matches
    any of the given (pyver, abi, arch) triples; *tags* defaults to the
    COMPATIBLE_TAGS computed for the running interpreter.
    """
    if not isinstance(wheel, Wheel):
        wheel = Wheel(wheel)    # assume it's a filename
    if tags is None:
        tags = COMPATIBLE_TAGS
    for ver, abi, arch in tags:
        if ver in wheel.pyver and abi in wheel.abi and arch in wheel.arch:
            return True
    return False
| mit |
AndroidOpenDevelopment/android_external_chromium_org | chrome/common/extensions/docs/server2/permissions_data_source_test.py | 10 | 4881 | #!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
from operator import itemgetter
import unittest
from extensions_paths import CHROME_EXTENSIONS
from permissions_data_source import PermissionsDataSource
from server_instance import ServerInstance
from third_party.handlebar import Handlebar
from test_file_system import TestFileSystem
# Fixture: contents of _permission_features.json used by the test below.
_PERMISSION_FEATURES = {
  # This will appear for extensions with a description as defined in the
  # permissions.json file.
  'activeTab': {
    'extension_types': ['extension'],
  },
  # This will appear for apps and extensions with an auto-generated description
  # since the entry appears in _api_features.json.
  'alarms': {
    'extension_types': ['platform_app', 'extension'],
  },
  # This won't appear for anything since there's no entry in permissions.json
  # and it's not an API.
  'audioCapture': {
    'extension_types': ['platform_app'],
  },
  # This won't appear for anything because it's private.
  'commandLinePrivate': {
    'extension_types': ['platform_app', 'extension']
  },
  # This will only appear for apps with an auto-generated description because
  # it's an API.
  'cookies': {
    'extension_types': ['platform_app']
  },
}
# Fixture: contents of permissions.json (custom names/anchors/partials).
_PERMISSIONS_JSON = {
  # This will appear for both apps and extensions with a custom description,
  # anchor, etc.
  'host-permissions': {
    'anchor': 'custom-anchor',
    'extension_types': ['platform_app', 'extension'],
    'literal_name': True,
    'name': 'match pattern',
    'partial': 'permissions/host_permissions.html',
  },
  # A custom 'partial' here overrides the default partial.
  'activeTab': {
    'partial': 'permissions/active_tab.html'
  },
}
# Fixture: template partials rendered into the 'description' fields.
_PERMISSIONS_PARTIALS = {
  'active_tab.html': 'active tab',
  'host_permissions.html': 'host permissions',
  'generic_description.html': 'generic description',
}
# Fixture: _api_features.json entries that tie APIs to permissions.
_API_FEATURES = {
  'alarms': {
    'dependencies': ['permission:alarms']
  },
  'cookies': {
    'dependencies': ['permission:cookies']
  },
}
class PermissionsDataSourceTest(unittest.TestCase):
  """Checks PermissionsDataSource against the fixture feature/permission
  files defined above."""

  def testCreatePermissionsDataSource(self):
    """Verifies that 'declare_extensions' and 'declare_apps' expose the
    expected permission entries, descriptions and platform lists."""
    expected_extensions = [
      {
        'anchor': 'custom-anchor',
        'description': 'host permissions',
        'literal_name': True,
        'name': 'match pattern',
        'platforms': ['apps', 'extensions']
      },
      {
        'anchor': 'activeTab',
        'description': 'active tab',
        'name': 'activeTab',
        'platforms': ['extensions'],
      },
      {
        'anchor': 'alarms',
        'description': 'generic description',
        'name': 'alarms',
        'platforms': ['apps', 'extensions'],
      },
    ]
    expected_apps = [
      {
        'anchor': 'custom-anchor',
        'description': 'host permissions',
        'literal_name': True,
        'name': 'match pattern',
        'platforms': ['apps', 'extensions'],
      },
      {
        'anchor': 'alarms',
        'description': 'generic description',
        'name': 'alarms',
        'platforms': ['apps', 'extensions'],
      },
      {
        'anchor': 'cookies',
        'description': 'generic description',
        'name': 'cookies',
        'platforms': ['apps'],
      },
    ]
    # In-memory file system mirroring the chrome extensions docs layout.
    test_file_system = TestFileSystem({
      'api': {
        '_api_features.json': json.dumps(_API_FEATURES),
        '_manifest_features.json': '{}',
        '_permission_features.json': json.dumps(_PERMISSION_FEATURES),
      },
      'docs': {
        'templates': {
          'json': {
            'manifest.json': '{}',
            'permissions.json': json.dumps(_PERMISSIONS_JSON),
          },
          'private': {
            'permissions': _PERMISSIONS_PARTIALS
          },
        }
      }
    }, relative_to=CHROME_EXTENSIONS)
    permissions_data_source = PermissionsDataSource(
        ServerInstance.ForTest(test_file_system), None)
    actual_extensions = permissions_data_source.get('declare_extensions')
    actual_apps = permissions_data_source.get('declare_apps')
    # Normalise all test data.
    # - Sort keys. Since the tests don't use OrderedDicts we can't make
    #   assertions about the order, which is unfortunate. Oh well.
    # - Render all of the Handlebar instances so that we can use ==.
    #   Handlebars don't implement __eq__, but they probably should.
    for lst in (actual_apps, actual_extensions,
                expected_apps, expected_extensions):
      lst.sort(key=itemgetter('name'))
      for mapping in lst:
        for key, value in mapping.iteritems():
          if isinstance(value, Handlebar):
            mapping[key] = value.Render().text
    self.assertEqual(expected_extensions, actual_extensions)
    self.assertEqual(expected_apps, actual_apps)
# Allow running this suite directly: python permissions_data_source_test.py
if __name__ == '__main__':
  unittest.main()
| bsd-3-clause |
StephenWeber/ansible | contrib/inventory/zone.py | 57 | 1489 | #!/usr/bin/env python
# (c) 2015, Dagobert Michelsen <dam@baltic-online.de>
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from subprocess import Popen, PIPE
import sys
import json

# Build the Ansible dynamic-inventory structure from the local zone list.
inventory = {'all': {'hosts': [], 'vars': {'ansible_connection': 'zone'}}}

zoneadm = Popen(['zoneadm', 'list', '-ip'], stdout=PIPE, universal_newlines=True)
for line in zoneadm.stdout.readlines():
    # Example: 1:work:running:/zones/work:3126dc59-9a07-4829-cde9-a816e4c5040e:native:shared
    fields = line.split(':')
    # The global zone is the host itself, not a managed guest.
    if fields[1] != 'global':
        inventory['all']['hosts'].append(fields[1])

if len(sys.argv) == 2 and sys.argv[1] == '--list':
    print(json.dumps(inventory))
elif len(sys.argv) == 3 and sys.argv[1] == '--host':
    # Per-host variables are identical for every zone.
    print(json.dumps({'ansible_connection': 'zone'}))
else:
    sys.stderr.write("Need an argument, either --list or --host <host>\n")
| gpl-3.0 |
AICP/external_chromium_org | tools/telemetry/telemetry/page/record_wpr_unittest.py | 11 | 1462 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
from telemetry.page import page as page_module
from telemetry.page import record_wpr
class TestPage(page_module.Page):
  """Test double for a Page that records which Run* hooks were invoked."""

  def __init__(self):
    super(TestPage, self).__init__(url='file://foo.html',
                                   page_set=None,
                                   base_dir=None)
    # Flags flipped by the Run* hooks below so tests can assert on them.
    self.run_navigate = self.run_foo = self.run_bar = False

  def RunNavigateSteps(self, _):
    self.run_navigate = True

  def RunFoo(self, _):
    self.run_foo = True

  def RunBar(self, _):
    self.run_bar = True
class FakeFooMeasurement(object):
  """Stub measurement whose page action is RunFoo."""

  def __init__(self):
    self.action_name_to_run = "RunFoo"
class FakeBarMeasurement(object):
  """Stub measurement whose page action is RunBar."""

  def __init__(self):
    self.action_name_to_run = "RunBar"
class FakeTab(object):
  """Minimal tab stub; waiting for document-ready is a no-op."""

  def WaitForDocumentReadyStateToBeComplete(self):
    return None
class RecordWprUnitTest(unittest.TestCase):
  """Checks that RecordPage drives navigation plus every measurement action."""

  def setUp(self):
    super(RecordWprUnitTest, self).setUp()

  def testRunActions(self):
    page = TestPage()
    measurements = {1: FakeFooMeasurement, 2: FakeBarMeasurement}
    record_runner = record_wpr.RecordPage(measurements)
    record_runner.RunPage(page, tab=FakeTab(), results=None)
    # Navigation and both measurement actions must all have run.
    self.assertTrue(page.run_navigate)
    self.assertTrue(page.run_foo)
    self.assertTrue(page.run_bar)
| bsd-3-clause |
jasonamyers/gabbi | docs/source/conf.py | 2 | 8150 | # -*- coding: utf-8 -*-
#
# Gabbi documentation build configuration file, created by
# sphinx-quickstart on Wed Dec 31 17:07:32 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# Make the gabbi package importable so sphinx.ext.autodoc can find it.
docroot = os.path.abspath('../..')
sys.path.insert(0, docroot)
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Gabbi'
copyright = u'2014-2015, Chris Dent'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# Both are left empty deliberately, so no version string is rendered.
#
# The short X.Y version.
version = ''
# The full version, including alpha/beta/rc tags.
release = ''
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Gabbidoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'Gabbi.tex', u'Gabbi Documentation',
u'Chris Dent', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'gabbi', u'Gabbi Documentation',
[u'Chris Dent'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'Gabbi', u'Gabbi Documentation',
u'Chris Dent', 'Gabbi', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| apache-2.0 |
kcarscad/multi-game-pygame | Sudoku/Sudoku.py | 1 | 7844 | # filename: 'Sudoku.py'
# author: Keith Carscadden
# date: 6/1/14
# purpose: Sudoku game, uses 3rd party file 'generator.py'
import math,sys,os,time,pygame as pg
from pygame.locals import *
from random import randint
from ScoreScreen import ScoreScreen
import Fade
from os import environ
environ['SDL_VIDEO_CENTERED'] = '1'
# Colour (RGB tuples) and board-layout constants.
BG = (255,255,255)  # background: white
C1 = (0,0,0)        # primary ink: black (locked/initial digits, grid lines)
C2 = (120,120,120)  # secondary ink: grey (player-entered digits)
HL = (255,255,0)    # highlight yellow  # NOTE(review): not referenced in this chunk
N = 9               # board is N x N cells
T = 3               # thickness of the major (3x3 box) grid lines, in pixels
# random number, 1-N (9)
def rnd():
    """Return a uniformly random cell value between 1 and N inclusive."""
    low, high = 1, N
    return randint(low, high)
# draw the number at the clicked position
def drawNum(n,pos,option=0):
    """Draw value *n* in board cell *pos* (col, row) and record it in ``board``.

    option 0: normal player entry (grey; refused if the cell is locked)
    option 1: initial puzzle digit -- locks the cell and draws it black
    option 2: win repaint -- draws in green, even over locked cells
    A value of 0 for *n* erases the cell instead of drawing a digit.
    """
    x,y = pos
    c = C2  # default ink: grey, i.e. a player-entered digit
    if not locked[x][y] or option==2:
        if option==1:
            locked[x][y] = 1  # initial digits can never be overwritten later
            c = C1
        elif option==2:
            c = (0,255,0)  # green for the win animation
        # blank the cell's rectangle before drawing the new digit
        i,j = coords[pos]
        k,l = fontSize
        pg.draw.rect(screen,BG,[i,j,k,l])
        # draw num (skipped when n == 0, which means "erase only")
        if n:
            label = font.render(str(n),1,c)
            screen.blit(label,coords[pos])
        board[x][y] = n
# draw grid
def drawLines(w,h):
    """Draw the 9x9 grid on ``screen``: thin cell lines, with every third
    line (the 3x3 box boundaries) drawn T pixels thick.

    w, h: pixel width/height of the board area.
    """
    # Explicit loops instead of list comprehensions: the original built and
    # discarded throwaway lists purely for the pg.draw.line side effects.
    for i in range(N + 1):
        thickness = T if i % 3 == 0 else 1
        # vertical line i, then horizontal line i (all the same colour, so
        # draw order does not affect the final pixels)
        pg.draw.line(screen, C1, [i * w / N, 0], [i * w / N, h], thickness)
        pg.draw.line(screen, C1, [0, i * h / N], [w, i * h / N], thickness)
# blink waiting for input
def blink(board,pos,oldPos,dark):
    """Toggle the text-cursor caret in the currently selected cell.

    Called on a timer from the main loop.  *dark* selects whether the caret
    is drawn in ink (C1) or erased (BG) on this tick.  When the selection has
    moved, the previously selected cell is erased and its digit redrawn in
    the appropriate colour.  Assumes *pos* is a (col, row) tuple by the time
    blinking starts (the main loop only enables blinking after a click).
    """
    # make sure it changed, neither are blank, and the old number wasn't taken
    # will also be called if a locked tile == currently clicked (fills in old one)
    if pos != oldPos and pos and oldPos:
        # erase the old cell, then restore its digit in the right colour
        a,b = coords[oldPos]
        c,d = fontSize
        pg.draw.rect(screen,BG,[a,b,c,d])
        if board[oldPos[0]][oldPos[1]]:
            # grey for player digits, black for locked/initial digits
            c=C2 if not locked[oldPos[0]][oldPos[1]] else C1
            label = font.render(str(board[oldPos[0]][oldPos[1]]),1,c)
            screen.blit(label,coords[oldPos])
    if not locked[pos[0]][pos[1]]:
        # draw blinking line (a vertical caret centred in the cell)
        c = C1 if dark else BG
        x1 = x2 = coords[pos][0] + fontSize[0]/2.0
        y1 = coords[pos][1]+3
        y2 = coords[pos][1] + fontSize[1]-3
        pg.draw.line(screen,c,[x1,y1],[x2,y2])
        # redraw if the number was already picked, so the caret never hides it
        num = board[pos[0]][pos[1]]
        if num :
            label = font.render(str(num),1,C2)
            screen.blit(label,coords[pos])
def checkWin(grid=None):
    """Return 1 if the sudoku grid is completely and correctly filled, else 0.

    grid: optional 9x9 list of lists of ints; defaults to the module-level
          ``board`` (backward compatible with the original no-argument form,
          and makes the function testable in isolation).

    A grid wins when no cell is 0 and every row, column and 3x3 box holds
    nine distinct values.  Returns ints (0/1), matching the original.
    """
    if grid is None:
        grid = board  # fall back to the live game board
    # check if the board is full -- an incomplete board can never win
    if any(0 in row for row in grid):
        return 0
    # columns, via transposition
    cols = [list(col) for col in zip(*grid)]
    # 3x3 boxes, flattened
    boxes = [[grid[3 * i + k][3 * j + l] for k in range(3) for l in range(3)]
             for i in range(3) for j in range(3)]
    # check for any duplicate numbers in rows, cols, boxes
    for unit in list(grid) + cols + boxes:
        if len(unit) != len(set(unit)):
            return 0
    return 1
def win():
    """Repaint every cell of the board in the 'won' style (option 2 = green)."""
    for row_idx, row in enumerate(board):
        for col_idx, value in enumerate(row):
            drawNum(value, (row_idx, col_idx), 2)
############ MAIN ############
def main():
    """Run the Sudoku game: set up pygame, generate a puzzle, and loop on
    events until the player quits or fills the board correctly.

    Publishes the game state (screen, board, locked, coords, fonts, timing)
    as module globals so the drawing helpers above can reach it.
    """
    global screen,board,locked,coords,puzzle,font,fontSize
    # Fix: the bare relative import fails with ImportError when this file is
    # run directly as a script (which the __main__ guard supports), because
    # there is no parent package.  Fall back to a plain import in that case.
    try:
        from . import generator
    except ImportError:
        import generator
    puzzle = generator.main()
    global startTime,won
    startTime = time.time()
    won = 0
    pg.init()
    size = width,height = int(200/2*N),int(200/2*N)
    xunit,yunit = int(width/N),int(height/N)
    screen = pg.display.set_mode((width+int(T/2),height+int(T/2)))  # +T to account for thick lines
    pg.display.set_caption('Sudoku')
    screen.fill(BG)
    drawLines(width,height)
    # clock for ticking, font
    clock = pg.time.Clock()
    font = pg.font.SysFont('Arial',63)
    fontSize = font.size('1')
    # create coordinates dictionary
    # takes a tuple of coords (1-9,1-9)
    # ready to use with drawing fonts/rects
    coords = {}
    for i,y in enumerate(range(0,height,yunit)):
        for j,x in enumerate(range(0,width,xunit)):
            a = x + xunit/2.0 - fontSize[0]/2.0 + int(j/3)  # x + halfunitx - halfselfx + accountForThickLines
            b = y + yunit/2.0 - fontSize[1]/2.0 + int(i/3)  # CHANGE LAST COMPONENT FOR MINOR CHANGES IF NEEDED, WITH SIZING
            coords[(j,i)] = a,b
    # board:  current cell values (0 = empty); board[2][0] == 3rd from the left, top row
    # locked: which numbers are fixed (there from start, maybe other use later)
    board = [[0]*N for i in range(N)]
    locked = [[0]*N for i in range(N)]
    # initial board: draw and lock the generated puzzle's given digits
    for i in range(9):
        for j in range(9):
            if puzzle[i][j]:
                drawNum(puzzle[i][j],(i,j),1)
    Fade.fadein(screen.copy(),screen)
    pg.display.flip()
    running = darkBlinkingLine = True  # main loop : caret colour phase
    blinking = picked = False          # blinking caret active : a number was entered at pos
    pos=oldPos=c=0
    while running:
        # event handling
        for ev in pg.event.get():
            # keep track of clicks, blinking
            if ev.type == MOUSEBUTTONDOWN:
                oldPos,pos = pos,(int(ev.pos[0]/xunit),int(ev.pos[1]/yunit))
                blinking = True
                c,darkBlinkingLine=79,1  # c=79 forces a caret redraw on the next tick
                picked = False
                # for clicking a little bit too far (clamp to the board)
                if pos[0] > 8:
                    pos = (8,pos[1])
                if pos[1] > 8:
                    pos = (pos[0],8)
            # quit via keyboard
            elif ev.type == KEYDOWN and (ev.key == K_ESCAPE or ev.key == K_q):
                running = False
            elif ev.type == QUIT:
                Fade.fadeout(screen.copy(),screen,0.0,1.0)
                pg.quit()
                sys.exit()
            # draw number if number pressed, mouse clicked
            if ev.type == KEYDOWN:
                # digits '1'..'9' (keycodes 49-57)
                if 49 <= ev.key <= 57 and not picked:
                    picked = True
                    blinking = False
                    drawNum(ev.key-48,pos)  # -48 to map the keycode to 1-9
                    if checkWin():
                        win()
                        won = 1
                elif ev.key == K_BACKSPACE:
                    drawNum(0,pos)  # erase the cell
                # arrow keys to navigate around
                if (blinking or picked) and ev.key in [K_UP,K_DOWN,K_LEFT,K_RIGHT]:
                    drawNum(board[pos[0]][pos[1]],pos)  # redraw current cell without the caret
                    oldPos=pos
                    c,darkBlinkingLine=79,1
                    blinking,picked=True,False
                    # position changing
                    if ev.key == K_UP:
                        pos = (pos[0],pos[1]-1)
                    elif ev.key == K_DOWN:
                        pos = (pos[0],pos[1]+1)
                    elif ev.key == K_RIGHT:
                        pos = (pos[0]+1,pos[1])
                    elif ev.key == K_LEFT:
                        pos = (pos[0]-1,pos[1])
                    # correct going past allowed values
                    if pos[0] > 8:
                        pos = (8,pos[1])
                    elif pos[0] < 0:
                        pos = (0,pos[1])
                    if pos[1] > 8:
                        pos = (pos[0],8)
                    elif pos[1] < 0:
                        pos = (pos[0],0)
        # for blinking: toggle the caret every 80 ticks
        if blinking:
            c+=1
            if c%80==0:
                blink(board,pos,oldPos,darkBlinkingLine)
                darkBlinkingLine = 0 if darkBlinkingLine else 1
                c=0
        # update, regulate fps for blinking
        pg.display.flip()
        clock.tick(100)
    # show the score screen (side effect of construction); the unused
    # 'score'/'sc' locals from the original were dropped
    ScoreScreen(screen.copy(),3,time=time.time()-startTime,win=won)
# Start the game only when executed as a script, not on import.
if __name__ == "__main__":
    main()
| mit |
foodszhang/kbengine | kbe/res/scripts/common/Lib/test/test_float.py | 80 | 58909 |
import unittest, struct
import os
import sys
from test import support
import math
from math import isinf, isnan, copysign, ldexp
import operator
import random, fractions
# IEEE-754 special values used throughout the tests below.
INF = float("inf")
NAN = float("nan")
# Skip decorators for platforms whose float type lacks the CPython-internal
# __getformat__/__setformat__ hooks.
have_getformat = hasattr(float, "__getformat__")
requires_getformat = unittest.skipUnless(have_getformat,
                                         "requires __getformat__")
requires_setformat = unittest.skipUnless(hasattr(float, "__setformat__"),
                                         "requires __setformat__")
#locate file with float format test values
test_dir = os.path.dirname(__file__) or os.curdir
format_testfile = os.path.join(test_dir, 'formatfloat_testcases.txt')
class GeneralFloatCases(unittest.TestCase):
def test_float(self):
self.assertEqual(float(3.14), 3.14)
self.assertEqual(float(314), 314.0)
self.assertEqual(float(" 3.14 "), 3.14)
self.assertEqual(float(b" 3.14 "), 3.14)
self.assertRaises(ValueError, float, " 0x3.1 ")
self.assertRaises(ValueError, float, " -0x3.p-1 ")
self.assertRaises(ValueError, float, " +0x3.p-1 ")
self.assertRaises(ValueError, float, "++3.14")
self.assertRaises(ValueError, float, "+-3.14")
self.assertRaises(ValueError, float, "-+3.14")
self.assertRaises(ValueError, float, "--3.14")
self.assertRaises(ValueError, float, ".nan")
self.assertRaises(ValueError, float, "+.inf")
self.assertRaises(ValueError, float, ".")
self.assertRaises(ValueError, float, "-.")
self.assertRaises(ValueError, float, b"-")
self.assertRaises(TypeError, float, {})
self.assertRaisesRegex(TypeError, "not 'dict'", float, {})
# Lone surrogate
self.assertRaises(UnicodeEncodeError, float, '\uD8F0')
# check that we don't accept alternate exponent markers
self.assertRaises(ValueError, float, "-1.7d29")
self.assertRaises(ValueError, float, "3D-14")
self.assertEqual(float(" \u0663.\u0661\u0664 "), 3.14)
self.assertEqual(float("\N{EM SPACE}3.14\N{EN SPACE}"), 3.14)
# extra long strings should not be a problem
float(b'.' + b'1'*1000)
float('.' + '1'*1000)
def test_error_message(self):
testlist = ('\xbd', '123\xbd', ' 123 456 ')
for s in testlist:
try:
float(s)
except ValueError as e:
self.assertIn(s.strip(), e.args[0])
else:
self.fail("Expected int(%r) to raise a ValueError", s)
@support.run_with_locale('LC_NUMERIC', 'fr_FR', 'de_DE')
def test_float_with_comma(self):
# set locale to something that doesn't use '.' for the decimal point
# float must not accept the locale specific decimal point but
# it still has to accept the normal python syntax
import locale
if not locale.localeconv()['decimal_point'] == ',':
self.skipTest('decimal_point is not ","')
self.assertEqual(float(" 3.14 "), 3.14)
self.assertEqual(float("+3.14 "), 3.14)
self.assertEqual(float("-3.14 "), -3.14)
self.assertEqual(float(".14 "), .14)
self.assertEqual(float("3. "), 3.0)
self.assertEqual(float("3.e3 "), 3000.0)
self.assertEqual(float("3.2e3 "), 3200.0)
self.assertEqual(float("2.5e-1 "), 0.25)
self.assertEqual(float("5e-1"), 0.5)
self.assertRaises(ValueError, float, " 3,14 ")
self.assertRaises(ValueError, float, " +3,14 ")
self.assertRaises(ValueError, float, " -3,14 ")
self.assertRaises(ValueError, float, " 0x3.1 ")
self.assertRaises(ValueError, float, " -0x3.p-1 ")
self.assertRaises(ValueError, float, " +0x3.p-1 ")
self.assertEqual(float(" 25.e-1 "), 2.5)
self.assertAlmostEqual(float(" .25e-1 "), .025)
def test_floatconversion(self):
# Make sure that calls to __float__() work properly
class Foo0:
def __float__(self):
return 42.
class Foo1(object):
def __float__(self):
return 42.
class Foo2(float):
def __float__(self):
return 42.
class Foo3(float):
def __new__(cls, value=0.):
return float.__new__(cls, 2*value)
def __float__(self):
return self
class Foo4(float):
def __float__(self):
return 42
# Issue 5759: __float__ not called on str subclasses (though it is on
# unicode subclasses).
class FooStr(str):
def __float__(self):
return float(str(self)) + 1
self.assertAlmostEqual(float(Foo0()), 42.)
self.assertAlmostEqual(float(Foo1()), 42.)
self.assertAlmostEqual(float(Foo2()), 42.)
self.assertAlmostEqual(float(Foo3(21)), 42.)
self.assertRaises(TypeError, float, Foo4(42))
self.assertAlmostEqual(float(FooStr('8')), 9.)
def test_is_integer(self):
self.assertFalse((1.1).is_integer())
self.assertTrue((1.).is_integer())
self.assertFalse(float("nan").is_integer())
self.assertFalse(float("inf").is_integer())
def test_floatasratio(self):
for f, ratio in [
(0.875, (7, 8)),
(-0.875, (-7, 8)),
(0.0, (0, 1)),
(11.5, (23, 2)),
]:
self.assertEqual(f.as_integer_ratio(), ratio)
for i in range(10000):
f = random.random()
f *= 10 ** random.randint(-100, 100)
n, d = f.as_integer_ratio()
self.assertEqual(float(n).__truediv__(d), f)
R = fractions.Fraction
self.assertEqual(R(0, 1),
R(*float(0.0).as_integer_ratio()))
self.assertEqual(R(5, 2),
R(*float(2.5).as_integer_ratio()))
self.assertEqual(R(1, 2),
R(*float(0.5).as_integer_ratio()))
self.assertEqual(R(4728779608739021, 2251799813685248),
R(*float(2.1).as_integer_ratio()))
self.assertEqual(R(-4728779608739021, 2251799813685248),
R(*float(-2.1).as_integer_ratio()))
self.assertEqual(R(-2100, 1),
R(*float(-2100.0).as_integer_ratio()))
self.assertRaises(OverflowError, float('inf').as_integer_ratio)
self.assertRaises(OverflowError, float('-inf').as_integer_ratio)
self.assertRaises(ValueError, float('nan').as_integer_ratio)
def test_float_containment(self):
floats = (INF, -INF, 0.0, 1.0, NAN)
for f in floats:
self.assertIn(f, [f])
self.assertIn(f, (f,))
self.assertIn(f, {f})
self.assertIn(f, {f: None})
self.assertEqual([f].count(f), 1, "[].count('%r') != 1" % f)
self.assertIn(f, floats)
for f in floats:
# nonidentical containers, same type, same contents
self.assertTrue([f] == [f], "[%r] != [%r]" % (f, f))
self.assertTrue((f,) == (f,), "(%r,) != (%r,)" % (f, f))
self.assertTrue({f} == {f}, "{%r} != {%r}" % (f, f))
self.assertTrue({f : None} == {f: None}, "{%r : None} != "
"{%r : None}" % (f, f))
# identical containers
l, t, s, d = [f], (f,), {f}, {f: None}
self.assertTrue(l == l, "[%r] not equal to itself" % f)
self.assertTrue(t == t, "(%r,) not equal to itself" % f)
self.assertTrue(s == s, "{%r} not equal to itself" % f)
self.assertTrue(d == d, "{%r : None} not equal to itself" % f)
def assertEqualAndEqualSign(self, a, b):
# fail unless a == b and a and b have the same sign bit;
# the only difference from assertEqual is that this test
# distinguishes -0.0 and 0.0.
self.assertEqual((a, copysign(1.0, a)), (b, copysign(1.0, b)))
@support.requires_IEEE_754
def test_float_mod(self):
# Check behaviour of % operator for IEEE 754 special cases.
# In particular, check signs of zeros.
mod = operator.mod
self.assertEqualAndEqualSign(mod(-1.0, 1.0), 0.0)
self.assertEqualAndEqualSign(mod(-1e-100, 1.0), 1.0)
self.assertEqualAndEqualSign(mod(-0.0, 1.0), 0.0)
self.assertEqualAndEqualSign(mod(0.0, 1.0), 0.0)
self.assertEqualAndEqualSign(mod(1e-100, 1.0), 1e-100)
self.assertEqualAndEqualSign(mod(1.0, 1.0), 0.0)
self.assertEqualAndEqualSign(mod(-1.0, -1.0), -0.0)
self.assertEqualAndEqualSign(mod(-1e-100, -1.0), -1e-100)
self.assertEqualAndEqualSign(mod(-0.0, -1.0), -0.0)
self.assertEqualAndEqualSign(mod(0.0, -1.0), -0.0)
self.assertEqualAndEqualSign(mod(1e-100, -1.0), -1.0)
self.assertEqualAndEqualSign(mod(1.0, -1.0), -0.0)
@support.requires_IEEE_754
def test_float_pow(self):
# test builtin pow and ** operator for IEEE 754 special cases.
# Special cases taken from section F.9.4.4 of the C99 specification
for pow_op in pow, operator.pow:
# x**NAN is NAN for any x except 1
self.assertTrue(isnan(pow_op(-INF, NAN)))
self.assertTrue(isnan(pow_op(-2.0, NAN)))
self.assertTrue(isnan(pow_op(-1.0, NAN)))
self.assertTrue(isnan(pow_op(-0.5, NAN)))
self.assertTrue(isnan(pow_op(-0.0, NAN)))
self.assertTrue(isnan(pow_op(0.0, NAN)))
self.assertTrue(isnan(pow_op(0.5, NAN)))
self.assertTrue(isnan(pow_op(2.0, NAN)))
self.assertTrue(isnan(pow_op(INF, NAN)))
self.assertTrue(isnan(pow_op(NAN, NAN)))
# NAN**y is NAN for any y except +-0
self.assertTrue(isnan(pow_op(NAN, -INF)))
self.assertTrue(isnan(pow_op(NAN, -2.0)))
self.assertTrue(isnan(pow_op(NAN, -1.0)))
self.assertTrue(isnan(pow_op(NAN, -0.5)))
self.assertTrue(isnan(pow_op(NAN, 0.5)))
self.assertTrue(isnan(pow_op(NAN, 1.0)))
self.assertTrue(isnan(pow_op(NAN, 2.0)))
self.assertTrue(isnan(pow_op(NAN, INF)))
# (+-0)**y raises ZeroDivisionError for y a negative odd integer
self.assertRaises(ZeroDivisionError, pow_op, -0.0, -1.0)
self.assertRaises(ZeroDivisionError, pow_op, 0.0, -1.0)
# (+-0)**y raises ZeroDivisionError for y finite and negative
# but not an odd integer
self.assertRaises(ZeroDivisionError, pow_op, -0.0, -2.0)
self.assertRaises(ZeroDivisionError, pow_op, -0.0, -0.5)
self.assertRaises(ZeroDivisionError, pow_op, 0.0, -2.0)
self.assertRaises(ZeroDivisionError, pow_op, 0.0, -0.5)
# (+-0)**y is +-0 for y a positive odd integer
self.assertEqualAndEqualSign(pow_op(-0.0, 1.0), -0.0)
self.assertEqualAndEqualSign(pow_op(0.0, 1.0), 0.0)
# (+-0)**y is 0 for y finite and positive but not an odd integer
self.assertEqualAndEqualSign(pow_op(-0.0, 0.5), 0.0)
self.assertEqualAndEqualSign(pow_op(-0.0, 2.0), 0.0)
self.assertEqualAndEqualSign(pow_op(0.0, 0.5), 0.0)
self.assertEqualAndEqualSign(pow_op(0.0, 2.0), 0.0)
# (-1)**+-inf is 1
self.assertEqualAndEqualSign(pow_op(-1.0, -INF), 1.0)
self.assertEqualAndEqualSign(pow_op(-1.0, INF), 1.0)
# 1**y is 1 for any y, even if y is an infinity or nan
self.assertEqualAndEqualSign(pow_op(1.0, -INF), 1.0)
self.assertEqualAndEqualSign(pow_op(1.0, -2.0), 1.0)
self.assertEqualAndEqualSign(pow_op(1.0, -1.0), 1.0)
self.assertEqualAndEqualSign(pow_op(1.0, -0.5), 1.0)
self.assertEqualAndEqualSign(pow_op(1.0, -0.0), 1.0)
self.assertEqualAndEqualSign(pow_op(1.0, 0.0), 1.0)
self.assertEqualAndEqualSign(pow_op(1.0, 0.5), 1.0)
self.assertEqualAndEqualSign(pow_op(1.0, 1.0), 1.0)
self.assertEqualAndEqualSign(pow_op(1.0, 2.0), 1.0)
self.assertEqualAndEqualSign(pow_op(1.0, INF), 1.0)
self.assertEqualAndEqualSign(pow_op(1.0, NAN), 1.0)
# x**+-0 is 1 for any x, even if x is a zero, infinity, or nan
self.assertEqualAndEqualSign(pow_op(-INF, 0.0), 1.0)
self.assertEqualAndEqualSign(pow_op(-2.0, 0.0), 1.0)
self.assertEqualAndEqualSign(pow_op(-1.0, 0.0), 1.0)
self.assertEqualAndEqualSign(pow_op(-0.5, 0.0), 1.0)
self.assertEqualAndEqualSign(pow_op(-0.0, 0.0), 1.0)
self.assertEqualAndEqualSign(pow_op(0.0, 0.0), 1.0)
self.assertEqualAndEqualSign(pow_op(0.5, 0.0), 1.0)
self.assertEqualAndEqualSign(pow_op(1.0, 0.0), 1.0)
self.assertEqualAndEqualSign(pow_op(2.0, 0.0), 1.0)
self.assertEqualAndEqualSign(pow_op(INF, 0.0), 1.0)
self.assertEqualAndEqualSign(pow_op(NAN, 0.0), 1.0)
self.assertEqualAndEqualSign(pow_op(-INF, -0.0), 1.0)
self.assertEqualAndEqualSign(pow_op(-2.0, -0.0), 1.0)
self.assertEqualAndEqualSign(pow_op(-1.0, -0.0), 1.0)
self.assertEqualAndEqualSign(pow_op(-0.5, -0.0), 1.0)
self.assertEqualAndEqualSign(pow_op(-0.0, -0.0), 1.0)
self.assertEqualAndEqualSign(pow_op(0.0, -0.0), 1.0)
self.assertEqualAndEqualSign(pow_op(0.5, -0.0), 1.0)
self.assertEqualAndEqualSign(pow_op(1.0, -0.0), 1.0)
self.assertEqualAndEqualSign(pow_op(2.0, -0.0), 1.0)
self.assertEqualAndEqualSign(pow_op(INF, -0.0), 1.0)
self.assertEqualAndEqualSign(pow_op(NAN, -0.0), 1.0)
# x**y defers to complex pow for finite negative x and
# non-integral y.
self.assertEqual(type(pow_op(-2.0, -0.5)), complex)
self.assertEqual(type(pow_op(-2.0, 0.5)), complex)
self.assertEqual(type(pow_op(-1.0, -0.5)), complex)
self.assertEqual(type(pow_op(-1.0, 0.5)), complex)
self.assertEqual(type(pow_op(-0.5, -0.5)), complex)
self.assertEqual(type(pow_op(-0.5, 0.5)), complex)
# x**-INF is INF for abs(x) < 1
self.assertEqualAndEqualSign(pow_op(-0.5, -INF), INF)
self.assertEqualAndEqualSign(pow_op(-0.0, -INF), INF)
self.assertEqualAndEqualSign(pow_op(0.0, -INF), INF)
self.assertEqualAndEqualSign(pow_op(0.5, -INF), INF)
# x**-INF is 0 for abs(x) > 1
self.assertEqualAndEqualSign(pow_op(-INF, -INF), 0.0)
self.assertEqualAndEqualSign(pow_op(-2.0, -INF), 0.0)
self.assertEqualAndEqualSign(pow_op(2.0, -INF), 0.0)
self.assertEqualAndEqualSign(pow_op(INF, -INF), 0.0)
# x**INF is 0 for abs(x) < 1
self.assertEqualAndEqualSign(pow_op(-0.5, INF), 0.0)
self.assertEqualAndEqualSign(pow_op(-0.0, INF), 0.0)
self.assertEqualAndEqualSign(pow_op(0.0, INF), 0.0)
self.assertEqualAndEqualSign(pow_op(0.5, INF), 0.0)
# x**INF is INF for abs(x) > 1
self.assertEqualAndEqualSign(pow_op(-INF, INF), INF)
self.assertEqualAndEqualSign(pow_op(-2.0, INF), INF)
self.assertEqualAndEqualSign(pow_op(2.0, INF), INF)
self.assertEqualAndEqualSign(pow_op(INF, INF), INF)
# (-INF)**y is -0.0 for y a negative odd integer
self.assertEqualAndEqualSign(pow_op(-INF, -1.0), -0.0)
# (-INF)**y is 0.0 for y negative but not an odd integer
self.assertEqualAndEqualSign(pow_op(-INF, -0.5), 0.0)
self.assertEqualAndEqualSign(pow_op(-INF, -2.0), 0.0)
# (-INF)**y is -INF for y a positive odd integer
self.assertEqualAndEqualSign(pow_op(-INF, 1.0), -INF)
# (-INF)**y is INF for y positive but not an odd integer
self.assertEqualAndEqualSign(pow_op(-INF, 0.5), INF)
self.assertEqualAndEqualSign(pow_op(-INF, 2.0), INF)
# INF**y is INF for y positive
self.assertEqualAndEqualSign(pow_op(INF, 0.5), INF)
self.assertEqualAndEqualSign(pow_op(INF, 1.0), INF)
self.assertEqualAndEqualSign(pow_op(INF, 2.0), INF)
# INF**y is 0.0 for y negative
self.assertEqualAndEqualSign(pow_op(INF, -2.0), 0.0)
self.assertEqualAndEqualSign(pow_op(INF, -1.0), 0.0)
self.assertEqualAndEqualSign(pow_op(INF, -0.5), 0.0)
# basic checks not covered by the special cases above
self.assertEqualAndEqualSign(pow_op(-2.0, -2.0), 0.25)
self.assertEqualAndEqualSign(pow_op(-2.0, -1.0), -0.5)
self.assertEqualAndEqualSign(pow_op(-2.0, -0.0), 1.0)
self.assertEqualAndEqualSign(pow_op(-2.0, 0.0), 1.0)
self.assertEqualAndEqualSign(pow_op(-2.0, 1.0), -2.0)
self.assertEqualAndEqualSign(pow_op(-2.0, 2.0), 4.0)
self.assertEqualAndEqualSign(pow_op(-1.0, -2.0), 1.0)
self.assertEqualAndEqualSign(pow_op(-1.0, -1.0), -1.0)
self.assertEqualAndEqualSign(pow_op(-1.0, -0.0), 1.0)
self.assertEqualAndEqualSign(pow_op(-1.0, 0.0), 1.0)
self.assertEqualAndEqualSign(pow_op(-1.0, 1.0), -1.0)
self.assertEqualAndEqualSign(pow_op(-1.0, 2.0), 1.0)
self.assertEqualAndEqualSign(pow_op(2.0, -2.0), 0.25)
self.assertEqualAndEqualSign(pow_op(2.0, -1.0), 0.5)
self.assertEqualAndEqualSign(pow_op(2.0, -0.0), 1.0)
self.assertEqualAndEqualSign(pow_op(2.0, 0.0), 1.0)
self.assertEqualAndEqualSign(pow_op(2.0, 1.0), 2.0)
self.assertEqualAndEqualSign(pow_op(2.0, 2.0), 4.0)
# 1 ** large and -1 ** large; some libms apparently
# have problems with these
self.assertEqualAndEqualSign(pow_op(1.0, -1e100), 1.0)
self.assertEqualAndEqualSign(pow_op(1.0, 1e100), 1.0)
self.assertEqualAndEqualSign(pow_op(-1.0, -1e100), 1.0)
self.assertEqualAndEqualSign(pow_op(-1.0, 1e100), 1.0)
# check sign for results that underflow to 0
self.assertEqualAndEqualSign(pow_op(-2.0, -2000.0), 0.0)
self.assertEqual(type(pow_op(-2.0, -2000.5)), complex)
self.assertEqualAndEqualSign(pow_op(-2.0, -2001.0), -0.0)
self.assertEqualAndEqualSign(pow_op(2.0, -2000.0), 0.0)
self.assertEqualAndEqualSign(pow_op(2.0, -2000.5), 0.0)
self.assertEqualAndEqualSign(pow_op(2.0, -2001.0), 0.0)
self.assertEqualAndEqualSign(pow_op(-0.5, 2000.0), 0.0)
self.assertEqual(type(pow_op(-0.5, 2000.5)), complex)
self.assertEqualAndEqualSign(pow_op(-0.5, 2001.0), -0.0)
self.assertEqualAndEqualSign(pow_op(0.5, 2000.0), 0.0)
self.assertEqualAndEqualSign(pow_op(0.5, 2000.5), 0.0)
self.assertEqualAndEqualSign(pow_op(0.5, 2001.0), 0.0)
# check we don't raise an exception for subnormal results,
# and validate signs. Tests currently disabled, since
# they fail on systems where a subnormal result from pow
# is flushed to zero (e.g. Debian/ia64.)
#self.assertTrue(0.0 < pow_op(0.5, 1048) < 1e-315)
#self.assertTrue(0.0 < pow_op(-0.5, 1048) < 1e-315)
#self.assertTrue(0.0 < pow_op(0.5, 1047) < 1e-315)
#self.assertTrue(0.0 > pow_op(-0.5, 1047) > -1e-315)
#self.assertTrue(0.0 < pow_op(2.0, -1048) < 1e-315)
#self.assertTrue(0.0 < pow_op(-2.0, -1048) < 1e-315)
#self.assertTrue(0.0 < pow_op(2.0, -1047) < 1e-315)
#self.assertTrue(0.0 > pow_op(-2.0, -1047) > -1e-315)
@requires_setformat
class FormatFunctionsTestCase(unittest.TestCase):
    """Tests for the CPython-internal float.__getformat__/__setformat__
    hooks; restores the original formats in tearDown."""

    def setUp(self):
        # remember the platform's real formats so tests can't leak state
        self.save_formats = {'double':float.__getformat__('double'),
                             'float':float.__getformat__('float')}

    def tearDown(self):
        float.__setformat__('double', self.save_formats['double'])
        float.__setformat__('float', self.save_formats['float'])

    def test_getformat(self):
        self.assertIn(float.__getformat__('double'),
                      ['unknown', 'IEEE, big-endian', 'IEEE, little-endian'])
        self.assertIn(float.__getformat__('float'),
                      ['unknown', 'IEEE, big-endian', 'IEEE, little-endian'])
        self.assertRaises(ValueError, float.__getformat__, 'chicken')
        self.assertRaises(TypeError, float.__getformat__, 1)

    def test_setformat(self):
        # can always drop to 'unknown', but may only re-select the format the
        # platform actually uses; anything else must raise ValueError
        for t in 'double', 'float':
            float.__setformat__(t, 'unknown')
            if self.save_formats[t] == 'IEEE, big-endian':
                self.assertRaises(ValueError, float.__setformat__,
                                  t, 'IEEE, little-endian')
            elif self.save_formats[t] == 'IEEE, little-endian':
                self.assertRaises(ValueError, float.__setformat__,
                                  t, 'IEEE, big-endian')
            else:
                self.assertRaises(ValueError, float.__setformat__,
                                  t, 'IEEE, big-endian')
                self.assertRaises(ValueError, float.__setformat__,
                                  t, 'IEEE, little-endian')
            self.assertRaises(ValueError, float.__setformat__,
                              t, 'chicken')
        self.assertRaises(ValueError, float.__setformat__,
                          'chicken', 'unknown')
# Raw IEEE-754 bit patterns for +inf and a quiet NaN, big-endian, with
# little-endian mirrors built by byte reversal; used by the (un)pack tests.
BE_DOUBLE_INF = b'\x7f\xf0\x00\x00\x00\x00\x00\x00'
LE_DOUBLE_INF = bytes(reversed(BE_DOUBLE_INF))
BE_DOUBLE_NAN = b'\x7f\xf8\x00\x00\x00\x00\x00\x00'
LE_DOUBLE_NAN = bytes(reversed(BE_DOUBLE_NAN))
BE_FLOAT_INF = b'\x7f\x80\x00\x00'
LE_FLOAT_INF = bytes(reversed(BE_FLOAT_INF))
BE_FLOAT_NAN = b'\x7f\xc0\x00\x00'
LE_FLOAT_NAN = bytes(reversed(BE_FLOAT_NAN))
# on non-IEEE platforms, attempting to unpack a bit pattern
# representing an infinity or a NaN should raise an exception.
@requires_setformat
class UnknownFormatTestCase(unittest.TestCase):
    """With the float format forced to 'unknown', struct.unpack of
    inf/NaN bit patterns must raise ValueError."""

    def setUp(self):
        # save the real formats, then force both to 'unknown'
        self.save_formats = {'double':float.__getformat__('double'),
                             'float':float.__getformat__('float')}
        float.__setformat__('double', 'unknown')
        float.__setformat__('float', 'unknown')

    def tearDown(self):
        float.__setformat__('double', self.save_formats['double'])
        float.__setformat__('float', self.save_formats['float'])

    def test_double_specials_dont_unpack(self):
        for fmt, data in [('>d', BE_DOUBLE_INF),
                          ('>d', BE_DOUBLE_NAN),
                          ('<d', LE_DOUBLE_INF),
                          ('<d', LE_DOUBLE_NAN)]:
            self.assertRaises(ValueError, struct.unpack, fmt, data)

    def test_float_specials_dont_unpack(self):
        for fmt, data in [('>f', BE_FLOAT_INF),
                          ('>f', BE_FLOAT_NAN),
                          ('<f', LE_FLOAT_INF),
                          ('<f', LE_FLOAT_NAN)]:
            self.assertRaises(ValueError, struct.unpack, fmt, data)
# on an IEEE platform, all we guarantee is that bit patterns
# representing infinities or NaNs do not raise an exception; all else
# is accident (today).
# let's also try to guarantee that -0.0 and 0.0 don't get confused.
class IEEEFormatTestCase(unittest.TestCase):
    """On IEEE-754 platforms, unpacking inf/NaN bit patterns must succeed."""

    @support.requires_IEEE_754
    def test_double_specials_do_unpack(self):
        for fmt, data in [('>d', BE_DOUBLE_INF),
                          ('>d', BE_DOUBLE_NAN),
                          ('<d', LE_DOUBLE_INF),
                          ('<d', LE_DOUBLE_NAN)]:
            struct.unpack(fmt, data)

    @support.requires_IEEE_754
    def test_float_specials_do_unpack(self):
        for fmt, data in [('>f', BE_FLOAT_INF),
                          ('>f', BE_FLOAT_NAN),
                          ('<f', LE_FLOAT_INF),
                          ('<f', LE_FLOAT_NAN)]:
            struct.unpack(fmt, data)
class FormatTestCase(unittest.TestCase):
    """Tests for format(float, spec), %-formatting against the shared test
    file, and precision handling (issue 5864)."""

    def test_format(self):
        # these should be rewritten to use both format(x, spec) and
        # x.__format__(spec)
        self.assertEqual(format(0.0, 'f'), '0.000000')

        # the default is 'g', except for empty format spec
        self.assertEqual(format(0.0, ''), '0.0')
        self.assertEqual(format(0.01, ''), '0.01')
        self.assertEqual(format(0.01, 'g'), '0.01')

        # empty presentation type should format in the same way as str
        # (issue 5920)
        x = 100/7.
        self.assertEqual(format(x, ''), str(x))
        self.assertEqual(format(x, '-'), str(x))
        self.assertEqual(format(x, '>'), str(x))
        self.assertEqual(format(x, '2'), str(x))

        self.assertEqual(format(1.0, 'f'), '1.000000')

        self.assertEqual(format(-1.0, 'f'), '-1.000000')

        self.assertEqual(format( 1.0, ' f'), ' 1.000000')
        self.assertEqual(format(-1.0, ' f'), '-1.000000')
        self.assertEqual(format( 1.0, '+f'), '+1.000000')
        self.assertEqual(format(-1.0, '+f'), '-1.000000')

        # % formatting
        self.assertEqual(format(-1.0, '%'), '-100.000000%')

        # conversion to string should fail
        self.assertRaises(ValueError, format, 3.0, "s")

        # other format specifiers shouldn't work on floats,
        # in particular int specifiers
        for format_spec in ([chr(x) for x in range(ord('a'), ord('z')+1)] +
                            [chr(x) for x in range(ord('A'), ord('Z')+1)]):
            if not format_spec in 'eEfFgGn%':
                self.assertRaises(ValueError, format, 0.0, format_spec)
                self.assertRaises(ValueError, format, 1.0, format_spec)
                self.assertRaises(ValueError, format, -1.0, format_spec)
                self.assertRaises(ValueError, format, 1e100, format_spec)
                self.assertRaises(ValueError, format, -1e100, format_spec)
                self.assertRaises(ValueError, format, 1e-100, format_spec)
                self.assertRaises(ValueError, format, -1e-100, format_spec)

        # issue 3382
        self.assertEqual(format(NAN, 'f'), 'nan')
        self.assertEqual(format(NAN, 'F'), 'NAN')
        self.assertEqual(format(INF, 'f'), 'inf')
        self.assertEqual(format(INF, 'F'), 'INF')

    @support.requires_IEEE_754
    def test_format_testfile(self):
        # each non-comment line of the test file is "fmt arg -> expected"
        with open(format_testfile) as testfile:
            for line in testfile:
                if line.startswith('--'):
                    continue
                line = line.strip()
                if not line:
                    continue

                lhs, rhs = map(str.strip, line.split('->'))
                fmt, arg = lhs.split()
                self.assertEqual(fmt % float(arg), rhs)
                self.assertEqual(fmt % -float(arg), '-' + rhs)

    def test_issue5864(self):
        self.assertEqual(format(123.456, '.4'), '123.5')
        self.assertEqual(format(1234.56, '.4'), '1.235e+03')
        self.assertEqual(format(12345.6, '.4'), '1.235e+04')
class ReprTestCase(unittest.TestCase):
    """Tests for repr() and str() of floats (exact round-tripping)."""

    def test_repr(self):
        # Every literal in floating_points.txt must survive a
        # float -> repr -> eval round trip unchanged.
        floats_file = open(os.path.join(os.path.split(__file__)[0],
                           'floating_points.txt'))
        for line in floats_file:
            line = line.strip()
            if not line or line.startswith('#'):
                continue
            v = eval(line)
            self.assertEqual(v, eval(repr(v)))
        floats_file.close()

    @unittest.skipUnless(getattr(sys, 'float_repr_style', '') == 'short',
                         "applies only when using short float repr style")
    def test_short_repr(self):
        # test short float repr introduced in Python 3.1.  One aspect
        # of this repr is that we get some degree of str -> float ->
        # str roundtripping.  In particular, for any numeric string
        # containing 15 or fewer significant digits, those exact same
        # digits (modulo trailing zeros) should appear in the output.
        # No more repr(0.03) -> "0.029999999999999999"!
        test_strings = [
            # output always includes *either* a decimal point and at
            # least one digit after that point, or an exponent.
            '0.0',
            '1.0',
            '0.01',
            '0.02',
            '0.03',
            '0.04',
            '0.05',
            '1.23456789',
            '10.0',
            '100.0',
            # values >= 1e16 get an exponent...
            '1000000000000000.0',
            '9999999999999990.0',
            '1e+16',
            '1e+17',
            # ... and so do values < 1e-4
            '0.001',
            '0.001001',
            '0.00010000000000001',
            '0.0001',
            '9.999999999999e-05',
            '1e-05',
            # values designed to provoke failure if the FPU rounding
            # precision isn't set correctly
            '8.72293771110361e+25',
            '7.47005307342313e+26',
            '2.86438000439698e+28',
            '8.89142905246179e+28',
            '3.08578087079232e+35',
            ]
        for s in test_strings:
            negs = '-'+s
            self.assertEqual(s, repr(float(s)))
            self.assertEqual(negs, repr(float(negs)))
            # Since Python 3.2, repr and str are identical
            self.assertEqual(repr(float(s)), str(float(s)))
            self.assertEqual(repr(float(negs)), str(float(negs)))
@support.requires_IEEE_754
class RoundTestCase(unittest.TestCase):
    """Tests for built-in round() on floats, plus formatting of specials."""

    def test_inf_nan(self):
        # Infinities overflow, NaN has no integer value, and the optional
        # ndigits argument must be an integer (floats/complex are rejected).
        self.assertRaises(OverflowError, round, INF)
        self.assertRaises(OverflowError, round, -INF)
        self.assertRaises(ValueError, round, NAN)
        self.assertRaises(TypeError, round, INF, 0.0)
        self.assertRaises(TypeError, round, -INF, 1.0)
        self.assertRaises(TypeError, round, NAN, "ceci n'est pas un integer")
        self.assertRaises(TypeError, round, -0.0, 1j)

    def test_large_n(self):
        # For sufficiently large ndigits, rounding is a no-op.
        for n in [324, 325, 400, 2**31-1, 2**31, 2**32, 2**100]:
            self.assertEqual(round(123.456, n), 123.456)
            self.assertEqual(round(-123.456, n), -123.456)
            self.assertEqual(round(1e300, n), 1e300)
            self.assertEqual(round(1e-320, n), 1e-320)
        self.assertEqual(round(1e150, 300), 1e150)
        self.assertEqual(round(1e300, 307), 1e300)
        self.assertEqual(round(-3.1415, 308), -3.1415)
        self.assertEqual(round(1e150, 309), 1e150)
        self.assertEqual(round(1.4e-315, 315), 1e-315)

    def test_small_n(self):
        # For very negative ndigits the result is a (correctly signed) zero.
        for n in [-308, -309, -400, 1-2**31, -2**31, -2**31-1, -2**100]:
            self.assertEqual(round(123.456, n), 0.0)
            self.assertEqual(round(-123.456, n), -0.0)
            self.assertEqual(round(1e300, n), 0.0)
            self.assertEqual(round(1e-320, n), 0.0)

    def test_overflow(self):
        # Rounding with negative ndigits can push a finite float out of range.
        self.assertRaises(OverflowError, round, 1.6e308, -308)
        self.assertRaises(OverflowError, round, -1.7e308, -308)

    @unittest.skipUnless(getattr(sys, 'float_repr_style', '') == 'short',
                         "applies only when using short float repr style")
    def test_previous_round_bugs(self):
        # particular cases that have occurred in bug reports
        self.assertEqual(round(562949953421312.5, 1),
                         562949953421312.5)
        self.assertEqual(round(56294995342131.5, 3),
                         56294995342131.5)
        # round-half-even
        self.assertEqual(round(25.0, -1), 20.0)
        self.assertEqual(round(35.0, -1), 40.0)
        self.assertEqual(round(45.0, -1), 40.0)
        self.assertEqual(round(55.0, -1), 60.0)
        self.assertEqual(round(65.0, -1), 60.0)
        self.assertEqual(round(75.0, -1), 80.0)
        self.assertEqual(round(85.0, -1), 80.0)
        self.assertEqual(round(95.0, -1), 100.0)

    @unittest.skipUnless(getattr(sys, 'float_repr_style', '') == 'short',
                         "applies only when using short float repr style")
    def test_matches_float_format(self):
        # round should give the same results as float formatting
        for i in range(500):
            x = i/1000.
            self.assertEqual(float(format(x, '.0f')), round(x, 0))
            self.assertEqual(float(format(x, '.1f')), round(x, 1))
            self.assertEqual(float(format(x, '.2f')), round(x, 2))
            self.assertEqual(float(format(x, '.3f')), round(x, 3))

        for i in range(5, 5000, 10):
            x = i/1000.
            self.assertEqual(float(format(x, '.0f')), round(x, 0))
            self.assertEqual(float(format(x, '.1f')), round(x, 1))
            self.assertEqual(float(format(x, '.2f')), round(x, 2))
            self.assertEqual(float(format(x, '.3f')), round(x, 3))

        for i in range(500):
            x = random.random()
            self.assertEqual(float(format(x, '.0f')), round(x, 0))
            self.assertEqual(float(format(x, '.1f')), round(x, 1))
            self.assertEqual(float(format(x, '.2f')), round(x, 2))
            self.assertEqual(float(format(x, '.3f')), round(x, 3))

    def test_format_specials(self):
        # Test formatting of nans and infs.

        def test(fmt, value, expected):
            # Test with both % and format().
            self.assertEqual(fmt % value, expected, fmt)
            fmt = fmt[1:]  # strip off the %
            self.assertEqual(format(value, fmt), expected, fmt)

        for fmt in ['%e', '%f', '%g', '%.0e', '%.6f', '%.20g',
                    '%#e', '%#f', '%#g', '%#.20e', '%#.15f', '%#.3g']:
            pfmt = '%+' + fmt[1:]
            sfmt = '% ' + fmt[1:]
            test(fmt, INF, 'inf')
            test(fmt, -INF, '-inf')
            test(fmt, NAN, 'nan')
            test(fmt, -NAN, 'nan')
            # When asking for a sign, it's always provided. nans are
            # always positive.
            test(pfmt, INF, '+inf')
            test(pfmt, -INF, '-inf')
            test(pfmt, NAN, '+nan')
            test(pfmt, -NAN, '+nan')
            # When using ' ' for a sign code, only infs can be negative.
            # Others have a space.
            test(sfmt, INF, ' inf')
            test(sfmt, -INF, '-inf')
            test(sfmt, NAN, ' nan')
            test(sfmt, -NAN, ' nan')
# Beginning with Python 2.6 float has cross platform compatible
# ways to create and represent inf and nan
class InfNanTest(unittest.TestCase):
    """Construction and representation of infinities and NaNs from strings."""

    def test_inf_from_str(self):
        # Any case mix of "inf"/"infinity", with an optional sign, is accepted.
        self.assertTrue(isinf(float("inf")))
        self.assertTrue(isinf(float("+inf")))
        self.assertTrue(isinf(float("-inf")))
        self.assertTrue(isinf(float("infinity")))
        self.assertTrue(isinf(float("+infinity")))
        self.assertTrue(isinf(float("-infinity")))

        self.assertEqual(repr(float("inf")), "inf")
        self.assertEqual(repr(float("+inf")), "inf")
        self.assertEqual(repr(float("-inf")), "-inf")
        self.assertEqual(repr(float("infinity")), "inf")
        self.assertEqual(repr(float("+infinity")), "inf")
        self.assertEqual(repr(float("-infinity")), "-inf")

        self.assertEqual(repr(float("INF")), "inf")
        self.assertEqual(repr(float("+Inf")), "inf")
        self.assertEqual(repr(float("-iNF")), "-inf")
        self.assertEqual(repr(float("Infinity")), "inf")
        self.assertEqual(repr(float("+iNfInItY")), "inf")
        self.assertEqual(repr(float("-INFINITY")), "-inf")

        self.assertEqual(str(float("inf")), "inf")
        self.assertEqual(str(float("+inf")), "inf")
        self.assertEqual(str(float("-inf")), "-inf")
        self.assertEqual(str(float("infinity")), "inf")
        self.assertEqual(str(float("+infinity")), "inf")
        self.assertEqual(str(float("-infinity")), "-inf")

        # Misspellings, truncations and doubled signs must be rejected.
        self.assertRaises(ValueError, float, "info")
        self.assertRaises(ValueError, float, "+info")
        self.assertRaises(ValueError, float, "-info")
        self.assertRaises(ValueError, float, "in")
        self.assertRaises(ValueError, float, "+in")
        self.assertRaises(ValueError, float, "-in")
        self.assertRaises(ValueError, float, "infinit")
        self.assertRaises(ValueError, float, "+Infin")
        self.assertRaises(ValueError, float, "-INFI")
        self.assertRaises(ValueError, float, "infinitys")
        self.assertRaises(ValueError, float, "++Inf")
        self.assertRaises(ValueError, float, "-+inf")
        self.assertRaises(ValueError, float, "+-infinity")
        self.assertRaises(ValueError, float, "--Infinity")

    def test_inf_as_str(self):
        # Overflowing arithmetic prints as inf/-inf.
        self.assertEqual(repr(1e300 * 1e300), "inf")
        self.assertEqual(repr(-1e300 * 1e300), "-inf")

        self.assertEqual(str(1e300 * 1e300), "inf")
        self.assertEqual(str(-1e300 * 1e300), "-inf")

    def test_nan_from_str(self):
        # "nan" with optional sign, any case; the sign never shows in output.
        self.assertTrue(isnan(float("nan")))
        self.assertTrue(isnan(float("+nan")))
        self.assertTrue(isnan(float("-nan")))

        self.assertEqual(repr(float("nan")), "nan")
        self.assertEqual(repr(float("+nan")), "nan")
        self.assertEqual(repr(float("-nan")), "nan")

        self.assertEqual(repr(float("NAN")), "nan")
        self.assertEqual(repr(float("+NAn")), "nan")
        self.assertEqual(repr(float("-NaN")), "nan")

        self.assertEqual(str(float("nan")), "nan")
        self.assertEqual(str(float("+nan")), "nan")
        self.assertEqual(str(float("-nan")), "nan")

        self.assertRaises(ValueError, float, "nana")
        self.assertRaises(ValueError, float, "+nana")
        self.assertRaises(ValueError, float, "-nana")
        self.assertRaises(ValueError, float, "na")
        self.assertRaises(ValueError, float, "+na")
        self.assertRaises(ValueError, float, "-na")
        self.assertRaises(ValueError, float, "++nan")
        self.assertRaises(ValueError, float, "-+NAN")
        self.assertRaises(ValueError, float, "+-NaN")
        self.assertRaises(ValueError, float, "--nAn")

    def test_nan_as_str(self):
        # inf * 0 produces a NaN; it prints as "nan" regardless of sign.
        self.assertEqual(repr(1e300 * 1e300 * 0), "nan")
        self.assertEqual(repr(-1e300 * 1e300 * 0), "nan")

        self.assertEqual(str(1e300 * 1e300 * 0), "nan")
        self.assertEqual(str(-1e300 * 1e300 * 0), "nan")

    def test_inf_signs(self):
        self.assertEqual(copysign(1.0, float('inf')), 1.0)
        self.assertEqual(copysign(1.0, float('-inf')), -1.0)

    @unittest.skipUnless(getattr(sys, 'float_repr_style', '') == 'short',
                         "applies only when using short float repr style")
    def test_nan_signs(self):
        # When using the dtoa.c code, the sign of float('nan') should
        # be predictable.
        self.assertEqual(copysign(1.0, float('nan')), 1.0)
        self.assertEqual(copysign(1.0, float('-nan')), -1.0)
# Short aliases used heavily by HexFloatTestCase below.
fromHex = float.fromhex
toHex = float.hex
class HexFloatTestCase(unittest.TestCase):
    """Tests for float.hex / float.fromhex (hexadecimal float conversion)."""

    # Named boundary values of the IEEE 754 binary64 format.
    MAX = fromHex('0x.fffffffffffff8p+1024')  # max normal
    MIN = fromHex('0x1p-1022')  # min normal
    TINY = fromHex('0x0.0000000000001p-1022')  # min subnormal
    EPS = fromHex('0x0.0000000000001p0')  # diff between 1.0 and next float up

    def identical(self, x, y):
        # check that floats x and y are identical, or that both
        # are NaNs
        if isnan(x) or isnan(y):
            if isnan(x) == isnan(y):
                return
        elif x == y and (x != 0.0 or copysign(1.0, x) == copysign(1.0, y)):
            return
        self.fail('%r not identical to %r' % (x, y))

    def test_ends(self):
        # The named class constants must match their ldexp definitions.
        self.identical(self.MIN, ldexp(1.0, -1022))
        self.identical(self.TINY, ldexp(1.0, -1074))
        self.identical(self.EPS, ldexp(1.0, -52))
        self.identical(self.MAX, 2.*(ldexp(1.0, 1023) - ldexp(1.0, 970)))

    def test_invalid_inputs(self):
        invalid_inputs = [
            'infi',  # misspelt infinities and nans
            '-Infinit',
            '++inf',
            '-+Inf',
            '--nan',
            '+-NaN',
            'snan',
            'NaNs',
            'nna',
            'an',
            'nf',
            'nfinity',
            'inity',
            'iinity',
            '0xnan',
            '',
            ' ',
            'x1.0p0',
            '0xX1.0p0',
            '+ 0x1.0p0',  # internal whitespace
            '- 0x1.0p0',
            '0 x1.0p0',
            '0x 1.0p0',
            '0x1 2.0p0',
            '+0x1 .0p0',
            '0x1. 0p0',
            '-0x1.0 1p0',
            '-0x1.0 p0',
            '+0x1.0p +0',
            '0x1.0p -0',
            '0x1.0p 0',
            '+0x1.0p+ 0',
            '-0x1.0p- 0',
            '++0x1.0p-0',  # double signs
            '--0x1.0p0',
            '+-0x1.0p+0',
            '-+0x1.0p0',
            '0x1.0p++0',
            '+0x1.0p+-0',
            '-0x1.0p-+0',
            '0x1.0p--0',
            '0x1.0.p0',
            '0x.p0',  # no hex digits before or after point
            '0x1,p0',  # wrong decimal point character
            '0x1pa',
            '0x1p\uff10',  # fullwidth Unicode digits
            '\uff10x1p0',
            '0x\uff11p0',
            '0x1.\uff10p0',
            '0x1p0 \n 0x2p0',
            '0x1p0\0 0x1p0',  # embedded null byte is not end of string
            ]
        for x in invalid_inputs:
            try:
                result = fromHex(x)
            except ValueError:
                pass
            else:
                self.fail('Expected float.fromhex(%r) to raise ValueError; '
                          'got %r instead' % (x, result))

    def test_whitespace(self):
        # Leading and trailing (but not internal) whitespace is accepted.
        value_pairs = [
            ('inf', INF),
            ('-Infinity', -INF),
            ('nan', NAN),
            ('1.0', 1.0),
            ('-0x.2', -0.125),
            ('-0.0', -0.0)
            ]
        whitespace = [
            '',
            ' ',
            '\t',
            '\n',
            '\n \t',
            '\f',
            '\v',
            '\r'
            ]
        for inp, expected in value_pairs:
            for lead in whitespace:
                for trail in whitespace:
                    got = fromHex(lead + inp + trail)
                    self.identical(got, expected)

    def test_from_hex(self):
        MIN = self.MIN;
        MAX = self.MAX;
        TINY = self.TINY;
        EPS = self.EPS;

        # two spellings of infinity, with optional signs; case-insensitive
        self.identical(fromHex('inf'), INF)
        self.identical(fromHex('+Inf'), INF)
        self.identical(fromHex('-INF'), -INF)
        self.identical(fromHex('iNf'), INF)
        self.identical(fromHex('Infinity'), INF)
        self.identical(fromHex('+INFINITY'), INF)
        self.identical(fromHex('-infinity'), -INF)
        self.identical(fromHex('-iNFiNitY'), -INF)

        # nans with optional sign; case insensitive
        self.identical(fromHex('nan'), NAN)
        self.identical(fromHex('+NaN'), NAN)
        self.identical(fromHex('-NaN'), NAN)
        self.identical(fromHex('-nAN'), NAN)

        # variations in input format
        self.identical(fromHex('1'), 1.0)
        self.identical(fromHex('+1'), 1.0)
        self.identical(fromHex('1.'), 1.0)
        self.identical(fromHex('1.0'), 1.0)
        self.identical(fromHex('1.0p0'), 1.0)
        self.identical(fromHex('01'), 1.0)
        self.identical(fromHex('01.'), 1.0)
        self.identical(fromHex('0x1'), 1.0)
        self.identical(fromHex('0x1.'), 1.0)
        self.identical(fromHex('0x1.0'), 1.0)
        self.identical(fromHex('+0x1.0'), 1.0)
        self.identical(fromHex('0x1p0'), 1.0)
        self.identical(fromHex('0X1p0'), 1.0)
        self.identical(fromHex('0X1P0'), 1.0)
        self.identical(fromHex('0x1P0'), 1.0)
        self.identical(fromHex('0x1.p0'), 1.0)
        self.identical(fromHex('0x1.0p0'), 1.0)
        self.identical(fromHex('0x.1p4'), 1.0)
        self.identical(fromHex('0x.1p04'), 1.0)
        self.identical(fromHex('0x.1p004'), 1.0)
        self.identical(fromHex('0x1p+0'), 1.0)
        self.identical(fromHex('0x1P-0'), 1.0)
        self.identical(fromHex('+0x1p0'), 1.0)
        self.identical(fromHex('0x01p0'), 1.0)
        self.identical(fromHex('0x1p00'), 1.0)
        self.identical(fromHex(' 0x1p0 '), 1.0)
        self.identical(fromHex('\n 0x1p0'), 1.0)
        self.identical(fromHex('0x1p0 \t'), 1.0)
        self.identical(fromHex('0xap0'), 10.0)
        self.identical(fromHex('0xAp0'), 10.0)
        self.identical(fromHex('0xaP0'), 10.0)
        self.identical(fromHex('0xAP0'), 10.0)
        self.identical(fromHex('0xbep0'), 190.0)
        self.identical(fromHex('0xBep0'), 190.0)
        self.identical(fromHex('0xbEp0'), 190.0)
        self.identical(fromHex('0XBE0P-4'), 190.0)
        self.identical(fromHex('0xBEp0'), 190.0)
        self.identical(fromHex('0xB.Ep4'), 190.0)
        self.identical(fromHex('0x.BEp8'), 190.0)
        self.identical(fromHex('0x.0BEp12'), 190.0)

        # moving the point around
        pi = fromHex('0x1.921fb54442d18p1')
        self.identical(fromHex('0x.006487ed5110b46p11'), pi)
        self.identical(fromHex('0x.00c90fdaa22168cp10'), pi)
        self.identical(fromHex('0x.01921fb54442d18p9'), pi)
        self.identical(fromHex('0x.03243f6a8885a3p8'), pi)
        self.identical(fromHex('0x.06487ed5110b46p7'), pi)
        self.identical(fromHex('0x.0c90fdaa22168cp6'), pi)
        self.identical(fromHex('0x.1921fb54442d18p5'), pi)
        self.identical(fromHex('0x.3243f6a8885a3p4'), pi)
        self.identical(fromHex('0x.6487ed5110b46p3'), pi)
        self.identical(fromHex('0x.c90fdaa22168cp2'), pi)
        self.identical(fromHex('0x1.921fb54442d18p1'), pi)
        self.identical(fromHex('0x3.243f6a8885a3p0'), pi)
        self.identical(fromHex('0x6.487ed5110b46p-1'), pi)
        self.identical(fromHex('0xc.90fdaa22168cp-2'), pi)
        self.identical(fromHex('0x19.21fb54442d18p-3'), pi)
        self.identical(fromHex('0x32.43f6a8885a3p-4'), pi)
        self.identical(fromHex('0x64.87ed5110b46p-5'), pi)
        self.identical(fromHex('0xc9.0fdaa22168cp-6'), pi)
        self.identical(fromHex('0x192.1fb54442d18p-7'), pi)
        self.identical(fromHex('0x324.3f6a8885a3p-8'), pi)
        self.identical(fromHex('0x648.7ed5110b46p-9'), pi)
        self.identical(fromHex('0xc90.fdaa22168cp-10'), pi)
        self.identical(fromHex('0x1921.fb54442d18p-11'), pi)
        # ...
        self.identical(fromHex('0x1921fb54442d1.8p-47'), pi)
        self.identical(fromHex('0x3243f6a8885a3p-48'), pi)
        self.identical(fromHex('0x6487ed5110b46p-49'), pi)
        self.identical(fromHex('0xc90fdaa22168cp-50'), pi)
        self.identical(fromHex('0x1921fb54442d18p-51'), pi)
        self.identical(fromHex('0x3243f6a8885a30p-52'), pi)
        self.identical(fromHex('0x6487ed5110b460p-53'), pi)
        self.identical(fromHex('0xc90fdaa22168c0p-54'), pi)
        self.identical(fromHex('0x1921fb54442d180p-55'), pi)

        # results that should overflow...
        self.assertRaises(OverflowError, fromHex, '-0x1p1024')
        self.assertRaises(OverflowError, fromHex, '0x1p+1025')
        self.assertRaises(OverflowError, fromHex, '+0X1p1030')
        self.assertRaises(OverflowError, fromHex, '-0x1p+1100')
        self.assertRaises(OverflowError, fromHex, '0X1p123456789123456789')
        self.assertRaises(OverflowError, fromHex, '+0X.8p+1025')
        self.assertRaises(OverflowError, fromHex, '+0x0.8p1025')
        self.assertRaises(OverflowError, fromHex, '-0x0.4p1026')
        self.assertRaises(OverflowError, fromHex, '0X2p+1023')
        self.assertRaises(OverflowError, fromHex, '0x2.p1023')
        self.assertRaises(OverflowError, fromHex, '-0x2.0p+1023')
        self.assertRaises(OverflowError, fromHex, '+0X4p+1022')
        self.assertRaises(OverflowError, fromHex, '0x1.ffffffffffffffp+1023')
        self.assertRaises(OverflowError, fromHex, '-0X1.fffffffffffff9p1023')
        self.assertRaises(OverflowError, fromHex, '0X1.fffffffffffff8p1023')
        self.assertRaises(OverflowError, fromHex, '+0x3.fffffffffffffp1022')
        self.assertRaises(OverflowError, fromHex, '0x3fffffffffffffp+970')
        self.assertRaises(OverflowError, fromHex, '0x10000000000000000p960')
        self.assertRaises(OverflowError, fromHex, '-0Xffffffffffffffffp960')

        # ...and those that round to +-max float
        self.identical(fromHex('+0x1.fffffffffffffp+1023'), MAX)
        self.identical(fromHex('-0X1.fffffffffffff7p1023'), -MAX)
        self.identical(fromHex('0X1.fffffffffffff7fffffffffffffp1023'), MAX)

        # zeros
        self.identical(fromHex('0x0p0'), 0.0)
        self.identical(fromHex('0x0p1000'), 0.0)
        self.identical(fromHex('-0x0p1023'), -0.0)
        self.identical(fromHex('0X0p1024'), 0.0)
        self.identical(fromHex('-0x0p1025'), -0.0)
        self.identical(fromHex('0X0p2000'), 0.0)
        self.identical(fromHex('0x0p123456789123456789'), 0.0)
        self.identical(fromHex('-0X0p-0'), -0.0)
        self.identical(fromHex('-0X0p-1000'), -0.0)
        self.identical(fromHex('0x0p-1023'), 0.0)
        self.identical(fromHex('-0X0p-1024'), -0.0)
        self.identical(fromHex('-0x0p-1025'), -0.0)
        self.identical(fromHex('-0x0p-1072'), -0.0)
        self.identical(fromHex('0X0p-1073'), 0.0)
        self.identical(fromHex('-0x0p-1074'), -0.0)
        self.identical(fromHex('0x0p-1075'), 0.0)
        self.identical(fromHex('0X0p-1076'), 0.0)
        self.identical(fromHex('-0X0p-2000'), -0.0)
        self.identical(fromHex('-0x0p-123456789123456789'), -0.0)

        # values that should underflow to 0
        self.identical(fromHex('0X1p-1075'), 0.0)
        self.identical(fromHex('-0X1p-1075'), -0.0)
        self.identical(fromHex('-0x1p-123456789123456789'), -0.0)
        self.identical(fromHex('0x1.00000000000000001p-1075'), TINY)
        self.identical(fromHex('-0x1.1p-1075'), -TINY)
        self.identical(fromHex('0x1.fffffffffffffffffp-1075'), TINY)

        # check round-half-even is working correctly near 0 ...
        self.identical(fromHex('0x1p-1076'), 0.0)
        self.identical(fromHex('0X2p-1076'), 0.0)
        self.identical(fromHex('0X3p-1076'), TINY)
        self.identical(fromHex('0x4p-1076'), TINY)
        self.identical(fromHex('0X5p-1076'), TINY)
        self.identical(fromHex('0X6p-1076'), 2*TINY)
        self.identical(fromHex('0x7p-1076'), 2*TINY)
        self.identical(fromHex('0X8p-1076'), 2*TINY)
        self.identical(fromHex('0X9p-1076'), 2*TINY)
        self.identical(fromHex('0xap-1076'), 2*TINY)
        self.identical(fromHex('0Xbp-1076'), 3*TINY)
        self.identical(fromHex('0xcp-1076'), 3*TINY)
        self.identical(fromHex('0Xdp-1076'), 3*TINY)
        self.identical(fromHex('0Xep-1076'), 4*TINY)
        self.identical(fromHex('0xfp-1076'), 4*TINY)
        self.identical(fromHex('0x10p-1076'), 4*TINY)
        self.identical(fromHex('-0x1p-1076'), -0.0)
        self.identical(fromHex('-0X2p-1076'), -0.0)
        self.identical(fromHex('-0x3p-1076'), -TINY)
        self.identical(fromHex('-0X4p-1076'), -TINY)
        self.identical(fromHex('-0x5p-1076'), -TINY)
        self.identical(fromHex('-0x6p-1076'), -2*TINY)
        self.identical(fromHex('-0X7p-1076'), -2*TINY)
        self.identical(fromHex('-0X8p-1076'), -2*TINY)
        self.identical(fromHex('-0X9p-1076'), -2*TINY)
        self.identical(fromHex('-0Xap-1076'), -2*TINY)
        self.identical(fromHex('-0xbp-1076'), -3*TINY)
        self.identical(fromHex('-0xcp-1076'), -3*TINY)
        self.identical(fromHex('-0Xdp-1076'), -3*TINY)
        self.identical(fromHex('-0xep-1076'), -4*TINY)
        self.identical(fromHex('-0Xfp-1076'), -4*TINY)
        self.identical(fromHex('-0X10p-1076'), -4*TINY)

        # ... and near MIN ...
        self.identical(fromHex('0x0.ffffffffffffd6p-1022'), MIN-3*TINY)
        self.identical(fromHex('0x0.ffffffffffffd8p-1022'), MIN-2*TINY)
        self.identical(fromHex('0x0.ffffffffffffdap-1022'), MIN-2*TINY)
        self.identical(fromHex('0x0.ffffffffffffdcp-1022'), MIN-2*TINY)
        self.identical(fromHex('0x0.ffffffffffffdep-1022'), MIN-2*TINY)
        self.identical(fromHex('0x0.ffffffffffffe0p-1022'), MIN-2*TINY)
        self.identical(fromHex('0x0.ffffffffffffe2p-1022'), MIN-2*TINY)
        self.identical(fromHex('0x0.ffffffffffffe4p-1022'), MIN-2*TINY)
        self.identical(fromHex('0x0.ffffffffffffe6p-1022'), MIN-2*TINY)
        self.identical(fromHex('0x0.ffffffffffffe8p-1022'), MIN-2*TINY)
        self.identical(fromHex('0x0.ffffffffffffeap-1022'), MIN-TINY)
        self.identical(fromHex('0x0.ffffffffffffecp-1022'), MIN-TINY)
        self.identical(fromHex('0x0.ffffffffffffeep-1022'), MIN-TINY)
        self.identical(fromHex('0x0.fffffffffffff0p-1022'), MIN-TINY)
        self.identical(fromHex('0x0.fffffffffffff2p-1022'), MIN-TINY)
        self.identical(fromHex('0x0.fffffffffffff4p-1022'), MIN-TINY)
        self.identical(fromHex('0x0.fffffffffffff6p-1022'), MIN-TINY)
        self.identical(fromHex('0x0.fffffffffffff8p-1022'), MIN)
        self.identical(fromHex('0x0.fffffffffffffap-1022'), MIN)
        self.identical(fromHex('0x0.fffffffffffffcp-1022'), MIN)
        self.identical(fromHex('0x0.fffffffffffffep-1022'), MIN)
        self.identical(fromHex('0x1.00000000000000p-1022'), MIN)
        self.identical(fromHex('0x1.00000000000002p-1022'), MIN)
        self.identical(fromHex('0x1.00000000000004p-1022'), MIN)
        self.identical(fromHex('0x1.00000000000006p-1022'), MIN)
        self.identical(fromHex('0x1.00000000000008p-1022'), MIN)
        self.identical(fromHex('0x1.0000000000000ap-1022'), MIN+TINY)
        self.identical(fromHex('0x1.0000000000000cp-1022'), MIN+TINY)
        self.identical(fromHex('0x1.0000000000000ep-1022'), MIN+TINY)
        self.identical(fromHex('0x1.00000000000010p-1022'), MIN+TINY)
        self.identical(fromHex('0x1.00000000000012p-1022'), MIN+TINY)
        self.identical(fromHex('0x1.00000000000014p-1022'), MIN+TINY)
        self.identical(fromHex('0x1.00000000000016p-1022'), MIN+TINY)
        self.identical(fromHex('0x1.00000000000018p-1022'), MIN+2*TINY)

        # ... and near 1.0.
        self.identical(fromHex('0x0.fffffffffffff0p0'), 1.0-EPS)
        self.identical(fromHex('0x0.fffffffffffff1p0'), 1.0-EPS)
        self.identical(fromHex('0X0.fffffffffffff2p0'), 1.0-EPS)
        self.identical(fromHex('0x0.fffffffffffff3p0'), 1.0-EPS)
        self.identical(fromHex('0X0.fffffffffffff4p0'), 1.0-EPS)
        self.identical(fromHex('0X0.fffffffffffff5p0'), 1.0-EPS/2)
        self.identical(fromHex('0X0.fffffffffffff6p0'), 1.0-EPS/2)
        self.identical(fromHex('0x0.fffffffffffff7p0'), 1.0-EPS/2)
        self.identical(fromHex('0x0.fffffffffffff8p0'), 1.0-EPS/2)
        self.identical(fromHex('0X0.fffffffffffff9p0'), 1.0-EPS/2)
        self.identical(fromHex('0X0.fffffffffffffap0'), 1.0-EPS/2)
        self.identical(fromHex('0x0.fffffffffffffbp0'), 1.0-EPS/2)
        self.identical(fromHex('0X0.fffffffffffffcp0'), 1.0)
        self.identical(fromHex('0x0.fffffffffffffdp0'), 1.0)
        self.identical(fromHex('0X0.fffffffffffffep0'), 1.0)
        self.identical(fromHex('0x0.ffffffffffffffp0'), 1.0)
        self.identical(fromHex('0X1.00000000000000p0'), 1.0)
        self.identical(fromHex('0X1.00000000000001p0'), 1.0)
        self.identical(fromHex('0x1.00000000000002p0'), 1.0)
        self.identical(fromHex('0X1.00000000000003p0'), 1.0)
        self.identical(fromHex('0x1.00000000000004p0'), 1.0)
        self.identical(fromHex('0X1.00000000000005p0'), 1.0)
        self.identical(fromHex('0X1.00000000000006p0'), 1.0)
        self.identical(fromHex('0X1.00000000000007p0'), 1.0)
        self.identical(fromHex('0x1.00000000000007ffffffffffffffffffffp0'),
                       1.0)
        self.identical(fromHex('0x1.00000000000008p0'), 1.0)
        self.identical(fromHex('0x1.00000000000008000000000000000001p0'),
                       1+EPS)
        self.identical(fromHex('0X1.00000000000009p0'), 1.0+EPS)
        self.identical(fromHex('0x1.0000000000000ap0'), 1.0+EPS)
        self.identical(fromHex('0x1.0000000000000bp0'), 1.0+EPS)
        self.identical(fromHex('0X1.0000000000000cp0'), 1.0+EPS)
        self.identical(fromHex('0x1.0000000000000dp0'), 1.0+EPS)
        self.identical(fromHex('0x1.0000000000000ep0'), 1.0+EPS)
        self.identical(fromHex('0X1.0000000000000fp0'), 1.0+EPS)
        self.identical(fromHex('0x1.00000000000010p0'), 1.0+EPS)
        self.identical(fromHex('0X1.00000000000011p0'), 1.0+EPS)
        self.identical(fromHex('0x1.00000000000012p0'), 1.0+EPS)
        self.identical(fromHex('0X1.00000000000013p0'), 1.0+EPS)
        self.identical(fromHex('0X1.00000000000014p0'), 1.0+EPS)
        self.identical(fromHex('0x1.00000000000015p0'), 1.0+EPS)
        self.identical(fromHex('0x1.00000000000016p0'), 1.0+EPS)
        self.identical(fromHex('0X1.00000000000017p0'), 1.0+EPS)
        self.identical(fromHex('0x1.00000000000017ffffffffffffffffffffp0'),
                       1.0+EPS)
        self.identical(fromHex('0x1.00000000000018p0'), 1.0+2*EPS)
        self.identical(fromHex('0X1.00000000000018000000000000000001p0'),
                       1.0+2*EPS)
        self.identical(fromHex('0x1.00000000000019p0'), 1.0+2*EPS)
        self.identical(fromHex('0X1.0000000000001ap0'), 1.0+2*EPS)
        self.identical(fromHex('0X1.0000000000001bp0'), 1.0+2*EPS)
        self.identical(fromHex('0x1.0000000000001cp0'), 1.0+2*EPS)
        self.identical(fromHex('0x1.0000000000001dp0'), 1.0+2*EPS)
        self.identical(fromHex('0x1.0000000000001ep0'), 1.0+2*EPS)
        self.identical(fromHex('0X1.0000000000001fp0'), 1.0+2*EPS)
        self.identical(fromHex('0x1.00000000000020p0'), 1.0+2*EPS)

    def test_roundtrip(self):
        def roundtrip(x):
            return fromHex(toHex(x))

        for x in [NAN, INF, self.MAX, self.MIN, self.MIN-self.TINY, self.TINY, 0.0]:
            self.identical(x, roundtrip(x))
            self.identical(-x, roundtrip(-x))

        # fromHex(toHex(x)) should exactly recover x, for any non-NaN float x.
        import random
        for i in range(10000):
            e = random.randrange(-1200, 1200)
            m = random.random()
            s = random.choice([1.0, -1.0])
            try:
                x = s*ldexp(m, e)
            except OverflowError:
                pass
            else:
                self.identical(x, fromHex(toHex(x)))
def test_main():
    # Entry point used by regrtest: run every test class in this module.
    support.run_unittest(
        GeneralFloatCases,
        FormatFunctionsTestCase,
        UnknownFormatTestCase,
        IEEEFormatTestCase,
        FormatTestCase,
        ReprTestCase,
        RoundTestCase,
        InfNanTest,
        HexFloatTestCase,
        )

if __name__ == '__main__':
    test_main()
| lgpl-3.0 |
dana-i2cat/felix | modules/resource/manager/stitching-entity/src/server/flask/flaskserver.py | 2 | 8204 | from flask import Flask, g, request, request_started, request_finished
from flask.ext.pymongo import PyMongo
from core.config import ConfParser
from core import log
logger=log.getLogger("flaskserver")
from server.flask.views import ro_flask_views
from werkzeug import serving
from OpenSSL import SSL, crypto
import ast
import os
import sys
class ClientCertHTTPRequestHandler(serving.WSGIRequestHandler):
    """Werkzeug request handler that exposes the client's TLS certificate.

    The peer certificate, if one was presented during the handshake, is
    stored PEM-encoded during ``setup`` and published to the WSGI
    application through the ``CLIENT_RAW_CERT`` environment key.
    """

    def make_environ(self):
        environ = super(ClientCertHTTPRequestHandler, self).make_environ()
        cert = self._client_cert
        if cert:
            environ['CLIENT_RAW_CERT'] = cert
        return environ

    def setup(self):
        super(ClientCertHTTPRequestHandler, self).setup()
        # Complete the TLS handshake eagerly so the peer certificate is
        # available before the request is dispatched.
        self.connection.do_handshake()
        peer = self.connection.get_peer_certificate()
        self._client_cert = (
            crypto.dump_certificate(crypto.FILETYPE_PEM, peer) if peer else None
        )
class FlaskServer(object):
    """
    Encapsules a flask server instance.

    It also exports/defines the rpcservice interface.
    When a request comes in the following chain is walked through:
        --http--> nginx webserver --fcgi--> WSGIServer --WSGI--> FlaskApp
    When using the development server:
        werkzeug server --WSGI--> FlaskApp
    """

    def __init__(self):
        """Constructor for the server wrapper.

        Reads flask.conf, builds the Flask app and the PyMongo handle,
        optionally wires request/response logging, and attaches the
        Mongo handle to ``g`` before every request.
        """
        # Flask(__name__) would import the named package; __name__ may contain
        # "." here, which PyMongo dislikes, so only the last component is used.
        self.config = ConfParser("flask.conf")
        self.general_section = self.config.get("general")
        self.template_folder = self.general_section.get("template_folder")
        self.fcgi_section = self.config.get("fcgi")
        self.certificates_section = self.config.get("certificates")
        self._app = Flask(__name__.split(".")[-1],
                          template_folder=self.template_folder)
        self._mongo = PyMongo(self._app)
        # Local alias so the decorators below can be applied here in __init__.
        app = self._app
        # When debugging, log every request and response on the RPC interface.
        cDebug = self.general_section.get("debug")
        if cDebug:
            def log_request(sender, **extra):
                logger.info(">>> REQUEST %s:\n%s" % (request.path, request.data))
            request_started.connect(log_request, self._app)

            def log_response(sender, response, **extra):
                logger.info(">>> RESPONSE %s:\n%s" % (response.status, response.data))
            request_finished.connect(log_response, self._app)

        @app.before_request
        def before_request():
            # "Attach" objects within the "g" object; it is passed to each view.
            g.mongo = self._mongo

    @property
    def app(self):
        """Returns the flask instance (not part of the service interface, since it is specific to flask)."""
        return self._app

    def add_routes(self):
        """Register the views blueprint (all URL rules) on the Flask app."""
        self._app.register_blueprint(ro_flask_views)

    def runServer(self, services=None):
        """Starts up the server. It (will) support different config options via the config plugin.

        :param services: optional iterable of supplementary services exposing
            a ``start()`` method; they are started just before the serving
            loop begins.  Defaults to no services.
        """
        # NOTE: this parameter used to default to a mutable list ([]), which
        # is shared between calls; None is used instead to avoid that trap.
        if services is None:
            services = []
        self.add_routes()
        host = self.general_section.get("host")
        app_port = int(self.general_section.get("port"))
        cFCGI = ast.literal_eval(self.fcgi_section.get("enabled"))
        fcgi_port = int(self.fcgi_section.get("port"))
        must_have_client_cert = ast.literal_eval(
            self.certificates_section.get("force_client_certificate"))
        if cFCGI:
            logger.info("registering fcgi server at %s:%i", host, fcgi_port)
            from flup.server.fcgi import WSGIServer
            WSGIServer(self._app, bindAddress=(host, fcgi_port)).run()
        else:
            logger.info("registering app server at %s:%i", host, app_port)
            # This workaround (instead of self._app.run(...)) makes sure the
            # client certificate can be acquired later, even when running the
            # development server: the server is built manually so a custom
            # request handler and SSL context can be injected.  Based on
            # flask's app.run and werkzeug's run_simple.
            try:
                import socket

                # Set up an SSL context from the certificates shipped in cert/.
                from OpenSSL import SSL
                context = SSL.Context(SSL.SSLv23_METHOD)
                certs_path = os.path.normpath(
                    os.path.join(os.path.dirname(__file__), "../../..", "cert"))
                context_crt = os.path.join(certs_path, "server.crt")
                context_key = os.path.join(certs_path, "server.key")
                try:
                    context.use_certificate_file(context_crt)
                    context.use_privatekey_file(context_key)
                except Exception as e:
                    logger.critical(
                        "error starting flask server. Cert or key is missing under %s",
                        certs_path)
                    sys.exit(e)

                def inner():
                    server = serving.make_server(
                        host, app_port, self._app, False, 1,
                        ClientCertHTTPRequestHandler, False, ssl_context=context)
                    if must_have_client_cert:
                        # FIXME: what works with web app does not work with cli. Check this out
                        server.ssl_context.set_verify(
                            SSL.VERIFY_PEER | SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
                            lambda a, b, c, d, e: True)
                    # Before entering the loop, start the supplementary services.
                    for s in services:
                        s.start()
                    # That's it
                    server.serve_forever()

                # Fail fast with a clear error if the port is unavailable,
                # before handing control to the reloader.
                address_family = serving.select_ip_version(host, app_port)
                test_socket = socket.socket(address_family, socket.SOCK_STREAM)
                test_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
                test_socket.bind((host, app_port))
                test_socket.close()
                serving.run_with_reloader(inner, None, 1)
            finally:
                self._app._got_first_request = False
| apache-2.0 |
ahmed-mahran/hue | desktop/core/ext-py/boto-2.38.0/boto/route53/domains/layer1.py | 151 | 32418 | # Copyright (c) 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import boto
from boto.compat import json
from boto.connection import AWSQueryConnection
from boto.regioninfo import RegionInfo
from boto.exception import JSONResponseError
from boto.route53.domains import exceptions
class Route53DomainsConnection(AWSQueryConnection):
    """
    Connection to the Amazon Route 53 Domains service.

    Every public call serializes its parameters to a JSON body and posts
    it to the ``Route53Domains_v20140515`` JSON-style endpoint; see
    :meth:`make_request` for the transport and fault handling.
    """
    APIVersion = "2014-05-15"
    DefaultRegionName = "us-east-1"
    DefaultRegionEndpoint = "route53domains.us-east-1.amazonaws.com"
    ServiceName = "Route53Domains"
    TargetPrefix = "Route53Domains_v20140515"
    ResponseError = JSONResponseError

    # Maps the service's "__type" fault names to local exception classes.
    _faults = {
        "DuplicateRequest": exceptions.DuplicateRequest,
        "DomainLimitExceeded": exceptions.DomainLimitExceeded,
        "InvalidInput": exceptions.InvalidInput,
        "OperationLimitExceeded": exceptions.OperationLimitExceeded,
        "UnsupportedTLD": exceptions.UnsupportedTLD,
        "TLDRulesViolation": exceptions.TLDRulesViolation,
    }

    def __init__(self, **kwargs):
        region = kwargs.pop('region', None)
        if not region:
            region = RegionInfo(self, self.DefaultRegionName,
                                self.DefaultRegionEndpoint)
        if kwargs.get('host') is None:
            kwargs['host'] = region.endpoint
        super(Route53DomainsConnection, self).__init__(**kwargs)
        self.region = region

    def _required_auth_capability(self):
        # SigV4 signing is mandatory for this service.
        return ['hmac-v4']

    def _post(self, action, params):
        """Serialize *params* to JSON and dispatch *action* (internal)."""
        return self.make_request(action=action, body=json.dumps(params))

    def _paged_list(self, action, marker, max_items):
        """Shared body for the paginated List* calls (internal)."""
        params = {}
        if marker is not None:
            params['Marker'] = marker
        if max_items is not None:
            params['MaxItems'] = max_items
        return self._post(action, params)

    @staticmethod
    def _add_optional(params, pairs):
        """Copy each (name, value) of *pairs* whose value is not None
        into *params* (internal)."""
        for name, value in pairs:
            if value is not None:
                params[name] = value

    def check_domain_availability(self, domain_name, idn_lang_code=None):
        """
        Check whether a single domain name is available for registration.
        A ``pending`` availability status requires a follow-up request.

        :type domain_name: string
        :param domain_name: ASCII-only domain name (a-z, 0-9 and hyphen;
            internationalized names are not supported).

        :type idn_lang_code: string
        :param idn_lang_code: Reserved for future use.
        """
        params = {'DomainName': domain_name}
        if idn_lang_code is not None:
            params['IdnLangCode'] = idn_lang_code
        return self._post('CheckDomainAvailability', params)

    def disable_domain_transfer_lock(self, domain_name):
        """
        Clear the ``clientTransferProhibited`` status so the domain can
        be transferred to a different registrar.  Returns an operation id
        usable for progress tracking.
        """
        return self._post('DisableDomainTransferLock',
                          {'DomainName': domain_name})

    def enable_domain_transfer_lock(self, domain_name):
        """
        Set the ``clientTransferProhibited`` status to block domain
        transfers.  Returns an operation id usable for progress tracking.
        """
        return self._post('EnableDomainTransferLock',
                          {'DomainName': domain_name})

    def get_domain_detail(self, domain_name):
        """
        Return detailed information about *domain_name*, including its
        contact information.
        """
        return self._post('GetDomainDetail', {'DomainName': domain_name})

    def get_operation_detail(self, operation_id):
        """
        Return the current status of a not-yet-completed operation.

        :param operation_id: identifier returned by the original request.
        """
        return self._post('GetOperationDetail',
                          {'OperationId': operation_id})

    def list_domains(self, marker=None, max_items=None):
        """
        List the domain names registered with Route 53 for this account.

        :param marker: ``NextPageMarker`` from a previous response, to
            continue a paginated listing.
        :param max_items: page size, 1-100 (service default 20).
        """
        return self._paged_list('ListDomains', marker, max_items)

    def list_operations(self, marker=None, max_items=None):
        """
        List the ids of operations that are not yet complete.

        :param marker: ``NextPageMarker`` from a previous response, to
            continue a paginated listing.
        :param max_items: page size, 1-100 (service default 20).
        """
        return self._paged_list('ListOperations', marker, max_items)

    def register_domain(self, domain_name, duration_in_years, admin_contact,
                        registrant_contact, tech_contact, idn_lang_code=None,
                        auto_renew=None, privacy_protect_admin_contact=None,
                        privacy_protect_registrant_contact=None,
                        privacy_protect_tech_contact=None):
        """
        Register *domain_name* through the AWS registrar partner (Gandi).

        Route 53 also creates a matching hosted zone, enables autorenew,
        optionally conceals contact data behind the registrar's privacy
        service, and charges the AWS account.  Returns an operation id
        for progress tracking; the registrant is notified by email on
        failure.

        :param domain_name: ASCII-only domain name (a-z, 0-9, hyphen).
        :param duration_in_years: registration period, integer 1-10.
        :param admin_contact: contact dict (``FirstName``, ``LastName``,
            ``ContactType``, address fields, ``Email`` and so on).
        :param registrant_contact: contact dict, same shape.
        :param tech_contact: contact dict, same shape.
        :param idn_lang_code: reserved for future use.
        :param auto_renew: renew automatically each year (default True).
        :param privacy_protect_admin_contact: hide the admin contact from
            WHOIS queries (default True).
        :param privacy_protect_registrant_contact: as above, registrant.
        :param privacy_protect_tech_contact: as above, tech contact.
        """
        params = {
            'DomainName': domain_name,
            'DurationInYears': duration_in_years,
            'AdminContact': admin_contact,
            'RegistrantContact': registrant_contact,
            'TechContact': tech_contact,
        }
        self._add_optional(params, [
            ('IdnLangCode', idn_lang_code),
            ('AutoRenew', auto_renew),
            ('PrivacyProtectAdminContact', privacy_protect_admin_contact),
            ('PrivacyProtectRegistrantContact',
             privacy_protect_registrant_contact),
            ('PrivacyProtectTechContact', privacy_protect_tech_contact),
        ])
        return self._post('RegisterDomain', params)

    def retrieve_domain_auth_code(self, domain_name):
        """
        Return the AuthCode for the domain, which a new registrar needs
        to accept a transfer.
        """
        return self._post('RetrieveDomainAuthCode',
                          {'DomainName': domain_name})

    def transfer_domain(self, domain_name, duration_in_years, nameservers,
                        admin_contact, registrant_contact, tech_contact,
                        idn_lang_code=None, auth_code=None, auto_renew=None,
                        privacy_protect_admin_contact=None,
                        privacy_protect_registrant_contact=None,
                        privacy_protect_tech_contact=None):
        """
        Transfer a domain from another registrar to Amazon Route 53.

        Name servers must be supplied, the transfer lock (if any) must be
        disabled first, and at least 60 days must have elapsed since
        registration or the last transfer.  Returns an operation id for
        progress tracking; charges the AWS account.

        :param nameservers: list of dicts with ``Name`` and ``GlueIps``.
        :param auth_code: authorization code obtained from the current
            registrar.
        (The remaining parameters mirror :meth:`register_domain`.)
        """
        params = {
            'DomainName': domain_name,
            'DurationInYears': duration_in_years,
            'Nameservers': nameservers,
            'AdminContact': admin_contact,
            'RegistrantContact': registrant_contact,
            'TechContact': tech_contact,
        }
        self._add_optional(params, [
            ('IdnLangCode', idn_lang_code),
            ('AuthCode', auth_code),
            ('AutoRenew', auto_renew),
            ('PrivacyProtectAdminContact', privacy_protect_admin_contact),
            ('PrivacyProtectRegistrantContact',
             privacy_protect_registrant_contact),
            ('PrivacyProtectTechContact', privacy_protect_tech_contact),
        ])
        return self._post('TransferDomain', params)

    def update_domain_contact(self, domain_name, admin_contact=None,
                              registrant_contact=None, tech_contact=None):
        """
        Update the domain's contact information; at least one contact
        dict must be supplied.  Returns an operation id for tracking.
        """
        params = {'DomainName': domain_name}
        self._add_optional(params, [
            ('AdminContact', admin_contact),
            ('RegistrantContact', registrant_contact),
            ('TechContact', tech_contact),
        ])
        return self._post('UpdateDomainContact', params)

    def update_domain_contact_privacy(self, domain_name, admin_privacy=None,
                                      registrant_privacy=None,
                                      tech_privacy=None):
        """
        Toggle WHOIS privacy protection per contact type; only the
        contact types passed as non-None are affected.  Returns an
        operation id for tracking.
        """
        params = {'DomainName': domain_name}
        self._add_optional(params, [
            ('AdminPrivacy', admin_privacy),
            ('RegistrantPrivacy', registrant_privacy),
            ('TechPrivacy', tech_privacy),
        ])
        return self._post('UpdateDomainContactPrivacy', params)

    def update_domain_nameservers(self, domain_name, nameservers):
        """
        Replace the domain's current name-server set.  Returns an
        operation id for tracking.

        :param nameservers: list of dicts with ``Name`` and ``GlueIps``.
        """
        return self._post('UpdateDomainNameservers', {
            'DomainName': domain_name,
            'Nameservers': nameservers,
        })

    def make_request(self, action, body):
        """
        POST *body* to the service endpoint and decode the JSON response.

        On a non-200 status the fault body's ``__type`` field selects one
        of the :attr:`_faults` exception classes (falling back to
        :attr:`ResponseError`), which is raised with the decoded body.
        """
        headers = {
            'X-Amz-Target': '%s.%s' % (self.TargetPrefix, action),
            'Host': self.region.endpoint,
            'Content-Type': 'application/x-amz-json-1.1',
            'Content-Length': str(len(body)),
        }
        http_request = self.build_base_http_request(
            method='POST', path='/', auth_path='/', params={},
            headers=headers, data=body)
        response = self._mexe(http_request, sender=None,
                              override_num_retries=10)
        response_body = response.read().decode('utf-8')
        boto.log.debug(response_body)
        if response.status == 200:
            if response_body:
                return json.loads(response_body)
        else:
            json_body = json.loads(response_body)
            fault_name = json_body.get('__type', None)
            exception_class = self._faults.get(fault_name, self.ResponseError)
            raise exception_class(response.status, response.reason,
                                  body=json_body)
| apache-2.0 |
joaoandrepsilva/dota2py | dota2py/api.py | 2 | 6146 | """
Tools for accessing the Dota 2 match history web API
"""
try:
from urllib import quote_plus
except ImportError:
from urllib.parse import quote_plus
import logging
import json
from functools import wraps
import os
# Steam web API key; read from the environment by default and
# overridable at runtime via set_api_key().
API_KEY = os.environ.get("DOTA2_API_KEY")

# Base endpoint for the Dota 2 match-history web API interface.
BASE_URL = "http://api.steampowered.com/IDOTA2Match_570/"

# Registry of the raw (undecorated) API functions, populated by
# json_request_response so alternative frontends can reuse them.
API_FUNCTIONS = {}

logger = logging.getLogger("dota2py")
def set_api_key(key):
    """
    Set the module-level API key used for all further API queries.
    """
    global API_KEY
    API_KEY = key
def url_map(base, params):
    """
    Return *base* with HTTP GET parameters appended.

    This is more forgiving than urllib.urlencode: non-string values are
    coerced with str() and strings are UTF-8 encoded before quoting.
    Entries whose value is None are skipped.

    :param base: base URL (may already end in '?' or '&')
    :param params: mapping of HTTP GET parameters, or None/empty for no query
    """
    url = base
    if not params:
        # No parameters: drop any dangling query separators and return.
        # (The original called rstrip() but discarded its result, and then
        # crashed on params.items() when params was None.)
        return str(url.rstrip("?&"))
    if '?' not in url:
        url += "?"
    entries = []
    for key, value in params.items():
        if value is not None:
            value = str(value)
            entries.append("%s=%s" % (quote_plus(key.encode("utf-8")),
                                      quote_plus(value.encode("utf-8"))))
    url += "&".join(entries)
    return str(url)
def get_page(url):
    """
    Fetch *url* over HTTP and return the ``requests`` response object.
    """
    # Imported lazily so the synchronous `requests` dependency is only
    # required when this default fetcher is actually used.
    import requests
    logger.debug('GET %s' % (url, ))
    return requests.get(url)
def make_request(name, params=None, version="V001", key=None, api_type="web",
                 fetcher=get_page, base=None, language="en_us"):
    """
    Build the URL for API method *name* and fetch it.

    :param name: API method name, e.g. "GetMatchHistory"
    :param params: optional dict of GET parameters (left unmodified)
    :param version: API version path component
    :param key: API key; falls back to the module-level API_KEY
    :param api_type: unused; kept for backward compatibility
    :param fetcher: callable applied to the final URL
    :param base: alternative base URL (defaults to BASE_URL)
    :param language: result language passed to the API
    :raises ValueError: if no API key is available
    """
    # Copy so the caller's dict is not polluted with key/language entries
    # (the previous implementation mutated `params` in place).
    params = dict(params) if params else {}
    params["key"] = key or API_KEY
    params["language"] = language

    if not params["key"]:
        raise ValueError("API key not set, please set DOTA2_API_KEY")

    url = url_map("%s%s/%s/" % (base or BASE_URL, name, version), params)
    return fetcher(url)
def json_request_response(f):
    """
    Decorator that checks the HTTP response produced by *f* and decodes
    its body as JSON.  The undecorated function is also registered in
    API_FUNCTIONS so the Twisted variant of this library can reuse it.
    """
    @wraps(f)
    def wrapper(*args, **kwargs):
        response = f(*args, **kwargs)
        response.raise_for_status()
        payload = response.content.decode('utf-8')
        return json.loads(payload)

    # Expose the raw function for alternative (non-blocking) frontends.
    API_FUNCTIONS[f.__name__] = f
    return wrapper
@json_request_response
def get_match_history(start_at_match_id=None, player_name=None, hero_id=None,
                      skill=0, date_min=None, date_max=None, account_id=None,
                      league_id=None, matches_requested=None, game_mode=None,
                      min_players=None, tournament_games_only=None,
                      **kwargs):
    """
    Return up to 25 of the most recent matches played before
    *start_at_match_id*, optionally filtered by the remaining criteria.
    """
    filters = dict(
        start_at_match_id=start_at_match_id,
        player_name=player_name,
        hero_id=hero_id,
        skill=skill,
        date_min=date_min,
        date_max=date_max,
        account_id=account_id,
        league_id=league_id,
        matches_requested=matches_requested,
        game_mode=game_mode,
        min_players=min_players,
        tournament_games_only=tournament_games_only,
    )
    return make_request("GetMatchHistory", filters, **kwargs)
@json_request_response
def get_match_history_by_sequence_num(start_at_match_seq_num,
                                      matches_requested=None, **kwargs):
    """
    Return the most recent matches ordered by match sequence number,
    starting at *start_at_match_seq_num*.
    """
    return make_request(
        "GetMatchHistoryBySequenceNum",
        {"start_at_match_seq_num": start_at_match_seq_num,
         "matches_requested": matches_requested},
        **kwargs)
@json_request_response
def get_match_details(match_id, **kwargs):
    """
    Fetch detailed information about the match with id *match_id*.
    """
    params = {"match_id": match_id}
    return make_request("GetMatchDetails", params, **kwargs)
@json_request_response
def get_steam_id(vanityurl, **kwargs):
    """
    Resolve a player's Steam name / vanity URL into their Steam id.
    """
    return make_request("ResolveVanityURL", {"vanityurl": vanityurl},
                        version="v0001",
                        base="http://api.steampowered.com/ISteamUser/",
                        **kwargs)
@json_request_response
def get_player_summaries(players, **kwargs):
    """
    Fetch Steam profiles for one Steam id (int) or several (list of ids).

    :raises ValueError: if *players* is neither a list nor an int
    """
    if isinstance(players, list):
        steam_ids = ','.join(str(p) for p in players)
    elif isinstance(players, int):
        steam_ids = players
    else:
        raise ValueError("The players input needs to be a list or int")
    return make_request("GetPlayerSummaries", {'steamids': steam_ids},
                        version="v0002",
                        base="http://api.steampowered.com/ISteamUser/",
                        **kwargs)
@json_request_response
def get_heroes(**kwargs):
    """
    Fetch the list of hero identifiers known to the API.
    """
    return make_request(
        "GetHeroes",
        base="http://api.steampowered.com/IEconDOTA2_570/",
        **kwargs)
def get_hero_image_url(hero_name, image_size="lg"):
    """
    Build the Steam CDN URL of a hero portrait.

    :param hero_name: hero name, with or without the "npc_dota_hero_" prefix
    :param image_size: one of 'eg', 'sb', 'lg', 'full', 'vert'
    :raises ValueError: for an unrecognised image size
    """
    prefix = "npc_dota_hero_"
    if hero_name.startswith(prefix):
        hero_name = hero_name[len(prefix):]
    if image_size not in ('eg', 'sb', 'lg', 'full', 'vert'):
        raise ValueError("Not a valid hero image size")
    return "http://media.steampowered.com/apps/dota2/images/heroes/{}_{}.png".format(
        hero_name, image_size)
def get_item_image_url(item_name, image_size="lg"):
    """
    Build the Steam CDN URL of an item image.

    Note: unlike get_hero_image_url, the size is not validated here.
    """
    return ("http://media.steampowered.com/apps/dota2/images/items/"
            "{}_{}.png".format(item_name, image_size))
@json_request_response
def get_live_league_games(**kwargs):
    """
    Fetch the league games currently being played live.
    """
    return make_request("GetLiveLeagueGames", **kwargs)
@json_request_response
def get_league_listing(**kwargs):
    """
    Fetch the list of leagues.
    """
    # NOTE(review): the action string uses a lowercase 'l'
    # ("GetLeaguelisting"); presumably this matches the actual
    # endpoint name -- verify before "fixing" the capitalization.
    return make_request("GetLeaguelisting", **kwargs)
@json_request_response
def get_scheduled_league_games(**kwargs):
    """Fetch the list of league games scheduled for the future."""
    response = make_request("GetScheduledLeagueGames", **kwargs)
    return response
| mit |
kod3r/shinysdr | shinysdr/plugins/hamlib/__init__.py | 4 | 23076 | # Copyright 2014 Kevin Reid <kpreid@switchb.org>
#
# This file is part of ShinySDR.
#
# ShinySDR is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ShinySDR is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ShinySDR. If not, see <http://www.gnu.org/licenses/>.
'''
Plugin for Hamlib hardware interfaces.
To use this plugin, add something like this to your config file:
import shinysdr.plugins.hamlib
config.devices.add('my-other-radio',
shinysdr.plugins.hamlib.connect_to_rig(config.reactor,
options=['-m', '<model ID>', '-r', '<device file name>']))
TODO explain how to link up with soundcard devices
'''
# pylint: disable=no-init, no-member, signature-differs, raising-bad-type
# (no-init: pylint confused by interfaces)
# (no-member: pylint confused by abstract non-methods)
# (signature-differs: twisted is inconsistent about connectionMade/connectionLost)
# (raising-bad-type: pylint static analysis failure)
from __future__ import absolute_import, division
import os.path
import re
import subprocess
import time
from zope.interface import implements, Interface
from twisted.internet import defer
from twisted.internet.error import ConnectionRefusedError
from twisted.internet.protocol import ClientFactory, Protocol
from twisted.internet.task import LoopingCall, deferLater
from twisted.protocols.basic import LineReceiver
from twisted.python import log
from twisted.web import static
from shinysdr.devices import Device
from shinysdr.top import IHasFrequency
from shinysdr.types import Enum, Notice, Range
from shinysdr.values import ExportedState, LooseCell, exported_value
from shinysdr.web import ClientResourceDef
__all__ = [] # appended later
class IProxy(Interface):
    '''
    Marker interface for hamlib proxies (rig, rotator).

    Carries no methods of its own; declared by _HamlibProxy below.
    '''
__all__.append('IProxy')
class IRig(IProxy):
    '''
    Hamlib rig proxy (anything interfaced by rigctld).

    Declared by _HamlibRig below.
    '''
__all__.append('IRig')
class IRotator(IProxy):
    '''
    Hamlib rotator proxy (anything interfaced by rotctld).

    Declared by _HamlibRotator below.
    '''
__all__.append('IRotator')
def _forkDeferred(d):
    # Produce a second Deferred that mirrors d's outcome without consuming
    # it, so an additional observer can watch the same event.
    mirror = defer.Deferred()
    def _on_success(value):
        mirror.callback(value)
        return value  # pass the result through unchanged
    def _on_failure(failure):
        mirror.errback(failure)
        # trap() with no error types matches nothing, so it always
        # re-raises: d itself stays failed.
        failure.trap()
    d.addCallbacks(_on_success, _on_failure)
    return mirror
# Hamlib RPRT error codes, as reported on the daemon's "RPRT <n>" status
# lines. Values are the negated rig_errcode_e enum from hamlib's rig.h.
# Fix: RIG_EPROTO was previously -7, duplicating RIG_EINTERNAL, which
# shifted every subsequent code by one; the constants from RIG_EPROTO
# onward now match hamlib, so the filtering in _clientError compares
# against the codes rigctld/rotctld actually send.
RIG_OK = 0
RIG_EINVAL = -1      # invalid parameter
RIG_ECONF = -2       # invalid configuration
RIG_ENOMEM = -3      # memory shortage
RIG_ENIMPL = -4      # function not implemented
RIG_ETIMEOUT = -5    # communication timed out
RIG_EIO = -6         # I/O error, including open failed
RIG_EINTERNAL = -7   # internal hamlib error
RIG_EPROTO = -8      # protocol error (was -7, duplicating RIG_EINTERNAL)
RIG_ERJCTED = -9     # command rejected by the rig
RIG_ETRUNC = -10     # command performed, but arg truncated
RIG_ENAVAIL = -11    # function not available
RIG_ENTARGET = -12   # VFO not targetable
RIG_BUSERROR = -13   # error talking on the bus
RIG_BUSBUSY = -14    # collision on the bus
RIG_EARG = -15       # invalid pointer/handle parameter
RIG_EVFO = -16       # invalid VFO
RIG_EDOM = -17       # argument out of domain of function
# Modes rigctld may report/accept for get_mode/set_mode.
_modes = Enum({x: x for x in ['USB', 'LSB', 'CW', 'CWR', 'RTTY', 'RTTYR', 'AM', 'FM', 'WFM', 'AMS', 'PKTLSB', 'PKTUSB', 'PKTFM', 'ECSSUSB', 'ECSSLSB', 'FAX', 'SAM', 'SAL', 'SAH', 'DSB']})
# VFO identifiers: key is the wire value, value the display label.
_vfos = Enum({'VFOA': 'VFO A', 'VFOB': 'VFO B', 'VFOC': 'VFO C', 'currVFO': 'currVFO', 'VFO': 'VFO', 'MEM': 'MEM', 'Main': 'Main', 'Sub': 'Sub', 'TX': 'TX', 'RX': 'RX'})
# Placeholder range for passband values; no bounds are available here —
# presumably widened by range info elsewhere. TODO confirm.
_passbands = Range([(0, 0)])
# Maps capability names as spelled in dump_caps output to the field names
# used by this module (consumed in _HamlibProxy._clientReceived).
_cap_remap = {
    # TODO: Make this well-founded
    'Ant': ['Antenna'],
    'CTCSS Squelch': ['CTCSS Sql'],
    'CTCSS': ['CTCSS Tone'],
    'DCS Squelch': ['DCS Sql'],
    'DCS': ['DCS Code'],
    'Mode': ['Mode', 'Passband'],
    'Repeater Offset': ['Rptr Offset', 'Rptr Shift'],
    'Split Freq': ['TX Frequency'],
    'Split Mode': ['TX Mode', 'TX Passband'],
    'Split VFO': ['Split', 'TX VFO'],
    'Position': ['Azimuth', 'Elevation'],
}
@defer.inlineCallbacks
def connect_to_rigctld(reactor, host='localhost', port=4532):
    '''
    Connect to an already-running rigctld daemon and wrap it as a Device.
    '''
    rig = yield _connect_to_daemon(
        reactor=reactor, host=host, port=port,
        server_name='rigctld', proxy_ctor=_HamlibRig)
    device = Device(
        vfo_cell=rig.state()['freq'],
        components={'rig': rig})
    defer.returnValue(device)
__all__.append('connect_to_rigctld')
@defer.inlineCallbacks
def connect_to_rotctld(reactor, host='localhost', port=4533):
    '''
    Connect to an already-running rotctld daemon and wrap it as a Device.
    '''
    rotator = yield _connect_to_daemon(
        reactor=reactor, host=host, port=port,
        server_name='rotctld', proxy_ctor=_HamlibRotator)
    defer.returnValue(Device(components={'rotator': rotator}))
__all__.append('connect_to_rotctld')
@defer.inlineCallbacks
def _connect_to_daemon(reactor, host, port, server_name, proxy_ctor):
    # Open the TCP connection, wrap the resulting protocol in a proxy, and
    # wait for the proxy's initial dump_caps round trip before returning.
    connected = defer.Deferred()
    factory = _HamlibClientFactory(server_name, connected)
    reactor.connectTCP(host, port, factory)
    protocol = yield connected
    proxy = proxy_ctor(protocol)
    yield proxy._ready_deferred
    defer.returnValue(proxy)
def connect_to_rig(reactor, options=None, port=4532):
    '''
    Launch a rigctld process and connect to it.

    options: list of rigctld options, e.g. ['-m', '123', '-r', '/dev/ttyUSB0'].
        Do not specify host or port in the options.
    port: A free port number to use.
    '''
    return _connect_to_device(reactor, options, port,
                              'rigctld', connect_to_rigctld)
__all__.append('connect_to_rig')
def connect_to_rotator(reactor, options=None, port=4533):
    '''
    Launch a rotctld process and connect to it.

    options: list of rotctld options, e.g. ['-m', '1102', '-r', '/dev/ttyUSB0'].
        Do not specify host or port in the options.
    port: A free port number to use.
    '''
    return _connect_to_device(reactor, options, port,
                              'rotctld', connect_to_rotctld)
__all__.append('connect_to_rotator')
@defer.inlineCallbacks
def _connect_to_device(reactor, options, port, daemon, connect_func):
    '''
    Launch a hamlib daemon (rigctld or rotctld) as a subprocess and connect
    to it via connect_func, retrying with exponential backoff while the
    daemon starts up. The daemon process is killed when the returned
    proxy is closed.
    '''
    if options is None:
        options = []
    host = '127.0.0.1'
    # We use rigctld instead of rigctl, because rigctl will only execute one command at a time and does not have the better-structured response formats.
    # If it were possible, we'd rather connect to rigctld over a pipe or unix-domain socket to avoid port allocation issues.
    # Make sure that there isn't (as best we can check) something using the port already.
    fake_connected = defer.Deferred()
    reactor.connectTCP(host, port, _HamlibClientFactory('(probe) %s' % (daemon,), fake_connected))
    try:
        yield fake_connected
        # Probe connection succeeded, so the port is occupied.
        raise Exception('Something is already using port %i!' % port)
    except ConnectionRefusedError:
        pass
    process = subprocess.Popen(
        args=['/usr/bin/env', daemon, '-T', host, '-t', str(port)] + options,
        stdin=None,
        stdout=None,
        stderr=None,
        close_fds=True)
    # Retry connecting with exponential backoff, because the daemon process won't tell us when it's started listening.
    proxy_device = None
    refused = None
    for i in xrange(0, 5):
        try:
            proxy_device = yield connect_func(
                reactor=reactor,
                host=host,
                port=port)
            break
        except ConnectionRefusedError, e:
            refused = e
            # Delays: 0.1s, 0.2s, 0.4s, 0.8s, 1.6s.
            yield deferLater(reactor, 0.1 * (2 ** i), lambda: None)
    else:
        # Every attempt was refused; propagate the last failure.
        raise refused
    # TODO: Sometimes we fail to kill the process because there was a protocol error during the connection stages. Refactor so that doesn't happen.
    proxy = proxy_device.get_components().values()[0]
    proxy.when_closed().addCallback(lambda _: process.kill())
    defer.returnValue(proxy_device)
class _HamlibProxy(ExportedState):
    '''
    Abstract class for objects which export state proxied to a hamlib daemon.

    Subclasses supply the class attributes _server_name, _dummy_command,
    _info (exported field name -> value type) and _commands (daemon command
    name -> list of field names it carries), plus poll_fast/poll_slow.
    '''
    implements(IProxy)
    def __init__(self, protocol):
        # info from hamlib
        self.__cache = {}    # last known value (as a string) for each field
        self.__caps = {}     # capability table parsed from dump_caps
        self.__levels = []   # level names dump_caps says can be polled
        # invert command table
        # TODO: we only need to do this once per class, really
        self._how_to_command = {key: command
                                for command, keys in self._commands.iteritems()
                                for key in keys}
        # keys are same as __cache, values are functions to call with new values from rig
        self._cell_updaters = {}
        self.__communication_error = False
        self.__last_error = (-1e9, '', 0)  # (timestamp, command, RPRT code)
        self.__protocol = protocol
        self.__disconnect_deferred = defer.Deferred()
        protocol._set_proxy(self)
        # TODO: If hamlib backend supports "transceive mode", use it in lieu of polling
        self.__poller_slow = LoopingCall(self.__poll_slow)
        self.__poller_fast = LoopingCall(self.__poll_fast)
        self.__poller_slow.start(2.0)
        self.__poller_fast.start(0.2)
        # Ready once the initial dump_caps round trip completes.
        self._ready_deferred = protocol.rc_send('dump_caps')
    def sync(self):
        # TODO: Replace 'sync' with more specifically meaningful operations
        d = self.__protocol.rc_send(self._dummy_command)
        d.addCallback(lambda _: None)  # ignore result
        return d
    def close(self):
        # Drop the TCP connection; the returned Deferred fires once the
        # connection is actually gone.
        self.__protocol.transport.loseConnection()
        return self.when_closed()
    def when_closed(self):
        return _forkDeferred(self.__disconnect_deferred)
    def _ehs_get(self, name_in_cmd):
        # Cached-value lookup; 0.0 stands in for fields never yet reported.
        if name_in_cmd in self.__cache:
            return self.__cache[name_in_cmd]
        else:
            return 0.0
    def _clientReceived(self, command, key, value):
        # Called by the protocol for each "Key: value" response line.
        self.__communication_error = False
        if command == 'dump_caps':
            def write(key):
                self.__caps[key] = value
            if key == 'Get level':
                # add to polling info
                for info in value.strip().split(' '):
                    match = re.match(r'^(\w+)\([^()]+\)$', info)
                    # part in parens is probably min/max/step info, but we don't have any working examples to test against (they are all 0)
                    if match:
                        self.__levels.append(match.group(1))
                    else:
                        log.err('Unrecognized level description from %s: %r' % (self._server_name, info))
            # remove irregularity
            keymatch = re.match(r'(Can [gs]et )([\w\s,/-]+)', key)
            if keymatch and keymatch.group(2) in _cap_remap:
                for mapped in _cap_remap[keymatch.group(2)]:
                    write(keymatch.group(1) + mapped)
            else:
                write(key)
        else:
            self.__update_cache_and_cells(key, value)
    def _clientReceivedLevel(self, level_name, value_str):
        self.__update_cache_and_cells(level_name + ' level', value_str)
    def _clientError(self, cmd, error_number):
        # Record a nonzero RPRT code; routine polling failures are filtered.
        if cmd.startswith('get_'):
            # these getter failures are boring, probably us polling something not implemented
            if error_number == RIG_ENIMPL or error_number == RIG_ENTARGET or error_number == RIG_BUSERROR:
                return
            elif error_number == RIG_ETIMEOUT:
                self.__communication_error = True
                return
        self.__last_error = (time.time(), cmd, error_number)
    def __update_cache_and_cells(self, key, value):
        self.__cache[key] = value
        if key in self._cell_updaters:
            self._cell_updaters[key](value)
    def _clientConnectionLost(self, reason):
        self.__poller_slow.stop()
        self.__poller_fast.stop()
        self.__disconnect_deferred.callback(None)
    def _ehs_set(self, name_full, value):
        # Push a changed field to the daemon; value must already be a string.
        if not isinstance(value, str):
            raise TypeError()
        name_in_cmd = self._how_to_command[name_full]  # raises if cannot set
        if value != self.__cache[name_full]:
            self.__cache[name_full] = value
            # A set_ command takes every field of the command, so the cached
            # values of the untouched fields are sent along with the new one.
            self.__protocol.rc_send(
                'set_' + name_in_cmd,
                ' '.join(self.__cache[arg_name] for arg_name in self._commands[name_in_cmd]))
    def state_def(self, callback):
        # Export a cell for each gettable field and each pollable level.
        super(_HamlibProxy, self).state_def(callback)
        for name in self._info:
            can_get = self.__caps.get('Can get ' + name)
            if can_get is None:
                log.msg('No can-get information for ' + name)
            if can_get != 'Y':
                # TODO: Handle 'E' condition
                continue
            writable = name in self._how_to_command and self.__caps.get('Can set ' + name) == 'Y'
            _install_cell(self, name, False, writable, callback, self.__caps)
        for level_name in self.__levels:
            # TODO support writable levels
            _install_cell(self, level_name + ' level', True, False, callback, self.__caps)
    def __poll_fast(self):
        # TODO: Stop if we're getting behind
        p = self.__protocol
        self.poll_fast(p.rc_send)
        for level_name in self.__levels:
            p.rc_send('get_level', level_name)
    def __poll_slow(self):
        # TODO: Stop if we're getting behind
        p = self.__protocol
        self.poll_slow(p.rc_send)
    @exported_value(type=Notice(always_visible=False))
    def get_errors(self):
        # Human-readable error state; recorded errors expire after 10 seconds.
        if self.__communication_error:
            return 'Rig not responding.'
        else:
            (error_time, cmd, error_number) = self.__last_error
            if error_time > time.time() - 10:
                return u'%s: %s' % (cmd, error_number)
            else:
                return u''
def _install_cell(self, name, is_level, writable, callback, caps):
    # this is a function for the sake of the closure variables
    #
    # Build a LooseCell exposing one hamlib field (or level) of the proxy
    # `self`, register its updater in self._cell_updaters, and hand the
    # cell to the state_def callback.
    if name == 'Frequency':
        cell_name = 'freq'  # consistency with our naming scheme elsewhere, also IHasFrequency
    else:
        cell_name = name
    if is_level:
        # TODO: Use range info from hamlib if available
        if name == 'STRENGTH level':
            vtype = Range([(-54, 50)], strict=False)
        elif name == 'SWR level':
            vtype = Range([(1, 30)], strict=False)
        elif name == 'RFPOWER level':
            vtype = Range([(0, 100)], strict=False)
        else:
            vtype = Range([(-10, 10)], strict=False)
    elif name == 'Mode' or name == 'TX Mode':
        # kludge: take the value list from the rig's own capabilities
        vtype = Enum({x: x for x in caps['Mode list'].strip().split(' ')})
    elif name == 'VFO' or name == 'TX VFO':
        vtype = Enum({x: x for x in caps['VFO list'].strip().split(' ')})
    else:
        vtype = self._info[name]
    def updater(strval):
        # Convert the daemon's string value and store it in the cell.
        if vtype is bool:
            value = bool(int(strval))
        else:
            value = vtype(strval)
        cell.set_internal(value)
    def actually_write_value(value):
        # Stringify the client's value and push it to the daemon.
        if vtype is bool:
            self._ehs_set(name, str(int(value)))
        else:
            self._ehs_set(name, str(vtype(value)))
    cell = LooseCell(key=cell_name, value='placeholder', type=vtype, writable=writable, persists=False, post_hook=actually_write_value)
    self._cell_updaters[name] = updater
    updater(self._ehs_get(name))
    callback(cell)
class _HamlibRig(_HamlibProxy):
    '''
    Proxy for a rig (radio) interfaced through rigctld.
    '''
    implements(IRig, IHasFrequency)
    _server_name = 'rigctld'
    _dummy_command = 'get_freq'
    # Exported field name -> value type (Mode/VFO types may be overridden
    # from the rig's own capability lists in _install_cell).
    _info = {
        'Frequency': (Range([(0, 9999999999)], integer=True)),
        'Mode': (_modes),
        'Passband': (_passbands),
        'VFO': (_vfos),
        'RIT': (int),
        'XIT': (int),
        'PTT': (bool),
        'DCD': (bool),
        'Rptr Shift': (Enum({'+': '+', '-': '-'})),
        'Rptr Offset': (int),
        'CTCSS Tone': (int),
        'DCS Code': (str),
        'CTCSS Sql': (int),
        'DCS Sql': (str),
        'TX Frequency': (int),
        'TX Mode': (_modes),
        'TX Passband': (_passbands),
        'Split': (bool),
        'TX VFO': (_vfos),
        'Tuning Step': (int),
        'Antenna': (int),
    }
    # rigctld command -> the fields it carries; when writing, _ehs_set
    # joins the field values in exactly this order.
    _commands = {
        'freq': ['Frequency'],
        'mode': ['Mode', 'Passband'],
        'vfo': ['VFO'],
        'rit': ['RIT'],
        'xit': ['XIT'],
        # 'ptt': ['PTT'],  # writing disabled until when we're more confident in correct functioning
        'rptr_shift': ['Rptr Shift'],
        'rptr_offs': ['Rptr Offset'],
        'ctcss_tone': ['CTCSS Tone'],
        'dcs_code': ['DCS Code'],
        'ctcss_sql': ['CTCSS Sql'],
        'dcs_sql': ['DCS Sql'],
        'split_freq': ['TX Frequency'],
        'split_mode': ['TX Mode', 'TX Passband'],
        'split_vfo': ['Split', 'TX VFO'],
        'ts': ['Tuning Step'],
        # TODO: describe func, level, parm
        'ant': ['Antenna'],
        'powerstat': ['Power Stat'],
    }
    def poll_fast(self, send):
        # likely to be set by hw controls
        send('get_freq')
        send('get_mode')
        # received signal info
        send('get_dcd')
    def poll_slow(self, send):
        send('get_vfo')
        send('get_rit')
        send('get_xit')
        send('get_ptt')
        send('get_rptr_shift')
        send('get_rptr_offs')
        send('get_ctcss_tone')
        send('get_dcs_code')
        send('get_split_freq')
        send('get_split_mode')
        send('get_split_vfo')
        send('get_ts')
class _HamlibRotator(_HamlibProxy):
    '''
    Proxy for an antenna rotator interfaced through rotctld.
    '''
    implements(IRotator)
    _server_name = 'rotctld'
    _dummy_command = 'get_pos'
    # TODO: support imperative commands:
    # move
    # stop
    # park
    # reset
    _info = {
        # TODO: Get ranges from dump_caps
        'Azimuth': (Range([(-180, 180)])),
        'Elevation': (Range([(0, 90)])),
    }
    _commands = {
        'pos': ['Azimuth', 'Elevation'],
    }
    def poll_fast(self, send):
        # Position changes continuously while the rotator is moving.
        send('get_pos')
    def poll_slow(self, send):
        pass
class _HamlibClientFactory(ClientFactory):
    '''
    Builds a _HamlibClientProtocol and reports connection failures through
    the supplied Deferred.
    '''
    def __init__(self, server_name, connected_deferred):
        self.__server_name = server_name
        self.__connected_deferred = connected_deferred
    def buildProtocol(self, addr):
        return _HamlibClientProtocol(self.__server_name,
                                     self.__connected_deferred)
    def clientConnectionFailed(self, connector, reason):
        self.__connected_deferred.errback(reason)
class _HamlibClientProtocol(Protocol):
    '''
    Client for the rigctld/rotctld text protocol.

    Commands are written with a '+\\' prefix requesting the "extended
    response" format: a '<command>:' header line, zero or more
    '<Key>: <value>' lines, and a terminating 'RPRT <code>' status line.
    Parsed data is forwarded to the attached _HamlibProxy.
    '''
    def __init__(self, server_name, connected_deferred):
        self.__proxy_obj = None
        self.__server_name = server_name
        self.__connected_deferred = connected_deferred
        # LineReceiver is used here only as a line-splitting buffer; we feed
        # it manually from dataReceived.
        self.__line_receiver = LineReceiver()
        self.__line_receiver.delimiter = '\n'
        self.__line_receiver.lineReceived = self.__lineReceived
        # FIFO of (command_name, Deferred) awaiting an RPRT status line.
        self.__waiting_for_responses = []
        # Command (and its echoed argument) whose response block is being
        # parsed; None between response blocks.
        self.__receive_cmd = None
        self.__receive_arg = None
    def connectionMade(self):
        self.__connected_deferred.callback(self)
    def connectionLost(self, reason):
        if self.__proxy_obj is not None:
            self.__proxy_obj._clientConnectionLost(reason)
    def dataReceived(self, data):
        self.__line_receiver.dataReceived(data)
    def __lineReceived(self, line):
        # State machine over response lines; see class docstring for format.
        if self.__receive_cmd is None:
            match = re.match(r'^(\w+):\s*(.*)$', line)
            if match is not None:
                # command response starting line
                self.__receive_cmd = match.group(1)
                self.__receive_arg = match.group(2)
                return
            log.err('%s client: Unrecognized line (no command active): %r' % (self.__server_name, line))
        else:
            match = re.match(r'^RPRT (-?\d+)$', line)
            if match is not None:
                # command response ending line
                return_code = int(match.group(1))
                waiting = self.__waiting_for_responses
                i = 0
                for i, (wait_cmd, wait_deferred) in enumerate(waiting):
                    if self.__receive_cmd != wait_cmd:
                        # NOTE(review): mismatched waiters are dropped here
                        # without firing their Deferreds; their rc_send
                        # callers never hear back. Confirm before relying
                        # on rc_send results for every command.
                        log.err("%s client: Didn't get a response for command %r before receiving one for command %r" % (self.__server_name, wait_cmd, self.__receive_cmd))
                    else:
                        # TODO: Consider 'parsing' return code more here.
                        if return_code != 0:
                            self.__proxy_obj._clientError(self.__receive_cmd, return_code)
                        wait_deferred.callback(return_code)
                        break
                self.__waiting_for_responses = waiting[i + 1:]
                self.__receive_cmd = None
                self.__receive_arg = None
                return
            if self.__receive_cmd == 'get_level':
                # Should be a bare numeric level value
                match = re.match(r'^-?\d+\.?\d*$', line)
                if match:
                    self.__proxy_obj._clientReceivedLevel(self.__receive_arg, line)
                    return
            match = re.match(r'^([\w ,/-]+):\s*(.*)$', line)
            if match is not None:
                # Command response "Key: value" line
                if self.__proxy_obj is not None:
                    self.__proxy_obj._clientReceived(self.__receive_cmd, match.group(1), match.group(2))
                return
            match = re.match(r'^\t', line)
            if match is not None and self.__receive_cmd == 'dump_caps':
                # Sub-info from dump_caps, not currently used
                return
            match = re.match(r'^Warning--', line)
            if match is not None:
                # Warning from dump_caps, not currently used
                return
            match = re.match(r'^$', line)
            if match is not None:
                return
            log.err('%s client: Unrecognized line during %s: %r' % (self.__server_name, self.__receive_cmd, line))
    def _set_proxy(self, proxy):
        self.__proxy_obj = proxy
    def rc_send(self, cmd, argstr=''):
        '''
        Send a command with optional arguments; returns a Deferred firing
        with the command's RPRT status code.
        '''
        if not re.match(r'^\w+$', cmd):  # no spaces (stuffing args in), no newlines (breaking the command)
            raise ValueError('Syntactically invalid command name %r' % (cmd,))
        if not re.match(r'^[^\r\n]*$', argstr):  # no newlines
            # Fix: report the offending argument string; this message
            # previously interpolated cmd, hiding the actual bad input.
            raise ValueError('Syntactically invalid arguments string %r' % (argstr,))
        self.transport.write('+\\' + cmd + ' ' + argstr + '\n')
        d = defer.Deferred()
        self.__waiting_for_responses.append((cmd, d))
        return d
# Client-side resources (the 'client' directory next to this module),
# exposed to the web UI with hamlib.js as the entry script.
_plugin_client = ClientResourceDef(
    key=__name__,
    resource=static.File(os.path.join(os.path.split(__file__)[0], 'client')),
    load_js_path='hamlib.js')
| gpl-3.0 |
nicholasamorim/fleepy | fleepy/_conversation.py | 1 | 15759 | # -*- coding: utf-8 -*-
class Conversation(object):
    """Conversation management for the Fleep API.

    https://fleep.io/fleepapi/ref-conversation.html

    All methods build a request path plus payload and delegate the HTTP
    round trip to the server object supplied at construction time.
    """

    def __init__(self, server, handler='conversation'):
        # server: object exposing HTTP verb methods (at minimum `post`).
        self._server = server
        self._handler = handler

    def call(self, *args, **kwargs):
        """Dispatch a request through the server object.

        If a 'method' keyword is present it names the HTTP verb method on
        the server to use; otherwise the server's `post` method is used.
        All remaining arguments are forwarded unchanged.
        """
        if 'method' in kwargs:
            requester_func = getattr(self._server, kwargs.pop('method'))
        else:
            requester_func = self._server.post
        return requester_func(*args, **kwargs)

    def add_members(self, emails, from_message_nr=None):
        """Add members to the conversation.

        :param emails: A list of emails.
        :param from_message_nr: Used to return next batch of changes.
        """
        # NOTE(review): the documented endpoint is
        # conversation/add_members/<conversation_id>, but this method has no
        # conversation_id parameter — confirm against the Fleep API docs.
        emails = '; '.join(emails)
        return self.call(
            'conversation/add_members',
            {'emails': emails, 'from_message_nr': from_message_nr})

    def autojoin(self, conv_url_key):
        """Autojoin conversation if not member yet.

        Autojoin url has the format https://fleep.io/chat/<conv_url_key>

        :param conv_url_key: Last part of autojoin url or conversation_id
        """
        return self.call(
            'conversation/autojoin', {'conv_url_key': conv_url_key})

    def check_permissions(self, conversation_id):
        """Check if account has modification rights on the conversation.

        :param conversation_id: The conversation id.
        """
        return self.call(
            'conversation/check_permissions/{}'.format(conversation_id))

    def create(self, topic=None, emails=None, message=None,
               attachments=None, is_invite=None):
        """Create a new conversation.

        :param topic: Conversation topic. Optional.
        :param emails: A list of emails. Optional.
        :param message: Initial message. Optional.
        :param attachments: list of AttachmentInfos to be added to
            the conversation. Optional.
        :param is_invite: Send out invite emails to fresh fleepers.
        """
        # Fix: only join when emails were supplied; '; '.join(None) raised
        # TypeError whenever the parameter was omitted.
        if emails is not None:
            emails = '; '.join(emails)
        return self.call(
            'conversation/create',
            {'topic': topic, 'emails': emails, 'message': message,
             'attachments': attachments, 'is_invite': is_invite})

    def create_hangout(self, conversation_id):
        """Create a new hangout for the conversation."""
        return self.call(
            'conversation/create_hangout/{}'.format(conversation_id))

    def delete(self, conversation_id):
        """Remove conversation from your conversation list.

        If you don't leave the conversation before deleting, it will still
        reappear when someone writes in it.
        """
        return self.call(
            'conversation/delete/{}'.format(conversation_id))

    def _disclose(self, endpoint, conversation_id, emails, **kwargs):
        """Shared implementation of disclose/disclose_all.

        Fix: the URL placeholders were never filled in (the literal
        'conversation/{}/{}' was posted) and the payload dict was dropped.
        """
        kwargs['emails'] = '; '.join(emails)
        return self.call(
            'conversation/{}/{}'.format(endpoint, conversation_id), kwargs)

    def disclose(self, conversation_id, emails,
                 message_nr=None, from_message_nr=None):
        """Disclose conversation history to members until given message.

        :param conversation_id: The conversation id.
        :param emails: A list of emails.
        :param message_nr: disclose up to this message
        :param from_message_nr: used to return next batch of changes
        """
        # Fix: pass the optional fields as keyword arguments; previously a
        # dict was passed as an extra positional argument, raising TypeError.
        return self._disclose(
            'disclose', conversation_id, emails,
            message_nr=message_nr, from_message_nr=from_message_nr)

    def disclose_all(self, conversation_id, emails, from_message_nr=None):
        """Disclose conversation history to members.

        All content of the last membership is disclosed.
        """
        return self._disclose(
            'disclose_all', conversation_id, emails,
            from_message_nr=from_message_nr)

    def hide(self, conversation_id, from_message_nr=None):
        """Hide conversation until new messages arrive.

        Useful for people who want to keep their inbox clear.
        """
        return self.call('conversation/hide/{}'.format(
            conversation_id), {'from_message_nr': from_message_nr})

    def label(self, label, sync_horizon=None):
        """List all conversations carrying the given label."""
        return self.call(
            'conversation/label',
            {'label': label, 'sync_horizon': sync_horizon})

    def leave(self, conversation_id, from_message_nr=None):
        """Leave the conversation."""
        return self.call('conversation/leave/{}'.format(
            conversation_id), {'from_message_nr': from_message_nr})

    def list(self, sync_horizon=None):
        """List all conversations for this account.

        The same conversation may pop up several times due to shifting
        order caused by incoming messages. Stop calling when you receive
        an empty conversation list.
        """
        return self.call(
            'conversation/list', {'sync_horizon': sync_horizon})

    def mark_read(self, conversation_id, mk_init_mode='ic_tiny'):
        """Mark conversation as read regardless of unread message count.

        Returns an init conversation stream so the client-side conversation
        is reset to the new read position and skipped messages are not
        misinterpreted.

        :param mk_init_mode: ic_tiny or ic_full. Defaults to ic_tiny.
        """
        return self.call(
            'conversation/mark_read/{}'.format(conversation_id),
            {'mk_init_mode': mk_init_mode})

    def poke(self, conversation_id, message_nr, from_message_nr, is_bg_poke):
        """Send a poke event, used for testing sync between clients."""
        data = {
            'message_nr': message_nr,
            'from_message_nr': from_message_nr,
            'is_bg_poke': is_bg_poke}
        return self.call(
            'conversation/poke/{}'.format(conversation_id), data)

    def remove_members(self, conversation_id, emails, from_message_nr=None):
        """Remove members from the conversation.

        :param conversation_id: The conversation to remove members from.
        :param emails: A list of emails.
        :param from_message_nr: Used to return next batch of changes.
        """
        emails = '; '.join(emails)
        return self.call(
            'conversation/remove_members/{}'.format(conversation_id),
            {'emails': emails, 'from_message_nr': from_message_nr})

    def set_alerts(self, conversation_id, mk_alert_level,
                   from_message_nr=None):
        """Set conversation alert level ('default' or 'never')."""
        if mk_alert_level not in ('default', 'never'):
            raise ValueError('alert level should be default or never')
        data = {
            'mk_alert_level': mk_alert_level,
            'from_message_nr': from_message_nr}
        return self.call(
            'conversation/set_alerts/{}'.format(conversation_id), data)

    def set_topic(self, conversation_id, topic, from_message_nr=None):
        """Change the conversation topic."""
        data = {'topic': topic, 'from_message_nr': from_message_nr}
        return self.call(
            'conversation/set_topic/{}'.format(conversation_id), data)

    def show_activity(self, conversation_id, is_writing, message_nr=None):
        """Show writing pen and/or pinboard editing status.

        This works both ways: any call activates this conversation for
        this account, so to start receiving activity call with empty
        parameters.
        """
        # Fix: was posting to the set_topic endpoint (copy-paste error),
        # which would have changed the conversation topic instead.
        return self.call(
            'conversation/show_activity/{}'.format(conversation_id),
            {'is_writing': is_writing, 'message_nr': message_nr})

    def slash_command(self, conversation_id, message, from_message_nr=None):
        """Execute a slash command in the conversation."""
        return self.call(
            'conversation/slash_command/{}'.format(conversation_id),
            {'message': message, 'from_message_nr': from_message_nr})

    def store(self, conversation_id, **kwargs):
        """Store conversation header fields.

        Store only fields that have changed. Call only when the cache is
        fully synced.

        :param conversation_id: The conversation id.

        Params allowed in kwargs:

        :param read_message_nr: New read horizon for conversation
        :param labels: User labels for conversation
        :param topic: Shared topic for conversation
        :param mk_alert_level: User alert level for the conversation
        :param snooze_interval: For how long to snooze conversation in seconds
        :param add_emails: emails of members to be added
        :param remove_emails: emails of members to be removed
        :param disclose_emails: disclose conversation to these users
        :param add_ids: add emails (as given, no mapping)
        :param remove_ids: remove emails (as given, no mapping)
        :param disclose_ids: disclose chat to given accounts
        :param hide_message_nr: Hide the conversation from this message nr
        :param is_deleted: Set to true to delete the conversation
        :param from_message_nr: used to return next batch of changes
        :param is_autojoin: enable/disable auto join
        :param is_disclose: enable/disable auto disclose
        :param can_post: set to false to leave the conversation
        :param is_url_preview_disabled: don't show url previews for all users
        """
        allowed_parameters = (
            'read_message_nr', 'labels', 'topic', 'mk_alert_level',
            'snooze_interval', 'add_emails', 'remove_emails',
            'disclose_emails', 'add_ids', 'remove_ids', 'disclose_ids',
            'hide_message_nr', 'is_deleted', 'from_message_nr', 'is_autojoin',
            'is_disclose', 'can_post', 'is_url_preview_disabled')
        for key in kwargs:
            if key not in allowed_parameters:
                # Fix: corrected "paramater" typo in the error message.
                raise ValueError('{} is not a valid parameter'.format(key))
        return self.call(
            'conversation/store/{}'.format(conversation_id), kwargs)

    def sync(self, conversation_id, from_message_nr=None, mk_direction=None):
        """Sync state for a single conversation.

        With default values, 5 messages before and after the last reported
        read_message_nr are returned, along with all conversation state
        (PinInfo, memberInfo). Optional fields are returned in full on the
        first sync; afterwards only when they have changed.

        :param from_message_nr: last message nr the client has received.
        :param mk_direction: one of
            ic_tiny     - minimal init: only inbox message and header
            ic_full     - full init: inbox message plus messages/pins/files
                          around the current read horizon
            ic_flow     - flow fragment around from_message_nr (or the read
                          horizon when not given)
            ic_end      - flow fragment from the end of available content
            ic_backward - flow fragment before the given message
            ic_forward  - flow fragment after the given message (visible
                          messages only; not suitable for syncing a cached
                          conversation, as edits will be lost)
            ic_files    - only messages with files
            ic_pinboard - only shared (pinned) messages
            ic_tasks    - only archived task messages
            None        - sequential messages forward, including non-visible
                          ones such as edits of older messages
        """
        return self.call(
            'conversation/sync/{}'.format(conversation_id),
            {'from_message_nr': from_message_nr, 'mk_direction': mk_direction})

    def _sync(self, sync_type, conversation_id, from_message_nr=None):
        # Shared helper for the sync_* convenience methods below.
        return self.call(
            'conversation/sync_{}/{}'.format(sync_type, conversation_id),
            {'from_message_nr': from_message_nr})

    def sync_backward(self, conversation_id, from_message_nr=None):
        """Fetch older messages, for backward scroll."""
        return self._sync('backward', conversation_id, from_message_nr)

    def sync_files(self, conversation_id, from_message_nr=None):
        """Fetch earlier files if the user wants to browse them."""
        return self._sync('files', conversation_id, from_message_nr)

    def sync_pins(self, conversation_id, from_message_nr=None):
        """Sync the pinboard where it was not fully sent with init."""
        return self._sync('pins', conversation_id, from_message_nr)

    def unhide(self, conversation_id, from_message_nr=None):
        """Bring the conversation out of hiding."""
        return self.call(
            'conversation/unhide/{}'.format(conversation_id),
            {'from_message_nr': from_message_nr})

    def configure_hook(self, conversation_id, hook_key,
                       hook_name=None, from_message_nr=None):
        """Change hook name and/or other settings."""
        return self.call(
            'conversation/configure_hook/{}'.format(conversation_id),
            {'hook_key': hook_key,
             'hook_name': hook_name,
             'from_message_nr': from_message_nr})

    def create_hook(self, conversation_id, hook_name=None,
                    mk_hook_type='plain', from_message_nr=None):
        """Create a hook for the given conversation.

        :param hook_name: Name for the hook.
        :param mk_hook_type: plain, jira, github, import, pivotaltracker,
            newrelic, bitbucket, zapier, confluence, gitlab, sameroom.
        :param from_message_nr: Used to return next batch of messages.
        """
        return self.call(
            'conversation/create_hook/{}'.format(conversation_id),
            {'hook_name': hook_name,
             'mk_hook_type': mk_hook_type,
             'from_message_nr': from_message_nr})

    def drop_hook(self, conversation_id, hook_key, from_message_nr=None):
        """Remove a hook from the conversation."""
        return self.call(
            'conversation/drop_hook/{}'.format(conversation_id),
            {'hook_key': hook_key,
             'from_message_nr': from_message_nr})

    def show_hooks(self, conversation_id):
        """Show hooks in the conversation.

        :param conversation_id: The conversation id.
        """
        return self.call('conversation/show_hooks/{}'.format(conversation_id))
| gpl-3.0 |
jjhelmus/scipy | setup.py | 5 | 15012 | #!/usr/bin/env python
"""SciPy: Scientific Library for Python
SciPy (pronounced "Sigh Pie") is open-source software for mathematics,
science, and engineering. The SciPy library
depends on NumPy, which provides convenient and fast N-dimensional
array manipulation. The SciPy library is built to work with NumPy
arrays, and provides many user-friendly and efficient numerical
routines such as routines for numerical integration and optimization.
Together, they run on all popular operating systems, are quick to
install, and are free of charge. NumPy and SciPy are easy to use,
but powerful enough to be depended upon by some of the world's
leading scientists and engineers. If you need to manipulate
numbers on a computer and display or publish the results,
give SciPy a try!
"""
# Split the module docstring into lines; DOCLINES[0] is the one-line
# summary, the remainder the long description used by setup metadata.
DOCLINES = __doc__.split("\n")
import os
import sys
import subprocess
import textwrap
import warnings
if sys.version_info[:2] < (2, 7) or (3, 0) <= sys.version_info[:2] < (3, 4):
raise RuntimeError("Python version 2.7 or >= 3.4 required.")
if sys.version_info[0] < 3:
import __builtin__ as builtins
else:
import builtins
CLASSIFIERS = """\
Development Status :: 5 - Production/Stable
Intended Audience :: Science/Research
Intended Audience :: Developers
License :: OSI Approved :: BSD License
Programming Language :: C
Programming Language :: Python
Programming Language :: Python :: 2
Programming Language :: Python :: 2.6
Programming Language :: Python :: 2.7
Programming Language :: Python :: 3
Programming Language :: Python :: 3.2
Programming Language :: Python :: 3.3
Programming Language :: Python :: 3.4
Programming Language :: Python :: 3.5
Programming Language :: Python :: 3.6
Topic :: Software Development
Topic :: Scientific/Engineering
Operating System :: Microsoft :: Windows
Operating System :: POSIX
Operating System :: Unix
Operating System :: MacOS
"""
MAJOR = 1
MINOR = 0
MICRO = 0
ISRELEASED = False
VERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO)
# Return the git revision as a string
def git_version():
    """Return the current git HEAD hash, or "Unknown" when git fails."""
    def _run_git(cmd):
        # Build a minimal, locale-neutral environment so the output of git
        # is predictable and parseable.
        env = {name: os.environ[name]
               for name in ('SYSTEMROOT', 'PATH') if name in os.environ}
        # LANGUAGE is used on win32
        env['LANGUAGE'] = 'C'
        env['LANG'] = 'C'
        env['LC_ALL'] = 'C'
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, env=env)
        return proc.communicate()[0]

    try:
        revision = _run_git(['git', 'rev-parse', 'HEAD']).strip().decode('ascii')
    except OSError:
        revision = "Unknown"

    return revision
# BEFORE importing setuptools, remove MANIFEST. Otherwise it may not be
# properly updated when the contents of directories change (true for distutils,
# not sure about setuptools).
if os.path.exists('MANIFEST'):
    os.remove('MANIFEST')

# This is a bit hackish: we are setting a global variable so that the main
# scipy __init__ can detect if it is being loaded by the setup routine, to
# avoid attempting to load components that aren't built yet. While ugly, it's
# a lot more robust than what was previously being used.
# (scipy/__init__.py checks builtins.__SCIPY_SETUP__ at import time.)
builtins.__SCIPY_SETUP__ = True
def get_version_info():
    """Return the (full_version, git_revision) pair for this checkout.

    Adding the git rev number needs to be done inside write_version_py(),
    otherwise the import of scipy.version messes up the build under Python 3.
    """
    if os.path.exists('.git'):
        git_revision = git_version()
    elif os.path.exists('scipy/version.py'):
        # Source distribution: read the revision from the generated version
        # file, loaded as a standalone module so scipy/__init__.py is not
        # imported.
        import imp
        version_module = imp.load_source('scipy.version', 'scipy/version.py')
        git_revision = version_module.git_revision
    else:
        git_revision = "Unknown"

    full_version = VERSION
    if not ISRELEASED:
        full_version += '.dev0+' + git_revision[:7]

    return full_version, git_revision
def write_version_py(filename='scipy/version.py'):
    """(Re)generate scipy/version.py with the current version metadata.

    :param filename: Path of the generated file; overwritten on each call.
    """
    cnt = """
# THIS FILE IS GENERATED FROM SCIPY SETUP.PY
short_version = '%(version)s'
version = '%(version)s'
full_version = '%(full_version)s'
git_revision = '%(git_revision)s'
release = %(isrelease)s

if not release:
    version = full_version
"""
    FULLVERSION, GIT_REVISION = get_version_info()

    # Context manager replaces the original try/finally close() — same
    # behavior, idiomatic resource handling.
    with open(filename, 'w') as a:
        a.write(cnt % {'version': VERSION,
                       'full_version': FULLVERSION,
                       'git_revision': GIT_REVISION,
                       'isrelease': str(ISRELEASED)})
# Sphinx is optional; it is only needed for the `build_sphinx` command.
try:
    from sphinx.setup_command import BuildDoc
    HAVE_SPHINX = True
except Exception:
    # Was a bare `except:`, which also swallowed SystemExit and
    # KeyboardInterrupt; catching Exception keeps the "sphinx unusable"
    # fallback (its import can fail with more than ImportError) without
    # masking interpreter-exit signals.
    HAVE_SPHINX = False

if HAVE_SPHINX:
    class ScipyBuildDoc(BuildDoc):
        """Run in-place build before Sphinx doc build"""
        def run(self):
            ret = subprocess.call([sys.executable, sys.argv[0], 'build_ext', '-i'])
            if ret != 0:
                raise RuntimeError("Building Scipy failed!")
            BuildDoc.run(self)
def check_submodules():
    """ verify that the submodules are checked out and clean
        use `git submodule update --init`; on failure
    """
    # Nothing to verify outside of a git checkout (e.g. sdist tarball).
    if not os.path.exists('.git'):
        return

    with open('.gitmodules') as f:
        submodule_paths = [line.split('=')[-1].strip()
                           for line in f if 'path' in line]
    for p in submodule_paths:
        if not os.path.exists(p):
            raise ValueError('Submodule %s missing' % p)

    proc = subprocess.Popen(['git', 'submodule', 'status'],
                            stdout=subprocess.PIPE)
    status, _ = proc.communicate()
    # '-' marks an uninitialized submodule, '+' one at the wrong commit.
    for line in status.decode("ascii", "replace").splitlines():
        if line.startswith(('-', '+')):
            raise ValueError('Submodule not clean: %s' % line)
from distutils.command.sdist import sdist


class sdist_checked(sdist):
    """ check submodules on sdist to prevent incomplete tarballs """
    def run(self):
        # Fail loudly (ValueError) before packaging if any submodule is
        # missing or dirty, then delegate to the stock sdist command.
        check_submodules()
        sdist.run(self)
def generate_cython():
    """Run tools/cythonize.py over the scipy/ tree; raise on failure."""
    cwd = os.path.abspath(os.path.dirname(__file__))
    print("Cythonizing sources")
    cythonize_script = os.path.join(cwd, 'tools', 'cythonize.py')
    status = subprocess.call([sys.executable, cythonize_script, 'scipy'],
                             cwd=cwd)
    if status != 0:
        raise RuntimeError("Running cythonize failed!")
def parse_setuppy_commands():
    """Check the commands and respond appropriately. Disable broken commands.

    Return a boolean value for whether or not to run the build or not (avoid
    parsing Cython and template files if False).
    """
    if len(sys.argv) < 2:
        # User forgot to give an argument probably, let setuptools handle that.
        return True

    info_commands = ['--help-commands', '--name', '--version', '-V',
                     '--fullname', '--author', '--author-email',
                     '--maintainer', '--maintainer-email', '--contact',
                     '--contact-email', '--url', '--license', '--description',
                     '--long-description', '--platforms', '--classifiers',
                     '--keywords', '--provides', '--requires', '--obsoletes']
    # Add commands that do more than print info, but also don't need Cython and
    # template parsing.
    info_commands.extend(['egg_info', 'install_egg_info', 'rotate'])

    for command in info_commands:
        if command in sys.argv[1:]:
            return False

    # Note that 'alias', 'saveopts' and 'setopt' commands also seem to work
    # fine as they are, but are usually used together with one of the commands
    # below and not standalone.  Hence they're not added to good_commands.
    good_commands = ('develop', 'sdist', 'build', 'build_ext', 'build_py',
                     'build_clib', 'build_scripts', 'bdist_wheel', 'bdist_rpm',
                     'bdist_wininst', 'bdist_msi', 'bdist_mpkg',
                     'build_sphinx')

    for command in good_commands:
        if command in sys.argv[1:]:
            return True

    # The following commands are supported, but we need to show more
    # useful messages to the user
    if 'install' in sys.argv[1:]:
        print(textwrap.dedent("""
            Note: if you need reliable uninstall behavior, then install
            with pip instead of using `setup.py install`:

              - `pip install .`       (from a git repo or downloaded source
                                       release)
              - `pip install scipy`   (last SciPy release on PyPI)

            """))
        return True

    # BUG FIX: was `'-h' in sys.argv[1]`, i.e. a substring test against the
    # first argument only, so `setup.py build -h` missed this branch while
    # any first argument merely containing "-h" wrongly triggered it.
    if '--help' in sys.argv[1:] or '-h' in sys.argv[1:]:
        print(textwrap.dedent("""
            SciPy-specific help
            -------------------

            To install SciPy from here with reliable uninstall, we recommend
            that you use `pip install .`. To install the latest SciPy release
            from PyPI, use `pip install scipy`.

            For help with build/installation issues, please ask on the
            scipy-user mailing list.  If you are sure that you have run
            into a bug, please report it at https://github.com/scipy/scipy/issues.

            Setuptools commands help
            ------------------------
            """))
        return False

    # The following commands aren't supported.  They can only be executed when
    # the user explicitly adds a --force command-line argument.
    bad_commands = dict(
        test="""
            `setup.py test` is not supported.  Use one of the following
            instead:

              - `python runtests.py`              (to build and test)
              - `python runtests.py --no-build`   (to test installed scipy)
              - `>>> scipy.test()`           (run tests for installed scipy
                                              from within an interpreter)
            """,
        upload="""
            `setup.py upload` is not supported, because it's insecure.
            Instead, build what you want to upload and upload those files
            with `twine upload -s <filenames>` instead.
            """,
        upload_docs="`setup.py upload_docs` is not supported",
        easy_install="`setup.py easy_install` is not supported",
        clean="""
            `setup.py clean` is not supported, use one of the following instead:

              - `git clean -xdf` (cleans all files)
              - `git clean -Xdf` (cleans all versioned files, doesn't touch
                                  files that aren't checked into the git repo)
            """,
        check="`setup.py check` is not supported",
        register="`setup.py register` is not supported",
        bdist_dumb="`setup.py bdist_dumb` is not supported",
        bdist="`setup.py bdist` is not supported",
        flake8="`setup.py flake8` is not supported, use flake8 standalone",
        )
    bad_commands['nosetests'] = bad_commands['test']
    for command in ('upload_docs', 'easy_install', 'bdist', 'bdist_dumb',
                    'register', 'check', 'install_data', 'install_headers',
                    'install_lib', 'install_scripts', ):
        bad_commands[command] = "`setup.py %s` is not supported" % command

    for command in bad_commands.keys():
        if command in sys.argv[1:]:
            print(textwrap.dedent(bad_commands[command]) +
                  "\nAdd `--force` to your command to use it anyway if you "
                  "must (unsupported).\n")
            sys.exit(1)

    # If we got here, we didn't detect what setup.py command was given
    warnings.warn("Unrecognized setuptools command, proceeding with "
                  "generating Cython sources and expanding templates")
    return True
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration hook.

    Declares the scipy subpackage, its data files and the version file;
    passed to setup() via metadata['configuration'] when a build runs.
    """
    from numpy.distutils.misc_util import Configuration

    config = Configuration(None, parent_package, top_path)
    config.set_options(ignore_setup_xxx_py=True,
                       assume_default_configuration=True,
                       delegate_options_to_subpackages=True,
                       quiet=True)

    config.add_subpackage('scipy')
    config.add_data_files(('scipy', '*.txt'))

    # Version is read from the file generated by write_version_py().
    config.get_version('scipy/version.py')

    return config
def setup_package():
    """Top-level driver: regenerate the version file, assemble setup()
    metadata, decide whether a full build is needed, and call setup()."""
    # Rewrite the version file every time
    write_version_py()

    cmdclass = {'sdist': sdist_checked}
    if HAVE_SPHINX:
        cmdclass['build_sphinx'] = ScipyBuildDoc

    # Figure out whether to add ``*_requires = ['numpy']``.
    # We don't want to do that unconditionally, because we risk updating
    # an installed numpy which fails too often. Just if it's not installed, we
    # may give it a try. See gh-3379.
    try:
        import numpy
    except ImportError:  # We do not have numpy installed
        build_requires = ['numpy>=1.8.2']
    else:
        # If we're building a wheel, assume there already exist numpy wheels
        # for this platform, so it is safe to add numpy to build requirements.
        # See gh-5184.
        build_requires = (['numpy>=1.8.2'] if 'bdist_wheel' in sys.argv[1:]
                          else [])

    metadata = dict(
        name='scipy',
        maintainer="SciPy Developers",
        maintainer_email="scipy-dev@python.org",
        description=DOCLINES[0],
        long_description="\n".join(DOCLINES[2:]),
        url="https://www.scipy.org",
        download_url="https://github.com/scipy/scipy/releases",
        license='BSD',
        cmdclass=cmdclass,
        classifiers=[_f for _f in CLASSIFIERS.split('\n') if _f],
        platforms=["Windows", "Linux", "Solaris", "Mac OS-X", "Unix"],
        test_suite='nose.collector',
        setup_requires=build_requires,
        install_requires=build_requires,
        python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*',
    )

    if "--force" in sys.argv:
        run_build = True
    else:
        # Raise errors for unsupported commands, improve help output, etc.
        run_build = parse_setuppy_commands()

    # This import is here because it needs to be done before importing setup()
    # from numpy.distutils, but after the MANIFEST removing and sdist import
    # higher up in this file.
    from setuptools import setup
    if run_build:
        from numpy.distutils.core import setup
        cwd = os.path.abspath(os.path.dirname(__file__))
        if not os.path.exists(os.path.join(cwd, 'PKG-INFO')):
            # Generate Cython sources, unless building from source release
            generate_cython()
        metadata['configuration'] = configuration
    else:
        # Don't import numpy here - non-build actions are required to succeed
        # without Numpy for example when pip is used to install Scipy when
        # Numpy is not yet present in the system.

        # Version number is added to metadata inside configuration() if build
        # is run.
        metadata['version'] = get_version_info()[0]

    setup(**metadata)


if __name__ == '__main__':
    setup_package()
| bsd-3-clause |
jhawkesworth/ansible | lib/ansible/modules/network/nxos/nxos_vpc_interface.py | 39 | 10376 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = '''
---
module: nxos_vpc_interface
extends_documentation_fragment: nxos
version_added: "2.2"
short_description: Manages interface VPC configuration
description:
- Manages interface VPC configuration
author:
- Jason Edelman (@jedelman8)
- Gabriele Gerbino (@GGabriele)
notes:
- Tested against NXOSv 7.3.(0)D1(1) on VIRL
- Either vpc or peer_link param is required, but not both.
- C(state=absent) removes whatever VPC config is on a port-channel
if one exists.
- Re-assigning a vpc or peerlink from one portchannel to another is not
supported. The module will force the user to unconfigure an existing
vpc/pl before configuring the same value on a new portchannel
options:
portchannel:
description:
- Group number of the portchannel that will be configured.
required: true
vpc:
description:
- VPC group/id that will be configured on associated portchannel.
peer_link:
description:
- Set to true/false for peer link config on associated portchannel.
type: bool
state:
description:
- Manages desired state of the resource.
required: true
choices: ['present','absent']
default: present
'''
EXAMPLES = '''
- nxos_vpc_interface:
portchannel: 10
vpc: 100
'''
RETURN = '''
commands:
description: commands sent to the device
returned: always
type: list
sample: ["interface port-channel100", "vpc 10"]
'''
from ansible.module_utils.network.nxos.nxos import get_config, load_config, run_commands
from ansible.module_utils.network.nxos.nxos import nxos_argument_spec, check_args
from ansible.module_utils.basic import AnsibleModule
def flatten_list(command_lists):
    """Flatten one level of nesting: list items are spliced in, other
    items are kept as-is."""
    flattened = []
    for entry in command_lists:
        flattened.extend(entry if isinstance(entry, list) else [entry])
    return flattened
def get_portchannel_list(module):
    """Return the port-channel group ids configured on the device, or []
    when the device reports none (or an unexpected payload shape)."""
    try:
        body = run_commands(module, ['show port-channel summary | json'])[0]
        rows = body['TABLE_channel']['ROW_channel']
    except (KeyError, AttributeError, TypeError):
        return []

    if not rows:
        return []
    # A single entry comes back as a bare dict rather than a list.
    if isinstance(rows, dict):
        rows = [rows]
    return [row['group'] for row in rows]
def get_existing_portchannel_to_vpc_mappings(module):
    """Map vpc id (str) -> vpc ifindex (str, e.g. 'Po10') from
    'show vpc brief'; empty dict when no vpcs are configured."""
    try:
        body = run_commands(module, ['show vpc brief | json'])[0]
        vpc_rows = body['TABLE_vpc']['ROW_vpc']
    except (KeyError, AttributeError, TypeError):
        return {}

    if not vpc_rows:
        return {}
    # A single entry comes back as a bare dict rather than a list.
    if isinstance(vpc_rows, dict):
        vpc_rows = [vpc_rows]
    return dict((str(row['vpc-id']), str(row['vpc-ifindex']))
                for row in vpc_rows)
def peer_link_exists(module):
    """Return True when a 'peer-link' line appears in the running vpc
    configuration."""
    vpc_config = get_config(module, flags=['vpc'])
    return any('peer-link' in line for line in vpc_config.split('\n'))
def get_active_vpc_peer_link(module):
    """Return the peer-link ifindex (e.g. 'Po10'), or None when no
    peer-link is reported by the device."""
    try:
        body = run_commands(module, ['show vpc brief | json'])[0]
        return body['TABLE_peerlink']['ROW_peerlink']['peerlink-ifindex']
    except (KeyError, AttributeError, TypeError):
        return None
def get_portchannel_vpc_config(module, portchannel):
    """Return the current vpc/peer-link state for one port-channel.

    The result dict has keys 'portchannel', 'peer-link' (bool) and 'vpc'
    (str); it is empty when the port-channel carries no vpc configuration.
    """
    peer_link_pc = None
    peer_link = False
    vpc = ""
    pc = ""
    config = {}

    try:
        body = run_commands(module, ['show vpc brief | json'])[0]
        table = body['TABLE_peerlink']['ROW_peerlink']
    except (KeyError, AttributeError, TypeError):
        table = {}

    if table:
        peer_link_pc = table.get('peerlink-ifindex', None)

    if peer_link_pc:
        # Strip the leading 'Po' prefix from the ifindex.
        plpc = str(peer_link_pc[2:])
        if portchannel == plpc:
            config['portchannel'] = portchannel
            config['peer-link'] = True
            config['vpc'] = vpc

    mapping = get_existing_portchannel_to_vpc_mappings(module)

    for existing_vpc, port_channel in mapping.items():
        # Strip the leading 'Po' prefix from the ifindex.
        port_ch = str(port_channel[2:])
        if port_ch == portchannel:
            pc = port_ch
            vpc = str(existing_vpc)

            # NOTE(review): these assignments would overwrite the peer-link
            # entries set above if the same port-channel also appeared in the
            # vpc mapping -- presumably a port-channel is either the
            # peer-link or a vpc member, never both; confirm against device
            # behavior.
            config['portchannel'] = pc
            config['peer-link'] = peer_link
            config['vpc'] = vpc

    return config
def get_commands_to_config_vpc_interface(portchannel, delta, config_value, existing):
    """Build the CLI command list that moves the port-channel from
    ``existing`` to the desired state described by ``delta``; empty list
    when nothing needs to change."""
    interface_cmd = 'interface port-channel{0}'.format(portchannel)

    if not delta.get('peer-link') and existing.get('peer-link'):
        return [interface_cmd, 'no vpc peer-link']
    if delta.get('peer-link') and not existing.get('peer-link'):
        return [interface_cmd, 'vpc peer-link']
    if delta.get('vpc') and not existing.get('vpc'):
        return [interface_cmd, 'vpc {0}'.format(config_value)]

    return []
def state_present(portchannel, delta, config_value, existing):
    """Return the commands for state=present, wrapped in a nested list
    (main() later flattens it with flatten_list)."""
    return [get_commands_to_config_vpc_interface(portchannel, delta,
                                                 config_value, existing)]
def state_absent(portchannel, existing):
    """Commands that strip the existing vpc/peer-link configuration from
    the port-channel; empty list when there is nothing to remove."""
    if existing.get('vpc'):
        removal = 'no vpc'
    elif existing.get('peer-link'):
        removal = 'no vpc peer-link'
    else:
        return []
    return ['interface port-channel{0}'.format(portchannel), removal]
def main():
    """Ansible entry point: validate parameters, compute the delta between
    desired and existing vpc/peer-link config, and apply (or, in check
    mode, report) the required commands."""
    argument_spec = dict(
        portchannel=dict(required=True, type='str'),
        vpc=dict(required=False, type='str'),
        peer_link=dict(required=False, type='bool'),
        state=dict(choices=['absent', 'present'], default='present')
    )

    argument_spec.update(nxos_argument_spec)

    # vpc and peer_link are mutually exclusive: a port-channel is either a
    # vpc member or the peer-link, never both.
    module = AnsibleModule(argument_spec=argument_spec,
                           mutually_exclusive=[['vpc', 'peer_link']],
                           supports_check_mode=True)

    warnings = list()
    commands = []
    check_args(module, warnings)
    results = {'changed': False, 'warnings': warnings}

    portchannel = module.params['portchannel']
    vpc = module.params['vpc']
    peer_link = module.params['peer_link']
    state = module.params['state']

    args = {'portchannel': portchannel, 'vpc': vpc, 'peer-link': peer_link}
    active_peer_link = None

    # Device output may report group ids as ints; accept both the raw string
    # and its int form before declaring the port-channel missing.
    if portchannel not in get_portchannel_list(module):
        if not portchannel.isdigit() or int(portchannel) not in get_portchannel_list(module):
            module.fail_json(msg="The portchannel you are trying to make a"
                                 " VPC or PL is not created yet. "
                                 "Create it first!")
    if vpc:
        mapping = get_existing_portchannel_to_vpc_mappings(module)

        # A given vpc id may only live on one port-channel...
        if vpc in mapping and portchannel != mapping[vpc].strip('Po'):
            module.fail_json(msg="This vpc is already configured on "
                                 "another portchannel. Remove it first "
                                 "before trying to assign it here. ",
                             existing_portchannel=mapping[vpc])

        # ...and a given port-channel may only carry one vpc id.
        for vpcid, existing_pc in mapping.items():
            if portchannel == existing_pc.strip('Po') and vpcid != vpc:
                module.fail_json(msg="This portchannel already has another"
                                 " VPC configured. Remove it first "
                                 "before assigning this one",
                                 existing_vpc=vpcid)

        # Refuse to turn the active peer-link into a vpc member.
        if peer_link_exists(module):
            active_peer_link = get_active_vpc_peer_link(module)
            if active_peer_link[-2:] == portchannel:
                module.fail_json(msg="That port channel is the current "
                                     "PEER LINK. Remove it if you want it"
                                     " to be a VPC")
        config_value = vpc

    elif peer_link is not None:
        # Only one peer-link may exist; [2::] strips the 'Po' prefix.
        if peer_link_exists(module):
            active_peer_link = get_active_vpc_peer_link(module)[2::]
            if active_peer_link != portchannel:
                if peer_link:
                    module.fail_json(msg="A peer link already exists on"
                                         " the device. Remove it first",
                                     current_peer_link='Po{0}'.format(active_peer_link))
        config_value = 'peer-link'

    # Delta = proposed key/value pairs not already present on the device.
    proposed = dict((k, v) for k, v in args.items() if v is not None)
    existing = get_portchannel_vpc_config(module, portchannel)

    if state == 'present':
        delta = dict(set(proposed.items()).difference(existing.items()))
        if delta:
            commands = state_present(portchannel, delta, config_value, existing)

    elif state == 'absent' and existing:
        commands = state_absent(portchannel, existing)

    cmds = flatten_list(commands)
    if cmds:
        if module.check_mode:
            module.exit_json(changed=True, commands=cmds)
        else:
            load_config(module, cmds)
            results['changed'] = True
            if 'configure' in cmds:
                cmds.pop(0)

    results['commands'] = cmds
    module.exit_json(**results)


if __name__ == '__main__':
    main()
| gpl-3.0 |
yanirs/servo | tests/wpt/web-platform-tests/tools/wptserve/wptserve/response.py | 114 | 14756 | from collections import OrderedDict
from datetime import datetime, timedelta
import Cookie
import json
import types
import uuid
import socket
from constants import response_codes
from logger import get_logger
missing = object()
class Response(object):
    """Object representing the response to a HTTP request

    :param handler: RequestHandler being used for this response
    :param request: Request that this is the response for

    .. attribute:: request

       Request associated with this Response.

    .. attribute:: encoding

       The encoding to use when converting unicode to strings for output.

    .. attribute:: add_required_headers

       Boolean indicating whether mandatory headers should be added to the
       response.

    .. attribute:: send_body_for_head_request

       Boolean, default False, indicating whether the body content should be
       sent when the request method is HEAD.

    .. attribute:: explicit_flush

       Boolean indicating whether output should be flushed automatically or only
       when requested.

    .. attribute:: writer

       The ResponseWriter for this response

    .. attribute:: status

       Status tuple (code, message). Can be set to an integer, in which case the
       message part is filled in automatically, or a tuple.

    .. attribute:: headers

       List of HTTP headers to send with the response. Each item in the list is a
       tuple of (name, value).

    .. attribute:: content

       The body of the response. This can either be a string or a iterable of response
       parts. If it is an iterable, any item may be a string or a function of zero
       parameters which, when called, returns a string."""

    def __init__(self, handler, request):
        self.request = request
        self.encoding = "utf8"

        self.add_required_headers = True
        self.send_body_for_head_request = False
        self.explicit_flush = False
        self.close_connection = False

        self.writer = ResponseWriter(handler, self)

        self._status = (200, None)
        self.headers = ResponseHeaders()
        self.content = []

        self.logger = get_logger()

    @property
    def status(self):
        return self._status

    @status.setter
    def status(self, value):
        if hasattr(value, "__len__"):
            if len(value) != 2:
                raise ValueError
            else:
                self._status = (int(value[0]), str(value[1]))
        else:
            self._status = (int(value), None)

    def set_cookie(self, name, value, path="/", domain=None, max_age=None,
                   expires=None, secure=False, httponly=False, comment=None):
        """Set a cookie to be sent with a Set-Cookie header in the
        response

        :param name: String name of the cookie
        :param value: String value of the cookie
        :param max_age: datetime.timedelta int representing the time (in seconds)
                        until the cookie expires
        :param path: String path to which the cookie applies
        :param domain: String domain to which the cookie applies
        :param secure: Boolean indicating whether the cookie is marked as secure
        :param httponly: Boolean indicating whether the cookie is marked as
                         HTTP Only
        :param comment: String comment
        :param expires: datetime.datetime or datetime.timedelta indicating a
                        time or interval from now when the cookie expires

        """
        # Month names for the Expires date, built by hand because strftime's
        # %b is locale-dependent.
        days = dict((i+1, name) for i, name in enumerate(["jan", "feb", "mar",
                                                          "apr", "may", "jun",
                                                          "jul", "aug", "sep",
                                                          "oct", "nov", "dec"]))
        # A value of None means "delete the cookie": empty value, expired.
        if value is None:
            value = ''
            max_age = 0
            expires = timedelta(days=-1)

        if isinstance(expires, timedelta):
            expires = datetime.utcnow() + expires

        if expires is not None:
            expires_str = expires.strftime("%d %%s %Y %H:%M:%S GMT")
            expires_str = expires_str % days[expires.month]
            expires = expires_str

        if max_age is not None:
            if hasattr(max_age, "total_seconds"):
                max_age = int(max_age.total_seconds())
            max_age = "%.0d" % max_age

        m = Cookie.Morsel()

        def maybe_set(key, value):
            if value is not None and value is not False:
                m[key] = value

        m.set(name, value, value)
        maybe_set("path", path)
        maybe_set("domain", domain)
        maybe_set("comment", comment)
        maybe_set("expires", expires)
        maybe_set("max-age", max_age)
        maybe_set("secure", secure)
        maybe_set("httponly", httponly)

        self.headers.append("Set-Cookie", m.OutputString())

    def unset_cookie(self, name):
        """Remove a cookie from those that are being sent with the response"""
        cookies = self.headers.get("Set-Cookie")
        parser = Cookie.BaseCookie()
        for cookie in cookies:
            parser.load(cookie)

        if name in parser.keys():
            del self.headers["Set-Cookie"]
            for m in parser.values():
                if m.key != name:
                    # BUG FIX: ResponseHeaders.append takes (name, value) as
                    # two arguments; the previous code passed a single tuple,
                    # raising TypeError and dropping every remaining cookie.
                    self.headers.append("Set-Cookie", m.OutputString())

    def delete_cookie(self, name, path="/", domain=None):
        """Delete a cookie on the client by setting it to the empty string
        and to expire in the past"""
        self.set_cookie(name, None, path=path, domain=domain, max_age=0,
                        expires=timedelta(days=-1))

    def iter_content(self):
        """Iterator returning chunks of response body content.

        If any part of the content is a function, this will be called
        and the resulting value (if any) returned."""
        if type(self.content) in types.StringTypes:
            yield self.content
        else:
            for item in self.content:
                if hasattr(item, "__call__"):
                    value = item()
                else:
                    value = item
                if value:
                    yield value

    def write_status_headers(self):
        """Write out the status line and headers for the response"""
        self.writer.write_status(*self.status)
        for item in self.headers:
            self.writer.write_header(*item)
        self.writer.end_headers()

    def write_content(self):
        """Write out the response content"""
        if self.request.method != "HEAD" or self.send_body_for_head_request:
            for item in self.iter_content():
                self.writer.write_content(item)

    def write(self):
        """Write the whole response"""
        self.write_status_headers()
        self.write_content()

    def set_error(self, code, message=""):
        """Set the response status headers and body to indicate an
        error"""
        err = {"code": code,
               "message": message}
        data = json.dumps({"error": err})
        self.status = code
        self.headers = [("Content-Type", "application/json"),
                        ("Content-Length", len(data))]
        self.content = data
        if code == 500:
            self.logger.error(message)
class MultipartContent(object):
    """Callable that renders an accumulated list of parts as a multipart
    body, delimited by ``boundary`` (a random UUID when not supplied)."""

    def __init__(self, boundary=None, default_content_type=None):
        self.items = []
        self.boundary = boundary if boundary is not None else str(uuid.uuid4())
        self.default_content_type = default_content_type

    def __call__(self):
        separator = "--" + self.boundary
        pieces = ["", separator]
        for part in self.items:
            pieces.append(str(part))
            pieces.append(separator)
        # Final delimiter carries the closing "--" suffix.
        pieces[-1] += "--"
        return "\r\n".join(pieces)

    def append_part(self, data, content_type=None, headers=None):
        part_type = self.default_content_type if content_type is None else content_type
        self.items.append(MultipartPart(data, part_type, headers))

    def __iter__(self):
        # Response content may be a string or an iterable of callables; by
        # yielding ourselves we act as a one-element iterable whose single
        # callable renders the whole multipart body.
        yield self
class MultipartPart(object):
    """One part of a multipart body: a header block plus its payload."""

    def __init__(self, data, content_type=None, headers=None):
        self.headers = ResponseHeaders()

        if content_type is not None:
            self.headers.set("Content-Type", content_type)

        if headers is not None:
            for name, value in headers:
                # An explicitly supplied Content-Type replaces the one set
                # above instead of accumulating next to it.
                if name.lower() == "content-type":
                    self.headers.set(name, value)
                else:
                    self.headers.append(name, value)

        self.data = data

    def __str__(self):
        lines = ["%s: %s" % item for item in self.headers]
        lines.append("")
        lines.append(self.data)
        return "\r\n".join(lines)
class ResponseHeaders(object):
    """Ordered, case-insensitive multi-valued mapping of response headers.

    Internally each entry maps the lower-cased name to a tuple of
    (original-cased name, list of values)."""

    def __init__(self):
        self.data = OrderedDict()

    def set(self, key, value):
        """Set a header to a specific value, replacing any values already
        recorded under the same (case-insensitive) name.

        :param key: Name of the header to set
        :param value: Value to set the header to
        """
        self.data[key.lower()] = (key, [value])

    def append(self, key, value):
        """Add a value for a header, keeping any values already present.

        :param key: Name of the header to add
        :param value: Value to set for the header
        """
        lower = key.lower()
        if lower in self.data:
            self.data[lower][1].append(value)
        else:
            self.set(key, value)

    def get(self, key, default=missing):
        """Return the list of values for a header; ``default`` (or [] when
        no default is given) if the header is unset."""
        try:
            return self[key]
        except KeyError:
            return [] if default is missing else default

    def __getitem__(self, key):
        """Get a list of values for a particular header
        """
        return self.data[key.lower()][1]

    def __delitem__(self, key):
        del self.data[key.lower()]

    def __contains__(self, key):
        return key.lower() in self.data

    def __setitem__(self, key, value):
        self.set(key, value)

    def __iter__(self):
        # Yields one (name, value) pair per stored value, preserving both
        # insertion order and the original header-name casing.
        for key, values in self.data.itervalues():
            for value in values:
                yield key, value

    def items(self):
        return list(self)

    def update(self, items_iter):
        for name, value in items_iter:
            self.set(name, value)

    def __repr__(self):
        return repr(self.data)
class ResponseWriter(object):
"""Object providing an API to write out a HTTP response.
:param handler: The RequestHandler being used.
:param response: The Response associated with this writer.
After each part of the response is written, the output is
flushed unless response.explicit_flush is False, in which case
the user must call .flush() explicitly."""
def __init__(self, handler, response):
self._wfile = handler.wfile
self._response = response
self._handler = handler
self._headers_seen = set()
self._headers_complete = False
self.content_written = False
self.request = response.request
def write_status(self, code, message=None):
"""Write out the status line of a response.
:param code: The integer status code of the response.
:param message: The message of the response. Defaults to the message commonly used
with the status code."""
if message is None:
if code in response_codes:
message = response_codes[code][0]
else:
message = ''
self.write("%s %d %s\r\n" %
(self._response.request.protocol_version, code, message))
def write_header(self, name, value):
"""Write out a single header for the response.
:param name: Name of the header field
:param value: Value of the header field
"""
self._headers_seen.add(name.lower())
self.write("%s: %s\r\n" % (name, value))
if not self._response.explicit_flush:
self.flush()
def write_default_headers(self):
for name, f in [("Server", self._handler.version_string),
("Date", self._handler.date_time_string)]:
if name.lower() not in self._headers_seen:
self.write_header(name, f())
if (type(self._response.content) in (str, unicode) and
"content-length" not in self._headers_seen):
#Would be nice to avoid double-encoding here
self.write_header("Content-Length", len(self.encode(self._response.content)))
def end_headers(self):
"""Finish writing headers and write the separator.
Unless add_required_headers on the response is False,
this will also add HTTP-mandated headers that have not yet been supplied
to the response headers"""
if self._response.add_required_headers:
self.write_default_headers()
self.write("\r\n")
if "content-length" not in self._headers_seen:
self._response.close_connection = True
if not self._response.explicit_flush:
self.flush()
self._headers_complete = True
def write_content(self, data):
"""Write the body of the response."""
self.write(self.encode(data))
if not self._response.explicit_flush:
self.flush()
def write(self, data):
"""Write directly to the response, converting unicode to bytes
according to response.encoding. Does not flush."""
self.content_written = True
try:
self._wfile.write(self.encode(data))
except socket.error:
# This can happen if the socket got closed by the remote end
pass
def encode(self, data):
"""Convert unicode to bytes according to response.encoding."""
if isinstance(data, str):
return data
elif isinstance(data, unicode):
return data.encode(self._response.encoding)
else:
raise ValueError
    def flush(self):
        """Flush the output."""
        try:
            self._wfile.flush()
        except socket.error:
            # This can happen if the socket got closed by the remote end;
            # flushing is best-effort at that point, so the error is ignored.
            pass
| mpl-2.0 |
takeshineshiro/neutron | neutron/tests/api/test_floating_ips.py | 47 | 10395 | # Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from tempest_lib.common.utils import data_utils
from neutron.tests.api import base
from neutron.tests.tempest import config
from neutron.tests.tempest import test
CONF = config.CONF
class FloatingIPTestJSON(base.BaseNetworkTest):
    """
    Tests the following operations in the Quantum API using the REST client for
    Neutron:

        Create a Floating IP
        Update a Floating IP
        Delete a Floating IP
        List all Floating IPs
        Show Floating IP details
        Associate a Floating IP with a port and then delete that port
        Associate a Floating IP with a port and then with a port on another
        router

    v2.0 of the Neutron API is assumed. It is also assumed that the following
    options are defined in the [network] section of etc/tempest.conf:

        public_network_id which is the id for the external network present
    """

    @classmethod
    def resource_setup(cls):
        super(FloatingIPTestJSON, cls).resource_setup()
        if not test.is_extension_enabled('router', 'network'):
            msg = "router extension not enabled."
            raise cls.skipException(msg)
        cls.ext_net_id = CONF.network.public_network_id

        # Create network, subnet, router and add interface
        cls.network = cls.create_network()
        cls.subnet = cls.create_subnet(cls.network)
        cls.router = cls.create_router(data_utils.rand_name('router-'),
                                       external_network_id=cls.ext_net_id)
        cls.create_router_interface(cls.router['id'], cls.subnet['id'])

        # Create two ports, one each for creation and updating of floating
        # IPs.  The base class collects them in cls.ports; the previously
        # assigned-but-never-used ``cls.port = list()`` has been removed.
        for _ in range(2):
            cls.create_port(cls.network)

    @test.attr(type='smoke')
    @test.idempotent_id('62595970-ab1c-4b7f-8fcc-fddfe55e8718')
    def test_create_list_show_update_delete_floating_ip(self):
        # Creates a floating IP
        body = self.client.create_floatingip(
            floating_network_id=self.ext_net_id,
            port_id=self.ports[0]['id'])
        created_floating_ip = body['floatingip']
        self.addCleanup(self.client.delete_floatingip,
                        created_floating_ip['id'])
        self.assertIsNotNone(created_floating_ip['id'])
        self.assertIsNotNone(created_floating_ip['tenant_id'])
        self.assertIsNotNone(created_floating_ip['floating_ip_address'])
        self.assertEqual(created_floating_ip['port_id'], self.ports[0]['id'])
        self.assertEqual(created_floating_ip['floating_network_id'],
                         self.ext_net_id)
        self.assertIn(created_floating_ip['fixed_ip_address'],
                      [ip['ip_address'] for ip in self.ports[0]['fixed_ips']])

        # Verifies the details of a floating_ip
        floating_ip = self.client.show_floatingip(created_floating_ip['id'])
        shown_floating_ip = floating_ip['floatingip']
        self.assertEqual(shown_floating_ip['id'], created_floating_ip['id'])
        self.assertEqual(shown_floating_ip['floating_network_id'],
                         self.ext_net_id)
        self.assertEqual(shown_floating_ip['tenant_id'],
                         created_floating_ip['tenant_id'])
        self.assertEqual(shown_floating_ip['floating_ip_address'],
                         created_floating_ip['floating_ip_address'])
        self.assertEqual(shown_floating_ip['port_id'], self.ports[0]['id'])

        # Verify the floating ip exists in the list of all floating_ips
        floating_ips = self.client.list_floatingips()
        floatingip_id_list = list()
        for f in floating_ips['floatingips']:
            floatingip_id_list.append(f['id'])
        self.assertIn(created_floating_ip['id'], floatingip_id_list)

        # Associate floating IP to the other port
        floating_ip = self.client.update_floatingip(
            created_floating_ip['id'],
            port_id=self.ports[1]['id'])
        updated_floating_ip = floating_ip['floatingip']
        self.assertEqual(updated_floating_ip['port_id'], self.ports[1]['id'])
        self.assertEqual(updated_floating_ip['fixed_ip_address'],
                         self.ports[1]['fixed_ips'][0]['ip_address'])
        self.assertEqual(updated_floating_ip['router_id'], self.router['id'])

        # Disassociate floating IP from the port
        floating_ip = self.client.update_floatingip(
            created_floating_ip['id'],
            port_id=None)
        updated_floating_ip = floating_ip['floatingip']
        self.assertIsNone(updated_floating_ip['port_id'])
        self.assertIsNone(updated_floating_ip['fixed_ip_address'])
        self.assertIsNone(updated_floating_ip['router_id'])

    @test.attr(type='smoke')
    @test.idempotent_id('e1f6bffd-442f-4668-b30e-df13f2705e77')
    def test_floating_ip_delete_port(self):
        # Create a floating IP
        body = self.client.create_floatingip(
            floating_network_id=self.ext_net_id)
        created_floating_ip = body['floatingip']
        self.addCleanup(self.client.delete_floatingip,
                        created_floating_ip['id'])
        # Create a port
        port = self.client.create_port(network_id=self.network['id'])
        created_port = port['port']
        floating_ip = self.client.update_floatingip(
            created_floating_ip['id'],
            port_id=created_port['id'])
        # Delete port
        self.client.delete_port(created_port['id'])
        # Verifies the details of the floating_ip
        floating_ip = self.client.show_floatingip(created_floating_ip['id'])
        shown_floating_ip = floating_ip['floatingip']
        # Confirm the fields are back to None
        self.assertEqual(shown_floating_ip['id'], created_floating_ip['id'])
        self.assertIsNone(shown_floating_ip['port_id'])
        self.assertIsNone(shown_floating_ip['fixed_ip_address'])
        self.assertIsNone(shown_floating_ip['router_id'])

    @test.attr(type='smoke')
    @test.idempotent_id('1bb2f731-fe5a-4b8c-8409-799ade1bed4d')
    def test_floating_ip_update_different_router(self):
        # Associate a floating IP to a port on a router
        body = self.client.create_floatingip(
            floating_network_id=self.ext_net_id,
            port_id=self.ports[1]['id'])
        created_floating_ip = body['floatingip']
        self.addCleanup(self.client.delete_floatingip,
                        created_floating_ip['id'])
        self.assertEqual(created_floating_ip['router_id'], self.router['id'])
        network2 = self.create_network()
        subnet2 = self.create_subnet(network2)
        router2 = self.create_router(data_utils.rand_name('router-'),
                                     external_network_id=self.ext_net_id)
        self.create_router_interface(router2['id'], subnet2['id'])
        port_other_router = self.create_port(network2)
        # Associate floating IP to the other port on another router
        floating_ip = self.client.update_floatingip(
            created_floating_ip['id'],
            port_id=port_other_router['id'])
        updated_floating_ip = floating_ip['floatingip']
        self.assertEqual(updated_floating_ip['router_id'], router2['id'])
        self.assertEqual(updated_floating_ip['port_id'],
                         port_other_router['id'])
        self.assertIsNotNone(updated_floating_ip['fixed_ip_address'])

    @test.attr(type='smoke')
    @test.idempotent_id('36de4bd0-f09c-43e3-a8e1-1decc1ffd3a5')
    def test_create_floating_ip_specifying_a_fixed_ip_address(self):
        body = self.client.create_floatingip(
            floating_network_id=self.ext_net_id,
            port_id=self.ports[1]['id'],
            fixed_ip_address=self.ports[1]['fixed_ips'][0]['ip_address'])
        created_floating_ip = body['floatingip']
        self.addCleanup(self.client.delete_floatingip,
                        created_floating_ip['id'])
        self.assertIsNotNone(created_floating_ip['id'])
        self.assertEqual(created_floating_ip['fixed_ip_address'],
                         self.ports[1]['fixed_ips'][0]['ip_address'])
        floating_ip = self.client.update_floatingip(
            created_floating_ip['id'],
            port_id=None)
        self.assertIsNone(floating_ip['floatingip']['port_id'])

    @test.attr(type='smoke')
    @test.idempotent_id('45c4c683-ea97-41ef-9c51-5e9802f2f3d7')
    def test_create_update_floatingip_with_port_multiple_ip_address(self):
        # Find out ips that can be used for tests
        ips = list(netaddr.IPNetwork(self.subnet['cidr']))
        list_ips = [str(ip) for ip in ips[-3:-1]]
        fixed_ips = [{'ip_address': list_ips[0]}, {'ip_address': list_ips[1]}]
        # Create port
        body = self.client.create_port(network_id=self.network['id'],
                                       fixed_ips=fixed_ips)
        port = body['port']
        self.addCleanup(self.client.delete_port, port['id'])
        # Create floating ip
        body = self.client.create_floatingip(
            floating_network_id=self.ext_net_id,
            port_id=port['id'],
            fixed_ip_address=list_ips[0])
        floating_ip = body['floatingip']
        self.addCleanup(self.client.delete_floatingip, floating_ip['id'])
        self.assertIsNotNone(floating_ip['id'])
        self.assertEqual(floating_ip['fixed_ip_address'], list_ips[0])
        # Update floating ip
        body = self.client.update_floatingip(floating_ip['id'],
                                             port_id=port['id'],
                                             fixed_ip_address=list_ips[1])
        update_floating_ip = body['floatingip']
        self.assertEqual(update_floating_ip['fixed_ip_address'],
                         list_ips[1])
| apache-2.0 |
fldc/CouchPotatoServer | couchpotato/core/media/movie/providers/automation/moviemeter.py | 38 | 1179 | from couchpotato.core.helpers.rss import RSS
from couchpotato.core.logger import CPLog
from couchpotato.core.media.movie.providers.automation.base import Automation
# Module-level logger for this provider.
log = CPLog(__name__)
# Name under which CouchPotato auto-loads this plugin.
autoload = 'Moviemeter'
class Moviemeter(Automation, RSS):
    """Automation provider that imports IMDB ids from the moviemeter.nl
    cinema RSS feed."""

    # Poll the feed every 30 minutes.
    interval = 1800
    rss_url = 'http://www.moviemeter.nl/rss/cinema'

    def getIMDBids(self):
        """Return IMDB ids for feed entries that pass the minimal-movie
        filter."""
        collected = []
        feed_entries = self.getRSSData(self.rss_url)
        for entry in feed_entries:
            info = self.search(self.getTextElement(entry, 'title'))
            if info and self.isMinimalMovie(info):
                collected.append(info['imdb'])
        return collected
# Settings rendered in CouchPotato's "Automation" tab; only an
# enable/disable toggle is exposed for this provider.
config = [{
    'name': 'moviemeter',
    'groups': [
        {
            'tab': 'automation',
            'list': 'automation_providers',
            'name': 'moviemeter_automation',
            'label': 'Moviemeter',
            'description': 'Imports movies from the current top 10 of moviemeter.nl.',
            'options': [
                {
                    'name': 'automation_enabled',
                    'default': False,
                    'type': 'enabler',
                },
            ],
        },
    ],
}]
| gpl-3.0 |
mknx/smarthome | tools/owsensors2items.py | 14 | 2913 | #!/usr/bin/env python
# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab
#########################################################################
# Copyright 2012-2013 KNX-User-Forum e.V. http://knx-user-forum.de/
#########################################################################
# This file is part of SmartHome.py. http://smarthome.sourceforge.net/
#
# SmartHome.py is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SmartHome.py is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SmartHome.py. If not, see <http://www.gnu.org/licenses/>.
##########################################################################
import ConfigParser, io, sys
conf = ''
# read conf and skip header entries (no section)
try:
with open(sys.argv[1], 'r') as cfg:
found_section = False
for l in cfg.readlines():
if len(l.strip()) == 0:
continue
if l[0] != '[' and found_section == False:
continue
found_section = True
conf += l
with open(sys.argv[2], 'w') as out:
config = ConfigParser.ConfigParser()
config.readfp(io.BytesIO(conf))
for section in config.sections():
try:
name = config.get(section, 'name')
typ = config.get(section, 'type')
except ConfigParser.NoOptionError:
continue
if typ == 'DS1820':
sensor = 'T' + config.get(section, 'resolution')
typ = 'num'
knx_send = config.get(section, 'eib_ga_temp')
elif typ == 'DS2438Hum' or typ == 'DS2438Datanab':
sensor = 'H'
typ = 'num'
elif typ == 'DS1990':
sensor = 'B'
typ = 'bool'
knx_send = config.get(section, 'eib_ga_present')
elif typ == 'DS2401':
sensor = 'B'
typ = 'bool'
knx_send = config.get(section, 'eib_ga_present')
elif typ == 'DS9490':
sensor = 'BM'
typ = 'bool'
else:
continue
out.write('''
[[{0}]]
name = {0}
type = {1}
ow_addr = {2}
ow_sensor = {3}
#knx_send = {4}
#knx_reply = {4}
'''.format(name, typ, section, sensor,knx_send))
except:
print "usage: owsensors2item.py <input_file> <output_file>"
sys.exit()
| gpl-3.0 |
Voluntarynet/BitmessageKit | BitmessageKit/Vendor/static-python/Lib/ctypes/test/test_unicode.py | 81 | 5101 | # coding: latin-1
import unittest
import ctypes
try:
    ctypes.c_wchar
except AttributeError:
    # This build of ctypes lacks wide-character support; the whole test
    # module is effectively skipped (nothing below gets defined).
    pass
else:
    import _ctypes_test
    dll = ctypes.CDLL(_ctypes_test.__file__)
    # my_wcslen(wchar_t *) returns the length of a wide string; used by
    # UnicodeTestCase to observe str->unicode argument conversion.
    wcslen = dll.my_wcslen
    wcslen.argtypes = [ctypes.c_wchar_p]
    class UnicodeTestCase(unittest.TestCase):
        # Exercises the automatic str<->unicode argument conversion of a
        # wchar-based foreign function under different conversion modes.
        def setUp(self):
            # Remember the previous mode so tearDown can restore it.
            self.prev_conv_mode = ctypes.set_conversion_mode("ascii", "strict")
        def tearDown(self):
            ctypes.set_conversion_mode(*self.prev_conv_mode)
        def test_ascii_strict(self):
            ctypes.set_conversion_mode("ascii", "strict")
            # no conversions take place with unicode arguments
            self.assertEqual(wcslen(u"abc"), 3)
            self.assertEqual(wcslen(u"ab\u2070"), 3)
            # string args are converted
            self.assertEqual(wcslen("abc"), 3)
            # non-ascii bytes are rejected in strict mode
            self.assertRaises(ctypes.ArgumentError, wcslen, "abä")
        def test_ascii_replace(self):
            ctypes.set_conversion_mode("ascii", "replace")
            self.assertEqual(wcslen(u"abc"), 3)
            self.assertEqual(wcslen(u"ab\u2070"), 3)
            self.assertEqual(wcslen("abc"), 3)
            self.assertEqual(wcslen("abä"), 3)
        def test_ascii_ignore(self):
            ctypes.set_conversion_mode("ascii", "ignore")
            self.assertEqual(wcslen(u"abc"), 3)
            self.assertEqual(wcslen(u"ab\u2070"), 3)
            # ignore error mode skips non-ascii characters
            self.assertEqual(wcslen("abc"), 3)
            self.assertEqual(wcslen("äöüß"), 0)
        def test_latin1_strict(self):
            ctypes.set_conversion_mode("latin-1", "strict")
            self.assertEqual(wcslen(u"abc"), 3)
            self.assertEqual(wcslen(u"ab\u2070"), 3)
            self.assertEqual(wcslen("abc"), 3)
            self.assertEqual(wcslen("äöüß"), 4)
        def test_buffers(self):
            ctypes.set_conversion_mode("ascii", "strict")
            buf = ctypes.create_unicode_buffer("abc")
            # extra element is the NUL terminator
            self.assertEqual(len(buf), 3+1)
            ctypes.set_conversion_mode("ascii", "replace")
            buf = ctypes.create_unicode_buffer("abäöü")
            self.assertEqual(buf[:], u"ab\uFFFD\uFFFD\uFFFD\0")
            self.assertEqual(buf[::], u"ab\uFFFD\uFFFD\uFFFD\0")
            self.assertEqual(buf[::-1], u"\0\uFFFD\uFFFD\uFFFDba")
            self.assertEqual(buf[::2], u"a\uFFFD\uFFFD")
            self.assertEqual(buf[6:5:-1], u"")
            ctypes.set_conversion_mode("ascii", "ignore")
            buf = ctypes.create_unicode_buffer("abäöü")
            # is that correct? not sure. But with 'ignore', you get what you pay for..
            self.assertEqual(buf[:], u"ab\0\0\0\0")
            self.assertEqual(buf[::], u"ab\0\0\0\0")
            self.assertEqual(buf[::-1], u"\0\0\0\0ba")
            self.assertEqual(buf[::2], u"a\0\0")
            self.assertEqual(buf[6:5:-1], u"")
import _ctypes_test
func = ctypes.CDLL(_ctypes_test.__file__)._testfunc_p_p
class StringTestCase(UnicodeTestCase):
def setUp(self):
self.prev_conv_mode = ctypes.set_conversion_mode("ascii", "strict")
func.argtypes = [ctypes.c_char_p]
func.restype = ctypes.c_char_p
def tearDown(self):
ctypes.set_conversion_mode(*self.prev_conv_mode)
func.argtypes = None
func.restype = ctypes.c_int
def test_ascii_replace(self):
ctypes.set_conversion_mode("ascii", "strict")
self.assertEqual(func("abc"), "abc")
self.assertEqual(func(u"abc"), "abc")
self.assertRaises(ctypes.ArgumentError, func, u"abä")
def test_ascii_ignore(self):
ctypes.set_conversion_mode("ascii", "ignore")
self.assertEqual(func("abc"), "abc")
self.assertEqual(func(u"abc"), "abc")
self.assertEqual(func(u"äöüß"), "")
def test_ascii_replace(self):
ctypes.set_conversion_mode("ascii", "replace")
self.assertEqual(func("abc"), "abc")
self.assertEqual(func(u"abc"), "abc")
self.assertEqual(func(u"äöüß"), "????")
def test_buffers(self):
ctypes.set_conversion_mode("ascii", "strict")
buf = ctypes.create_string_buffer(u"abc")
self.assertEqual(len(buf), 3+1)
ctypes.set_conversion_mode("ascii", "replace")
buf = ctypes.create_string_buffer(u"abäöü")
self.assertEqual(buf[:], "ab???\0")
self.assertEqual(buf[::], "ab???\0")
self.assertEqual(buf[::-1], "\0???ba")
self.assertEqual(buf[::2], "a??")
self.assertEqual(buf[6:5:-1], "")
ctypes.set_conversion_mode("ascii", "ignore")
buf = ctypes.create_string_buffer(u"abäöü")
# is that correct? not sure. But with 'ignore', you get what you pay for..
self.assertEqual(buf[:], "ab\0\0\0\0")
self.assertEqual(buf[::], "ab\0\0\0\0")
self.assertEqual(buf[::-1], "\0\0\0\0ba")
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| mit |
LarsFronius/ansible | lib/ansible/modules/network/cumulus/_cl_img_install.py | 60 | 10997 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2016, Cumulus Networks <ce-ceng@cumulusnetworks.com>
#
# This file is part of Ansible
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['deprecated'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cl_img_install
version_added: "2.1"
author: "Cumulus Networks (@CumulusLinux)"
short_description: Install a different Cumulus Linux version.
deprecated: Deprecated in 2.3. The image slot system no longer exists in Cumulus Linux.
description:
- install a different version of Cumulus Linux in the inactive slot. For
more details go the Image Management User Guide at
U(http://docs.cumulusnetworks.com/).
options:
src:
description:
- The full path to the Cumulus Linux binary image. Can be a local path,
http or https URL. If the code version is in the name of the file,
the module will assume this is the version of code you wish to
install.
required: true
version:
description:
- Inform the module of the exact version one is installing. This
overrides the automatic check of version in the file name. For
example, if the binary file name is called CumulusLinux-2.2.3.bin,
and version is set to '2.5.0', then the module will assume it is
installing '2.5.0' not '2.2.3'. If version is not included, then
the module will assume '2.2.3' is the version to install.
default: None
required: false
switch_slot:
description:
- Switch slots after installing the image.
To run the installed code, reboot the switch.
choices: ['yes', 'no']
default: 'no'
required: false
requirements: ["Cumulus Linux OS"]
'''
EXAMPLES = '''
## Download and install the image from a webserver.
- name: Install image using using http url. Switch slots so the subsequent will load the new version
cl_img_install:
version: 2.0.1
src: http://10.1.1.1/CumulusLinux-2.0.1.bin
switch_slot: yes
## Copy the software from the ansible server to the switch.
## The module will get the code version from the filename
## The code will be installed in the alternate slot but the slot will not be primary
## A subsequent reload will not run the new code
- name: Download cumulus linux to local system
get_url:
src: ftp://cumuluslinux.bin
dest: /root/CumulusLinux-2.0.1.bin
- name: Install image from local filesystem. Get version from the filename.
cl_img_install:
src: /root/CumulusLinux-2.0.1.bin
## If the image name has been changed from the original name, use the `version` option
## to inform the module exactly what code version is been installed
- name: Download cumulus linux to local system
get_url:
src: ftp://CumulusLinux-2.0.1.bin
dest: /root/image.bin
- name: install image and switch slots. Only reboot needed
cl_img_install:
version: 2.0.1
src: /root/image.bin
switch_slot: yes
'''
RETURN = '''
changed:
description: whether the interface was changed
returned: changed
type: bool
sample: True
msg:
description: human-readable report of success or failure
returned: always
type: string
sample: "interface bond0 config updated"
'''
def check_url(module, url):
    """Validate that *url* looks like a usable image location.

    A URL is acceptable when it carries a non-empty path and its scheme
    is http, https or empty (a plain filesystem path).  Anything else
    fails the module.
    """
    parts = urlparse(url)
    scheme_ok = parts.scheme in ('http', 'https', '')
    if parts.path and scheme_ok:
        return True
    module.fail_json(msg="Image Path URL. Wrong Format %s" % (url))
    return False
def run_cl_cmd(module, cmd, check_rc=True):
    """Run *cmd* through the Ansible module runner.

    Returns stdout split into a list of lines; fails the module if the
    command cannot be executed at all.
    """
    try:
        (rc, out, err) = module.run_command(cmd, check_rc=check_rc)
    except Exception:
        e = get_exception()
        module.fail_json(msg=e.strerror)
    # splitlines() already drops the trailing newline, so there is no
    # empty last element to trim.
    ret = out.splitlines()
    return ret
def get_slot_info(module):
    """Describe both image slots.

    Returns {'1': {...}, '2': {...}} where each entry carries 'version'
    and, only on the matching slot, 'active' and/or 'primary' set True.
    """
    active_slotnum = get_active_slot(module)
    primary_slotnum = get_primary_slot_num(module)
    slots = {}
    for num in (1, 2):
        info = {'version': get_slot_version(module, str(num))}
        if num == int(active_slotnum):
            info['active'] = True
        if num == int(primary_slotnum):
            info['primary'] = True
        slots[str(num)] = info
    return slots
def get_slot_version(module, slot_num):
    """Return the Cumulus Linux version installed in *slot_num*.

    Prefers an exact match with the version being installed, then the
    lsb-release value, then the firmware environment value.
    """
    from_lsb = check_mnt_root_lsb_release(slot_num)
    from_fw = check_fw_print_env(module, slot_num)
    target = module.sw_version
    if target in (from_lsb, from_fw):
        return target
    return from_lsb if from_lsb else from_fw
def check_mnt_root_lsb_release(slot_num):
    """Parse DISTRIB_RELEASE out of the given slot's lsb-release file.

    Returns the release string (without any "-..." suffix), or None when
    the file is missing, unreadable or has no release line.
    """
    _path = '/mnt/root-rw/config%s/etc/lsb-release' % (slot_num)
    try:
        # 'with' guarantees the file is closed; the original left the
        # handle open.
        with open(_path) as lsb_release:
            for line in lsb_release:
                _match = re.search('DISTRIB_RELEASE=([0-9a-zA-Z.]+)', line)
                if _match:
                    return _match.group(1).split('-')[0]
    except (IOError, OSError):
        # Slot not mounted / file absent: version is simply unknown.
        # (Previously a bare except also hid programming errors.)
        pass
    return None
def check_fw_print_env(module, slot_num):
    """Read the slot's version string from the boot firmware environment.

    Uses fw_printenv on PowerPC and grub-editenv on x86_64; implicitly
    returns None on other architectures or when no entry matches.
    """
    cmd = None
    if platform.machine() == 'ppc':
        cmd = "/usr/sbin/fw_printenv -n cl.ver%s" % (slot_num)
        fw_output = run_cl_cmd(module, cmd)
        # Strip any "-..." suffix from the version string.
        return fw_output[0].split('-')[0]
    elif platform.machine() == 'x86_64':
        cmd = "/usr/bin/grub-editenv list"
        grub_output = run_cl_cmd(module, cmd)
        for _line in grub_output:
            # e.g. "cl.ver1=2.5.3-..." -> "2.5.3"
            _regex_str = re.compile('cl.ver' + slot_num + '=([\w.]+)-')
            m0 = re.match(_regex_str, _line)
            if m0:
                return m0.group(1)
def get_primary_slot_num(module):
    """Return the slot number configured as primary boot slot.

    Reads cl.active from the firmware environment (PowerPC) or from the
    GRUB environment block (x86_64); implicitly returns None on other
    architectures or when no entry matches.
    """
    cmd = None
    if platform.machine() == 'ppc':
        cmd = "/usr/sbin/fw_printenv -n cl.active"
        return ''.join(run_cl_cmd(module, cmd))
    elif platform.machine() == 'x86_64':
        cmd = "/usr/bin/grub-editenv list"
        grub_output = run_cl_cmd(module, cmd)
        for _line in grub_output:
            _regex_str = re.compile('cl.active=(\d)')
            m0 = re.match(_regex_str, _line)
            if m0:
                return m0.group(1)
def get_active_slot(module):
    """Return the slot number the switch booted from.

    Parses the "active=" parameter from the kernel command line; returns
    None when the parameter is absent, and fails the module when
    /proc/cmdline cannot be read.
    """
    try:
        # 'with' closes the handle; the original leaked it.  Only I/O
        # errors are caught now, instead of a bare except.
        with open('/proc/cmdline') as cmdline_file:
            cmdline = cmdline_file.readline()
    except (IOError, OSError):
        module.fail_json(msg='Failed to open /proc/cmdline. ' +
                             'Unable to determine active slot')

    _match = re.search('active=(\d+)', cmdline)
    if _match:
        return _match.group(1)
    return None
def install_img(module):
    """Install the image into the alternate slot via cl-img-install.

    When switch_slot was requested, defers the final exit message to
    check_sw_version() (which may also flip the primary slot); otherwise
    reports success immediately.
    """
    src = module.params.get('src')
    _version = module.sw_version
    app_path = '/usr/cumulus/bin/cl-img-install -f %s' % (src)
    run_cl_cmd(module, app_path)
    perform_switch_slot = module.params.get('switch_slot')
    if perform_switch_slot is True:
        check_sw_version(module)
    else:
        _changed = True
        _msg = "Cumulus Linux Version " + _version + " successfully" + \
            " installed in alternate slot"
        module.exit_json(changed=_changed, msg=_msg)
def switch_slot(module, slotnum):
    """Make *slotnum* the primary boot slot, but only when the user asked
    for it via the switch_slot option."""
    if module.params.get('switch_slot') is not True:
        return
    run_cl_cmd(module, '/usr/cumulus/bin/cl-img-select %s' % (slotnum))
def determine_sw_version(module):
    """Set module.sw_version from the 'version' option or the image name.

    An explicit 'version' parameter wins; otherwise the version is parsed
    out of the src file name (e.g. CumulusLinux-2.0.1.bin -> "2.0.1").
    Exits the module when neither yields a version.
    """
    explicit = module.params.get('version')
    if explicit:
        module.sw_version = explicit
        return

    filename = module.params.get('src').split('/')[-1]
    found = re.search('\d+\W\d+\W\w+', filename)
    if found:
        # Normalize separators to dots, e.g. "2-0-1" -> "2.0.1".
        module.sw_version = re.sub('\W', '.', found.group())
        return

    module.exit_json(changed=False,
                     msg='Unable to determine version from file %s' % (filename))
def check_sw_version(module):
    """Compare the target version against both slots and report via exit_json.

    Exits the module (does not return) whenever the target version is
    already installed in either slot; otherwise simply returns so the
    caller can proceed with installation.
    """
    slots = get_slot_info(module)
    _version = module.sw_version
    perform_switch_slot = module.params.get('switch_slot')
    for _num, slot in slots.items():
        if slot['version'] == _version:
            if 'active' in slot:
                # Already running this version: nothing to do.
                _msg = "Version %s is installed in the active slot" \
                    % (_version)
                module.exit_json(changed=False, msg=_msg)
            else:
                _msg = "Version " + _version + \
                    " is installed in the alternate slot. "
                if 'primary' not in slot:
                    # Alternate slot holds the version but is not primary.
                    if perform_switch_slot is True:
                        switch_slot(module, _num)
                        _msg = _msg + \
                            "cl-img-select has made the alternate " + \
                            "slot the primary slot. " +\
                            "Next reboot, switch will load " + _version + "."
                        module.exit_json(changed=True, msg=_msg)
                    else:
                        _msg = _msg + \
                            "Next reboot will not load " + _version + ". " + \
                            "switch_slot keyword set to 'no'."
                        module.exit_json(changed=False, msg=_msg)
                else:
                    # Alternate slot is already primary: reboot suffices.
                    if perform_switch_slot is True:
                        _msg = _msg + \
                            "Next reboot, switch will load " + _version + "."
                        module.exit_json(changed=False, msg=_msg)
                    else:
                        _msg = _msg + \
                            'switch_slot set to "no". ' + \
                            'No further action to take'
                        module.exit_json(changed=False, msg=_msg)
def main():
    """Ansible entry point: parse arguments, then validate and install."""
    module = AnsibleModule(
        argument_spec=dict(
            src=dict(required=True, type='str'),
            version=dict(type='str'),
            switch_slot=dict(type='bool', choices=BOOLEANS, default=False),
        ),
    )

    determine_sw_version(module)
    _url = module.params.get('src')

    # NOTE(review): check_sw_version() may exit before the URL is ever
    # validated, and check_url()'s return value is ignored -- confirm
    # this ordering is intentional.
    check_sw_version(module)

    check_url(module, _url)

    install_img(module)
# import module snippets (Ansible historically appends these at the end)
from ansible.module_utils.basic import *
# incompatible with ansible 1.4.4 - ubuntu 12.04 version
# from ansible.module_utils.urls import *
from urlparse import urlparse
import re
# Run only when executed directly (Ansible invokes the module this way).
if __name__ == '__main__':
    main()
| gpl-3.0 |
Endika/addons-yelizariev | access_custom/__openerp__.py | 7 | 1053 | {
'name' : 'Custom security stuff',
'version' : '1.0.0',
'author' : 'Ivan Yelizariev',
'category' : 'Tools',
'website' : 'https://yelizariev.github.io',
'description': """
Tested on 8.0 ab7b5d7732a7c222a0aea45bd173742acd47242d.
""",
'depends' : ['access_base',
'res_users_clear_access_rights',
'base',
'account',
'sale',
'crm',
'hr_payroll',
'hr_expense',
'hr_timesheet',
'hr_timesheet_sheet',
'project',
'purchase',
'hr_recruitment',
'hr_holidays',
'hr_evaluation',
'board',
'marketing',
'account_analytic_analysis',
'is_employee',
],
'data':[
'views.xml',
'security.xml',
'ir.model.access.csv',
],
'demo':[
'demo.xml'
],
'installable': True
}
| lgpl-3.0 |
simbs/edx-platform | common/test/acceptance/pages/studio/import_export.py | 61 | 8923 | """
Import/Export pages.
"""
import time
from datetime import datetime
from bok_choy.promise import EmptyPromise
import os
import re
import requests
from ..common.utils import click_css
from .library import LibraryPage
from .course_page import CoursePage
from . import BASE_URL
class TemplateCheckMixin(object):
    """
    Mixin for verifying that a template is loading the correct text.
    """
    @property
    def header_text(self):
        """
        Get the header text of the page.
        """
        # The element contains prefixes such as 'Tools' and '>'; the
        # heading itself is the final newline-separated piece.
        heading = self.q(css='h1.page-header')[0]
        return heading.text.split('\n')[-1]
class ExportMixin(object):
    """
    Export page Mixin.

    Shared behaviour for course and library export pages: tarball
    download and error-modal handling.
    """
    url_path = "export"
    def is_browser_on_page(self):
        """
        Verify this is the export page
        """
        return self.q(css='body.view-export').present
    def _get_tarball(self, url):
        """
        Download tarball at `url`.

        Returns a (success, headers) tuple where success is True when the
        server answered with HTTP 200.
        """
        kwargs = dict()
        # Forward the Studio session cookie (if present) so the export
        # endpoint authorizes the download.
        session_id = [{i['name']: i['value']} for i in self.browser.get_cookies() if i['name'] == u'sessionid']
        if session_id:
            kwargs.update({
                'cookies': session_id[0]
            })
        response = requests.get(url, **kwargs)
        return response.status_code == 200, response.headers
    def download_tarball(self):
        """
        Downloads the course or library in tarball form.
        Returns (request succeeded, content type is application/x-tgz).
        """
        tarball_url = self.q(css='a.action-export').attrs('href')[0]
        good_status, headers = self._get_tarball(tarball_url)
        return good_status, headers['content-type'] == 'application/x-tgz'
    def click_export(self):
        """
        Click the export button. Should only be used if expected to fail, as
        otherwise a browser dialog for saving the file will be presented.
        """
        self.q(css='a.action-export').click()
    def is_error_modal_showing(self):
        """
        Indicates whether or not the error modal is showing.
        """
        return self.q(css='.prompt.error').visible
    def click_modal_button(self):
        """
        Click the button on the modal dialog that appears when there's a problem.
        """
        self.q(css='.prompt.error .action-primary').click()
    def wait_for_error_modal(self):
        """
        If an import or export has an error, an error modal will be shown.
        Blocks until that modal is visible (or times out after 30s).
        """
        EmptyPromise(self.is_error_modal_showing, 'Error Modal Displayed', timeout=30).fulfill()
class LibraryLoader(object):
    """
    URL loading mixin for Library import/export.
    """
    @property
    def url(self):
        """
        This pattern isn't followed universally by library URLs,
        but is used for import/export.
        """
        # BASE_URL/<import-or-export>/<library locator>
        # pylint: disable=no-member
        return "/".join([BASE_URL, self.url_path, unicode(self.locator)])
# All behaviour comes from the mixins; nothing course-specific is needed.
class ExportCoursePage(ExportMixin, TemplateCheckMixin, CoursePage):
    """
    Export page for Courses
    """
# LibraryLoader supplies the URL scheme; ExportMixin supplies behaviour.
class ExportLibraryPage(ExportMixin, TemplateCheckMixin, LibraryLoader, LibraryPage):
    """
    Export page for Libraries
    """
class ImportMixin(object):
"""
Import page mixin
"""
url_path = "import"
@property
def timestamp(self):
"""
The timestamp is displayed on the page as "(MM/DD/YYYY at HH:mm)"
It parses the timestamp and returns a (date, time) tuple
"""
string = self.q(css='.item-progresspoint-success-date').text[0]
return re.match(r'\(([^ ]+).+?(\d{2}:\d{2})', string).groups()
@property
def parsed_timestamp(self):
"""
Return python datetime object from the parsed timestamp tuple (date, time)
"""
timestamp = "{0} {1}".format(*self.timestamp)
formatted_timestamp = time.strptime(timestamp, "%m/%d/%Y %H:%M")
return datetime.fromtimestamp(time.mktime(formatted_timestamp))
def is_browser_on_page(self):
"""
Verify this is the export page
"""
return self.q(css='.choose-file-button').present
@staticmethod
def file_path(filename):
"""
Construct file path to be uploaded from the data upload folder.
Arguments:
filename (str): asset filename
"""
# Should grab common point between this page module and the data folder.
return os.sep.join(__file__.split(os.sep)[:-4]) + '/data/imports/' + filename
def _wait_for_button(self):
"""
Wait for the upload button to appear.
"""
return EmptyPromise(
lambda: self.q(css='#replace-courselike-button')[0],
"Upload button appears",
timeout=30
).fulfill()
def upload_tarball(self, tarball_filename):
"""
Upload a tarball to be imported.
"""
asset_file_path = self.file_path(tarball_filename)
# Make the upload elements visible to the WebDriver.
self.browser.execute_script('$(".file-name-block").show();$(".file-input").show()')
self.q(css='input[type="file"]')[0].send_keys(asset_file_path)
self._wait_for_button()
click_css(self, '.submit-button', require_notification=False)
def is_upload_finished(self):
"""
Checks if the 'view updated' button is showing.
"""
return self.q(css='#view-updated-button').visible
@staticmethod
def _task_properties(completed):
"""
Outputs the CSS class and promise description for task states based on completion.
"""
if completed:
return 'is-complete', "'{}' is marked complete"
else:
return 'is-not-started', "'{}' is in not-yet-started status"
def wait_for_tasks(self, completed=False, fail_on=None):
"""
Wait for all of the items in the task list to be set to the correct state.
"""
classes = {
'Uploading': 'item-progresspoint-upload',
'Unpacking': 'item-progresspoint-unpack',
'Verifying': 'item-progresspoint-verify',
'Updating': 'item-progresspoint-import',
'Success': 'item-progresspoint-success'
}
if fail_on:
# Makes no sense to include this if the tasks haven't run.
completed = True
state, desc_template = self._task_properties(completed)
for desc, css_class in classes.items():
desc_text = desc_template.format(desc)
# pylint: disable=cell-var-from-loop
EmptyPromise(lambda: self.q(css='.{}.{}'.format(css_class, state)).present, desc_text, timeout=30)
if fail_on == desc:
EmptyPromise(
lambda: self.q(css='.{}.is-complete.has-error'.format(css_class)).present,
"{} checkpoint marked as failed".format(desc),
timeout=30
)
# The rest should never run.
state, desc_template = self._task_properties(False)
def wait_for_upload(self):
"""
Wait for the upload to be confirmed.
"""
EmptyPromise(self.is_upload_finished, 'Upload Finished', timeout=30).fulfill()
def is_filename_error_showing(self):
    """
    An error should be shown if the user tries to upload the wrong kind of file.

    Tell us whether it's currently being shown.
    """
    return self.q(css='#fileupload .error-block').visible
def is_task_list_showing(self):
    """
    The task list shows a series of steps being performed during import. It is normally
    hidden until the upload begins.

    Tell us whether it's currently visible.
    """
    return self.q(css='.wrapper-status').visible
def is_timestamp_visible(self):
    """
    Checks if the UTC timestamp of the last successful import is visible.
    """
    return self.q(css='.item-progresspoint-success-date').visible
def wait_for_timestamp_visible(self):
    """
    Wait for the timestamp of the last successful import to be visible.
    """
    EmptyPromise(self.is_timestamp_visible, 'Timestamp Visible', timeout=30).fulfill()
def wait_for_filename_error(self):
    """
    Wait for the upload field to display an error.
    """
    EmptyPromise(self.is_filename_error_showing, 'Upload Error Displayed', timeout=30).fulfill()
def finished_target_url(self):
    """
    Grab the URL of the 'view updated library/course outline' button.

    Returns:
        str: the href attribute of the primary action button.
    """
    return self.q(css='.action.action-primary')[0].get_attribute('href')
class ImportCoursePage(ImportMixin, TemplateCheckMixin, CoursePage):
    """
    Import page for Courses.
    """
class ImportLibraryPage(ImportMixin, TemplateCheckMixin, LibraryLoader, LibraryPage):
    """
    Import page for Libraries.
    """
| agpl-3.0 |
hexcap/dpkt | dpkt/ip6.py | 6 | 13386 | # $Id: ip6.py 87 2013-03-05 19:41:04Z andrewflnr@gmail.com $
# -*- coding: utf-8 -*-
"""Internet Protocol, version 6."""
import dpkt
from decorators import deprecated
class IP6(dpkt.Packet):
    """Internet Protocol, version 6.

    The first 32-bit header word packs version (4 bits), traffic class
    (8 bits) and flow label (20 bits); the v / fc / flow properties below
    provide masked access to those subfields.
    """

    __hdr__ = (
        ('_v_fc_flow', 'I', 0x60000000L),
        ('plen', 'H', 0),  # payload length (not including header)
        ('nxt', 'B', 0),  # next header protocol
        ('hlim', 'B', 0),  # hop limit
        ('src', '16s', ''),
        ('dst', '16s', '')
    )

    # XXX - to be shared with IP. We cannot refer to the ip module
    # right now because ip.__load_protos() expects the IP6 class to be
    # defined.
    _protosw = None

    @property
    def v(self):
        # IP version: top 4 bits of the first word.
        return self._v_fc_flow >> 28

    @v.setter
    def v(self, v):
        self._v_fc_flow = (self._v_fc_flow & ~0xf0000000L) | (v << 28)

    @property
    def fc(self):
        # Traffic class: bits 20-27.
        return (self._v_fc_flow >> 20) & 0xff

    @fc.setter
    def fc(self, v):
        self._v_fc_flow = (self._v_fc_flow & ~0xff00000L) | (v << 20)

    @property
    def flow(self):
        # Flow label: low 20 bits.
        return self._v_fc_flow & 0xfffff

    @flow.setter
    def flow(self, v):
        self._v_fc_flow = (self._v_fc_flow & ~0xfffff) | (v & 0xfffff)

    # Deprecated methods, will be removed in the future
    # =================================================
    @deprecated('v')
    def _get_v(self):
        return self.v

    @deprecated('v')
    def _set_v(self, v):
        self.v = v

    @deprecated('fc')
    def _get_fc(self):
        return self.fc

    @deprecated('fc')
    def _set_fc(self, v):
        self.fc = v

    @deprecated('flow')
    def _get_flow(self):
        return self.flow

    @deprecated('flow')
    def _set_flow(self, v):
        self.flow = v
    # =================================================

    def unpack(self, buf):
        """Parse the fixed header, then walk the extension-header chain.

        Recognized extension headers are collected into
        ``self.extension_hdrs`` keyed by protocol number; the payload
        protocol number ends up in ``self.p``.
        """
        dpkt.Packet.unpack(self, buf)
        self.extension_hdrs = {}

        if self.plen:
            buf = self.data[:self.plen]
        else:  # due to jumbo payload or TSO
            buf = self.data

        next_ext_hdr = self.nxt

        # Each extension header records its total size in .length, which is
        # how far to advance in buf before reading the next header.
        while next_ext_hdr in ext_hdrs:
            ext = ext_hdrs_cls[next_ext_hdr](buf)
            self.extension_hdrs[next_ext_hdr] = ext
            buf = buf[ext.length:]
            next_ext_hdr = getattr(ext, 'nxt', None)

        # set the payload protocol id
        if next_ext_hdr is not None:
            self.p = next_ext_hdr

        try:
            self.data = self._protosw[next_ext_hdr](buf)
            setattr(self, self.data.__class__.__name__.lower(), self.data)
        except (KeyError, dpkt.UnpackError):
            # Unknown payload protocol or truncated data: keep raw bytes.
            self.data = buf

    def headers_str(self):
        """Output extension headers in order defined in RFC1883 (except dest opts)"""
        header_str = ""

        for hdr in ext_hdrs:
            if hdr in self.extension_hdrs:
                header_str += str(self.extension_hdrs[hdr])

        return header_str

    def __str__(self):
        # 6 = TCP, 17 = UDP, 58 = ICMPv6: fill in a missing transport
        # checksum (over pseudo-header + payload) before serializing.
        if (self.p == 6 or self.p == 17 or self.p == 58) and not self.data.sum:
            # XXX - set TCP, UDP, and ICMPv6 checksums
            p = str(self.data)
            s = dpkt.struct.pack('>16s16sxBH', self.src, self.dst, self.nxt, len(p))
            s = dpkt.in_cksum_add(0, s)
            s = dpkt.in_cksum_add(s, p)
            try:
                self.data.sum = dpkt.in_cksum_done(s)
            except AttributeError:
                pass
        return self.pack_hdr() + self.headers_str() + str(self.data)

    @classmethod
    def set_proto(cls, p, pktclass):
        # Register a parser class for payload protocol number p.
        cls._protosw[p] = pktclass

    @classmethod
    def get_proto(cls, p):
        # Look up the parser class for payload protocol number p.
        return cls._protosw[p]
import ip

# We are most likely still in the middle of ip.__load_protos() which
# implicitly loads this module through __import__(), so the content of
# ip.IP._protosw is still incomplete at the moment. By sharing the
# same dictionary by reference as opposed to making a copy, when
# ip.__load_protos() finishes, we will also automatically get the most
# up-to-date dictionary.  (IP6.set_proto/get_proto read and mutate it.)
IP6._protosw = ip.IP._protosw
class IP6ExtensionHeader(dpkt.Packet):
    """
    An extension header is very similar to a 'sub-packet'.
    We just want to re-use all the hdr unpacking etc.

    Subclasses set ``self.length`` (total header size in bytes) in their
    ``unpack`` so IP6.unpack can advance through the header chain.
    """
    pass
class IP6OptsHeader(IP6ExtensionHeader):
    """Options extension header: a 2-byte header followed by TLV options."""

    __hdr__ = (
        ('nxt', 'B', 0),  # next extension header protocol
        ('len', 'B', 0)  # option data length in 8 octect units (ignoring first 8 octets) so, len 0 == 64bit header
    )

    def unpack(self, buf):
        """Parse the type/length/value option list into ``self.options``."""
        dpkt.Packet.unpack(self, buf)
        self.length = (self.len + 1) * 8
        options = []

        index = 0

        # Walk the options that follow the 2-byte fixed header.
        while index < self.length - 2:
            opt_type = ord(self.data[index])

            # PAD1 option: a single padding byte, no length/value fields.
            if opt_type == 0:
                index += 1
                continue

            opt_length = ord(self.data[index + 1])

            if opt_type == 1:  # PADN option
                # PADN uses opt_length bytes in total
                index += opt_length + 2
                continue

            options.append(
                {'type': opt_type, 'opt_length': opt_length, 'data': self.data[index + 2:index + 2 + opt_length]})

            # add the two chars and the option_length, to move to the next option
            index += opt_length + 2

        self.options = options
class IP6HopOptsHeader(IP6OptsHeader):
    """Hop-by-hop options header; same TLV layout as IP6OptsHeader."""
    pass
class IP6DstOptsHeader(IP6OptsHeader):
    """Destination options header; same TLV layout as IP6OptsHeader."""
    pass
class IP6RoutingHeader(IP6ExtensionHeader):
    """Type 0 routing extension header: fixed header + 16-byte addresses."""

    __hdr__ = (
        ('nxt', 'B', 0),  # next extension header protocol
        ('len', 'B', 0),  # extension data length in 8 octect units (ignoring first 8 octets) (<= 46 for type 0)
        ('type', 'B', 0),  # routing type (currently, only 0 is used)
        ('segs_left', 'B', 0),  # remaining segments in route, until destination (<= 23)
        ('rsvd_sl_bits', 'I', 0),  # reserved (1 byte), strict/loose bitmap for addresses
    )

    @property
    def sl_bits(self):
        # Strict/loose bit map: low 24 bits of the rsvd_sl_bits word.
        return self.rsvd_sl_bits & 0xffffff

    @sl_bits.setter
    def sl_bits(self, v):
        # BUGFIX: the setter previously masked only 20 bits (0xfffff),
        # clobbering the top 4 bits of the 24-bit bitmap that the getter
        # reads.  Use the same 24-bit mask in both directions.
        self.rsvd_sl_bits = (self.rsvd_sl_bits & ~0xffffff) | (v & 0xffffff)

    # Deprecated methods, will be removed in the future
    # =================================================
    @deprecated('sl_bits')
    def _get_sl_bits(self): return self.sl_bits

    @deprecated('sl_bits')
    def _set_sl_bits(self, v): self.sl_bits = v
    # =================================================

    def unpack(self, buf):
        """Parse the list of 16-byte route addresses into ``self.addresses``."""
        hdr_size = 8
        addr_size = 16

        dpkt.Packet.unpack(self, buf)

        addresses = []

        # self.len counts 8-octet units and each address is 16 bytes, so
        # the header carries len / 2 addresses.
        num_addresses = self.len / 2

        buf = buf[hdr_size:hdr_size + num_addresses * addr_size]

        for i in range(num_addresses):
            addresses.append(buf[i * addr_size: i * addr_size + addr_size])

        self.data = buf
        self.addresses = addresses
        self.length = self.len * 8 + 8
class IP6FragmentHeader(IP6ExtensionHeader):
    """Fragment extension header (fixed 8 bytes, no variable data)."""

    __hdr__ = (
        ('nxt', 'B', 0),  # next extension header protocol
        ('resv', 'B', 0),  # reserved, set to 0
        ('frag_off_resv_m', 'H', 0),  # frag offset (13 bits), reserved zero (2 bits), More frags flag
        ('id', 'I', 0)  # fragments id
    )

    def unpack(self, buf):
        """Fixed-size header; consume exactly __hdr_len__ bytes."""
        dpkt.Packet.unpack(self, buf)
        self.length = self.__hdr_len__
        self.data = ''

    @property
    def frag_off(self):
        # Fragment offset: top 13 bits.
        return self.frag_off_resv_m >> 3

    @frag_off.setter
    def frag_off(self, v):
        self.frag_off_resv_m = (self.frag_off_resv_m & ~0xfff8) | (v << 3)

    @property
    def m_flag(self):
        # More-fragments flag: bit 0.
        return self.frag_off_resv_m & 1

    @m_flag.setter
    def m_flag(self, v):
        # BUGFIX: the mask used to be ~0xfffe, which kept ONLY bit 0
        # (wiping frag_off and the reserved bits, and making it impossible
        # to clear the flag).  Clear just bit 0 and set it from v, matching
        # the frag_off setter's ~0xfff8 pattern.
        self.frag_off_resv_m = (self.frag_off_resv_m & ~0x1) | (v & 0x1)

    # Deprecated methods, will be removed in the future
    # =================================================
    @deprecated('frag_off')
    def _get_frag_off(self): return self.frag_off

    @deprecated('frag_off')
    def _set_frag_off(self, v): self.frag_off = v

    @deprecated('m_flag')
    def _get_m_flag(self): return self.m_flag

    @deprecated('m_flag')
    def _set_m_flag(self, v): self.m_flag = v
    # =================================================
class IP6AHHeader(IP6ExtensionHeader):
    """Authentication Header extension."""

    __hdr__ = (
        ('nxt', 'B', 0),  # next extension header protocol
        ('len', 'B', 0),  # length of header in 4 octet units (ignoring first 2 units)
        ('resv', 'H', 0),  # reserved, 2 bytes of 0
        ('spi', 'I', 0),  # SPI security parameter index
        ('seq', 'I', 0)  # sequence no.
    )

    def unpack(self, buf):
        """Compute total header length and slice out the ICV bytes."""
        dpkt.Packet.unpack(self, buf)
        self.length = (self.len + 2) * 4
        # Authentication data follows the 12-byte fixed header:
        # (len + 2) * 4 total minus 12 fixed == (len - 1) * 4 bytes.
        self.auth_data = self.data[:(self.len - 1) * 4]
class IP6ESPHeader(IP6ExtensionHeader):
    """Encapsulating Security Payload extension header."""

    __hdr__ = (
        ('spi', 'I', 0),  # security parameter index
        ('seq', 'I', 0)  # sequence number
    )

    def unpack(self, buf):
        # ESP encrypts everything after itself, so the whole remaining
        # buffer counts as part of this header's length.
        dpkt.Packet.unpack(self, buf)
        self.length = self.__hdr_len__ + len(self.data)
# Extension-header protocol numbers in the order headers are emitted by
# IP6.headers_str (RFC 1883 ordering, destination options last).
ext_hdrs = [ip.IP_PROTO_HOPOPTS, ip.IP_PROTO_ROUTING, ip.IP_PROTO_FRAGMENT, ip.IP_PROTO_AH, ip.IP_PROTO_ESP,
            ip.IP_PROTO_DSTOPTS]
# Maps each extension-header protocol number to its parser class
# (used by IP6.unpack while walking the header chain).
ext_hdrs_cls = {ip.IP_PROTO_HOPOPTS: IP6HopOptsHeader,
                ip.IP_PROTO_ROUTING: IP6RoutingHeader,
                ip.IP_PROTO_FRAGMENT: IP6FragmentHeader,
                ip.IP_PROTO_ESP: IP6ESPHeader,
                ip.IP_PROTO_AH: IP6AHHeader,
                ip.IP_PROTO_DSTOPTS: IP6DstOptsHeader}
def test_ipg():
    """Round-trip a captured TCP-over-IPv6 packet through IP6 and back."""
    s = '`\x00\x00\x00\x00(\x06@\xfe\x80\x00\x00\x00\x00\x00\x00\x02\x11$\xff\xfe\x8c\x11\xde\xfe\x80\x00\x00\x00\x00\x00\x00\x02\xb0\xd0\xff\xfe\xe1\x80r\xcd\xca\x00\x16\x04\x84F\xd5\x00\x00\x00\x00\xa0\x02\xff\xff\xf8\t\x00\x00\x02\x04\x05\xa0\x01\x03\x03\x00\x01\x01\x08\n}\x185?\x00\x00\x00\x00'
    _ip = IP6(s)
    # print `ip`

    # Zero the checksum so __str__ has to recompute it.
    _ip.data.sum = 0
    s2 = str(_ip)
    IP6(s)
    # print `ip2`
    assert (s == s2)
def test_ip6_routing_header():
    """Parse a packet carrying a routing header (protocol 43) and re-serialize."""
    s = '`\x00\x00\x00\x00<+@ H\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xde\xca G\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xca\xfe\x06\x04\x00\x02\x00\x00\x00\x00 \x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xde\xca "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xde\xca\x00\x14\x00P\x00\x00\x00\x00\x00\x00\x00\x00P\x02 \x00\x91\x7f\x00\x00'
    ip = IP6(s)
    s2 = str(ip)
    # 43 is Routing header id
    assert (len(ip.extension_hdrs[43].addresses) == 2)
    assert ip.tcp
    assert (s == s2)
    assert str(ip) == s
def test_ip6_fragment_header():
    """Check fragment-header field extraction and round-trip serialization."""
    s = '\x06\xee\xff\xfb\x00\x00\xff\xff'
    fh = IP6FragmentHeader(s)
    # s2 = str(fh) variable 's2' is not used
    str(fh)
    assert (fh.nxt == 6)
    assert (fh.id == 65535)
    assert (fh.frag_off == 8191)
    assert (fh.m_flag == 1)
    assert str(fh) == s

    # IP6 with fragment header
    s = '\x60\x00\x00\x00\x00\x10\x2c\x00\x02\x22\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x03\x33\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x29\x00\x00\x01\x00\x00\x00\x00\x60\x00\x00\x00\x00\x10\x2c\x00'
    ip = IP6(s)
    assert str(ip) == s
def test_ip6_options_header():
    """Parse a TLV options header (PAD1/PADN are skipped, 3 real options remain)."""
    s = ';\x04\x01\x02\x00\x00\xc9\x10\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xc2\x04\x00\x00\x00\x00\x05\x02\x00\x00\x01\x02\x00\x00'
    options = IP6OptsHeader(s).options
    assert (len(options) == 3)
    assert str(IP6OptsHeader(s)) == s
def test_ip6_ah_header():
    """Check AH header length, ICV slice and field extraction."""
    s = ';\x04\x00\x00\x02\x02\x02\x02\x01\x01\x01\x01\x78\x78\x78\x78\x78\x78\x78\x78'
    ah = IP6AHHeader(s)
    assert (ah.length == 24)
    assert (ah.auth_data == 'xxxxxxxx')
    assert (ah.spi == 0x2020202)
    assert (ah.seq == 0x1010101)
    assert str(ah) == s
def test_ip6_esp_header():
    """Check ESP header length (covers the whole encrypted payload) and SPI."""
    s = '\x00\x00\x01\x00\x00\x00\x00\x44\xe2\x4f\x9e\x68\xf3\xcd\xb1\x5f\x61\x65\x42\x8b\x78\x0b\x4a\xfd\x13\xf0\x15\x98\xf5\x55\x16\xa8\x12\xb3\xb8\x4d\xbc\x16\xb2\x14\xbe\x3d\xf9\x96\xd4\xa0\x39\x1f\x85\x74\x25\x81\x83\xa6\x0d\x99\xb6\xba\xa3\xcc\xb6\xe0\x9a\x78\xee\xf2\xaf\x9a'
    esp = IP6ESPHeader(s)
    assert esp.length == 68
    assert esp.spi == 256
    assert str(esp) == s
def test_ip6_extension_headers():
    """Attach several extension headers to a parsed IP6 and count them."""
    p = '`\x00\x00\x00\x00<+@ H\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xde\xca G\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xca\xfe\x06\x04\x00\x02\x00\x00\x00\x00 \x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xde\xca "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xde\xca\x00\x14\x00P\x00\x00\x00\x00\x00\x00\x00\x00P\x02 \x00\x91\x7f\x00\x00'
    ip = IP6(p)
    o = ';\x04\x01\x02\x00\x00\xc9\x10\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xc2\x04\x00\x00\x00\x00\x05\x02\x00\x00\x01\x02\x00\x00'
    options = IP6HopOptsHeader(o)
    ip.extension_hdrs[0] = options
    fh = '\x06\xee\xff\xfb\x00\x00\xff\xff'
    ip.extension_hdrs[44] = IP6FragmentHeader(fh)
    ah = ';\x04\x00\x00\x02\x02\x02\x02\x01\x01\x01\x01\x78\x78\x78\x78\x78\x78\x78\x78'
    ip.extension_hdrs[51] = IP6AHHeader(ah)
    do = ';\x02\x01\x02\x00\x00\xc9\x10\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    ip.extension_hdrs[60] = IP6DstOptsHeader(do)
    # Routing header (43) from the original packet plus the four added above.
    assert (len(ip.extension_hdrs) == 5)
if __name__ == '__main__':
    # Run the module self-tests when executed directly.
    test_ipg()
    test_ip6_routing_header()
    test_ip6_fragment_header()
    test_ip6_options_header()
    test_ip6_ah_header()
    test_ip6_esp_header()
    test_ip6_extension_headers()
    print 'Tests Successful...'
| bsd-3-clause |
aurelieladier/openturns | python/test/t_GumbelCopula_std.py | 1 | 4030 | #! /usr/bin/env python
from __future__ import print_function
from openturns import *
from math import *
TESTPREAMBLE()
RandomGenerator.SetSeed(0)

# Exercise the GumbelCopula API; stdout is compared against an expected
# baseline, so the printed strings must stay byte-identical on success.
try:
    # Instanciate one distribution object
    dim = 2
    copula = GumbelCopula(2.5)
    print("Copula ", repr(copula))
    print("Copula ", copula)
    print("Mean ", repr(copula.getMean()))
    print("Covariance ", repr(copula.getCovariance()))

    # Is this copula an elliptical distribution?
    print("Elliptical distribution= ", copula.isElliptical())

    # Is this copula elliptical ?
    print("Elliptical copula= ", copula.hasEllipticalCopula())

    # Is this copula independent ?
    print("Independent copula= ", copula.hasIndependentCopula())

    # Test for realization of distribution
    oneRealization = copula.getRealization()
    print("oneRealization=", repr(oneRealization))

    # Test for sampling
    size = 10
    oneSample = copula.getSample(size)
    print("oneSample=", repr(oneSample))

    # Test for sampling
    size = 10000
    anotherSample = copula.getSample(size)
    print("anotherSample mean=", repr(anotherSample.computeMean()))
    print("anotherSample covariance=", repr(anotherSample.computeCovariance()))

    # Define a point
    point = NumericalPoint(dim, 0.2)

    # Show PDF and CDF of point
    pointPDF = copula.computePDF(point)
    pointCDF = copula.computeCDF(point)
    print("Point = ", repr(point), " pdf=%.6f" %
          pointPDF, " cdf=%.6f" % pointCDF)

    # Get 50% quantile
    quantile = copula.computeQuantile(0.5)
    print("Quantile=", repr(quantile))
    print("CDF(quantile)=%.6f" % copula.computeCDF(quantile))

    # Get 95% survival function
    inverseSurvival = NumericalPoint(copula.computeInverseSurvivalFunction(0.95))
    print("InverseSurvival=", repr(inverseSurvival))
    print("Survival(inverseSurvival)=%.6f" % copula.computeSurvivalFunction(inverseSurvival))

    # Confidence regions
    interval, threshold = copula.computeMinimumVolumeIntervalWithMarginalProbability(0.95)
    print("Minimum volume interval=", interval)
    print("threshold=", NumericalPoint(1, threshold))
    levelSet, beta = copula.computeMinimumVolumeLevelSetWithThreshold(0.95)
    print("Minimum volume level set=", levelSet)
    print("beta=", NumericalPoint(1, beta))
    interval, beta = copula.computeBilateralConfidenceIntervalWithMarginalProbability(0.95)
    print("Bilateral confidence interval=", interval)
    print("beta=", NumericalPoint(1, beta))
    interval, beta = copula.computeUnilateralConfidenceIntervalWithMarginalProbability(0.95, False)
    print("Unilateral confidence interval (lower tail)=", interval)
    print("beta=", NumericalPoint(1, beta))
    interval, beta = copula.computeUnilateralConfidenceIntervalWithMarginalProbability(0.95, True)
    print("Unilateral confidence interval (upper tail)=", interval)
    print("beta=", NumericalPoint(1, beta))

    # Extract the marginals
    for i in range(dim):
        margin = copula.getMarginal(i)
        print("margin=", repr(margin))
        print("margin PDF=%.6f" % margin.computePDF(NumericalPoint(1, 0.25)))
        print("margin CDF=%.6f" % margin.computeCDF(NumericalPoint(1, 0.25)))
        print("margin quantile=", repr(margin.computeQuantile(0.95)))
        print("margin realization=", repr(margin.getRealization()))

    # Extract a 2-D marginal
    indices = Indices(2, 0)
    indices[0] = 1
    indices[1] = 0
    print("indices=", repr(indices))
    margins = copula.getMarginal(indices)
    print("margins=", repr(margins))
    print("margins PDF=%.6f" % margins.computePDF(NumericalPoint(2, 0.25)))
    print("margins CDF=%.6f" % margins.computeCDF(NumericalPoint(2, 0.25)))
    quantile = NumericalPoint(margins.computeQuantile(0.95))
    print("margins quantile=", repr(quantile))
    print("margins CDF(qantile)=%.6f" % margins.computeCDF(quantile))
    print("margins realization=", repr(margins.getRealization()))

except:
    import sys
    # BUGFIX: this script previously reported "t_NormalCopula_std.py"
    # (copy-paste from the normal-copula test) on failure.
    print("t_GumbelCopula_std.py", sys.exc_info()[0], sys.exc_info()[1])
| lgpl-3.0 |
weso/CWR-DataApi | tests/parser/dictionary/encoder/record/test_acknowledgement.py | 1 | 2439 | # -*- coding: utf-8 -*-
import unittest
import datetime
from cwr.parser.encoder.dictionary import AcknowledgementDictionaryEncoder
from cwr.acknowledgement import AcknowledgementRecord
"""
Acknowledgement to dictionary encoding tests.
The following cases are tested:
"""
__author__ = 'Bernardo Martínez Garrido'
__license__ = 'MIT'
__status__ = 'Development'
class TestAcknowledgementRecordDictionaryEncoding(unittest.TestCase):
    """
    Checks that AcknowledgementDictionaryEncoder maps each field of an
    AcknowledgementRecord onto the expected dictionary key.
    """

    def setUp(self):
        # Encoder under test; one fresh instance per test case.
        self._encoder = AcknowledgementDictionaryEncoder()

    def test_encoded(self):
        """A fully populated record is encoded field-for-field."""
        data = AcknowledgementRecord(record_type='ACK',
                                     transaction_sequence_n=3,
                                     record_sequence_n=15,
                                     original_group_id=4,
                                     original_transaction_sequence_n=5,
                                     original_transaction_type='AGR',
                                     transaction_status='AS',
                                     creation_date_time=datetime.datetime.strptime(
                                         '20030215', '%Y%m%d').date(),
                                     processing_date=datetime.datetime.strptime(
                                         '20030216', '%Y%m%d').date(),
                                     creation_title='TITLE',
                                     submitter_creation_n='A123',
                                     recipient_creation_n='B124')

        encoded = self._encoder.encode(data)

        self.assertEqual('ACK', encoded['record_type'])
        self.assertEqual(3, encoded['transaction_sequence_n'])
        self.assertEqual(15, encoded['record_sequence_n'])
        self.assertEqual(4, encoded['original_group_id'])
        self.assertEqual(5, encoded['original_transaction_sequence_n'])
        self.assertEqual('AGR', encoded['original_transaction_type'])
        self.assertEqual('AS', encoded['transaction_status'])
        self.assertEqual(
            datetime.datetime.strptime('20030215', '%Y%m%d').date(),
            encoded['creation_date_time'])
        self.assertEqual(
            datetime.datetime.strptime('20030216', '%Y%m%d').date(),
            encoded['processing_date'])
        self.assertEqual('TITLE', encoded['creation_title'])
        self.assertEqual('A123', encoded['submitter_creation_n'])
        self.assertEqual('B124', encoded['recipient_creation_n'])
| mit |
onceuponatimeforever/oh-mainline | vendor/packages/bleach/bleach/tests/test_unicode.py | 35 | 1909 | # -*- coding: utf-8 -*-
from nose.tools import eq_
from bleach import clean, linkify
def test_japanese_safe_simple():
    """Japanese text passes through clean() and linkify() unchanged."""
    eq_(u'ヘルプとチュートリアル', clean(u'ヘルプとチュートリアル'))
    eq_(u'ヘルプとチュートリアル', linkify(u'ヘルプとチュートリアル'))
def test_japanese_strip():
    """Allowed tags around Japanese text are preserved by clean()."""
    eq_(u'<em>ヘルプとチュートリアル</em>',
        clean(u'<em>ヘルプとチュートリアル</em>'))
    eq_(u'<span>ヘルプとチュートリアル</span>',
        clean(u'<span>ヘルプとチュートリアル</span>'))
def test_russian_simple():
    """Cyrillic text passes through clean() and linkify() unchanged."""
    eq_(u'Домашняя', clean(u'Домашняя'))
    eq_(u'Домашняя', linkify(u'Домашняя'))
def test_mixed():
    """Mixed Cyrillic + Japanese text passes through clean() unchanged."""
    eq_(u'Домашняяヘルプとチュートリアル',
        clean(u'Домашняяヘルプとチュートリアル'))
def test_mixed_linkify():
    """linkify() wraps the URL while leaving surrounding non-ASCII text intact."""
    eq_(u'Домашняя <a href="http://example.com" rel="nofollow">'
        u'http://example.com</a> ヘルプとチュートリアル',
        linkify(u'Домашняя http://example.com ヘルプとチュートリアル'))
def test_url_utf8():
    """Allow UTF8 characters in URLs themselves."""
    out = u'<a href="%(url)s" rel="nofollow">%(url)s</a>'

    tests = (
        ('http://éxámplé.com/', out % {'url': u'http://éxámplé.com/'}),
        ('http://éxámplé.com/íàñá/',
         out % {'url': u'http://éxámplé.com/íàñá/'}),
        ('http://éxámplé.com/íàñá/?foo=bar',
         out % {'url': u'http://éxámplé.com/íàñá/?foo=bar'}),
        ('http://éxámplé.com/íàñá/?fóo=bár',
         out % {'url': u'http://éxámplé.com/íàñá/?fóo=bár'}),
    )

    def check(test, expected_output):
        eq_(expected_output, linkify(test))

    # Nose-style generator test: yields one check per (input, expected) pair.
    for test, expected_output in tests:
        yield check, test, expected_output
| agpl-3.0 |
tkelman/utf8rewind | dependencies/gtest-svn-head/scripts/release_docs.py | 1167 | 6132 | #!/usr/bin/env python
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Script for branching Google Test/Mock wiki pages for a new version.
SYNOPSIS
release_docs.py NEW_RELEASE_VERSION
Google Test and Google Mock's external user documentation is in
interlinked wiki files. When we release a new version of
Google Test or Google Mock, we need to branch the wiki files
such that users of a specific version of Google Test/Mock can
look up documenation relevant for that version. This script
automates that process by:
- branching the current wiki pages (which document the
behavior of the SVN trunk head) to pages for the specified
version (e.g. branching FAQ.wiki to V2_6_FAQ.wiki when
NEW_RELEASE_VERSION is 2.6);
- updating the links in the branched files to point to the branched
version (e.g. a link in V2_6_FAQ.wiki that pointed to
Primer.wiki#Anchor will now point to V2_6_Primer.wiki#Anchor).
NOTE: NEW_RELEASE_VERSION must be a NEW version number for
which the wiki pages don't yet exist; otherwise you'll get SVN
errors like "svn: Path 'V1_7_PumpManual.wiki' is not a
directory" when running the script.
EXAMPLE
$ cd PATH/TO/GTEST_SVN_WORKSPACE/trunk
$ scripts/release_docs.py 2.6 # create wiki pages for v2.6
$ svn status # verify the file list
$ svn diff # verify the file contents
$ svn commit -m "release wiki pages for v2.6"
"""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import re
import sys
import common
# Wiki pages that shouldn't be branched for every gtest/gmock release.
# Wiki pages that shouldn't be branched for every gtest/gmock release
# (they document the project itself, not a particular version).
GTEST_UNVERSIONED_WIKIS = ['DevGuide.wiki']
GMOCK_UNVERSIONED_WIKIS = [
    'DesignDoc.wiki',
    'DevGuide.wiki',
    'KnownIssues.wiki'
]
def DropWikiSuffix(wiki_filename):
  """Removes the .wiki suffix (if any) from the given filename."""
  suffix = '.wiki'
  if wiki_filename.endswith(suffix):
    return wiki_filename[:-len(suffix)]
  return wiki_filename
class WikiBrancher(object):
  """Branches the wiki pages of a gtest/gmock SVN workspace for a release."""

  def __init__(self, dot_version):
    # dot_version: the new release version in dotted form, e.g. '2.6'.
    self.project, svn_root_path = common.GetSvnInfo()
    if self.project not in ('googletest', 'googlemock'):
      sys.exit('This script must be run in a gtest or gmock SVN workspace.')
    self.wiki_dir = svn_root_path + '/wiki'
    # Turn '2.6' to 'V2_6_'.
    self.version_prefix = 'V' + dot_version.replace('.', '_') + '_'
    self.files_to_branch = self.GetFilesToBranch()
    page_names = [DropWikiSuffix(f) for f in self.files_to_branch]

    # A link to Foo.wiki is in one of the following forms:
    #   [Foo words]
    #   [Foo#Anchor words]
    #   [http://code.google.com/.../wiki/Foo words]
    #   [http://code.google.com/.../wiki/Foo#Anchor words]
    # We want to replace 'Foo' with 'V2_6_Foo' in the above cases.
    self.search_for_re = re.compile(
        # This regex matches either
        #   [Foo
        # or
        #   /wiki/Foo
        # followed by a space or a #, where Foo is the name of an
        # unversioned wiki page.
        r'(\[|/wiki/)(%s)([ #])' % '|'.join(page_names))
    self.replace_with = r'\1%s\2\3' % (self.version_prefix,)

  def GetFilesToBranch(self):
    """Returns a list of .wiki file names that need to be branched."""
    unversioned_wikis = (GTEST_UNVERSIONED_WIKIS if self.project == 'googletest'
                         else GMOCK_UNVERSIONED_WIKIS)
    return [f for f in os.listdir(self.wiki_dir)
            if (f.endswith('.wiki') and
                not re.match(r'^V\d', f) and  # Excluded versioned .wiki files.
                f not in unversioned_wikis)]

  def BranchFiles(self):
    """Branches the .wiki files needed to be branched (via 'svn cp')."""
    print 'Branching %d .wiki files:' % (len(self.files_to_branch),)
    os.chdir(self.wiki_dir)
    for f in self.files_to_branch:
      command = 'svn cp %s %s%s' % (f, self.version_prefix, f)
      print command
      os.system(command)

  def UpdateLinksInBranchedFiles(self):
    """Rewrites links in each branched copy to the versioned page names."""
    for f in self.files_to_branch:
      source_file = os.path.join(self.wiki_dir, f)
      versioned_file = os.path.join(self.wiki_dir, self.version_prefix + f)
      print 'Updating links in %s.' % (versioned_file,)
      text = file(source_file, 'r').read()
      new_text = self.search_for_re.sub(self.replace_with, text)
      file(versioned_file, 'w').write(new_text)
def main():
  # Expect exactly one argument: the new release version (e.g. '2.6').
  if len(sys.argv) != 2:
    sys.exit(__doc__)

  brancher = WikiBrancher(sys.argv[1])
  brancher.BranchFiles()
  brancher.UpdateLinksInBranchedFiles()


if __name__ == '__main__':
  main()
| mit |
kinjalcpatel/kinetic-c | vendor/protobuf-2.6.0/gtest/test/gtest_shuffle_test.py | 3023 | 12549 | #!/usr/bin/env python
#
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Verifies that test shuffling works."""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import gtest_test_utils
# Command to run the gtest_shuffle_test_ program.
COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_shuffle_test_')
# The environment variables for test sharding.
TOTAL_SHARDS_ENV_VAR = 'GTEST_TOTAL_SHARDS'
SHARD_INDEX_ENV_VAR = 'GTEST_SHARD_INDEX'
TEST_FILTER = 'A*.A:A*.B:C*'
ALL_TESTS = []
ACTIVE_TESTS = []
FILTERED_TESTS = []
SHARDED_TESTS = []
SHUFFLED_ALL_TESTS = []
SHUFFLED_ACTIVE_TESTS = []
SHUFFLED_FILTERED_TESTS = []
SHUFFLED_SHARDED_TESTS = []
def AlsoRunDisabledTestsFlag():
  """Returns the command-line flag that also runs disabled tests."""
  return '--gtest_also_run_disabled_tests'
def FilterFlag(test_filter):
  """Returns the --gtest_filter flag for the given filter expression."""
  return '--gtest_filter={0}'.format(test_filter)
def RepeatFlag(n):
  """Returns the --gtest_repeat flag for n repetitions."""
  return '--gtest_repeat={0}'.format(n)
def ShuffleFlag():
  """Returns the command-line flag that enables test shuffling."""
  return '--gtest_shuffle'
def RandomSeedFlag(n):
  """Returns the --gtest_random_seed flag for seed value n."""
  return '--gtest_random_seed={0}'.format(n)
def RunAndReturnOutput(extra_env, args):
  """Runs the test program and returns its output.

  Args:
    extra_env: a map of extra environment variables to set (e.g. sharding).
    args: command-line flags to pass to gtest_shuffle_test_.
  """
  environ_copy = os.environ.copy()
  environ_copy.update(extra_env)

  return gtest_test_utils.Subprocess([COMMAND] + args, env=environ_copy).output
def GetTestsForAllIterations(extra_env, args):
  """Runs the test program and returns a list of test lists.

  Args:
    extra_env: a map from environment variables to their values
    args: command line flags to pass to gtest_shuffle_test_

  Returns:
    A list where the i-th element is the list of tests run in the i-th
    test iteration.
  """

  test_iterations = []
  for line in RunAndReturnOutput(extra_env, args).split('\n'):
    # A line of dashes starts a new iteration; other non-empty lines are
    # 'TestCaseName.TestName' entries belonging to the current iteration.
    if line.startswith('----'):
      tests = []
      test_iterations.append(tests)
    elif line.strip():
      tests.append(line.strip())  # 'TestCaseName.TestName'

  return test_iterations
def GetTestCases(tests):
  """Returns a list of test cases in the given full test names.

  Args:
    tests: a list of full test names ('TestCaseName.TestName')

  Returns:
    A list of the test case names from 'tests', in their original order,
    with duplicates removed (first occurrence kept).
  """

  seen = set()
  ordered_cases = []
  for full_name in tests:
    case_name = full_name.split('.')[0]
    if case_name not in seen:
      seen.add(case_name)
      ordered_cases.append(case_name)

  return ordered_cases
def CalculateTestLists():
  """Calculates the list of tests run under different flags.

  Each module-level list is populated only once (guarded by its own
  emptiness check), so repeated calls don't re-run the test binary.
  """
  if not ALL_TESTS:
    ALL_TESTS.extend(
        GetTestsForAllIterations({}, [AlsoRunDisabledTestsFlag()])[0])

  if not ACTIVE_TESTS:
    ACTIVE_TESTS.extend(GetTestsForAllIterations({}, [])[0])

  if not FILTERED_TESTS:
    FILTERED_TESTS.extend(
        GetTestsForAllIterations({}, [FilterFlag(TEST_FILTER)])[0])

  if not SHARDED_TESTS:
    SHARDED_TESTS.extend(
        GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
                                  SHARD_INDEX_ENV_VAR: '1'},
                                 [])[0])

  if not SHUFFLED_ALL_TESTS:
    SHUFFLED_ALL_TESTS.extend(GetTestsForAllIterations(
        {}, [AlsoRunDisabledTestsFlag(), ShuffleFlag(), RandomSeedFlag(1)])[0])

  if not SHUFFLED_ACTIVE_TESTS:
    SHUFFLED_ACTIVE_TESTS.extend(GetTestsForAllIterations(
        {}, [ShuffleFlag(), RandomSeedFlag(1)])[0])

  if not SHUFFLED_FILTERED_TESTS:
    SHUFFLED_FILTERED_TESTS.extend(GetTestsForAllIterations(
        {}, [ShuffleFlag(), RandomSeedFlag(1), FilterFlag(TEST_FILTER)])[0])

  if not SHUFFLED_SHARDED_TESTS:
    SHUFFLED_SHARDED_TESTS.extend(
        GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
                                  SHARD_INDEX_ENV_VAR: '1'},
                                 [ShuffleFlag(), RandomSeedFlag(1)])[0])
class GTestShuffleUnitTest(gtest_test_utils.TestCase):
  """Tests test shuffling.

  All assertions compare the module-level test lists (ALL_TESTS,
  ACTIVE_TESTS, FILTERED_TESTS, SHARDED_TESTS) against their SHUFFLED_*
  counterparts, which are recomputed by CalculateTestLists() in setUp.
  """

  def setUp(self):
    # (Re)compute the module-level test lists used by every test below.
    CalculateTestLists()

  def testShufflePreservesNumberOfTests(self):
    # Shuffling must not add or drop tests in any of the four list pairs.
    self.assertEqual(len(ALL_TESTS), len(SHUFFLED_ALL_TESTS))
    self.assertEqual(len(ACTIVE_TESTS), len(SHUFFLED_ACTIVE_TESTS))
    self.assertEqual(len(FILTERED_TESTS), len(SHUFFLED_FILTERED_TESTS))
    self.assertEqual(len(SHARDED_TESTS), len(SHUFFLED_SHARDED_TESTS))

  def testShuffleChangesTestOrder(self):
    # The shuffled lists must differ from the unshuffled ones (the second
    # argument is the failure message shown when they do not).
    self.assert_(SHUFFLED_ALL_TESTS != ALL_TESTS, SHUFFLED_ALL_TESTS)
    self.assert_(SHUFFLED_ACTIVE_TESTS != ACTIVE_TESTS, SHUFFLED_ACTIVE_TESTS)
    self.assert_(SHUFFLED_FILTERED_TESTS != FILTERED_TESTS,
                 SHUFFLED_FILTERED_TESTS)
    self.assert_(SHUFFLED_SHARDED_TESTS != SHARDED_TESTS,
                 SHUFFLED_SHARDED_TESTS)

  def testShuffleChangesTestCaseOrder(self):
    # Not only individual tests but the sequence of test cases must change.
    self.assert_(GetTestCases(SHUFFLED_ALL_TESTS) != GetTestCases(ALL_TESTS),
                 GetTestCases(SHUFFLED_ALL_TESTS))
    self.assert_(
        GetTestCases(SHUFFLED_ACTIVE_TESTS) != GetTestCases(ACTIVE_TESTS),
        GetTestCases(SHUFFLED_ACTIVE_TESTS))
    self.assert_(
        GetTestCases(SHUFFLED_FILTERED_TESTS) != GetTestCases(FILTERED_TESTS),
        GetTestCases(SHUFFLED_FILTERED_TESTS))
    self.assert_(
        GetTestCases(SHUFFLED_SHARDED_TESTS) != GetTestCases(SHARDED_TESTS),
        GetTestCases(SHUFFLED_SHARDED_TESTS))

  def testShuffleDoesNotRepeatTest(self):
    # Every test must appear exactly once in each shuffled list.
    for test in SHUFFLED_ALL_TESTS:
      self.assertEqual(1, SHUFFLED_ALL_TESTS.count(test),
                       '%s appears more than once' % (test,))
    for test in SHUFFLED_ACTIVE_TESTS:
      self.assertEqual(1, SHUFFLED_ACTIVE_TESTS.count(test),
                       '%s appears more than once' % (test,))
    for test in SHUFFLED_FILTERED_TESTS:
      self.assertEqual(1, SHUFFLED_FILTERED_TESTS.count(test),
                       '%s appears more than once' % (test,))
    for test in SHUFFLED_SHARDED_TESTS:
      self.assertEqual(1, SHUFFLED_SHARDED_TESTS.count(test),
                       '%s appears more than once' % (test,))

  def testShuffleDoesNotCreateNewTest(self):
    # Shuffled lists must be subsets of the originals...
    for test in SHUFFLED_ALL_TESTS:
      self.assert_(test in ALL_TESTS, '%s is an invalid test' % (test,))
    for test in SHUFFLED_ACTIVE_TESTS:
      self.assert_(test in ACTIVE_TESTS, '%s is an invalid test' % (test,))
    for test in SHUFFLED_FILTERED_TESTS:
      self.assert_(test in FILTERED_TESTS, '%s is an invalid test' % (test,))
    for test in SHUFFLED_SHARDED_TESTS:
      self.assert_(test in SHARDED_TESTS, '%s is an invalid test' % (test,))

  def testShuffleIncludesAllTests(self):
    # ...and supersets too; together with the previous test this proves the
    # shuffled lists are permutations of the originals.
    for test in ALL_TESTS:
      self.assert_(test in SHUFFLED_ALL_TESTS, '%s is missing' % (test,))
    for test in ACTIVE_TESTS:
      self.assert_(test in SHUFFLED_ACTIVE_TESTS, '%s is missing' % (test,))
    for test in FILTERED_TESTS:
      self.assert_(test in SHUFFLED_FILTERED_TESTS, '%s is missing' % (test,))
    for test in SHARDED_TESTS:
      self.assert_(test in SHUFFLED_SHARDED_TESTS, '%s is missing' % (test,))

  def testShuffleLeavesDeathTestsAtFront(self):
    # Death tests must still form a contiguous prefix after shuffling.
    non_death_test_found = False
    for test in SHUFFLED_ACTIVE_TESTS:
      if 'DeathTest.' in test:
        self.assert_(not non_death_test_found,
                     '%s appears after a non-death test' % (test,))
      else:
        non_death_test_found = True

  def _VerifyTestCasesDoNotInterleave(self, tests):
    """Asserts that each test case's tests are contiguous in *tests*.

    Records a test case name on every transition between adjacent test
    cases; a repeated name in that record means the case was interleaved.
    """
    test_cases = []
    for test in tests:
      [test_case, _] = test.split('.')
      if test_cases and test_cases[-1] != test_case:
        test_cases.append(test_case)
        self.assertEqual(1, test_cases.count(test_case),
                         'Test case %s is not grouped together in %s' %
                         (test_case, tests))

  def testShuffleDoesNotInterleaveTestCases(self):
    self._VerifyTestCasesDoNotInterleave(SHUFFLED_ALL_TESTS)
    self._VerifyTestCasesDoNotInterleave(SHUFFLED_ACTIVE_TESTS)
    self._VerifyTestCasesDoNotInterleave(SHUFFLED_FILTERED_TESTS)
    self._VerifyTestCasesDoNotInterleave(SHUFFLED_SHARDED_TESTS)

  def testShuffleRestoresOrderAfterEachIteration(self):
    # Get the test lists in all 3 iterations, using random seed 1, 2,
    # and 3 respectively.  Google Test picks a different seed in each
    # iteration, and this test depends on the current implementation
    # picking successive numbers.  This dependency is not ideal, but
    # makes the test much easier to write.
    [tests_in_iteration1, tests_in_iteration2, tests_in_iteration3] = (
        GetTestsForAllIterations(
            {}, [ShuffleFlag(), RandomSeedFlag(1), RepeatFlag(3)]))

    # Make sure running the tests with random seed 1 gets the same
    # order as in iteration 1 above.
    [tests_with_seed1] = GetTestsForAllIterations(
        {}, [ShuffleFlag(), RandomSeedFlag(1)])
    self.assertEqual(tests_in_iteration1, tests_with_seed1)

    # Make sure running the tests with random seed 2 gets the same
    # order as in iteration 2 above.  Success means that Google Test
    # correctly restores the test order before re-shuffling at the
    # beginning of iteration 2.
    [tests_with_seed2] = GetTestsForAllIterations(
        {}, [ShuffleFlag(), RandomSeedFlag(2)])
    self.assertEqual(tests_in_iteration2, tests_with_seed2)

    # Make sure running the tests with random seed 3 gets the same
    # order as in iteration 3 above.  Success means that Google Test
    # correctly restores the test order before re-shuffling at the
    # beginning of iteration 3.
    [tests_with_seed3] = GetTestsForAllIterations(
        {}, [ShuffleFlag(), RandomSeedFlag(3)])
    self.assertEqual(tests_in_iteration3, tests_with_seed3)

  def testShuffleGeneratesNewOrderInEachIteration(self):
    # With --gtest_repeat, every iteration must produce a distinct order.
    [tests_in_iteration1, tests_in_iteration2, tests_in_iteration3] = (
        GetTestsForAllIterations(
            {}, [ShuffleFlag(), RandomSeedFlag(1), RepeatFlag(3)]))

    self.assert_(tests_in_iteration1 != tests_in_iteration2,
                 tests_in_iteration1)
    self.assert_(tests_in_iteration1 != tests_in_iteration3,
                 tests_in_iteration1)
    self.assert_(tests_in_iteration2 != tests_in_iteration3,
                 tests_in_iteration2)

  def testShuffleShardedTestsPreservesPartition(self):
    # If we run M tests on N shards, the same M tests should be run in
    # total, regardless of the random seeds used by the shards.
    [tests1] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
                                         SHARD_INDEX_ENV_VAR: '0'},
                                        [ShuffleFlag(), RandomSeedFlag(1)])
    [tests2] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
                                         SHARD_INDEX_ENV_VAR: '1'},
                                        [ShuffleFlag(), RandomSeedFlag(20)])
    [tests3] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
                                         SHARD_INDEX_ENV_VAR: '2'},
                                        [ShuffleFlag(), RandomSeedFlag(25)])
    sorted_sharded_tests = tests1 + tests2 + tests3
    sorted_sharded_tests.sort()
    sorted_active_tests = []
    sorted_active_tests.extend(ACTIVE_TESTS)
    sorted_active_tests.sort()
    self.assertEqual(sorted_active_tests, sorted_sharded_tests)
# Standard gtest-python entry point: runs the unit tests above.
if __name__ == '__main__':
  gtest_test_utils.Main()
| lgpl-2.1 |
AlanZatarain/sqlalchemy-migrate | migrate/versioning/script/sql.py | 73 | 1688 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import shutil
from migrate.versioning.script import base
from migrate.versioning.template import Template
log = logging.getLogger(__name__)
class SqlScript(base.BaseScript):
    """A file containing plain SQL statements."""

    @classmethod
    def create(cls, path, **opts):
        """Create an empty migration script at specified path

        :returns: :class:`SqlScript instance <migrate.versioning.script.sql.SqlScript>`"""
        cls.require_notfound(path)
        template = Template(opts.pop('templates_path', None))
        seed_path = template.get_sql_script(theme=opts.pop('templates_theme', None))
        shutil.copy(seed_path, path)
        return cls(path)

    # TODO: why is step parameter even here?
    def run(self, engine, step=None, executemany=True):
        """Runs SQL script through raw dbapi execute call"""
        statements = self.source()
        # Don't rely on SA's autocommit here: SA uses .startswith to decide
        # whether a commit is needed -- which misfires when the script begins
        # with a comment -- so we drive the transaction explicitly.
        connection = engine.connect()
        try:
            transaction = connection.begin()
            try:
                # HACK: SQLite's execute() rejects multiple statements, but
                # its raw DB-API connection provides executescript() instead.
                raw_conn = connection.engine.raw_connection()
                if executemany and getattr(raw_conn, 'executescript', None):
                    raw_conn.executescript(statements)
                else:
                    connection.execute(statements)
                transaction.commit()
            except:
                transaction.rollback()
                raise
        finally:
            connection.close()
| mit |
monouno/site | judge/utils/file_cache.py | 3 | 1250 | import errno
import os
from gzip import open as gzip_open
from urlparse import urljoin
class HashFileCache(object):
    """Caches files on disk under per-hash directories and maps them to URLs."""

    def __init__(self, root, url, gzip=False):
        self.root = root    # filesystem directory holding one subdir per hash
        self.url = url      # base URL the cached files are served from
        self.gzip = gzip    # whether cache_data may also emit .gz companions

    def create(self, hash):
        """Ensure the directory for *hash* exists; an existing one is fine."""
        target = os.path.join(self.root, hash)
        try:
            os.makedirs(target)
        except OSError as err:
            if err.errno != errno.EEXIST:
                raise

    def has_file(self, hash, file):
        """Return True when *file* is already cached under *hash*."""
        return os.path.isfile(self.get_path(hash, file))

    def get_path(self, hash, file):
        """Filesystem path of *file* inside the *hash* directory."""
        return os.path.join(self.root, hash, file)

    def get_url(self, hash, file):
        """Public URL of *file* inside the *hash* directory."""
        return urljoin(self.url, '%s/%s' % (hash, file))

    def read_file(self, hash, file):
        """Open the cached file for binary reading (caller closes it)."""
        return open(self.get_path(hash, file), 'rb')

    def read_data(self, hash, file):
        """Return the cached file's entire contents as bytes."""
        with self.read_file(hash, file) as handle:
            return handle.read()

    def cache_data(self, hash, file, data, url=True, gzip=True):
        """Store *data* as *file* under *hash*; optionally write a .gz twin.

        Returns the file's URL when *url* is true, otherwise None.
        """
        path = self.get_path(hash, file)
        if gzip and self.gzip:
            with gzip_open(path + '.gz', 'wb') as compressed:
                compressed.write(data)
        with open(path, 'wb') as plain:
            plain.write(data)
        if url:
            return self.get_url(hash, file)
| agpl-3.0 |
FireWRT/OpenWrt-Firefly-Libraries | staging_dir/host/lib/python3.4/encodings/iso8859_9.py | 272 | 13156 | """ Python Character Mapping Codec iso8859_9 generated from 'MAPPINGS/ISO8859/8859-9.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    # Stateless codec: both directions are single-pass charmap translations
    # driven by the module-level encoding_table/decoding_table defined below.
    # (File generated by gencodec.py -- keep code changes out of it.)

    def encode(self,input,errors='strict'):
        return codecs.charmap_encode(input,errors,encoding_table)

    def decode(self,input,errors='strict'):
        return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    def encode(self, input, final=False):
        # Charmap encoding is stateless, so 'final' needs no special handling;
        # [0] drops the consumed-length half of the (bytes, length) result.
        return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    def decode(self, input, final=False):
        # Stateless single-byte decode; [0] drops the consumed-length count.
        return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
    # Inherits the charmap encode() from Codec; StreamWriter adds buffering.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # Inherits the charmap decode() from Codec; StreamReader adds buffering.
    pass
### encodings module API
def getregentry():
    # Registration hook looked up by the encodings package machinery when
    # the 'iso8859-9' codec is first requested.
    return codecs.CodecInfo(
        name='iso8859-9',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
decoding_table = (
'\x00' # 0x00 -> NULL
'\x01' # 0x01 -> START OF HEADING
'\x02' # 0x02 -> START OF TEXT
'\x03' # 0x03 -> END OF TEXT
'\x04' # 0x04 -> END OF TRANSMISSION
'\x05' # 0x05 -> ENQUIRY
'\x06' # 0x06 -> ACKNOWLEDGE
'\x07' # 0x07 -> BELL
'\x08' # 0x08 -> BACKSPACE
'\t' # 0x09 -> HORIZONTAL TABULATION
'\n' # 0x0A -> LINE FEED
'\x0b' # 0x0B -> VERTICAL TABULATION
'\x0c' # 0x0C -> FORM FEED
'\r' # 0x0D -> CARRIAGE RETURN
'\x0e' # 0x0E -> SHIFT OUT
'\x0f' # 0x0F -> SHIFT IN
'\x10' # 0x10 -> DATA LINK ESCAPE
'\x11' # 0x11 -> DEVICE CONTROL ONE
'\x12' # 0x12 -> DEVICE CONTROL TWO
'\x13' # 0x13 -> DEVICE CONTROL THREE
'\x14' # 0x14 -> DEVICE CONTROL FOUR
'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
'\x16' # 0x16 -> SYNCHRONOUS IDLE
'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
'\x18' # 0x18 -> CANCEL
'\x19' # 0x19 -> END OF MEDIUM
'\x1a' # 0x1A -> SUBSTITUTE
'\x1b' # 0x1B -> ESCAPE
'\x1c' # 0x1C -> FILE SEPARATOR
'\x1d' # 0x1D -> GROUP SEPARATOR
'\x1e' # 0x1E -> RECORD SEPARATOR
'\x1f' # 0x1F -> UNIT SEPARATOR
' ' # 0x20 -> SPACE
'!' # 0x21 -> EXCLAMATION MARK
'"' # 0x22 -> QUOTATION MARK
'#' # 0x23 -> NUMBER SIGN
'$' # 0x24 -> DOLLAR SIGN
'%' # 0x25 -> PERCENT SIGN
'&' # 0x26 -> AMPERSAND
"'" # 0x27 -> APOSTROPHE
'(' # 0x28 -> LEFT PARENTHESIS
')' # 0x29 -> RIGHT PARENTHESIS
'*' # 0x2A -> ASTERISK
'+' # 0x2B -> PLUS SIGN
',' # 0x2C -> COMMA
'-' # 0x2D -> HYPHEN-MINUS
'.' # 0x2E -> FULL STOP
'/' # 0x2F -> SOLIDUS
'0' # 0x30 -> DIGIT ZERO
'1' # 0x31 -> DIGIT ONE
'2' # 0x32 -> DIGIT TWO
'3' # 0x33 -> DIGIT THREE
'4' # 0x34 -> DIGIT FOUR
'5' # 0x35 -> DIGIT FIVE
'6' # 0x36 -> DIGIT SIX
'7' # 0x37 -> DIGIT SEVEN
'8' # 0x38 -> DIGIT EIGHT
'9' # 0x39 -> DIGIT NINE
':' # 0x3A -> COLON
';' # 0x3B -> SEMICOLON
'<' # 0x3C -> LESS-THAN SIGN
'=' # 0x3D -> EQUALS SIGN
'>' # 0x3E -> GREATER-THAN SIGN
'?' # 0x3F -> QUESTION MARK
'@' # 0x40 -> COMMERCIAL AT
'A' # 0x41 -> LATIN CAPITAL LETTER A
'B' # 0x42 -> LATIN CAPITAL LETTER B
'C' # 0x43 -> LATIN CAPITAL LETTER C
'D' # 0x44 -> LATIN CAPITAL LETTER D
'E' # 0x45 -> LATIN CAPITAL LETTER E
'F' # 0x46 -> LATIN CAPITAL LETTER F
'G' # 0x47 -> LATIN CAPITAL LETTER G
'H' # 0x48 -> LATIN CAPITAL LETTER H
'I' # 0x49 -> LATIN CAPITAL LETTER I
'J' # 0x4A -> LATIN CAPITAL LETTER J
'K' # 0x4B -> LATIN CAPITAL LETTER K
'L' # 0x4C -> LATIN CAPITAL LETTER L
'M' # 0x4D -> LATIN CAPITAL LETTER M
'N' # 0x4E -> LATIN CAPITAL LETTER N
'O' # 0x4F -> LATIN CAPITAL LETTER O
'P' # 0x50 -> LATIN CAPITAL LETTER P
'Q' # 0x51 -> LATIN CAPITAL LETTER Q
'R' # 0x52 -> LATIN CAPITAL LETTER R
'S' # 0x53 -> LATIN CAPITAL LETTER S
'T' # 0x54 -> LATIN CAPITAL LETTER T
'U' # 0x55 -> LATIN CAPITAL LETTER U
'V' # 0x56 -> LATIN CAPITAL LETTER V
'W' # 0x57 -> LATIN CAPITAL LETTER W
'X' # 0x58 -> LATIN CAPITAL LETTER X
'Y' # 0x59 -> LATIN CAPITAL LETTER Y
'Z' # 0x5A -> LATIN CAPITAL LETTER Z
'[' # 0x5B -> LEFT SQUARE BRACKET
'\\' # 0x5C -> REVERSE SOLIDUS
']' # 0x5D -> RIGHT SQUARE BRACKET
'^' # 0x5E -> CIRCUMFLEX ACCENT
'_' # 0x5F -> LOW LINE
'`' # 0x60 -> GRAVE ACCENT
'a' # 0x61 -> LATIN SMALL LETTER A
'b' # 0x62 -> LATIN SMALL LETTER B
'c' # 0x63 -> LATIN SMALL LETTER C
'd' # 0x64 -> LATIN SMALL LETTER D
'e' # 0x65 -> LATIN SMALL LETTER E
'f' # 0x66 -> LATIN SMALL LETTER F
'g' # 0x67 -> LATIN SMALL LETTER G
'h' # 0x68 -> LATIN SMALL LETTER H
'i' # 0x69 -> LATIN SMALL LETTER I
'j' # 0x6A -> LATIN SMALL LETTER J
'k' # 0x6B -> LATIN SMALL LETTER K
'l' # 0x6C -> LATIN SMALL LETTER L
'm' # 0x6D -> LATIN SMALL LETTER M
'n' # 0x6E -> LATIN SMALL LETTER N
'o' # 0x6F -> LATIN SMALL LETTER O
'p' # 0x70 -> LATIN SMALL LETTER P
'q' # 0x71 -> LATIN SMALL LETTER Q
'r' # 0x72 -> LATIN SMALL LETTER R
's' # 0x73 -> LATIN SMALL LETTER S
't' # 0x74 -> LATIN SMALL LETTER T
'u' # 0x75 -> LATIN SMALL LETTER U
'v' # 0x76 -> LATIN SMALL LETTER V
'w' # 0x77 -> LATIN SMALL LETTER W
'x' # 0x78 -> LATIN SMALL LETTER X
'y' # 0x79 -> LATIN SMALL LETTER Y
'z' # 0x7A -> LATIN SMALL LETTER Z
'{' # 0x7B -> LEFT CURLY BRACKET
'|' # 0x7C -> VERTICAL LINE
'}' # 0x7D -> RIGHT CURLY BRACKET
'~' # 0x7E -> TILDE
'\x7f' # 0x7F -> DELETE
'\x80' # 0x80 -> <control>
'\x81' # 0x81 -> <control>
'\x82' # 0x82 -> <control>
'\x83' # 0x83 -> <control>
'\x84' # 0x84 -> <control>
'\x85' # 0x85 -> <control>
'\x86' # 0x86 -> <control>
'\x87' # 0x87 -> <control>
'\x88' # 0x88 -> <control>
'\x89' # 0x89 -> <control>
'\x8a' # 0x8A -> <control>
'\x8b' # 0x8B -> <control>
'\x8c' # 0x8C -> <control>
'\x8d' # 0x8D -> <control>
'\x8e' # 0x8E -> <control>
'\x8f' # 0x8F -> <control>
'\x90' # 0x90 -> <control>
'\x91' # 0x91 -> <control>
'\x92' # 0x92 -> <control>
'\x93' # 0x93 -> <control>
'\x94' # 0x94 -> <control>
'\x95' # 0x95 -> <control>
'\x96' # 0x96 -> <control>
'\x97' # 0x97 -> <control>
'\x98' # 0x98 -> <control>
'\x99' # 0x99 -> <control>
'\x9a' # 0x9A -> <control>
'\x9b' # 0x9B -> <control>
'\x9c' # 0x9C -> <control>
'\x9d' # 0x9D -> <control>
'\x9e' # 0x9E -> <control>
'\x9f' # 0x9F -> <control>
'\xa0' # 0xA0 -> NO-BREAK SPACE
'\xa1' # 0xA1 -> INVERTED EXCLAMATION MARK
'\xa2' # 0xA2 -> CENT SIGN
'\xa3' # 0xA3 -> POUND SIGN
'\xa4' # 0xA4 -> CURRENCY SIGN
'\xa5' # 0xA5 -> YEN SIGN
'\xa6' # 0xA6 -> BROKEN BAR
'\xa7' # 0xA7 -> SECTION SIGN
'\xa8' # 0xA8 -> DIAERESIS
'\xa9' # 0xA9 -> COPYRIGHT SIGN
'\xaa' # 0xAA -> FEMININE ORDINAL INDICATOR
'\xab' # 0xAB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
'\xac' # 0xAC -> NOT SIGN
'\xad' # 0xAD -> SOFT HYPHEN
'\xae' # 0xAE -> REGISTERED SIGN
'\xaf' # 0xAF -> MACRON
'\xb0' # 0xB0 -> DEGREE SIGN
'\xb1' # 0xB1 -> PLUS-MINUS SIGN
'\xb2' # 0xB2 -> SUPERSCRIPT TWO
'\xb3' # 0xB3 -> SUPERSCRIPT THREE
'\xb4' # 0xB4 -> ACUTE ACCENT
'\xb5' # 0xB5 -> MICRO SIGN
'\xb6' # 0xB6 -> PILCROW SIGN
'\xb7' # 0xB7 -> MIDDLE DOT
'\xb8' # 0xB8 -> CEDILLA
'\xb9' # 0xB9 -> SUPERSCRIPT ONE
'\xba' # 0xBA -> MASCULINE ORDINAL INDICATOR
'\xbb' # 0xBB -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
'\xbc' # 0xBC -> VULGAR FRACTION ONE QUARTER
'\xbd' # 0xBD -> VULGAR FRACTION ONE HALF
'\xbe' # 0xBE -> VULGAR FRACTION THREE QUARTERS
'\xbf' # 0xBF -> INVERTED QUESTION MARK
'\xc0' # 0xC0 -> LATIN CAPITAL LETTER A WITH GRAVE
'\xc1' # 0xC1 -> LATIN CAPITAL LETTER A WITH ACUTE
'\xc2' # 0xC2 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
'\xc3' # 0xC3 -> LATIN CAPITAL LETTER A WITH TILDE
'\xc4' # 0xC4 -> LATIN CAPITAL LETTER A WITH DIAERESIS
'\xc5' # 0xC5 -> LATIN CAPITAL LETTER A WITH RING ABOVE
'\xc6' # 0xC6 -> LATIN CAPITAL LETTER AE
'\xc7' # 0xC7 -> LATIN CAPITAL LETTER C WITH CEDILLA
'\xc8' # 0xC8 -> LATIN CAPITAL LETTER E WITH GRAVE
'\xc9' # 0xC9 -> LATIN CAPITAL LETTER E WITH ACUTE
'\xca' # 0xCA -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX
'\xcb' # 0xCB -> LATIN CAPITAL LETTER E WITH DIAERESIS
'\xcc' # 0xCC -> LATIN CAPITAL LETTER I WITH GRAVE
'\xcd' # 0xCD -> LATIN CAPITAL LETTER I WITH ACUTE
'\xce' # 0xCE -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
'\xcf' # 0xCF -> LATIN CAPITAL LETTER I WITH DIAERESIS
'\u011e' # 0xD0 -> LATIN CAPITAL LETTER G WITH BREVE
'\xd1' # 0xD1 -> LATIN CAPITAL LETTER N WITH TILDE
'\xd2' # 0xD2 -> LATIN CAPITAL LETTER O WITH GRAVE
'\xd3' # 0xD3 -> LATIN CAPITAL LETTER O WITH ACUTE
'\xd4' # 0xD4 -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
'\xd5' # 0xD5 -> LATIN CAPITAL LETTER O WITH TILDE
'\xd6' # 0xD6 -> LATIN CAPITAL LETTER O WITH DIAERESIS
'\xd7' # 0xD7 -> MULTIPLICATION SIGN
'\xd8' # 0xD8 -> LATIN CAPITAL LETTER O WITH STROKE
'\xd9' # 0xD9 -> LATIN CAPITAL LETTER U WITH GRAVE
'\xda' # 0xDA -> LATIN CAPITAL LETTER U WITH ACUTE
'\xdb' # 0xDB -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
'\xdc' # 0xDC -> LATIN CAPITAL LETTER U WITH DIAERESIS
'\u0130' # 0xDD -> LATIN CAPITAL LETTER I WITH DOT ABOVE
'\u015e' # 0xDE -> LATIN CAPITAL LETTER S WITH CEDILLA
'\xdf' # 0xDF -> LATIN SMALL LETTER SHARP S
'\xe0' # 0xE0 -> LATIN SMALL LETTER A WITH GRAVE
'\xe1' # 0xE1 -> LATIN SMALL LETTER A WITH ACUTE
'\xe2' # 0xE2 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
'\xe3' # 0xE3 -> LATIN SMALL LETTER A WITH TILDE
'\xe4' # 0xE4 -> LATIN SMALL LETTER A WITH DIAERESIS
'\xe5' # 0xE5 -> LATIN SMALL LETTER A WITH RING ABOVE
'\xe6' # 0xE6 -> LATIN SMALL LETTER AE
'\xe7' # 0xE7 -> LATIN SMALL LETTER C WITH CEDILLA
'\xe8' # 0xE8 -> LATIN SMALL LETTER E WITH GRAVE
'\xe9' # 0xE9 -> LATIN SMALL LETTER E WITH ACUTE
'\xea' # 0xEA -> LATIN SMALL LETTER E WITH CIRCUMFLEX
'\xeb' # 0xEB -> LATIN SMALL LETTER E WITH DIAERESIS
'\xec' # 0xEC -> LATIN SMALL LETTER I WITH GRAVE
'\xed' # 0xED -> LATIN SMALL LETTER I WITH ACUTE
'\xee' # 0xEE -> LATIN SMALL LETTER I WITH CIRCUMFLEX
'\xef' # 0xEF -> LATIN SMALL LETTER I WITH DIAERESIS
'\u011f' # 0xF0 -> LATIN SMALL LETTER G WITH BREVE
'\xf1' # 0xF1 -> LATIN SMALL LETTER N WITH TILDE
'\xf2' # 0xF2 -> LATIN SMALL LETTER O WITH GRAVE
'\xf3' # 0xF3 -> LATIN SMALL LETTER O WITH ACUTE
'\xf4' # 0xF4 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
'\xf5' # 0xF5 -> LATIN SMALL LETTER O WITH TILDE
'\xf6' # 0xF6 -> LATIN SMALL LETTER O WITH DIAERESIS
'\xf7' # 0xF7 -> DIVISION SIGN
'\xf8' # 0xF8 -> LATIN SMALL LETTER O WITH STROKE
'\xf9' # 0xF9 -> LATIN SMALL LETTER U WITH GRAVE
'\xfa' # 0xFA -> LATIN SMALL LETTER U WITH ACUTE
'\xfb' # 0xFB -> LATIN SMALL LETTER U WITH CIRCUMFLEX
'\xfc' # 0xFC -> LATIN SMALL LETTER U WITH DIAERESIS
'\u0131' # 0xFD -> LATIN SMALL LETTER DOTLESS I
'\u015f' # 0xFE -> LATIN SMALL LETTER S WITH CEDILLA
'\xff' # 0xFF -> LATIN SMALL LETTER Y WITH DIAERESIS
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
| gpl-2.0 |
Abi1ity/uniclust2.0 | flask/lib/python2.7/site-packages/distribute-0.6.24-py2.7.egg/setuptools/tests/server.py | 62 | 1503 | """Basic http server for tests to simulate PyPI or custom indexes
"""
import urllib2
import sys
from threading import Thread
from BaseHTTPServer import HTTPServer
from SimpleHTTPServer import SimpleHTTPRequestHandler
class IndexServer(HTTPServer):
    """Basic single-threaded http server simulating a package index

    You can use this server in unittest like this::

        s = IndexServer()
        s.start()
        index_url = s.base_url() + 'mytestindex'
        # do some test requests to the index
        # The index files should be located in setuptools/tests/indexes
        s.stop()
    """

    def __init__(self):
        # Port 0 lets the OS pick a free port (available as self.server_port).
        HTTPServer.__init__(self, ('', 0), SimpleHTTPRequestHandler)
        self._run = True

    def serve(self):
        # Serve one request at a time; the run flag is only re-checked after
        # a request completes, so stop() must trigger one last request.
        keep_going = True
        while keep_going:
            self.handle_request()
            keep_going = self._run

    def start(self):
        """Run serve() on a background thread."""
        self.thread = Thread(target=self.serve)
        self.thread.start()

    def stop(self):
        """self.shutdown is not supported on python < 2.6"""
        self._run = False
        # Fire a throwaway request so serve() wakes up and sees _run == False.
        try:
            if sys.version > '2.6':
                urllib2.urlopen('http://127.0.0.1:%s/' % self.server_port,
                                None, 5)
            else:
                urllib2.urlopen('http://127.0.0.1:%s/' % self.server_port)
        except urllib2.URLError:
            pass
        self.thread.join()

    def base_url(self):
        """URL prefix under which the test index fixtures are served."""
        return 'http://127.0.0.1:%s/setuptools/tests/indexes/' % self.server_port
| bsd-3-clause |
rajsadho/django | django/contrib/admin/actions.py | 395 | 3316 | """
Built-in, globally-available admin actions.
"""
from django.contrib import messages
from django.contrib.admin import helpers
from django.contrib.admin.utils import get_deleted_objects, model_ngettext
from django.core.exceptions import PermissionDenied
from django.db import router
from django.template.response import TemplateResponse
from django.utils.encoding import force_text
from django.utils.translation import ugettext as _, ugettext_lazy
def delete_selected(modeladmin, request, queryset):
    """
    Default action which deletes the selected objects.

    This action first displays a confirmation page which shows all the
    deletable objects, or, if the user lacks permission on one of the related
    children (foreign keys), a "permission denied" message.

    Next, it deletes all selected objects and redirects back to the change list.
    """
    opts = modeladmin.model._meta
    app_label = opts.app_label

    # Check that the user has delete permission for the actual model
    if not modeladmin.has_delete_permission(request):
        raise PermissionDenied

    using = router.db_for_write(modeladmin.model)

    # Populate deletable_objects, a data structure of all related objects that
    # will also be deleted.
    deletable_objects, model_count, perms_needed, protected = get_deleted_objects(
        queryset, opts, request.user, modeladmin.admin_site, using)

    # The user has already confirmed the deletion.
    # Do the deletion and return a None to display the change list view again.
    if request.POST.get('post'):
        if perms_needed:
            raise PermissionDenied
        n = queryset.count()
        if n:
            # Log each deletion individually before the bulk delete below.
            for obj in queryset:
                obj_display = force_text(obj)
                modeladmin.log_deletion(request, obj, obj_display)
            queryset.delete()
            modeladmin.message_user(request, _("Successfully deleted %(count)d %(items)s.") % {
                "count": n, "items": model_ngettext(modeladmin.opts, n)
            }, messages.SUCCESS)
        # Return None to display the change list page again.
        return None

    # Singular/plural object description for the confirmation page title.
    if len(queryset) == 1:
        objects_name = force_text(opts.verbose_name)
    else:
        objects_name = force_text(opts.verbose_name_plural)

    if perms_needed or protected:
        title = _("Cannot delete %(name)s") % {"name": objects_name}
    else:
        title = _("Are you sure?")

    context = dict(
        modeladmin.admin_site.each_context(request),
        title=title,
        objects_name=objects_name,
        deletable_objects=[deletable_objects],
        model_count=dict(model_count).items(),
        queryset=queryset,
        perms_lacking=perms_needed,
        protected=protected,
        opts=opts,
        action_checkbox_name=helpers.ACTION_CHECKBOX_NAME,
    )

    request.current_app = modeladmin.admin_site.name

    # Display the confirmation page, trying model- and app-specific templates
    # before the generic fallback.
    return TemplateResponse(request, modeladmin.delete_selected_confirmation_template or [
        "admin/%s/%s/delete_selected_confirmation.html" % (app_label, opts.model_name),
        "admin/%s/delete_selected_confirmation.html" % app_label,
        "admin/delete_selected_confirmation.html"
    ], context)

delete_selected.short_description = ugettext_lazy("Delete selected %(verbose_name_plural)s")
| bsd-3-clause |
RoboErik/RUBIK | Rubik/RubikSolver/rpi_ws281x/version.py | 10 | 2918 | #
# SConstruct
#
# Copyright (c) 2016 Jeremy Garff <jer @ jers.net>
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are permitted
# provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this list of
# conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice, this list
# of conditions and the following disclaimer in the documentation and/or other materials
# provided with the distribution.
# 3. Neither the name of the owner nor the names of its contributors may be used to endorse
# or promote products derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
# FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
# OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import SCons, os
def version_flags(env):
    """In quiet (non-verbose) builds, show a short 'Version <target>' line
    instead of echoing the full command."""
    verbose = env['V']
    if not verbose:
        env['VERSIONCOMSTR'] = 'Version ${TARGET}'
def version_builders(env):
    """Register a 'Version' builder on *env* that turns a version text file
    ('MAJOR.MINOR.MICRO' on its first line) into a C header defining
    VERSION_MAJOR/MINOR/MICRO macros inside an include guard."""
    def generate_version_header(target, source, env):
        headername = os.path.basename(target[0].abspath)
        headerdef = headername.replace('.', '_').replace('-', '_').upper()

        # Missing/unreadable version file falls back to 0.0.0.
        # (Was a bare 'except:'; narrowed so Ctrl-C etc. still propagate.
        # The version file is now also closed via the context manager.)
        try:
            with open(source[0].abspath, 'r') as version_file:
                version = version_file.readline().strip().split('.')
        except Exception:
            version = [ '0', '0', '0' ]

        # NOTE(review): the header is written by basename into the current
        # working directory, not to target[0].abspath -- presumably relying
        # on SCons running the action in the target's directory; confirm.
        with open(headername, 'w') as f:
            f.write('/* Auto Generated Header built by version.py - DO NOT MODIFY */\n')
            f.write('\n')
            f.write('#ifndef __%s__\n' % (headerdef))
            f.write('#define __%s__\n' % (headerdef))
            f.write('\n')
            f.write('#define VERSION_MAJOR %s\n' % version[0])
            f.write('#define VERSION_MINOR %s\n' % version[1])
            f.write('#define VERSION_MICRO %s\n' % version[2])
            f.write('\n')
            f.write('#endif /* __%s__ */\n' % (headerdef))

    env.Append(BUILDERS = {
        'Version' : SCons.Builder.Builder(
            action = SCons.Action.Action(generate_version_header, '${VERSIONCOMSTR}'),
            suffix = '.h',
        ),
    })
def exists(env):
    # SCons tool protocol hook: report that this tool is always available.
    return 1
def generate(env, **kwargs):
    """SCons tool entry point: install the version flags and builders on *env*.

    Uses a plain loop -- the previous list comprehension built a throwaway
    list purely for its side effects.
    """
    for setup in (version_flags, version_builders):
        setup(env)
| apache-2.0 |
yencarnacion/jaikuengine | .google_appengine/lib/django-1.5/django/core/management/commands/startproject.py | 201 | 1323 | from django.core.management.base import CommandError
from django.core.management.templates import TemplateCommand
from django.utils.crypto import get_random_string
from django.utils.importlib import import_module
class Command(TemplateCommand):
    help = ("Creates a Django project directory structure for the given "
            "project name in the current directory or optionally in the "
            "given directory.")

    def handle(self, project_name=None, target=None, *args, **options):
        """Validate the project name, seed SECRET_KEY, and delegate to the
        template machinery."""
        if project_name is None:
            raise CommandError("you must provide a project name")

        # The project name must not shadow an importable module.
        name_is_taken = True
        try:
            import_module(project_name)
        except ImportError:
            name_is_taken = False
        if name_is_taken:
            raise CommandError("%r conflicts with the name of an existing "
                               "Python module and cannot be used as a "
                               "project name. Please try another name." %
                               project_name)

        # Create a random SECRET_KEY hash to put it in the main settings.
        secret_chars = 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'
        options['secret_key'] = get_random_string(50, secret_chars)

        super(Command, self).handle('project', project_name, target, **options)
| apache-2.0 |
Wilbeibi/rethinkdb | external/v8_3.30.33.16/testing/gmock/scripts/fuse_gmock_files.py | 729 | 8606 | #!/usr/bin/env python
#
# Copyright 2009, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""fuse_gmock_files.py v0.1.0
Fuses Google Mock and Google Test source code into two .h files and a .cc file.
SYNOPSIS
fuse_gmock_files.py [GMOCK_ROOT_DIR] OUTPUT_DIR
Scans GMOCK_ROOT_DIR for Google Mock and Google Test source
code, assuming Google Test is in the GMOCK_ROOT_DIR/gtest
sub-directory, and generates three files:
OUTPUT_DIR/gtest/gtest.h, OUTPUT_DIR/gmock/gmock.h, and
OUTPUT_DIR/gmock-gtest-all.cc. Then you can build your tests
by adding OUTPUT_DIR to the include search path and linking
with OUTPUT_DIR/gmock-gtest-all.cc. These three files contain
everything you need to use Google Mock. Hence you can
"install" Google Mock by copying them to wherever you want.
GMOCK_ROOT_DIR can be omitted and defaults to the parent
directory of the directory holding this script.
EXAMPLES
./fuse_gmock_files.py fused_gmock
./fuse_gmock_files.py path/to/unpacked/gmock fused_gmock
This tool is experimental. In particular, it assumes that there is no
conditional inclusion of Google Mock or Google Test headers. Please
report any problems to googlemock@googlegroups.com. You can read
http://code.google.com/p/googlemock/wiki/CookBook for more
information.
"""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import re
import sets
import sys
# We assume that this file is in the scripts/ directory in the Google
# Mock root directory.
DEFAULT_GMOCK_ROOT_DIR = os.path.join(os.path.dirname(__file__), '..')
# We need to call into gtest/scripts/fuse_gtest_files.py.
sys.path.append(os.path.join(DEFAULT_GMOCK_ROOT_DIR, 'gtest/scripts'))
import fuse_gtest_files
gtest = fuse_gtest_files
# Regex for matching '#include "gmock/..."'.
INCLUDE_GMOCK_FILE_REGEX = re.compile(r'^\s*#\s*include\s*"(gmock/.+)"')
# Where to find the source seed files.
GMOCK_H_SEED = 'include/gmock/gmock.h'
GMOCK_ALL_CC_SEED = 'src/gmock-all.cc'
# Where to put the generated files.
GTEST_H_OUTPUT = 'gtest/gtest.h'
GMOCK_H_OUTPUT = 'gmock/gmock.h'
GMOCK_GTEST_ALL_CC_OUTPUT = 'gmock-gtest-all.cc'
def GetGTestRootDir(gmock_root):
  """Returns the root directory of Google Test (gmock_root's gtest/ subdir)."""
  gtest_root = os.path.join(gmock_root, 'gtest')
  return gtest_root
def ValidateGMockRootDir(gmock_root):
  """Makes sure gmock_root points to a valid gmock root directory.

  The function aborts the program on failure.
  """
  gtest.ValidateGTestRootDir(GetGTestRootDir(gmock_root))
  for seed in (GMOCK_H_SEED, GMOCK_ALL_CC_SEED):
    gtest.VerifyFileExists(gmock_root, seed)
def ValidateOutputDir(output_dir):
  """Makes sure output_dir points to a valid output directory.

  The function aborts the program on failure.
  """
  for output in (gtest.GTEST_H_OUTPUT, GMOCK_H_OUTPUT,
                 GMOCK_GTEST_ALL_CC_OUTPUT):
    gtest.VerifyOutputFile(output_dir, output)
def FuseGMockH(gmock_root, output_dir):
  """Scans folder gmock_root to generate gmock/gmock.h in output_dir.

  Fixes vs. the original: uses the builtin set() instead of the deprecated
  sets.Set, open() instead of the py2-only file() builtin, and closes each
  header file via a context manager instead of leaking the handle.
  """
  output_file = open(os.path.join(output_dir, GMOCK_H_OUTPUT), 'w')
  processed_files = set()  # Holds all gmock headers we've processed.

  def ProcessFile(gmock_header_path):
    """Processes the given gmock header file."""

    # We don't process the same header twice.
    if gmock_header_path in processed_files:
      return

    processed_files.add(gmock_header_path)

    # Reads each line in the given gmock header.
    with open(os.path.join(gmock_root, gmock_header_path), 'r') as header:
      for line in header:
        m = INCLUDE_GMOCK_FILE_REGEX.match(line)
        if m:
          # It's '#include "gmock/..."' - let's process it recursively.
          ProcessFile('include/' + m.group(1))
        else:
          m = gtest.INCLUDE_GTEST_FILE_REGEX.match(line)
          if m:
            # It's '#include "gtest/foo.h"'.  We translate it to
            # "gtest/gtest.h", regardless of what foo is, since all
            # gtest headers are fused into gtest/gtest.h.

            # There is no need to #include gtest.h twice.
            if not gtest.GTEST_H_SEED in processed_files:
              processed_files.add(gtest.GTEST_H_SEED)
              output_file.write('#include "%s"\n' % (gtest.GTEST_H_OUTPUT,))
          else:
            # Otherwise we copy the line unchanged to the output file.
            output_file.write(line)

  ProcessFile(GMOCK_H_SEED)
  output_file.close()
def FuseGMockAllCcToFile(gmock_root, output_file):
  """Scans folder gmock_root to fuse gmock-all.cc into output_file.

  Fixes vs. the original: builtin set() instead of the deprecated sets.Set,
  open() instead of the py2-only file() builtin, and each source file is
  closed via a context manager instead of being leaked.
  """
  processed_files = set()

  def ProcessFile(gmock_source_file):
    """Processes the given gmock source file."""

    # We don't process the same #included file twice.
    if gmock_source_file in processed_files:
      return

    processed_files.add(gmock_source_file)

    # Reads each line in the given gmock source file.
    with open(os.path.join(gmock_root, gmock_source_file), 'r') as source:
      for line in source:
        m = INCLUDE_GMOCK_FILE_REGEX.match(line)
        if m:
          # It's '#include "gmock/foo.h"'.  We treat it as '#include
          # "gmock/gmock.h"', as all other gmock headers are being fused
          # into gmock.h and cannot be #included directly.

          # There is no need to #include "gmock/gmock.h" more than once.
          if not GMOCK_H_SEED in processed_files:
            processed_files.add(GMOCK_H_SEED)
            output_file.write('#include "%s"\n' % (GMOCK_H_OUTPUT,))
        else:
          m = gtest.INCLUDE_GTEST_FILE_REGEX.match(line)
          if m:
            # It's '#include "gtest/..."'.
            # There is no need to #include gtest.h as it has been
            # #included by gtest-all.cc.
            pass
          else:
            m = gtest.INCLUDE_SRC_FILE_REGEX.match(line)
            if m:
              # It's '#include "src/foo"' - let's process it recursively.
              ProcessFile(m.group(1))
            else:
              # Otherwise we copy the line unchanged to the output file.
              output_file.write(line)

  ProcessFile(GMOCK_ALL_CC_SEED)
def FuseGMockGTestAllCc(gmock_root, output_dir):
  """Scans folder gmock_root to generate gmock-gtest-all.cc in output_dir.

  The output is gtest-all.cc followed by gmock-all.cc, fused into one
  translation unit.
  """
  output_file = file(os.path.join(output_dir, GMOCK_GTEST_ALL_CC_OUTPUT), 'w')
  # First, fuse gtest-all.cc into gmock-gtest-all.cc.
  gtest.FuseGTestAllCcToFile(GetGTestRootDir(gmock_root), output_file)
  # Next, append fused gmock-all.cc to gmock-gtest-all.cc.
  FuseGMockAllCcToFile(gmock_root, output_file)
  output_file.close()
def FuseGMock(gmock_root, output_dir):
  """Fuses gtest.h, gmock.h, and gmock-gtest-all.h."""
  # Validate both directories up front so we fail before writing anything.
  ValidateGMockRootDir(gmock_root)
  ValidateOutputDir(output_dir)
  gtest.FuseGTestH(GetGTestRootDir(gmock_root), output_dir)
  FuseGMockH(gmock_root, output_dir)
  FuseGMockGTestAllCc(gmock_root, output_dir)
def main():
  """Parses the command line and drives the fusing process.

  Accepts either one argument (output dir, default gmock root) or two
  (gmock root, output dir); anything else prints usage and exits 1.
  """
  argc = len(sys.argv)
  if argc == 2:
    # fuse_gmock_files.py OUTPUT_DIR
    FuseGMock(DEFAULT_GMOCK_ROOT_DIR, sys.argv[1])
  elif argc == 3:
    # fuse_gmock_files.py GMOCK_ROOT_DIR OUTPUT_DIR
    FuseGMock(sys.argv[1], sys.argv[2])
  else:
    # Python 2 print statement; __doc__ holds the module usage text.
    print __doc__
    sys.exit(1)
if __name__ == '__main__':
main()
| agpl-3.0 |
zcbenz/cefode-chromium | testing/test_env.py | 19 | 2934 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Sets environment variables needed to run a chromium unit test."""
import os
import stat
import subprocess
import sys
# This is hardcoded to be src/ relative to this script.
ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
CHROME_SANDBOX_ENV = 'CHROME_DEVEL_SANDBOX'
CHROME_SANDBOX_PATH = '/opt/chromium/chrome_sandbox'
def should_enable_sandbox(sandbox_path):
  """Return a boolean indicating that the current slave is capable of using
  the sandbox and should enable it.

  True iff this is a Linux host and the sandbox binary at sandbox_path
  exists, is owned by root, and is setuid + owner-readable + owner-executable.
  """
  if not sys.platform.startswith('linux'):
    return False
  if not os.path.exists(sandbox_path):
    return False
  st = os.stat(sandbox_path)
  required_bits = stat.S_ISUID | stat.S_IRUSR | stat.S_IXUSR
  return st.st_uid == 0 and (st.st_mode & required_bits) == required_bits
def enable_sandbox_if_required(env, verbose=False):
  """Checks enables the sandbox if it is required, otherwise it disables it.

  Mutates *env* in place: sets CHROME_DEVEL_SANDBOX to the sandbox path
  when usable, otherwise to the empty string (explicitly disabled).
  """
  # An existing CHROME_DEVEL_SANDBOX value overrides the hardcoded default.
  chrome_sandbox_path = env.get(CHROME_SANDBOX_ENV, CHROME_SANDBOX_PATH)
  if should_enable_sandbox(chrome_sandbox_path):
    if verbose:
      print 'Enabling sandbox. Setting environment variable:'
      print '  %s="%s"' % (CHROME_SANDBOX_ENV, chrome_sandbox_path)
    env[CHROME_SANDBOX_ENV] = chrome_sandbox_path
  else:
    if verbose:
      print 'Disabling sandbox. Setting environment variable:'
      print '  %s=""' % CHROME_SANDBOX_ENV
    env[CHROME_SANDBOX_ENV] = ''
def fix_python_path(cmd):
  """Returns the fixed command line to call the right python executable.

  The input list is copied, never mutated: a bare 'python' argv[0] is
  replaced by sys.executable, and a '*.py' argv[0] gets sys.executable
  prepended as the interpreter.
  """
  fixed = list(cmd)
  first = fixed[0]
  if first == 'python':
    fixed[0] = sys.executable
  elif first.endswith('.py'):
    fixed = [sys.executable] + fixed
  return fixed
def run_executable(cmd, env):
  """Runs an executable with:
  - environment variable CR_SOURCE_ROOT removed so the default lookup
    logic in base/base_paths_linux.cc is used.
  - environment variable LANG set to en_US.UTF-8.
  - environment variable CHROME_DEVEL_SANDBOX set if need
  - Reuses sys.executable automatically.

  Returns the child's exit code; re-raises OSError if the process could
  not be started. Note: both *cmd* (element 0) and *env* are mutated.
  """
  # Many tests assume a English interface...
  env['LANG'] = 'en_US.UTF-8'
  # Used by base/base_paths_linux.cc as an override. Just make sure the default
  # logic is used.
  env.pop('CR_SOURCE_ROOT', None)
  enable_sandbox_if_required(env)
  # Ensure paths are correctly separated on windows.
  cmd[0] = cmd[0].replace('/', os.path.sep)
  cmd = fix_python_path(cmd)
  try:
    return subprocess.call(cmd, env=env)
  except OSError:
    print >> sys.stderr, 'Failed to start %s' % cmd
    raise
def main():
  """Entry point: run sys.argv[1:] as a command against a copy of os.environ."""
  return run_executable(sys.argv[1:], os.environ.copy())
if __name__ == '__main__':
sys.exit(main())
| bsd-3-clause |
PNProductions/py-seam-merging | examples/image_example.py | 1 | 1677 | from image_helper import image_open, local_path
from seammerging import seam_merging
from utils.tvd import TotalVariationDenoising
from utils.seams import print_seams
import cv2
from numpy import size, float64, array, abs
import time
import os
# Algorithm parameters: energy weights and TV-denoising iteration count.
alpha = 0.5
betaEn = 0.5
iterTV = 80
# Script flags; makeNewDecData/debug/saveBMP are defined but unused below.
makeNewDecData = False
debug = False
saveBMP = True
file_suffix = '_small'
folder_name = 'results'
X = image_open(local_path('../assets/skyscraper.jpg'))
# Negative width delta removes columns; a positive value would enlarge
# (see the '_reduce'/'_enlarge' naming below).
deleteNumberW = -1
deleteNumberH = 0
# Work on the luma channel of the YCrCb conversion.
y = cv2.cvtColor(X, cv2.COLOR_BGR2YCR_CB)
y = y.astype(float64)
structureImage = TotalVariationDenoising(y[:, :, 0], iterTV).generate() # y = to_matlab_ycbcr(y[:, :, 0])
importance = y
kernel = array([[0, 0, 0],
[1, 0, -1],
[0, 0, 0]
])
# Importance map: sum of absolute horizontal and vertical gradients of luma.
importance = abs(cv2.filter2D(y[:, :, 0], -1, kernel, borderType=cv2.BORDER_REPLICATE)) + abs(cv2.filter2D(y[:, :, 0], -1, kernel.T, borderType=cv2.BORDER_REPLICATE))
img, seams = seam_merging(X, structureImage, importance, deleteNumberW, alpha, betaEn)
seams = print_seams(X, img, seams, deleteNumberW)
# Output name encodes direction, seam count and a timestamp.
size = '_reduce' if deleteNumberW < 0 else '_enlarge'
size += str(-deleteNumberW) if deleteNumberW < 0 else str(deleteNumberW)
name = 'result_' + file_suffix + '_' + size + '_' + str(int(time.time()))
if not os.path.exists(folder_name):
    os.makedirs(folder_name)
cv2.imwrite(local_path('./' + folder_name + '/' + name + '.png'), img)
cv2.imwrite(local_path('./' + folder_name + '/' + name + '_seams_.png'), seams)
# cv2.imwrite(local_path('./' + folder_name + '/' + name + '_cartoon_.png'), structureImage)
# cv2.imwrite(local_path('./' + folder_name + '/' + name + '_importance_.png'), importance)
| mit |
sanjuro/RCJK | vendor/django/contrib/localflavor/de/forms.py | 35 | 3129 | """
DE-specific Form helpers
"""
from django.forms import ValidationError
from django.forms.fields import Field, RegexField, Select, EMPTY_VALUES
from django.utils.translation import ugettext_lazy as _
import re
id_re = re.compile(r"^(?P<residence>\d{10})(?P<origin>\w{1,3})[-\ ]?(?P<birthday>\d{7})[-\ ]?(?P<validity>\d{7})[-\ ]?(?P<checksum>\d{1})$")
class DEZipCodeField(RegexField):
    """A form field validating a German zip code (exactly five digits)."""
    default_error_messages = {
        'invalid': _('Enter a zip code in the format XXXXX.'),
    }

    def __init__(self, *args, **kwargs):
        # Fixed five-digit pattern; length bounds are implied by the regex.
        super(DEZipCodeField, self).__init__(r'^\d{5}$',
            max_length=None, min_length=None, *args, **kwargs)
class DEStateSelect(Select):
    """
    A Select widget that uses a list of DE states as its choices.
    """
    def __init__(self, attrs=None):
        # Deferred import so the choices module is only loaded on use.
        from de_states import STATE_CHOICES
        super(DEStateSelect, self).__init__(attrs, choices=STATE_CHOICES)
class DEIdentityCardNumberField(Field):
    """
    A German identity card number.

    Checks the following rules to determine whether the number is valid:

        * Conforms to the XXXXXXXXXXX-XXXXXXX-XXXXXXX-X format.
        * No group consists entirely of zeroes.
        * Included checksums match calculated checksums

    Algorithm is documented at http://de.wikipedia.org/wiki/Personalausweis
    """
    default_error_messages = {
        'invalid': _('Enter a valid German identity card number in XXXXXXXXXXX-XXXXXXX-XXXXXXX-X format.'),
    }

    def has_valid_checksum(self, number):
        """Return True iff the last digit of *number* is its 7-3-1 checksum."""
        given_number, given_checksum = number[:-1], number[-1]
        calculated_checksum = 0
        fragment = ""
        parameter = 7

        # Multiply each digit by the cycling weight 7, 3, 1, 7, ... and
        # accumulate the last digit of every product.
        for i in range(len(given_number)):
            fragment = str(int(given_number[i]) * parameter)
            # NOTE(review): fragment is the decimal string of a product, so
            # isalnum() is always true here; the guard is effectively a no-op.
            if fragment.isalnum():
                calculated_checksum += int(fragment[-1])
            if parameter == 1:
                parameter = 7
            elif parameter == 3:
                parameter = 1
            elif parameter ==7:
                parameter = 3
        # Only the last digit of the accumulated sum is compared.
        return str(calculated_checksum)[-1] == given_checksum

    def clean(self, value):
        super(DEIdentityCardNumberField, self).clean(value)
        if value in EMPTY_VALUES:
            return u''
        match = re.match(id_re, value)
        if not match:
            raise ValidationError(self.error_messages['invalid'])

        gd = match.groupdict()
        residence, origin = gd['residence'], gd['origin']
        birthday, validity, checksum = gd['birthday'], gd['validity'], gd['checksum']

        # All-zero groups are syntactically valid but semantically impossible.
        if residence == '0000000000' or birthday == '0000000' or validity == '0000000':
            raise ValidationError(self.error_messages['invalid'])

        # The final digit checksums the concatenation of all numeric groups.
        all_digits = u"%s%s%s%s" % (residence, birthday, validity, checksum)
        if not self.has_valid_checksum(residence) or not self.has_valid_checksum(birthday) or \
            not self.has_valid_checksum(validity) or not self.has_valid_checksum(all_digits):
            raise ValidationError(self.error_messages['invalid'])

        # Normalized canonical representation with dashes.
        return u'%s%s-%s-%s-%s' % (residence, origin, birthday, validity, checksum)
| apache-2.0 |
xHeliotrope/injustice_dropper | env/lib/python3.4/site-packages/django/templatetags/tz.py | 251 | 5574 | from datetime import datetime, tzinfo
from django.template import Library, Node, TemplateSyntaxError
from django.utils import six, timezone
try:
import pytz
except ImportError:
pytz = None
register = Library()
# HACK: datetime is an old-style class, create a new-style equivalent
# so we can define additional attributes.
class datetimeobject(datetime, object):
    """datetime subclass that accepts ad-hoc attributes (convert_to_local_time)."""
    pass
# Template filters
@register.filter
def localtime(value):
    """
    Converts a datetime to local time in the active time zone.

    This only makes sense within a {% localtime off %} block.
    """
    # Delegates to do_timezone, which returns '' for non-datetime values.
    return do_timezone(value, timezone.get_current_timezone())
@register.filter
def utc(value):
    """
    Converts a datetime to UTC.
    """
    # Same conversion machinery as the 'timezone' filter, fixed to UTC.
    return do_timezone(value, timezone.utc)
@register.filter('timezone')
def do_timezone(value, arg):
    """
    Converts a datetime to local time in a given time zone.

    The argument must be an instance of a tzinfo subclass or a time zone name.
    If it is a time zone name, pytz is required.

    Naive datetimes are assumed to be in local time in the default time zone.

    Returns '' (never raises) for any non-datetime value, unknown zone name,
    or conversion failure, per the template-filter contract.
    """
    if not isinstance(value, datetime):
        return ''

    # Obtain a timezone-aware datetime
    try:
        if timezone.is_naive(value):
            default_timezone = timezone.get_default_timezone()
            value = timezone.make_aware(value, default_timezone)
    # Filters must never raise exceptions, and pytz' exceptions inherit
    # Exception directly, not a specific subclass. So catch everything.
    except Exception:
        return ''

    # Obtain a tzinfo instance
    if isinstance(arg, tzinfo):
        tz = arg
    elif isinstance(arg, six.string_types) and pytz is not None:
        try:
            tz = pytz.timezone(arg)
        except pytz.UnknownTimeZoneError:
            return ''
    else:
        return ''

    result = timezone.localtime(value, tz)

    # HACK: the convert_to_local_time flag will prevent
    # automatic conversion of the value to local time.
    result = datetimeobject(result.year, result.month, result.day,
                            result.hour, result.minute, result.second,
                            result.microsecond, result.tzinfo)
    result.convert_to_local_time = False
    return result
# Template tags
class LocalTimeNode(Node):
    """
    Template node class used by ``localtime_tag``.
    """
    def __init__(self, nodelist, use_tz):
        self.nodelist = nodelist
        self.use_tz = use_tz

    def render(self, context):
        # Save and restore use_tz so the override only applies inside the
        # block. NOTE(review): no try/finally — an exception during render
        # leaves the overridden setting in place; confirm intended.
        old_setting = context.use_tz
        context.use_tz = self.use_tz
        output = self.nodelist.render(context)
        context.use_tz = old_setting
        return output
class TimezoneNode(Node):
    """
    Template node class used by ``timezone_tag``.
    """
    def __init__(self, nodelist, tz):
        self.nodelist = nodelist
        # tz is an unresolved filter expression; resolved per render.
        self.tz = tz

    def render(self, context):
        # timezone.override handles activation/restoration as a context
        # manager, so this is exception-safe.
        with timezone.override(self.tz.resolve(context)):
            output = self.nodelist.render(context)
        return output
class GetCurrentTimezoneNode(Node):
    """
    Template node class used by ``get_current_timezone_tag``.
    """
    def __init__(self, variable):
        # Name of the context variable to store the zone name under.
        self.variable = variable

    def render(self, context):
        context[self.variable] = timezone.get_current_timezone_name()
        # Renders nothing; only the context side effect matters.
        return ''
@register.tag('localtime')
def localtime_tag(parser, token):
    """
    Forces or prevents conversion of datetime objects to local time,
    regardless of the value of ``settings.USE_TZ``.

    Sample usage::

        {% localtime off %}{{ value_in_utc }}{% endlocaltime %}
    """
    bits = token.split_contents()
    # Bare {% localtime %} means "on"; otherwise exactly one on/off argument.
    if len(bits) == 1:
        use_tz = True
    elif len(bits) > 2 or bits[1] not in ('on', 'off'):
        raise TemplateSyntaxError("%r argument should be 'on' or 'off'" %
                                  bits[0])
    else:
        use_tz = bits[1] == 'on'

    nodelist = parser.parse(('endlocaltime',))
    parser.delete_first_token()
    return LocalTimeNode(nodelist, use_tz)
@register.tag('timezone')
def timezone_tag(parser, token):
    """
    Enables a given time zone just for this block.

    The ``timezone`` argument must be an instance of a ``tzinfo`` subclass, a
    time zone name, or ``None``. If is it a time zone name, pytz is required.
    If it is ``None``, the default time zone is used within the block.

    Sample usage::

        {% timezone "Europe/Paris" %}
            It is {{ now }} in Paris.
        {% endtimezone %}
    """
    bits = token.split_contents()
    if len(bits) != 2:
        raise TemplateSyntaxError("'%s' takes one argument (timezone)" %
                                  bits[0])
    # Compiled as a filter expression so variables and literals both work.
    tz = parser.compile_filter(bits[1])
    nodelist = parser.parse(('endtimezone',))
    parser.delete_first_token()
    return TimezoneNode(nodelist, tz)
@register.tag("get_current_timezone")
def get_current_timezone_tag(parser, token):
    """
    Stores the name of the current time zone in the context.

    Usage::

        {% get_current_timezone as TIME_ZONE %}

    This will fetch the currently active time zone and put its name
    into the ``TIME_ZONE`` context variable.
    """
    # token.split_contents() isn't useful here because this tag doesn't accept variable as arguments
    args = token.contents.split()
    # Syntax is strictly: get_current_timezone as <variable>.
    if len(args) != 3 or args[1] != 'as':
        raise TemplateSyntaxError("'get_current_timezone' requires "
                                  "'as variable' (got %r)" % args)
    return GetCurrentTimezoneNode(args[2])
| mit |
yashodhank/frappe | frappe/desk/doctype/todo/todo.py | 10 | 3035 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
import json
from frappe.model.document import Document
from frappe.utils import get_fullname
subject_field = "description"
sender_field = "sender"
exclude_from_linked_with = True
class ToDo(Document):
	"""Assignment/to-do document; mirrors open assignees onto the referenced doc."""

	def validate(self):
		# Decide which assignment comment (if any) on_update should post.
		self._assignment = None
		if self.is_new():
			self._assignment = {
				"text": frappe._("Assigned to {0}: {1}").format(get_fullname(self.owner), self.description),
				"comment_type": "Assigned"
			}
		else:
			# NOTE the previous value is only available in validate method
			if self.get_db_value("status") != self.status:
				self._assignment = {
					"text": frappe._("Assignment closed by {0}".format(get_fullname(frappe.session.user))),
					"comment_type": "Assignment Completed"
				}

	def on_update(self):
		if self._assignment:
			self.add_assign_comment(**self._assignment)

		self.update_in_reference()

	def on_trash(self):
		# unlink todo from linked comments
		frappe.db.sql("""update `tabCommunication` set link_doctype=null, link_name=null
			where link_doctype=%(doctype)s and link_name=%(name)s""", {"doctype": self.doctype, "name": self.name})

		self.update_in_reference()

	def add_assign_comment(self, text, comment_type):
		# Only meaningful when this todo points at another document.
		if not (self.reference_type and self.reference_name):
			return

		frappe.get_doc(self.reference_type, self.reference_name).add_comment(comment_type, text,
			link_doctype=self.doctype, link_name=self.name)

	def update_in_reference(self):
		# Rebuild the referenced document's _assign JSON list from all
		# still-open todos pointing at it.
		if not (self.reference_type and self.reference_name):
			return

		try:
			assignments = [d[0] for d in frappe.get_all("ToDo",
				filters={
					"reference_type": self.reference_type,
					"reference_name": self.reference_name,
					"status": "Open"
				},
				fields=["owner"], as_list=True)]
			assignments.reverse()
			frappe.db.set_value(self.reference_type, self.reference_name,
				"_assign", json.dumps(assignments), update_modified=False)

		except Exception, e:
			# MySQL error 1146 (table missing) is tolerable during install.
			if e.args[0] == 1146 and frappe.flags.in_install:
				# no table
				return

			# 1054: unknown column — create _assign on the fly and retry.
			elif e.args[0]==1054:
				from frappe.model.db_schema import add_column
				add_column(self.reference_type, "_assign", "Text")
				self.update_in_reference()

			else:
				raise
# NOTE: todo is viewable if either owner or assigned_to or System Manager in roles
def on_doctype_update():
	# Composite index speeds up the reference lookups in update_in_reference.
	frappe.db.add_index("ToDo", ["reference_type", "reference_name"])
def get_permission_query_conditions(user):
	"""Return an SQL fragment restricting ToDo lists to owner/assigner."""
	if not user: user = frappe.session.user

	# System Managers see everything (None means no extra condition).
	if "System Manager" in frappe.get_roles(user):
		return None
	else:
		return """(tabToDo.owner = '{user}' or tabToDo.assigned_by = '{user}')"""\
			.format(user=frappe.db.escape(user))
def has_permission(doc, user):
	"""A ToDo is visible to System Managers, its owner, and its assigner."""
	if "System Manager" in frappe.get_roles(user):
		return True
	else:
		return doc.owner==user or doc.assigned_by==user
@frappe.whitelist()
def new_todo(description):
	"""RPC endpoint: create a ToDo with the given description for the caller."""
	frappe.get_doc({
		'doctype': 'ToDo',
		'description': description
	}).insert()
jarun/Buku | bukuserver/filters.py | 1 | 7778 | from enum import Enum
from flask_admin.model import filters
class BookmarkField(Enum):
    """Column positions within a bookmark tuple, used as filter indices."""
    ID = 0
    URL = 1
    TITLE = 2
    TAGS = 3
    DESCRIPTION = 4
def equal_func(query, value, index):
    """Yield rows of *query* whose *index*-th field equals *value*."""
    return (row for row in query if row[index] == value)
def not_equal_func(query, value, index):
    """Yield rows of *query* whose *index*-th field differs from *value*."""
    return (row for row in query if row[index] != value)
def greater_func(query, value, index):
    """Yield rows of *query* whose *index*-th field is greater than *value*."""
    return (row for row in query if row[index] > value)
def smaller_func(query, value, index):
    """Yield rows of *query* whose *index*-th field is smaller than *value*."""
    return (row for row in query if row[index] < value)
def in_list_func(query, value, index):
    """Yield rows of *query* whose *index*-th field occurs in *value*."""
    return (row for row in query if row[index] in value)
def not_in_list_func(query, value, index):
    """Yield rows of *query* whose *index*-th field does not occur in *value*."""
    return (row for row in query if row[index] not in value)
def top_x_func(query, value, index):
    """Yield rows whose *index*-th field is among the *value* largest
    distinct values.

    Note: *query* is iterated twice, so it must be re-iterable (a list).
    """
    distinct = {row[index] for row in query}
    threshold = set(sorted(distinct, reverse=True)[:value])
    return (row for row in query if row[index] in threshold)
def bottom_x_func(query, value, index):
    """Yield rows whose *index*-th field is among the *value* smallest
    distinct values.

    Note: *query* is iterated twice, so it must be re-iterable (a list).
    """
    distinct = {row[index] for row in query}
    threshold = set(sorted(distinct)[:value])
    return (row for row in query if row[index] in threshold)
class FilterType(Enum):
    """Pairs each operation with its predicate function and UI label."""
    EQUAL = {'func': equal_func, 'text':'equals'}
    NOT_EQUAL = {'func': not_equal_func, 'text':'not equal'}
    GREATER = {'func': greater_func, 'text':'greater than'}
    SMALLER = {'func': smaller_func, 'text':'smaller than'}
    IN_LIST = {'func': in_list_func, 'text':'in list'}
    NOT_IN_LIST = {'func': not_in_list_func, 'text':'not in list'}
    TOP_X = {'func': top_x_func, 'text': 'top x'}
    BOTTOM_X = {'func': bottom_x_func, 'text': 'bottom x'}
class BaseFilter(filters.BaseFilter):
    """Flask-Admin filter applying a stored predicate to in-memory rows.

    Subclasses are expected to set operation_text, apply_func and index.
    """
    def operation(self):
        # Human-readable label shown in the filter dropdown.
        return getattr(self, 'operation_text')

    def apply(self, query, value):
        # 'index' selects the tuple column the predicate inspects.
        return getattr(self, 'apply_func')(query, value, getattr(self, 'index'))
class TagBaseFilter(BaseFilter):
    """Filter over tag tuples; 'name' maps to column 0, 'usage_count' to 1."""

    def __init__(
            self,
            name,
            operation_text=None,
            apply_func=None,
            filter_type=None,
            options=None,
            data_type=None):
        # List-valued operations need the multi-entry select2 widget.
        if operation_text in ('in list', 'not in list'):
            super().__init__(name, options, data_type='select2-tags')
        else:
            super().__init__(name, options, data_type)
        if name == 'name':
            self.index = 0
        elif name == 'usage_count':
            self.index = 1
        else:
            raise ValueError('name: {}'.format(name))
        self.filter_type = None
        # Either a FilterType member supplies both pieces, or they are
        # passed explicitly via operation_text/apply_func.
        if filter_type:
            self.apply_func = filter_type.value['func']
            self.operation_text = filter_type.value['text']
            self.filter_type = filter_type
        else:
            self.apply_func = apply_func
            self.operation_text = operation_text

    def clean(self, value):
        """Normalize the raw form value before apply() receives it."""
        # Comma-separated input for list operations; ints for usage_count.
        if (
                self.filter_type in (FilterType.IN_LIST, FilterType.NOT_IN_LIST) and
                self.name == 'usage_count'):
            value = [int(v.strip()) for v in value.split(',') if v.strip()]
        elif self.filter_type in (FilterType.IN_LIST, FilterType.NOT_IN_LIST):
            value = [v.strip() for v in value.split(',') if v.strip()]
        elif self.name == 'usage_count':
            value = int(value)
            # Top/bottom-x counts must be positive.
            if self.filter_type in (FilterType.TOP_X, FilterType.BOTTOM_X) and value < 1:
                raise ValueError
        if isinstance(value, str):
            return value.strip()
        return value
class BookmarkBukuFilter(BaseFilter):
    """Carries buku search flags (match all / deep / regex) as a filter.

    apply() is a no-op here; presumably the actual search is executed by
    the view using these flags — confirm against the caller.
    """

    def __init__(self, *args, **kwargs):
        self.keys = {
            'all_keywords': 'match all',
            'deep': 'deep',
            'regex': 'regex'
        }
        # Truthy known flags become attributes; everything else (including
        # falsy known flags) is set to False.
        for key, value in kwargs.items():
            if key in self.keys and value:
                setattr(self, key, value)
            else:
                setattr(self, key, False)
        # Strip the flag kwargs before delegating to the base constructor.
        # NOTE(review): kwargs.pop(x) has no default, so callers must always
        # pass all three keys or this raises KeyError — verify.
        list(map(lambda x: kwargs.pop(x), self.keys))
        super().__init__('buku', *args, **kwargs)

    def operation(self):
        # Label is 'search' plus the comma-joined names of the active flags.
        parts = []
        for key, value in self.keys.items():
            if getattr(self, key):
                parts.append(value)
        if not parts:
            return 'search'
        return 'search ' + ', '.join(parts)

    def apply(self, query, value):
        # Intentionally pass-through; see class docstring.
        return query
class BookmarkBaseFilter(BaseFilter):
    """In-memory filter over bookmark tuples.

    'name' must be a lowercase BookmarkField member name; it is mapped to
    the tuple column the predicate operates on.
    """

    def __init__(
            self,
            name,
            operation_text=None,
            apply_func=None,
            filter_type=None,
            options=None,
            data_type=None):
        # List-valued operations need the multi-entry select2 widget.
        if operation_text in ('in list', 'not in list'):
            super().__init__(name, options, data_type='select2-tags')
        else:
            super().__init__(name, options, data_type)
        bm_fields_dict = {x.name.lower(): x.value for x in BookmarkField}
        if name in bm_fields_dict:
            self.index = bm_fields_dict[name]
        else:
            raise ValueError('name: {}'.format(name))
        self.filter_type = None
        if filter_type:
            self.apply_func = filter_type.value['func']
            self.operation_text = filter_type.value['text']
            # Bug fix: filter_type was never stored on self, so the
            # IN_LIST/NOT_IN_LIST/TOP_X/BOTTOM_X branches in clean() below
            # could never trigger. TagBaseFilter already stores it; mirror
            # that behavior here.
            self.filter_type = filter_type
        else:
            self.apply_func = apply_func
            self.operation_text = operation_text

    def clean(self, value):
        """Normalize the raw form value before apply() receives it."""
        if (
                self.filter_type in (FilterType.IN_LIST, FilterType.NOT_IN_LIST) and
                self.name == BookmarkField.ID.name.lower()):
            value = [int(v.strip()) for v in value.split(',') if v.strip()]
        elif self.filter_type in (FilterType.IN_LIST, FilterType.NOT_IN_LIST):
            value = [v.strip() for v in value.split(',') if v.strip()]
        elif self.name == BookmarkField.ID.name.lower():
            value = int(value)
            # Top/bottom-x counts must be positive.
            if self.filter_type in (FilterType.TOP_X, FilterType.BOTTOM_X) and value < 1:
                raise ValueError
        if isinstance(value, str):
            return value.strip()
        return value
class BookmarkTagNumberEqualFilter(BookmarkBaseFilter):
    """Keeps bookmarks whose tag count equals the given value."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        def apply_func(query, value, index):
            # Tags are stored as one comma-separated string; empty
            # fragments (leading/trailing commas) are not counted.
            for item in query:
                tags = [tag for tag in item[index].split(',') if tag]
                if len(tags) == value:
                    yield item

        self.apply_func = apply_func

    def clean(self, value):
        # Tag counts are non-negative integers.
        value = int(value)
        if value < 0:
            raise ValueError
        return value
class BookmarkTagNumberGreaterFilter(BookmarkTagNumberEqualFilter):
    """Keeps bookmarks with more tags than the given value (inherits clean)."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        def apply_func(query, value, index):
            for item in query:
                tags = [tag for tag in item[index].split(',') if tag]
                if len(tags) > value:
                    yield item

        self.apply_func = apply_func
class BookmarkTagNumberNotEqualFilter(BookmarkTagNumberEqualFilter):
    """Keeps bookmarks whose tag count differs from the given value."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        def apply_func(query, value, index):
            # Same tag-splitting rule as the parent: empty fragments of the
            # comma-separated tag field are not counted.
            for item in query:
                tags = [tag for tag in item[index].split(',') if tag]
                if len(tags) != value:
                    yield item

        # Fixed: was "self. apply_func" (stray space after the dot) —
        # legal Python but inconsistent with every sibling filter class.
        self.apply_func = apply_func
class BookmarkTagNumberSmallerFilter(BookmarkBaseFilter):
    """Keeps bookmarks with fewer tags than the given value."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        def apply_func(query, value, index):
            for item in query:
                tags = [tag for tag in item[index].split(',') if tag]
                if len(tags) < value:
                    yield item

        self.apply_func = apply_func

    def clean(self, value):
        # "fewer than 0" can never match, so the threshold must be >= 1.
        value = int(value)
        if value < 1:
            raise ValueError
        return value
| gpl-3.0 |
ilovecv/NAMAS | dataset/make_dict.py | 9 | 1384 | #
# Copyright (c) 2015, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
#
# Author: Alexander M Rush <srush@seas.harvard.edu>
# Sumit Chopra <spchopra@fb.com>
# Jason Weston <jase@fb.com>
import sys
from collections import Counter
#@lint-avoid-python-3-compatibility-imports
# Count word frequencies over the title and article columns of the TSV
# input (argv[1]); argv[3] is the minimum count for a word to be kept.
title_words = Counter()
article_words = Counter()
limit = int(sys.argv[3])
for l in open(sys.argv[1]):
    splits = l.strip().split("\t")
    # Skip malformed rows that don't have exactly four tab-separated fields.
    if len(splits) != 4:
        continue
    title_parse, article_parse, title, article = l.strip().split("\t")
    title_words.update(title.lower().split())
    article_words.update(article.lower().split())
# Write one dict per column (argv[2] is the output prefix), seeding each
# with the special tokens; Python 2 "print >>f" redirects into the file.
with open(sys.argv[2] + ".article.dict", "w") as f:
    print >>f, "<unk>", 1e5
    print >>f, "<s>", 1e5
    print >>f, "</s>", 1e5
    # most_common() is sorted descending, so stop at the first rare word.
    for word, count in article_words.most_common():
        if count < limit:
            break
        print >>f, word, count
with open(sys.argv[2] + ".title.dict", "w") as f:
    print >>f, "<unk>", 1e5
    print >>f, "<s>", 1e5
    print >>f, "</s>", 1e5
    for word, count in title_words.most_common():
        if count < limit:
            break
        print >>f, word, count
| bsd-3-clause |
aspc/mainsite | aspc/housing/urls.py | 1 | 1067 | from django.conf.urls import patterns, include, url
from aspc.housing.views import home, RoomDetail, \
BrowseBuildings, BrowseBuildingFloor, ReviewRoom, ReviewRoomWithChoice, \
search
from django.contrib.auth.decorators import login_required
urlpatterns = [
    # Landing, search, and building browse pages.
    url(r'^$', home, name="housing_home"),
    url(r'^search/$', search, name="housing_search"),
    url(r'^browse/$', BrowseBuildings.as_view(), name="housing_browse"),
    # Review pages require authentication.
    url(r'^review/$', login_required(ReviewRoomWithChoice.as_view()), name="housing_review"),
    # Drill-down: building -> floor -> room -> room review.
    url(r'^browse/(?P<building>[^\s/]+)/$', BrowseBuildingFloor.as_view(), name="housing_browse_building_floor_first"),
    url(r'^browse/(?P<building>[^\s/]+)/(?P<floor>\d)/$', BrowseBuildingFloor.as_view(), name="housing_browse_building_floor"),
    url(r'^browse/(?P<building>[^\s/]+)/(?P<floor>\d)/(?P<room>[A-Za-z0-9]+)/$', RoomDetail.as_view(), name="housing_browse_room"),
    url(r'^browse/(?P<building>[^\s/]+)/(?P<floor>\d)/(?P<room>[A-Za-z0-9]+)/review/$', login_required(ReviewRoom.as_view()), name="housing_review_room"),
]
jolevq/odoopub | addons/product_margin/wizard/__init__.py | 444 | 1078 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import product_margin
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
czchen/debian-lxc | src/python-lxc/setup.py | 4 | 1144 | #!/usr/bin/python3
#
# python-lxc: Python bindings for LXC
#
# (C) Copyright Canonical Ltd. 2012
#
# Authors:
# Stéphane Graber <stgraber@ubuntu.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from distutils.core import setup, Extension
# C extension wrapping liblxc; linked against the system lxc library.
module = Extension('_lxc', sources=['lxc.c'], libraries=['lxc'])
setup(name='_lxc',
      version='0.1',
      description='LXC',
      packages=['lxc'],
      package_dir={'lxc': 'lxc'},
      ext_modules=[module])
| lgpl-2.1 |
mapr/sahara | sahara/service/networks.py | 5 | 3621 | # Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo.config import cfg
import six
from sahara import conductor as c
from sahara import context
from sahara.openstack.common import log as logging
from sahara.utils.openstack import neutron
from sahara.utils.openstack import nova
LOG = logging.getLogger(__name__)
conductor = c.API
CONF = cfg.CONF
def init_instances_ips(instance):
    """Extracts internal and management ips.

    As internal ip will be used the first ip from the nova networks CIDRs.
    If use_floating_ip flag is set than management ip will be the first
    non-internal ip.

    Persists both addresses on the instance via the conductor and returns
    a truthy value only when both were resolved.
    """
    server = nova.get_instance_info(instance)

    management_ip = None
    internal_ip = None

    # Nova reports addresses per network; 'fixed' entries are internal,
    # anything else is treated as a management (floating) address.
    for network_label, addresses in six.iteritems(server.addresses):
        for address in addresses:
            if address['OS-EXT-IPS:type'] == 'fixed':
                internal_ip = internal_ip or address['addr']
            else:
                management_ip = management_ip or address['addr']

    if not CONF.use_floating_ips:
        management_ip = internal_ip

    # NOTE(aignatov): Once bug #1262529 is fixed this 'if' block should be
    # reviewed and reformatted again, probably removed completely.
    if CONF.use_neutron and not (management_ip and internal_ip):
        LOG.debug("Instance %s doesn't contain yet Floating IP or Internal IP."
                  " Floating IP=%s, Internal IP=%s. Trying to get via Neutron."
                  % (server.name, management_ip, internal_ip))
        neutron_client = neutron.client()
        ports = neutron_client.list_ports(device_id=server.id)["ports"]
        if ports:
            # Only the first port / first floating IP is considered.
            target_port_id = ports[0]['id']
            fl_ips = neutron_client.list_floatingips(
                port_id=target_port_id)['floatingips']
            if fl_ips:
                fl_ip = fl_ips[0]
                if not internal_ip:
                    internal_ip = fl_ip['fixed_ip_address']
                    LOG.debug('Found fixed IP %s for %s' % (internal_ip,
                                                            server.name))
                # Zeroing management_ip if Sahara in private network
                if not CONF.use_floating_ips:
                    management_ip = internal_ip
                elif not management_ip:
                    management_ip = fl_ip['floating_ip_address']
                    LOG.debug('Found floating IP %s for %s' % (management_ip,
                                                               server.name))
    conductor.instance_update(context.ctx(), instance,
                              {"management_ip": management_ip,
                               "internal_ip": internal_ip})
    # Truthy (the management_ip string) iff both addresses are known.
    return internal_ip and management_ip
def assign_floating_ip(instance_id, pool):
    """Allocate a floating IP from *pool* and attach it to the instance."""
    ip = nova.client().floating_ips.create(pool)
    nova.client().servers.get(instance_id).add_floating_ip(ip)
def delete_floating_ip(instance_id):
    """Delete every floating IP currently associated with the instance."""
    fl_ips = nova.client().floating_ips.findall(instance_id=instance_id)
    for fl_ip in fl_ips:
        nova.client().floating_ips.delete(fl_ip.id)
| apache-2.0 |
iwpiv/autokey | src/test/configurationmanagertest.py | 53 | 1681 | import unittest
import lib.configurationmanager as conf
from lib.phrase import *
CONFIG_FILE = "../../config/abbr.ini"
class LegacyImporterTest(unittest.TestCase):
    """Validates LegacyImporter's parsing of the legacy abbr.ini format."""

    def setUp(self):
        # Fresh importer per test keeps parsed state isolated.
        self.importer = conf.LegacyImporter()
        self.importer.load_config(CONFIG_FILE)

    def testGlobalSettings(self):
        # Test old global defaults using a phrase that has no custom options defined
        # Locate otoh phrase
        otohPhrase = None
        for phrase in self.importer.phrases:
            if phrase.abbreviation == "otoh":
                otohPhrase = phrase
                break

        # assertIsNotNone replaces the deprecated assert_ alias
        # (removed in Python 3.12) and gives a clearer failure message.
        self.assertIsNotNone(otohPhrase)
        self.assertEqual(otohPhrase.immediate, False)
        self.assertEqual(otohPhrase.ignoreCase, False)
        self.assertEqual(otohPhrase.matchCase, False)
        self.assertEqual(otohPhrase.backspace, True)
        self.assertEqual(otohPhrase.omitTrigger, False)
        self.assertEqual(otohPhrase.triggerInside, False)

    def testPhraseCount(self):
        self.assertEqual(len(self.importer.phrases), 23)

    def testPhrase(self):
        # Locate brb phrase
        brbPhrase = None
        for phrase in self.importer.phrases:
            if phrase.abbreviation == "brb":
                brbPhrase = phrase
                break

        self.assertIsNotNone(brbPhrase)
        self.assertEqual(brbPhrase.phrase, "be right back")
        self.assertEqual(brbPhrase.description, "be right back")
        self.assertEqual(brbPhrase.mode, PhraseMode.ABBREVIATION)
        self.assertEqual(brbPhrase.immediate, True)
| gpl-3.0 |
eayunstack/rally | rally/common/objects/verification.py | 5 | 3756 | # Copyright 2014: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from rally.common import db
from rally import consts
from rally import exceptions
# Legacy verification results stored upper-case statuses; map them onto
# the lower-case vocabulary used by the current result format.  Statuses
# not listed here are handled by lower-casing (see get_results below).
_MAP_OLD_TO_NEW_STATUSES = {
    "OK": "success",
    "FAIL": "fail",
    "SKIP": "skip"
}
class Verification(object):
    """Thin wrapper around a verification record in the Rally DB."""

    def __init__(self, db_object=None, deployment_uuid=None):
        # Either adopt an existing DB row or create a fresh verification
        # bound to the given deployment.
        self.db_object = db_object or db.verification_create(deployment_uuid)

    def __getattr__(self, name):
        # Unknown attributes fall through to the underlying DB row.
        return self.db_object[name]

    def __getitem__(self, field):
        return self.db_object[field]

    @classmethod
    def get(cls, uuid):
        """Load and wrap the verification identified by *uuid*."""
        return cls(db.verification_get(uuid))

    @classmethod
    def list(cls, status=None):
        """Return raw DB rows, optionally filtered by *status*."""
        return db.verification_list(status)

    def delete(self):
        db.verification_delete(self["uuid"])

    def _update(self, **values):
        # Persist *values* and keep the cached row in sync with the DB.
        self.db_object = db.verification_update(self.uuid, values)

    def update_status(self, status):
        self._update(status=status)

    def start_verifying(self, set_name):
        self._update(status=consts.TaskStatus.VERIFYING, set_name=set_name)

    def set_failed(self):
        self.update_status(consts.TaskStatus.FAILED)

    def set_running(self):
        self.update_status(consts.TaskStatus.RUNNING)

    def finish_verification(self, total, test_cases):
        """Mark the verification finished and store its detailed results."""
        # Expected failures are still failures, so we merge them into the
        # main failure count (see the db model for Verification for details).
        combined_failures = total["failures"] + total["expected_failures"]
        self._update(status=consts.TaskStatus.FINISHED,
                     tests=total["tests"],
                     failures=combined_failures,
                     time=total["time"])
        # Store the per-test data next to the summary record.
        payload = dict(total, test_cases=test_cases)
        db.verification_result_create(self.uuid, payload)

    def get_results(self):
        """Return stored result data, normalized to the new status names.

        Returns None when no results exist for this verification.
        """
        try:
            results = db.verification_result_get(self.uuid)["data"]
        except exceptions.NotFoundException:
            return None
        if "errors" in results:
            # NOTE(andreykurilin): there is no "error" status in verification
            # and this key presents only in old format, so it can be used as
            # an identifier for old format.
            for case in results["test_cases"].values():
                legacy_status = case["status"]
                case["status"] = _MAP_OLD_TO_NEW_STATUSES.get(
                    legacy_status, legacy_status.lower())
                if "failure" in case:
                    case["traceback"] = case["failure"]["log"]
                    case.pop("failure")
            results["unexpected_success"] = 0
            results["expected_failures"] = 0
        return results
| apache-2.0 |
Whatsit2yaa/vast-tundra-84597 | public/bower_components/bootstrap/test-infra/s3_cache.py | 1700 | 3523 | #!/usr/bin/env python2.7
from __future__ import absolute_import, unicode_literals, print_function, division
from sys import argv
from os import environ, stat, remove as _delete_file
from os.path import isfile, dirname, basename, abspath
from hashlib import sha256
from subprocess import check_call as run
from boto.s3.connection import S3Connection
from boto.s3.key import Key
from boto.exception import S3ResponseError
# Marker file: its presence tells a later `upload` run that the previous
# download missed, so the cache must be (re)uploaded.
NEED_TO_UPLOAD_MARKER = '.need-to-upload'
# NOTE(review): despite the name this is mebibytes (1024 * 1024); it is
# only used for the human-readable size report in _tarball_size.
BYTES_PER_MB = 1024 * 1024

# Fail fast with a clear message when the bucket is not configured.
try:
    BUCKET_NAME = environ['TWBS_S3_BUCKET']
except KeyError:
    raise SystemExit("TWBS_S3_BUCKET environment variable not set!")
def _sha256_of_file(filename):
hasher = sha256()
with open(filename, 'rb') as input_file:
hasher.update(input_file.read())
file_hash = hasher.hexdigest()
print('sha256({}) = {}'.format(filename, file_hash))
return file_hash
def _delete_file_quietly(filename):
try:
_delete_file(filename)
except (OSError, IOError):
pass
def _tarball_size(directory):
    """Return the size of *directory*'s tarball as a human-readable string."""
    # BYTES_PER_MB is 1024 * 1024, so this value is mebibytes -- the
    # original local was misleadingly named 'kib'.
    mib = stat(_tarball_filename_for(directory)).st_size // BYTES_PER_MB
    return "{} MiB".format(mib)
def _tarball_filename_for(directory):
return abspath('./{}.tar.gz'.format(basename(directory)))
def _create_tarball(directory):
    """Pack *directory* into its cache tarball (see _tarball_filename_for)."""
    print("Creating tarball of {}...".format(directory))
    tar_args = ['tar', '-czf', _tarball_filename_for(directory),
                '-C', dirname(directory), basename(directory)]
    run(tar_args)
def _extract_tarball(directory):
    """Unpack *directory*'s cache tarball back into its parent directory."""
    print("Extracting tarball of {}...".format(directory))
    tar_args = ['tar', '-xzf', _tarball_filename_for(directory),
                '-C', dirname(directory)]
    run(tar_args)
def download(directory):
    """Fetch the cached tarball for *directory* from S3 and unpack it.

    On an S3 failure a marker file is left behind so the next `upload`
    run knows it must refresh the cache, and the process exits.
    (`friendly_name` and `key` are module globals set in the __main__ block.)
    """
    _delete_file_quietly(NEED_TO_UPLOAD_MARKER)
    tarball = _tarball_filename_for(directory)
    try:
        print("Downloading {} tarball from S3...".format(friendly_name))
        key.get_contents_to_filename(tarball)
    except S3ResponseError as err:
        # Remember that the cache needs (re)uploading, then bail out.
        open(NEED_TO_UPLOAD_MARKER, 'a').close()
        print(err)
        raise SystemExit("Cached {} download failed!".format(friendly_name))
    print("Downloaded {}.".format(_tarball_size(directory)))
    _extract_tarball(directory)
    print("{} successfully installed from cache.".format(friendly_name))
def upload(directory):
    """Tar up *directory*, push the tarball to S3, then clear the
    need-to-upload marker so later runs skip redundant uploads.

    `friendly_name` and `key` are module globals set in the __main__ block.
    """
    _create_tarball(directory)
    print("Uploading {} tarball to S3... ({})".format(friendly_name, _tarball_size(directory)))
    key.set_contents_from_filename(_tarball_filename_for(directory))
    print("{} cache successfully updated.".format(friendly_name))
    _delete_file_quietly(NEED_TO_UPLOAD_MARKER)
if __name__ == '__main__':
    # Uses environment variables:
    #    AWS_ACCESS_KEY_ID     -- AWS Access Key ID
    #    AWS_SECRET_ACCESS_KEY -- AWS Secret Access Key
    argv.pop(0)  # drop the script name
    if len(argv) != 4:
        raise SystemExit("USAGE: s3_cache.py <download | upload> <friendly name> <dependencies file> <directory>")
    mode, friendly_name, dependencies_file, directory = argv

    conn = S3Connection()
    bucket = conn.lookup(BUCKET_NAME, validate=False)
    if bucket is None:
        raise SystemExit("Could not access bucket!")

    # The S3 key is the hash of the dependencies file, so any change to
    # the declared dependencies selects a different cache entry.
    dependencies_file_hash = _sha256_of_file(dependencies_file)

    key = Key(bucket, dependencies_file_hash)
    key.storage_class = 'REDUCED_REDUNDANCY'

    if mode == 'download':
        download(directory)
    elif mode == 'upload':
        # Only upload when a previous download left the marker behind.
        if isfile(NEED_TO_UPLOAD_MARKER):  # FIXME
            upload(directory)
        else:
            print("No need to upload anything.")
    else:
        raise SystemExit("Unrecognized mode {!r}".format(mode))
| gpl-3.0 |
swartn/sam-vs-jet-paper | data_retrieval/get_ccmp_data.py | 1 | 1089 | import urllib
import os
def get_ccmp_data(destination='./'):
    """Download monthly CCMP L3.5a wind files (Jul 1987 - Dec 2010),
    merge them in time with CDO, and move the result to *destination*.

    Requires the ``cdo`` command-line tool and FTP access to PO.DAAC.
    NOTE(review): uses the Python 2 ``urllib.urlretrieve`` API, matching
    the rest of this script.
    """
    base_url = 'ftp://podaac-ftp.jpl.nasa.gov/allData/ccmp/L3.5a/monthly/flk/'
    for year in range(1987, 2011):
        url = base_url + str(year) + '/'
        for month in range(1, 13):
            # CCMP coverage starts in July 1987; skip earlier months.
            if year == 1987 and month < 7:
                continue
            mnth = '{:02d}'.format(month)
            filen = 'month_' + str(year) + mnth + '01_v11l35flk.nc.gz'
            urllib.urlretrieve(url + filen, filen)
            # The original built a 'gunzip ...' command string but never
            # ran it, so the *.nc files the glob below looks for were
            # never produced; actually decompress here.
            subprocess.call(['gunzip', filen])
    # time join the data (sorted for a deterministic, chronological merge)
    files = sorted(glob.glob('month_*_v11l35flk.nc'))
    # Pass each file as its own argv entry; a single space-joined string
    # would be handed to cdo as one (nonexistent) filename.
    # NOTE(review): the output name says 201112 but the data run through
    # 2010 -- kept for compatibility with downstream consumers.
    subprocess.Popen(['cdo', 'mergetime'] + files +
                     ['CCMP_198701-201112.nc']).wait()
    # cleanup
    for f in files:
        os.remove(f)
    # move to destination
    # NOTE(review): mv_to_dest is not imported anywhere visible -- confirm
    # the module name and add the import, otherwise this raises NameError.
    mv_to_dest.mv_to_dest(destination, 'CCMP_198701-201112.nc')
# Script entry point: fetch and merge the CCMP winds into ../data/.
if __name__ == '__main__':
    get_ccmp_data('../data/')
| gpl-2.0 |
gsehub/edx-platform | lms/djangoapps/instructor/views/coupons.py | 17 | 6549 | """
E-commerce Tab Instructor Dashboard Coupons Operations views
"""
import datetime
import logging
import pytz
from django.contrib.auth.decorators import login_required
from django.core.exceptions import ObjectDoesNotExist
from django.utils.translation import ugettext as _
from django.views.decorators.http import require_POST
from opaque_keys.edx.locator import CourseKey
from six import text_type
from shoppingcart.models import Coupon, CourseRegistrationCode
from util.json_request import JsonResponse
log = logging.getLogger(__name__)
@require_POST
@login_required
def remove_coupon(request, course_id):  # pylint: disable=unused-argument
    """
    Deactivate the coupon identified by the POSTed ``id``.

    The row is kept for bookkeeping; only ``is_active`` is flipped off.
    """
    def _bad_request(message):
        # Every failure path replies with the same 400 JSON shape.
        return JsonResponse({'message': message}, status=400)

    coupon_id = request.POST.get('id', None)
    if not coupon_id:
        return _bad_request(_('coupon id is None'))
    try:
        coupon = Coupon.objects.get(id=coupon_id)
    except ObjectDoesNotExist:
        return _bad_request(
            _('coupon with the coupon id ({coupon_id}) DoesNotExist').format(coupon_id=coupon_id))
    if not coupon.is_active:
        return _bad_request(
            _('coupon with the coupon id ({coupon_id}) is already inactive').format(coupon_id=coupon_id))
    coupon.is_active = False
    coupon.save()
    return JsonResponse({
        'message': _('coupon with the coupon id ({coupon_id}) updated successfully').format(coupon_id=coupon_id)
    })  # status code 200: OK by default
@require_POST
@login_required
def add_coupon(request, course_id):
    """
    Create a coupon for *course_id* from the POSTed fields.

    Validates that the code is not already an active coupon for the
    course nor a registration code, that the discount is an integer in
    [0, 100], and that any expiration date parses as month/day/year.
    All failures answer with a 400 JSON payload.
    """
    code = request.POST.get('code')
    # check if the code is already in the Coupons Table and active
    try:
        course_id = CourseKey.from_string(course_id)
        coupon = Coupon.objects.get(is_active=True, code=code, course_id=course_id)
    except Coupon.DoesNotExist:
        # NOTE: the entire creation path lives inside this except branch --
        # reaching it means no active coupon with this code exists yet.
        # check if the coupon code is in the CourseRegistrationCode Table
        course_registration_code = CourseRegistrationCode.objects.filter(code=code)
        if course_registration_code:
            return JsonResponse(
                {'message': _("The code ({code}) that you have tried to define is already in use as a registration code").format(code=code)},
                status=400)  # status code 400: Bad Request
        description = request.POST.get('description')
        # Re-read the raw course id string from the POST body; the outer
        # name was rebound to a CourseKey above.
        course_id = request.POST.get('course_id')
        try:
            discount = int(request.POST.get('discount'))
        except ValueError:
            return JsonResponse({
                'message': _("Please Enter the Integer Value for Coupon Discount")
            }, status=400)  # status code 400: Bad Request
        if discount > 100 or discount < 0:
            return JsonResponse({
                'message': _("Please Enter the Coupon Discount Value Less than or Equal to 100")
            }, status=400)  # status code 400: Bad Request
        expiration_date = None
        if request.POST.get('expiration_date'):
            expiration_date = request.POST.get('expiration_date')
            try:
                # +1 day: presumably to make the chosen expiry day inclusive
                # -- TODO confirm against the checkout logic.
                expiration_date = datetime.datetime.strptime(expiration_date, "%m/%d/%Y").replace(tzinfo=pytz.UTC) + datetime.timedelta(days=1)
            except ValueError:
                return JsonResponse({
                    'message': _("Please enter the date in this format i-e month/day/year")
                }, status=400)  # status code 400: Bad Request
        coupon = Coupon(
            code=code, description=description,
            course_id=course_id,
            percentage_discount=discount,
            created_by_id=request.user.id,
            expiration_date=expiration_date
        )
        coupon.save()
        return JsonResponse(
            {'message': _("coupon with the coupon code ({code}) added successfully").format(code=code)}
        )

    # Only reachable when the lookup above succeeded, i.e. an active
    # coupon with this code already exists for the course.
    if coupon:
        return JsonResponse(
            {'message': _("coupon with the coupon code ({code}) already exists for this course").format(code=code)},
            status=400)  # status code 400: Bad Request
@require_POST
@login_required
def update_coupon(request, course_id):  # pylint: disable=unused-argument
    """
    Overwrite the description of an existing coupon.

    Expects ``coupon_id`` and ``description`` in the POST body; every
    failure path answers with a 400 JSON payload.
    """
    coupon_id = request.POST.get('coupon_id', None)
    if not coupon_id:
        return JsonResponse({'message': _("coupon id not found")},
                            status=400)  # status code 400: Bad Request
    try:
        coupon = Coupon.objects.get(pk=coupon_id)
    except ObjectDoesNotExist:
        missing = _("coupon with the coupon id ({coupon_id}) DoesNotExist").format(coupon_id=coupon_id)
        return JsonResponse({'message': missing},
                            status=400)  # status code 400: Bad Request
    coupon.description = request.POST.get('description')
    coupon.save()
    updated = _("coupon with the coupon id ({coupon_id}) updated Successfully").format(coupon_id=coupon_id)
    return JsonResponse({'message': updated})
@require_POST
@login_required
def get_coupon_info(request, course_id):  # pylint: disable=unused-argument
    """
    Return the fields of an active coupon for the edit pop-up form.
    """
    coupon_id = request.POST.get('id', None)
    if not coupon_id:
        return JsonResponse({'message': _("coupon id not found")},
                            status=400)  # status code 400: Bad Request
    try:
        coupon = Coupon.objects.get(id=coupon_id)
    except ObjectDoesNotExist:
        missing = _("coupon with the coupon id ({coupon_id}) DoesNotExist").format(coupon_id=coupon_id)
        return JsonResponse({'message': missing},
                            status=400)  # status code 400: Bad Request
    if not coupon.is_active:
        inactive = _("coupon with the coupon id ({coupon_id}) is already inactive").format(coupon_id=coupon_id)
        return JsonResponse({'message': inactive},
                            status=400)  # status code 400: Bad Request
    # Flatten the coupon into the JSON shape the pop-up form expects.
    return JsonResponse({
        'coupon_code': coupon.code,
        'coupon_description': coupon.description,
        'coupon_course_id': text_type(coupon.course_id),
        'coupon_discount': coupon.percentage_discount,
        'expiry_date': coupon.display_expiry_date,
        'message': _('coupon with the coupon id ({coupon_id}) updated successfully').format(coupon_id=coupon_id)
    })  # status code 200: OK by default
| agpl-3.0 |
rainest/dance-partner-matching | networkx/linalg/tests/test_spectrum.py | 2 | 2673 | from nose import SkipTest
import networkx as nx
from networkx.generators.degree_seq import havel_hakimi_graph
class TestSpectrum(object):
    """Tests for graph matrix representations and their eigenvalue spectra."""

    @classmethod
    def setupClass(cls):
        # nose-style class fixture (note the spelling: unittest itself would
        # expect setUpClass -- this relies on the nose runner).  Skips the
        # whole class when NumPy is not installed.
        global numpy
        global assert_equal
        global assert_almost_equal
        try:
            import numpy
            from numpy.testing import assert_equal, assert_almost_equal
        except ImportError:
            raise SkipTest('NumPy not available.')

    def setUp(self):
        # Fixture graphs: a Havel-Hakimi realization of degree sequence
        # [3,2,2,1,0] (self.G) and a 3-node path graph (self.P), plus the
        # expected adjacency matrix of self.G.
        deg = [3, 2, 2, 1, 0]
        self.G = havel_hakimi_graph(deg)
        self.P = nx.path_graph(3)
        self.A = numpy.array([[0, 1, 1, 1, 0],
                              [1, 0, 1, 0, 0],
                              [1, 1, 0, 0, 0],
                              [1, 0, 0, 0, 0],
                              [0, 0, 0, 0, 0]])

    def test_adjacency_matrix(self):
        "Conversion to adjacency matrix"
        assert_equal(nx.adj_matrix(self.G), self.A)

    def test_laplacian(self):
        "Graph Laplacian"
        NL = numpy.array([[3, -1, -1, -1, 0],
                          [-1, 2, -1, 0, 0],
                          [-1, -1, 2, 0, 0],
                          [-1, 0, 0, 1, 0],
                          [0, 0, 0, 0, 0]])
        assert_equal(nx.laplacian(self.G), NL)

    def test_generalized_laplacian(self):
        "Generalized Graph Laplacian"
        # Entries are only compared to 3 decimal places.
        GL = numpy.array([[1.00, -0.408, -0.408, -0.577, 0.00],
                          [-0.408, 1.00, -0.50, 0.00, 0.00],
                          [-0.408, -0.50, 1.00, 0.00, 0.00],
                          [-0.577, 0.00, 0.00, 1.00, 0.00],
                          [0.00, 0.00, 0.00, 0.00, 0.00]])
        assert_almost_equal(nx.generalized_laplacian(self.G), GL, decimal=3)

    def test_normalized_laplacian(self):
        "Normalized Graph Laplacian"
        # Same expected matrix as the generalized form for this fixture.
        GL = numpy.array([[1.00, -0.408, -0.408, -0.577, 0.00],
                          [-0.408, 1.00, -0.50, 0.00, 0.00],
                          [-0.408, -0.50, 1.00, 0.00, 0.00],
                          [-0.577, 0.00, 0.00, 1.00, 0.00],
                          [0.00, 0.00, 0.00, 0.00, 0.00]])
        assert_almost_equal(nx.normalized_laplacian(self.G), GL, decimal=3)

    def test_laplacian_spectrum(self):
        "Laplacian eigenvalues"
        evals = numpy.array([0, 0, 1, 3, 4])
        e = sorted(nx.laplacian_spectrum(self.G))
        assert_almost_equal(e, evals)

    def test_adjacency_spectrum(self):
        "Adjacency eigenvalues"
        evals = numpy.array([-numpy.sqrt(2), 0, numpy.sqrt(2)])
        e = sorted(nx.adjacency_spectrum(self.P))
        assert_almost_equal(e, evals)
| bsd-2-clause |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.