repo_name stringlengths 6 100 | path stringlengths 4 294 | copies stringlengths 1 5 | size stringlengths 4 6 | content stringlengths 606 896k | license stringclasses 15
values |
|---|---|---|---|---|---|
ge0rgi/cinder | cinder/tests/unit/volume/drivers/dell_emc/scaleio/test_delete_snapshot.py | 1 | 3977 | # Copyright (c) 2013 - 2015 EMC Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from six.moves import urllib
from cinder import context
from cinder import exception
from cinder.tests.unit import fake_constants as fake
from cinder.tests.unit.fake_snapshot import fake_snapshot_obj
from cinder.tests.unit.volume.drivers.dell_emc import scaleio
from cinder.tests.unit.volume.drivers.dell_emc.scaleio import mocks
class TestDeleteSnapShot(scaleio.TestScaleIODriver):
    """Test cases for ``ScaleIODriver.delete_snapshot()``"""

    def setUp(self):
        """Setup a test case environment.

        Creates fake volume and snapshot objects and sets up the required
        API responses.
        """
        super(TestDeleteSnapShot, self).setUp()
        ctx = context.RequestContext('fake', 'fake', auth_token=True)
        self.snapshot = fake_snapshot_obj(
            ctx, **{'provider_id': fake.SNAPSHOT_ID})
        # The driver URL-quotes the base64-encoded snapshot id twice before
        # embedding it in REST paths, so the mocked endpoint keys must use
        # the same double encoding.
        self.snapshot_name_2x_enc = urllib.parse.quote(
            urllib.parse.quote(
                self.driver._id_to_base64(self.snapshot.id)
            )
        )
        # Per-mode canned REST responses consumed by the base test class.
        self.HTTPS_MOCK_RESPONSES = {
            self.RESPONSE_MODE.Valid: {
                'types/Volume/instances/getByName::' +
                self.snapshot_name_2x_enc: self.snapshot.id,
                'instances/Volume::{}/action/removeMappedSdc'.format(
                    self.snapshot.provider_id
                ): self.snapshot.id,
                'instances/Volume::{}/action/removeVolume'.format(
                    self.snapshot.provider_id
                ): self.snapshot.id,
            },
            self.RESPONSE_MODE.BadStatus: {
                'types/Volume/instances/getByName::' +
                self.snapshot_name_2x_enc: self.BAD_STATUS_RESPONSE,
                'instances/Volume::{}/action/removeVolume'.format(
                    self.snapshot.provider_id
                ): self.BAD_STATUS_RESPONSE,
            },
            self.RESPONSE_MODE.Invalid: {
                'types/Volume/instances/getByName::' +
                self.snapshot_name_2x_enc: mocks.MockHTTPSResponse(
                    {
                        'errorCode': self.OLD_VOLUME_NOT_FOUND_ERROR,
                        'message': 'Test Delete Invalid Snapshot',
                    }, 400
                ),
                'instances/Volume::{}/action/removeVolume'.format(
                    self.snapshot.provider_id): mocks.MockHTTPSResponse(
                    {
                        'errorCode': self.OLD_VOLUME_NOT_FOUND_ERROR,
                        'message': 'Test Delete Invalid Snapshot',
                    }, 400,
                )
            },
        }

    def test_bad_login(self):
        """A failing REST status must surface as VolumeBackendAPIException."""
        self.set_https_response_mode(self.RESPONSE_MODE.BadStatus)
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.delete_snapshot, self.snapshot)

    def test_delete_invalid_snapshot(self):
        """Deleting an unknown snapshot (VOLUME_NOT_FOUND) must not raise."""
        # Bug fix: this test previously set RESPONSE_MODE.Valid, which made
        # it a duplicate of the happy path and left the Invalid mock
        # responses defined in setUp() completely unused.
        self.set_https_response_mode(self.RESPONSE_MODE.Invalid)
        self.driver.delete_snapshot(self.snapshot)

    def test_delete_snapshot(self):
        """Setting the unmap volume before delete flag for tests """
        self.driver.configuration.set_override(
            'sio_unmap_volume_before_deletion',
            override=True)
        self.set_https_response_mode(self.RESPONSE_MODE.Valid)
        self.driver.delete_snapshot(self.snapshot)
| apache-2.0 |
wildjan/Flask | Work/TriviaMVA/TriviaMVA/env/Lib/site-packages/jinja2/defaults.py | 659 | 1068 | # -*- coding: utf-8 -*-
"""
jinja2.defaults
~~~~~~~~~~~~~~~
Jinja default filters and tags.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
from jinja2._compat import range_type
from jinja2.utils import generate_lorem_ipsum, Cycler, Joiner
# defaults for the parser / lexer
# Delimiters used by the lexer to recognize template constructs; each of
# these can be overridden per-Environment.
BLOCK_START_STRING = '{%'
BLOCK_END_STRING = '%}'
VARIABLE_START_STRING = '{{'
VARIABLE_END_STRING = '}}'
COMMENT_START_STRING = '{#'
COMMENT_END_STRING = '#}'
# Line-based statement/comment syntax is disabled unless configured.
LINE_STATEMENT_PREFIX = None
LINE_COMMENT_PREFIX = None
# Whitespace-control and newline-normalization defaults.
TRIM_BLOCKS = False
LSTRIP_BLOCKS = False
NEWLINE_SEQUENCE = '\n'
KEEP_TRAILING_NEWLINE = False

# default filters, tests and namespace
# NOTE(review): these imports are placed here, after the constants, rather
# than at the top of the module -- presumably to avoid an import cycle;
# confirm before moving them.
from jinja2.filters import FILTERS as DEFAULT_FILTERS
from jinja2.tests import TESTS as DEFAULT_TESTS

# Globals available in every template by default.
DEFAULT_NAMESPACE = {
    'range': range_type,
    'dict': lambda **kw: kw,
    'lipsum': generate_lorem_ipsum,
    'cycler': Cycler,
    'joiner': Joiner
}

# export all constants
# (every UPPER_CASE name defined above, collected automatically)
__all__ = tuple(x for x in locals().keys() if x.isupper())
| apache-2.0 |
rafafigueroa/compass-gait | hasimpy.py | 1 | 9216 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@author: Rafael Figueroa
"""
# dp: module-level flag; not referenced in the code visible here --
# presumably a leftover debug/plot toggle, verify before removing.
dp = True

import numpy as np

# Global debug switch; H.sim() overwrites this from its debug_flag argument.
DEBUG = False
class H:
    """Hybrid Automata Model"""

    def __init__(self, Q, Init_X, Init_qID, state_names = None):
        # Q is the list of discrete modes (Q instances); a mode's position
        # in this list is its qID.
        self.q = Q  # list of q
        self.Init_X = Init_X        # initial continuous state vector
        self.Init_qID = Init_qID    # index of the initial discrete mode
        self.states = state_names   # state labels (used for plot axes)
        self.Ts = None              # integration step; set by sim()

    def mode_tracker_guard_check(self, qID, X):
        """Return the mode id reached from mode qID at state X.

        If some guard of mode qID fires at X, returns the destination mode
        of the corresponding out-edge; otherwise returns qID unchanged.
        """
        # Called by mode_tracker to set the mode
        q = self.q[qID]
        g = q.E.G    # guard list
        oe = q.E.OE  # out edges list
        [g_activated, oID_activated_g] = guard_check(g, X)

        # return new qID when a guard is activated
        if g_activated:
            qID_activated_g = oe[oID_activated_g]
        else:
            qID_activated_g = qID

        return qID_activated_g

    def sim(self, qID, X, u, t0, tlim,
            haws_flag=False,
            debug_flag=False, Ts=1e-4):
        """Simulate the automaton from mode qID / state X until tlim.

        Alternates forward-Euler integration of the current mode's dynamics
        (odeeul) with discrete jumps: when a guard fires, the reset map of
        the activated out-edge produces the next continuous state and the
        edge's destination becomes the current mode.  Returns a SimResult.

        When haws_flag is True the caller-supplied input ``u`` is forced;
        otherwise each mode's own ``q.u`` is used.
        """
        self.Ts = Ts
        # t0 refers to the initial time of
        # each continuous dynamic time interval
        sr = SimResult(self.states)  # Initialize class
        q = self.q[qID]              # get a ref to current mode

        global DEBUG
        DEBUG = debug_flag  # change global DEBUG variable

        while t0 < tlim:
            # get values from current q object
            f = q.f  # continuous dynamics func
            # when simulating is requested by haws
            # with a forced input
            if not haws_flag:
                u = q.u
            g = q.E.G          # guard list
            r = q.E.R          # reset map list
            oe = q.E.OE        # out edges list
            dom = q.Dom        # discrete mode domain
            avoid = q.Avoid    # discrete mode avoid

            if DEBUG:
                print '\n*** New Discrete State *** \n'
                print 'f=',f,'\ng=',g,'\nr=',r,'\noe=',oe,'\ndom=',dom
                print 'Avoid=',avoid
                print 'qID=',q.qID,'\nX=',X,'\nu=',u
                print '\n*** domain check *** \n'

            # Domain violation is only reported, not fatal (see commented
            # raise below) -- simulation continues regardless.
            if not dom(X):
                errorString = 'Outside domain!'
                print errorString
                #raise NameError(errorString)

            if DEBUG:
                print '\n*** continuous dynamics *** \n'

            # simulate continuous dynamics
            T, Y, oID_activated_g, \
                avoid_activated, tlim_activated = \
                odeeul(f, u, g, avoid, X, t0, tlim, Ts)

            # store this time interval
            # in the simulation results
            # NOTE(review): the whole Q object is passed although
            # newTimeInterval's parameter is named qID -- SimResult.mode
            # therefore holds Q objects, not integer ids; verify intent.
            sr.newTimeInterval(T, Y, q)

            # when inside the avoid set, simulation stops
            # and the information is stored in the simulation results
            if avoid_activated:
                sr.avoid_activated = True
                sr.timeToAvoid = T[-1]
                break  # while loop

            if tlim_activated:
                break  # while loop

            # *** after guard is activated ***
            # prepare data for the next loop
            t0 = T[-1]  # reset initial time to the end of
                        # last time interval
            last_state = np.array(Y[-1])

            if DEBUG:
                print '\n *** reset map *** \n'
                print 'last state =',last_state

            X = r[oID_activated_g](last_state)  # reset map
            qID_activated_g = oe[oID_activated_g]

            # guard activated print out
            if DEBUG:
                print 'sim -- guard activated'
                print 'sim -- from q =', q.qID, 'to q =', qID_activated_g
                print 'sim -- State =', X

            # get new q
            q = self.q[qID_activated_g]

        return sr
class Q:
    """One discrete mode of a hybrid automaton.

    Bundles the mode's continuous dynamics ``f``, its input ``u``, its
    out-edge set ``E``, and the domain / avoid predicates.  By default the
    domain accepts every state and the avoid set rejects every state.
    """

    def __init__(self, qID, f, u, E,
                 Dom=lambda X: True,
                 Avoid=lambda X: False,
                 TC=True):
        """Store the mode configuration verbatim on the instance."""
        # Required configuration.
        self.qID, self.f, self.u, self.E = qID, f, u, E
        # Optional predicates and time-continuity flag.
        self.Dom = Dom
        self.Avoid = Avoid
        self.TC = TC
class E:
    """Out-edge set of a discrete mode.

    Parallel lists indexed by out-edge id: ``OE`` holds destination mode
    ids, ``G`` the guard predicates, and ``R`` the reset maps.
    """

    def __init__(self, OE, G, R):
        self.OE, self.G, self.R = OE, G, R
def guard_check(g, X):
    """Evaluate every guard in ``g`` at state ``X``.

    Returns ``[g_activated, oID]`` where ``g_activated`` is True if any
    guard evaluated truthy and ``oID`` is the index of the *first* such
    guard (the out-edge that fires), or None if none fired.
    """
    # Evaluate all guards first, mirroring the two-pass structure of the
    # original implementation.
    results = [guard_fn(X) for guard_fn in g]

    # Return on the first truthy result; its index identifies the out-edge.
    for idx, fired in enumerate(results):
        if fired:
            return [True, idx]

    return [False, None]
def avoid_check(avoid, X):
    """'avoid' returns True when inside the avoid set.

    Thin wrapper: simply evaluates the predicate at state ``X`` and hands
    back whatever it returns.
    """
    inside = avoid(X)
    return inside
def odeeul(f, u, g, avoid, X0, t0, tlim, Ts):
    """Forward-Euler integration of X' = f(X, u) from X0 at t0.

    Steps with fixed step Ts until one of three termination conditions
    fires: a guard in ``g`` activates, the ``avoid`` predicate becomes
    true, or time reaches ``tlim``.  Returns
    [T, Y, oID_activated_g, avoid_activated, tlim_activated] where T is
    the time vector and Y the stacked state trajectory.
    """
    X = np.array(X0)
    Y = np.array(X0)
    T = np.array([t0])

    if DEBUG:
        print 'State=',X

    # Check all termination conditions on the initial state before taking
    # any integration step (a guard may already be active -> instant jump).
    g_activated, oID_activated_g = guard_check(g, X)
    avoid_activated = avoid_check(avoid, X)
    tlim_activated = (t0 >= tlim)

    if g_activated:
        print 'instant jump'

    if DEBUG:
        print 'First checks:'
        print '\tg_activated:', g_activated
        print '\tavoid_activated', avoid_activated
        print '\ttlim_activated', tlim_activated

    while not (g_activated or avoid_activated or tlim_activated):
        # Evolve continuously until a
        # termination condition is activated
        X = Ts*f(X, u) + X              # explicit Euler step
        Y = np.vstack((Y, X))           # append new state row
        tnew = np.array([T[-1] + Ts])
        T = np.concatenate([T, tnew])

        # termination checks
        g_activated, oID_activated_g = guard_check(g, X)
        avoid_activated = avoid_check(avoid, X)
        tlim_activated = (tnew >= tlim)

        if DEBUG:
            print 'Running checks:'
            print '\tg_activated:',g_activated
            print '\tavoid_activated',avoid_activated
            print '\ttlim_activated',tlim_activated

    return [T, Y, oID_activated_g, avoid_activated,
            tlim_activated]
class SimResult:
    """Output from one simulation run"""

    def __init__(self, states = None):
        self.I = []                   # list of TimeInterval objects
        self.j = 0                    # number of intervals stored so far
        self.timesteps = 0            # total samples across all intervals
        self.timeToAvoid = None       # time at which the avoid set was hit
        self.avoid_activated = False  # True if the run ended in the avoid set
        self.path = None              # stacked state trajectory (all intervals)
        self.time = None              # concatenated time vector
        self.mode = None              # per-interval mode record
        self.states = states          # state labels, wrapped for LaTeX below

        # Wrap each label in $...$ so matplotlib renders it via LaTeX.
        # NOTE: 'string-escape' is a Python 2-only codec; this block does
        # not run under Python 3.
        for yi in range(0, len(states)):
            self.states[yi] = "$" + self.states[yi] + "$"
            self.states[yi] = self.states[yi].encode('string-escape')
            self.states[yi] = self.states[yi].replace("\\\\", "\\")

    def newTimeInterval(self, T, Y, qID):
        """Simulation is broken into continuous chunks
        Here the chunks are put together"""
        if self.j == 0:
            # First interval
            self.path = Y
            self.time = T
            self.mode = np.array([qID])
        else:
            # Subsequent intervals are appended to the running arrays.
            self.path = np.vstack((self.path, Y))
            self.time = np.concatenate((self.time, T))
            self.mode = np.concatenate((self.mode, np.array([qID])))

        self.j = self.j + 1
        self.timesteps = self.timesteps + np.size(T)
        self.I.append(TimeInterval(T, Y, self.j))

    def simPlot(self):
        """Plot every state trajectory against time, one subplot per state."""
        Y_plot = self.path
        T_plot = self.time

        import matplotlib.pyplot as plt
        # TODO: Configurate at install?
        # user might not want latex
        from matplotlib import rc
        rc('text', usetex=True)

        nstates = np.size(Y_plot,1)
        f, axarr = plt.subplots(nstates, sharex=True)

        # plt.subplots returns a bare Axes (not an array) when nstates == 1,
        # hence the two branches below.
        if nstates>1:
            for yi in range(nstates):
                axarr[yi].plot(T_plot, Y_plot[:,yi])
                if self.states is not None:
                    axarr[nstates-1].set_xlabel(r'time(s)')
                    axarr[yi].set_ylabel(self.states[yi], fontsize = 20)
                    axarr[yi].yaxis.set_label_coords(-0.08, 0.5)
        else:
            axarr.plot(T_plot,Y_plot)
            if self.states is not None:
                axarr.set_xlabel('time(s)')
                axarr.set_ylabel(self.states[0])

        plt.ion()
        plt.show()

    def phasePlot(self, plotStates):
        """Plot one state against another (phase portrait).

        plotStates: pair of state indices [ix, iy] selecting the two
        trajectory columns to plot.
        """
        #TODO:check size of Y,plotStates
        X1_plot = self.path[:,plotStates[0]]
        X2_plot = self.path[:,plotStates[1]]

        import matplotlib.pyplot as plt

        # figx = plt.figure()
        f, axarr = plt.subplots(1, sharex=True)
        axarr.plot(X1_plot,X2_plot)
        if self.states is not None:
            axarr.set_xlabel(self.states[plotStates[0]], fontsize = 20)
            axarr.set_ylabel(self.states[plotStates[1]], fontsize = 20)
            axarr.yaxis.set_label_coords(-0.08, 0.5)

        plt.ion()
        plt.show()
class TimeInterval:
    """One continuous-dynamics segment of a simulation run.

    Holds the time samples ``T``, the state trajectory ``Y`` and the
    1-based interval index ``j`` assigned by SimResult.
    """

    def __init__(self, T, Y, j):
        self.T, self.Y, self.j = T, Y, j
def idem(X):
    """Identity reset map: hand back ``X`` unchanged."""
    return X
def tolEqual(a, b, tol=1e-2):
    """True when ``a`` and ``b`` differ by strictly less than ``tol``."""
    difference = abs(a - b)
    return difference < tol
def last_row(Y):
    """Return the last row of ``Y``, or ``Y`` itself if it has one row.

    ``Y`` is a numpy array (typically the stacked trajectory from odeeul).
    For a multi-row array the final row is returned; a single-row array is
    returned as-is so callers always get something row-shaped.

    Fix: removed the leftover Python-2 debug ``print`` statements
    ('shape'/'rows'), which spammed stdout on every call and blocked use
    of this helper under Python 3.  The return value is unchanged.
    """
    rows = np.shape(Y)[0]
    if rows > 1:
        return Y[-1]
    else:
        return Y
| gpl-2.0 |
samdowd/drumm-farm | drumm_env/lib/python2.7/site-packages/django/test/client.py | 132 | 26745 | from __future__ import unicode_literals
import json
import mimetypes
import os
import re
import sys
from copy import copy
from importlib import import_module
from io import BytesIO
from django.apps import apps
from django.conf import settings
from django.core import urlresolvers
from django.core.handlers.base import BaseHandler
from django.core.handlers.wsgi import ISO_8859_1, UTF_8, WSGIRequest
from django.core.signals import (
got_request_exception, request_finished, request_started,
)
from django.db import close_old_connections
from django.http import HttpRequest, QueryDict, SimpleCookie
from django.template import TemplateDoesNotExist
from django.test import signals
from django.test.utils import ContextList
from django.utils import six
from django.utils.encoding import force_bytes, force_str, uri_to_iri
from django.utils.functional import SimpleLazyObject, curry
from django.utils.http import urlencode
from django.utils.itercompat import is_iterable
from django.utils.six.moves.urllib.parse import urlparse, urlsplit
# Public API of this module.
__all__ = ('Client', 'RedirectCycleError', 'RequestFactory', 'encode_file', 'encode_multipart')

# Fixed multipart boundary; a random one is unnecessary since the test
# client fully controls the payload content.
BOUNDARY = 'BoUnDaRyStRiNg'
MULTIPART_CONTENT = 'multipart/form-data; boundary=%s' % BOUNDARY
# Extracts the charset parameter from a Content-Type header, if present.
CONTENT_TYPE_RE = re.compile('.*; charset=([\w\d-]+);?')
class RedirectCycleError(Exception):
    """
    The test client has been asked to follow a redirect loop.
    """
    def __init__(self, message, last_response):
        # Expose the final response and the chain travelled so far so the
        # caller can inspect where the loop occurred.
        self.redirect_chain = last_response.redirect_chain
        self.last_response = last_response
        super(RedirectCycleError, self).__init__(message)
class FakePayload(object):
    """
    A wrapper around BytesIO that restricts what can be read since data from
    the network can't be seeked and cannot be read outside of its content
    length. This makes sure that views can't do anything under the test client
    that wouldn't work in Real Life.
    """
    def __init__(self, content=None):
        self.__content = BytesIO()
        self.__len = 0
        self.read_started = False
        if content is not None:
            self.write(content)

    def __len__(self):
        return self.__len

    def read(self, num_bytes=None):
        if not self.read_started:
            # Rewind once, lazily, the first time the payload is read.
            self.__content.seek(0)
            self.read_started = True
        if num_bytes is None:
            num_bytes = self.__len or 0
        assert self.__len >= num_bytes, "Cannot read more than the available bytes from the HTTP incoming data."
        content = self.__content.read(num_bytes)
        self.__len -= num_bytes
        return content

    def write(self, content):
        if self.read_started:
            # Bug fix: error message previously read "after he's been read".
            raise ValueError("Unable to write a payload after it's been read")
        content = force_bytes(content)
        self.__content.write(content)
        self.__len += len(content)
def closing_iterator_wrapper(iterable, close):
    # Wraps a streaming response's content iterator so that close() -- and
    # the request_finished signal it fires -- runs only after the content has
    # been fully consumed, mirroring what a real WSGI server would do.  The
    # close_old_connections receiver is detached around close() so the test
    # database connection survives the signal.
    try:
        for item in iterable:
            yield item
    finally:
        request_finished.disconnect(close_old_connections)
        close()  # will fire request_finished
        request_finished.connect(close_old_connections)
class ClientHandler(BaseHandler):
    """
    A HTTP Handler that can be used for testing purposes. Uses the WSGI
    interface to compose requests, but returns the raw HttpResponse object with
    the originating WSGIRequest attached to its ``wsgi_request`` attribute.
    """
    def __init__(self, enforce_csrf_checks=True, *args, **kwargs):
        self.enforce_csrf_checks = enforce_csrf_checks
        super(ClientHandler, self).__init__(*args, **kwargs)

    def __call__(self, environ):
        # Set up middleware if needed. We couldn't do this earlier, because
        # settings weren't available.
        if self._request_middleware is None:
            self.load_middleware()

        # Emulate one request/response cycle; close_old_connections is
        # detached around each signal send so the test DB connection
        # survives between requests.
        request_started.disconnect(close_old_connections)
        request_started.send(sender=self.__class__, environ=environ)
        request_started.connect(close_old_connections)
        request = WSGIRequest(environ)
        # sneaky little hack so that we can easily get round
        # CsrfViewMiddleware. This makes life easier, and is probably
        # required for backwards compatibility with external tests against
        # admin views.
        request._dont_enforce_csrf_checks = not self.enforce_csrf_checks

        # Request goes through middleware.
        response = self.get_response(request)
        # Attach the originating request to the response so that it could be
        # later retrieved.
        response.wsgi_request = request

        # We're emulating a WSGI server; we must call the close method
        # on completion.
        if response.streaming:
            # Defer close() until the streaming content is consumed.
            response.streaming_content = closing_iterator_wrapper(
                response.streaming_content, response.close)
        else:
            request_finished.disconnect(close_old_connections)
            response.close()  # will fire request_finished
            request_finished.connect(close_old_connections)

        return response
def store_rendered_templates(store, signal, sender, template, context, **kwargs):
    """
    Stores templates and contexts that are rendered.

    The context is copied so that it is an accurate representation at the time
    of rendering.
    """
    template_list = store.setdefault('templates', [])
    template_list.append(template)
    context_list = store.setdefault('context', ContextList())
    context_list.append(copy(context))
def encode_multipart(boundary, data):
    """
    Encodes multipart POST data from a dictionary of form values.

    The key will be used as the form data name; the value will be transmitted
    as content. If the value is a file, the contents of the file will be sent
    as an application/octet-stream; otherwise, str(value) will be sent.
    """
    lines = []
    to_bytes = lambda s: force_bytes(s, settings.DEFAULT_CHARSET)

    # Not by any means perfect, but good enough for our purposes.
    is_file = lambda thing: hasattr(thing, "read") and callable(thing.read)

    # Each bit of the multipart form data could be either a form value or a
    # file, or a *list* of form values and/or files. Remember that HTTP field
    # names can be duplicated!
    for (key, value) in data.items():
        if is_file(value):
            lines.extend(encode_file(boundary, key, value))
        elif not isinstance(value, six.string_types) and is_iterable(value):
            for item in value:
                # Mixed lists are allowed: each element is encoded as a file
                # or as a plain form field independently.
                if is_file(item):
                    lines.extend(encode_file(boundary, key, item))
                else:
                    lines.extend(to_bytes(val) for val in [
                        '--%s' % boundary,
                        'Content-Disposition: form-data; name="%s"' % key,
                        '',
                        item
                    ])
        else:
            lines.extend(to_bytes(val) for val in [
                '--%s' % boundary,
                'Content-Disposition: form-data; name="%s"' % key,
                '',
                value
            ])

    # Closing boundary marker plus a trailing CRLF (via the final join).
    lines.extend([
        to_bytes('--%s--' % boundary),
        b'',
    ])
    return b'\r\n'.join(lines)
def encode_file(boundary, key, file):
    # Encode a single file-like object as one multipart section; returns the
    # list of byte lines (boundary, headers, blank line, raw content).
    to_bytes = lambda s: force_bytes(s, settings.DEFAULT_CHARSET)
    filename = os.path.basename(file.name) if hasattr(file, 'name') else ''

    # Prefer an explicit content_type attribute, then guess from the
    # filename, and finally fall back to application/octet-stream.
    if hasattr(file, 'content_type'):
        content_type = file.content_type
    elif filename:
        content_type = mimetypes.guess_type(filename)[0]
    else:
        content_type = None

    if content_type is None:
        content_type = 'application/octet-stream'
    if not filename:
        # A filename is required by the multipart format; reuse the field
        # name when the file object has none.
        filename = key
    return [
        to_bytes('--%s' % boundary),
        to_bytes('Content-Disposition: form-data; name="%s"; filename="%s"'
                 % (key, filename)),
        to_bytes('Content-Type: %s' % content_type),
        b'',
        to_bytes(file.read())
    ]
class RequestFactory(object):
    """
    Class that lets you create mock Request objects for use in testing.

    Usage:

    rf = RequestFactory()
    get_request = rf.get('/hello/')
    post_request = rf.post('/submit/', {'foo': 'bar'})

    Once you have a request object you can pass it to any view function,
    just as if that view had been hooked up using a URLconf.
    """
    def __init__(self, **defaults):
        # ``defaults`` holds extra WSGI environ entries applied to every
        # request built by this factory.
        self.defaults = defaults
        self.cookies = SimpleCookie()
        self.errors = BytesIO()

    def _base_environ(self, **request):
        """
        The base environment for a request.
        """
        # This is a minimal valid WSGI environ dictionary, plus:
        # - HTTP_COOKIE: for cookie support,
        # - REMOTE_ADDR: often useful, see #8551.
        # See http://www.python.org/dev/peps/pep-3333/#environ-variables
        environ = {
            'HTTP_COOKIE': self.cookies.output(header='', sep='; '),
            'PATH_INFO': str('/'),
            'REMOTE_ADDR': str('127.0.0.1'),
            'REQUEST_METHOD': str('GET'),
            'SCRIPT_NAME': str(''),
            'SERVER_NAME': str('testserver'),
            'SERVER_PORT': str('80'),
            'SERVER_PROTOCOL': str('HTTP/1.1'),
            'wsgi.version': (1, 0),
            'wsgi.url_scheme': str('http'),
            'wsgi.input': FakePayload(b''),
            'wsgi.errors': self.errors,
            'wsgi.multiprocess': True,
            'wsgi.multithread': False,
            'wsgi.run_once': False,
        }
        # Factory-wide defaults first, then per-call overrides.
        environ.update(self.defaults)
        environ.update(request)
        return environ

    def request(self, **request):
        "Construct a generic request object."
        return WSGIRequest(self._base_environ(**request))

    def _encode_data(self, data, content_type):
        # NOTE: identity (``is``) check is deliberate -- multipart encoding
        # is only used when the caller passes the module-level
        # MULTIPART_CONTENT constant itself, not an equal string.
        if content_type is MULTIPART_CONTENT:
            return encode_multipart(BOUNDARY, data)
        else:
            # Encode the content so that the byte representation is correct.
            match = CONTENT_TYPE_RE.match(content_type)
            if match:
                charset = match.group(1)
            else:
                charset = settings.DEFAULT_CHARSET
            return force_bytes(data, encoding=charset)

    def _get_path(self, parsed):
        path = force_str(parsed[2])
        # If there are parameters, add them
        if parsed[3]:
            path += str(";") + force_str(parsed[3])
        path = uri_to_iri(path).encode(UTF_8)
        # Under Python 3, non-ASCII values in the WSGI environ are arbitrarily
        # decoded with ISO-8859-1. We replicate this behavior here.
        # Refs comment in `get_bytes_from_wsgi()`.
        return path.decode(ISO_8859_1) if six.PY3 else path

    def get(self, path, data=None, secure=False, **extra):
        "Construct a GET request."
        data = {} if data is None else data
        r = {
            'QUERY_STRING': urlencode(data, doseq=True),
        }
        r.update(extra)
        return self.generic('GET', path, secure=secure, **r)

    def post(self, path, data=None, content_type=MULTIPART_CONTENT,
             secure=False, **extra):
        "Construct a POST request."
        data = {} if data is None else data
        post_data = self._encode_data(data, content_type)

        return self.generic('POST', path, post_data, content_type,
                            secure=secure, **extra)

    def head(self, path, data=None, secure=False, **extra):
        "Construct a HEAD request."
        data = {} if data is None else data
        r = {
            'QUERY_STRING': urlencode(data, doseq=True),
        }
        r.update(extra)
        return self.generic('HEAD', path, secure=secure, **r)

    def trace(self, path, secure=False, **extra):
        "Construct a TRACE request."
        return self.generic('TRACE', path, secure=secure, **extra)

    def options(self, path, data='', content_type='application/octet-stream',
                secure=False, **extra):
        "Construct an OPTIONS request."
        return self.generic('OPTIONS', path, data, content_type,
                            secure=secure, **extra)

    def put(self, path, data='', content_type='application/octet-stream',
            secure=False, **extra):
        "Construct a PUT request."
        return self.generic('PUT', path, data, content_type,
                            secure=secure, **extra)

    def patch(self, path, data='', content_type='application/octet-stream',
              secure=False, **extra):
        "Construct a PATCH request."
        return self.generic('PATCH', path, data, content_type,
                            secure=secure, **extra)

    def delete(self, path, data='', content_type='application/octet-stream',
               secure=False, **extra):
        "Construct a DELETE request."
        return self.generic('DELETE', path, data, content_type,
                            secure=secure, **extra)

    def generic(self, method, path, data='',
                content_type='application/octet-stream', secure=False,
                **extra):
        """Constructs an arbitrary HTTP request."""
        parsed = urlparse(force_str(path))
        data = force_bytes(data, settings.DEFAULT_CHARSET)
        r = {
            'PATH_INFO': self._get_path(parsed),
            'REQUEST_METHOD': str(method),
            'SERVER_PORT': str('443') if secure else str('80'),
            'wsgi.url_scheme': str('https') if secure else str('http'),
        }
        if data:
            # Only set a body (and its headers) when there is actual data.
            r.update({
                'CONTENT_LENGTH': len(data),
                'CONTENT_TYPE': str(content_type),
                'wsgi.input': FakePayload(data),
            })
        r.update(extra)
        # If QUERY_STRING is absent or empty, we want to extract it from the URL.
        if not r.get('QUERY_STRING'):
            query_string = force_bytes(parsed[4])
            # WSGI requires latin-1 encoded strings. See get_path_info().
            if six.PY3:
                query_string = query_string.decode('iso-8859-1')
            r['QUERY_STRING'] = query_string
        return self.request(**r)
class Client(RequestFactory):
"""
A class that can act as a client for testing purposes.
It allows the user to compose GET and POST requests, and
obtain the response that the server gave to those requests.
The server Response objects are annotated with the details
of the contexts and templates that were rendered during the
process of serving the request.
Client objects are stateful - they will retain cookie (and
thus session) details for the lifetime of the Client instance.
This is not intended as a replacement for Twill/Selenium or
the like - it is here to allow testing against the
contexts and templates produced by a view, rather than the
HTML rendered to the end-user.
"""
def __init__(self, enforce_csrf_checks=False, **defaults):
super(Client, self).__init__(**defaults)
self.handler = ClientHandler(enforce_csrf_checks)
self.exc_info = None
def store_exc_info(self, **kwargs):
"""
Stores exceptions when they are generated by a view.
"""
self.exc_info = sys.exc_info()
def _session(self):
"""
Obtains the current session variables.
"""
if apps.is_installed('django.contrib.sessions'):
engine = import_module(settings.SESSION_ENGINE)
cookie = self.cookies.get(settings.SESSION_COOKIE_NAME)
if cookie:
return engine.SessionStore(cookie.value)
else:
s = engine.SessionStore()
s.save()
self.cookies[settings.SESSION_COOKIE_NAME] = s.session_key
return s
return {}
session = property(_session)
def request(self, **request):
"""
The master request method. Composes the environment dictionary
and passes to the handler, returning the result of the handler.
Assumes defaults for the query environment, which can be overridden
using the arguments to the request.
"""
environ = self._base_environ(**request)
# Curry a data dictionary into an instance of the template renderer
# callback function.
data = {}
on_template_render = curry(store_rendered_templates, data)
signal_uid = "template-render-%s" % id(request)
signals.template_rendered.connect(on_template_render, dispatch_uid=signal_uid)
# Capture exceptions created by the handler.
exception_uid = "request-exception-%s" % id(request)
got_request_exception.connect(self.store_exc_info, dispatch_uid=exception_uid)
try:
try:
response = self.handler(environ)
except TemplateDoesNotExist as e:
# If the view raises an exception, Django will attempt to show
# the 500.html template. If that template is not available,
# we should ignore the error in favor of re-raising the
# underlying exception that caused the 500 error. Any other
# template found to be missing during view error handling
# should be reported as-is.
if e.args != ('500.html',):
raise
# Look for a signalled exception, clear the current context
# exception data, then re-raise the signalled exception.
# Also make sure that the signalled exception is cleared from
# the local cache!
if self.exc_info:
exc_info = self.exc_info
self.exc_info = None
six.reraise(*exc_info)
# Save the client and request that stimulated the response.
response.client = self
response.request = request
# Add any rendered template detail to the response.
response.templates = data.get("templates", [])
response.context = data.get("context")
response.json = curry(self._parse_json, response)
# Attach the ResolverMatch instance to the response
response.resolver_match = SimpleLazyObject(
lambda: urlresolvers.resolve(request['PATH_INFO']))
# Flatten a single context. Not really necessary anymore thanks to
# the __getattr__ flattening in ContextList, but has some edge-case
# backwards-compatibility implications.
if response.context and len(response.context) == 1:
response.context = response.context[0]
# Update persistent cookie data.
if response.cookies:
self.cookies.update(response.cookies)
return response
finally:
signals.template_rendered.disconnect(dispatch_uid=signal_uid)
got_request_exception.disconnect(dispatch_uid=exception_uid)
def get(self, path, data=None, follow=False, secure=False, **extra):
"""
Requests a response from the server using GET.
"""
response = super(Client, self).get(path, data=data, secure=secure,
**extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def post(self, path, data=None, content_type=MULTIPART_CONTENT,
follow=False, secure=False, **extra):
"""
Requests a response from the server using POST.
"""
response = super(Client, self).post(path, data=data,
content_type=content_type,
secure=secure, **extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def head(self, path, data=None, follow=False, secure=False, **extra):
"""
Request a response from the server using HEAD.
"""
response = super(Client, self).head(path, data=data, secure=secure,
**extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def options(self, path, data='', content_type='application/octet-stream',
follow=False, secure=False, **extra):
"""
Request a response from the server using OPTIONS.
"""
response = super(Client, self).options(path, data=data,
content_type=content_type,
secure=secure, **extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def put(self, path, data='', content_type='application/octet-stream',
follow=False, secure=False, **extra):
"""
Send a resource to the server using PUT.
"""
response = super(Client, self).put(path, data=data,
content_type=content_type,
secure=secure, **extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def patch(self, path, data='', content_type='application/octet-stream',
follow=False, secure=False, **extra):
"""
Send a resource to the server using PATCH.
"""
response = super(Client, self).patch(path, data=data,
content_type=content_type,
secure=secure, **extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def delete(self, path, data='', content_type='application/octet-stream',
follow=False, secure=False, **extra):
"""
Send a DELETE request to the server.
"""
response = super(Client, self).delete(path, data=data,
content_type=content_type,
secure=secure, **extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def trace(self, path, data='', follow=False, secure=False, **extra):
"""
Send a TRACE request to the server.
"""
response = super(Client, self).trace(path, data=data, secure=secure, **extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def login(self, **credentials):
"""
Sets the Factory to appear as if it has successfully logged into a site.
Returns True if login is possible; False if the provided credentials
are incorrect, or the user is inactive, or if the sessions framework is
not available.
"""
from django.contrib.auth import authenticate
user = authenticate(**credentials)
if (user and user.is_active and
apps.is_installed('django.contrib.sessions')):
self._login(user)
return True
else:
return False
def force_login(self, user, backend=None):
if backend is None:
backend = settings.AUTHENTICATION_BACKENDS[0]
user.backend = backend
self._login(user)
def _login(self, user):
from django.contrib.auth import login
engine = import_module(settings.SESSION_ENGINE)
# Create a fake request to store login details.
request = HttpRequest()
if self.session:
request.session = self.session
else:
request.session = engine.SessionStore()
login(request, user)
# Save the session values.
request.session.save()
# Set the cookie to represent the session.
session_cookie = settings.SESSION_COOKIE_NAME
self.cookies[session_cookie] = request.session.session_key
cookie_data = {
'max-age': None,
'path': '/',
'domain': settings.SESSION_COOKIE_DOMAIN,
'secure': settings.SESSION_COOKIE_SECURE or None,
'expires': None,
}
self.cookies[session_cookie].update(cookie_data)
def logout(self):
"""
Removes the authenticated user's cookies and session object.
Causes the authenticated user to be logged out.
"""
from django.contrib.auth import get_user, logout
request = HttpRequest()
engine = import_module(settings.SESSION_ENGINE)
if self.session:
request.session = self.session
request.user = get_user(request)
else:
request.session = engine.SessionStore()
logout(request)
self.cookies = SimpleCookie()
def _parse_json(self, response, **extra):
if 'application/json' not in response.get('Content-Type'):
raise ValueError(
'Content-Type header is "{0}", not "application/json"'
.format(response.get('Content-Type'))
)
return json.loads(response.content.decode(), **extra)
    def _handle_redirects(self, response, **extra):
        "Follows any redirects by requesting responses from the server using GET."
        # Chain of (url, status_code) pairs, attached to the final response.
        response.redirect_chain = []
        while response.status_code in (301, 302, 303, 307):
            response_url = response.url
            redirect_chain = response.redirect_chain
            redirect_chain.append((response_url, response.status_code))
            url = urlsplit(response_url)
            # Carry scheme/host/port of an absolute redirect target into the
            # WSGI environ for the follow-up request.
            if url.scheme:
                extra['wsgi.url_scheme'] = url.scheme
            if url.hostname:
                extra['SERVER_NAME'] = url.hostname
            if url.port:
                extra['SERVER_PORT'] = str(url.port)
            # Redirects are always followed with GET, per browser behavior.
            response = self.get(url.path, QueryDict(url.query), follow=False, **extra)
            response.redirect_chain = redirect_chain
            if redirect_chain[-1] in redirect_chain[:-1]:
                # Check that we're not redirecting to somewhere we've already
                # been to, to prevent loops.
                raise RedirectCycleError("Redirect loop detected.", last_response=response)
            if len(redirect_chain) > 20:
                # Such a lengthy chain likely also means a loop, but one with
                # a growing path, changing view, or changing query argument;
                # 20 is the value of "network.http.redirection-limit" from Firefox.
                raise RedirectCycleError("Too many redirects.", last_response=response)
        return response
| mit |
allenlavoie/tensorflow | tensorflow/tools/api/generator/create_python_api_test.py | 2 | 2863 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Tests for create_python_api."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import imp
import sys
from tensorflow.python.platform import test
from tensorflow.python.util.tf_export import tf_export
from tensorflow.tools.api.generator import create_python_api
@tf_export('test_op', 'test_op1')
def test_op():
  """Dummy function exported under two API names for generator tests."""
  pass
@tf_export('TestClass', 'NewTestClass')
class TestClass(object):
  """Dummy class exported under two API names for generator tests."""
  pass
_TEST_CONSTANT = 5
_MODULE_NAME = 'test.tensorflow.test_module'
class CreatePythonApiTest(test.TestCase):
  """Checks that tf_export-ed symbols appear in the generated API text."""

  def setUp(self):
    # Add fake op to a module that has 'tensorflow' in the name.
    sys.modules[_MODULE_NAME] = imp.new_module(_MODULE_NAME)
    setattr(sys.modules[_MODULE_NAME], 'test_op', test_op)
    setattr(sys.modules[_MODULE_NAME], 'TestClass', TestClass)
    test_op.__module__ = _MODULE_NAME
    TestClass.__module__ = _MODULE_NAME
    tf_export('consts._TEST_CONSTANT').export_constant(
        _MODULE_NAME, '_TEST_CONSTANT')

  def tearDown(self):
    del sys.modules[_MODULE_NAME]

  def testFunctionImportIsAdded(self):
    imports = create_python_api.get_api_init_text()
    # Both exported names of test_op must be importable.
    expected_import = (
        'from test.tensorflow.test_module import test_op as test_op1')
    self.assertTrue(
        expected_import in str(imports),
        msg='%s not in %s' % (expected_import, str(imports)))
    expected_import = 'from test.tensorflow.test_module import test_op'
    self.assertTrue(
        expected_import in str(imports),
        msg='%s not in %s' % (expected_import, str(imports)))

  def testClassImportIsAdded(self):
    imports = create_python_api.get_api_init_text()
    expected_import = 'from test.tensorflow.test_module import TestClass'
    # Check the full import line, not just the bare class name: the name
    # alone could appear anywhere in the generated text (e.g. in a
    # comment), which would let a broken generator pass this test.
    self.assertTrue(
        expected_import in str(imports),
        msg='%s not in %s' % (expected_import, str(imports)))

  def testConstantIsAdded(self):
    imports = create_python_api.get_api_init_text()
    expected = 'from test.tensorflow.test_module import _TEST_CONSTANT'
    self.assertTrue(expected in str(imports),
                    msg='%s not in %s' % (expected, str(imports)))
if __name__ == '__main__':
test.main()
| apache-2.0 |
benoitsteiner/tensorflow-xsmm | tensorflow/contrib/data/python/kernel_tests/serialization/ignore_errors_serialization_test.py | 14 | 1862 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the IgnoreErrors input pipeline ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.data.python.kernel_tests.serialization import dataset_serialization_test_base
from tensorflow.contrib.data.python.ops import error_ops
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.ops import array_ops
from tensorflow.python.platform import test
class IgnoreErrorsSerializationTest(
    dataset_serialization_test_base.DatasetSerializationTestBase):
  """Checkpoint/restore coverage for datasets wrapped in ignore_errors()."""

  def _build_ds(self, components):
    # check_numerics raises on the NaN element; ignore_errors() should
    # silently drop it from the output stream.
    return dataset_ops.Dataset.from_tensor_slices(components).map(
        lambda x: array_ops.check_numerics(x, "message")).apply(
            error_ops.ignore_errors())

  def testIgnoreErrorsCore(self):
    components = np.array([1., 2., 3., np.nan, 5.]).astype(np.float32)
    diff_components = np.array([1., 2., 3., np.nan]).astype(np.float32)
    # 5 inputs minus the one NaN dropped by ignore_errors().
    num_outputs = 4
    self.run_core_tests(lambda: self._build_ds(components),
                        lambda: self._build_ds(diff_components), num_outputs)
if __name__ == "__main__":
test.main()
| apache-2.0 |
gusai-francelabs/datafari | windows/python/Lib/idlelib/PyShell.py | 5 | 58008 | #! /usr/bin/env python
from __future__ import print_function
import os
import os.path
import sys
import string
import getopt
import re
import socket
import time
import threading
import io
import linecache
from code import InteractiveInterpreter
from platform import python_version, system
try:
from Tkinter import *
except ImportError:
print("** IDLE can't import Tkinter.\n"
"Your Python may not be configured for Tk. **", file=sys.__stderr__)
sys.exit(1)
import tkMessageBox
from idlelib.EditorWindow import EditorWindow, fixwordbreaks
from idlelib.FileList import FileList
from idlelib.ColorDelegator import ColorDelegator
from idlelib.UndoDelegator import UndoDelegator
from idlelib.OutputWindow import OutputWindow
from idlelib.configHandler import idleConf
from idlelib import rpc
from idlelib import Debugger
from idlelib import RemoteDebugger
from idlelib import macosxSupport
from idlelib import IOBinding
IDENTCHARS = string.ascii_letters + string.digits + "_"
HOST = '127.0.0.1' # python execution server on localhost loopback
PORT = 0 # someday pass in host, port for remote debug capability
try:
from signal import SIGTERM
except ImportError:
SIGTERM = 15
# Override warnings module to write to warning_stream. Initialize to send IDLE
# internal warnings to the console. ScriptBinding.check_syntax() will
# temporarily redirect the stream to the shell window to display warnings when
# checking user's code.
warning_stream = sys.__stderr__ # None, at least on Windows, if no console.
import warnings
def idle_formatwarning(message, category, filename, lineno, line=None):
    """Format warnings the IDLE way.

    Mirrors warnings.formatwarning() but with IDLE's banner line; when
    *line* is not supplied it is fetched from the line cache.
    """
    if line is None:
        line = linecache.getline(filename, lineno)
    line = line.strip()
    parts = ["\nWarning (from warnings module):\n"]
    parts.append(' File \"%s\", line %s\n' % (filename, lineno))
    if line:
        # Only show the source line when one is available.
        parts.append(" %s\n" % line)
    parts.append("%s: %s\n" % (category.__name__, message))
    return "".join(parts)
def idle_showwarning(
        message, category, filename, lineno, file=None, line=None):
    """Show Idle-format warning (after replacing warnings.showwarning).

    The differences are the formatter called, the file=None replacement,
    which can be None, the capture of the consequence AttributeError,
    and the output of a hard-coded prompt.
    """
    stream = warning_stream if file is None else file
    try:
        stream.write(idle_formatwarning(
            message, category, filename, lineno, line=line))
        # Re-emit the shell prompt after the warning text.
        stream.write(">>> ")
    except (AttributeError, IOError):
        # stream (probably __stderr__) is invalid; drop the warning.
        pass
_warnings_showwarning = None
def capture_warnings(capture):
    "Replace warning.showwarning with idle_showwarning, or reverse."
    global _warnings_showwarning
    if capture and _warnings_showwarning is None:
        # Stash the stock hook so it can be restored later.
        _warnings_showwarning = warnings.showwarning
        warnings.showwarning = idle_showwarning
    elif not capture and _warnings_showwarning is not None:
        # Put the stock hook back and forget the stashed copy.
        warnings.showwarning = _warnings_showwarning
        _warnings_showwarning = None
capture_warnings(True)
def extended_linecache_checkcache(filename=None,
                                  orig_checkcache=linecache.checkcache):
    """Extend linecache.checkcache to preserve the <pyshell#...> entries

    Shell pseudo-files have no backing file on disk, so the stock
    checkcache() would evict them.  Pull every '<...>' key out of the
    cache, run the original check (bound at definition time, so later
    monkey-patching cannot recurse into this wrapper), then put the
    saved entries back.
    """
    cache = linecache.cache
    preserved = {}
    for key in list(cache):
        if key.startswith('<') and key.endswith('>'):
            preserved[key] = cache.pop(key)
    orig_checkcache(filename)
    cache.update(preserved)
# Patch linecache.checkcache():
linecache.checkcache = extended_linecache_checkcache
class PyShellEditorWindow(EditorWindow):
    "Regular text edit window in IDLE, supports breakpoints"

    def __init__(self, *args):
        # Line numbers of currently-set breakpoints in this window.
        self.breakpoints = []
        EditorWindow.__init__(self, *args)
        self.text.bind("<<set-breakpoint-here>>", self.set_breakpoint_here)
        self.text.bind("<<clear-breakpoint-here>>", self.clear_breakpoint_here)
        self.text.bind("<<open-python-shell>>", self.flist.open_shell)
        # Per-user file persisting breakpoints across IDLE sessions.
        self.breakpointPath = os.path.join(idleConf.GetUserCfgDir(),
                                           'breakpoints.lst')
        # whenever a file is changed, restore breakpoints
        def filename_changed_hook(old_hook=self.io.filename_change_hook,
                                  self=self):
            self.restore_file_breaks()
            old_hook()
        self.io.set_filename_change_hook(filename_changed_hook)
        if self.io.filename:
            self.restore_file_breaks()
        self.color_breakpoint_text()

    # Entries for the right-click context menu: (label, event, enable-check).
    rmenu_specs = [
        ("Cut", "<<cut>>", "rmenu_check_cut"),
        ("Copy", "<<copy>>", "rmenu_check_copy"),
        ("Paste", "<<paste>>", "rmenu_check_paste"),
        ("Set Breakpoint", "<<set-breakpoint-here>>", None),
        ("Clear Breakpoint", "<<clear-breakpoint-here>>", None)
    ]

    def color_breakpoint_text(self, color=True):
        "Turn colorizing of breakpoint text on or off"
        if self.io is None:
            # possible due to update in restore_file_breaks
            return
        if color:
            theme = idleConf.GetOption('main','Theme','name')
            cfg = idleConf.GetHighlight(theme, "break")
        else:
            # Empty colors revert the tag to the widget defaults.
            cfg = {'foreground': '', 'background': ''}
        self.text.tag_config('BREAK', cfg)

    def set_breakpoint(self, lineno):
        """Mark *lineno* with the BREAK tag and tell the debugger (if any)."""
        text = self.text
        filename = self.io.filename
        text.tag_add("BREAK", "%d.0" % lineno, "%d.0" % (lineno+1))
        try:
            self.breakpoints.index(lineno)
        except ValueError:  # only add if missing, i.e. do once
            self.breakpoints.append(lineno)
        try:    # update the subprocess debugger
            debug = self.flist.pyshell.interp.debugger
            debug.set_breakpoint_here(filename, lineno)
        except: # but debugger may not be active right now....
            pass

    def set_breakpoint_here(self, event=None):
        """Event handler: set a breakpoint on the insert-cursor line."""
        text = self.text
        filename = self.io.filename
        if not filename:
            # Unsaved buffers cannot hold breakpoints; just beep.
            text.bell()
            return
        lineno = int(float(text.index("insert")))
        self.set_breakpoint(lineno)

    def clear_breakpoint_here(self, event=None):
        """Event handler: remove the breakpoint on the insert-cursor line."""
        text = self.text
        filename = self.io.filename
        if not filename:
            text.bell()
            return
        lineno = int(float(text.index("insert")))
        try:
            self.breakpoints.remove(lineno)
        except:
            pass
        text.tag_remove("BREAK", "insert linestart",\
                        "insert lineend +1char")
        try:
            debug = self.flist.pyshell.interp.debugger
            debug.clear_breakpoint_here(filename, lineno)
        except:
            # Debugger may not be running; ignore.
            pass

    def clear_file_breaks(self):
        """Remove every breakpoint in this window (tags, list, debugger)."""
        if self.breakpoints:
            text = self.text
            filename = self.io.filename
            if not filename:
                text.bell()
                return
            self.breakpoints = []
            text.tag_remove("BREAK", "1.0", END)
            try:
                debug = self.flist.pyshell.interp.debugger
                debug.clear_file_breaks(filename)
            except:
                pass

    def store_file_breaks(self):
        "Save breakpoints when file is saved"
        # XXX 13 Dec 2002 KBK Currently the file must be saved before it can
        #     be run.  The breaks are saved at that time.  If we introduce
        #     a temporary file save feature the save breaks functionality
        #     needs to be re-verified, since the breaks at the time the
        #     temp file is created may differ from the breaks at the last
        #     permanent save of the file.  Currently, a break introduced
        #     after a save will be effective, but not persistent.
        #     This is necessary to keep the saved breaks synched with the
        #     saved file.
        #
        #     Breakpoints are set as tagged ranges in the text.
        #     Since a modified file has to be saved before it is
        #     run, and since self.breakpoints (from which the subprocess
        #     debugger is loaded) is updated during the save, the visible
        #     breaks stay synched with the subprocess even if one of these
        #     unexpected breakpoint deletions occurs.
        breaks = self.breakpoints
        filename = self.io.filename
        try:
            with open(self.breakpointPath,"r") as old_file:
                lines = old_file.readlines()
        except IOError:
            lines = []
        try:
            with open(self.breakpointPath,"w") as new_file:
                # Rewrite the file, dropping this file's old entry and
                # appending the current breakpoint list (if any).
                for line in lines:
                    if not line.startswith(filename + '='):
                        new_file.write(line)
                self.update_breakpoints()
                breaks = self.breakpoints
                if breaks:
                    new_file.write(filename + '=' + str(breaks) + '\n')
        except IOError as err:
            if not getattr(self.root, "breakpoint_error_displayed", False):
                # Show the error dialog at most once per IDLE session.
                self.root.breakpoint_error_displayed = True
                tkMessageBox.showerror(title='IDLE Error',
                    message='Unable to update breakpoint list:\n%s'
                        % str(err),
                    parent=self.text)

    def restore_file_breaks(self):
        """Re-apply this file's saved breakpoints from breakpoints.lst."""
        self.text.update()   # this enables setting "BREAK" tags to be visible
        if self.io is None:
            # can happen if IDLE closes due to the .update() call
            return
        filename = self.io.filename
        if filename is None:
            return
        if os.path.isfile(self.breakpointPath):
            # NOTE(review): file handle is never closed, and eval() is run on
            # the breakpoint file's contents (a trusted per-user config file).
            lines = open(self.breakpointPath,"r").readlines()
            for line in lines:
                if line.startswith(filename + '='):
                    breakpoint_linenumbers = eval(line[len(filename)+1:])
                    for breakpoint_linenumber in breakpoint_linenumbers:
                        self.set_breakpoint(breakpoint_linenumber)

    def update_breakpoints(self):
        "Retrieves all the breakpoints in the current window"
        text = self.text
        ranges = text.tag_ranges("BREAK")
        linenumber_list = self.ranges_to_linenumbers(ranges)
        self.breakpoints = linenumber_list

    def ranges_to_linenumbers(self, ranges):
        """Convert Tk tag ranges (start,end index pairs) to a flat list of
        line numbers covered by those ranges."""
        lines = []
        for index in range(0, len(ranges), 2):
            lineno = int(float(ranges[index].string))
            end = int(float(ranges[index+1].string))
            while lineno < end:
                lines.append(lineno)
                lineno += 1
        return lines

    # XXX 13 Dec 2002 KBK Not used currently
    #     def saved_change_hook(self):
    #         "Extend base method - clear breaks if module is modified"
    #         if not self.get_saved():
    #             self.clear_file_breaks()
    #         EditorWindow.saved_change_hook(self)

    def _close(self):
        "Extend base method - clear breaks when module is closed"
        self.clear_file_breaks()
        EditorWindow._close(self)
class PyShellFileList(FileList):
    "Extend base class: IDLE supports a shell and breakpoints"

    # override FileList's class variable, instances return PyShellEditorWindow
    # instead of EditorWindow when new edit windows are created.
    EditorWindow = PyShellEditorWindow

    pyshell = None

    def open_shell(self, event=None):
        """Return the single shell window, creating or raising it."""
        shell = self.pyshell
        if shell:
            # Already open: just bring the existing window forward.
            shell.top.wakeup()
        else:
            shell = self.pyshell = PyShell(self)
        if shell and not shell.begin():
            return None
        return shell
class ModifiedColorDelegator(ColorDelegator):
    "Extend base class: colorizer for the shell window itself"

    def __init__(self):
        ColorDelegator.__init__(self)
        self.LoadTagDefs()

    def recolorize_main(self):
        # Everything before the I/O mark is finished shell history; mark it
        # SYNC so the base colorizer leaves it alone.
        self.tag_remove("TODO", "1.0", "iomark")
        self.tag_add("SYNC", "1.0", "iomark")
        ColorDelegator.recolorize_main(self)

    def LoadTagDefs(self):
        """Extend base tag definitions with the shell's stream tags."""
        ColorDelegator.LoadTagDefs(self)
        theme = idleConf.GetOption('main','Theme','name')
        self.tagdefs.update({
            "stdin": {'background':None,'foreground':None},
            "stdout": idleConf.GetHighlight(theme, "stdout"),
            "stderr": idleConf.GetHighlight(theme, "stderr"),
            "console": idleConf.GetHighlight(theme, "console"),
        })

    def removecolors(self):
        # Don't remove shell color tags before "iomark"
        for tag in self.tagdefs:
            self.tag_remove(tag, "iomark", "end")
class ModifiedUndoDelegator(UndoDelegator):
    "Extend base class: forbid insert/delete before the I/O mark"

    def insert(self, index, chars, tags=None):
        """Insert, unless *index* is before the iomark (then just beep)."""
        try:
            if self.delegate.compare(index, "<", "iomark"):
                self.delegate.bell()
                return
        except TclError:
            # Bad index string: fall through and let the base class handle it.
            pass
        UndoDelegator.insert(self, index, chars, tags)

    def delete(self, index1, index2=None):
        """Delete, unless *index1* is before the iomark (then just beep)."""
        try:
            if self.delegate.compare(index1, "<", "iomark"):
                self.delegate.bell()
                return
        except TclError:
            pass
        UndoDelegator.delete(self, index1, index2)
class MyRPCClient(rpc.RPCClient):
    # EOFError propagates to ModifiedInterpreter.poll_subprocess(), which
    # responds by restarting the subprocess.

    def handle_EOF(self):
        "Override the base class - just re-raise EOFError"
        raise EOFError
class ModifiedInterpreter(InteractiveInterpreter):
    """Interpreter driving IDLE's shell window.

    User code normally runs in a separate subprocess reached over an RPC
    socket (spawn_subprocess/start_subprocess); without a subprocess
    (IDLE's -n mode) it executes in-process via the base class.
    """

    def __init__(self, tkconsole):
        # tkconsole is the PyShell window this interpreter serves.
        self.tkconsole = tkconsole
        locals = sys.modules['__main__'].__dict__
        InteractiveInterpreter.__init__(self, locals=locals)
        self.save_warnings_filters = None
        self.restarting = False
        self.subprocess_arglist = None
        self.port = PORT
        # Remembered so a restart can reset any __future__ flags set by
        # previously-compiled user code.
        self.original_compiler_flags = self.compile.compiler.flags

    _afterid = None   # pending text.after() callback id for poll_subprocess
    rpcclt = None     # RPC client connected to the execution server
    rpcpid = None     # pid of the spawned execution subprocess

    def spawn_subprocess(self):
        """Launch the Python execution server as a detached child process."""
        if self.subprocess_arglist is None:
            self.subprocess_arglist = self.build_subprocess_arglist()
        args = self.subprocess_arglist
        self.rpcpid = os.spawnv(os.P_NOWAIT, sys.executable, args)

    def build_subprocess_arglist(self):
        """Build the argv used to start the subprocess (idlelib.run)."""
        assert (self.port!=0), (
            "Socket should have been assigned a port number.")
        # Propagate any -W warning options to the subprocess.
        w = ['-W' + s for s in sys.warnoptions]
        if 1/2 > 0: # account for new division
            w.append('-Qnew')
        # Maybe IDLE is installed and is being accessed via sys.path,
        # or maybe it's not installed and the idle.py script is being
        # run from the IDLE source directory.
        del_exitf = idleConf.GetOption('main', 'General', 'delete-exitfunc',
                                       default=False, type='bool')
        if __name__ == 'idlelib.PyShell':
            command = "__import__('idlelib.run').run.main(%r)" % (del_exitf,)
        else:
            command = "__import__('run').main(%r)" % (del_exitf,)
        if sys.platform[:3] == 'win' and ' ' in sys.executable:
            # handle embedded space in path by quoting the argument
            decorated_exec = '"%s"' % sys.executable
        else:
            decorated_exec = sys.executable
        return [decorated_exec] + w + ["-c", command, str(self.port)]

    def start_subprocess(self):
        """Bind the RPC socket, spawn the subprocess, accept its connection.

        Returns the RPC client, or None if binding/accepting failed (an
        error dialog is shown in that case).
        """
        addr = (HOST, self.port)
        # GUI makes several attempts to acquire socket, listens for connection
        for i in range(3):
            time.sleep(i)
            try:
                self.rpcclt = MyRPCClient(addr)
                break
            except socket.error:
                pass
        else:
            self.display_port_binding_error()
            return None
        # if PORT was 0, system will assign an 'ephemeral' port. Find it out:
        self.port = self.rpcclt.listening_sock.getsockname()[1]
        # if PORT was not 0, probably working with a remote execution server
        if PORT != 0:
            # To allow reconnection within the 2MSL wait (cf. Stevens TCP
            # V1, 18.6),  set SO_REUSEADDR.  Note that this can be problematic
            # on Windows since the implementation allows two active sockets on
            # the same address!
            self.rpcclt.listening_sock.setsockopt(socket.SOL_SOCKET,
                                        socket.SO_REUSEADDR, 1)
        self.spawn_subprocess()
        #time.sleep(20) # test to simulate GUI not accepting connection
        # Accept the connection from the Python execution server
        self.rpcclt.listening_sock.settimeout(10)
        try:
            self.rpcclt.accept()
        except socket.timeout:
            self.display_no_subprocess_error()
            return None
        # Expose console objects to the subprocess by name.
        self.rpcclt.register("console", self.tkconsole)
        self.rpcclt.register("stdin", self.tkconsole.stdin)
        self.rpcclt.register("stdout", self.tkconsole.stdout)
        self.rpcclt.register("stderr", self.tkconsole.stderr)
        self.rpcclt.register("flist", self.tkconsole.flist)
        self.rpcclt.register("linecache", linecache)
        self.rpcclt.register("interp", self)
        self.transfer_path(with_cwd=True)
        self.poll_subprocess()
        return self.rpcclt

    def restart_subprocess(self, with_cwd=False):
        """Kill the subprocess and start a fresh one, keeping the shell UI.

        Reconnects the debugger and reloads breakpoints if debugging was
        active.  Returns the (new) RPC client, or None on failure.
        """
        if self.restarting:
            return self.rpcclt
        self.restarting = True
        # close only the subprocess debugger
        debug = self.getdebugger()
        if debug:
            try:
                # Only close subprocess debugger, don't unregister gui_adap!
                RemoteDebugger.close_subprocess_debugger(self.rpcclt)
            except:
                pass
        # Kill subprocess, spawn a new one, accept connection.
        self.rpcclt.close()
        self.unix_terminate()
        console = self.tkconsole
        was_executing = console.executing
        console.executing = False
        self.spawn_subprocess()
        try:
            self.rpcclt.accept()
        except socket.timeout:
            self.display_no_subprocess_error()
            return None
        self.transfer_path(with_cwd=with_cwd)
        console.stop_readline()
        # annotate restart in shell window and mark it
        console.text.delete("iomark", "end-1c")
        if was_executing:
            console.write('\n')
            console.showprompt()
        halfbar = ((int(console.width) - 16) // 2) * '='
        console.write(halfbar + ' RESTART ' + halfbar)
        console.text.mark_set("restart", "end-1c")
        console.text.mark_gravity("restart", "left")
        console.showprompt()
        # restart subprocess debugger
        if debug:
            # Restarted debugger connects to current instance of debug GUI
            RemoteDebugger.restart_subprocess_debugger(self.rpcclt)
            # reload remote debugger breakpoints for all PyShellEditWindows
            debug.load_breakpoints()
        self.compile.compiler.flags = self.original_compiler_flags
        self.restarting = False
        return self.rpcclt

    def __request_interrupt(self):
        # Runs on a worker thread; asks the server to raise KeyboardInterrupt.
        self.rpcclt.remotecall("exec", "interrupt_the_server", (), {})

    def interrupt_subprocess(self):
        """Interrupt running user code without blocking the GUI thread."""
        threading.Thread(target=self.__request_interrupt).start()

    def kill_subprocess(self):
        """Shut down polling, the RPC connection, and the subprocess."""
        if self._afterid is not None:
            self.tkconsole.text.after_cancel(self._afterid)
        try:
            self.rpcclt.close()
        except AttributeError:  # no socket
            pass
        self.unix_terminate()
        self.tkconsole.executing = False
        self.rpcclt = None

    def unix_terminate(self):
        "UNIX: make sure subprocess is terminated and collect status"
        if hasattr(os, 'kill'):
            try:
                os.kill(self.rpcpid, SIGTERM)
            except OSError:
                # process already terminated:
                return
            else:
                try:
                    # Reap the child so it doesn't linger as a zombie.
                    os.waitpid(self.rpcpid, 0)
                except OSError:
                    return

    def transfer_path(self, with_cwd=False):
        """Copy the GUI's sys.path into the subprocess (issue 13506)."""
        if with_cwd:        # Issue 13506
            path = ['']     # include Current Working Directory
            path.extend(sys.path)
        else:
            path = sys.path

        self.runcommand("""if 1:
        import sys as _sys
        _sys.path = %r
        del _sys
        \n""" % (path,))

    active_seq = None   # sequence number of the request currently executing

    def poll_subprocess(self):
        """Periodic GUI-side poll for responses from the subprocess.

        Reschedules itself via text.after(); restarts the subprocess on a
        lost connection.
        """
        clt = self.rpcclt
        if clt is None:
            return
        try:
            response = clt.pollresponse(self.active_seq, wait=0.05)
        except (EOFError, IOError, KeyboardInterrupt):
            # lost connection or subprocess terminated itself, restart
            # [the KBI is from rpc.SocketIO.handle_EOF()]
            if self.tkconsole.closing:
                return
            response = None
            self.restart_subprocess()
        if response:
            self.tkconsole.resetoutput()
            self.active_seq = None
            how, what = response
            console = self.tkconsole.console
            if how == "OK":
                if what is not None:
                    print(repr(what), file=console)
            elif how == "EXCEPTION":
                if self.tkconsole.getvar("<<toggle-jit-stack-viewer>>"):
                    self.remote_stack_viewer()
            elif how == "ERROR":
                errmsg = "PyShell.ModifiedInterpreter: Subprocess ERROR:\n"
                print(errmsg, what, file=sys.__stderr__)
                print(errmsg, what, file=console)
            # we received a response to the currently active seq number:
            try:
                self.tkconsole.endexecuting()
            except AttributeError:  # shell may have closed
                pass
        # Reschedule myself
        if not self.tkconsole.closing:
            self._afterid = self.tkconsole.text.after(
                self.tkconsole.pollinterval, self.poll_subprocess)

    debugger = None

    def setdebugger(self, debugger):
        self.debugger = debugger

    def getdebugger(self):
        return self.debugger

    def open_remote_stack_viewer(self):
        """Initiate the remote stack viewer from a separate thread.

        This method is called from the subprocess, and by returning from this
        method we allow the subprocess to unblock.  After a bit the shell
        requests the subprocess to open the remote stack viewer which returns
        a static object looking at the last exception.  It is queried through
        the RPC mechanism.
        """
        self.tkconsole.text.after(300, self.remote_stack_viewer)
        return

    def remote_stack_viewer(self):
        """Open a tree browser on the subprocess's last traceback."""
        from idlelib import RemoteObjectBrowser
        oid = self.rpcclt.remotequeue("exec", "stackviewer", ("flist",), {})
        if oid is None:
            # No traceback available on the server side.
            self.tkconsole.root.bell()
            return
        item = RemoteObjectBrowser.StubObjectTreeItem(self.rpcclt, oid)
        from idlelib.TreeWidget import ScrolledCanvas, TreeNode
        top = Toplevel(self.tkconsole.root)
        theme = idleConf.GetOption('main','Theme','name')
        background = idleConf.GetHighlight(theme, 'normal')['background']
        sc = ScrolledCanvas(top, bg=background, highlightthickness=0)
        sc.frame.pack(expand=1, fill="both")
        node = TreeNode(sc.canvas, None, item)
        node.expand()
        # XXX Should GC the remote tree when closing the window

    gid = 0     # counter used to name <pyshell#N> pseudo-files

    def execsource(self, source):
        "Like runsource() but assumes complete exec source"
        filename = self.stuffsource(source)
        self.execfile(filename, source)

    def execfile(self, filename, source=None):
        "Execute an existing file"
        if source is None:
            # NOTE(review): file handle is never explicitly closed here.
            source = open(filename, "r").read()
        try:
            code = compile(source, filename, "exec", dont_inherit=True)
        except (OverflowError, SyntaxError):
            self.tkconsole.resetoutput()
            print('*** Error in script or command!\n'
                  'Traceback (most recent call last):',
                  file=self.tkconsole.stderr)
            InteractiveInterpreter.showsyntaxerror(self, filename)
            self.tkconsole.showprompt()
        else:
            self.runcode(code)

    def runsource(self, source):
        "Extend base class method: Stuff the source in the line cache first"
        filename = self.stuffsource(source)
        self.more = 0
        # Temporarily escalate SyntaxWarning to an error so the user sees it.
        self.save_warnings_filters = warnings.filters[:]
        warnings.filterwarnings(action="error", category=SyntaxWarning)
        if isinstance(source, unicode) and IOBinding.encoding != 'utf-8':
            try:
                # Prefix a coding cookie so compile() decodes correctly.
                source = '# -*- coding: %s -*-\n%s' % (
                        IOBinding.encoding,
                        source.encode(IOBinding.encoding))
            except UnicodeError:
                self.tkconsole.resetoutput()
                self.write("Unsupported characters in input\n")
                return
        try:
            # InteractiveInterpreter.runsource() calls its runcode() method,
            # which is overridden (see below)
            return InteractiveInterpreter.runsource(self, source, filename)
        finally:
            if self.save_warnings_filters is not None:
                warnings.filters[:] = self.save_warnings_filters
                self.save_warnings_filters = None

    def stuffsource(self, source):
        "Stuff source in the filename cache"
        filename = "<pyshell#%d>" % self.gid
        self.gid = self.gid + 1
        lines = source.split("\n")
        linecache.cache[filename] = len(source)+1, 0, lines, filename
        return filename

    def prepend_syspath(self, filename):
        "Prepend sys.path with file's directory if not already included"
        self.runcommand("""if 1:
            _filename = %r
            import sys as _sys
            from os.path import dirname as _dirname
            _dir = _dirname(_filename)
            if not _dir in _sys.path:
                _sys.path.insert(0, _dir)
            del _filename, _sys, _dirname, _dir
            \n""" % (filename,))

    def showsyntaxerror(self, filename=None):
        """Extend base class method: Add Colorizing

        Color the offending position instead of printing it and pointing at it
        with a caret.
        """
        text = self.tkconsole.text
        stuff = self.unpackerror()
        if stuff:
            msg, lineno, offset, line = stuff
            if lineno == 1:
                pos = "iomark + %d chars" % (offset-1)
            else:
                pos = "iomark linestart + %d lines + %d chars" % \
                      (lineno-1, offset-1)
            # Tag the error position (and the word before it) in red.
            text.tag_add("ERROR", pos)
            text.see(pos)
            char = text.get(pos)
            if char and char in IDENTCHARS:
                text.tag_add("ERROR", pos + " wordstart", pos)
            self.tkconsole.resetoutput()
            self.write("SyntaxError: %s\n" % str(msg))
        else:
            self.tkconsole.resetoutput()
            InteractiveInterpreter.showsyntaxerror(self, filename)
        self.tkconsole.showprompt()

    def unpackerror(self):
        """Return (msg, lineno, offset, line) for the current SyntaxError,
        or None if the pending exception is not an unpackable SyntaxError."""
        type, value, tb = sys.exc_info()
        ok = type is SyntaxError
        if ok:
            try:
                msg, (dummy_filename, lineno, offset, line) = value
                if not offset:
                    offset = 0
            except:
                ok = 0
        if ok:
            return msg, lineno, offset, line
        else:
            return None

    def showtraceback(self):
        "Extend base class method to reset output properly"
        self.tkconsole.resetoutput()
        self.checklinecache()
        InteractiveInterpreter.showtraceback(self)
        if self.tkconsole.getvar("<<toggle-jit-stack-viewer>>"):
            self.tkconsole.open_stack_viewer()

    def checklinecache(self):
        # Drop everything but the <pyshell#N> pseudo-file entries.
        c = linecache.cache
        for key in c.keys():
            if key[:1] + key[-1:] != "<>":
                del c[key]

    def runcommand(self, code):
        "Run the code without invoking the debugger"
        # The code better not raise an exception!
        if self.tkconsole.executing:
            self.display_executing_dialog()
            return 0
        if self.rpcclt:
            self.rpcclt.remotequeue("exec", "runcode", (code,), {})
        else:
            exec code in self.locals
        return 1

    def runcode(self, code):
        "Override base class method"
        if self.tkconsole.executing:
            # NOTE(review): ModifiedInterpreter has no .interp attribute;
            # self.restart_subprocess() was probably intended here — confirm.
            self.interp.restart_subprocess()
        self.checklinecache()
        if self.save_warnings_filters is not None:
            warnings.filters[:] = self.save_warnings_filters
            self.save_warnings_filters = None
        debugger = self.debugger
        try:
            self.tkconsole.beginexecuting()
            if not debugger and self.rpcclt is not None:
                # Normal case: hand the code to the subprocess asynchronously.
                self.active_seq = self.rpcclt.asyncqueue("exec", "runcode",
                                                        (code,), {})
            elif debugger:
                debugger.run(code, self.locals)
            else:
                exec code in self.locals
        except SystemExit:
            if not self.tkconsole.closing:
                if tkMessageBox.askyesno(
                    "Exit?",
                    "Do you want to exit altogether?",
                    default="yes",
                    master=self.tkconsole.text):
                    raise
                else:
                    self.showtraceback()
            else:
                raise
        except:
            if use_subprocess:
                print("IDLE internal error in runcode()",
                      file=self.tkconsole.stderr)
                self.showtraceback()
                self.tkconsole.endexecuting()
            else:
                if self.tkconsole.canceled:
                    self.tkconsole.canceled = False
                    print("KeyboardInterrupt", file=self.tkconsole.stderr)
                else:
                    self.showtraceback()
        finally:
            if not use_subprocess:
                try:
                    self.tkconsole.endexecuting()
                except AttributeError:  # shell may have closed
                    pass

    def write(self, s):
        "Override base class method"
        self.tkconsole.stderr.write(s)

    def display_port_binding_error(self):
        tkMessageBox.showerror(
            "Port Binding Error",
            "IDLE can't bind to a TCP/IP port, which is necessary to "
            "communicate with its Python execution server.  This might be "
            "because no networking is installed on this computer.  "
            "Run IDLE with the -n command line switch to start without a "
            "subprocess and refer to Help/IDLE Help 'Running without a "
            "subprocess' for further details.",
            master=self.tkconsole.text)

    def display_no_subprocess_error(self):
        tkMessageBox.showerror(
            "Subprocess Startup Error",
            "IDLE's subprocess didn't make connection.  Either IDLE can't "
            "start a subprocess or personal firewall software is blocking "
            "the connection.",
            master=self.tkconsole.text)

    def display_executing_dialog(self):
        tkMessageBox.showerror(
            "Already executing",
            "The Python Shell window is already executing a command; "
            "please wait until it is finished.",
            master=self.tkconsole.text)
class PyShell(OutputWindow):
    """The interactive Python shell window.

    Extends OutputWindow with a prompt, input history, and a
    ModifiedInterpreter that runs user code -- normally in a subprocess
    (when the module-level use_subprocess flag is true), otherwise
    in-process with sys.std* redirected through this window.
    """

    shell_title = "Python " + python_version() + " Shell"

    # Override classes
    ColorDelegator = ModifiedColorDelegator
    UndoDelegator = ModifiedUndoDelegator

    # Override menus
    menu_specs = [
        ("file", "_File"),
        ("edit", "_Edit"),
        ("debug", "_Debug"),
        ("options", "_Options"),
        ("windows", "_Window"),
        ("help", "_Help"),
    ]

    # New classes
    from idlelib.IdleHistory import History

    def __init__(self, flist=None):
        if use_subprocess:
            ms = self.menu_specs
            # Insert the Shell menu only once (menu_specs is a class
            # attribute shared by all instances).
            if ms[2][0] != "shell":
                ms.insert(2, ("shell", "She_ll"))
        self.interp = ModifiedInterpreter(self)
        if flist is None:
            # Standalone use: create a hidden Tk root and a file list.
            root = Tk()
            fixwordbreaks(root)
            root.withdraw()
            flist = PyShellFileList(root)
        #
        OutputWindow.__init__(self, flist, None, None)
        #
        ## self.config(usetabs=1, indentwidth=8, context_use_ps1=1)
        self.usetabs = True
        # indentwidth must be 8 when using tabs. See note in EditorWindow:
        self.indentwidth = 8
        self.context_use_ps1 = True
        #
        text = self.text
        text.configure(wrap="char")
        text.bind("<<newline-and-indent>>", self.enter_callback)
        text.bind("<<plain-newline-and-indent>>", self.linefeed_callback)
        text.bind("<<interrupt-execution>>", self.cancel_callback)
        text.bind("<<end-of-file>>", self.eof_callback)
        text.bind("<<open-stack-viewer>>", self.open_stack_viewer)
        text.bind("<<toggle-debugger>>", self.toggle_debugger)
        text.bind("<<toggle-jit-stack-viewer>>", self.toggle_jit_stack_viewer)
        if use_subprocess:
            text.bind("<<view-restart>>", self.view_restart_mark)
            text.bind("<<restart-shell>>", self.restart_shell)
        #
        # Save the real std streams so _close() can restore them.
        self.save_stdout = sys.stdout
        self.save_stderr = sys.stderr
        self.save_stdin = sys.stdin
        from idlelib import IOBinding
        self.stdin = PseudoInputFile(self, "stdin", IOBinding.encoding)
        self.stdout = PseudoOutputFile(self, "stdout", IOBinding.encoding)
        self.stderr = PseudoOutputFile(self, "stderr", IOBinding.encoding)
        self.console = PseudoOutputFile(self, "console", IOBinding.encoding)
        if not use_subprocess:
            # Running user code in-process: route the std streams
            # through this shell window.
            sys.stdout = self.stdout
            sys.stderr = self.stderr
            sys.stdin = self.stdin
        #
        self.history = self.History(self.text)
        #
        self.pollinterval = 50  # millisec

    def get_standard_extension_names(self):
        # Shell windows load only the shell-specific extension set.
        return idleConf.GetExtensions(shell_only=True)

    # State flags for the read/execute loop (class-level defaults,
    # shadowed by instance assignments as state changes).
    reading = False
    executing = False
    canceled = False
    endoffile = False
    closing = False
    _stop_readline_flag = False

    def set_warning_stream(self, stream):
        global warning_stream
        warning_stream = stream

    def get_warning_stream(self):
        return warning_stream

    def toggle_debugger(self, event=None):
        """Open or close the debugger; refuse while code is executing."""
        if self.executing:
            tkMessageBox.showerror("Don't debug now",
                "You can only toggle the debugger when idle",
                master=self.text)
            self.set_debugger_indicator()
            return "break"
        else:
            db = self.interp.getdebugger()
            if db:
                self.close_debugger()
            else:
                self.open_debugger()

    def set_debugger_indicator(self):
        # Reflect debugger state in the menu checkbutton variable.
        db = self.interp.getdebugger()
        self.setvar("<<toggle-debugger>>", not not db)

    def toggle_jit_stack_viewer(self, event=None):
        pass # All we need is the variable

    def close_debugger(self):
        db = self.interp.getdebugger()
        if db:
            self.interp.setdebugger(None)
            db.close()
            if self.interp.rpcclt:
                RemoteDebugger.close_remote_debugger(self.interp.rpcclt)
            self.resetoutput()
            self.console.write("[DEBUG OFF]\n")
            sys.ps1 = ">>> "
            self.showprompt()
        self.set_debugger_indicator()

    def open_debugger(self):
        # Use the RPC debugger when a subprocess is attached, otherwise
        # the in-process one.
        if self.interp.rpcclt:
            dbg_gui = RemoteDebugger.start_remote_debugger(self.interp.rpcclt,
                                                           self)
        else:
            dbg_gui = Debugger.Debugger(self)
        self.interp.setdebugger(dbg_gui)
        dbg_gui.load_breakpoints()
        sys.ps1 = "[DEBUG ON]\n>>> "
        self.showprompt()
        self.set_debugger_indicator()

    def beginexecuting(self):
        "Helper for ModifiedInterpreter"
        self.resetoutput()
        self.executing = 1

    def endexecuting(self):
        "Helper for ModifiedInterpreter"
        self.executing = 0
        self.canceled = 0
        self.showprompt()

    def close(self):
        "Extend EditorWindow.close()"
        if self.executing:
            response = tkMessageBox.askokcancel(
                "Kill?",
                "The program is still running!\n Do you want to kill it?",
                default="ok",
                parent=self.text)
            if response is False:
                return "cancel"
        self.stop_readline()
        self.canceled = True
        self.closing = True
        return EditorWindow.close(self)

    def _close(self):
        "Extend EditorWindow._close(), shut down debugger and execution server"
        self.close_debugger()
        if use_subprocess:
            self.interp.kill_subprocess()
        # Restore std streams
        sys.stdout = self.save_stdout
        sys.stderr = self.save_stderr
        sys.stdin = self.save_stdin
        # Break cycles
        self.interp = None
        self.console = None
        self.flist.pyshell = None
        self.history = None
        EditorWindow._close(self)

    def ispythonsource(self, filename):
        "Override EditorWindow method: never remove the colorizer"
        return True

    def short_title(self):
        return self.shell_title

    COPYRIGHT = \
        'Type "copyright", "credits" or "license()" for more information.'

    def begin(self):
        """Start the shell: launch the subprocess and print the banner."""
        self.resetoutput()
        if use_subprocess:
            nosub = ''
            client = self.interp.start_subprocess()
            if not client:
                self.close()
                return False
        else:
            nosub = "==== No Subprocess ===="
        self.write("Python %s on %s\n%s\n%s" %
                   (sys.version, sys.platform, self.COPYRIGHT, nosub))
        self.showprompt()
        import Tkinter
        Tkinter._default_root = None # 03Jan04 KBK What's this?
        return True

    def stop_readline(self):
        if not self.reading:  # no nested mainloop to exit.
            return
        self._stop_readline_flag = True
        self.top.quit()

    def readline(self):
        """Block in a nested mainloop until a line of input is available."""
        save = self.reading
        try:
            self.reading = 1
            self.top.mainloop()  # nested mainloop()
        finally:
            self.reading = save
        if self._stop_readline_flag:
            self._stop_readline_flag = False
            return ""
        line = self.text.get("iomark", "end-1c")
        if len(line) == 0:  # may be EOF if we quit our mainloop with Ctrl-C
            line = "\n"
        if isinstance(line, unicode):
            from idlelib import IOBinding
            try:
                line = line.encode(IOBinding.encoding)
            except UnicodeError:
                pass
        self.resetoutput()
        if self.canceled:
            self.canceled = 0
            if not use_subprocess:
                raise KeyboardInterrupt
        if self.endoffile:
            self.endoffile = 0
            line = ""
        return line

    def isatty(self):
        return True

    def cancel_callback(self, event=None):
        """Handle Ctrl-C: interrupt running code or cancel pending input."""
        try:
            if self.text.compare("sel.first", "!=", "sel.last"):
                return # Active selection -- always use default binding
        except:
            pass
        if not (self.executing or self.reading):
            self.resetoutput()
            self.interp.write("KeyboardInterrupt\n")
            self.showprompt()
            return "break"
        self.endoffile = 0
        self.canceled = 1
        if (self.executing and self.interp.rpcclt):
            if self.interp.getdebugger():
                self.interp.restart_subprocess()
            else:
                self.interp.interrupt_subprocess()
        if self.reading:
            self.top.quit()  # exit the nested mainloop() in readline()
        return "break"

    def eof_callback(self, event):
        """Handle Ctrl-D/EOF: close the shell, or end a pending read."""
        if self.executing and not self.reading:
            return # Let the default binding (delete next char) take over
        if not (self.text.compare("iomark", "==", "insert") and
                self.text.compare("insert", "==", "end-1c")):
            return # Let the default binding (delete next char) take over
        if not self.executing:
            self.resetoutput()
            self.close()
        else:
            self.canceled = 0
            self.endoffile = 1
            self.top.quit()
        return "break"

    def linefeed_callback(self, event):
        # Insert a linefeed without entering anything (still autoindented)
        if self.reading:
            self.text.insert("insert", "\n")
            self.text.see("insert")
        else:
            self.newline_and_indent_event(event)
        return "break"

    def enter_callback(self, event):
        """Handle Return: recall old input, or submit the current input."""
        if self.executing and not self.reading:
            return # Let the default binding (insert '\n') take over
        # If some text is selected, recall the selection
        # (but only if this before the I/O mark)
        try:
            sel = self.text.get("sel.first", "sel.last")
            if sel:
                if self.text.compare("sel.last", "<=", "iomark"):
                    self.recall(sel, event)
                    return "break"
        except:
            pass
        # If we're strictly before the line containing iomark, recall
        # the current line, less a leading prompt, less leading or
        # trailing whitespace
        if self.text.compare("insert", "<", "iomark linestart"):
            # Check if there's a relevant stdin range -- if so, use it
            prev = self.text.tag_prevrange("stdin", "insert")
            if prev and self.text.compare("insert", "<", prev[1]):
                self.recall(self.text.get(prev[0], prev[1]), event)
                return "break"
            next = self.text.tag_nextrange("stdin", "insert")
            if next and self.text.compare("insert lineend", ">=", next[0]):
                self.recall(self.text.get(next[0], next[1]), event)
                return "break"
            # No stdin mark -- just get the current line, less any prompt
            indices = self.text.tag_nextrange("console", "insert linestart")
            if indices and \
               self.text.compare(indices[0], "<=", "insert linestart"):
                self.recall(self.text.get(indices[1], "insert lineend"), event)
            else:
                self.recall(self.text.get("insert linestart", "insert lineend"), event)
            return "break"
        # If we're between the beginning of the line and the iomark, i.e.
        # in the prompt area, move to the end of the prompt
        if self.text.compare("insert", "<", "iomark"):
            self.text.mark_set("insert", "iomark")
        # If we're in the current input and there's only whitespace
        # beyond the cursor, erase that whitespace first
        s = self.text.get("insert", "end-1c")
        if s and not s.strip():
            self.text.delete("insert", "end-1c")
        # If we're in the current input before its last line,
        # insert a newline right at the insert point
        if self.text.compare("insert", "<", "end-1c linestart"):
            self.newline_and_indent_event(event)
            return "break"
        # We're in the last line; append a newline and submit it
        self.text.mark_set("insert", "end-1c")
        if self.reading:
            self.text.insert("insert", "\n")
            self.text.see("insert")
        else:
            self.newline_and_indent_event(event)
        self.text.tag_add("stdin", "iomark", "end-1c")
        self.text.update_idletasks()
        if self.reading:
            self.top.quit() # Break out of recursive mainloop() in raw_input()
        else:
            self.runit()
        return "break"

    def recall(self, s, event):
        """Copy old input *s* to the input area, re-indenting it to fit."""
        # remove leading and trailing empty or whitespace lines
        s = re.sub(r'^\s*\n', '' , s)
        s = re.sub(r'\n\s*$', '', s)
        lines = s.split('\n')
        self.text.undo_block_start()
        try:
            self.text.tag_remove("sel", "1.0", "end")
            self.text.mark_set("insert", "end-1c")
            prefix = self.text.get("insert linestart", "insert")
            if prefix.rstrip().endswith(':'):
                self.newline_and_indent_event(event)
                prefix = self.text.get("insert linestart", "insert")
            self.text.insert("insert", lines[0].strip())
            if len(lines) > 1:
                orig_base_indent = re.search(r'^([ \t]*)', lines[0]).group(0)
                new_base_indent = re.search(r'^([ \t]*)', prefix).group(0)
                for line in lines[1:]:
                    if line.startswith(orig_base_indent):
                        # replace orig base indentation with new indentation
                        line = new_base_indent + line[len(orig_base_indent):]
                    self.text.insert('insert', '\n'+line.rstrip())
        finally:
            self.text.see("insert")
            self.text.undo_block_stop()

    def runit(self):
        """Send the current input region to the interpreter."""
        line = self.text.get("iomark", "end-1c")
        # Strip off last newline and surrounding whitespace.
        # (To allow you to hit return twice to end a statement.)
        i = len(line)
        while i > 0 and line[i-1] in " \t":
            i = i-1
        if i > 0 and line[i-1] == "\n":
            i = i-1
        while i > 0 and line[i-1] in " \t":
            i = i-1
        line = line[:i]
        self.interp.runsource(line)

    def open_stack_viewer(self, event=None):
        if self.interp.rpcclt:
            return self.interp.remote_stack_viewer()
        try:
            sys.last_traceback
        except:
            tkMessageBox.showerror("No stack trace",
                "There is no stack trace yet.\n"
                "(sys.last_traceback is not defined)",
                master=self.text)
            return
        from idlelib.StackViewer import StackBrowser
        StackBrowser(self.root, self.flist)

    def view_restart_mark(self, event=None):
        self.text.see("iomark")
        self.text.see("restart")

    def restart_shell(self, event=None):
        "Callback for Run/Restart Shell Cntl-F6"
        self.interp.restart_subprocess(with_cwd=True)

    def showprompt(self):
        self.resetoutput()
        try:
            s = str(sys.ps1)
        except:
            s = ""
        self.console.write(s)
        self.text.mark_set("insert", "end-1c")
        self.set_line_and_column()
        self.io.reset_undo()

    def resetoutput(self):
        """Store the last input in history and advance the I/O mark."""
        source = self.text.get("iomark", "end-1c")
        if self.history:
            self.history.store(source)
        if self.text.get("end-2c") != "\n":
            self.text.insert("end-1c", "\n")
        self.text.mark_set("iomark", "end-1c")
        self.set_line_and_column()
        sys.stdout.softspace = 0

    def write(self, s, tags=()):
        """Write *s* at the I/O mark; raise KeyboardInterrupt if canceled."""
        try:
            # Right gravity makes the mark follow the inserted text.
            self.text.mark_gravity("iomark", "right")
            OutputWindow.write(self, s, tags, "iomark")
            self.text.mark_gravity("iomark", "left")
        except:
            pass
        if self.canceled:
            self.canceled = 0
            if not use_subprocess:
                raise KeyboardInterrupt

    def rmenu_check_cut(self):
        # Cutting text written before the I/O mark would corrupt history.
        try:
            if self.text.compare('sel.first', '<', 'iomark'):
                return 'disabled'
        except TclError: # no selection, so the index 'sel.first' doesn't exist
            return 'disabled'
        return super(PyShell, self).rmenu_check_cut()

    def rmenu_check_paste(self):
        if self.text.compare('insert', '<', 'iomark'):
            return 'disabled'
        return super(PyShell, self).rmenu_check_paste()
class PseudoFile(io.TextIOBase):
    """Minimal file-like base class for streams proxied to the shell.

    *shell* is the owning shell window, *tags* the stream name (used
    both for tagging text and for the reported ``name``), *encoding*
    the nominal text encoding.
    """

    def __init__(self, shell, tags, encoding=None):
        self.softspace = 0
        self.shell = shell
        self.tags = tags
        self._encoding = encoding

    def isatty(self):
        # The shell behaves like an interactive terminal.
        return True

    @property
    def encoding(self):
        return self._encoding

    @property
    def name(self):
        return '<%s>' % self.tags
class PseudoOutputFile(PseudoFile):
    """Writable pseudo-file that forwards text to the shell window."""

    def writable(self):
        return True

    def write(self, s):
        """Write *s* to the shell, tagged with this stream's tags."""
        if self.closed:
            raise ValueError("write to closed file")
        # Coerce instances of str/unicode/bytearray *subclasses* to plain
        # base-type values by calling the base class's own methods,
        # bypassing any overridden behavior. See issue #19481.
        if type(s) not in (unicode, str, bytearray):
            # See issue #19481
            if isinstance(s, unicode):
                s = unicode.__getitem__(s, slice(None))
            elif isinstance(s, str):
                s = str.__str__(s)
            elif isinstance(s, bytearray):
                s = bytearray.__str__(s)
            else:
                raise TypeError('must be string, not ' + type(s).__name__)
        return self.shell.write(s, self.tags)
class PseudoInputFile(PseudoFile):
    """Readable pseudo-file that pulls lines of input from the shell.

    Input arrives one line at a time from ``shell.readline()``; any
    characters read past a requested size are buffered for later calls.
    """

    def __init__(self, shell, tags, encoding=None):
        PseudoFile.__init__(self, shell, tags, encoding)
        self._line_buffer = ''

    def readable(self):
        return True

    def read(self, size=-1):
        """Read up to *size* characters (all available input if size < 0)."""
        if self.closed:
            raise ValueError("read from closed file")
        if size is None:
            size = -1
        elif not isinstance(size, int):
            raise TypeError('must be int, not ' + type(size).__name__)
        data = self._line_buffer
        self._line_buffer = ''
        if size < 0:
            # Unbounded read: drain the shell until it reports EOF.
            while True:
                piece = self.shell.readline()
                if not piece:
                    break
                data += piece
            return data
        # Bounded read: stop once enough characters have accumulated,
        # keeping the surplus for the next call.
        while len(data) < size:
            piece = self.shell.readline()
            if not piece:
                break
            data += piece
        self._line_buffer = data[size:]
        return data[:size]

    def readline(self, size=-1):
        """Read one line, or at most *size* characters of it."""
        if self.closed:
            raise ValueError("read from closed file")
        if size is None:
            size = -1
        elif not isinstance(size, int):
            raise TypeError('must be int, not ' + type(size).__name__)
        pending = self._line_buffer or self.shell.readline()
        if size < 0:
            size = len(pending)
        newline_at = pending.find('\n', 0, size)
        if newline_at >= 0:
            # Return through the newline, but nothing past it.
            size = newline_at + 1
        self._line_buffer = pending[size:]
        return pending[:size]

    def close(self):
        """Closing stdin closes the whole shell window."""
        self.shell.close()
usage_msg = """\
USAGE: idle [-deins] [-t title] [file]*
idle [-dns] [-t title] (-c cmd | -r file) [arg]*
idle [-dns] [-t title] - [arg]*
-h print this help message and exit
-n run IDLE without a subprocess (see Help/IDLE Help for details)
The following options will override the IDLE 'settings' configuration:
-e open an edit window
-i open a shell window
The following options imply -i and will open a shell:
-c cmd run the command in a shell, or
-r file run script from file
-d enable the debugger
-s run $IDLESTARTUP or $PYTHONSTARTUP before anything else
-t title set title of shell window
A default edit window will be bypassed when -c, -r, or - are used.
[arg]* are passed to the command (-c) or script (-r) in sys.argv[1:].
Examples:
idle
Open an edit window or shell depending on IDLE's configuration.
idle foo.py foobar.py
Edit the files, also open a shell if configured to start with shell.
idle -est "Baz" foo.py
Run $IDLESTARTUP or $PYTHONSTARTUP, edit foo.py, and open a shell
window with the title "Baz".
idle -c "import sys; print sys.argv" "foo"
Open a shell window and run the command, passing "-c" in sys.argv[0]
and "foo" in sys.argv[1].
idle -d -s -r foo.py "Hello World"
Open a shell window, run a startup script, enable the debugger, and
run foo.py, passing "foo.py" in sys.argv[0] and "Hello World" in
sys.argv[1].
echo "import sys; print sys.argv" | idle - "foobar"
Open a shell window, run the script piped in, passing '' in sys.argv[0]
and "foobar" in sys.argv[1].
"""
def main():
    """Parse the IDLE command line and run the shell/editor main loop."""
    global flist, root, use_subprocess

    capture_warnings(True)
    use_subprocess = True
    enable_shell = False
    enable_edit = False
    debug = False
    cmd = None
    script = None
    startup = False
    try:
        opts, args = getopt.getopt(sys.argv[1:], "c:deihnr:st:")
    except getopt.error as msg:
        print("Error: %s\n%s" % (msg, usage_msg), file=sys.stderr)
        sys.exit(2)
    for o, a in opts:
        if o == '-c':
            cmd = a
            enable_shell = True
        if o == '-d':
            debug = True
            enable_shell = True
        if o == '-e':
            enable_edit = True
        if o == '-h':
            sys.stdout.write(usage_msg)
            sys.exit()
        if o == '-i':
            enable_shell = True
        if o == '-n':
            use_subprocess = False
        if o == '-r':
            script = a
            if os.path.isfile(script):
                pass
            else:
                print("No script file: ", script, file=sys.stderr)
                sys.exit()
            enable_shell = True
        if o == '-s':
            startup = True
            enable_shell = True
        if o == '-t':
            PyShell.shell_title = a
            enable_shell = True
    if args and args[0] == '-':
        # "-" means: run a script piped in on stdin.
        cmd = sys.stdin.read()
        enable_shell = True
    # process sys.argv and sys.path:
    for i in range(len(sys.path)):
        sys.path[i] = os.path.abspath(sys.path[i])
    if args and args[0] == '-':
        sys.argv = [''] + args[1:]
    elif cmd:
        sys.argv = ['-c'] + args
    elif script:
        sys.argv = [script] + args
    elif args:
        enable_edit = True
        # Prepend the directories of the files to edit to sys.path.
        pathx = []
        for filename in args:
            pathx.append(os.path.dirname(filename))
        for dir in pathx:
            dir = os.path.abspath(dir)
            if dir not in sys.path:
                sys.path.insert(0, dir)
    else:
        dir = os.getcwd()
        if not dir in sys.path:
            sys.path.insert(0, dir)
    # check the IDLE settings configuration (but command line overrides)
    edit_start = idleConf.GetOption('main', 'General',
                                    'editor-on-startup', type='bool')
    enable_edit = enable_edit or edit_start
    enable_shell = enable_shell or not enable_edit
    # start editor and/or shell windows:
    root = Tk(className="Idle")

    # set application icon
    icondir = os.path.join(os.path.dirname(__file__), 'Icons')
    if system() == 'Windows':
        iconfile = os.path.join(icondir, 'idle.ico')
        root.wm_iconbitmap(default=iconfile)
    elif TkVersion >= 8.5:
        ext = '.png' if TkVersion >= 8.6 else '.gif'
        iconfiles = [os.path.join(icondir, 'idle_%d%s' % (size, ext))
                     for size in (16, 32, 48)]
        icons = [PhotoImage(file=iconfile) for iconfile in iconfiles]
        root.tk.call('wm', 'iconphoto', str(root), "-default", *icons)

    fixwordbreaks(root)
    root.withdraw()
    flist = PyShellFileList(root)
    macosxSupport.setupApp(root, flist)

    if enable_edit:
        if not (cmd or script):
            for filename in args[:]:
                if flist.open(filename) is None:
                    # filename is a directory actually, disconsider it
                    args.remove(filename)
            if not args:
                flist.new()

    if enable_shell:
        shell = flist.open_shell()
        if not shell:
            return # couldn't open shell
        if macosxSupport.isAquaTk() and flist.dict:
            # On OSX: when the user has double-clicked on a file that causes
            # IDLE to be launched the shell window will open just in front of
            # the file she wants to see. Lower the interpreter window when
            # there are open files.
            shell.top.lower()
    else:
        shell = flist.pyshell

    # Handle remaining options. If any of these are set, enable_shell
    # was set also, so shell must be true to reach here.
    if debug:
        shell.open_debugger()
    if startup:
        filename = os.environ.get("IDLESTARTUP") or \
                   os.environ.get("PYTHONSTARTUP")
        if filename and os.path.isfile(filename):
            shell.interp.execfile(filename)
    if cmd or script:
        # Install the user-visible sys.argv inside the interpreter
        # before running the command or script.
        shell.interp.runcommand("""if 1:
            import sys as _sys
            _sys.argv = %r
            del _sys
            \n""" % (sys.argv,))
        if cmd:
            shell.interp.execsource(cmd)
        elif script:
            shell.interp.prepend_syspath(script)
            shell.interp.execfile(script)
    elif shell:
        # If there is a shell window and no cmd or script in progress,
        # check for problematic OS X Tk versions and print a warning
        # message in the IDLE shell window; this is less intrusive
        # than always opening a separate window.
        tkversionwarning = macosxSupport.tkVersionWarning(root)
        if tkversionwarning:
            shell.interp.runcommand("print('%s')" % tkversionwarning)

    while flist.inversedict:  # keep IDLE running while files are open.
        root.mainloop()
    root.destroy()
    capture_warnings(False)
if __name__ == "__main__":
sys.modules['PyShell'] = sys.modules['__main__']
main()
capture_warnings(False) # Make sure turned off; see issue 18081
| apache-2.0 |
tequa/ammisoft | ammimain/WinPython-64bit-2.7.13.1Zero/python-2.7.13.amd64/Lib/site-packages/matplotlib/offsetbox.py | 10 | 54984 | """
The OffsetBox is a simple container artist. Its child artists are meant
to be drawn at a position relative to their parent. The [VH]Packer,
DrawingArea and TextArea are derived from the OffsetBox.

The [VH]Packer automatically adjusts the relative positions of its
children, which should be instances of the OffsetBox. This is used to
align similar artists together, e.g., in legend.

The DrawingArea can contain any Artist as a child. It has a fixed
width and height. The position of children relative to the parent is
fixed. The TextArea contains a single Text instance. The width and
height of the TextArea instance are the width and height of its child text.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from six.moves import xrange, zip
import warnings
import matplotlib.transforms as mtransforms
import matplotlib.artist as martist
import matplotlib.text as mtext
import matplotlib.path as mpath
import numpy as np
from matplotlib.transforms import Bbox, BboxBase, TransformedBbox
from matplotlib.font_manager import FontProperties
from matplotlib.patches import FancyBboxPatch, FancyArrowPatch
from matplotlib import rcParams
from matplotlib import docstring
#from bboximage import BboxImage
from matplotlib.image import BboxImage
from matplotlib.patches import bbox_artist as mbbox_artist
from matplotlib.text import _AnnotationBase
DEBUG = False


# Debugging aid: draw the bounding boxes of offset boxes, but only
# when the module-level DEBUG flag is enabled.
def bbox_artist(*args, **kwargs):
    """Forward to :func:`matplotlib.patches.bbox_artist` iff DEBUG is set."""
    if not DEBUG:
        return
    mbbox_artist(*args, **kwargs)
# _get_packed_offsets() and _get_aligned_offsets() are coded assuming
# that we are packing boxes horizontally. But same function will be
# used with vertical packing.
def _get_packed_offsets(wd_list, total, sep, mode="fixed"):
"""
Geiven a list of (width, xdescent) of each boxes, calculate the
total width and the x-offset positions of each items according to
*mode*. xdescent is analagous to the usual descent, but along the
x-direction. xdescent values are currently ignored.
*wd_list* : list of (width, xdescent) of boxes to be packed.
*sep* : spacing between boxes
*total* : Intended total length. None if not used.
*mode* : packing mode. 'fixed', 'expand', or 'equal'.
"""
w_list, d_list = list(zip(*wd_list))
# d_list is currently not used.
if mode == "fixed":
offsets_ = np.add.accumulate([0] + [w + sep for w in w_list])
offsets = offsets_[:-1]
if total is None:
total = offsets_[-1] - sep
return total, offsets
elif mode == "expand":
if len(w_list) > 1:
sep = (total - sum(w_list)) / (len(w_list) - 1.)
else:
sep = 0.
offsets_ = np.add.accumulate([0] + [w + sep for w in w_list])
offsets = offsets_[:-1]
return total, offsets
elif mode == "equal":
maxh = max(w_list)
if total is None:
total = (maxh + sep) * len(w_list)
else:
sep = float(total) / (len(w_list)) - maxh
offsets = np.array([(maxh + sep) * i for i in range(len(w_list))])
return total, offsets
else:
raise ValueError("Unknown mode : %s" % (mode,))
def _get_aligned_offsets(hd_list, height, align="baseline"):
"""
Given a list of (height, descent) of each boxes, align the boxes
with *align* and calculate the y-offsets of each boxes.
total width and the offset positions of each items according to
*mode*. xdescent is analogous to the usual descent, but along the
x-direction. xdescent values are currently ignored.
*hd_list* : list of (width, xdescent) of boxes to be aligned.
*sep* : spacing between boxes
*height* : Intended total length. None if not used.
*align* : align mode. 'baseline', 'top', 'bottom', or 'center'.
"""
if height is None:
height = max([h for h, d in hd_list])
if align == "baseline":
height_descent = max([h - d for h, d in hd_list])
descent = max([d for h, d in hd_list])
height = height_descent + descent
offsets = [0. for h, d in hd_list]
elif align in ["left", "top"]:
descent = 0.
offsets = [d for h, d in hd_list]
elif align in ["right", "bottom"]:
descent = 0.
offsets = [height - h + d for h, d in hd_list]
elif align == "center":
descent = 0.
offsets = [(height - h) * .5 + d for h, d in hd_list]
else:
raise ValueError("Unknown Align mode : %s" % (align,))
return height, descent, offsets
class OffsetBox(martist.Artist):
    """
    The OffsetBox is a simple container artist.  Its child artists are
    drawn at a position relative to the parent.
    """
    def __init__(self, *args, **kwargs):
        super(OffsetBox, self).__init__(*args, **kwargs)

        # Clipping has not been implemented in the OffesetBox family, so
        # disable the clip flag for consistency. It can always be turned back
        # on to zero effect.
        self.set_clip_on(False)

        self._children = []
        self._offset = (0, 0)

    def __getstate__(self):
        state = martist.Artist.__getstate__(self)

        # pickle cannot save instancemethods, so handle them here
        from .cbook import _InstanceMethodPickler
        import inspect

        offset = state['_offset']
        if inspect.ismethod(offset):
            state['_offset'] = _InstanceMethodPickler(offset)
        return state

    def __setstate__(self, state):
        self.__dict__ = state
        # Undo the _InstanceMethodPickler wrapping done in __getstate__.
        from .cbook import _InstanceMethodPickler
        if isinstance(self._offset, _InstanceMethodPickler):
            self._offset = self._offset.get_instancemethod()
        self.stale = True

    def set_figure(self, fig):
        """
        Set the figure, propagating it to all children.

        accepts a class:`~matplotlib.figure.Figure` instance
        """
        martist.Artist.set_figure(self, fig)
        for c in self.get_children():
            c.set_figure(fig)

    @martist.Artist.axes.setter
    def axes(self, ax):
        # TODO deal with this better
        martist.Artist.axes.fset(self, ax)
        for c in self.get_children():
            if c is not None:
                c.axes = ax

    def contains(self, mouseevent):
        # Hit-test children in order; report the first hit.
        for c in self.get_children():
            a, b = c.contains(mouseevent)
            if a:
                return a, b
        return False, {}

    def set_offset(self, xy):
        """
        Set the offset.

        accepts x, y, tuple, or a callable object.
        """
        self._offset = xy
        self.stale = True

    def get_offset(self, width, height, xdescent, ydescent, renderer):
        """
        Get the offset.

        accepts extent of the box
        """
        # A callable offset is evaluated lazily with the box extent.
        if six.callable(self._offset):
            return self._offset(width, height, xdescent, ydescent, renderer)
        else:
            return self._offset

    def set_width(self, width):
        """
        Set the width.

        accepts float
        """
        self.width = width
        self.stale = True

    def set_height(self, height):
        """
        Set the height.

        accepts float
        """
        self.height = height
        self.stale = True

    def get_visible_children(self):
        """
        Return a list of visible artists it contains.
        """
        return [c for c in self._children if c.get_visible()]

    def get_children(self):
        """
        Return a list of artists it contains.
        """
        return self._children

    def get_extent_offsets(self, renderer):
        # Abstract: concrete containers must override this.
        raise Exception("")

    def get_extent(self, renderer):
        """
        Return width, height, xdescent, ydescent of box.
        """
        w, h, xd, yd, offsets = self.get_extent_offsets(renderer)
        return w, h, xd, yd

    def get_window_extent(self, renderer):
        '''
        get the bounding box in display space.
        '''
        w, h, xd, yd, offsets = self.get_extent_offsets(renderer)
        px, py = self.get_offset(w, h, xd, yd, renderer)
        return mtransforms.Bbox.from_bounds(px - xd, py - yd, w, h)

    def draw(self, renderer):
        """
        Update the location of children if necessary and draw them
        to the given *renderer*.
        """
        width, height, xdescent, ydescent, offsets = self.get_extent_offsets(
            renderer)

        px, py = self.get_offset(width, height, xdescent, ydescent, renderer)

        for c, (ox, oy) in zip(self.get_visible_children(), offsets):
            c.set_offset((px + ox, py + oy))
            c.draw(renderer)

        bbox_artist(self, renderer, fill=False, props=dict(pad=0.))
        self.stale = False
class PackerBase(OffsetBox):
    """Shared constructor for the horizontal and vertical packers."""

    def __init__(self, pad=None, sep=None, width=None, height=None,
                 align=None, mode=None,
                 children=None):
        """
        Parameters
        ----------
        pad : float, optional
            Boundary pad.

        sep : float, optional
            Spacing between items.

        width : float, optional

        height : float, optional
            Width and height of the container box; calculated when
            `None`.

        align : str, optional
            Alignment of boxes.  Can be one of ``top``, ``bottom``,
            ``left``, ``right``, ``center`` and ``baseline``.

        mode : str, optional
            Packing mode.

        Notes
        -----
        *pad* and *sep* are given in points and scale with the renderer
        dpi, while *width* and *height* are in pixels.
        """
        super(PackerBase, self).__init__()

        # Stash the layout parameters; the concrete packers consume
        # them in get_extent_offsets() at draw time.
        self.pad = pad
        self.sep = sep
        self.mode = mode
        self.align = align
        self.width = width
        self.height = height
        self._children = children
class VPacker(PackerBase):
    """
    The VPacker has its children packed vertically. It automatically
    adjusts the relative positions of children at draw time.
    """
    def __init__(self, pad=None, sep=None, width=None, height=None,
                 align="baseline", mode="fixed",
                 children=None):
        """
        Parameters
        ----------
        pad : float, optional
            Boundary pad.

        sep : float, optional
            Spacing between items.

        width : float, optional

        height : float, optional
            Width and height of the container box; calculated when
            `None`.

        align : str, optional
            Alignment of boxes.

        mode : str, optional
            Packing mode.

        Notes
        -----
        *pad* and *sep* are given in points and scale with the renderer
        dpi, while *width* and *height* are in pixels.
        """
        super(VPacker, self).__init__(pad, sep, width, height,
                                      align, mode,
                                      children)

    def get_extent_offsets(self, renderer):
        """
        Update the offsets of the children and return the extents of the box.
        """
        # pad and sep are in points; convert to pixels for this renderer.
        dpicor = renderer.points_to_pixels(1.)
        pad = self.pad * dpicor
        sep = self.sep * dpicor

        if self.width is not None:
            # Propagate our fixed width to children that expand.
            for c in self.get_visible_children():
                if isinstance(c, PackerBase) and c.mode == "expand":
                    c.set_width(self.width)

        whd_list = [c.get_extent(renderer)
                    for c in self.get_visible_children()]
        # Convert ydescent to "ascent" (h - yd) for vertical packing.
        whd_list = [(w, h, xd, (h - yd)) for w, h, xd, yd in whd_list]

        wd_list = [(w, xd) for w, h, xd, yd in whd_list]
        width, xdescent, xoffsets = _get_aligned_offsets(wd_list,
                                                         self.width,
                                                         self.align)

        pack_list = [(h, yd) for w, h, xd, yd in whd_list]
        height, yoffsets_ = _get_packed_offsets(pack_list, self.height,
                                                sep, self.mode)

        # yoffsets_ is an ndarray, so adding the list is elementwise.
        yoffsets = yoffsets_ + [yd for w, h, xd, yd in whd_list]
        # NOTE(review): unlike HPacker, there is no guard for an empty
        # children list here; yoffsets[0] would raise IndexError.
        ydescent = height - yoffsets[0]
        # Flip from top-down packing order to y-up coordinates.
        yoffsets = height - yoffsets

        #w, h, xd, h_yd = whd_list[-1]
        yoffsets = yoffsets - ydescent

        return width + 2 * pad, height + 2 * pad, \
               xdescent + pad, ydescent + pad, \
               list(zip(xoffsets, yoffsets))
class HPacker(PackerBase):
    """
    The HPacker has its children packed horizontally. It automatically
    adjusts the relative positions of children at draw time.
    """
    def __init__(self, pad=None, sep=None, width=None, height=None,
                 align="baseline", mode="fixed",
                 children=None):
        """
        Parameters
        ----------
        pad : float, optional
            Boundary pad.

        sep : float, optional
            Spacing between items.

        width : float, optional

        height : float, optional
            Width and height of the container box; calculated when
            `None`.

        align : str
            Alignment of boxes.

        mode : str
            Packing mode.

        Notes
        -----
        *pad* and *sep* are given in points and scale with the renderer
        dpi, while *width* and *height* are in pixels.
        """
        super(HPacker, self).__init__(pad, sep, width, height,
                                      align, mode, children)

    def get_extent_offsets(self, renderer):
        """
        Update the offsets of the children and return the extents of the box.
        """
        # pad and sep are in points; convert to pixels for this renderer.
        dpicor = renderer.points_to_pixels(1.)
        pad = self.pad * dpicor
        sep = self.sep * dpicor

        whd_list = [c.get_extent(renderer)
                    for c in self.get_visible_children()]

        if not whd_list:
            # No visible children: just the padding.
            return 2 * pad, 2 * pad, pad, pad, []

        # NOTE(review): this height is immediately recomputed by
        # _get_aligned_offsets() below; the branch only matters through
        # its side-effect-free local value.
        if self.height is None:
            height_descent = max([h - yd for w, h, xd, yd in whd_list])
            ydescent = max([yd for w, h, xd, yd in whd_list])
            height = height_descent + ydescent
        else:
            height = self.height - 2 * pad  # width w/o pad

        hd_list = [(h, yd) for w, h, xd, yd in whd_list]
        height, ydescent, yoffsets = _get_aligned_offsets(hd_list,
                                                          self.height,
                                                          self.align)

        pack_list = [(w, xd) for w, h, xd, yd in whd_list]
        width, xoffsets_ = _get_packed_offsets(pack_list, self.width,
                                               sep, self.mode)

        # xoffsets_ is an ndarray, so adding the list is elementwise.
        xoffsets = xoffsets_ + [xd for w, h, xd, yd in whd_list]

        # Shift so the first child's xdescent becomes the box's xdescent.
        xdescent = whd_list[0][2]
        xoffsets = xoffsets - xdescent

        return width + 2 * pad, height + 2 * pad, \
               xdescent + pad, ydescent + pad, \
               list(zip(xoffsets, yoffsets))
class PaddedBox(OffsetBox):
    """Decorate a single child OffsetBox with padding and an optional
    frame (a FancyBboxPatch drawn behind the child)."""

    def __init__(self, child, pad=None, draw_frame=False, patch_attrs=None):
        """
        *pad* : boundary pad

        .. note::
          *pad* is given in points and scales with the renderer dpi,
          while *width* and *height* are in pixels.
        """
        super(PaddedBox, self).__init__()

        self.pad = pad
        self._children = [child]

        # Frame patch; its bounds are updated in update_frame().
        self.patch = FancyBboxPatch(
            xy=(0.0, 0.0), width=1., height=1.,
            facecolor='w', edgecolor='k',
            mutation_scale=1,  # self.prop.get_size_in_points(),
            snap=True
            )

        self.patch.set_boxstyle("square", pad=0)

        if patch_attrs is not None:
            self.patch.update(patch_attrs)

        self._drawFrame = draw_frame

    def get_extent_offsets(self, renderer):
        """
        Update the offset of the child and return the extents of the box.
        """
        # pad is in points; convert to pixels for this renderer.
        dpicor = renderer.points_to_pixels(1.)
        pad = self.pad * dpicor

        w, h, xd, yd = self._children[0].get_extent(renderer)

        return w + 2 * pad, h + 2 * pad, \
               xd + pad, yd + pad, \
               [(0, 0)]

    def draw(self, renderer):
        """
        Update the location of children if necessary and draw them
        to the given *renderer*.
        """
        width, height, xdescent, ydescent, offsets = self.get_extent_offsets(
            renderer)

        px, py = self.get_offset(width, height, xdescent, ydescent, renderer)

        for c, (ox, oy) in zip(self.get_visible_children(), offsets):
            c.set_offset((px + ox, py + oy))

        # Draw the frame first so children appear on top of it.
        self.draw_frame(renderer)

        for c in self.get_visible_children():
            c.draw(renderer)

        #bbox_artist(self, renderer, fill=False, props=dict(pad=0.))
        self.stale = False

    def update_frame(self, bbox, fontsize=None):
        self.patch.set_bounds(bbox.x0, bbox.y0,
                              bbox.width, bbox.height)

        if fontsize:
            self.patch.set_mutation_scale(fontsize)
        self.stale = True

    def draw_frame(self, renderer):
        # update the location and size of the legend
        bbox = self.get_window_extent(renderer)
        self.update_frame(bbox)

        if self._drawFrame:
            self.patch.draw(renderer)
class DrawingArea(OffsetBox):
    """
    The DrawingArea can contain any Artist as a child. The DrawingArea
    has a fixed width and height. The position of children relative to
    the parent is fixed. The children can be clipped at the
    boundaries of the parent.
    """
    def __init__(self, width, height, xdescent=0.,
                 ydescent=0., clip=False):
        """
        Parameters
        ----------
        width, height : float
            Width and height of the container box (scaled by the renderer
            dpi in `get_extent`).
        xdescent, ydescent : float
            Descent of the box in x- and y-direction.
        clip : bool
            Whether to clip the children to the box boundary.
        """
        super(DrawingArea, self).__init__()
        self.width = width
        self.height = height
        self.xdescent = xdescent
        self.ydescent = ydescent
        self._clip_children = clip
        # Translation in display space; updated by set_offset().
        self.offset_transform = mtransforms.Affine2D()
        self.offset_transform.clear()
        self.offset_transform.translate(0, 0)
        # Points-to-pixels scaling; refreshed from the renderer in draw().
        self.dpi_transform = mtransforms.Affine2D()
    @property
    def clip_children(self):
        """
        If the children of this DrawingArea should be clipped
        by DrawingArea bounding box.
        """
        return self._clip_children
    @clip_children.setter
    def clip_children(self, val):
        self._clip_children = bool(val)
        self.stale = True
    def get_transform(self):
        """
        Return the :class:`~matplotlib.transforms.Transform` applied
        to the children.
        """
        return self.dpi_transform + self.offset_transform
    def set_transform(self, t):
        """
        set_transform is ignored.
        """
        pass
    def set_offset(self, xy):
        """
        Set the offset of the container.

        Accept : tuple of x, y coordinates in display units.
        """
        self._offset = xy
        self.offset_transform.clear()
        self.offset_transform.translate(xy[0], xy[1])
        self.stale = True
    def get_offset(self):
        """
        Return the offset of the container.
        """
        return self._offset
    def get_window_extent(self, renderer):
        '''
        get the bounding box in display space.
        '''
        w, h, xd, yd = self.get_extent(renderer)
        ox, oy = self.get_offset()  # w, h, xd, yd)
        return mtransforms.Bbox.from_bounds(ox - xd, oy - yd, w, h)
    def get_extent(self, renderer):
        """
        Return width, height, xdescent, ydescent of the box,
        each scaled by the renderer's dpi correction.
        """
        dpi_cor = renderer.points_to_pixels(1.)
        return self.width * dpi_cor, self.height * dpi_cor, \
            self.xdescent * dpi_cor, self.ydescent * dpi_cor
    def add_artist(self, a):
        'Add any :class:`~matplotlib.artist.Artist` to the container box'
        self._children.append(a)
        # Give the child our transform unless the caller set one explicitly.
        if not a.is_transform_set():
            a.set_transform(self.get_transform())
        if self.axes is not None:
            a.axes = self.axes
        fig = self.figure
        if fig is not None:
            a.set_figure(fig)
    def draw(self, renderer):
        """
        Draw the children.
        """
        # Refresh the dpi scaling for the current renderer.
        dpi_cor = renderer.points_to_pixels(1.)
        self.dpi_transform.clear()
        self.dpi_transform.scale(dpi_cor, dpi_cor)
        # At this point the DrawingArea has a transform
        # to the display space so the path created is
        # good for clipping children
        tpath = mtransforms.TransformedPath(
            mpath.Path([[0, 0], [0, self.height],
                        [self.width, self.height],
                        [self.width, 0]]),
            self.get_transform())
        for c in self._children:
            # Only clip children that do not already have a clip set.
            if self._clip_children and not (c.clipbox or c._clippath):
                c.set_clip_path(tpath)
            c.draw(renderer)
        bbox_artist(self, renderer, fill=False, props=dict(pad=0.))
        self.stale = False
class TextArea(OffsetBox):
    """
    The TextArea contains a single Text instance. The text is
    placed at (0, 0) with baseline+left alignment. The width and height
    of the TextArea instance is the width and height of its child
    text.
    """
    def __init__(self, s,
                 textprops=None,
                 multilinebaseline=None,
                 minimumdescent=True,
                 ):
        """
        Parameters
        ----------
        s : str
            a string to be displayed.
        textprops : `~matplotlib.font_manager.FontProperties`, optional
        multilinebaseline : bool, optional
            If `True`, baseline for multiline text is adjusted so that
            it is (approximately) center-aligned with singleline
            text.
        minimumdescent : bool, optional
            If `True`, the box has a minimum descent of "p".
        """
        if textprops is None:
            textprops = {}
        # Default to baseline vertical alignment so the descent math in
        # get_extent() is meaningful.
        if "va" not in textprops:
            textprops["va"] = "baseline"
        self._text = mtext.Text(0, 0, s, **textprops)
        OffsetBox.__init__(self)
        self._children = [self._text]
        # Translation in display space; updated by set_offset().
        self.offset_transform = mtransforms.Affine2D()
        self.offset_transform.clear()
        self.offset_transform.translate(0, 0)
        # Extra shift applied for multiline-baseline alignment; recomputed
        # in get_extent().
        self._baseline_transform = mtransforms.Affine2D()
        self._text.set_transform(self.offset_transform +
                                 self._baseline_transform)
        self._multilinebaseline = multilinebaseline
        self._minimumdescent = minimumdescent
    def set_text(self, s):
        "Set the text of this area as a string."
        self._text.set_text(s)
        self.stale = True
    def get_text(self):
        "Return the string representation of this area's text."
        return self._text.get_text()
    def set_multilinebaseline(self, t):
        """
        Set multilinebaseline.

        If True, the baseline for multiline text is
        adjusted so that it is (approximately) center-aligned with
        singleline text.
        """
        self._multilinebaseline = t
        self.stale = True
    def get_multilinebaseline(self):
        """
        Get multilinebaseline.
        """
        return self._multilinebaseline
    def set_minimumdescent(self, t):
        """
        Set minimumdescent.

        If True, the extent of the single-line text is adjusted so that
        it has a minimum descent of "p".
        """
        self._minimumdescent = t
        self.stale = True
    def get_minimumdescent(self):
        """
        Get minimumdescent.
        """
        return self._minimumdescent
    def set_transform(self, t):
        """
        set_transform is ignored.
        """
        pass
    def set_offset(self, xy):
        """
        Set the offset of the container.

        Accept : tuple of x, y coordinates in display units.
        """
        self._offset = xy
        self.offset_transform.clear()
        self.offset_transform.translate(xy[0], xy[1])
        self.stale = True
    def get_offset(self):
        """
        Return the offset of the container.
        """
        return self._offset
    def get_window_extent(self, renderer):
        '''
        get the bounding box in display space.
        '''
        w, h, xd, yd = self.get_extent(renderer)
        ox, oy = self.get_offset()  # w, h, xd, yd)
        return mtransforms.Bbox.from_bounds(ox - xd, oy - yd, w, h)
    def get_extent(self, renderer):
        # Measure "lp" to get a reference single-line height/descent
        # ("l" is tall, "p" has a descender).
        clean_line, ismath = self._text.is_math_text(self._text._text)
        _, h_, d_ = renderer.get_text_width_height_descent(
            "lp", self._text._fontproperties, ismath=False)
        bbox, info, d = self._text._get_layout(renderer)
        w, h = bbox.width, bbox.height
        line = info[-1][0]  # last line
        self._baseline_transform.clear()
        if len(info) > 1 and self._multilinebaseline:
            # Shift the baseline so the multiline block is (approximately)
            # center-aligned with a single line of text.
            d_new = 0.5 * h - 0.5 * (h_ - d_)
            self._baseline_transform.translate(0, d - d_new)
            d = d_new
        else:  # single line
            h_d = max(h_ - d_, h - d)
            if self.get_minimumdescent():
                ## to have a minimum descent, #i.e., "l" and "p" have same
                ## descents.
                d = max(d, d_)
            #else:
            #    d = d
            h = h_d + d
        return w, h, 0., d
    def draw(self, renderer):
        """
        Draw the children.
        """
        self._text.draw(renderer)
        bbox_artist(self, renderer, fill=False, props=dict(pad=0.))
        self.stale = False
class AuxTransformBox(OffsetBox):
    """
    Offset Box with an aux_transform. Its children will be
    transformed with the aux_transform first, then offset. The
    absolute coordinate of the aux_transform is meaningless as it will
    be automatically adjusted so that the lower-left corner of the
    bounding box of the children is set to (0, 0) before the offset
    transform.

    It is similar to DrawingArea, except that the extent of the box
    is not predetermined but calculated from the window extent of its
    children. Furthermore, the extent of the children will be
    calculated in the transformed coordinate.
    """
    def __init__(self, aux_transform):
        self.aux_transform = aux_transform
        OffsetBox.__init__(self)
        # Translation in display space; updated by set_offset().
        self.offset_transform = mtransforms.Affine2D()
        self.offset_transform.clear()
        self.offset_transform.translate(0, 0)
        # ref_offset_transform is used to make the offset_transform
        # always reference the lower-left corner of the bbox of its
        # children.
        self.ref_offset_transform = mtransforms.Affine2D()
        self.ref_offset_transform.clear()
    def add_artist(self, a):
        'Add any :class:`~matplotlib.artist.Artist` to the container box'
        self._children.append(a)
        a.set_transform(self.get_transform())
        self.stale = True
    def get_transform(self):
        """
        Return the :class:`~matplotlib.transforms.Transform` applied
        to the children.
        """
        return self.aux_transform + \
            self.ref_offset_transform + \
            self.offset_transform
    def set_transform(self, t):
        """
        set_transform is ignored.
        """
        pass
    def set_offset(self, xy):
        """
        Set the offset of the container.

        Accept : tuple of x, y coordinates in display units.
        """
        self._offset = xy
        self.offset_transform.clear()
        self.offset_transform.translate(xy[0], xy[1])
        self.stale = True
    def get_offset(self):
        """
        Return the offset of the container.
        """
        return self._offset
    def get_window_extent(self, renderer):
        '''
        get the bounding box in display space.
        '''
        w, h, xd, yd = self.get_extent(renderer)
        ox, oy = self.get_offset()  # w, h, xd, yd)
        return mtransforms.Bbox.from_bounds(ox - xd, oy - yd, w, h)
    def get_extent(self, renderer):
        # clear the offset transforms
        _off = self.offset_transform.to_values()  # to be restored later
        self.ref_offset_transform.clear()
        self.offset_transform.clear()
        # calculate the extent of all children in the aux-transformed space
        bboxes = [c.get_window_extent(renderer) for c in self._children]
        ub = mtransforms.Bbox.union(bboxes)
        # adjust ref_offset_transform so the lower-left corner of the union
        # bbox maps to (0, 0)
        self.ref_offset_transform.translate(-ub.x0, -ub.y0)
        # restore the offset transform
        mtx = self.offset_transform.matrix_from_values(*_off)
        self.offset_transform.set_matrix(mtx)
        return ub.width, ub.height, 0., 0.
    def draw(self, renderer):
        """
        Draw the children.
        """
        for c in self._children:
            c.draw(renderer)
        bbox_artist(self, renderer, fill=False, props=dict(pad=0.))
        self.stale = False
class AnchoredOffsetbox(OffsetBox):
    """
    An offset box placed according to the legend location
    *loc*. AnchoredOffsetbox has a single child. When multiple children
    are needed, use another OffsetBox class to enclose them. By default,
    the offset box is anchored against its parent axes. You may
    explicitly specify the *bbox_to_anchor*.
    """
    zorder = 5  # zorder of the legend
    def __init__(self, loc,
                 pad=0.4, borderpad=0.5,
                 child=None, prop=None, frameon=True,
                 bbox_to_anchor=None,
                 bbox_transform=None,
                 **kwargs):
        """
        loc is a string or an integer specifying the legend location.
        The valid location codes are::

            'upper right'  : 1,
            'upper left'   : 2,
            'lower left'   : 3,
            'lower right'  : 4,
            'right'        : 5,
            'center left'  : 6,
            'center right' : 7,
            'lower center' : 8,
            'upper center' : 9,
            'center'       : 10,

        pad : pad around the child for drawing a frame. given in
          fraction of fontsize.
        borderpad : pad between offsetbox frame and the bbox_to_anchor,
        child : OffsetBox instance that will be anchored.
        prop : font property. This is only used as a reference for paddings.
        frameon : draw a frame box if True.
        bbox_to_anchor : bbox to anchor. Use self.axes.bbox if None.
        bbox_transform : with which the bbox_to_anchor will be transformed.
        """
        super(AnchoredOffsetbox, self).__init__(**kwargs)
        self.set_bbox_to_anchor(bbox_to_anchor, bbox_transform)
        self.set_child(child)
        self.loc = loc
        self.borderpad = borderpad
        self.pad = pad
        # Resolve the font properties used as the padding reference.
        if prop is None:
            self.prop = FontProperties(size=rcParams["legend.fontsize"])
        elif isinstance(prop, dict):
            self.prop = FontProperties(**prop)
            if "size" not in prop:
                self.prop.set_size(rcParams["legend.fontsize"])
        else:
            self.prop = prop
        # Frame patch; its bounds are set from the child's window extent
        # in update_frame() at draw time.
        self.patch = FancyBboxPatch(
            xy=(0.0, 0.0), width=1., height=1.,
            facecolor='w', edgecolor='k',
            mutation_scale=self.prop.get_size_in_points(),
            snap=True
            )
        self.patch.set_boxstyle("square", pad=0)
        self._drawFrame = frameon
    def set_child(self, child):
        "Set the child to be anchored."
        self._child = child
        if child is not None:
            child.axes = self.axes
        self.stale = True
    def get_child(self):
        "Return the child."
        return self._child
    def get_children(self):
        "Return the list of children."
        return [self._child]
    def get_extent(self, renderer):
        """
        Return the extent of the artist. The extent of the child
        added with the pad is returned.
        """
        w, h, xd, yd = self.get_child().get_extent(renderer)
        fontsize = renderer.points_to_pixels(self.prop.get_size_in_points())
        pad = self.pad * fontsize
        return w + 2 * pad, h + 2 * pad, xd + pad, yd + pad
    def get_bbox_to_anchor(self):
        """
        Return the bbox that the legend will be anchored to.
        """
        if self._bbox_to_anchor is None:
            # Default: anchor against the parent axes.
            return self.axes.bbox
        else:
            transform = self._bbox_to_anchor_transform
            if transform is None:
                return self._bbox_to_anchor
            else:
                return TransformedBbox(self._bbox_to_anchor,
                                       transform)
    def set_bbox_to_anchor(self, bbox, transform=None):
        """
        Set the bbox that the child will be anchored to.

        *bbox* can be a Bbox instance, a list of [left, bottom, width,
        height], or a list of [left, bottom] where the width and
        height will be assumed to be zero. The bbox will be
        transformed to display coordinates by the given transform.
        """
        if bbox is None or isinstance(bbox, BboxBase):
            self._bbox_to_anchor = bbox
        else:
            try:
                l = len(bbox)
            except TypeError:
                raise ValueError("Invalid argument for bbox : %s" % str(bbox))
            if l == 2:
                # [left, bottom] given; assume zero width and height.
                bbox = [bbox[0], bbox[1], 0, 0]
            self._bbox_to_anchor = Bbox.from_bounds(*bbox)
        self._bbox_to_anchor_transform = transform
        self.stale = True
    def get_window_extent(self, renderer):
        '''
        get the bounding box in display space.
        '''
        self._update_offset_func(renderer)
        w, h, xd, yd = self.get_extent(renderer)
        ox, oy = self.get_offset(w, h, xd, yd, renderer)
        return Bbox.from_bounds(ox - xd, oy - yd, w, h)
    def _update_offset_func(self, renderer, fontsize=None):
        """
        Update the offset func which depends on the dpi of the
        renderer (because of the padding).
        """
        if fontsize is None:
            fontsize = renderer.points_to_pixels(
                self.prop.get_size_in_points())
        def _offset(w, h, xd, yd, renderer, fontsize=fontsize, self=self):
            # Anchor a (w, h) bbox inside bbox_to_anchor at self.loc,
            # keeping borderpad pixels of margin.
            bbox = Bbox.from_bounds(0, 0, w, h)
            borderpad = self.borderpad * fontsize
            bbox_to_anchor = self.get_bbox_to_anchor()
            x0, y0 = self._get_anchored_bbox(self.loc,
                                             bbox,
                                             bbox_to_anchor,
                                             borderpad)
            return x0 + xd, y0 + yd
        self.set_offset(_offset)
    def update_frame(self, bbox, fontsize=None):
        # Resize the frame patch to cover *bbox* (display coordinates).
        self.patch.set_bounds(bbox.x0, bbox.y0,
                              bbox.width, bbox.height)
        if fontsize:
            self.patch.set_mutation_scale(fontsize)
    def draw(self, renderer):
        "Draw the artist."
        if not self.get_visible():
            return
        fontsize = renderer.points_to_pixels(self.prop.get_size_in_points())
        self._update_offset_func(renderer, fontsize)
        if self._drawFrame:
            # update the location and size of the legend
            bbox = self.get_window_extent(renderer)
            self.update_frame(bbox, fontsize)
            self.patch.draw(renderer)
        width, height, xdescent, ydescent = self.get_extent(renderer)
        px, py = self.get_offset(width, height, xdescent, ydescent, renderer)
        self.get_child().set_offset((px, py))
        self.get_child().draw(renderer)
        self.stale = False
    def _get_anchored_bbox(self, loc, bbox, parentbbox, borderpad):
        """
        Return the position of *bbox* anchored inside *parentbbox*
        with the loc code, keeping *borderpad* of margin.
        """
        assert loc in range(1, 11)  # called only internally
        BEST, UR, UL, LL, LR, R, CL, CR, LC, UC, C = list(xrange(11))
        # Map legend loc codes to compass-point anchor strings.
        anchor_coefs = {UR: "NE",
                        UL: "NW",
                        LL: "SW",
                        LR: "SE",
                        R: "E",
                        CL: "W",
                        CR: "E",
                        LC: "S",
                        UC: "N",
                        C: "C"}
        c = anchor_coefs[loc]
        # Shrink the parent bbox by the border pad before anchoring.
        container = parentbbox.padded(-borderpad)
        anchored_box = bbox.anchored(c, container=container)
        return anchored_box.x0, anchored_box.y0
class AnchoredText(AnchoredOffsetbox):
    """
    AnchoredOffsetbox with Text.
    """
    def __init__(self, s, loc, pad=0.4, borderpad=0.5, prop=None, **kwargs):
        """
        Parameters
        ----------
        s : string
            Text.
        loc : str
            Location code.
        pad : float, optional
            Pad between the text and the frame as fraction of the font
            size.
        borderpad : float, optional
            Pad between the frame and the axes (or *bbox_to_anchor*).
        prop : `matplotlib.font_manager.FontProperties`
            Font properties.

        Notes
        -----
        Other keyword parameters of `AnchoredOffsetbox` are also
        allowed.
        """
        if prop is None:
            prop = {}
        propkeys = list(six.iterkeys(prop))
        badkwargs = ('ha', 'horizontalalignment', 'va', 'verticalalignment')
        # Alignment props would conflict with the anchoring logic; warn.
        if set(badkwargs) & set(propkeys):
            warnings.warn("Mixing horizontalalignment or verticalalignment "
                          "with AnchoredText is not supported.")
        self.txt = TextArea(s, textprops=prop,
                            minimumdescent=False)
        # Reuse the text's font properties as the padding reference.
        fp = self.txt._text.get_fontproperties()
        super(AnchoredText, self).__init__(loc, pad=pad, borderpad=borderpad,
                                           child=self.txt,
                                           prop=fp,
                                           **kwargs)
class OffsetImage(OffsetBox):
    """
    An `OffsetBox` whose single child is a `BboxImage` displaying the
    array *arr*, optionally scaled by *zoom* and dpi-corrected.
    """
    def __init__(self, arr,
                 zoom=1,
                 cmap=None,
                 norm=None,
                 interpolation=None,
                 origin=None,
                 filternorm=1,
                 filterrad=4.0,
                 resample=False,
                 dpi_cor=True,
                 **kwargs
                 ):
        OffsetBox.__init__(self)
        # Whether to apply the renderer's points-to-pixels correction in
        # get_extent().
        self._dpi_cor = dpi_cor
        # The image bbox tracks this box's window extent.
        self.image = BboxImage(bbox=self.get_window_extent,
                               cmap=cmap,
                               norm=norm,
                               interpolation=interpolation,
                               origin=origin,
                               filternorm=filternorm,
                               filterrad=filterrad,
                               resample=resample,
                               **kwargs
                               )
        self._children = [self.image]
        self.set_zoom(zoom)
        self.set_data(arr)
    def set_data(self, arr):
        # Store the image data as an ndarray and propagate to the image.
        self._data = np.asarray(arr)
        self.image.set_data(self._data)
        self.stale = True
    def get_data(self):
        # Return the stored image data array.
        return self._data
    def set_zoom(self, zoom):
        # Set the zoom factor applied to the image extent.
        self._zoom = zoom
        self.stale = True
    def get_zoom(self):
        # Return the zoom factor.
        return self._zoom
#     def set_axes(self, axes):
#         self.image.set_axes(axes)
#         martist.Artist.set_axes(self, axes)
#     def set_offset(self, xy):
#         """
#         set offset of the container.
#         Accept : tuple of x,y coordinate in disokay units.
#         """
#         self._offset = xy
#         self.offset_transform.clear()
#         self.offset_transform.translate(xy[0], xy[1])
    def get_offset(self):
        """
        Return the offset of the container.
        """
        return self._offset
    def get_children(self):
        return [self.image]
    def get_window_extent(self, renderer):
        '''
        get the bounding box in display space.
        '''
        w, h, xd, yd = self.get_extent(renderer)
        ox, oy = self.get_offset()
        return mtransforms.Bbox.from_bounds(ox - xd, oy - yd, w, h)
    def get_extent(self, renderer):
        if self._dpi_cor:  # True, do correction
            dpi_cor = renderer.points_to_pixels(1.)
        else:
            dpi_cor = 1.
        zoom = self.get_zoom()
        data = self.get_data()
        # One display pixel per data pixel, scaled by zoom and dpi.
        ny, nx = data.shape[:2]
        w, h = dpi_cor * nx * zoom, dpi_cor * ny * zoom
        return w, h, 0, 0
    def draw(self, renderer):
        """
        Draw the children.
        """
        self.image.draw(renderer)
        # bbox_artist(self, renderer, fill=False, props=dict(pad=0.))
        self.stale = False
class AnnotationBbox(martist.Artist, _AnnotationBase):
    """
    Annotation-like class, but with an offsetbox instead of Text.
    """
    zorder = 3
    def __str__(self):
        return "AnnotationBbox(%g,%g)" % (self.xy[0], self.xy[1])
    @docstring.dedent_interpd
    def __init__(self, offsetbox, xy,
                 xybox=None,
                 xycoords='data',
                 boxcoords=None,
                 frameon=True, pad=0.4,  # BboxPatch
                 annotation_clip=None,
                 box_alignment=(0.5, 0.5),
                 bboxprops=None,
                 arrowprops=None,
                 fontsize=None,
                 **kwargs):
        """
        *offsetbox* : OffsetBox instance

        *xycoords* : same as Annotation but can be a tuple of two
           strings which are interpreted as x and y coordinates.

        *boxcoords* : similar to textcoords as Annotation but can be a
           tuple of two strings which are interpreted as x and y
           coordinates.

        *box_alignment* : a tuple of two floats for a vertical and
           horizontal alignment of the offset box w.r.t. the *boxcoords*.
           The lower-left corner is (0, 0) and upper-right corner is (1, 1).

        Other parameters are identical to those of Annotation.
        """
        martist.Artist.__init__(self, **kwargs)
        _AnnotationBase.__init__(self,
                                 xy,
                                 xycoords=xycoords,
                                 annotation_clip=annotation_clip)
        self.offsetbox = offsetbox
        self.arrowprops = arrowprops
        # Sets self.prop (FontProperties) used as mutation-scale reference.
        self.set_fontsize(fontsize)
        # Default the box position/coords to the annotated point's.
        if xybox is None:
            self.xybox = xy
        else:
            self.xybox = xybox
        if boxcoords is None:
            self.boxcoords = xycoords
        else:
            self.boxcoords = boxcoords
        if arrowprops is not None:
            # relpos: where on the box the arrow starts (fractional coords).
            self._arrow_relpos = self.arrowprops.pop("relpos", (0.5, 0.5))
            self.arrow_patch = FancyArrowPatch((0, 0), (1, 1),
                                               **self.arrowprops)
        else:
            self._arrow_relpos = None
            self.arrow_patch = None
        #self._fw, self._fh = 0., 0.  # for alignment
        self._box_alignment = box_alignment
        # frame
        self.patch = FancyBboxPatch(
            xy=(0.0, 0.0), width=1., height=1.,
            facecolor='w', edgecolor='k',
            mutation_scale=self.prop.get_size_in_points(),
            snap=True
            )
        self.patch.set_boxstyle("square", pad=pad)
        if bboxprops:
            self.patch.set(**bboxprops)
        self._drawFrame = frameon
    @property
    def xyann(self):
        # Annotation-compatible alias for xybox.
        return self.xybox
    @xyann.setter
    def xyann(self, xyann):
        self.xybox = xyann
        self.stale = True
    @property
    def anncoords(self):
        # Annotation-compatible alias for boxcoords.
        return self.boxcoords
    @anncoords.setter
    def anncoords(self, coords):
        self.boxcoords = coords
        self.stale = True
    def contains(self, event):
        t, tinfo = self.offsetbox.contains(event)
        #if self.arrow_patch is not None:
        #    a,ainfo=self.arrow_patch.contains(event)
        #    t = t or a
        # self.arrow_patch is currently not checked as this can be a line - JJ
        return t, tinfo
    def get_children(self):
        children = [self.offsetbox, self.patch]
        if self.arrow_patch:
            children.append(self.arrow_patch)
        return children
    def set_figure(self, fig):
        # Propagate the figure to the sub-artists as well.
        if self.arrow_patch is not None:
            self.arrow_patch.set_figure(fig)
        self.offsetbox.set_figure(fig)
        martist.Artist.set_figure(self, fig)
    def set_fontsize(self, s=None):
        """
        Set fontsize in points; defaults to rcParams["legend.fontsize"].
        """
        if s is None:
            s = rcParams["legend.fontsize"]
        self.prop = FontProperties(size=s)
        self.stale = True
    def get_fontsize(self, s=None):
        """
        Return fontsize in points.
        """
        return self.prop.get_size_in_points()
    def update_positions(self, renderer):
        """
        Update the pixel positions of the annotated point and the text.
        """
        xy_pixel = self._get_position_xy(renderer)
        self._update_position_xybox(renderer, xy_pixel)
        mutation_scale = renderer.points_to_pixels(self.get_fontsize())
        self.patch.set_mutation_scale(mutation_scale)
        if self.arrow_patch:
            self.arrow_patch.set_mutation_scale(mutation_scale)
    def _update_position_xybox(self, renderer, xy_pixel):
        """
        Update the pixel positions of the annotation text and the arrow
        patch.
        """
        x, y = self.xybox
        if isinstance(self.boxcoords, tuple):
            # Separate coordinate systems for x and y.
            xcoord, ycoord = self.boxcoords
            x1, y1 = self._get_xy(renderer, x, y, xcoord)
            x2, y2 = self._get_xy(renderer, x, y, ycoord)
            ox0, oy0 = x1, y2
        else:
            ox0, oy0 = self._get_xy(renderer, x, y, self.boxcoords)
        w, h, xd, yd = self.offsetbox.get_extent(renderer)
        # Align the box around (ox0, oy0) per box_alignment fractions.
        _fw, _fh = self._box_alignment
        self.offsetbox.set_offset((ox0 - _fw * w + xd, oy0 - _fh * h + yd))
        # update patch position
        bbox = self.offsetbox.get_window_extent(renderer)
        #self.offsetbox.set_offset((ox0-_fw*w, oy0-_fh*h))
        self.patch.set_bounds(bbox.x0, bbox.y0,
                              bbox.width, bbox.height)
        x, y = xy_pixel
        ox1, oy1 = x, y
        if self.arrowprops:
            x0, y0 = x, y
            d = self.arrowprops.copy()
            # Use FancyArrowPatch if self.arrowprops has "arrowstyle" key.
            # adjust the starting point of the arrow relative to
            # the textbox.
            # TODO : Rotation needs to be accounted.
            relpos = self._arrow_relpos
            ox0 = bbox.x0 + bbox.width * relpos[0]
            oy0 = bbox.y0 + bbox.height * relpos[1]
            # The arrow will be drawn from (ox0, oy0) to (ox1,
            # oy1). It will be first clipped by patchA and patchB.
            # Then it will be shrinked by shirnkA and shrinkB
            # (in points). If patch A is not set, self.bbox_patch
            # is used.
            self.arrow_patch.set_positions((ox0, oy0), (ox1, oy1))
            fs = self.prop.get_size_in_points()
            mutation_scale = d.pop("mutation_scale", fs)
            mutation_scale = renderer.points_to_pixels(mutation_scale)
            self.arrow_patch.set_mutation_scale(mutation_scale)
            patchA = d.pop("patchA", self.patch)
            self.arrow_patch.set_patchA(patchA)
    def draw(self, renderer):
        """
        Draw the :class:`Annotation` object to the given *renderer*.
        """
        if renderer is not None:
            self._renderer = renderer
        if not self.get_visible():
            return
        xy_pixel = self._get_position_xy(renderer)
        # Skip drawing if the annotated point is clipped away.
        if not self._check_xy(renderer, xy_pixel):
            return
        self.update_positions(renderer)
        if self.arrow_patch is not None:
            if self.arrow_patch.figure is None and self.figure is not None:
                self.arrow_patch.figure = self.figure
            self.arrow_patch.draw(renderer)
        if self._drawFrame:
            self.patch.draw(renderer)
        self.offsetbox.draw(renderer)
        self.stale = False
class DraggableBase(object):
    """
    Helper code for a draggable artist (legend, offsetbox).

    The derived class must override the following two methods::

        def save_offset(self):
            pass

        def update_offset(self, dx, dy):
            pass

    *save_offset* is called when the object is picked for dragging and it is
    meant to save the reference position of the artist.

    *update_offset* is called during the dragging. dx and dy is the pixel
    offset from the point where the mouse drag started.

    Optionally you may override the following two methods::

        def artist_picker(self, artist, evt):
            return self.ref_artist.contains(evt)

        def finalize_offset(self):
            pass

    *artist_picker* is a picker method that will be
    used. *finalize_offset* is called when the mouse is released. In the
    current implementation of DraggableLegend and DraggableAnnotation,
    *update_offset* places the artists simply in display
    coordinates, and *finalize_offset* recalculates their position in
    the normalized axes coordinate and sets a relevant attribute.
    """
    def __init__(self, ref_artist, use_blit=False):
        self.ref_artist = ref_artist
        self.got_artist = False
        self.canvas = self.ref_artist.figure.canvas
        # Blitting is only used if requested AND supported by the backend.
        self._use_blit = use_blit and self.canvas.supports_blit
        c2 = self.canvas.mpl_connect('pick_event', self.on_pick)
        c3 = self.canvas.mpl_connect('button_release_event', self.on_release)
        ref_artist.set_picker(self.artist_picker)
        self.cids = [c2, c3]
    def on_motion(self, evt):
        # Full redraw on every mouse move (non-blit path).
        if self.got_artist:
            dx = evt.x - self.mouse_x
            dy = evt.y - self.mouse_y
            self.update_offset(dx, dy)
            self.canvas.draw()
    def on_motion_blit(self, evt):
        # Fast path: restore the saved background and blit only the artist.
        if self.got_artist:
            dx = evt.x - self.mouse_x
            dy = evt.y - self.mouse_y
            self.update_offset(dx, dy)
            self.canvas.restore_region(self.background)
            self.ref_artist.draw(self.ref_artist.figure._cachedRenderer)
            self.canvas.blit(self.ref_artist.figure.bbox)
    def on_pick(self, evt):
        if evt.artist == self.ref_artist:
            # Remember where the drag started, in pixels.
            self.mouse_x = evt.mouseevent.x
            self.mouse_y = evt.mouseevent.y
            self.got_artist = True
            if self._use_blit:
                # Capture the background once so motion events can
                # restore it cheaply.
                self.ref_artist.set_animated(True)
                self.canvas.draw()
                self.background = self.canvas.copy_from_bbox(
                    self.ref_artist.figure.bbox)
                self.ref_artist.draw(self.ref_artist.figure._cachedRenderer)
                self.canvas.blit(self.ref_artist.figure.bbox)
                self._c1 = self.canvas.mpl_connect('motion_notify_event',
                                                   self.on_motion_blit)
            else:
                self._c1 = self.canvas.mpl_connect('motion_notify_event',
                                                   self.on_motion)
            self.save_offset()
    def on_release(self, event):
        if self.got_artist:
            self.finalize_offset()
            self.got_artist = False
            # Stop listening to motion events until the next pick.
            self.canvas.mpl_disconnect(self._c1)
            if self._use_blit:
                self.ref_artist.set_animated(False)
    def disconnect(self):
        """Disconnect the callbacks."""
        for cid in self.cids:
            self.canvas.mpl_disconnect(cid)
        # The motion callback only exists if a pick ever happened.
        try:
            c1 = self._c1
        except AttributeError:
            pass
        else:
            self.canvas.mpl_disconnect(c1)
    def artist_picker(self, artist, evt):
        # Default picker: hit-test against the reference artist.
        return self.ref_artist.contains(evt)
    def save_offset(self):
        pass
    def update_offset(self, dx, dy):
        pass
    def finalize_offset(self):
        pass
class DraggableOffsetBox(DraggableBase):
    """Make an `OffsetBox` draggable with the mouse."""
    def __init__(self, ref_artist, offsetbox, use_blit=False):
        DraggableBase.__init__(self, ref_artist, use_blit=use_blit)
        self.offsetbox = offsetbox
    def save_offset(self):
        # Remember the box's pixel offset at pick time; drags are applied
        # relative to this reference position.
        box = self.offsetbox
        renderer = box.figure._cachedRenderer
        extent = box.get_extent(renderer)
        offset = box.get_offset(extent[0], extent[1],
                                extent[2], extent[3], renderer)
        self.offsetbox_x, self.offsetbox_y = offset
        box.set_offset(offset)
    def update_offset(self, dx, dy):
        # Shift the saved reference position by the drag delta (pixels).
        self.offsetbox.set_offset((self.offsetbox_x + dx,
                                   self.offsetbox_y + dy))
    def get_loc_in_canvas(self):
        # Lower-left corner of the box in canvas (display) coordinates.
        box = self.offsetbox
        renderer = box.figure._cachedRenderer
        w, h, xd, yd = box.get_extent(renderer)
        ox, oy = box._offset
        return (ox - xd, oy - yd)
class DraggableAnnotation(DraggableBase):
    """Make an `Annotation` draggable with the mouse."""
    def __init__(self, annotation, use_blit=False):
        DraggableBase.__init__(self, annotation, use_blit=use_blit)
        self.annotation = annotation
    def save_offset(self):
        # Record the annotation text position in display coordinates.
        annotation = self.annotation
        self.ox, self.oy = annotation.get_transform().transform(
            annotation.xyann)
    def update_offset(self, dx, dy):
        # Map the dragged display-space point back into the annotation's
        # own coordinate system and store it.
        annotation = self.annotation
        dragged = (self.ox + dx, self.oy + dy)
        annotation.xyann = annotation.get_transform().inverted().transform(
            dragged)
if __name__ == "__main__":
    # Manual smoke test: place one OffsetImage directly on the axes and a
    # second one inside an AnnotationBbox with a frame and an arrow.
    import matplotlib.pyplot as plt
    fig = plt.figure(1)
    fig.clf()
    ax = plt.subplot(121)
    #txt = ax.text(0.5, 0.5, "Test", size=30, ha="center", color="w")
    kwargs = dict()
    # 16x16 ramp of values in [0, 1) used as the demo image.
    a = np.arange(256).reshape(16, 16) / 256.
    myimage = OffsetImage(a,
                          zoom=2,
                          norm=None,
                          origin=None,
                          **kwargs
                          )
    ax.add_artist(myimage)
    # Position the first image directly in display units.
    myimage.set_offset((100, 100))
    myimage2 = OffsetImage(a,
                           zoom=2,
                           norm=None,
                           origin=None,
                           **kwargs
                           )
    # Anchor the second image 30 points away from the data point (0.5, 0.5),
    # framed with a rounded yellow box and connected by an arrow.
    ann = AnnotationBbox(myimage2, (0.5, 0.5),
                         xybox=(30, 30),
                         xycoords='data',
                         boxcoords="offset points",
                         frameon=True, pad=0.4,  # BboxPatch
                         bboxprops=dict(boxstyle="round", fc="y"),
                         fontsize=None,
                         arrowprops=dict(arrowstyle="->"),
                         )
    ax.add_artist(ann)
    plt.draw()
    plt.show()
| bsd-3-clause |
mingderwang/angr | angr/surveyors/escaper.py | 9 | 4240 | #!/usr/bin/env python
from ..surveyor import Surveyor
from . import Explorer
import logging
l = logging.getLogger("angr.surveyors.Escaper")
class Escaper(Surveyor):
    '''
    Escaper implements loop escaping!

    normal - any found normal paths from the loop
    forced - forced paths from the loop, if a normal wasn't found
    '''
    def __init__(self, project, loop_addresses, start=None, max_concurrency=None, max_active=None, pickle_paths=None, loop_iterations=0, iteration_depth=100, unconstrain_memory=True, unconstrain_registers=True):
        '''
        Creates an Escaper. Most options are for Surveyor (separate docs).

        @param loop_addresses: the addresses of all the basic blocks in the loop, to know the
                               instructions to which the analysis should be restricted
        @param loop_iterations: the number of times to run the loop before escaping
        @param iteration_depth: the maximum depth (in SimRuns) of a path through the loop
        @param unconstrain_memory: whether to unconstrain memory locations modified by the loop
        @param unconstrain_registers: whether to unconstrain registers modified by the loop
        '''
        Surveyor.__init__(self, project, start=start, max_concurrency=max_concurrency, max_active=max_active, pickle_paths=pickle_paths)
        self._loop_addresses = loop_addresses
        self._loop_iterations = loop_iterations
        self._iteration_depth = iteration_depth
        self._current_iteration = 0
        self._done = False
        self._unconstrain_memory = unconstrain_memory
        self._unconstrain_registers = unconstrain_registers
        # Paths that escape the loop naturally / by forced unconstraining.
        self.normal = [ ]
        self.forced = [ ]
    def _tick_loop(self, start=None):
        # Explore one pass of the loop: from *start* back to the loop header,
        # staying within the loop's basic blocks.
        results = Explorer(self._project, start=start, find=self._loop_addresses[0], restrict=self._loop_addresses, min_depth=2, max_depth=self._iteration_depth, max_repeats=1, max_concurrency=self._max_concurrency, num_find=self._num_find).run()
        self.deadended += results.deadended
        return results
    def unconstrain_loop(self, constrained_entry):
        '''
        Unconstrains an exit to the loop header by looping one more time
        and replacing all modified variables with unconstrained versions.
        '''
        constrained_state = constrained_entry.state.copy()
        # first, go through the loop normally, one more time
        constrained_results = self._tick_loop(start=constrained_entry)
        l.debug("%d paths to header found", len(constrained_results.found))
        # then unconstrain differences between the original state and any new
        # head states
        unconstrained_states = []
        for p in constrained_results.found:
            # because the head_entry might actually point partway *through* the
            # loop header, in the cases of a loop starting between
            # the counter-increment and the condition check (because the
            # counter is only incremented at the end of the loop, and the
            # end is placed in the beginning for optimization), so we run the
            # loop through to the *end* of the header
            new_state = p.state.copy()
            if self._unconstrain_registers:
                new_state.registers.unconstrain_differences(constrained_state.registers)
            if self._unconstrain_memory:
                new_state.memory.unconstrain_differences(constrained_state.memory)
            unconstrained_states.append(new_state)
        l.debug("%d unconstrained states", len(unconstrained_states))
        # Re-run the loop from each unconstrained state and collect the
        # paths that deviate (i.e. escape) from it.
        unconstrained_exits = []
        unconstrained_entry = constrained_entry
        for s in unconstrained_states:
            unconstrained_entry.state = s
            unconstrained_results = self._tick_loop(start=unconstrained_entry)
            unconstrained_exits += unconstrained_results.deviating
        return unconstrained_exits
    def tick(self):
        '''
        Makes one run through the loop.
        '''
        if self._current_iteration < self._loop_iterations:
            # Still in the "run the loop normally" phase.
            l.debug("Currently at iteration %d of %d", self._current_iteration, self._loop_iterations)
            results = self._tick_loop(start=self.active_exits(reachable=True))
            l.debug("... found %d exiting paths", len(results.deviating))
            self.normal += results.deviating
            self.active = results.found
            self._current_iteration += 1
        else:
            # Iteration budget exhausted; force an escape by unconstraining.
            all_exits = self.active_exits(reachable=True)
            l.debug("Unconstraining %d heads.", len(all_exits))
            for e in all_exits:
                self.forced += self.unconstrain_loop(e)
            self._done = True
    @property
    def done(self):
        # True once the forced-unconstraining phase has run.
        return self._done
    def __repr__(self):
        return "<Escaper with paths: %s, %d normal, %d forced>" % (Surveyor.__repr__(self), len(self.normal), len(self.forced))
from . import all_surveyors
all_surveyors['Escaper'] = Escaper
| bsd-2-clause |
dmitry-sobolev/ansible | contrib/inventory/vmware_inventory.py | 28 | 26112 | #!/usr/bin/env python
# Requirements
# - pyvmomi >= 6.0.0.2016.4
# TODO:
# * more jq examples
# * optional folder heriarchy
"""
$ jq '._meta.hostvars[].config' data.json | head
{
"alternateguestname": "",
"instanceuuid": "5035a5cd-b8e8-d717-e133-2d383eb0d675",
"memoryhotaddenabled": false,
"guestfullname": "Red Hat Enterprise Linux 7 (64-bit)",
"changeversion": "2016-05-16T18:43:14.977925Z",
"uuid": "4235fc97-5ddb-7a17-193b-9a3ac97dc7b4",
"cpuhotremoveenabled": false,
"vpmcenabled": false,
"firmware": "bios",
"""
from __future__ import print_function
import argparse
import atexit
import datetime
import getpass
import os
import re
import six
import ssl
import sys
import uuid
from collections import defaultdict
from six.moves import configparser
from time import time
from jinja2 import Environment
HAS_PYVMOMI = False
try:
from pyVmomi import vim
from pyVim.connect import SmartConnect, Disconnect
HAS_PYVMOMI = True
except ImportError:
pass
try:
import json
except ImportError:
import simplejson as json
hasvcr = False
try:
import vcr
hasvcr = True
except ImportError:
pass
def regex_match(s, pattern):
    '''Custom jinja2 filter: does *pattern* match at the start of *s*?

    Mirrors ``re.match`` semantics (the pattern is anchored at the
    beginning of the string) but always returns a plain bool so it is
    safe in template boolean expressions.
    '''
    # re.match caches compiled patterns internally, so the explicit
    # re.compile() per call was unnecessary; collapse the if/else that
    # returned literal True/False into a single boolean expression.
    return re.match(pattern, s) is not None
class VMwareMissingHostException(Exception):
    """Raised by get_host_info() when a host is not in the inventory."""
    pass
class VMWareInventory(object):
__name__ = 'VMWareInventory'
guest_props = False
instances = []
debug = False
load_dumpfile = None
write_dumpfile = None
maxlevel = 1
lowerkeys = True
config = None
cache_max_age = None
cache_path_cache = None
cache_path_index = None
cache_dir = None
server = None
port = None
username = None
password = None
validate_certs = True
host_filters = []
skip_keys = []
groupby_patterns = []
if sys.version_info > (3, 0):
safe_types = [int, bool, str, float, None]
else:
safe_types = [int, long, bool, str, float, None]
iter_types = [dict, list]
bad_types = ['Array', 'disabledMethod', 'declaredAlarmState']
vimTableMaxDepth = {
"vim.HostSystem": 2,
"vim.VirtualMachine": 2,
}
custom_fields = {}
# use jinja environments to allow for custom filters
env = Environment()
env.filters['regex_match'] = regex_match
# translation table for attributes to fetch for known vim types
if not HAS_PYVMOMI:
vimTable = {}
else:
vimTable = {
vim.Datastore: ['_moId', 'name'],
vim.ResourcePool: ['_moId', 'name'],
vim.HostSystem: ['_moId', 'name'],
}
@staticmethod
def _empty_inventory():
return {"_meta": {"hostvars": {}}}
    def __init__(self, load=True):
        """Construct the inventory.

        :param load: when True, parse CLI args and the ini file, then
                     populate the inventory from the cache or from live
                     vSphere API calls; when False, build an empty,
                     unconfigured instance (useful for testing).
        """
        self.inventory = VMWareInventory._empty_inventory()
        if load:
            # Read settings and parse CLI arguments
            self.parse_cli_args()
            self.read_settings()
            # Check the cache
            cache_valid = self.is_cache_valid()
            # Handle Cache
            if self.args.refresh_cache or not cache_valid:
                self.do_api_calls_update_cache()
            else:
                self.debugl('loading inventory from cache')
                self.inventory = self.get_inventory_from_cache()
    def debugl(self, text):
        # Print a timestamped debug line, but only when --debug was given.
        if self.args.debug:
            try:
                text = str(text)
            except UnicodeEncodeError:
                # Python 2: str() on unicode containing non-ascii raises;
                # degrade to an ascii approximation rather than crash.
                text = text.encode('ascii', 'ignore')
            print('%s %s' % (datetime.datetime.now(), text))
    def show(self):
        """Return the JSON payload for Ansible's --host/--list protocol."""
        # Data to print
        self.debugl('dumping results')
        data_to_print = None
        if self.args.host:
            # --host takes precedence: emit hostvars for a single host.
            data_to_print = self.get_host_info(self.args.host)
        elif self.args.list:
            # Display list of instances for inventory
            data_to_print = self.inventory
        return json.dumps(data_to_print, indent=2)
def is_cache_valid(self):
''' Determines if the cache files have expired, or if it is still valid '''
valid = False
if os.path.isfile(self.cache_path_cache):
mod_time = os.path.getmtime(self.cache_path_cache)
current_time = time()
if (mod_time + self.cache_max_age) > current_time:
valid = True
return valid
    def do_api_calls_update_cache(self):
        ''' Get instances and cache the data '''
        # Hit the vSphere API, build the inventory, then persist it so
        # subsequent runs within cache_max_age can skip the API entirely.
        self.inventory = self.instances_to_inventory(self.get_instances())
        self.write_to_cache(self.inventory)
def write_to_cache(self, data):
''' Dump inventory to json file '''
with open(self.cache_path_cache, 'wb') as f:
f.write(json.dumps(data))
    def get_inventory_from_cache(self):
        ''' Read in jsonified inventory '''
        jdata = None
        # Read raw; json.loads() accepts bytes on Python 2 and 3.6+.
        with open(self.cache_path_cache, 'rb') as f:
            jdata = f.read()
        return json.loads(jdata)
    def read_settings(self):
        ''' Reads the settings from the vmware_inventory.ini file

        Precedence for each value: environment variable, then ini file,
        then the hard-coded defaults below.  Populates connection info,
        cache paths and the templating/grouping behavior attributes.
        '''
        # Derive the default ini filename from this script's own name.
        scriptbasename = __file__
        scriptbasename = os.path.basename(scriptbasename)
        scriptbasename = scriptbasename.replace('.py', '')
        defaults = {'vmware': {
            'server': '',
            'port': 443,
            'username': '',
            'password': '',
            'validate_certs': True,
            'ini_path': os.path.join(os.path.dirname(__file__), '%s.ini' % scriptbasename),
            'cache_name': 'ansible-vmware',
            'cache_path': '~/.ansible/tmp',
            'cache_max_age': 3600,
            'max_object_level': 1,
            'skip_keys': 'declaredalarmstate,'
                         'disabledmethod,'
                         'dynamicproperty,'
                         'dynamictype,'
                         'environmentbrowser,'
                         'managedby,'
                         'parent,'
                         'childtype,'
                         'resourceconfig',
            'alias_pattern': '{{ config.name + "_" + config.uuid }}',
            'host_pattern': '{{ guest.ipaddress }}',
            'host_filters': '{{ guest.gueststate == "running" }}',
            'groupby_patterns': '{{ guest.guestid }},{{ "templates" if config.template else "guests"}}',
            'lower_var_keys': True,
            'custom_field_group_prefix': 'vmware_tag_',
            'groupby_custom_field': False}
        }
        # SafeConfigParser was renamed/deprecated in Python 3.
        if six.PY3:
            config = configparser.ConfigParser()
        else:
            config = configparser.SafeConfigParser()
        # where is the config?
        vmware_ini_path = os.environ.get('VMWARE_INI_PATH', defaults['vmware']['ini_path'])
        vmware_ini_path = os.path.expanduser(os.path.expandvars(vmware_ini_path))
        config.read(vmware_ini_path)
        # apply defaults for any option the ini file did not set
        for k, v in defaults['vmware'].items():
            if not config.has_option('vmware', k):
                config.set('vmware', k, str(v))
        # where is the cache?
        self.cache_dir = os.path.expanduser(config.get('vmware', 'cache_path'))
        if self.cache_dir and not os.path.exists(self.cache_dir):
            os.makedirs(self.cache_dir)
        # set the cache filename and max age
        cache_name = config.get('vmware', 'cache_name')
        self.cache_path_cache = self.cache_dir + "/%s.cache" % cache_name
        self.debugl('cache path is %s' % self.cache_path_cache)
        self.cache_max_age = int(config.getint('vmware', 'cache_max_age'))
        # mark the connection info (env vars override the ini file)
        self.server = os.environ.get('VMWARE_SERVER', config.get('vmware', 'server'))
        self.debugl('server is %s' % self.server)
        self.port = int(os.environ.get('VMWARE_PORT', config.get('vmware', 'port')))
        self.username = os.environ.get('VMWARE_USERNAME', config.get('vmware', 'username'))
        self.debugl('username is %s' % self.username)
        self.password = os.environ.get('VMWARE_PASSWORD', config.get('vmware', 'password'))
        self.validate_certs = os.environ.get('VMWARE_VALIDATE_CERTS', config.get('vmware', 'validate_certs'))
        if self.validate_certs in ['no', 'false', 'False', False]:
            self.validate_certs = False
        self.debugl('cert validation is %s' % self.validate_certs)
        # behavior control
        self.maxlevel = int(config.get('vmware', 'max_object_level'))
        self.debugl('max object level is %s' % self.maxlevel)
        # lower_var_keys may come back as a string from the ini file;
        # normalize it to a real bool.
        self.lowerkeys = config.get('vmware', 'lower_var_keys')
        if type(self.lowerkeys) != bool:
            if str(self.lowerkeys).lower() in ['yes', 'true', '1']:
                self.lowerkeys = True
            else:
                self.lowerkeys = False
        self.debugl('lower keys is %s' % self.lowerkeys)
        self.skip_keys = list(config.get('vmware', 'skip_keys').split(','))
        self.debugl('skip keys is %s' % self.skip_keys)
        self.host_filters = list(config.get('vmware', 'host_filters').split(','))
        self.debugl('host filters are %s' % self.host_filters)
        self.groupby_patterns = list(config.get('vmware', 'groupby_patterns').split(','))
        self.debugl('groupby patterns are %s' % self.groupby_patterns)
        # Special feature to disable the brute force serialization of the
        # virtulmachine objects. The key name for these properties does not
        # matter because the values are just items for a larger list.
        if config.has_section('properties'):
            self.guest_props = []
            for prop in config.items('properties'):
                self.guest_props.append(prop[1])
        # save the config
        self.config = config
    def parse_cli_args(self):
        ''' Command line argument processing '''
        # Standard dynamic-inventory CLI contract: --list / --host, plus
        # debugging and cache-control switches specific to this script.
        parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based on PyVmomi')
        parser.add_argument('--debug', action='store_true', default=False,
                            help='show debug info')
        parser.add_argument('--list', action='store_true', default=True,
                            help='List instances (default: True)')
        parser.add_argument('--host', action='store',
                            help='Get all the variables about a specific instance')
        parser.add_argument('--refresh-cache', action='store_true', default=False,
                            help='Force refresh of cache by making API requests to VSphere (default: False - use cache files)')
        parser.add_argument('--max-instances', default=None, type=int,
                            help='maximum number of instances to retrieve')
        self.args = parser.parse_args()
    def get_instances(self):
        ''' Get a list of vm instances with pyvmomi '''
        kwargs = {'host': self.server,
                  'user': self.username,
                  'pwd': self.password,
                  'port': int(self.port)}
        # Optionally disable SSL certificate verification (self-signed
        # vCenter certs are common); only possible where ssl.SSLContext
        # exists (Python 2.7.9+ / 3.x).
        if hasattr(ssl, 'SSLContext') and not self.validate_certs:
            context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
            context.verify_mode = ssl.CERT_NONE
            kwargs['sslContext'] = context
        return self._get_instances(kwargs)
    def _get_instances(self, inkwargs):
        ''' Make API calls

        Connects to vSphere, walks every VirtualMachine in the root
        folder, collects facts for each and returns a list of
        (vm_object, facts_dict) tuples.  Also records any VM-scoped
        custom field definitions in self.custom_fields.
        '''
        instances = []
        si = SmartConnect(**inkwargs)
        self.debugl('retrieving all instances')
        if not si:
            print("Could not connect to the specified host using specified "
                  "username and password")
            return -1
        # Ensure the session is torn down however the process exits.
        atexit.register(Disconnect, si)
        content = si.RetrieveContent()
        # Create a search container for virtualmachines
        self.debugl('creating containerview for virtualmachines')
        container = content.rootFolder
        viewType = [vim.VirtualMachine]
        recursive = True
        containerView = content.viewManager.CreateContainerView(container, viewType, recursive)
        children = containerView.view
        for child in children:
            # If requested, limit the total number of instances
            if self.args.max_instances:
                if len(instances) >= self.args.max_instances:
                    break
            instances.append(child)
        self.debugl("%s total instances in container view" % len(instances))
        if self.args.host:
            instances = [x for x in instances if x.name == self.args.host]
        instance_tuples = []
        for instance in sorted(instances):
            # guest_props (from the ini [properties] section) selects the
            # cheap targeted fact collector over brute-force serialization.
            if self.guest_props:
                ifacts = self.facts_from_proplist(instance)
            else:
                ifacts = self.facts_from_vobj(instance)
            instance_tuples.append((instance, ifacts))
        self.debugl('facts collected for all instances')
        cfm = content.customFieldsManager
        if cfm is not None and cfm.field:
            # Map custom field keys to their display names for grouping.
            for f in cfm.field:
                if f.managedObjectType == vim.VirtualMachine:
                    self.custom_fields[f.key] = f.name
            self.debugl('%d custom fieds collected' % len(self.custom_fields))
        return instance_tuples
def instances_to_inventory(self, instances):
''' Convert a list of vm objects into a json compliant inventory '''
self.debugl('re-indexing instances based on ini settings')
inventory = VMWareInventory._empty_inventory()
inventory['all'] = {}
inventory['all']['hosts'] = []
for idx, instance in enumerate(instances):
# make a unique id for this object to avoid vmware's
# numerous uuid's which aren't all unique.
thisid = str(uuid.uuid4())
idata = instance[1]
# Put it in the inventory
inventory['all']['hosts'].append(thisid)
inventory['_meta']['hostvars'][thisid] = idata.copy()
inventory['_meta']['hostvars'][thisid]['ansible_uuid'] = thisid
# Make a map of the uuid to the alias the user wants
name_mapping = self.create_template_mapping(
inventory,
self.config.get('vmware', 'alias_pattern')
)
# Make a map of the uuid to the ssh hostname the user wants
host_mapping = self.create_template_mapping(
inventory,
self.config.get('vmware', 'host_pattern')
)
# Reset the inventory keys
for k, v in name_mapping.items():
if not host_mapping or not k in host_mapping:
continue
# set ansible_host (2.x)
try:
inventory['_meta']['hostvars'][k]['ansible_host'] = host_mapping[k]
# 1.9.x backwards compliance
inventory['_meta']['hostvars'][k]['ansible_ssh_host'] = host_mapping[k]
except Exception:
continue
if k == v:
continue
# add new key
inventory['all']['hosts'].append(v)
inventory['_meta']['hostvars'][v] = inventory['_meta']['hostvars'][k]
# cleanup old key
inventory['all']['hosts'].remove(k)
inventory['_meta']['hostvars'].pop(k, None)
self.debugl('pre-filtered hosts:')
for i in inventory['all']['hosts']:
self.debugl(' * %s' % i)
# Apply host filters
for hf in self.host_filters:
if not hf:
continue
self.debugl('filter: %s' % hf)
filter_map = self.create_template_mapping(inventory, hf, dtype='boolean')
for k, v in filter_map.items():
if not v:
# delete this host
inventory['all']['hosts'].remove(k)
inventory['_meta']['hostvars'].pop(k, None)
self.debugl('post-filter hosts:')
for i in inventory['all']['hosts']:
self.debugl(' * %s' % i)
# Create groups
for gbp in self.groupby_patterns:
groupby_map = self.create_template_mapping(inventory, gbp)
for k, v in groupby_map.items():
if v not in inventory:
inventory[v] = {}
inventory[v]['hosts'] = []
if k not in inventory[v]['hosts']:
inventory[v]['hosts'].append(k)
if self.config.get('vmware', 'groupby_custom_field'):
for k, v in inventory['_meta']['hostvars'].items():
if 'customvalue' in v:
for tv in v['customvalue']:
if not isinstance(tv['value'], str) and not isinstance(tv['value'], unicode):
continue
newkey = None
field_name = self.custom_fields[tv['key']] if tv['key'] in self.custom_fields else tv['key']
values = []
keylist = map(lambda x: x.strip(), tv['value'].split(','))
for kl in keylist:
try:
newkey = self.config.get('vmware', 'custom_field_group_prefix') + field_name + '_' + kl
newkey = newkey.strip()
except Exception as e:
self.debugl(e)
values.append(newkey)
for tag in values:
if not tag:
continue
if tag not in inventory:
inventory[tag] = {}
inventory[tag]['hosts'] = []
if k not in inventory[tag]['hosts']:
inventory[tag]['hosts'].append(k)
return inventory
    def create_template_mapping(self, inventory, pattern, dtype='string'):
        ''' Return a hash of uuid to templated string from pattern

        Renders the jinja2 *pattern* against each host's hostvars.  Hosts
        whose render fails or yields an empty string are omitted.  *dtype*
        optionally coerces the rendered string to int or bool.
        '''
        mapping = {}
        for k, v in inventory['_meta']['hostvars'].items():
            t = self.env.from_string(pattern)
            newkey = None
            try:
                newkey = t.render(v)
                newkey = newkey.strip()
            except Exception as e:
                # Bad pattern or missing vars for this host: skip it below.
                self.debugl(e)
            if not newkey:
                continue
            elif dtype == 'integer':
                newkey = int(newkey)
            elif dtype == 'boolean':
                if newkey.lower() == 'false':
                    newkey = False
                elif newkey.lower() == 'true':
                    newkey = True
            elif dtype == 'string':
                pass
            mapping[k] = newkey
        return mapping
    def facts_from_proplist(self, vm):
        '''Get specific properties instead of serializing everything

        Each entry in self.guest_props is either a direct attribute name
        ("name") or a dotted path ("config.uuid"); dotted paths become
        nested dicts in the returned fact structure.
        '''
        rdata = {}
        for prop in self.guest_props:
            self.debugl('getting %s property for %s' % (prop, vm.name))
            key = prop
            if self.lowerkeys:
                key = key.lower()
            if '.' not in prop:
                # props without periods are direct attributes of the parent
                rdata[key] = getattr(vm, prop)
            else:
                # props with periods are subkeys of parent attributes
                parts = prop.split('.')
                total = len(parts) - 1
                # pointer to the current object
                val = None
                # pointer to the current result key
                lastref = rdata
                for idx, x in enumerate(parts):
                    # if the val wasn't set yet, get it from the parent
                    if not val:
                        try:
                            val = getattr(vm, x)
                        except AttributeError as e:
                            self.debugl(e)
                    else:
                        # in a subkey, get the subprop from the previous attrib
                        try:
                            val = getattr(val, x)
                        except AttributeError as e:
                            self.debugl(e)
                    # lowercase keys if requested
                    if self.lowerkeys:
                        x = x.lower()
                    # change the pointer or set the final value
                    if idx != total:
                        if x not in lastref:
                            lastref[x] = {}
                        lastref = lastref[x]
                    else:
                        lastref[x] = val
        return rdata
    def facts_from_vobj(self, vobj, level=0):
        ''' Traverse a VM object and return a json compliant data structure '''
        # pyvmomi objects are not yet serializable, but may be one day ...
        # https://github.com/vmware/pyvmomi/issues/21
        # WARNING:
        # Accessing an object attribute will trigger a SOAP call to the remote.
        # Increasing the attributes collected or the depth of recursion greatly
        # increases runtime duration and potentially memory+network utilization.
        if level == 0:
            try:
                self.debugl("get facts for %s" % vobj.name)
            except Exception as e:
                self.debugl(e)
        rdata = {}
        # Enumerate public, non-callable attributes not excluded by the
        # configured skip lists, then serialize each recursively.
        methods = dir(vobj)
        methods = [str(x) for x in methods if not x.startswith('_')]
        methods = [x for x in methods if x not in self.bad_types]
        methods = [x for x in methods if not x.lower() in self.skip_keys]
        methods = sorted(methods)
        for method in methods:
            # Attempt to get the method, skip on fail
            try:
                methodToCall = getattr(vobj, method)
            except Exception as e:
                continue
            # Skip callable methods
            if callable(methodToCall):
                continue
            if self.lowerkeys:
                method = method.lower()
            rdata[method] = self._process_object_types(
                methodToCall,
                thisvm=vobj,
                inkey=method,
            )
        return rdata
def _process_object_types(self, vobj, thisvm=None, inkey=None, level=0):
''' Serialize an object '''
rdata = {}
if type(vobj).__name__ in self.vimTableMaxDepth and level >= self.vimTableMaxDepth[type(vobj).__name__]:
return rdata
if vobj is None:
rdata = None
elif type(vobj) in self.vimTable:
rdata = {}
for key in self.vimTable[type(vobj)]:
try:
rdata[key] = getattr(vobj, key)
except Exception as e:
self.debugl(e)
elif issubclass(type(vobj), str) or isinstance(vobj, str):
if vobj.isalnum():
rdata = vobj
else:
rdata = vobj.decode('ascii', 'ignore')
elif issubclass(type(vobj), bool) or isinstance(vobj, bool):
rdata = vobj
elif issubclass(type(vobj), int) or isinstance(vobj, int):
rdata = vobj
elif issubclass(type(vobj), float) or isinstance(vobj, float):
rdata = vobj
elif issubclass(type(vobj), long) or isinstance(vobj, long):
rdata = vobj
elif issubclass(type(vobj), list) or issubclass(type(vobj), tuple):
rdata = []
try:
vobj = sorted(vobj)
except Exception:
pass
for idv, vii in enumerate(vobj):
if level + 1 <= self.maxlevel:
vid = self._process_object_types(
vii,
thisvm=thisvm,
inkey=inkey + '[' + str(idv) + ']',
level=(level + 1)
)
if vid:
rdata.append(vid)
elif issubclass(type(vobj), dict):
pass
elif issubclass(type(vobj), object):
methods = dir(vobj)
methods = [str(x) for x in methods if not x.startswith('_')]
methods = [x for x in methods if x not in self.bad_types]
methods = [x for x in methods if not inkey + '.' + x.lower() in self.skip_keys]
methods = sorted(methods)
for method in methods:
# Attempt to get the method, skip on fail
try:
methodToCall = getattr(vobj, method)
except Exception as e:
continue
if callable(methodToCall):
continue
if self.lowerkeys:
method = method.lower()
if level + 1 <= self.maxlevel:
try:
rdata[method] = self._process_object_types(
methodToCall,
thisvm=thisvm,
inkey=inkey + '.' + method,
level=(level + 1)
)
except vim.fault.NoPermission:
self.debugl("Skipping method %s (NoPermission)" % method)
else:
pass
return rdata
def get_host_info(self, host):
''' Return hostvars for a single host '''
if host in self.inventory['_meta']['hostvars']:
return self.inventory['_meta']['hostvars'][host]
elif self.args.host and self.inventory['_meta']['hostvars']:
match = None
for k, v in self.inventory['_meta']['hostvars']:
if self.inventory['_meta']['hostvars'][k]['name'] == self.args.host:
match = k
break
if match:
return self.inventory['_meta']['hostvars'][match]
else:
raise VMwareMissingHostException('%s not found' % host)
else:
raise VMwareMissingHostException('%s not found' % host)
if __name__ == "__main__":
# Run the script
print(VMWareInventory().show())
| gpl-3.0 |
ramtinms/tokenregex | tokenquery/tests/acceptors/core/vector_opr_test.py | 2 | 1970 | import unittest
from tokenquery.acceptors.vector_opr import change_string_to_vector
from tokenquery.acceptors.vector_opr import vec_cos_sim
from tokenquery.acceptors.vector_opr import vec_cos_dist
from tokenquery.acceptors.vector_opr import vec_man_dist
class TestVectorCoreAcceptorsClass(unittest.TestCase):
    """Unit tests for the vector-operation token acceptors.

    Each acceptor takes a token's vector (as a string) and a parameter
    string of the form "<vector> <op> <threshold>" and returns a bool.
    """
    def test_change_to_vector_method(self):
        # String form "[a, b, ...]" parses to a list of floats.
        self.assertEqual(change_string_to_vector('[0, 1, 0.06, 3,4,5.4, 0.0, -1]'),
                         [0.0, 1.0, 0.06, 3.0, 4.0, 5.4, 0.0, -1.0])
    def test_cos_sim_method(self):
        # Identical vectors: cosine similarity 1.0 > 0.5.
        input_token_string = '[0.0, 1.0, 0.06, 3.0, 4.0, 5.4, 0.0]'
        param_string = '[0.0, 1.0, 0.06, 3.0, 4.0, 5.4, 0.0] > 0.5'
        self.assertEqual(vec_cos_sim(input_token_string, param_string), True)
        input_token_string = '[0.0, 1.0, 0.06, 3.0, 4.0, 5.4, 0.0]'
        param_string = '[12.0, -1.0, 1.00, 3.0, -4.0, 5.4, 0.0] > 0.5'
        self.assertEqual(vec_cos_sim(input_token_string, param_string), False)
    def test_cos_dist_method(self):
        # Cosine distance is the complement of similarity.
        input_token_string = '[0.0, 1.0, 0.06, 3.0, 4.0, 5.4, 0.0]'
        param_string = '[0.0, 1.0, 0.06, 3.0, 4.0, 5.4, 0.0] > 0.5'
        self.assertEqual(vec_cos_dist(input_token_string, param_string), False)
        input_token_string = '[0.0, 1.0, 0.06, 3.0, 4.0, 5.4, 0.0]'
        param_string = '[12.0, -1.0, 1.00, 3.0, -4.0, 5.4, 0.0] > 0.5'
        self.assertEqual(vec_cos_dist(input_token_string, param_string), True)
    def test_man_dist_method(self):
        # Manhattan distance: zero for identical vectors.
        input_token_string = '[0.0, 1.0, 0.06, 3.0, 4.0, 5.4, 0.0]'
        param_string = '[0.0, 1.0, 0.06, 3.0, 4.0, 5.4, 0.0] == 0'
        self.assertEqual(vec_man_dist(input_token_string, param_string), True)
        input_token_string = '[0.0, 1.0, 0.06, 3.0, 4.0, 5.4, 0.0]'
        param_string = '[12.0, -1.0, 1.00, 3.0, -4.0, 5.4, 0.0] > 20'
        self.assertEqual(vec_man_dist(input_token_string, param_string), True)
if __name__ == '__main__':
    # Allow running this test module directly with the stdlib runner.
    unittest.main()
| gpl-3.0 |
n0trax/ansible | lib/ansible/modules/windows/win_tempfile.py | 47 | 2164 | #!/usr/bin/python
# coding: utf-8 -*-
# (c) 2017 Dag Wieers <dag@wieers.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: win_tempfile
version_added: "2.3"
author: Dag Wieers (@dagwieers)
short_description: Creates temporary files and directories.
description:
- Creates temporary files and directories.
- For non-Windows targets, please use the M(tempfile) module instead.
options:
state:
description:
- Whether to create file or directory.
choices: [ file, directory ]
default: file
path:
description:
- Location where temporary file or directory should be created.
- If path is not specified default system temporary directory (%TEMP%) will be used.
default: '%TEMP%'
prefix:
description:
- Prefix of file/directory name created by module.
default: ansible.
suffix:
description:
- Suffix of file/directory name created by module.
default: ''
notes:
- For non-Windows targets, please use the M(tempfile) module instead.
'''
EXAMPLES = r"""
- name: Create temporary build directory
win_tempfile:
state: directory
suffix: build
- name: Create temporary file
win_tempfile:
state: file
suffix: temp
"""
RETURN = r'''
path:
description: Path to created file or directory
returned: success
type: string
sample: C:\Users\Administrator\AppData\Local\Temp\ansible.bMlvdk
'''
| gpl-3.0 |
drawks/ansible | lib/ansible/plugins/action/dellos6.py | 38 | 4085 | #
# (c) 2016 Red Hat Inc.
#
# (c) 2017 Dell EMC.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import sys
import copy
from ansible import constants as C
from ansible.module_utils._text import to_text
from ansible.module_utils.connection import Connection
from ansible.plugins.action.network import ActionModule as ActionNetworkModule
from ansible.module_utils.network.dellos6.dellos6 import dellos6_provider_spec
from ansible.module_utils.network.common.utils import load_provider
from ansible.utils.display import Display
display = Display()
class ActionModule(ActionNetworkModule):
    """Action plugin for dellos6 modules.

    Bridges legacy ``provider``-style connection info onto a persistent
    ``network_cli`` connection and ensures the CLI session is in enable
    (not config) mode before the module runs.
    """
    def run(self, tmp=None, task_vars=None):
        del tmp  # tmp no longer has any effect
        self._config_module = True if self._task.action == 'dellos6_config' else False
        socket_path = None
        if self._play_context.connection == 'network_cli':
            # network_cli supplies connection details itself; a provider
            # dict is redundant and dropped with a warning.
            provider = self._task.args.get('provider', {})
            if any(provider.values()):
                display.warning('provider is unnecessary when using network_cli and will be ignored')
                del self._task.args['provider']
        elif self._play_context.connection == 'local':
            # Legacy 'local' connection: build a network_cli play context
            # from the provider dict and open a persistent connection.
            provider = load_provider(dellos6_provider_spec, self._task.args)
            pc = copy.deepcopy(self._play_context)
            pc.connection = 'network_cli'
            pc.network_os = 'dellos6'
            # Provider values win; fall back to the play context defaults.
            pc.remote_addr = provider['host'] or self._play_context.remote_addr
            pc.port = int(provider['port'] or self._play_context.port or 22)
            pc.remote_user = provider['username'] or self._play_context.connection_user
            pc.password = provider['password'] or self._play_context.password
            pc.private_key_file = provider['ssh_keyfile'] or self._play_context.private_key_file
            command_timeout = int(provider['timeout'] or C.PERSISTENT_COMMAND_TIMEOUT)
            pc.become = provider['authorize'] or False
            if pc.become:
                pc.become_method = 'enable'
                pc.become_pass = provider['auth_pass']
            display.vvv('using connection plugin %s' % pc.connection, pc.remote_addr)
            connection = self._shared_loader_obj.connection_loader.get('persistent', pc, sys.stdin)
            connection.set_options(direct={'persistent_command_timeout': command_timeout})
            socket_path = connection.run()
            display.vvvv('socket_path: %s' % socket_path, pc.remote_addr)
            if not socket_path:
                return {'failed': True,
                        'msg': 'unable to open shell. Please see: ' +
                               'https://docs.ansible.com/ansible/network_debug_troubleshooting.html#unable-to-open-shell'}
            task_vars['ansible_socket'] = socket_path
        # make sure we are in the right cli context which should be
        # enable mode and not config module
        if socket_path is None:
            socket_path = self._connection.socket_path
        conn = Connection(socket_path)
        out = conn.get_prompt()
        # A prompt ending in ')#' indicates config mode; back out until
        # we reach enable mode.
        while to_text(out, errors='surrogate_then_replace').strip().endswith(')#'):
            display.vvvv('wrong context, sending exit to device', self._play_context.remote_addr)
            conn.send_command('exit')
            out = conn.get_prompt()
        result = super(ActionModule, self).run(task_vars=task_vars)
        return result
| gpl-3.0 |
Iristyle/ChocolateyPackages | EthanBrown.SublimeText2.WebPackages/tools/PackageCache/Emmet/emmet/file.py | 21 | 3498 | '''
@author Sergey Chikuyonok (serge.che@gmail.com)
@link http://chikuyonok.ru
'''
import sys
import os.path
import re
is_python3 = sys.version_info[0] > 2
try:
if is_python3:
import urllib.request as urllib2
else:
import urllib2
except Exception as e:
pass
def is_url(path):
    """Return a truthy match object when *path* starts with http:// or https://."""
    scheme_pattern = re.compile(r'^https?://', re.IGNORECASE)
    return scheme_pattern.match(path)
def read_http(url, size=-1, mode=None):
    # Fetch up to *size* bytes (-1 = all) from *url* with a 5s timeout.
    # *mode* is accepted only for signature compatibility with read_file()
    # and is ignored.
    response = urllib2.urlopen(url, timeout=5)
    return response.read(size)
def read_file(path, size=-1, mode='rb'):
    """Read *size* bytes (default: all) from the file at *path*.

    In text mode on Python 3 the encoding is pinned to utf-8 so results
    do not depend on the platform's locale encoding.
    """
    open_kwargs = {}
    if sys.version_info[0] > 2 and 'b' not in mode:
        open_kwargs['encoding'] = 'utf-8'
    with open(path, mode, **open_kwargs) as handle:
        return handle.read(size)
class File():
    def __init__(self):
        # Stateless helper object; nothing to initialize.
        pass
    def _read(self, path, size, mode='rb'):
        # Dispatch: HTTP reader for URLs, filesystem reader otherwise.
        reader = is_url(path) and read_http or read_file
        return reader(path, size, mode)
    def read(self, path, size, callback=None):
        """
        Read file content and return it
        @param path: File's relative or absolute path
        @type path: str
        @return: str

        Node-style callback contract: on success calls
        callback(None, content), on failure callback(error_string, None).
        """
        try:
            content = self._read(path, size)
            # return as array of character codes since PyV8 may corrupt
            # binary data when python string is translated into JS string
            if is_python3:
                content = [ch for ch in content]
            else:
                content = [ord(ch) for ch in content]
        except Exception as e:
            return callback(str(e), None)
        callback(None, content)
    def read_text(self, path, size, callback=None):
        """
        Read file content and return it
        @param path: File's relative or absolute path
        @type path: str
        @return: str

        Text-mode variant of read(); delivers a unicode string via the
        same callback(error, content) convention.
        """
        try:
            content = self._read(path, size, 'r')
            if not is_python3:
                # Python 2 returns bytes from text-mode reads; normalize.
                content = content.decode('utf-8')
        except Exception as e:
            return callback(str(e), None)
        callback(None, content)
    def locate_file(self, editor_file, file_name):
        """
        Locate <code>file_name</code> file that relates to <code>editor_file</code>.
        File name may be absolute or relative path
        @type editor_file: str
        @type file_name: str
        @return String or None if <code>file_name</code> cannot be located
        """
        if is_url(file_name):
            return file_name
        result = None
        previous_parent = ''
        # Walk up the directory tree from the editor file, stopping at the
        # filesystem root (when dirname() stops changing the path).
        parent = os.path.dirname(editor_file)
        while parent and os.path.exists(parent) and parent != previous_parent:
            tmp = self.create_path(parent, file_name)
            if os.path.exists(tmp):
                result = tmp
                break
            previous_parent = parent
            parent = os.path.dirname(parent)
        return result
def create_path(self, parent, file_name):
"""
Creates absolute path by concatenating <code>parent</code> and <code>file_name</code>.
If <code>parent</code> points to file, its parent directory is used
@type parent: str
@type file_name: str
@return: str
"""
result = ''
file_name = file_name.lstrip('/')
if os.path.exists(parent):
if os.path.isfile(parent):
parent = os.path.dirname(parent)
result = os.path.normpath(os.path.join(parent, file_name))
return result
def save(self, file, content):
"""
Saves <code>content</code> as <code>file</code>
@param file: File's asolute path
@type file: str
@param content: File content
@type content: str
"""
try:
fp = open(file, 'wb')
except:
fdirs, fname = os.path.split(file)
if fdirs:
os.makedirs(fdirs)
fp = open(file, 'wb')
fp.write(content)
fp.close()
def get_ext(self, file):
"""
Returns file extention in lower case
@type file: str
@return: str
"""
ext = os.path.splitext(file)[1]
if ext:
ext = ext[1:]
return ext.lower()
| mit |
p1c2u/k2 | contact/forms.py | 2 | 11230 | """
A base contact form for allowing users to send email messages through
a web interface, and a subclass demonstrating useful functionality.
"""
from socket import gethostbyaddr
from django import forms
from django.conf import settings
from django.core.mail import send_mail
from django.template import loader
from django.template import RequestContext
from django.contrib.sites.models import Site
from django.utils.translation import ugettext_lazy as _
# I put this on all required fields, because it's easier to pick up
# on them with CSS or JavaScript if they have a class of "required"
# in the HTML. Your mileage may vary.
attrs_dict = { 'class': 'required' }
REASON_CHOICES = (
('sugestia', _('suggestion')),
('pytanie', _('question')),
('zgloszenie', _('bug report')),
('rejestracja', _('registration')),
('inne', _('other')),
)
class ContactForm(forms.Form):
    """
    Base contact form class from which all contact form classes should
    inherit.

    If you don't need any custom functionality, you can simply use
    this form to provide basic contact functionality; it will collect
    name, email address and message.

    The ``contact_form`` view included in this application knows how
    to work with this form and can handle many types of subclasses as
    well (see below for a discussion of the important points), so in
    many cases it will be all that you need. If you'd like to use this
    form or a subclass of it from one of your own views, just do the
    following:

    1. When you instantiate the form, pass the current ``HttpRequest``
       object to the constructor as the keyword argument ``request``;
       this is used internally by the base implementation, and also
       made available so that subclasses can add functionality which
       relies on inspecting the request.

    2. To send the message, call the form's ``save`` method, which
       accepts the keyword argument ``fail_silently`` and defaults it
       to ``False``. This argument is passed directly to
       ``send_mail``, and allows you to suppress or raise exceptions
       as needed for debugging. The ``save`` method has no return
       value.

    Other than that, treat it like any other form; validity checks and
    validated data are handled normally, through the ``is_valid``
    method and the ``cleaned_data`` dictionary.

    Base implementation
    -------------------

    Under the hood, this form uses a somewhat abstracted interface in
    order to make it easier to subclass and add functionality. There
    are several important attributes subclasses may want to look at
    overriding, all of which will work (in the base implementation) as
    either plain attributes or as callable methods:

    * ``from_email`` -- used to get the address to use in the
      ``From:`` header of the message. The base implementation returns
      the value of the ``DEFAULT_FROM_EMAIL`` setting.

    * ``message`` -- used to get the message body as a string. The
      base implementation renders a template using the form's
      ``cleaned_data`` dictionary as context.

    * ``recipient_list`` -- used to generate the list of recipients
      for the message. The base implementation returns the email
      addresses specified in the ``MANAGERS`` setting.

    * ``subject`` -- used to generate the subject line for the
      message. The base implementation returns the string 'Message
      sent through the web site', with the name of the current
      ``Site`` prepended.

    * ``template_name`` -- used by the base ``message`` method to
      determine which template to use for rendering the
      message. Default is ``contact/form.txt``.

    Internally, the base implementation ``_get_message_dict`` method
    collects ``from_email``, ``message``, ``recipient_list`` and
    ``subject`` into a dictionary, which the ``save`` method then
    passes directly to ``send_mail`` as keyword arguments.

    Particularly important is the ``message`` attribute, with its base
    implementation as a method which renders a template; because it
    passes ``cleaned_data`` as the template context, any additional
    fields added by a subclass will automatically be available in the
    template. This means that many useful subclasses can get by with
    just adding a few fields and possibly overriding
    ``template_name``.

    Much useful functionality can be achieved in subclasses without
    having to override much of the above; adding additional validation
    methods works the same as any other form, and typically only a few
    items -- ``recipient_list`` and ``subject_line``, for example,
    need to be overridden to achieve customized behavior.

    Other notes for subclassing
    ---------------------------

    Subclasses which want to inspect the current ``HttpRequest`` to
    add functionality can access it via the attribute ``request``; the
    base ``message`` takes advantage of this to use ``RequestContext``
    when rendering its template. See the ``AkismetContactForm``
    subclass in this file for an example of using the request to
    perform additional validation.

    Subclasses which override ``__init__`` need to accept ``*args``
    and ``**kwargs``, and pass them via ``super`` in order to ensure
    proper behavior.

    Subclasses should be careful if overriding ``_get_message_dict``,
    since that method **must** return a dictionary suitable for
    passing directly to ``send_mail`` (unless ``save`` is overridden
    as well).

    Overriding ``save`` is relatively safe, though remember that code
    which uses your form will expect ``save`` to accept the
    ``fail_silently`` keyword argument. In the base implementation,
    that argument defaults to ``False``, on the assumption that it's
    far better to notice errors than to silently not send mail from
    the contact form (see also the Zen of Python: "Errors should never
    pass silently, unless explicitly silenced").
    """
    def __init__(self, data=None, files=None, request=None, *args, **kwargs):
        if request is None:
            raise TypeError("Keyword argument 'request' must be supplied")
        super(ContactForm, self).__init__(data=data, files=files, *args, **kwargs)
        self.request = request

    title = forms.CharField(max_length=100,
                            widget=forms.TextInput(attrs=attrs_dict),
                            label=u'Subject')
    email = forms.EmailField(widget=forms.TextInput(attrs=dict(attrs_dict,
                                                               maxlength=200)),
                             label=u'Your email address')
    reason = forms.ChoiceField(choices=REASON_CHOICES)
    body = forms.CharField(widget=forms.Textarea(attrs=attrs_dict),
                           label=u'Your message')

    from_email = settings.DEFAULT_FROM_EMAIL
    subject_template_name = "contact/form_subject.txt"
    template_name = 'contact/form.txt'

    def message(self):
        """
        Render the body of the message to a string.
        """
        if callable(self.template_name):
            template_name = self.template_name()
        else:
            template_name = self.template_name
        return loader.render_to_string(template_name,
                                       self.get_context())

    def subject(self):
        """
        Render the subject of the message to a string.
        """
        subject = loader.render_to_string(self.subject_template_name,
                                          self.get_context())
        # Collapse the rendered subject onto a single line; newlines in a
        # subject would be interpreted as additional mail headers.
        return ''.join(subject.splitlines())

    def recipient_list(self):
        """Return the recipient list with reason tag included."""
        # Turn "manager@example.com" into "manager+<reason>@example.com" so
        # recipients can filter by the selected reason.
        return [mail_tuple[1].replace('@', '+%s@' % self.cleaned_data['reason'], 1)
                for mail_tuple in settings.MANAGERS]

    def get_context(self):
        """
        Return the context used to render the templates for the email
        subject and body.

        By default, this context includes:

        * All of the validated values in the form, as variables of the
          same names as their fields.

        * The current ``Site`` object, as the variable ``site``.

        * Any additional variables added by context processors (this
          will be a ``RequestContext``).
        """
        if not self.is_valid():
            raise ValueError("Cannot generate Context from invalid contact form")
        # NOTE: gethostbyaddr returns a (hostname, aliases, addresses) tuple
        # and may raise socket.herror when the reverse lookup fails.
        return RequestContext(self.request,
                              dict(self.cleaned_data,
                                   ip=self.request.META['REMOTE_ADDR'],
                                   host=gethostbyaddr(self.request.META['REMOTE_ADDR']),
                                   user_agent=self.request.META['HTTP_USER_AGENT'],
                                   site=Site.objects.get_current()))

    def get_message_dict(self):
        """
        Generate the various parts of the message and return them in a
        dictionary, suitable for passing directly as keyword arguments
        to ``django.core.mail.send_mail()``.

        By default, the following values are returned:

        * ``from_email``
        * ``message``
        * ``recipient_list``
        * ``subject``
        """
        if not self.is_valid():
            raise ValueError("Message cannot be sent from invalid contact form")
        message_dict = {}
        for message_part in ('from_email', 'message', 'recipient_list', 'subject'):
            attr = getattr(self, message_part)
            # A conditional expression replaces the old
            # ``callable(attr) and attr() or attr`` idiom: with the and/or
            # form, a *falsy* result (e.g. an empty message or an empty
            # recipient list) silently fell back to the bound method object
            # itself, which send_mail cannot handle.
            message_dict[message_part] = attr() if callable(attr) else attr
        return message_dict

    def save(self, fail_silently=False):
        """
        Build and send the email message.
        """
        send_mail(fail_silently=fail_silently, **self.get_message_dict())
class AkismetContactForm(ContactForm):
    """
    Contact form identical to ``ContactForm`` except that the message
    body is additionally run through the Akismet spam filter.

    Requires the setting ``AKISMET_API_KEY`` to hold a valid Akismet
    API key; when the setting is absent or empty the check is skipped.
    """
    def clean_body(self):
        """
        Validate the message body against the Akismet service, raising
        ``ValidationError`` when it is classified as spam.
        """
        api_key = getattr(settings, 'AKISMET_API_KEY', '')
        if api_key and 'body' in self.cleaned_data:
            from akismet import Akismet
            from django.utils.encoding import smart_str
            akismet_api = Akismet(
                key=api_key,
                blog_url='http://%s/' % Site.objects.get_current().domain)
            if akismet_api.verify_key():
                request_meta = self.request.META
                akismet_data = {'comment_type': 'comment',
                                'referer': request_meta.get('HTTP_REFERER', ''),
                                'user_ip': request_meta.get('REMOTE_ADDR', ''),
                                'user_agent': request_meta.get('HTTP_USER_AGENT', '')}
                is_spam = akismet_api.comment_check(
                    smart_str(self.cleaned_data['body']),
                    data=akismet_data, build_data=True)
                if is_spam:
                    raise forms.ValidationError(u"Akismet thinks this message is spam")
        return self.cleaned_data['body']
| gpl-2.0 |
jennolsen84/PyTables | bench/copy-bench.py | 13 | 1095 | from __future__ import print_function
import tables
import sys
import time
# Validate the command line up front.  Previously the usage string was
# passed to print() as a second positional argument (so "%s" was printed
# literally) and the script fell through to crash on sys.argv[1].
if len(sys.argv) != 3:
    print("usage: %s source_file dest_file" % sys.argv[0])
    sys.exit(1)
filesrc = sys.argv[1]
filedest = sys.argv[2]
filehsrc = tables.open_file(filesrc)
filehdest = tables.open_file(filedest, 'w')
ntables = 0
tsize = 0
t1 = time.time()
# Walk every group in the source file, mirroring the group hierarchy in the
# destination and copying each Table leaf found along the way.
for group in filehsrc.walk_groups():
    if isinstance(group._v_parent, tables.File):
        # Children of the root are copied directly into the destination root.
        groupdest = filehdest.root
    else:
        pathname = group._v_parent._v_pathname
        groupdest = filehdest.create_group(pathname, group._v_name,
                                           title=group._v_title)
    for table in filehsrc.list_nodes(group, classname='Table'):
        print("copying table -->", table)
        table.copy(groupdest, table.name)
        ntables += 1
        tsize += table.nrows * table.rowsize
tsizeMB = tsize / (1024 * 1024)
ttime = round(time.time() - t1, 3)
speed = round(tsizeMB / ttime, 2)
print("Copied %s tables for a total of %s MB in %s seconds (%s MB/s)" %
      (ntables, tsizeMB, ttime, speed))
filehsrc.close()
filehdest.close()
| bsd-3-clause |
Versent/ansible | v1/ansible/utils/module_docs.py | 85 | 4484 | #!/usr/bin/env python
# (c) 2012, Jan-Piet Mens <jpmens () gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
import os
import sys
import ast
import yaml
import traceback
from collections import MutableMapping, MutableSet, MutableSequence
from ansible import utils
# Modules that are exempt from the documentation requirement: no
# DOCUMENTATION string is expected for these.
BLACKLIST_MODULES = [
    'async_wrapper', 'accelerate', 'async_status'
]
def get_docstring(filename, verbose=False):
    """
    Search for assignment of the DOCUMENTATION and EXAMPLES variables
    in the given file.
    Parse DOCUMENTATION from YAML and return the YAML doc or None
    together with EXAMPLES and RETURN, as plain text.
    DOCUMENTATION can be extended using documentation fragments
    loaded by the PluginLoader from the module_docs_fragments
    directory.

    :arg filename: path of the module source file to inspect
    :arg verbose: when true, print the traceback and a notice if parsing fails
    :returns: tuple (doc dict or None, examples text or None, return docs or None)
    """
    doc = None
    plainexamples = None
    returndocs = None
    try:
        # Parse the module source instead of importing it, so the module's
        # code is never executed.  (Thank you, Habbie, for this bit of code :-)
        with open(filename) as module_file:
            M = ast.parse(module_file.read())
        for child in M.body:
            if isinstance(child, ast.Assign):
                if 'DOCUMENTATION' in (t.id for t in child.targets):
                    doc = yaml.safe_load(child.value.s)
                    fragment_slug = doc.get('extends_documentation_fragment',
                                            'doesnotexist').lower()
                    # Allow the module to specify a var other than DOCUMENTATION
                    # to pull the fragment from, using dot notation as a separator
                    if '.' in fragment_slug:
                        fragment_name, fragment_var = fragment_slug.split('.', 1)
                        fragment_var = fragment_var.upper()
                    else:
                        fragment_name, fragment_var = fragment_slug, 'DOCUMENTATION'
                    if fragment_slug != 'doesnotexist':
                        fragment_class = utils.plugins.fragment_loader.get(fragment_name)
                        assert fragment_class is not None
                        fragment_yaml = getattr(fragment_class, fragment_var, '{}')
                        fragment = yaml.safe_load(fragment_yaml)
                        # 'notes' are additive: merge them into the module's own
                        # notes rather than letting the fragment overwrite them.
                        if 'notes' in fragment:
                            notes = fragment.pop('notes')
                            if notes:
                                if 'notes' not in doc:
                                    doc['notes'] = []
                                doc['notes'].extend(notes)
                        if 'options' not in fragment:
                            raise Exception("missing options in fragment, possibly misformatted?")
                        for key, value in fragment.items():
                            if key not in doc:
                                doc[key] = value
                            else:
                                # Merge according to the container type.
                                if isinstance(doc[key], MutableMapping):
                                    doc[key].update(value)
                                elif isinstance(doc[key], MutableSet):
                                    doc[key].add(value)
                                elif isinstance(doc[key], MutableSequence):
                                    doc[key] = sorted(frozenset(doc[key] + value))
                                else:
                                    raise Exception("Attempt to extend a documentation fragment of unknown type")
                if 'EXAMPLES' in (t.id for t in child.targets):
                    plainexamples = child.value.s[1:]  # Skip first empty line
                if 'RETURN' in (t.id for t in child.targets):
                    returndocs = child.value.s[1:]
    except Exception:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit are
        # not swallowed; the leftover unconditional "# temp" traceback dump
        # (which printed twice in verbose mode) is gone.
        if verbose:
            traceback.print_exc()
            print("unable to parse %s" % filename)
    return doc, plainexamples, returndocs
| gpl-3.0 |
TOCyna/tabelinha | flask/lib/python2.7/site-packages/werkzeug/contrib/jsrouting.py | 513 | 8564 | # -*- coding: utf-8 -*-
"""
werkzeug.contrib.jsrouting
~~~~~~~~~~~~~~~~~~~~~~~~~~
Addon module that allows to create a JavaScript function from a map
that generates rules.
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
# Prefer simplejson (historically faster), fall back to the stdlib json
# module, and finally to a stub that fails loudly the first time it is used.
try:
    from simplejson import dumps
except ImportError:
    try:
        from json import dumps
    except ImportError:
        def dumps(*args):
            raise RuntimeError('simplejson required for jsrouting')
from inspect import getmro
from werkzeug.routing import NumberConverter
from werkzeug._compat import iteritems
def render_template(name_parts, rules, converters):
    """Render the client-side URL-building JavaScript.

    ``name_parts`` is the dotted target name split into components (empty
    for an anonymous expression), ``rules`` is a JSON-serialized list of
    rule descriptions and ``converters`` a list of JavaScript converter
    function sources.  Returns the JavaScript source as a unicode string.
    """
    result = u''
    if name_parts:
        # Ensure every intermediate object of a dotted name exists before
        # assigning the function to the final attribute.
        for idx in range(0, len(name_parts) - 1):
            name = u'.'.join(name_parts[:idx + 1])
            result += u"if (typeof %s === 'undefined') %s = {}\n" % (name, name)
        result += '%s = ' % '.'.join(name_parts)
    # Self-contained IIFE: helper functions plus the returned builder that
    # matches an endpoint/arguments pair against the serialized rules.
    result += """(function (server_name, script_name, subdomain, url_scheme) {
    var converters = [%(converters)s];
    var rules = %(rules)s;
    function in_array(array, value) {
        if (array.indexOf != undefined) {
            return array.indexOf(value) != -1;
        }
        for (var i = 0; i < array.length; i++) {
            if (array[i] == value) {
                return true;
            }
        }
        return false;
    }
    function array_diff(array1, array2) {
        array1 = array1.slice();
        for (var i = array1.length-1; i >= 0; i--) {
            if (in_array(array2, array1[i])) {
                array1.splice(i, 1);
            }
        }
        return array1;
    }
    function split_obj(obj) {
        var names = [];
        var values = [];
        for (var name in obj) {
            if (typeof(obj[name]) != 'function') {
                names.push(name);
                values.push(obj[name]);
            }
        }
        return {names: names, values: values, original: obj};
    }
    function suitable(rule, args) {
        var default_args = split_obj(rule.defaults || {});
        var diff_arg_names = array_diff(rule.arguments, default_args.names);
        for (var i = 0; i < diff_arg_names.length; i++) {
            if (!in_array(args.names, diff_arg_names[i])) {
                return false;
            }
        }
        if (array_diff(rule.arguments, args.names).length == 0) {
            if (rule.defaults == null) {
                return true;
            }
            for (var i = 0; i < default_args.names.length; i++) {
                var key = default_args.names[i];
                var value = default_args.values[i];
                if (value != args.original[key]) {
                    return false;
                }
            }
        }
        return true;
    }
    function build(rule, args) {
        var tmp = [];
        var processed = rule.arguments.slice();
        for (var i = 0; i < rule.trace.length; i++) {
            var part = rule.trace[i];
            if (part.is_dynamic) {
                var converter = converters[rule.converters[part.data]];
                var data = converter(args.original[part.data]);
                if (data == null) {
                    return null;
                }
                tmp.push(data);
                processed.push(part.name);
            } else {
                tmp.push(part.data);
            }
        }
        tmp = tmp.join('');
        var pipe = tmp.indexOf('|');
        var subdomain = tmp.substring(0, pipe);
        var url = tmp.substring(pipe+1);
        var unprocessed = array_diff(args.names, processed);
        var first_query_var = true;
        for (var i = 0; i < unprocessed.length; i++) {
            if (first_query_var) {
                url += '?';
            } else {
                url += '&';
            }
            first_query_var = false;
            url += encodeURIComponent(unprocessed[i]);
            url += '=';
            url += encodeURIComponent(args.original[unprocessed[i]]);
        }
        return {subdomain: subdomain, path: url};
    }
    function lstrip(s, c) {
        while (s && s.substring(0, 1) == c) {
            s = s.substring(1);
        }
        return s;
    }
    function rstrip(s, c) {
        while (s && s.substring(s.length-1, s.length) == c) {
            s = s.substring(0, s.length-1);
        }
        return s;
    }
    return function(endpoint, args, force_external) {
        args = split_obj(args);
        var rv = null;
        for (var i = 0; i < rules.length; i++) {
            var rule = rules[i];
            if (rule.endpoint != endpoint) continue;
            if (suitable(rule, args)) {
                rv = build(rule, args);
                if (rv != null) {
                    break;
                }
            }
        }
        if (rv == null) {
            return null;
        }
        if (!force_external && rv.subdomain == subdomain) {
            return rstrip(script_name, '/') + '/' + lstrip(rv.path, '/');
        } else {
            return url_scheme + '://'
                   + (rv.subdomain ? rv.subdomain + '.' : '')
                   + server_name + rstrip(script_name, '/')
                   + '/' + lstrip(rv.path, '/');
        }
    };
})""" % {'converters': u', '.join(converters),
         'rules': rules}
    return result
def generate_map(map, name='url_map'):
    """
    Return JavaScript source implementing the URL building rules of
    *map*, for use with a MapAdapter's generate_javascript method.

    Without *name* the generated code is a bare expression evaluating to
    a function; with *name* (dotted names such as ``'obj.url_for'`` are
    resolved) it is a statement assigning the function to that name.

    simplejson (or the stdlib json module) must be installed.  Note that
    the generated code exposes every rule of the map to clients; do not
    use JavaScript generation if the rules contain sensitive data.
    """
    from warnings import warn
    warn(DeprecationWarning('This module is deprecated'))
    map.update()
    rule_specs = []
    converter_funcs = []
    for rule in map.iter_rules():
        trace = []
        for is_dynamic, data in rule._trace:
            trace.append({'is_dynamic': is_dynamic, 'data': data})
        # De-duplicate converter functions: each rule stores only an index
        # into the shared converter list.
        converter_indexes = {}
        for key, converter in iteritems(rule._converters):
            js_func = js_to_url_function(converter)
            if js_func in converter_funcs:
                position = converter_funcs.index(js_func)
            else:
                converter_funcs.append(js_func)
                position = len(converter_funcs) - 1
            converter_indexes[key] = position
        rule_specs.append({
            u'endpoint': rule.endpoint,
            u'arguments': list(rule.arguments),
            u'converters': converter_indexes,
            u'trace': trace,
            u'defaults': rule.defaults
        })
    return render_template(name_parts=name and name.split('.') or [],
                           rules=dumps(rule_specs),
                           converters=converter_funcs)
def generate_adapter(adapter, name='url_for', map_name='url_map'):
    """Return a JavaScript statement binding *name* to the URL builder
    produced by *map_name*, parameterized with the adapter's server
    name, script name, subdomain and URL scheme."""
    parts = {
        u'server_name': dumps(adapter.server_name),
        u'script_name': dumps(adapter.script_name),
        u'subdomain': dumps(adapter.subdomain),
        u'url_scheme': dumps(adapter.url_scheme),
        u'name': name,
        u'map_name': map_name
    }
    return u'''\
var %(name)s = %(map_name)s(
    %(server_name)s,
    %(script_name)s,
    %(subdomain)s,
    %(url_scheme)s
);''' % parts
def js_to_url_function(converter):
    """Return the JavaScript to-URL conversion function for *converter*.

    A converter may supply its own body via a ``js_to_url_function``
    method; otherwise the MRO of its class is searched in the
    ``js_to_url_functions`` registry.  The fallback is the plain
    ``encodeURIComponent`` builtin.
    """
    if hasattr(converter, 'js_to_url_function'):
        return '(function(value) { %s })' % converter.js_to_url_function()
    for cls in getmro(type(converter)):
        if cls in js_to_url_functions:
            return '(function(value) { %s })' % js_to_url_functions[cls](converter)
    return 'encodeURIComponent'
def NumberConverter_js_to_url(conv):
    """Return the JS conversion body for a NumberConverter, zero-padding
    the stringified value when the converter requests fixed-width output."""
    if not conv.fixed_digits:
        return u'return value.toString();'
    return u'''\
var result = value.toString();
while (result.length < %s)
    result = '0' + result;
return result;''' % conv.fixed_digits
# Registry mapping converter classes to functions that produce the JS body
# used by ``js_to_url_function`` for instances of that class.
js_to_url_functions = {
    NumberConverter: NumberConverter_js_to_url
}
| gpl-2.0 |
chrisdroid/nexmon | utilities/aircrack-ng/scripts/airgraph-ng/graphviz/libDumpParse.py | 11 | 6855 | #!/usr/bin/python
#airodump parsing lib
#returns in an array of client and Ap information
#part of the airdrop-ng project
from sys import exit as Exit
class airDumpParse:
    """Parser for airodump-ng CSV dump files (Python 2)."""
    def parser(self,file):
        """
        Parse *file* (an airodump-ng CSV dump) in one call and return a
        dict holding the AP dictionary, client dictionary, AP<->client
        relationships ('capr') and the lists of unmatched clients.
        """
        self.capr = None
        self.NAP = None
        self.NA = None
        self.apDict = None
        self.clientDict = None
        fileOpenResults = self.airDumpOpen(file)
        self.airDumpParse(fileOpenResults)
        self.clientApChannelRelationship()
        return {'NA':self.NA,'capr':self.capr,'apDict':self.apDict,
                'clientDict':self.clientDict,'NAP':self.NAP}
    def airDumpOpen(self,file):
        """
        Takes one argument (the input file) and opens it for reading.
        Returns a list of lines with trailing whitespace stripped.
        """
        try:
            openedFile = open(file, "r")
        except IOError:
            print "Error Airodump File",file,"does not exist"
            Exit(1)
        data = openedFile.xreadlines()
        cleanedData = []
        for line in data:
            cleanedData.append(line.rstrip())
        openedFile.close()
        return cleanedData
    def airDumpParse(self,cleanedDump):
        """
        Split the cleaned dump into the AP section and the client section
        (using the two known airodump header lines as delimiters) and
        populate self.apDict and self.clientDict.
        """
        try: #some very basic error handling to make sure they are loading up the correct file
            try:
                apStart = cleanedDump.index('BSSID, First time seen, Last time seen, Channel, Speed, Privacy, Power, # beacons, # data, LAN IP, ESSID')
            except Exception:
                apStart = cleanedDump.index('BSSID, First time seen, Last time seen, channel, Speed, Privacy, Cipher, Authentication, Power, # beacons, # IV, LAN IP, ID-length, ESSID, Key')
            del cleanedDump[apStart] #remove the first line of text with the headings
            try:
                stationStart = cleanedDump.index('Station MAC, First time seen, Last time seen, Power, # packets, BSSID, Probed ESSIDs')
            except Exception:
                stationStart = cleanedDump.index('Station MAC, First time seen, Last time seen, Power, # packets, BSSID, ESSID')
        except Exception:
            print "Invalid input file. Please make sure you are loading an airodump txt file and not a pcap"
            Exit(1)
        del cleanedDump[stationStart] #Remove the heading line
        clientList = cleanedDump[stationStart:] #Splits all client data into its own list
        del cleanedDump[stationStart:] #The remaining list is all of the AP information
        self.apDict = self.apTag(cleanedDump)
        self.clientDict = self.clientTag(clientList)
        return
    def apTag(self,devices):
        """
        Build a dictionary keyed by BSSID from the raw AP lines, tagging
        each positional CSV field with its meaning.  Handles both the
        15-field and the 11-field airodump formats.
        """
        dict = {}
        for entry in devices:
            ap = {}
            string_list = entry.split(',')
            # Build a tagged dict from the positional list so later code can
            # refer to fields by name instead of index.
            len(string_list)
            if len(string_list) == 15:
                ap = {"bssid":string_list[0].replace(' ',''),
                    "fts":string_list[1],
                    "lts":string_list[2],
                    "channel":string_list[3].replace(' ',''),
                    "speed":string_list[4],
                    "privacy":string_list[5].replace(' ',''),
                    "cipher":string_list[6],
                    "auth":string_list[7],
                    "power":string_list[8],
                    "beacons":string_list[9],
                    "iv":string_list[10],
                    "ip":string_list[11],
                    "id":string_list[12],
                    "essid":string_list[13][1:],
                    "key":string_list[14]}
            elif len(string_list) == 11:
                ap = {"bssid":string_list[0].replace(' ',''),
                    "fts":string_list[1],
                    "lts":string_list[2],
                    "channel":string_list[3].replace(' ',''),
                    "speed":string_list[4],
                    "privacy":string_list[5].replace(' ',''),
                    "power":string_list[6],
                    "beacons":string_list[7],
                    "data":string_list[8],
                    "ip":string_list[9],
                    "essid":string_list[10][1:]}
            if len(ap) != 0:
                dict[string_list[0]] = ap
        return dict
    def clientTag(self,devices):
        """
        Build a dictionary keyed by station MAC from the raw client lines,
        tagging each positional CSV field with its meaning.
        """
        dict = {}
        for entry in devices:
            client = {}
            string_list = entry.split(',')
            if len(string_list) >= 7:
                client = {"station":string_list[0].replace(' ',''),
                    "fts":string_list[1],
                    "lts":string_list[2],
                    "power":string_list[3],
                    "packets":string_list[4],
                    "bssid":string_list[5].replace(' ',''),
                    "probe":string_list[6:][0:]}
            if len(client) != 0:
                dict[string_list[0]] = client
        return dict
    def clientApChannelRelationship(self):
        """
        Walk the parsed client dict and relate each client to its AP,
        populating self.capr (BSSID -> list of client MACs), self.NAP
        (clients associated to APs we cannot see) and self.NA
        (not-associated clients).
        """
        clients = self.clientDict
        AP = self.apDict
        NA = [] #create a var to keep the not associated clients mac's
        NAP = [] #create a var to keep track of associated clients mac's to AP's we cant see
        apCount = {} #count number of Aps dict is faster the list stored as BSSID:number of essids
        apClient = {} #dict that stores bssid and clients as a nested list
        for key in (clients):
            mac = clients[key] #mac is the MAC address of the client
            if mac["bssid"] != ' (notassociated) ': #one line of of our dictionary of clients
                if AP.has_key(mac["bssid"]): # if it is check to see its an AP we can see and have info on
                    if apClient.has_key(mac["bssid"]):
                        apClient[mac["bssid"]].extend([key]) #if key exists append new client
                    else:
                        apClient[mac["bssid"]] = [key] #create new key and append the client
                else: NAP.append(key) # stores the clients that are talking to an access point we cant see
            else: NA.append(key) #stores the lines of the not assocated AP's in a list
        self.NAP = NAP
        self.NA = NA
        self.capr = apClient
        return
| gpl-3.0 |
geminy/aidear | oss/qt/qt-everywhere-opensource-src-5.9.0/qtwebengine/src/3rdparty/chromium/third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/mod_pywebsocket/xhr_benchmark_handler.py | 76 | 3946 | # Copyright 2014 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the COPYING file or at
# https://developers.google.com/open-source/licenses/bsd
from mod_pywebsocket import util
class XHRBenchmarkHandler(object):
    """Streams benchmark payloads over a raw HTTP request/response pair.

    ``do_send`` verifies an uploaded body of 'a' characters and echoes its
    size back; ``do_receive`` streams a requested number of 'a' characters,
    optionally chunk-encoded.
    """

    def __init__(self, headers, rfile, wfile):
        self._logger = util.get_class_logger(self)

        self.headers = headers
        self.rfile = rfile
        self.wfile = wfile

    def do_send(self):
        """Consume the request body, verify it is all 'a's, and reply with
        the number of bytes received."""
        content_length = int(self.headers.getheader('Content-Length'))

        self._logger.debug('Requested to receive %s bytes', content_length)

        RECEIVE_BLOCK_SIZE = 1024 * 1024

        bytes_to_receive = content_length
        while bytes_to_receive > 0:
            bytes_to_receive_in_this_loop = bytes_to_receive
            if bytes_to_receive_in_this_loop > RECEIVE_BLOCK_SIZE:
                bytes_to_receive_in_this_loop = RECEIVE_BLOCK_SIZE
            received_data = self.rfile.read(bytes_to_receive_in_this_loop)
            for c in received_data:
                if c != 'a':
                    self._logger.debug('Request body verification failed')
                    return
            bytes_to_receive -= len(received_data)
        if bytes_to_receive < 0:
            self._logger.debug('Received %d more bytes than expected' %
                               (-bytes_to_receive))
            return

        # Return the number of received bytes back to the client.
        response_body = '%d' % content_length
        self.wfile.write(
            'HTTP/1.1 200 OK\r\n'
            'Content-Type: text/html\r\n'
            'Content-Length: %d\r\n'
            '\r\n%s' % (len(response_body), response_body))
        self.wfile.flush()

    def do_receive(self):
        """Parse "<size> <mode>" from the request body and stream that many
        'a' bytes back, chunked when mode is 'chunked'."""
        content_length = int(self.headers.getheader('Content-Length'))
        request_body = self.rfile.read(content_length)

        request_array = request_body.split(' ')
        if len(request_array) < 2:
            self._logger.debug('Malformed request body: %r', request_body)
            return

        # Parse the size parameter.
        bytes_to_send = request_array[0]
        try:
            bytes_to_send = int(bytes_to_send)
        except ValueError:
            # Was ``except ValueError, e:`` (Python-2-only syntax with an
            # unused binding); the bare form is valid on both 2 and 3.
            self._logger.debug('Malformed size parameter: %r', bytes_to_send)
            return
        self._logger.debug('Requested to send %s bytes', bytes_to_send)

        # Parse the transfer encoding parameter.
        chunked_mode = False
        mode_parameter = request_array[1]
        if mode_parameter == 'chunked':
            self._logger.debug('Requested chunked transfer encoding')
            chunked_mode = True
        elif mode_parameter != 'none':
            self._logger.debug('Invalid mode parameter: %r', mode_parameter)
            return

        # Write a header
        response_header = (
            'HTTP/1.1 200 OK\r\n'
            'Content-Type: application/octet-stream\r\n')
        if chunked_mode:
            response_header += 'Transfer-Encoding: chunked\r\n\r\n'
        else:
            response_header += (
                'Content-Length: %d\r\n\r\n' % bytes_to_send)
        self.wfile.write(response_header)
        self.wfile.flush()

        # Write a body
        SEND_BLOCK_SIZE = 1024 * 1024

        while bytes_to_send > 0:
            bytes_to_send_in_this_loop = bytes_to_send
            if bytes_to_send_in_this_loop > SEND_BLOCK_SIZE:
                bytes_to_send_in_this_loop = SEND_BLOCK_SIZE

            if chunked_mode:
                self.wfile.write('%x\r\n' % bytes_to_send_in_this_loop)
            self.wfile.write('a' * bytes_to_send_in_this_loop)
            if chunked_mode:
                self.wfile.write('\r\n')
            self.wfile.flush()

            bytes_to_send -= bytes_to_send_in_this_loop

        if chunked_mode:
            self.wfile.write('0\r\n\r\n')
            self.wfile.flush()
| gpl-3.0 |
OCForks/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/tool/steps/suggestreviewers.py | 125 | 2506 | # Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from webkitpy.tool.steps.abstractstep import AbstractStep
from webkitpy.tool.steps.options import Options
class SuggestReviewers(AbstractStep):
    """Step that proposes up to five reviewers who recently modified the
    files touched by the patch, and optionally CCs them on the bug."""

    @classmethod
    def options(cls):
        return AbstractStep.options() + [
            Options.git_commit,
            Options.suggest_reviewers,
        ]

    def run(self, state):
        # Bail out unless the user asked for reviewer suggestions.
        if not self._options.suggest_reviewers:
            return

        candidates = self._tool.checkout().suggested_reviewers(
            self._options.git_commit, self._changed_files(state))[:5]
        print("The following reviewers have recently modified files in your patch:")
        print(", ".join([candidate.full_name for candidate in candidates]))

        if not state.get('bug_id'):
            return
        if not self._tool.user.confirm("Would you like to CC them?"):
            return
        candidate_emails = [candidate.bugzilla_email() for candidate in candidates]
        self._tool.bugs.add_cc_to_bug(state['bug_id'], candidate_emails)
| bsd-3-clause |
TheTypoMaster/chromium-crosswalk | third_party/android_testrunner/adb_interface.py | 42 | 19302 | #!/usr/bin/python2.4
#
#
# Copyright 2008, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provides an interface to communicate with the device via the adb command.
Assumes adb binary is currently on system path.
"""
# Python imports
import os
import string
import time
# local imports
import am_instrument_parser
import errors
import logger
import run_command
class AdbInterface:
"""Helper class for communicating with Android device via adb."""
DEVICE_TRACE_DIR = "/data/test_results/"
    def __init__(self, adb_path='adb'):
        """Constructor.

        Args:
          adb_path: Absolute path to the adb binary that should be used. Defaults
            to the adb found on the environment path.
        """
        self._adb_path = adb_path
        # argument to pass to adb, to direct command to specific device;
        # empty by default, set by the SetXxxTarget methods below
        self._target_arg = ""
    def SetEmulatorTarget(self):
        """Direct all future commands to the only running emulator (adb -e)."""
        self._target_arg = "-e"
def SetDeviceTarget(self):
"""Direct all future commands to the only connected USB device."""
self._target_arg = "-d"
def SetTargetSerial(self, serial):
"""Direct all future commands to Android target with the given serial."""
self._target_arg = "-s %s" % serial
def SendCommand(self, command_string, timeout_time=20, retry_count=3):
"""Send a command via adb.
Args:
command_string: adb command to run
timeout_time: number of seconds to wait for command to respond before
retrying
retry_count: number of times to retry command before raising
WaitForResponseTimedOutError
Returns:
string output of command
Raises:
WaitForResponseTimedOutError if device does not respond to command within time
"""
adb_cmd = "%s %s %s" % (self._adb_path, self._target_arg, command_string)
logger.SilentLog("about to run %s" % adb_cmd)
return run_command.RunCommand(adb_cmd, timeout_time=timeout_time,
retry_count=retry_count)
def SendShellCommand(self, cmd, timeout_time=20, retry_count=3):
"""Send a adb shell command.
Args:
cmd: adb shell command to run
timeout_time: number of seconds to wait for command to respond before
retrying
retry_count: number of times to retry command before raising
WaitForResponseTimedOutError
Returns:
string output of command
Raises:
WaitForResponseTimedOutError: if device does not respond to command
"""
return self.SendCommand("shell %s" % cmd, timeout_time=timeout_time,
retry_count=retry_count)
def BugReport(self, path):
"""Dumps adb bugreport to the file specified by the path.
Args:
path: Path of the file where adb bugreport is dumped to.
"""
bug_output = self.SendShellCommand("bugreport", timeout_time=60)
bugreport_file = open(path, "w")
bugreport_file.write(bug_output)
bugreport_file.close()
def Push(self, src, dest):
"""Pushes the file src onto the device at dest.
Args:
src: file path of host file to push
dest: destination absolute file path on device
"""
self.SendCommand("push %s %s" % (src, dest), timeout_time=60)
def Pull(self, src, dest):
"""Pulls the file src on the device onto dest on the host.
Args:
src: absolute file path of file on device to pull
dest: destination file path on host
Returns:
True if success and False otherwise.
"""
# Create the base dir if it doesn't exist already
if not os.path.exists(os.path.dirname(dest)):
os.makedirs(os.path.dirname(dest))
if self.DoesFileExist(src):
self.SendCommand("pull %s %s" % (src, dest), timeout_time=60)
return True
else:
logger.Log("ADB Pull Failed: Source file %s does not exist." % src)
return False
def DoesFileExist(self, src):
"""Checks if the given path exists on device target.
Args:
src: file path to be checked.
Returns:
True if file exists
"""
output = self.SendShellCommand("ls %s" % src)
error = "No such file or directory"
if error in output:
return False
return True
def EnableAdbRoot(self):
"""Enable adb root on device."""
output = self.SendCommand("root")
if "adbd is already running as root" in output:
return True
elif "restarting adbd as root" in output:
# device will disappear from adb, wait for it to come back
self.SendCommand("wait-for-device")
return True
else:
logger.Log("Unrecognized output from adb root: %s" % output)
return False
def StartInstrumentationForPackage(
self, package_name, runner_name, timeout_time=60*10,
no_window_animation=False, instrumentation_args={}):
"""Run instrumentation test for given package and runner.
Equivalent to StartInstrumentation, except instrumentation path is
separated into its package and runner components.
"""
instrumentation_path = "%s/%s" % (package_name, runner_name)
return self.StartInstrumentation(instrumentation_path, timeout_time=timeout_time,
no_window_animation=no_window_animation,
instrumentation_args=instrumentation_args)
def StartInstrumentation(
self, instrumentation_path, timeout_time=60*10, no_window_animation=False,
profile=False, instrumentation_args={}, silent_log=False):
"""Runs an instrumentation class on the target.
Returns a dictionary containing the key value pairs from the
instrumentations result bundle and a list of TestResults. Also handles the
interpreting of error output from the device and raises the necessary
exceptions.
Args:
instrumentation_path: string. It should be the fully classified package
name, and instrumentation test runner, separated by "/"
e.g. com.android.globaltimelaunch/.GlobalTimeLaunch
timeout_time: Timeout value for the am command.
no_window_animation: boolean, Whether you want window animations enabled
or disabled
profile: If True, profiling will be turned on for the instrumentation.
instrumentation_args: Dictionary of key value bundle arguments to pass to
instrumentation.
silent_log: If True, the invocation of the instrumentation test runner
will not be logged.
Returns:
(test_results, inst_finished_bundle)
test_results: a list of TestResults
inst_finished_bundle (dict): Key/value pairs contained in the bundle that
is passed into ActivityManager.finishInstrumentation(). Included in this
bundle is the return code of the Instrumentation process, any error
codes reported by the activity manager, and any results explicitly added
by the instrumentation code.
Raises:
WaitForResponseTimedOutError: if timeout occurred while waiting for
response to adb instrument command
DeviceUnresponsiveError: if device system process is not responding
InstrumentationError: if instrumentation failed to run
"""
command_string = self._BuildInstrumentationCommandPath(
instrumentation_path, no_window_animation=no_window_animation,
profile=profile, raw_mode=True,
instrumentation_args=instrumentation_args)
if silent_log:
logger.SilentLog(command_string)
else:
logger.Log(command_string)
(test_results, inst_finished_bundle) = (
am_instrument_parser.ParseAmInstrumentOutput(
self.SendShellCommand(command_string, timeout_time=timeout_time,
retry_count=2)))
if "code" not in inst_finished_bundle:
logger.Log('No code available. inst_finished_bundle contains: %s '
% inst_finished_bundle)
raise errors.InstrumentationError("no test results... device setup "
"correctly?")
if inst_finished_bundle["code"] == "0":
long_msg_result = "no error message"
if "longMsg" in inst_finished_bundle:
long_msg_result = inst_finished_bundle["longMsg"]
logger.Log("Error! Test run failed: %s" % long_msg_result)
raise errors.InstrumentationError(long_msg_result)
if "INSTRUMENTATION_ABORTED" in inst_finished_bundle:
logger.Log("INSTRUMENTATION ABORTED!")
raise errors.DeviceUnresponsiveError
return (test_results, inst_finished_bundle)
def StartInstrumentationNoResults(
self, package_name, runner_name, no_window_animation=False,
raw_mode=False, instrumentation_args={}):
"""Runs instrumentation and dumps output to stdout.
Equivalent to StartInstrumentation, but will dump instrumentation
'normal' output to stdout, instead of parsing return results. Command will
never timeout.
"""
adb_command_string = self.PreviewInstrumentationCommand(
package_name, runner_name, no_window_animation=no_window_animation,
raw_mode=raw_mode, instrumentation_args=instrumentation_args)
logger.Log(adb_command_string)
run_command.RunCommand(adb_command_string, return_output=False)
def PreviewInstrumentationCommand(
self, package_name, runner_name, no_window_animation=False,
raw_mode=False, instrumentation_args={}):
"""Returns a string of adb command that will be executed."""
inst_command_string = self._BuildInstrumentationCommand(
package_name, runner_name, no_window_animation=no_window_animation,
raw_mode=raw_mode, instrumentation_args=instrumentation_args)
command_string = "adb %s shell %s" % (self._target_arg, inst_command_string)
return command_string
def _BuildInstrumentationCommand(
self, package, runner_name, no_window_animation=False, profile=False,
raw_mode=True, instrumentation_args={}):
instrumentation_path = "%s/%s" % (package, runner_name)
return self._BuildInstrumentationCommandPath(
instrumentation_path, no_window_animation=no_window_animation,
profile=profile, raw_mode=raw_mode,
instrumentation_args=instrumentation_args)
def _BuildInstrumentationCommandPath(
self, instrumentation_path, no_window_animation=False, profile=False,
raw_mode=True, instrumentation_args={}):
command_string = "am instrument"
if no_window_animation:
command_string += " --no_window_animation"
if profile:
self._CreateTraceDir()
command_string += (
" -p %s/%s.dmtrace" %
(self.DEVICE_TRACE_DIR, instrumentation_path.split(".")[-1]))
for key, value in instrumentation_args.items():
command_string += " -e %s '%s'" % (key, value)
if raw_mode:
command_string += " -r"
command_string += " -w %s" % instrumentation_path
return command_string
def _CreateTraceDir(self):
ls_response = self.SendShellCommand("ls /data/trace")
if ls_response.strip("#").strip(string.whitespace) != "":
self.SendShellCommand("create /data/trace", "mkdir /data/trace")
self.SendShellCommand("make /data/trace world writeable",
"chmod 777 /data/trace")
def WaitForDevicePm(self, wait_time=120):
"""Waits for targeted device's package manager to be up.
Args:
wait_time: time in seconds to wait
Raises:
WaitForResponseTimedOutError if wait_time elapses and pm still does not
respond.
"""
logger.Log("Waiting for device package manager...")
self.SendCommand("wait-for-device", timeout_time=wait_time, retry_count=0)
# Now the device is there, but may not be running.
# Query the package manager with a basic command
try:
self._WaitForShellCommandContents("pm path android", "package:",
wait_time)
except errors.WaitForResponseTimedOutError:
raise errors.WaitForResponseTimedOutError(
"Package manager did not respond after %s seconds" % wait_time)
def WaitForInstrumentation(self, package_name, runner_name, wait_time=120):
"""Waits for given instrumentation to be present on device
Args:
wait_time: time in seconds to wait
Raises:
WaitForResponseTimedOutError if wait_time elapses and instrumentation
still not present.
"""
instrumentation_path = "%s/%s" % (package_name, runner_name)
logger.Log("Waiting for instrumentation to be present")
# Query the package manager
try:
command = "pm list instrumentation | grep %s" % instrumentation_path
self._WaitForShellCommandContents(command, "instrumentation:", wait_time,
raise_abort=False)
except errors.WaitForResponseTimedOutError :
logger.Log(
"Could not find instrumentation %s on device. Does the "
"instrumentation in test's AndroidManifest.xml match definition"
"in test_defs.xml?" % instrumentation_path)
raise
def WaitForProcess(self, name, wait_time=120):
"""Wait until a process is running on the device.
Args:
name: the process name as it appears in `ps`
wait_time: time in seconds to wait
Raises:
WaitForResponseTimedOutError if wait_time elapses and the process is
still not running
"""
logger.Log("Waiting for process %s" % name)
self.SendCommand("wait-for-device")
self._WaitForShellCommandContents("ps", name, wait_time)
def WaitForProcessEnd(self, name, wait_time=120):
"""Wait until a process is no longer running on the device.
Args:
name: the process name as it appears in `ps`
wait_time: time in seconds to wait
Raises:
WaitForResponseTimedOutError if wait_time elapses and the process is
still running
"""
logger.Log("Waiting for process %s to end" % name)
self._WaitForShellCommandContents("ps", name, wait_time, invert=True)
def _WaitForShellCommandContents(self, command, expected, wait_time,
raise_abort=True, invert=False):
"""Wait until the response to a command contains a given output.
Assumes that a only successful execution of "adb shell <command>" contains
the substring expected. Assumes that a device is present.
Args:
command: adb shell command to execute
expected: the string that should appear to consider the
command successful.
wait_time: time in seconds to wait
raise_abort: if False, retry when executing the command raises an
AbortError, rather than failing.
invert: if True, wait until the command output no longer contains the
expected contents.
Raises:
WaitForResponseTimedOutError: If wait_time elapses and the command has not
returned an output containing expected yet.
"""
# Query the device with the command
success = False
attempts = 0
wait_period = 5
while not success and (attempts*wait_period) < wait_time:
# assume the command will always contain expected in the success case
try:
output = self.SendShellCommand(command, retry_count=1,
timeout_time=wait_time)
if ((not invert and expected in output)
or (invert and expected not in output)):
success = True
except errors.AbortError, e:
if raise_abort:
raise
# ignore otherwise
if not success:
time.sleep(wait_period)
attempts += 1
if not success:
raise errors.WaitForResponseTimedOutError()
def WaitForBootComplete(self, wait_time=120):
"""Waits for targeted device's bootcomplete flag to be set.
Args:
wait_time: time in seconds to wait
Raises:
WaitForResponseTimedOutError if wait_time elapses and pm still does not
respond.
"""
logger.Log("Waiting for boot complete...")
self.SendCommand("wait-for-device")
# Now the device is there, but may not be running.
# Query the package manager with a basic command
boot_complete = False
attempts = 0
wait_period = 5
while not boot_complete and (attempts*wait_period) < wait_time:
output = self.SendShellCommand("getprop dev.bootcomplete", retry_count=1)
output = output.strip()
if output == "1":
boot_complete = True
else:
time.sleep(wait_period)
attempts += 1
if not boot_complete:
raise errors.WaitForResponseTimedOutError(
"dev.bootcomplete flag was not set after %s seconds" % wait_time)
def Sync(self, retry_count=3, runtime_restart=False):
"""Perform a adb sync.
Blocks until device package manager is responding.
Args:
retry_count: number of times to retry sync before failing
runtime_restart: stop runtime during sync and restart afterwards, useful
for syncing system libraries (core, framework etc)
Raises:
WaitForResponseTimedOutError if package manager does not respond
AbortError if unrecoverable error occurred
"""
output = ""
error = None
if runtime_restart:
self.SendShellCommand("setprop ro.monkey 1", retry_count=retry_count)
# manual rest bootcomplete flag
self.SendShellCommand("setprop dev.bootcomplete 0",
retry_count=retry_count)
self.SendShellCommand("stop", retry_count=retry_count)
try:
output = self.SendCommand("sync", retry_count=retry_count)
except errors.AbortError, e:
error = e
output = e.msg
if "Read-only file system" in output:
logger.SilentLog(output)
logger.Log("Remounting read-only filesystem")
self.SendCommand("remount")
output = self.SendCommand("sync", retry_count=retry_count)
elif "No space left on device" in output:
logger.SilentLog(output)
logger.Log("Restarting device runtime")
self.SendShellCommand("stop", retry_count=retry_count)
output = self.SendCommand("sync", retry_count=retry_count)
self.SendShellCommand("start", retry_count=retry_count)
elif error is not None:
# exception occurred that cannot be recovered from
raise error
logger.SilentLog(output)
if runtime_restart:
# start runtime and wait till boot complete flag is set
self.SendShellCommand("start", retry_count=retry_count)
self.WaitForBootComplete()
# press the MENU key, this will disable key guard if runtime is started
# with ro.monkey set to 1
self.SendShellCommand("input keyevent 82", retry_count=retry_count)
else:
self.WaitForDevicePm()
return output
def GetSerialNumber(self):
"""Returns the serial number of the targeted device."""
return self.SendCommand("get-serialno").strip()
| bsd-3-clause |
CodeJuan/scrapy | tests/test_cmdline/__init__.py | 41 | 1069 | import sys
from subprocess import Popen, PIPE
import unittest
from scrapy.utils.test import get_testenv
class CmdlineTest(unittest.TestCase):
def setUp(self):
self.env = get_testenv()
self.env['SCRAPY_SETTINGS_MODULE'] = 'tests.test_cmdline.settings'
def _execute(self, *new_args, **kwargs):
args = (sys.executable, '-m', 'scrapy.cmdline') + new_args
proc = Popen(args, stdout=PIPE, stderr=PIPE, env=self.env, **kwargs)
comm = proc.communicate()
return comm[0].strip()
def test_default_settings(self):
self.assertEqual(self._execute('settings', '--get', 'TEST1'), \
'default')
def test_override_settings_using_set_arg(self):
self.assertEqual(self._execute('settings', '--get', 'TEST1', '-s', 'TEST1=override'), \
'override')
def test_override_settings_using_envvar(self):
self.env['SCRAPY_TEST1'] = 'override'
self.assertEqual(self._execute('settings', '--get', 'TEST1'), \
'override')
| bsd-3-clause |
scribusproject/scribus-tools | md_import.py | 1 | 1176 | run_script = True
import scribus
from tempfile import NamedTemporaryFile
# Precondition checks: run_script stays True only when python-markdown is
# available, at least one object is selected, and the user did not cancel.
try:
    import markdown
except ImportError:
    # Catch only the missing-module case; the original bare "except:" would
    # also have swallowed unrelated errors such as KeyboardInterrupt.
    scribus.messageBox('python-markdown not installed',
        'You need to install python-markdown for this script to work', scribus.ICON_WARNING)
    run_script = False
run_script &= bool(scribus.getSelectedObject(0))  # We must have at least one selected object
if run_script and scribus.getSelectedObject(1):
    # More than one object is selected: let the user cancel a bulk import.
    result = scribus.messageBox('', 'More than one item selected, load all?',
        button1=scribus.BUTTON_CANCEL, button2=scribus.BUTTON_YES)
    if result == scribus.BUTTON_CANCEL:
        run_script = False
def main():
    """Import the chosen Markdown file into every selected Scribus text frame."""
    md_name = scribus.fileDialog("Select a file", 'Markdown (*.md)')
    if not md_name:
        # User cancelled the file dialog.
        return
    # Render the Markdown into a temporary HTML file; Scribus imports HTML,
    # not Markdown.
    f = NamedTemporaryFile(suffix='.html')
    markdown.markdownFromFile(md_name, f)
    f.flush()
    html_name = f.name
    # NOTE(review): re-opening a NamedTemporaryFile by name while it is still
    # open fails on Windows -- presumably this script targets Unix; confirm.
    i = 0
    # Walk the selection; getSelectedObject returns a falsy value past the end.
    while True:
        ob_name = scribus.getSelectedObject(i)
        if not ob_name:
            break
        if scribus.getObjectType(ob_name) == 'TextFrame':
            scribus.insertHtmlText(html_name, ob_name)
        i += 1
# Only run when all preconditions above were satisfied (markdown installed,
# at least one object selected, user did not cancel).
if run_script:
    main()
| gpl-3.0 |
tibotic/simple-pokemongo-bot | pokemongo_bot/cell_workers/collect_level_up_reward.py | 16 | 2535 | from pokemongo_bot.base_task import BaseTask
class CollectLevelUpReward(BaseTask):
    """Bot task that claims the item reward granted on each level up.

    On the first ``work()`` call the reward for the current level is always
    requested (so long-running bots can claim rewards missed while offline);
    afterwards a reward is only requested when the level has increased.
    """
    SUPPORTED_TASK_API_VERSION = 1

    # Cached player levels; 0 means "not observed yet".
    current_level = 0
    previous_level = 0

    def initialize(self):
        self.current_level = self._get_current_level()
        self.previous_level = 0

    def work(self):
        """Check the player level and collect the level-up reward if due."""
        self.current_level = self._get_current_level()
        # let's check level reward on bot initialization
        # to be able get rewards for old bots
        if self.previous_level == 0:
            self._collect_level_reward()
        # level up situation
        elif self.current_level > self.previous_level:
            self.emit_event(
                'level_up',
                formatted='Level up from {previous_level} to {current_level}',
                data={
                    'previous_level': self.previous_level,
                    'current_level': self.current_level
                }
            )
            self._collect_level_reward()
        self.previous_level = self.current_level

    def _collect_level_reward(self):
        """Request the reward for ``current_level`` and emit an event with it."""
        response_dict = self.bot.api.level_up_rewards(level=self.current_level)
        if 'status_code' in response_dict and response_dict['status_code'] == 1:
            data = (response_dict
                    .get('responses', {})
                    .get('LEVEL_UP_REWARDS', {})
                    .get('items_awarded', []))
            for item in data:
                # Annotate each awarded item with its human-readable name for
                # the event payload. (A dead "count" local computed with the
                # error-prone and/or idiom was removed here.)
                if 'item_id' in item and str(item['item_id']) in self.bot.item_list:
                    item['name'] = self.bot.item_list[str(item['item_id'])]
            self.emit_event(
                'level_up_reward',
                formatted='Received level up reward: {items}',
                data={
                    'items': data
                }
            )

    def _get_current_level(self):
        """Return the player level found in the inventory, or 0 if absent."""
        level = 0
        response_dict = self.bot.get_inventory()
        data = (response_dict
                .get('responses', {})
                .get('GET_INVENTORY', {})
                .get('inventory_delta', {})
                .get('inventory_items', {}))
        for item in data:
            level = (item
                     .get('inventory_item_data', {})
                     .get('player_stats', {})
                     .get('level', 0))
            # we found a level, no need to continue iterate
            if level:
                break
        return level
| mit |
codemac/s3cmd | S3/S3.py | 3 | 64402 | # -*- coding: utf-8 -*-
## Amazon S3 manager
## Author: Michal Ludvig <michal@logix.cz>
## http://www.logix.cz/michal
## License: GPL Version 2
## Copyright: TGRMN Software and contributors
import sys
import os
import time
import errno
import base64
import mimetypes
from xml.sax import saxutils
from logging import debug, info, warning, error
from stat import ST_SIZE
from urllib import quote_plus
try:
from hashlib import md5
except ImportError:
from md5 import md5
from Utils import *
from SortedDict import SortedDict
from AccessLog import AccessLog
from ACL import ACL, GranteeLogDelivery
from BidirMap import BidirMap
from Config import Config
from Exceptions import *
from MultiPart import MultiPartUpload
from S3Uri import S3Uri
from ConnMan import ConnMan
from Crypto import sign_string_v2, sign_string_v4, checksum_sha256_file, checksum_sha256_buffer
from ExitCodes import *
## Detect which python-magic flavour (if any) is installed and expose a
## uniform mime_magic_file(file) -> mime-type callable. Falls back to
## extension-based guessing via the mimetypes module when none is usable.
try:
    from ctypes import ArgumentError
    import magic
    try:
        ## https://github.com/ahupp/python-magic
        ## Always expect unicode for python 2
        ## (has Magic class but no "open()" function)
        magic_ = magic.Magic(mime=True)
        def mime_magic_file(file):
            return magic_.from_file(file)
    except TypeError:
        try:
            ## file-5.11 built-in python bindings
            ## Sources: http://www.darwinsys.com/file/
            ## Expects unicode since version 5.19, encoded strings before
            ## we can't tell if a given copy of the magic library will take a
            ## filesystem-encoded string or a unicode value, so try first
            ## with the unicode, then with the encoded string.
            ## (has Magic class and "open()" function)
            magic_ = magic.open(magic.MAGIC_MIME)
            magic_.load()
            def mime_magic_file(file):
                try:
                    return magic_.file(file)
                except (UnicodeDecodeError, UnicodeEncodeError, ArgumentError):
                    return magic_.file(deunicodise(file))
        except AttributeError:
            ## http://pypi.python.org/pypi/filemagic
            ## Accept gracefully both unicode and encoded
            ## (has Magic class but not "mime" argument and no "open()" function )
            magic_ = magic.Magic(flags=magic.MAGIC_MIME)
            def mime_magic_file(file):
                return magic_.id_filename(file)
    except AttributeError:
        ## Older python-magic versions doesn't have a "Magic" method
        ## Only except encoded strings
        ## (has no Magic class but "open()" function)
        magic_ = magic.open(magic.MAGIC_MIME)
        magic_.load()
        def mime_magic_file(file):
            return magic_.file(deunicodise(file))
except ImportError, e:
    if 'magic' in str(e):
        magic_message = "Module python-magic is not available."
    else:
        magic_message = "Module python-magic can't be used (%s)." % e.message
    magic_message += " Guessing MIME types based on file extensions."
    magic_warned = False
    def mime_magic_file(file):
        ## Warn once, then silently fall back to extension-based guessing.
        global magic_warned
        if (not magic_warned):
            warning(magic_message)
            magic_warned = True
        return mimetypes.guess_type(file)[0]
def mime_magic(file):
    """Return a (mimetype, charset) tuple for *file*, best effort.

    Normalises the different return shapes of the mime_magic_file()
    implementations selected above: a plain "type" string, a
    "type; charset=..." string, or None.
    """
    ## NOTE: So far in the code, "file" var is already unicode
    def _mime_magic(file):
        magictype = mime_magic_file(file)
        return magictype
    result = _mime_magic(file)
    if result is not None:
        if isinstance(result, str):
            if ';' in result:
                mimetype, charset = result.split(';')
                # NOTE(review): this slices len('charset') chars off the raw
                # fragment; with the usual "; charset=utf-8" shape the leading
                # space leaves "t=utf-8" behind -- confirm what callers expect.
                charset = charset[len('charset'):]
                result = (mimetype, charset)
            else:
                result = (result, None)
    if result is None:
        result = (None, None)
    return result
# Nothing is exported via "from S3 import *"; consumers import names explicitly.
__all__ = []
class S3Request(object):
    """A single HTTP request to the S3 API.

    Collects method, resource, headers, body and query parameters, and
    produces the final (method, resource, headers) triplet with
    authentication applied (AWS signature v2 or v4).
    """

    ## Maps bucket name -> AWS region as it becomes known; consulted when
    ## computing the signature v4 signing scope.
    region_map = {}

    def __init__(self, s3, method_string, resource, headers, body, params = None):
        """Prepare an S3 request.

        Args:
            s3: owning S3 connection object (supplies config and hostnames)
            method_string: HTTP verb, e.g. "GET" or "PUT"
            resource: dict with 'bucket' and 'uri' keys
            headers: mapping of HTTP headers (case-insensitive), may be None
            body: request payload (hashed for signature v4)
            params: optional dict of query-string parameters
        """
        if params is None:
            # Fixed: a mutable default dict ({}) would be shared across all
            # S3Request instances.
            params = {}
        self.s3 = s3
        self.headers = SortedDict(headers or {}, ignore_case = True)
        if len(self.s3.config.access_token)>0:
            # Temporary (role-based) credentials: refresh and attach the token.
            self.s3.config.role_refresh()
            self.headers['x-amz-security-token']=self.s3.config.access_token
        self.resource = resource
        self.method_string = method_string
        self.params = params
        self.body = body
        self.requester_pays()

    def requester_pays(self):
        """Attach the 'requester pays' header to GET/POST when configured."""
        if self.method_string == "GET" or self.method_string == "POST":
            if self.s3.config.requester_pays:
                self.headers['x-amz-request-payer'] = 'requester'

    def update_timestamp(self):
        """Drop any stale 'date' header and set a fresh 'x-amz-date'."""
        if self.headers.has_key("date"):
            del(self.headers["date"])
        self.headers["x-amz-date"] = time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime())

    def format_param_str(self):
        """
        Format URL parameters from self.params and returns
        ?parm1=val1&parm2=val2 or an empty string if there
        are no parameters. Output of this function should
        be appended directly to self.resource['uri']
        """
        param_str = ""
        for param in self.params:
            if self.params[param] not in (None, ""):
                param_str += "&%s=%s" % (param, self.params[param])
            else:
                # Value-less parameter, e.g. "?acl" or "?location".
                param_str += "&%s" % param
        return param_str and "?" + param_str[1:]

    def use_signature_v2(self):
        """Return True when this request must be signed with signature v2."""
        if self.s3.endpoint_requires_signature_v4:
            return False
        # in case of bad DNS name due to bucket name v2 will be used
        # this way we can still use capital letters in bucket names for the older regions
        if self.resource['bucket'] is None or not check_bucket_name_dns_conformity(self.resource['bucket']) or self.s3.config.signature_v2 or self.s3.fallback_to_signature_v2:
            return True
        return False

    def sign(self):
        """Compute and attach the request signature (v2 header or v4 headers)."""
        # Canonical string-to-sign for v2: verb, content headers, date, then
        # the sorted x-amz-*/x-emc-* headers and the resource path.
        h = self.method_string + "\n"
        h += self.headers.get("content-md5", "")+"\n"
        h += self.headers.get("content-type", "")+"\n"
        h += self.headers.get("date", "")+"\n"
        for header in sorted(self.headers.keys()):
            if header.startswith("x-amz-"):
                h += header+":"+str(self.headers[header])+"\n"
            if header.startswith("x-emc-"):
                h += header+":"+str(self.headers[header])+"\n"
        if self.resource['bucket']:
            h += "/" + self.resource['bucket']
        h += self.resource['uri']
        if self.use_signature_v2():
            debug("Using signature v2")
            debug("SignHeaders: " + repr(h))
            signature = sign_string_v2(h)
            self.headers["Authorization"] = "AWS "+self.s3.config.access_key+":"+signature
        else:
            debug("Using signature v4")
            self.headers = sign_string_v4(self.method_string,
                                          self.s3.get_hostname(self.resource['bucket']),
                                          self.resource['uri'],
                                          self.params,
                                          S3Request.region_map.get(self.resource['bucket'], Config().bucket_location),
                                          self.headers,
                                          self.body)

    def get_triplet(self):
        """Return the final signed (method, resource, headers) triplet."""
        self.update_timestamp()
        self.sign()
        resource = dict(self.resource)  ## take a copy
        resource['uri'] += self.format_param_str()
        return (self.method_string, resource, self.headers)
class S3(object):
http_methods = BidirMap(
GET = 0x01,
PUT = 0x02,
HEAD = 0x04,
DELETE = 0x08,
POST = 0x10,
MASK = 0x1F,
)
targets = BidirMap(
SERVICE = 0x0100,
BUCKET = 0x0200,
OBJECT = 0x0400,
BATCH = 0x0800,
MASK = 0x0700,
)
operations = BidirMap(
UNDFINED = 0x0000,
LIST_ALL_BUCKETS = targets["SERVICE"] | http_methods["GET"],
BUCKET_CREATE = targets["BUCKET"] | http_methods["PUT"],
BUCKET_LIST = targets["BUCKET"] | http_methods["GET"],
BUCKET_DELETE = targets["BUCKET"] | http_methods["DELETE"],
OBJECT_PUT = targets["OBJECT"] | http_methods["PUT"],
OBJECT_GET = targets["OBJECT"] | http_methods["GET"],
OBJECT_HEAD = targets["OBJECT"] | http_methods["HEAD"],
OBJECT_DELETE = targets["OBJECT"] | http_methods["DELETE"],
OBJECT_POST = targets["OBJECT"] | http_methods["POST"],
BATCH_DELETE = targets["BATCH"] | http_methods["POST"],
)
codes = {
"NoSuchBucket" : "Bucket '%s' does not exist",
"AccessDenied" : "Access to bucket '%s' was denied",
"BucketAlreadyExists" : "Bucket '%s' already exists",
}
## S3 sometimes sends HTTP-307 response
redir_map = {}
## Maximum attempts of re-issuing failed requests
_max_retries = 5
def __init__(self, config):
self.config = config
self.fallback_to_signature_v2 = False
self.endpoint_requires_signature_v4 = False
def get_hostname(self, bucket):
if bucket and check_bucket_name_dns_support(self.config.host_bucket, bucket):
if self.redir_map.has_key(bucket):
host = self.redir_map[bucket]
else:
host = getHostnameFromBucket(bucket)
else:
host = self.config.host_base
debug('get_hostname(%s): %s' % (bucket, host))
return host
def set_hostname(self, bucket, redir_hostname):
self.redir_map[bucket] = redir_hostname
def format_uri(self, resource):
if resource['bucket'] and not check_bucket_name_dns_support(self.config.host_bucket, resource['bucket']):
uri = "/%s%s" % (resource['bucket'], resource['uri'])
else:
uri = resource['uri']
if self.config.proxy_host != "":
uri = "http://%s%s" % (self.get_hostname(resource['bucket']), uri)
debug('format_uri(): ' + uri)
return uri
## Commands / Actions
def list_all_buckets(self):
request = self.create_request("LIST_ALL_BUCKETS")
response = self.send_request(request)
response["list"] = getListFromXml(response["data"], "Bucket")
return response
def bucket_list(self, bucket, prefix = None, recursive = None, uri_params = {}):
item_list = []
prefixes = []
for dirs, objects in self.bucket_list_streaming(bucket, prefix, recursive, uri_params):
item_list.extend(objects)
prefixes.extend(dirs)
response = {}
response['list'] = item_list
response['common_prefixes'] = prefixes
return response
def bucket_list_streaming(self, bucket, prefix = None, recursive = None, uri_params = {}):
""" Generator that produces <dir_list>, <object_list> pairs of groups of content of a specified bucket. """
def _list_truncated(data):
## <IsTruncated> can either be "true" or "false" or be missing completely
is_truncated = getTextFromXml(data, ".//IsTruncated") or "false"
return is_truncated.lower() != "false"
def _get_contents(data):
return getListFromXml(data, "Contents")
def _get_common_prefixes(data):
return getListFromXml(data, "CommonPrefixes")
uri_params = uri_params.copy()
truncated = True
prefixes = []
while truncated:
response = self.bucket_list_noparse(bucket, prefix, recursive, uri_params)
current_list = _get_contents(response["data"])
current_prefixes = _get_common_prefixes(response["data"])
truncated = _list_truncated(response["data"])
if truncated:
if current_list:
uri_params['marker'] = self.urlencode_string(current_list[-1]["Key"])
else:
uri_params['marker'] = self.urlencode_string(current_prefixes[-1]["Prefix"])
debug("Listing continues after '%s'" % uri_params['marker'])
yield current_prefixes, current_list
def bucket_list_noparse(self, bucket, prefix = None, recursive = None, uri_params = {}):
if prefix:
uri_params['prefix'] = self.urlencode_string(prefix)
if not self.config.recursive and not recursive:
uri_params['delimiter'] = "/"
request = self.create_request("BUCKET_LIST", bucket = bucket, **uri_params)
response = self.send_request(request)
#debug(response)
return response
def bucket_create(self, bucket, bucket_location = None):
headers = SortedDict(ignore_case = True)
body = ""
if bucket_location and bucket_location.strip().upper() != "US" and bucket_location.strip().lower() != "us-east-1":
bucket_location = bucket_location.strip()
if bucket_location.upper() == "EU":
bucket_location = bucket_location.upper()
else:
bucket_location = bucket_location.lower()
body = "<CreateBucketConfiguration><LocationConstraint>"
body += bucket_location
body += "</LocationConstraint></CreateBucketConfiguration>"
debug("bucket_location: " + body)
check_bucket_name(bucket, dns_strict = True)
else:
check_bucket_name(bucket, dns_strict = False)
if self.config.acl_public:
headers["x-amz-acl"] = "public-read"
request = self.create_request("BUCKET_CREATE", bucket = bucket, headers = headers, body = body)
response = self.send_request(request)
return response
def bucket_delete(self, bucket):
request = self.create_request("BUCKET_DELETE", bucket = bucket)
response = self.send_request(request)
return response
def get_bucket_location(self, uri):
request = self.create_request("BUCKET_LIST", bucket = uri.bucket(), extra = "?location")
response = self.send_request(request)
location = getTextFromXml(response['data'], "LocationConstraint")
if not location or location in [ "", "US" ]:
location = "us-east-1"
elif location == "EU":
location = "eu-west-1"
return location
def get_bucket_requester_pays(self, uri):
request = self.create_request("BUCKET_LIST", bucket = uri.bucket(), extra = "?requestPayment")
response = self.send_request(request)
payer = getTextFromXml(response['data'], "Payer")
return payer
def bucket_info(self, uri):
response = {}
response['bucket-location'] = self.get_bucket_location(uri)
response['requester-pays'] = self.get_bucket_requester_pays(uri)
return response
def website_info(self, uri, bucket_location = None):
bucket = uri.bucket()
request = self.create_request("BUCKET_LIST", bucket = bucket, extra="?website")
try:
response = self.send_request(request)
response['index_document'] = getTextFromXml(response['data'], ".//IndexDocument//Suffix")
response['error_document'] = getTextFromXml(response['data'], ".//ErrorDocument//Key")
response['website_endpoint'] = self.config.website_endpoint % {
"bucket" : uri.bucket(),
"location" : self.get_bucket_location(uri)}
return response
except S3Error, e:
if e.status == 404:
debug("Could not get /?website - website probably not configured for this bucket")
return None
raise
    def website_create(self, uri, bucket_location = None):
        """Enable static-website hosting on the bucket (PUT /?website).

        Index document comes from config.website_index; an ErrorDocument
        element is only emitted when config.website_error is set.
        """
        bucket = uri.bucket()
        body = '<WebsiteConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">'
        body += '  <IndexDocument>'
        body += ('    <Suffix>%s</Suffix>' % self.config.website_index)
        body += '  </IndexDocument>'
        if self.config.website_error:
            body += '  <ErrorDocument>'
            body += ('    <Key>%s</Key>' % self.config.website_error)
            body += '  </ErrorDocument>'
        body += '</WebsiteConfiguration>'
        request = self.create_request("BUCKET_CREATE", bucket = bucket, extra="?website", body = body)
        response = self.send_request(request)
        debug("Received response '%s'" % (response))
        return response
def website_delete(self, uri, bucket_location = None):
bucket = uri.bucket()
request = self.create_request("BUCKET_DELETE", bucket = bucket, extra="?website")
response = self.send_request(request)
debug("Received response '%s'" % (response))
if response['status'] != 204:
raise S3ResponseError("Expected status 204: %s" % response)
return response
    def expiration_info(self, uri, bucket_location = None):
        """Fetch the bucket's lifecycle expiration rule (GET /?lifecycle).

        Returns the response dict augmented with 'prefix', 'date' and 'days'
        keys, or None when no lifecycle configuration exists (S3 answers 404).
        """
        bucket = uri.bucket()
        request = self.create_request("BUCKET_LIST", bucket = bucket, extra="?lifecycle")
        try:
            response = self.send_request(request)
            response['prefix'] = getTextFromXml(response['data'], ".//Rule//Prefix")
            response['date'] = getTextFromXml(response['data'], ".//Rule//Expiration//Date")
            response['days'] = getTextFromXml(response['data'], ".//Rule//Expiration//Days")
            return response
        except S3Error, e:
            if e.status == 404:
                debug("Could not get /?lifecycle - lifecycle probably not configured for this bucket")
                return None
            raise
def expiration_set(self, uri, bucket_location = None):
if self.config.expiry_date and self.config.expiry_days:
raise ParameterError("Expect either --expiry-day or --expiry-date")
if not (self.config.expiry_date or self.config.expiry_days):
if self.config.expiry_prefix:
raise ParameterError("Expect either --expiry-day or --expiry-date")
debug("del bucket lifecycle")
bucket = uri.bucket()
request = self.create_request("BUCKET_DELETE", bucket = bucket, extra="?lifecycle")
else:
request = self._expiration_set(uri)
response = self.send_request(request)
debug("Received response '%s'" % (response))
return response
    def _expiration_set(self, uri):
        """Build (but do not send) the PUT /?lifecycle request for an expiration rule.

        Emits a single enabled Rule with the configured prefix and either a
        Date or a Days expiration. The lifecycle API requires a Content-MD5
        header over the XML body.
        """
        debug("put bucket lifecycle")
        body = '<LifecycleConfiguration>'
        body += '  <Rule>'
        body += ('    <Prefix>%s</Prefix>' % self.config.expiry_prefix)
        body += ('    <Status>Enabled</Status>')
        body += ('    <Expiration>')
        if self.config.expiry_date:
            body += ('    <Date>%s</Date>' % self.config.expiry_date)
        elif self.config.expiry_days:
            body += ('    <Days>%s</Days>' % self.config.expiry_days)
        body += ('    </Expiration>')
        body += '  </Rule>'
        body += '</LifecycleConfiguration>'
        headers = SortedDict(ignore_case = True)
        # S3 rejects lifecycle PUTs without a Content-MD5 of the body.
        headers['content-md5'] = compute_content_md5(body)
        bucket = uri.bucket()
        request = self.create_request("BUCKET_CREATE", bucket = bucket, headers = headers, extra="?lifecycle", body = body)
        return (request)
def _guess_content_type(self, filename):
content_type = self.config.default_mime_type
content_charset = None
if filename == "-" and not self.config.default_mime_type:
raise ParameterError("You must specify --mime-type or --default-mime-type for files uploaded from stdin.")
if self.config.guess_mime_type:
if self.config.use_mime_magic:
(content_type, content_charset) = mime_magic(filename)
else:
(content_type, content_charset) = mimetypes.guess_type(filename)
if not content_type:
content_type = self.config.default_mime_type
return (content_type, content_charset)
def stdin_content_type(self):
content_type = self.config.mime_type
if content_type == '':
content_type = self.config.default_mime_type
content_type += "; charset=" + self.config.encoding.upper()
return content_type
def content_type(self, filename=None):
# explicit command line argument always wins
content_type = self.config.mime_type
content_charset = None
if filename == u'-':
return self.stdin_content_type()
if not content_type:
(content_type, content_charset) = self._guess_content_type(filename)
## add charset to content type
if not content_charset:
content_charset = self.config.encoding.upper()
if self.add_encoding(filename, content_type) and content_charset is not None:
content_type = content_type + "; charset=" + content_charset
return content_type
def add_encoding(self, filename, content_type):
if 'charset=' in content_type:
return False
exts = self.config.add_encoding_exts.split(',')
if exts[0]=='':
return False
parts = filename.rsplit('.',2)
if len(parts) < 2:
return False
ext = parts[1]
if ext in exts:
return True
else:
return False
    def object_put(self, filename, uri, extra_headers = None, extra_label = ""):
        """Upload a local file (or stdin when filename == "-") to an s3:// URI.

        Large files (and always stdin) go through the multipart path; with
        config.put_continue a file whose size and MD5 match the remote copy
        is skipped. Raises ValueError for non-s3 URIs and InvalidFileError
        when the source is not a readable regular file.
        """
        # TODO TODO
        # Make it consistent with stream-oriented object_get()
        if uri.type != "s3":
            raise ValueError("Expected URI type 's3', got '%s'" % uri.type)
        if filename != "-" and not os.path.isfile(deunicodise(filename)):
            raise InvalidFileError(u"%s is not a regular file" % filename)
        try:
            if filename == "-":
                # stdin size is unknown up front; multipart handles that below.
                file = sys.stdin
                size = 0
            else:
                file = open(deunicodise(filename), "rb")
                size = os.stat(deunicodise(filename))[ST_SIZE]
        except (IOError, OSError), e:
            raise InvalidFileError(u"%s: %s" % (filename, e.strerror))
        headers = SortedDict(ignore_case = True)
        if extra_headers:
            headers.update(extra_headers)
        ## Set server side encryption
        if self.config.server_side_encryption:
            headers["x-amz-server-side-encryption"] = "AES256"
        ## MIME-type handling
        headers["content-type"] = self.content_type(filename=filename)
        ## Other Amazon S3 attributes
        if self.config.acl_public:
            headers["x-amz-acl"] = "public-read"
        if self.config.reduced_redundancy:
            headers["x-amz-storage-class"] = "REDUCED_REDUNDANCY"
        ## Multipart decision
        multipart = False
        if not self.config.enable_multipart and filename == "-":
            raise ParameterError("Multi-part upload is required to upload from stdin")
        if self.config.enable_multipart:
            # Multipart when the file exceeds the chunk size or comes from stdin.
            if size > self.config.multipart_chunk_size_mb * 1024 * 1024 or filename == "-":
                multipart = True
        if multipart:
            # Multipart requests are quite different... drop here
            return self.send_file_multipart(file, headers, uri, size)
        ## Not multipart...
        if self.config.put_continue:
            # Note, if input was stdin, we would be performing multipart upload.
            # So this will always work as long as the file already uploaded was
            # not uploaded via MultiUpload, in which case its ETag will not be
            # an md5.
            try:
                info = self.object_info(uri)
            except:
                # NOTE(review): bare except — treats any failure (not just 404)
                # as "remote object unknown" and re-uploads; best-effort on purpose.
                info = None
            if info is not None:
                remote_size = long(info['headers']['content-length'])
                remote_checksum = info['headers']['etag'].strip('"\'')
                if size == remote_size:
                    checksum = calculateChecksum('', file, 0, size, self.config.send_chunk)
                    if remote_checksum == checksum:
                        warning("Put: size and md5sum match for %s, skipping." % uri)
                        return
                    else:
                        warning("MultiPart: checksum (%s vs %s) does not match for %s, reuploading."
                                % (remote_checksum, checksum, uri))
                else:
                    warning("MultiPart: size (%d vs %d) does not match for %s, reuploading."
                            % (remote_size, size, uri))
        headers["content-length"] = str(size)
        request = self.create_request("OBJECT_PUT", uri = uri, headers = headers)
        labels = { 'source' : filename, 'destination' : uri.uri(), 'extra' : extra_label }
        response = self.send_file(request, file, labels)
        return response
def object_get(self, uri, stream, start_position = 0, extra_label = ""):
if uri.type != "s3":
raise ValueError("Expected URI type 's3', got '%s'" % uri.type)
request = self.create_request("OBJECT_GET", uri = uri)
labels = { 'source' : uri.uri(), 'destination' : unicodise(stream.name), 'extra' : extra_label }
response = self.recv_file(request, stream, labels, start_position)
return response
def object_batch_delete(self, remote_list):
""" Batch delete given a remote_list """
uris = [remote_list[item]['object_uri_str'] for item in remote_list]
self.object_batch_delete_uri_strs(uris)
    def object_batch_delete_uri_strs(self, uris):
        """ Batch delete given a list of object uris

        All URIs must be s3:// object URIs from the same bucket; the batch
        is sent as one POST /?delete request, which requires a Content-MD5
        header over the XML body.
        """
        def compose_batch_del_xml(bucket, key_list):
            # Build the <Delete> document, validating each key as we go.
            body = u"<?xml version=\"1.0\" encoding=\"UTF-8\"?><Delete>"
            for key in key_list:
                uri = S3Uri(key)
                if uri.type != "s3":
                    raise ValueError("Expected URI type 's3', got '%s'" % uri.type)
                if not uri.has_object():
                    raise ValueError("URI '%s' has no object" % key)
                if uri.bucket() != bucket:
                    raise ValueError("The batch should contain keys from the same bucket")
                # XML-escape the key so special characters survive the round trip.
                object = saxutils.escape(uri.object())
                body += u"<Object><Key>%s</Key></Object>" % object
            body += u"</Delete>"
            body = encode_to_s3(body)
            return body
        batch = uris
        if len(batch) == 0:
            raise ValueError("Key list is empty")
        # The bucket of the first key defines the target of the whole batch.
        bucket = S3Uri(batch[0]).bucket()
        request_body = compose_batch_del_xml(bucket, batch)
        md5_hash = md5()
        md5_hash.update(request_body)
        headers = {'content-md5': base64.b64encode(md5_hash.digest()),
                   'content-type': 'application/xml'}
        request = self.create_request("BATCH_DELETE", bucket = bucket, extra = '?delete', headers = headers, body = request_body)
        response = self.send_request(request)
        return response
def object_delete(self, uri):
if uri.type != "s3":
raise ValueError("Expected URI type 's3', got '%s'" % uri.type)
request = self.create_request("OBJECT_DELETE", uri = uri)
response = self.send_request(request)
return response
def object_restore(self, uri):
if uri.type != "s3":
raise ValueError("Expected URI type 's3', got '%s'" % uri.type)
body = '<RestoreRequest xmlns="http://s3.amazonaws.com/doc/2006-3-01">'
body += (' <Days>%s</Days>' % self.config.restore_days)
body += '</RestoreRequest>'
request = self.create_request("OBJECT_POST", uri = uri, extra = "?restore", body = body)
response = self.send_request(request)
debug("Received response '%s'" % (response))
return response
def _sanitize_headers(self, headers):
to_remove = [
# from http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingMetadata.html
'date',
'content-length',
'last-modified',
'content-md5',
'x-amz-version-id',
'x-amz-delete-marker',
# other headers returned from object_info() we don't want to send
'accept-ranges',
'etag',
'server',
'x-amz-id-2',
'x-amz-request-id',
]
for h in to_remove + self.config.remove_headers:
if h.lower() in headers:
del headers[h.lower()]
return headers
def object_copy(self, src_uri, dst_uri, extra_headers = None):
if src_uri.type != "s3":
raise ValueError("Expected URI type 's3', got '%s'" % src_uri.type)
if dst_uri.type != "s3":
raise ValueError("Expected URI type 's3', got '%s'" % dst_uri.type)
headers = SortedDict(ignore_case = True)
headers['x-amz-copy-source'] = encode_to_s3("/%s/%s" % (src_uri.bucket(), self.urlencode_string(src_uri.object())))
headers['x-amz-metadata-directive'] = "COPY"
if self.config.acl_public:
headers["x-amz-acl"] = "public-read"
if self.config.reduced_redundancy:
headers["x-amz-storage-class"] = "REDUCED_REDUNDANCY"
else:
headers["x-amz-storage-class"] = "STANDARD"
## Set server side encryption
if self.config.server_side_encryption:
headers["x-amz-server-side-encryption"] = "AES256"
if extra_headers:
headers.update(extra_headers)
request = self.create_request("OBJECT_PUT", uri = dst_uri, headers = headers)
response = self.send_request(request)
if response["data"] and getRootTagName(response["data"]) == "Error":
#http://doc.s3.amazonaws.com/proposals/copy.html
# Error during copy, status will be 200, so force error code 500
response["status"] = 500
error("Server error during the COPY operation. Overwrite response status to 500")
raise S3Error(response)
return response
    def object_modify(self, src_uri, dst_uri, extra_headers = None):
        """Rewrite an object's metadata in place via a copy-onto-itself.

        Uses x-amz-metadata-directive REPLACE with the sanitized headers of
        the original object, then restores the original ACL.

        NOTE(review): the request target is src_uri, not dst_uri — dst_uri is
        only validated. This looks like an intentional in-place modify
        (callers presumably pass src == dst); confirm before relying on
        dst_uri doing anything.
        """
        if src_uri.type != "s3":
            raise ValueError("Expected URI type 's3', got '%s'" % src_uri.type)
        if dst_uri.type != "s3":
            raise ValueError("Expected URI type 's3', got '%s'" % dst_uri.type)
        # Start from the object's current headers, minus response-only ones.
        info_response = self.object_info(src_uri)
        headers = info_response['headers']
        headers = self._sanitize_headers(headers)
        # Remember the ACL: a REPLACE copy resets it, so re-apply afterwards.
        acl = self.get_acl(src_uri)
        headers['x-amz-copy-source'] = encode_to_s3("/%s/%s" % (src_uri.bucket(), self.urlencode_string(src_uri.object())))
        headers['x-amz-metadata-directive'] = "REPLACE"
        # cannot change between standard and reduced redundancy with a REPLACE.
        ## Set server side encryption
        if self.config.server_side_encryption:
            headers["x-amz-server-side-encryption"] = "AES256"
        if extra_headers:
            headers.update(extra_headers)
        if self.config.mime_type:
            headers["content-type"] = self.config.mime_type
        request = self.create_request("OBJECT_PUT", uri = src_uri, headers = headers)
        response = self.send_request(request)
        if response["data"] and getRootTagName(response["data"]) == "Error":
            #http://doc.s3.amazonaws.com/proposals/copy.html
            # Error during modify, status will be 200, so force error code 500
            response["status"] = 500
            error("Server error during the MODIFY operation. Overwrite response status to 500")
            raise S3Error(response)
        self.set_acl(src_uri, acl)
        return response
def object_move(self, src_uri, dst_uri, extra_headers = None):
response_copy = self.object_copy(src_uri, dst_uri, extra_headers)
debug("Object %s copied to %s" % (src_uri, dst_uri))
if not response_copy["data"] or getRootTagName(response_copy["data"]) == "CopyObjectResult":
self.object_delete(src_uri)
debug("Object %s deleted" % src_uri)
return response_copy
def object_info(self, uri):
request = self.create_request("OBJECT_HEAD", uri = uri)
response = self.send_request(request)
return response
def get_acl(self, uri):
if uri.has_object():
request = self.create_request("OBJECT_GET", uri = uri, extra = "?acl")
else:
request = self.create_request("BUCKET_LIST", bucket = uri.bucket(), extra = "?acl")
response = self.send_request(request)
acl = ACL(response['data'])
return acl
def set_acl(self, uri, acl):
# dreamhost doesn't support set_acl properly
if 'objects.dreamhost.com' in self.config.host_base:
return { 'status' : 501 } # not implemented
body = str(acl)
debug(u"set_acl(%s): acl-xml: %s" % (uri, body))
headers = {'content-type': 'application/xml'}
if uri.has_object():
request = self.create_request("OBJECT_PUT", uri = uri, extra = "?acl", headers = headers, body = body)
else:
request = self.create_request("BUCKET_CREATE", bucket = uri.bucket(), extra = "?acl", headers = headers, body = body)
response = self.send_request(request)
return response
def get_policy(self, uri):
request = self.create_request("BUCKET_LIST", bucket = uri.bucket(), extra = "?policy")
response = self.send_request(request)
return response['data']
def set_policy(self, uri, policy):
headers = {}
# TODO check policy is proper json string
headers['content-type'] = 'application/json'
request = self.create_request("BUCKET_CREATE", uri = uri,
extra = "?policy", headers=headers, body = policy)
response = self.send_request(request)
return response
def delete_policy(self, uri):
request = self.create_request("BUCKET_DELETE", uri = uri, extra = "?policy")
debug(u"delete_policy(%s)" % uri)
response = self.send_request(request)
return response
def get_cors(self, uri):
request = self.create_request("BUCKET_LIST", bucket = uri.bucket(), extra = "?cors")
response = self.send_request(request)
return response['data']
def set_cors(self, uri, cors):
headers = {}
# TODO check cors is proper json string
headers['content-type'] = 'application/xml'
headers['content-md5'] = compute_content_md5(cors)
request = self.create_request("BUCKET_CREATE", uri = uri,
extra = "?cors", headers=headers, body = cors)
response = self.send_request(request)
return response
def delete_cors(self, uri):
request = self.create_request("BUCKET_DELETE", uri = uri, extra = "?cors")
debug(u"delete_cors(%s)" % uri)
response = self.send_request(request)
return response
def set_lifecycle_policy(self, uri, policy):
headers = SortedDict(ignore_case = True)
headers['content-md5'] = compute_content_md5(policy)
request = self.create_request("BUCKET_CREATE", uri = uri,
extra = "?lifecycle", headers=headers, body = policy)
debug(u"set_lifecycle_policy(%s): policy-xml: %s" % (uri, policy))
response = self.send_request(request)
return response
    def set_payer(self, uri):
        """Set the bucket's requestPayment configuration.

        Payer is "Requester" when config.requester_pays is set, otherwise
        "BucketOwner" (the S3 default).
        """
        headers = {}
        headers['content-type'] = 'application/xml'
        body = '<RequestPaymentConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">\n'
        if self.config.requester_pays:
            body += '<Payer>Requester</Payer>\n'
        else:
            body += '<Payer>BucketOwner</Payer>\n'
        body += '</RequestPaymentConfiguration>\n'
        request = self.create_request("BUCKET_CREATE", uri = uri,
                                      extra = "?requestPayment", body = body)
        response = self.send_request(request)
        return response
def delete_lifecycle_policy(self, uri):
request = self.create_request("BUCKET_DELETE", uri = uri, extra = "?lifecycle")
debug(u"delete_lifecycle_policy(%s)" % uri)
response = self.send_request(request)
return response
def get_multipart(self, uri):
request = self.create_request("BUCKET_LIST", bucket = uri.bucket(), extra = "?uploads")
response = self.send_request(request)
return response
def abort_multipart(self, uri, id):
request = self.create_request("OBJECT_DELETE", uri=uri,
extra = ("?uploadId=%s" % id))
response = self.send_request(request)
return response
def list_multipart(self, uri, id):
request = self.create_request("OBJECT_GET", uri=uri,
extra = ("?uploadId=%s" % id))
response = self.send_request(request)
return response
def get_accesslog(self, uri):
request = self.create_request("BUCKET_LIST", bucket = uri.bucket(), extra = "?logging")
response = self.send_request(request)
accesslog = AccessLog(response['data'])
return accesslog
    def set_accesslog_acl(self, uri):
        """Grant the S3 log-delivery group the permissions needed to write logs.

        Log delivery requires READ_ACP and WRITE grants on the target bucket.
        """
        acl = self.get_acl(uri)
        debug("Current ACL(%s): %s" % (uri.uri(), str(acl)))
        acl.appendGrantee(GranteeLogDelivery("READ_ACP"))
        acl.appendGrantee(GranteeLogDelivery("WRITE"))
        debug("Updated ACL(%s): %s" % (uri.uri(), str(acl)))
        self.set_acl(uri, acl)
    def set_accesslog(self, uri, enable, log_target_prefix_uri = None, acl_public = False):
        """Enable or disable access logging for the bucket (PUT /?logging).

        When S3 rejects the target bucket with InvalidTargetBucketForLogging,
        the required log-delivery ACL is set up on the target and the request
        is retried once. Returns (accesslog, response).
        """
        accesslog = AccessLog()
        if enable:
            accesslog.enableLogging(log_target_prefix_uri)
            accesslog.setAclPublic(acl_public)
        else:
            accesslog.disableLogging()
        body = str(accesslog)
        debug(u"set_accesslog(%s): accesslog-xml: %s" % (uri, body))
        request = self.create_request("BUCKET_CREATE", bucket = uri.bucket(), extra = "?logging", body = body)
        try:
            response = self.send_request(request)
        except S3Error, e:
            if e.info['Code'] == "InvalidTargetBucketForLogging":
                # Target bucket lacks the log-delivery grants; add them and retry.
                info("Setting up log-delivery ACL for target bucket.")
                self.set_accesslog_acl(S3Uri(u"s3://%s" % log_target_prefix_uri.bucket()))
                response = self.send_request(request)
            else:
                raise
        return accesslog, response
## Low level methods
    def urlencode_string(self, string, urlencoding_mode = None):
        """URL-encode a key/path for use in a request URI.

        Unicode input is first UTF-8 encoded. In "verbatim" mode the string
        is returned untouched; otherwise quote_plus is applied, keeping "~"
        and "/" unescaped so object paths stay readable.
        """
        if type(string) == unicode:
            string = string.encode("utf-8")
        if urlencoding_mode is None:
            urlencoding_mode = self.config.urlencoding_mode
        if urlencoding_mode == "verbatim":
            ## Don't do any pre-processing
            return string
        encoded = quote_plus(string, safe="~/")
        debug("String '%s' encoded to '%s'" % (string, encoded))
        return encoded
    def create_request(self, operation, uri = None, bucket = None, object = None, headers = None, extra = None, body = "", **params):
        """Build an S3Request for the named operation.

        The target may be given either as a full S3Uri ('uri') or as
        separate 'bucket'/'object' parameters — never both. 'extra' is a raw
        query-string suffix (e.g. "?acl") appended to the resource path.
        The HTTP method is derived from the operation's bits in
        S3.operations masked against S3.http_methods.
        """
        resource = { 'bucket' : None, 'uri' : "/" }
        if uri and (bucket or object):
            raise ValueError("Both 'uri' and either 'bucket' or 'object' parameters supplied")
        ## If URI is given use that instead of bucket/object parameters
        if uri:
            bucket = uri.bucket()
            object = uri.has_object() and uri.object() or None
        if bucket:
            resource['bucket'] = str(bucket)
            if object:
                resource['uri'] = "/" + self.urlencode_string(object)
        if extra:
            resource['uri'] += extra
        method_string = S3.http_methods.getkey(S3.operations[operation] & S3.http_methods["MASK"])
        request = S3Request(self, method_string, resource, headers, body, params)
        debug("CreateRequest: resource[uri]=" + resource['uri'])
        return request
def _fail_wait(self, retries):
# Wait a few seconds. The more it fails the more we wait.
return (self._max_retries - retries + 1) * 3
    def _http_400_handler(self, request, response, fn, *args, **kwargs):
        """Recover from an HTTP 400 by retrying fn(*args, **kwargs) when possible.

        Handles three recoverable cases: AuthorizationHeaderMalformed (wrong
        region — remember the right one and resend), endpoints demanding
        signature v4, and endpoints that only speak signature v2 (e.g.
        DreamObjects). Anything else logs the S3 error and exits.
        """
        # AWS response AuthorizationHeaderMalformed means we sent the request to the wrong region
        # get the right region out of the response and send it there.
        message = 'Unknown error'
        if 'data' in response and len(response['data']) > 0:
            failureCode = getTextFromXml(response['data'], 'Code')
            message = getTextFromXml(response['data'], 'Message')
            if failureCode == 'AuthorizationHeaderMalformed': # we sent the request to the wrong region
                region = getTextFromXml(response['data'], 'Region')
                if region is not None:
                    # Cache the bucket->region mapping so future requests go direct.
                    S3Request.region_map[request.resource['bucket']] = region
                    info('Forwarding request to %s' % region)
                    return fn(*args, **kwargs)
                else:
                    message = u'Could not determine bucket location. Please consider using --region parameter.'
            elif failureCode == 'InvalidRequest':
                if message == 'The authorization mechanism you have provided is not supported. Please use AWS4-HMAC-SHA256.':
                    debug(u'Endpoint requires signature v4')
                    self.endpoint_requires_signature_v4 = True
                    return fn(*args, **kwargs)
            elif failureCode == 'InvalidArgument': # returned by DreamObjects on send_request and send_file,
                                                   # which doesn't support signature v4. Retry with signature v2
                if not request.use_signature_v2() and not self.fallback_to_signature_v2: # have not tried with v2 yet
                    debug(u'Falling back to signature v2')
                    self.fallback_to_signature_v2 = True
                    return fn(*args, **kwargs)
        else: # returned by DreamObjects on recv_file, which doesn't support signature v4. Retry with signature v2
            if not request.use_signature_v2() and not self.fallback_to_signature_v2: # have not tried with v2 yet
                debug(u'Falling back to signature v2')
                self.fallback_to_signature_v2 = True
                return fn(*args, **kwargs)
        # No recovery possible: report and bail out.
        error(u"S3 error: %s" % message)
        sys.exit(ExitCodes.EX_GENERAL)
def _http_403_handler(self, request, response, fn, *args, **kwargs):
message = 'Unknown error'
if 'data' in response and len(response['data']) > 0:
failureCode = getTextFromXml(response['data'], 'Code')
message = getTextFromXml(response['data'], 'Message')
if failureCode == 'AccessDenied': # traditional HTTP 403
if message == 'AWS authentication requires a valid Date or x-amz-date header': # message from an Eucalyptus walrus server
if not request.use_signature_v2() and not self.fallback_to_signature_v2: # have not tried with v2 yet
debug(u'Falling back to signature v2')
self.fallback_to_signature_v2 = True
return fn(*args, **kwargs)
error(u"S3 error: %s" % message)
sys.exit(ExitCodes.EX_GENERAL)
    def send_request(self, request, retries = _max_retries):
        """Send a prepared S3Request and return the parsed response dict.

        Handles connection failures with exponential-ish backoff retries,
        307 redirects (remembering the redirected hostname), 400/403
        signature/region recovery via the dedicated handlers, and retries
        5xx server errors (except 501). Raises S3Error for any remaining
        non-2xx status.
        """
        method_string, resource, headers = request.get_triplet()
        debug("Processing request, please wait...")
        try:
            conn = ConnMan.get(self.get_hostname(resource['bucket']))
            uri = self.format_uri(resource)
            debug("Sending request method_string=%r, uri=%r, headers=%r, body=(%i bytes)" % (method_string, uri, headers, len(request.body or "")))
            conn.c.request(method_string, uri, request.body, headers)
            response = {}
            http_response = conn.c.getresponse()
            response["status"] = http_response.status
            response["reason"] = http_response.reason
            response["headers"] = convertTupleListToDict(http_response.getheaders())
            response["data"] = http_response.read()
            if response["headers"].has_key("x-amz-meta-s3cmd-attrs"):
                # Stored file attributes (mode, mtime, ...) ride along as metadata.
                attrs = parse_attrs_header(response["headers"]["x-amz-meta-s3cmd-attrs"])
                response["s3cmd-attrs"] = attrs
            debug("Response: " + str(response))
            ConnMan.put(conn)
        except ParameterError, e:
            raise
        except OSError:
            raise
        except (IOError, Exception), e:
            # Only broken-pipe style I/O errors are retried here.
            if hasattr(e, 'errno') and e.errno != errno.EPIPE:
                raise
            # close the connection and re-establish
            conn.counter = ConnMan.conn_max_counter
            ConnMan.put(conn)
            if retries:
                warning("Retrying failed request: %s (%s)" % (resource['uri'], e))
                warning("Waiting %d sec..." % self._fail_wait(retries))
                time.sleep(self._fail_wait(retries))
                return self.send_request(request, retries - 1)
            else:
                raise S3RequestError("Request failed for: %s" % resource['uri'])
        if response["status"] == 400:
            return self._http_400_handler(request, response, self.send_request, request)
        if response["status"] == 403:
            return self._http_403_handler(request, response, self.send_request, request)
        if response["status"] == 405: # Method Not Allowed.  Don't retry.
            raise S3Error(response)
        if response["status"] == 307:
            ## RedirectPermanent
            redir_bucket = getTextFromXml(response['data'], ".//Bucket")
            redir_hostname = getTextFromXml(response['data'], ".//Endpoint")
            self.set_hostname(redir_bucket, redir_hostname)
            info("Redirected to: %s" % (redir_hostname))
            return self.send_request(request)
        if response["status"] >= 500:
            e = S3Error(response)
            if response["status"] == 501:
                ## NotImplemented server error - no need to retry
                retries = 0
            if retries:
                warning(u"Retrying failed request: %s" % resource['uri'])
                warning(unicode(e))
                warning("Waiting %d sec..." % self._fail_wait(retries))
                time.sleep(self._fail_wait(retries))
                return self.send_request(request, retries - 1)
            else:
                raise e
        if response["status"] < 200 or response["status"] > 299:
            raise S3Error(response)
        return response
    def send_file(self, request, file, labels, buffer = '', throttle = 0, retries = _max_retries, offset = 0, chunk_size = -1):
        """Stream a file (or in-memory buffer) to S3 as the body of `request`.

        Handles region discovery for signature v4, chunked sending with
        optional rate limiting, retry with increasing throttle on failure,
        307 redirects, 400/403 signature recovery, and MD5 verification of
        the returned ETag (mismatch triggers a re-upload). Raises
        S3UploadError after exhausting retries, S3Error for
        non-recoverable responses.
        """
        method_string, resource, headers = request.get_triplet()
        if S3Request.region_map.get(request.resource['bucket'], Config().bucket_location) is None:
            # Signature v4 needs the bucket's region; look it up once and cache it.
            s3_uri = S3Uri(u's3://' + request.resource['bucket'])
            region = self.get_bucket_location(s3_uri)
            if region is not None:
                S3Request.region_map[request.resource['bucket']] = region
        size_left = size_total = long(headers["content-length"])
        filename = unicodise(file.name)
        if self.config.progress_meter:
            progress = self.config.progress_class(labels, size_total)
        else:
            info("Sending file '%s', please wait..." % filename)
        timestamp_start = time.time()
        # Body hash is required for signature v4; recompute the triplet after
        # attaching it since the signature depends on the body.
        if buffer:
            sha256_hash = checksum_sha256_buffer(buffer, offset, size_total)
        else:
            sha256_hash = checksum_sha256_file(filename, offset, size_total)
        request.body = sha256_hash
        method_string, resource, headers = request.get_triplet()
        try:
            conn = ConnMan.get(self.get_hostname(resource['bucket']))
            conn.c.putrequest(method_string, self.format_uri(resource))
            for header in headers.keys():
                conn.c.putheader(header, str(headers[header]))
            conn.c.endheaders()
        except ParameterError, e:
            raise
        except Exception, e:
            if self.config.progress_meter:
                progress.done("failed")
            if retries:
                warning("Retrying failed request: %s (%s)" % (resource['uri'], e))
                warning("Waiting %d sec..." % self._fail_wait(retries))
                time.sleep(self._fail_wait(retries))
                # Connection error -> same throttle value
                return self.send_file(request, file, labels, buffer, throttle, retries - 1, offset, chunk_size)
            else:
                raise S3UploadError("Upload failed for: %s" % resource['uri'])
        if buffer == '':
            file.seek(offset)
        md5_hash = md5()
        try:
            while (size_left > 0):
                #debug("SendFile: Reading up to %d bytes from '%s' - remaining bytes: %s" % (self.config.send_chunk, filename, size_left))
                l = min(self.config.send_chunk, size_left)
                if buffer == '':
                    data = file.read(l)
                else:
                    data = buffer
                if self.config.limitrate > 0:
                    start_time = time.time()
                md5_hash.update(data)
                conn.c.send(data)
                if self.config.progress_meter:
                    progress.update(delta_position = len(data))
                size_left -= len(data)
                #throttle
                if self.config.limitrate > 0:
                    # Sleep long enough to keep the average rate under the limit.
                    real_duration = time.time() - start_time
                    expected_duration = float(l)/self.config.limitrate
                    throttle = max(expected_duration - real_duration, throttle)
                if throttle:
                    time.sleep(throttle)
            md5_computed = md5_hash.hexdigest()
            response = {}
            http_response = conn.c.getresponse()
            response["status"] = http_response.status
            response["reason"] = http_response.reason
            response["headers"] = convertTupleListToDict(http_response.getheaders())
            response["data"] = http_response.read()
            response["size"] = size_total
            ConnMan.put(conn)
            debug(u"Response: %s" % response)
        except ParameterError, e:
            raise
        except Exception, e:
            if self.config.progress_meter:
                progress.done("failed")
            if retries:
                if retries < self._max_retries:
                    # Each subsequent failure slows the next attempt down 5x.
                    throttle = throttle and throttle * 5 or 0.01
                warning("Upload failed: %s (%s)" % (resource['uri'], e))
                warning("Retrying on lower speed (throttle=%0.2f)" % throttle)
                warning("Waiting %d sec..." % self._fail_wait(retries))
                time.sleep(self._fail_wait(retries))
                # Connection error -> same throttle value
                return self.send_file(request, file, labels, buffer, throttle, retries - 1, offset, chunk_size)
            else:
                debug("Giving up on '%s' %s" % (filename, e))
                raise S3UploadError("Upload failed for: %s" % resource['uri'])
        timestamp_end = time.time()
        response["elapsed"] = timestamp_end - timestamp_start
        response["speed"] = response["elapsed"] and float(response["size"]) / response["elapsed"] or float(-1)
        if self.config.progress_meter:
            ## Finalising the upload takes some time -> update() progress meter
            ## to correct the average speed. Otherwise people will complain that
            ## 'progress' and response["speed"] are inconsistent ;-)
            progress.update()
            progress.done("done")
        if response["status"] == 307:
            ## RedirectPermanent
            redir_bucket = getTextFromXml(response['data'], ".//Bucket")
            redir_hostname = getTextFromXml(response['data'], ".//Endpoint")
            self.set_hostname(redir_bucket, redir_hostname)
            info("Redirected to: %s" % (redir_hostname))
            return self.send_file(request, file, labels, buffer, offset = offset, chunk_size = chunk_size)
        if response["status"] == 400:
            return self._http_400_handler(request, response, self.send_file, request, file, labels, buffer, offset = offset, chunk_size = chunk_size)
        if response["status"] == 403:
            return self._http_403_handler(request, response, self.send_file, request, file, labels, buffer, offset = offset, chunk_size = chunk_size)
        # S3 from time to time doesn't send ETag back in a response :-(
        # Force re-upload here.
        if not response['headers'].has_key('etag'):
            response['headers']['etag'] = ''
        if response["status"] < 200 or response["status"] > 299:
            try_retry = False
            if response["status"] >= 500:
                ## AWS internal error - retry
                try_retry = True
            elif response["status"] >= 400:
                err = S3Error(response)
                ## Retriable client error?
                if err.code in [ 'BadDigest', 'OperationAborted', 'TokenRefreshRequired', 'RequestTimeout' ]:
                    try_retry = True
            if try_retry:
                if retries:
                    warning("Upload failed: %s (%s)" % (resource['uri'], S3Error(response)))
                    warning("Waiting %d sec..." % self._fail_wait(retries))
                    time.sleep(self._fail_wait(retries))
                    return self.send_file(request, file, labels, buffer, throttle, retries - 1, offset, chunk_size)
                else:
                    warning("Too many failures. Giving up on '%s'" % (filename))
                    raise S3UploadError
            ## Non-recoverable error
            raise S3Error(response)
        debug("MD5 sums: computed=%s, received=%s" % (md5_computed, response["headers"]["etag"]))
        if response["headers"]["etag"].strip('"\'') != md5_hash.hexdigest():
            warning("MD5 Sums don't match!")
            if retries:
                warning("Retrying upload of %s" % (filename))
                return self.send_file(request, file, labels, buffer, throttle, retries - 1, offset, chunk_size)
            else:
                warning("Too many failures. Giving up on '%s'" % (filename))
                raise S3UploadError
        return response
def send_file_multipart(self, file, headers, uri, size):
timestamp_start = time.time()
upload = MultiPartUpload(self, file, uri, headers)
upload.upload_all_parts()
response = upload.complete_multipart_upload()
timestamp_end = time.time()
response["elapsed"] = timestamp_end - timestamp_start
response["size"] = size
response["speed"] = response["elapsed"] and float(response["size"]) / response["elapsed"] or float(-1)
if response["data"] and getRootTagName(response["data"]) == "Error":
#http://docs.aws.amazon.com/AmazonS3/latest/API/mpUploadComplete.html
# Error Complete Multipart UPLOAD, status may be 200
# raise S3UploadError
raise S3UploadError(getTextFromXml(response["data"], 'Message'))
return response
def recv_file(self, request, stream, labels, start_position = 0, retries = _max_retries):
    """Download an object into *stream*, with resume, throttling and retry.

    *start_position* > 0 requests a byte Range and resumes an earlier
    partial download.  On connection errors the method recurses with
    ``retries - 1``; MD5 is verified when possible.  Returns the
    response dict augmented with md5/md5match/elapsed/size/speed.
    """
    method_string, resource, headers = request.get_triplet()
    filename = unicodise(stream.name)
    if self.config.progress_meter:
        progress = self.config.progress_class(labels, 0)
    else:
        info("Receiving file '%s', please wait..." % filename)
    timestamp_start = time.time()
    try:
        conn = ConnMan.get(self.get_hostname(resource['bucket']))
        conn.c.putrequest(method_string, self.format_uri(resource))
        for header in headers.keys():
            conn.c.putheader(header, str(headers[header]))
        if start_position > 0:
            debug("Requesting Range: %d .. end" % start_position)
            conn.c.putheader("Range", "bytes=%d-" % start_position)
        conn.c.endheaders()
        response = {}
        http_response = conn.c.getresponse()
        response["status"] = http_response.status
        response["reason"] = http_response.reason
        response["headers"] = convertTupleListToDict(http_response.getheaders())
        if response["headers"].has_key("x-amz-meta-s3cmd-attrs"):
            attrs = parse_attrs_header(response["headers"]["x-amz-meta-s3cmd-attrs"])
            response["s3cmd-attrs"] = attrs
        debug("Response: %s" % response)
    except ParameterError, e:
        raise
    except OSError, e:
        raise
    except (IOError, Exception), e:
        if self.config.progress_meter:
            progress.done("failed")
        if hasattr(e, 'errno') and e.errno != errno.EPIPE:
            raise
        # close the connection and re-establish
        conn.counter = ConnMan.conn_max_counter
        ConnMan.put(conn)
        if retries:
            warning("Retrying failed request: %s (%s)" % (resource['uri'], e))
            warning("Waiting %d sec..." % self._fail_wait(retries))
            time.sleep(self._fail_wait(retries))
            # Connection error -> same throttle value
            return self.recv_file(request, stream, labels, start_position, retries - 1)
        else:
            raise S3DownloadError("Download failed for: %s" % resource['uri'])
    if response["status"] == 307:
        ## RedirectPermanent
        response['data'] = http_response.read()
        redir_bucket = getTextFromXml(response['data'], ".//Bucket")
        redir_hostname = getTextFromXml(response['data'], ".//Endpoint")
        self.set_hostname(redir_bucket, redir_hostname)
        info("Redirected to: %s" % (redir_hostname))
        # restart from scratch against the redirected endpoint
        return self.recv_file(request, stream, labels)
    if response["status"] == 400:
        return self._http_400_handler(request, response, self.recv_file, request, stream, labels)
    if response["status"] == 403:
        return self._http_403_handler(request, response, self.recv_file, request, stream, labels)
    if response["status"] == 405: # Method Not Allowed. Don't retry.
        raise S3Error(response)
    if response["status"] < 200 or response["status"] > 299:
        raise S3Error(response)
    if start_position == 0:
        # Only compute MD5 on the fly if we're downloading from beginning
        # Otherwise we'd get a nonsense.
        md5_hash = md5()
    size_left = long(response["headers"]["content-length"])
    size_total = start_position + size_left
    current_position = start_position
    if self.config.progress_meter:
        progress.total_size = size_total
        progress.initial_position = current_position
        progress.current_position = current_position
    try:
        # Fix for issue #432. Even when content size is 0, httplib expect the response to be read.
        if size_left == 0:
            data = http_response.read(1)
            # It is not supposed to be some data returned in that case
            assert(len(data) == 0)
        while (current_position < size_total):
            this_chunk = size_left > self.config.recv_chunk and self.config.recv_chunk or size_left
            if self.config.limitrate > 0:
                start_time = time.time()
            data = http_response.read(this_chunk)
            if len(data) == 0:
                raise S3ResponseError("EOF from S3!")
            #throttle
            if self.config.limitrate > 0:
                real_duration = time.time() - start_time
                expected_duration = float(this_chunk)/self.config.limitrate
                if expected_duration > real_duration:
                    time.sleep(expected_duration - real_duration)
            stream.write(data)
            if start_position == 0:
                md5_hash.update(data)
            current_position += len(data)
            ## Call progress meter from here...
            if self.config.progress_meter:
                progress.update(delta_position = len(data))
        ConnMan.put(conn)
    except OSError:
        raise
    except (IOError, Exception), e:
        if self.config.progress_meter:
            progress.done("failed")
        if hasattr(e, 'errno') and e.errno != errno.EPIPE:
            raise
        # close the connection and re-establish
        conn.counter = ConnMan.conn_max_counter
        ConnMan.put(conn)
        if retries:
            warning("Retrying failed request: %s (%s)" % (resource['uri'], e))
            warning("Waiting %d sec..." % self._fail_wait(retries))
            time.sleep(self._fail_wait(retries))
            # Connection error -> same throttle value
            # resume from current_position, not start_position
            return self.recv_file(request, stream, labels, current_position, retries - 1)
        else:
            raise S3DownloadError("Download failed for: %s" % resource['uri'])
    stream.flush()
    timestamp_end = time.time()
    if self.config.progress_meter:
        ## The above stream.flush() may take some time -> update() progress meter
        ## to correct the average speed. Otherwise people will complain that
        ## 'progress' and response["speed"] are inconsistent ;-)
        progress.update()
        progress.done("done")
    md5_from_s3 = response["headers"]["etag"].strip('"')
    if not 'x-amz-meta-s3tools-gpgenc' in response["headers"]:
        # we can't trust our stored md5 because we
        # encrypted the file after calculating it but before
        # uploading it.
        try:
            md5_from_s3 = response["s3cmd-attrs"]["md5"]
        except KeyError:
            pass
    # we must have something to compare against to bother with the calculation
    if '-' not in md5_from_s3:
        if start_position == 0:
            # Only compute MD5 on the fly if we were downloading from the beginning
            response["md5"] = md5_hash.hexdigest()
        else:
            # Otherwise try to compute MD5 of the output file
            try:
                response["md5"] = hash_file_md5(filename)
            except IOError, e:
                if e.errno != errno.ENOENT:
                    warning("Unable to open file: %s: %s" % (filename, e))
                warning("Unable to verify MD5. Assume it matches.")
    response["md5match"] = response.get("md5") == md5_from_s3
    response["elapsed"] = timestamp_end - timestamp_start
    response["size"] = current_position
    response["speed"] = response["elapsed"] and float(response["size"]) / response["elapsed"] or float(-1)
    if response["size"] != start_position + long(response["headers"]["content-length"]):
        warning("Reported size (%s) does not match received size (%s)" % (
            start_position + long(response["headers"]["content-length"]), response["size"]))
    debug("ReceiveFile: Computed MD5 = %s" % response.get("md5"))
    # avoid ETags from multipart uploads that aren't the real md5
    if '-' not in md5_from_s3 and not response["md5match"]:
        warning("MD5 signatures do not match: computed=%s, received=%s" % (
            response.get("md5"), md5_from_s3))
    return response
__all__.append("S3")
def parse_attrs_header(attrs_header):
    """Parse an ``x-amz-meta-s3cmd-attrs`` header value into a dict.

    The header is a "/"-separated list of ``key:value`` pairs, e.g.
    ``uid:1000/gid:1000/mode:33188``.  Each pair is split on the
    *first* colon only, so a value that itself contains a colon no
    longer raises ValueError (the old two-way split did).
    """
    attrs = {}
    for attr in attrs_header.split("/"):
        # maxsplit=1: only the first colon separates key from value
        key, val = attr.split(":", 1)
        attrs[key] = val
    return attrs
def compute_content_md5(body):
    """Return the base64-encoded MD5 digest of *body* (Content-MD5 header).

    Uses base64.b64encode() instead of the deprecated (and removed in
    Python 3.9) base64.encodestring(): b64encode never appends a
    trailing newline, so no manual stripping is required.
    """
    digest = md5(body).digest()
    base64md5 = base64.b64encode(digest)
    if not isinstance(base64md5, str):
        # Python 3: b64encode returns bytes; the header needs text
        base64md5 = base64md5.decode('ascii')
    return base64md5
# vim:et:ts=4:sts=4:ai
| gpl-2.0 |
mjtamlyn/django | tests/defer_regress/models.py | 21 | 2543 | """
Regression tests for defer() / only() behavior.
"""
from django.db import models
class Item(models.Model):
    # Base model queried by most defer()/only() regression tests.
    name = models.CharField(max_length=15)
    text = models.TextField(default="xyzzy")
    value = models.IntegerField()
    other_value = models.IntegerField(default=0)

    def __str__(self):
        return self.name
class RelatedItem(models.Model):
    # Single FK onto Item; exercises deferral across a relation.
    item = models.ForeignKey(Item, models.CASCADE)
class ProxyRelated(RelatedItem):
    # Proxy of RelatedItem: same table, no extra fields.
    class Meta:
        proxy = True
class Child(models.Model):
    # Target of both of Leaf's foreign keys.
    name = models.CharField(max_length=10)
    value = models.IntegerField()
class Leaf(models.Model):
    name = models.CharField(max_length=10)
    child = models.ForeignKey(Child, models.CASCADE)
    # Nullable second FK to the same model; SET_NULL on Child deletion.
    second_child = models.ForeignKey(Child, models.SET_NULL, related_name="other", null=True)
    value = models.IntegerField(default=42)

    def __str__(self):
        return self.name
class ResolveThis(models.Model):
    # Minimal two-field model used by the regression suite.
    num = models.FloatField()
    name = models.CharField(max_length=16)
class Proxy(Item):
    # Proxy of Item: same table, no extra fields.
    class Meta:
        proxy = True
class SimpleItem(models.Model):
    # Slimmer counterpart of Item (no defaults, no text field).
    name = models.CharField(max_length=15)
    value = models.IntegerField()

    def __str__(self):
        return self.name
class Feature(models.Model):
    # First hop of the SpecialFeature -> Feature -> SimpleItem chain.
    item = models.ForeignKey(SimpleItem, models.CASCADE)
class SpecialFeature(models.Model):
    # Second hop of the FK chain down to SimpleItem.
    feature = models.ForeignKey(Feature, models.CASCADE)
class OneToOneItem(models.Model):
    # One-to-one with Item; reverse accessor is "one_to_one_item".
    item = models.OneToOneField(Item, models.CASCADE, related_name="one_to_one_item")
    name = models.CharField(max_length=15)
class ItemAndSimpleItem(models.Model):
    # Joins Item and SimpleItem through two independent FKs.
    item = models.ForeignKey(Item, models.CASCADE)
    simple = models.ForeignKey(SimpleItem, models.CASCADE)
class Profile(models.Model):
    # Single wide char field with a default; referenced by Request.
    profile1 = models.CharField(max_length=1000, default='profile1')
class Location(models.Model):
    # Single wide char field with a default; required FK target of Request.
    location1 = models.CharField(max_length=1000, default='location1')
class Request(models.Model):
    # Wide model: optional profile FK, required location FK, an M2M to
    # Item, and several defaulted char fields for defer()/only() tests.
    profile = models.ForeignKey(Profile, models.SET_NULL, null=True, blank=True)
    location = models.ForeignKey(Location, models.CASCADE)
    items = models.ManyToManyField(Item)
    request1 = models.CharField(default='request1', max_length=1000)
    request2 = models.CharField(default='request2', max_length=1000)
    request3 = models.CharField(default='request3', max_length=1000)
    request4 = models.CharField(default='request4', max_length=1000)
class Base(models.Model):
    # Concrete parent half of a multi-table inheritance pair.
    text = models.TextField()
class Derived(Base):
    # Multi-table child of Base; adds one field in its own table.
    other_text = models.TextField()
| bsd-3-clause |
rosmo/boto | boto/ec2/__init__.py | 145 | 3100 | # Copyright (c) 2006-2008 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
"""
This module provides an interface to the Elastic Compute Cloud (EC2)
service from AWS.
"""
from boto.ec2.connection import EC2Connection
from boto.regioninfo import RegionInfo, get_regions, load_regions
RegionData = load_regions().get('ec2', {})
def regions(**kw_params):
    """
    Get all available regions for the EC2 service.

    You may pass any of the arguments accepted by the EC2Connection
    object's constructor as keyword arguments and they will be
    passed along to the EC2Connection object.

    :rtype: list
    :return: A list of :class:`boto.ec2.regioninfo.RegionInfo`
    """
    # Each RegionInfo is wired to EC2Connection so .connect() works.
    return get_regions('ec2', connection_cls=EC2Connection)
def connect_to_region(region_name, **kw_params):
    """
    Given a valid region name, return a
    :class:`boto.ec2.connection.EC2Connection`.
    Any additional parameters after the region_name are passed on to
    the connect method of the region object.

    :type: str
    :param region_name: The name of the region to connect to.

    :rtype: :class:`boto.ec2.connection.EC2Connection` or ``None``
    :return: A connection to the given region, or None if an invalid region
             name is given
    """
    # Fast path: the caller already supplied a matching RegionInfo.
    region_arg = kw_params.get('region')
    if isinstance(region_arg, RegionInfo) and region_arg.name == region_name:
        return EC2Connection(**kw_params)
    # Otherwise look the region up by name.
    matches = (r for r in regions(**kw_params) if r.name == region_name)
    region = next(matches, None)
    if region is None:
        return None
    return region.connect(**kw_params)
def get_region(region_name, **kw_params):
    """
    Find and return a :class:`boto.ec2.regioninfo.RegionInfo` object
    given a region name.

    :type: str
    :param: The name of the region.

    :rtype: :class:`boto.ec2.regioninfo.RegionInfo`
    :return: The RegionInfo object for the given region or None if
             an invalid region name is provided.
    """
    candidates = (r for r in regions(**kw_params) if r.name == region_name)
    return next(candidates, None)
| mit |
raccoongang/edx-platform | openedx/core/djangoapps/credit/tests/test_signature.py | 36 | 1729 | # coding=utf-8
"""
Tests for digital signatures used to validate messages to/from credit providers.
"""
from nose.plugins.attrib import attr
from django.test import TestCase
from django.test.utils import override_settings
from openedx.core.djangoapps.credit import signature
@attr(shard=2)
@override_settings(CREDIT_PROVIDER_SECRET_KEYS={
    "asu": u'abcd1234'
})
class SignatureTest(TestCase):
    """
    Tests for digital signatures.

    A fixed provider secret key is injected via override_settings so
    the expected hex digests below are deterministic.
    """

    def test_unicode_secret_key(self):
        # Test a key that has type `unicode` but consists of ASCII characters
        # (This can happen, for example, when loading the key from a JSON configuration file)
        # When retrieving the shared secret, the type should be converted to `str`
        key = signature.get_shared_secret_key("asu")
        sig = signature.signature({}, key)
        self.assertEqual(sig, "7d70a26b834d9881cc14466eceac8d39188fc5ef5ffad9ab281a8327c2c0d093")

    @override_settings(CREDIT_PROVIDER_SECRET_KEYS={
        "asu": u'\u4567'
    })
    def test_non_ascii_unicode_secret_key(self):
        # Test a key that contains non-ASCII unicode characters
        # This should return `None` and log an error; the caller
        # is then responsible for logging the appropriate errors
        # so we can fix the misconfiguration.
        key = signature.get_shared_secret_key("asu")
        self.assertIs(key, None)

    def test_unicode_data(self):
        """ Verify the signature generation method supports Unicode data. """
        key = signature.get_shared_secret_key("asu")
        sig = signature.signature({'name': u'Ed Xavíer'}, key)
        self.assertEqual(sig, "76b6c9a657000829253d7c23977b35b34ad750c5681b524d7fdfb25cd5273cec")
| agpl-3.0 |
bruderstein/PythonScript | PythonLib/full/distutils/tests/__init__.py | 3 | 1344 | """Test suite for distutils.
This test suite consists of a collection of test modules in the
distutils.tests package. Each test module has a name starting with
'test' and contains a function test_suite(). The function is expected
to return an initialized unittest.TestSuite instance.
Tests for the command classes in the distutils.command package are
included in distutils.tests as well, instead of using a separate
distutils.command.tests package, since command identification is done
by import rather than matching pre-defined names.
"""
import os
import sys
import unittest
from test.support import run_unittest, save_restore_warnings_filters
here = os.path.dirname(__file__) or os.curdir
def test_suite():
    """Aggregate the suites of every test module in this package."""
    suite = unittest.TestSuite()
    module_names = [
        "distutils.tests." + fn[:-3]
        for fn in os.listdir(here)
        if fn.startswith("test") and fn.endswith(".py")
    ]
    for modname in module_names:
        # bpo-40055: Save/restore warnings filters to leave them unchanged.
        # Importing tests imports docutils which imports pkg_resources
        # which adds a warnings filter.
        with save_restore_warnings_filters():
            __import__(modname)
        suite.addTest(sys.modules[modname].test_suite())
    return suite
# Allow running this package's whole suite directly.
if __name__ == "__main__":
    run_unittest(test_suite())
| gpl-2.0 |
pgum/emi1 | lib/python3.5/site-packages/setuptools/package_index.py | 19 | 39541 | """PyPI and direct package downloading"""
import sys
import os
import re
import shutil
import socket
import base64
import hashlib
import itertools
from functools import wraps
try:
from urllib.parse import splituser
except ImportError:
from urllib2 import splituser
from setuptools.extern import six
from setuptools.extern.six.moves import urllib, http_client, configparser, map
import setuptools
from pkg_resources import (
CHECKOUT_DIST, Distribution, BINARY_DIST, normalize_path, SOURCE_DIST,
require, Environment, find_distributions, safe_name, safe_version,
to_filename, Requirement, DEVELOP_DIST,
)
from setuptools import ssl_support
from distutils import log
from distutils.errors import DistutilsError
from fnmatch import translate
from setuptools.py26compat import strip_fragment
from setuptools.py27compat import get_all_headers
# "#egg=ProjectName-1.0" style URL fragments; group 1 is the egg name.
EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.]+)$')
# Extracts the target of an href attribute (group 1).
HREF = re.compile("""href\\s*=\\s*['"]?([^'"> ]+)""", re.I)
# this is here to fix emacs' cruddy broken syntax highlighting
# Matches PyPI's legacy "<a>name</a> (<a ...>md5</a>)" download markup.
PYPI_MD5 = re.compile(
    '<a href="([^"#]+)">([^<]+)</a>\n\s+\\(<a (?:title="MD5 hash"\n\s+)'
    'href="[^?]+\?:action=show_md5&digest=([0-9a-f]{32})">md5</a>\\)'
)
# Leading "scheme:" of a URL; used to tell URLs from local filenames.
URL_SCHEME = re.compile('([-+.a-z0-9]{2,}):',re.I).match
# Recognized source-distribution archive extensions, longest first.
EXTENSIONS = ".tar.gz .tar.bz2 .tar .zip .tgz".split()

__all__ = [
    'PackageIndex', 'distros_for_url', 'parse_bdist_wininst',
    'interpret_distro_name',
]

_SOCKET_TIMEOUT = 15

# User-Agent sent with index/download requests.
_tmpl = "setuptools/{setuptools.__version__} Python-urllib/{py_major}"
user_agent = _tmpl.format(py_major=sys.version[:3], **globals())
def parse_bdist_wininst(name):
    """Return (base, pyversion, platform) for a bdist_wininst .exe name.

    All three values are None when *name* does not look like a
    bdist_wininst installer filename.
    """
    lower = name.lower()
    base = py_ver = plat = None
    if lower.endswith('.win32.exe'):
        base, plat = name[:-10], 'win32'
    elif lower.endswith('.exe') and lower.startswith('.win32-py', -16):
        # e.g. "pkg-1.0.win32-py2.7.exe" -> version is the "2.7" part
        base, py_ver, plat = name[:-16], name[-7:-4], 'win32'
    elif lower.endswith('.win-amd64.exe'):
        base, plat = name[:-14], 'win-amd64'
    elif lower.endswith('.exe') and lower.startswith('.win-amd64-py', -20):
        base, py_ver, plat = name[:-20], name[-7:-4], 'win-amd64'
    return base, py_ver, plat
def egg_info_for_url(url):
    """Split *url* into ``(basename, fragment)`` for egg-info detection."""
    scheme, server, path, parameters, query, fragment = urllib.parse.urlparse(url)
    segments = path.split('/')
    base = urllib.parse.unquote(segments[-1])
    # SourceForge mirror URLs end in "/download"; the real name is one up.
    if server == 'sourceforge.net' and base == 'download':
        base = urllib.parse.unquote(segments[-2])
    if '#' in base:
        base, fragment = base.split('#', 1)
    return base, fragment
def distros_for_url(url, metadata=None):
    """Yield egg or source distribution objects that might be found at a URL"""
    base, fragment = egg_info_for_url(url)
    for dist in distros_for_location(url, base, metadata): yield dist
    if fragment:
        # A "#egg=name-version" fragment marks a checkout-style link.
        match = EGG_FRAGMENT.match(fragment)
        if match:
            for dist in interpret_distro_name(
                url, match.group(1), metadata, precedence = CHECKOUT_DIST
            ):
                yield dist
def distros_for_location(location, basename, metadata=None):
    """Yield egg or source distribution objects based on basename"""
    if basename.endswith('.egg.zip'):
        basename = basename[:-4]    # strip the .zip
    if basename.endswith('.egg') and '-' in basename:
        # only one, unambiguous interpretation
        return [Distribution.from_location(location, basename, metadata)]
    if basename.endswith('.exe'):
        # bdist_wininst installers carry version/platform in the name
        win_base, py_ver, platform = parse_bdist_wininst(basename)
        if win_base is not None:
            return interpret_distro_name(
                location, win_base, metadata, py_ver, BINARY_DIST, platform
            )
    # Try source distro extensions (.zip, .tgz, etc.)
    #
    for ext in EXTENSIONS:
        if basename.endswith(ext):
            basename = basename[:-len(ext)]
            return interpret_distro_name(location, basename, metadata)
    return []  # no extension matched
def distros_for_filename(filename, metadata=None):
    """Yield possible egg or source distribution objects for a local file."""
    location = normalize_path(filename)
    basename = os.path.basename(filename)
    return distros_for_location(location, basename, metadata)
def interpret_distro_name(
    location, basename, metadata, py_version=None, precedence=SOURCE_DIST,
    platform=None
):
    """Generate alternative interpretations of a source distro name

    Note: if `location` is a filesystem filename, you should call
    ``pkg_resources.normalize_path()`` on it before passing it to this
    routine!
    """
    # Generate alternative interpretations of a source distro name
    # Because some packages are ambiguous as to name/versions split
    # e.g. "adns-python-1.1.0", "egenix-mx-commercial", etc.
    # So, we generate each possible interepretation (e.g. "adns, python-1.1.0"
    # "adns-python, 1.1.0", and "adns-python-1.1.0, no version"). In practice,
    # the spurious interpretations should be ignored, because in the event
    # there's also an "adns" package, the spurious "python-1.1.0" version will
    # compare lower than any numeric version number, and is therefore unlikely
    # to match a request for it. It's still a potential problem, though, and
    # in the long run PyPI and the distutils should go for "safe" names and
    # versions in distribution archive names (sdist and bdist).

    parts = basename.split('-')
    # Raw string: '\d' in a plain literal is an invalid escape sequence
    # (DeprecationWarning today, a SyntaxError in future Pythons).
    if not py_version and any(re.match(r'py\d\.\d$', p) for p in parts[2:]):
        # it is a bdist_dumb, not an sdist -- bail out
        return

    for p in range(1, len(parts) + 1):
        yield Distribution(
            location, metadata, '-'.join(parts[:p]), '-'.join(parts[p:]),
            py_version=py_version, precedence=precedence,
            platform=platform
        )
# From Python 2.7 docs
def unique_everseen(iterable, key=None):
    """List unique elements, preserving order. Remember all elements ever seen.

    >>> list(unique_everseen('AAAABBBCCDAABBB'))
    ['A', 'B', 'C', 'D']
    >>> list(unique_everseen('ABBCcAD', str.lower))
    ['A', 'B', 'C', 'D']
    """
    seen = set()
    seen_add = seen.add
    if key is None:
        # Use the stdlib itertools.filterfalse (already imported by this
        # module) instead of the vendored six.moves equivalent.
        for element in itertools.filterfalse(seen.__contains__, iterable):
            seen_add(element)
            yield element
    else:
        for element in iterable:
            k = key(element)
            if k not in seen:
                seen_add(k)
                yield element
def unique_values(func):
    """
    Decorator: wrap *func* (which returns an iterable) so the resulting
    iterable only ever yields unique items.
    """
    @wraps(func)
    def deduplicated(*args, **kwargs):
        return unique_everseen(func(*args, **kwargs))
    return deduplicated
# Matches <... rel="..."> tags; group 2 is the rel attribute value.
REL = re.compile("""<([^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*)>""", re.I)
# this line is here to fix emacs' cruddy broken syntax highlighting
@unique_values
def find_external_links(url, page):
    """Find rel="homepage" and rel="download" links in `page`, yielding URLs"""
    for match in REL.finditer(page):
        tag, rel = match.groups()
        # rel may hold a comma-separated list of relations
        rels = set(map(str.strip, rel.lower().split(',')))
        if 'homepage' in rels or 'download' in rels:
            for match in HREF.finditer(tag):
                yield urllib.parse.urljoin(url, htmldecode(match.group(1)))
    # Fall back on PyPI's table headings for pages without rel attributes
    for tag in ("<th>Home Page", "<th>Download URL"):
        pos = page.find(tag)
        if pos!=-1:
            match = HREF.search(page,pos)
            if match:
                yield urllib.parse.urljoin(url, htmldecode(match.group(1)))
class ContentChecker(object):
    """
    Null content checker: defines the checking interface and accepts
    any content.  Real validation is done by subclasses (HashChecker).
    """

    def feed(self, block):
        """
        Feed a block of data to the hash (no-op for the null checker).
        """
        return

    def is_valid(self):
        """
        Check the hash; the null checker never rejects content.
        """
        return True

    def report(self, reporter, template):
        """
        Call reporter with information about the checker (hash name)
        substituted into the template (no-op for the null checker).
        """
        return
class HashChecker(ContentChecker):
    """Validate downloaded content against a hash given in a URL fragment."""

    # Recognizes "#<algorithm>=<hexdigest>" URL fragments.
    pattern = re.compile(
        r'(?P<hash_name>sha1|sha224|sha384|sha256|sha512|md5)='
        r'(?P<expected>[a-f0-9]+)'
    )

    def __init__(self, hash_name, expected):
        self.hash_name = hash_name
        self.hash = hashlib.new(hash_name)
        self.expected = expected

    @classmethod
    def from_url(cls, url):
        "Construct a (possibly null) ContentChecker from a URL"
        fragment = urllib.parse.urlparse(url)[-1]
        if not fragment:
            return ContentChecker()
        match = cls.pattern.search(fragment)
        if not match:
            return ContentChecker()
        # pattern group names match __init__'s parameter names
        return cls(**match.groupdict())

    def feed(self, block):
        self.hash.update(block)

    def is_valid(self):
        return self.hash.hexdigest() == self.expected

    def report(self, reporter, template):
        msg = template % self.hash_name
        return reporter(msg)
class PackageIndex(Environment):
"""A distribution index that scans web pages for download URLs"""
def __init__(
    self, index_url="https://pypi.python.org/simple", hosts=('*',),
    ca_bundle=None, verify_ssl=True, *args, **kw
):
    """Create an index scanner rooted at *index_url*.

    *hosts* is a list of glob patterns of hosts we may contact;
    *ca_bundle*/*verify_ssl* control HTTPS certificate verification.
    """
    Environment.__init__(self,*args,**kw)
    # Ensure exactly one trailing slash on the index URL
    self.index_url = index_url + "/"[:not index_url.endswith('/')]
    self.scanned_urls = {}
    self.fetched_urls = {}
    self.package_pages = {}
    # Compile the host glob patterns into a single alternation matcher
    self.allows = re.compile('|'.join(map(translate,hosts))).match
    self.to_scan = []
    if verify_ssl and ssl_support.is_available and (ca_bundle or ssl_support.find_ca_bundle()):
        self.opener = ssl_support.opener_for(ca_bundle)
    else: self.opener = urllib.request.urlopen
def process_url(self, url, retrieve=False):
    """Evaluate a URL as a possible download, and maybe retrieve it"""
    if url in self.scanned_urls and not retrieve:
        return
    self.scanned_urls[url] = True
    if not URL_SCHEME(url):
        # No scheme: treat as a local file or directory
        self.process_filename(url)
        return
    else:
        dists = list(distros_for_url(url))
        if dists:
            if not self.url_ok(url):
                return
            self.debug("Found link: %s", url)

    if dists or not retrieve or url in self.fetched_urls:
        list(map(self.add, dists))
        return  # don't need the actual page

    if not self.url_ok(url):
        self.fetched_urls[url] = True
        return

    self.info("Reading %s", url)
    self.fetched_urls[url] = True # prevent multiple fetch attempts
    f = self.open_url(url, "Download error on %s: %%s -- Some packages may not be found!" % url)
    if f is None: return
    self.fetched_urls[f.url] = True
    if 'html' not in f.headers.get('content-type', '').lower():
        f.close() # not html, we can't process it
        return

    base = f.url # handle redirects
    page = f.read()
    if not isinstance(page, str): # We are in Python 3 and got bytes. We want str.
        if isinstance(f, urllib.error.HTTPError):
            # Errors have no charset, assume latin1:
            charset = 'latin-1'
        else:
            charset = f.headers.get_param('charset') or 'latin-1'
        page = page.decode(charset, "ignore")
    f.close()
    # Recurse into every hyperlink found on the page
    for match in HREF.finditer(page):
        link = urllib.parse.urljoin(base, htmldecode(match.group(1)))
        self.process_url(link)
    if url.startswith(self.index_url) and getattr(f,'code',None)!=404:
        page = self.process_index(url, page)
def process_filename(self, fn, nested=False):
    """Add distributions found in local file/directory *fn* to the index."""
    # process filenames or directories
    if not os.path.exists(fn):
        self.warn("Not found: %s", fn)
        return

    if os.path.isdir(fn) and not nested:
        # Recurse one level into directories (nested=True stops deeper walks)
        path = os.path.realpath(fn)
        for item in os.listdir(path):
            self.process_filename(os.path.join(path,item), True)

    dists = distros_for_filename(fn)
    if dists:
        self.debug("Found: %s", fn)
        list(map(self.add, dists))
def url_ok(self, url, fatal=False):
    """Return True when *url* is a file: URL or its host is allowed.

    Otherwise warn (or raise DistutilsError when *fatal* is true).
    """
    scheme_match = URL_SCHEME(url)
    is_file_url = bool(scheme_match) and scheme_match.group(1).lower() == 'file'
    if is_file_url or self.allows(urllib.parse.urlparse(url)[1]):
        return True
    msg = ("\nNote: Bypassing %s (disallowed host; see "
           "http://bit.ly/1dg9ijs for details).\n")
    if fatal:
        raise DistutilsError(msg % url)
    self.warn(msg, url)
def scan_egg_links(self, search_path):
    """Scan every directory on *search_path* for .egg-link files."""
    for path in search_path:
        if not os.path.isdir(path):
            continue
        for entry in os.listdir(path):
            if entry.endswith('.egg-link'):
                self.scan_egg_link(path, entry)
def scan_egg_link(self, path, entry):
    """Register the development distribution named by one .egg-link file."""
    with open(os.path.join(path, entry)) as raw_lines:
        # filter non-empty lines
        lines = list(filter(None, map(str.strip, raw_lines)))

    if len(lines) != 2:
        # format is not recognized; punt
        return

    # An egg-link holds the egg path and the setup directory, in that order
    egg_path, setup_path = lines

    for dist in find_distributions(os.path.join(path, egg_path)):
        dist.location = os.path.join(path, *lines)
        dist.precedence = SOURCE_DIST
        self.add(dist)
def process_index(self,url,page):
    """Process the contents of a PyPI page"""
    def scan(link):
        # Process a URL to see if it's for a package page
        if link.startswith(self.index_url):
            parts = list(map(
                urllib.parse.unquote, link[len(self.index_url):].split('/')
            ))
            if len(parts)==2 and '#' not in parts[1]:
                # it's a package page, sanitize and index it
                pkg = safe_name(parts[0])
                ver = safe_version(parts[1])
                self.package_pages.setdefault(pkg.lower(),{})[link] = True
                return to_filename(pkg), to_filename(ver)
        return None, None

    # process an index page into the package-page index
    for match in HREF.finditer(page):
        try:
            scan(urllib.parse.urljoin(url, htmldecode(match.group(1))))
        except ValueError:
            pass

    pkg, ver = scan(url) # ensure this page is in the page index
    if pkg:
        # process individual package page
        for new_url in find_external_links(url, page):
            # Process the found URL
            base, frag = egg_info_for_url(new_url)
            if base.endswith('.py') and not frag:
                if ver:
                    # tag the bare .py link with an unambiguous egg spec
                    new_url+='#egg=%s-%s' % (pkg,ver)
                else:
                    self.need_version_info(url)
            self.scan_url(new_url)

        # Rewrite PyPI's md5 markup into direct "#md5=" fragment links
        return PYPI_MD5.sub(
            lambda m: '<a href="%s#md5=%s">%s</a>' % m.group(1,3,2), page
        )
    else:
        return "" # no sense double-scanning non-package pages
def need_version_info(self, url):
    """Request a full index scan because *url* links lack version info."""
    msg = ("Page at %s links to .py file(s) without version info; an index "
           "scan is required.")
    self.scan_all(msg, url)
def scan_all(self, msg=None, *args):
    """Scan the top-level index page once, optionally warning first."""
    if self.index_url in self.fetched_urls:
        return
    if msg:
        self.warn(msg, *args)
    self.info(
        "Scanning index of all packages (this may take a while)"
    )
    self.scan_url(self.index_url)
def find_packages(self, requirement):
    """Scan index pages that could list distributions for *requirement*."""
    self.scan_url(self.index_url + requirement.unsafe_name+'/')

    if not self.package_pages.get(requirement.key):
        # Fall back to safe version of the name
        self.scan_url(self.index_url + requirement.project_name+'/')

    if not self.package_pages.get(requirement.key):
        # We couldn't find the target package, so search the index page too
        self.not_found_in_index(requirement)

    for url in list(self.package_pages.get(requirement.key,())):
        # scan each page that might be related to the desired package
        self.scan_url(url)
def obtain(self, requirement, installer=None):
    """Locate a distribution matching *requirement*, scanning if needed."""
    self.prescan()
    self.find_packages(requirement)
    for candidate in self[requirement.key]:
        if candidate in requirement:
            return candidate
        self.debug("%s does not match %s", requirement, candidate)
    # Nothing suitable found here; let the base Environment try
    return super(PackageIndex, self).obtain(requirement, installer)
def check_hash(self, checker, filename, tfp):
    """
    checker is a ContentChecker

    Close and delete *filename* and raise DistutilsError when the
    checker rejects the downloaded content.
    """
    checker.report(self.debug,
        "Validating %%s checksum for %s" % filename)
    if not checker.is_valid():
        tfp.close()
        os.unlink(filename)
        raise DistutilsError(
            "%s validation failed for %s; "
            "possible download problem?" % (
                checker.hash.name, os.path.basename(filename))
        )
def add_find_links(self, urls):
    """Add `urls` to the list that will be prescanned for searches"""
    for url in urls:
        if (
            self.to_scan is None # if we have already "gone online"
            or not URL_SCHEME(url) # or it's a local file/directory
            or url.startswith('file:')
            or list(distros_for_url(url)) # or a direct package link
        ):
            # then go ahead and process it now
            self.scan_url(url)
        else:
            # otherwise, defer retrieval till later
            self.to_scan.append(url)
def prescan(self):
    """Process any URLs queued via add_find_links, then stop queueing."""
    if self.to_scan:
        for queued_url in self.to_scan:
            self.scan_url(queued_url)
    self.to_scan = None # from now on, go ahead and process immediately
def not_found_in_index(self, requirement):
    """Log a missing package page at the right level, then scan everything."""
    if self[requirement.key]: # we've seen at least one distro
        meth, msg = self.info, "Couldn't retrieve index page for %r"
    else: # no distros seen for this name, might be misspelled
        meth, msg = (self.warn,
            "Couldn't find index page for %r (maybe misspelled?)")
    meth(msg, requirement.unsafe_name)
    self.scan_all()
def download(self, spec, tmpdir):
    """Locate and/or download `spec` to `tmpdir`, returning a local path

    `spec` may be a ``Requirement`` object, or a string containing a URL,
    an existing local filename, or a project/version requirement spec
    (i.e. the string form of a ``Requirement`` object). If it is the URL
    of a .py file with an unambiguous ``#egg=name-version`` tag (i.e., one
    that escapes ``-`` as ``_`` throughout), a trivial ``setup.py`` is
    automatically created alongside the downloaded file.

    If `spec` is a ``Requirement`` object or a string containing a
    project/version requirement spec, this method returns the location of
    a matching distribution (possibly after downloading it to `tmpdir`).
    If `spec` is a locally existing file or directory name, it is simply
    returned unchanged. If `spec` is a URL, it is downloaded to a subpath
    of `tmpdir`, and the local filename is returned. Various errors may be
    raised if a problem occurs during downloading.
    """
    if not isinstance(spec,Requirement):
        scheme = URL_SCHEME(spec)
        if scheme:
            # It's a url, download it to tmpdir
            found = self._download_url(scheme.group(1), spec, tmpdir)
            base, fragment = egg_info_for_url(spec)
            if base.endswith('.py'):
                # synthesize a setup.py for the bare module download
                found = self.gen_setup(found,fragment,tmpdir)
            return found
        elif os.path.exists(spec):
            # Existing file or directory, just return it
            return spec
        else:
            try:
                spec = Requirement.parse(spec)
            except ValueError:
                raise DistutilsError(
                    "Not a URL, existing file, or requirement spec: %r" %
                    (spec,)
                )
    return getattr(self.fetch_distribution(spec, tmpdir),'location',None)
    def fetch_distribution(
            self, requirement, tmpdir, force_scan=False, source=False,
            develop_ok=False, local_index=None
    ):
        """Obtain a distribution suitable for fulfilling `requirement`
        `requirement` must be a ``pkg_resources.Requirement`` instance.
        If necessary, or if the `force_scan` flag is set, the requirement is
        searched for in the (online) package index as well as the locally
        installed packages. If a distribution matching `requirement` is found,
        the returned distribution's ``location`` is the value you would have
        gotten from calling the ``download()`` method with the matching
        distribution's URL or filename. If no matching distribution is found,
        ``None`` is returned.
        If the `source` flag is set, only source distributions and source
        checkout links will be considered. Unless the `develop_ok` flag is
        set, development and system eggs (i.e., those using the ``.egg-info``
        format) will be ignored.
        """
        # process a Requirement
        self.info("Searching for %s", requirement)
        skipped = {}  # dev/system eggs already warned about (warn only once)
        dist = None
        def find(req, env=None):
            if env is None:
                env = self
            # Find a matching distribution; may be called more than once
            for dist in env[req.key]:
                if dist.precedence==DEVELOP_DIST and not develop_ok:
                    if dist not in skipped:
                        self.warn("Skipping development or system egg: %s",dist)
                        skipped[dist] = 1
                    continue
                if dist in req and (dist.precedence<=SOURCE_DIST or not source):
                    return dist
        # Lookup order: (1) optional forced full scan, (2) optional local
        # index, (3) pending prescan of queued URLs, (4) targeted package
        # search — each stage only runs if the previous ones found nothing.
        if force_scan:
            self.prescan()
            self.find_packages(requirement)
            dist = find(requirement)
        if local_index is not None:
            dist = dist or find(requirement, local_index)
        if dist is None:
            if self.to_scan is not None:
                self.prescan()
            dist = find(requirement)
        if dist is None and not force_scan:
            self.find_packages(requirement)
            dist = find(requirement)
        if dist is None:
            self.warn(
                "No local packages or download links found for %s%s",
                (source and "a source distribution of " or ""),
                requirement,
            )
        else:
            self.info("Best match: %s", dist)
            # Clone so the returned dist points at the downloaded local copy.
            return dist.clone(location=self.download(dist.location, tmpdir))
def fetch(self, requirement, tmpdir, force_scan=False, source=False):
"""Obtain a file suitable for fulfilling `requirement`
DEPRECATED; use the ``fetch_distribution()`` method now instead. For
backward compatibility, this routine is identical but returns the
``location`` of the downloaded distribution instead of a distribution
object.
"""
dist = self.fetch_distribution(requirement,tmpdir,force_scan,source)
if dist is not None:
return dist.location
return None
    def gen_setup(self, filename, fragment, tmpdir):
        """Synthesize a trivial setup.py next to a downloaded bare .py file.

        `fragment` is the URL's ``#egg=name-version`` tag; it must identify
        exactly one project/version, otherwise a DistutilsError is raised.
        Returns `filename` (copied into `tmpdir` first when necessary).
        """
        match = EGG_FRAGMENT.match(fragment)
        # interpret_distro_name() can yield several candidate splits of the
        # name/version; only an unambiguous single candidate is acceptable.
        dists = match and [
            d for d in
            interpret_distro_name(filename, match.group(1), None) if d.version
        ] or []
        if len(dists)==1: # unambiguous ``#egg`` fragment
            basename = os.path.basename(filename)
            # Make sure the file has been downloaded to the temp dir.
            if os.path.dirname(filename) != tmpdir:
                dst = os.path.join(tmpdir, basename)
                from setuptools.command.easy_install import samefile
                if not samefile(filename, dst):
                    shutil.copy2(filename, dst)
                    filename=dst
            # Write a minimal setup.py exposing the file as a py_module.
            with open(os.path.join(tmpdir, 'setup.py'), 'w') as file:
                file.write(
                    "from setuptools import setup\n"
                    "setup(name=%r, version=%r, py_modules=[%r])\n"
                    % (
                        dists[0].project_name, dists[0].version,
                        os.path.splitext(basename)[0]
                    )
                )
            return filename
        elif match:
            raise DistutilsError(
                "Can't unambiguously interpret project/version identifier %r; "
                "any dashes in the name or version should be escaped using "
                "underscores. %r" % (fragment,dists)
            )
        else:
            raise DistutilsError(
                "Can't process plain .py files without an '#egg=name-version'"
                " suffix to enable automatic setup script generation."
            )
    dl_blocksize = 8192  # chunk size (bytes) used by _download_to()
    def _download_to(self, url, filename):
        """Stream `url` into `filename`, verifying any hash fragment.

        Returns the response headers.  Raises DistutilsError when the
        server answers with an HTTP error, or when the downloaded content
        fails the ``#md5=``-style hash check embedded in the URL.
        """
        self.info("Downloading %s", url)
        # Download the file
        fp, info = None, None
        try:
            checker = HashChecker.from_url(url)
            fp = self.open_url(strip_fragment(url))
            # open_url() returns (rather than raises) HTTPError instances.
            if isinstance(fp, urllib.error.HTTPError):
                raise DistutilsError(
                    "Can't download %s: %s %s" % (url, fp.code,fp.msg)
                )
            headers = fp.info()
            blocknum = 0
            bs = self.dl_blocksize
            size = -1
            if "content-length" in headers:
                # Some servers return multiple Content-Length headers :(
                sizes = get_all_headers(headers, 'Content-Length')
                size = max(map(int, sizes))
                self.reporthook(url, filename, blocknum, bs, size)
            with open(filename,'wb') as tfp:
                while True:
                    block = fp.read(bs)
                    if block:
                        # Feed each chunk to the hash checker as we write it.
                        checker.feed(block)
                        tfp.write(block)
                        blocknum += 1
                        self.reporthook(url, filename, blocknum, bs, size)
                    else:
                        break
                self.check_hash(checker, filename, tfp)
            return headers
        finally:
            if fp: fp.close()
    def reporthook(self, url, filename, blocknum, blksize, size):
        # Download-progress callback invoked once per block by
        # _download_to(); intentionally a no-op so subclasses (e.g.
        # interactive front ends) can override it to show progress.
        pass # no-op
    def open_url(self, url, warning=None):
        """Open `url`, translating low-level errors into DistutilsError.

        ``file:`` URLs are served via local_open().  HTTP error responses
        are *returned* (not raised) so callers can inspect them.  For other
        failures: if `warning` (a format string taking one argument) is
        given, the problem is logged and None is implicitly returned;
        otherwise a DistutilsError is raised.
        """
        if url.startswith('file:'):
            return local_open(url)
        try:
            return open_with_auth(url, self.opener)
        except (ValueError, http_client.InvalidURL) as v:
            msg = ' '.join([str(arg) for arg in v.args])
            if warning:
                self.warn(warning, msg)
            else:
                raise DistutilsError('%s %s' % (url, msg))
        except urllib.error.HTTPError as v:
            # HTTP-level errors are returned for the caller to examine.
            return v
        except urllib.error.URLError as v:
            if warning:
                self.warn(warning, v.reason)
            else:
                raise DistutilsError("Download error for %s: %s"
                                     % (url, v.reason))
        except http_client.BadStatusLine as v:
            if warning:
                self.warn(warning, v.line)
            else:
                raise DistutilsError(
                    '%s returned a bad status line. The server might be '
                    'down, %s' %
                    (url, v.line)
                )
        except http_client.HTTPException as v:
            if warning:
                self.warn(warning, v)
            else:
                raise DistutilsError("Download error for %s: %s"
                                     % (url, v))
    def _download_url(self, scheme, url, tmpdir):
        """Dispatch a download to the handler matching `scheme`.

        Picks a safe local filename inside `tmpdir`, then delegates to the
        svn/git/hg checkout helpers, resolves ``file:`` URLs to local
        paths, or falls back to a plain HTTP(S) download.
        """
        # Determine download filename
        #
        name, fragment = egg_info_for_url(url)
        if name:
            # Neutralize path traversal sequences in the derived name.
            while '..' in name:
                name = name.replace('..','.').replace('\\','_')
        else:
            name = "__downloaded__" # default if URL has no path contents
        if name.endswith('.egg.zip'):
            name = name[:-4] # strip the extra .zip before download
        filename = os.path.join(tmpdir,name)
        # Download the file
        #
        if scheme=='svn' or scheme.startswith('svn+'):
            return self._download_svn(url, filename)
        elif scheme=='git' or scheme.startswith('git+'):
            return self._download_git(url, filename)
        elif scheme.startswith('hg+'):
            return self._download_hg(url, filename)
        elif scheme=='file':
            return urllib.request.url2pathname(urllib.parse.urlparse(url)[2])
        else:
            self.url_ok(url, True) # raises error if not allowed
            return self._attempt_download(url, filename)
def scan_url(self, url):
self.process_url(url, True)
def _attempt_download(self, url, filename):
headers = self._download_to(url, filename)
if 'html' in headers.get('content-type','').lower():
return self._download_html(url, headers, filename)
else:
return filename
def _download_html(self, url, headers, filename):
file = open(filename)
for line in file:
if line.strip():
# Check for a subversion index page
if re.search(r'<title>([^- ]+ - )?Revision \d+:', line):
# it's a subversion index page:
file.close()
os.unlink(filename)
return self._download_svn(url, filename)
break # not an index page
file.close()
os.unlink(filename)
raise DistutilsError("Unexpected HTML page found at "+url)
    def _download_svn(self, url, filename):
        """Check out a Subversion URL into `filename` and return it.

        ``svn://user:pw@host/...`` URLs have the credentials extracted and
        passed as --username/--password options instead.

        SECURITY NOTE(review): the URL and credentials are interpolated
        unquoted into an os.system() shell command; a crafted download
        link could inject shell commands.  Flagged here rather than
        changed, to preserve existing behavior.
        """
        url = url.split('#',1)[0] # remove any fragment for svn's sake
        creds = ''
        if url.lower().startswith('svn:') and '@' in url:
            scheme, netloc, path, p, q, f = urllib.parse.urlparse(url)
            if not netloc and path.startswith('//') and '/' in path[2:]:
                netloc, path = path[2:].split('/',1)
                auth, host = splituser(netloc)
                if auth:
                    if ':' in auth:
                        user, pw = auth.split(':',1)
                        creds = " --username=%s --password=%s" % (user, pw)
                    else:
                        creds = " --username="+auth
                    netloc = host
                    parts = scheme, netloc, url, p, q, f
                    url = urllib.parse.urlunparse(parts)
        self.info("Doing subversion checkout from %s to %s", url, filename)
        os.system("svn checkout%s -q %s %s" % (creds, url, filename))
        return filename
@staticmethod
def _vcs_split_rev_from_url(url, pop_prefix=False):
scheme, netloc, path, query, frag = urllib.parse.urlsplit(url)
scheme = scheme.split('+', 1)[-1]
# Some fragment identification fails
path = path.split('#',1)[0]
rev = None
if '@' in path:
path, rev = path.rsplit('@', 1)
# Also, discard fragment
url = urllib.parse.urlunsplit((scheme, netloc, path, query, ''))
return url, rev
    def _download_git(self, url, filename):
        """Clone a git URL into `filename`, checking out ``@rev`` if given.

        SECURITY NOTE(review): url/filename/rev are interpolated unquoted
        into os.system() shell commands — a crafted link could inject
        shell commands.  Flagged, not changed, to preserve behavior.
        """
        filename = filename.split('#',1)[0]
        url, rev = self._vcs_split_rev_from_url(url, pop_prefix=True)
        self.info("Doing git clone from %s to %s", url, filename)
        os.system("git clone --quiet %s %s" % (url, filename))
        if rev is not None:
            self.info("Checking out %s", rev)
            os.system("(cd %s && git checkout --quiet %s)" % (
                filename,
                rev,
            ))
        return filename
    def _download_hg(self, url, filename):
        """Clone a Mercurial URL into `filename`, updating to ``@rev``.

        SECURITY NOTE(review): like _download_git, this builds os.system()
        shell commands from unquoted URL components — injection risk.
        """
        filename = filename.split('#',1)[0]
        url, rev = self._vcs_split_rev_from_url(url, pop_prefix=True)
        self.info("Doing hg clone from %s to %s", url, filename)
        os.system("hg clone --quiet %s %s" % (url, filename))
        if rev is not None:
            self.info("Updating to %s", rev)
            os.system("(cd %s && hg up -C -r %s >&-)" % (
                filename,
                rev,
            ))
        return filename
    # Logging helpers: forward to the module-level `log` object
    # (presumably distutils.log — confirm at the module imports).
    def debug(self, msg, *args):
        log.debug(msg, *args)
    def info(self, msg, *args):
        log.info(msg, *args)
    def warn(self, msg, *args):
        log.warn(msg, *args)
# Pre-bound substitution function for character entity references: decimal
# numeric ("&#65;"), hexadecimal numeric ("&#x41;"), or named ("&amp;").
# The trailing ";" is optional to cope with sloppy HTML.
entity_sub = re.compile(r'&(#(\d+|x[\da-fA-F]+)|[\w.:-]+);?').sub
def uchr(c):
    # Return `c` unchanged if it is already a character; otherwise treat it
    # as an integer code point.  On Python 2, code points above 255 need
    # unichr() (via six), while chr() is used for the Latin-1 range.
    if not isinstance(c, int):
        return c
    if c>255: return six.unichr(c)
    return chr(c)
def decode_entity(match):
    # Convert one entity match (produced by `entity_sub`) to its character.
    what = match.group(1)
    if what.startswith('#x'):
        what = int(what[2:], 16)  # hexadecimal numeric reference
    elif what.startswith('#'):
        what = int(what[1:])  # decimal numeric reference
    else:
        # Named reference; unknown names fall back to the raw matched text.
        what = six.moves.html_entities.name2codepoint.get(what, match.group(0))
    return uchr(what)
def htmldecode(text):
    """Decode HTML entities in the given text.

    Unknown named entities are left untouched (decode_entity returns the
    original matched text for them).
    """
    return entity_sub(decode_entity, text)
def socket_timeout(timeout=15):
    """Decorator factory: run the wrapped callable under a socket timeout.

    The global default socket timeout is set to `timeout` seconds for the
    duration of the call and restored afterwards, even if the call raises.

    Improvement: the inner wrapper now uses functools.wraps so the
    decorated function keeps its name/docstring (it previously showed up
    as ``_socket_timeout`` in tracebacks and docs).
    """
    from functools import wraps

    def _socket_timeout(func):
        @wraps(func)
        def _socket_timeout(*args, **kwargs):
            old_timeout = socket.getdefaulttimeout()
            socket.setdefaulttimeout(timeout)
            try:
                return func(*args, **kwargs)
            finally:
                # Always restore the previous global timeout.
                socket.setdefaulttimeout(old_timeout)
        return _socket_timeout
    return _socket_timeout
def _encode_auth(auth):
"""
A function compatible with Python 2.3-3.3 that will encode
auth from a URL suitable for an HTTP header.
>>> str(_encode_auth('username%3Apassword'))
'dXNlcm5hbWU6cGFzc3dvcmQ='
Long auth strings should not cause a newline to be inserted.
>>> long_auth = 'username:' + 'password'*10
>>> chr(10) in str(_encode_auth(long_auth))
False
"""
auth_s = urllib.parse.unquote(auth)
# convert to bytes
auth_bytes = auth_s.encode()
# use the legacy interface for Python 2.3 support
encoded_bytes = base64.encodestring(auth_bytes)
# convert back to a string
encoded = encoded_bytes.decode()
# strip the trailing carriage return
return encoded.replace('\n','')
class Credential(object):
    """A username/password pair that unpacks like a 2-tuple."""

    def __init__(self, username, password):
        self.username = username
        self.password = password

    def __iter__(self):
        # Support tuple-style unpacking: user, pw = cred
        return iter((self.username, self.password))

    def __str__(self):
        # "user:password" — the form expected by HTTP basic auth.
        return '{0.username}:{0.password}'.format(self)
class PyPIConfig(configparser.RawConfigParser):
    def __init__(self):
        """
        Load from ~/.pypirc
        """
        # Every section gets empty-string defaults so .get() never raises
        # for a missing option.
        defaults = dict.fromkeys(['username', 'password', 'repository'], '')
        configparser.RawConfigParser.__init__(self, defaults)
        # NOTE: reads the filesystem at construction time; missing files
        # are silently ignored.
        rc = os.path.join(os.path.expanduser('~'), '.pypirc')
        if os.path.exists(rc):
            self.read(rc)
    @property
    def creds_by_repository(self):
        """Map repository URL -> Credential for sections that declare one."""
        sections_with_repositories = [
            section for section in self.sections()
            if self.get(section, 'repository').strip()
        ]
        return dict(map(self._get_repo_cred, sections_with_repositories))
    def _get_repo_cred(self, section):
        # Build one (repository_url, Credential) pair from a config section.
        repo = self.get(section, 'repository').strip()
        return repo, Credential(
            self.get(section, 'username').strip(),
            self.get(section, 'password').strip(),
        )
    def find_credential(self, url):
        """
        If the URL indicated appears to be a repository defined in this
        config, return the credential for that repository.

        Returns None (implicitly) when no configured repository URL is a
        prefix of `url`.
        """
        for repository, cred in self.creds_by_repository.items():
            if url.startswith(repository):
                return cred
def open_with_auth(url, opener=urllib.request.urlopen):
    """Open a urllib2 request, handling HTTP authentication

    Credentials come either from the URL itself (user:pass@host) or, when
    absent, from a matching repository entry in ~/.pypirc.
    """
    scheme, netloc, path, params, query, frag = urllib.parse.urlparse(url)
    # Double scheme does not raise on Mac OS X as revealed by a
    # failing test. We would expect "nonnumeric port". Refs #20.
    if netloc.endswith(':'):
        raise http_client.InvalidURL("nonnumeric port: ''")
    if scheme in ('http', 'https'):
        auth, host = splituser(netloc)
    else:
        auth = None
    # NOTE(review): for a non-http(s) scheme with a matching .pypirc
    # credential, `host` is never bound but is used below — looks like a
    # latent NameError; confirm whether such URLs can reach this path.
    if not auth:
        cred = PyPIConfig().find_credential(url)
        if cred:
            auth = str(cred)
            info = cred.username, url
            log.info('Authenticating as %s for %s (from .pypirc)' % info)
    if auth:
        auth = "Basic " + _encode_auth(auth)
        # Rebuild the URL without the user:pass@ part before requesting.
        parts = scheme, host, path, params, query, frag
        new_url = urllib.parse.urlunparse(parts)
        request = urllib.request.Request(new_url)
        request.add_header("Authorization", auth)
    else:
        request = urllib.request.Request(url)
    request.add_header('User-Agent', user_agent)
    fp = opener(request)
    if auth:
        # Put authentication info back into request URL if same host,
        # so that links found on the page will work
        s2, h2, path2, param2, query2, frag2 = urllib.parse.urlparse(fp.url)
        if s2==scheme and h2==host:
            parts = s2, netloc, path2, param2, query2, frag2
            fp.url = urllib.parse.urlunparse(parts)
    return fp
# adding a timeout to avoid freezing package_index
# (applied after the definition so the module-level name is the wrapper)
open_with_auth = socket_timeout(_SOCKET_TIMEOUT)(open_with_auth)
def fix_sf_url(url):
    """Return `url` unchanged; kept only for backward compatibility."""
    return url
def local_open(url):
    """Read a local path, with special support for directories

    Files are opened normally.  A directory URL ending in "/" is rendered
    as a synthetic HTML listing (or served from its index.html if one
    exists).  The result is always wrapped in an HTTPError object, which
    doubles as a file-like response here (status 200 for success, 404 for
    a missing path).
    """
    scheme, server, path, param, query, frag = urllib.parse.urlparse(url)
    filename = urllib.request.url2pathname(path)
    if os.path.isfile(filename):
        return urllib.request.urlopen(url)
    elif path.endswith('/') and os.path.isdir(filename):
        files = []
        for f in os.listdir(filename):
            filepath = os.path.join(filename, f)
            if f == 'index.html':
                # An index.html short-circuits the generated listing.
                with open(filepath, 'r') as fp:
                    body = fp.read()
                break
            elif os.path.isdir(filepath):
                f += '/'
            files.append('<a href="{name}">{name}</a>'.format(name=f))
        else:
            # for/else: no index.html found — build a listing page.
            tmpl = ("<html><head><title>{url}</title>"
                    "</head><body>{files}</body></html>")
            body = tmpl.format(url=url, files='\n'.join(files))
        status, message = 200, "OK"
    else:
        status, message, body = 404, "Path not found", "Not found"
    headers = {'content-type': 'text/html'}
    body_stream = six.StringIO(body)
    return urllib.error.HTTPError(url, status, message, headers, body_stream)
| gpl-3.0 |
vprime/puuuu | env/lib/python2.7/site-packages/Crypto/SelfTest/Hash/test_MD2.py | 116 | 2368 | # -*- coding: utf-8 -*-
#
# SelfTest/Hash/MD2.py: Self-test for the MD2 hash function
#
# Written in 2008 by Dwayne C. Litzenberger <dlitz@dlitz.net>
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
"""Self-test suite for Crypto.Hash.MD2"""
__revision__ = "$Id$"
from Crypto.Util.py3compat import *
# This is a list of (expected_result, input[, description]) tuples, where
# expected_result is the hex MD2 digest of the ASCII input string.
test_data = [
    # Test vectors from RFC 1319
    ('8350e5a3e24c153df2275c9f80692773', '', "'' (empty string)"),
    ('32ec01ec4a6dac72c0ab96fb34c0b5d1', 'a'),
    ('da853b0d3f88d99b30283a69e6ded6bb', 'abc'),
    ('ab4f496bfb2a530b219ff33031fe06b0', 'message digest'),
    ('4e8ddff3650292ab5a4108c3aa47940b', 'abcdefghijklmnopqrstuvwxyz',
        'a-z'),
    ('da33def2a42df13975352846c30338cd',
        'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789',
        'A-Z, a-z, 0-9'),
    ('d5976f79d83d3a0dc9806c3c66f3efd8',
        '1234567890123456789012345678901234567890123456'
        + '7890123456789012345678901234567890',
        "'1234567890' * 8"),
]
def get_tests(config={}):
    """Build the MD2 self-test suite from the shared RFC 1319 vectors."""
    from Crypto.Hash import MD2
    from common import make_hash_tests
    # ASN.1 OID for MD2 (1.2.840.113549.2.2), DER-encoded.
    md2_oid = "\x06\x08\x2a\x86\x48\x86\xf7\x0d\x02\x02"
    return make_hash_tests(
        MD2, "MD2", test_data,
        digest_size=16,
        oid=md2_oid,
    )
if __name__ == '__main__':
    # Run the self-tests when executed directly.
    import unittest

    def suite():
        return unittest.TestSuite(get_tests())

    unittest.main(defaultTest='suite')
| mit |
mancoast/CPythonPyc_test | fail/325_test_code.py | 56 | 3572 | """This module includes tests of the code object representation.
>>> def f(x):
... def g(y):
... return x + y
... return g
...
>>> dump(f.__code__)
name: f
argcount: 1
kwonlyargcount: 0
names: ()
varnames: ('x', 'g')
cellvars: ('x',)
freevars: ()
nlocals: 2
flags: 3
consts: ('None', '<code object g>')
>>> dump(f(4).__code__)
name: g
argcount: 1
kwonlyargcount: 0
names: ()
varnames: ('y',)
cellvars: ()
freevars: ('x',)
nlocals: 1
flags: 19
consts: ('None',)
>>> def h(x, y):
... a = x + y
... b = x - y
... c = a * b
... return c
...
>>> dump(h.__code__)
name: h
argcount: 2
kwonlyargcount: 0
names: ()
varnames: ('x', 'y', 'a', 'b', 'c')
cellvars: ()
freevars: ()
nlocals: 5
flags: 67
consts: ('None',)
>>> def attrs(obj):
... print(obj.attr1)
... print(obj.attr2)
... print(obj.attr3)
>>> dump(attrs.__code__)
name: attrs
argcount: 1
kwonlyargcount: 0
names: ('print', 'attr1', 'attr2', 'attr3')
varnames: ('obj',)
cellvars: ()
freevars: ()
nlocals: 1
flags: 67
consts: ('None',)
>>> def optimize_away():
... 'doc string'
... 'not a docstring'
... 53
... 0x53
>>> dump(optimize_away.__code__)
name: optimize_away
argcount: 0
kwonlyargcount: 0
names: ()
varnames: ()
cellvars: ()
freevars: ()
nlocals: 0
flags: 67
consts: ("'doc string'", 'None')
>>> def keywordonly_args(a,b,*,k1):
... return a,b,k1
...
>>> dump(keywordonly_args.__code__)
name: keywordonly_args
argcount: 2
kwonlyargcount: 1
names: ()
varnames: ('a', 'b', 'k1')
cellvars: ()
freevars: ()
nlocals: 3
flags: 67
consts: ('None',)
"""
import unittest
import weakref
import _testcapi
def consts(t):
    """Yield a doctest-safe sequence of object reprs.

    Code objects embed memory addresses in their repr, so they are
    replaced by a stable "<code object NAME>" form; everything else is
    yielded as its plain repr.
    """
    for item in t:
        text = repr(item)
        if text.startswith("<code object"):
            text = "<code object %s>" % item.co_name
        yield text
def dump(co):
    """Print out a text representation of a code object."""
    # Attributes are printed in a fixed order so the doctests above stay
    # stable across runs.
    for attr in ["name", "argcount", "kwonlyargcount", "names", "varnames",
                 "cellvars", "freevars", "nlocals", "flags"]:
        print("%s: %s" % (attr, getattr(co, "co_" + attr)))
    print("consts:", tuple(consts(co.co_consts)))
class CodeTest(unittest.TestCase):
    def test_newempty(self):
        # Exercises the C API's PyCode_NewEmpty() via _testcapi: the empty
        # code object must carry through filename, name and first line.
        co = _testcapi.code_newempty("filename", "funcname", 15)
        self.assertEqual(co.co_filename, "filename")
        self.assertEqual(co.co_name, "funcname")
        self.assertEqual(co.co_firstlineno, 15)
class CodeWeakRefTest(unittest.TestCase):
    def test_basic(self):
        # Create a code object in a clean environment so that we know we have
        # the only reference to it left.
        namespace = {}
        exec("def f(): pass", globals(), namespace)
        f = namespace["f"]
        del namespace
        self.called = False
        def callback(code):
            self.called = True
        # f is now the last reference to the function, and through it, the code
        # object.  While we hold it, check that we can create a weakref and
        # deref it.  Then delete it, and check that the callback gets called and
        # the reference dies.
        coderef = weakref.ref(f.__code__, callback)
        self.assertTrue(bool(coderef()))
        del f
        self.assertFalse(bool(coderef()))
        self.assertTrue(self.called)
def test_main(verbose=None):
    """Run this module's doctests and unittest classes."""
    from test.support import run_doctest, run_unittest
    from test import test_code
    run_doctest(test_code, verbose)
    run_unittest(CodeTest, CodeWeakRefTest)
if __name__ == "__main__":
    test_main()
| gpl-3.0 |
jtux270/translate | ovirt/3.6_source/packaging/setup/plugins/ovirt-engine-setup/ovirt-engine/fence_kdump_listener/config.py | 8 | 3122 | #
# ovirt-engine-setup -- ovirt engine setup
# Copyright (C) 2014-2015 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""fence_kdump listener plugin."""
import gettext
from otopi import plugin, util
from ovirt_engine_setup import constants as osetupcons
from ovirt_engine_setup.engine import constants as oenginecons
from ovirt_engine_setup.engine_common import constants as oengcommcons
def _(m):
return gettext.dgettext(message=m, domain='ovirt-engine-setup')
@util.export
class Plugin(plugin.PluginBase):
    """fence_kdump listener plugin.

    Configures the firewall for, and manages the lifecycle of, the
    ovirt-fence-kdump-listener service during engine setup.
    """
    def __init__(self, context):
        super(Plugin, self).__init__(context=context)
        # Disabled later when the engine itself is not being set up.
        self._enabled = True
    @plugin.event(
        stage=plugin.Stages.STAGE_SETUP,
    )
    def _setup(self):
        # Mark that the listener must be stopped before changes are applied.
        self.environment[
            oengcommcons.ConfigEnv.FENCE_KDUMP_LISTENER_STOP_NEEDED
        ] = True
    @plugin.event(
        stage=plugin.Stages.STAGE_CUSTOMIZATION,
        before=(
            osetupcons.Stages.DIALOG_TITLES_E_SYSTEM,
        ),
        after=(
            oengcommcons.Stages.DB_CONNECTION_STATUS,
            osetupcons.Stages.DIALOG_TITLES_S_SYSTEM,
        ),
        name=oenginecons.Stages.KDUMP_ALLOW,
    )
    def _customization_disable(self):
        # Skip this plugin entirely when the engine is not enabled.
        if not self.environment[oenginecons.CoreEnv.ENABLE]:
            self._enabled = False
    @plugin.event(
        stage=plugin.Stages.STAGE_CUSTOMIZATION,
        condition=lambda self: self._enabled,
        before=(
            osetupcons.Stages.DIALOG_TITLES_E_SYSTEM,
        ),
        after=(
            oengcommcons.Stages.DB_CONNECTION_STATUS,
            osetupcons.Stages.DIALOG_TITLES_S_SYSTEM,
            oenginecons.Stages.KDUMP_ALLOW,
        ),
    )
    def _customization_firewall(self):
        # Register the listener's firewalld service definition.
        self.environment[osetupcons.NetEnv.FIREWALLD_SERVICES].extend([
            {
                'name': 'ovirt-fence-kdump-listener',
                'directory': 'ovirt-engine'
            },
        ])
    @plugin.event(
        stage=plugin.Stages.STAGE_CLOSEUP,
        condition=lambda self: (
            not self.environment[
                osetupcons.CoreEnv.DEVELOPER_MODE
            ] and
            self._enabled
        ),
    )
    def _closeup(self):
        # Restart the listener (stop, then start) and enable it at boot.
        for state in (False, True):
            self.services.state(
                name=oenginecons.Const.FENCE_KDUMP_LISTENER_SERVICE_NAME,
                state=state,
            )
        self.services.startup(
            name=oenginecons.Const.FENCE_KDUMP_LISTENER_SERVICE_NAME,
            state=True,
        )
| gpl-3.0 |
tuxerman/cdn-old | cdn/openstack/common/log.py | 1 | 19790 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack Foundation.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Openstack logging handler.
This module adds to logging functionality by adding the option to specify
a context object when calling the various log methods. If the context object
is not specified, default formatting is used. Additionally, an instance uuid
may be passed as part of the log message, which is intended to make it easier
for admins to find messages related to a specific instance.
It also allows setting of formatting information through conf.
"""
import inspect
import itertools
import logging
import logging.config
import logging.handlers
import os
import sys
import traceback
from oslo.config import cfg
from six import moves
from cdn.openstack.common.gettextutils import _ # noqa
from cdn.openstack.common import importutils
from cdn.openstack.common import jsonutils
from cdn.openstack.common import local
# Default strftime format for %(asctime)s in log records.
_DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
# CLI-only verbosity switches.
common_cli_opts = [
    cfg.BoolOpt('debug',
                short='d',
                default=False,
                help='Print debugging output (set logging level to '
                     'DEBUG instead of default WARNING level).'),
    cfg.BoolOpt('verbose',
                short='v',
                default=False,
                help='Print more verbose output (set logging level to '
                     'INFO instead of default WARNING level).'),
]
# CLI options controlling log destinations and formats.
logging_cli_opts = [
    cfg.StrOpt('log-config',
               metavar='PATH',
               help='If this option is specified, the logging configuration '
                    'file specified is used and overrides any other logging '
                    'options specified. Please see the Python logging module '
                    'documentation for details on logging configuration '
                    'files.'),
    cfg.StrOpt('log-format',
               default=None,
               metavar='FORMAT',
               help='DEPRECATED. '
                    'A logging.Formatter log message format string which may '
                    'use any of the available logging.LogRecord attributes. '
                    'This option is deprecated.  Please use '
                    'logging_context_format_string and '
                    'logging_default_format_string instead.'),
    cfg.StrOpt('log-date-format',
               default=_DEFAULT_LOG_DATE_FORMAT,
               metavar='DATE_FORMAT',
               help='Format string for %%(asctime)s in log records. '
                    'Default: %(default)s'),
    cfg.StrOpt('log-file',
               metavar='PATH',
               deprecated_name='logfile',
               help='(Optional) Name of log file to output to. '
                    'If no default is set, logging will go to stdout.'),
    cfg.StrOpt('log-dir',
               deprecated_name='logdir',
               help='(Optional) The base directory used for relative '
                    '--log-file paths'),
    cfg.BoolOpt('use-syslog',
                default=False,
                help='Use syslog for logging.'),
    cfg.StrOpt('syslog-log-facility',
               default='LOG_USER',
               help='syslog facility to receive log lines')
]
generic_log_opts = [
    cfg.BoolOpt('use_stderr',
                default=True,
                help='Log output to standard error')
]
# Config-file options for message formatting and per-logger levels.
log_opts = [
    cfg.StrOpt('logging_context_format_string',
               default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
                       '%(name)s [%(request_id)s %(user)s %(tenant)s] '
                       '%(instance)s%(message)s',
               help='format string to use for log messages with context'),
    cfg.StrOpt('logging_default_format_string',
               default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
                       '%(name)s [-] %(instance)s%(message)s',
               help='format string to use for log messages without context'),
    cfg.StrOpt('logging_debug_format_suffix',
               default='%(funcName)s %(pathname)s:%(lineno)d',
               help='data to append to log format when level is DEBUG'),
    cfg.StrOpt('logging_exception_prefix',
               default='%(asctime)s.%(msecs)03d %(process)d TRACE %(name)s '
                       '%(instance)s',
               help='prefix each line of exception output with this format'),
    cfg.ListOpt('default_log_levels',
                default=[
                    'amqplib=WARN',
                    'sqlalchemy=WARN',
                    'boto=WARN',
                    'suds=INFO',
                    'keystone=INFO',
                    'eventlet.wsgi.server=WARN'
                ],
                help='list of logger=LEVEL pairs'),
    cfg.BoolOpt('publish_errors',
                default=False,
                help='publish error events'),
    cfg.BoolOpt('fatal_deprecations',
                default=False,
                help='make deprecations fatal'),
    # NOTE(mikal): there are two options here because sometimes we are handed
    # a full instance (and could include more information), and other times we
    # are just handed a UUID for the instance.
    cfg.StrOpt('instance_format',
               default='[instance: %(uuid)s] ',
               help='If an instance is passed with the log message, format '
                    'it like this'),
    cfg.StrOpt('instance_uuid_format',
               default='[instance: %(uuid)s] ',
               help='If an instance UUID is passed with the log message, '
                    'format it like this'),
]
CONF = cfg.CONF
CONF.register_cli_opts(common_cli_opts)
CONF.register_cli_opts(logging_cli_opts)
CONF.register_opts(generic_log_opts)
CONF.register_opts(log_opts)
# our new audit level
# NOTE(jkoelker) Since we synthesized an audit level, make the logging
#                module aware of it so it acts like other levels.
logging.AUDIT = logging.INFO + 1
logging.addLevelName(logging.AUDIT, 'AUDIT')
try:
    NullHandler = logging.NullHandler
except AttributeError: # NOTE(jkoelker) NullHandler added in Python 2.7
    # Minimal fallback implementation for Python < 2.7.
    class NullHandler(logging.Handler):
        def handle(self, record):
            pass
        def emit(self, record):
            pass
        def createLock(self):
            self.lock = None
def _dictify_context(context):
if context is None:
return None
if not isinstance(context, dict) and getattr(context, 'to_dict', None):
context = context.to_dict()
return context
def _get_binary_name():
    # Name of the program whose logs these are: inspect.stack()[-1] is the
    # outermost (entry-point) frame, [1] is its source filename.
    return os.path.basename(inspect.stack()[-1][1])
def _get_log_file_path(binary=None):
    """Resolve the effective log file path from CONF.

    Returns None (implicitly) when neither log_file nor log_dir is set,
    which callers treat as "log to stdout/stderr".
    """
    logfile = CONF.log_file
    logdir = CONF.log_dir
    if logfile and not logdir:
        return logfile
    if logfile and logdir:
        return os.path.join(logdir, logfile)
    if logdir:
        # No explicit file: derive "<logdir>/<program>.log".
        binary = binary or _get_binary_name()
        return '%s.log' % (os.path.join(logdir, binary),)
class BaseLoggerAdapter(logging.LoggerAdapter):
    # Adds an audit() convenience method for the synthesized AUDIT level
    # (registered above as INFO + 1).
    def audit(self, msg, *args, **kwargs):
        self.log(logging.AUDIT, msg, *args, **kwargs)
class LazyAdapter(BaseLoggerAdapter):
    """Adapter that defers creating the underlying logger until first use."""
    def __init__(self, name='unknown', version='unknown'):
        # Note: deliberately does not call the parent __init__; the real
        # logger is built lazily by the `logger` property below.
        self._logger = None
        self.extra = {}
        self.name = name
        self.version = version
    @property
    def logger(self):
        if not self._logger:
            self._logger = getLogger(self.name, self.version)
        return self._logger
class ContextAdapter(BaseLoggerAdapter):
    """Adapter that injects request-context data into log records."""
    warn = logging.LoggerAdapter.warning
    def __init__(self, logger, project_name, version_string):
        self.logger = logger
        self.project = project_name
        self.version = version_string
    @property
    def handlers(self):
        return self.logger.handlers
    def deprecated(self, msg, *args, **kwargs):
        """Log a deprecation; raise DeprecatedConfig if configured fatal."""
        stdmsg = _("Deprecated: %s") % msg
        if CONF.fatal_deprecations:
            self.critical(stdmsg, *args, **kwargs)
            raise DeprecatedConfig(msg=stdmsg)
        else:
            self.warn(stdmsg, *args, **kwargs)
    def process(self, msg, kwargs):
        # Merge context data (explicit kwarg or thread-local store) plus
        # instance/project/version info into the record's `extra` dict.
        if 'extra' not in kwargs:
            kwargs['extra'] = {}
        extra = kwargs['extra']
        context = kwargs.pop('context', None)
        if not context:
            context = getattr(local.store, 'context', None)
        if context:
            extra.update(_dictify_context(context))
        instance = kwargs.pop('instance', None)
        instance_uuid = (extra.get('instance_uuid', None) or
                         kwargs.pop('instance_uuid', None))
        instance_extra = ''
        if instance:
            instance_extra = CONF.instance_format % instance
        elif instance_uuid:
            instance_extra = (CONF.instance_uuid_format
                              % {'uuid': instance_uuid})
        extra.update({'instance': instance_extra})
        extra.update({"project": self.project})
        extra.update({"version": self.version})
        # Keep a snapshot under 'extra' for formatters that want the whole
        # dict (e.g. JSONFormatter below).
        extra['extra'] = extra.copy()
        return msg, kwargs
class JSONFormatter(logging.Formatter):
    """Format log records as JSON documents."""

    def __init__(self, fmt=None, datefmt=None):
        # NOTE(jkoelker) we ignore the fmt argument, but its still there
        #                since logging.config.fileConfig passes it.
        self.datefmt = datefmt

    def formatException(self, ei, strip_newlines=True):
        """Return the traceback for exc_info `ei` as a list of lines.

        With `strip_newlines` (the default) each frame's text is split
        into its individual non-empty lines.
        """
        lines = traceback.format_exception(*ei)
        if strip_newlines:
            # Bug fix: this previously used itertools.ifilter, which does
            # not exist on Python 3; the built-in filter() behaves the
            # same here on both major versions.
            lines = [list(filter(None, line.rstrip().splitlines()))
                     for line in lines]
            lines = list(itertools.chain(*lines))
        return lines

    def format(self, record):
        """Serialize `record` (and any attached traceback) to JSON."""
        message = {'message': record.getMessage(),
                   'asctime': self.formatTime(record, self.datefmt),
                   'name': record.name,
                   'msg': record.msg,
                   'args': record.args,
                   'levelname': record.levelname,
                   'levelno': record.levelno,
                   'pathname': record.pathname,
                   'filename': record.filename,
                   'module': record.module,
                   'lineno': record.lineno,
                   'funcname': record.funcName,
                   'created': record.created,
                   'msecs': record.msecs,
                   'relative_created': record.relativeCreated,
                   'thread': record.thread,
                   'thread_name': record.threadName,
                   'process_name': record.processName,
                   'process': record.process,
                   'traceback': None}
        if hasattr(record, 'extra'):
            message['extra'] = record.extra
        if record.exc_info:
            message['traceback'] = self.formatException(record.exc_info)
        return jsonutils.dumps(message)
def _create_logging_excepthook(product_name):
    # Build a sys.excepthook replacement that logs uncaught exceptions at
    # CRITICAL level; the full traceback is attached only in verbose mode.
    def logging_excepthook(type, value, tb):
        extra = {}
        if CONF.verbose:
            extra['exc_info'] = (type, value, tb)
        getLogger(product_name).critical(str(value), **extra)
    return logging_excepthook
class LogConfigError(Exception):
    """Raised when a logging config file fails to load or parse."""
    message = _('Error loading logging config %(log_config)s: %(err_msg)s')
    def __init__(self, log_config, err_msg):
        self.log_config = log_config
        self.err_msg = err_msg
    def __str__(self):
        return self.message % dict(log_config=self.log_config,
                                   err_msg=self.err_msg)
def _load_log_config(log_config):
    """Configure logging from an ini-style file.

    :param log_config: path to a fileConfig-compatible logging config.
    :raises LogConfigError: if the file cannot be parsed.
    """
    try:
        logging.config.fileConfig(log_config)
    except moves.configparser.Error as exc:
        raise LogConfigError(log_config, str(exc))
def setup(product_name):
    """Initialize logging for *product_name* and install the excepthook.

    An explicit log config file (CONF.log_config) wins over the
    option-driven setup.
    """
    log_config = CONF.log_config
    if log_config:
        _load_log_config(log_config)
    else:
        _setup_logging_from_conf()
    sys.excepthook = _create_logging_excepthook(product_name)
def set_defaults(logging_context_format_string):
    """Override the default value of the context-aware log format option."""
    cfg.set_defaults(log_opts,
                     logging_context_format_string=
                     logging_context_format_string)
def _find_facility_from_conf():
    """Resolve CONF.syslog_log_facility to a SysLogHandler facility code.

    The option may be given either as a SysLogHandler attribute name
    (e.g. ``LOG_USER``) or as a ``facility_names`` key (e.g. ``user``).

    :returns: the integer facility code.
    :raises TypeError: if the configured value matches neither form.
    """
    facility_names = logging.handlers.SysLogHandler.facility_names
    facility = getattr(logging.handlers.SysLogHandler,
                       CONF.syslog_log_facility,
                       None)
    if facility is None and CONF.syslog_log_facility in facility_names:
        facility = facility_names.get(CONF.syslog_log_facility)
    if facility is None:
        # Build the candidate list for the error message.  list() (rather
        # than .keys()) keeps this working on Python 3, where dict views
        # have no extend(); the former duplicate 'LOG_AUTH' entry is dropped.
        valid_facilities = list(facility_names)
        consts = ['LOG_AUTH', 'LOG_AUTHPRIV', 'LOG_CRON', 'LOG_DAEMON',
                  'LOG_FTP', 'LOG_KERN', 'LOG_LPR', 'LOG_MAIL', 'LOG_NEWS',
                  'LOG_SYSLOG', 'LOG_USER', 'LOG_UUCP',
                  'LOG_LOCAL0', 'LOG_LOCAL1', 'LOG_LOCAL2', 'LOG_LOCAL3',
                  'LOG_LOCAL4', 'LOG_LOCAL5', 'LOG_LOCAL6', 'LOG_LOCAL7']
        valid_facilities.extend(consts)
        raise TypeError(_('syslog facility must be one of: %s') %
                        ', '.join("'%s'" % fac
                                  for fac in valid_facilities))

    return facility
def _setup_logging_from_conf():
    """Wire up root-logger handlers, formatters and levels from CONF."""
    log_root = getLogger(None).logger
    # Start from a clean slate: drop any pre-existing handlers.
    for handler in log_root.handlers:
        log_root.removeHandler(handler)

    if CONF.use_syslog:
        facility = _find_facility_from_conf()
        syslog = logging.handlers.SysLogHandler(address='/dev/log',
                                                facility=facility)
        log_root.addHandler(syslog)

    logpath = _get_log_file_path()
    if logpath:
        # WatchedFileHandler reopens the file if it is rotated underneath us.
        filelog = logging.handlers.WatchedFileHandler(logpath)
        log_root.addHandler(filelog)

    if CONF.use_stderr:
        streamlog = ColorHandler()
        log_root.addHandler(streamlog)

    elif not CONF.log_file:
        # pass sys.stdout as a positional argument
        # python2.6 calls the argument strm, in 2.7 it's stream
        streamlog = logging.StreamHandler(sys.stdout)
        log_root.addHandler(streamlog)

    if CONF.publish_errors:
        handler = importutils.import_object(
            "cdn.openstack.common.log_handler.PublishErrorsHandler",
            logging.ERROR)
        log_root.addHandler(handler)

    datefmt = CONF.log_date_format
    for handler in log_root.handlers:
        # NOTE(alaski): CONF.log_format overrides everything currently.  This
        # should be deprecated in favor of context aware formatting.
        if CONF.log_format:
            handler.setFormatter(logging.Formatter(fmt=CONF.log_format,
                                                   datefmt=datefmt))
            # NOTE(review): this deprecation notice is emitted once per
            # handler, not once per process.
            log_root.info('Deprecated: log_format is now deprecated and will '
                          'be removed in the next release')
        else:
            handler.setFormatter(ContextFormatter(datefmt=datefmt))

    # Root level: debug wins over verbose; default is WARNING.
    if CONF.debug:
        log_root.setLevel(logging.DEBUG)
    elif CONF.verbose:
        log_root.setLevel(logging.INFO)
    else:
        log_root.setLevel(logging.WARNING)

    # Apply per-module overrides of the form "module=LEVEL".
    for pair in CONF.default_log_levels:
        mod, _sep, level_name = pair.partition('=')
        level = logging.getLevelName(level_name)
        logger = logging.getLogger(mod)
        logger.setLevel(level)
_loggers = {}
def getLogger(name='unknown', version='unknown'):
    """Return the memoized ContextAdapter-wrapped logger for *name*."""
    try:
        return _loggers[name]
    except KeyError:
        adapter = ContextAdapter(logging.getLogger(name), name, version)
        _loggers[name] = adapter
        return adapter
def getLazyLogger(name='unknown', version='unknown'):
    """Returns lazy logger.

    Creates a pass-through logger that does not create the real logger
    until it is really needed and delegates all calls to the real logger
    once it is created.
    """
    # LazyAdapter defers the logging.getLogger() call until first use.
    return LazyAdapter(name, version)
class WritableLogger(object):
    """File-like adapter: every ``write()`` becomes a log call.

    Useful for redirecting code that expects a writable stream (e.g. a
    WSGI server's error stream) into the logging system.
    """

    def __init__(self, logger, level=logging.INFO):
        # Target logger and the level each written message is logged at.
        self.logger = logger
        self.level = level

    def write(self, msg):
        """Log *msg* at the configured level on the wrapped logger."""
        self.logger.log(self.level, msg)
class ContextFormatter(logging.Formatter):
    """A context.RequestContext aware formatter configured through flags.

    The flags used to set format strings are: logging_context_format_string
    and logging_default_format_string.  You can also specify
    logging_debug_format_suffix to append extra formatting if the log level is
    debug.

    For information about what variables are available for the formatter see:
    http://docs.python.org/library/logging.html#formatter
    """

    def format(self, record):
        """Uses contextstring if request_id is set, otherwise default."""
        # NOTE(sdague): default the fancier formating params
        # to an empty string so we don't throw an exception if
        # they get used
        for key in ('instance', 'color'):
            if key not in record.__dict__:
                record.__dict__[key] = ''
        # Pick the format string per-record; note this mutates self._fmt,
        # so a single formatter instance is not safe to share across
        # threads that format concurrently.
        if record.__dict__.get('request_id', None):
            self._fmt = CONF.logging_context_format_string
        else:
            self._fmt = CONF.logging_default_format_string

        if (record.levelno == logging.DEBUG and
                CONF.logging_debug_format_suffix):
            self._fmt += " " + CONF.logging_debug_format_suffix

        # Cache this on the record, Logger will respect our formated copy
        if record.exc_info:
            record.exc_text = self.formatException(record.exc_info, record)
        return logging.Formatter.format(self, record)

    def formatException(self, exc_info, record=None):
        """Format exception output with CONF.logging_exception_prefix."""
        # Without a record there is nothing to interpolate the prefix
        # against, so fall back to the stock formatting.
        if not record:
            return logging.Formatter.formatException(self, exc_info)
        stringbuffer = moves.StringIO()
        traceback.print_exception(exc_info[0], exc_info[1], exc_info[2],
                                  None, stringbuffer)
        lines = stringbuffer.getvalue().split('\n')
        stringbuffer.close()
        # Only compute asctime if the prefix actually references it.
        if CONF.logging_exception_prefix.find('%(asctime)') != -1:
            record.asctime = self.formatTime(record, self.datefmt)
        # Prefix every traceback line so multi-line exceptions stay
        # attributable in interleaved logs.
        formatted_lines = []
        for line in lines:
            pl = CONF.logging_exception_prefix % record.__dict__
            fl = '%s%s' % (pl, line)
            formatted_lines.append(fl)
        return '\n'.join(formatted_lines)
class ColorHandler(logging.StreamHandler):
    """Stream handler that exposes an ANSI color escape per log level.

    NOTE(review): ``logging.AUDIT`` is not a stdlib level; it is assumed
    to be registered elsewhere in this module -- confirm before reuse.
    """

    LEVEL_COLORS = {
        logging.DEBUG: '\033[00;32m',  # GREEN
        logging.INFO: '\033[00;36m',  # CYAN
        logging.AUDIT: '\033[01;36m',  # BOLD CYAN
        logging.WARN: '\033[01;33m',  # BOLD YELLOW
        logging.ERROR: '\033[01;31m',  # BOLD RED
        logging.CRITICAL: '\033[01;31m',  # BOLD RED
    }

    def format(self, record):
        # Make the escape code available to the format string as %(color)s.
        record.color = self.LEVEL_COLORS[record.levelno]
        return logging.StreamHandler.format(self, record)
class DeprecatedConfig(Exception):
    """Raised on a fatal use of a deprecated configuration option."""

    message = _("Fatal call to deprecated config: %(msg)s")

    def __init__(self, msg):
        # Anchor super() at this class (not Exception) so the MRO is walked
        # from the right place; the formatted message is unchanged.
        super(DeprecatedConfig, self).__init__(self.message % dict(msg=msg))
| apache-2.0 |
Code4SA/pmgbilltracker | pmg_scrapers/pmg/scrape_pdf.py | 1 | 2621 | import requests
import json
import os
import tempfile
import re
# NOTE(review): "url" is rebound repeatedly; only the last assignment is
# ever used (by the scrape_pdf(url) call at the bottom of the module).
url = "http://db3sqepoi5n3s.cloudfront.net/files/pmb5_2013.pdf"
url = "http://db3sqepoi5n3s.cloudfront.net/files/130416pmb3-2013.pdf"
url = "http://db3sqepoi5n3s.cloudfront.net/files/131031b18b-2013.pdf"
url = "http://db3sqepoi5n3s.cloudfront.net/files/130621b15-2013.pdf"
url = "http://db3sqepoi5n3s.cloudfront.net/files/131118b55-2013public_administration_management.pdf"

# Classify a bill by its constitutional section tag (74-77); the first
# pattern is stricter ("section NN bill"), the second is a fallback.
reg_section1 = re.compile(r"section\s+(?:74|75|76|77)\s+bill", re.IGNORECASE)
reg_section2 = re.compile(r"section\s+(?:74|75|76|77)\b", re.IGNORECASE)

# "Introduced by" credit patterns, tried in order by extract_introduced_by.
reg_introduced_by1 = re.compile(r"""
# Search for something that looks like (Minister of Finance)
\(
(
Minister
[^)]+
)
\)
""", re.VERBOSE | re.IGNORECASE)
reg_introduced_by2 = re.compile(r"""
# Search for something that looks like (Ms J Jacobson MP)
\(
(
[^)]+
MP)
\)
""", re.VERBOSE | re.IGNORECASE)
reg_introduced_by3 = re.compile(r"""
# Search for something that looks like (Select committee on Cooperative ....)
\(
([^)]*Committee[^)]*)
\)
""", re.VERBOSE | re.IGNORECASE)
def get_pdf(url, chunk_size=1000):
    """Download *url* into a temporary ``.pdf`` file and return its path.

    The caller is responsible for deleting the file.

    :param url: HTTP(S) location of the PDF to fetch.
    :param chunk_size: number of bytes per streamed chunk.
    """
    # mkstemp replaces the previous NamedTemporaryFile call, which used a
    # bogus "rw" mode and leaked its file handle (it was shadowed by the
    # second open() and never closed).
    fd, path = tempfile.mkstemp(prefix="pmg_", suffix=".pdf")
    os.close(fd)
    resp = requests.get(url, stream=True)
    with open(path, "wb") as out:
        for chunk in resp.iter_content(chunk_size):
            out.write(chunk)
    return path
def convert_to_text(path):
    """Run ``pdftotext`` on *path* and return the expected ``.txt`` path.

    The return value only reflects pdftotext's default output naming; the
    conversion itself may still have failed (as with the old os.system()
    call, the exit status is not checked).
    """
    import subprocess
    try:
        # Argument-list invocation avoids shell interpretation of the path
        # (spaces, metacharacters) that the old os.system() call allowed.
        subprocess.call(["pdftotext", path])
    except OSError:
        # pdftotext not installed: mirror os.system()'s silent failure.
        pass
    return path.replace(".pdf", ".txt")
def extract_section(text):
    """Return the bill's constitutional section (74-77) found in *text*.

    The stricter "section NN bill" pattern is tried first, then the
    looser "section NN" one.  Returns None when neither matches or the
    matched text contains no known section number.
    """
    match = reg_section1.search(text) or reg_section2.search(text)
    if match is None:
        return None
    matched = match.group()
    for number in (74, 75, 76, 77):
        if str(number) in matched:
            return number
    return None
def extract_introduced_by(text):
    """Return the "(Introduced by ...)" credit found in *text*.

    The three regexes are tried from most to least specific (minister,
    MP, committee).
    """
    match = reg_introduced_by1.search(text)
    if not match:
        match = reg_introduced_by2.search(text)
    if not match:
        match = reg_introduced_by3.search(text)
    if not match:
        # NOTE(review): "Boom!!" looks like a debugging placeholder for the
        # no-match case -- consider returning None instead.
        return "Boom!!"
    return match.groups()[0]
def extract_introduction_location(text):
    """Placeholder: the introduction location is not parsed yet."""
    return "NA"
def scrape_pdf(url):
    # Download, convert to text, and extract bill metadata from the first
    # 2000 characters (the cover page).  Python 2 only: uses the print
    # statement.
    pdf_path = get_pdf(url)
    text_path = convert_to_text(pdf_path)
    text = open(text_path).read()[0:2000]
    js = {
        "section" : extract_section(text),
        "introduced_by" : extract_introduced_by(text),
        "introduced_at" : extract_introduction_location(text)
    }
    print json.dumps(js, indent=4)
scrape_pdf(url)
| apache-2.0 |
SM-G920P/Vindicator-S6-Sprint | tools/perf/scripts/python/futex-contention.py | 11261 | 1486 | # futex contention
# (c) 2010, Arnaldo Carvalho de Melo <acme@redhat.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Translation of:
#
# http://sourceware.org/systemtap/wiki/WSFutexContention
#
# to perf python scripting.
#
# Measures futex contention
import os, sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + '/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from Util import *
# Per-thread scratch state, keyed by tid, filled on sys_enter_futex.
process_names = {}
thread_thislock = {}   # tid -> futex uaddr the thread is waiting on
thread_blocktime = {}  # tid -> timestamp (ns) when the wait started
lock_waits = {} # long-lived stats on (tid,lock) blockage elapsed time
# NOTE(review): process_names is assigned twice; the second binding wins.
process_names = {} # long-lived pid-to-execname mapping
def syscalls__sys_enter_futex(event, ctxt, cpu, s, ns, tid, comm,
                              nr, uaddr, op, val, utime, uaddr2, val3):
    """Record the start of a FUTEX_WAIT so its exit can be timed."""
    if (op & FUTEX_CMD_MASK) != FUTEX_WAIT:
        # WAKE (and other) operations are not blocking waits; ignore them.
        return
    process_names[tid] = comm
    thread_thislock[tid] = uaddr
    thread_blocktime[tid] = nsecs(s, ns)
def syscalls__sys_exit_futex(event, ctxt, cpu, s, ns, tid, comm,
        nr, ret):
    # Only threads we saw enter FUTEX_WAIT have a recorded block time
    # (has_key is Python 2 only, matching the perf script environment).
    if thread_blocktime.has_key(tid):
        elapsed = nsecs(s, ns) - thread_blocktime[tid]
        add_stats(lock_waits, (tid, thread_thislock[tid]), elapsed)
        del thread_blocktime[tid]
        del thread_thislock[tid]
def trace_begin():
    # Called once by perf before event processing starts (Python 2 print).
    print "Press control+C to stop and show the summary"
def trace_end():
    # Called once by perf at shutdown: dump per-(thread, lock) contention.
    for (tid, lock) in lock_waits:
        min, max, avg, count = lock_waits[tid, lock]
        print "%s[%d] lock %x contended %d times, %d avg ns" % \
              (process_names[tid], tid, lock, count, avg)
| gpl-2.0 |
fo2rist/infra-strike | backend/venv/Lib/encodings/raw_unicode_escape.py | 852 | 1208 | """ Python 'raw-unicode-escape' Codec
Written by Marc-Andre Lemburg (mal@lemburg.com).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
"""
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless codec delegating to the C raw-unicode-escape functions."""

    # Note: Binding these as C functions will result in the class not
    # converting them to methods. This is intended.
    encode = codecs.raw_unicode_escape_encode
    decode = codecs.raw_unicode_escape_decode
class IncrementalEncoder(codecs.IncrementalEncoder):
    """Incremental encoder for raw-unicode-escape (stateless per chunk)."""

    def encode(self, input, final=False):
        encoded, _consumed = codecs.raw_unicode_escape_encode(input,
                                                              self.errors)
        return encoded
class IncrementalDecoder(codecs.IncrementalDecoder):
    """Incremental decoder for raw-unicode-escape (stateless per chunk)."""

    def decode(self, input, final=False):
        decoded, _consumed = codecs.raw_unicode_escape_decode(input,
                                                              self.errors)
        return decoded
class StreamWriter(Codec,codecs.StreamWriter):
    """Stream writer: encode comes from Codec, buffering from StreamWriter."""
    pass
class StreamReader(Codec,codecs.StreamReader):
    """Stream reader: decode comes from Codec, buffering from StreamReader."""
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo the codecs registry uses for this encoding."""
    return codecs.CodecInfo(
        name='raw-unicode-escape',
        encode=Codec.encode,
        decode=Codec.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamwriter=StreamWriter,
        streamreader=StreamReader,
    )
| lgpl-3.0 |
sgerhart/ansible | test/units/module_utils/urls/test_fetch_url.py | 28 | 7987 | # -*- coding: utf-8 -*-
# (c) 2018 Matt Martz <matt@sivel.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import socket
from ansible.module_utils.six import StringIO
from ansible.module_utils.six.moves.http_cookiejar import Cookie
from ansible.module_utils.six.moves.http_client import HTTPMessage
from ansible.module_utils.urls import fetch_url, urllib_error, ConnectionError, NoSSLError, httplib
import pytest
from mock import MagicMock
class AnsibleModuleExit(Exception):
    """Control-flow exception capturing exit_json/fail_json arguments."""

    def __init__(self, *args, **kwargs):
        # Expose both positional and keyword arguments for assertions.
        self.args, self.kwargs = args, kwargs
class ExitJson(AnsibleModuleExit):
    """Raised by FakeAnsibleModule.exit_json (module success path)."""
    pass
class FailJson(AnsibleModuleExit):
    """Raised by FakeAnsibleModule.fail_json (module failure path)."""
    pass
@pytest.fixture
def open_url_mock(mocker):
    # Patch open_url inside the urls module so fetch_url never hits the net.
    return mocker.patch('ansible.module_utils.urls.open_url')
@pytest.fixture
def fake_ansible_module():
    # Fresh fake module per test so params mutations don't leak.
    return FakeAnsibleModule()
class FakeAnsibleModule:
    """Minimal AnsibleModule stand-in whose exit paths raise exceptions."""

    def __init__(self):
        self.params = {}
        self.tmpdir = None

    def exit_json(self, *args, **kwargs):
        # Simulate module success by raising instead of calling sys.exit.
        raise ExitJson(*args, **kwargs)

    def fail_json(self, *args, **kwargs):
        # Simulate module failure by raising instead of calling sys.exit.
        raise FailJson(*args, **kwargs)
def test_fetch_url_no_urlparse(mocker, fake_ansible_module):
    # Without urlparse available, fetch_url must fail the module.
    mocker.patch('ansible.module_utils.urls.HAS_URLPARSE', new=False)

    with pytest.raises(FailJson):
        fetch_url(fake_ansible_module, 'http://ansible.com/')
def test_fetch_url(open_url_mock, fake_ansible_module):
    # Defaults: fetch_url should forward its documented default kwargs.
    r, info = fetch_url(fake_ansible_module, 'http://ansible.com/')

    dummy, kwargs = open_url_mock.call_args

    open_url_mock.assert_called_once_with('http://ansible.com/', client_cert=None, client_key=None, cookies=kwargs['cookies'], data=None,
                                          follow_redirects='urllib2', force=False, force_basic_auth='', headers=None,
                                          http_agent='ansible-httpget', last_mod_time=None, method=None, timeout=10, url_password='', url_username='',
                                          use_proxy=True, validate_certs=True)
def test_fetch_url_params(open_url_mock, fake_ansible_module):
    # Module params must override the open_url defaults one-for-one.
    fake_ansible_module.params = {
        'validate_certs': False,
        'url_username': 'user',
        'url_password': 'passwd',
        'http_agent': 'ansible-test',
        'force_basic_auth': True,
        'follow_redirects': 'all',
        'client_cert': 'client.pem',
        'client_key': 'client.key',
    }

    r, info = fetch_url(fake_ansible_module, 'http://ansible.com/')

    dummy, kwargs = open_url_mock.call_args

    open_url_mock.assert_called_once_with('http://ansible.com/', client_cert='client.pem', client_key='client.key', cookies=kwargs['cookies'], data=None,
                                          follow_redirects='all', force=False, force_basic_auth=True, headers=None,
                                          http_agent='ansible-test', last_mod_time=None, method=None, timeout=10, url_password='passwd', url_username='user',
                                          use_proxy=True, validate_certs=False)
def test_fetch_url_cookies(mocker, fake_ansible_module):
    # Fake open_url that plants two cookies in both the cookie jar and the
    # response headers, so all three cookie views of info can be checked.
    def make_cookies(*args, **kwargs):
        cookies = kwargs['cookies']
        r = MagicMock()
        try:
            r.headers = HTTPMessage()
            add_header = r.headers.add_header
        except TypeError:
            # PY2
            r.headers = HTTPMessage(StringIO())
            add_header = r.headers.addheader
        r.info.return_value = r.headers
        for name, value in (('Foo', 'bar'), ('Baz', 'qux')):
            cookie = Cookie(
                version=0,
                name=name,
                value=value,
                port=None,
                port_specified=False,
                domain="ansible.com",
                domain_specified=True,
                domain_initial_dot=False,
                path="/",
                path_specified=True,
                secure=False,
                expires=None,
                discard=False,
                comment=None,
                comment_url=None,
                rest=None
            )
            cookies.set_cookie(cookie)
            add_header('Set-Cookie', '%s=%s' % (name, value))

        return r

    mocker = mocker.patch('ansible.module_utils.urls.open_url', new=make_cookies)

    r, info = fetch_url(fake_ansible_module, 'http://ansible.com/')

    assert info['cookies'] == {'Baz': 'qux', 'Foo': 'bar'}
    # Python sorts cookies in order of most specific (ie. longest) path first
    # items with the same path are reversed from response order
    assert info['cookies_string'] == 'Baz=qux; Foo=bar'
    # The key here has a `-` as opposed to what we see in the `uri` module that converts to `_`
    # Note: this is response order, which differs from cookies_string
    assert info['set-cookie'] == 'Foo=bar, Baz=qux'
def test_fetch_url_nossl(open_url_mock, fake_ansible_module, mocker):
    # The python-ssl install hint should only be suggested on Red Hat.
    mocker.patch('ansible.module_utils.urls.get_distribution', return_value='notredhat')

    open_url_mock.side_effect = NoSSLError
    with pytest.raises(FailJson) as excinfo:
        fetch_url(fake_ansible_module, 'http://ansible.com/')

    assert 'python-ssl' not in excinfo.value.kwargs['msg']

    mocker.patch('ansible.module_utils.urls.get_distribution', return_value='redhat')

    open_url_mock.side_effect = NoSSLError
    with pytest.raises(FailJson) as excinfo:
        fetch_url(fake_ansible_module, 'http://ansible.com/')

    assert 'python-ssl' in excinfo.value.kwargs['msg']
def test_fetch_url_connectionerror(open_url_mock, fake_ansible_module):
    # Both ConnectionError and ValueError surface their message verbatim.
    open_url_mock.side_effect = ConnectionError('TESTS')
    with pytest.raises(FailJson) as excinfo:
        fetch_url(fake_ansible_module, 'http://ansible.com/')

    assert excinfo.value.kwargs['msg'] == 'TESTS'

    open_url_mock.side_effect = ValueError('TESTS')
    with pytest.raises(FailJson) as excinfo:
        fetch_url(fake_ansible_module, 'http://ansible.com/')

    assert excinfo.value.kwargs['msg'] == 'TESTS'
def test_fetch_url_httperror(open_url_mock, fake_ansible_module):
    # HTTP errors are returned (not raised): status, body and headers land
    # in the info dict.
    open_url_mock.side_effect = urllib_error.HTTPError(
        'http://ansible.com/',
        500,
        'Internal Server Error',
        {'Content-Type': 'application/json'},
        StringIO('TESTS')
    )

    r, info = fetch_url(fake_ansible_module, 'http://ansible.com/')

    assert info == {'msg': 'HTTP Error 500: Internal Server Error', 'body': 'TESTS',
                    'status': 500, 'url': 'http://ansible.com/', 'content-type': 'application/json'}
def test_fetch_url_urlerror(open_url_mock, fake_ansible_module):
    # URL-level failures map to status -1 with a descriptive msg.
    open_url_mock.side_effect = urllib_error.URLError('TESTS')
    r, info = fetch_url(fake_ansible_module, 'http://ansible.com/')
    assert info == {'msg': 'Request failed: <urlopen error TESTS>', 'status': -1, 'url': 'http://ansible.com/'}
def test_fetch_url_socketerror(open_url_mock, fake_ansible_module):
    # Socket-level failures map to status -1 with a "Connection failure" msg.
    open_url_mock.side_effect = socket.error('TESTS')
    r, info = fetch_url(fake_ansible_module, 'http://ansible.com/')
    assert info == {'msg': 'Connection failure: TESTS', 'status': -1, 'url': 'http://ansible.com/'}
def test_fetch_url_exception(open_url_mock, fake_ansible_module):
    # Unknown exceptions: status -1, plus the traceback under 'exception'.
    open_url_mock.side_effect = Exception('TESTS')
    r, info = fetch_url(fake_ansible_module, 'http://ansible.com/')
    exception = info.pop('exception')
    assert info == {'msg': 'An unknown error occurred: TESTS', 'status': -1, 'url': 'http://ansible.com/'}
    assert "Exception: TESTS" in exception
def test_fetch_url_badstatusline(open_url_mock, fake_ansible_module):
    # A server closing the connection early maps to status -1 as well.
    open_url_mock.side_effect = httplib.BadStatusLine('TESTS')
    r, info = fetch_url(fake_ansible_module, 'http://ansible.com/')
    assert info == {'msg': 'Connection failure: connection was closed before a valid response was received: TESTS', 'status': -1, 'url': 'http://ansible.com/'}
| mit |
johankaito/fufuka | microblog/old-flask/lib/python2.7/site-packages/whoosh/query/nested.py | 30 | 15443 | # Copyright 2012 Matt Chaput. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY MATT CHAPUT ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
# EVENT SHALL MATT CHAPUT OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are
# those of the authors and should not be interpreted as representing official
# policies, either expressed or implied, of Matt Chaput.
from whoosh import matching
from whoosh.compat import text_type, u, xrange
from whoosh.query import qcore
from whoosh.query.wrappers import WrappingQuery
class NestedParent(WrappingQuery):
    """A query that allows you to search for "nested" documents, where you can
    index (possibly multiple levels of) "parent" and "child" documents using
    the :meth:`~whoosh.writing.IndexWriter.group` and/or
    :meth:`~whoosh.writing.IndexWriter.start_group` methods of a
    :class:`whoosh.writing.IndexWriter` to indicate that hierarchically related
    documents should be kept together::

        schema = fields.Schema(type=fields.ID, text=fields.TEXT(stored=True))

        with ix.writer() as w:
            # Say we're indexing chapters (type=chap) and each chapter has a
            # number of paragraphs (type=p)
            with w.group():
                w.add_document(type="chap", text="Chapter 1")
                w.add_document(type="p", text="Able baker")
                w.add_document(type="p", text="Bright morning")
            with w.group():
                w.add_document(type="chap", text="Chapter 2")
                w.add_document(type="p", text="Car trip")
                w.add_document(type="p", text="Dog eared")
                w.add_document(type="p", text="Every day")
            with w.group():
                w.add_document(type="chap", text="Chapter 3")
                w.add_document(type="p", text="Fine day")

    The ``NestedParent`` query wraps two sub-queries: the "parent query"
    matches a class of "parent documents". The "sub query" matches nested
    documents you want to find. For each "sub document" the "sub query" finds,
    this query acts as if it found the corresponding "parent document".

    >>> with ix.searcher() as s:
    ...     r = s.search(query.Term("text", "day"))
    ...     for hit in r:
    ...         print(hit["text"])
    ...
    Chapter 2
    Chapter 3
    """

    def __init__(self, parents, subq, per_parent_limit=None, score_fn=sum):
        """
        :param parents: a query, DocIdSet object, or Results object
            representing the documents you want to use as the "parent"
            documents. Where the sub-query matches, the corresponding document
            in these results will be returned as the match.
        :param subq: a query matching the information you want to find.
        :param per_parent_limit: a maximum number of "sub documents" to search
            per parent. The default is None, meaning no limit.
        :param score_fn: a function to use to combine the scores of matching
            sub-documents to calculate the score returned for the parent
            document. The default is ``sum``, that is, add up the scores of the
            sub-documents.
        """
        self.parents = parents
        self.child = subq
        self.per_parent_limit = per_parent_limit
        self.score_fn = score_fn

    def normalize(self):
        # Normalize both sub-queries; if either collapses to NullQuery the
        # nested query can never match.
        p = self.parents
        if isinstance(p, qcore.Query):
            p = p.normalize()
        q = self.child.normalize()
        if p is qcore.NullQuery or q is qcore.NullQuery:
            return qcore.NullQuery
        return self.__class__(p, q)

    def requires(self):
        # Only the child query constrains what must be present.
        return self.child.requires()

    def matcher(self, searcher, context=None):
        # "bits" is the set of parent doc IDs; empty set or an inactive
        # child matcher means nothing can match.
        bits = searcher._filter_to_comb(self.parents)
        if not bits:
            return matching.NullMatcher
        m = self.child.matcher(searcher, context)
        if not m.is_active():
            return matching.NullMatcher
        return self.NestedParentMatcher(bits, m, self.per_parent_limit,
                                        searcher.doc_count_all())

    def deletion_docs(self, searcher):
        # Yield the parent and all docs up to the next parent for each
        # matching child, so a delete removes the whole group.
        bits = searcher._filter_to_comb(self.parents)
        if not bits:
            return
        m = self.child.matcher(searcher, searcher.boolean_context())
        maxdoc = searcher.doc_count_all()
        while m.is_active():
            docnum = m.id()
            parentdoc = bits.before(docnum + 1)
            nextparent = bits.after(docnum) or maxdoc
            for i in xrange(parentdoc, nextparent):
                yield i
            m.skip_to(nextparent)

    class NestedParentMatcher(matching.Matcher):
        """Matcher that translates child matches into their parent docs."""

        def __init__(self, comb, child, per_parent_limit, maxdoc):
            # comb: DocIdSet of parent doc IDs; child: matcher over children.
            self.comb = comb
            self.child = child
            self.per_parent_limit = per_parent_limit
            self.maxdoc = maxdoc
            self._nextdoc = None
            if self.child.is_active():
                self._gather()

        def is_active(self):
            return self._nextdoc is not None

        def supports_block_quality(self):
            return False

        def _gather(self):
            # This is where the magic happens ;)
            child = self.child
            pplimit = self.per_parent_limit
            # The next document returned by this matcher is the parent of the
            # child's current document. We don't have to worry about whether
            # the parent is deleted, because the query that gave us the parents
            # wouldn't return deleted documents.
            self._nextdoc = self.comb.before(child.id() + 1)
            # The next parent after the child matcher's current document
            nextparent = self.comb.after(child.id()) or self.maxdoc
            # Sum the scores of all matching documents under the parent
            count = 1
            score = 0
            while child.is_active() and child.id() < nextparent:
                if pplimit and count > pplimit:
                    child.skip_to(nextparent)
                    break
                score += child.score()
                child.next()
                count += 1
            self._nextscore = score

        def id(self):
            return self._nextdoc

        def score(self):
            return self._nextscore

        def reset(self):
            self.child.reset()
            self._gather()

        def next(self):
            if self.child.is_active():
                self._gather()
            else:
                if self._nextdoc is None:
                    raise matching.ReadTooFar
                else:
                    self._nextdoc = None

        def skip_to(self, id):
            self.child.skip_to(id)
            self._gather()

        def value(self):
            raise NotImplementedError(self.__class__)

        def spans(self):
            # Spans of child hits are not meaningful on the parent document.
            return []
class NestedChildren(WrappingQuery):
    """This is the reverse of a :class:`NestedParent` query: instead of taking
    a query that matches children but returns the parent, this query matches
    parents but returns the children.

    This is useful, for example, to search for an album title and return the
    songs in the album::

        schema = fields.Schema(type=fields.ID(stored=True),
                               album_name=fields.TEXT(stored=True),
                               track_num=fields.NUMERIC(stored=True),
                               track_name=fields.TEXT(stored=True),
                               lyrics=fields.TEXT)
        ix = RamStorage().create_index(schema)

        # Indexing
        with ix.writer() as w:
            # For each album, index a "group" of a parent "album" document and
            # multiple child "track" documents.
            with w.group():
                w.add_document(type="album",
                               artist="The Cure", album_name="Disintegration")
                w.add_document(type="track", track_num=1,
                               track_name="Plainsong")
                w.add_document(type="track", track_num=2,
                               track_name="Pictures of You")
                # ...
            # ...

        # Find songs where the song name has "heaven" in the title and the
        # album the song is on has "hell" in the title
        qp = QueryParser("lyrics", ix.schema)
        with ix.searcher() as s:
            # A query that matches all parents
            all_albums = qp.parse("type:album")

            # A query that matches the parents we want
            albums_with_hell = qp.parse("album_name:hell")

            # A query that matches the desired albums but returns the tracks
            songs_on_hell_albums = NestedChildren(all_albums, albums_with_hell)

            # A query that matches tracks with heaven in the title
            songs_with_heaven = qp.parse("track_name:heaven")

            # A query that finds tracks with heaven in the title on albums
            # with hell in the title
            q = query.And([songs_on_hell_albums, songs_with_heaven])
    """

    def __init__(self, parents, subq, boost=1.0):
        # parents: query/set matching ALL parent docs; subq: query matching
        # the parents whose children should be returned.
        self.parents = parents
        self.child = subq
        self.boost = boost

    def matcher(self, searcher, context=None):
        bits = searcher._filter_to_comb(self.parents)
        if not bits:
            return matching.NullMatcher
        m = self.child.matcher(searcher, context)
        if not m.is_active():
            return matching.NullMatcher
        return self.NestedChildMatcher(bits, m, searcher.doc_count_all(),
                                       searcher.reader().is_deleted,
                                       boost=self.boost)

    class NestedChildMatcher(matching.WrappingMatcher):
        """Matcher yielding the (undeleted) children of each wanted parent."""

        def __init__(self, parent_comb, wanted_parent_matcher, limit,
                     is_deleted, boost=1.0):
            # parent_comb: DocIdSet of ALL parent docs (wanted or not);
            # wanted_parent_matcher: matcher over the parents whose children
            # should be returned; limit: last doc ID + 1;
            # is_deleted: callable telling whether a doc ID is deleted.
            self.parent_comb = parent_comb
            self.child = wanted_parent_matcher
            self.limit = limit
            self.is_deleted = is_deleted
            self.boost = boost
            self._nextchild = -1
            self._nextparent = -1
            self._find_next_children()

        def __repr__(self):
            return "%s(%r, %r)" % (self.__class__.__name__,
                                   self.parent_comb,
                                   self.child)

        def reset(self):
            self.child.reset()
            self._reset()

        def _reset(self):
            self._nextchild = -1
            self._nextparent = -1
            self._find_next_children()

        def is_active(self):
            return self._nextchild < self._nextparent

        def replace(self, minquality=0):
            return self

        def _find_next_children(self):
            # "comb" contains the doc IDs of all parent documents
            comb = self.parent_comb
            # "m" is the matcher for "wanted" parents
            m = self.child
            # Last doc ID + 1
            limit = self.limit
            # A function that returns True if a doc ID is deleted
            is_deleted = self.is_deleted
            nextchild = self._nextchild
            nextparent = self._nextparent
            while m.is_active():
                # Move the "child id" to the document after the current match
                nextchild = m.id() + 1
                # Move the parent matcher to the next match
                m.next()
                # Find the next parent document (matching or not) after this
                nextparent = comb.after(nextchild)
                if nextparent is None:
                    nextparent = limit
                # Skip any deleted child documents
                while is_deleted(nextchild):
                    nextchild += 1
                # If skipping deleted documents put us to or past the next
                # parent doc, go again
                if nextchild >= nextparent:
                    continue
                else:
                    # Otherwise, we're done
                    break
            self._nextchild = nextchild
            self._nextparent = nextparent

        def id(self):
            return self._nextchild

        def all_ids(self):
            while self.is_active():
                yield self.id()
                self.next()

        def next(self):
            is_deleted = self.is_deleted
            limit = self.limit
            nextparent = self._nextparent
            # Go to the next document
            nextchild = self._nextchild
            nextchild += 1
            # Skip over any deleted child documents
            while nextchild < nextparent and is_deleted(nextchild):
                nextchild += 1
            self._nextchild = nextchild
            # If we're at or past the next parent doc, go to the next set of
            # children
            if nextchild >= limit:
                return
            elif nextchild >= nextparent:
                self._find_next_children()

        def skip_to(self, docid):
            comb = self.parent_comb
            wanted = self.child
            # self._nextchild is the "current" matching child ID
            if docid <= self._nextchild:
                return
            # self._nextparent is the next parent ID (matching or not)
            if docid < self._nextparent:
                # Just iterate
                while self.is_active() and self.id() < docid:
                    self.next()
            elif wanted.is_active():
                # Find the parent before the target ID
                pid = comb.before(docid)
                # Skip the parent matcher to that ID
                wanted.skip_to(pid)
                # If that made the matcher inactive, then we're done
                if not wanted.is_active():
                    self._nextchild = self._nextparent = self.limit
                else:
                    # Reestablish for the next child after the next matching
                    # parent
                    self._find_next_children()
            else:
                self._nextchild = self._nextparent = self.limit

        def value(self):
            raise NotImplementedError(self.__class__)

        def score(self):
            # Children carry no score of their own; return the query boost.
            return self.boost

        def spans(self):
            return []
Immortalin/python-for-android | python3-alpha/python3-src/Lib/test/testcodec.py | 203 | 1046 | """ Test Codecs (used by test_charmapcodec)
Written by Marc-Andre Lemburg (mal@lemburg.com).
(c) Copyright 2000 Guido van Rossum.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Charmap test codec driven by the module-level encoding/decoding maps."""

    def encode(self,input,errors='strict'):
        return codecs.charmap_encode(input,errors,encoding_map)

    def decode(self,input,errors='strict'):
        return codecs.charmap_decode(input,errors,decoding_map)
class StreamWriter(Codec,codecs.StreamWriter):
    """Stream writer: encode comes from Codec, buffering from StreamWriter."""
    pass
class StreamReader(Codec,codecs.StreamReader):
    """Stream reader: decode comes from Codec, buffering from StreamReader."""
    pass
### encodings module API
def getregentry():
    """Return the legacy 4-tuple registry entry for this test codec."""
    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
### Decoding Map
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({
0x78: "abc", # 1-n decoding mapping
b"abc": 0x0078,# 1-n encoding mapping
0x01: None, # decoding mapping to <undefined>
0x79: "", # decoding mapping to <remove character>
})
### Encoding Map
encoding_map = {}
for k,v in decoding_map.items():
encoding_map[v] = k
| apache-2.0 |
40223139/39g7test | static/Brython3.1.1-20150328-091302/Lib/threading.py | 730 | 45641 | """Thread module emulating a subset of Java's threading model."""
import sys as _sys
import _thread
from time import sleep as _sleep
try:
from time import monotonic as _time
except ImportError:
from time import time as _time
from traceback import format_exc as _format_exc
from _weakrefset import WeakSet
# Note regarding PEP 8 compliant names
# This threading model was originally inspired by Java, and inherited
# the convention of camelCase function and method names from that
# language. Those original names are not in any imminent danger of
# being deprecated (even for Py3k),so this module provides them as an
# alias for the PEP 8 compliant names
# Note that using the new PEP 8 compliant names facilitates substitution
# with the multiprocessing module, which doesn't provide the old
# Java inspired names.
__all__ = ['active_count', 'Condition', 'current_thread', 'enumerate', 'Event',
'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', 'Thread', 'Barrier',
'Timer', 'ThreadError', 'setprofile', 'settrace', 'local', 'stack_size']
# Rename some stuff so "from threading import *" is safe
_start_new_thread = _thread.start_new_thread
_allocate_lock = _thread.allocate_lock
get_ident = _thread.get_ident
ThreadError = _thread.error
try:
_CRLock = _thread.RLock
except AttributeError:
_CRLock = None
TIMEOUT_MAX = _thread.TIMEOUT_MAX
del _thread
# Support for profile and trace hooks
_profile_hook = None
_trace_hook = None
def setprofile(func):
    """Set a profile function for all threads started from the threading module.

    The func will be passed to sys.setprofile() for each thread, before its
    run() method is called.
    """
    # Stored module-wide; _bootstrap_inner installs it in each new thread.
    global _profile_hook
    _profile_hook = func
def settrace(func):
    """Set a trace function for all threads started from the threading module.

    The func will be passed to sys.settrace() for each thread, before its run()
    method is called.
    """
    # Stored module-wide; _bootstrap_inner installs it in each new thread.
    global _trace_hook
    _trace_hook = func
# Synchronization classes
Lock = _allocate_lock
def RLock(*args, **kwargs):
    """Factory function that returns a new reentrant lock.

    A reentrant lock must be released by the thread that acquired it. Once a
    thread has acquired a reentrant lock, the same thread may acquire it again
    without blocking; the thread must release it once for each time it has
    acquired it.
    """
    # Prefer the C implementation when _thread provides one; otherwise fall
    # back to the pure-Python _RLock.
    lock_class = _PyRLock if _CRLock is None else _CRLock
    return lock_class(*args, **kwargs)
class _RLock:
    """This class implements reentrant lock objects.

    A reentrant lock must be released by the thread that acquired it. Once a
    thread has acquired a reentrant lock, the same thread may acquire it
    again without blocking; the thread must release it once for each time it
    has acquired it.
    """

    def __init__(self):
        # Primitive lock plus owner/recursion bookkeeping that provides the
        # reentrant behavior on top of it.
        self._block = _allocate_lock()
        self._owner = None   # ident of the owning thread, or None if unlocked
        self._count = 0      # recursion depth; 0 means unlocked

    def __repr__(self):
        owner = self._owner
        try:
            # Show the owner's thread name when it is a registered Thread.
            owner = _active[owner].name
        except KeyError:
            pass
        return "<%s owner=%r count=%d>" % (
            self.__class__.__name__, owner, self._count)

    def acquire(self, blocking=True, timeout=-1):
        """Acquire a lock, blocking or non-blocking.

        When invoked without arguments: if this thread already owns the lock,
        increment the recursion level by one, and return immediately. Otherwise,
        if another thread owns the lock, block until the lock is unlocked. Once
        the lock is unlocked (not owned by any thread), then grab ownership, set
        the recursion level to one, and return. If more than one thread is
        blocked waiting until the lock is unlocked, only one at a time will be
        able to grab ownership of the lock. There is no return value in this
        case.

        When invoked with the blocking argument set to true, do the same thing
        as when called without arguments, and return true.

        When invoked with the blocking argument set to false, do not block. If a
        call without an argument would block, return false immediately;
        otherwise, do the same thing as when called without arguments, and
        return true.

        When invoked with the floating-point timeout argument set to a positive
        value, block for at most the number of seconds specified by timeout
        and as long as the lock cannot be acquired. Return true if the lock has
        been acquired, false if the timeout has elapsed.
        """
        me = get_ident()
        if self._owner == me:
            # Reentrant case: same thread, just bump the recursion level.
            self._count = self._count + 1
            return 1
        rc = self._block.acquire(blocking, timeout)
        if rc:
            self._owner = me
            self._count = 1
        return rc

    __enter__ = acquire

    def release(self):
        """Release a lock, decrementing the recursion level.

        If after the decrement it is zero, reset the lock to unlocked (not owned
        by any thread), and if any other threads are blocked waiting for the
        lock to become unlocked, allow exactly one of them to proceed. If after
        the decrement the recursion level is still nonzero, the lock remains
        locked and owned by the calling thread.

        Only call this method when the calling thread owns the lock. A
        RuntimeError is raised if this method is called when the lock is
        unlocked.

        There is no return value.
        """
        if self._owner != get_ident():
            raise RuntimeError("cannot release un-acquired lock")
        self._count = count = self._count - 1
        if not count:
            # Fully released: drop ownership and unlock the primitive lock.
            self._owner = None
            self._block.release()

    def __exit__(self, t, v, tb):
        self.release()

    # Internal methods used by condition variables

    def _acquire_restore(self, state):
        # Re-acquire and restore the saved (count, owner) pair from
        # _release_save() — used by Condition.wait().
        self._block.acquire()
        self._count, self._owner = state

    def _release_save(self):
        if self._count == 0:
            raise RuntimeError("cannot release un-acquired lock")
        # Fully release the lock regardless of recursion depth and return
        # the state needed to restore it afterwards.
        count = self._count
        self._count = 0
        owner = self._owner
        self._owner = None
        self._block.release()
        return (count, owner)

    def _is_owned(self):
        return self._owner == get_ident()

# Pure-Python fallback, used when _thread does not provide a C RLock.
_PyRLock = _RLock
class Condition:
    """Class that implements a condition variable.

    A condition variable allows one or more threads to wait until they are
    notified by another thread.

    If the lock argument is given and not None, it must be a Lock or RLock
    object, and it is used as the underlying lock. Otherwise, a new RLock object
    is created and used as the underlying lock.
    """

    def __init__(self, lock=None):
        if lock is None:
            lock = RLock()
        self._lock = lock
        # Export the lock's acquire() and release() methods
        self.acquire = lock.acquire
        self.release = lock.release
        # If the lock defines _release_save() and/or _acquire_restore(),
        # these override the default implementations (which just call
        # release() and acquire() on the lock). Ditto for _is_owned().
        try:
            self._release_save = lock._release_save
        except AttributeError:
            pass
        try:
            self._acquire_restore = lock._acquire_restore
        except AttributeError:
            pass
        try:
            self._is_owned = lock._is_owned
        except AttributeError:
            pass
        # One primitive lock per waiting thread; released to wake the waiter.
        self._waiters = []

    def __enter__(self):
        return self._lock.__enter__()

    def __exit__(self, *args):
        return self._lock.__exit__(*args)

    def __repr__(self):
        return "<Condition(%s, %d)>" % (self._lock, len(self._waiters))

    def _release_save(self):
        # Default for plain Locks; overridden by RLock's version in __init__.
        self._lock.release()           # No state to save

    def _acquire_restore(self, x):
        self._lock.acquire()           # Ignore saved state

    def _is_owned(self):
        # Return True if lock is owned by current_thread.
        # This method is called only if __lock doesn't have _is_owned().
        # Heuristic: if a non-blocking acquire succeeds, nobody owned it.
        if self._lock.acquire(0):
            self._lock.release()
            return False
        else:
            return True

    def wait(self, timeout=None):
        """Wait until notified or until a timeout occurs.

        If the calling thread has not acquired the lock when this method is
        called, a RuntimeError is raised.

        This method releases the underlying lock, and then blocks until it is
        awakened by a notify() or notify_all() call for the same condition
        variable in another thread, or until the optional timeout occurs. Once
        awakened or timed out, it re-acquires the lock and returns.

        When the timeout argument is present and not None, it should be a
        floating point number specifying a timeout for the operation in seconds
        (or fractions thereof).

        When the underlying lock is an RLock, it is not released using its
        release() method, since this may not actually unlock the lock when it
        was acquired multiple times recursively. Instead, an internal interface
        of the RLock class is used, which really unlocks it even when it has
        been recursively acquired several times. Another internal interface is
        then used to restore the recursion level when the lock is reacquired.
        """
        if not self._is_owned():
            raise RuntimeError("cannot wait on un-acquired lock")
        # Each waiter blocks on its own already-held primitive lock; notify()
        # releases that lock to wake exactly this thread.
        waiter = _allocate_lock()
        waiter.acquire()
        self._waiters.append(waiter)
        saved_state = self._release_save()
        try:    # restore state no matter what (e.g., KeyboardInterrupt)
            if timeout is None:
                waiter.acquire()
                gotit = True
            else:
                if timeout > 0:
                    gotit = waiter.acquire(True, timeout)
                else:
                    gotit = waiter.acquire(False)
                if not gotit:
                    # Timed out: remove ourselves unless a notify() already did.
                    try:
                        self._waiters.remove(waiter)
                    except ValueError:
                        pass
            return gotit
        finally:
            self._acquire_restore(saved_state)

    def wait_for(self, predicate, timeout=None):
        """Wait until a condition evaluates to True.

        predicate should be a callable which result will be interpreted as a
        boolean value. A timeout may be provided giving the maximum time to
        wait.
        """
        endtime = None
        waittime = timeout
        result = predicate()
        while not result:
            if waittime is not None:
                if endtime is None:
                    endtime = _time() + waittime
                else:
                    # Recompute the remaining budget after each wakeup.
                    waittime = endtime - _time()
                    if waittime <= 0:
                        break
            self.wait(waittime)
            result = predicate()
        return result

    def notify(self, n=1):
        """Wake up one or more threads waiting on this condition, if any.

        If the calling thread has not acquired the lock when this method is
        called, a RuntimeError is raised.

        This method wakes up at most n of the threads waiting for the condition
        variable; it is a no-op if no threads are waiting.
        """
        if not self._is_owned():
            raise RuntimeError("cannot notify on un-acquired lock")
        __waiters = self._waiters
        waiters = __waiters[:n]
        if not waiters:
            return
        for waiter in waiters:
            # Releasing the waiter's lock unblocks it inside wait().
            waiter.release()
            try:
                __waiters.remove(waiter)
            except ValueError:
                pass

    def notify_all(self):
        """Wake up all threads waiting on this condition.

        If the calling thread has not acquired the lock when this method
        is called, a RuntimeError is raised.
        """
        self.notify(len(self._waiters))

    # Deprecated camelCase alias kept for backward compatibility.
    notifyAll = notify_all
class Semaphore:
    """This class implements semaphore objects.

    Semaphores manage a counter representing the number of release() calls minus
    the number of acquire() calls, plus an initial value. The acquire() method
    blocks if necessary until it can return without making the counter
    negative. If not given, value defaults to 1.
    """

    # After Tim Peters' semaphore class, but not quite the same (no maximum)

    def __init__(self, value=1):
        if value < 0:
            raise ValueError("semaphore initial value must be >= 0")
        # Condition guards _value; waiters block on it when _value hits 0.
        self._cond = Condition(Lock())
        self._value = value

    def acquire(self, blocking=True, timeout=None):
        """Acquire a semaphore, decrementing the internal counter by one.

        When invoked without arguments: if the internal counter is larger than
        zero on entry, decrement it by one and return immediately. If it is zero
        on entry, block, waiting until some other thread has called release() to
        make it larger than zero. This is done with proper interlocking so that
        if multiple acquire() calls are blocked, release() will wake exactly one
        of them up. The implementation may pick one at random, so the order in
        which blocked threads are awakened should not be relied on. There is no
        return value in this case.

        When invoked with blocking set to true, do the same thing as when called
        without arguments, and return true.

        When invoked with blocking set to false, do not block. If a call without
        an argument would block, return false immediately; otherwise, do the
        same thing as when called without arguments, and return true.

        When invoked with a timeout other than None, it will block for at
        most timeout seconds. If acquire does not complete successfully in
        that interval, return false. Return true otherwise.
        """
        if not blocking and timeout is not None:
            raise ValueError("can't specify timeout for non-blocking acquire")
        rc = False
        endtime = None
        with self._cond:
            # Loop (not just an if): a wakeup does not guarantee _value > 0,
            # another thread may have grabbed the slot first.
            while self._value == 0:
                if not blocking:
                    break
                if timeout is not None:
                    if endtime is None:
                        endtime = _time() + timeout
                    else:
                        timeout = endtime - _time()
                        if timeout <= 0:
                            break
                self._cond.wait(timeout)
            else:
                # while/else: only reached when the loop exits normally,
                # i.e. _value became nonzero.
                self._value = self._value - 1
                rc = True
        return rc

    __enter__ = acquire

    def release(self):
        """Release a semaphore, incrementing the internal counter by one.

        When the counter is zero on entry and another thread is waiting for it
        to become larger than zero again, wake up that thread.
        """
        with self._cond:
            self._value = self._value + 1
            self._cond.notify()

    def __exit__(self, t, v, tb):
        self.release()
class BoundedSemaphore(Semaphore):
    """Implements a bounded semaphore.

    A bounded semaphore checks to make sure its current value doesn't exceed its
    initial value. If it does, ValueError is raised. In most situations
    semaphores are used to guard resources with limited capacity.

    If the semaphore is released too many times it's a sign of a bug. If not
    given, value defaults to 1.

    Like regular semaphores, bounded semaphores manage a counter representing
    the number of release() calls minus the number of acquire() calls, plus an
    initial value. The acquire() method blocks if necessary until it can return
    without making the counter negative. If not given, value defaults to 1.
    """

    def __init__(self, value=1):
        Semaphore.__init__(self, value)
        # Remember the ceiling so release() can detect over-release.
        self._initial_value = value

    def release(self):
        """Release a semaphore, incrementing the internal counter by one.

        When the counter is zero on entry and another thread is waiting for it
        to become larger than zero again, wake up that thread.

        If the number of releases exceeds the number of acquires,
        raise a ValueError.
        """
        with self._cond:
            if self._value >= self._initial_value:
                raise ValueError("Semaphore released too many times")
            self._value += 1
            self._cond.notify()
class Event:
    """Class implementing event objects.

    Events manage a flag that can be set to true with the set() method and reset
    to false with the clear() method. The wait() method blocks until the flag is
    true. The flag is initially false.
    """

    # After Tim Peters' event class (without is_posted())

    def __init__(self):
        # Condition guards _flag; wait() blocks on it until set() notifies.
        self._cond = Condition(Lock())
        self._flag = False

    def _reset_internal_locks(self):
        # private! called by Thread._reset_internal_locks by _after_fork()
        self._cond.__init__()

    def is_set(self):
        """Return true if and only if the internal flag is true."""
        return self._flag

    # Deprecated camelCase alias kept for backward compatibility.
    isSet = is_set

    def set(self):
        """Set the internal flag to true.

        All threads waiting for it to become true are awakened. Threads
        that call wait() once the flag is true will not block at all.
        """
        # 'with' replaces the manual acquire/try/finally/release dance and
        # guarantees the lock is released even if notify_all() raises.
        with self._cond:
            self._flag = True
            self._cond.notify_all()

    def clear(self):
        """Reset the internal flag to false.

        Subsequently, threads calling wait() will block until set() is called to
        set the internal flag to true again.
        """
        with self._cond:
            self._flag = False

    def wait(self, timeout=None):
        """Block until the internal flag is true.

        If the internal flag is true on entry, return immediately. Otherwise,
        block until another thread calls set() to set the flag to true, or until
        the optional timeout occurs.

        When the timeout argument is present and not None, it should be a
        floating point number specifying a timeout for the operation in seconds
        (or fractions thereof).

        This method returns the internal flag on exit, so it will always return
        True except if a timeout is given and the operation times out.
        """
        with self._cond:
            signaled = self._flag
            if not signaled:
                # Condition.wait returns False on timeout, True on notify;
                # the flag is True whenever we were notified by set().
                signaled = self._cond.wait(timeout)
            return signaled
# A barrier class. Inspired in part by the pthread_barrier_* api and
# the CyclicBarrier class from Java. See
# http://sourceware.org/pthreads-win32/manual/pthread_barrier_init.html and
# http://java.sun.com/j2se/1.5.0/docs/api/java/util/concurrent/
# CyclicBarrier.html
# for information.
# We maintain two main states, 'filling' and 'draining' enabling the barrier
# to be cyclic. Threads are not allowed into it until it has fully drained
# since the previous cycle. In addition, a 'resetting' state exists which is
# similar to 'draining' except that threads leave with a BrokenBarrierError,
# and a 'broken' state in which all threads get the exception.
class Barrier:
    """Implements a Barrier.

    Useful for synchronizing a fixed number of threads at known synchronization
    points. Threads block on 'wait()' and are simultaneously once they have all
    made that call.
    """

    def __init__(self, parties, action=None, timeout=None):
        """Create a barrier, initialised to 'parties' threads.

        'action' is a callable which, when supplied, will be called by one of
        the threads after they have all entered the barrier and just prior to
        releasing them all. If a 'timeout' is provided, it is uses as the
        default for all subsequent 'wait()' calls.
        """
        self._cond = Condition(Lock())
        self._action = action
        self._timeout = timeout
        self._parties = parties
        self._state = 0    # 0 filling, 1, draining, -1 resetting, -2 broken
        self._count = 0    # number of threads currently inside wait()

    def wait(self, timeout=None):
        """Wait for the barrier.

        When the specified number of threads have started waiting, they are all
        simultaneously awoken. If an 'action' was provided for the barrier, one
        of the threads will have executed that callback prior to returning.
        Returns an individual index number from 0 to 'parties-1'.
        """
        if timeout is None:
            timeout = self._timeout
        with self._cond:
            self._enter()   # Block while the barrier drains.
            index = self._count
            self._count += 1
            try:
                if index + 1 == self._parties:
                    # We release the barrier
                    self._release()
                else:
                    # We wait until someone releases us
                    self._wait(timeout)
                return index
            finally:
                self._count -= 1
                # Wake up any threads waiting for barrier to drain.
                self._exit()

    # Block until the barrier is ready for us, or raise an exception
    # if it is broken.
    def _enter(self):
        while self._state in (-1, 1):
            # It is draining or resetting, wait until done
            self._cond.wait()
        # see if the barrier is in a broken state
        if self._state < 0:
            raise BrokenBarrierError
        assert self._state == 0

    # Optionally run the 'action' and release the threads waiting
    # in the barrier.
    def _release(self):
        try:
            if self._action:
                self._action()
            # enter draining state
            self._state = 1
            self._cond.notify_all()
        except:
            # an exception during the _action handler. Break and reraise
            self._break()
            raise

    # Wait in the barrier until we are relased. Raise an exception
    # if the barrier is reset or broken.
    def _wait(self, timeout):
        if not self._cond.wait_for(lambda : self._state != 0, timeout):
            # timed out. Break the barrier
            self._break()
            raise BrokenBarrierError
        if self._state < 0:
            raise BrokenBarrierError
        assert self._state == 1

    # If we are the last thread to exit the barrier, signal any threads
    # waiting for the barrier to drain.
    def _exit(self):
        if self._count == 0:
            if self._state in (-1, 1):
                # resetting or draining
                self._state = 0
                self._cond.notify_all()

    def reset(self):
        """Reset the barrier to the initial state.

        Any threads currently waiting will get the BrokenBarrier exception
        raised.
        """
        with self._cond:
            if self._count > 0:
                if self._state == 0:
                    # reset the barrier, waking up threads
                    self._state = -1
                elif self._state == -2:
                    # was broken, set it to reset state
                    # which clears when the last thread exits
                    self._state = -1
            else:
                self._state = 0
            self._cond.notify_all()

    def abort(self):
        """Place the barrier into a 'broken' state.

        Useful in case of error. Any currently waiting threads and threads
        attempting to 'wait()' will have BrokenBarrierError raised.
        """
        with self._cond:
            self._break()

    def _break(self):
        # An internal error was detected. The barrier is set to
        # a broken state all parties awakened.
        self._state = -2
        self._cond.notify_all()

    @property
    def parties(self):
        """Return the number of threads required to trip the barrier."""
        return self._parties

    @property
    def n_waiting(self):
        """Return the number of threads currently waiting at the barrier."""
        # We don't need synchronization here since this is an ephemeral result
        # anyway. It returns the correct value in the steady state.
        if self._state == 0:
            return self._count
        return 0

    @property
    def broken(self):
        """Return True if the barrier is in a broken state."""
        return self._state == -2
# exception raised by the Barrier class
class BrokenBarrierError(RuntimeError):
    """Raised when a Barrier is broken, reset, or a wait() on it times out."""
    pass
# Helper to generate new thread names
_counter = 0
def _newname(template="Thread-%d"):
    """Generate a default thread name from the module-level counter."""
    # Not thread-safe by design; a duplicate name is harmless since names
    # carry no semantics.
    global _counter
    _counter += 1
    return template % _counter
# Active thread administration
_active_limbo_lock = _allocate_lock()
_active = {} # maps thread id to Thread object
_limbo = {}
# For debug and leak testing
_dangling = WeakSet()
# Main class for threads
class Thread:
    """A class that represents a thread of control.

    This class can be safely subclassed in a limited fashion. There are two ways
    to specify the activity: by passing a callable object to the constructor, or
    by overriding the run() method in a subclass.
    """

    __initialized = False
    # Need to store a reference to sys.exc_info for printing
    # out exceptions when a thread tries to use a global var. during interp.
    # shutdown and thus raises an exception about trying to perform some
    # operation on/with a NoneType
    __exc_info = _sys.exc_info
    # Keep sys.exc_clear too to clear the exception just before
    # allowing .join() to return.
    #XXX __exc_clear = _sys.exc_clear

    def __init__(self, group=None, target=None, name=None,
                 args=(), kwargs=None, *, daemon=None):
        """This constructor should always be called with keyword arguments. Arguments are:

        *group* should be None; reserved for future extension when a ThreadGroup
        class is implemented.

        *target* is the callable object to be invoked by the run()
        method. Defaults to None, meaning nothing is called.

        *name* is the thread name. By default, a unique name is constructed of
        the form "Thread-N" where N is a small decimal number.

        *args* is the argument tuple for the target invocation. Defaults to ().

        *kwargs* is a dictionary of keyword arguments for the target
        invocation. Defaults to {}.

        If a subclass overrides the constructor, it must make sure to invoke
        the base class constructor (Thread.__init__()) before doing anything
        else to the thread.
        """
        assert group is None, "group argument must be None for now"
        if kwargs is None:
            kwargs = {}
        self._target = target
        self._name = str(name or _newname())
        self._args = args
        self._kwargs = kwargs
        if daemon is not None:
            self._daemonic = daemon
        else:
            # Daemon status is inherited from the creating thread.
            self._daemonic = current_thread().daemon
        self._ident = None
        self._started = Event()      # set once the new OS thread is running
        self._stopped = False
        self._block = Condition(Lock())   # join() waits on this
        self._initialized = True
        # sys.stderr is not stored in the class like
        # sys.exc_info since it can be changed between instances
        self._stderr = _sys.stderr
        _dangling.add(self)

    def _reset_internal_locks(self):
        # private!  Called by _after_fork() to reset our internal locks as
        # they may be in an invalid state leading to a deadlock or crash.
        if hasattr(self, '_block'):  # DummyThread deletes _block
            self._block.__init__()
        self._started._reset_internal_locks()

    def __repr__(self):
        assert self._initialized, "Thread.__init__() was not called"
        status = "initial"
        if self._started.is_set():
            status = "started"
        if self._stopped:
            status = "stopped"
        if self._daemonic:
            status += " daemon"
        if self._ident is not None:
            status += " %s" % self._ident
        return "<%s(%s, %s)>" % (self.__class__.__name__, self._name, status)

    def start(self):
        """Start the thread's activity.

        It must be called at most once per thread object. It arranges for the
        object's run() method to be invoked in a separate thread of control.

        This method will raise a RuntimeError if called more than once on the
        same thread object.
        """
        if not self._initialized:
            raise RuntimeError("thread.__init__() not called")
        if self._started.is_set():
            raise RuntimeError("threads can only be started once")
        # Park in _limbo until _bootstrap_inner registers us in _active.
        with _active_limbo_lock:
            _limbo[self] = self
        try:
            _start_new_thread(self._bootstrap, ())
        except Exception:
            with _active_limbo_lock:
                del _limbo[self]
            raise
        self._started.wait()

    def run(self):
        """Method representing the thread's activity.

        You may override this method in a subclass. The standard run() method
        invokes the callable object passed to the object's constructor as the
        target argument, if any, with sequential and keyword arguments taken
        from the args and kwargs arguments, respectively.
        """
        try:
            if self._target:
                self._target(*self._args, **self._kwargs)
        finally:
            # Avoid a refcycle if the thread is running a function with
            # an argument that has a member that points to the thread.
            del self._target, self._args, self._kwargs

    def _bootstrap(self):
        # Wrapper around the real bootstrap code that ignores
        # exceptions during interpreter cleanup. Those typically
        # happen when a daemon thread wakes up at an unfortunate
        # moment, finds the world around it destroyed, and raises some
        # random exception *** while trying to report the exception in
        # _bootstrap_inner() below ***. Those random exceptions
        # don't help anybody, and they confuse users, so we suppress
        # them. We suppress them only when it appears that the world
        # indeed has already been destroyed, so that exceptions in
        # _bootstrap_inner() during normal business hours are properly
        # reported. Also, we only suppress them for daemonic threads;
        # if a non-daemonic encounters this, something else is wrong.
        try:
            self._bootstrap_inner()
        except:
            if self._daemonic and _sys is None:
                return
            raise

    def _set_ident(self):
        self._ident = get_ident()

    def _bootstrap_inner(self):
        try:
            self._set_ident()
            self._started.set()
            # Move from _limbo to _active now that the thread is running.
            with _active_limbo_lock:
                _active[self._ident] = self
                del _limbo[self]

            if _trace_hook:
                _sys.settrace(_trace_hook)
            if _profile_hook:
                _sys.setprofile(_profile_hook)

            try:
                self.run()
            except SystemExit:
                pass
            except:
                # If sys.stderr is no more (most likely from interpreter
                # shutdown) use self._stderr. Otherwise still use sys (as in
                # _sys) in case sys.stderr was redefined since the creation of
                # self.
                if _sys:
                    _sys.stderr.write("Exception in thread %s:\n%s\n" %
                                      (self.name, _format_exc()))
                else:
                    # Do the best job possible w/o a huge amt. of code to
                    # approximate a traceback (code ideas from
                    # Lib/traceback.py)
                    exc_type, exc_value, exc_tb = self._exc_info()
                    try:
                        print((
                            "Exception in thread " + self.name +
                            " (most likely raised during interpreter shutdown):"), file=self._stderr)
                        print((
                            "Traceback (most recent call last):"), file=self._stderr)
                        while exc_tb:
                            print((
                                '  File "%s", line %s, in %s' %
                                (exc_tb.tb_frame.f_code.co_filename,
                                 exc_tb.tb_lineno,
                                 exc_tb.tb_frame.f_code.co_name)), file=self._stderr)
                            exc_tb = exc_tb.tb_next
                        print(("%s: %s" % (exc_type, exc_value)), file=self._stderr)
                    # Make sure that exc_tb gets deleted since it is a memory
                    # hog; deleting everything else is just for thoroughness
                    finally:
                        del exc_type, exc_value, exc_tb
            finally:
                # Prevent a race in
                # test_threading.test_no_refcycle_through_target when
                # the exception keeps the target alive past when we
                # assert that it's dead.
                #XXX self.__exc_clear()
                pass
        finally:
            with _active_limbo_lock:
                self._stop()
                try:
                    # We don't call self._delete() because it also
                    # grabs _active_limbo_lock.
                    del _active[get_ident()]
                except:
                    pass

    def _stop(self):
        # Mark the thread stopped and wake any join()ers.
        self._block.acquire()
        self._stopped = True
        self._block.notify_all()
        self._block.release()

    def _delete(self):
        "Remove current thread from the dict of currently running threads."

        # Notes about running with _dummy_thread:
        #
        # Must take care to not raise an exception if _dummy_thread is being
        # used (and thus this module is being used as an instance of
        # dummy_threading). _dummy_thread.get_ident() always returns -1 since
        # there is only one thread if _dummy_thread is being used. Thus
        # len(_active) is always <= 1 here, and any Thread instance created
        # overwrites the (if any) thread currently registered in _active.
        #
        # An instance of _MainThread is always created by 'threading'. This
        # gets overwritten the instant an instance of Thread is created; both
        # threads return -1 from _dummy_thread.get_ident() and thus have the
        # same key in the dict. So when the _MainThread instance created by
        # 'threading' tries to clean itself up when atexit calls this method
        # it gets a KeyError if another Thread instance was created.
        #
        # This all means that KeyError from trying to delete something from
        # _active if dummy_threading is being used is a red herring. But
        # since it isn't if dummy_threading is *not* being used then don't
        # hide the exception.

        try:
            with _active_limbo_lock:
                del _active[get_ident()]
                # There must not be any python code between the previous line
                # and after the lock is released. Otherwise a tracing function
                # could try to acquire the lock again in the same thread, (in
                # current_thread()), and would block.
        except KeyError:
            if 'dummy_threading' not in _sys.modules:
                raise

    def join(self, timeout=None):
        """Wait until the thread terminates.

        This blocks the calling thread until the thread whose join() method is
        called terminates -- either normally or through an unhandled exception
        or until the optional timeout occurs.

        When the timeout argument is present and not None, it should be a
        floating point number specifying a timeout for the operation in seconds
        (or fractions thereof). As join() always returns None, you must call
        isAlive() after join() to decide whether a timeout happened -- if the
        thread is still alive, the join() call timed out.

        When the timeout argument is not present or None, the operation will
        block until the thread terminates.

        A thread can be join()ed many times.

        join() raises a RuntimeError if an attempt is made to join the current
        thread as that would cause a deadlock. It is also an error to join() a
        thread before it has been started and attempts to do so raises the same
        exception.
        """
        if not self._initialized:
            raise RuntimeError("Thread.__init__() not called")
        if not self._started.is_set():
            raise RuntimeError("cannot join thread before it is started")
        if self is current_thread():
            raise RuntimeError("cannot join current thread")

        self._block.acquire()
        try:
            if timeout is None:
                while not self._stopped:
                    self._block.wait()
            else:
                # Re-check the deadline after every spurious wakeup.
                deadline = _time() + timeout
                while not self._stopped:
                    delay = deadline - _time()
                    if delay <= 0:
                        break
                    self._block.wait(delay)
        finally:
            self._block.release()

    @property
    def name(self):
        """A string used for identification purposes only.

        It has no semantics. Multiple threads may be given the same name. The
        initial name is set by the constructor.
        """
        assert self._initialized, "Thread.__init__() not called"
        return self._name

    @name.setter
    def name(self, name):
        assert self._initialized, "Thread.__init__() not called"
        self._name = str(name)

    @property
    def ident(self):
        """Thread identifier of this thread or None if it has not been started.

        This is a nonzero integer. See the thread.get_ident() function. Thread
        identifiers may be recycled when a thread exits and another thread is
        created. The identifier is available even after the thread has exited.
        """
        assert self._initialized, "Thread.__init__() not called"
        return self._ident

    def is_alive(self):
        """Return whether the thread is alive.

        This method returns True just before the run() method starts until just
        after the run() method terminates. The module function enumerate()
        returns a list of all alive threads.
        """
        assert self._initialized, "Thread.__init__() not called"
        return self._started.is_set() and not self._stopped

    # Deprecated camelCase alias kept for backward compatibility.
    isAlive = is_alive

    @property
    def daemon(self):
        """A boolean value indicating whether this thread is a daemon thread.

        This must be set before start() is called, otherwise RuntimeError is
        raised. Its initial value is inherited from the creating thread; the
        main thread is not a daemon thread and therefore all threads created in
        the main thread default to daemon = False.

        The entire Python program exits when no alive non-daemon threads are
        left.
        """
        assert self._initialized, "Thread.__init__() not called"
        return self._daemonic

    @daemon.setter
    def daemon(self, daemonic):
        if not self._initialized:
            raise RuntimeError("Thread.__init__() not called")
        if self._started.is_set():
            raise RuntimeError("cannot set daemon status of active thread");
        self._daemonic = daemonic

    def isDaemon(self):
        return self.daemon

    def setDaemon(self, daemonic):
        self.daemon = daemonic

    def getName(self):
        return self.name

    def setName(self, name):
        self.name = name
# The timer class was contributed by Itamar Shtull-Trauring
class Timer(Thread):
    """Call a function after a specified number of seconds:

    t = Timer(30.0, f, args=None, kwargs=None)
    t.start()
    t.cancel()     # stop the timer's action if it's still waiting
    """

    def __init__(self, interval, function, args=None, kwargs=None):
        Thread.__init__(self)
        self.interval = interval
        self.function = function
        self.args = [] if args is None else args
        self.kwargs = {} if kwargs is None else kwargs
        # 'finished' doubles as the cancellation signal and completion marker.
        self.finished = Event()

    def cancel(self):
        """Stop the timer if it hasn't finished yet."""
        self.finished.set()

    def run(self):
        # Event.wait() returns the flag: True means cancel() fired before
        # the interval elapsed, so the callback is skipped.
        if not self.finished.wait(self.interval):
            self.function(*self.args, **self.kwargs)
        self.finished.set()
# Special thread class to represent the main thread
# This is garbage collected through an exit handler
class _MainThread(Thread):
    """Thread object standing in for the interpreter's main thread."""

    def __init__(self):
        Thread.__init__(self, name="MainThread", daemon=False)
        self._started.set()
        self._set_ident()
        with _active_limbo_lock:
            _active[self._ident] = self

    def _exitfunc(self):
        # Mark the main thread stopped, then wait for every remaining
        # non-daemon thread before deleting the bookkeeping entry.
        self._stop()
        while True:
            straggler = _pickSomeNonDaemonThread()
            if straggler is None:
                break
            straggler.join()
        self._delete()
def _pickSomeNonDaemonThread():
    # `enumerate` here is this module's thread enumeration, not the builtin.
    # Returns any live non-daemon thread, or None when none remain.
    return next(
        (t for t in enumerate() if not t.daemon and t.is_alive()),
        None,
    )
# Dummy thread class to represent threads not started here.
# These aren't garbage collected when they die, nor can they be waited for.
# If they invoke anything in threading.py that calls current_thread(), they
# leave an entry in the _active dict forever after.
# Their purpose is to return *something* from current_thread().
# They are marked as daemon threads so we won't wait for them
# when we exit (conform previous semantics).
class _DummyThread(Thread):
    """Stand-in Thread for threads of control not created by this module."""

    def __init__(self):
        Thread.__init__(self, name=_newname("Dummy-%d"), daemon=True)
        # Thread._block consumes an OS-level locking primitive, which
        # can never be used by a _DummyThread.  Since a _DummyThread
        # instance is immortal, that's bad, so release this resource.
        del self._block
        self._started.set()
        self._set_ident()
        with _active_limbo_lock:
            _active[self._ident] = self

    def _stop(self):
        # Dummy threads never stop; there is no state to tear down.
        pass

    def join(self, timeout=None):
        assert False, "cannot join a dummy thread"
# Global API functions
def current_thread():
    """Return the current Thread object, corresponding to the caller's thread of control.

    If the caller's thread of control was not created through the threading
    module, a dummy thread object with limited functionality is returned.
    """
    ident = get_ident()
    try:
        return _active[ident]
    except KeyError:
        # Not one of ours: hand back an immortal placeholder.
        return _DummyThread()

# Backward-compatible camelCase alias.
currentThread = current_thread
def active_count():
    """Return the number of Thread objects currently alive.

    The returned count is equal to the length of the list returned by
    enumerate().
    """
    with _active_limbo_lock:
        # Started threads live in _active; starting-but-not-yet-running
        # ones in _limbo.
        return len(_active) + len(_limbo)

# Backward-compatible camelCase alias.
activeCount = active_count
def _enumerate():
    # Same as enumerate(), but without the lock. Internal use only.
    return list(_active.values()) + list(_limbo.values())

def enumerate():
    """Return a list of all Thread objects currently alive.

    The list includes daemonic threads, dummy thread objects created by
    current_thread(), and the main thread. It excludes terminated threads and
    threads that have not yet been started.
    """
    with _active_limbo_lock:
        return _enumerate()
from _thread import stack_size
# Create the main thread object,
# and make it available for the interpreter
# (Py_Main) as threading._shutdown.
_shutdown = _MainThread()._exitfunc
# get thread-local implementation, either from the thread
# module, or from the python fallback
try:
from _thread import _local as local
except ImportError:
from _threading_local import local
def _after_fork():
    # This function is called by Python/ceval.c:PyEval_ReInitThreads which
    # is called from PyOS_AfterFork.  Here we cleanup threading module state
    # that should not exist after a fork.
    # It runs in the *child* process, where only the forking thread survives.

    # Reset _active_limbo_lock, in case we forked while the lock was held
    # by another (non-forked) thread. http://bugs.python.org/issue874900
    global _active_limbo_lock
    _active_limbo_lock = _allocate_lock()

    # fork() only copied the current thread; clear references to others.
    new_active = {}
    current = current_thread()
    with _active_limbo_lock:
        for thread in _enumerate():
            # Any lock/condition variable may be currently locked or in an
            # invalid state, so we reinitialize them.
            thread._reset_internal_locks()
            if thread is current:
                # There is only one active thread. We reset the ident to
                # its new value since it can have changed.
                ident = get_ident()
                thread._ident = ident
                new_active[ident] = thread
            else:
                # All the others are already stopped.
                thread._stop()

        _limbo.clear()
        _active.clear()
        _active.update(new_active)
        # Post-condition: the child sees exactly one live thread.
        assert len(_active) == 1
| gpl-3.0 |
xmdy/ibstats | src/stats/models.py | 1 | 1877 | from __future__ import unicode_literals
from django.db import models
from django.utils.translation import ugettext as _
import random
def get_random_value(start=100, end=100000):
    """Build a zero-argument callable yielding a random two-decimal amount.

    The callable draws an integer in [start, end] (inclusive) and scales it
    by 0.01.  A callable (rather than a value) is returned so that model
    field defaults are re-evaluated for each new record.
    """
    def _draw():
        return random.randint(start, end) * 0.01
    return _draw
class Trader(models.Model):
    # Display name of the trader.
    name = models.CharField(verbose_name=_('name'), max_length=128)
    # Account balance; the default is a callable drawing a random value
    # between 1.00 and 1000.00 (get_random_value defaults: 100..100000 * 0.01).
    balance = models.FloatField(verbose_name=_('balance'), default=get_random_value())

    class Meta:
        verbose_name = _('trader')
        verbose_name_plural = _('traders')
        ordering = ['-id']  # newest first

    def __unicode__(self):
        # Python 2 representation (file uses unicode_literals / Django 1.x era).
        return '<Trader: %s, %s>' % (self.id, self.name)
class Transaction(models.Model):
    # Owning trader.
    trader = models.ForeignKey(Trader, verbose_name=_('trader'))
    # NOTE(review): auto_created=True is an internal field/model flag, not a
    # timestamp option -- this probably meant auto_now_add=True; confirm.
    time = models.DateTimeField(verbose_name=_('time'), auto_created=True)
    # Random default amount (see get_random_value).
    amount = models.FloatField(verbose_name=_('amount'), default=get_random_value())
    # Integer type code, default 1; indexed for filtering.
    type = models.IntegerField(verbose_name=_('type'), default=1, db_index=True)

    class Meta:
        verbose_name = _('transaction')
        verbose_name_plural = _('transactions')
        ordering = ['-id']  # newest first

    def __unicode__(self):
        return '<Transaction: %s, %s, %s, %s>' % (self.id, self.time, self.amount, self.trader_id)
class Deal(models.Model):
    # Owning trader.
    trader = models.ForeignKey(Trader, verbose_name=_('trader'))
    # NOTE(review): as in Transaction, auto_created=True is likely a typo for
    # auto_now_add=True -- confirm before relying on this timestamp.
    time = models.DateTimeField(verbose_name=_('time'), db_index=True, auto_created=True)
    amount = models.FloatField(verbose_name=_('amount'), default=get_random_value())
    # Result may be negative: start=-100000 gives a -1000.00..1000.00 range.
    result_amount = models.FloatField(verbose_name=_('result amount'), default=get_random_value(-100000))

    class Meta:
        verbose_name = _('deal')
        verbose_name_plural = _('deals')
        ordering = ['-id']  # newest first

    def __unicode__(self):
        return '<Deal: %s, %s, %s, %s>' % (self.id, self.time, self.amount, self.trader_id)
anant-dev/django | tests/select_for_update/tests.py | 123 | 9449 | from __future__ import unicode_literals
import threading
import time
from multiple_database.routers import TestRouter
from django.db import DatabaseError, connection, router, transaction
from django.test import (
TransactionTestCase, override_settings, skipIfDBFeature,
skipUnlessDBFeature,
)
from .models import Person
# We need to set settings.DEBUG to True so we can capture the output SQL
# to examine.
@override_settings(DEBUG=True)
class SelectForUpdateTests(TransactionTestCase):
    # Integration tests for QuerySet.select_for_update(): SQL generation,
    # cross-connection blocking, NOWAIT handling and the requirement that
    # the query run inside a transaction.  TransactionTestCase is needed
    # because the tests manage transactions and a second DB connection.

    available_apps = ['select_for_update']

    def setUp(self):
        # This is executed in autocommit mode so that code in
        # run_select_for_update can see this data.
        self.person = Person.objects.create(name='Reinhardt')

        # We need another database connection in transaction to test that one
        # connection issuing a SELECT ... FOR UPDATE will block.
        self.new_connection = connection.copy()

    def tearDown(self):
        try:
            self.end_blocking_transaction()
        except (DatabaseError, AttributeError):
            # The blocking transaction may never have started, or already
            # ended -- nothing left to clean up in that case.
            pass
        self.new_connection.close()

    def start_blocking_transaction(self):
        self.new_connection.set_autocommit(False)
        # Start a blocking transaction. At some point,
        # end_blocking_transaction() should be called.
        self.cursor = self.new_connection.cursor()
        # Raw SELECT ... FOR UPDATE: holds row locks on Person until the
        # transaction is rolled back in end_blocking_transaction().
        sql = 'SELECT * FROM %(db_table)s %(for_update)s;' % {
            'db_table': Person._meta.db_table,
            'for_update': self.new_connection.ops.for_update_sql(),
        }
        self.cursor.execute(sql, ())
        self.cursor.fetchone()

    def end_blocking_transaction(self):
        # Roll back the blocking transaction.
        self.new_connection.rollback()
        self.new_connection.set_autocommit(True)

    def has_for_update_sql(self, tested_connection, nowait=False):
        # Examine the SQL that was executed to determine whether it
        # contains the 'SELECT..FOR UPDATE' stanza.
        # (Relies on DEBUG=True so that connection.queries is populated.)
        for_update_sql = tested_connection.ops.for_update_sql(nowait)
        sql = tested_connection.queries[-1]['sql']
        return bool(sql.find(for_update_sql) > -1)

    @skipUnlessDBFeature('has_select_for_update')
    def test_for_update_sql_generated(self):
        """
        Test that the backend's FOR UPDATE variant appears in
        generated SQL when select_for_update is invoked.
        """
        with transaction.atomic():
            list(Person.objects.all().select_for_update())
        self.assertTrue(self.has_for_update_sql(connection))

    @skipUnlessDBFeature('has_select_for_update_nowait')
    def test_for_update_sql_generated_nowait(self):
        """
        Test that the backend's FOR UPDATE NOWAIT variant appears in
        generated SQL when select_for_update is invoked.
        """
        with transaction.atomic():
            list(Person.objects.all().select_for_update(nowait=True))
        self.assertTrue(self.has_for_update_sql(connection, nowait=True))

    @skipUnlessDBFeature('has_select_for_update_nowait')
    def test_nowait_raises_error_on_block(self):
        """
        If nowait is specified, we expect an error to be raised rather
        than blocking.
        """
        self.start_blocking_transaction()
        status = []
        thread = threading.Thread(
            target=self.run_select_for_update,
            args=(status,),
            kwargs={'nowait': True},
        )

        thread.start()
        # Give the worker a moment to hit the locked rows and fail.
        time.sleep(1)
        thread.join()
        self.end_blocking_transaction()
        self.assertIsInstance(status[-1], DatabaseError)

    @skipIfDBFeature('has_select_for_update_nowait')
    @skipUnlessDBFeature('has_select_for_update')
    def test_unsupported_nowait_raises_error(self):
        """
        If a SELECT...FOR UPDATE NOWAIT is run on a database backend
        that supports FOR UPDATE but not NOWAIT, then we should find
        that a DatabaseError is raised.
        """
        self.assertRaises(
            DatabaseError,
            list,
            Person.objects.all().select_for_update(nowait=True)
        )

    @skipUnlessDBFeature('has_select_for_update')
    def test_for_update_requires_transaction(self):
        """
        Test that a TransactionManagementError is raised
        when a select_for_update query is executed outside of a transaction.
        """
        with self.assertRaises(transaction.TransactionManagementError):
            list(Person.objects.all().select_for_update())

    @skipUnlessDBFeature('has_select_for_update')
    def test_for_update_requires_transaction_only_in_execution(self):
        """
        Test that no TransactionManagementError is raised
        when select_for_update is invoked outside of a transaction -
        only when the query is executed.
        """
        people = Person.objects.all().select_for_update()
        with self.assertRaises(transaction.TransactionManagementError):
            list(people)

    def run_select_for_update(self, status, nowait=False):
        """
        Utility method that runs a SELECT FOR UPDATE against all
        Person instances. After the select_for_update, it attempts
        to update the name of the only record, save, and commit.

        This function expects to run in a separate thread.
        """
        status.append('started')
        try:
            # We need to enter transaction management again, as this is done on
            # per-thread basis
            with transaction.atomic():
                people = list(
                    Person.objects.all().select_for_update(nowait=nowait)
                )
                people[0].name = 'Fred'
                people[0].save()
        except DatabaseError as e:
            # Expected in the NOWAIT tests; the caller inspects status[-1].
            status.append(e)
        finally:
            # This method is run in a separate thread. It uses its own
            # database connection. Close it without waiting for the GC.
            connection.close()

    @skipUnlessDBFeature('has_select_for_update')
    @skipUnlessDBFeature('supports_transactions')
    def test_block(self):
        """
        Check that a thread running a select_for_update that
        accesses rows being touched by a similar operation
        on another connection blocks correctly.
        """
        # First, let's start the transaction in our thread.
        self.start_blocking_transaction()

        # Now, try it again using the ORM's select_for_update
        # facility. Do this in a separate thread.
        status = []
        thread = threading.Thread(
            target=self.run_select_for_update, args=(status,)
        )

        # The thread should immediately block, but we'll sleep
        # for a bit to make sure.
        thread.start()
        sanity_count = 0
        # Busy-wait (up to ~10s) for the worker to report it has started.
        while len(status) != 1 and sanity_count < 10:
            sanity_count += 1
            time.sleep(1)
        if sanity_count >= 10:
            raise ValueError('Thread did not run and block')

        # Check the person hasn't been updated. Since this isn't
        # using FOR UPDATE, it won't block.
        p = Person.objects.get(pk=self.person.pk)
        self.assertEqual('Reinhardt', p.name)

        # When we end our blocking transaction, our thread should
        # be able to continue.
        self.end_blocking_transaction()
        thread.join(5.0)

        # Check the thread has finished. Assuming it has, we should
        # find that it has updated the person's name.
        # NOTE(review): isAlive() is the legacy alias of is_alive(); fine for
        # the Python versions this Django release supported.
        self.assertFalse(thread.isAlive())

        # We must commit the transaction to ensure that MySQL gets a fresh read,
        # since by default it runs in REPEATABLE READ mode
        transaction.commit()

        p = Person.objects.get(pk=self.person.pk)
        self.assertEqual('Fred', p.name)

    @skipUnlessDBFeature('has_select_for_update')
    def test_raw_lock_not_available(self):
        """
        Check that running a raw query which can't obtain a FOR UPDATE lock
        raises the correct exception
        """
        self.start_blocking_transaction()

        def raw(status):
            try:
                list(
                    Person.objects.raw(
                        'SELECT * FROM %s %s' % (
                            Person._meta.db_table,
                            connection.ops.for_update_sql(nowait=True)
                        )
                    )
                )
            except DatabaseError as e:
                status.append(e)
            finally:
                # This method is run in a separate thread. It uses its own
                # database connection. Close it without waiting for the GC.
                connection.close()

        status = []
        thread = threading.Thread(target=raw, kwargs={'status': status})
        thread.start()
        time.sleep(1)
        thread.join()
        self.end_blocking_transaction()
        self.assertIsInstance(status[-1], DatabaseError)

    @skipUnlessDBFeature('has_select_for_update')
    @override_settings(DATABASE_ROUTERS=[TestRouter()])
    def test_select_for_update_on_multidb(self):
        # The query must target the write database chosen by the router.
        query = Person.objects.select_for_update()
        self.assertEqual(router.db_for_write(Person), query.db)

    @skipUnlessDBFeature('has_select_for_update')
    def test_select_for_update_with_get(self):
        with transaction.atomic():
            person = Person.objects.select_for_update().get(name='Reinhardt')
        self.assertEqual(person.name, 'Reinhardt')
| bsd-3-clause |
travisdoor/ArgosRts | engine/thirdparty/yaml-cpp/test/gmock-1.7.0/gtest/scripts/fuse_gtest_files.py | 2577 | 8813 | #!/usr/bin/env python
#
# Copyright 2009, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""fuse_gtest_files.py v0.2.0
Fuses Google Test source code into a .h file and a .cc file.
SYNOPSIS
fuse_gtest_files.py [GTEST_ROOT_DIR] OUTPUT_DIR
Scans GTEST_ROOT_DIR for Google Test source code, and generates
two files: OUTPUT_DIR/gtest/gtest.h and OUTPUT_DIR/gtest/gtest-all.cc.
Then you can build your tests by adding OUTPUT_DIR to the include
search path and linking with OUTPUT_DIR/gtest/gtest-all.cc. These
two files contain everything you need to use Google Test. Hence
you can "install" Google Test by copying them to wherever you want.
GTEST_ROOT_DIR can be omitted and defaults to the parent
directory of the directory holding this script.
EXAMPLES
./fuse_gtest_files.py fused_gtest
./fuse_gtest_files.py path/to/unpacked/gtest fused_gtest
This tool is experimental. In particular, it assumes that there is no
conditional inclusion of Google Test headers. Please report any
problems to googletestframework@googlegroups.com. You can read
http://code.google.com/p/googletest/wiki/GoogleTestAdvancedGuide for
more information.
"""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import re
import sets
import sys
# We assume that this file is in the scripts/ directory in the Google
# Test root directory.
DEFAULT_GTEST_ROOT_DIR = os.path.join(os.path.dirname(__file__), '..')

# Regex for matching '#include "gtest/..."'.
# Group 1 captures the quoted path (e.g. 'gtest/gtest-spi.h').
INCLUDE_GTEST_FILE_REGEX = re.compile(r'^\s*#\s*include\s*"(gtest/.+)"')

# Regex for matching '#include "src/..."'.
# Group 1 captures the quoted path (e.g. 'src/gtest-port.cc').
INCLUDE_SRC_FILE_REGEX = re.compile(r'^\s*#\s*include\s*"(src/.+)"')

# Where to find the source seed files.
GTEST_H_SEED = 'include/gtest/gtest.h'
GTEST_SPI_H_SEED = 'include/gtest/gtest-spi.h'
GTEST_ALL_CC_SEED = 'src/gtest-all.cc'

# Where to put the generated files.
GTEST_H_OUTPUT = 'gtest/gtest.h'
GTEST_ALL_CC_OUTPUT = 'gtest/gtest-all.cc'
def VerifyFileExists(directory, relative_path):
  """Verifies that the given file exists; aborts on failure.

  relative_path is the file path relative to the given directory.
  """
  # NOTE(review): Python 2 syntax (print statements) -- run with Python 2.
  if not os.path.isfile(os.path.join(directory, relative_path)):
    print 'ERROR: Cannot find %s in directory %s.' % (relative_path,
                                                      directory)
    print ('Please either specify a valid project root directory '
           'or omit it on the command line.')
    sys.exit(1)
def ValidateGTestRootDir(gtest_root):
  """Makes sure gtest_root points to a valid gtest root directory.

  The function aborts the program on failure.
  """
  # Both seed files must be present for this to be a gtest checkout.
  for seed in (GTEST_H_SEED, GTEST_ALL_CC_SEED):
    VerifyFileExists(gtest_root, seed)
def VerifyOutputFile(output_dir, relative_path):
  """Verifies that the given output file path is valid.

  relative_path is relative to the output_dir directory.
  """
  # NOTE(review): Python 2 syntax (print statements) -- run with Python 2.

  # Makes sure the output file either doesn't exist or can be overwritten.
  output_file = os.path.join(output_dir, relative_path)
  if os.path.exists(output_file):
    # TODO(wan@google.com): The following user-interaction doesn't
    # work with automated processes.  We should provide a way for the
    # Makefile to force overwriting the files.
    print ('%s already exists in directory %s - overwrite it? (y/N) ' %
           (relative_path, output_dir))
    answer = sys.stdin.readline().strip()
    if answer not in ['y', 'Y']:
      print 'ABORTED.'
      sys.exit(1)

  # Makes sure the directory holding the output file exists; creates
  # it and all its ancestors if necessary.
  parent_directory = os.path.dirname(output_file)
  if not os.path.isdir(parent_directory):
    os.makedirs(parent_directory)
def ValidateOutputDir(output_dir):
  """Makes sure output_dir points to a valid output directory.

  The function aborts the program on failure.
  """
  # Both generated paths must be writable (or confirmed overwritable).
  for target in (GTEST_H_OUTPUT, GTEST_ALL_CC_OUTPUT):
    VerifyOutputFile(output_dir, target)
def FuseGTestH(gtest_root, output_dir):
  """Scans folder gtest_root to generate gtest/gtest.h in output_dir."""
  output_file = open(os.path.join(output_dir, GTEST_H_OUTPUT), 'w')
  # FIX: use the builtin set instead of the long-deprecated sets.Set, and
  # open() instead of the Python-2-only file() builtin.
  processed_files = set()  # Holds all gtest headers we've processed.

  def ProcessFile(gtest_header_path):
    """Processes the given gtest header file."""
    # We don't process the same header twice.
    if gtest_header_path in processed_files:
      return

    processed_files.add(gtest_header_path)

    # Reads each line in the given gtest header.
    # FIX: close the input handle deterministically (was left to the GC).
    with open(os.path.join(gtest_root, gtest_header_path), 'r') as source:
      for line in source:
        m = INCLUDE_GTEST_FILE_REGEX.match(line)
        if m:
          # It's '#include "gtest/..."' - let's process it recursively.
          ProcessFile('include/' + m.group(1))
        else:
          # Otherwise we copy the line unchanged to the output file.
          output_file.write(line)

  ProcessFile(GTEST_H_SEED)
  output_file.close()
def FuseGTestAllCcToFile(gtest_root, output_file):
  """Scans folder gtest_root to generate gtest/gtest-all.cc in output_file."""
  # FIX: builtin set instead of deprecated sets.Set.
  processed_files = set()

  def ProcessFile(gtest_source_file):
    """Processes the given gtest source file."""
    # We don't process the same #included file twice.
    if gtest_source_file in processed_files:
      return

    processed_files.add(gtest_source_file)

    # Reads each line in the given gtest source file.
    # FIX: open() + with instead of the Python-2-only, never-closed file().
    with open(os.path.join(gtest_root, gtest_source_file), 'r') as source:
      for line in source:
        m = INCLUDE_GTEST_FILE_REGEX.match(line)
        if m:
          if 'include/' + m.group(1) == GTEST_SPI_H_SEED:
            # It's '#include "gtest/gtest-spi.h"'.  This file is not
            # #included by "gtest/gtest.h", so we need to process it.
            ProcessFile(GTEST_SPI_H_SEED)
          else:
            # It's '#include "gtest/foo.h"' where foo is not gtest-spi.
            # We treat it as '#include "gtest/gtest.h"', as all other
            # gtest headers are being fused into gtest.h and cannot be
            # #included directly.
            # There is no need to #include "gtest/gtest.h" more than once.
            if GTEST_H_SEED not in processed_files:
              processed_files.add(GTEST_H_SEED)
              output_file.write('#include "%s"\n' % (GTEST_H_OUTPUT,))
        else:
          m = INCLUDE_SRC_FILE_REGEX.match(line)
          if m:
            # It's '#include "src/foo"' - let's process it recursively.
            ProcessFile(m.group(1))
          else:
            output_file.write(line)

  ProcessFile(GTEST_ALL_CC_SEED)
def FuseGTestAllCc(gtest_root, output_dir):
  """Scans folder gtest_root to generate gtest/gtest-all.cc in output_dir."""
  # FIX: open() instead of the Python-2-only file(), and close the output
  # even if fusing fails part-way (was left open on error).
  output_file = open(os.path.join(output_dir, GTEST_ALL_CC_OUTPUT), 'w')
  try:
    FuseGTestAllCcToFile(gtest_root, output_file)
  finally:
    output_file.close()
def FuseGTest(gtest_root, output_dir):
  """Fuses gtest.h and gtest-all.cc."""
  # Validate both directories up front, then generate the two fused files.
  ValidateGTestRootDir(gtest_root)
  ValidateOutputDir(output_dir)
  FuseGTestH(gtest_root, output_dir)
  FuseGTestAllCc(gtest_root, output_dir)
def main():
  # NOTE(review): Python 2 script (print statement below, `sets` module
  # elsewhere in the file) -- run with a Python 2 interpreter.
  argc = len(sys.argv)
  if argc == 2:
    # fuse_gtest_files.py OUTPUT_DIR
    FuseGTest(DEFAULT_GTEST_ROOT_DIR, sys.argv[1])
  elif argc == 3:
    # fuse_gtest_files.py GTEST_ROOT_DIR OUTPUT_DIR
    FuseGTest(sys.argv[1], sys.argv[2])
  else:
    # Wrong usage: show the module docstring (which documents the synopsis).
    print __doc__
    sys.exit(1)


if __name__ == '__main__':
  main()
| mit |
sidzan/netforce | netforce_cms/netforce_cms/models/account_invoice.py | 4 | 3563 | # Copyright (c) 2012-2015 Netforce Co. Ltd.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
import time
import urllib
import urllib.parse    # needed: plain "import urllib" does not load submodules
import urllib.request

from netforce import config
from netforce.database import get_connection
from netforce.logger import audit_log  # FIX: was "netforec.logger" (typo -> ImportError)
from netforce.model import Model, fields, get_model
class Invoice(Model):
    """Extension of account.invoice that checks online-payment status
    against the SCB NSIPS gateway and posts a payment when paid."""
    _inherit = "account.invoice"
    _fields = {
        "number": fields.Char("Number"),  # invoice number, used as the gateway reference
    }

    def check_payment_online(self, ids, context={}):
        """Query the SCB NSIPS credit-inquiry (CRINQ) service for the first
        invoice in `ids`; if the gateway reports status "002" (treated here
        as paid), create and post a matching incoming payment.
        """
        obj = self.browse(ids)[0]
        date = time.strftime('%Y%m%d%H%M%S')
        # NOTE(review): merchant id, terminal id and the *test* gateway URL
        # are hard-coded -- move them to configuration before production use.
        qs = urllib.parse.urlencode([
            ('mid', 1000006741),
            ('terminal', 524114384),
            ('command', 'CRINQ'),
            ('ref_no', obj.number),
            ('ref_date', date),
            ('service_id', 10),
            ('cur_abbr', 'THB'),
            ('amount', obj.amount_total),
        ])
        url = 'https://nsips-test.scb.co.th:443/NSIPSWeb/NsipsMessageAction.do?'
        data = qs.encode('utf-8')
        req = urllib.request.Request(url, data)
        response = urllib.request.urlopen(req)
        reply = response.read().decode('utf-8')
        # The gateway answers with a urlencoded key=value string.
        params = dict(urllib.parse.parse_qsl(reply))
        # Robustness: missing keys used to raise KeyError; default to ''.
        payment_status = params.get('payment_status', '')
        amount = params.get('amount', '')
        trans_no = params.get('trans_no', '')
        if payment_status == "002":  # "002": gateway reports the invoice as paid
            try:
                db = get_connection()
                vals = {
                    "type": "in",
                    "pay_type": "invoice",
                    "contact_id": obj.contact_id.id,
                    "date": time.strftime("%Y-%m-%d"),
                    "ref": trans_no,
                    "account_id": obj.account_id.id,
                    "currency_id": obj.currency_id.id,
                    "lines": [("create", {
                        "type": "invoice",
                        "invoice_id": obj.id,
                        "account_id": obj.account_id.id,
                        "amount": amount,
                    })]
                }
                pmt_id = get_model("account.payment").create(vals, context={"type": vals["type"]})
                get_model("account.payment").post([pmt_id])
                db.commit()
            except Exception:
                db = get_connection()
                db.rollback()  # BUG FIX: was `db.rollback` -- attribute access, never called
                import traceback
                audit_log("Failed to get result payment from scb", details=traceback.format_exc())
                traceback.print_exc()
| mit |
Ravenm/2143-OOP-NASH | python3env/Lib/site-packages/pip/_vendor/requests/packages/chardet/latin1prober.py | 1778 | 5232 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .charsetprober import CharSetProber
from .constants import eNotMe
from .compat import wrap_ord
# Number of frequency categories used by Latin1ClassModel (values 0-3).
FREQ_CAT_NUM = 4

UDF = 0  # undefined
OTH = 1  # other
ASC = 2  # ascii capital letter
ASS = 3  # ascii small letter
ACV = 4  # accent capital vowel
ACO = 5  # accent capital other
ASV = 6  # accent small vowel
ASO = 7  # accent small other
CLASS_NUM = 8  # total classes

# Maps each of the 256 byte values to one of the character classes above.
Latin1_CharToClass = (
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 00 - 07
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 08 - 0F
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 10 - 17
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 18 - 1F
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 20 - 27
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 28 - 2F
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 30 - 37
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 38 - 3F
    OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC,  # 40 - 47
    ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC,  # 48 - 4F
    ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC,  # 50 - 57
    ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH,  # 58 - 5F
    OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS,  # 60 - 67
    ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS,  # 68 - 6F
    ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS,  # 70 - 77
    ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH,  # 78 - 7F
    OTH, UDF, OTH, ASO, OTH, OTH, OTH, OTH,  # 80 - 87
    OTH, OTH, ACO, OTH, ACO, UDF, ACO, UDF,  # 88 - 8F
    UDF, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 90 - 97
    OTH, OTH, ASO, OTH, ASO, UDF, ASO, ACO,  # 98 - 9F
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # A0 - A7
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # A8 - AF
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # B0 - B7
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # B8 - BF
    ACV, ACV, ACV, ACV, ACV, ACV, ACO, ACO,  # C0 - C7
    ACV, ACV, ACV, ACV, ACV, ACV, ACV, ACV,  # C8 - CF
    ACO, ACO, ACV, ACV, ACV, ACV, ACV, OTH,  # D0 - D7
    ACV, ACV, ACV, ACV, ACV, ACO, ACO, ACO,  # D8 - DF
    ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASO,  # E0 - E7
    ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV,  # E8 - EF
    ASO, ASO, ASV, ASV, ASV, ASV, ASV, OTH,  # F0 - F7
    ASV, ASV, ASV, ASV, ASV, ASO, ASO, ASO,  # F8 - FF
)

# 0 : illegal
# 1 : very unlikely
# 2 : normal
# 3 : very likely
# Flattened CLASS_NUM x CLASS_NUM table, indexed by
# previous_class * CLASS_NUM + current_class (see Latin1Prober.feed).
Latin1ClassModel = (
    # UDF OTH ASC ASS ACV ACO ASV ASO
    0, 0, 0, 0, 0, 0, 0, 0,  # UDF
    0, 3, 3, 3, 3, 3, 3, 3,  # OTH
    0, 3, 3, 3, 3, 3, 3, 3,  # ASC
    0, 3, 3, 3, 1, 1, 3, 3,  # ASS
    0, 3, 3, 3, 1, 2, 1, 2,  # ACV
    0, 3, 3, 3, 3, 3, 3, 3,  # ACO
    0, 3, 1, 3, 1, 1, 1, 3,  # ASV
    0, 3, 1, 3, 1, 1, 3, 3,  # ASO
)
class Latin1Prober(CharSetProber):
    """Prober for Latin-1 (reported as windows-1252), scored by the
    frequency classes of consecutive character pairs."""

    def __init__(self):
        CharSetProber.__init__(self)
        self.reset()

    def reset(self):
        # Class of the previously seen character, and one counter per
        # frequency category (0..FREQ_CAT_NUM-1).
        self._mLastCharClass = OTH
        self._mFreqCounter = [0] * FREQ_CAT_NUM
        CharSetProber.reset(self)

    def get_charset_name(self):
        return "windows-1252"

    def feed(self, aBuf):
        aBuf = self.filter_with_english_letters(aBuf)
        for ch in aBuf:
            char_class = Latin1_CharToClass[wrap_ord(ch)]
            freq = Latin1ClassModel[self._mLastCharClass * CLASS_NUM + char_class]
            if freq == 0:
                # Illegal pair: this cannot be Latin-1.
                self._mState = eNotMe
                break
            self._mFreqCounter[freq] += 1
            self._mLastCharClass = char_class
        return self.get_state()

    def get_confidence(self):
        if self.get_state() == eNotMe:
            return 0.01
        total = sum(self._mFreqCounter)
        if total < 0.01:
            confidence = 0.0
        else:
            # "Very likely" pairs raise confidence; "very unlikely" pairs
            # are heavily penalized.
            confidence = (self._mFreqCounter[3] - self._mFreqCounter[1] * 20.0) / total
        if confidence < 0.0:
            confidence = 0.0
        # Lower the confidence of latin1 so that other more accurate
        # detectors can take priority.
        return confidence * 0.73
| cc0-1.0 |
dhermes/google-cloud-python | dataproc/google/cloud/dataproc_v1/proto/clusters_pb2_grpc.py | 2 | 7121 | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from google.cloud.dataproc_v1.proto import (
clusters_pb2 as google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2,
)
from google.longrunning import (
operations_pb2 as google_dot_longrunning_dot_operations__pb2,
)
class ClusterControllerStub(object):
    """The ClusterControllerService provides methods to manage clusters
    of Compute Engine instances.
    """

    # NOTE: this class lives in a gRPC-compiler-generated module; prefer
    # regenerating from clusters.proto over hand-editing.

    def __init__(self, channel):
        """Constructor.

        Args:
          channel: A grpc.Channel.
        """
        # One unary-unary callable per RPC; request serializers and response
        # deserializers come from the generated protobuf modules.
        self.CreateCluster = channel.unary_unary(
            "/google.cloud.dataproc.v1.ClusterController/CreateCluster",
            request_serializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2.CreateClusterRequest.SerializeToString,
            response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
        )
        self.UpdateCluster = channel.unary_unary(
            "/google.cloud.dataproc.v1.ClusterController/UpdateCluster",
            request_serializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2.UpdateClusterRequest.SerializeToString,
            response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
        )
        self.DeleteCluster = channel.unary_unary(
            "/google.cloud.dataproc.v1.ClusterController/DeleteCluster",
            request_serializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2.DeleteClusterRequest.SerializeToString,
            response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
        )
        self.GetCluster = channel.unary_unary(
            "/google.cloud.dataproc.v1.ClusterController/GetCluster",
            request_serializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2.GetClusterRequest.SerializeToString,
            response_deserializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2.Cluster.FromString,
        )
        self.ListClusters = channel.unary_unary(
            "/google.cloud.dataproc.v1.ClusterController/ListClusters",
            request_serializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2.ListClustersRequest.SerializeToString,
            response_deserializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2.ListClustersResponse.FromString,
        )
        self.DiagnoseCluster = channel.unary_unary(
            "/google.cloud.dataproc.v1.ClusterController/DiagnoseCluster",
            request_serializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2.DiagnoseClusterRequest.SerializeToString,
            response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
        )
class ClusterControllerServicer(object):
"""The ClusterControllerService provides methods to manage clusters
of Compute Engine instances.
"""
def CreateCluster(self, request, context):
"""Creates a cluster in a project.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def UpdateCluster(self, request, context):
"""Updates a cluster in a project.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def DeleteCluster(self, request, context):
"""Deletes a cluster in a project.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def GetCluster(self, request, context):
"""Gets the resource representation for a cluster in a project.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def ListClusters(self, request, context):
"""Lists all regions/{region}/clusters in a project.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def DiagnoseCluster(self, request, context):
"""Gets cluster diagnostic information.
After the operation completes, the Operation.response field
contains `DiagnoseClusterOutputLocation`.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def add_ClusterControllerServicer_to_server(servicer, server):
rpc_method_handlers = {
"CreateCluster": grpc.unary_unary_rpc_method_handler(
servicer.CreateCluster,
request_deserializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2.CreateClusterRequest.FromString,
response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
),
"UpdateCluster": grpc.unary_unary_rpc_method_handler(
servicer.UpdateCluster,
request_deserializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2.UpdateClusterRequest.FromString,
response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
),
"DeleteCluster": grpc.unary_unary_rpc_method_handler(
servicer.DeleteCluster,
request_deserializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2.DeleteClusterRequest.FromString,
response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
),
"GetCluster": grpc.unary_unary_rpc_method_handler(
servicer.GetCluster,
request_deserializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2.GetClusterRequest.FromString,
response_serializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2.Cluster.SerializeToString,
),
"ListClusters": grpc.unary_unary_rpc_method_handler(
servicer.ListClusters,
request_deserializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2.ListClustersRequest.FromString,
response_serializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2.ListClustersResponse.SerializeToString,
),
"DiagnoseCluster": grpc.unary_unary_rpc_method_handler(
servicer.DiagnoseCluster,
request_deserializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2.DiagnoseClusterRequest.FromString,
response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
"google.cloud.dataproc.v1.ClusterController", rpc_method_handlers
)
server.add_generic_rpc_handlers((generic_handler,))
| apache-2.0 |
andela-ooladayo/django | django/db/migrations/executor.py | 170 | 12029 | from __future__ import unicode_literals
from django.apps.registry import apps as global_apps
from django.db import migrations
from .loader import MigrationLoader
from .recorder import MigrationRecorder
from .state import ProjectState
class MigrationExecutor(object):
"""
End-to-end migration execution - loads migrations, and runs them
up or down to a specified set of targets.
"""
def __init__(self, connection, progress_callback=None):
self.connection = connection
self.loader = MigrationLoader(self.connection)
self.recorder = MigrationRecorder(self.connection)
self.progress_callback = progress_callback
def migration_plan(self, targets, clean_start=False):
"""
Given a set of targets, returns a list of (Migration instance, backwards?).
"""
plan = []
if clean_start:
applied = set()
else:
applied = set(self.loader.applied_migrations)
for target in targets:
# If the target is (app_label, None), that means unmigrate everything
if target[1] is None:
for root in self.loader.graph.root_nodes():
if root[0] == target[0]:
for migration in self.loader.graph.backwards_plan(root):
if migration in applied:
plan.append((self.loader.graph.nodes[migration], True))
applied.remove(migration)
# If the migration is already applied, do backwards mode,
# otherwise do forwards mode.
elif target in applied:
# Don't migrate backwards all the way to the target node (that
# may roll back dependencies in other apps that don't need to
# be rolled back); instead roll back through target's immediate
# child(ren) in the same app, and no further.
next_in_app = sorted(
n for n in
self.loader.graph.node_map[target].children
if n[0] == target[0]
)
for node in next_in_app:
for migration in self.loader.graph.backwards_plan(node):
if migration in applied:
plan.append((self.loader.graph.nodes[migration], True))
applied.remove(migration)
else:
for migration in self.loader.graph.forwards_plan(target):
if migration not in applied:
plan.append((self.loader.graph.nodes[migration], False))
applied.add(migration)
return plan
def migrate(self, targets, plan=None, fake=False, fake_initial=False):
"""
Migrates the database up to the given targets.
Django first needs to create all project states before a migration is
(un)applied and in a second step run all the database operations.
"""
if plan is None:
plan = self.migration_plan(targets)
migrations_to_run = {m[0] for m in plan}
# Create the forwards plan Django would follow on an empty database
full_plan = self.migration_plan(self.loader.graph.leaf_nodes(), clean_start=True)
# Holds all states right before a migration is applied
# if the migration is being run.
states = {}
state = ProjectState(real_apps=list(self.loader.unmigrated_apps))
if self.progress_callback:
self.progress_callback("render_start")
# Phase 1 -- Store all project states of migrations right before they
# are applied. The first migration that will be applied in phase 2 will
# trigger the rendering of the initial project state. From this time on
# models will be recursively reloaded as explained in
# `django.db.migrations.state.get_related_models_recursive()`.
for migration, _ in full_plan:
if not migrations_to_run:
# We remove every migration whose state was already computed
# from the set below (`migrations_to_run.remove(migration)`).
# If no states for migrations must be computed, we can exit
# this loop. Migrations that occur after the latest migration
# that is about to be applied would only trigger unneeded
# mutate_state() calls.
break
do_run = migration in migrations_to_run
if do_run:
if 'apps' not in state.__dict__:
state.apps # Render all real_apps -- performance critical
states[migration] = state.clone()
migrations_to_run.remove(migration)
# Only preserve the state if the migration is being run later
state = migration.mutate_state(state, preserve=do_run)
if self.progress_callback:
self.progress_callback("render_success")
# Phase 2 -- Run the migrations
for migration, backwards in plan:
if not backwards:
self.apply_migration(states[migration], migration, fake=fake, fake_initial=fake_initial)
else:
self.unapply_migration(states[migration], migration, fake=fake)
self.check_replacements()
def collect_sql(self, plan):
"""
Takes a migration plan and returns a list of collected SQL
statements that represent the best-efforts version of that plan.
"""
statements = []
state = None
for migration, backwards in plan:
with self.connection.schema_editor(collect_sql=True) as schema_editor:
if state is None:
state = self.loader.project_state((migration.app_label, migration.name), at_end=False)
if not backwards:
state = migration.apply(state, schema_editor, collect_sql=True)
else:
state = migration.unapply(state, schema_editor, collect_sql=True)
statements.extend(schema_editor.collected_sql)
return statements
def apply_migration(self, state, migration, fake=False, fake_initial=False):
"""
Runs a migration forwards.
"""
if self.progress_callback:
self.progress_callback("apply_start", migration, fake)
if not fake:
if fake_initial:
# Test to see if this is an already-applied initial migration
applied, state = self.detect_soft_applied(state, migration)
if applied:
fake = True
if not fake:
# Alright, do it normally
with self.connection.schema_editor() as schema_editor:
state = migration.apply(state, schema_editor)
# For replacement migrations, record individual statuses
if migration.replaces:
for app_label, name in migration.replaces:
self.recorder.record_applied(app_label, name)
else:
self.recorder.record_applied(migration.app_label, migration.name)
# Report progress
if self.progress_callback:
self.progress_callback("apply_success", migration, fake)
return state
def unapply_migration(self, state, migration, fake=False):
"""
Runs a migration backwards.
"""
if self.progress_callback:
self.progress_callback("unapply_start", migration, fake)
if not fake:
with self.connection.schema_editor() as schema_editor:
state = migration.unapply(state, schema_editor)
# For replacement migrations, record individual statuses
if migration.replaces:
for app_label, name in migration.replaces:
self.recorder.record_unapplied(app_label, name)
else:
self.recorder.record_unapplied(migration.app_label, migration.name)
# Report progress
if self.progress_callback:
self.progress_callback("unapply_success", migration, fake)
return state
def check_replacements(self):
"""
Mark replacement migrations applied if their replaced set all are.
We do this unconditionally on every migrate, rather than just when
migrations are applied or unapplied, so as to correctly handle the case
when a new squash migration is pushed to a deployment that already had
all its replaced migrations applied. In this case no new migration will
be applied, but we still want to correctly maintain the applied state
of the squash migration.
"""
applied = self.recorder.applied_migrations()
for key, migration in self.loader.replacements.items():
all_applied = all(m in applied for m in migration.replaces)
if all_applied and key not in applied:
self.recorder.record_applied(*key)
def detect_soft_applied(self, project_state, migration):
"""
Tests whether a migration has been implicitly applied - that the
tables or columns it would create exist. This is intended only for use
on initial migrations (as it only looks for CreateModel and AddField).
"""
if migration.initial is None:
# Bail if the migration isn't the first one in its app
if any(app == migration.app_label for app, name in migration.dependencies):
return False, project_state
elif migration.initial is False:
# Bail if it's NOT an initial migration
return False, project_state
if project_state is None:
after_state = self.loader.project_state((migration.app_label, migration.name), at_end=True)
else:
after_state = migration.mutate_state(project_state)
apps = after_state.apps
found_create_model_migration = False
found_add_field_migration = False
# Make sure all create model and add field operations are done
for operation in migration.operations:
if isinstance(operation, migrations.CreateModel):
model = apps.get_model(migration.app_label, operation.name)
if model._meta.swapped:
# We have to fetch the model to test with from the
# main app cache, as it's not a direct dependency.
model = global_apps.get_model(model._meta.swapped)
if model._meta.proxy or not model._meta.managed:
continue
if model._meta.db_table not in self.connection.introspection.table_names(self.connection.cursor()):
return False, project_state
found_create_model_migration = True
elif isinstance(operation, migrations.AddField):
model = apps.get_model(migration.app_label, operation.model_name)
if model._meta.swapped:
# We have to fetch the model to test with from the
# main app cache, as it's not a direct dependency.
model = global_apps.get_model(model._meta.swapped)
if model._meta.proxy or not model._meta.managed:
continue
table = model._meta.db_table
db_field = model._meta.get_field(operation.name).column
fields = self.connection.introspection.get_table_description(self.connection.cursor(), table)
if db_field not in (f.name for f in fields):
return False, project_state
found_add_field_migration = True
# If we get this far and we found at least one CreateModel or AddField migration,
# the migration is considered implicitly applied.
return (found_create_model_migration or found_add_field_migration), after_state
| bsd-3-clause |
0x0mar/androguard | androdd.py | 23 | 9411 | #!/usr/bin/env python
# This file is part of Androguard.
#
# Copyright (C) 2012/2013, Anthony Desnos <desnos at t0t0.fr>
# All rights reserved.
#
# Androguard is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Androguard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Androguard. If not, see <http://www.gnu.org/licenses/>.
import shutil
import sys
import os
import re
from optparse import OptionParser
from androguard.core.androgen import Androguard
from androguard.core import androconf
from androguard.core.analysis import analysis
from androguard.core.bytecodes import dvm
from androguard.core.bytecode import method2dot, method2format
from androguard.decompiler import decompiler
option_0 = { 'name' : ('-i', '--input'), 'help' : 'file : use this filename', 'nargs' : 1 }
option_1 = { 'name' : ('-o', '--output'), 'help' : 'base directory to output all files', 'nargs' : 1 }
option_2 = { 'name' : ('-d', '--decompiler'), 'help' : 'choose a decompiler', 'nargs' : 1 }
option_3 = { 'name' : ('-j', '--jar'), 'help' : 'output jar file', 'action' : 'count' }
option_4 = { 'name' : ('-f', '--format'), 'help' : 'write the method in specific format (png, ...)', 'nargs' : 1 }
option_5 = { 'name' : ('-l', '--limit'), 'help' : 'limit analysis to specific methods/classes by using a regexp', 'nargs' : 1}
option_6 = { 'name' : ('-v', '--version'), 'help' : 'version of the API', 'action' : 'count' }
options = [option_0, option_1, option_2, option_3, option_4, option_5, option_6]
def valid_class_name(class_name):
if class_name[-1] == ";":
return class_name[1:-1]
return class_name
def create_directory(class_name, output):
output_name = output
if output_name[-1] != "/":
output_name = output_name + "/"
pathdir = output_name + class_name
try:
if not os.path.exists(pathdir):
os.makedirs(pathdir)
except OSError:
# FIXME
pass
def export_apps_to_format(filename, a, output, methods_filter=None, jar=None, decompiler_type=None, format=None):
print "Dump information %s in %s" % (filename, output)
if not os.path.exists(output):
print "Create directory %s" % output
os.makedirs(output)
else:
print "Clean directory %s" % output
androconf.rrmdir(output)
os.makedirs(output)
methods_filter_expr = None
if methods_filter:
methods_filter_expr = re.compile(methods_filter)
output_name = output
if output_name[-1] != "/":
output_name = output_name + "/"
dump_classes = []
for vm in a.get_vms():
print "Analysis ...",
sys.stdout.flush()
vmx = analysis.VMAnalysis(vm)
print "End"
print "Decompilation ...",
sys.stdout.flush()
if not decompiler_type:
vm.set_decompiler(decompiler.DecompilerDAD(vm, vmx))
elif decompiler_type == "dex2jad":
vm.set_decompiler(decompiler.DecompilerDex2Jad(vm,
androconf.CONF["PATH_DEX2JAR"],
androconf.CONF["BIN_DEX2JAR"],
androconf.CONF["PATH_JAD"],
androconf.CONF["BIN_JAD"],
androconf.CONF["TMP_DIRECTORY"]))
elif decompiler_type == "dex2winejad":
vm.set_decompiler(decompiler.DecompilerDex2WineJad(vm,
androconf.CONF["PATH_DEX2JAR"],
androconf.CONF["BIN_DEX2JAR"],
androconf.CONF["PATH_JAD"],
androconf.CONF["BIN_WINEJAD"],
androconf.CONF["TMP_DIRECTORY"]))
elif decompiler_type == "ded":
vm.set_decompiler(decompiler.DecompilerDed(vm,
androconf.CONF["PATH_DED"],
androconf.CONF["BIN_DED"],
androconf.CONF["TMP_DIRECTORY"]))
elif decompiler_type == "dex2fernflower":
vm.set_decompiler(decompiler.DecompilerDex2Fernflower(vm,
androconf.CONF["PATH_DEX2JAR"],
androconf.CONF["BIN_DEX2JAR"],
androconf.CONF["PATH_FERNFLOWER"],
androconf.CONF["BIN_FERNFLOWER"],
androconf.CONF["OPTIONS_FERNFLOWER"],
androconf.CONF["TMP_DIRECTORY"]))
else:
raise("invalid decompiler !")
print "End"
if options.jar:
print "jar ...",
filenamejar = decompiler.Dex2Jar(vm,
androconf.CONF["PATH_DEX2JAR"],
androconf.CONF["BIN_DEX2JAR"],
androconf.CONF["TMP_DIRECTORY"]).get_jar()
shutil.move(filenamejar, output + "classes.jar")
print "End"
for method in vm.get_methods():
if methods_filter_expr:
msig = "%s%s%s" % (method.get_class_name(),
method.get_name(),
method.get_descriptor())
if not methods_filter_expr.search(msig):
continue
filename_class = valid_class_name(method.get_class_name())
create_directory(filename_class, output)
print "Dump %s %s %s ..." % (method.get_class_name(),
method.get_name(),
method.get_descriptor()),
filename_class = output_name + filename_class
if filename_class[-1] != "/":
filename_class = filename_class + "/"
descriptor = method.get_descriptor()
descriptor = descriptor.replace(";", "")
descriptor = descriptor.replace(" ", "")
descriptor = descriptor.replace("(", "-")
descriptor = descriptor.replace(")", "-")
descriptor = descriptor.replace("/", "_")
filename = filename_class + method.get_name() + descriptor
if len(method.get_name() + descriptor) > 250:
all_identical_name_methods = vm.get_methods_descriptor(method.get_class_name(), method.get_name())
pos = 0
for i in all_identical_name_methods:
if i.get_descriptor() == method.get_descriptor():
break
pos += 1
filename = filename_class + method.get_name() + "_%d" % pos
buff = method2dot(vmx.get_method(method))
if format:
print "%s ..." % format,
method2format(filename + "." + format, format, None, buff)
if method.get_class_name() not in dump_classes:
print "source codes ...",
current_class = vm.get_class(method.get_class_name())
current_filename_class = valid_class_name(current_class.get_name())
create_directory(filename_class, output)
current_filename_class = output_name + current_filename_class + ".java"
with open(current_filename_class, "w") as fd:
fd.write(current_class.get_source())
dump_classes.append(method.get_class_name())
print "bytecodes ...",
bytecode_buff = dvm.get_bytecodes_method(vm, vmx, method)
with open(filename + ".ag", "w") as fd:
fd.write(bytecode_buff)
print
def main(options, arguments):
if options.input != None and options.output != None:
a = Androguard([options.input])
export_apps_to_format(options.input, a, options.output, options.limit, options.jar, options.decompiler, options.format)
elif options.version != None:
print "Androdd version %s" % androconf.ANDROGUARD_VERSION
else:
print "Please, specify an input file and an output directory"
if __name__ == "__main__":
parser = OptionParser()
for option in options:
param = option['name']
del option['name']
parser.add_option(*param, **option)
options, arguments = parser.parse_args()
sys.argv[:] = arguments
main(options, arguments)
| apache-2.0 |
smn/heatherrd | setup.py | 2 | 1220 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('requirements.txt') as req_file:
requirements = req_file.read().split('\n')
with open('requirements-dev.txt') as req_file:
requirements_dev = req_file.read().split('\n')
with open('VERSION') as fp:
version = fp.read().strip()
setup(
name='heatherrd',
version=version,
description="A server that relays RTM messages via HTTP.",
long_description=readme,
author="Praekelt Foundation",
author_email='dev@praekeltfoundation.org',
url='https://github.com/smn/heatherrd',
packages=[
'heatherrd',
],
package_dir={'heatherrd':
'heatherrd'},
include_package_data=True,
install_requires=requirements,
license="BSD",
zip_safe=False,
keywords='heatherrd',
classifiers=[
'Intended Audience :: Developers',
'Natural Language :: English',
'Programming Language :: Python :: 2.7',
],
entry_points={
'console_scripts': ['heatherrd = heatherrd.server:run'],
},
)
| bsd-3-clause |
jcpeterson/avoidr | player.py | 1 | 3915 | import pygame, os
class Player:
# this takes in screen width/height to calculate the player's starting position (center of screen)
# it also takes in the background color to compute its own color (inverted background color)
def __init__(self, screenWidth, screenHeight):
self.posRangeX = screenWidth
self.posRangeY = screenHeight
self.posX = screenWidth/2
self.posY = screenHeight/2
self.speed = 10
self.sizeMax = 80
self.sizeMin = 42#40
# might as well start out at the minimum size
self.size = self.sizeMin
self.state = 'growing'
# make the player color white; it doesn't really matter how it starts
self.color = (255,255,255)
self.isJumping = False
self.goingUp = True
self.killed = False
self.exploding = False
self.rect = pygame.Rect((self.posX,self.posY),(self.size,self.size))
# load the jump sound
self.jumpSound = pygame.mixer.Sound(os.path.join('audio','jump.wav'))
def updateSize(self):
# # player size changes
# if self.state == 'growing' and self.size >= self.sizeMin:
# self.size += 1
# if self.size >= self.sizeMax:
# self.state = 'shrinking'
# if self.state == 'shrinking' and self.size <= self.sizeMax:
# self.size -= 1
# if self.size <= self.sizeMin:
# self.state = 'growing'
if self.isJumping:
self.speed = 3
# player size changes when jumpin
if self.goingUp:
self.size += 1
if self.size == self.sizeMax:
self.goingUp = False
if not self.goingUp:
self.size -= 1
if self.size == self.sizeMin:
self.isJumping = False
self.goingUp = True
self.speed = 10
self.rect.width = self.size
self.rect.height = self.size
# sounds/animations for this have not been implemented yet
if self.killed:
self.exploding = True
def updatePos(self, keys):
# left border collision detection
if (self.posX != 0 + self.sizeMax) and (self.posX > 0 + self.sizeMax + 5):
# player movement input
if keys[pygame.K_LEFT]:
self.posX = self.posX - self.speed
# right border collision detection
if (self.posX != self.posRangeX - self.sizeMax) and (self.posX < self.posRangeX - (self.sizeMax + 5)):
# player movement input
if keys[pygame.K_RIGHT]:
self.posX = self.posX + self.speed
# vertical border collision detection
if (self.posY != 0 + self.sizeMax) and (self.posY > 0 + self.sizeMax + 5):
# player movement input
if keys[pygame.K_UP]:
self.posY = self.posY - self.speed
# vertical border collision detection
if (self.posY != self.posRangeY - self.sizeMax) and (self.posY < self.posRangeY - (self.sizeMax + 5)):
# player movement input
if keys[pygame.K_DOWN]:
self.posY = self.posY + self.speed
# MOVE THIS OVER TO THE SIZE FUNCTION SOON!!!
if keys[pygame.K_SPACE]:
if not self.isJumping:
# play the jump sound
self.jumpSound.play()
self.isJumping = True
self.rect.x = self.posX
self.rect.y = self.posY
# use something like this for input handling later
# for e in pygame.event.get():
# if e.type == QUIT: raise SystemExit, "QUIT"
# if e.type == KEYDOWN and e.key == K_ESCAPE:
# raise SystemExit, "ESCAPE"
# pressed = pygame.key.get_pressed()
# up, left, right = [pressed[key] for key in (K_UP, K_LEFT, K_RIGHT)]
def updateColor(self,gameBgColor):
# update the player color with the inverted current background color
self.color = (255-gameBgColor[0],255-gameBgColor[1],255-gameBgColor[2]) | gpl-3.0 |
ruuk/script.module.youtube.dl | lib/youtube_dl/extractor/wdr.py | 23 | 12523 | # coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import (
compat_str,
compat_urlparse,
)
from ..utils import (
determine_ext,
ExtractorError,
js_to_json,
strip_jsonp,
try_get,
unified_strdate,
update_url_query,
urlhandle_detect_ext,
)
class WDRIE(InfoExtractor):
_VALID_URL = r'https?://deviceids-medp\.wdr\.de/ondemand/\d+/(?P<id>\d+)\.js'
_GEO_COUNTRIES = ['DE']
_TEST = {
'url': 'http://deviceids-medp.wdr.de/ondemand/155/1557833.js',
'info_dict': {
'id': 'mdb-1557833',
'ext': 'mp4',
'title': 'Biathlon-Staffel verpasst Podest bei Olympia-Generalprobe',
'upload_date': '20180112',
},
}
def _real_extract(self, url):
video_id = self._match_id(url)
metadata = self._download_json(
url, video_id, transform_source=strip_jsonp)
is_live = metadata.get('mediaType') == 'live'
tracker_data = metadata['trackerData']
media_resource = metadata['mediaResource']
formats = []
# check if the metadata contains a direct URL to a file
for kind, media_resource in media_resource.items():
if kind not in ('dflt', 'alt'):
continue
for tag_name, medium_url in media_resource.items():
if tag_name not in ('videoURL', 'audioURL'):
continue
ext = determine_ext(medium_url)
if ext == 'm3u8':
formats.extend(self._extract_m3u8_formats(
medium_url, video_id, 'mp4', 'm3u8_native',
m3u8_id='hls'))
elif ext == 'f4m':
manifest_url = update_url_query(
medium_url, {'hdcore': '3.2.0', 'plugin': 'aasp-3.2.0.77.18'})
formats.extend(self._extract_f4m_formats(
manifest_url, video_id, f4m_id='hds', fatal=False))
elif ext == 'smil':
formats.extend(self._extract_smil_formats(
medium_url, 'stream', fatal=False))
else:
a_format = {
'url': medium_url
}
if ext == 'unknown_video':
urlh = self._request_webpage(
medium_url, video_id, note='Determining extension')
ext = urlhandle_detect_ext(urlh)
a_format['ext'] = ext
formats.append(a_format)
self._sort_formats(formats)
subtitles = {}
caption_url = media_resource.get('captionURL')
if caption_url:
subtitles['de'] = [{
'url': caption_url,
'ext': 'ttml',
}]
title = tracker_data['trackerClipTitle']
return {
'id': tracker_data.get('trackerClipId', video_id),
'title': self._live_title(title) if is_live else title,
'alt_title': tracker_data.get('trackerClipSubcategory'),
'formats': formats,
'subtitles': subtitles,
'upload_date': unified_strdate(tracker_data.get('trackerClipAirTime')),
'is_live': is_live,
}
class WDRPageIE(InfoExtractor):
_CURRENT_MAUS_URL = r'https?://(?:www\.)wdrmaus.de/(?:[^/]+/){1,2}[^/?#]+\.php5'
_PAGE_REGEX = r'/(?:mediathek/)?(?:[^/]+/)*(?P<display_id>[^/]+)\.html'
_VALID_URL = r'https?://(?:www\d?\.)?(?:wdr\d?|sportschau)\.de' + _PAGE_REGEX + '|' + _CURRENT_MAUS_URL
_TESTS = [
{
'url': 'http://www1.wdr.de/mediathek/video/sendungen/doku-am-freitag/video-geheimnis-aachener-dom-100.html',
# HDS download, MD5 is unstable
'info_dict': {
'id': 'mdb-1058683',
'ext': 'flv',
'display_id': 'doku-am-freitag/video-geheimnis-aachener-dom-100',
'title': 'Geheimnis Aachener Dom',
'alt_title': 'Doku am Freitag',
'upload_date': '20160304',
'description': 'md5:87be8ff14d8dfd7a7ee46f0299b52318',
'is_live': False,
'subtitles': {'de': [{
'url': 'http://ondemand-ww.wdr.de/medp/fsk0/105/1058683/1058683_12220974.xml',
'ext': 'ttml',
}]},
},
'skip': 'HTTP Error 404: Not Found',
},
{
'url': 'http://www1.wdr.de/mediathek/audio/wdr3/wdr3-gespraech-am-samstag/audio-schriftstellerin-juli-zeh-100.html',
'md5': 'f4c1f96d01cf285240f53ea4309663d8',
'info_dict': {
'id': 'mdb-1072000',
'ext': 'mp3',
'display_id': 'wdr3-gespraech-am-samstag/audio-schriftstellerin-juli-zeh-100',
'title': 'Schriftstellerin Juli Zeh',
'alt_title': 'WDR 3 Gespräch am Samstag',
'upload_date': '20160312',
'description': 'md5:e127d320bc2b1f149be697ce044a3dd7',
'is_live': False,
'subtitles': {}
},
'skip': 'HTTP Error 404: Not Found',
},
{
'url': 'http://www1.wdr.de/mediathek/video/live/index.html',
'info_dict': {
'id': 'mdb-1406149',
'ext': 'mp4',
'title': r're:^WDR Fernsehen im Livestream \(nur in Deutschland erreichbar\) [0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}$',
'alt_title': 'WDR Fernsehen Live',
'upload_date': '20150101',
'is_live': True,
},
'params': {
'skip_download': True, # m3u8 download
},
},
{
'url': 'http://www1.wdr.de/mediathek/video/sendungen/aktuelle-stunde/aktuelle-stunde-120.html',
'playlist_mincount': 7,
'info_dict': {
'id': 'aktuelle-stunde-120',
},
},
{
'url': 'http://www.wdrmaus.de/aktuelle-sendung/index.php5',
'info_dict': {
'id': 'mdb-1552552',
'ext': 'mp4',
'upload_date': 're:^[0-9]{8}$',
'title': 're:^Die Sendung mit der Maus vom [0-9.]{10}$',
},
'skip': 'The id changes from week to week because of the new episode'
},
{
'url': 'http://www.wdrmaus.de/filme/sachgeschichten/achterbahn.php5',
'md5': '803138901f6368ee497b4d195bb164f2',
'info_dict': {
'id': 'mdb-186083',
'ext': 'mp4',
'upload_date': '20130919',
'title': 'Sachgeschichte - Achterbahn ',
},
},
{
'url': 'http://www1.wdr.de/radio/player/radioplayer116~_layout-popupVersion.html',
# Live stream, MD5 unstable
'info_dict': {
'id': 'mdb-869971',
'ext': 'mp4',
'title': r're:^COSMO Livestream [0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}$',
'upload_date': '20160101',
},
'params': {
'skip_download': True, # m3u8 download
}
},
{
'url': 'http://www.sportschau.de/handballem2018/handball-nationalmannschaft-em-stolperstein-vorrunde-100.html',
'info_dict': {
'id': 'mdb-1556012',
'ext': 'mp4',
'title': 'DHB-Vizepräsident Bob Hanning - "Die Weltspitze ist extrem breit"',
'upload_date': '20180111',
},
'params': {
'skip_download': True,
},
},
{
'url': 'http://www.sportschau.de/handballem2018/audio-vorschau---die-handball-em-startet-mit-grossem-favoritenfeld-100.html',
'only_matching': True,
}
]
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
display_id = mobj.group('display_id')
webpage = self._download_webpage(url, display_id)
entries = []
# Article with several videos
# for wdr.de the data-extension is in a tag with the class "mediaLink"
# for wdr.de radio players, in a tag with the class "wdrrPlayerPlayBtn"
# for wdrmaus, in a tag with the class "videoButton" (previously a link
# to the page in a multiline "videoLink"-tag)
for mobj in re.finditer(
r'''(?sx)class=
(?:
(["\'])(?:mediaLink|wdrrPlayerPlayBtn|videoButton)\b.*?\1[^>]+|
(["\'])videoLink\b.*?\2[\s]*>\n[^\n]*
)data-extension=(["\'])(?P<data>(?:(?!\3).)+)\3
''', webpage):
media_link_obj = self._parse_json(
mobj.group('data'), display_id, transform_source=js_to_json,
fatal=False)
if not media_link_obj:
continue
jsonp_url = try_get(
media_link_obj, lambda x: x['mediaObj']['url'], compat_str)
if jsonp_url:
entries.append(self.url_result(jsonp_url, ie=WDRIE.ie_key()))
# Playlist (e.g. https://www1.wdr.de/mediathek/video/sendungen/aktuelle-stunde/aktuelle-stunde-120.html)
if not entries:
entries = [
self.url_result(
compat_urlparse.urljoin(url, mobj.group('href')),
ie=WDRPageIE.ie_key())
for mobj in re.finditer(
r'<a[^>]+\bhref=(["\'])(?P<href>(?:(?!\1).)+)\1[^>]+\bdata-extension=',
webpage) if re.match(self._PAGE_REGEX, mobj.group('href'))
]
return self.playlist_result(entries, playlist_id=display_id)
class WDRElefantIE(InfoExtractor):
    """Extractor for the WDR "Elefantenseite" (wdrmaus.de), whose videos are
    addressed by a URL fragment that indexes a site-wide table of contents."""
    _VALID_URL = r'https?://(?:www\.)wdrmaus\.de/elefantenseite/#(?P<id>.+)'
    _TEST = {
        'url': 'http://www.wdrmaus.de/elefantenseite/#folge_ostern_2015',
        'info_dict': {
            'title': 'Folge Oster-Spezial 2015',
            'id': 'mdb-1088195',
            'ext': 'mp4',
            'age_limit': None,
            'upload_date': '20150406'
        },
        'params': {
            'skip_download': True,
        },
    }
    def _real_extract(self, url):
        display_id = self._match_id(url)
        # The table of contents lives at a fixed address, so fetch it
        # directly. The website itself fetches configurationJS.php5, which
        # links to tableOfContentsJS.php5.
        toc = self._download_json(
            'https://www.wdrmaus.de/elefantenseite/data/tableOfContentsJS.php5',
            display_id)
        if display_id not in toc:
            raise ExtractorError(
                'No entry in site\'s table of contents for this URL. '
                'Is the fragment part of the URL (after the #) correct?',
                expected=True)
        # The ToC maps the fragment to per-episode XML metadata.
        metadata = self._download_xml(
            'https://www.wdrmaus.de/elefantenseite/' + toc[display_id]['xmlPath'],
            display_id)
        zmdb_url = metadata.find('./movie/zmdb_url')
        if zmdb_url is None:
            raise ExtractorError(
                '%s is not a video' % display_id, expected=True)
        return self.url_result(zmdb_url.text, ie=WDRIE.ie_key())
class WDRMobileIE(InfoExtractor):
    """Extractor for WDR mobile on-demand URLs; everything needed (id, title,
    age limit) is encoded in the URL itself, so no page download is required."""
    _VALID_URL = r'''(?x)
        https?://mobile-ondemand\.wdr\.de/
        .*?/fsk(?P<age_limit>[0-9]+)
        /[0-9]+/[0-9]+/
        (?P<id>[0-9]+)_(?P<title>[0-9]+)'''
    IE_NAME = 'wdr:mobile'
    _TEST = {
        'url': 'http://mobile-ondemand.wdr.de/CMS2010/mdb/ondemand/weltweit/fsk0/42/421735/421735_4283021.mp4',
        'info_dict': {
            'title': '4283021',
            'id': '421735',
            'ext': 'mp4',
            'age_limit': 0,
        },
        'skip': 'Problems with loading data.'
    }
    def _real_extract(self, url):
        match = re.match(self._VALID_URL, url)
        info = {field: match.group(field) for field in ('id', 'title')}
        info.update({
            'age_limit': int(match.group('age_limit')),
            'url': url,
            # The server only serves the file to mobile user agents.
            'http_headers': {
                'User-Agent': 'mobile',
            },
        })
        return info
| gpl-2.0 |
alexallah/django | tests/admin_scripts/tests.py | 18 | 93587 | """
A series of tests to establish that the command-line management tools work as
advertised - especially with regards to the handling of the
DJANGO_SETTINGS_MODULE and default settings.py files.
"""
import codecs
import os
import re
import shutil
import socket
import subprocess
import sys
import tempfile
import unittest
from io import StringIO
from unittest import mock
import django
from django import conf, get_version
from django.conf import settings
from django.core.management import (
BaseCommand, CommandError, call_command, color,
)
from django.db import ConnectionHandler
from django.db.migrations.exceptions import MigrationSchemaMissing
from django.db.migrations.recorder import MigrationRecorder
from django.test import (
LiveServerTestCase, SimpleTestCase, TestCase, override_settings,
)
# Directory holding custom project/app templates used by template tests.
custom_templates_dir = os.path.join(os.path.dirname(__file__), 'custom_templates')
# True on Python 3.6+; used to gate version-specific assertions.
PY36 = sys.version_info >= (3, 6)
# Message emitted by a successful `check` management command.
SYSTEM_CHECK_MSG = 'System check identified no issues'
class AdminScriptTestCase(unittest.TestCase):
    """
    Base class for the admin-script tests.
    Creates a throwaway, importable ``test_project`` package in a temporary
    directory, writes settings modules into it on demand, and runs
    django-admin / manage.py against it in subprocesses.
    """
    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        # Per-test-class project dir, e.g. <tmp>/<ClassName>/test_project.
        cls.test_dir = os.path.realpath(os.path.join(
            tempfile.gettempdir(),
            cls.__name__,
            'test_project',
        ))
        if not os.path.exists(cls.test_dir):
            os.makedirs(cls.test_dir)
        # An empty __init__.py makes test_project importable as a package.
        with open(os.path.join(cls.test_dir, '__init__.py'), 'w'):
            pass
    @classmethod
    def tearDownClass(cls):
        shutil.rmtree(cls.test_dir)
        super().tearDownClass()
    def write_settings(self, filename, apps=None, is_dir=False, sdict=None, extra=None):
        """
        Write a settings module named ``filename`` into the test project.
        If ``is_dir`` is True, create it as a package (a directory whose
        __init__.py holds the settings). ``apps`` overrides INSTALLED_APPS,
        ``sdict`` adds extra ``KEY = value`` lines, and ``extra`` is raw text
        written near the top of the file.
        """
        if is_dir:
            settings_dir = os.path.join(self.test_dir, filename)
            os.mkdir(settings_dir)
            settings_file_path = os.path.join(settings_dir, '__init__.py')
        else:
            settings_file_path = os.path.join(self.test_dir, filename)
        with open(settings_file_path, 'w') as settings_file:
            settings_file.write('# Settings file automatically generated by admin_scripts test case\n')
            if extra:
                settings_file.write("%s\n" % extra)
            # Copy a few settings from the running test configuration so the
            # subprocess uses the same database, URLconf and secret key.
            exports = [
                'DATABASES',
                'ROOT_URLCONF',
                'SECRET_KEY',
            ]
            for s in exports:
                if hasattr(settings, s):
                    o = getattr(settings, s)
                    if not isinstance(o, (dict, tuple, list)):
                        o = "'%s'" % o
                    settings_file.write("%s = %s\n" % (s, o))
            if apps is None:
                apps = ['django.contrib.auth', 'django.contrib.contenttypes', 'admin_scripts']
            settings_file.write("INSTALLED_APPS = %s\n" % apps)
            if sdict:
                for k, v in sdict.items():
                    settings_file.write("%s = %s\n" % (k, v))
    def remove_settings(self, filename, is_dir=False):
        """Remove a settings module created by write_settings(), including
        any compiled bytecode that would shadow the removed source file."""
        full_name = os.path.join(self.test_dir, filename)
        if is_dir:
            shutil.rmtree(full_name)
        else:
            os.remove(full_name)
        # Also try to remove the compiled file; if it exists, it could
        # mess up later tests that depend upon the .py file not existing
        try:
            if sys.platform.startswith('java'):
                # Jython produces module$py.class files
                os.remove(re.sub(r'\.py$', '$py.class', full_name))
        except OSError:
            pass
        # Also remove a __pycache__ directory, if it exists
        cache_name = os.path.join(self.test_dir, '__pycache__')
        if os.path.isdir(cache_name):
            shutil.rmtree(cache_name)
    def _ext_backend_paths(self):
        """
        Returns the paths for any external backend packages.
        """
        paths = []
        first_package_re = re.compile(r'(^[^\.]+)\.')
        for backend in settings.DATABASES.values():
            result = first_package_re.findall(backend['ENGINE'])
            if result and result != ['django']:
                backend_pkg = __import__(result[0])
                backend_dir = os.path.dirname(backend_pkg.__file__)
                paths.append(os.path.dirname(backend_dir))
        return paths
    def run_test(self, script, args, settings_file=None, apps=None):
        """
        Run ``script`` with ``args`` in a subprocess from inside the test
        project directory; return (stdout, stderr) as text.
        ``settings_file`` sets DJANGO_SETTINGS_MODULE in the subprocess
        environment; if omitted, the variable is removed entirely.
        ``apps`` is accepted but unused here — presumably kept for signature
        compatibility with callers; TODO confirm before removing.
        """
        base_dir = os.path.dirname(self.test_dir)
        # The base dir for Django's tests is one level up.
        tests_dir = os.path.dirname(os.path.dirname(__file__))
        # The base dir for Django is one level above the test dir. We don't use
        # `import django` to figure that out, so we don't pick up a Django
        # from site-packages or similar.
        django_dir = os.path.dirname(tests_dir)
        ext_backend_base_dirs = self._ext_backend_paths()
        # Define a temporary environment for the subprocess
        test_environ = os.environ.copy()
        if sys.platform.startswith('java'):
            python_path_var_name = 'JYTHONPATH'
        else:
            python_path_var_name = 'PYTHONPATH'
        old_cwd = os.getcwd()
        # Set the test environment
        if settings_file:
            test_environ['DJANGO_SETTINGS_MODULE'] = settings_file
        elif 'DJANGO_SETTINGS_MODULE' in test_environ:
            del test_environ['DJANGO_SETTINGS_MODULE']
        python_path = [base_dir, django_dir, tests_dir]
        python_path.extend(ext_backend_base_dirs)
        test_environ[python_path_var_name] = os.pathsep.join(python_path)
        test_environ['PYTHONWARNINGS'] = ''
        # Move to the test directory and run
        os.chdir(self.test_dir)
        out, err = subprocess.Popen(
            [sys.executable, script] + args,
            stdout=subprocess.PIPE, stderr=subprocess.PIPE,
            env=test_environ, universal_newlines=True,
        ).communicate()
        # Move back to the old working directory
        os.chdir(old_cwd)
        return out, err
    def run_django_admin(self, args, settings_file=None):
        """Run the django-admin.py script shipped in django/bin with ``args``."""
        script_dir = os.path.abspath(os.path.join(os.path.dirname(django.__file__), 'bin'))
        return self.run_test(os.path.join(script_dir, 'django-admin.py'), args, settings_file)
    def run_manage(self, args, settings_file=None):
        """
        Render the project-template manage.py-tpl into the test project
        (substituting the project name) and run it with ``args``; the
        rendered file is removed again after the test.
        """
        def safe_remove(path):
            try:
                os.remove(path)
            except OSError:
                pass
        conf_dir = os.path.dirname(conf.__file__)
        template_manage_py = os.path.join(conf_dir, 'project_template', 'manage.py-tpl')
        test_manage_py = os.path.join(self.test_dir, 'manage.py')
        shutil.copyfile(template_manage_py, test_manage_py)
        with open(test_manage_py, 'r') as fp:
            manage_py_contents = fp.read()
        manage_py_contents = manage_py_contents.replace(
            "{{ project_name }}", "test_project")
        with open(test_manage_py, 'w') as fp:
            fp.write(manage_py_contents)
        self.addCleanup(safe_remove, test_manage_py)
        return self.run_test('./manage.py', args, settings_file)
    def assertNoOutput(self, stream):
        "Utility assertion: assert that the given stream is empty"
        self.assertEqual(len(stream), 0, "Stream should be empty: actually contains '%s'" % stream)
    def assertOutput(self, stream, msg, regex=False):
        "Utility assertion: assert that the given message exists in the output"
        if regex:
            self.assertIsNotNone(
                re.search(msg, stream),
                "'%s' does not match actual output text '%s'" % (msg, stream)
            )
        else:
            self.assertIn(msg, stream, "'%s' does not match actual output text '%s'" % (msg, stream))
    def assertNotInOutput(self, stream, msg):
        "Utility assertion: assert that the given message doesn't exist in the output"
        self.assertNotIn(msg, stream, "'%s' matches actual output text '%s'" % (msg, stream))
##########################################################################
# DJANGO ADMIN TESTS
# This first series of test classes checks the environment processing
# of the django-admin.py script
##########################################################################
class DjangoAdminNoSettings(AdminScriptTestCase):
    "A series of tests for django-admin.py when there is no settings.py file."
    def test_builtin_command(self):
        "no settings: django-admin builtin commands fail with an error when no settings provided"
        out, err = self.run_django_admin(['check', 'admin_scripts'])
        self.assertNoOutput(out)
        self.assertOutput(err, 'settings are not configured')
    def test_builtin_with_bad_settings(self):
        "no settings: django-admin builtin commands fail if settings file (from argument) doesn't exist"
        out, err = self.run_django_admin(['check', '--settings=bad_settings', 'admin_scripts'])
        self.assertNoOutput(out)
        self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
    def test_builtin_with_bad_environment(self):
        "no settings: django-admin builtin commands fail if settings file (from environment) doesn't exist"
        out, err = self.run_django_admin(['check', 'admin_scripts'], 'bad_settings')
        self.assertNoOutput(out)
        self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
class DjangoAdminDefaultSettings(AdminScriptTestCase):
    """A series of tests for django-admin.py when using a settings.py file that
    contains the test application.
    """
    def setUp(self):
        self.write_settings('settings.py')
    def tearDown(self):
        self.remove_settings('settings.py')
    def test_builtin_command(self):
        "default: django-admin builtin commands fail with an error when no settings provided"
        out, err = self.run_django_admin(['check', 'admin_scripts'])
        self.assertNoOutput(out)
        self.assertOutput(err, 'settings are not configured')
    def test_builtin_with_settings(self):
        "default: django-admin builtin commands succeed if settings are provided as argument"
        out, err = self.run_django_admin(['check', '--settings=test_project.settings', 'admin_scripts'])
        self.assertNoOutput(err)
        self.assertOutput(out, SYSTEM_CHECK_MSG)
    def test_builtin_with_environment(self):
        "default: django-admin builtin commands succeed if settings are provided in the environment"
        out, err = self.run_django_admin(['check', 'admin_scripts'], 'test_project.settings')
        self.assertNoOutput(err)
        self.assertOutput(out, SYSTEM_CHECK_MSG)
    def test_builtin_with_bad_settings(self):
        "default: django-admin builtin commands fail if settings file (from argument) doesn't exist"
        out, err = self.run_django_admin(['check', '--settings=bad_settings', 'admin_scripts'])
        self.assertNoOutput(out)
        self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
    def test_builtin_with_bad_environment(self):
        "default: django-admin builtin commands fail if settings file (from environment) doesn't exist"
        out, err = self.run_django_admin(['check', 'admin_scripts'], 'bad_settings')
        self.assertNoOutput(out)
        self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
    def test_custom_command(self):
        "default: django-admin can't execute user commands if it isn't provided settings"
        out, err = self.run_django_admin(['noargs_command'])
        self.assertNoOutput(out)
        self.assertOutput(err, "No Django settings specified")
        self.assertOutput(err, "Unknown command: 'noargs_command'")
    def test_custom_command_with_settings(self):
        "default: django-admin can execute user commands if settings are provided as argument"
        out, err = self.run_django_admin(['noargs_command', '--settings=test_project.settings'])
        self.assertNoOutput(err)
        self.assertOutput(out, "EXECUTE: noargs_command")
    def test_custom_command_with_environment(self):
        "default: django-admin can execute user commands if settings are provided in environment"
        out, err = self.run_django_admin(['noargs_command'], 'test_project.settings')
        self.assertNoOutput(err)
        self.assertOutput(out, "EXECUTE: noargs_command")
class DjangoAdminFullPathDefaultSettings(AdminScriptTestCase):
    """A series of tests for django-admin.py when using a settings.py file that
    contains the test application specified using a full path.
    """
    def setUp(self):
        self.write_settings('settings.py', [
            'django.contrib.auth',
            'django.contrib.contenttypes',
            'admin_scripts',
            'admin_scripts.complex_app',
        ])
    def tearDown(self):
        self.remove_settings('settings.py')
    def test_builtin_command(self):
        "fulldefault: django-admin builtin commands fail with an error when no settings provided"
        out, err = self.run_django_admin(['check', 'admin_scripts'])
        self.assertNoOutput(out)
        self.assertOutput(err, 'settings are not configured')
    def test_builtin_with_settings(self):
        "fulldefault: django-admin builtin commands succeed if a settings file is provided"
        out, err = self.run_django_admin(['check', '--settings=test_project.settings', 'admin_scripts'])
        self.assertNoOutput(err)
        self.assertOutput(out, SYSTEM_CHECK_MSG)
    def test_builtin_with_environment(self):
        "fulldefault: django-admin builtin commands succeed if the environment contains settings"
        out, err = self.run_django_admin(['check', 'admin_scripts'], 'test_project.settings')
        self.assertNoOutput(err)
        self.assertOutput(out, SYSTEM_CHECK_MSG)
    def test_builtin_with_bad_settings(self):
        "fulldefault: django-admin builtin commands fail if settings file (from argument) doesn't exist"
        out, err = self.run_django_admin(['check', '--settings=bad_settings', 'admin_scripts'])
        self.assertNoOutput(out)
        self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
    def test_builtin_with_bad_environment(self):
        "fulldefault: django-admin builtin commands fail if settings file (from environment) doesn't exist"
        out, err = self.run_django_admin(['check', 'admin_scripts'], 'bad_settings')
        self.assertNoOutput(out)
        self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
    def test_custom_command(self):
        "fulldefault: django-admin can't execute user commands unless settings are provided"
        out, err = self.run_django_admin(['noargs_command'])
        self.assertNoOutput(out)
        self.assertOutput(err, "No Django settings specified")
        self.assertOutput(err, "Unknown command: 'noargs_command'")
    def test_custom_command_with_settings(self):
        "fulldefault: django-admin can execute user commands if settings are provided as argument"
        out, err = self.run_django_admin(['noargs_command', '--settings=test_project.settings'])
        self.assertNoOutput(err)
        self.assertOutput(out, "EXECUTE: noargs_command")
    def test_custom_command_with_environment(self):
        "fulldefault: django-admin can execute user commands if settings are provided in environment"
        out, err = self.run_django_admin(['noargs_command'], 'test_project.settings')
        self.assertNoOutput(err)
        self.assertOutput(out, "EXECUTE: noargs_command")
class DjangoAdminMinimalSettings(AdminScriptTestCase):
    """A series of tests for django-admin.py when using a settings.py file that
    doesn't contain the test application.
    """
    def setUp(self):
        self.write_settings('settings.py', apps=['django.contrib.auth', 'django.contrib.contenttypes'])
    def tearDown(self):
        self.remove_settings('settings.py')
    def test_builtin_command(self):
        "minimal: django-admin builtin commands fail with an error when no settings provided"
        out, err = self.run_django_admin(['check', 'admin_scripts'])
        self.assertNoOutput(out)
        self.assertOutput(err, 'settings are not configured')
    def test_builtin_with_settings(self):
        "minimal: django-admin builtin commands fail if settings are provided as argument"
        out, err = self.run_django_admin(['check', '--settings=test_project.settings', 'admin_scripts'])
        self.assertNoOutput(out)
        self.assertOutput(err, "No installed app with label 'admin_scripts'.")
    def test_builtin_with_environment(self):
        "minimal: django-admin builtin commands fail if settings are provided in the environment"
        out, err = self.run_django_admin(['check', 'admin_scripts'], 'test_project.settings')
        self.assertNoOutput(out)
        self.assertOutput(err, "No installed app with label 'admin_scripts'.")
    def test_builtin_with_bad_settings(self):
        "minimal: django-admin builtin commands fail if settings file (from argument) doesn't exist"
        out, err = self.run_django_admin(['check', '--settings=bad_settings', 'admin_scripts'])
        self.assertNoOutput(out)
        self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
    def test_builtin_with_bad_environment(self):
        "minimal: django-admin builtin commands fail if settings file (from environment) doesn't exist"
        out, err = self.run_django_admin(['check', 'admin_scripts'], 'bad_settings')
        self.assertNoOutput(out)
        self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
    def test_custom_command(self):
        "minimal: django-admin can't execute user commands unless settings are provided"
        out, err = self.run_django_admin(['noargs_command'])
        self.assertNoOutput(out)
        self.assertOutput(err, "No Django settings specified")
        self.assertOutput(err, "Unknown command: 'noargs_command'")
    def test_custom_command_with_settings(self):
        "minimal: django-admin can't execute user commands, even if settings are provided as argument"
        out, err = self.run_django_admin(['noargs_command', '--settings=test_project.settings'])
        self.assertNoOutput(out)
        self.assertOutput(err, "Unknown command: 'noargs_command'")
    def test_custom_command_with_environment(self):
        "minimal: django-admin can't execute user commands, even if settings are provided in environment"
        out, err = self.run_django_admin(['noargs_command'], 'test_project.settings')
        self.assertNoOutput(out)
        self.assertOutput(err, "Unknown command: 'noargs_command'")
class DjangoAdminAlternateSettings(AdminScriptTestCase):
    """A series of tests for django-admin.py when using a settings file
    with a name other than 'settings.py'.
    """
    def setUp(self):
        self.write_settings('alternate_settings.py')
    def tearDown(self):
        self.remove_settings('alternate_settings.py')
    def test_builtin_command(self):
        "alternate: django-admin builtin commands fail with an error when no settings provided"
        out, err = self.run_django_admin(['check', 'admin_scripts'])
        self.assertNoOutput(out)
        self.assertOutput(err, 'settings are not configured')
    def test_builtin_with_settings(self):
        "alternate: django-admin builtin commands succeed if settings are provided as argument"
        out, err = self.run_django_admin(['check', '--settings=test_project.alternate_settings', 'admin_scripts'])
        self.assertNoOutput(err)
        self.assertOutput(out, SYSTEM_CHECK_MSG)
    def test_builtin_with_environment(self):
        "alternate: django-admin builtin commands succeed if settings are provided in the environment"
        out, err = self.run_django_admin(['check', 'admin_scripts'], 'test_project.alternate_settings')
        self.assertNoOutput(err)
        self.assertOutput(out, SYSTEM_CHECK_MSG)
    def test_builtin_with_bad_settings(self):
        "alternate: django-admin builtin commands fail if settings file (from argument) doesn't exist"
        out, err = self.run_django_admin(['check', '--settings=bad_settings', 'admin_scripts'])
        self.assertNoOutput(out)
        self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
    def test_builtin_with_bad_environment(self):
        "alternate: django-admin builtin commands fail if settings file (from environment) doesn't exist"
        out, err = self.run_django_admin(['check', 'admin_scripts'], 'bad_settings')
        self.assertNoOutput(out)
        self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
    def test_custom_command(self):
        "alternate: django-admin can't execute user commands unless settings are provided"
        out, err = self.run_django_admin(['noargs_command'])
        self.assertNoOutput(out)
        self.assertOutput(err, "No Django settings specified")
        self.assertOutput(err, "Unknown command: 'noargs_command'")
    def test_custom_command_with_settings(self):
        "alternate: django-admin can execute user commands if settings are provided as argument"
        out, err = self.run_django_admin(['noargs_command', '--settings=test_project.alternate_settings'])
        self.assertNoOutput(err)
        self.assertOutput(out, "EXECUTE: noargs_command")
    def test_custom_command_with_environment(self):
        "alternate: django-admin can execute user commands if settings are provided in environment"
        out, err = self.run_django_admin(['noargs_command'], 'test_project.alternate_settings')
        self.assertNoOutput(err)
        self.assertOutput(out, "EXECUTE: noargs_command")
class DjangoAdminMultipleSettings(AdminScriptTestCase):
    """A series of tests for django-admin.py when multiple settings files
    (including the default 'settings.py') are available. The default settings
    file is insufficient for performing the operations described, so the
    alternate settings must be used by the running script.
    """
    def setUp(self):
        self.write_settings('settings.py', apps=['django.contrib.auth', 'django.contrib.contenttypes'])
        self.write_settings('alternate_settings.py')
    def tearDown(self):
        self.remove_settings('settings.py')
        self.remove_settings('alternate_settings.py')
    def test_builtin_command(self):
        "alternate: django-admin builtin commands fail with an error when no settings provided"
        args = ['check', 'admin_scripts']
        out, err = self.run_django_admin(args)
        self.assertNoOutput(out)
        self.assertOutput(err, 'settings are not configured')
    def test_builtin_with_settings(self):
        "alternate: django-admin builtin commands succeed if settings are provided as argument"
        args = ['check', '--settings=test_project.alternate_settings', 'admin_scripts']
        out, err = self.run_django_admin(args)
        self.assertNoOutput(err)
        self.assertOutput(out, SYSTEM_CHECK_MSG)
    def test_builtin_with_environment(self):
        "alternate: django-admin builtin commands succeed if settings are provided in the environment"
        args = ['check', 'admin_scripts']
        out, err = self.run_django_admin(args, 'test_project.alternate_settings')
        self.assertNoOutput(err)
        self.assertOutput(out, SYSTEM_CHECK_MSG)
    def test_builtin_with_bad_settings(self):
        "alternate: django-admin builtin commands fail if settings file (from argument) doesn't exist"
        args = ['check', '--settings=bad_settings', 'admin_scripts']
        out, err = self.run_django_admin(args)
        # Like the sibling test classes, a bad --settings value must produce
        # no stdout in addition to the import error on stderr.
        self.assertNoOutput(out)
        self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
    def test_builtin_with_bad_environment(self):
        "alternate: django-admin builtin commands fail if settings file (from environment) doesn't exist"
        args = ['check', 'admin_scripts']
        out, err = self.run_django_admin(args, 'bad_settings')
        self.assertNoOutput(out)
        self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
    def test_custom_command(self):
        "alternate: django-admin can't execute user commands unless settings are provided"
        args = ['noargs_command']
        out, err = self.run_django_admin(args)
        self.assertNoOutput(out)
        self.assertOutput(err, "No Django settings specified")
        self.assertOutput(err, "Unknown command: 'noargs_command'")
    def test_custom_command_with_settings(self):
        "alternate: django-admin can execute user commands if settings are provided as argument"
        args = ['noargs_command', '--settings=test_project.alternate_settings']
        out, err = self.run_django_admin(args)
        self.assertNoOutput(err)
        self.assertOutput(out, "EXECUTE: noargs_command")
    def test_custom_command_with_environment(self):
        "alternate: django-admin can execute user commands if settings are provided in environment"
        args = ['noargs_command']
        out, err = self.run_django_admin(args, 'test_project.alternate_settings')
        self.assertNoOutput(err)
        self.assertOutput(out, "EXECUTE: noargs_command")
class DjangoAdminSettingsDirectory(AdminScriptTestCase):
    """
    A series of tests for django-admin.py when the settings file is in a
    directory. (see #9751).
    """
    def setUp(self):
        self.write_settings('settings', is_dir=True)
    def tearDown(self):
        self.remove_settings('settings', is_dir=True)
    def test_setup_environ(self):
        "directory: startapp creates the correct directory"
        args = ['startapp', 'settings_test']
        app_path = os.path.join(self.test_dir, 'settings_test')
        out, err = self.run_django_admin(args, 'test_project.settings')
        self.addCleanup(shutil.rmtree, app_path)
        self.assertNoOutput(err)
        self.assertTrue(os.path.exists(app_path))
        with open(os.path.join(app_path, 'apps.py'), 'r') as f:
            content = f.read()
            self.assertIn("class SettingsTestConfig(AppConfig)", content)
            self.assertIn("name = 'settings_test'", content)
    def test_setup_environ_custom_template(self):
        "directory: startapp creates the correct directory with a custom template"
        template_path = os.path.join(custom_templates_dir, 'app_template')
        args = ['startapp', '--template', template_path, 'custom_settings_test']
        app_path = os.path.join(self.test_dir, 'custom_settings_test')
        out, err = self.run_django_admin(args, 'test_project.settings')
        self.addCleanup(shutil.rmtree, app_path)
        self.assertNoOutput(err)
        self.assertTrue(os.path.exists(app_path))
        self.assertTrue(os.path.exists(os.path.join(app_path, 'api.py')))
    def test_startapp_unicode_name(self):
        "directory: startapp creates the correct directory with unicode characters"
        args = ['startapp', 'こんにちは']
        app_path = os.path.join(self.test_dir, 'こんにちは')
        out, err = self.run_django_admin(args, 'test_project.settings')
        self.addCleanup(shutil.rmtree, app_path)
        self.assertNoOutput(err)
        self.assertTrue(os.path.exists(app_path))
        with open(os.path.join(app_path, 'apps.py'), 'r', encoding='utf8') as f:
            content = f.read()
            self.assertIn("class こんにちはConfig(AppConfig)", content)
            self.assertIn("name = 'こんにちは'", content)
    def test_builtin_command(self):
        "directory: django-admin builtin commands fail with an error when no settings provided"
        args = ['check', 'admin_scripts']
        out, err = self.run_django_admin(args)
        self.assertNoOutput(out)
        self.assertOutput(err, 'settings are not configured')
    def test_builtin_with_bad_settings(self):
        "directory: django-admin builtin commands fail if settings file (from argument) doesn't exist"
        args = ['check', '--settings=bad_settings', 'admin_scripts']
        out, err = self.run_django_admin(args)
        # Like the sibling test classes, a bad --settings value must produce
        # no stdout in addition to the import error on stderr.
        self.assertNoOutput(out)
        self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
    def test_builtin_with_bad_environment(self):
        "directory: django-admin builtin commands fail if settings file (from environment) doesn't exist"
        args = ['check', 'admin_scripts']
        out, err = self.run_django_admin(args, 'bad_settings')
        self.assertNoOutput(out)
        self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
    def test_custom_command(self):
        "directory: django-admin can't execute user commands unless settings are provided"
        args = ['noargs_command']
        out, err = self.run_django_admin(args)
        self.assertNoOutput(out)
        self.assertOutput(err, "No Django settings specified")
        self.assertOutput(err, "Unknown command: 'noargs_command'")
    def test_builtin_with_settings(self):
        "directory: django-admin builtin commands succeed if settings are provided as argument"
        args = ['check', '--settings=test_project.settings', 'admin_scripts']
        out, err = self.run_django_admin(args)
        self.assertNoOutput(err)
        self.assertOutput(out, SYSTEM_CHECK_MSG)
    def test_builtin_with_environment(self):
        "directory: django-admin builtin commands succeed if settings are provided in the environment"
        args = ['check', 'admin_scripts']
        out, err = self.run_django_admin(args, 'test_project.settings')
        self.assertNoOutput(err)
        self.assertOutput(out, SYSTEM_CHECK_MSG)
##########################################################################
# MANAGE.PY TESTS
# This next series of test classes checks the environment processing
# of the generated manage.py script
##########################################################################
class ManageNoSettings(AdminScriptTestCase):
    "A series of tests for manage.py when there is no settings.py file."
    def test_builtin_command(self):
        "no settings: manage.py builtin commands fail with an error when no settings provided"
        out, err = self.run_manage(['check', 'admin_scripts'])
        self.assertNoOutput(out)
        self.assertOutput(err, r"No module named '?(test_project\.)?settings'?", regex=True)
    def test_builtin_with_bad_settings(self):
        "no settings: manage.py builtin commands fail if settings file (from argument) doesn't exist"
        out, err = self.run_manage(['check', '--settings=bad_settings', 'admin_scripts'])
        self.assertNoOutput(out)
        self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
    def test_builtin_with_bad_environment(self):
        "no settings: manage.py builtin commands fail if settings file (from environment) doesn't exist"
        out, err = self.run_manage(['check', 'admin_scripts'], 'bad_settings')
        self.assertNoOutput(out)
        self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
class ManageDefaultSettings(AdminScriptTestCase):
    """A series of tests for manage.py when using a settings.py file that
    contains the test application.
    """
    def setUp(self):
        self.write_settings('settings.py')
    def tearDown(self):
        self.remove_settings('settings.py')
    def test_builtin_command(self):
        "default: manage.py builtin commands succeed when default settings are appropriate"
        args = ['check', 'admin_scripts']
        out, err = self.run_manage(args)
        self.assertNoOutput(err)
        self.assertOutput(out, SYSTEM_CHECK_MSG)
    def test_builtin_with_settings(self):
        "default: manage.py builtin commands succeed if settings are provided as argument"
        args = ['check', '--settings=test_project.settings', 'admin_scripts']
        out, err = self.run_manage(args)
        self.assertNoOutput(err)
        self.assertOutput(out, SYSTEM_CHECK_MSG)
    def test_builtin_with_environment(self):
        "default: manage.py builtin commands succeed if settings are provided in the environment"
        args = ['check', 'admin_scripts']
        out, err = self.run_manage(args, 'test_project.settings')
        self.assertNoOutput(err)
        self.assertOutput(out, SYSTEM_CHECK_MSG)
    def test_builtin_with_bad_settings(self):
        "default: manage.py builtin commands fail if settings file (from argument) doesn't exist"
        args = ['check', '--settings=bad_settings', 'admin_scripts']
        out, err = self.run_manage(args)
        self.assertNoOutput(out)
        self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
    def test_builtin_with_bad_environment(self):
        "default: manage.py builtin commands fail if settings file (from environment) doesn't exist"
        args = ['check', 'admin_scripts']
        out, err = self.run_manage(args, 'bad_settings')
        self.assertNoOutput(out)
        self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
    def test_custom_command(self):
        "default: manage.py can execute user commands when default settings are appropriate"
        args = ['noargs_command']
        out, err = self.run_manage(args)
        self.assertNoOutput(err)
        self.assertOutput(out, "EXECUTE: noargs_command")
    def test_custom_command_with_settings(self):
        "default: manage.py can execute user commands when settings are provided as argument"
        args = ['noargs_command', '--settings=test_project.settings']
        out, err = self.run_manage(args)
        self.assertNoOutput(err)
        self.assertOutput(out, "EXECUTE: noargs_command")
    def test_custom_command_with_environment(self):
        "default: manage.py can execute user commands when settings are provided in environment"
        args = ['noargs_command']
        out, err = self.run_manage(args, 'test_project.settings')
        self.assertNoOutput(err)
        self.assertOutput(out, "EXECUTE: noargs_command")
class ManageFullPathDefaultSettings(AdminScriptTestCase):
    """
    Tests for manage.py when the settings.py file names the test
    application by its full dotted path.
    """
    def setUp(self):
        self.write_settings('settings.py', ['django.contrib.auth', 'django.contrib.contenttypes', 'admin_scripts'])

    def tearDown(self):
        self.remove_settings('settings.py')

    def test_builtin_command(self):
        """fulldefault: manage.py builtin commands succeed when default settings are appropriate"""
        stdout, stderr = self.run_manage(['check', 'admin_scripts'])
        self.assertNoOutput(stderr)
        self.assertOutput(stdout, SYSTEM_CHECK_MSG)

    def test_builtin_with_settings(self):
        """fulldefault: manage.py builtin commands succeed if settings are provided as argument"""
        stdout, stderr = self.run_manage(['check', '--settings=test_project.settings', 'admin_scripts'])
        self.assertNoOutput(stderr)
        self.assertOutput(stdout, SYSTEM_CHECK_MSG)

    def test_builtin_with_environment(self):
        """fulldefault: manage.py builtin commands succeed if settings are provided in the environment"""
        stdout, stderr = self.run_manage(['check', 'admin_scripts'], 'test_project.settings')
        self.assertNoOutput(stderr)
        self.assertOutput(stdout, SYSTEM_CHECK_MSG)

    def test_builtin_with_bad_settings(self):
        """fulldefault: manage.py builtin commands succeed if settings file (from argument) doesn't exist"""
        stdout, stderr = self.run_manage(['check', '--settings=bad_settings', 'admin_scripts'])
        self.assertNoOutput(stdout)
        self.assertOutput(stderr, "No module named '?bad_settings'?", regex=True)

    def test_builtin_with_bad_environment(self):
        """fulldefault: manage.py builtin commands fail if settings file (from environment) doesn't exist"""
        stdout, stderr = self.run_manage(['check', 'admin_scripts'], 'bad_settings')
        self.assertNoOutput(stdout)
        self.assertOutput(stderr, "No module named '?bad_settings'?", regex=True)

    def test_custom_command(self):
        """fulldefault: manage.py can execute user commands when default settings are appropriate"""
        stdout, stderr = self.run_manage(['noargs_command'])
        self.assertNoOutput(stderr)
        self.assertOutput(stdout, "EXECUTE: noargs_command")

    def test_custom_command_with_settings(self):
        """fulldefault: manage.py can execute user commands when settings are provided as argument"""
        stdout, stderr = self.run_manage(['noargs_command', '--settings=test_project.settings'])
        self.assertNoOutput(stderr)
        self.assertOutput(stdout, "EXECUTE: noargs_command")

    def test_custom_command_with_environment(self):
        """fulldefault: manage.py can execute user commands when settings are provided in environment"""
        stdout, stderr = self.run_manage(['noargs_command'], 'test_project.settings')
        self.assertNoOutput(stderr)
        self.assertOutput(stdout, "EXECUTE: noargs_command")
class ManageMinimalSettings(AdminScriptTestCase):
    """
    Tests for manage.py when the settings.py file does not include the
    test application in INSTALLED_APPS.
    """
    def setUp(self):
        self.write_settings('settings.py', apps=['django.contrib.auth', 'django.contrib.contenttypes'])

    def tearDown(self):
        self.remove_settings('settings.py')

    def test_builtin_command(self):
        """minimal: manage.py builtin commands fail with an error when no settings provided"""
        stdout, stderr = self.run_manage(['check', 'admin_scripts'])
        self.assertNoOutput(stdout)
        self.assertOutput(stderr, "No installed app with label 'admin_scripts'.")

    def test_builtin_with_settings(self):
        """minimal: manage.py builtin commands fail if settings are provided as argument"""
        stdout, stderr = self.run_manage(['check', '--settings=test_project.settings', 'admin_scripts'])
        self.assertNoOutput(stdout)
        self.assertOutput(stderr, "No installed app with label 'admin_scripts'.")

    def test_builtin_with_environment(self):
        """minimal: manage.py builtin commands fail if settings are provided in the environment"""
        stdout, stderr = self.run_manage(['check', 'admin_scripts'], 'test_project.settings')
        self.assertNoOutput(stdout)
        self.assertOutput(stderr, "No installed app with label 'admin_scripts'.")

    def test_builtin_with_bad_settings(self):
        """minimal: manage.py builtin commands fail if settings file (from argument) doesn't exist"""
        stdout, stderr = self.run_manage(['check', '--settings=bad_settings', 'admin_scripts'])
        self.assertNoOutput(stdout)
        self.assertOutput(stderr, "No module named '?bad_settings'?", regex=True)

    def test_builtin_with_bad_environment(self):
        """minimal: manage.py builtin commands fail if settings file (from environment) doesn't exist"""
        stdout, stderr = self.run_manage(['check', 'admin_scripts'], 'bad_settings')
        self.assertNoOutput(stdout)
        self.assertOutput(stderr, "No module named '?bad_settings'?", regex=True)

    def test_custom_command(self):
        """minimal: manage.py can't execute user commands without appropriate settings"""
        stdout, stderr = self.run_manage(['noargs_command'])
        self.assertNoOutput(stdout)
        self.assertOutput(stderr, "Unknown command: 'noargs_command'")

    def test_custom_command_with_settings(self):
        """minimal: manage.py can't execute user commands, even if settings are provided as argument"""
        stdout, stderr = self.run_manage(['noargs_command', '--settings=test_project.settings'])
        self.assertNoOutput(stdout)
        self.assertOutput(stderr, "Unknown command: 'noargs_command'")

    def test_custom_command_with_environment(self):
        """minimal: manage.py can't execute user commands, even if settings are provided in environment"""
        stdout, stderr = self.run_manage(['noargs_command'], 'test_project.settings')
        self.assertNoOutput(stdout)
        self.assertOutput(stderr, "Unknown command: 'noargs_command'")
class ManageAlternateSettings(AdminScriptTestCase):
    """
    Tests for manage.py when the settings file has a name other than
    'settings.py'.
    """
    def setUp(self):
        self.write_settings('alternate_settings.py')

    def tearDown(self):
        self.remove_settings('alternate_settings.py')

    def test_builtin_command(self):
        """alternate: manage.py builtin commands fail with an error when no default settings provided"""
        stdout, stderr = self.run_manage(['check', 'admin_scripts'])
        self.assertNoOutput(stdout)
        self.assertOutput(stderr, r"No module named '?(test_project\.)?settings'?", regex=True)

    def test_builtin_with_settings(self):
        """alternate: manage.py builtin commands work with settings provided as argument"""
        stdout, stderr = self.run_manage(['check', '--settings=alternate_settings', 'admin_scripts'])
        self.assertOutput(stdout, SYSTEM_CHECK_MSG)
        self.assertNoOutput(stderr)

    def test_builtin_with_environment(self):
        """alternate: manage.py builtin commands work if settings are provided in the environment"""
        stdout, stderr = self.run_manage(['check', 'admin_scripts'], 'alternate_settings')
        self.assertOutput(stdout, SYSTEM_CHECK_MSG)
        self.assertNoOutput(stderr)

    def test_builtin_with_bad_settings(self):
        """alternate: manage.py builtin commands fail if settings file (from argument) doesn't exist"""
        stdout, stderr = self.run_manage(['check', '--settings=bad_settings', 'admin_scripts'])
        self.assertNoOutput(stdout)
        self.assertOutput(stderr, "No module named '?bad_settings'?", regex=True)

    def test_builtin_with_bad_environment(self):
        """alternate: manage.py builtin commands fail if settings file (from environment) doesn't exist"""
        stdout, stderr = self.run_manage(['check', 'admin_scripts'], 'bad_settings')
        self.assertNoOutput(stdout)
        self.assertOutput(stderr, "No module named '?bad_settings'?", regex=True)

    def test_custom_command(self):
        """alternate: manage.py can't execute user commands without settings"""
        stdout, stderr = self.run_manage(['noargs_command'])
        self.assertNoOutput(stdout)
        self.assertOutput(stderr, r"No module named '?(test_project\.)?settings'?", regex=True)

    def test_custom_command_with_settings(self):
        """alternate: manage.py can execute user commands if settings are provided as argument"""
        stdout, stderr = self.run_manage(['noargs_command', '--settings=alternate_settings'])
        self.assertOutput(
            stdout,
            "EXECUTE: noargs_command options=[('no_color', False), "
            "('pythonpath', None), ('settings', 'alternate_settings'), "
            "('traceback', False), ('verbosity', 1)]"
        )
        self.assertNoOutput(stderr)

    def test_custom_command_with_environment(self):
        """alternate: manage.py can execute user commands if settings are provided in environment"""
        stdout, stderr = self.run_manage(['noargs_command'], 'alternate_settings')
        self.assertOutput(
            stdout,
            "EXECUTE: noargs_command options=[('no_color', False), "
            "('pythonpath', None), ('settings', None), ('traceback', False), "
            "('verbosity', 1)]"
        )
        self.assertNoOutput(stderr)

    def test_custom_command_output_color(self):
        """alternate: manage.py output syntax color can be deactivated with the `--no-color` option"""
        stdout, stderr = self.run_manage(['noargs_command', '--no-color', '--settings=alternate_settings'])
        self.assertOutput(
            stdout,
            "EXECUTE: noargs_command options=[('no_color', True), "
            "('pythonpath', None), ('settings', 'alternate_settings'), "
            "('traceback', False), ('verbosity', 1)]"
        )
        self.assertNoOutput(stderr)
class ManageMultipleSettings(AdminScriptTestCase):
    """
    Tests for manage.py when several settings files (including the default
    'settings.py') exist. The default settings file cannot perform the
    operations under test, so the alternate settings must be picked up by
    the running script.
    """
    def setUp(self):
        self.write_settings('settings.py', apps=['django.contrib.auth', 'django.contrib.contenttypes'])
        self.write_settings('alternate_settings.py')

    def tearDown(self):
        self.remove_settings('settings.py')
        self.remove_settings('alternate_settings.py')

    def test_builtin_command(self):
        """multiple: manage.py builtin commands fail with an error when no settings provided"""
        stdout, stderr = self.run_manage(['check', 'admin_scripts'])
        self.assertNoOutput(stdout)
        self.assertOutput(stderr, "No installed app with label 'admin_scripts'.")

    def test_builtin_with_settings(self):
        """multiple: manage.py builtin commands succeed if settings are provided as argument"""
        stdout, stderr = self.run_manage(['check', '--settings=alternate_settings', 'admin_scripts'])
        self.assertNoOutput(stderr)
        self.assertOutput(stdout, SYSTEM_CHECK_MSG)

    def test_builtin_with_environment(self):
        """multiple: manage.py can execute builtin commands if settings are provided in the environment"""
        stdout, stderr = self.run_manage(['check', 'admin_scripts'], 'alternate_settings')
        self.assertNoOutput(stderr)
        self.assertOutput(stdout, SYSTEM_CHECK_MSG)

    def test_builtin_with_bad_settings(self):
        """multiple: manage.py builtin commands fail if settings file (from argument) doesn't exist"""
        stdout, stderr = self.run_manage(['check', '--settings=bad_settings', 'admin_scripts'])
        self.assertNoOutput(stdout)
        self.assertOutput(stderr, "No module named '?bad_settings'?", regex=True)

    def test_builtin_with_bad_environment(self):
        """multiple: manage.py builtin commands fail if settings file (from environment) doesn't exist"""
        stdout, stderr = self.run_manage(['check', 'admin_scripts'], 'bad_settings')
        self.assertNoOutput(stdout)
        self.assertOutput(stderr, "No module named '?bad_settings'?", regex=True)

    def test_custom_command(self):
        """multiple: manage.py can't execute user commands using default settings"""
        stdout, stderr = self.run_manage(['noargs_command'])
        self.assertNoOutput(stdout)
        self.assertOutput(stderr, "Unknown command: 'noargs_command'")

    def test_custom_command_with_settings(self):
        """multiple: manage.py can execute user commands if settings are provided as argument"""
        stdout, stderr = self.run_manage(['noargs_command', '--settings=alternate_settings'])
        self.assertNoOutput(stderr)
        self.assertOutput(stdout, "EXECUTE: noargs_command")

    def test_custom_command_with_environment(self):
        """multiple: manage.py can execute user commands if settings are provided in environment"""
        stdout, stderr = self.run_manage(['noargs_command'], 'alternate_settings')
        self.assertNoOutput(stderr)
        self.assertOutput(stdout, "EXECUTE: noargs_command")
class ManageSettingsWithSettingsErrors(AdminScriptTestCase):
    """
    Tests for manage.py when the default settings.py file contains
    runtime errors.
    """
    def tearDown(self):
        self.remove_settings('settings.py')

    def write_settings_with_import_error(self, filename):
        # Generate a settings module whose import fails on a missing module.
        target = os.path.join(self.test_dir, filename)
        with open(target, 'w') as settings_file:
            settings_file.write('# Settings file automatically generated by admin_scripts test case\n')
            settings_file.write('# The next line will cause an import error:\nimport foo42bar\n')

    def test_import_error(self):
        """
        import error: manage.py builtin commands shows useful diagnostic info
        when settings with import errors is provided (#14130).
        """
        self.write_settings_with_import_error('settings.py')
        stdout, stderr = self.run_manage(['check', 'admin_scripts'])
        self.assertNoOutput(stdout)
        self.assertOutput(stderr, "No module named")
        self.assertOutput(stderr, "foo42bar")

    def test_attribute_error(self):
        """
        manage.py builtin commands does not swallow attribute error due to bad
        settings (#18845).
        """
        self.write_settings('settings.py', sdict={'BAD_VAR': 'INSTALLED_APPS.crash'})
        stdout, stderr = self.run_manage(['collectstatic', 'admin_scripts'])
        self.assertNoOutput(stdout)
        self.assertOutput(stderr, "AttributeError: 'list' object has no attribute 'crash'")

    def test_key_error(self):
        """A KeyError raised while loading settings propagates to stderr."""
        self.write_settings('settings.py', sdict={'BAD_VAR': 'DATABASES["blah"]'})
        stdout, stderr = self.run_manage(['collectstatic', 'admin_scripts'])
        self.assertNoOutput(stdout)
        self.assertOutput(stderr, "KeyError: 'blah'")

    def test_help(self):
        """
        Test listing available commands output note when only core commands are
        available.
        """
        self.write_settings(
            'settings.py',
            extra='from django.core.exceptions import ImproperlyConfigured\n'
                  'raise ImproperlyConfigured()',
        )
        stdout, stderr = self.run_manage(['help'])
        self.assertOutput(stdout, 'only Django core commands are listed')
        self.assertNoOutput(stderr)
class ManageCheck(AdminScriptTestCase):
    """Tests for the `manage.py check` command's error reporting and output format."""
    def tearDown(self):
        self.remove_settings('settings.py')
    def test_nonexistent_app(self):
        """check reports an error on a nonexistent app in INSTALLED_APPS."""
        self.write_settings(
            'settings.py',
            apps=['admin_scriptz.broken_app'],
            sdict={'USE_I18N': False},
        )
        args = ['check']
        out, err = self.run_manage(args)
        self.assertNoOutput(out)
        # Python 3.6+ raises ModuleNotFoundError (a subclass of ImportError)
        # for a missing module; earlier versions raise plain ImportError.
        self.assertOutput(err, 'ModuleNotFoundError' if PY36 else 'ImportError')
        self.assertOutput(err, 'No module named')
        self.assertOutput(err, 'admin_scriptz')
    def test_broken_app(self):
        """ manage.py check reports an ImportError if an app's models.py
        raises one on import """
        self.write_settings('settings.py', apps=['admin_scripts.broken_app'])
        args = ['check']
        out, err = self.run_manage(args)
        self.assertNoOutput(out)
        self.assertOutput(err, 'ImportError')
    def test_complex_app(self):
        """ manage.py check does not raise an ImportError validating a
        complex app with nested calls to load_app """
        self.write_settings(
            'settings.py',
            apps=[
                'admin_scripts.complex_app',
                'admin_scripts.simple_app',
                'django.contrib.admin.apps.SimpleAdminConfig',
                'django.contrib.auth',
                'django.contrib.contenttypes',
            ],
            sdict={
                'DEBUG': True
            }
        )
        args = ['check']
        out, err = self.run_manage(args)
        self.assertNoOutput(err)
        self.assertEqual(out, 'System check identified no issues (0 silenced).\n')
    def test_app_with_import(self):
        """ manage.py check does not raise errors when an app imports a base
        class that itself has an abstract base. """
        self.write_settings(
            'settings.py',
            apps=[
                'admin_scripts.app_with_import',
                'django.contrib.auth',
                'django.contrib.contenttypes',
                'django.contrib.sites',
            ],
            sdict={'DEBUG': True},
        )
        args = ['check']
        out, err = self.run_manage(args)
        self.assertNoOutput(err)
        self.assertEqual(out, 'System check identified no issues (0 silenced).\n')
    def test_output_format(self):
        """ All errors/warnings should be sorted by level and by message. """
        self.write_settings(
            'settings.py',
            apps=[
                'admin_scripts.app_raising_messages',
                'django.contrib.auth',
                'django.contrib.contenttypes',
            ],
            sdict={'DEBUG': True},
        )
        args = ['check']
        out, err = self.run_manage(args)
        # Exact expected stderr: errors come before warnings, and within a
        # level messages are sorted; hints are indented with a tab.
        expected_err = (
            "SystemCheckError: System check identified some issues:\n"
            "\n"
            "ERRORS:\n"
            "?: An error\n"
            "\tHINT: Error hint\n"
            "\n"
            "WARNINGS:\n"
            "a: Second warning\n"
            "obj: First warning\n"
            "\tHINT: Hint\n"
            "\n"
            "System check identified 3 issues (0 silenced).\n"
        )
        self.assertEqual(err, expected_err)
        self.assertNoOutput(out)
    def test_warning_does_not_halt(self):
        """
        When there are only warnings or less serious messages, then Django
        should not prevent user from launching their project, so `check`
        command should not raise `CommandError` exception.
        In this test we also test output format.
        """
        self.write_settings(
            'settings.py',
            apps=[
                'admin_scripts.app_raising_warning',
                'django.contrib.auth',
                'django.contrib.contenttypes',
            ],
            sdict={'DEBUG': True},
        )
        args = ['check']
        out, err = self.run_manage(args)
        expected_err = (
            "System check identified some issues:\n"  # No "CommandError: " part
            "\n"
            "WARNINGS:\n"
            "?: A warning\n"
            "\n"
            "System check identified 1 issue (0 silenced).\n"
        )
        self.assertEqual(err, expected_err)
        self.assertNoOutput(out)
class ManageRunserver(AdminScriptTestCase):
    """Tests for runserver's address/port parsing and its migration/database checks."""
    def setUp(self):
        from django.core.management.commands.runserver import Command
        # Replace Command.run with a no-op so no server socket is ever opened;
        # the tests only inspect the parsed attributes left on the command.
        def monkey_run(*args, **options):
            return
        self.output = StringIO()
        self.cmd = Command(stdout=self.output)
        self.cmd.run = monkey_run
    def assertServerSettings(self, addr, port, ipv6=False, raw_ipv6=False):
        # Helper: check the address/port attributes that Command.handle()
        # stores after parsing the addrport argument.
        self.assertEqual(self.cmd.addr, addr)
        self.assertEqual(self.cmd.port, port)
        self.assertEqual(self.cmd.use_ipv6, ipv6)
        self.assertEqual(self.cmd._raw_ipv6, raw_ipv6)
    def test_runserver_addrport(self):
        call_command(self.cmd)
        self.assertServerSettings('127.0.0.1', '8000')
        call_command(self.cmd, addrport="1.2.3.4:8000")
        self.assertServerSettings('1.2.3.4', '8000')
        call_command(self.cmd, addrport="7000")
        self.assertServerSettings('127.0.0.1', '7000')
    @unittest.skipUnless(socket.has_ipv6, "platform doesn't support IPv6")
    def test_runner_addrport_ipv6(self):
        call_command(self.cmd, addrport="", use_ipv6=True)
        self.assertServerSettings('::1', '8000', ipv6=True, raw_ipv6=True)
        call_command(self.cmd, addrport="7000", use_ipv6=True)
        self.assertServerSettings('::1', '7000', ipv6=True, raw_ipv6=True)
        call_command(self.cmd, addrport="[2001:0db8:1234:5678::9]:7000")
        self.assertServerSettings('2001:0db8:1234:5678::9', '7000', ipv6=True, raw_ipv6=True)
    def test_runner_hostname(self):
        call_command(self.cmd, addrport="localhost:8000")
        self.assertServerSettings('localhost', '8000')
        call_command(self.cmd, addrport="test.domain.local:7000")
        self.assertServerSettings('test.domain.local', '7000')
    @unittest.skipUnless(socket.has_ipv6, "platform doesn't support IPv6")
    def test_runner_hostname_ipv6(self):
        call_command(self.cmd, addrport="test.domain.local:7000", use_ipv6=True)
        self.assertServerSettings('test.domain.local', '7000', ipv6=True)
    def test_runner_custom_defaults(self):
        self.cmd.default_addr = '0.0.0.0'
        self.cmd.default_port = '5000'
        call_command(self.cmd)
        self.assertServerSettings('0.0.0.0', '5000')
    @unittest.skipUnless(socket.has_ipv6, "platform doesn't support IPv6")
    def test_runner_custom_defaults_ipv6(self):
        self.cmd.default_addr_ipv6 = '::'
        call_command(self.cmd, use_ipv6=True)
        self.assertServerSettings('::', '8000', ipv6=True, raw_ipv6=True)
    def test_runner_ambiguous(self):
        # Only 4 characters, all of which could be in an ipv6 address
        call_command(self.cmd, addrport="beef:7654")
        self.assertServerSettings('beef', '7654')
        # Uses only characters that could be in an ipv6 address
        call_command(self.cmd, addrport="deadbeef:7654")
        self.assertServerSettings('deadbeef', '7654')
    def test_no_database(self):
        """
        Ensure runserver.check_migrations doesn't choke on empty DATABASES.
        """
        tested_connections = ConnectionHandler({})
        with mock.patch('django.core.management.base.connections', new=tested_connections):
            self.cmd.check_migrations()
    def test_readonly_database(self):
        """
        Ensure runserver.check_migrations doesn't choke when a database is read-only
        (with possibly no django_migrations table).
        """
        with mock.patch.object(
                MigrationRecorder, 'ensure_schema',
                side_effect=MigrationSchemaMissing()):
            self.cmd.check_migrations()
        # Check a warning is emitted
        self.assertIn("Not checking migrations", self.output.getvalue())
class ManageRunserverMigrationWarning(TestCase):
    """runserver warns at startup about unapplied migrations."""
    def setUp(self):
        from django.core.management.commands.runserver import Command
        self.stdout = StringIO()
        self.runserver_command = Command(stdout=self.stdout)

    @override_settings(INSTALLED_APPS=["admin_scripts.app_waiting_migration"])
    def test_migration_warning_one_app(self):
        self.runserver_command.check_migrations()
        captured = self.stdout.getvalue()
        self.assertIn('You have 1 unapplied migration(s)', captured)
        self.assertIn('apply the migrations for app(s): app_waiting_migration.', captured)

    @override_settings(
        INSTALLED_APPS=[
            "admin_scripts.app_waiting_migration",
            "admin_scripts.another_app_waiting_migration",
        ],
    )
    def test_migration_warning_multiple_apps(self):
        self.runserver_command.check_migrations()
        captured = self.stdout.getvalue()
        self.assertIn('You have 2 unapplied migration(s)', captured)
        self.assertIn(
            'apply the migrations for app(s): another_app_waiting_migration, '
            'app_waiting_migration.', captured
        )
class ManageRunserverEmptyAllowedHosts(AdminScriptTestCase):
    """runserver refuses to start when DEBUG is False and ALLOWED_HOSTS is empty."""
    def setUp(self):
        self.write_settings(
            'settings.py',
            sdict={'ALLOWED_HOSTS': [], 'DEBUG': False},
        )

    def tearDown(self):
        self.remove_settings('settings.py')

    def test_empty_allowed_hosts_error(self):
        stdout, stderr = self.run_manage(['runserver'])
        self.assertNoOutput(stdout)
        self.assertOutput(stderr, 'CommandError: You must set settings.ALLOWED_HOSTS if DEBUG is False.')
class ManageTestserver(AdminScriptTestCase):
    """Tests for the `testserver` management command's option forwarding."""
    # Imported at class-body level so the class can be referenced directly in
    # the mock.patch.object decorator below.
    from django.core.management.commands.testserver import Command as TestserverCommand
    @mock.patch.object(TestserverCommand, 'handle', return_value='')
    def test_testserver_handle_params(self, mock_handle):
        # call_command should forward the fixture label plus all default
        # options — including skip_checks=True — to Command.handle().
        out = StringIO()
        call_command('testserver', 'blah.json', stdout=out)
        mock_handle.assert_called_with(
            'blah.json',
            stdout=out, settings=None, pythonpath=None, verbosity=1,
            traceback=False, addrport='', no_color=False, use_ipv6=False,
            skip_checks=True, interactive=True,
        )
##########################################################################
# COMMAND PROCESSING TESTS
# user-space commands are correctly handled - in particular, arguments to
# the commands are correctly parsed and processed.
##########################################################################
class CommandTypes(AdminScriptTestCase):
"Tests for the various types of base command types that can be defined."
def setUp(self):
self.write_settings('settings.py')
def tearDown(self):
self.remove_settings('settings.py')
def test_version(self):
"version is handled as a special case"
args = ['version']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, get_version())
def test_version_alternative(self):
"--version is equivalent to version"
args1, args2 = ['version'], ['--version']
# It's possible one outputs on stderr and the other on stdout, hence the set
self.assertEqual(set(self.run_manage(args1)), set(self.run_manage(args2)))
def test_help(self):
"help is handled as a special case"
args = ['help']
out, err = self.run_manage(args)
self.assertOutput(out, "Type 'manage.py help <subcommand>' for help on a specific subcommand.")
self.assertOutput(out, '[django]')
self.assertOutput(out, 'startapp')
self.assertOutput(out, 'startproject')
def test_help_commands(self):
"help --commands shows the list of all available commands"
args = ['help', '--commands']
out, err = self.run_manage(args)
self.assertNotInOutput(out, 'usage:')
self.assertNotInOutput(out, 'Options:')
self.assertNotInOutput(out, '[django]')
self.assertOutput(out, 'startapp')
self.assertOutput(out, 'startproject')
self.assertNotInOutput(out, '\n\n')
def test_help_alternative(self):
"--help is equivalent to help"
args1, args2 = ['help'], ['--help']
self.assertEqual(self.run_manage(args1), self.run_manage(args2))
def test_help_short_altert(self):
"-h is handled as a short form of --help"
args1, args2 = ['--help'], ['-h']
self.assertEqual(self.run_manage(args1), self.run_manage(args2))
def test_specific_help(self):
"--help can be used on a specific command"
args = ['check', '--help']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, "Checks the entire Django project for potential problems.")
def test_color_style(self):
style = color.no_style()
self.assertEqual(style.ERROR('Hello, world!'), 'Hello, world!')
style = color.make_style('nocolor')
self.assertEqual(style.ERROR('Hello, world!'), 'Hello, world!')
style = color.make_style('dark')
self.assertIn('Hello, world!', style.ERROR('Hello, world!'))
self.assertNotEqual(style.ERROR('Hello, world!'), 'Hello, world!')
# Default palette has color.
style = color.make_style('')
self.assertIn('Hello, world!', style.ERROR('Hello, world!'))
self.assertNotEqual(style.ERROR('Hello, world!'), 'Hello, world!')
def test_command_color(self):
class Command(BaseCommand):
requires_system_checks = False
def handle(self, *args, **options):
self.stdout.write('Hello, world!', self.style.ERROR)
self.stderr.write('Hello, world!', self.style.ERROR)
out = StringIO()
err = StringIO()
command = Command(stdout=out, stderr=err)
call_command(command)
if color.supports_color():
self.assertIn('Hello, world!\n', out.getvalue())
self.assertIn('Hello, world!\n', err.getvalue())
self.assertNotEqual(out.getvalue(), 'Hello, world!\n')
self.assertNotEqual(err.getvalue(), 'Hello, world!\n')
else:
self.assertEqual(out.getvalue(), 'Hello, world!\n')
self.assertEqual(err.getvalue(), 'Hello, world!\n')
def test_command_no_color(self):
"--no-color prevent colorization of the output"
class Command(BaseCommand):
requires_system_checks = False
def handle(self, *args, **options):
self.stdout.write('Hello, world!', self.style.ERROR)
self.stderr.write('Hello, world!', self.style.ERROR)
out = StringIO()
err = StringIO()
command = Command(stdout=out, stderr=err, no_color=True)
call_command(command)
self.assertEqual(out.getvalue(), 'Hello, world!\n')
self.assertEqual(err.getvalue(), 'Hello, world!\n')
out = StringIO()
err = StringIO()
command = Command(stdout=out, stderr=err)
call_command(command, no_color=True)
self.assertEqual(out.getvalue(), 'Hello, world!\n')
self.assertEqual(err.getvalue(), 'Hello, world!\n')
def test_custom_stdout(self):
class Command(BaseCommand):
requires_system_checks = False
def handle(self, *args, **options):
self.stdout.write("Hello, World!")
out = StringIO()
command = Command(stdout=out)
call_command(command)
self.assertEqual(out.getvalue(), "Hello, World!\n")
out.truncate(0)
new_out = StringIO()
call_command(command, stdout=new_out)
self.assertEqual(out.getvalue(), "")
self.assertEqual(new_out.getvalue(), "Hello, World!\n")
def test_custom_stderr(self):
class Command(BaseCommand):
requires_system_checks = False
def handle(self, *args, **options):
self.stderr.write("Hello, World!")
err = StringIO()
command = Command(stderr=err)
call_command(command)
self.assertEqual(err.getvalue(), "Hello, World!\n")
err.truncate(0)
new_err = StringIO()
call_command(command, stderr=new_err)
self.assertEqual(err.getvalue(), "")
self.assertEqual(new_err.getvalue(), "Hello, World!\n")
def test_base_command(self):
"User BaseCommands can execute when a label is provided"
args = ['base_command', 'testlabel']
expected_labels = "('testlabel',)"
self._test_base_command(args, expected_labels)
def test_base_command_no_label(self):
"User BaseCommands can execute when no labels are provided"
args = ['base_command']
expected_labels = "()"
self._test_base_command(args, expected_labels)
def test_base_command_multiple_label(self):
"User BaseCommands can execute when no labels are provided"
args = ['base_command', 'testlabel', 'anotherlabel']
expected_labels = "('testlabel', 'anotherlabel')"
self._test_base_command(args, expected_labels)
def test_base_command_with_option(self):
"User BaseCommands can execute with options when a label is provided"
args = ['base_command', 'testlabel', '--option_a=x']
expected_labels = "('testlabel',)"
self._test_base_command(args, expected_labels, option_a="'x'")
def test_base_command_with_options(self):
"User BaseCommands can execute with multiple options when a label is provided"
args = ['base_command', 'testlabel', '-a', 'x', '--option_b=y']
expected_labels = "('testlabel',)"
self._test_base_command(args, expected_labels, option_a="'x'", option_b="'y'")
def test_base_command_with_wrong_option(self):
"User BaseCommands outputs command usage when wrong option is specified"
args = ['base_command', '--invalid']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "usage: manage.py base_command")
self.assertOutput(err, "error: unrecognized arguments: --invalid")
def _test_base_command(self, args, labels, option_a="'1'", option_b="'2'"):
out, err = self.run_manage(args)
expected_out = (
"EXECUTE:BaseCommand labels=%s, "
"options=[('no_color', False), ('option_a', %s), ('option_b', %s), "
"('option_c', '3'), ('pythonpath', None), ('settings', None), "
"('traceback', False), ('verbosity', 1)]") % (labels, option_a, option_b)
self.assertNoOutput(err)
self.assertOutput(out, expected_out)
def test_base_run_from_argv(self):
"""
Test run_from_argv properly terminates even with custom execute() (#19665)
Also test proper traceback display.
"""
err = StringIO()
command = BaseCommand(stderr=err)
def raise_command_error(*args, **kwargs):
raise CommandError("Custom error")
command.execute = lambda args: args # This will trigger TypeError
# If the Exception is not CommandError it should always
# raise the original exception.
with self.assertRaises(TypeError):
command.run_from_argv(['', ''])
# If the Exception is CommandError and --traceback is not present
# this command should raise a SystemExit and don't print any
# traceback to the stderr.
command.execute = raise_command_error
err.truncate(0)
with self.assertRaises(SystemExit):
command.run_from_argv(['', ''])
err_message = err.getvalue()
self.assertNotIn("Traceback", err_message)
self.assertIn("CommandError", err_message)
# If the Exception is CommandError and --traceback is present
# this command should raise the original CommandError as if it
# were not a CommandError.
err.truncate(0)
with self.assertRaises(CommandError):
command.run_from_argv(['', '', '--traceback'])
def test_run_from_argv_non_ascii_error(self):
"""
Non-ASCII message of CommandError does not raise any
UnicodeDecodeError in run_from_argv.
"""
def raise_command_error(*args, **kwargs):
raise CommandError("Erreur personnalisée")
command = BaseCommand(stderr=StringIO())
command.execute = raise_command_error
with self.assertRaises(SystemExit):
command.run_from_argv(['', ''])
def test_run_from_argv_closes_connections(self):
"""
A command called from the command line should close connections after
being executed (#21255).
"""
command = BaseCommand(stderr=StringIO())
command.check = lambda: []
command.handle = lambda *args, **kwargs: args
with mock.patch('django.core.management.base.connections') as mock_connections:
command.run_from_argv(['', ''])
# Test connections have been closed
self.assertTrue(mock_connections.close_all.called)
def test_noargs(self):
"NoArg Commands can be executed"
args = ['noargs_command']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(
out,
"EXECUTE: noargs_command options=[('no_color', False), "
"('pythonpath', None), ('settings', None), ('traceback', False), "
"('verbosity', 1)]"
)
def test_noargs_with_args(self):
"NoArg Commands raise an error if an argument is provided"
args = ['noargs_command', 'argument']
out, err = self.run_manage(args)
self.assertOutput(err, "error: unrecognized arguments: argument")
def test_app_command(self):
"User AppCommands can execute when a single app name is provided"
args = ['app_command', 'auth']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE:AppCommand name=django.contrib.auth, options=")
self.assertOutput(
out,
", options=[('no_color', False), ('pythonpath', None), "
"('settings', None), ('traceback', False), ('verbosity', 1)]"
)
def test_app_command_no_apps(self):
"User AppCommands raise an error when no app name is provided"
args = ['app_command']
out, err = self.run_manage(args)
self.assertOutput(err, 'error: Enter at least one application label.')
def test_app_command_multiple_apps(self):
"User AppCommands raise an error when multiple app names are provided"
args = ['app_command', 'auth', 'contenttypes']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE:AppCommand name=django.contrib.auth, options=")
self.assertOutput(
out,
", options=[('no_color', False), ('pythonpath', None), "
"('settings', None), ('traceback', False), ('verbosity', 1)]"
)
self.assertOutput(out, "EXECUTE:AppCommand name=django.contrib.contenttypes, options=")
self.assertOutput(
out,
", options=[('no_color', False), ('pythonpath', None), "
"('settings', None), ('traceback', False), ('verbosity', 1)]"
)
def test_app_command_invalid_app_label(self):
    # NOTE(review): the original docstring was copied from the success-case
    # test; the assertion below actually checks the error path.
    "User AppCommands raise an error when an invalid app label is provided"
    args = ['app_command', 'NOT_AN_APP']
    out, err = self.run_manage(args)
    self.assertOutput(err, "No installed app with label 'NOT_AN_APP'.")
def test_app_command_some_invalid_app_labels(self):
    # NOTE(review): despite the old wording ("can execute"), the test only
    # asserts that the invalid label produces an error message.
    "User AppCommands report an error when any provided app name is invalid"
    args = ['app_command', 'auth', 'NOT_AN_APP']
    out, err = self.run_manage(args)
    self.assertOutput(err, "No installed app with label 'NOT_AN_APP'.")
def test_label_command(self):
    "User LabelCommands can execute when a label is provided"
    out, err = self.run_manage(['label_command', 'testlabel'])
    self.assertNoOutput(err)
    expected = (
        "EXECUTE:LabelCommand label=testlabel, options=[('no_color', False), "
        "('pythonpath', None), ('settings', None), ('traceback', False), ('verbosity', 1)]"
    )
    self.assertOutput(out, expected)
def test_label_command_no_label(self):
    "User LabelCommands raise an error if no label is provided"
    out, err = self.run_manage(['label_command'])
    self.assertOutput(err, 'Enter at least one label')
def test_label_command_multiple_label(self):
    "User LabelCommands are executed multiple times if multiple labels are provided"
    out, err = self.run_manage(['label_command', 'testlabel', 'anotherlabel'])
    self.assertNoOutput(err)
    # The command runs once per label; check one EXECUTE line each.
    for label in ('testlabel', 'anotherlabel'):
        self.assertOutput(
            out,
            "EXECUTE:LabelCommand label=%s, options=[('no_color', False), "
            "('pythonpath', None), ('settings', None), ('traceback', False), "
            "('verbosity', 1)]" % label
        )
class Discovery(SimpleTestCase):

    def test_precedence(self):
        """
        Apps listed first in INSTALLED_APPS have precedence.
        """
        # The 'duplicate' command is defined by both apps; whichever app
        # appears first in INSTALLED_APPS provides the implementation.
        cases = [
            (['admin_scripts.complex_app', 'admin_scripts.simple_app'], 'complex_app'),
            (['admin_scripts.simple_app', 'admin_scripts.complex_app'], 'simple_app'),
        ]
        for app_order, winner in cases:
            installed = app_order + ['django.contrib.auth', 'django.contrib.contenttypes']
            with self.settings(INSTALLED_APPS=installed):
                out = StringIO()
                call_command('duplicate', stdout=out)
                self.assertEqual(out.getvalue().strip(), winner)
class ArgumentOrder(AdminScriptTestCase):
    """Tests for 2-stage argument parsing scheme.

    django-admin command arguments are parsed in 2 parts; the core arguments
    (--settings, --traceback and --pythonpath) are parsed using a basic parser,
    ignoring any unknown options. Then the full settings are passed to the
    command parser, which extracts commands of interest to the individual
    command.
    """

    def setUp(self):
        self.write_settings('settings.py', apps=['django.contrib.auth', 'django.contrib.contenttypes'])
        self.write_settings('alternate_settings.py')

    def tearDown(self):
        self.remove_settings('settings.py')
        self.remove_settings('alternate_settings.py')

    def test_setting_then_option(self):
        """ Options passed after settings are correctly handled. """
        self._test(['base_command', 'testlabel', '--settings=alternate_settings', '--option_a=x'])

    def test_setting_then_short_option(self):
        """ Short options passed after settings are correctly handled. """
        self._test(['base_command', 'testlabel', '--settings=alternate_settings', '-a', 'x'])

    def test_option_then_setting(self):
        """ Options passed before settings are correctly handled. """
        self._test(['base_command', 'testlabel', '--option_a=x', '--settings=alternate_settings'])

    def test_short_option_then_setting(self):
        """ Short options passed before settings are correctly handled. """
        self._test(['base_command', 'testlabel', '-a', 'x', '--settings=alternate_settings'])

    def test_option_then_setting_then_option(self):
        """ Options are correctly handled when they are passed before and after
        a setting. """
        self._test(
            ['base_command', 'testlabel', '--option_a=x', '--settings=alternate_settings', '--option_b=y'],
            option_b="'y'",
        )

    def _test(self, args, option_b="'2'"):
        # Shared driver: run the command and verify the echoed option dict.
        out, err = self.run_manage(args)
        self.assertNoOutput(err)
        expected = (
            "EXECUTE:BaseCommand labels=('testlabel',), options=[('no_color', False), "
            "('option_a', 'x'), ('option_b', %s), ('option_c', '3'), "
            "('pythonpath', None), ('settings', 'alternate_settings'), "
            "('traceback', False), ('verbosity', 1)]" % option_b
        )
        self.assertOutput(out, expected)
@override_settings(ROOT_URLCONF='admin_scripts.urls')
class StartProject(LiveServerTestCase, AdminScriptTestCase):
    # LiveServerTestCase is required so the *_by_url tests can fetch project
    # template tarballs over HTTP from the test's own live server.
    available_apps = [
        'admin_scripts',
        'django.contrib.auth',
        'django.contrib.contenttypes',
        'django.contrib.sessions',
    ]

    def test_wrong_args(self):
        "Make sure passing the wrong kinds of arguments outputs an error and prints usage"
        out, err = self.run_django_admin(['startproject'])
        self.assertNoOutput(out)
        self.assertOutput(err, "usage:")
        self.assertOutput(err, "You must provide a project name.")

    def test_simple_project(self):
        "Make sure the startproject management command creates a project"
        args = ['startproject', 'testproject']
        testproject_dir = os.path.join(self.test_dir, 'testproject')
        # Third argument (ignore_errors=True) keeps cleanup from failing if
        # the directory was never created.
        self.addCleanup(shutil.rmtree, testproject_dir, True)
        out, err = self.run_django_admin(args)
        self.assertNoOutput(err)
        self.assertTrue(os.path.isdir(testproject_dir))
        # running again..
        out, err = self.run_django_admin(args)
        self.assertNoOutput(out)
        self.assertOutput(err, "already exists")

    def test_invalid_project_name(self):
        "Make sure the startproject management command validates a project name"
        # A leading digit and a path separator are both invalid identifiers.
        for bad_name in ('7testproject', '../testproject'):
            args = ['startproject', bad_name]
            testproject_dir = os.path.join(self.test_dir, bad_name)
            self.addCleanup(shutil.rmtree, testproject_dir, True)
            out, err = self.run_django_admin(args)
            self.assertOutput(
                err,
                "Error: '%s' is not a valid project name. Please make "
                "sure the name is a valid identifier." % bad_name
            )
            self.assertFalse(os.path.exists(testproject_dir))

    def test_simple_project_different_directory(self):
        "Make sure the startproject management command creates a project in a specific directory"
        args = ['startproject', 'testproject', 'othertestproject']
        testproject_dir = os.path.join(self.test_dir, 'othertestproject')
        os.mkdir(testproject_dir)
        self.addCleanup(shutil.rmtree, testproject_dir)
        out, err = self.run_django_admin(args)
        self.assertNoOutput(err)
        self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'manage.py')))
        # running again..
        out, err = self.run_django_admin(args)
        self.assertNoOutput(out)
        self.assertOutput(err, "already exists")

    def test_custom_project_template(self):
        "Make sure the startproject management command is able to use a different project template"
        template_path = os.path.join(custom_templates_dir, 'project_template')
        args = ['startproject', '--template', template_path, 'customtestproject']
        testproject_dir = os.path.join(self.test_dir, 'customtestproject')
        self.addCleanup(shutil.rmtree, testproject_dir, True)
        out, err = self.run_django_admin(args)
        self.assertNoOutput(err)
        self.assertTrue(os.path.isdir(testproject_dir))
        self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'additional_dir')))

    def test_template_dir_with_trailing_slash(self):
        "Ticket 17475: Template dir passed has a trailing path separator"
        template_path = os.path.join(custom_templates_dir, 'project_template' + os.sep)
        args = ['startproject', '--template', template_path, 'customtestproject']
        testproject_dir = os.path.join(self.test_dir, 'customtestproject')
        self.addCleanup(shutil.rmtree, testproject_dir, True)
        out, err = self.run_django_admin(args)
        self.assertNoOutput(err)
        self.assertTrue(os.path.isdir(testproject_dir))
        self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'additional_dir')))

    def test_custom_project_template_from_tarball_by_path(self):
        "Make sure the startproject management command is able to use a different project template from a tarball"
        template_path = os.path.join(custom_templates_dir, 'project_template.tgz')
        args = ['startproject', '--template', template_path, 'tarballtestproject']
        testproject_dir = os.path.join(self.test_dir, 'tarballtestproject')
        self.addCleanup(shutil.rmtree, testproject_dir, True)
        out, err = self.run_django_admin(args)
        self.assertNoOutput(err)
        self.assertTrue(os.path.isdir(testproject_dir))
        self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'run.py')))

    def test_custom_project_template_from_tarball_to_alternative_location(self):
        "Startproject can use a project template from a tarball and create it in a specified location"
        template_path = os.path.join(custom_templates_dir, 'project_template.tgz')
        args = ['startproject', '--template', template_path, 'tarballtestproject', 'altlocation']
        testproject_dir = os.path.join(self.test_dir, 'altlocation')
        os.mkdir(testproject_dir)
        self.addCleanup(shutil.rmtree, testproject_dir)
        out, err = self.run_django_admin(args)
        self.assertNoOutput(err)
        self.assertTrue(os.path.isdir(testproject_dir))
        self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'run.py')))

    def test_custom_project_template_from_tarball_by_url(self):
        """
        The startproject management command is able to use a different project
        template from a tarball via a URL.
        """
        # Served by this test case's own live server.
        template_url = '%s/custom_templates/project_template.tgz' % self.live_server_url
        args = ['startproject', '--template', template_url, 'urltestproject']
        testproject_dir = os.path.join(self.test_dir, 'urltestproject')
        self.addCleanup(shutil.rmtree, testproject_dir, True)
        out, err = self.run_django_admin(args)
        self.assertNoOutput(err)
        self.assertTrue(os.path.isdir(testproject_dir))
        self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'run.py')))

    def test_project_template_tarball_url(self):
        "Startproject management command handles project template tar/zip balls from non-canonical urls"
        # Note the trailing slash after ".tgz" -- a non-canonical URL.
        template_url = '%s/custom_templates/project_template.tgz/' % self.live_server_url
        args = ['startproject', '--template', template_url, 'urltestproject']
        testproject_dir = os.path.join(self.test_dir, 'urltestproject')
        self.addCleanup(shutil.rmtree, testproject_dir, True)
        out, err = self.run_django_admin(args)
        self.assertNoOutput(err)
        self.assertTrue(os.path.isdir(testproject_dir))
        self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'run.py')))

    def test_file_without_extension(self):
        "Make sure the startproject management command is able to render custom files"
        template_path = os.path.join(custom_templates_dir, 'project_template')
        # -e adds 'txt' to rendered extensions, -n renders the named file.
        args = ['startproject', '--template', template_path, 'customtestproject', '-e', 'txt', '-n', 'Procfile']
        testproject_dir = os.path.join(self.test_dir, 'customtestproject')
        self.addCleanup(shutil.rmtree, testproject_dir, True)
        out, err = self.run_django_admin(args)
        self.assertNoOutput(err)
        self.assertTrue(os.path.isdir(testproject_dir))
        self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'additional_dir')))
        base_path = os.path.join(testproject_dir, 'additional_dir')
        for f in ('Procfile', 'additional_file.py', 'requirements.txt'):
            self.assertTrue(os.path.exists(os.path.join(base_path, f)))
            with open(os.path.join(base_path, f)) as fh:
                self.assertEqual(fh.read().strip(), '# some file for customtestproject test project')

    def test_custom_project_template_context_variables(self):
        "Make sure template context variables are rendered with proper values"
        template_path = os.path.join(custom_templates_dir, 'project_template')
        args = ['startproject', '--template', template_path, 'another_project', 'project_dir']
        testproject_dir = os.path.join(self.test_dir, 'project_dir')
        os.mkdir(testproject_dir)
        self.addCleanup(shutil.rmtree, testproject_dir)
        out, err = self.run_django_admin(args)
        self.assertNoOutput(err)
        test_manage_py = os.path.join(testproject_dir, 'manage.py')
        with open(test_manage_py, 'r') as fp:
            content = fp.read()
            self.assertIn("project_name = 'another_project'", content)
            self.assertIn("project_directory = '%s'" % testproject_dir, content)

    def test_no_escaping_of_project_variables(self):
        "Make sure template context variables are not html escaped"
        # We're using a custom command so we need the alternate settings
        self.write_settings('alternate_settings.py')
        self.addCleanup(self.remove_settings, 'alternate_settings.py')
        template_path = os.path.join(custom_templates_dir, 'project_template')
        args = [
            'custom_startproject', '--template', template_path,
            'another_project', 'project_dir', '--extra', '<&>',
            '--settings=alternate_settings',
        ]
        testproject_dir = os.path.join(self.test_dir, 'project_dir')
        os.mkdir(testproject_dir)
        self.addCleanup(shutil.rmtree, testproject_dir)
        out, err = self.run_manage(args)
        self.assertNoOutput(err)
        # NOTE(review): variable name is historical -- this is the rendered
        # extra.py, not manage.py.
        test_manage_py = os.path.join(testproject_dir, 'additional_dir', 'extra.py')
        with open(test_manage_py, 'r') as fp:
            content = fp.read()
            self.assertIn("<&>", content)

    def test_custom_project_destination_missing(self):
        """
        Make sure an exception is raised when the provided
        destination directory doesn't exist
        """
        template_path = os.path.join(custom_templates_dir, 'project_template')
        args = ['startproject', '--template', template_path, 'yet_another_project', 'project_dir2']
        testproject_dir = os.path.join(self.test_dir, 'project_dir2')
        out, err = self.run_django_admin(args)
        self.assertNoOutput(out)
        self.assertOutput(err, "Destination directory '%s' does not exist, please create it first." % testproject_dir)
        self.assertFalse(os.path.exists(testproject_dir))

    def test_custom_project_template_with_non_ascii_templates(self):
        """
        The startproject management command is able to render templates with
        non-ASCII content.
        """
        template_path = os.path.join(custom_templates_dir, 'project_template')
        args = ['startproject', '--template', template_path, '--extension=txt', 'customtestproject']
        testproject_dir = os.path.join(self.test_dir, 'customtestproject')
        self.addCleanup(shutil.rmtree, testproject_dir, True)
        out, err = self.run_django_admin(args)
        self.assertNoOutput(err)
        self.assertTrue(os.path.isdir(testproject_dir))
        path = os.path.join(testproject_dir, 'ticket-18091-non-ascii-template.txt')
        with codecs.open(path, 'r', encoding='utf-8') as f:
            self.assertEqual(f.read().splitlines(False), [
                'Some non-ASCII text for testing ticket #18091:',
                'üäö €'])
class DiffSettings(AdminScriptTestCase):
    """Tests for the diffsettings management command."""

    def test_basic(self):
        """Runs without error and emits settings diff."""
        self.write_settings('settings_to_diff.py', sdict={'FOO': '"bar"'})
        self.addCleanup(self.remove_settings, 'settings_to_diff.py')
        out, err = self.run_manage(['diffsettings', '--settings=settings_to_diff'])
        self.assertNoOutput(err)
        self.assertOutput(out, "FOO = 'bar' ###")

    def test_all(self):
        """The all option also shows settings with the default value."""
        self.write_settings('settings_to_diff.py', sdict={'STATIC_URL': 'None'})
        self.addCleanup(self.remove_settings, 'settings_to_diff.py')
        out, err = self.run_manage(['diffsettings', '--settings=settings_to_diff', '--all'])
        self.assertNoOutput(err)
        self.assertOutput(out, "### STATIC_URL = None")

    def test_custom_default(self):
        """
        The --default option specifies an alternate settings module for
        comparison.
        """
        self.write_settings('settings_default.py', sdict={'FOO': '"foo"', 'BAR': '"bar1"'})
        self.addCleanup(self.remove_settings, 'settings_default.py')
        self.write_settings('settings_to_diff.py', sdict={'FOO': '"foo"', 'BAR': '"bar2"'})
        self.addCleanup(self.remove_settings, 'settings_to_diff.py')
        out, err = self.run_manage(['diffsettings', '--settings=settings_to_diff', '--default=settings_default'])
        self.assertNoOutput(err)
        # FOO matches the custom default, so it must not be reported.
        self.assertNotInOutput(out, "FOO")
        self.assertOutput(out, "BAR = 'bar2'")
class Dumpdata(AdminScriptTestCase):
    """Tests for the dumpdata management command."""

    def setUp(self):
        self.write_settings('settings.py')

    def tearDown(self):
        self.remove_settings('settings.py')

    def test_pks_parsing(self):
        """Regression for #20509

        Test would raise an exception rather than printing an error message.
        """
        out, err = self.run_manage(['dumpdata', '--pks=1'])
        self.assertOutput(err, "You can only use --pks option with one model")
        self.assertNoOutput(out)
class MainModule(AdminScriptTestCase):
    """python -m django works like django-admin."""

    def test_runs_django_admin(self):
        # Both entry points must report the same version string.
        admin_out, _ = self.run_django_admin(['--version'])
        module_out, _ = self.run_test('-m', ['django', '--version'])
        self.assertEqual(module_out, admin_out)
| bsd-3-clause |
jordanemedlock/psychtruths | temboo/core/Library/GitHub/IssuesAPI/Issues/ListIssuesForRepo.py | 4 | 8201 | # -*- coding: utf-8 -*-
###############################################################################
#
# ListIssuesForRepo
# List all issues for a specified repository.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class ListIssuesForRepo(Choreography):
    # Choreo wrapper for the GitHub "list issues for a repository" API call.

    def __init__(self, temboo_session):
        """
        Create a new instance of the ListIssuesForRepo Choreo. A TembooSession object, containing a valid
        set of Temboo credentials, must be supplied.
        """
        super(ListIssuesForRepo, self).__init__(temboo_session, '/Library/GitHub/IssuesAPI/Issues/ListIssuesForRepo')

    def new_input_set(self):
        # Factory for the Choreo-specific input container.
        return ListIssuesForRepoInputSet()

    def _make_result_set(self, result, path):
        # Factory for the Choreo-specific result container.
        return ListIssuesForRepoResultSet(result, path)

    def _make_execution(self, session, exec_id, path):
        # Factory for the Choreo-specific execution tracker.
        return ListIssuesForRepoChoreographyExecution(session, exec_id, path)
class ListIssuesForRepoInputSet(InputSet):
    """
    An InputSet with methods appropriate for specifying the inputs to the ListIssuesForRepo
    Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
    """
    # Each setter simply forwards to InputSet._set_input with the input's
    # canonical name; the class defines no overrides, so calling the
    # inherited method directly is equivalent to the super() form.

    def set_AccessToken(self, value):
        """(required, string) The Access Token retrieved during the OAuth process."""
        self._set_input('AccessToken', value)

    def set_Assignee(self, value):
        """(required, string) Can be set to a username, "none" for issues with no assigned user, or * for issues with any assigned user."""
        self._set_input('Assignee', value)

    def set_Creator(self, value):
        """(optional, string) The user that created the issue."""
        self._set_input('Creator', value)

    def set_Direction(self, value):
        """(optional, string) The direction of the sort order. Valid values are: asc or desc (the default)."""
        self._set_input('Direction', value)

    def set_Labels(self, value):
        """(optional, string) A comma separated list of label names (i.e. bug, ui, etc)."""
        self._set_input('Labels', value)

    def set_Mentioned(self, value):
        """(optional, string) A Github username that is mentioned."""
        self._set_input('Mentioned', value)

    def set_Milestone(self, value):
        """(required, string) Can be set to a milestone number, "none" for issues with no milestone, or * for issues with any milestone."""
        self._set_input('Milestone', value)

    def set_Page(self, value):
        """(optional, integer) Indicates the page index that you want to retrieve. This is used to page through many results. Defaults to 1."""
        self._set_input('Page', value)

    def set_PerPage(self, value):
        """(optional, integer) The number of results to return per page."""
        self._set_input('PerPage', value)

    def set_Repo(self, value):
        """(required, string) The name of the repo."""
        self._set_input('Repo', value)

    def set_Since(self, value):
        """(optional, date) A timestamp in ISO 8601 format (YYYY-MM-DDTHH:MM:SSZ). Returns issues since this date."""
        self._set_input('Since', value)

    def set_Sort(self, value):
        """(optional, string) Indicates how the issues will be sorted in the response. Valid sort options are: created (the default), updated, comments."""
        self._set_input('Sort', value)

    def set_State(self, value):
        """(optional, string) Returns issues with a particular state: open (the default) or closed."""
        self._set_input('State', value)

    def set_User(self, value):
        """(required, string) A GitHub username."""
        self._set_input('User', value)
class ListIssuesForRepoResultSet(ResultSet):
    """
    A ResultSet with methods tailored to the values returned by the ListIssuesForRepo Choreo.
    The ResultSet object is used to retrieve the results of a Choreo execution.
    """
    # Each getter reads one named output from the execution's output map,
    # returning None when the output is absent.

    def getJSONFromString(self, str):
        # Deserialize a JSON string into the corresponding Python object.
        return json.loads(str)

    def get_Response(self):
        """(json) The response from GitHub."""
        return self._output.get('Response', None)

    def get_FirstPage(self):
        """(integer) The index for the first page of results."""
        return self._output.get('FirstPage', None)

    def get_LastPage(self):
        """(integer) The index for the last page of results."""
        return self._output.get('LastPage', None)

    def get_Limit(self):
        """(integer) The available rate limit for your account. This is returned in the GitHub response header."""
        return self._output.get('Limit', None)

    def get_NextPage(self):
        """(integer) The index for the next page of results."""
        return self._output.get('NextPage', None)

    def get_PreviousPage(self):
        """(integer) The index for the previous page of results."""
        return self._output.get('PreviousPage', None)

    def get_Remaining(self):
        """(integer) The remaining number of API requests available to you. This is returned in the GitHub response header."""
        return self._output.get('Remaining', None)
class ListIssuesForRepoChoreographyExecution(ChoreographyExecution):
    # Tracks an asynchronous execution of the ListIssuesForRepo Choreo.

    def _make_result_set(self, response, path):
        # Wrap the raw execution response in the Choreo-specific ResultSet.
        return ListIssuesForRepoResultSet(response, path)
| apache-2.0 |
manasi24/tempest | tempest/scenario/test_swift_basic_ops.py | 27 | 3079 | # Copyright 2013 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from tempest import config
from tempest.scenario import manager
from tempest import test
# Module-level handles: the global tempest configuration object and a
# logger named after this module.
CONF = config.CONF
LOG = logging.getLogger(__name__)
class TestSwiftBasicOps(manager.SwiftScenarioTest):
    """Basic Swift object-storage scenarios.

    Covers: stat, container create/delete, object upload/list/download/
    delete, and container ACL changes for anonymous access.
    """

    @test.idempotent_id('b920faf1-7b8a-4657-b9fe-9c4512bfb381')
    @test.services('object_storage')
    def test_swift_basic_ops(self):
        # Full object lifecycle: create, upload, verify, delete, clean up.
        self.get_swift_stat()
        container = self.create_container()
        name, data = self.upload_object_to_container(container)
        self.list_and_check_container_objects(container, present_obj=[name])
        self.download_and_verify(container, name, data)
        self.delete_object(container, name)
        self.list_and_check_container_objects(container,
                                              not_present_obj=[name])
        self.delete_container(container)

    @test.idempotent_id('916c7111-cb1f-44b2-816d-8f760e4ea910')
    @test.services('object_storage')
    def test_swift_acl_anonymous_download(self):
        """This test will cover below steps:

        1. Create container
        2. Upload object to the new container
        3. Change the ACL of the container
        4. Check if the object can be download by anonymous user
        5. Delete the object and container
        """
        container = self.create_container()
        name, _ = self.upload_object_to_container(container)
        obj_url = '%s/%s/%s' % (self.object_client.base_url, container, name)
        # Anonymous access is denied before the ACL change...
        resp, _ = self.object_client.raw_request(obj_url, 'GET')
        self.assertEqual(resp.status, 401)
        # ...and allowed after granting world-readable access.
        self.change_container_acl(container, '.r:*')
        resp, _ = self.object_client.raw_request(obj_url, 'GET')
        self.assertEqual(resp.status, 200)
| apache-2.0 |
staranjeet/fjord | fjord/redirector/tests/test_dummy.py | 1 | 1413 | from fjord.base.tests import reverse, TestCase
from fjord.redirector import get_redirectors
from fjord.redirector.base import build_redirect_url
from fjord.redirector.providers.dummy import DummyRedirector
from fjord.redirector.tests import RedirectorTestMixin
class DummyRedirectorLoadingTestCase(RedirectorTestMixin, TestCase):
    # No redirectors configured -- DummyRedirector must not be registered.
    redirectors = []

    def test_didnt_load(self):
        assert not any(
            isinstance(prov, DummyRedirector)
            for prov in get_redirectors()
        )
class DummyRedirectorTestCase(RedirectorTestMixin, TestCase):
    redirectors = [
        'fjord.redirector.providers.dummy.DummyRedirector'
    ]

    def test_load(self):
        # Exactly one DummyRedirector instance should be registered.
        count = sum(
            1 for prov in get_redirectors()
            if isinstance(prov, DummyRedirector)
        )
        assert count == 1

    def test_handle_redirect(self):
        response = self.client.get(build_redirect_url('dummy:ou812'))
        assert response.status_code == 302
        assert response['Location'] == 'http://example.com/ou812'

    def test_nothing_handled_it_404(self):
        response = self.client.get(build_redirect_url('notdummy:ou812'))
        assert response.status_code == 404

    def test_no_redirect_specified_404(self):
        response = self.client.get(reverse('redirect-view'))
        assert response.status_code == 404
| bsd-3-clause |
xbmc/atv2 | xbmc/lib/libPython/Python/Tools/scripts/fixcid.py | 4 | 10003 | #! /usr/bin/env python
# Perform massive identifier substitution on C source files.
# This actually tokenizes the files (to some extent) so it can
# avoid making substitutions inside strings or comments.
# Inside strings, substitutions are never made; inside comments,
# it is a user option (off by default).
#
# The substitutions are read from one or more files whose lines,
# when not empty, after stripping comments starting with #,
# must contain exactly two words separated by whitespace: the
# old identifier and its replacement.
#
# The option -r reverses the sense of the substitutions (this may be
# useful to undo a particular substitution).
#
# If the old identifier is prefixed with a '*' (with no intervening
# whitespace), then it will not be substituted inside comments.
#
# Command line arguments are files or directories to be processed.
# Directories are searched recursively for files whose name looks
# like a C file (ends in .h or .c). The special filename '-' means
# operate in filter mode: read stdin, write stdout.
#
# Symbolic links are always ignored (except as explicit directory
# arguments).
#
# The original files are kept as back-up with a "~" suffix.
#
# Changes made are reported to stdout in a diff-like format.
#
# NB: by changing only the function fixline() you can turn this
# into a program for different changes to C source files; by
# changing the function wanted() you can make a different selection of
# files.
import sys
import regex
import os
from stat import *
import getopt
# Output shorthands: error and debug messages go to stderr, the
# diff-style change report goes to stdout.
err = sys.stderr.write
dbg = err
rep = sys.stdout.write
def usage():
    # Write the command-line help text to stderr.
    progname = sys.argv[0]
    for line in (
        'Usage: ' + progname +
        ' [-c] [-r] [-s file] ... file-or-directory ...\n',
        '\n',
        '-c : substitute inside comments\n',
        '-r : reverse direction for following -s options\n',
        '-s substfile : add a file of substitutions\n',
        '\n',
        'Each non-empty non-comment line in a substitution file must\n',
        'contain exactly two words: an identifier and its replacement.\n',
        'Comments start with a # character and end at end of line.\n',
        'If an identifier is preceded with a *, it is not substituted\n',
        'inside a comment even when -c is specified.\n',
    ):
        err(line)
def main():
    # Parse options, load substitution tables, then process each file or
    # directory argument. Exits with status 1 if anything failed.
    # NOTE: Python 2 syntax ("except ..., msg") throughout this script.
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'crs:')
    except getopt.error, msg:
        err('Options error: ' + str(msg) + '\n')
        usage()
        sys.exit(2)
    bad = 0
    if not args: # No arguments
        usage()
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-c':
            setdocomments()
        if opt == '-r':
            setreverse()
        if opt == '-s':
            addsubst(arg)
    for arg in args:
        if os.path.isdir(arg):
            if recursedown(arg): bad = 1
        elif os.path.islink(arg):
            # Symbolic links given explicitly are rejected (elsewhere they
            # are silently skipped).
            err(arg + ': will not process symbolic links\n')
            bad = 1
        else:
            if fix(arg): bad = 1
    sys.exit(bad)
# Change this regular expression to select a different set of files
Wanted = '^[a-zA-Z0-9_]+\.[ch]$'
def wanted(name):
    # True when the filename looks like a C source or header. The old
    # pre-1.5 `regex` module's match() is compared numerically here --
    # presumably it returns the match length, negative on failure (TODO
    # confirm against the regex module docs).
    return regex.match(Wanted, name) >= 0
def recursedown(dirname):
    # Recursively process every wanted C file below dirname.
    # Returns 1 if any file failed, 0 otherwise.
    dbg('recursedown(%r)\n' % (dirname,))
    bad = 0
    try:
        names = os.listdir(dirname)
    except os.error, msg:
        err(dirname + ': cannot list directory: ' + str(msg) + '\n')
        return 1
    names.sort()
    subdirs = []
    for name in names:
        if name in (os.curdir, os.pardir): continue
        fullname = os.path.join(dirname, name)
        if os.path.islink(fullname): pass  # symbolic links are skipped
        elif os.path.isdir(fullname):
            # Files first, directories afterwards (collected here).
            subdirs.append(fullname)
        elif wanted(name):
            if fix(fullname): bad = 1
    for fullname in subdirs:
        if recursedown(fullname): bad = 1
    return bad
def fix(filename):
    """Apply the loaded substitutions to *filename* ('-' means stdin->stdout).

    Returns 0 on success and 1 on failure.  If anything changed, the
    original file is kept as filename~ and the rewritten text replaces
    filename (preserving its permission bits).
    """
##  dbg('fix(%r)\n' % (filename,))
    if filename == '-':
        # Filter mode
        f = sys.stdin
        g = sys.stdout
    else:
        # File replacement mode
        try:
            f = open(filename, 'r')
        except IOError, msg:  # Python 2 comma syntax
            err(filename + ': cannot open: ' + str(msg) + '\n')
            return 1
        head, tail = os.path.split(filename)
        tempname = os.path.join(head, '@' + tail)
        g = None
    # If we find a match, we rewind the file and start over but
    # now copy everything to a temp file.
    lineno = 0
    initfixline()
    while 1:
        line = f.readline()
        if not line: break
        lineno = lineno + 1
        # Join backslash-continued lines into one logical line so a
        # substitution cannot straddle a continuation boundary.
        while line[-2:] == '\\\n':
            nextline = f.readline()
            if not nextline: break
            line = line + nextline
            lineno = lineno + 1
        newline = fixline(line)
        if newline != line:
            if g is None:
                # First change found: open the temp file, rewind the input
                # and restart the pass, this time copying every line out.
                try:
                    g = open(tempname, 'w')
                except IOError, msg:
                    f.close()
                    err(tempname+': cannot create: '+
                        str(msg)+'\n')
                    return 1
                f.seek(0)
                lineno = 0
                initfixline()
                rep(filename + ':\n')
                continue # restart from the beginning
            rep(repr(lineno) + '\n')
            rep('< ' + line)
            rep('> ' + newline)
        if g is not None:
            g.write(newline)
    # End of file
    if filename == '-': return 0 # Done in filter mode
    f.close()
    if not g: return 0 # No changes
    # Finishing touch -- move files
    # First copy the file's mode to the temp file
    try:
        statbuf = os.stat(filename)
        os.chmod(tempname, statbuf[ST_MODE] & 07777)
    except os.error, msg:
        err(tempname + ': warning: chmod failed (' + str(msg) + ')\n')
    # Then make a backup of the original file as filename~
    try:
        os.rename(filename, filename + '~')
    except os.error, msg:
        err(filename + ': warning: backup failed (' + str(msg) + ')\n')
    # Now move the temp file to the original file
    try:
        os.rename(tempname, filename)
    except os.error, msg:
        err(filename + ': rename failed (' + str(msg) + ')\n')
        return 1
    # Return success
    return 0
# Tokenizing ANSI C (partly)
# NOTE: these patterns use the old "regex" module's Emacs-style syntax,
# where \( \) group and \| alternates; they are NOT "re"-module patterns.
Identifier = '\(struct \)?[a-zA-Z_][a-zA-Z0-9_]+'
String = '"\([^\n\\"]\|\\\\.\)*"'
Char = '\'\([^\n\\\']\|\\\\.\)*\''
CommentStart = '/\*'
CommentEnd = '\*/'
Hexnumber = '0[xX][0-9a-fA-F]*[uUlL]*'
Octnumber = '0[0-7]*[uUlL]*'
Decnumber = '[1-9][0-9]*[uUlL]*'
Intnumber = Hexnumber + '\|' + Octnumber + '\|' + Decnumber
Exponent = '[eE][-+]?[0-9]+'
Pointfloat = '\([0-9]+\.[0-9]*\|\.[0-9]+\)\(' + Exponent + '\)?'
Expfloat = '[0-9]+' + Exponent
Floatnumber = Pointfloat + '\|' + Expfloat
Number = Floatnumber + '\|' + Intnumber
# Anything else is an operator -- don't list this explicitly because of '/*'
# Two tokenizers: one for code, one for the inside of /* ... */ comments.
OutsideComment = (Identifier, Number, String, Char, CommentStart)
OutsideCommentPattern = '\(' + '\|'.join(OutsideComment) + '\)'
OutsideCommentProgram = regex.compile(OutsideCommentPattern)
InsideComment = (Identifier, Number, CommentEnd)
InsideCommentPattern = '\(' + '\|'.join(InsideComment) + '\)'
InsideCommentProgram = regex.compile(InsideCommentPattern)
def initfixline():
    """Reset the tokenizer to the 'outside comment' state for a new pass."""
    global Program
    Program = OutsideCommentProgram
def fixline(line):
    """Apply the substitutions in Dict to one logical line; return the result.

    Tracks /* ... */ comment state in the module-global Program so that the
    appropriate token set is used, honouring the -c flag and NotInComment.
    """
    global Program
##  print '-->', repr(line)
    i = 0
    while i < len(line):
        # Old "regex" module API: search() returns the match position, or a
        # negative value when nothing matches.
        i = Program.search(line, i)
        if i < 0: break
        found = Program.group(0)
##      if Program is InsideCommentProgram: print '...',
##      else: print '   ',
##      print found
        if len(found) == 2:
            # Only the two-character tokens '/*' and '*/' switch the
            # comment state machine.
            if found == '/*':
                Program = InsideCommentProgram
            elif found == '*/':
                Program = OutsideCommentProgram
        n = len(found)
        if Dict.has_key(found):
            subst = Dict[found]
            if Program is InsideCommentProgram:
                if not Docomments:
                    print 'Found in comment:', found
                    i = i + n
                    continue
                if NotInComment.has_key(found):
                    # Identifier was marked with '*': never replace it
                    # inside a comment, even with -c.
##                  print 'Ignored in comment:',
##                  print found, '-->', subst
##                  print 'Line:', line,
                    subst = found
##              else:
##                  print 'Substituting in comment:',
##                  print found, '-->', subst
##                  print 'Line:', line,
            line = line[:i] + subst + line[i+n:]
            n = len(subst)
        i = i + n
    return line
Docomments = 0  # when true (-c), substitutions are also applied inside comments
def setdocomments():
    """Enable substitution inside comments (the -c flag)."""
    global Docomments
    Docomments = 1
Reverse = 0  # when true (-r), key/value pairs from subst files are swapped
def setreverse():
    """Toggle reversed substitution direction for following -s files (-r)."""
    global Reverse
    Reverse = (not Reverse)
Dict = {}
NotInComment = {}
def addsubst(substfile):
try:
fp = open(substfile, 'r')
except IOError, msg:
err(substfile + ': cannot read substfile: ' + str(msg) + '\n')
sys.exit(1)
lineno = 0
while 1:
line = fp.readline()
if not line: break
lineno = lineno + 1
try:
i = line.index('#')
except ValueError:
i = -1 # Happens to delete trailing \n
words = line[:i].split()
if not words: continue
if len(words) == 3 and words[0] == 'struct':
words[:2] = [words[0] + ' ' + words[1]]
elif len(words) <> 2:
err(substfile + '%s:%r: warning: bad line: %r' % (substfile, lineno, line))
continue
if Reverse:
[value, key] = words
else:
[key, value] = words
if value[0] == '*':
value = value[1:]
if key[0] == '*':
key = key[1:]
NotInComment[key] = value
if Dict.has_key(key):
err('%s:%r: warning: overriding: %r %r\n' % (substfile, lineno, key, value))
err('%s:%r: warning: previous: %r\n' % (substfile, lineno, Dict[key]))
Dict[key] = value
fp.close()
# Run as a script (not when imported as a module).
if __name__ == '__main__':
    main()
| gpl-2.0 |
lucidfrontier45/scikit-learn | examples/manifold/plot_manifold_sphere.py | 1 | 4572 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
=============================================
Manifold Learning methods on a severed sphere
=============================================
An application of the different :ref:`manifold` techniques
on a spherical data-set. Here one can see the use of
dimensionality reduction in order to gain some intuition
regarding the Manifold learning methods. Regarding the dataset,
the poles are cut from the sphere, as well as a thin slice down its
side. This enables the manifold learning techniques to
'spread it open' whilst projecting it onto two dimensions.
For a similiar example, where the methods are applied to the
S-curve dataset, see :ref:`example_manifold_plot_compare_methods.py`
Note that the purpose of the :ref:`MDS <multidimensional_scaling>` is
to find a low-dimensional representation of the data (here 2D) in
which the distances respect well the distances in the original
high-dimensional space, unlike other manifold-learning algorithms,
it does not seeks an isotropic representation of the data in
the low-dimensional space. Here the manifold problem matches fairly
that of representing a flat map of the Earth, as with
`map projection <http://en.wikipedia.org/wiki/Map_projection>`_
"""
# Author: Jaques Grobler <jaques.grobler@inria.fr>
# License: BSD
# Use the print() function form (single argument) so the example runs on
# both Python 2 and Python 3; the old print-statement form is Py2-only.
print(__doc__)

from time import time

import numpy as np
import pylab as pl
from mpl_toolkits.mplot3d import Axes3D
from matplotlib.ticker import NullFormatter

from sklearn import manifold
from sklearn.utils import check_random_state

# Next line to silence pyflakes.
Axes3D

# Variables for manifold learning.
n_neighbors = 10
n_samples = 1000

# Create our sphere: random azimuth p and inclination t.
random_state = check_random_state(0)
p = random_state.rand(n_samples) * (2 * np.pi - 0.55)
t = random_state.rand(n_samples) * np.pi

# Sever the poles from the sphere.
indices = ((t < (np.pi - (np.pi / 8))) & (t > ((np.pi / 8))))
colors = p[indices]
x, y, z = np.sin(t[indices]) * np.cos(p[indices]), \
    np.sin(t[indices]) * np.sin(p[indices]), \
    np.cos(t[indices])

# Plot our dataset.
fig = pl.figure(figsize=(15, 8))
pl.suptitle("Manifold Learning with %i points, %i neighbors"
            % (1000, n_neighbors), fontsize=14)

ax = fig.add_subplot(241, projection='3d')
ax.scatter(x, y, z, c=p[indices], cmap=pl.cm.rainbow)
try:
    # compatibility matplotlib < 1.0
    ax.view_init(40, -10)
except Exception:  # narrowed from a bare except; keeps Ctrl-C working
    pass

sphere_data = np.array([x, y, z]).T

# Perform Locally Linear Embedding Manifold learning
methods = ['standard', 'ltsa', 'hessian', 'modified']
labels = ['LLE', 'LTSA', 'Hessian LLE', 'Modified LLE']

for i, method in enumerate(methods):
    t0 = time()
    trans_data = manifold\
        .LocallyLinearEmbedding(n_neighbors, 2,
                                method=method).fit_transform(sphere_data).T
    t1 = time()
    print("%s: %.2g sec" % (methods[i], t1 - t0))

    ax = fig.add_subplot(242 + i)
    pl.scatter(trans_data[0], trans_data[1], c=colors, cmap=pl.cm.rainbow)
    pl.title("%s (%.2g sec)" % (labels[i], t1 - t0))
    ax.xaxis.set_major_formatter(NullFormatter())
    ax.yaxis.set_major_formatter(NullFormatter())
    pl.axis('tight')

# Perform Isomap Manifold learning.
t0 = time()
trans_data = manifold.Isomap(n_neighbors, n_components=2)\
    .fit_transform(sphere_data).T
t1 = time()
print("%s: %.2g sec" % ('ISO', t1 - t0))

ax = fig.add_subplot(246)
pl.scatter(trans_data[0], trans_data[1], c=colors, cmap=pl.cm.rainbow)
pl.title("%s (%.2g sec)" % ('Isomap', t1 - t0))
ax.xaxis.set_major_formatter(NullFormatter())
ax.yaxis.set_major_formatter(NullFormatter())
pl.axis('tight')

# Perform Multi-dimensional scaling.
t0 = time()
mds = manifold.MDS(2, max_iter=100, n_init=1)
trans_data = mds.fit_transform(sphere_data).T
t1 = time()
print("MDS: %.2g sec" % (t1 - t0))

ax = fig.add_subplot(247)
pl.scatter(trans_data[0], trans_data[1], c=colors, cmap=pl.cm.rainbow)
pl.title("MDS (%.2g sec)" % (t1 - t0))
ax.xaxis.set_major_formatter(NullFormatter())
ax.yaxis.set_major_formatter(NullFormatter())
pl.axis('tight')

# Perform Spectral Embedding.
t0 = time()
se = manifold.SpectralEmbedding(n_components=2,
                                n_neighbors=n_neighbors)
trans_data = se.fit_transform(sphere_data).T
t1 = time()
print("Spectral Embedding: %.2g sec" % (t1 - t0))

ax = fig.add_subplot(248)
pl.scatter(trans_data[0], trans_data[1], c=colors, cmap=pl.cm.rainbow)
pl.title("Spectral Embedding (%.2g sec)" % (t1 - t0))
ax.xaxis.set_major_formatter(NullFormatter())
ax.yaxis.set_major_formatter(NullFormatter())
pl.axis('tight')

pl.show()
| bsd-3-clause |
shelvenzhou/BTCGPU | test/functional/test_framework/script.py | 2 | 26221 | #!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Functionality to build scripts, as well as SignatureHash().
This file is modified from python-bitcoinlib.
"""
from .mininode import CTransaction, CTxOut, sha256, hash256, uint256_from_str, ser_uint256, ser_string
from binascii import hexlify
import hashlib
import sys
# Byte helpers that behave the same on Python 2 (str bytes) and Python 3:
# bchr(int) -> single byte, bord(byte) -> int.
bchr = chr
bord = ord
if sys.version > '3':  # string comparison against '3' selects Python 3.x
    long = int
    bchr = lambda x: bytes([x])
    bord = lambda x: x
import struct
from .bignum import bn2vch
MAX_SCRIPT_SIZE = 10000          # maximum serialized script length in bytes
MAX_SCRIPT_ELEMENT_SIZE = 520    # maximum size of a single pushed element
MAX_SCRIPT_OPCODES = 201         # maximum number of non-push opcodes per script
OPCODE_NAMES = {}                # CScriptOp -> human-readable name (filled below)
def hash160(s):
    """Return RIPEMD160(SHA256(s)), Bitcoin's standard HASH160."""
    ripemd = hashlib.new('ripemd160')
    ripemd.update(sha256(s))
    return ripemd.digest()
_opcode_instances = []
class CScriptOp(int):
    """A single script opcode, interned so each value is one object."""
    __slots__ = []

    @staticmethod
    def encode_op_pushdata(d):
        """Encode a PUSHDATA op for payload *d*, returning bytes."""
        size = len(d)
        if size < 0x4c:
            return bchr(size) + d                                # direct push
        if size <= 0xff:
            return b'\x4c' + bchr(size) + d                      # OP_PUSHDATA1
        if size <= 0xffff:
            return b'\x4d' + struct.pack(b'<H', size) + d        # OP_PUSHDATA2
        if size <= 0xffffffff:
            return b'\x4e' + struct.pack(b'<I', size) + d        # OP_PUSHDATA4
        raise ValueError("Data too long to encode in a PUSHDATA op")

    @staticmethod
    def encode_op_n(n):
        """Encode the small integer *n* (0..16) as an OP_N opcode."""
        if not (0 <= n <= 16):
            raise ValueError('Integer must be in range 0 <= n <= 16, got %d' % n)
        return OP_0 if n == 0 else CScriptOp(OP_1 + n - 1)

    def decode_op_n(self):
        """Decode an OP_N opcode back to its small-integer value."""
        if self == OP_0:
            return 0
        if not (self == OP_0 or OP_1 <= self <= OP_16):
            raise ValueError('op %r is not an OP_N' % self)
        return int(self - OP_1 + 1)

    def is_small_int(self):
        """True if this op pushes a small integer (OP_0, OP_1..OP_16)."""
        return self == 0 or 0x51 <= self <= 0x60

    def __str__(self):
        return repr(self)

    def __repr__(self):
        name = OPCODE_NAMES.get(self)
        return name if name is not None else 'CScriptOp(0x%x)' % self

    def __new__(cls, n):
        try:
            return _opcode_instances[n]
        except IndexError:
            # Instances are interned in order; n must be the next free slot.
            assert len(_opcode_instances) == n
            _opcode_instances.append(super(CScriptOp, cls).__new__(cls, n))
            return _opcode_instances[n]

# Pre-create all 256 opcode instances so CScriptOp(n) is a cheap lookup.
for n in range(0xff + 1):
    CScriptOp(n)
# push value
OP_0 = CScriptOp(0x00)
OP_FALSE = OP_0
OP_PUSHDATA1 = CScriptOp(0x4c)
OP_PUSHDATA2 = CScriptOp(0x4d)
OP_PUSHDATA4 = CScriptOp(0x4e)
OP_1NEGATE = CScriptOp(0x4f)
OP_RESERVED = CScriptOp(0x50)
OP_1 = CScriptOp(0x51)
OP_TRUE=OP_1
OP_2 = CScriptOp(0x52)
OP_3 = CScriptOp(0x53)
OP_4 = CScriptOp(0x54)
OP_5 = CScriptOp(0x55)
OP_6 = CScriptOp(0x56)
OP_7 = CScriptOp(0x57)
OP_8 = CScriptOp(0x58)
OP_9 = CScriptOp(0x59)
OP_10 = CScriptOp(0x5a)
OP_11 = CScriptOp(0x5b)
OP_12 = CScriptOp(0x5c)
OP_13 = CScriptOp(0x5d)
OP_14 = CScriptOp(0x5e)
OP_15 = CScriptOp(0x5f)
OP_16 = CScriptOp(0x60)
# control
OP_NOP = CScriptOp(0x61)
OP_VER = CScriptOp(0x62)
OP_IF = CScriptOp(0x63)
OP_NOTIF = CScriptOp(0x64)
OP_VERIF = CScriptOp(0x65)
OP_VERNOTIF = CScriptOp(0x66)
OP_ELSE = CScriptOp(0x67)
OP_ENDIF = CScriptOp(0x68)
OP_VERIFY = CScriptOp(0x69)
OP_RETURN = CScriptOp(0x6a)
# stack ops
OP_TOALTSTACK = CScriptOp(0x6b)
OP_FROMALTSTACK = CScriptOp(0x6c)
OP_2DROP = CScriptOp(0x6d)
OP_2DUP = CScriptOp(0x6e)
OP_3DUP = CScriptOp(0x6f)
OP_2OVER = CScriptOp(0x70)
OP_2ROT = CScriptOp(0x71)
OP_2SWAP = CScriptOp(0x72)
OP_IFDUP = CScriptOp(0x73)
OP_DEPTH = CScriptOp(0x74)
OP_DROP = CScriptOp(0x75)
OP_DUP = CScriptOp(0x76)
OP_NIP = CScriptOp(0x77)
OP_OVER = CScriptOp(0x78)
OP_PICK = CScriptOp(0x79)
OP_ROLL = CScriptOp(0x7a)
OP_ROT = CScriptOp(0x7b)
OP_SWAP = CScriptOp(0x7c)
OP_TUCK = CScriptOp(0x7d)
# splice ops
OP_CAT = CScriptOp(0x7e)
OP_SUBSTR = CScriptOp(0x7f)
OP_LEFT = CScriptOp(0x80)
OP_RIGHT = CScriptOp(0x81)
OP_SIZE = CScriptOp(0x82)
# bit logic
OP_INVERT = CScriptOp(0x83)
OP_AND = CScriptOp(0x84)
OP_OR = CScriptOp(0x85)
OP_XOR = CScriptOp(0x86)
OP_EQUAL = CScriptOp(0x87)
OP_EQUALVERIFY = CScriptOp(0x88)
OP_RESERVED1 = CScriptOp(0x89)
OP_RESERVED2 = CScriptOp(0x8a)
# numeric
OP_1ADD = CScriptOp(0x8b)
OP_1SUB = CScriptOp(0x8c)
OP_2MUL = CScriptOp(0x8d)
OP_2DIV = CScriptOp(0x8e)
OP_NEGATE = CScriptOp(0x8f)
OP_ABS = CScriptOp(0x90)
OP_NOT = CScriptOp(0x91)
OP_0NOTEQUAL = CScriptOp(0x92)
OP_ADD = CScriptOp(0x93)
OP_SUB = CScriptOp(0x94)
OP_MUL = CScriptOp(0x95)
OP_DIV = CScriptOp(0x96)
OP_MOD = CScriptOp(0x97)
OP_LSHIFT = CScriptOp(0x98)
OP_RSHIFT = CScriptOp(0x99)
OP_BOOLAND = CScriptOp(0x9a)
OP_BOOLOR = CScriptOp(0x9b)
OP_NUMEQUAL = CScriptOp(0x9c)
OP_NUMEQUALVERIFY = CScriptOp(0x9d)
OP_NUMNOTEQUAL = CScriptOp(0x9e)
OP_LESSTHAN = CScriptOp(0x9f)
OP_GREATERTHAN = CScriptOp(0xa0)
OP_LESSTHANOREQUAL = CScriptOp(0xa1)
OP_GREATERTHANOREQUAL = CScriptOp(0xa2)
OP_MIN = CScriptOp(0xa3)
OP_MAX = CScriptOp(0xa4)
OP_WITHIN = CScriptOp(0xa5)
# crypto
OP_RIPEMD160 = CScriptOp(0xa6)
OP_SHA1 = CScriptOp(0xa7)
OP_SHA256 = CScriptOp(0xa8)
OP_HASH160 = CScriptOp(0xa9)
OP_HASH256 = CScriptOp(0xaa)
OP_CODESEPARATOR = CScriptOp(0xab)
OP_CHECKSIG = CScriptOp(0xac)
OP_CHECKSIGVERIFY = CScriptOp(0xad)
OP_CHECKMULTISIG = CScriptOp(0xae)
OP_CHECKMULTISIGVERIFY = CScriptOp(0xaf)
# expansion
OP_NOP1 = CScriptOp(0xb0)
OP_CHECKLOCKTIMEVERIFY = CScriptOp(0xb1)
OP_CHECKSEQUENCEVERIFY = CScriptOp(0xb2)
OP_NOP4 = CScriptOp(0xb3)
OP_NOP5 = CScriptOp(0xb4)
OP_NOP6 = CScriptOp(0xb5)
OP_NOP7 = CScriptOp(0xb6)
OP_NOP8 = CScriptOp(0xb7)
OP_NOP9 = CScriptOp(0xb8)
OP_NOP10 = CScriptOp(0xb9)
# template matching params
OP_SMALLINTEGER = CScriptOp(0xfa)
OP_PUBKEYS = CScriptOp(0xfb)
OP_PUBKEYHASH = CScriptOp(0xfd)
OP_PUBKEY = CScriptOp(0xfe)
OP_INVALIDOPCODE = CScriptOp(0xff)
VALID_OPCODES = {
OP_1NEGATE,
OP_RESERVED,
OP_1,
OP_2,
OP_3,
OP_4,
OP_5,
OP_6,
OP_7,
OP_8,
OP_9,
OP_10,
OP_11,
OP_12,
OP_13,
OP_14,
OP_15,
OP_16,
OP_NOP,
OP_VER,
OP_IF,
OP_NOTIF,
OP_VERIF,
OP_VERNOTIF,
OP_ELSE,
OP_ENDIF,
OP_VERIFY,
OP_RETURN,
OP_TOALTSTACK,
OP_FROMALTSTACK,
OP_2DROP,
OP_2DUP,
OP_3DUP,
OP_2OVER,
OP_2ROT,
OP_2SWAP,
OP_IFDUP,
OP_DEPTH,
OP_DROP,
OP_DUP,
OP_NIP,
OP_OVER,
OP_PICK,
OP_ROLL,
OP_ROT,
OP_SWAP,
OP_TUCK,
OP_CAT,
OP_SUBSTR,
OP_LEFT,
OP_RIGHT,
OP_SIZE,
OP_INVERT,
OP_AND,
OP_OR,
OP_XOR,
OP_EQUAL,
OP_EQUALVERIFY,
OP_RESERVED1,
OP_RESERVED2,
OP_1ADD,
OP_1SUB,
OP_2MUL,
OP_2DIV,
OP_NEGATE,
OP_ABS,
OP_NOT,
OP_0NOTEQUAL,
OP_ADD,
OP_SUB,
OP_MUL,
OP_DIV,
OP_MOD,
OP_LSHIFT,
OP_RSHIFT,
OP_BOOLAND,
OP_BOOLOR,
OP_NUMEQUAL,
OP_NUMEQUALVERIFY,
OP_NUMNOTEQUAL,
OP_LESSTHAN,
OP_GREATERTHAN,
OP_LESSTHANOREQUAL,
OP_GREATERTHANOREQUAL,
OP_MIN,
OP_MAX,
OP_WITHIN,
OP_RIPEMD160,
OP_SHA1,
OP_SHA256,
OP_HASH160,
OP_HASH256,
OP_CODESEPARATOR,
OP_CHECKSIG,
OP_CHECKSIGVERIFY,
OP_CHECKMULTISIG,
OP_CHECKMULTISIGVERIFY,
OP_NOP1,
OP_CHECKLOCKTIMEVERIFY,
OP_CHECKSEQUENCEVERIFY,
OP_NOP4,
OP_NOP5,
OP_NOP6,
OP_NOP7,
OP_NOP8,
OP_NOP9,
OP_NOP10,
OP_SMALLINTEGER,
OP_PUBKEYS,
OP_PUBKEYHASH,
OP_PUBKEY,
}
OPCODE_NAMES.update({
OP_0 : 'OP_0',
OP_PUSHDATA1 : 'OP_PUSHDATA1',
OP_PUSHDATA2 : 'OP_PUSHDATA2',
OP_PUSHDATA4 : 'OP_PUSHDATA4',
OP_1NEGATE : 'OP_1NEGATE',
OP_RESERVED : 'OP_RESERVED',
OP_1 : 'OP_1',
OP_2 : 'OP_2',
OP_3 : 'OP_3',
OP_4 : 'OP_4',
OP_5 : 'OP_5',
OP_6 : 'OP_6',
OP_7 : 'OP_7',
OP_8 : 'OP_8',
OP_9 : 'OP_9',
OP_10 : 'OP_10',
OP_11 : 'OP_11',
OP_12 : 'OP_12',
OP_13 : 'OP_13',
OP_14 : 'OP_14',
OP_15 : 'OP_15',
OP_16 : 'OP_16',
OP_NOP : 'OP_NOP',
OP_VER : 'OP_VER',
OP_IF : 'OP_IF',
OP_NOTIF : 'OP_NOTIF',
OP_VERIF : 'OP_VERIF',
OP_VERNOTIF : 'OP_VERNOTIF',
OP_ELSE : 'OP_ELSE',
OP_ENDIF : 'OP_ENDIF',
OP_VERIFY : 'OP_VERIFY',
OP_RETURN : 'OP_RETURN',
OP_TOALTSTACK : 'OP_TOALTSTACK',
OP_FROMALTSTACK : 'OP_FROMALTSTACK',
OP_2DROP : 'OP_2DROP',
OP_2DUP : 'OP_2DUP',
OP_3DUP : 'OP_3DUP',
OP_2OVER : 'OP_2OVER',
OP_2ROT : 'OP_2ROT',
OP_2SWAP : 'OP_2SWAP',
OP_IFDUP : 'OP_IFDUP',
OP_DEPTH : 'OP_DEPTH',
OP_DROP : 'OP_DROP',
OP_DUP : 'OP_DUP',
OP_NIP : 'OP_NIP',
OP_OVER : 'OP_OVER',
OP_PICK : 'OP_PICK',
OP_ROLL : 'OP_ROLL',
OP_ROT : 'OP_ROT',
OP_SWAP : 'OP_SWAP',
OP_TUCK : 'OP_TUCK',
OP_CAT : 'OP_CAT',
OP_SUBSTR : 'OP_SUBSTR',
OP_LEFT : 'OP_LEFT',
OP_RIGHT : 'OP_RIGHT',
OP_SIZE : 'OP_SIZE',
OP_INVERT : 'OP_INVERT',
OP_AND : 'OP_AND',
OP_OR : 'OP_OR',
OP_XOR : 'OP_XOR',
OP_EQUAL : 'OP_EQUAL',
OP_EQUALVERIFY : 'OP_EQUALVERIFY',
OP_RESERVED1 : 'OP_RESERVED1',
OP_RESERVED2 : 'OP_RESERVED2',
OP_1ADD : 'OP_1ADD',
OP_1SUB : 'OP_1SUB',
OP_2MUL : 'OP_2MUL',
OP_2DIV : 'OP_2DIV',
OP_NEGATE : 'OP_NEGATE',
OP_ABS : 'OP_ABS',
OP_NOT : 'OP_NOT',
OP_0NOTEQUAL : 'OP_0NOTEQUAL',
OP_ADD : 'OP_ADD',
OP_SUB : 'OP_SUB',
OP_MUL : 'OP_MUL',
OP_DIV : 'OP_DIV',
OP_MOD : 'OP_MOD',
OP_LSHIFT : 'OP_LSHIFT',
OP_RSHIFT : 'OP_RSHIFT',
OP_BOOLAND : 'OP_BOOLAND',
OP_BOOLOR : 'OP_BOOLOR',
OP_NUMEQUAL : 'OP_NUMEQUAL',
OP_NUMEQUALVERIFY : 'OP_NUMEQUALVERIFY',
OP_NUMNOTEQUAL : 'OP_NUMNOTEQUAL',
OP_LESSTHAN : 'OP_LESSTHAN',
OP_GREATERTHAN : 'OP_GREATERTHAN',
OP_LESSTHANOREQUAL : 'OP_LESSTHANOREQUAL',
OP_GREATERTHANOREQUAL : 'OP_GREATERTHANOREQUAL',
OP_MIN : 'OP_MIN',
OP_MAX : 'OP_MAX',
OP_WITHIN : 'OP_WITHIN',
OP_RIPEMD160 : 'OP_RIPEMD160',
OP_SHA1 : 'OP_SHA1',
OP_SHA256 : 'OP_SHA256',
OP_HASH160 : 'OP_HASH160',
OP_HASH256 : 'OP_HASH256',
OP_CODESEPARATOR : 'OP_CODESEPARATOR',
OP_CHECKSIG : 'OP_CHECKSIG',
OP_CHECKSIGVERIFY : 'OP_CHECKSIGVERIFY',
OP_CHECKMULTISIG : 'OP_CHECKMULTISIG',
OP_CHECKMULTISIGVERIFY : 'OP_CHECKMULTISIGVERIFY',
OP_NOP1 : 'OP_NOP1',
OP_CHECKLOCKTIMEVERIFY : 'OP_CHECKLOCKTIMEVERIFY',
OP_CHECKSEQUENCEVERIFY : 'OP_CHECKSEQUENCEVERIFY',
OP_NOP4 : 'OP_NOP4',
OP_NOP5 : 'OP_NOP5',
OP_NOP6 : 'OP_NOP6',
OP_NOP7 : 'OP_NOP7',
OP_NOP8 : 'OP_NOP8',
OP_NOP9 : 'OP_NOP9',
OP_NOP10 : 'OP_NOP10',
OP_SMALLINTEGER : 'OP_SMALLINTEGER',
OP_PUBKEYS : 'OP_PUBKEYS',
OP_PUBKEYHASH : 'OP_PUBKEYHASH',
OP_PUBKEY : 'OP_PUBKEY',
OP_INVALIDOPCODE : 'OP_INVALIDOPCODE',
})
OPCODES_BY_NAME = {
'OP_0' : OP_0,
'OP_PUSHDATA1' : OP_PUSHDATA1,
'OP_PUSHDATA2' : OP_PUSHDATA2,
'OP_PUSHDATA4' : OP_PUSHDATA4,
'OP_1NEGATE' : OP_1NEGATE,
'OP_RESERVED' : OP_RESERVED,
'OP_1' : OP_1,
'OP_2' : OP_2,
'OP_3' : OP_3,
'OP_4' : OP_4,
'OP_5' : OP_5,
'OP_6' : OP_6,
'OP_7' : OP_7,
'OP_8' : OP_8,
'OP_9' : OP_9,
'OP_10' : OP_10,
'OP_11' : OP_11,
'OP_12' : OP_12,
'OP_13' : OP_13,
'OP_14' : OP_14,
'OP_15' : OP_15,
'OP_16' : OP_16,
'OP_NOP' : OP_NOP,
'OP_VER' : OP_VER,
'OP_IF' : OP_IF,
'OP_NOTIF' : OP_NOTIF,
'OP_VERIF' : OP_VERIF,
'OP_VERNOTIF' : OP_VERNOTIF,
'OP_ELSE' : OP_ELSE,
'OP_ENDIF' : OP_ENDIF,
'OP_VERIFY' : OP_VERIFY,
'OP_RETURN' : OP_RETURN,
'OP_TOALTSTACK' : OP_TOALTSTACK,
'OP_FROMALTSTACK' : OP_FROMALTSTACK,
'OP_2DROP' : OP_2DROP,
'OP_2DUP' : OP_2DUP,
'OP_3DUP' : OP_3DUP,
'OP_2OVER' : OP_2OVER,
'OP_2ROT' : OP_2ROT,
'OP_2SWAP' : OP_2SWAP,
'OP_IFDUP' : OP_IFDUP,
'OP_DEPTH' : OP_DEPTH,
'OP_DROP' : OP_DROP,
'OP_DUP' : OP_DUP,
'OP_NIP' : OP_NIP,
'OP_OVER' : OP_OVER,
'OP_PICK' : OP_PICK,
'OP_ROLL' : OP_ROLL,
'OP_ROT' : OP_ROT,
'OP_SWAP' : OP_SWAP,
'OP_TUCK' : OP_TUCK,
'OP_CAT' : OP_CAT,
'OP_SUBSTR' : OP_SUBSTR,
'OP_LEFT' : OP_LEFT,
'OP_RIGHT' : OP_RIGHT,
'OP_SIZE' : OP_SIZE,
'OP_INVERT' : OP_INVERT,
'OP_AND' : OP_AND,
'OP_OR' : OP_OR,
'OP_XOR' : OP_XOR,
'OP_EQUAL' : OP_EQUAL,
'OP_EQUALVERIFY' : OP_EQUALVERIFY,
'OP_RESERVED1' : OP_RESERVED1,
'OP_RESERVED2' : OP_RESERVED2,
'OP_1ADD' : OP_1ADD,
'OP_1SUB' : OP_1SUB,
'OP_2MUL' : OP_2MUL,
'OP_2DIV' : OP_2DIV,
'OP_NEGATE' : OP_NEGATE,
'OP_ABS' : OP_ABS,
'OP_NOT' : OP_NOT,
'OP_0NOTEQUAL' : OP_0NOTEQUAL,
'OP_ADD' : OP_ADD,
'OP_SUB' : OP_SUB,
'OP_MUL' : OP_MUL,
'OP_DIV' : OP_DIV,
'OP_MOD' : OP_MOD,
'OP_LSHIFT' : OP_LSHIFT,
'OP_RSHIFT' : OP_RSHIFT,
'OP_BOOLAND' : OP_BOOLAND,
'OP_BOOLOR' : OP_BOOLOR,
'OP_NUMEQUAL' : OP_NUMEQUAL,
'OP_NUMEQUALVERIFY' : OP_NUMEQUALVERIFY,
'OP_NUMNOTEQUAL' : OP_NUMNOTEQUAL,
'OP_LESSTHAN' : OP_LESSTHAN,
'OP_GREATERTHAN' : OP_GREATERTHAN,
'OP_LESSTHANOREQUAL' : OP_LESSTHANOREQUAL,
'OP_GREATERTHANOREQUAL' : OP_GREATERTHANOREQUAL,
'OP_MIN' : OP_MIN,
'OP_MAX' : OP_MAX,
'OP_WITHIN' : OP_WITHIN,
'OP_RIPEMD160' : OP_RIPEMD160,
'OP_SHA1' : OP_SHA1,
'OP_SHA256' : OP_SHA256,
'OP_HASH160' : OP_HASH160,
'OP_HASH256' : OP_HASH256,
'OP_CODESEPARATOR' : OP_CODESEPARATOR,
'OP_CHECKSIG' : OP_CHECKSIG,
'OP_CHECKSIGVERIFY' : OP_CHECKSIGVERIFY,
'OP_CHECKMULTISIG' : OP_CHECKMULTISIG,
'OP_CHECKMULTISIGVERIFY' : OP_CHECKMULTISIGVERIFY,
'OP_NOP1' : OP_NOP1,
'OP_CHECKLOCKTIMEVERIFY' : OP_CHECKLOCKTIMEVERIFY,
'OP_CHECKSEQUENCEVERIFY' : OP_CHECKSEQUENCEVERIFY,
'OP_NOP4' : OP_NOP4,
'OP_NOP5' : OP_NOP5,
'OP_NOP6' : OP_NOP6,
'OP_NOP7' : OP_NOP7,
'OP_NOP8' : OP_NOP8,
'OP_NOP9' : OP_NOP9,
'OP_NOP10' : OP_NOP10,
'OP_SMALLINTEGER' : OP_SMALLINTEGER,
'OP_PUBKEYS' : OP_PUBKEYS,
'OP_PUBKEYHASH' : OP_PUBKEYHASH,
'OP_PUBKEY' : OP_PUBKEY,
}
class CScriptInvalidError(Exception):
    """Base class for all CScript-related errors."""


class CScriptTruncatedPushDataError(CScriptInvalidError):
    """A PUSHDATA operation whose payload was cut short."""

    def __init__(self, msg, data):
        super(CScriptTruncatedPushDataError, self).__init__(msg)
        # Keep the partial payload so callers can inspect what was read.
        self.data = data
# This is used, eg, for blockchain heights in coinbase scripts (bip34)
class CScriptNum(object):
    """Minimally-encoded script integer (CScriptNum semantics)."""

    def __init__(self, d=0):
        self.value = d

    @staticmethod
    def encode(obj):
        """Serialize *obj* as a length-prefixed little-endian sign-magnitude
        number, the minimal encoding script arithmetic requires."""
        if obj.value == 0:
            return b''
        neg = obj.value < 0
        magnitude = -obj.value if neg else obj.value
        payload = bytearray()
        while magnitude:
            payload.append(magnitude & 0xff)
            magnitude >>= 8
        if payload[-1] & 0x80:
            # Top bit already used by the magnitude: add a sign byte.
            payload.append(0x80 if neg else 0x00)
        elif neg:
            payload[-1] |= 0x80
        return bytes(bchr(len(payload)) + payload)
class CScript(bytes):
    """Serialized script

    A bytes subclass, so you can use this directly whenever bytes are accepted.
    Note that this means that indexing does *not* work - you'll get an index by
    byte rather than opcode. This format was chosen for efficiency so that the
    general case would not require creating a lot of little CScriptOP objects.

    iter(script) however does iterate by opcode.
    """
    @classmethod
    def __coerce_instance(cls, other):
        # Coerce other into bytes: opcodes become single bytes, numbers are
        # minimally encoded, and raw bytes become PUSHDATA operations.
        if isinstance(other, CScriptOp):
            other = bchr(other)
        elif isinstance(other, CScriptNum):
            if (other.value == 0):
                other = bchr(CScriptOp(OP_0))
            else:
                other = CScriptNum.encode(other)
        elif isinstance(other, int):
            if 0 <= other <= 16:
                other = bytes(bchr(CScriptOp.encode_op_n(other)))
            elif other == -1:
                other = bytes(bchr(OP_1NEGATE))
            else:
                other = CScriptOp.encode_op_pushdata(bn2vch(other))
        elif isinstance(other, (bytes, bytearray)):
            other = CScriptOp.encode_op_pushdata(other)
        return other

    def __add__(self, other):
        # Do the coercion outside of the try block so that errors in it are
        # noticed.
        other = self.__coerce_instance(other)

        try:
            # bytes.__add__ always returns bytes instances unfortunately
            return CScript(super(CScript, self).__add__(other))
        except TypeError:
            raise TypeError('Can not add a %r instance to a CScript' % other.__class__)

    def join(self, iterable):
        # join makes no sense for a CScript()
        raise NotImplementedError

    def __new__(cls, value=b''):
        if isinstance(value, bytes) or isinstance(value, bytearray):
            return super(CScript, cls).__new__(cls, value)
        else:
            def coerce_iterable(iterable):
                for instance in iterable:
                    yield cls.__coerce_instance(instance)
            # Annoyingly on both python2 and python3 bytes.join() always
            # returns a bytes instance even when subclassed.
            return super(CScript, cls).__new__(cls, b''.join(coerce_iterable(value)))

    def raw_iter(self):
        """Raw iteration

        Yields tuples of (opcode, data, sop_idx) so that the different possible
        PUSHDATA encodings can be accurately distinguished, as well as
        determining the exact opcode byte indexes. (sop_idx)

        Note: opcode is yielded as a plain int, not a CScriptOp.
        """
        i = 0
        while i < len(self):
            sop_idx = i
            opcode = bord(self[i])
            i += 1

            if opcode > OP_PUSHDATA4:
                yield (opcode, None, sop_idx)
            else:
                datasize = None
                pushdata_type = None
                if opcode < OP_PUSHDATA1:
                    pushdata_type = 'PUSHDATA(%d)' % opcode
                    datasize = opcode
                elif opcode == OP_PUSHDATA1:
                    pushdata_type = 'PUSHDATA1'
                    if i >= len(self):
                        raise CScriptInvalidError('PUSHDATA1: missing data length')
                    datasize = bord(self[i])
                    i += 1
                elif opcode == OP_PUSHDATA2:
                    pushdata_type = 'PUSHDATA2'
                    if i + 1 >= len(self):
                        raise CScriptInvalidError('PUSHDATA2: missing data length')
                    datasize = bord(self[i]) + (bord(self[i+1]) << 8)
                    i += 2
                elif opcode == OP_PUSHDATA4:
                    pushdata_type = 'PUSHDATA4'
                    if i + 3 >= len(self):
                        raise CScriptInvalidError('PUSHDATA4: missing data length')
                    datasize = bord(self[i]) + (bord(self[i+1]) << 8) + (bord(self[i+2]) << 16) + (bord(self[i+3]) << 24)
                    i += 4
                else:
                    assert False # shouldn't happen

                data = bytes(self[i:i+datasize])

                # Check for truncation
                if len(data) < datasize:
                    raise CScriptTruncatedPushDataError('%s: truncated data' % pushdata_type, data)
                i += datasize

                yield (opcode, data, sop_idx)

    def __iter__(self):
        """'Cooked' iteration

        Returns either a CScriptOP instance, an integer, or bytes, as
        appropriate.

        See raw_iter() if you need to distinguish the different possible
        PUSHDATA encodings.
        """
        for (opcode, data, sop_idx) in self.raw_iter():
            if data is not None:
                yield data
            else:
                opcode = CScriptOp(opcode)

                if opcode.is_small_int():
                    yield opcode.decode_op_n()
                else:
                    yield CScriptOp(opcode)

    def __repr__(self):
        # For Python3 compatibility add b before strings so testcases don't
        # need to change
        def _repr(o):
            if isinstance(o, bytes):
                return (b"x('%s')" % hexlify(o)).decode('ascii')
            else:
                return repr(o)

        ops = []
        i = iter(self)
        while True:
            op = None
            try:
                op = _repr(next(i))
            except CScriptTruncatedPushDataError as err:
                op = '%s...<ERROR: %s>' % (_repr(err.data), err)
                break
            except CScriptInvalidError as err:
                op = '<ERROR: %s>' % err
                break
            except StopIteration:
                break
            finally:
                if op is not None:
                    ops.append(op)

        return "CScript([%s])" % ', '.join(ops)

    def GetSigOpCount(self, fAccurate):
        """Get the SigOp count.

        fAccurate - Accurately count CHECKMULTISIG, see BIP16 for details.

        Note that this is consensus-critical.
        """
        n = 0
        lastOpcode = OP_INVALIDOPCODE
        for (opcode, data, sop_idx) in self.raw_iter():
            if opcode in (OP_CHECKSIG, OP_CHECKSIGVERIFY):
                n += 1
            elif opcode in (OP_CHECKMULTISIG, OP_CHECKMULTISIGVERIFY):
                if fAccurate and (OP_1 <= lastOpcode <= OP_16):
                    # Fixed: decode the *preceding* OP_N (the key count), as
                    # the C++ reference does.  The old code called
                    # decode_op_n() on `opcode`, which is a plain int yielded
                    # by raw_iter() and has no such method (AttributeError).
                    n += CScriptOp(lastOpcode).decode_op_n()
                else:
                    # Conservative maximum of 20 pubkeys per CHECKMULTISIG.
                    n += 20
            lastOpcode = opcode
        return n
SIGHASH_ALL = 1              # sign all inputs and outputs
SIGHASH_NONE = 2             # sign no outputs
SIGHASH_SINGLE = 3           # sign only the output matching this input index
SIGHASH_FORKID = 0x40        # BIP143-style replay-protected signing flag
SIGHASH_ANYONECANPAY = 0x80  # commit only to this input
FORKID_BTG = 79 # Atomic number AU
def FindAndDelete(script, sig):
    """Consensus critical, see FindAndDelete() in Satoshi codebase

    Returns *script* with every opcode-aligned occurrence of the byte
    sequence *sig* removed.  Matching is done at opcode boundaries only,
    mirroring the reference implementation exactly.
    """
    r = b''
    last_sop_idx = sop_idx = 0
    # skip starts True so a match at offset 0 is dropped; each iteration
    # decides whether the *previous* span [last_sop_idx, sop_idx) is kept.
    skip = True
    for (opcode, data, sop_idx) in script.raw_iter():
        if not skip:
            r += script[last_sop_idx:sop_idx]
        last_sop_idx = sop_idx
        if script[sop_idx:sop_idx + len(sig)] == sig:
            skip = True
        else:
            skip = False
    # Emit the trailing span unless it begins another match.
    if not skip:
        r += script[last_sop_idx:]
    return CScript(r)
def SignatureHash(script, txTo, inIdx, hashtype):
    """Consensus-correct SignatureHash

    Returns (hash, err) to precisely match the consensus-critical behavior of
    the SIGHASH_SINGLE bug. (inIdx is *not* checked for validity)
    """
    assert not (hashtype & SIGHASH_FORKID), "BIP143 is mandatory for FORKID enabled transactions."
    # The value legacy signing yields for out-of-range indexes: the number 1
    # encoded as a 256-bit little-endian hash (the "SIGHASH_SINGLE bug").
    HASH_ONE = b'\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    if inIdx >= len(txTo.vin):
        return (HASH_ONE, "inIdx %d out of range (%d)" % (inIdx, len(txTo.vin)))
    txtmp = CTransaction(txTo)
    # Blank every scriptSig, then place the (OP_CODESEPARATOR-stripped)
    # script being signed into the input under signature.
    for txin in txtmp.vin:
        txin.scriptSig = b''
    txtmp.vin[inIdx].scriptSig = FindAndDelete(script, CScript([OP_CODESEPARATOR]))
    if (hashtype & 0x1f) == SIGHASH_NONE:
        # Commit to no outputs; other inputs' sequences are zeroed so they
        # may change without invalidating this signature.
        txtmp.vout = []
        for i in range(len(txtmp.vin)):
            if i != inIdx:
                txtmp.vin[i].nSequence = 0
    elif (hashtype & 0x1f) == SIGHASH_SINGLE:
        outIdx = inIdx
        if outIdx >= len(txtmp.vout):
            return (HASH_ONE, "outIdx %d out of range (%d)" % (outIdx, len(txtmp.vout)))
        tmp = txtmp.vout[outIdx]
        txtmp.vout = []
        # Keep only the matching output; earlier slots become "null"
        # outputs with value -1.
        for i in range(outIdx):
            txtmp.vout.append(CTxOut(-1))
        txtmp.vout.append(tmp)
        for i in range(len(txtmp.vin)):
            if i != inIdx:
                txtmp.vin[i].nSequence = 0
    if hashtype & SIGHASH_ANYONECANPAY:
        # Commit only to this single input.
        tmp = txtmp.vin[inIdx]
        txtmp.vin = []
        txtmp.vin.append(tmp)
    s = txtmp.serialize()
    s += struct.pack(b"<I", hashtype)
    hash = hash256(s)
    return (hash, None)
# TODO: Allow cached hashPrevouts/hashSequence/hashOutputs to be provided.
# Performance optimization probably not necessary for python tests, however.
# Note that this corresponds to sigversion == 1 in EvalScript, which is used
# for version 0 witnesses and all the FORKID transactions.
def SegwitVersion1SignatureHash(script, txTo, inIdx, hashtype, amount):
    """BIP143 signature hash for input *inIdx* of *txTo*.

    *script* is the scriptCode being signed and *amount* the value of the
    spent output.  Follows the BIP143 digest algorithm, with the BTG fork
    id folded into the hash type for replay protection.
    """
    hashPrevouts = 0
    hashSequence = 0
    hashOutputs = 0
    if not (hashtype & SIGHASH_ANYONECANPAY):
        serialize_prevouts = bytes()
        for i in txTo.vin:
            serialize_prevouts += i.prevout.serialize()
        hashPrevouts = uint256_from_str(hash256(serialize_prevouts))
    if (not (hashtype & SIGHASH_ANYONECANPAY) and (hashtype & 0x1f) != SIGHASH_SINGLE and (hashtype & 0x1f) != SIGHASH_NONE):
        serialize_sequence = bytes()
        for i in txTo.vin:
            serialize_sequence += struct.pack("<I", i.nSequence)
        hashSequence = uint256_from_str(hash256(serialize_sequence))
    if ((hashtype & 0x1f) != SIGHASH_SINGLE and (hashtype & 0x1f) != SIGHASH_NONE):
        serialize_outputs = bytes()
        for o in txTo.vout:
            serialize_outputs += o.serialize()
        hashOutputs = uint256_from_str(hash256(serialize_outputs))
    elif ((hashtype & 0x1f) == SIGHASH_SINGLE and inIdx < len(txTo.vout)):
        # SIGHASH_SINGLE commits only to the output at the same index.
        serialize_outputs = txTo.vout[inIdx].serialize()
        hashOutputs = uint256_from_str(hash256(serialize_outputs))
    if hashtype & SIGHASH_FORKID:
        # Replay protection: fork id occupies the upper bits of the hash type.
        hashtype |= FORKID_BTG << 8
    ss = bytes()
    ss += struct.pack("<i", txTo.nVersion)
    ss += ser_uint256(hashPrevouts)
    ss += ser_uint256(hashSequence)
    ss += txTo.vin[inIdx].prevout.serialize()
    ss += ser_string(script)
    ss += struct.pack("<q", amount)
    ss += struct.pack("<I", txTo.vin[inIdx].nSequence)
    ss += ser_uint256(hashOutputs)
    ss += struct.pack("<i", txTo.nLockTime)
    ss += struct.pack("<I", hashtype)
    return hash256(ss)
| mit |
iulian787/spack | var/spack/repos/builtin/packages/linktest/package.py | 2 | 1299 | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
class Linktest(MakefilePackage):
    """Performance tool to generate communication matrix using
    parallel ping-pong benchmark"""

    homepage = "https://www.fz-juelich.de/ias/jsc/EN/Expertise/Support/Software/LinkTest/_node.html"
    url = "http://apps.fz-juelich.de/jsc/linktest/download.php?version=1.2p1"

    maintainers = ['pramodk']

    version('1.2p1', sha256='981b96da1d5bf214507b8e219a36e8d0183d8bd5c10539b26f660b2c83e5269d', extension='tar.gz')

    depends_on('mpi')
    depends_on('sionlib')

    def edit(self, spec, prefix):
        # Point the stock Makefile at Spack's compiler wrappers and at the
        # sionlib installation (its variable ships commented out upstream).
        with working_dir('src'):
            makefile = FileFilter('Makefile')
            makefile.filter('= gcc', '= cc')
            makefile.filter('mpicc', spec['mpi'].mpicc)
            makefile.filter('#SIONLIB_INST=.*',
                            'SIONLIB_INST=%s' % spec['sionlib'].prefix)

    def build(self, spec, prefix):
        with working_dir('src'):
            make()

    def install(self, spec, prefix):
        # The Makefile has no install target; copy the two binaries by hand.
        mkdir(prefix.bin)
        install('src/mpilinktest', prefix.bin)
        install('src/pingponganalysis', prefix.bin)
mottosso/mindbender-setup | bin/windows/python36/Lib/ensurepip/__init__.py | 5 | 5824 | import os
import os.path
import pkgutil
import sys
import tempfile
# Public API of the ensurepip package.
__all__ = ["version", "bootstrap"]

# Versions of the wheels bundled under ensurepip/_bundled/; these must
# match the wheel file names exactly (see bootstrap()).
_SETUPTOOLS_VERSION = "28.8.0"

_PIP_VERSION = "9.0.1"

# (project name, version) pairs in installation order: setuptools must
# be installed before pip.
_PROJECTS = [
    ("setuptools", _SETUPTOOLS_VERSION),
    ("pip", _PIP_VERSION),
]
def _run_pip(args, additional_paths=None):
    """Run the bundled pip with the given command-line *args*.

    When *additional_paths* is supplied, its entries are prepended to
    sys.path first so that the bundled wheels are importable.
    """
    # The bundled software must be importable before pip itself is imported.
    if additional_paths is not None:
        sys.path = additional_paths + sys.path

    import pip
    pip.main(args)
def version() -> str:
    """
    Returns a string specifying the bundled version of pip.
    """
    # Module-level constant kept in sync with the bundled wheel's version.
    return _PIP_VERSION
def _disable_pip_configuration_settings():
# We deliberately ignore all pip environment variables
# when invoking pip
# See http://bugs.python.org/issue19734 for details
keys_to_remove = [k for k in os.environ if k.startswith("PIP_")]
for k in keys_to_remove:
del os.environ[k]
# We also ignore the settings in the default pip configuration file
# See http://bugs.python.org/issue20053 for details
os.environ['PIP_CONFIG_FILE'] = os.devnull
def bootstrap(*, root=None, upgrade=False, user=False,
              altinstall=False, default_pip=False,
              verbosity=0):
    """
    Bootstrap pip into the current Python installation (or the given
    root directory).

    Note that calling this function will alter both sys.path and os.environ.
    """
    if altinstall and default_pip:
        raise ValueError("Cannot use altinstall and default_pip together")

    _disable_pip_configuration_settings()

    # pip 1.5+ honours ENSUREPIP_OPTIONS when deciding which of the
    # convenience scripts (pip, pipX, pipX.Y, easy_install,
    # easy_install-X.Y) to leave out; leaving it unset installs all of
    # them (the default_pip case).
    if altinstall:
        # omit pip, pipX and easy_install
        os.environ["ENSUREPIP_OPTIONS"] = "altinstall"
    elif not default_pip:
        # omit pip and easy_install
        os.environ["ENSUREPIP_OPTIONS"] = "install"

    with tempfile.TemporaryDirectory() as tmpdir:
        # Drop the bundled wheels into the temporary directory and
        # remember their paths so they can be put on sys.path.
        wheel_paths = []
        for project, version in _PROJECTS:
            wheel_name = "{}-{}-py2.py3-none-any.whl".format(project, version)
            whl = pkgutil.get_data(
                "ensurepip",
                "_bundled/{}".format(wheel_name),
            )
            wheel_path = os.path.join(tmpdir, wheel_name)
            with open(wheel_path, "wb") as fp:
                fp.write(whl)
            wheel_paths.append(wheel_path)

        # Assemble the pip invocation.
        pip_args = ["install", "--no-index", "--find-links", tmpdir]
        if root:
            pip_args.extend(("--root", root))
        if upgrade:
            pip_args.append("--upgrade")
        if user:
            pip_args.append("--user")
        if verbosity:
            pip_args.append("-" + "v" * verbosity)

        _run_pip(pip_args + [name for name, _ in _PROJECTS], wheel_paths)
def _uninstall_helper(*, verbosity=0):
    """Helper to support a clean default uninstall process on Windows.

    Note that calling this function may alter os.environ.
    """
    # Nothing to do if pip was never installed, or has been removed.
    try:
        import pip
    except ImportError:
        return

    # Leave foreign pip installations alone; only the bundled version
    # is ours to remove.
    if pip.__version__ != _PIP_VERSION:
        msg = ("ensurepip will only uninstall a matching version "
               "({!r} installed, {!r} bundled)")
        print(msg.format(pip.__version__, _PIP_VERSION), file=sys.stderr)
        return

    _disable_pip_configuration_settings()

    # Uninstall in reverse installation order: pip before setuptools.
    uninstall_args = ["uninstall", "-y", "--disable-pip-version-check"]
    if verbosity:
        uninstall_args.append("-" + "v" * verbosity)
    _run_pip(uninstall_args + [name for name, _ in reversed(_PROJECTS)])
def _main(argv=None):
    """Command-line entry point for ``python -m ensurepip``.

    Parses *argv* (defaulting to sys.argv[1:]) and forwards the chosen
    options to bootstrap().
    """
    import argparse
    parser = argparse.ArgumentParser(prog="python -m ensurepip")
    parser.add_argument(
        "--version",
        action="version",
        version="pip {}".format(version()),
        help="Show the version of pip that is bundled with this Python.",
    )
    parser.add_argument(
        "-v", "--verbose",
        action="count",
        default=0,
        dest="verbosity",
        help=("Give more output. Option is additive, and can be used up to 3 "
              "times."),
    )
    parser.add_argument(
        "-U", "--upgrade",
        action="store_true",
        default=False,
        help="Upgrade pip and dependencies, even if already installed.",
    )
    parser.add_argument(
        "--user",
        action="store_true",
        default=False,
        help="Install using the user scheme.",
    )
    parser.add_argument(
        "--root",
        default=None,
        help="Install everything relative to this alternate root directory.",
    )
    parser.add_argument(
        "--altinstall",
        action="store_true",
        default=False,
        # Bug fix: the implicit string concatenation previously rendered
        # "versionedscripts" -- the first literal needs a trailing space.
        help=("Make an alternate install, installing only the X.Y versioned "
              "scripts (Default: pipX, pipX.Y, easy_install-X.Y)"),
    )
    parser.add_argument(
        "--default-pip",
        action="store_true",
        default=False,
        help=("Make a default pip install, installing the unqualified pip "
              "and easy_install in addition to the versioned scripts"),
    )

    args = parser.parse_args(argv)

    bootstrap(
        root=args.root,
        upgrade=args.upgrade,
        user=args.user,
        verbosity=args.verbosity,
        altinstall=args.altinstall,
        default_pip=args.default_pip,
    )
| mit |
stollcri/UA-3460-635-P2 | code/eigen/debug/gdb/printers.py | 143 | 6360 | # -*- coding: utf-8 -*-
# This file is part of Eigen, a lightweight C++ template library
# for linear algebra.
#
# Copyright (C) 2009 Benjamin Schindler <bschindler@inf.ethz.ch>
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Pretty printers for Eigen::Matrix
# This is still pretty basic as the python extension to gdb is still pretty basic.
# It cannot handle complex eigen types and it doesn't support any of the other eigen types
# Such as quaternion or some other type.
# This code supports fixed size as well as dynamic size matrices
# To use it:
#
# * Create a directory and put the file as well as an empty __init__.py in
# that directory.
# * Create a ~/.gdbinit file, that contains the following:
# python
# import sys
# sys.path.insert(0, '/path/to/eigen/printer/directory')
# from printers import register_eigen_printers
# register_eigen_printers (None)
# end
import gdb
import re
import itertools
class EigenMatrixPrinter:
    """Pretty-print an Eigen::Matrix or Eigen::Array value in gdb."""

    def __init__(self, variety, val):
        """Extract rows, cols, storage order and the data pointer from *val*."""
        # Save the variety (presumably "Matrix" or "Array") for later usage.
        self.variety = variety

        # The gdb extension does not support value template arguments -
        # need to extract them by hand from the type tag.
        type = val.type
        if type.code == gdb.TYPE_CODE_REF:
            type = type.target()
        self.type = type.unqualified().strip_typedefs()
        tag = self.type.tag
        # Raw string: '\<' is an invalid escape in a plain Python 3 literal.
        regex = re.compile(r'\<.*\>')
        m = regex.findall(tag)[0][1:-1]
        # list(...) is required: under Python 3 (used by modern gdb builds)
        # map() returns a lazy iterator that cannot be indexed below.
        template_params = list(map(lambda x: x.replace(" ", ""), m.split(',')))

        # -1 (Eigen::Dynamic) in the template means the dimension is stored
        # at runtime in m_storage; different gdb/compiler versions print the
        # constant differently, hence the three spellings.
        if template_params[1] in ('-0x00000000000000001', '-0x000000001', '-1'):
            self.rows = val['m_storage']['m_rows']
        else:
            self.rows = int(template_params[1])

        if template_params[2] in ('-0x00000000000000001', '-0x000000001', '-1'):
            self.cols = val['m_storage']['m_cols']
        else:
            self.cols = int(template_params[2])

        self.options = 0  # default value
        if len(template_params) > 3:
            self.options = template_params[3]

        # Bit 0 of the Options template parameter selects row-major storage.
        self.rowMajor = (int(self.options) & 0x1)

        self.innerType = self.type.template_argument(0)

        self.val = val

        # Fixed size matrices have a struct as their storage, so we need
        # to walk through this.
        self.data = self.val['m_storage']['m_data']
        if self.data.type.code == gdb.TYPE_CODE_STRUCT:
            self.data = self.data['array']
            self.data = self.data.cast(self.innerType.pointer())

    class _iterator:
        """Walk the flat data pointer in storage order, yielding
        ('[row,col]', value) pairs (or '[i]' for vectors)."""

        def __init__(self, rows, cols, dataPtr, rowMajor):
            self.rows = rows
            self.cols = cols
            self.dataPtr = dataPtr
            self.currentRow = 0
            self.currentCol = 0
            self.rowMajor = rowMajor

        def __iter__(self):
            return self

        def next(self):
            row = self.currentRow
            col = self.currentCol
            if self.rowMajor == 0:
                # Column-major: advance down the column, then to the next one.
                if self.currentCol >= self.cols:
                    raise StopIteration

                self.currentRow = self.currentRow + 1
                if self.currentRow >= self.rows:
                    self.currentRow = 0
                    self.currentCol = self.currentCol + 1
            else:
                # Row-major: advance along the row, then to the next one.
                if self.currentRow >= self.rows:
                    raise StopIteration

                self.currentCol = self.currentCol + 1
                if self.currentCol >= self.cols:
                    self.currentCol = 0
                    self.currentRow = self.currentRow + 1

            item = self.dataPtr.dereference()
            self.dataPtr = self.dataPtr + 1
            if (self.cols == 1):  # if it's a column vector
                return ('[%d]' % (row,), item)
            elif (self.rows == 1):  # if it's a row vector
                return ('[%d]' % (col,), item)
            return ('[%d,%d]' % (row, col), item)

        # Bug fix: Python 3 iterator protocol requires __next__; keep
        # 'next' as well for gdb builds that still embed Python 2.
        __next__ = next

    def children(self):
        return self._iterator(self.rows, self.cols, self.data, self.rowMajor)

    def to_string(self):
        return "Eigen::%s<%s,%d,%d,%s> (data ptr: %s)" % (self.variety, self.innerType, self.rows, self.cols, "RowMajor" if self.rowMajor else "ColMajor", self.data)
class EigenQuaternionPrinter:
    """Pretty-print an Eigen::Quaternion value in gdb."""

    def __init__(self, val):
        """Extract the scalar type and the coefficient data pointer."""
        # The gdb extension does not support value template arguments -
        # need to extract them by hand.
        type = val.type
        if type.code == gdb.TYPE_CODE_REF:
            type = type.target()
        self.type = type.unqualified().strip_typedefs()
        self.innerType = self.type.template_argument(0)
        self.val = val

        # Quaternions have a struct as their storage, so we need to walk
        # through this.
        self.data = self.val['m_coeffs']['m_storage']['m_data']['array']
        self.data = self.data.cast(self.innerType.pointer())

    class _iterator:
        """Yield the four coefficients as ('[x]', v) .. ('[w]', v) pairs."""

        def __init__(self, dataPtr):
            self.dataPtr = dataPtr
            self.currentElement = 0
            self.elementNames = ['x', 'y', 'z', 'w']

        def __iter__(self):
            return self

        def next(self):
            element = self.currentElement

            if self.currentElement >= 4:  # there are 4 elements in a quaternion
                raise StopIteration

            self.currentElement = self.currentElement + 1

            item = self.dataPtr.dereference()
            self.dataPtr = self.dataPtr + 1
            return ('[%s]' % (self.elementNames[element],), item)

        # Bug fix: Python 3 iterator protocol requires __next__; keep
        # 'next' as well for gdb builds that still embed Python 2.
        __next__ = next

    def children(self):
        return self._iterator(self.data)

    def to_string(self):
        return "Eigen::Quaternion<%s> (data ptr: %s)" % (self.innerType, self.data)
def build_eigen_dictionary():
    """Populate the global type-name-pattern -> printer-factory registry."""
    factories = {
        '^Eigen::Quaternion<.*>$': lambda val: EigenQuaternionPrinter(val),
        '^Eigen::Matrix<.*>$': lambda val: EigenMatrixPrinter("Matrix", val),
        '^Eigen::Array<.*>$': lambda val: EigenMatrixPrinter("Array", val),
    }
    for pattern, factory in factories.items():
        pretty_printers_dict[re.compile(pattern)] = factory
def register_eigen_printers(obj):
    """Register eigen pretty-printers with objfile *obj*.

    If *obj* is None the lookup function is installed globally on the
    gdb module itself.
    """
    # Idiom fix: identity comparison ('is None') is the correct way to
    # test for None, not '== None'.
    if obj is None:
        obj = gdb
    obj.pretty_printers.append(lookup_function)
def lookup_function(val):
    """Look up and return a pretty-printer that can print *val*.

    Returns None when no registered pattern matches the stripped type
    name, which tells gdb to fall back to its default formatting.
    """
    # Renamed from 'type' to avoid shadowing the builtin; resolve
    # references and typedefs so the tag matches the registered patterns.
    val_type = val.type
    if val_type.code == gdb.TYPE_CODE_REF:
        val_type = val_type.target()
    val_type = val_type.unqualified().strip_typedefs()

    typename = val_type.tag
    # Idiom fix: 'is None' instead of '== None'.
    if typename is None:
        return None

    for pattern, factory in pretty_printers_dict.items():
        if pattern.search(typename):
            return factory(val)

    return None
# Global registry mapping compiled type-name regexes to printer factory
# callables; filled in once at import time by build_eigen_dictionary().
pretty_printers_dict = {}

build_eigen_dictionary ()
| mit |
bmanojlovic/ansible | lib/ansible/modules/packaging/os/homebrew_tap.py | 27 | 7348 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Daniel Jaouen <dcj24@cornell.edu>
# (c) 2016, Indrajit Raychaudhuri <irc+code@indrajit.com>
#
# Based on homebrew (Andrew Dunham <andrew@du.nham.ca>)
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import re
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: homebrew_tap
author:
- "Indrajit Raychaudhuri (@indrajitr)"
- "Daniel Jaouen (@danieljaouen)"
short_description: Tap a Homebrew repository.
description:
- Tap external Homebrew repositories.
version_added: "1.6"
options:
name:
description:
- The GitHub user/organization repository to tap.
required: true
aliases: ['tap']
url:
description:
- The optional git URL of the repository to tap. The URL is not
assumed to be on GitHub, and the protocol doesn't have to be HTTP.
Any location and protocol that git can handle is fine.
required: false
version_added: "2.2"
note:
- I(name) option may not be a list of multiple taps (but a single
tap instead) when this option is provided.
state:
description:
- state of the repository.
choices: [ 'present', 'absent' ]
required: false
default: 'present'
requirements: [ homebrew ]
'''
EXAMPLES = '''
- homebrew_tap:
name: homebrew/dupes
- homebrew_tap:
name: homebrew/dupes
state: absent
- homebrew_tap:
name: homebrew/dupes,homebrew/science
state: present
- homebrew_tap:
name: telemachus/brew
url: 'https://bitbucket.org/telemachus/brew'
'''
def a_valid_tap(tap):
    '''Returns True if the tap is valid.

    A valid tap looks like "user/repo" or "user/homebrew-repo".
    The original implementation returned the re.Match object itself
    despite the docstring; callers only test truthiness, so returning a
    real bool is backward compatible and matches the documented contract.
    '''
    regex = re.compile(r'^([\w-]+)/(homebrew-)?([\w-]+)$')
    return regex.match(tap) is not None
def already_tapped(module, brew_path, tap):
    '''Returns True if already tapped.'''
    rc, out, err = module.run_command([brew_path, 'tap'])
    # 'brew tap' prints one tap per line; normalize both sides to
    # lowercase and drop the optional "homebrew-" repo prefix before
    # comparing.
    current_taps = [line.strip().lower() for line in out.split('\n') if line]
    wanted = re.sub('homebrew-', '', tap.lower())
    return wanted in current_taps
def add_tap(module, brew_path, tap, url=None):
    '''Adds a single tap.

    Returns a (failed, changed, msg) tuple describing the outcome.
    '''
    failed, changed, msg = False, False, ''

    if not a_valid_tap(tap):
        failed = True
        msg = 'not a valid tap: %s' % tap

    elif not already_tapped(module, brew_path, tap):
        if module.check_mode:
            module.exit_json(changed=True)
        # Bug fix: only pass the URL through when one was actually
        # supplied; the original unconditionally appended 'url', handing
        # a bogus None argument to 'brew tap' whenever it was omitted.
        cmd = [brew_path, 'tap', tap]
        if url is not None:
            cmd.append(url)
        rc, out, err = module.run_command(cmd)
        if already_tapped(module, brew_path, tap):
            changed = True
            msg = 'successfully tapped: %s' % tap
        else:
            failed = True
            msg = 'failed to tap: %s' % tap

    else:
        msg = 'already tapped: %s' % tap

    return (failed, changed, msg)
def add_taps(module, brew_path, taps):
    '''Adds one or more taps.

    Returns a (failed, changed, msg) tuple summarizing the batch.
    '''
    # Bug fix: 'changed' must be initialized before the loop -- with an
    # empty taps list the loop body never ran and the original raised
    # UnboundLocalError when building the return tuple.
    failed, changed, unchanged, added, msg = False, False, 0, 0, ''

    for tap in taps:
        (failed, changed, msg) = add_tap(module, brew_path, tap)
        if failed:
            break
        if changed:
            added += 1
        else:
            unchanged += 1

    if failed:
        msg = 'added: %d, unchanged: %d, error: ' + msg
        msg = msg % (added, unchanged)
    elif added:
        changed = True
        msg = 'added: %d, unchanged: %d' % (added, unchanged)
    else:
        msg = 'added: %d, unchanged: %d' % (added, unchanged)

    return (failed, changed, msg)
def remove_tap(module, brew_path, tap):
    '''Removes a single tap; returns a (failed, changed, msg) tuple.'''
    # Guard clauses: invalid tap names fail fast, taps that are not
    # installed are reported as already untapped.
    if not a_valid_tap(tap):
        return (True, False, 'not a valid tap: %s' % tap)

    if not already_tapped(module, brew_path, tap):
        return (False, False, 'already untapped: %s' % tap)

    if module.check_mode:
        module.exit_json(changed=True)
    rc, out, err = module.run_command([brew_path, 'untap', tap])

    # Re-query brew to confirm the untap actually took effect.
    if already_tapped(module, brew_path, tap):
        return (True, False, 'failed to untap: %s' % tap)
    return (False, True, 'successfully untapped: %s' % tap)
def remove_taps(module, brew_path, taps):
    '''Removes one or more taps.

    Returns a (failed, changed, msg) tuple summarizing the batch.
    '''
    # Bug fix: 'changed' must be initialized before the loop -- with an
    # empty taps list the loop body never ran and the original raised
    # UnboundLocalError when building the return tuple.
    failed, changed, unchanged, removed, msg = False, False, 0, 0, ''

    for tap in taps:
        (failed, changed, msg) = remove_tap(module, brew_path, tap)
        if failed:
            break
        if changed:
            removed += 1
        else:
            unchanged += 1

    if failed:
        msg = 'removed: %d, unchanged: %d, error: ' + msg
        msg = msg % (removed, unchanged)
    elif removed:
        changed = True
        msg = 'removed: %d, unchanged: %d' % (removed, unchanged)
    else:
        msg = 'removed: %d, unchanged: %d' % (removed, unchanged)

    return (failed, changed, msg)
def main():
    """Ansible module entry point: tap or untap Homebrew repositories."""
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(aliases=['tap'], type='list', required=True),
            url=dict(default=None, required=False),
            state=dict(default='present', choices=['present', 'absent']),
        ),
        supports_check_mode=True,
    )

    brew_path = module.get_bin_path(
        'brew',
        required=True,
        opt_dirs=['/usr/local/bin']
    )

    taps = module.params['name']
    url = module.params['url']

    if module.params['state'] == 'present':
        if url is None:
            # No tap URL provided explicitly, continue with bulk addition
            # of all the taps.
            failed, changed, msg = add_taps(module, brew_path, taps)
        else:
            # When a tap URL is provided explicitly, we allow adding
            # *single* tap only. Validate and proceed to add single tap.
            if len(taps) > 1:
                # Typo fix in the user-facing message: 'muliple' -> 'multiple'.
                msg = "List of multiple taps may not be provided with 'url' option."
                module.fail_json(msg=msg)
            else:
                failed, changed, msg = add_tap(module, brew_path, taps[0], url)

        if failed:
            module.fail_json(msg=msg)
        else:
            module.exit_json(changed=changed, msg=msg)

    elif module.params['state'] == 'absent':
        failed, changed, msg = remove_taps(module, brew_path, taps)

        if failed:
            module.fail_json(msg=msg)
        else:
            module.exit_json(changed=changed, msg=msg)
# this is magic, see lib/ansible/module_common.py
from ansible.module_utils.basic import *
# Run the module only when executed directly (standard Ansible pattern).
if __name__ == '__main__':
    main()
| gpl-3.0 |
TomAugspurger/pandas | pandas/tests/series/methods/test_rename_axis.py | 4 | 1503 | import pytest
from pandas import Index, MultiIndex, Series
import pandas._testing as tm
class TestSeriesRenameAxis:
def test_rename_axis_mapper(self):
# GH 19978
mi = MultiIndex.from_product([["a", "b", "c"], [1, 2]], names=["ll", "nn"])
ser = Series(list(range(len(mi))), index=mi)
result = ser.rename_axis(index={"ll": "foo"})
assert result.index.names == ["foo", "nn"]
result = ser.rename_axis(index=str.upper, axis=0)
assert result.index.names == ["LL", "NN"]
result = ser.rename_axis(index=["foo", "goo"])
assert result.index.names == ["foo", "goo"]
with pytest.raises(TypeError, match="unexpected"):
ser.rename_axis(columns="wrong")
def test_rename_axis_inplace(self, datetime_series):
# GH 15704
expected = datetime_series.rename_axis("foo")
result = datetime_series
no_return = result.rename_axis("foo", inplace=True)
assert no_return is None
tm.assert_series_equal(result, expected)
@pytest.mark.parametrize("kwargs", [{"mapper": None}, {"index": None}, {}])
def test_rename_axis_none(self, kwargs):
# GH 25034
index = Index(list("abc"), name="foo")
ser = Series([1, 2, 3], index=index)
result = ser.rename_axis(**kwargs)
expected_index = index.rename(None) if kwargs else index
expected = Series([1, 2, 3], index=expected_index)
tm.assert_series_equal(result, expected)
| bsd-3-clause |
denys-duchier/django | tests/managers_regress/tests.py | 43 | 10902 | from django.db import models
from django.template import Context, Template
from django.test import TestCase, override_settings
from django.test.utils import isolate_apps
from .models import (
AbstractBase1, AbstractBase2, AbstractBase3, Child1, Child2, Child3,
Child4, Child5, Child6, Child7, RelatedModel, RelationModel,
)
class ManagersRegressionTests(TestCase):
    """Regression tests for custom/default manager behavior on the
    models defined in this app's models.py."""

    def test_managers(self):
        # Populate every child model, then verify exactly which rows each
        # manager (custom, restricted, _default_manager) exposes.
        Child1.objects.create(name='fred', data='a1')
        Child1.objects.create(name='barney', data='a2')
        Child2.objects.create(name='fred', data='b1', value=1)
        Child2.objects.create(name='barney', data='b2', value=42)
        Child3.objects.create(name='fred', data='c1', comment='yes')
        Child3.objects.create(name='barney', data='c2', comment='no')
        Child4.objects.create(name='fred', data='d1')
        Child4.objects.create(name='barney', data='d2')
        Child5.objects.create(name='fred', comment='yes')
        Child5.objects.create(name='barney', comment='no')
        Child6.objects.create(name='fred', data='f1', value=42)
        Child6.objects.create(name='barney', data='f2', value=42)
        Child7.objects.create(name='fred')
        Child7.objects.create(name='barney')

        self.assertQuerysetEqual(Child1.manager1.all(), ["<Child1: a1>"])
        self.assertQuerysetEqual(Child1.manager2.all(), ["<Child1: a2>"])
        self.assertQuerysetEqual(Child1._default_manager.all(), ["<Child1: a1>"])

        self.assertQuerysetEqual(Child2._default_manager.all(), ["<Child2: b1>"])
        self.assertQuerysetEqual(Child2.restricted.all(), ["<Child2: b2>"])

        self.assertQuerysetEqual(Child3._default_manager.all(), ["<Child3: c1>"])
        self.assertQuerysetEqual(Child3.manager1.all(), ["<Child3: c1>"])
        self.assertQuerysetEqual(Child3.manager2.all(), ["<Child3: c2>"])

        # Since Child6 inherits from Child4, the corresponding rows from f1 and
        # f2 also appear here. This is the expected result.
        self.assertQuerysetEqual(Child4._default_manager.order_by('data'), [
            "<Child4: d1>",
            "<Child4: d2>",
            "<Child4: f1>",
            "<Child4: f2>",
        ])
        self.assertQuerysetEqual(Child4.manager1.all(), ["<Child4: d1>", "<Child4: f1>"], ordered=False)
        self.assertQuerysetEqual(Child5._default_manager.all(), ["<Child5: fred>"])
        self.assertQuerysetEqual(Child6._default_manager.all(), ["<Child6: f1>", "<Child6: f2>"], ordered=False)
        self.assertQuerysetEqual(
            Child7._default_manager.order_by('name'),
            ["<Child7: barney>", "<Child7: fred>"]
        )

    def test_abstract_manager(self):
        # Accessing the manager on an abstract model should
        # raise an attribute error with an appropriate message.
        # This error message isn't ideal, but if the model is abstract and
        # a lot of the class instantiation logic isn't invoked; if the
        # manager is implied, then we don't get a hook to install the
        # error-raising manager.
        msg = "type object 'AbstractBase3' has no attribute 'objects'"
        with self.assertRaisesMessage(AttributeError, msg):
            AbstractBase3.objects.all()

    def test_custom_abstract_manager(self):
        # Accessing the manager on an abstract model with an custom
        # manager should raise an attribute error with an appropriate
        # message.
        msg = "Manager isn't available; AbstractBase2 is abstract"
        with self.assertRaisesMessage(AttributeError, msg):
            AbstractBase2.restricted.all()

    def test_explicit_abstract_manager(self):
        # Accessing the manager on an abstract model with an explicit
        # manager should raise an attribute error with an appropriate
        # message.
        msg = "Manager isn't available; AbstractBase1 is abstract"
        with self.assertRaisesMessage(AttributeError, msg):
            AbstractBase1.objects.all()

    @override_settings(TEST_SWAPPABLE_MODEL='managers_regress.Parent')
    @isolate_apps('managers_regress')
    def test_swappable_manager(self):
        class SwappableModel(models.Model):
            class Meta:
                swappable = 'TEST_SWAPPABLE_MODEL'

        # Accessing the manager on a swappable model should
        # raise an attribute error with a helpful message
        msg = (
            "Manager isn't available; 'managers_regress.SwappableModel' "
            "has been swapped for 'managers_regress.Parent'"
        )
        with self.assertRaisesMessage(AttributeError, msg):
            SwappableModel.objects.all()

    @override_settings(TEST_SWAPPABLE_MODEL='managers_regress.Parent')
    @isolate_apps('managers_regress')
    def test_custom_swappable_manager(self):
        class SwappableModel(models.Model):
            stuff = models.Manager()

            class Meta:
                swappable = 'TEST_SWAPPABLE_MODEL'

        # Accessing the manager on a swappable model with an
        # explicit manager should raise an attribute error with a
        # helpful message
        msg = (
            "Manager isn't available; 'managers_regress.SwappableModel' "
            "has been swapped for 'managers_regress.Parent'"
        )
        with self.assertRaisesMessage(AttributeError, msg):
            SwappableModel.stuff.all()

    @override_settings(TEST_SWAPPABLE_MODEL='managers_regress.Parent')
    @isolate_apps('managers_regress')
    def test_explicit_swappable_manager(self):
        class SwappableModel(models.Model):
            objects = models.Manager()

            class Meta:
                swappable = 'TEST_SWAPPABLE_MODEL'

        # Accessing the manager on a swappable model with an
        # explicit manager should raise an attribute error with a
        # helpful message
        msg = (
            "Manager isn't available; 'managers_regress.SwappableModel' "
            "has been swapped for 'managers_regress.Parent'"
        )
        with self.assertRaisesMessage(AttributeError, msg):
            SwappableModel.objects.all()

    def test_regress_3871(self):
        # Regression for the ticket in the method name: FK, GFK and M2M
        # related managers must all be reachable from template variable
        # lookups, each resolving to the single related row's pk.
        related = RelatedModel.objects.create()

        relation = RelationModel()
        relation.fk = related
        relation.gfk = related
        relation.save()
        relation.m2m.add(related)

        t = Template('{{ related.test_fk.all.0 }}{{ related.test_gfk.all.0 }}{{ related.test_m2m.all.0 }}')

        self.assertEqual(
            t.render(Context({'related': related})),
            ''.join([str(relation.pk)] * 3),
        )

    def test_field_can_be_called_exact(self):
        # Make sure related managers core filters don't include an
        # explicit `__exact` lookup that could be interpreted as a
        # reference to a foreign `exact` field. refs #23940.
        related = RelatedModel.objects.create(exact=False)
        relation = related.test_fk.create()
        self.assertEqual(related.test_fk.get(), relation)
@isolate_apps('managers_regress')
class TestManagerInheritance(TestCase):
    """Checks how _default_manager and _base_manager are resolved across
    abstract bases, proxy models, and multi-table inheritance (MTI).
    Model classes are defined inside each test; @isolate_apps keeps them
    out of the global app registry."""

    def test_implicit_inheritance(self):
        class CustomManager(models.Manager):
            pass

        class AbstractModel(models.Model):
            custom_manager = CustomManager()

            class Meta:
                abstract = True

        class PlainModel(models.Model):
            custom_manager = CustomManager()

        self.assertIsInstance(PlainModel._base_manager, models.Manager)
        self.assertIsInstance(PlainModel._default_manager, CustomManager)

        class ModelWithAbstractParent(AbstractModel):
            pass

        self.assertIsInstance(ModelWithAbstractParent._base_manager, models.Manager)
        self.assertIsInstance(ModelWithAbstractParent._default_manager, CustomManager)

        class ProxyModel(PlainModel):
            class Meta:
                proxy = True

        self.assertIsInstance(ProxyModel._base_manager, models.Manager)
        self.assertIsInstance(ProxyModel._default_manager, CustomManager)

        class MTIModel(PlainModel):
            pass

        self.assertIsInstance(MTIModel._base_manager, models.Manager)
        self.assertIsInstance(MTIModel._default_manager, CustomManager)

    def test_default_manager_inheritance(self):
        # Meta.default_manager_name must be honored through abstract,
        # proxy and MTI inheritance alike.
        class CustomManager(models.Manager):
            pass

        class AbstractModel(models.Model):
            another_manager = models.Manager()
            custom_manager = CustomManager()

            class Meta:
                default_manager_name = 'custom_manager'
                abstract = True

        class PlainModel(models.Model):
            another_manager = models.Manager()
            custom_manager = CustomManager()

            class Meta:
                default_manager_name = 'custom_manager'

        self.assertIsInstance(PlainModel._default_manager, CustomManager)

        class ModelWithAbstractParent(AbstractModel):
            pass

        self.assertIsInstance(ModelWithAbstractParent._default_manager, CustomManager)

        class ProxyModel(PlainModel):
            class Meta:
                proxy = True

        self.assertIsInstance(ProxyModel._default_manager, CustomManager)

        class MTIModel(PlainModel):
            pass

        self.assertIsInstance(MTIModel._default_manager, CustomManager)

    def test_base_manager_inheritance(self):
        # Same as above, but for Meta.base_manager_name.
        class CustomManager(models.Manager):
            pass

        class AbstractModel(models.Model):
            another_manager = models.Manager()
            custom_manager = CustomManager()

            class Meta:
                base_manager_name = 'custom_manager'
                abstract = True

        class PlainModel(models.Model):
            another_manager = models.Manager()
            custom_manager = CustomManager()

            class Meta:
                base_manager_name = 'custom_manager'

        self.assertIsInstance(PlainModel._base_manager, CustomManager)

        class ModelWithAbstractParent(AbstractModel):
            pass

        self.assertIsInstance(ModelWithAbstractParent._base_manager, CustomManager)

        class ProxyModel(PlainModel):
            class Meta:
                proxy = True

        self.assertIsInstance(ProxyModel._base_manager, CustomManager)

        class MTIModel(PlainModel):
            pass

        self.assertIsInstance(MTIModel._base_manager, CustomManager)

    def test_manager_no_duplicates(self):
        # A manager redefined on the child under the same attribute name
        # replaces the abstract parent's manager rather than appearing twice.
        class CustomManager(models.Manager):
            pass

        class AbstractModel(models.Model):
            custom_manager = models.Manager()

            class Meta:
                abstract = True

        class TestModel(AbstractModel):
            custom_manager = CustomManager()

        self.assertEqual(TestModel._meta.managers, (TestModel.custom_manager,))
        self.assertEqual(TestModel._meta.managers_map, {'custom_manager': TestModel.custom_manager})
| bsd-3-clause |
gnperumal/exscript | src/Exscript/AccountManager.py | 7 | 6709 | # Copyright (C) 2007-2010 Samuel Abels.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2, as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
Manages user accounts.
"""
from Exscript.AccountPool import AccountPool
class AccountManager(object):
"""
Keeps track of available user accounts and assigns them to the
worker threads.
"""
def __init__(self):
"""
Constructor.
"""
self.default_pool = None
self.pools = None
self.reset()
def reset(self):
"""
Removes all account pools.
"""
self.default_pool = AccountPool()
self.pools = []
def add_pool(self, pool, match = None):
"""
Adds a new account pool. If the given match argument is
None, the pool the default pool. Otherwise, the match argument is
a callback function that is invoked to decide whether or not the
given pool should be used for a host.
When Exscript logs into a host, the account is chosen in the following
order:
# Exscript checks whether an account was attached to the
L{Host} object using L{Host.set_account()}), and uses that.
# If the L{Host} has no account attached, Exscript walks
through all pools that were passed to L{Queue.add_account_pool()}.
For each pool, it passes the L{Host} to the function in the
given match argument. If the return value is True, the account
pool is used to acquire an account.
(Accounts within each pool are taken in a round-robin
fashion.)
# If no matching account pool is found, an account is taken
from the default account pool.
# Finally, if all that fails and the default account pool
contains no accounts, an error is raised.
Example usage::
def do_nothing(conn):
conn.autoinit()
def use_this_pool(host):
return host.get_name().startswith('foo')
default_pool = AccountPool()
default_pool.add_account(Account('default-user', 'password'))
other_pool = AccountPool()
other_pool.add_account(Account('user', 'password'))
queue = Queue()
queue.account_manager.add_pool(default_pool)
queue.account_manager.add_pool(other_pool, use_this_pool)
host = Host('localhost')
queue.run(host, do_nothing)
In the example code, the host has no account attached. As a result,
the queue checks whether use_this_pool() returns True. Because the
hostname does not start with 'foo', the function returns False, and
Exscript takes the 'default-user' account from the default pool.
@type pool: AccountPool
@param pool: The account pool that is added.
@type match: callable
@param match: A callback to check if the pool should be used.
"""
if match is None:
self.default_pool = pool
else:
self.pools.append((match, pool))
def add_account(self, account):
"""
Adds the given account to the default account pool that Exscript uses
to log into all hosts that have no specific L{Account} attached.
@type account: Account
@param account: The account that is added.
"""
self.default_pool.add_account(account)
def get_account_from_hash(self, account_hash):
"""
Returns the account with the given hash, if it is contained in any
of the pools. Returns None otherwise.
@type account_hash: str
@param account_hash: The hash of an account object.
"""
for _, pool in self.pools:
account = pool.get_account_from_hash(account_hash)
if account is not None:
return account
return self.default_pool.get_account_from_hash(account_hash)
def acquire_account(self, account = None, owner = None):
"""
Acquires the given account. If no account is given, one is chosen
from the default pool.
@type account: Account
@param account: The account that is added.
@type owner: object
@param owner: An optional descriptor for the owner.
@rtype: L{Account}
@return: The account that was acquired.
"""
if account is not None:
for _, pool in self.pools:
if pool.has_account(account):
return pool.acquire_account(account, owner)
if not self.default_pool.has_account(account):
# The account is not in any pool.
account.acquire()
return account
return self.default_pool.acquire_account(account, owner)
def acquire_account_for(self, host, owner = None):
    """
    Acquires an account for the given host and returns it.
    The host is passed to each of the match functions that were
    passed in when adding the pool. The first pool for which the
    match function returns True is chosen to assign an account.

    @type  host: L{Host}
    @param host: The host for which an account is acquired.
    @type  owner: object
    @param owner: An optional descriptor for the owner.
    @rtype:  L{Account}
    @return: The account that was acquired.
    """
    # A pool is only selected if its callback returns the True
    # singleton -- merely truthy values do not match.
    selected = next((pool for match, pool in self.pools
                     if match(host) is True),
                    self.default_pool)
    return selected.acquire_account(owner = owner)
def release_accounts(self, owner):
    """
    Releases all accounts that were acquired by the given owner, in
    every added pool and finally in the default pool.

    @type  owner: object
    @param owner: The owner descriptor as passed to acquire_account().
    """
    all_pools = [pool for _, pool in self.pools] + [self.default_pool]
    for pool in all_pools:
        pool.release_accounts(owner)
| gpl-2.0 |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A tf.nn.dynamic_rnn variant, built on the Recurrent class.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
from tensorflow.contrib.recurrent.python.ops import recurrent
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import function
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.util import nest
def _GetDTypesFromStructure(struct):
  """Returns the dtype of every tensor in a (possibly nested) structure."""
  return [ops.convert_to_tensor(t).dtype for t in nest.flatten(struct)]
def _SetShapeFromTemplate(struct, struct_template):
  """Copies the static shape of each template tensor onto `struct`.

  Both arguments must flatten to the same number of tensors.
  """
  flat = nest.flatten(struct)
  flat_template = nest.flatten(struct_template)
  for tensor, template in zip(flat, flat_template):
    tensor.set_shape(template.shape)
class _FunctionalRnnCell(object):
  """Wrapper around RNNCell which separates state from computation.

  This class accomplishes the following:
  * Turn the cell's `__call__` function into a pure function. The global
    side effects are separated as `theta`. They are the variables created
    for the weights of the computation.
  * Unless the output is aliased as part of the state, extend the state to
    contain the output so that we store the history in `Recurrent`.
  * Set static shapes as required.
  """

  def __init__(self, rnn_cell, seq_inputs, initial_state):
    assert initial_state is not None

    # TODO(drpng): Dtype needs to be configurable.
    input_dtypes = [dtypes.float32] + _GetDTypesFromStructure(initial_state)
    # See _index.
    # A template input for a single time step, used to trace the cell and
    # to propagate static shapes.  stop_gradient because it is only a
    # shape/dtype template, not part of the computation.
    like_inputs_t = nest.map_structure(
        lambda x: array_ops.stop_gradient(array_ops.gather(x, 0)), seq_inputs)
    input_structure = (like_inputs_t, initial_state)

    @function.Defun(*input_dtypes)
    def FlatCellStep(*flat_inputs):
      """The flattened version of `rnn_cell`."""
      inputs_t, state0 = nest.pack_sequence_as(input_structure, flat_inputs)
      _SetShapeFromTemplate(state0, initial_state)
      _SetShapeFromTemplate(inputs_t, like_inputs_t)
      outputs_t, state1 = rnn_cell(inputs_t, state0)
      state_list = nest.flatten(state1)
      # NOTE: these attribute assignments run while the Defun is being
      # traced, i.e. as a side effect of defining FlatCellStep.
      self._output_shape = outputs_t.shape

      if outputs_t in state_list:
        output_index_in_state = state_list.index(outputs_t)
      else:
        output_index_in_state = None

      if output_index_in_state is None:
        # Output is not aliased in the state: prepend it so Recurrent
        # accumulates it as part of the state history.
        self._prepend_output = True
        self._output_state_idx = 0
        return [outputs_t] + state_list
      else:
        self._output_state_idx = output_index_in_state
        self._prepend_output = False
        # To save memory, we don't return the output separately from the
        # state list, since we know it's the same tensor.
        return state_list

    def _ToPureFunction(func):
      # NOTE: This forces the creation of the function (the Defun is
      # instantiated lazily).  Captured inputs become explicit extra
      # parameters so the function is pure.
      if func.captured_inputs:
        pure_func = copy.copy(func)
        # pylint: disable=protected-access
        pure_func._extra_inputs = []
        return pure_func
      return func

    pure_flat_cell_step = _ToPureFunction(FlatCellStep)

    def CellStep(theta, extended_state0, inputs_t):
      """Performs one time step on structured inputs.

      The purpose of this function is to turn the parameters into flattened
      versions, and to resolve the parameter order difference between
      `Recurrent` and `RNNCell`.

      In the event the cell returns a transformed output that is not aliased
      within its state, the `extended_state0` also contains the output as its
      first element.

      Args:
        theta: Weights required for the computation. A structure of tensors.
        extended_state0: the state0, and possibly the output at the previous
          time step. A structure of tensors.
        inputs_t: the inputs at time t.

      Returns:
        A pair of the next state (inclusive of the output), and an empty list
        (unused `extras`).
        The next state is congruent to state0.
      """
      extended_state0_flat = nest.flatten(extended_state0)
      state0_flat = self.MaybeRemoveOutputFromState(extended_state0_flat)
      full_inputs = [inputs_t] + state0_flat + theta
      # Note that the thetas are additional inputs appended as extra
      # parameters (see _ToPureFunction above).
      cell_out = pure_flat_cell_step(*full_inputs)
      return cell_out, []

    self._cell_step = CellStep
    self._theta = FlatCellStep.captured_inputs
    self._zero_state = rnn_cell.zero_state
    self._state_template = initial_state
    self._output_size = rnn_cell.output_size

  @property
  def extended_initial_state(self):
    # Mirrors the structure returned by FlatCellStep: output first when it
    # had to be prepended.
    if self._prepend_output:
      return [array_ops.zeros(self._output_shape), self._state_template]
    else:
      # The base case, where the output is just the hidden state.
      return self._state_template

  @property
  def cell_step(self):
    return self._cell_step

  @property
  def theta(self):
    return self._theta

  @property
  def state_template(self):
    return self._state_template

  @property
  def output_shape(self):
    return self._output_shape

  def GetOutputFromState(self, state):
    """Extracts the output tensor from a (flattened) extended state."""
    return nest.flatten(state)[self._output_state_idx]

  def MaybeRemoveOutputFromState(self, flat_state):
    """Strips the prepended output, if any, leaving the true cell state."""
    if self._prepend_output:
      return flat_state[1:]
    return flat_state
def _ApplyLengthsToBatch(sequence_lengths, tf_output):
  # TODO(drpng): just use Update so that we don't carry over the gradients?
  """Sets the output to be zero at the end of each sequence.

  Args:
    sequence_lengths: An integer tensor with one length per batch entry.
    tf_output: A batch-major tensor of shape [batch, time, vector].

  Returns:
    A tensor shaped like `tf_output` where positions at or beyond each
    sequence's length are zeroed out.
  """
  # tf_output is batch major.
  dims = array_ops.shape(tf_output)
  num_batches, num_steps, depth = dims[0], dims[1], dims[2]
  # A [batch, time] grid where entry (b, t) equals t.
  time_grid = array_ops.reshape(
      array_ops.tile(math_ops.range(0, num_steps), [num_batches]),
      [num_batches, num_steps])
  # Broadcast each sequence length across the time axis.
  len_grid = array_ops.tile(
      array_ops.reshape(sequence_lengths, [-1, 1]), [1, num_steps])
  # 1.0 where t < length, 0.0 elsewhere.  NOTE(review): assumes the output
  # dtype is float32 -- confirm if other dtypes are expected.
  valid = math_ops.cast(
      math_ops.less(time_grid, len_grid), dtype=dtypes.float32)
  keep_mask = array_ops.tile(
      array_ops.expand_dims(valid, -1), [1, 1, depth])
  return keep_mask * tf_output
def _PickFinalStateFromHistory(acc_state, sequence_length):
  """Implements acc_state[sequence_length - 1] per batch entry.

  Args:
    acc_state: A structure of time-major accumulated state tensors,
      each shaped [time, batch, ...].
    sequence_length: An integer tensor with one length per batch entry.

  Returns:
    A structure congruent to `acc_state` holding, for each batch entry,
    the state at its final valid time step.
  """
  # This will work on all platforms, unlike the regular slice.
  last_value = []
  for state_var in nest.flatten(acc_state):
    # We compute the following with matrix operations:
    # last_var = state_var[sequence_length - 1]
    shape = array_ops.shape(state_var)
    max_time, batch_size = shape[0], shape[1]
    # [batch, time] grid where entry (b, t) equals t.
    output_time = array_ops.tile(math_ops.range(0, max_time), [batch_size])
    output_time = array_ops.reshape(output_time, [batch_size, max_time])
    lengths = array_ops.tile(array_ops.reshape(sequence_length,
                                               [-1, 1]), [1, max_time])
    # One-hot (in the state dtype) selector for t == length - 1.
    last_idx = math_ops.cast(math_ops.equal(output_time, lengths - 1),
                             dtype=state_var.dtype)
    last_idx = array_ops.transpose(last_idx)
    last_idx_for_bcast = array_ops.expand_dims(last_idx, -1)
    # Masked sum over the time axis collapses to the selected step.
    sliced = math_ops.multiply(last_idx_for_bcast, state_var)
    last_var = math_ops.reduce_sum(sliced, 0)
    last_value += [last_var]
  return nest.pack_sequence_as(acc_state, last_value)
def _PostProcessOutput(extended_acc_state, extended_final_state, func_cell,
                       total_time, inputs_lengths):
  """Post-process output of recurrent.

  This function takes the accumulated extended state and extracts the
  requested state and output.

  When `inputs_lengths` has been set, it extracts the output from the
  accumulated state. It also zeroes outputs past each sequence's length.
  It also sets the static shape information.

  Args:
    extended_acc_state: A structure containing the accumulated state at each
      time. It may contain the output at each time as well.
    extended_final_state: A structure containing the final state. It may
      contain the output at the final time.
    func_cell: The functional wrapper around the cell.
    total_time: A scalar integer tensor.
    inputs_lengths: An integer tensor with one entry per input.

  Returns:
    A tuple with the outputs at each time, and the final state.
  """
  if inputs_lengths is None:
    flat_final_state = func_cell.MaybeRemoveOutputFromState(
        nest.flatten(extended_final_state))
    tf_state = nest.pack_sequence_as(func_cell.state_template, flat_final_state)
  else:
    # The accumulated state is over the entire sequence, so we pick it
    # out from the acc_state sequence.
    flat_acc_state = func_cell.MaybeRemoveOutputFromState(
        nest.flatten(extended_acc_state))
    acc_state = nest.pack_sequence_as(
        func_cell.state_template, flat_acc_state)
    tf_state = _PickFinalStateFromHistory(acc_state, inputs_lengths)

  output_from_state = func_cell.GetOutputFromState(extended_acc_state)
  # Recurrent accumulates time-major; convert to batch-major here.
  tf_output = array_ops.transpose(output_from_state, [1, 0, 2])
  tf_output.set_shape(
      [func_cell.output_shape[0], total_time, func_cell.output_shape[1]])
  if inputs_lengths is not None:
    # Need set the outputs to zero.
    tf_output = _ApplyLengthsToBatch(inputs_lengths, tf_output)
  _SetShapeFromTemplate(tf_state, func_cell.state_template)
  return tf_output, tf_state
# pylint: disable=invalid-name
def functional_rnn(cell, inputs, sequence_length=None,
                   initial_state=None, dtype=None, time_major=False,
                   scope=None, use_tpu=False):
  """Same interface as `tf.nn.dynamic_rnn`.

  Args:
    cell: An instance of `tf.contrib.rnn.RNNCell`.
    inputs: The RNN inputs; batch-major unless `time_major` is True.
    sequence_length: An optional int32/int64 vector sized [batch_size].
    initial_state: An optional initial state matching `cell.zero_state`.
    dtype: The data type of the state; required if `initial_state` is
      not given (same contract as `tf.nn.dynamic_rnn`).
    time_major: Whether `inputs` is in "time major" format.
    scope: An optional variable scope name.
    use_tpu: Whether to enable TPU-compatible operation.

  Returns:
    A pair `(outputs, final_state)`.

  Raises:
    ValueError: If neither `initial_state` nor `dtype` is provided.
  """
  with variable_scope.variable_scope(scope or 'rnn'):
    if not time_major:
      inputs = nest.map_structure(
          lambda t: array_ops.transpose(t, [1, 0, 2]), inputs)
    inputs_flat = nest.flatten(inputs)
    batch_size = array_ops.shape(inputs_flat[0])[1]
    if initial_state is None:
      if dtype is None:
        # Fail fast with a clear message instead of an obscure error
        # from cell.zero_state(batch_size, None).
        raise ValueError(
            'If no initial_state is provided, dtype must be specified.')
      initial_state = cell.zero_state(batch_size, dtype)
    func_cell = _FunctionalRnnCell(cell, inputs, initial_state)
  if sequence_length is not None:
    max_length = math_ops.reduce_max(sequence_length)
  else:
    max_length = None
  extended_acc_state, extended_final_state = recurrent.Recurrent(
      theta=func_cell.theta,
      state0=func_cell.extended_initial_state,
      inputs=inputs,
      cell_fn=func_cell.cell_step,
      max_input_length=max_length,
      use_tpu=use_tpu)
  tf_output, tf_state = _PostProcessOutput(
      extended_acc_state, extended_final_state, func_cell,
      inputs_flat[0].shape[0], sequence_length)

  if time_major:
    # _PostProcessOutput always returns batch-major output.
    tf_output = array_ops.transpose(tf_output, [1, 0, 2])
  return tf_output, tf_state
def bidirectional_functional_rnn(
    cell_fw,
    cell_bw,
    inputs,
    initial_state_fw=None,
    initial_state_bw=None,
    dtype=None,
    sequence_length=None,
    time_major=False,
    use_tpu=False,
    scope=None):
  """Creates a bidirectional recurrent neural network.

  Performs fully dynamic unrolling of inputs in both directions. Built to be
  API compatible with `tf.nn.bidirectional_dynamic_rnn`, but implemented with
  functional control flow for TPU compatibility.

  Args:
    cell_fw: An instance of `tf.contrib.rnn.RNNCell`.
    cell_bw: An instance of `tf.contrib.rnn.RNNCell`.
    inputs: The RNN inputs. If time_major == False (default), this must be a
      Tensor (or hierarchical structure of Tensors) of shape
      [batch_size, max_time, ...]. If time_major == True, this must be a
      Tensor (or hierarchical structure of Tensors) of shape:
      [max_time, batch_size, ...]. The first two dimensions must match across
      all the inputs, but otherwise the ranks and other shape components may
      differ.
    initial_state_fw: An optional initial state for `cell_fw`. Should match
      `cell_fw.zero_state` in structure and type.
    initial_state_bw: An optional initial state for `cell_bw`. Should match
      `cell_bw.zero_state` in structure and type.
    dtype: (optional) The data type for the initial state and expected output.
      Required if initial_states are not provided or RNN state has a
      heterogeneous dtype.
    sequence_length: An optional int32/int64 vector sized [batch_size]. Used
      to copy-through state and zero-out outputs when past a batch element's
      sequence length. So it's more for correctness than performance.
    time_major: Whether the `inputs` tensor is in "time major" format.
    use_tpu: Whether to enable TPU-compatible operation. If True, does not
      truly reverse `inputs` in the backwards RNN. Once b/69305369 is fixed,
      we can remove this flag.
    scope: An optional scope name for the dynamic RNN.

  Returns:
    outputs: A tuple of `(output_fw, output_bw)`. The output of the forward
      and backward RNN. If time_major == False (default), these will
      be Tensors shaped: [batch_size, max_time, cell.output_size]. If
      time_major == True, these will be Tensors shaped:
      [max_time, batch_size, cell.output_size]. Note, if cell.output_size is
      a (possibly nested) tuple of integers or TensorShape objects, then the
      output for that direction will be a tuple having the same structure as
      cell.output_size, containing Tensors having shapes corresponding to the
      shape data in cell.output_size.
    final_states: A tuple of `(final_state_fw, final_state_bw)`. A Tensor or
      hierarchical structure of Tensors indicating the final cell state in
      each direction. Must have the same structure and shape as
      cell.zero_state.

  Raises:
    ValueError: If `initial_state_fw` is None or `initial_state_bw` is None
      and `dtype` is not provided.
  """
  # Keep this code in sync with tf.nn.dynamic_rnn for compatibility.
  with variable_scope.variable_scope(scope or 'bidirectional_rnn'):
    # Forward direction
    with variable_scope.variable_scope('fw') as fw_scope:
      output_fw, output_state_fw = functional_rnn(
          cell=cell_fw, inputs=inputs, sequence_length=sequence_length,
          initial_state=initial_state_fw, dtype=dtype,
          time_major=time_major, scope=fw_scope, use_tpu=use_tpu)
    # Backward direction
    if not time_major:
      time_dim = 1
      batch_dim = 0
    else:
      time_dim = 0
      batch_dim = 1

    def _reverse(input_, seq_lengths, seq_dim, batch_dim):
      # Reverse along the time axis, respecting per-batch lengths when
      # they are known.
      if seq_lengths is not None:
        return array_ops.reverse_sequence(
            input=input_, seq_lengths=seq_lengths,
            seq_dim=seq_dim, batch_dim=batch_dim)
      else:
        # See b/69305369.
        assert not use_tpu, (
            'Bidirectional with variable sequence lengths unsupported on '
            'TPU')
        return array_ops.reverse(input_, axis=[seq_dim])

    with variable_scope.variable_scope('bw') as bw_scope:
      # Run the backward cell over the reversed inputs, then undo the
      # reversal so outputs line up with the forward direction.
      inputs_reverse = _reverse(
          inputs, seq_lengths=sequence_length,
          seq_dim=time_dim, batch_dim=batch_dim)
      tmp, output_state_bw = functional_rnn(
          cell=cell_bw, inputs=inputs_reverse,
          sequence_length=sequence_length,
          initial_state=initial_state_bw, dtype=dtype,
          time_major=time_major, scope=bw_scope, use_tpu=use_tpu)

  output_bw = _reverse(
      tmp, seq_lengths=sequence_length,
      seq_dim=time_dim, batch_dim=batch_dim)

  outputs = (output_fw, output_bw)
  output_states = (output_state_fw, output_state_bw)

  return (outputs, output_states)
# pylint: enable=invalid-name
| apache-2.0 |
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import tempfile
from numpy.testing import assert_allclose, assert_array_equal
import numpy.ma.testutils as matest
import numpy as np
import datetime as datetime
from nose.tools import (assert_equal, assert_almost_equal, assert_not_equal,
assert_true, assert_raises)
import matplotlib.mlab as mlab
import matplotlib.cbook as cbook
from matplotlib.testing.decorators import knownfailureif, CleanupTestCase
try:
from mpl_toolkits.natgrid import _natgrid
HAS_NATGRID = True
except ImportError:
HAS_NATGRID = False
class general_testcase(CleanupTestCase):
    """Smoke tests for assorted mlab helpers: PCA, prctile and _norm."""

    def test_colinear_pca(self):
        a = mlab.PCA._get_colinear()
        pca = mlab.PCA(a)

        # colinear columns carry no variance beyond the first two components
        assert_allclose(pca.fracs[2:], 0., atol=1e-8)
        assert_allclose(pca.Y[:, 2:], 0., atol=1e-8)

    def test_prctile(self):
        # test odd lengths
        x = [1, 2, 3]
        assert_equal(mlab.prctile(x, 50), np.median(x))

        # test even lengths
        x = [1, 2, 3, 4]
        assert_equal(mlab.prctile(x, 50), np.median(x))

        # derived from email sent by jason-sage to MPL-user on 20090914
        ob1 = [1, 1, 2, 2, 1, 2, 4, 3, 2, 2, 2, 3,
               4, 5, 6, 7, 8, 9, 7, 6, 4, 5, 5]

        p = [0, 75, 100]
        expected = [1, 5.5, 9]

        # test vectorized
        actual = mlab.prctile(ob1, p)
        assert_allclose(expected, actual)

        # test scalar
        for pi, expectedi in zip(p, expected):
            actuali = mlab.prctile(ob1, pi)
            assert_allclose(expectedi, actuali)

    def test_norm(self):
        np.random.seed(0)
        N = 1000
        x = np.random.standard_normal(N)

        # mlab._norm must agree with numpy's Euclidean norm
        targ = np.linalg.norm(x)
        res = mlab._norm(x)
        assert_almost_equal(targ, res)
class spacing_testcase(CleanupTestCase):
    """Checks that mlab.logspace matches np.logspace for various N."""

    def test_logspace_tens(self):
        xmin = .01
        xmax = 1000.
        N = 6
        res = mlab.logspace(xmin, xmax, N)
        targ = np.logspace(np.log10(xmin), np.log10(xmax), N)
        assert_allclose(targ, res)

    def test_logspace_primes(self):
        xmin = .03
        xmax = 1313.
        N = 7
        res = mlab.logspace(xmin, xmax, N)
        targ = np.logspace(np.log10(xmin), np.log10(xmax), N)
        assert_allclose(targ, res)

    def test_logspace_none(self):
        # N == 0 must yield an empty result
        xmin = .03
        xmax = 1313.
        N = 0
        res = mlab.logspace(xmin, xmax, N)
        targ = np.logspace(np.log10(xmin), np.log10(xmax), N)
        assert_array_equal(targ, res)
        assert_equal(res.size, 0)

    def test_logspace_single(self):
        # N == 1 must yield exactly one sample
        xmin = .03
        xmax = 1313.
        N = 1
        res = mlab.logspace(xmin, xmax, N)
        targ = np.logspace(np.log10(xmin), np.log10(xmax), N)
        assert_array_equal(targ, res)
        assert_equal(res.size, 1)
class stride_testcase(CleanupTestCase):
    """Tests for mlab.stride_windows and mlab.stride_repeat.

    These helpers return stride-trick *views*, so in addition to values we
    verify that the results share memory with the input array.
    """

    def get_base(self, x):
        # Walk the .base chain down to the array that owns the memory,
        # proving the stride helpers returned views rather than copies.
        y = x
        while y.base is not None:
            y = y.base
        return y

    def calc_window_target(self, x, NFFT, noverlap=0):
        '''This is an adaptation of the original window extraction
        algorithm.  This is here to test to make sure the new implementation
        has the same result.'''
        step = NFFT - noverlap
        ind = np.arange(0, len(x) - NFFT + 1, step)
        n = len(ind)
        result = np.zeros((NFFT, n))

        # do the ffts of the slices
        for i in range(n):
            result[:, i] = x[ind[i]:ind[i]+NFFT]
        return result

    def test_stride_windows_2D_ValueError(self):
        x = np.arange(10)[np.newaxis]
        assert_raises(ValueError, mlab.stride_windows, x, 5)

    def test_stride_windows_0D_ValueError(self):
        x = np.array(0)
        assert_raises(ValueError, mlab.stride_windows, x, 5)

    def test_stride_windows_noverlap_gt_n_ValueError(self):
        x = np.arange(10)
        assert_raises(ValueError, mlab.stride_windows, x, 2, 3)

    def test_stride_windows_noverlap_eq_n_ValueError(self):
        x = np.arange(10)
        assert_raises(ValueError, mlab.stride_windows, x, 2, 2)

    def test_stride_windows_n_gt_lenx_ValueError(self):
        x = np.arange(10)
        assert_raises(ValueError, mlab.stride_windows, x, 11)

    def test_stride_windows_n_lt_1_ValueError(self):
        x = np.arange(10)
        assert_raises(ValueError, mlab.stride_windows, x, 0)

    def test_stride_repeat_2D_ValueError(self):
        x = np.arange(10)[np.newaxis]
        assert_raises(ValueError, mlab.stride_repeat, x, 5)

    def test_stride_repeat_axis_lt_0_ValueError(self):
        x = np.array(0)
        assert_raises(ValueError, mlab.stride_repeat, x, 5, axis=-1)

    def test_stride_repeat_axis_gt_1_ValueError(self):
        x = np.array(0)
        assert_raises(ValueError, mlab.stride_repeat, x, 5, axis=2)

    def test_stride_repeat_n_lt_1_ValueError(self):
        x = np.arange(10)
        assert_raises(ValueError, mlab.stride_repeat, x, 0)

    def test_stride_repeat_n1_axis0(self):
        x = np.arange(10)
        y = mlab.stride_repeat(x, 1)
        assert_equal((1, ) + x.shape, y.shape)
        assert_array_equal(x, y.flat)
        assert_true(self.get_base(y) is x)

    def test_stride_repeat_n1_axis1(self):
        x = np.arange(10)
        y = mlab.stride_repeat(x, 1, axis=1)
        assert_equal(x.shape + (1, ), y.shape)
        assert_array_equal(x, y.flat)
        assert_true(self.get_base(y) is x)

    def test_stride_repeat_n5_axis0(self):
        x = np.arange(10)
        y = mlab.stride_repeat(x, 5)
        yr = np.repeat(x[np.newaxis], 5, axis=0)
        assert_equal(yr.shape, y.shape)
        assert_array_equal(yr, y)
        assert_equal((5, ) + x.shape, y.shape)
        assert_true(self.get_base(y) is x)

    def test_stride_repeat_n5_axis1(self):
        x = np.arange(10)
        y = mlab.stride_repeat(x, 5, axis=1)
        yr = np.repeat(x[np.newaxis], 5, axis=0).T
        assert_equal(yr.shape, y.shape)
        assert_array_equal(yr, y)
        assert_equal(x.shape + (5, ), y.shape)
        assert_true(self.get_base(y) is x)

    def test_stride_windows_n1_noverlap0_axis0(self):
        x = np.arange(10)
        y = mlab.stride_windows(x, 1)
        yt = self.calc_window_target(x, 1)
        assert_equal(yt.shape, y.shape)
        assert_array_equal(yt, y)
        assert_equal((1, ) + x.shape, y.shape)
        assert_true(self.get_base(y) is x)

    def test_stride_windows_n1_noverlap0_axis1(self):
        x = np.arange(10)
        y = mlab.stride_windows(x, 1, axis=1)
        yt = self.calc_window_target(x, 1).T
        assert_equal(yt.shape, y.shape)
        assert_array_equal(yt, y)
        assert_equal(x.shape + (1, ), y.shape)
        assert_true(self.get_base(y) is x)

    def test_stride_windows_n5_noverlap0_axis0(self):
        x = np.arange(100)
        y = mlab.stride_windows(x, 5)
        yt = self.calc_window_target(x, 5)
        assert_equal(yt.shape, y.shape)
        assert_array_equal(yt, y)
        assert_equal((5, 20), y.shape)
        assert_true(self.get_base(y) is x)

    def test_stride_windows_n5_noverlap0_axis1(self):
        x = np.arange(100)
        y = mlab.stride_windows(x, 5, axis=1)
        yt = self.calc_window_target(x, 5).T
        assert_equal(yt.shape, y.shape)
        assert_array_equal(yt, y)
        assert_equal((20, 5), y.shape)
        assert_true(self.get_base(y) is x)

    def test_stride_windows_n15_noverlap2_axis0(self):
        x = np.arange(100)
        y = mlab.stride_windows(x, 15, 2)
        yt = self.calc_window_target(x, 15, 2)
        assert_equal(yt.shape, y.shape)
        assert_array_equal(yt, y)
        assert_equal((15, 7), y.shape)
        assert_true(self.get_base(y) is x)

    def test_stride_windows_n15_noverlap2_axis1(self):
        x = np.arange(100)
        y = mlab.stride_windows(x, 15, 2, axis=1)
        yt = self.calc_window_target(x, 15, 2).T
        assert_equal(yt.shape, y.shape)
        assert_array_equal(yt, y)
        assert_equal((7, 15), y.shape)
        assert_true(self.get_base(y) is x)

    def test_stride_windows_n13_noverlapn3_axis0(self):
        # negative noverlap leaves gaps between windows
        x = np.arange(100)
        y = mlab.stride_windows(x, 13, -3)
        yt = self.calc_window_target(x, 13, -3)
        assert_equal(yt.shape, y.shape)
        assert_array_equal(yt, y)
        assert_equal((13, 6), y.shape)
        assert_true(self.get_base(y) is x)

    def test_stride_windows_n13_noverlapn3_axis1(self):
        x = np.arange(100)
        y = mlab.stride_windows(x, 13, -3, axis=1)
        yt = self.calc_window_target(x, 13, -3).T
        assert_equal(yt.shape, y.shape)
        assert_array_equal(yt, y)
        assert_equal((6, 13), y.shape)
        assert_true(self.get_base(y) is x)

    def test_stride_windows_n32_noverlap0_axis0_unflatten(self):
        # striding a flattened tiled array must reconstruct the tiles
        n = 32
        x = np.arange(n)[np.newaxis]
        x1 = np.tile(x, (21, 1))
        x2 = x1.flatten()
        y = mlab.stride_windows(x2, n)
        assert_equal(y.shape, x1.T.shape)
        assert_array_equal(y, x1.T)

    def test_stride_windows_n32_noverlap0_axis1_unflatten(self):
        n = 32
        x = np.arange(n)[np.newaxis]
        x1 = np.tile(x, (21, 1))
        x2 = x1.flatten()
        y = mlab.stride_windows(x2, n, axis=1)
        assert_equal(y.shape, x1.shape)
        assert_array_equal(y, x1)

    def test_stride_ensure_integer_type(self):
        N = 100
        x = np.empty(N + 20, dtype='>f4')
        x.fill(np.NaN)
        y = x[10:-10]
        y.fill(0.3)
        # previous to #3845 lead to corrupt access
        y_strided = mlab.stride_windows(y, n=33, noverlap=0.6)
        assert_array_equal(y_strided, 0.3)
        # previous to #3845 lead to corrupt access
        y_strided = mlab.stride_windows(y, n=33.3, noverlap=0)
        assert_array_equal(y_strided, 0.3)
        # even previous to #3845 could not find any problematic
        # configuration however, let's be sure it's not accidentally
        # introduced
        y_strided = mlab.stride_repeat(y, n=33.815)
        assert_array_equal(y_strided, 0.3)
class csv_testcase(CleanupTestCase):
    """Round-trip and date-parsing tests for mlab.rec2csv / mlab.csv2rec."""

    def setUp(self):
        # Fresh scratch file per test: text mode with newline='' on Py3,
        # binary on Py2, matching what the csv module expects.
        if six.PY3:
            self.fd = tempfile.TemporaryFile(suffix='csv', mode="w+",
                                             newline='')
        else:
            self.fd = tempfile.TemporaryFile(suffix='csv', mode="wb+")

    def tearDown(self):
        self.fd.close()

    def test_recarray_csv_roundtrip(self):
        expected = np.recarray((99,),
                               [(str('x'), np.float),
                                (str('y'), np.float),
                                (str('t'), np.float)])
        # initialising all values: uninitialised memory sometimes produces
        # floats that do not round-trip to string and back.
        expected['x'][:] = np.linspace(-1e9, -1, 99)
        expected['y'][:] = np.linspace(1, 1e9, 99)
        expected['t'][:] = np.linspace(0, 0.01, 99)

        mlab.rec2csv(expected, self.fd)
        self.fd.seek(0)
        actual = mlab.csv2rec(self.fd)

        assert_allclose(expected['x'], actual['x'])
        assert_allclose(expected['y'], actual['y'])
        assert_allclose(expected['t'], actual['t'])

    def test_rec2csv_bad_shape_ValueError(self):
        bad = np.recarray((99, 4), [(str('x'), np.float),
                                    (str('y'), np.float)])

        # the bad recarray should trigger a ValueError for having ndim > 1.
        assert_raises(ValueError, mlab.rec2csv, bad, self.fd)

    def test_csv2rec_names_with_comments(self):
        self.fd.write('# comment\n1,2,3\n4,5,6\n')
        self.fd.seek(0)
        array = mlab.csv2rec(self.fd, names='a,b,c')
        assert len(array) == 2
        assert len(array.dtype) == 3

    def test_csv2rec_usdate(self):
        # default parsing: month first (US convention)
        self.fd.write('01/11/14\n' +
                      '03/05/76 12:00:01 AM\n' +
                      '07/09/83 5:17:34 PM\n' +
                      '06/20/2054 2:31:45 PM\n' +
                      '10/31/00 11:50:23 AM\n')
        expected = [datetime.datetime(2014, 1, 11, 0, 0),
                    datetime.datetime(1976, 3, 5, 0, 0, 1),
                    datetime.datetime(1983, 7, 9, 17, 17, 34),
                    datetime.datetime(2054, 6, 20, 14, 31, 45),
                    datetime.datetime(2000, 10, 31, 11, 50, 23)]
        self.fd.seek(0)
        array = mlab.csv2rec(self.fd, names='a')
        assert_array_equal(array['a'].tolist(), expected)

    def test_csv2rec_dayfirst(self):
        # dayfirst=True: day/month/year ordering
        self.fd.write('11/01/14\n' +
                      '05/03/76 12:00:01 AM\n' +
                      '09/07/83 5:17:34 PM\n' +
                      '20/06/2054 2:31:45 PM\n' +
                      '31/10/00 11:50:23 AM\n')
        expected = [datetime.datetime(2014, 1, 11, 0, 0),
                    datetime.datetime(1976, 3, 5, 0, 0, 1),
                    datetime.datetime(1983, 7, 9, 17, 17, 34),
                    datetime.datetime(2054, 6, 20, 14, 31, 45),
                    datetime.datetime(2000, 10, 31, 11, 50, 23)]
        self.fd.seek(0)
        array = mlab.csv2rec(self.fd, names='a', dayfirst=True)
        assert_array_equal(array['a'].tolist(), expected)

    def test_csv2rec_yearfirst(self):
        # yearfirst=True: year/month/day ordering
        self.fd.write('14/01/11\n' +
                      '76/03/05 12:00:01 AM\n' +
                      '83/07/09 5:17:34 PM\n' +
                      '2054/06/20 2:31:45 PM\n' +
                      '00/10/31 11:50:23 AM\n')
        expected = [datetime.datetime(2014, 1, 11, 0, 0),
                    datetime.datetime(1976, 3, 5, 0, 0, 1),
                    datetime.datetime(1983, 7, 9, 17, 17, 34),
                    datetime.datetime(2054, 6, 20, 14, 31, 45),
                    datetime.datetime(2000, 10, 31, 11, 50, 23)]
        self.fd.seek(0)
        array = mlab.csv2rec(self.fd, names='a', yearfirst=True)
        assert_array_equal(array['a'].tolist(), expected)
class rec2txt_testcase(CleanupTestCase):
    """Formatting test for mlab.rec2txt."""

    def test_csv2txt_basic(self):
        # str() calls around field names necessary b/c as of numpy 1.11
        # dtype doesn't like unicode names (caused by unicode_literals import)
        a = np.array([(1.0, 2, 'foo', 'bing'),
                      (2.0, 3, 'bar', 'blah')],
                     dtype=np.dtype([(str('x'), np.float32),
                                     (str('y'), np.int8),
                                     (str('s'), str, 3),
                                     (str('s2'), str, 4)]))
        truth = ('       x   y   s    s2\n'
                 '   1.000   2   foo  bing \n'
                 '   2.000   3   bar  blah ').splitlines()
        assert_equal(mlab.rec2txt(a).splitlines(), truth)
class window_testcase(CleanupTestCase):
    def setUp(self):
        # Deterministic fixtures shared by every window test.
        np.random.seed(0)
        self.n = 1000
        self.x = np.arange(0., self.n)
        self.sig_rand = np.random.standard_normal(self.n) + 100.
        self.sig_ones = np.ones_like(self.x)
        self.sig_slope = np.linspace(-10., 90., self.n)
    def check_window_apply_repeat(self, x, window, NFFT, noverlap):
        '''This is an adaptation of the original window application
        algorithm.  This is here to test to make sure the new implementation
        has the same result.'''
        step = NFFT - noverlap
        ind = np.arange(0, len(x) - NFFT + 1, step)
        n = len(ind)
        result = np.zeros((NFFT, n))

        # window may be either an array of values or a callable factory
        if cbook.iterable(window):
            windowVals = window
        else:
            windowVals = window(np.ones((NFFT,), x.dtype))

        # do the ffts of the slices
        for i in range(n):
            result[:, i] = windowVals * x[ind[i]:ind[i]+NFFT]
        return result
    def test_window_none_rand(self):
        # window_none is the identity.
        # NOTE(review): uses sig_ones despite the name (apparently swapped
        # with test_window_none_ones) -- harmless, both check identity.
        res = mlab.window_none(self.sig_ones)
        assert_array_equal(res, self.sig_ones)
    def test_window_none_ones(self):
        # window_none is the identity (input here is the random signal).
        res = mlab.window_none(self.sig_rand)
        assert_array_equal(res, self.sig_rand)
    def test_window_hanning_rand(self):
        # window_hanning must match elementwise np.hanning weighting.
        targ = np.hanning(len(self.sig_rand)) * self.sig_rand
        res = mlab.window_hanning(self.sig_rand)

        assert_allclose(targ, res, atol=1e-06)
    def test_window_hanning_ones(self):
        # On an all-ones signal the result is the window itself.
        targ = np.hanning(len(self.sig_ones))
        res = mlab.window_hanning(self.sig_ones)

        assert_allclose(targ, res, atol=1e-06)
    def test_apply_window_1D_axis1_ValueError(self):
        # axis=1 is invalid for a 1D signal
        x = self.sig_rand
        window = mlab.window_hanning
        assert_raises(ValueError, mlab.apply_window, x, window, axis=1,
                      return_window=False)
    def test_apply_window_1D_els_wrongsize_ValueError(self):
        # a precomputed window of the wrong length must be rejected
        x = self.sig_rand
        window = mlab.window_hanning(np.ones(x.shape[0]-1))
        assert_raises(ValueError, mlab.apply_window, x, window)
    def test_apply_window_0D_ValueError(self):
        # scalar (0D) input must be rejected
        x = np.array(0)
        window = mlab.window_hanning
        assert_raises(ValueError, mlab.apply_window, x, window, axis=1,
                      return_window=False)
    def test_apply_window_3D_ValueError(self):
        # 3D input must be rejected
        x = self.sig_rand[np.newaxis][np.newaxis]
        window = mlab.window_hanning
        assert_raises(ValueError, mlab.apply_window, x, window, axis=1,
                      return_window=False)
    def test_apply_window_hanning_1D(self):
        # apply_window with a callable window; also verifies return_window.
        x = self.sig_rand
        window = mlab.window_hanning
        window1 = mlab.window_hanning(np.ones(x.shape[0]))
        y, window2 = mlab.apply_window(x, window, return_window=True)
        yt = window(x)
        assert_equal(yt.shape, y.shape)
        assert_equal(x.shape, y.shape)
        assert_allclose(yt, y, atol=1e-06)
        assert_array_equal(window1, window2)
    def test_apply_window_hanning_1D_axis0(self):
        # explicit axis=0 on a 1D signal
        x = self.sig_rand
        window = mlab.window_hanning
        y = mlab.apply_window(x, window, axis=0, return_window=False)
        yt = window(x)

        assert_equal(yt.shape, y.shape)
        assert_equal(x.shape, y.shape)
        assert_allclose(yt, y, atol=1e-06)
    def test_apply_window_hanning_els_1D_axis0(self):
        # passing a precomputed window array instead of a callable
        x = self.sig_rand
        window = mlab.window_hanning(np.ones(x.shape[0]))
        window1 = mlab.window_hanning
        y = mlab.apply_window(x, window, axis=0, return_window=False)
        yt = window1(x)

        assert_equal(yt.shape, y.shape)
        assert_equal(x.shape, y.shape)
        assert_allclose(yt, y, atol=1e-06)
    def test_apply_window_hanning_2D_axis0(self):
        # 2D input windowed along axis 0, checked column by column
        x = np.random.standard_normal([1000, 10]) + 100.
        window = mlab.window_hanning
        y = mlab.apply_window(x, window, axis=0, return_window=False)
        yt = np.zeros_like(x)
        for i in range(x.shape[1]):
            yt[:, i] = window(x[:, i])

        assert_equal(yt.shape, y.shape)
        assert_equal(x.shape, y.shape)
        assert_allclose(yt, y, atol=1e-06)
    def test_apply_window_hanning_els1_2D_axis0(self):
        # precomputed window array on 2D input, axis 0
        x = np.random.standard_normal([1000, 10]) + 100.
        window = mlab.window_hanning(np.ones(x.shape[0]))
        window1 = mlab.window_hanning
        y = mlab.apply_window(x, window, axis=0, return_window=False)
        yt = np.zeros_like(x)
        for i in range(x.shape[1]):
            yt[:, i] = window1(x[:, i])

        assert_equal(yt.shape, y.shape)
        assert_equal(x.shape, y.shape)
        assert_allclose(yt, y, atol=1e-06)
    def test_apply_window_hanning_els2_2D_axis0(self):
        # callable window on 2D input; compare against manual multiply
        # and check return_window echoes the window values.
        x = np.random.standard_normal([1000, 10]) + 100.
        window = mlab.window_hanning
        window1 = mlab.window_hanning(np.ones(x.shape[0]))
        y, window2 = mlab.apply_window(x, window, axis=0, return_window=True)
        yt = np.zeros_like(x)
        for i in range(x.shape[1]):
            yt[:, i] = window1*x[:, i]

        assert_equal(yt.shape, y.shape)
        assert_equal(x.shape, y.shape)
        assert_allclose(yt, y, atol=1e-06)
        assert_array_equal(window1, window2)
    def test_apply_window_hanning_els3_2D_axis0(self):
        # callable window and precomputed window must yield the same result
        x = np.random.standard_normal([1000, 10]) + 100.
        window = mlab.window_hanning
        window1 = mlab.window_hanning(np.ones(x.shape[0]))
        y, window2 = mlab.apply_window(x, window, axis=0, return_window=True)
        yt = mlab.apply_window(x, window1, axis=0, return_window=False)

        assert_equal(yt.shape, y.shape)
        assert_equal(x.shape, y.shape)
        assert_allclose(yt, y, atol=1e-06)
        assert_array_equal(window1, window2)
def test_apply_window_hanning_2D_axis1(self):
    """apply_window along axis 1 of a 2D array must equal windowing each
    row independently."""
    data = np.random.standard_normal([10, 1000]) + 100.
    win = mlab.window_hanning
    result = mlab.apply_window(data, win, axis=1, return_window=False)
    expected = np.zeros_like(data)
    for row in range(data.shape[0]):
        expected[row, :] = win(data[row, :])
    assert_equal(expected.shape, result.shape)
    assert_equal(data.shape, result.shape)
    assert_allclose(expected, result, atol=1e-06)
def test_apply_window_hanning_2D_els1_axis1(self):
    """Passing a precomputed window array along axis 1 must equal windowing
    each row with the window function.

    Renamed from ``test_apply_window_hanning_2D__els1_axis1`` (stray double
    underscore) to match the naming of the sibling axis-0 tests.
    """
    x = np.random.standard_normal([10, 1000]) + 100.
    window = mlab.window_hanning(np.ones(x.shape[1]))
    window1 = mlab.window_hanning
    y = mlab.apply_window(x, window, axis=1, return_window=False)
    yt = np.zeros_like(x)
    for i in range(x.shape[0]):
        yt[i, :] = window1(x[i, :])
    assert_equal(yt.shape, y.shape)
    assert_equal(x.shape, y.shape)
    assert_allclose(yt, y, atol=1e-06)
def test_apply_window_hanning_2D_els2_axis1(self):
    """With return_window=True along axis 1 the returned window must equal
    the precomputed one, and the result per-row multiplication."""
    data = np.random.standard_normal([10, 1000]) + 100.
    window_func = mlab.window_hanning
    precomputed = mlab.window_hanning(np.ones(data.shape[1]))
    result, returned_win = mlab.apply_window(data, window_func, axis=1,
                                             return_window=True)
    expected = np.zeros_like(data)
    for row in range(data.shape[0]):
        expected[row, :] = precomputed * data[row, :]
    assert_equal(expected.shape, result.shape)
    assert_equal(data.shape, result.shape)
    assert_allclose(expected, result, atol=1e-06)
    assert_array_equal(precomputed, returned_win)
def test_apply_window_hanning_2D_els3_axis1(self):
    """Function-window and precomputed-window calls along axis 1 must
    produce identical results."""
    data = np.random.standard_normal([10, 1000]) + 100.
    window_func = mlab.window_hanning
    precomputed = mlab.window_hanning(np.ones(data.shape[1]))
    result = mlab.apply_window(data, window_func, axis=1,
                               return_window=False)
    expected = mlab.apply_window(data, precomputed, axis=1,
                                 return_window=False)
    assert_equal(expected.shape, result.shape)
    assert_equal(data.shape, result.shape)
    assert_allclose(expected, result, atol=1e-06)
def test_apply_window_stride_windows_hanning_2D_n13_noverlapn3_axis0(self):
    """Windowing strided (overlapping) segments must match the reference
    repeat-apply helper; the output shape differs from the input's."""
    data = self.sig_rand
    win = mlab.window_hanning
    segments = mlab.stride_windows(data, n=13, noverlap=2, axis=0)
    result = mlab.apply_window(segments, win, axis=0, return_window=False)
    expected = self.check_window_apply_repeat(data, win, 13, 2)
    assert_equal(expected.shape, result.shape)
    assert_not_equal(data.shape, result.shape)
    assert_allclose(expected, result, atol=1e-06)
def test_apply_window_hanning_2D_stack_axis1(self):
    """Windowing a tiled stack of rows along axis 1 must match windowing
    the two base rows individually and tiling the results."""
    base = np.arange(32)
    row_a = base + 5
    row_b = base + 3.3
    expected_a = mlab.apply_window(row_a, mlab.window_hanning)
    expected_b = mlab.window_hanning(row_b)
    stacked = np.tile(np.vstack([row_a, row_b]), (20, 1))
    expected = np.tile(np.vstack([expected_a, expected_b]), (20, 1))
    result = mlab.apply_window(stacked, mlab.window_hanning, axis=1,
                               return_window=False)
    assert_allclose(expected, result, atol=1e-08)
def test_apply_window_hanning_2D_stack_windows_axis1(self):
    # NOTE(review): this body is byte-identical to
    # test_apply_window_hanning_2D_stack_axis1 above; judging by the name,
    # the "windows" variant was presumably meant to exercise a
    # stride/precomputed-window path that never made it in -- confirm
    # the intended difference before changing either test.
    ydata = np.arange(32)
    ydata1 = ydata+5
    ydata2 = ydata+3.3
    # per-row references: one via apply_window, one via the window function
    ycontrol1 = mlab.apply_window(ydata1, mlab.window_hanning)
    ycontrol2 = mlab.window_hanning(ydata2)
    ydata = np.vstack([ydata1, ydata2])
    ycontrol = np.vstack([ycontrol1, ycontrol2])
    # tile to 40 rows so apply_window operates on a tall stack
    ydata = np.tile(ydata, (20, 1))
    ycontrol = np.tile(ycontrol, (20, 1))
    result = mlab.apply_window(ydata, mlab.window_hanning, axis=1,
                               return_window=False)
    assert_allclose(ycontrol, result, atol=1e-08)
def test_apply_window_hanning_2D_stack_windows_axis1_unflatten(self):
    """Flattening the stacked rows and re-segmenting them with
    stride_windows must yield the same windowed data (transposed)."""
    n = 32
    base = np.arange(n)
    row_a = base + 5
    row_b = base + 3.3
    expected_a = mlab.apply_window(row_a, mlab.window_hanning)
    expected_b = mlab.window_hanning(row_b)
    stacked = np.tile(np.vstack([row_a, row_b]), (20, 1))
    expected = np.tile(np.vstack([expected_a, expected_b]), (20, 1))
    flat = stacked.flatten()
    segments = mlab.stride_windows(flat, n, noverlap=0, axis=0)
    result = mlab.apply_window(segments, mlab.window_hanning, axis=0,
                               return_window=False)
    assert_allclose(expected.T, result, atol=1e-08)
class detrend_testcase(CleanupTestCase):
    """Tests for mlab's detrending helpers.

    Covers ``detrend_none``, ``detrend_mean``, ``detrend_linear``,
    ``demean``, and the string/function dispatch of ``mlab.detrend``,
    over 0D scalars, 1D signals, lists, and 2D stacks along each axis.

    Fixes relative to the previous revision:

    * the 0D ``detrend_none`` tests asserted ``input == targ`` -- a
      tautology, since ``targ = input`` -- instead of checking ``res``;
    * five ``test_detrend_mean_2D_*`` methods were defined twice with
      identical bodies (the later definitions shadowed the earlier ones);
      the redundant copies are removed;
    * two linear-detrend tests reused an already-defined method name, so
      the earlier definitions never ran; they are renamed to match the
      axis they actually exercise.
    """

    def setUp(self):
        # Deterministic fixtures: a zero signal, constant offset, linear
        # slope (plus its mean-removed form), and a zero-mean "base"
        # signal of noise + sinusoid.
        np.random.seed(0)
        n = 1000
        x = np.linspace(0., 100, n)
        self.sig_zeros = np.zeros(n)
        self.sig_off = self.sig_zeros + 100.
        self.sig_slope = np.linspace(-10., 90., n)
        self.sig_slope_mean = x - x.mean()
        sig_rand = np.random.standard_normal(n)
        sig_sin = np.sin(x*2*np.pi/(n/100))
        sig_rand -= sig_rand.mean()
        sig_sin -= sig_sin.mean()
        self.sig_base = sig_rand + sig_sin
        self.atol = 1e-08

    def test_detrend_none_0D_zeros(self):
        # detrend_none is the identity; the result must be checked, not
        # the (unchanged) input.
        input = 0.
        targ = input
        res = mlab.detrend_none(input)
        assert_equal(res, targ)

    def test_detrend_none_0D_zeros_axis1(self):
        # The axis argument is accepted but irrelevant for a scalar.
        input = 0.
        targ = input
        res = mlab.detrend_none(input, axis=1)
        assert_equal(res, targ)

    def test_detrend_str_none_0D_zeros(self):
        input = 0.
        targ = input
        res = mlab.detrend(input, key='none')
        assert_equal(res, targ)

    def test_detrend_detrend_none_0D_zeros(self):
        input = 0.
        targ = input
        res = mlab.detrend(input, key=mlab.detrend_none)
        assert_equal(res, targ)

    def test_detrend_none_0D_off(self):
        input = 5.5
        targ = input
        res = mlab.detrend_none(input)
        assert_equal(res, targ)

    def test_detrend_none_1D_off(self):
        input = self.sig_off
        targ = input
        res = mlab.detrend_none(input)
        assert_array_equal(res, targ)

    def test_detrend_none_1D_slope(self):
        input = self.sig_slope
        targ = input
        res = mlab.detrend_none(input)
        assert_array_equal(res, targ)

    def test_detrend_none_1D_base(self):
        input = self.sig_base
        targ = input
        res = mlab.detrend_none(input)
        assert_array_equal(res, targ)

    def test_detrend_none_1D_base_slope_off_list(self):
        # Plain lists must pass through untouched as well.
        input = self.sig_base + self.sig_slope + self.sig_off
        targ = input.tolist()
        res = mlab.detrend_none(input.tolist())
        assert_equal(res, targ)

    def test_detrend_none_2D(self):
        arri = [self.sig_base,
                self.sig_base + self.sig_off,
                self.sig_base + self.sig_slope,
                self.sig_base + self.sig_off + self.sig_slope]
        input = np.vstack(arri)
        targ = input
        res = mlab.detrend_none(input)
        assert_array_equal(res, targ)

    def test_detrend_none_2D_T(self):
        arri = [self.sig_base,
                self.sig_base + self.sig_off,
                self.sig_base + self.sig_slope,
                self.sig_base + self.sig_off + self.sig_slope]
        input = np.vstack(arri)
        targ = input
        res = mlab.detrend_none(input.T)
        assert_array_equal(res.T, targ)

    def test_detrend_mean_0D_zeros(self):
        input = 0.
        targ = 0.
        res = mlab.detrend_mean(input)
        assert_almost_equal(res, targ)

    def test_detrend_str_mean_0D_zeros(self):
        input = 0.
        targ = 0.
        res = mlab.detrend(input, key='mean')
        assert_almost_equal(res, targ)

    def test_detrend_detrend_mean_0D_zeros(self):
        input = 0.
        targ = 0.
        res = mlab.detrend(input, key=mlab.detrend_mean)
        assert_almost_equal(res, targ)

    def test_detrend_mean_0D_off(self):
        # A scalar is its own mean, so mean-detrending yields zero.
        input = 5.5
        targ = 0.
        res = mlab.detrend_mean(input)
        assert_almost_equal(res, targ)

    def test_detrend_str_mean_0D_off(self):
        input = 5.5
        targ = 0.
        res = mlab.detrend(input, key='mean')
        assert_almost_equal(res, targ)

    def test_detrend_detrend_mean_0D_off(self):
        input = 5.5
        targ = 0.
        res = mlab.detrend(input, key=mlab.detrend_mean)
        assert_almost_equal(res, targ)

    def test_detrend_mean_1D_zeros(self):
        input = self.sig_zeros
        targ = self.sig_zeros
        res = mlab.detrend_mean(input)
        assert_allclose(res, targ, atol=self.atol)

    def test_detrend_mean_1D_base(self):
        # sig_base is already zero-mean, so it is unchanged.
        input = self.sig_base
        targ = self.sig_base
        res = mlab.detrend_mean(input)
        assert_allclose(res, targ, atol=self.atol)

    def test_detrend_mean_1D_base_off(self):
        input = self.sig_base + self.sig_off
        targ = self.sig_base
        res = mlab.detrend_mean(input)
        assert_allclose(res, targ, atol=self.atol)

    def test_detrend_mean_1D_base_slope(self):
        # Mean-detrending a slope leaves the mean-removed slope behind.
        input = self.sig_base + self.sig_slope
        targ = self.sig_base + self.sig_slope_mean
        res = mlab.detrend_mean(input)
        assert_allclose(res, targ, atol=self.atol)

    def test_detrend_mean_1D_base_slope_off(self):
        input = self.sig_base + self.sig_slope + self.sig_off
        targ = self.sig_base + self.sig_slope_mean
        res = mlab.detrend_mean(input)
        assert_allclose(res, targ, atol=1e-08)

    def test_detrend_mean_1D_base_slope_off_axis0(self):
        input = self.sig_base + self.sig_slope + self.sig_off
        targ = self.sig_base + self.sig_slope_mean
        res = mlab.detrend_mean(input, axis=0)
        assert_allclose(res, targ, atol=1e-08)

    def test_detrend_mean_1D_base_slope_off_list(self):
        input = self.sig_base + self.sig_slope + self.sig_off
        targ = self.sig_base + self.sig_slope_mean
        res = mlab.detrend_mean(input.tolist())
        assert_allclose(res, targ, atol=1e-08)

    def test_detrend_mean_1D_base_slope_off_list_axis0(self):
        input = self.sig_base + self.sig_slope + self.sig_off
        targ = self.sig_base + self.sig_slope_mean
        res = mlab.detrend_mean(input.tolist(), axis=0)
        assert_allclose(res, targ, atol=1e-08)

    def test_demean_0D_off(self):
        input = 5.5
        targ = 0.
        res = mlab.demean(input, axis=None)
        assert_almost_equal(res, targ)

    def test_demean_1D_base_slope_off(self):
        input = self.sig_base + self.sig_slope + self.sig_off
        targ = self.sig_base + self.sig_slope_mean
        res = mlab.demean(input)
        assert_allclose(res, targ, atol=1e-08)

    def test_demean_1D_base_slope_off_axis0(self):
        input = self.sig_base + self.sig_slope + self.sig_off
        targ = self.sig_base + self.sig_slope_mean
        res = mlab.demean(input, axis=0)
        assert_allclose(res, targ, atol=1e-08)

    def test_demean_1D_base_slope_off_list(self):
        input = self.sig_base + self.sig_slope + self.sig_off
        targ = self.sig_base + self.sig_slope_mean
        res = mlab.demean(input.tolist())
        assert_allclose(res, targ, atol=1e-08)

    def test_detrend_mean_2D_default(self):
        arri = [self.sig_off,
                self.sig_base + self.sig_off]
        arrt = [self.sig_zeros,
                self.sig_base]
        input = np.vstack(arri)
        targ = np.vstack(arrt)
        res = mlab.detrend_mean(input)
        assert_allclose(res, targ, atol=1e-08)

    # NOTE: a second, byte-identical copy of the five tests below used to
    # follow them.  Duplicate method names shadow the earlier definitions
    # within a class body, so the redundant copies were removed.

    def test_detrend_mean_2D_none(self):
        arri = [self.sig_off,
                self.sig_base + self.sig_off]
        arrt = [self.sig_zeros,
                self.sig_base]
        input = np.vstack(arri)
        targ = np.vstack(arrt)
        res = mlab.detrend_mean(input, axis=None)
        assert_allclose(res, targ,
                        atol=1e-08)

    def test_detrend_mean_2D_none_T(self):
        arri = [self.sig_off,
                self.sig_base + self.sig_off]
        arrt = [self.sig_zeros,
                self.sig_base]
        input = np.vstack(arri).T
        targ = np.vstack(arrt)
        res = mlab.detrend_mean(input, axis=None)
        assert_allclose(res.T, targ,
                        atol=1e-08)

    def test_detrend_mean_2D_axis0(self):
        arri = [self.sig_base,
                self.sig_base + self.sig_off,
                self.sig_base + self.sig_slope,
                self.sig_base + self.sig_off + self.sig_slope]
        arrt = [self.sig_base,
                self.sig_base,
                self.sig_base + self.sig_slope_mean,
                self.sig_base + self.sig_slope_mean]
        input = np.vstack(arri).T
        targ = np.vstack(arrt).T
        res = mlab.detrend_mean(input, axis=0)
        assert_allclose(res, targ,
                        atol=1e-08)

    def test_detrend_mean_2D_axis1(self):
        arri = [self.sig_base,
                self.sig_base + self.sig_off,
                self.sig_base + self.sig_slope,
                self.sig_base + self.sig_off + self.sig_slope]
        arrt = [self.sig_base,
                self.sig_base,
                self.sig_base + self.sig_slope_mean,
                self.sig_base + self.sig_slope_mean]
        input = np.vstack(arri)
        targ = np.vstack(arrt)
        res = mlab.detrend_mean(input, axis=1)
        assert_allclose(res, targ,
                        atol=1e-08)

    def test_detrend_mean_2D_axism1(self):
        arri = [self.sig_base,
                self.sig_base + self.sig_off,
                self.sig_base + self.sig_slope,
                self.sig_base + self.sig_off + self.sig_slope]
        arrt = [self.sig_base,
                self.sig_base,
                self.sig_base + self.sig_slope_mean,
                self.sig_base + self.sig_slope_mean]
        input = np.vstack(arri)
        targ = np.vstack(arrt)
        res = mlab.detrend_mean(input, axis=-1)
        assert_allclose(res, targ,
                        atol=1e-08)

    def test_detrend_2D_default(self):
        arri = [self.sig_off,
                self.sig_base + self.sig_off]
        arrt = [self.sig_zeros,
                self.sig_base]
        input = np.vstack(arri)
        targ = np.vstack(arrt)
        res = mlab.detrend(input)
        assert_allclose(res, targ, atol=1e-08)

    def test_detrend_2D_none(self):
        arri = [self.sig_off,
                self.sig_base + self.sig_off]
        arrt = [self.sig_zeros,
                self.sig_base]
        input = np.vstack(arri)
        targ = np.vstack(arrt)
        res = mlab.detrend(input, axis=None)
        assert_allclose(res, targ, atol=1e-08)

    def test_detrend_str_mean_2D_axis0(self):
        arri = [self.sig_base,
                self.sig_base + self.sig_off,
                self.sig_base + self.sig_slope,
                self.sig_base + self.sig_off + self.sig_slope]
        arrt = [self.sig_base,
                self.sig_base,
                self.sig_base + self.sig_slope_mean,
                self.sig_base + self.sig_slope_mean]
        input = np.vstack(arri).T
        targ = np.vstack(arrt).T
        res = mlab.detrend(input, key='mean', axis=0)
        assert_allclose(res, targ,
                        atol=1e-08)

    def test_detrend_str_constant_2D_none_T(self):
        # 'constant' is an alias for mean detrending.
        arri = [self.sig_off,
                self.sig_base + self.sig_off]
        arrt = [self.sig_zeros,
                self.sig_base]
        input = np.vstack(arri).T
        targ = np.vstack(arrt)
        res = mlab.detrend(input, key='constant', axis=None)
        assert_allclose(res.T, targ,
                        atol=1e-08)

    def test_detrend_str_default_2D_axis1(self):
        # 'default' also falls back to mean detrending.
        arri = [self.sig_base,
                self.sig_base + self.sig_off,
                self.sig_base + self.sig_slope,
                self.sig_base + self.sig_off + self.sig_slope]
        arrt = [self.sig_base,
                self.sig_base,
                self.sig_base + self.sig_slope_mean,
                self.sig_base + self.sig_slope_mean]
        input = np.vstack(arri)
        targ = np.vstack(arrt)
        res = mlab.detrend(input, key='default', axis=1)
        assert_allclose(res, targ,
                        atol=1e-08)

    def test_detrend_detrend_mean_2D_axis0(self):
        arri = [self.sig_base,
                self.sig_base + self.sig_off,
                self.sig_base + self.sig_slope,
                self.sig_base + self.sig_off + self.sig_slope]
        arrt = [self.sig_base,
                self.sig_base,
                self.sig_base + self.sig_slope_mean,
                self.sig_base + self.sig_slope_mean]
        input = np.vstack(arri).T
        targ = np.vstack(arrt).T
        res = mlab.detrend(input, key=mlab.detrend_mean, axis=0)
        assert_allclose(res, targ,
                        atol=1e-08)

    def test_demean_2D_default(self):
        # demean defaults to axis=0, hence the transposed stack.
        arri = [self.sig_base,
                self.sig_base + self.sig_off,
                self.sig_base + self.sig_slope,
                self.sig_base + self.sig_off + self.sig_slope]
        arrt = [self.sig_base,
                self.sig_base,
                self.sig_base + self.sig_slope_mean,
                self.sig_base + self.sig_slope_mean]
        input = np.vstack(arri).T
        targ = np.vstack(arrt).T
        res = mlab.demean(input)
        assert_allclose(res, targ,
                        atol=1e-08)

    def test_demean_2D_none(self):
        arri = [self.sig_off,
                self.sig_base + self.sig_off]
        arrt = [self.sig_zeros,
                self.sig_base]
        input = np.vstack(arri)
        targ = np.vstack(arrt)
        res = mlab.demean(input, axis=None)
        assert_allclose(res, targ,
                        atol=1e-08)

    def test_demean_2D_axis0(self):
        arri = [self.sig_base,
                self.sig_base + self.sig_off,
                self.sig_base + self.sig_slope,
                self.sig_base + self.sig_off + self.sig_slope]
        arrt = [self.sig_base,
                self.sig_base,
                self.sig_base + self.sig_slope_mean,
                self.sig_base + self.sig_slope_mean]
        input = np.vstack(arri).T
        targ = np.vstack(arrt).T
        res = mlab.demean(input, axis=0)
        assert_allclose(res, targ,
                        atol=1e-08)

    def test_demean_2D_axis1(self):
        arri = [self.sig_base,
                self.sig_base + self.sig_off,
                self.sig_base + self.sig_slope,
                self.sig_base + self.sig_off + self.sig_slope]
        arrt = [self.sig_base,
                self.sig_base,
                self.sig_base + self.sig_slope_mean,
                self.sig_base + self.sig_slope_mean]
        input = np.vstack(arri)
        targ = np.vstack(arrt)
        res = mlab.demean(input, axis=1)
        assert_allclose(res, targ,
                        atol=1e-08)

    def test_demean_2D_axism1(self):
        arri = [self.sig_base,
                self.sig_base + self.sig_off,
                self.sig_base + self.sig_slope,
                self.sig_base + self.sig_off + self.sig_slope]
        arrt = [self.sig_base,
                self.sig_base,
                self.sig_base + self.sig_slope_mean,
                self.sig_base + self.sig_slope_mean]
        input = np.vstack(arri)
        targ = np.vstack(arrt)
        res = mlab.demean(input, axis=-1)
        assert_allclose(res, targ,
                        atol=1e-08)

    def test_detrend_bad_key_str_ValueError(self):
        input = self.sig_slope[np.newaxis]
        assert_raises(ValueError, mlab.detrend, input, key='spam')

    def test_detrend_bad_key_var_ValueError(self):
        input = self.sig_slope[np.newaxis]
        assert_raises(ValueError, mlab.detrend, input, key=5)

    def test_detrend_mean_0D_d0_ValueError(self):
        input = 5.5
        assert_raises(ValueError, mlab.detrend_mean, input, axis=0)

    def test_detrend_0D_d0_ValueError(self):
        input = 5.5
        assert_raises(ValueError, mlab.detrend, input, axis=0)

    def test_detrend_mean_1D_d1_ValueError(self):
        input = self.sig_slope
        assert_raises(ValueError, mlab.detrend_mean, input, axis=1)

    def test_detrend_1D_d1_ValueError(self):
        input = self.sig_slope
        assert_raises(ValueError, mlab.detrend, input, axis=1)

    def test_demean_1D_d1_ValueError(self):
        input = self.sig_slope
        assert_raises(ValueError, mlab.demean, input, axis=1)

    def test_detrend_mean_2D_d2_ValueError(self):
        input = self.sig_slope[np.newaxis]
        assert_raises(ValueError, mlab.detrend_mean, input, axis=2)

    def test_detrend_2D_d2_ValueError(self):
        input = self.sig_slope[np.newaxis]
        assert_raises(ValueError, mlab.detrend, input, axis=2)

    def test_demean_2D_d2_ValueError(self):
        input = self.sig_slope[np.newaxis]
        assert_raises(ValueError, mlab.demean, input, axis=2)

    def test_detrend_linear_0D_zeros(self):
        input = 0.
        targ = 0.
        res = mlab.detrend_linear(input)
        assert_almost_equal(res, targ)

    def test_detrend_linear_0D_off(self):
        input = 5.5
        targ = 0.
        res = mlab.detrend_linear(input)
        assert_almost_equal(res, targ)

    def test_detrend_str_linear_0D_off(self):
        input = 5.5
        targ = 0.
        res = mlab.detrend(input, key='linear')
        assert_almost_equal(res, targ)

    def test_detrend_detrend_linear_0D_off(self):
        input = 5.5
        targ = 0.
        res = mlab.detrend(input, key=mlab.detrend_linear)
        assert_almost_equal(res, targ)

    def test_detrend_linear_1d_off(self):
        input = self.sig_off
        targ = self.sig_zeros
        res = mlab.detrend_linear(input)
        assert_allclose(res, targ, atol=self.atol)

    def test_detrend_linear_1d_slope(self):
        input = self.sig_slope
        targ = self.sig_zeros
        res = mlab.detrend_linear(input)
        assert_allclose(res, targ, atol=self.atol)

    def test_detrend_linear_1d_slope_off(self):
        input = self.sig_slope + self.sig_off
        targ = self.sig_zeros
        res = mlab.detrend_linear(input)
        assert_allclose(res, targ, atol=self.atol)

    def test_detrend_str_linear_1d_slope_off(self):
        input = self.sig_slope + self.sig_off
        targ = self.sig_zeros
        res = mlab.detrend(input, key='linear')
        assert_allclose(res, targ, atol=self.atol)

    def test_detrend_detrend_linear_1d_slope_off(self):
        input = self.sig_slope + self.sig_off
        targ = self.sig_zeros
        res = mlab.detrend(input, key=mlab.detrend_linear)
        assert_allclose(res, targ, atol=self.atol)

    def test_detrend_linear_1d_slope_off_list(self):
        input = self.sig_slope + self.sig_off
        targ = self.sig_zeros
        res = mlab.detrend_linear(input.tolist())
        assert_allclose(res, targ, atol=self.atol)

    def test_detrend_linear_2D_ValueError(self):
        # detrend_linear itself only supports 1D input.
        input = self.sig_slope[np.newaxis]
        assert_raises(ValueError, mlab.detrend_linear, input)

    def test_detrend_str_linear_2d_slope_off_axis0(self):
        arri = [self.sig_off,
                self.sig_slope,
                self.sig_slope + self.sig_off]
        arrt = [self.sig_zeros,
                self.sig_zeros,
                self.sig_zeros]
        input = np.vstack(arri).T
        targ = np.vstack(arrt).T
        res = mlab.detrend(input, key='linear', axis=0)
        assert_allclose(res, targ, atol=self.atol)

    def test_detrend_detrend_linear_1d_slope_off_axis0(self):
        # Renamed from *_axis1: this variant detrends along axis 0.
        arri = [self.sig_off,
                self.sig_slope,
                self.sig_slope + self.sig_off]
        arrt = [self.sig_zeros,
                self.sig_zeros,
                self.sig_zeros]
        input = np.vstack(arri).T
        targ = np.vstack(arrt).T
        res = mlab.detrend(input, key=mlab.detrend_linear, axis=0)
        assert_allclose(res, targ, atol=self.atol)

    def test_detrend_str_linear_2d_slope_off_axis1(self):
        # Renamed from *_axis0 (a name collision with the test above,
        # which this definition used to shadow): detrends along axis 1.
        arri = [self.sig_off,
                self.sig_slope,
                self.sig_slope + self.sig_off]
        arrt = [self.sig_zeros,
                self.sig_zeros,
                self.sig_zeros]
        input = np.vstack(arri)
        targ = np.vstack(arrt)
        res = mlab.detrend(input, key='linear', axis=1)
        assert_allclose(res, targ, atol=self.atol)

    def test_detrend_detrend_linear_1d_slope_off_axis1(self):
        arri = [self.sig_off,
                self.sig_slope,
                self.sig_slope + self.sig_off]
        arrt = [self.sig_zeros,
                self.sig_zeros,
                self.sig_zeros]
        input = np.vstack(arri)
        targ = np.vstack(arrt)
        res = mlab.detrend(input, key=mlab.detrend_linear, axis=1)
        assert_allclose(res, targ, atol=self.atol)
class spectral_testcase_nosig_real_onesided(CleanupTestCase):
def setUp(self):
    # Baseline spectral case: no stimulus frequencies, real-valued input,
    # one-sided spectrum.  Subclasses vary these createStim arguments.
    self.createStim(fstims=[],
                    iscomplex=False, sides='onesided', nsides=1)
def createStim(self, fstims, iscomplex, sides, nsides, len_x=None,
               NFFT_density=-1, nover_density=-1, pad_to_density=-1,
               pad_to_spectrum=-1):
    """Build the stimulus signal and all expected spectral-analysis
    parameters used by the test methods.

    Sentinel convention for the keyword parameters: ``-1`` means "use the
    computed default", ``None`` means "pass None through to mlab and use
    mlab's own default" -- each parameter therefore gets both a ``*_real``
    value (what the result should look like) and the value actually
    handed to mlab.
    """
    Fs = 100.
    x = np.arange(0, 10, 1/Fs)
    if len_x is not None:
        x = x[:len_x]
    # get the stimulus frequencies, defaulting to None
    # (fstims holds periods on input; convert to frequencies)
    fstims = [Fs/fstim for fstim in fstims]
    # get the constants, default to calculated values
    if NFFT_density is None:
        NFFT_density_real = 256
    elif NFFT_density < 0:
        NFFT_density_real = NFFT_density = 100
    else:
        NFFT_density_real = NFFT_density
    if nover_density is None:
        nover_density_real = 0
    elif nover_density < 0:
        nover_density_real = nover_density = NFFT_density_real//2
    else:
        nover_density_real = nover_density
    if pad_to_density is None:
        pad_to_density_real = NFFT_density_real
    elif pad_to_density < 0:
        # default pad: next power of two above NFFT
        pad_to_density = int(2**np.ceil(np.log2(NFFT_density_real)))
        pad_to_density_real = pad_to_density
    else:
        pad_to_density_real = pad_to_density
    if pad_to_spectrum is None:
        pad_to_spectrum_real = len(x)
    elif pad_to_spectrum < 0:
        pad_to_spectrum_real = pad_to_spectrum = len(x)
    else:
        pad_to_spectrum_real = pad_to_spectrum
    # NOTE(review): this branch looks inverted -- when pad_to_spectrum
    # is None one would expect NFFT_spectrum to fall back to len(x),
    # not the other way around.  Both branches happen to yield len(x)
    # under the default sentinel, so the tests pass; confirm intent.
    if pad_to_spectrum is None:
        NFFT_spectrum_real = NFFT_spectrum = pad_to_spectrum_real
    else:
        NFFT_spectrum_real = NFFT_spectrum = len(x)
    nover_spectrum_real = nover_spectrum = 0
    # specgram shares the density parameters
    NFFT_specgram = NFFT_density
    nover_specgram = nover_density
    pad_to_specgram = pad_to_density
    NFFT_specgram_real = NFFT_density_real
    nover_specgram_real = nover_density_real
    if nsides == 1:
        # frequencies for specgram, psd, and csd
        # need to handle even and odd differently
        if pad_to_density_real % 2:
            freqs_density = np.linspace(0, Fs/2,
                                        num=pad_to_density_real,
                                        endpoint=False)[::2]
        else:
            freqs_density = np.linspace(0, Fs/2,
                                        num=pad_to_density_real//2+1)
        # frequencies for complex, magnitude, angle, and phase spectrums
        # need to handle even and odd differently
        if pad_to_spectrum_real % 2:
            freqs_spectrum = np.linspace(0, Fs/2,
                                         num=pad_to_spectrum_real,
                                         endpoint=False)[::2]
        else:
            freqs_spectrum = np.linspace(0, Fs/2,
                                         num=pad_to_spectrum_real//2+1)
    else:
        # frequencies for specgram, psd, and csd
        # need to handle even and odd differently
        if pad_to_density_real % 2:
            freqs_density = np.linspace(-Fs/2, Fs/2,
                                        num=2*pad_to_density_real,
                                        endpoint=False)[1::2]
        else:
            freqs_density = np.linspace(-Fs/2, Fs/2,
                                        num=pad_to_density_real,
                                        endpoint=False)
        # frequencies for complex, magnitude, angle, and phase spectrums
        # need to handle even and odd differently
        if pad_to_spectrum_real % 2:
            freqs_spectrum = np.linspace(-Fs/2, Fs/2,
                                         num=2*pad_to_spectrum_real,
                                         endpoint=False)[1::2]
        else:
            freqs_spectrum = np.linspace(-Fs/2, Fs/2,
                                         num=pad_to_spectrum_real,
                                         endpoint=False)
    freqs_specgram = freqs_density
    # time points for specgram: centers of each (possibly overlapping)
    # segment
    t_start = NFFT_specgram_real//2
    t_stop = len(x) - NFFT_specgram_real//2+1
    t_step = NFFT_specgram_real - nover_specgram_real
    t_specgram = x[t_start:t_stop:t_step]
    if NFFT_specgram_real % 2:
        t_specgram += 1/Fs/2
    if len(t_specgram) == 0:
        t_specgram = np.array([NFFT_specgram_real/(2*Fs)])
    t_spectrum = np.array([NFFT_spectrum_real/(2*Fs)])
    t_density = t_specgram
    # build the stimulus: a sum of sinusoids with increasing amplitude
    # (10**i) so each stimulus dominates the previous ones
    y = np.zeros_like(x)
    for i, fstim in enumerate(fstims):
        y += np.sin(fstim * x * np.pi * 2) * 10**i
    if iscomplex:
        y = y.astype('complex')
    # stash everything on self for the test methods
    self.Fs = Fs
    self.sides = sides
    self.fstims = fstims
    self.NFFT_density = NFFT_density
    self.nover_density = nover_density
    self.pad_to_density = pad_to_density
    self.NFFT_spectrum = NFFT_spectrum
    self.nover_spectrum = nover_spectrum
    self.pad_to_spectrum = pad_to_spectrum
    self.NFFT_specgram = NFFT_specgram
    self.nover_specgram = nover_specgram
    self.pad_to_specgram = pad_to_specgram
    self.t_specgram = t_specgram
    self.t_density = t_density
    self.t_spectrum = t_spectrum
    self.y = y
    self.freqs_density = freqs_density
    self.freqs_spectrum = freqs_spectrum
    self.freqs_specgram = freqs_specgram
    self.NFFT_density_real = NFFT_density_real
def check_freqs(self, vals, targfreqs, resfreqs, fstims):
    """Check the frequency axis (monotone endpoints, matches target) and
    that every stimulus frequency shows up as a local peak in *vals*."""
    assert_true(resfreqs.argmin() == 0)
    assert_true(resfreqs.argmax() == len(resfreqs) - 1)
    assert_allclose(resfreqs, targfreqs, atol=1e-06)
    for stim_freq in fstims:
        peak = np.abs(resfreqs - stim_freq).argmin()
        assert_true(vals[peak] > vals[peak + 2])
        assert_true(vals[peak] > vals[peak - 2])
def check_maxfreq(self, spec, fsp, fstims):
    """Check that the strongest spectral peaks occur at the stimulus
    frequencies, in decreasing-amplitude order (fstims[-1] is the
    strongest stimulus by construction in createStim)."""
    # skip the test if there are no frequencies
    if len(fstims) == 0:
        return
    # if twosided, do the test for each side
    if fsp.min() < 0:
        fspa = np.abs(fsp)
        zeroind = fspa.argmin()
        self.check_maxfreq(spec[:zeroind], fspa[:zeroind], fstims)
        self.check_maxfreq(spec[zeroind:], fspa[zeroind:], fstims)
        return
    fstimst = fstims[:]
    spect = spec.copy()
    # go through each peak and make sure it is correctly the maximum peak
    while fstimst:
        maxind = spect.argmax()
        maxfreq = fsp[maxind]
        assert_almost_equal(maxfreq, fstimst[-1])
        del fstimst[-1]
        # zero out the found peak (and its neighborhood) so the next
        # argmax finds the next-strongest stimulus
        spect[maxind-5:maxind+5] = 0
def test_spectral_helper_raises_complex_same_data(self):
    # mode 'complex' requires x and y to be the same data
    assert_raises(ValueError, mlab._spectral_helper,
                  x=self.y, y=self.y+1, mode='complex')

def test_spectral_helper_raises_magnitude_same_data(self):
    # mode 'magnitude' requires x and y to be the same data
    assert_raises(ValueError, mlab._spectral_helper,
                  x=self.y, y=self.y+1, mode='magnitude')

def test_spectral_helper_raises_angle_same_data(self):
    # mode 'angle' requires x and y to be the same data
    assert_raises(ValueError, mlab._spectral_helper,
                  x=self.y, y=self.y+1, mode='angle')

def test_spectral_helper_raises_phase_same_data(self):
    # mode 'phase' requires x and y to be the same data
    assert_raises(ValueError, mlab._spectral_helper,
                  x=self.y, y=self.y+1, mode='phase')

def test_spectral_helper_raises_unknown_mode(self):
    # an unrecognized mode string must be rejected
    assert_raises(ValueError, mlab._spectral_helper,
                  x=self.y, mode='spam')

def test_spectral_helper_raises_unknown_sides(self):
    # an unrecognized sides string must be rejected
    assert_raises(ValueError, mlab._spectral_helper,
                  x=self.y, y=self.y, sides='eggs')

def test_spectral_helper_raises_noverlap_gt_NFFT(self):
    # noverlap cannot be larger than NFFT
    assert_raises(ValueError, mlab._spectral_helper,
                  x=self.y, y=self.y, NFFT=10, noverlap=20)

def test_spectral_helper_raises_noverlap_eq_NFFT(self):
    # noverlap cannot be equal to NFFT either
    assert_raises(ValueError, mlab._spectral_helper,
                  x=self.y, NFFT=10, noverlap=10)

def test_spectral_helper_raises_winlen_ne_NFFT(self):
    # an explicit window array must have exactly NFFT elements
    assert_raises(ValueError, mlab._spectral_helper,
                  x=self.y, y=self.y, NFFT=10, window=np.ones(9))

def test_single_spectrum_helper_raises_mode_default(self):
    # mode 'default' is not valid for _single_spectrum_helper
    assert_raises(ValueError, mlab._single_spectrum_helper,
                  x=self.y, mode='default')

def test_single_spectrum_helper_raises_mode_psd(self):
    # mode 'psd' is not valid for _single_spectrum_helper
    assert_raises(ValueError, mlab._single_spectrum_helper,
                  x=self.y, mode='psd')
def test_spectral_helper_psd(self):
    """_spectral_helper in 'psd' mode returns the expected frequency and
    time axes and a (n_freqs, n_times) spec array."""
    targ_freqs = self.freqs_density
    params = dict(NFFT=self.NFFT_density,
                  Fs=self.Fs,
                  noverlap=self.nover_density,
                  pad_to=self.pad_to_density,
                  sides=self.sides,
                  mode='psd')
    spec, fsp, t = mlab._spectral_helper(x=self.y, y=self.y, **params)
    assert_allclose(fsp, targ_freqs, atol=1e-06)
    assert_allclose(t, self.t_density, atol=1e-06)
    assert_equal(spec.shape[0], targ_freqs.shape[0])
    assert_equal(spec.shape[1], self.t_specgram.shape[0])
def test_spectral_helper_magnitude_specgram(self):
    """_spectral_helper in 'magnitude' mode with specgram parameters
    returns the expected axes and spec shape."""
    targ_freqs = self.freqs_specgram
    params = dict(NFFT=self.NFFT_specgram,
                  Fs=self.Fs,
                  noverlap=self.nover_specgram,
                  pad_to=self.pad_to_specgram,
                  sides=self.sides,
                  mode='magnitude')
    spec, fsp, t = mlab._spectral_helper(x=self.y, y=self.y, **params)
    assert_allclose(fsp, targ_freqs, atol=1e-06)
    assert_allclose(t, self.t_specgram, atol=1e-06)
    assert_equal(spec.shape[0], targ_freqs.shape[0])
    assert_equal(spec.shape[1], self.t_specgram.shape[0])
def test_spectral_helper_magnitude_magnitude_spectrum(self):
    """_spectral_helper in 'magnitude' mode with whole-signal spectrum
    parameters yields a single time slice."""
    targ_freqs = self.freqs_spectrum
    params = dict(NFFT=self.NFFT_spectrum,
                  Fs=self.Fs,
                  noverlap=self.nover_spectrum,
                  pad_to=self.pad_to_spectrum,
                  sides=self.sides,
                  mode='magnitude')
    spec, fsp, t = mlab._spectral_helper(x=self.y, y=self.y, **params)
    assert_allclose(fsp, targ_freqs, atol=1e-06)
    assert_allclose(t, self.t_spectrum, atol=1e-06)
    assert_equal(spec.shape[0], targ_freqs.shape[0])
    # the whole signal is one segment, so exactly one time slice
    assert_equal(spec.shape[1], 1)
def test_csd(self):
    """csd of the signal against an offset copy returns the expected
    frequency axis and a spectrum of matching shape."""
    targ_freqs = self.freqs_density
    params = dict(NFFT=self.NFFT_density,
                  Fs=self.Fs,
                  noverlap=self.nover_density,
                  pad_to=self.pad_to_density,
                  sides=self.sides)
    spec, fsp = mlab.csd(x=self.y, y=self.y+1, **params)
    assert_allclose(fsp, targ_freqs, atol=1e-06)
    assert_equal(spec.shape, targ_freqs.shape)
def test_psd(self):
    """psd returns a spectrum on the expected frequency grid, with a
    peak at every stimulus frequency."""
    params = dict(NFFT=self.NFFT_density,
                  Fs=self.Fs,
                  noverlap=self.nover_density,
                  pad_to=self.pad_to_density,
                  sides=self.sides)
    spec, fsp = mlab.psd(x=self.y, **params)
    assert_equal(spec.shape, self.freqs_density.shape)
    self.check_freqs(spec, self.freqs_density, fsp, self.fstims)
def test_psd_detrend_mean_func_offset(self):
    """psd with detrend=mlab.detrend_mean must remove per-chunk offsets.

    Builds a signal of alternating constant-offset chunks, each exactly
    NFFT long, so mean detrending per chunk reduces it to zeros and its
    spectrum matches an all-zero control.  The ``ydatab`` variant
    interleaves the offsets across chunk boundaries, so per-chunk mean
    detrending cannot remove them and its spectrum must differ.

    (Also drops an unused ``freqs`` local left over from copy-paste.)
    """
    if self.NFFT_density is None:
        return
    ydata = np.zeros(self.NFFT_density)
    ydata1 = ydata+5
    ydata2 = ydata+3.3
    ydata = np.vstack([ydata1, ydata2])
    ydata = np.tile(ydata, (20, 1))
    ydatab = ydata.T.flatten()  # offsets straddle chunk boundaries
    ydata = ydata.flatten()
    ycontrol = np.zeros_like(ydata)
    spec_g, fsp_g = mlab.psd(x=ydata,
                             NFFT=self.NFFT_density,
                             Fs=self.Fs,
                             noverlap=0,
                             sides=self.sides,
                             detrend=mlab.detrend_mean)
    spec_b, fsp_b = mlab.psd(x=ydatab,
                             NFFT=self.NFFT_density,
                             Fs=self.Fs,
                             noverlap=0,
                             sides=self.sides,
                             detrend=mlab.detrend_mean)
    spec_c, fsp_c = mlab.psd(x=ycontrol,
                             NFFT=self.NFFT_density,
                             Fs=self.Fs,
                             noverlap=0,
                             sides=self.sides)
    assert_array_equal(fsp_g, fsp_c)
    assert_array_equal(fsp_b, fsp_c)
    assert_allclose(spec_g, spec_c, atol=1e-08)
    # these should not be almost equal
    assert_raises(AssertionError,
                  assert_allclose, spec_b, spec_c, atol=1e-08)
def test_psd_detrend_mean_str_offset(self):
    """psd with detrend='mean' (string form) must remove per-chunk
    offsets, matching the behavior of detrend=mlab.detrend_mean.

    Same construction as test_psd_detrend_mean_func_offset: chunked
    offsets detrend to zero; the interleaved variant must not.
    (Also drops an unused ``freqs`` local left over from copy-paste.)
    """
    if self.NFFT_density is None:
        return
    ydata = np.zeros(self.NFFT_density)
    ydata1 = ydata+5
    ydata2 = ydata+3.3
    ydata = np.vstack([ydata1, ydata2])
    ydata = np.tile(ydata, (20, 1))
    ydatab = ydata.T.flatten()  # offsets straddle chunk boundaries
    ydata = ydata.flatten()
    ycontrol = np.zeros_like(ydata)
    spec_g, fsp_g = mlab.psd(x=ydata,
                             NFFT=self.NFFT_density,
                             Fs=self.Fs,
                             noverlap=0,
                             sides=self.sides,
                             detrend='mean')
    spec_b, fsp_b = mlab.psd(x=ydatab,
                             NFFT=self.NFFT_density,
                             Fs=self.Fs,
                             noverlap=0,
                             sides=self.sides,
                             detrend='mean')
    spec_c, fsp_c = mlab.psd(x=ycontrol,
                             NFFT=self.NFFT_density,
                             Fs=self.Fs,
                             noverlap=0,
                             sides=self.sides)
    assert_array_equal(fsp_g, fsp_c)
    assert_array_equal(fsp_b, fsp_c)
    assert_allclose(spec_g, spec_c, atol=1e-08)
    # these should not be almost equal
    assert_raises(AssertionError,
                  assert_allclose, spec_b, spec_c, atol=1e-08)
def test_psd_detrend_linear_func_trend(self):
    """Check psd with ``detrend=mlab.detrend_linear`` (callable form).

    Each NFFT-length segment of ``ydata`` is a pure linear ramp plus
    an offset, so linear detrending must reduce it to the zero-signal
    control; the transposed layout ("bad") mixes the ramps within a
    segment and must not match.
    """
    if self.NFFT_density is None:
        return
    ydata = np.arange(self.NFFT_density)
    ydata1 = ydata+5
    ydata2 = ydata+3.3
    ydata = np.vstack([ydata1, ydata2])
    ydata = np.tile(ydata, (20, 1))
    # transposing breaks the per-segment linearity
    ydatab = ydata.T.flatten()
    ydata = ydata.flatten()
    ycontrol = np.zeros_like(ydata)
    spec_g, fsp_g = mlab.psd(x=ydata,
                             NFFT=self.NFFT_density,
                             Fs=self.Fs,
                             noverlap=0,
                             sides=self.sides,
                             detrend=mlab.detrend_linear)
    spec_b, fsp_b = mlab.psd(x=ydatab,
                             NFFT=self.NFFT_density,
                             Fs=self.Fs,
                             noverlap=0,
                             sides=self.sides,
                             detrend=mlab.detrend_linear)
    spec_c, fsp_c = mlab.psd(x=ycontrol,
                             NFFT=self.NFFT_density,
                             Fs=self.Fs,
                             noverlap=0,
                             sides=self.sides)
    assert_array_equal(fsp_g, fsp_c)
    assert_array_equal(fsp_b, fsp_c)
    assert_allclose(spec_g, spec_c, atol=1e-08)
    # these should not be almost equal
    assert_raises(AssertionError,
                  assert_allclose, spec_b, spec_c, atol=1e-08)
def test_psd_detrend_linear_str_trend(self):
    """Check psd with ``detrend='linear'`` (string form).

    Same construction as the callable-detrend test: per-segment linear
    ramps must detrend to the zero-signal control; the transposed
    layout must not.
    """
    if self.NFFT_density is None:
        return
    ydata = np.arange(self.NFFT_density)
    ydata1 = ydata+5
    ydata2 = ydata+3.3
    ydata = np.vstack([ydata1, ydata2])
    ydata = np.tile(ydata, (20, 1))
    # transposing breaks the per-segment linearity
    ydatab = ydata.T.flatten()
    ydata = ydata.flatten()
    ycontrol = np.zeros_like(ydata)
    spec_g, fsp_g = mlab.psd(x=ydata,
                             NFFT=self.NFFT_density,
                             Fs=self.Fs,
                             noverlap=0,
                             sides=self.sides,
                             detrend='linear')
    spec_b, fsp_b = mlab.psd(x=ydatab,
                             NFFT=self.NFFT_density,
                             Fs=self.Fs,
                             noverlap=0,
                             sides=self.sides,
                             detrend='linear')
    spec_c, fsp_c = mlab.psd(x=ycontrol,
                             NFFT=self.NFFT_density,
                             Fs=self.Fs,
                             noverlap=0,
                             sides=self.sides)
    assert_array_equal(fsp_g, fsp_c)
    assert_array_equal(fsp_b, fsp_c)
    assert_allclose(spec_g, spec_c, atol=1e-08)
    # these should not be almost equal
    assert_raises(AssertionError,
                  assert_allclose, spec_b, spec_c, atol=1e-08)
def test_psd_window_hanning(self):
    """Check psd with ``window=mlab.window_hanning``.

    A windowed psd of the raw data must equal an unwindowed psd of
    pre-windowed data once the window-power normalization is undone;
    a transposed segment layout must not match.
    """
    if self.NFFT_density is None:
        return
    ydata = np.arange(self.NFFT_density)
    ydata1 = ydata+5
    ydata2 = ydata+3.3
    ycontrol1, windowVals = mlab.apply_window(ydata1,
                                              mlab.window_hanning,
                                              return_window=True)
    ycontrol2 = mlab.window_hanning(ydata2)
    ydata = np.vstack([ydata1, ydata2])
    ycontrol = np.vstack([ycontrol1, ycontrol2])
    ydata = np.tile(ydata, (20, 1))
    ycontrol = np.tile(ycontrol, (20, 1))
    ydatab = ydata.T.flatten()
    ydataf = ydata.flatten()
    ycontrol = ycontrol.flatten()
    spec_g, fsp_g = mlab.psd(x=ydataf,
                             NFFT=self.NFFT_density,
                             Fs=self.Fs,
                             noverlap=0,
                             sides=self.sides,
                             window=mlab.window_hanning)
    spec_b, fsp_b = mlab.psd(x=ydatab,
                             NFFT=self.NFFT_density,
                             Fs=self.Fs,
                             noverlap=0,
                             sides=self.sides,
                             window=mlab.window_hanning)
    spec_c, fsp_c = mlab.psd(x=ycontrol,
                             NFFT=self.NFFT_density,
                             Fs=self.Fs,
                             noverlap=0,
                             sides=self.sides,
                             window=mlab.window_none)
    # undo the window-power normalization that psd applies internally
    # so the pre-windowed control is directly comparable
    spec_c *= len(ycontrol1)/(np.abs(windowVals)**2).sum()
    assert_array_equal(fsp_g, fsp_c)
    assert_array_equal(fsp_b, fsp_c)
    assert_allclose(spec_g, spec_c, atol=1e-08)
    # these should not be almost equal
    assert_raises(AssertionError,
                  assert_allclose, spec_b, spec_c, atol=1e-08)
def test_psd_window_hanning_detrend_linear(self):
    """Check psd combining a Hanning window with linear detrending.

    The input segments are linear ramps, so after linear detrending
    and windowing the result must equal an unwindowed psd of a
    windowed zero signal (once the window-power normalization is
    undone); the transposed layout must not match.
    """
    if self.NFFT_density is None:
        return
    ydata = np.arange(self.NFFT_density)
    ycontrol = np.zeros(self.NFFT_density)
    ydata1 = ydata+5
    ydata2 = ydata+3.3
    ycontrol1 = ycontrol
    ycontrol2 = ycontrol
    ycontrol1, windowVals = mlab.apply_window(ycontrol1,
                                              mlab.window_hanning,
                                              return_window=True)
    ycontrol2 = mlab.window_hanning(ycontrol2)
    ydata = np.vstack([ydata1, ydata2])
    ycontrol = np.vstack([ycontrol1, ycontrol2])
    ydata = np.tile(ydata, (20, 1))
    ycontrol = np.tile(ycontrol, (20, 1))
    ydatab = ydata.T.flatten()
    ydataf = ydata.flatten()
    ycontrol = ycontrol.flatten()
    spec_g, fsp_g = mlab.psd(x=ydataf,
                             NFFT=self.NFFT_density,
                             Fs=self.Fs,
                             noverlap=0,
                             sides=self.sides,
                             detrend=mlab.detrend_linear,
                             window=mlab.window_hanning)
    spec_b, fsp_b = mlab.psd(x=ydatab,
                             NFFT=self.NFFT_density,
                             Fs=self.Fs,
                             noverlap=0,
                             sides=self.sides,
                             detrend=mlab.detrend_linear,
                             window=mlab.window_hanning)
    spec_c, fsp_c = mlab.psd(x=ycontrol,
                             NFFT=self.NFFT_density,
                             Fs=self.Fs,
                             noverlap=0,
                             sides=self.sides,
                             window=mlab.window_none)
    # undo the window-power normalization that psd applies internally
    spec_c *= len(ycontrol1)/(np.abs(windowVals)**2).sum()
    assert_array_equal(fsp_g, fsp_c)
    assert_array_equal(fsp_b, fsp_c)
    assert_allclose(spec_g, spec_c, atol=1e-08)
    # these should not be almost equal
    assert_raises(AssertionError,
                  assert_allclose, spec_b, spec_c, atol=1e-08)
def test_psd_windowarray(self):
    """psd must accept an explicit ndarray window (here all ones)."""
    freqs = self.freqs_density
    spec, fsp = mlab.psd(x=self.y,
                         NFFT=self.NFFT_density,
                         Fs=self.Fs,
                         noverlap=self.nover_density,
                         pad_to=self.pad_to_density,
                         sides=self.sides,
                         window=np.ones(self.NFFT_density_real))
    assert_allclose(fsp, freqs, atol=1e-06)
    assert_equal(spec.shape, freqs.shape)
def test_psd_windowarray_scale_by_freq(self):
    """Check the ``scale_by_freq`` flag of psd.

    ``scale_by_freq=True`` must match the default, and the scaled and
    unscaled results must differ by exactly the window/Fs
    normalization factor.
    """
    win = mlab.window_hanning(np.ones(self.NFFT_density_real))
    spec, fsp = mlab.psd(x=self.y,
                         NFFT=self.NFFT_density,
                         Fs=self.Fs,
                         noverlap=self.nover_density,
                         pad_to=self.pad_to_density,
                         sides=self.sides,
                         window=mlab.window_hanning)
    spec_s, fsp_s = mlab.psd(x=self.y,
                             NFFT=self.NFFT_density,
                             Fs=self.Fs,
                             noverlap=self.nover_density,
                             pad_to=self.pad_to_density,
                             sides=self.sides,
                             window=mlab.window_hanning,
                             scale_by_freq=True)
    spec_n, fsp_n = mlab.psd(x=self.y,
                             NFFT=self.NFFT_density,
                             Fs=self.Fs,
                             noverlap=self.nover_density,
                             pad_to=self.pad_to_density,
                             sides=self.sides,
                             window=mlab.window_hanning,
                             scale_by_freq=False)
    assert_array_equal(fsp, fsp_s)
    assert_array_equal(fsp, fsp_n)
    assert_array_equal(spec, spec_s)
    # scaled and unscaled spectra differ by the normalization factor
    assert_allclose(spec_s*(win**2).sum(),
                    spec_n/self.Fs*win.sum()**2,
                    atol=1e-08)
def test_complex_spectrum(self):
    """complex_spectrum must return the expected frequency axis and
    a spectrum of matching shape."""
    freqs = self.freqs_spectrum
    spec, fsp = mlab.complex_spectrum(x=self.y,
                                      Fs=self.Fs,
                                      sides=self.sides,
                                      pad_to=self.pad_to_spectrum)
    assert_allclose(fsp, freqs, atol=1e-06)
    assert_equal(spec.shape, freqs.shape)
def test_magnitude_spectrum(self):
    """magnitude_spectrum must have the expected shape and show peaks
    at the stimulus frequencies."""
    freqs = self.freqs_spectrum
    spec, fsp = mlab.magnitude_spectrum(x=self.y,
                                        Fs=self.Fs,
                                        sides=self.sides,
                                        pad_to=self.pad_to_spectrum)
    assert_equal(spec.shape, freqs.shape)
    self.check_maxfreq(spec, fsp, self.fstims)
    self.check_freqs(spec, freqs, fsp, self.fstims)
def test_angle_spectrum(self):
    """angle_spectrum must return the expected frequency axis and a
    spectrum of matching shape."""
    freqs = self.freqs_spectrum
    spec, fsp = mlab.angle_spectrum(x=self.y,
                                    Fs=self.Fs,
                                    sides=self.sides,
                                    pad_to=self.pad_to_spectrum)
    assert_allclose(fsp, freqs, atol=1e-06)
    assert_equal(spec.shape, freqs.shape)
def test_phase_spectrum(self):
    """phase_spectrum must return the expected frequency axis and a
    spectrum of matching shape."""
    freqs = self.freqs_spectrum
    spec, fsp = mlab.phase_spectrum(x=self.y,
                                    Fs=self.Fs,
                                    sides=self.sides,
                                    pad_to=self.pad_to_spectrum)
    assert_allclose(fsp, freqs, atol=1e-06)
    assert_equal(spec.shape, freqs.shape)
def test_specgram_auto(self):
    """specgram with no mode argument: check frequency/time axes,
    output shape, time-invariance, and stimulus peaks."""
    freqs = self.freqs_specgram
    spec, fsp, t = mlab.specgram(x=self.y,
                                 NFFT=self.NFFT_specgram,
                                 Fs=self.Fs,
                                 noverlap=self.nover_specgram,
                                 pad_to=self.pad_to_specgram,
                                 sides=self.sides)
    specm = np.mean(spec, axis=1)
    assert_allclose(fsp, freqs, atol=1e-06)
    assert_allclose(t, self.t_specgram, atol=1e-06)
    assert_equal(spec.shape[0], freqs.shape[0])
    assert_equal(spec.shape[1], self.t_specgram.shape[0])
    # since we are using a single freq, all time slices
    # should be about the same
    if np.abs(spec.max()) != 0:
        assert_allclose(np.diff(spec, axis=1).max()/np.abs(spec.max()), 0,
                        atol=1e-02)
    self.check_freqs(specm, freqs, fsp, self.fstims)
def test_specgram_default(self):
    """specgram with ``mode='default'``: check axes, shape,
    time-invariance, and stimulus peaks."""
    freqs = self.freqs_specgram
    spec, fsp, t = mlab.specgram(x=self.y,
                                 NFFT=self.NFFT_specgram,
                                 Fs=self.Fs,
                                 noverlap=self.nover_specgram,
                                 pad_to=self.pad_to_specgram,
                                 sides=self.sides,
                                 mode='default')
    specm = np.mean(spec, axis=1)
    assert_allclose(fsp, freqs, atol=1e-06)
    assert_allclose(t, self.t_specgram, atol=1e-06)
    assert_equal(spec.shape[0], freqs.shape[0])
    assert_equal(spec.shape[1], self.t_specgram.shape[0])
    # since we are using a single freq, all time slices
    # should be about the same
    if np.abs(spec.max()) != 0:
        assert_allclose(np.diff(spec, axis=1).max()/np.abs(spec.max()), 0,
                        atol=1e-02)
    self.check_freqs(specm, freqs, fsp, self.fstims)
def test_specgram_psd(self):
    """specgram with ``mode='psd'``: check axes, shape,
    time-invariance, and stimulus peaks."""
    freqs = self.freqs_specgram
    spec, fsp, t = mlab.specgram(x=self.y,
                                 NFFT=self.NFFT_specgram,
                                 Fs=self.Fs,
                                 noverlap=self.nover_specgram,
                                 pad_to=self.pad_to_specgram,
                                 sides=self.sides,
                                 mode='psd')
    specm = np.mean(spec, axis=1)
    assert_allclose(fsp, freqs, atol=1e-06)
    assert_allclose(t, self.t_specgram, atol=1e-06)
    assert_equal(spec.shape[0], freqs.shape[0])
    assert_equal(spec.shape[1], self.t_specgram.shape[0])
    # since we are using a single freq, all time slices
    # should be about the same
    if np.abs(spec.max()) != 0:
        assert_allclose(np.diff(spec, axis=1).max()/np.abs(spec.max()), 0,
                        atol=1e-02)
    self.check_freqs(specm, freqs, fsp, self.fstims)
def test_specgram_complex(self):
    """specgram with ``mode='complex'``: check axes, shape, and
    stimulus peaks in the time-averaged magnitude."""
    freqs = self.freqs_specgram
    spec, fsp, t = mlab.specgram(x=self.y,
                                 NFFT=self.NFFT_specgram,
                                 Fs=self.Fs,
                                 noverlap=self.nover_specgram,
                                 pad_to=self.pad_to_specgram,
                                 sides=self.sides,
                                 mode='complex')
    # complex output: take the magnitude before averaging over time
    specm = np.mean(np.abs(spec), axis=1)
    assert_allclose(fsp, freqs, atol=1e-06)
    assert_allclose(t, self.t_specgram, atol=1e-06)
    assert_equal(spec.shape[0], freqs.shape[0])
    assert_equal(spec.shape[1], self.t_specgram.shape[0])
    self.check_freqs(specm, freqs, fsp, self.fstims)
def test_specgram_magnitude(self):
    """specgram with ``mode='magnitude'``: check axes, shape,
    time-invariance, and stimulus peaks."""
    freqs = self.freqs_specgram
    spec, fsp, t = mlab.specgram(x=self.y,
                                 NFFT=self.NFFT_specgram,
                                 Fs=self.Fs,
                                 noverlap=self.nover_specgram,
                                 pad_to=self.pad_to_specgram,
                                 sides=self.sides,
                                 mode='magnitude')
    specm = np.mean(spec, axis=1)
    assert_allclose(fsp, freqs, atol=1e-06)
    assert_allclose(t, self.t_specgram, atol=1e-06)
    assert_equal(spec.shape[0], freqs.shape[0])
    assert_equal(spec.shape[1], self.t_specgram.shape[0])
    # since we are using a single freq, all time slices
    # should be about the same
    if np.abs(spec.max()) != 0:
        assert_allclose(np.diff(spec, axis=1).max()/np.abs(spec.max()), 0,
                        atol=1e-02)
    self.check_freqs(specm, freqs, fsp, self.fstims)
def test_specgram_angle(self):
    """specgram with ``mode='angle'``: check axes and output shape
    (no peak check -- angle has no stimulus peaks)."""
    freqs = self.freqs_specgram
    spec, fsp, t = mlab.specgram(x=self.y,
                                 NFFT=self.NFFT_specgram,
                                 Fs=self.Fs,
                                 noverlap=self.nover_specgram,
                                 pad_to=self.pad_to_specgram,
                                 sides=self.sides,
                                 mode='angle')
    specm = np.mean(spec, axis=1)
    assert_allclose(fsp, freqs, atol=1e-06)
    assert_allclose(t, self.t_specgram, atol=1e-06)
    assert_equal(spec.shape[0], freqs.shape[0])
    assert_equal(spec.shape[1], self.t_specgram.shape[0])
def test_specgram_phase(self):
    """specgram with ``mode='phase'``: check axes and output shape
    (no peak check -- phase has no stimulus peaks)."""
    freqs = self.freqs_specgram
    spec, fsp, t = mlab.specgram(x=self.y,
                                 NFFT=self.NFFT_specgram,
                                 Fs=self.Fs,
                                 noverlap=self.nover_specgram,
                                 pad_to=self.pad_to_specgram,
                                 sides=self.sides,
                                 mode='phase')
    specm = np.mean(spec, axis=1)
    assert_allclose(fsp, freqs, atol=1e-06)
    assert_allclose(t, self.t_specgram, atol=1e-06)
    assert_equal(spec.shape[0], freqs.shape[0])
    assert_equal(spec.shape[1], self.t_specgram.shape[0])
def test_psd_csd_equal(self):
    """The cross-spectral density of a signal with itself must equal
    its power spectral density."""
    Pxx, freqsxx = mlab.psd(x=self.y,
                            NFFT=self.NFFT_density,
                            Fs=self.Fs,
                            noverlap=self.nover_density,
                            pad_to=self.pad_to_density,
                            sides=self.sides)
    Pxy, freqsxy = mlab.csd(x=self.y, y=self.y,
                            NFFT=self.NFFT_density,
                            Fs=self.Fs,
                            noverlap=self.nover_density,
                            pad_to=self.pad_to_density,
                            sides=self.sides)
    assert_array_equal(Pxx, Pxy)
    assert_array_equal(freqsxx, freqsxy)
def test_specgram_auto_default_equal(self):
    """Test that mlab.specgram without a mode argument and with
    ``mode='default'`` give identical results."""
    speca, freqspeca, ta = mlab.specgram(x=self.y,
                                         NFFT=self.NFFT_specgram,
                                         Fs=self.Fs,
                                         noverlap=self.nover_specgram,
                                         pad_to=self.pad_to_specgram,
                                         sides=self.sides)
    specb, freqspecb, tb = mlab.specgram(x=self.y,
                                         NFFT=self.NFFT_specgram,
                                         Fs=self.Fs,
                                         noverlap=self.nover_specgram,
                                         pad_to=self.pad_to_specgram,
                                         sides=self.sides,
                                         mode='default')
    assert_array_equal(speca, specb)
    assert_array_equal(freqspeca, freqspecb)
    assert_array_equal(ta, tb)
def test_specgram_auto_psd_equal(self):
    """Test that mlab.specgram without a mode argument and with
    ``mode='psd'`` give identical results."""
    speca, freqspeca, ta = mlab.specgram(x=self.y,
                                         NFFT=self.NFFT_specgram,
                                         Fs=self.Fs,
                                         noverlap=self.nover_specgram,
                                         pad_to=self.pad_to_specgram,
                                         sides=self.sides)
    specc, freqspecc, tc = mlab.specgram(x=self.y,
                                         NFFT=self.NFFT_specgram,
                                         Fs=self.Fs,
                                         noverlap=self.nover_specgram,
                                         pad_to=self.pad_to_specgram,
                                         sides=self.sides,
                                         mode='psd')
    assert_array_equal(speca, specc)
    assert_array_equal(freqspeca, freqspecc)
    assert_array_equal(ta, tc)
def test_specgram_complex_mag_equivalent(self):
    """``mode='magnitude'`` must equal the absolute value of
    ``mode='complex'``."""
    specc, freqspecc, tc = mlab.specgram(x=self.y,
                                         NFFT=self.NFFT_specgram,
                                         Fs=self.Fs,
                                         noverlap=self.nover_specgram,
                                         pad_to=self.pad_to_specgram,
                                         sides=self.sides,
                                         mode='complex')
    specm, freqspecm, tm = mlab.specgram(x=self.y,
                                         NFFT=self.NFFT_specgram,
                                         Fs=self.Fs,
                                         noverlap=self.nover_specgram,
                                         pad_to=self.pad_to_specgram,
                                         sides=self.sides,
                                         mode='magnitude')
    assert_array_equal(freqspecc, freqspecm)
    assert_array_equal(tc, tm)
    assert_allclose(np.abs(specc), specm, atol=1e-06)
def test_specgram_complex_angle_equivalent(self):
    """``mode='angle'`` must equal ``np.angle`` of ``mode='complex'``."""
    specc, freqspecc, tc = mlab.specgram(x=self.y,
                                         NFFT=self.NFFT_specgram,
                                         Fs=self.Fs,
                                         noverlap=self.nover_specgram,
                                         pad_to=self.pad_to_specgram,
                                         sides=self.sides,
                                         mode='complex')
    speca, freqspeca, ta = mlab.specgram(x=self.y,
                                         NFFT=self.NFFT_specgram,
                                         Fs=self.Fs,
                                         noverlap=self.nover_specgram,
                                         pad_to=self.pad_to_specgram,
                                         sides=self.sides,
                                         mode='angle')
    assert_array_equal(freqspecc, freqspeca)
    assert_array_equal(tc, ta)
    assert_allclose(np.angle(specc), speca, atol=1e-06)
def test_specgram_complex_phase_equivalent(self):
    """``mode='phase'`` must equal the frequency-axis-unwrapped angle
    of ``mode='complex'``."""
    specc, freqspecc, tc = mlab.specgram(x=self.y,
                                         NFFT=self.NFFT_specgram,
                                         Fs=self.Fs,
                                         noverlap=self.nover_specgram,
                                         pad_to=self.pad_to_specgram,
                                         sides=self.sides,
                                         mode='complex')
    specp, freqspecp, tp = mlab.specgram(x=self.y,
                                         NFFT=self.NFFT_specgram,
                                         Fs=self.Fs,
                                         noverlap=self.nover_specgram,
                                         pad_to=self.pad_to_specgram,
                                         sides=self.sides,
                                         mode='phase')
    assert_array_equal(freqspecc, freqspecp)
    assert_array_equal(tc, tp)
    assert_allclose(np.unwrap(np.angle(specc), axis=0), specp,
                    atol=1e-06)
def test_specgram_angle_phase_equivalent(self):
    """``mode='phase'`` must equal the frequency-axis-unwrapped
    ``mode='angle'``."""
    speca, freqspeca, ta = mlab.specgram(x=self.y,
                                         NFFT=self.NFFT_specgram,
                                         Fs=self.Fs,
                                         noverlap=self.nover_specgram,
                                         pad_to=self.pad_to_specgram,
                                         sides=self.sides,
                                         mode='angle')
    specp, freqspecp, tp = mlab.specgram(x=self.y,
                                         NFFT=self.NFFT_specgram,
                                         Fs=self.Fs,
                                         noverlap=self.nover_specgram,
                                         pad_to=self.pad_to_specgram,
                                         sides=self.sides,
                                         mode='phase')
    assert_array_equal(freqspeca, freqspecp)
    assert_array_equal(ta, tp)
    assert_allclose(np.unwrap(speca, axis=0), specp,
                    atol=1e-06)
def test_psd_windowarray_equal(self):
    """Passing the Hanning window explicitly as an ndarray must match
    psd's default (window_hanning) result."""
    win = mlab.window_hanning(np.ones(self.NFFT_density_real))
    speca, fspa = mlab.psd(x=self.y,
                           NFFT=self.NFFT_density,
                           Fs=self.Fs,
                           noverlap=self.nover_density,
                           pad_to=self.pad_to_density,
                           sides=self.sides,
                           window=win)
    specb, fspb = mlab.psd(x=self.y,
                           NFFT=self.NFFT_density,
                           Fs=self.Fs,
                           noverlap=self.nover_density,
                           pad_to=self.pad_to_density,
                           sides=self.sides)
    assert_array_equal(fspa, fspb)
    assert_allclose(speca, specb, atol=1e-08)
# ---------------------------------------------------------------------
# Parametrized variants of the spectral test case: each subclass only
# overrides setUp() to re-run the whole suite with a different
# combination of stimulus frequencies, real/complex input, sides,
# NFFT/pad_to handling, input length, and overlap.
# ---------------------------------------------------------------------
class spectral_testcase_nosig_real_twosided(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        iscomplex=False, sides='twosided', nsides=2)


class spectral_testcase_nosig_real_defaultsided(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        iscomplex=False, sides='default', nsides=1)


class spectral_testcase_nosig_complex_onesided(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        iscomplex=True, sides='onesided', nsides=1)


class spectral_testcase_nosig_complex_twosided(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        iscomplex=True, sides='twosided', nsides=2)


class spectral_testcase_nosig_complex_defaultsided(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        iscomplex=True, sides='default', nsides=2)


class spectral_testcase_Fs4_real_onesided(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[4],
                        iscomplex=False, sides='onesided', nsides=1)


class spectral_testcase_Fs4_real_twosided(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[4],
                        iscomplex=False, sides='twosided', nsides=2)


class spectral_testcase_Fs4_real_defaultsided(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[4],
                        iscomplex=False, sides='default', nsides=1)


class spectral_testcase_Fs4_complex_onesided(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[4],
                        iscomplex=True, sides='onesided', nsides=1)


class spectral_testcase_Fs4_complex_twosided(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[4],
                        iscomplex=True, sides='twosided', nsides=2)


class spectral_testcase_Fs4_complex_defaultsided(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[4],
                        iscomplex=True, sides='default', nsides=2)


class spectral_testcase_FsAll_real_onesided(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[4, 5, 10],
                        iscomplex=False, sides='onesided', nsides=1)


class spectral_testcase_FsAll_real_twosided(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[4, 5, 10],
                        iscomplex=False, sides='twosided', nsides=2)


class spectral_testcase_FsAll_real_defaultsided(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[4, 5, 10],
                        iscomplex=False, sides='default', nsides=1)


class spectral_testcase_FsAll_complex_onesided(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[4, 5, 10],
                        iscomplex=True, sides='onesided', nsides=1)


class spectral_testcase_FsAll_complex_twosided(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[4, 5, 10],
                        iscomplex=True, sides='twosided', nsides=2)


class spectral_testcase_FsAll_complex_defaultsided(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[4, 5, 10],
                        iscomplex=True, sides='default', nsides=2)


class spectral_testcase_nosig_real_onesided_noNFFT(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        NFFT_density=None, pad_to_spectrum=None,
                        iscomplex=False, sides='onesided', nsides=1)


class spectral_testcase_nosig_real_twosided_noNFFT(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        NFFT_density=None, pad_to_spectrum=None,
                        iscomplex=False, sides='twosided', nsides=2)


class spectral_testcase_nosig_real_defaultsided_noNFFT(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        NFFT_density=None, pad_to_spectrum=None,
                        iscomplex=False, sides='default', nsides=1)


class spectral_testcase_nosig_complex_onesided_noNFFT(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        NFFT_density=None, pad_to_spectrum=None,
                        iscomplex=True, sides='onesided', nsides=1)


class spectral_testcase_nosig_complex_twosided_noNFFT(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        NFFT_density=None, pad_to_spectrum=None,
                        iscomplex=True, sides='twosided', nsides=2)


class spectral_testcase_nosig_complex_defaultsided_noNFFT(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        NFFT_density=None, pad_to_spectrum=None,
                        iscomplex=True, sides='default', nsides=2)


class spectral_testcase_nosig_real_onesided_nopad_to(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        pad_to_density=None, pad_to_spectrum=None,
                        iscomplex=False, sides='onesided', nsides=1)


class spectral_testcase_nosig_real_twosided_nopad_to(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        pad_to_density=None, pad_to_spectrum=None,
                        iscomplex=False, sides='twosided', nsides=2)


class spectral_testcase_nosig_real_defaultsided_nopad_to(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        pad_to_density=None, pad_to_spectrum=None,
                        iscomplex=False, sides='default', nsides=1)


class spectral_testcase_nosig_complex_onesided_nopad_to(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        pad_to_density=None, pad_to_spectrum=None,
                        iscomplex=True, sides='onesided', nsides=1)


class spectral_testcase_nosig_complex_twosided_nopad_to(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        # NOTE(review): unlike the real-valued *_nopad_to variants this
        # one also clears NFFT_density -- presumably intentional; confirm.
        self.createStim(fstims=[],
                        NFFT_density=None,
                        pad_to_density=None, pad_to_spectrum=None,
                        iscomplex=True, sides='twosided', nsides=2)


class spectral_testcase_nosig_complex_defaultsided_nopad_to(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        NFFT_density=None,
                        pad_to_density=None, pad_to_spectrum=None,
                        iscomplex=True, sides='default', nsides=2)


class spectral_testcase_nosig_real_onesided_noNFFT_no_pad_to(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        NFFT_density=None,
                        pad_to_density=None, pad_to_spectrum=None,
                        iscomplex=False, sides='onesided', nsides=1)


class spectral_testcase_nosig_real_twosided_noNFFT_no_pad_to(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        NFFT_density=None,
                        pad_to_density=None, pad_to_spectrum=None,
                        iscomplex=False, sides='twosided', nsides=2)


class spectral_testcase_nosig_real_defaultsided_noNFFT_no_pad_to(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        NFFT_density=None,
                        pad_to_density=None, pad_to_spectrum=None,
                        iscomplex=False, sides='default', nsides=1)


class spectral_testcase_nosig_complex_onesided_noNFFT_no_pad_to(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        NFFT_density=None,
                        pad_to_density=None, pad_to_spectrum=None,
                        iscomplex=True, sides='onesided', nsides=1)


class spectral_testcase_nosig_complex_twosided_noNFFT_no_pad_to(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        NFFT_density=None,
                        pad_to_density=None, pad_to_spectrum=None,
                        iscomplex=True, sides='twosided', nsides=2)


class spectral_testcase_nosig_complex_defaultsided_noNFFT_no_pad_to(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        NFFT_density=None,
                        pad_to_density=None, pad_to_spectrum=None,
                        iscomplex=True, sides='default', nsides=2)


class spectral_testcase_nosig_real_onesided_trim(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        len_x=256,
                        NFFT_density=512, pad_to_spectrum=128,
                        iscomplex=False, sides='onesided', nsides=1)


class spectral_testcase_nosig_real_twosided_trim(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        len_x=256,
                        NFFT_density=512, pad_to_spectrum=128,
                        iscomplex=False, sides='twosided', nsides=2)


class spectral_testcase_nosig_real_defaultsided_trim(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        len_x=256,
                        NFFT_density=512, pad_to_spectrum=128,
                        iscomplex=False, sides='default', nsides=1)


class spectral_testcase_nosig_complex_onesided_trim(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        len_x=256,
                        NFFT_density=512, pad_to_spectrum=128,
                        iscomplex=True, sides='onesided', nsides=1)


class spectral_testcase_nosig_complex_twosided_trim(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        len_x=256,
                        NFFT_density=512, pad_to_spectrum=128,
                        iscomplex=True, sides='twosided', nsides=2)


class spectral_testcase_nosig_complex_defaultsided_trim(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        # NOTE(review): NFFT_density=128 here while the sibling _trim
        # variants use 512 -- looks inconsistent; confirm intent.
        self.createStim(fstims=[],
                        len_x=256,
                        NFFT_density=128, pad_to_spectrum=128,
                        iscomplex=True, sides='default', nsides=2)


class spectral_testcase_nosig_real_onesided_odd(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        len_x=256,
                        pad_to_density=33, pad_to_spectrum=257,
                        iscomplex=False, sides='onesided', nsides=1)


class spectral_testcase_nosig_real_twosided_odd(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        len_x=256,
                        pad_to_density=33, pad_to_spectrum=257,
                        iscomplex=False, sides='twosided', nsides=2)


class spectral_testcase_nosig_real_defaultsided_odd(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        len_x=256,
                        pad_to_density=33, pad_to_spectrum=257,
                        iscomplex=False, sides='default', nsides=1)


class spectral_testcase_nosig_complex_onesided_odd(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        len_x=256,
                        pad_to_density=33, pad_to_spectrum=257,
                        iscomplex=True, sides='onesided', nsides=1)


class spectral_testcase_nosig_complex_twosided_odd(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        len_x=256,
                        pad_to_density=33, pad_to_spectrum=257,
                        iscomplex=True, sides='twosided', nsides=2)


class spectral_testcase_nosig_complex_defaultsided_odd(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        len_x=256,
                        pad_to_density=33, pad_to_spectrum=257,
                        iscomplex=True, sides='default', nsides=2)


class spectral_testcase_nosig_real_onesided_oddlen(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        len_x=255,
                        NFFT_density=33, pad_to_spectrum=None,
                        iscomplex=False, sides='onesided', nsides=1)


class spectral_testcase_nosig_real_twosided_oddlen(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        len_x=255,
                        NFFT_density=33, pad_to_spectrum=None,
                        iscomplex=False, sides='twosided', nsides=2)


class spectral_testcase_nosig_real_defaultsided_oddlen(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        len_x=255,
                        NFFT_density=33, pad_to_spectrum=None,
                        iscomplex=False, sides='default', nsides=1)


class spectral_testcase_nosig_complex_onesided_oddlen(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        len_x=255,
                        NFFT_density=33, pad_to_spectrum=None,
                        iscomplex=True, sides='onesided', nsides=1)


class spectral_testcase_nosig_complex_twosided_oddlen(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        len_x=255,
                        NFFT_density=33, pad_to_spectrum=None,
                        iscomplex=True, sides='twosided', nsides=2)


class spectral_testcase_nosig_complex_defaultsided_oddlen(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        # NOTE(review): NFFT_density=128 here while the sibling _oddlen
        # variants use 33 -- looks inconsistent; confirm intent.
        self.createStim(fstims=[],
                        len_x=255,
                        NFFT_density=128, pad_to_spectrum=None,
                        iscomplex=True, sides='default', nsides=2)


class spectral_testcase_nosig_real_onesided_stretch(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        len_x=128,
                        NFFT_density=128,
                        pad_to_density=256, pad_to_spectrum=256,
                        iscomplex=False, sides='onesided', nsides=1)


class spectral_testcase_nosig_real_twosided_stretch(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        len_x=128,
                        NFFT_density=128,
                        pad_to_density=256, pad_to_spectrum=256,
                        iscomplex=False, sides='twosided', nsides=2)


class spectral_testcase_nosig_real_defaultsided_stretch(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        len_x=128,
                        NFFT_density=128,
                        pad_to_density=256, pad_to_spectrum=256,
                        iscomplex=False, sides='default', nsides=1)


class spectral_testcase_nosig_complex_onesided_stretch(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        len_x=128,
                        NFFT_density=128,
                        pad_to_density=256, pad_to_spectrum=256,
                        iscomplex=True, sides='onesided', nsides=1)


class spectral_testcase_nosig_complex_twosided_stretch(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        len_x=128,
                        NFFT_density=128,
                        pad_to_density=256, pad_to_spectrum=256,
                        iscomplex=True, sides='twosided', nsides=2)


class spectral_testcase_nosig_complex_defaultsided_stretch(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        len_x=128,
                        NFFT_density=128,
                        pad_to_density=256, pad_to_spectrum=256,
                        iscomplex=True, sides='default', nsides=2)


class spectral_testcase_nosig_real_onesided_overlap(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        nover_density=32,
                        iscomplex=False, sides='onesided', nsides=1)


class spectral_testcase_nosig_real_twosided_overlap(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        nover_density=32,
                        iscomplex=False, sides='twosided', nsides=2)


class spectral_testcase_nosig_real_defaultsided_overlap(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        nover_density=32,
                        iscomplex=False, sides='default', nsides=1)


class spectral_testcase_nosig_complex_onesided_overlap(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        nover_density=32,
                        iscomplex=True, sides='onesided', nsides=1)


class spectral_testcase_nosig_complex_twosided_overlap(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        nover_density=32,
                        iscomplex=True, sides='twosided', nsides=2)


class spectral_testcase_nosig_complex_defaultsided_overlap(
        spectral_testcase_nosig_real_onesided):
    def setUp(self):
        self.createStim(fstims=[],
                        nover_density=32,
                        iscomplex=True, sides='default', nsides=2)
def test_griddata_linear():
    """griddata with interp='linear' must reproduce a plane exactly,
    for 1D and 2D xi/yi inputs and for masked z input."""
    # z is a linear function of x and y.
    def get_z(x, y):
        return 3.0*x - y

    # Passing 1D xi and yi arrays to griddata.
    x = np.asarray([0.0, 1.0, 0.0, 1.0, 0.5])
    y = np.asarray([0.0, 0.0, 1.0, 1.0, 0.5])
    z = get_z(x, y)
    xi = [0.2, 0.4, 0.6, 0.8]
    yi = [0.1, 0.3, 0.7, 0.9]
    zi = mlab.griddata(x, y, z, xi, yi, interp='linear')
    # zi is a 2D grid; build matching 2D xi/yi for the comparison
    xi, yi = np.meshgrid(xi, yi)
    np.testing.assert_array_almost_equal(zi, get_z(xi, yi))

    # Passing 2D xi and yi arrays to griddata.
    zi = mlab.griddata(x, y, z, xi, yi, interp='linear')
    np.testing.assert_array_almost_equal(zi, get_z(xi, yi))

    # Masking z array: points depending on the masked input point
    # (1.0, 1.0) must come back masked.
    z_masked = np.ma.array(z, mask=[False, False, False, True, False])
    correct_zi_masked = np.ma.masked_where(xi + yi > 1.0, get_z(xi, yi))
    zi = mlab.griddata(x, y, z_masked, xi, yi, interp='linear')
    matest.assert_array_almost_equal(zi, correct_zi_masked)
    np.testing.assert_array_equal(np.ma.getmask(zi),
                                  np.ma.getmask(correct_zi_masked))
@knownfailureif(not HAS_NATGRID)
def test_griddata_nn():
    """griddata with interp='nn' (natural neighbor, needs natgrid):
    check against reference values, 1D/2D xi/yi, decreasing-coordinate
    rejection, and masked z input."""
    # z is a linear function of x and y.
    def get_z(x, y):
        return 3.0*x - y

    # Passing 1D xi and yi arrays to griddata.
    x = np.asarray([0.0, 1.0, 0.0, 1.0, 0.5])
    y = np.asarray([0.0, 0.0, 1.0, 1.0, 0.5])
    z = get_z(x, y)
    xi = [0.2, 0.4, 0.6, 0.8]
    yi = [0.1, 0.3, 0.7, 0.9]
    # reference output from a known-good natgrid run
    correct_zi = [[0.49999252, 1.0999978, 1.7000030, 2.3000080],
                  [0.29999208, 0.8999978, 1.5000029, 2.1000059],
                  [-0.1000099, 0.4999943, 1.0999964, 1.6999979],
                  [-0.3000128, 0.2999894, 0.8999913, 1.4999933]]
    zi = mlab.griddata(x, y, z, xi, yi, interp='nn')
    np.testing.assert_array_almost_equal(zi, correct_zi, 5)

    # Decreasing xi or yi should raise ValueError.
    assert_raises(ValueError, mlab.griddata, x, y, z, xi[::-1], yi,
                  interp='nn')
    assert_raises(ValueError, mlab.griddata, x, y, z, xi, yi[::-1],
                  interp='nn')

    # Passing 2D xi and yi arrays to griddata.
    xi, yi = np.meshgrid(xi, yi)
    zi = mlab.griddata(x, y, z, xi, yi, interp='nn')
    np.testing.assert_array_almost_equal(zi, correct_zi, 5)

    # Masking z array.
    z_masked = np.ma.array(z, mask=[False, False, False, True, False])
    correct_zi_masked = np.ma.masked_where(xi + yi > 1.0, correct_zi)
    zi = mlab.griddata(x, y, z_masked, xi, yi, interp='nn')
    np.testing.assert_array_almost_equal(zi, correct_zi_masked, 5)
    np.testing.assert_array_equal(np.ma.getmask(zi),
                                  np.ma.getmask(correct_zi_masked))
#*****************************************************************
# These tests were taken from SciPy with some minor modifications,
# and can be retrieved from:
# https://github.com/scipy/scipy/blob/master/scipy/stats/tests/test_kdeoth.py
#*****************************************************************
class gaussian_kde_tests():
    """Tests for mlab.GaussianKDE, adapted from scipy's kde tests."""

    def test_kde_integer_input(self):
        """Regression test for #1181: integer input must be accepted."""
        x1 = np.arange(5)
        kde = mlab.GaussianKDE(x1)
        y_expected = [0.13480721, 0.18222869, 0.19514935, 0.18222869,
                      0.13480721]
        np.testing.assert_array_almost_equal(kde(x1), y_expected, decimal=6)

    def test_gaussian_kde_covariance_caching(self):
        """Evaluating with the 'scott' bandwidth must match reference
        values from scipy 0.10."""
        # np.float was a deprecated alias of the builtin float (removed
        # in numpy 1.24); use float directly.
        x1 = np.array([-7, -5, 1, 4, 5], dtype=float)
        xs = np.linspace(-10, 10, num=5)
        # These expected values are from scipy 0.10, before some changes to
        # gaussian_kde. They were not compared with any external reference.
        y_expected = [0.02463386, 0.04689208, 0.05395444, 0.05337754,
                      0.01664475]
        # set it to the default bandwidth.
        kde2 = mlab.GaussianKDE(x1, 'scott')
        y2 = kde2(xs)
        np.testing.assert_array_almost_equal(y_expected, y2, decimal=7)

    def test_kde_bandwidth_method(self):
        """The default, named ('scott'), and scalar bandwidth methods
        must all give the same density estimate."""
        np.random.seed(8765678)
        n_basesample = 50
        xn = np.random.randn(n_basesample)
        # Default
        gkde = mlab.GaussianKDE(xn)
        # Supply a named method
        gkde2 = mlab.GaussianKDE(xn, 'scott')
        # Supply a scalar
        gkde3 = mlab.GaussianKDE(xn, bw_method=gkde.factor)
        xs = np.linspace(-7, 7, 51)
        kdepdf = gkde.evaluate(xs)
        kdepdf2 = gkde2.evaluate(xs)
        # Compare the full arrays: the original compared
        # kdepdf.all() with kdepdf2.all(), i.e. two scalar booleans,
        # which passes for any two all-nonzero arrays.
        np.testing.assert_array_almost_equal(kdepdf, kdepdf2)
        kdepdf3 = gkde3.evaluate(xs)
        np.testing.assert_array_almost_equal(kdepdf, kdepdf3)
class gaussian_kde_custom_tests(object):
    """Input-validation and bandwidth-selection tests for mlab.GaussianKDE.

    NOTE: several expected values below depend on the exact sequence of
    draws after np.random.seed(); do not reorder the RNG calls.
    """
    def test_no_data(self):
        """Pass no data into the GaussianKDE class."""
        assert_raises(ValueError, mlab.GaussianKDE, [])
    def test_single_dataset_element(self):
        """Pass a single dataset element into the GaussianKDE class."""
        assert_raises(ValueError, mlab.GaussianKDE, [42])
    def test_silverman_multidim_dataset(self):
        """A multi-dimensional dataset must make Silverman's rule fail
        (singular covariance matrix)."""
        x1 = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
        assert_raises(np.linalg.LinAlgError, mlab.GaussianKDE, x1, "silverman")
    def test_silverman_singledim_dataset(self):
        """Check Silverman's covariance factor on a 1-D dataset."""
        x1 = np.array([-7, -5, 1, 4, 5])
        mygauss = mlab.GaussianKDE(x1, "silverman")
        y_expected = 0.76770389927475502
        assert_almost_equal(mygauss.covariance_factor(), y_expected, 7)
    def test_scott_multidim_dataset(self):
        """A multi-dimensional dataset must make Scott's rule fail
        (singular covariance matrix)."""
        x1 = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
        assert_raises(np.linalg.LinAlgError, mlab.GaussianKDE, x1, "scott")
    def test_scott_singledim_dataset(self):
        """Check Scott's covariance factor on a 1-D dataset."""
        x1 = np.array([-7, -5, 1, 4, 5])
        mygauss = mlab.GaussianKDE(x1, "scott")
        y_expected = 0.72477966367769553
        assert_almost_equal(mygauss.covariance_factor(), y_expected, 7)
    def test_scalar_empty_dataset(self):
        """An empty dataset must raise even with a scalar bandwidth."""
        assert_raises(ValueError, mlab.GaussianKDE, [], bw_method=5)
    def test_scalar_covariance_dataset(self):
        """A scalar bw_method is used verbatim as the covariance factor."""
        np.random.seed(8765678)
        n_basesample = 50
        multidim_data = [np.random.randn(n_basesample) for i in range(5)]
        kde = mlab.GaussianKDE(multidim_data, bw_method=0.5)
        assert_equal(kde.covariance_factor(), 0.5)
    def test_callable_covariance_dataset(self):
        """A callable bw_method is invoked to obtain the covariance factor."""
        np.random.seed(8765678)
        n_basesample = 50
        multidim_data = [np.random.randn(n_basesample) for i in range(5)]
        def callable_fun(x):
            return 0.55
        kde = mlab.GaussianKDE(multidim_data, bw_method=callable_fun)
        assert_equal(kde.covariance_factor(), 0.55)
    def test_callable_singledim_dataset(self):
        """Named-rule bandwidth on a single-dimensional random dataset."""
        np.random.seed(8765678)
        n_basesample = 50
        multidim_data = np.random.randn(n_basesample)
        kde = mlab.GaussianKDE(multidim_data, bw_method='silverman')
        y_expected = 0.48438841363348911
        assert_almost_equal(kde.covariance_factor(), y_expected, 7)
    def test_wrong_bw_method(self):
        """An unrecognized bw_method string must raise ValueError."""
        np.random.seed(8765678)
        n_basesample = 50
        data = np.random.randn(n_basesample)
        assert_raises(ValueError, mlab.GaussianKDE, data, bw_method="invalid")
class gaussian_kde_evaluate_tests(object):
    """Tests for GaussianKDE.evaluate with various dataset/point shapes."""

    def test_evaluate_diff_dim(self):
        """Evaluate at more points than the dataset contains."""
        dataset = np.arange(3, 10, 2)
        kde = mlab.GaussianKDE(dataset)
        points = np.arange(3, 12, 2)
        expected = [
            0.08797252, 0.11774109, 0.11774109, 0.08797252, 0.0370153
        ]
        density = kde.evaluate(points)
        np.testing.assert_array_almost_equal(density, expected, 7)

    def test_evaluate_inv_dim(self):
        """A 1-D dataset evaluated at column-vector points ([[1],[2],[3]])
        must raise ValueError."""
        np.random.seed(8765678)
        n_basesample = 50
        dataset = np.random.randn(n_basesample)
        kde = mlab.GaussianKDE(dataset)
        bad_points = [[1], [2], [3]]
        assert_raises(ValueError, kde.evaluate, bad_points)

    def test_evaluate_dim_and_num(self):
        """Evaluate at a one-element array."""
        dataset = np.arange(3, 10, 2)
        single_point = np.array([3])
        kde = mlab.GaussianKDE(dataset)
        expected = [0.08797252]
        density = kde.evaluate(single_point)
        np.testing.assert_array_almost_equal(density, expected, 7)

    def test_evaluate_point_dim_not_one(self):
        """Two-row points against a 1-D dataset must raise ValueError."""
        dataset = np.arange(3, 10, 2)
        bad_points = [np.arange(3, 10, 2), np.arange(3, 10, 2)]
        kde = mlab.GaussianKDE(dataset)
        assert_raises(ValueError, kde.evaluate, bad_points)

    def test_evaluate_equal_dim_and_num_lt(self):
        """Evaluate at fewer points than the dataset contains."""
        dataset = np.arange(3, 10, 2)
        points = np.arange(3, 8, 2)
        kde = mlab.GaussianKDE(dataset)
        expected = [0.08797252, 0.11774109, 0.11774109]
        density = kde.evaluate(points)
        np.testing.assert_array_almost_equal(density, expected, 7)
def test_contiguous_regions():
    """Check mlab.contiguous_regions on masks built from known run lengths."""
    run1, gap, run2 = 3, 4, 5
    # True at both ends.
    mask = [True]*run1 + [False]*gap + [True]*run2
    expected = [(0, run1), (run1 + gap, run1 + gap + run2)]
    assert_equal(mlab.contiguous_regions(mask), expected)
    lead, trail = 6, 7
    # True at the start, False at the end.
    mask = mask + [False]*trail
    assert_equal(mlab.contiguous_regions(mask), expected)
    # False at the start, True at the end.
    mask = [False]*lead + mask[:-trail]
    expected = [(lead, lead + run1),
                (lead + run1 + gap, lead + run1 + gap + run2)]
    assert_equal(mlab.contiguous_regions(mask), expected)
    # False at both ends.
    mask = mask + [False]*trail
    assert_equal(mlab.contiguous_regions(mask), expected)
    # A mask with no True entries yields no regions.
    assert_equal(mlab.contiguous_regions([False]*5), [])
    # An empty mask yields no regions.
    assert_equal(mlab.contiguous_regions([]), [])
def test_psd_onesided_norm():
    """A one-sided PSD should fold the two-sided spectrum onto the
    non-negative frequencies."""
    signal = np.array([0, 1, 2, 3, 1, 2, 1])
    dt = 1.0
    # Two-sided periodogram computed directly from the DFT.
    spectrum = np.abs(np.fft.fft(signal) * dt)**2 / float(dt * signal.size)
    P, f = mlab.psd(signal, NFFT=signal.size, Fs=1/dt,
                    window=mlab.window_none, detrend=mlab.detrend_none,
                    noverlap=0, pad_to=None, scale_by_freq=None,
                    sides='onesided')
    # Fold the negative-frequency bins onto the positive half; the DC
    # term is kept as-is.
    folded = np.append([spectrum[0]], spectrum[1:4] + spectrum[4:][::-1])
    assert_allclose(P, folded, atol=1e-06)
if __name__ == '__main__':
    # Run this module's tests (doctests included, output unbuffered)
    # through nose when executed as a script.
    import nose
    import sys
    extra_args = ['-s', '--with-doctest']
    nose.runmodule(argv=sys.argv[:1] + extra_args + sys.argv[1:],
                   exit=False)
| gpl-3.0 |
nomad-vino/SPSE-1 | Module 5/5.4.py | 1 | 1362 | #!/usr/bin/python
# ASCII-art course banner, printed when the PyCommand loads.
print " __ "
print " |__|____ ___ __ "
print " | \__ \\\\ \/ / "
print " | |/ __ \\\\ / "
print " /\__| (____ /\_/ "
print " \______| \/ "
print " "
print 'Module 5'
print 'Exploitation Techniques'
print 'Part 4'
print
"""
Playing with processes in IDB
"""
import immlib
# Module-level handle into the Immunity Debugger scripting API.
imm = immlib.Debugger()
# # # # Main application
# NOTE(review): DESC appears to be the description Immunity Debugger shows
# for this PyCommand -- confirm against the PyCommands convention.
DESC = 'Playing with processes'
def main(args):
    """PyCommand entry point: list all modules of the debuggee in a table
    and dump the current register state to the log window.

    args -- list of string arguments passed on the debugger command line
            (only used by the commented-out attach variant below).
    Returns a status string shown in the debugger's log.
    """
    # open closed process
    #exe = 'E:\\Module 6\\Server-Strcpy.exe'
    #imm.openProcess(exe)
    # attach to running process - not the one opened in immunity
    # -> !script_name PID
    #imm.Attach(int(args[0])) #PID
    #imm.restartProcess()
    # find all modules in running process
    modules_table = imm.createTable('Module Information', ['Name', 'Base', 'Entry', 'Size', 'Version'])
    # get list of modules (name -> module object)
    module_dict = imm.getAllModules()
    # fill table: one row per module, addresses formatted as 8-digit hex
    for entity in module_dict.values() :
        # Libs.debugtypes => Module
        modules_table.add(0, [
            entity.getName(),
            '%08X'%entity.getBaseAddress(),
            '%08X'%entity.getEntry(),
            '%08X'%entity.getSize(),
            entity.getVersion()
            ])
    # print the state of registers in logs
    imm.log(str(imm.getRegs()))
    return 'Done'
| gpl-3.0 |
mancoast/CPythonPyc_test | fail/313_test_bigmem.py | 1 | 41534 | from test import support
from test.support import bigmemtest, _1G, _2G, _4G, precisionbigmemtest
import unittest
import operator
import sys
import functools
# Bigmem testing houserules:
#
# - Try not to allocate too many large objects. It's okay to rely on
# refcounting semantics, but don't forget that 's = create_largestring()'
# doesn't release the old 's' (if it exists) until well after its new
# value has been created. Use 'del s' before the create_largestring call.
#
# - Do *not* compare large objects using assertEqual or similar. It's a
#   lengthy operation and the error message will be utterly useless due to
#   its size. To check whether a result has the right contents, it is better
#   to use the strip or count methods, or to compare meaningful slices.
#
# - Don't forget to test for large indices, offsets and results and such,
# in addition to large sizes.
#
# - When repeating an object (say, a substring, or a small list) to create
# a large object, make the subobject of a length that is not a power of
# 2. That way, int-wrapping problems are more easily detected.
#
# - While the bigmemtest decorator speaks of 'minsize', all tests will
# actually be called with a much smaller number too, in the normal
# test run (5Kb currently.) This is so the tests themselves get frequent
# testing. Consequently, always make all large allocations based on the
# passed-in 'size', and don't rely on the size being very large. Also,
# memuse-per-size should remain sane (less than a few thousand); if your
# test uses more, adjust 'size' upward, instead.
# BEWARE: it seems that one failing test can cause subsequent tests to
# fail as well. I do not know whether it is due to memory fragmentation
# issues, or other specifics of the platform malloc() routine.
# Bytes per unicode character in this interpreter build: 4 on wide
# (UCS-4) builds, 2 on narrow (UCS-2) builds.
character_size = 4 if sys.maxunicode > 0xFFFF else 2
class BaseStrTest:
    """Mixin of bigmem tests shared by str, bytes and bytearray.

    Concrete subclasses supply from_latin1(), which converts a latin-1
    text fixture into the type under test.  The memuse figure on every
    test is tuned to the exact allocation pattern of that test body, so
    do not restructure a test without re-deriving its memuse.
    """

    @bigmemtest(minsize=_2G, memuse=2)
    def test_capitalize(self, size):
        _ = self.from_latin1
        SUBSTR = self.from_latin1(' abc def ghi')
        s = _('-') * size + SUBSTR
        caps = s.capitalize()
        self.assertEqual(caps[-len(SUBSTR):],
                         SUBSTR.capitalize())
        self.assertEqual(caps.lstrip(_('-')), SUBSTR)

    @bigmemtest(minsize=_2G + 10, memuse=1)
    def test_center(self, size):
        SUBSTR = self.from_latin1(' abc def ghi')
        s = SUBSTR.center(size)
        self.assertEqual(len(s), size)
        lpadsize = rpadsize = (len(s) - len(SUBSTR)) // 2
        if len(s) % 2:
            lpadsize += 1
        self.assertEqual(s[lpadsize:-rpadsize], SUBSTR)
        self.assertEqual(s.strip(), SUBSTR.strip())

    @bigmemtest(minsize=_2G, memuse=2)
    def test_count(self, size):
        _ = self.from_latin1
        SUBSTR = _(' abc def ghi')
        s = _('.') * size + SUBSTR
        self.assertEqual(s.count(_('.')), size)
        s += _('.')
        self.assertEqual(s.count(_('.')), size + 1)
        self.assertEqual(s.count(_(' ')), 3)
        self.assertEqual(s.count(_('i')), 1)
        self.assertEqual(s.count(_('j')), 0)

    @bigmemtest(minsize=_2G, memuse=2)
    def test_endswith(self, size):
        _ = self.from_latin1
        SUBSTR = _(' abc def ghi')
        s = _('-') * size + SUBSTR
        self.assertTrue(s.endswith(SUBSTR))
        self.assertTrue(s.endswith(s))
        s2 = _('...') + s
        self.assertTrue(s2.endswith(s))
        self.assertFalse(s.endswith(_('a') + SUBSTR))
        self.assertFalse(SUBSTR.endswith(s))

    @bigmemtest(minsize=_2G + 10, memuse=2)
    def test_expandtabs(self, size):
        _ = self.from_latin1
        s = _('-') * size
        tabsize = 8
        self.assertEqual(s.expandtabs(), s)
        del s
        slen, remainder = divmod(size, tabsize)
        s = _('       \t') * slen
        s = s.expandtabs(tabsize)
        self.assertEqual(len(s), size - remainder)
        self.assertEqual(len(s.strip(_(' '))), 0)

    @bigmemtest(minsize=_2G, memuse=2)
    def test_find(self, size):
        _ = self.from_latin1
        SUBSTR = _(' abc def ghi')
        sublen = len(SUBSTR)
        s = _('').join([SUBSTR, _('-') * size, SUBSTR])
        self.assertEqual(s.find(_(' ')), 0)
        self.assertEqual(s.find(SUBSTR), 0)
        self.assertEqual(s.find(_(' '), sublen), sublen + size)
        self.assertEqual(s.find(SUBSTR, len(SUBSTR)), sublen + size)
        self.assertEqual(s.find(_('i')), SUBSTR.find(_('i')))
        self.assertEqual(s.find(_('i'), sublen),
                         sublen + size + SUBSTR.find(_('i')))
        self.assertEqual(s.find(_('i'), size),
                         sublen + size + SUBSTR.find(_('i')))
        self.assertEqual(s.find(_('j')), -1)

    @bigmemtest(minsize=_2G, memuse=2)
    def test_index(self, size):
        _ = self.from_latin1
        SUBSTR = _(' abc def ghi')
        sublen = len(SUBSTR)
        s = _('').join([SUBSTR, _('-') * size, SUBSTR])
        self.assertEqual(s.index(_(' ')), 0)
        self.assertEqual(s.index(SUBSTR), 0)
        self.assertEqual(s.index(_(' '), sublen), sublen + size)
        self.assertEqual(s.index(SUBSTR, sublen), sublen + size)
        self.assertEqual(s.index(_('i')), SUBSTR.index(_('i')))
        self.assertEqual(s.index(_('i'), sublen),
                         sublen + size + SUBSTR.index(_('i')))
        self.assertEqual(s.index(_('i'), size),
                         sublen + size + SUBSTR.index(_('i')))
        self.assertRaises(ValueError, s.index, _('j'))

    @bigmemtest(minsize=_2G, memuse=2)
    def test_isalnum(self, size):
        _ = self.from_latin1
        SUBSTR = _('123456')
        s = _('a') * size + SUBSTR
        self.assertTrue(s.isalnum())
        s += _('.')
        self.assertFalse(s.isalnum())

    @bigmemtest(minsize=_2G, memuse=2)
    def test_isalpha(self, size):
        _ = self.from_latin1
        SUBSTR = _('zzzzzzz')
        s = _('a') * size + SUBSTR
        self.assertTrue(s.isalpha())
        s += _('.')
        self.assertFalse(s.isalpha())

    @bigmemtest(minsize=_2G, memuse=2)
    def test_isdigit(self, size):
        _ = self.from_latin1
        SUBSTR = _('123456')
        s = _('9') * size + SUBSTR
        self.assertTrue(s.isdigit())
        s += _('z')
        self.assertFalse(s.isdigit())

    @bigmemtest(minsize=_2G, memuse=2)
    def test_islower(self, size):
        _ = self.from_latin1
        chars = _(''.join(
            chr(c) for c in range(255) if not chr(c).isupper()))
        repeats = size // len(chars) + 2
        s = chars * repeats
        self.assertTrue(s.islower())
        s += _('A')
        self.assertFalse(s.islower())

    @bigmemtest(minsize=_2G, memuse=2)
    def test_isspace(self, size):
        _ = self.from_latin1
        whitespace = _(' \f\n\r\t\v')
        repeats = size // len(whitespace) + 2
        s = whitespace * repeats
        self.assertTrue(s.isspace())
        s += _('j')
        self.assertFalse(s.isspace())

    @bigmemtest(minsize=_2G, memuse=2)
    def test_istitle(self, size):
        _ = self.from_latin1
        SUBSTR = _('123456')
        s = _('').join([_('A'), _('a') * size, SUBSTR])
        self.assertTrue(s.istitle())
        s += _('A')
        self.assertTrue(s.istitle())
        s += _('aA')
        self.assertFalse(s.istitle())

    @bigmemtest(minsize=_2G, memuse=2)
    def test_isupper(self, size):
        _ = self.from_latin1
        chars = _(''.join(
            chr(c) for c in range(255) if not chr(c).islower()))
        repeats = size // len(chars) + 2
        s = chars * repeats
        self.assertTrue(s.isupper())
        s += _('a')
        self.assertFalse(s.isupper())

    @bigmemtest(minsize=_2G, memuse=2)
    def test_join(self, size):
        _ = self.from_latin1
        s = _('A') * size
        x = s.join([_('aaaaa'), _('bbbbb')])
        self.assertEqual(x.count(_('a')), 5)
        self.assertEqual(x.count(_('b')), 5)
        self.assertTrue(x.startswith(_('aaaaaA')))
        self.assertTrue(x.endswith(_('Abbbbb')))

    @bigmemtest(minsize=_2G + 10, memuse=1)
    def test_ljust(self, size):
        _ = self.from_latin1
        SUBSTR = _(' abc def ghi')
        s = SUBSTR.ljust(size)
        self.assertTrue(s.startswith(SUBSTR + _(' ')))
        self.assertEqual(len(s), size)
        self.assertEqual(s.strip(), SUBSTR.strip())

    @bigmemtest(minsize=_2G + 10, memuse=2)
    def test_lower(self, size):
        _ = self.from_latin1
        s = _('A') * size
        s = s.lower()
        self.assertEqual(len(s), size)
        self.assertEqual(s.count(_('a')), size)

    @bigmemtest(minsize=_2G + 10, memuse=1)
    def test_lstrip(self, size):
        _ = self.from_latin1
        SUBSTR = _('abc def ghi')
        s = SUBSTR.rjust(size)
        self.assertEqual(len(s), size)
        self.assertEqual(s.lstrip(), SUBSTR.lstrip())
        del s
        s = SUBSTR.ljust(size)
        self.assertEqual(len(s), size)
        # Type-specific optimization
        if isinstance(s, (str, bytes)):
            stripped = s.lstrip()
            self.assertTrue(stripped is s)

    @bigmemtest(minsize=_2G + 10, memuse=2)
    def test_replace(self, size):
        _ = self.from_latin1
        replacement = _('a')
        s = _(' ') * size
        s = s.replace(_(' '), replacement)
        self.assertEqual(len(s), size)
        self.assertEqual(s.count(replacement), size)
        s = s.replace(replacement, _(' '), size - 4)
        self.assertEqual(len(s), size)
        self.assertEqual(s.count(replacement), 4)
        self.assertEqual(s[-10:], _('      aaaa'))

    @bigmemtest(minsize=_2G, memuse=2)
    def test_rfind(self, size):
        _ = self.from_latin1
        SUBSTR = _(' abc def ghi')
        sublen = len(SUBSTR)
        s = _('').join([SUBSTR, _('-') * size, SUBSTR])
        self.assertEqual(s.rfind(_(' ')), sublen + size + SUBSTR.rfind(_(' ')))
        self.assertEqual(s.rfind(SUBSTR), sublen + size)
        self.assertEqual(s.rfind(_(' '), 0, size), SUBSTR.rfind(_(' ')))
        self.assertEqual(s.rfind(SUBSTR, 0, sublen + size), 0)
        self.assertEqual(s.rfind(_('i')), sublen + size + SUBSTR.rfind(_('i')))
        self.assertEqual(s.rfind(_('i'), 0, sublen), SUBSTR.rfind(_('i')))
        self.assertEqual(s.rfind(_('i'), 0, sublen + size),
                         SUBSTR.rfind(_('i')))
        self.assertEqual(s.rfind(_('j')), -1)

    @bigmemtest(minsize=_2G, memuse=2)
    def test_rindex(self, size):
        _ = self.from_latin1
        SUBSTR = _(' abc def ghi')
        sublen = len(SUBSTR)
        s = _('').join([SUBSTR, _('-') * size, SUBSTR])
        self.assertEqual(s.rindex(_(' ')),
                         sublen + size + SUBSTR.rindex(_(' ')))
        self.assertEqual(s.rindex(SUBSTR), sublen + size)
        self.assertEqual(s.rindex(_(' '), 0, sublen + size - 1),
                         SUBSTR.rindex(_(' ')))
        self.assertEqual(s.rindex(SUBSTR, 0, sublen + size), 0)
        self.assertEqual(s.rindex(_('i')),
                         sublen + size + SUBSTR.rindex(_('i')))
        self.assertEqual(s.rindex(_('i'), 0, sublen), SUBSTR.rindex(_('i')))
        self.assertEqual(s.rindex(_('i'), 0, sublen + size),
                         SUBSTR.rindex(_('i')))
        self.assertRaises(ValueError, s.rindex, _('j'))

    @bigmemtest(minsize=_2G + 10, memuse=1)
    def test_rjust(self, size):
        _ = self.from_latin1
        SUBSTR = _(' abc def ghi')
        s = SUBSTR.ljust(size)
        self.assertTrue(s.startswith(SUBSTR + _(' ')))
        self.assertEqual(len(s), size)
        self.assertEqual(s.strip(), SUBSTR.strip())

    @bigmemtest(minsize=_2G + 10, memuse=1)
    def test_rstrip(self, size):
        _ = self.from_latin1
        SUBSTR = _(' abc def ghi')
        s = SUBSTR.ljust(size)
        self.assertEqual(len(s), size)
        self.assertEqual(s.rstrip(), SUBSTR.rstrip())
        del s
        s = SUBSTR.rjust(size)
        self.assertEqual(len(s), size)
        # Type-specific optimization
        if isinstance(s, (str, bytes)):
            stripped = s.rstrip()
            self.assertTrue(stripped is s)

    # The test takes about size bytes to build a string, and then about
    # sqrt(size) substrings of sqrt(size) in size and a list to
    # hold sqrt(size) items. It's close but just over 2x size.
    @bigmemtest(minsize=_2G, memuse=2.1)
    def test_split_small(self, size):
        _ = self.from_latin1
        # Crudely calculate an estimate so that the result of s.split won't
        # take up an inordinate amount of memory
        chunksize = int(size ** 0.5 + 2)
        SUBSTR = _('a') + _(' ') * chunksize
        s = SUBSTR * chunksize
        l = s.split()
        self.assertEqual(len(l), chunksize)
        expected = _('a')
        for item in l:
            self.assertEqual(item, expected)
        del l
        l = s.split(_('a'))
        self.assertEqual(len(l), chunksize + 1)
        expected = _(' ') * chunksize
        for item in filter(None, l):
            self.assertEqual(item, expected)

    # Allocates a string of twice size (and briefly two) and a list of
    # size.  Because of internal affairs, the s.split() call produces a
    # list of size times the same one-character string, so we only
    # suffer for the list size. (Otherwise, it'd cost another 48 times
    # size in bytes!) Nevertheless, a list of size takes
    # 8*size bytes.
    @bigmemtest(minsize=_2G + 5, memuse=10)
    def test_split_large(self, size):
        _ = self.from_latin1
        s = _(' a') * size + _(' ')
        l = s.split()
        self.assertEqual(len(l), size)
        self.assertEqual(set(l), set([_('a')]))
        del l
        l = s.split(_('a'))
        self.assertEqual(len(l), size + 1)
        self.assertEqual(set(l), set([_(' ')]))

    @bigmemtest(minsize=_2G, memuse=2.1)
    def test_splitlines(self, size):
        _ = self.from_latin1
        # Crudely calculate an estimate so that the result of s.split won't
        # take up an inordinate amount of memory
        chunksize = int(size ** 0.5 + 2) // 2
        SUBSTR = _(' ') * chunksize + _('\n') + _(' ') * chunksize + _('\r\n')
        s = SUBSTR * chunksize
        l = s.splitlines()
        self.assertEqual(len(l), chunksize * 2)
        expected = _(' ') * chunksize
        for item in l:
            self.assertEqual(item, expected)

    @bigmemtest(minsize=_2G, memuse=2)
    def test_startswith(self, size):
        _ = self.from_latin1
        SUBSTR = _(' abc def ghi')
        s = _('-') * size + SUBSTR
        self.assertTrue(s.startswith(s))
        self.assertTrue(s.startswith(_('-') * size))
        self.assertFalse(s.startswith(SUBSTR))

    @bigmemtest(minsize=_2G, memuse=1)
    def test_strip(self, size):
        _ = self.from_latin1
        SUBSTR = _('   abc def ghi   ')
        s = SUBSTR.rjust(size)
        self.assertEqual(len(s), size)
        self.assertEqual(s.strip(), SUBSTR.strip())
        del s
        s = SUBSTR.ljust(size)
        self.assertEqual(len(s), size)
        self.assertEqual(s.strip(), SUBSTR.strip())

    @bigmemtest(minsize=_2G, memuse=2)
    def test_swapcase(self, size):
        _ = self.from_latin1
        SUBSTR = _("aBcDeFG12.'\xa9\x00")
        sublen = len(SUBSTR)
        repeats = size // sublen + 2
        s = SUBSTR * repeats
        s = s.swapcase()
        self.assertEqual(len(s), sublen * repeats)
        self.assertEqual(s[:sublen * 3], SUBSTR.swapcase() * 3)
        self.assertEqual(s[-sublen * 3:], SUBSTR.swapcase() * 3)

    @bigmemtest(minsize=_2G, memuse=2)
    def test_title(self, size):
        _ = self.from_latin1
        SUBSTR = _('SpaaHAaaAaham')
        s = SUBSTR * (size // len(SUBSTR) + 2)
        s = s.title()
        self.assertTrue(s.startswith((SUBSTR * 3).title()))
        self.assertTrue(s.endswith(SUBSTR.lower() * 3))

    @bigmemtest(minsize=_2G, memuse=2)
    def test_translate(self, size):
        _ = self.from_latin1
        SUBSTR = _('aZz.z.Aaz.')
        if isinstance(SUBSTR, str):
            trans = {
                ord(_('.')): _('-'),
                ord(_('a')): _('!'),
                ord(_('Z')): _('$'),
            }
        else:
            trans = bytes.maketrans(b'.aZ', b'-!$')
        sublen = len(SUBSTR)
        repeats = size // sublen + 2
        s = SUBSTR * repeats
        s = s.translate(trans)
        self.assertEqual(len(s), repeats * sublen)
        self.assertEqual(s[:sublen], SUBSTR.translate(trans))
        self.assertEqual(s[-sublen:], SUBSTR.translate(trans))
        self.assertEqual(s.count(_('.')), 0)
        self.assertEqual(s.count(_('!')), repeats * 2)
        self.assertEqual(s.count(_('z')), repeats * 3)

    @bigmemtest(minsize=_2G + 5, memuse=2)
    def test_upper(self, size):
        _ = self.from_latin1
        s = _('a') * size
        s = s.upper()
        self.assertEqual(len(s), size)
        self.assertEqual(s.count(_('A')), size)

    @bigmemtest(minsize=_2G + 20, memuse=1)
    def test_zfill(self, size):
        _ = self.from_latin1
        SUBSTR = _('-568324723598234')
        s = SUBSTR.zfill(size)
        self.assertTrue(s.endswith(_('0') + SUBSTR[1:]))
        self.assertTrue(s.startswith(_('-0')))
        self.assertEqual(len(s), size)
        self.assertEqual(s.count(_('0')), size - len(SUBSTR))

    # This test is meaningful even with size < 2G, as long as the
    # doubled string is > 2G (but it tests more if both are > 2G :)
    @bigmemtest(minsize=_1G + 2, memuse=3)
    def test_concat(self, size):
        _ = self.from_latin1
        s = _('.') * size
        self.assertEqual(len(s), size)
        s = s + s
        self.assertEqual(len(s), size * 2)
        self.assertEqual(s.count(_('.')), size * 2)

    # This test is meaningful even with size < 2G, as long as the
    # repeated string is > 2G (but it tests more if both are > 2G :)
    @bigmemtest(minsize=_1G + 2, memuse=3)
    def test_repeat(self, size):
        _ = self.from_latin1
        s = _('.') * size
        self.assertEqual(len(s), size)
        s = s * 2
        self.assertEqual(len(s), size * 2)
        self.assertEqual(s.count(_('.')), size * 2)

    @bigmemtest(minsize=_2G + 20, memuse=2)
    def test_slice_and_getitem(self, size):
        _ = self.from_latin1
        SUBSTR = _('0123456789')
        sublen = len(SUBSTR)
        s = SUBSTR * (size // sublen)
        stepsize = len(s) // 100
        stepsize = stepsize - (stepsize % sublen)
        for i in range(0, len(s) - stepsize, stepsize):
            self.assertEqual(s[i], SUBSTR[0])
            self.assertEqual(s[i:i + sublen], SUBSTR)
            self.assertEqual(s[i:i + sublen:2], SUBSTR[::2])
            if i > 0:
                self.assertEqual(s[i + sublen - 1:i - 1:-3],
                                 SUBSTR[sublen::-3])
        # Make sure we do some slicing and indexing near the end of the
        # string, too.
        self.assertEqual(s[len(s) - 1], SUBSTR[-1])
        self.assertEqual(s[-1], SUBSTR[-1])
        self.assertEqual(s[len(s) - 10], SUBSTR[0])
        self.assertEqual(s[-sublen], SUBSTR[0])
        self.assertEqual(s[len(s):], _(''))
        self.assertEqual(s[len(s) - 1:], SUBSTR[-1:])
        self.assertEqual(s[-1:], SUBSTR[-1:])
        self.assertEqual(s[len(s) - sublen:], SUBSTR)
        self.assertEqual(s[-sublen:], SUBSTR)
        self.assertEqual(len(s[:]), len(s))
        self.assertEqual(len(s[:len(s) - 5]), len(s) - 5)
        self.assertEqual(len(s[5:-5]), len(s) - 10)
        self.assertRaises(IndexError, operator.getitem, s, len(s))
        self.assertRaises(IndexError, operator.getitem, s, len(s) + 1)
        # '+' binds tighter than '<<', so the original expression
        # 'len(s) + 1<<31' tested index (len(s) + 1) << 31 rather than
        # the intended index just past 2**31.  Parenthesize the shift.
        self.assertRaises(IndexError, operator.getitem, s, len(s) + (1 << 31))

    @bigmemtest(minsize=_2G, memuse=2)
    def test_contains(self, size):
        _ = self.from_latin1
        SUBSTR = _('0123456789')
        edge = _('-') * (size // 2)
        s = _('').join([edge, SUBSTR, edge])
        del edge
        self.assertTrue(SUBSTR in s)
        self.assertFalse(SUBSTR * 2 in s)
        self.assertTrue(_('-') in s)
        self.assertFalse(_('a') in s)
        s += _('a')
        self.assertTrue(_('a') in s)

    @bigmemtest(minsize=_2G + 10, memuse=2)
    def test_compare(self, size):
        _ = self.from_latin1
        s1 = _('-') * size
        s2 = _('-') * size
        self.assertEqual(s1, s2)
        del s2
        s2 = s1 + _('a')
        self.assertFalse(s1 == s2)
        del s2
        s2 = _('.') * size
        self.assertFalse(s1 == s2)

    @bigmemtest(minsize=_2G + 10, memuse=1)
    def test_hash(self, size):
        # Not sure if we can do any meaningful tests here...  Even if we
        # start relying on the exact algorithm used, the result will be
        # different depending on the size of the C 'long int'.  Even this
        # test is dodgy (there's no *guarantee* that the two things should
        # have a different hash, even if they, in the current
        # implementation, almost always do.)
        _ = self.from_latin1
        s = _('\x00') * size
        h1 = hash(s)
        del s
        s = _('\x00') * (size + 1)
        self.assertFalse(h1 == hash(s))
class StrTest(unittest.TestCase, BaseStrTest):
    """Bigmem tests for str: the shared BaseStrTest suite (with memuse
    scaled by the build's character size) plus str-only encoding, format
    and repr tests."""

    def from_latin1(self, s):
        return s

    def basic_encode_test(self, size, enc, c='.', expectedsize=None):
        # Encode a size-character string and check the encoded length.
        if expectedsize is None:
            expectedsize = size
        s = c * size
        self.assertEqual(len(s.encode(enc)), expectedsize)

    def setUp(self):
        # HACK: adjust memory use of tests inherited from BaseStrTest
        # according to character size.
        self._adjusted = {}
        for name in dir(BaseStrTest):
            if not name.startswith('test_'):
                continue
            meth = getattr(type(self), name)
            try:
                memuse = meth.memuse
            except AttributeError:
                continue
            meth.memuse = character_size * memuse
            self._adjusted[name] = memuse

    def tearDown(self):
        # Restore the original memuse values saved in setUp.
        for name, memuse in self._adjusted.items():
            getattr(type(self), name).memuse = memuse

    @bigmemtest(minsize=_2G + 2, memuse=character_size + 1)
    def test_encode(self, size):
        return self.basic_encode_test(size, 'utf-8')

    @precisionbigmemtest(size=_4G // 6 + 2, memuse=character_size + 1)
    def test_encode_raw_unicode_escape(self, size):
        try:
            return self.basic_encode_test(size, 'raw_unicode_escape')
        except MemoryError:
            pass # acceptable on 32-bit

    @precisionbigmemtest(size=_4G // 5 + 70, memuse=character_size + 1)
    def test_encode_utf7(self, size):
        try:
            return self.basic_encode_test(size, 'utf7')
        except MemoryError:
            pass # acceptable on 32-bit

    @precisionbigmemtest(size=_4G // 4 + 5, memuse=character_size + 4)
    def test_encode_utf32(self, size):
        try:
            # +4 for the BOM prepended by the utf32 codec.
            return self.basic_encode_test(size, 'utf32', expectedsize=4*size+4)
        except MemoryError:
            pass # acceptable on 32-bit

    @precisionbigmemtest(size=_2G - 1, memuse=character_size + 1)
    def test_encode_ascii(self, size):
        return self.basic_encode_test(size, 'ascii', c='A')

    @precisionbigmemtest(size=_4G // 5, memuse=character_size * (6 + 1))
    def test_unicode_repr_overflow(self, size):
        try:
            s = "\uAAAA"*size
            r = repr(s)
        except MemoryError:
            pass # acceptable on 32-bit
        else:
            self.assertTrue(s == eval(r))

    @bigmemtest(minsize=_2G + 10, memuse=character_size * 2)
    def test_format(self, size):
        s = '-' * size
        sf = '%s' % (s,)
        self.assertEqual(s, sf)
        del sf
        sf = '..%s..' % (s,)
        self.assertEqual(len(sf), len(s) + 4)
        self.assertTrue(sf.startswith('..-'))
        self.assertTrue(sf.endswith('-..'))
        del s, sf
        size //= 2
        edge = '-' * size
        s = ''.join([edge, '%s', edge])
        del edge
        s = s % '...'
        self.assertEqual(len(s), size * 2 + 3)
        self.assertEqual(s.count('.'), 3)
        self.assertEqual(s.count('-'), size * 2)

    @bigmemtest(minsize=_2G + 10, memuse=character_size * 2)
    def test_repr_small(self, size):
        s = '-' * size
        s = repr(s)
        self.assertEqual(len(s), size + 2)
        self.assertEqual(s[0], "'")
        self.assertEqual(s[-1], "'")
        self.assertEqual(s.count('-'), size)
        del s
        # repr() will create a string four times as large as this 'binary
        # string', but we don't want to allocate much more than twice
        # size in total.  (We do extra testing in test_repr_large())
        size = size // 5 * 2
        s = '\x00' * size
        s = repr(s)
        self.assertEqual(len(s), size * 4 + 2)
        self.assertEqual(s[0], "'")
        self.assertEqual(s[-1], "'")
        self.assertEqual(s.count('\\'), size)
        self.assertEqual(s.count('0'), size * 2)

    @bigmemtest(minsize=_2G + 10, memuse=character_size * 5)
    def test_repr_large(self, size):
        s = '\x00' * size
        s = repr(s)
        self.assertEqual(len(s), size * 4 + 2)
        self.assertEqual(s[0], "'")
        self.assertEqual(s[-1], "'")
        self.assertEqual(s.count('\\'), size)
        self.assertEqual(s.count('0'), size * 2)

    # Use floor division for the minsize: under Python 3, 2**32 / 5 is
    # true division and would pass a float where an integer size is meant.
    @bigmemtest(minsize=2**32 // 5, memuse=character_size * 7)
    def test_unicode_repr(self, size):
        s = "\uAAAA" * size
        for f in (repr, ascii):
            r = f(s)
            self.assertTrue(len(r) > size)
            self.assertTrue(r.endswith(r"\uaaaa'"), r[-10:])
            del r

    # The character takes 4 bytes even in UCS-2 builds because it will
    # be decomposed into surrogates.
    @bigmemtest(minsize=2**32 // 5, memuse=4 + character_size * 9)
    def test_unicode_repr_wide(self, size):
        s = "\U0001AAAA" * size
        for f in (repr, ascii):
            r = f(s)
            self.assertTrue(len(r) > size)
            self.assertTrue(r.endswith(r"\U0001aaaa'"), r[-12:])
            del r
class BytesTest(unittest.TestCase, BaseStrTest):
    """Run the shared string bigmem tests against immutable bytes."""

    def from_latin1(self, s):
        # Turn the latin-1 text fixture into the type under test.
        return s.encode("latin1")

    @bigmemtest(minsize=_2G + 2, memuse=1 + character_size)
    def test_decode(self, size):
        data = self.from_latin1('.') * size
        self.assertEqual(len(data.decode('utf-8')), size)
class BytearrayTest(unittest.TestCase, BaseStrTest):
    """Run the shared string bigmem tests against mutable bytearrays."""

    def from_latin1(self, s):
        # Mutable counterpart of BytesTest.from_latin1.
        return bytearray(s.encode("latin1"))

    @bigmemtest(minsize=_2G + 2, memuse=1 + character_size)
    def test_decode(self, size):
        data = self.from_latin1('.') * size
        self.assertEqual(len(data.decode('utf-8')), size)

    # bytearray objects are unhashable, so the inherited hash test cannot
    # run; the large split test is likewise disabled for this type.
    test_hash = None
    test_split_large = None
class TupleTest(unittest.TestCase):
# Tuples have a small, fixed-sized head and an array of pointers to
# data. Since we're testing 64-bit addressing, we can assume that the
# pointers are 8 bytes, and that thus that the tuples take up 8 bytes
# per size.
# As a side-effect of testing long tuples, these tests happen to test
# having more than 2<<31 references to any given object. Hence the
# use of different types of objects as contents in different tests.
@bigmemtest(minsize=_2G + 2, memuse=16)
def test_compare(self, size):
t1 = ('',) * size
t2 = ('',) * size
self.assertEqual(t1, t2)
del t2
t2 = ('',) * (size + 1)
self.assertFalse(t1 == t2)
del t2
t2 = (1,) * size
self.assertFalse(t1 == t2)
# Test concatenating into a single tuple of more than 2G in length,
# and concatenating a tuple of more than 2G in length separately, so
# the smaller test still gets run even if there isn't memory for the
# larger test (but we still let the tester know the larger test is
# skipped, in verbose mode.)
def basic_concat_test(self, size):
t = ((),) * size
self.assertEqual(len(t), size)
t = t + t
self.assertEqual(len(t), size * 2)
@bigmemtest(minsize=_2G // 2 + 2, memuse=24)
def test_concat_small(self, size):
return self.basic_concat_test(size)
@bigmemtest(minsize=_2G + 2, memuse=24)
def test_concat_large(self, size):
return self.basic_concat_test(size)
@bigmemtest(minsize=_2G // 5 + 10, memuse=8 * 5)
def test_contains(self, size):
t = (1, 2, 3, 4, 5) * size
self.assertEqual(len(t), size * 5)
self.assertTrue(5 in t)
self.assertFalse((1, 2, 3, 4, 5) in t)
self.assertFalse(0 in t)
@bigmemtest(minsize=_2G + 10, memuse=8)
def test_hash(self, size):
t1 = (0,) * size
h1 = hash(t1)
del t1
t2 = (0,) * (size + 1)
self.assertFalse(h1 == hash(t2))
@bigmemtest(minsize=_2G + 10, memuse=8)
def test_index_and_slice(self, size):
t = (None,) * size
self.assertEqual(len(t), size)
self.assertEqual(t[-1], None)
self.assertEqual(t[5], None)
self.assertEqual(t[size - 1], None)
self.assertRaises(IndexError, operator.getitem, t, size)
self.assertEqual(t[:5], (None,) * 5)
self.assertEqual(t[-5:], (None,) * 5)
self.assertEqual(t[20:25], (None,) * 5)
self.assertEqual(t[-25:-20], (None,) * 5)
self.assertEqual(t[size - 5:], (None,) * 5)
self.assertEqual(t[size - 5:size], (None,) * 5)
self.assertEqual(t[size - 6:size - 2], (None,) * 4)
self.assertEqual(t[size:size], ())
self.assertEqual(t[size:size+5], ())
# Like test_concat, split in two.
    def basic_test_repeat(self, size):
        # Shared body for the test_repeat_* variants: * 2 must double
        # the length.
        t = ('',) * size
        self.assertEqual(len(t), size)
        t = t * 2
        self.assertEqual(len(t), size * 2)
    @bigmemtest(minsize=_2G // 2 + 2, memuse=24)
    def test_repeat_small(self, size):
        # Repetition where only the result exceeds 2G.
        return self.basic_test_repeat(size)
    @bigmemtest(minsize=_2G + 2, memuse=24)
    def test_repeat_large(self, size):
        # Repetition where the source tuple already exceeds 2G.
        return self.basic_test_repeat(size)
    @bigmemtest(minsize=_1G - 1, memuse=12)
    def test_repeat_large_2(self, size):
        # Repetition just below the 1G boundary with a tighter memuse.
        return self.basic_test_repeat(size)
    @precisionbigmemtest(size=_1G - 1, memuse=9)
    def test_from_2G_generator(self, size):
        # Building a huge tuple from an iterator; MemoryError is tolerated
        # on 32-bit builds, otherwise every item must round-trip in order.
        try:
            t = tuple(range(size))
        except MemoryError:
            pass # acceptable on 32-bit
        else:
            count = 0
            for item in t:
                self.assertEqual(item, count)
                count += 1
            self.assertEqual(count, size)
    @precisionbigmemtest(size=_1G - 25, memuse=9)
    def test_from_almost_2G_generator(self, size):
        # Same as test_from_2G_generator but slightly below the boundary;
        # here the whole verification loop is inside the try because the
        # MemoryError may occur lazily.
        try:
            t = tuple(range(size))
            count = 0
            for item in t:
                self.assertEqual(item, count)
                count += 1
            self.assertEqual(count, size)
        except MemoryError:
            pass # acceptable, expected on 32-bit
# Like test_concat, split in two.
    def basic_test_repr(self, size):
        # Shared body for test_repr_small/large: check length, both ends,
        # and the digit count of the repr of a huge tuple of zeros.
        t = (0,) * size
        s = repr(t)
        # The repr of a tuple of 0's is exactly three times the tuple length.
        self.assertEqual(len(s), size * 3)
        self.assertEqual(s[:5], '(0, 0')
        self.assertEqual(s[-5:], '0, 0)')
        self.assertEqual(s.count('0'), size)
    @bigmemtest(minsize=_2G // 3 + 2, memuse=8 + 3)
    def test_repr_small(self, size):
        # repr whose *output string* exceeds 2G.
        return self.basic_test_repr(size)
    @bigmemtest(minsize=_2G + 2, memuse=8 + 3)
    def test_repr_large(self, size):
        # repr of a tuple that itself exceeds 2G items.
        return self.basic_test_repr(size)
class ListTest(unittest.TestCase):
    """Bigmem stress tests for list: construction, mutation and sequence
    protocol operations on lists whose length approaches or exceeds 2**31
    items.  The memuse values are calibrated to each test's exact
    allocation sequence."""

    # Like tuples, lists have a small, fixed-sized head and an array of
    # pointers to data, so 8 bytes per size. Also like tuples, we make the
    # lists hold references to various objects to test their refcount
    # limits.

    @bigmemtest(minsize=_2G + 2, memuse=16)
    def test_compare(self, size):
        # Equal-length lists of identical items compare equal; differing
        # length or differing items compare unequal.
        l1 = [''] * size
        l2 = [''] * size
        self.assertEqual(l1, l2)
        del l2
        l2 = [''] * (size + 1)
        self.assertFalse(l1 == l2)
        del l2
        l2 = [2] * size
        self.assertFalse(l1 == l2)

    # Test concatenating into a single list of more than 2G in length,
    # and concatenating a list of more than 2G in length separately, so
    # the smaller test still gets run even if there isn't memory for the
    # larger test (but we still let the tester know the larger test is
    # skipped, in verbose mode.)
    def basic_test_concat(self, size):
        l = [[]] * size
        self.assertEqual(len(l), size)
        l = l + l
        self.assertEqual(len(l), size * 2)

    @bigmemtest(minsize=_2G // 2 + 2, memuse=24)
    def test_concat_small(self, size):
        return self.basic_test_concat(size)

    @bigmemtest(minsize=_2G + 2, memuse=24)
    def test_concat_large(self, size):
        return self.basic_test_concat(size)

    def basic_test_inplace_concat(self, size):
        # += must extend in place and preserve item identity across the
        # old/new halves.
        l = [sys.stdout] * size
        l += l
        self.assertEqual(len(l), size * 2)
        self.assertTrue(l[0] is l[-1])
        self.assertTrue(l[size - 1] is l[size + 1])

    @bigmemtest(minsize=_2G // 2 + 2, memuse=24)
    def test_inplace_concat_small(self, size):
        return self.basic_test_inplace_concat(size)

    @bigmemtest(minsize=_2G + 2, memuse=24)
    def test_inplace_concat_large(self, size):
        return self.basic_test_inplace_concat(size)

    @bigmemtest(minsize=_2G // 5 + 10, memuse=8 * 5)
    def test_contains(self, size):
        l = [1, 2, 3, 4, 5] * size
        self.assertEqual(len(l), size * 5)
        self.assertTrue(5 in l)
        self.assertFalse([1, 2, 3, 4, 5] in l)
        self.assertFalse(0 in l)

    @bigmemtest(minsize=_2G + 10, memuse=8)
    def test_hash(self, size):
        # Lists are unhashable regardless of size.
        l = [0] * size
        self.assertRaises(TypeError, hash, l)

    @bigmemtest(minsize=_2G + 10, memuse=8)
    def test_index_and_slice(self, size):
        # Indexing, slicing, item/slice assignment and deletion around the
        # 2**31 boundary; `size` is adjusted as the slice assignments and
        # deletions shrink the list.
        l = [None] * size
        self.assertEqual(len(l), size)
        self.assertEqual(l[-1], None)
        self.assertEqual(l[5], None)
        self.assertEqual(l[size - 1], None)
        self.assertRaises(IndexError, operator.getitem, l, size)
        self.assertEqual(l[:5], [None] * 5)
        self.assertEqual(l[-5:], [None] * 5)
        self.assertEqual(l[20:25], [None] * 5)
        self.assertEqual(l[-25:-20], [None] * 5)
        self.assertEqual(l[size - 5:], [None] * 5)
        self.assertEqual(l[size - 5:size], [None] * 5)
        self.assertEqual(l[size - 6:size - 2], [None] * 4)
        self.assertEqual(l[size:size], [])
        self.assertEqual(l[size:size+5], [])

        l[size - 2] = 5
        self.assertEqual(len(l), size)
        self.assertEqual(l[-3:], [None, 5, None])
        self.assertEqual(l.count(5), 1)
        self.assertRaises(IndexError, operator.setitem, l, size, 6)
        self.assertEqual(len(l), size)

        l[size - 7:] = [1, 2, 3, 4, 5]
        size -= 2
        self.assertEqual(len(l), size)
        self.assertEqual(l[-7:], [None, None, 1, 2, 3, 4, 5])

        l[:7] = [1, 2, 3, 4, 5]
        size -= 2
        self.assertEqual(len(l), size)
        self.assertEqual(l[:7], [1, 2, 3, 4, 5, None, None])

        del l[size - 1]
        size -= 1
        self.assertEqual(len(l), size)
        self.assertEqual(l[-1], 4)

        del l[-2:]
        size -= 2
        self.assertEqual(len(l), size)
        self.assertEqual(l[-1], 2)

        del l[0]
        size -= 1
        self.assertEqual(len(l), size)
        self.assertEqual(l[0], 2)

        del l[:2]
        size -= 2
        self.assertEqual(len(l), size)
        self.assertEqual(l[0], 4)

    # Like test_concat, split in two.
    def basic_test_repeat(self, size):
        l = [] * size
        self.assertFalse(l)
        l = [''] * size
        self.assertEqual(len(l), size)
        l = l * 2
        self.assertEqual(len(l), size * 2)

    @bigmemtest(minsize=_2G // 2 + 2, memuse=24)
    def test_repeat_small(self, size):
        return self.basic_test_repeat(size)

    @bigmemtest(minsize=_2G + 2, memuse=24)
    def test_repeat_large(self, size):
        return self.basic_test_repeat(size)

    def basic_test_inplace_repeat(self, size):
        # *= must repeat in place and keep repeated slots referring to the
        # same object.
        l = ['']
        l *= size
        self.assertEqual(len(l), size)
        self.assertTrue(l[0] is l[-1])
        del l

        l = [''] * size
        l *= 2
        self.assertEqual(len(l), size * 2)
        self.assertTrue(l[size - 1] is l[-1])

    @bigmemtest(minsize=_2G // 2 + 2, memuse=16)
    def test_inplace_repeat_small(self, size):
        return self.basic_test_inplace_repeat(size)

    @bigmemtest(minsize=_2G + 2, memuse=16)
    def test_inplace_repeat_large(self, size):
        return self.basic_test_inplace_repeat(size)

    def basic_test_repr(self, size):
        l = [0] * size
        s = repr(l)
        # The repr of a list of 0's is exactly three times the list length.
        self.assertEqual(len(s), size * 3)
        self.assertEqual(s[:5], '[0, 0')
        self.assertEqual(s[-5:], '0, 0]')
        self.assertEqual(s.count('0'), size)

    @bigmemtest(minsize=_2G // 3 + 2, memuse=8 + 3)
    def test_repr_small(self, size):
        return self.basic_test_repr(size)

    @bigmemtest(minsize=_2G + 2, memuse=8 + 3)
    def test_repr_large(self, size):
        return self.basic_test_repr(size)

    # list overallocates ~1/8th of the total size (on first expansion) so
    # the single list.append call puts memuse at 9 bytes per size.
    @bigmemtest(minsize=_2G, memuse=9)
    def test_append(self, size):
        l = [object()] * size
        l.append(object())
        self.assertEqual(len(l), size+1)
        self.assertTrue(l[-3] is l[-2])
        self.assertFalse(l[-2] is l[-1])

    @bigmemtest(minsize=_2G // 5 + 2, memuse=8 * 5)
    def test_count(self, size):
        # count() must not conflate equal-looking items of different types.
        l = [1, 2, 3, 4, 5] * size
        self.assertEqual(l.count(1), size)
        self.assertEqual(l.count("1"), 0)

    def basic_test_extend(self, size):
        l = [object] * size
        l.extend(l)
        self.assertEqual(len(l), size * 2)
        self.assertTrue(l[0] is l[-1])
        self.assertTrue(l[size - 1] is l[size + 1])

    @bigmemtest(minsize=_2G // 2 + 2, memuse=16)
    def test_extend_small(self, size):
        return self.basic_test_extend(size)

    @bigmemtest(minsize=_2G + 2, memuse=16)
    def test_extend_large(self, size):
        return self.basic_test_extend(size)

    @bigmemtest(minsize=_2G // 5 + 2, memuse=8 * 5)
    def test_index(self, size):
        # index() with start/stop bounds past the 2**31 boundary.
        l = [1, 2, 3, 4, 5] * size
        size *= 5
        self.assertEqual(l.index(1), 0)
        self.assertEqual(l.index(5, size - 5), size - 1)
        self.assertEqual(l.index(5, size - 5, size), size - 1)
        self.assertRaises(ValueError, l.index, 1, size - 4, size)
        self.assertRaises(ValueError, l.index, 6)

    # This tests suffers from overallocation, just like test_append.
    @bigmemtest(minsize=_2G + 10, memuse=9)
    def test_insert(self, size):
        l = [1.0] * size
        l.insert(size - 1, "A")
        size += 1
        self.assertEqual(len(l), size)
        self.assertEqual(l[-3:], [1.0, "A", 1.0])

        # Inserting past the end behaves like append.
        l.insert(size + 1, "B")
        size += 1
        self.assertEqual(len(l), size)
        self.assertEqual(l[-3:], ["A", 1.0, "B"])

        l.insert(1, "C")
        size += 1
        self.assertEqual(len(l), size)
        self.assertEqual(l[:3], [1.0, "C", 1.0])
        self.assertEqual(l[size - 3:], ["A", 1.0, "B"])

    @bigmemtest(minsize=_2G // 5 + 4, memuse=8 * 5)
    def test_pop(self, size):
        # pop() from the end, the front, and an interior index near the end.
        l = ["a", "b", "c", "d", "e"] * size
        size *= 5
        self.assertEqual(len(l), size)

        item = l.pop()
        size -= 1
        self.assertEqual(len(l), size)
        self.assertEqual(item, "e")
        self.assertEqual(l[-2:], ["c", "d"])

        item = l.pop(0)
        size -= 1
        self.assertEqual(len(l), size)
        self.assertEqual(item, "a")
        self.assertEqual(l[:2], ["b", "c"])

        item = l.pop(size - 2)
        size -= 1
        self.assertEqual(len(l), size)
        self.assertEqual(item, "c")
        self.assertEqual(l[-2:], ["b", "d"])

    @bigmemtest(minsize=_2G + 10, memuse=8)
    def test_remove(self, size):
        l = [10] * size
        self.assertEqual(len(l), size)

        l.remove(10)
        size -= 1
        self.assertEqual(len(l), size)

        # Because of the earlier l.remove(), this append doesn't trigger
        # a resize.
        l.append(5)
        size += 1
        self.assertEqual(len(l), size)
        self.assertEqual(l[-2:], [10, 5])
        l.remove(5)
        size -= 1
        self.assertEqual(len(l), size)
        self.assertEqual(l[-2:], [10, 10])

    @bigmemtest(minsize=_2G // 5 + 2, memuse=8 * 5)
    def test_reverse(self, size):
        l = [1, 2, 3, 4, 5] * size
        l.reverse()
        self.assertEqual(len(l), size * 5)
        self.assertEqual(l[-5:], [5, 4, 3, 2, 1])
        self.assertEqual(l[:5], [5, 4, 3, 2, 1])

    @bigmemtest(minsize=_2G // 5 + 2, memuse=8 * 5)
    def test_sort(self, size):
        l = [1, 2, 3, 4, 5] * size
        l.sort()
        self.assertEqual(len(l), size * 5)
        self.assertEqual(l.count(1), size)
        self.assertEqual(l[:10], [1] * 10)
        self.assertEqual(l[-10:], [5] * 10)
def test_main():
    """Run every bigmem container test case through regrtest's driver."""
    cases = (StrTest, BytesTest, BytearrayTest, TupleTest, ListTest)
    support.run_unittest(*cases)
if __name__ == '__main__':
    # An optional command-line argument sets the memory limit for the
    # bigmem tests (e.g. "4G"); presumably parsed by
    # support.set_memlimit() -- see test.support.
    if len(sys.argv) > 1:
        support.set_memlimit(sys.argv[1])
    test_main()
| gpl-3.0 |
1013553207/django | django/core/management/commands/diffsettings.py | 479 | 1565 | from django.core.management.base import BaseCommand
def module_to_dict(module, omittable=lambda k: k.startswith('_')):
    """Convert a module namespace to a dict mapping names to repr() strings.

    Entries whose name is matched by ``omittable`` (by default, names with
    a leading underscore) are skipped.
    """
    result = {}
    for name, value in module.__dict__.items():
        if not omittable(name):
            result[name] = repr(value)
    return result
class Command(BaseCommand):
    help = """Displays differences between the current settings.py and Django's
    default settings. Settings that don't appear in the defaults are
    followed by "###"."""

    requires_system_checks = False

    def add_arguments(self, parser):
        """Register the --all flag for showing unchanged settings too."""
        parser.add_argument('--all', action='store_true', dest='all', default=False,
            help='Display all settings, regardless of their value. '
            'Default values are prefixed by "###".')

    def handle(self, **options):
        # Inspired by Postfix's "postconf -n".
        from django.conf import settings, global_settings

        # Because settings are imported lazily, we need to explicitly load them.
        settings._setup()

        user_settings = module_to_dict(settings._wrapped)
        default_settings = module_to_dict(global_settings)

        lines = []
        for name in sorted(user_settings):
            current = user_settings[name]
            if name not in default_settings:
                # Setting does not exist in the defaults at all.
                lines.append("%s = %s ###" % (name, current))
            elif current != default_settings[name]:
                # Setting overrides the default value.
                lines.append("%s = %s" % (name, current))
            elif options['all']:
                # Unchanged default, shown only with --all.
                lines.append("### %s = %s" % (name, current))
        return '\n'.join(lines)
| bsd-3-clause |
dwhagar/snowboard | snowboard/connection.py | 1 | 6948 | # This file is part of snowboard.
#
# snowboard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# snowboard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with snowboard. If not, see <http://www.gnu.org/licenses/>.
'''
Connection object, designed to be the only object to directly interface with
the server.
See https://github.com/dwhagar/snowboard/wiki/Class-Docs for documentation.
'''
import time
import socket
import ssl
import sys
from . import debug
from . import server
class Connection:
    '''
    Manages a single, optionally TLS-encrypted, socket connection to the
    configured server.  All reads and writes go through this object.
    '''

    def __init__(self, srv):
        self.host = srv.host
        self.port = srv.port
        self.__socket = None
        self.__ssl = None
        self.__connected = False
        self.ssl = srv.ssl
        self.sslVerify = True
        self.retries = 3 # Numbers of times to retry a connection
        self.delay = 1   # Delay between connection attempts

    def connected(self):
        '''Returns the state of the connection.'''
        return self.__connected

    def connect(self):
        '''Connect to the configured server.

        Returns True if a connection was established, False otherwise.
        '''
        # Keep track of attempts.
        attempt = 0

        # Try until the connection succeeds or no more tries are left.
        while (not self.__connected) and (attempt < self.retries):
            # Attempt to establish a connection.
            debug.message("Attempting connection to " + self.host + ":" + str(self.port) + ".")
            try:
                # Bug fix: socket.setdefaulttimeout() returns None, so its
                # result must not be assigned to the socket attribute.
                socket.setdefaulttimeout(30)
                self.__socket = socket.create_connection((self.host, self.port))

                # Handle SSL
                if self.ssl:
                    self.__context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
                    # Explicitly disable the broken SSLv2/SSLv3 protocols.
                    self.__context.options |= ssl.OP_NO_SSLv2
                    self.__context.options |= ssl.OP_NO_SSLv3

                    if self.sslVerify:
                        self.__context.verify_mode = ssl.CERT_REQUIRED
                    else:
                        self.__context.verify_mode = ssl.CERT_NONE

                    self.__ssl = self.__context.wrap_socket(self.__socket)
                    self.__ssl.setblocking(False)
                # Handle not SSL
                else:
                    self.__socket.setblocking(False)

                self.__connected = True

            # Assume connection errors are no big deal but do display an error.
            except ConnectionAbortedError:
                debug.error("Connection to " + self.host + " aborted by server.")
            except ConnectionRefusedError:
                debug.error("Connection to " + self.host + " refused by server.")
            except TimeoutError:
                debug.error("Connection to " + self.host + " timed out.")
            except socket.gaierror:
                debug.error("Failed to resolve " + self.host + ".")
            except OSError as err:
                # Bug fix: err.errno is an int; concatenating it directly
                # raised TypeError and masked the real connection error.
                debug.error("Failed to connect '" + str(err.errno) + "' " + str(err.strerror) + ".")

            attempt += 1
            time.sleep(self.delay)

        return self.__connected

    def disconnect(self):
        '''Disconnect from the server, closing the underlying socket.'''
        debug.message("Disconnected from " + self.host + ":" + str(self.port) + ".")

        # Bug fix: this previously tested the ssl *module* (always truthy),
        # so plain (non-TLS) sockets were never actually closed.
        if self.ssl:
            if not self.__ssl is None:
                self.__ssl.close()
                self.__ssl = None
        else:
            if not self.__socket is None:
                self.__socket.close()
                self.__socket = None

        self.__connected = False

    def read(self):
        '''Read a line of data from the server, if any.

        Returns the line without its trailing CR/LF and leading ':', or
        None if no complete line is available.
        '''
        # Only do something if we're connected.
        if self.__connected:
            done = False
            received = ""

            while not done:
                try:
                    if self.ssl:
                        data = self.__ssl.recv(1)
                    else:
                        data = self.__socket.recv(1)
                except (ssl.SSLWantReadError, BlockingIOError):
                    # Non-blocking socket has no data ready; not an error.
                    received = None
                    break
                except OSError as err:
                    debug.error("Error #" + str(err.errno) + ": '" + str(err.strerror) + "' disconnecting.")
                    data = False

                # Process the data.
                # socket.recv is supposed to return a False if the connection
                # been broken.
                if not data:
                    self.disconnect()
                    done = True
                    received = None
                else:
                    text = data.decode('utf-8','replace')
                    if text == '\n':
                        done = True
                    else:
                        received += text
        else:
            received = None

        # Remove the trailing carriage return character (cr/lf pair)
        if not received is None:
            received = received.strip('\r')
            if len(received) > 0:
                if received[0] == ':':
                    received = received[1:]

            # Bug fix for Issue #18, do not return blank lines.
            if received == "":
                received = None

        return received

    def write(self, data):
        '''Sends data to the server.  Returns True once fully sent.'''
        # Encode the data for the server.
        data += '\n'
        data = data.encode('utf-8')

        # Prepare to keep track of what is being sent.
        dataSent = 0
        bufferSize = len(data)

        # Assume failure until the whole buffer has gone out.
        sent = False

        if self.__connected:
            # Loop to send the data.
            while dataSent < bufferSize:
                try:
                    if self.ssl:
                        sentNow = self.__ssl.send(data[dataSent:])
                    else:
                        sentNow = self.__socket.send(data[dataSent:])
                except OSError as err:
                    debug.error("Error #" + str(err.errno) + ": '" + str(err.strerror) + "' disconnecting.")
                    self.disconnect()
                    return False

                # If nothing gets sent, we are disconnected from the server.
                if sentNow == 0:
                    debug.error("Data could not be sent for an unknown reason, disconnecting.")
                    self.disconnect()
                    return False

                # Keep track of the data.
                dataSent += sentNow

        # If sending completed, set the flag to true.
        if dataSent == bufferSize:
            sent = True

        return sent
mcanthony/rethinkdb | test/rql_test/connections/http_support/flask/testsuite/signals.py | 554 | 4807 | # -*- coding: utf-8 -*-
"""
flask.testsuite.signals
~~~~~~~~~~~~~~~~~~~~~~~
Signalling.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import flask
import unittest
from flask.testsuite import FlaskTestCase
class SignalsTestCase(FlaskTestCase):
    """Exercises Flask's blinker-based signals: template_rendered,
    request_started/finished, got_request_exception, appcontext
    pushed/popped, and message_flashed.  Every test disconnects its
    receiver in a finally block so signal state never leaks between tests.
    """

    def test_template_rendered(self):
        app = flask.Flask(__name__)

        @app.route('/')
        def index():
            return flask.render_template('simple_template.html', whiskey=42)

        recorded = []
        def record(sender, template, context):
            recorded.append((template, context))

        flask.template_rendered.connect(record, app)
        try:
            app.test_client().get('/')
            self.assert_equal(len(recorded), 1)
            template, context = recorded[0]
            self.assert_equal(template.name, 'simple_template.html')
            self.assert_equal(context['whiskey'], 42)
        finally:
            flask.template_rendered.disconnect(record, app)

    def test_request_signals(self):
        # Signals must fire around the before/after request handlers.
        app = flask.Flask(__name__)
        calls = []

        def before_request_signal(sender):
            calls.append('before-signal')

        def after_request_signal(sender, response):
            self.assert_equal(response.data, b'stuff')
            calls.append('after-signal')

        @app.before_request
        def before_request_handler():
            calls.append('before-handler')

        @app.after_request
        def after_request_handler(response):
            calls.append('after-handler')
            response.data = 'stuff'
            return response

        @app.route('/')
        def index():
            calls.append('handler')
            return 'ignored anyway'

        flask.request_started.connect(before_request_signal, app)
        flask.request_finished.connect(after_request_signal, app)

        try:
            rv = app.test_client().get('/')
            self.assert_equal(rv.data, b'stuff')

            self.assert_equal(calls, ['before-signal', 'before-handler',
                                      'handler', 'after-handler',
                                      'after-signal'])
        finally:
            flask.request_started.disconnect(before_request_signal, app)
            flask.request_finished.disconnect(after_request_signal, app)

    def test_request_exception_signal(self):
        app = flask.Flask(__name__)
        recorded = []

        @app.route('/')
        def index():
            1 // 0

        def record(sender, exception):
            recorded.append(exception)

        flask.got_request_exception.connect(record, app)
        try:
            self.assert_equal(app.test_client().get('/').status_code, 500)
            self.assert_equal(len(recorded), 1)
            self.assert_true(isinstance(recorded[0], ZeroDivisionError))
        finally:
            flask.got_request_exception.disconnect(record, app)

    def test_appcontext_signals(self):
        app = flask.Flask(__name__)
        recorded = []
        def record_push(sender, **kwargs):
            recorded.append('push')
        def record_pop(sender, **kwargs):
            # Bug fix: this previously appended 'push', which made the
            # ['push', 'pop'] assertion below unsatisfiable.
            recorded.append('pop')

        @app.route('/')
        def index():
            return 'Hello'

        flask.appcontext_pushed.connect(record_push, app)
        flask.appcontext_popped.connect(record_pop, app)
        try:
            with app.test_client() as c:
                rv = c.get('/')
                self.assert_equal(rv.data, b'Hello')
                # The context is still pushed while the client is open.
                self.assert_equal(recorded, ['push'])
            self.assert_equal(recorded, ['push', 'pop'])
        finally:
            flask.appcontext_pushed.disconnect(record_push, app)
            flask.appcontext_popped.disconnect(record_pop, app)

    def test_flash_signal(self):
        app = flask.Flask(__name__)
        app.config['SECRET_KEY'] = 'secret'

        @app.route('/')
        def index():
            flask.flash('This is a flash message', category='notice')
            return flask.redirect('/other')

        recorded = []
        def record(sender, message, category):
            recorded.append((message, category))

        flask.message_flashed.connect(record, app)
        try:
            client = app.test_client()
            with client.session_transaction():
                client.get('/')
                self.assert_equal(len(recorded), 1)
                message, category = recorded[0]
                self.assert_equal(message, 'This is a flash message')
                self.assert_equal(category, 'notice')
        finally:
            flask.message_flashed.disconnect(record, app)
def suite():
    """Build the test suite; empty when blinker is not installed
    (flask.signals_available is False)."""
    suite = unittest.TestSuite()
    if flask.signals_available:
        suite.addTest(unittest.makeSuite(SignalsTestCase))
    return suite
| agpl-3.0 |
naousse/odoo | addons/calendar/contacts.py | 389 | 1414 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2011 OpenERP S.A. <http://openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class calendar_contacts(osv.osv):
    # Link table recording which partners a user follows in the calendar.
    _name = 'calendar.contacts'

    _columns = {
        # Owner of the subscription.
        'user_id': fields.many2one('res.users','Me'),
        # Partner whose events appear in the owner's calendar.
        'partner_id': fields.many2one('res.partner','Employee',required=True, domain=[]),
        # Standard active flag; inactive records are hidden from views.
        'active':fields.boolean('active'),
    }
    _defaults = {
        # Default the owner to the user creating the record.
        'user_id': lambda self, cr, uid, ctx: uid,
        'active' : True,
    }
dmccue/ansible | lib/ansible/plugins/callback/context_demo.py | 144 | 1447 | # (C) 2012, Michael DeHaan, <michael.dehaan@gmail.com>
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from ansible.plugins.callback import CallbackBase
class CallbackModule(CallbackBase):

    """
    This is a very trivial example of how any callback function can get at play and task objects.
    play will be 'None' for runner invocations, and task will be None for 'setup' invocations.
    """

    CALLBACK_VERSION = 2.0
    CALLBACK_TYPE = 'aggregate'
    # Bug fix: the plugin name was previously assigned to CALLBACK_TYPE a
    # second time, clobbering the 'aggregate' type and leaving the plugin
    # without a name.
    CALLBACK_NAME = 'context_demo'

    def v2_on_any(self, *args, **kwargs):
        """Display the positional and keyword arguments of every
        callback event, as a demonstration of what plugins receive."""
        i = 0
        self._display.display(" --- ARGS ")
        for a in args:
            self._display.display(' %s: %s' % (i, a))
            i += 1

        self._display.display(" --- KWARGS ")
        for k in kwargs:
            self._display.display(' %s: %s' % (k, kwargs[k]))
| gpl-3.0 |
Nazninn/edgessh | paramiko/packet.py | 18 | 17271 | # Copyright (C) 2003-2007 Robey Pointer <robeypointer@gmail.com>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distrubuted in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
"""
Packetizer.
"""
import errno
import select
import socket
import struct
import threading
import time
from paramiko.common import *
from paramiko import util
from paramiko.ssh_exception import SSHException
from paramiko.message import Message
# Prefer the optional "r_hmac" module when installed (presumably a faster
# native implementation -- TODO confirm); compute_hmac() falls back to
# PyCrypto's HMAC otherwise.
got_r_hmac = False
try:
    import r_hmac
    got_r_hmac = True
except ImportError:
    pass
def compute_hmac(key, message, digest_class):
    """Return the HMAC digest of ``message`` keyed with ``key``.

    Uses the optional ``r_hmac`` module when available, otherwise
    PyCrypto's ``HMAC`` implementation, with ``digest_class`` as the
    underlying hash.
    """
    if got_r_hmac:
        return r_hmac.HMAC(key, message, digest_class).digest()
    from Crypto.Hash import HMAC
    return HMAC.HMAC(key, message, digest_class).digest()
class NeedRekeyException (Exception):
    """Raised during a blocking read when a key renegotiation is pending
    (see Packetizer.read_all with check_rekey=True)."""
    pass
class Packetizer (object):
"""
Implementation of the base SSH packet protocol.
"""
# READ the secsh RFC's before raising these values. if anything,
# they should probably be lower.
REKEY_PACKETS = pow(2, 30)
REKEY_BYTES = pow(2, 30)
    def __init__(self, socket):
        """Wrap ``socket`` for SSH packet-level I/O.

        Note: the ``socket`` parameter shadows the stdlib ``socket``
        module inside this method only.
        """
        self.__socket = socket
        self.__logger = None
        self.__closed = False
        self.__dump_packets = False
        self.__need_rekey = False
        self.__init_count = 0   # bitmask: 1 = outbound cipher reset, 2 = inbound
        self.__remainder = ''   # bytes over-read past the banner line

        # used for noticing when to re-key:
        self.__sent_bytes = 0
        self.__sent_packets = 0
        self.__received_bytes = 0
        self.__received_packets = 0
        self.__received_packets_overflow = 0

        # current inbound/outbound ciphering:
        self.__block_size_out = 8
        self.__block_size_in = 8
        self.__mac_size_out = 0
        self.__mac_size_in = 0
        self.__block_engine_out = None
        self.__block_engine_in = None
        self.__mac_engine_out = None
        self.__mac_engine_in = None
        self.__mac_key_out = ''
        self.__mac_key_in = ''
        self.__compress_engine_out = None
        self.__compress_engine_in = None
        self.__sequence_number_out = 0L
        self.__sequence_number_in = 0L

        # lock around outbound writes (packet computation)
        self.__write_lock = threading.RLock()

        # keepalives:
        self.__keepalive_interval = 0
        self.__keepalive_last = time.time()
        self.__keepalive_callback = None
    def set_log(self, log):
        """
        Set the python log object to use for logging (stored for use by
        the internal _log helper).
        """
        self.__logger = log
    def set_outbound_cipher(self, block_engine, block_size, mac_engine, mac_size, mac_key):
        """
        Switch outbound data cipher.  Resets the outbound byte/packet
        counters used for rekey accounting.
        """
        self.__block_engine_out = block_engine
        self.__block_size_out = block_size
        self.__mac_engine_out = mac_engine
        self.__mac_size_out = mac_size
        self.__mac_key_out = mac_key
        self.__sent_bytes = 0
        self.__sent_packets = 0
        # wait until the reset happens in both directions before clearing rekey flag
        self.__init_count |= 1
        if self.__init_count == 3:
            self.__init_count = 0
            self.__need_rekey = False
    def set_inbound_cipher(self, block_engine, block_size, mac_engine, mac_size, mac_key):
        """
        Switch inbound data cipher.  Resets the inbound byte/packet
        counters used for rekey accounting.
        """
        self.__block_engine_in = block_engine
        self.__block_size_in = block_size
        self.__mac_engine_in = mac_engine
        self.__mac_size_in = mac_size
        self.__mac_key_in = mac_key
        self.__received_bytes = 0
        self.__received_packets = 0
        self.__received_packets_overflow = 0
        # wait until the reset happens in both directions before clearing rekey flag
        self.__init_count |= 2
        if self.__init_count == 3:
            self.__init_count = 0
            self.__need_rekey = False
    def set_outbound_compressor(self, compressor):
        """Install the callable applied to outbound payloads before encryption."""
        self.__compress_engine_out = compressor
    def set_inbound_compressor(self, compressor):
        """Install the callable applied to inbound payloads after decryption."""
        self.__compress_engine_in = compressor
    def close(self):
        """Mark the packetizer closed and close the underlying socket."""
        self.__closed = True
        self.__socket.close()
    def set_hexdump(self, hexdump):
        """Turn hex-dump debug logging of raw packets on or off."""
        self.__dump_packets = hexdump
    def get_hexdump(self):
        """Return True if hex-dump debug logging of raw packets is enabled."""
        return self.__dump_packets
    def get_mac_size_in(self):
        """Return the inbound MAC length in bytes (0 until a cipher is set)."""
        return self.__mac_size_in
    def get_mac_size_out(self):
        """Return the outbound MAC length in bytes (0 until a cipher is set)."""
        return self.__mac_size_out
    def need_rekey(self):
        """
        Returns C{True} if a new set of keys needs to be negotiated.  This
        will be triggered during a packet read or write (when the
        REKEY_PACKETS / REKEY_BYTES thresholds are crossed), so it should
        be checked after every read or write, or at least after every few.

        @return: C{True} if a new set of keys needs to be negotiated
        """
        return self.__need_rekey
    def set_keepalive(self, interval, callback):
        """
        Turn on/off the callback keepalive.  If C{interval} seconds pass
        with no data read from or written to the socket, the callback will
        be executed and the timer will be reset.  An interval of 0
        disables the keepalive (see _check_keepalive).
        """
        self.__keepalive_interval = interval
        self.__keepalive_callback = callback
        self.__keepalive_last = time.time()
    def read_all(self, n, check_rekey=False):
        """
        Read as close to N bytes as possible, blocking as long as necessary.

        @param n: number of bytes to read
        @type n: int
        @return: the data read
        @rtype: str
        @raise EOFError: if the socket was closed before all the bytes could
            be read
        @raise NeedRekeyException: if ``check_rekey`` is set, nothing has
            been read yet, and a rekey is pending
        """
        out = ''
        # handle over-reading from reading the banner line
        if len(self.__remainder) > 0:
            out = self.__remainder[:n]
            self.__remainder = self.__remainder[n:]
            n -= len(out)
        if PY22:
            # Python 2.2 lacks reliable non-blocking recv semantics;
            # delegate to a compatibility implementation.
            return self._py22_read_all(n, out)
        while n > 0:
            got_timeout = False
            try:
                x = self.__socket.recv(n)
                if len(x) == 0:
                    # recv returning empty means the peer closed the socket.
                    raise EOFError()
                out += x
                n -= len(x)
            except socket.timeout:
                got_timeout = True
            except socket.error, e:
                # on Linux, sometimes instead of socket.timeout, we get
                # EAGAIN. this is a bug in recent (> 2.6.9) kernels but
                # we need to work around it.
                if (type(e.args) is tuple) and (len(e.args) > 0) and (e.args[0] == errno.EAGAIN):
                    got_timeout = True
                elif (type(e.args) is tuple) and (len(e.args) > 0) and (e.args[0] == errno.EINTR):
                    # syscall interrupted; try again
                    pass
                elif self.__closed:
                    raise EOFError()
                else:
                    raise
            if got_timeout:
                if self.__closed:
                    raise EOFError()
                if check_rekey and (len(out) == 0) and self.__need_rekey:
                    raise NeedRekeyException()
                # A quiet interval elapsed; possibly fire the keepalive.
                self._check_keepalive()
        return out
    def write_all(self, out):
        """Write the entire string ``out`` to the socket, retrying on
        timeouts, and raise EOFError if the connection is lost."""
        self.__keepalive_last = time.time()
        while len(out) > 0:
            got_timeout = False
            try:
                n = self.__socket.send(out)
            except socket.timeout:
                got_timeout = True
            except socket.error, e:
                if (type(e.args) is tuple) and (len(e.args) > 0) and (e.args[0] == errno.EAGAIN):
                    got_timeout = True
                elif (type(e.args) is tuple) and (len(e.args) > 0) and (e.args[0] == errno.EINTR):
                    # syscall interrupted; try again
                    # NOTE(review): this branch leaves ``n`` unset (or stale)
                    # before the ``n < 0`` test below -- verify upstream.
                    pass
                else:
                    n = -1
            except Exception:
                # could be: (32, 'Broken pipe')
                n = -1
            if got_timeout:
                n = 0
                if self.__closed:
                    n = -1
            if n < 0:
                raise EOFError()
            if n == len(out):
                break
            out = out[n:]
        return
    def readline(self, timeout):
        """
        Read a line from the socket.  We assume no data is pending after the
        line, so it's okay to attempt large reads.  Any bytes read past the
        newline are kept in ``__remainder`` for the next read; a trailing
        carriage return is stripped.
        """
        buf = self.__remainder
        while not '\n' in buf:
            buf += self._read_timeout(timeout)
        n = buf.index('\n')
        self.__remainder = buf[n+1:]
        buf = buf[:n]
        if (len(buf) > 0) and (buf[-1] == '\r'):
            buf = buf[:-1]
        return buf
    def send_message(self, data):
        """
        Write a block of data using the current cipher, as an SSH block.
        Compresses (if configured), frames, encrypts, appends the MAC,
        and updates the outbound sequence number and rekey counters.
        """
        # encrypt this sucka
        data = str(data)
        cmd = ord(data[0])
        if cmd in MSG_NAMES:
            cmd_name = MSG_NAMES[cmd]
        else:
            cmd_name = '$%x' % cmd
        orig_len = len(data)
        self.__write_lock.acquire()
        try:
            if self.__compress_engine_out is not None:
                data = self.__compress_engine_out(data)
            packet = self._build_packet(data)
            if self.__dump_packets:
                self._log(DEBUG, 'Write packet <%s>, length %d' % (cmd_name, orig_len))
                self._log(DEBUG, util.format_binary(packet, 'OUT: '))
            if self.__block_engine_out != None:
                out = self.__block_engine_out.encrypt(packet)
            else:
                out = packet
            # + mac
            if self.__block_engine_out != None:
                # The MAC covers the sequence number plus the *plaintext* packet.
                payload = struct.pack('>I', self.__sequence_number_out) + packet
                out += compute_hmac(self.__mac_key_out, payload, self.__mac_engine_out)[:self.__mac_size_out]
            self.__sequence_number_out = (self.__sequence_number_out + 1) & 0xffffffffL
            self.write_all(out)

            self.__sent_bytes += len(out)
            self.__sent_packets += 1
            if ((self.__sent_packets >= self.REKEY_PACKETS) or (self.__sent_bytes >= self.REKEY_BYTES)) \
                   and not self.__need_rekey:
                # only ask once for rekeying
                self._log(DEBUG, 'Rekeying (hit %d packets, %d bytes sent)' %
                          (self.__sent_packets, self.__sent_bytes))
                self.__received_packets_overflow = 0
                self._trigger_rekey()
        finally:
            self.__write_lock.release()
def read_message(self):
    """
    Read, decrypt, and verify the next full packet from the socket, and
    return a ``(msg_type, Message)`` tuple.

    Only one thread should ever be in this function (no other locking is
    done).
    @raise SSHException: if the packet is mangled
    @raise NeedRekeyException: if the transport should rekey
    """
    # read one cipher block first: it contains the packet length field
    header = self.read_all(self.__block_size_in, check_rekey=True)
    if self.__block_engine_in != None:
        header = self.__block_engine_in.decrypt(header)
    if self.__dump_packets:
        self._log(DEBUG, util.format_binary(header, 'IN: '));
    packet_size = struct.unpack('>I', header[:4])[0]
    # leftover contains decrypted bytes from the first block (after the length field)
    leftover = header[4:]
    if (packet_size - len(leftover)) % self.__block_size_in != 0:
        raise SSHException('Invalid packet blocking')
    # the rest of the packet plus the MAC trailer arrives in one read
    buf = self.read_all(packet_size + self.__mac_size_in - len(leftover))
    packet = buf[:packet_size - len(leftover)]
    post_packet = buf[packet_size - len(leftover):]
    if self.__block_engine_in != None:
        packet = self.__block_engine_in.decrypt(packet)
    if self.__dump_packets:
        self._log(DEBUG, util.format_binary(packet, 'IN: '));
    packet = leftover + packet
    if self.__mac_size_in > 0:
        # verify the MAC over (sequence number + length + packet body)
        mac = post_packet[:self.__mac_size_in]
        mac_payload = struct.pack('>II', self.__sequence_number_in, packet_size) + packet
        my_mac = compute_hmac(self.__mac_key_in, mac_payload, self.__mac_engine_in)[:self.__mac_size_in]
        if my_mac != mac:
            raise SSHException('Mismatched MAC')
    # first payload byte is the pad length; padding sits at the packet tail
    padding = ord(packet[0])
    payload = packet[1:packet_size - padding]
    if self.__dump_packets:
        self._log(DEBUG, 'Got payload (%d bytes, %d padding)' % (packet_size, padding))
    if self.__compress_engine_in is not None:
        payload = self.__compress_engine_in(payload)
    msg = Message(payload[1:])
    msg.seqno = self.__sequence_number_in
    # inbound sequence number wraps around at 2**32
    self.__sequence_number_in = (self.__sequence_number_in + 1) & 0xffffffffL
    # check for rekey
    self.__received_bytes += packet_size + self.__mac_size_in + 4
    self.__received_packets += 1
    if self.__need_rekey:
        # we've asked to rekey -- give them 20 packets to comply before
        # dropping the connection
        self.__received_packets_overflow += 1
        if self.__received_packets_overflow >= 20:
            raise SSHException('Remote transport is ignoring rekey requests')
    elif (self.__received_packets >= self.REKEY_PACKETS) or \
            (self.__received_bytes >= self.REKEY_BYTES):
        # only ask once for rekeying
        self._log(DEBUG, 'Rekeying (hit %d packets, %d bytes received)' %
                  (self.__received_packets, self.__received_bytes))
        self.__received_packets_overflow = 0
        self._trigger_rekey()
    cmd = ord(payload[0])
    if cmd in MSG_NAMES:
        cmd_name = MSG_NAMES[cmd]
    else:
        cmd_name = '$%x' % cmd
    if self.__dump_packets:
        self._log(DEBUG, 'Read packet <%s>, length %d' % (cmd_name, len(payload)))
    return cmd, msg
########## protected
def _log(self, level, msg):
if self.__logger is None:
return
if issubclass(type(msg), list):
for m in msg:
self.__logger.log(level, m)
else:
self.__logger.log(level, msg)
def _check_keepalive(self):
if (not self.__keepalive_interval) or (not self.__block_engine_out) or \
self.__need_rekey:
# wait till we're encrypting, and not in the middle of rekeying
return
now = time.time()
if now > self.__keepalive_last + self.__keepalive_interval:
self.__keepalive_callback()
self.__keepalive_last = now
def _py22_read_all(self, n, out):
    """
    Python 2.2 fallback: append exactly n more bytes from the socket to out.

    Polls with select() so keepalives keep firing while waiting, and a
    closed transport raises EOFError instead of blocking forever.
    """
    remaining = n
    while remaining > 0:
        readable, _w, _e = select.select([self.__socket], [], [], 0.1)
        if self.__socket not in readable:
            if self.__closed:
                raise EOFError()
            self._check_keepalive()
            continue
        chunk = self.__socket.recv(remaining)
        if len(chunk) == 0:
            raise EOFError()
        out += chunk
        remaining -= len(chunk)
    return out
def _py22_read_timeout(self, timeout):
    """
    Python 2.2 fallback: read a single byte from the socket, raising
    socket.timeout once ``timeout`` seconds have elapsed with no data.
    """
    deadline = time.time() + timeout
    while True:
        readable, _w, _e = select.select([self.__socket], [], [], 0.1)
        if self.__socket in readable:
            byte = self.__socket.recv(1)
            if len(byte) == 0:
                raise EOFError()
            return byte
        if self.__closed:
            raise EOFError()
        if time.time() >= deadline:
            raise socket.timeout()
def _read_timeout(self, timeout):
    """
    Read up to 128 bytes from the socket, raising socket.timeout once
    ``timeout`` seconds have elapsed with no data, and EOFError when the
    peer disconnects or the transport is closed.
    """
    if PY22:
        return self._py22_read_timeout(timeout)
    deadline = time.time() + timeout
    while True:
        try:
            chunk = self.__socket.recv(128)
            if len(chunk) == 0:
                raise EOFError()
            return chunk
        except socket.timeout:
            # socket-level timeout: fall through to our own deadline check
            pass
        if self.__closed:
            raise EOFError()
        if time.time() >= deadline:
            raise socket.timeout()
def _build_packet(self, payload):
    """Frame payload as an SSH binary packet: length, pad count, payload, padding."""
    # pad up at least 4 bytes, to nearest block-size (usually 8)
    bsize = self.__block_size_out
    # (len(payload) + 8) accounts for the 4-byte length field, the 1-byte
    # padding count, and the minimum 3 extra pad bytes; the resulting
    # padding is always in the range [4, bsize + 3].
    padding = 3 + bsize - ((len(payload) + 8) % bsize)
    packet = struct.pack('>IB', len(payload) + padding + 1, padding)
    packet += payload
    if self.__block_engine_out is not None:
        # random padding when encrypting, so the pad bytes leak nothing
        packet += rng.read(padding)
    else:
        # cute trick i caught openssh doing: if we're not encrypting,
        # don't waste random bytes for the padding
        packet += (chr(0) * padding)
    return packet
def _trigger_rekey(self):
# outside code should check for this flag
self.__need_rekey = True
| gpl-2.0 |
mancoast/CPythonPyc_test | fail/271_test_dictviews.py | 82 | 6513 | import unittest
from test import test_support
class DictSetTest(unittest.TestCase):
    """Tests for the Python 2.7 dict view objects (viewkeys/viewvalues/viewitems)."""

    def test_constructors_not_callable(self):
        # View types exist but must not be directly instantiable.
        kt = type({}.viewkeys())
        self.assertRaises(TypeError, kt, {})
        self.assertRaises(TypeError, kt)
        it = type({}.viewitems())
        self.assertRaises(TypeError, it, {})
        self.assertRaises(TypeError, it)
        vt = type({}.viewvalues())
        self.assertRaises(TypeError, vt, {})
        self.assertRaises(TypeError, vt)

    def test_dict_keys(self):
        # Key views behave like sets: equality, membership, live updates.
        d = {1: 10, "a": "ABC"}
        keys = d.viewkeys()
        self.assertEqual(len(keys), 2)
        self.assertEqual(set(keys), set([1, "a"]))
        self.assertEqual(keys, set([1, "a"]))
        self.assertNotEqual(keys, set([1, "a", "b"]))
        self.assertNotEqual(keys, set([1, "b"]))
        self.assertNotEqual(keys, set([1]))
        self.assertNotEqual(keys, 42)
        self.assertIn(1, keys)
        self.assertIn("a", keys)
        self.assertNotIn(10, keys)
        self.assertNotIn("Z", keys)
        self.assertEqual(d.viewkeys(), d.viewkeys())
        e = {1: 11, "a": "def"}
        self.assertEqual(d.viewkeys(), e.viewkeys())
        del e["a"]
        # views are live: mutating e changes the comparison result
        self.assertNotEqual(d.viewkeys(), e.viewkeys())

    def test_dict_items(self):
        # Item views compare as sets of (key, value) pairs.
        d = {1: 10, "a": "ABC"}
        items = d.viewitems()
        self.assertEqual(len(items), 2)
        self.assertEqual(set(items), set([(1, 10), ("a", "ABC")]))
        self.assertEqual(items, set([(1, 10), ("a", "ABC")]))
        self.assertNotEqual(items, set([(1, 10), ("a", "ABC"), "junk"]))
        self.assertNotEqual(items, set([(1, 10), ("a", "def")]))
        self.assertNotEqual(items, set([(1, 10)]))
        self.assertNotEqual(items, 42)
        self.assertIn((1, 10), items)
        self.assertIn(("a", "ABC"), items)
        self.assertNotIn((1, 11), items)
        self.assertNotIn(1, items)
        self.assertNotIn((), items)
        self.assertNotIn((1,), items)
        self.assertNotIn((1, 2, 3), items)
        self.assertEqual(d.viewitems(), d.viewitems())
        e = d.copy()
        self.assertEqual(d.viewitems(), e.viewitems())
        e["a"] = "def"
        self.assertNotEqual(d.viewitems(), e.viewitems())

    def test_dict_mixed_keys_items(self):
        # A key view of tuple keys equals an item view of matching pairs,
        # but not the other way around (items are pairs; keys here aren't).
        d = {(1, 1): 11, (2, 2): 22}
        e = {1: 1, 2: 2}
        self.assertEqual(d.viewkeys(), e.viewitems())
        self.assertNotEqual(d.viewitems(), e.viewkeys())

    def test_dict_values(self):
        d = {1: 10, "a": "ABC"}
        values = d.viewvalues()
        self.assertEqual(set(values), set([10, "ABC"]))
        self.assertEqual(len(values), 2)

    def test_dict_repr(self):
        # dict ordering is unspecified, so both orders are acceptable.
        d = {1: 10, "a": "ABC"}
        self.assertIsInstance(repr(d), str)
        r = repr(d.viewitems())
        self.assertIsInstance(r, str)
        self.assertTrue(r == "dict_items([('a', 'ABC'), (1, 10)])" or
                        r == "dict_items([(1, 10), ('a', 'ABC')])")
        r = repr(d.viewkeys())
        self.assertIsInstance(r, str)
        self.assertTrue(r == "dict_keys(['a', 1])" or
                        r == "dict_keys([1, 'a'])")
        r = repr(d.viewvalues())
        self.assertIsInstance(r, str)
        self.assertTrue(r == "dict_values(['ABC', 10])" or
                        r == "dict_values([10, 'ABC'])")

    def test_keys_set_operations(self):
        # Key views support &, |, ^ against views and against real sets.
        d1 = {'a': 1, 'b': 2}
        d2 = {'b': 3, 'c': 2}
        d3 = {'d': 4, 'e': 5}
        self.assertEqual(d1.viewkeys() & d1.viewkeys(), {'a', 'b'})
        self.assertEqual(d1.viewkeys() & d2.viewkeys(), {'b'})
        self.assertEqual(d1.viewkeys() & d3.viewkeys(), set())
        self.assertEqual(d1.viewkeys() & set(d1.viewkeys()), {'a', 'b'})
        self.assertEqual(d1.viewkeys() & set(d2.viewkeys()), {'b'})
        self.assertEqual(d1.viewkeys() & set(d3.viewkeys()), set())

        self.assertEqual(d1.viewkeys() | d1.viewkeys(), {'a', 'b'})
        self.assertEqual(d1.viewkeys() | d2.viewkeys(), {'a', 'b', 'c'})
        self.assertEqual(d1.viewkeys() | d3.viewkeys(), {'a', 'b', 'd', 'e'})
        self.assertEqual(d1.viewkeys() | set(d1.viewkeys()), {'a', 'b'})
        self.assertEqual(d1.viewkeys() | set(d2.viewkeys()), {'a', 'b', 'c'})
        self.assertEqual(d1.viewkeys() | set(d3.viewkeys()),
                         {'a', 'b', 'd', 'e'})

        self.assertEqual(d1.viewkeys() ^ d1.viewkeys(), set())
        self.assertEqual(d1.viewkeys() ^ d2.viewkeys(), {'a', 'c'})
        self.assertEqual(d1.viewkeys() ^ d3.viewkeys(), {'a', 'b', 'd', 'e'})
        self.assertEqual(d1.viewkeys() ^ set(d1.viewkeys()), set())
        self.assertEqual(d1.viewkeys() ^ set(d2.viewkeys()), {'a', 'c'})
        self.assertEqual(d1.viewkeys() ^ set(d3.viewkeys()),
                         {'a', 'b', 'd', 'e'})

    def test_items_set_operations(self):
        # Item views support the same set algebra, element = (key, value).
        d1 = {'a': 1, 'b': 2}
        d2 = {'a': 2, 'b': 2}
        d3 = {'d': 4, 'e': 5}
        self.assertEqual(
            d1.viewitems() & d1.viewitems(), {('a', 1), ('b', 2)})
        self.assertEqual(d1.viewitems() & d2.viewitems(), {('b', 2)})
        self.assertEqual(d1.viewitems() & d3.viewitems(), set())
        self.assertEqual(d1.viewitems() & set(d1.viewitems()),
                         {('a', 1), ('b', 2)})
        self.assertEqual(d1.viewitems() & set(d2.viewitems()), {('b', 2)})
        self.assertEqual(d1.viewitems() & set(d3.viewitems()), set())

        self.assertEqual(d1.viewitems() | d1.viewitems(),
                         {('a', 1), ('b', 2)})
        self.assertEqual(d1.viewitems() | d2.viewitems(),
                         {('a', 1), ('a', 2), ('b', 2)})
        self.assertEqual(d1.viewitems() | d3.viewitems(),
                         {('a', 1), ('b', 2), ('d', 4), ('e', 5)})
        self.assertEqual(d1.viewitems() | set(d1.viewitems()),
                         {('a', 1), ('b', 2)})
        self.assertEqual(d1.viewitems() | set(d2.viewitems()),
                         {('a', 1), ('a', 2), ('b', 2)})
        self.assertEqual(d1.viewitems() | set(d3.viewitems()),
                         {('a', 1), ('b', 2), ('d', 4), ('e', 5)})

        self.assertEqual(d1.viewitems() ^ d1.viewitems(), set())
        self.assertEqual(d1.viewitems() ^ d2.viewitems(),
                         {('a', 1), ('a', 2)})
        self.assertEqual(d1.viewitems() ^ d3.viewitems(),
                         {('a', 1), ('b', 2), ('d', 4), ('e', 5)})
def test_main():
    # Standard CPython regression-test entry point.
    test_support.run_unittest(DictSetTest)

if __name__ == "__main__":
    test_main()
| gpl-3.0 |
StrellaGroup/erpnext | erpnext/accounts/report/ordered_items_to_be_billed/ordered_items_to_be_billed.py | 7 | 1036 | # Copyright (c) 2013, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from erpnext.accounts.report.non_billed_report import get_ordered_to_be_billed_data
def execute(filters=None):
    """Report entry point: return (columns, rows) of ordered-but-unbilled items.

    ``filters`` is accepted for compatibility with frappe's report API but
    is not used by this report.
    """
    report_columns = get_column()
    report_rows = get_ordered_to_be_billed_data(get_args())
    return report_columns, report_rows
def get_column():
    """Column definitions for the report, in frappe's "Label:Type/Options:Width" format."""
    return [
        _("Sales Order") + ":Link/Sales Order:120", _("Status") + "::120", _("Date") + ":Date:100",
        # Fixed label: this column links to the Customer doctype and sits next
        # to "Customer Name"; it was previously mislabelled (and misspelled)
        # as "Suplier".
        _("Customer") + ":Link/Customer:120", _("Customer Name") + "::120",
        _("Project") + ":Link/Project:120", _("Item Code") + ":Link/Item:120",
        _("Amount") + ":Currency:100", _("Billed Amount") + ":Currency:100", _("Pending Amount") + ":Currency:100",
        _("Item Name") + "::120", _("Description") + "::120", _("Company") + ":Link/Company:120",
    ]
def get_args():
    # Fixed query arguments passed to the shared non-billed report helper:
    # Sales Orders, keyed on the customer party, sorted by transaction date
    # ascending.
    return {'doctype': 'Sales Order', 'party': 'customer',
        'date': 'transaction_date', 'order': 'transaction_date', 'order_by': 'asc'} | gpl-3.0 |
abdoosh00/edraak | common/lib/capa/capa/tests/test_customrender.py | 57 | 2279 | from lxml import etree
import unittest
import xml.sax.saxutils as saxutils
from . import test_capa_system
from capa import customrender
# just a handy shortcut: maps a tag name (e.g. 'solution', 'math') to the
# renderer class registered for it
lookup_tag = customrender.registry.get_class_for_tag
def extract_context(xml):
    """
    Given an xml element corresponding to the output of test_capa_system.render_template, get back the
    original context
    """
    # The mock renderer emits repr(context) as the element's text.
    # ast.literal_eval only evaluates Python literal structures, unlike
    # eval(), which would execute arbitrary expressions embedded in the
    # rendered text.
    import ast
    return ast.literal_eval(xml.text)
def quote_attr(s):
return saxutils.quoteattr(s)[1:-1] # don't want the outer quotes
class HelperTest(unittest.TestCase):
    '''
    Make sure that our helper function works!
    '''
    def check(self, d):
        # Render d through the mock template system, then verify that
        # extract_context() recovers the original object from the XML text.
        xml = etree.XML(test_capa_system().render_template('blah', d))
        self.assertEqual(d, extract_context(xml))

    def test_extract_context(self):
        self.check({})
        # NOTE(review): the three calls below pass *sets*, although a template
        # "context" is normally a dict (e.g. {1: 2}, {'id': 'an id'}).  The
        # repr round-trip works either way, but confirm sets are intentional.
        self.check({1, 2})
        self.check({'id', 'an id'})
        self.check({'with"quote', 'also"quote'})
class SolutionRenderTest(unittest.TestCase):
    """Checks that <solution> elements render through the solution renderer."""

    def test_rendering(self):
        solution = 'To compute unicorns, count them.'
        xml_str = """<solution id="solution_12">{s}</solution>""".format(s=solution)
        element = etree.fromstring(xml_str)

        renderer = lookup_tag('solution')(test_capa_system(), element)
        self.assertEqual(renderer.id, 'solution_12')

        # Our test_capa_system "renders" templates to a div with the repr of the context,
        # so round-tripping through extract_context recovers it.
        rendered = renderer.get_html()
        self.assertEqual(extract_context(rendered), {'id': 'solution_12'})
class MathRenderTest(unittest.TestCase):
    """Checks LaTeX-to-MathJax conversion performed by the math renderer."""

    def check_parse(self, latex_in, mathjax_out):
        element = etree.fromstring("""<math>{tex}</math>""".format(tex=latex_in))
        renderer = lookup_tag('math')(test_capa_system(), element)
        self.assertEqual(renderer.mathstr, mathjax_out)

    def test_parsing(self):
        # inline math, unterminated math (passed through), and display math
        self.check_parse('$abc$', '[mathjaxinline]abc[/mathjaxinline]')
        self.check_parse('$abc', '$abc')
        self.check_parse(r'$\displaystyle 2+2$', '[mathjax] 2+2[/mathjax]')

    # NOTE: not testing get_html yet because I don't understand why it's doing what it's doing.
| agpl-3.0 |
microdee/IronHydra | src/IronHydra/Lib/email/generator.py | 106 | 13930 | # Copyright (C) 2001-2010 Python Software Foundation
# Contact: email-sig@python.org
"""Classes to generate plain text from a message object tree."""
__all__ = ['Generator', 'DecodedGenerator']
import re
import sys
import time
import random
import warnings
from cStringIO import StringIO
from email.header import Header
# separator used when composing _handle_<maintype>_<subtype> method names
UNDERSCORE = '_'
NL = '\n'

# matches a line starting with "From ", which must be mangled in mbox output
fcre = re.compile(r'^From ', re.MULTILINE)
def _is8bitstring(s):
    """Return True if s is a byte string containing non-us-ascii data."""
    if not isinstance(s, str):
        return False
    try:
        unicode(s, 'us-ascii')
    except UnicodeError:
        # decoding failed, so there's 8-bit data in there
        return True
    return False
class Generator:
    """Generates output from a Message object tree.

    This basic generator writes the message to the given file object as plain
    text.
    """
    #
    # Public interface
    #

    def __init__(self, outfp, mangle_from_=True, maxheaderlen=78):
        """Create the generator for message flattening.

        outfp is the output file-like object for writing the message to. It
        must have a write() method.

        Optional mangle_from_ is a flag that, when True (the default), escapes
        From_ lines in the body of the message by putting a `>' in front of
        them.

        Optional maxheaderlen specifies the longest length for a non-continued
        header. When a header line is longer (in characters, with tabs
        expanded to 8 spaces) than maxheaderlen, the header will split as
        defined in the Header class. Set maxheaderlen to zero to disable
        header wrapping. The default is 78, as recommended (but not required)
        by RFC 2822.
        """
        self._fp = outfp
        self._mangle_from_ = mangle_from_
        self._maxheaderlen = maxheaderlen

    def write(self, s):
        """Write the string s to the underlying output file object."""
        # Just delegate to the file object
        self._fp.write(s)

    def flatten(self, msg, unixfrom=False):
        """Print the message object tree rooted at msg to the output file
        specified when the Generator instance was created.

        unixfrom is a flag that forces the printing of a Unix From_ delimiter
        before the first object in the message tree. If the original message
        has no From_ delimiter, a `standard' one is crafted. By default, this
        is False to inhibit the printing of any From_ delimiter.

        Note that for subobjects, no From_ line is printed.
        """
        if unixfrom:
            ufrom = msg.get_unixfrom()
            if not ufrom:
                ufrom = 'From nobody ' + time.ctime(time.time())
            print >> self._fp, ufrom
        self._write(msg)

    def clone(self, fp):
        """Clone this generator with the exact same options."""
        return self.__class__(fp, self._mangle_from_, self._maxheaderlen)

    #
    # Protected interface - undocumented ;/
    #

    def _write(self, msg):
        """Write the headers and body of msg (headers last, via a buffer)."""
        # We can't write the headers yet because of the following scenario:
        # say a multipart message includes the boundary string somewhere in
        # its body. We'd have to calculate the new boundary /before/ we write
        # the headers so that we can write the correct Content-Type:
        # parameter.
        #
        # The way we do this, so as to make the _handle_*() methods simpler,
        # is to cache any subpart writes into a StringIO. The we write the
        # headers and the StringIO contents. That way, subpart handlers can
        # Do The Right Thing, and can still modify the Content-Type: header if
        # necessary.
        oldfp = self._fp
        try:
            self._fp = sfp = StringIO()
            self._dispatch(msg)
        finally:
            self._fp = oldfp
        # Write the headers. First we see if the message object wants to
        # handle that itself. If not, we'll do it generically.
        meth = getattr(msg, '_write_headers', None)
        if meth is None:
            self._write_headers(msg)
        else:
            meth(self)
        self._fp.write(sfp.getvalue())

    def _dispatch(self, msg):
        """Route msg to the most specific _handle_* method for its MIME type."""
        # Get the Content-Type: for the message, then try to dispatch to
        # self._handle_<maintype>_<subtype>(). If there's no handler for the
        # full MIME type, then dispatch to self._handle_<maintype>(). If
        # that's missing too, then dispatch to self._writeBody().
        main = msg.get_content_maintype()
        sub = msg.get_content_subtype()
        specific = UNDERSCORE.join((main, sub)).replace('-', '_')
        meth = getattr(self, '_handle_' + specific, None)
        if meth is None:
            generic = main.replace('-', '_')
            meth = getattr(self, '_handle_' + generic, None)
            if meth is None:
                meth = self._writeBody
        meth(msg)

    #
    # Default handlers
    #

    def _write_headers(self, msg):
        """Write msg's headers, wrapping long ones according to _maxheaderlen."""
        for h, v in msg.items():
            print >> self._fp, '%s:' % h,
            if self._maxheaderlen == 0:
                # Explicit no-wrapping
                print >> self._fp, v
            elif isinstance(v, Header):
                # Header instances know what to do
                print >> self._fp, v.encode()
            elif _is8bitstring(v):
                # If we have raw 8bit data in a byte string, we have no idea
                # what the encoding is. There is no safe way to split this
                # string. If it's ascii-subset, then we could do a normal
                # ascii split, but if it's multibyte then we could break the
                # string. There's no way to know so the least harm seems to
                # be to not split the string and risk it being too long.
                print >> self._fp, v
            else:
                # Header's got lots of smarts, so use it. Note that this is
                # fundamentally broken though because we lose idempotency when
                # the header string is continued with tabs. It will now be
                # continued with spaces. This was reversedly broken before we
                # fixed bug 1974. Either way, we lose.
                print >> self._fp, Header(
                    v, maxlinelen=self._maxheaderlen, header_name=h).encode()
        # A blank line always separates headers from body
        print >> self._fp

    #
    # Handlers for writing types and subtypes
    #

    def _handle_text(self, msg):
        """Write a text payload, mangling From_ lines when configured."""
        payload = msg.get_payload()
        if payload is None:
            return
        if not isinstance(payload, basestring):
            raise TypeError('string payload expected: %s' % type(payload))
        if self._mangle_from_:
            payload = fcre.sub('>From ', payload)
        self._fp.write(payload)

    # Default body handler
    _writeBody = _handle_text

    def _handle_multipart(self, msg):
        """Write a multipart body, choosing a boundary absent from all subparts."""
        # The trick here is to write out each part separately, merge them all
        # together, and then make sure that the boundary we've chosen isn't
        # present in the payload.
        msgtexts = []
        subparts = msg.get_payload()
        if subparts is None:
            subparts = []
        elif isinstance(subparts, basestring):
            # e.g. a non-strict parse of a message with no starting boundary.
            self._fp.write(subparts)
            return
        elif not isinstance(subparts, list):
            # Scalar payload
            subparts = [subparts]
        for part in subparts:
            s = StringIO()
            g = self.clone(s)
            g.flatten(part, unixfrom=False)
            msgtexts.append(s.getvalue())
        # BAW: What about boundaries that are wrapped in double-quotes?
        boundary = msg.get_boundary()
        if not boundary:
            # Create a boundary that doesn't appear in any of the
            # message texts.
            alltext = NL.join(msgtexts)
            boundary = _make_boundary(alltext)
            msg.set_boundary(boundary)
        # If there's a preamble, write it out, with a trailing CRLF
        if msg.preamble is not None:
            print >> self._fp, msg.preamble
        # dash-boundary transport-padding CRLF
        print >> self._fp, '--' + boundary
        # body-part
        if msgtexts:
            self._fp.write(msgtexts.pop(0))
        # *encapsulation
        # --> delimiter transport-padding
        # --> CRLF body-part
        for body_part in msgtexts:
            # delimiter transport-padding CRLF
            print >> self._fp, '\n--' + boundary
            # body-part
            self._fp.write(body_part)
        # close-delimiter transport-padding
        self._fp.write('\n--' + boundary + '--')
        if msg.epilogue is not None:
            print >> self._fp
            self._fp.write(msg.epilogue)

    def _handle_multipart_signed(self, msg):
        """Write a signed multipart with header wrapping disabled."""
        # The contents of signed parts has to stay unmodified in order to keep
        # the signature intact per RFC1847 2.1, so we disable header wrapping.
        # RDM: This isn't enough to completely preserve the part, but it helps.
        old_maxheaderlen = self._maxheaderlen
        try:
            self._maxheaderlen = 0
            self._handle_multipart(msg)
        finally:
            self._maxheaderlen = old_maxheaderlen

    def _handle_message_delivery_status(self, msg):
        """Write a message/delivery-status body: header blocks joined by blank lines."""
        # We can't just write the headers directly to self's file object
        # because this will leave an extra newline between the last header
        # block and the boundary. Sigh.
        blocks = []
        for part in msg.get_payload():
            s = StringIO()
            g = self.clone(s)
            g.flatten(part, unixfrom=False)
            text = s.getvalue()
            lines = text.split('\n')
            # Strip off the unnecessary trailing empty line
            if lines and lines[-1] == '':
                blocks.append(NL.join(lines[:-1]))
            else:
                blocks.append(text)
        # Now join all the blocks with an empty line. This has the lovely
        # effect of separating each block with an empty line, but not adding
        # an extra one after the last one.
        self._fp.write(NL.join(blocks))

    def _handle_message(self, msg):
        """Write a message/rfc822 body (a nested Message, or raw string)."""
        s = StringIO()
        g = self.clone(s)
        # The payload of a message/rfc822 part should be a multipart sequence
        # of length 1. The zeroth element of the list should be the Message
        # object for the subpart. Extract that object, stringify it, and
        # write it out.
        # Except, it turns out, when it's a string instead, which happens when
        # and only when HeaderParser is used on a message of mime type
        # message/rfc822. Such messages are generated by, for example,
        # Groupwise when forwarding unadorned messages. (Issue 7970.) So
        # in that case we just emit the string body.
        payload = msg.get_payload()
        if isinstance(payload, list):
            g.flatten(msg.get_payload(0), unixfrom=False)
            payload = s.getvalue()
        self._fp.write(payload)
_FMT = '[Non-text (%(type)s) part of message omitted, filename %(filename)s]'
class DecodedGenerator(Generator):
    """Generates a text representation of a message.

    Like the Generator base class, except that non-text parts are substituted
    with a format string representing the part.
    """
    def __init__(self, outfp, mangle_from_=True, maxheaderlen=78, fmt=None):
        """Like Generator.__init__() except that an additional optional
        argument is allowed.

        Walks through all subparts of a message. If the subpart is of main
        type `text', then it prints the decoded payload of the subpart.

        Otherwise, fmt is a format string that is used instead of the message
        payload. fmt is expanded with the following keywords (in
        %(keyword)s format):

        type : Full MIME type of the non-text part
        maintype : Main MIME type of the non-text part
        subtype : Sub-MIME type of the non-text part
        filename : Filename of the non-text part
        description: Description associated with the non-text part
        encoding : Content transfer encoding of the non-text part

        The default value for fmt is None, meaning

        [Non-text (%(type)s) part of message omitted, filename %(filename)s]
        """
        Generator.__init__(self, outfp, mangle_from_, maxheaderlen)
        if fmt is None:
            self._fmt = _FMT
        else:
            self._fmt = fmt

    def _dispatch(self, msg):
        # Walk every subpart: print decoded text parts, skip multipart
        # containers, and substitute the format string for everything else.
        for part in msg.walk():
            maintype = part.get_content_maintype()
            if maintype == 'text':
                print >> self, part.get_payload(decode=True)
            elif maintype == 'multipart':
                # Just skip this
                pass
            else:
                print >> self, self._fmt % {
                    'type' : part.get_content_type(),
                    'maintype' : part.get_content_maintype(),
                    'subtype' : part.get_content_subtype(),
                    'filename' : part.get_filename('[no filename]'),
                    'description': part.get('Content-Description',
                                            '[no description]'),
                    'encoding' : part.get('Content-Transfer-Encoding',
                                          '[no encoding]'),
                    }
# Helper
# _width/_fmt build a zero-padded decimal format wide enough for any value
# returned by random.randrange(sys.maxint); used by _make_boundary below.
_width = len(repr(sys.maxint-1))
_fmt = '%%0%dd' % _width
def _make_boundary(text=None):
    """Craft a random MIME boundary.

    If text is given, ensure the boundary never appears as a delimiter line
    inside it, appending ".<n>" suffixes until a unique candidate is found.
    """
    token = random.randrange(sys.maxint)
    boundary = ('=' * 15) + (_fmt % token) + '=='
    if text is None:
        return boundary
    candidate = boundary
    suffix = 0
    # A boundary collides if "--boundary" or "--boundary--" occurs as a line.
    while re.search('^--' + re.escape(candidate) + '(--)?$', text,
                    re.MULTILINE):
        candidate = boundary + '.' + str(suffix)
        suffix += 1
    return candidate
| mit |
google/material-design-icons | update/venv/lib/python3.9/site-packages/pip/_vendor/requests/cookies.py | 133 | 18430 | # -*- coding: utf-8 -*-
"""
requests.cookies
~~~~~~~~~~~~~~~~
Compatibility code to be able to use `cookielib.CookieJar` with requests.
requests.utils imports from here, so be careful with imports.
"""
import copy
import time
import calendar
from ._internal_utils import to_native_string
from .compat import cookielib, urlparse, urlunparse, Morsel, MutableMapping
try:
import threading
except ImportError:
import dummy_threading as threading
class MockRequest(object):
    """Wraps a `requests.Request` to mimic a `urllib2.Request`.

    The code in `cookielib.CookieJar` expects this interface in order to correctly
    manage cookie policies, i.e., determine whether a cookie can be set, given the
    domains of the request and the cookie.

    The original request object is read-only. The client is responsible for collecting
    the new headers via `get_new_headers()` and interpreting them appropriately. You
    probably want `get_cookie_header`, defined below.
    """

    def __init__(self, request):
        # the wrapped (read-only) requests.Request
        self._r = request
        # headers added by cookielib via add_unredirected_header()
        self._new_headers = {}
        self.type = urlparse(self._r.url).scheme

    def get_type(self):
        # URL scheme ('http'/'https'), per the urllib2.Request interface
        return self.type

    def get_host(self):
        return urlparse(self._r.url).netloc

    def get_origin_req_host(self):
        return self.get_host()

    def get_full_url(self):
        # Only return the response's URL if the user hadn't set the Host
        # header
        if not self._r.headers.get('Host'):
            return self._r.url
        # If they did set it, retrieve it and reconstruct the expected domain
        host = to_native_string(self._r.headers['Host'], encoding='utf-8')
        parsed = urlparse(self._r.url)
        # Reconstruct the URL as we expect it
        return urlunparse([
            parsed.scheme, host, parsed.path, parsed.params, parsed.query,
            parsed.fragment
        ])

    def is_unverifiable(self):
        # always True: cookie-setting here is never user-initiated navigation
        return True

    def has_header(self, name):
        return name in self._r.headers or name in self._new_headers

    def get_header(self, name, default=None):
        return self._r.headers.get(name, self._new_headers.get(name, default))

    def add_header(self, key, val):
        """cookielib has no legitimate use for this method; add it back if you find one."""
        raise NotImplementedError("Cookie headers should be added with add_unredirected_header()")

    def add_unredirected_header(self, name, value):
        self._new_headers[name] = value

    def get_new_headers(self):
        return self._new_headers

    # property aliases for the newer (py3 urllib.request) attribute-style API
    @property
    def unverifiable(self):
        return self.is_unverifiable()

    @property
    def origin_req_host(self):
        return self.get_origin_req_host()

    @property
    def host(self):
        return self.get_host()
class MockResponse(object):
    """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`.

    ...what? Basically, expose the parsed HTTP headers from the server response
    the way `cookielib` expects to see them.
    """

    def __init__(self, headers):
        """Make a MockResponse for `cookielib` to read.

        :param headers: a httplib.HTTPMessage or analogous carrying the headers
        """
        self._headers = headers

    def info(self):
        # cookielib calls info() to obtain the header object
        return self._headers

    def getheaders(self, name):
        # Bug fix: the lookup result was previously computed and discarded;
        # return it so callers actually receive the header values.
        return self._headers.getheaders(name)
def extract_cookies_to_jar(jar, request, response):
    """Extract the cookies from the response into a CookieJar.

    :param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar)
    :param request: our own requests.Request object
    :param response: urllib3.HTTPResponse object
    """
    # the _original_response field is the wrapped httplib.HTTPResponse object
    if not getattr(response, '_original_response', None):
        return
    req = MockRequest(request)
    # pull out the HTTPMessage with the headers and put it in the mock:
    res = MockResponse(response._original_response.msg)
    jar.extract_cookies(res, req)
def get_cookie_header(jar, request):
    """
    Produce an appropriate Cookie header string to be sent with `request`, or None.

    :rtype: str
    """
    mock = MockRequest(request)
    # cookielib writes the Cookie header onto the mock request object
    jar.add_cookie_header(mock)
    return mock.get_new_headers().get('Cookie')
def remove_cookie_by_name(cookiejar, name, domain=None, path=None):
    """Unsets a cookie by name, by default over all domains and paths.

    Wraps CookieJar.clear(), is O(n).
    """
    # Collect matches first; clearing while iterating would mutate the jar.
    clearables = [
        (cookie.domain, cookie.path, cookie.name)
        for cookie in cookiejar
        if cookie.name == name
        and (domain is None or domain == cookie.domain)
        and (path is None or path == cookie.path)
    ]
    for domain, path, name in clearables:
        cookiejar.clear(domain, path, name)
class CookieConflictError(RuntimeError):
    """There are two cookies that meet the criteria specified in the cookie jar.

    Use .get and .set and include domain and path args in order to be more specific.
    """
class RequestsCookieJar(cookielib.CookieJar, MutableMapping):
"""Compatibility class; is a cookielib.CookieJar, but exposes a dict
interface.
This is the CookieJar we create by default for requests and sessions that
don't specify one, since some clients may expect response.cookies and
session.cookies to support dict operations.
Requests does not use the dict interface internally; it's just for
compatibility with external client code. All requests code should work
out of the box with externally provided instances of ``CookieJar``, e.g.
``LWPCookieJar`` and ``FileCookieJar``.
Unlike a regular CookieJar, this class is pickleable.
.. warning:: dictionary operations that are normally O(1) may be O(n).
"""
def get(self, name, default=None, domain=None, path=None):
    """Dict-like get() with optional domain and path arguments, used to
    disambiguate same-named cookies stored for several domains.

    Returns ``default`` when no matching cookie exists.

    .. warning:: operation is O(n), not O(1).
    """
    try:
        return self._find_no_duplicates(name, domain, path)
    except KeyError:
        # no matching cookie found
        return default
def set(self, name, value, **kwargs):
    """Dict-like set() with optional domain and path kwargs, used to
    disambiguate same-named cookies stored for several domains.

    Assigning ``None`` removes any matching cookie instead of storing one.
    """
    if value is None:
        # support client code that unsets cookies by assignment of a None value:
        remove_cookie_by_name(self, name,
                              domain=kwargs.get('domain'),
                              path=kwargs.get('path'))
        return
    if isinstance(value, Morsel):
        cookie = morsel_to_cookie(value)
    else:
        cookie = create_cookie(name, value, **kwargs)
    self.set_cookie(cookie)
    return cookie
def iterkeys(self):
    """Yield the name of each cookie in the jar.

    .. seealso:: itervalues() and iteritems().
    """
    return (cookie.name for cookie in iter(self))
def keys(self):
    """Return a list of the names of all cookies in the jar.

    .. seealso:: values() and items().
    """
    return list(self.iterkeys())
def itervalues(self):
    """Yield the value of each cookie in the jar.

    .. seealso:: iterkeys() and iteritems().
    """
    return (cookie.value for cookie in iter(self))
def values(self):
    """Return a list of the values of all cookies in the jar.

    .. seealso:: keys() and items().
    """
    return list(self.itervalues())
def iteritems(self):
    """Yield a (name, value) tuple for each cookie in the jar.

    .. seealso:: iterkeys() and itervalues().
    """
    return ((cookie.name, cookie.value) for cookie in iter(self))
def items(self):
    """Return a list of (name, value) tuples for all cookies in the jar.

    Allows client-code to call ``dict(RequestsCookieJar)`` and get a
    vanilla python dict of key value pairs.

    .. seealso:: keys() and values().
    """
    return list(self.iteritems())
def list_domains(self):
    """Utility method listing the distinct domains in the jar, in first-seen order."""
    seen = []
    for cookie in iter(self):
        domain = cookie.domain
        if domain not in seen:
            seen.append(domain)
    return seen
def list_paths(self):
    """Utility method listing the distinct paths in the jar, in first-seen order."""
    seen = []
    for cookie in iter(self):
        cookie_path = cookie.path
        if cookie_path not in seen:
            seen.append(cookie_path)
    return seen
def multiple_domains(self):
    """Return True if any non-None domain occurs more than once in the jar,
    False otherwise.
    :rtype: bool
    """
    seen = []
    for jar_cookie in self:
        domain = jar_cookie.domain
        if domain is not None and domain in seen:
            return True
        seen.append(domain)
    # No domain was repeated.
    return False
def get_dict(self, domain=None, path=None):
    """Return a plain Python dict of name-value pairs for the cookies that
    match the optional *domain* and *path* filters.
    :rtype: dict
    """
    result = {}
    for jar_cookie in self:
        # Skip cookies that fail either filter.
        if domain is not None and jar_cookie.domain != domain:
            continue
        if path is not None and jar_cookie.path != path:
            continue
        result[jar_cookie.name] = jar_cookie.value
    return result
def __contains__(self, name):
    """Dict-like membership test by cookie name.
    A CookieConflictError from the base lookup means more than one cookie
    matched the name, which still counts as present.
    """
    try:
        return super(RequestsCookieJar, self).__contains__(name)
    except CookieConflictError:
        return True
def __getitem__(self, name):
    """Dict-like __getitem__() for compatibility with client code. Throws
    exception if there are more than one cookie with name. In that case,
    use the more explicit get() method instead.
    .. warning:: operation is O(n), not O(1).
    :raises KeyError: if no cookie with that name exists.
    :raises CookieConflictError: if more than one cookie matches the name.
    """
    return self._find_no_duplicates(name)
def __setitem__(self, name, value):
    """Dict-like __setitem__ for compatibility with client code. Throws
    exception if there is already a cookie of that name in the jar. In that
    case, use the more explicit set() method instead.
    """
    # Delegates to set(), which also handles Morsel values and None
    # (None unsets the cookie).
    self.set(name, value)
def __delitem__(self, name):
    """Deletes a cookie given a name. Wraps ``cookielib.CookieJar``'s
    ``remove_cookie_by_name()``.
    """
    remove_cookie_by_name(self, name)
def set_cookie(self, cookie, *args, **kwargs):
    """Add *cookie* to the jar, normalizing escaped quotes in quoted values.
    Cookies whose value arrived wrapped in double quotes keep the outer
    quotes, but any backslash-escaped inner quotes (\\") are stripped
    before storage.
    """
    # hasattr guard: non-string values (e.g. None) are passed through
    # untouched.
    if hasattr(cookie.value, 'startswith') and cookie.value.startswith('"') and cookie.value.endswith('"'):
        cookie.value = cookie.value.replace('\\"', '')
    return super(RequestsCookieJar, self).set_cookie(cookie, *args, **kwargs)
def update(self, other):
    """Updates this jar with cookies from another CookieJar or dict-like"""
    if isinstance(other, cookielib.CookieJar):
        # Copy each cookie so the two jars never share mutable state.
        for cookie in other:
            self.set_cookie(copy.copy(cookie))
    else:
        super(RequestsCookieJar, self).update(other)
def _find(self, name, domain=None, path=None):
    """Return the value of the first cookie matching *name* and the
    optional *domain* and *path*. Requests uses this method internally.
    If there are conflicting cookies, the first match wins; see
    _find_no_duplicates() for the strict variant.
    :param name: a string containing name of cookie
    :param domain: (optional) string containing domain of cookie
    :param path: (optional) string containing path of cookie
    :return: cookie.value
    :raises KeyError: if no cookie matches
    """
    for jar_cookie in self:
        matched = (jar_cookie.name == name
                   and (domain is None or jar_cookie.domain == domain)
                   and (path is None or jar_cookie.path == path))
        if matched:
            return jar_cookie.value
    raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))
def _find_no_duplicates(self, name, domain=None, path=None):
    """Return the value of the unique cookie matching *name* and the
    optional *domain* and *path*.
    Both ``__get_item__`` and ``get`` call this function: it's never
    used elsewhere in Requests.
    :param name: a string containing name of cookie
    :param domain: (optional) string containing domain of cookie
    :param path: (optional) string containing path of cookie
    :raises KeyError: if cookie is not found
    :raises CookieConflictError: if there are multiple cookies
        that match name and optionally domain and path
    :return: cookie.value
    """
    # BUGFIX: the original tested "if toReturn:", so a matching cookie
    # whose value is falsy (e.g. '') was reported as missing via KeyError
    # and conflicts among falsy-valued cookies went undetected. Track the
    # match explicitly instead of relying on the value's truthiness.
    toReturn = None
    found = False
    for cookie in iter(self):
        if cookie.name == name:
            if domain is None or cookie.domain == domain:
                if path is None or cookie.path == path:
                    if found:
                        # Multiple cookies meet the passed-in criteria.
                        raise CookieConflictError(
                            'There are multiple cookies with name, %r' % (name))
                    toReturn = cookie.value
                    found = True
    if found:
        return toReturn
    raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))
def __getstate__(self):
    """Unlike a normal CookieJar, this class is pickleable."""
    state = self.__dict__.copy()
    # remove the unpickleable RLock object; __setstate__ recreates it.
    # NOTE(review): pop() without a default assumes the lock attribute is
    # always present -- confirm against cookielib.CookieJar.__init__.
    state.pop('_cookies_lock')
    return state
def __setstate__(self, state):
"""Unlike a normal CookieJar, this class is pickleable."""
self.__dict__.update(state)
if '_cookies_lock' not in self.__dict__:
self._cookies_lock = threading.RLock()
def copy(self):
    """Return a copy of this RequestsCookieJar."""
    new_cj = RequestsCookieJar()
    # NOTE(review): the policy object itself is shared, not copied --
    # confirm that is acceptable for callers mutating the policy.
    new_cj.set_policy(self.get_policy())
    # update() copies each cookie, so the jars share no cookie state.
    new_cj.update(self)
    return new_cj
def get_policy(self):
    """Return the CookiePolicy instance used."""
    # _policy is maintained by the base cookielib.CookieJar.
    return self._policy
def _copy_cookie_jar(jar):
    """Return a copy of *jar*, or None when *jar* is None.
    RequestsCookieJar instances are copied via their own copy(); generic
    cookielib jars are shallow-copied, emptied, and refilled with copies
    of each cookie.
    """
    if jar is None:
        return None
    if not hasattr(jar, 'copy'):
        # Generic CookieJar: clone the container, then refill it so the
        # two jars share no cookie objects.
        duplicate = copy.copy(jar)
        duplicate.clear()
        for jar_cookie in jar:
            duplicate.set_cookie(copy.copy(jar_cookie))
        return duplicate
    # RequestsCookieJar instances know how to copy themselves.
    return jar.copy()
def create_cookie(name, value, **kwargs):
    """Make a cookie from underspecified parameters.
    By default, the pair of `name` and `value` will be set for the domain ''
    and sent on every request (this is sometimes called a "supercookie").
    :raises TypeError: if a keyword argument is not a Cookie field.
    """
    spec = dict(
        version=0,
        name=name,
        value=value,
        port=None,
        domain='',
        path='/',
        secure=False,
        expires=None,
        discard=True,
        comment=None,
        comment_url=None,
        rest={'HttpOnly': None},
        rfc2109=False,
    )
    # Reject keyword arguments that are not recognized Cookie fields.
    unknown = set(kwargs) - set(spec)
    if unknown:
        err = 'create_cookie() got unexpected keyword arguments: %s'
        raise TypeError(err % list(unknown))
    spec.update(kwargs)
    # Derive the *_specified flags cookielib.Cookie expects.
    spec['port_specified'] = bool(spec['port'])
    spec['domain_specified'] = bool(spec['domain'])
    spec['domain_initial_dot'] = spec['domain'].startswith('.')
    spec['path_specified'] = bool(spec['path'])
    return cookielib.Cookie(**spec)
def morsel_to_cookie(morsel):
    """Convert a Morsel object into a Cookie containing the one k/v pair."""
    expires = None
    if morsel['max-age']:
        # max-age takes precedence over expires; convert the relative
        # lifetime into an absolute POSIX timestamp.
        try:
            expires = int(time.time() + int(morsel['max-age']))
        except ValueError:
            raise TypeError('max-age: %s must be integer' % morsel['max-age'])
    elif morsel['expires']:
        # Parse the fixed HTTP cookie date format into a POSIX timestamp.
        time_template = '%a, %d-%b-%Y %H:%M:%S GMT'
        expires = calendar.timegm(
            time.strptime(morsel['expires'], time_template)
        )
    return create_cookie(
        comment=morsel['comment'],
        # NOTE(review): comment_url is derived from the comment's
        # truthiness; Morsel has no 'comment_url' key -- confirm intended.
        comment_url=bool(morsel['comment']),
        discard=False,
        domain=morsel['domain'],
        expires=expires,
        name=morsel.key,
        path=morsel['path'],
        port=None,
        rest={'HttpOnly': morsel['httponly']},
        rfc2109=False,
        secure=bool(morsel['secure']),
        value=morsel.value,
        version=morsel['version'] or 0,
    )
def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True):
    """Returns a CookieJar from a key/value dictionary.
    :param cookie_dict: Dict of key/values to insert into CookieJar.
    :param cookiejar: (optional) A cookiejar to add the cookies to.
    :param overwrite: (optional) If False, will not replace cookies
        already in the jar with new ones.
    :rtype: CookieJar
    """
    jar = cookiejar if cookiejar is not None else RequestsCookieJar()
    if cookie_dict is None:
        return jar
    # Snapshot the existing names once so overwrite=False can skip them.
    existing = [jar_cookie.name for jar_cookie in jar]
    for name in cookie_dict:
        if not overwrite and name in existing:
            continue
        jar.set_cookie(create_cookie(name, cookie_dict[name]))
    return jar
def merge_cookies(cookiejar, cookies):
    """Add cookies to cookiejar and returns a merged CookieJar.
    :param cookiejar: CookieJar object to add the cookies to.
    :param cookies: Dictionary or CookieJar object to be added.
    :rtype: CookieJar
    :raises ValueError: if *cookiejar* is not a CookieJar.
    """
    if not isinstance(cookiejar, cookielib.CookieJar):
        raise ValueError('You can only merge into CookieJar')
    if isinstance(cookies, dict):
        # Existing cookies win: overwrite=False keeps what's in the jar.
        return cookiejar_from_dict(cookies, cookiejar=cookiejar,
                                   overwrite=False)
    if isinstance(cookies, cookielib.CookieJar):
        try:
            cookiejar.update(cookies)
        except AttributeError:
            # Plain cookielib jars have no update(); copy cookie by cookie.
            for jar_cookie in cookies:
                cookiejar.set_cookie(jar_cookie)
    return cookiejar
| apache-2.0 |
izonder/intellij-community | python/helpers/pydev/pydevd_attach_to_process/winappdbg/win32/kernel32.py | 102 | 164818 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2009-2014, Mario Vilas
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice,this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
Wrapper for kernel32.dll in ctypes.
"""
__revision__ = "$Id$"
import warnings
from winappdbg.win32.defines import *
from winappdbg.win32 import context_i386
from winappdbg.win32 import context_amd64
#==============================================================================
# This is used later on to calculate the list of exported symbols.
_all = None
_all = set(vars().keys())
_all.add('version')
#==============================================================================
from winappdbg.win32.version import *
#------------------------------------------------------------------------------
# This can't be defined in defines.py because it calls GetLastError().
def RaiseIfLastError(result, func = None, arguments = ()):
    """
    Error checking for Win32 API calls with no error-specific return value.
    Regardless of the return value, the function calls GetLastError(). If the
    code is not C{ERROR_SUCCESS} then a C{WindowsError} exception is raised.
    For this to work, the user MUST call SetLastError(ERROR_SUCCESS) prior to
    calling the API. Otherwise an exception may be raised even on success,
    since most API calls don't clear the error status code.
    """
    last_error = GetLastError()
    if last_error == ERROR_SUCCESS:
        return result
    raise ctypes.WinError(last_error)
#--- CONTEXT structure and constants ------------------------------------------

# Mask used to isolate the architecture bits of CONTEXT flag values.
# NOTE(review): value chosen empirically by the original author -- confirm
# against the Windows SDK headers if it ever misbehaves.
ContextArchMask = 0x0FFF0000 # just guessing here! seems to work, though

# Pull in the CONTEXT structure matching the interpreter's architecture.
# A 32-bit Python on an AMD64 machine still needs the i386 context.
if arch == ARCH_I386:
    from winappdbg.win32.context_i386 import *
elif arch == ARCH_AMD64:
    if bits == 64:
        from winappdbg.win32.context_amd64 import *
    else:
        from winappdbg.win32.context_i386 import *
else:
    warnings.warn("Unknown or unsupported architecture: %s" % arch)
#--- Constants ----------------------------------------------------------------
STILL_ACTIVE = 259
WAIT_TIMEOUT = 0x102
WAIT_FAILED = -1
WAIT_OBJECT_0 = 0
EXCEPTION_NONCONTINUABLE = 0x1 # Noncontinuable exception
EXCEPTION_MAXIMUM_PARAMETERS = 15 # maximum number of exception parameters
MAXIMUM_WAIT_OBJECTS = 64 # Maximum number of wait objects
MAXIMUM_SUSPEND_COUNT = 0x7f # Maximum times thread can be suspended
FORMAT_MESSAGE_ALLOCATE_BUFFER = 0x00000100
FORMAT_MESSAGE_FROM_SYSTEM = 0x00001000
GR_GDIOBJECTS = 0
GR_USEROBJECTS = 1
PROCESS_NAME_NATIVE = 1
MAXINTATOM = 0xC000
STD_INPUT_HANDLE = 0xFFFFFFF6 # (DWORD)-10
STD_OUTPUT_HANDLE = 0xFFFFFFF5 # (DWORD)-11
STD_ERROR_HANDLE = 0xFFFFFFF4 # (DWORD)-12
ATTACH_PARENT_PROCESS = 0xFFFFFFFF # (DWORD)-1
# LoadLibraryEx constants
DONT_RESOLVE_DLL_REFERENCES = 0x00000001
LOAD_LIBRARY_AS_DATAFILE = 0x00000002
LOAD_WITH_ALTERED_SEARCH_PATH = 0x00000008
LOAD_IGNORE_CODE_AUTHZ_LEVEL = 0x00000010
LOAD_LIBRARY_AS_IMAGE_RESOURCE = 0x00000020
LOAD_LIBRARY_AS_DATAFILE_EXCLUSIVE = 0x00000040
# SetSearchPathMode flags
# TODO I couldn't find these constants :(
##BASE_SEARCH_PATH_ENABLE_SAFE_SEARCHMODE = ???
##BASE_SEARCH_PATH_DISABLE_SAFE_SEARCHMODE = ???
##BASE_SEARCH_PATH_PERMANENT = ???
# Console control events
CTRL_C_EVENT = 0
CTRL_BREAK_EVENT = 1
CTRL_CLOSE_EVENT = 2
CTRL_LOGOFF_EVENT = 5
CTRL_SHUTDOWN_EVENT = 6
# Heap flags
HEAP_NO_SERIALIZE = 0x00000001
HEAP_GENERATE_EXCEPTIONS = 0x00000004
HEAP_ZERO_MEMORY = 0x00000008
HEAP_CREATE_ENABLE_EXECUTE = 0x00040000
# Standard access rights
DELETE = long(0x00010000)
READ_CONTROL = long(0x00020000)
WRITE_DAC = long(0x00040000)
WRITE_OWNER = long(0x00080000)
SYNCHRONIZE = long(0x00100000)
STANDARD_RIGHTS_REQUIRED = long(0x000F0000)
STANDARD_RIGHTS_READ = (READ_CONTROL)
STANDARD_RIGHTS_WRITE = (READ_CONTROL)
STANDARD_RIGHTS_EXECUTE = (READ_CONTROL)
STANDARD_RIGHTS_ALL = long(0x001F0000)
SPECIFIC_RIGHTS_ALL = long(0x0000FFFF)
# Mutex access rights
MUTEX_ALL_ACCESS = 0x1F0001
MUTEX_MODIFY_STATE = 1
# Event access rights
EVENT_ALL_ACCESS = 0x1F0003
EVENT_MODIFY_STATE = 2
# Semaphore access rights
SEMAPHORE_ALL_ACCESS = 0x1F0003
SEMAPHORE_MODIFY_STATE = 2
# Timer access rights
TIMER_ALL_ACCESS = 0x1F0003
TIMER_MODIFY_STATE = 2
TIMER_QUERY_STATE = 1
# Process access rights for OpenProcess
PROCESS_TERMINATE = 0x0001
PROCESS_CREATE_THREAD = 0x0002
PROCESS_SET_SESSIONID = 0x0004
PROCESS_VM_OPERATION = 0x0008
PROCESS_VM_READ = 0x0010
PROCESS_VM_WRITE = 0x0020
PROCESS_DUP_HANDLE = 0x0040
PROCESS_CREATE_PROCESS = 0x0080
PROCESS_SET_QUOTA = 0x0100
PROCESS_SET_INFORMATION = 0x0200
PROCESS_QUERY_INFORMATION = 0x0400
PROCESS_SUSPEND_RESUME = 0x0800
PROCESS_QUERY_LIMITED_INFORMATION = 0x1000
# Thread access rights for OpenThread
THREAD_TERMINATE = 0x0001
THREAD_SUSPEND_RESUME = 0x0002
THREAD_ALERT = 0x0004
THREAD_GET_CONTEXT = 0x0008
THREAD_SET_CONTEXT = 0x0010
THREAD_SET_INFORMATION = 0x0020
THREAD_QUERY_INFORMATION = 0x0040
THREAD_SET_THREAD_TOKEN = 0x0080
THREAD_IMPERSONATE = 0x0100
THREAD_DIRECT_IMPERSONATION = 0x0200
THREAD_SET_LIMITED_INFORMATION = 0x0400
THREAD_QUERY_LIMITED_INFORMATION = 0x0800
# The values of PROCESS_ALL_ACCESS and THREAD_ALL_ACCESS were changed in Vista/2008
PROCESS_ALL_ACCESS_NT = (STANDARD_RIGHTS_REQUIRED | SYNCHRONIZE | 0xFFF)
PROCESS_ALL_ACCESS_VISTA = (STANDARD_RIGHTS_REQUIRED | SYNCHRONIZE | 0xFFFF)
THREAD_ALL_ACCESS_NT = (STANDARD_RIGHTS_REQUIRED | SYNCHRONIZE | 0x3FF)
THREAD_ALL_ACCESS_VISTA = (STANDARD_RIGHTS_REQUIRED | SYNCHRONIZE | 0xFFFF)
if NTDDI_VERSION < NTDDI_VISTA:
PROCESS_ALL_ACCESS = PROCESS_ALL_ACCESS_NT
THREAD_ALL_ACCESS = THREAD_ALL_ACCESS_NT
else:
PROCESS_ALL_ACCESS = PROCESS_ALL_ACCESS_VISTA
THREAD_ALL_ACCESS = THREAD_ALL_ACCESS_VISTA
# Process priority classes
IDLE_PRIORITY_CLASS = 0x00000040
BELOW_NORMAL_PRIORITY_CLASS = 0x00004000
NORMAL_PRIORITY_CLASS = 0x00000020
ABOVE_NORMAL_PRIORITY_CLASS = 0x00008000
HIGH_PRIORITY_CLASS = 0x00000080
REALTIME_PRIORITY_CLASS = 0x00000100
PROCESS_MODE_BACKGROUND_BEGIN = 0x00100000
PROCESS_MODE_BACKGROUND_END = 0x00200000
# dwCreationFlag values
DEBUG_PROCESS = 0x00000001
DEBUG_ONLY_THIS_PROCESS = 0x00000002
CREATE_SUSPENDED = 0x00000004 # Threads and processes
DETACHED_PROCESS = 0x00000008
CREATE_NEW_CONSOLE = 0x00000010
NORMAL_PRIORITY_CLASS = 0x00000020
IDLE_PRIORITY_CLASS = 0x00000040
HIGH_PRIORITY_CLASS = 0x00000080
REALTIME_PRIORITY_CLASS = 0x00000100
CREATE_NEW_PROCESS_GROUP = 0x00000200
CREATE_UNICODE_ENVIRONMENT = 0x00000400
CREATE_SEPARATE_WOW_VDM = 0x00000800
CREATE_SHARED_WOW_VDM = 0x00001000
CREATE_FORCEDOS = 0x00002000
BELOW_NORMAL_PRIORITY_CLASS = 0x00004000
ABOVE_NORMAL_PRIORITY_CLASS = 0x00008000
INHERIT_PARENT_AFFINITY = 0x00010000
STACK_SIZE_PARAM_IS_A_RESERVATION = 0x00010000 # Threads only
INHERIT_CALLER_PRIORITY = 0x00020000 # Deprecated
CREATE_PROTECTED_PROCESS = 0x00040000
EXTENDED_STARTUPINFO_PRESENT = 0x00080000
PROCESS_MODE_BACKGROUND_BEGIN = 0x00100000
PROCESS_MODE_BACKGROUND_END = 0x00200000
CREATE_BREAKAWAY_FROM_JOB = 0x01000000
CREATE_PRESERVE_CODE_AUTHZ_LEVEL = 0x02000000
CREATE_DEFAULT_ERROR_MODE = 0x04000000
CREATE_NO_WINDOW = 0x08000000
PROFILE_USER = 0x10000000
PROFILE_KERNEL = 0x20000000
PROFILE_SERVER = 0x40000000
CREATE_IGNORE_SYSTEM_DEFAULT = 0x80000000
# Thread priority values
THREAD_BASE_PRIORITY_LOWRT = 15 # value that gets a thread to LowRealtime-1
THREAD_BASE_PRIORITY_MAX = 2 # maximum thread base priority boost
THREAD_BASE_PRIORITY_MIN = (-2) # minimum thread base priority boost
THREAD_BASE_PRIORITY_IDLE = (-15) # value that gets a thread to idle
THREAD_PRIORITY_LOWEST = THREAD_BASE_PRIORITY_MIN
THREAD_PRIORITY_BELOW_NORMAL = (THREAD_PRIORITY_LOWEST+1)
THREAD_PRIORITY_NORMAL = 0
THREAD_PRIORITY_HIGHEST = THREAD_BASE_PRIORITY_MAX
THREAD_PRIORITY_ABOVE_NORMAL = (THREAD_PRIORITY_HIGHEST-1)
THREAD_PRIORITY_ERROR_RETURN = long(0xFFFFFFFF)
THREAD_PRIORITY_TIME_CRITICAL = THREAD_BASE_PRIORITY_LOWRT
THREAD_PRIORITY_IDLE = THREAD_BASE_PRIORITY_IDLE
# Memory access
SECTION_QUERY = 0x0001
SECTION_MAP_WRITE = 0x0002
SECTION_MAP_READ = 0x0004
SECTION_MAP_EXECUTE = 0x0008
SECTION_EXTEND_SIZE = 0x0010
SECTION_MAP_EXECUTE_EXPLICIT = 0x0020 # not included in SECTION_ALL_ACCESS
SECTION_ALL_ACCESS = (STANDARD_RIGHTS_REQUIRED|SECTION_QUERY|\
SECTION_MAP_WRITE | \
SECTION_MAP_READ | \
SECTION_MAP_EXECUTE | \
SECTION_EXTEND_SIZE)
PAGE_NOACCESS = 0x01
PAGE_READONLY = 0x02
PAGE_READWRITE = 0x04
PAGE_WRITECOPY = 0x08
PAGE_EXECUTE = 0x10
PAGE_EXECUTE_READ = 0x20
PAGE_EXECUTE_READWRITE = 0x40
PAGE_EXECUTE_WRITECOPY = 0x80
PAGE_GUARD = 0x100
PAGE_NOCACHE = 0x200
PAGE_WRITECOMBINE = 0x400
MEM_COMMIT = 0x1000
MEM_RESERVE = 0x2000
MEM_DECOMMIT = 0x4000
MEM_RELEASE = 0x8000
MEM_FREE = 0x10000
MEM_PRIVATE = 0x20000
MEM_MAPPED = 0x40000
MEM_RESET = 0x80000
MEM_TOP_DOWN = 0x100000
MEM_WRITE_WATCH = 0x200000
MEM_PHYSICAL = 0x400000
MEM_LARGE_PAGES = 0x20000000
MEM_4MB_PAGES = 0x80000000
SEC_FILE = 0x800000
SEC_IMAGE = 0x1000000
SEC_RESERVE = 0x4000000
SEC_COMMIT = 0x8000000
SEC_NOCACHE = 0x10000000
SEC_LARGE_PAGES = 0x80000000
MEM_IMAGE = SEC_IMAGE
WRITE_WATCH_FLAG_RESET = 0x01
FILE_MAP_ALL_ACCESS = 0xF001F
SECTION_QUERY = 0x0001
SECTION_MAP_WRITE = 0x0002
SECTION_MAP_READ = 0x0004
SECTION_MAP_EXECUTE = 0x0008
SECTION_EXTEND_SIZE = 0x0010
SECTION_MAP_EXECUTE_EXPLICIT = 0x0020 # not included in SECTION_ALL_ACCESS
SECTION_ALL_ACCESS = (STANDARD_RIGHTS_REQUIRED|SECTION_QUERY|\
SECTION_MAP_WRITE | \
SECTION_MAP_READ | \
SECTION_MAP_EXECUTE | \
SECTION_EXTEND_SIZE)
FILE_MAP_COPY = SECTION_QUERY
FILE_MAP_WRITE = SECTION_MAP_WRITE
FILE_MAP_READ = SECTION_MAP_READ
FILE_MAP_ALL_ACCESS = SECTION_ALL_ACCESS
FILE_MAP_EXECUTE = SECTION_MAP_EXECUTE_EXPLICIT # not included in FILE_MAP_ALL_ACCESS
GENERIC_READ = 0x80000000
GENERIC_WRITE = 0x40000000
GENERIC_EXECUTE = 0x20000000
GENERIC_ALL = 0x10000000
FILE_SHARE_READ = 0x00000001
FILE_SHARE_WRITE = 0x00000002
FILE_SHARE_DELETE = 0x00000004
CREATE_NEW = 1
CREATE_ALWAYS = 2
OPEN_EXISTING = 3
OPEN_ALWAYS = 4
TRUNCATE_EXISTING = 5
FILE_ATTRIBUTE_READONLY = 0x00000001
FILE_ATTRIBUTE_NORMAL = 0x00000080
FILE_ATTRIBUTE_TEMPORARY = 0x00000100
FILE_FLAG_WRITE_THROUGH = 0x80000000
FILE_FLAG_NO_BUFFERING = 0x20000000
FILE_FLAG_RANDOM_ACCESS = 0x10000000
FILE_FLAG_SEQUENTIAL_SCAN = 0x08000000
FILE_FLAG_DELETE_ON_CLOSE = 0x04000000
FILE_FLAG_OVERLAPPED = 0x40000000
FILE_ATTRIBUTE_READONLY = 0x00000001
FILE_ATTRIBUTE_HIDDEN = 0x00000002
FILE_ATTRIBUTE_SYSTEM = 0x00000004
FILE_ATTRIBUTE_DIRECTORY = 0x00000010
FILE_ATTRIBUTE_ARCHIVE = 0x00000020
FILE_ATTRIBUTE_DEVICE = 0x00000040
FILE_ATTRIBUTE_NORMAL = 0x00000080
FILE_ATTRIBUTE_TEMPORARY = 0x00000100
# Debug events
EXCEPTION_DEBUG_EVENT = 1
CREATE_THREAD_DEBUG_EVENT = 2
CREATE_PROCESS_DEBUG_EVENT = 3
EXIT_THREAD_DEBUG_EVENT = 4
EXIT_PROCESS_DEBUG_EVENT = 5
LOAD_DLL_DEBUG_EVENT = 6
UNLOAD_DLL_DEBUG_EVENT = 7
OUTPUT_DEBUG_STRING_EVENT = 8
RIP_EVENT = 9
# Debug status codes (ContinueDebugEvent)
DBG_EXCEPTION_HANDLED = long(0x00010001)
DBG_CONTINUE = long(0x00010002)
DBG_REPLY_LATER = long(0x40010001)
DBG_UNABLE_TO_PROVIDE_HANDLE = long(0x40010002)
DBG_TERMINATE_THREAD = long(0x40010003)
DBG_TERMINATE_PROCESS = long(0x40010004)
DBG_CONTROL_C = long(0x40010005)
DBG_PRINTEXCEPTION_C = long(0x40010006)
DBG_RIPEXCEPTION = long(0x40010007)
DBG_CONTROL_BREAK = long(0x40010008)
DBG_COMMAND_EXCEPTION = long(0x40010009)
DBG_EXCEPTION_NOT_HANDLED = long(0x80010001)
DBG_NO_STATE_CHANGE = long(0xC0010001)
DBG_APP_NOT_IDLE = long(0xC0010002)
# Status codes
STATUS_WAIT_0 = long(0x00000000)
STATUS_ABANDONED_WAIT_0 = long(0x00000080)
STATUS_USER_APC = long(0x000000C0)
STATUS_TIMEOUT = long(0x00000102)
STATUS_PENDING = long(0x00000103)
STATUS_SEGMENT_NOTIFICATION = long(0x40000005)
STATUS_GUARD_PAGE_VIOLATION = long(0x80000001)
STATUS_DATATYPE_MISALIGNMENT = long(0x80000002)
STATUS_BREAKPOINT = long(0x80000003)
STATUS_SINGLE_STEP = long(0x80000004)
STATUS_INVALID_INFO_CLASS = long(0xC0000003)
STATUS_ACCESS_VIOLATION = long(0xC0000005)
STATUS_IN_PAGE_ERROR = long(0xC0000006)
STATUS_INVALID_HANDLE = long(0xC0000008)
STATUS_NO_MEMORY = long(0xC0000017)
STATUS_ILLEGAL_INSTRUCTION = long(0xC000001D)
STATUS_NONCONTINUABLE_EXCEPTION = long(0xC0000025)
STATUS_INVALID_DISPOSITION = long(0xC0000026)
STATUS_ARRAY_BOUNDS_EXCEEDED = long(0xC000008C)
STATUS_FLOAT_DENORMAL_OPERAND = long(0xC000008D)
STATUS_FLOAT_DIVIDE_BY_ZERO = long(0xC000008E)
STATUS_FLOAT_INEXACT_RESULT = long(0xC000008F)
STATUS_FLOAT_INVALID_OPERATION = long(0xC0000090)
STATUS_FLOAT_OVERFLOW = long(0xC0000091)
STATUS_FLOAT_STACK_CHECK = long(0xC0000092)
STATUS_FLOAT_UNDERFLOW = long(0xC0000093)
STATUS_INTEGER_DIVIDE_BY_ZERO = long(0xC0000094)
STATUS_INTEGER_OVERFLOW = long(0xC0000095)
STATUS_PRIVILEGED_INSTRUCTION = long(0xC0000096)
STATUS_STACK_OVERFLOW = long(0xC00000FD)
STATUS_CONTROL_C_EXIT = long(0xC000013A)
STATUS_FLOAT_MULTIPLE_FAULTS = long(0xC00002B4)
STATUS_FLOAT_MULTIPLE_TRAPS = long(0xC00002B5)
STATUS_REG_NAT_CONSUMPTION = long(0xC00002C9)
STATUS_SXS_EARLY_DEACTIVATION = long(0xC015000F)
STATUS_SXS_INVALID_DEACTIVATION = long(0xC0150010)
STATUS_STACK_BUFFER_OVERRUN = long(0xC0000409)
STATUS_WX86_BREAKPOINT = long(0x4000001F)
STATUS_HEAP_CORRUPTION = long(0xC0000374)
STATUS_POSSIBLE_DEADLOCK = long(0xC0000194)
STATUS_UNWIND_CONSOLIDATE = long(0x80000029)
# Exception codes
EXCEPTION_ACCESS_VIOLATION = STATUS_ACCESS_VIOLATION
EXCEPTION_ARRAY_BOUNDS_EXCEEDED = STATUS_ARRAY_BOUNDS_EXCEEDED
EXCEPTION_BREAKPOINT = STATUS_BREAKPOINT
EXCEPTION_DATATYPE_MISALIGNMENT = STATUS_DATATYPE_MISALIGNMENT
EXCEPTION_FLT_DENORMAL_OPERAND = STATUS_FLOAT_DENORMAL_OPERAND
EXCEPTION_FLT_DIVIDE_BY_ZERO = STATUS_FLOAT_DIVIDE_BY_ZERO
EXCEPTION_FLT_INEXACT_RESULT = STATUS_FLOAT_INEXACT_RESULT
EXCEPTION_FLT_INVALID_OPERATION = STATUS_FLOAT_INVALID_OPERATION
EXCEPTION_FLT_OVERFLOW = STATUS_FLOAT_OVERFLOW
EXCEPTION_FLT_STACK_CHECK = STATUS_FLOAT_STACK_CHECK
EXCEPTION_FLT_UNDERFLOW = STATUS_FLOAT_UNDERFLOW
EXCEPTION_ILLEGAL_INSTRUCTION = STATUS_ILLEGAL_INSTRUCTION
EXCEPTION_IN_PAGE_ERROR = STATUS_IN_PAGE_ERROR
EXCEPTION_INT_DIVIDE_BY_ZERO = STATUS_INTEGER_DIVIDE_BY_ZERO
EXCEPTION_INT_OVERFLOW = STATUS_INTEGER_OVERFLOW
EXCEPTION_INVALID_DISPOSITION = STATUS_INVALID_DISPOSITION
EXCEPTION_NONCONTINUABLE_EXCEPTION = STATUS_NONCONTINUABLE_EXCEPTION
EXCEPTION_PRIV_INSTRUCTION = STATUS_PRIVILEGED_INSTRUCTION
EXCEPTION_SINGLE_STEP = STATUS_SINGLE_STEP
EXCEPTION_STACK_OVERFLOW = STATUS_STACK_OVERFLOW
EXCEPTION_GUARD_PAGE = STATUS_GUARD_PAGE_VIOLATION
EXCEPTION_INVALID_HANDLE = STATUS_INVALID_HANDLE
EXCEPTION_POSSIBLE_DEADLOCK = STATUS_POSSIBLE_DEADLOCK
EXCEPTION_WX86_BREAKPOINT = STATUS_WX86_BREAKPOINT
CONTROL_C_EXIT = STATUS_CONTROL_C_EXIT
DBG_CONTROL_C = long(0x40010005)
MS_VC_EXCEPTION = long(0x406D1388)
# Access violation types
ACCESS_VIOLATION_TYPE_READ = EXCEPTION_READ_FAULT
ACCESS_VIOLATION_TYPE_WRITE = EXCEPTION_WRITE_FAULT
ACCESS_VIOLATION_TYPE_DEP = EXCEPTION_EXECUTE_FAULT
# RIP event types
SLE_ERROR = 1
SLE_MINORERROR = 2
SLE_WARNING = 3
# DuplicateHandle constants
DUPLICATE_CLOSE_SOURCE = 0x00000001
DUPLICATE_SAME_ACCESS = 0x00000002
# GetFinalPathNameByHandle constants
FILE_NAME_NORMALIZED = 0x0
FILE_NAME_OPENED = 0x8
VOLUME_NAME_DOS = 0x0
VOLUME_NAME_GUID = 0x1
VOLUME_NAME_NONE = 0x4
VOLUME_NAME_NT = 0x2
# GetProductInfo constants
PRODUCT_BUSINESS = 0x00000006
PRODUCT_BUSINESS_N = 0x00000010
PRODUCT_CLUSTER_SERVER = 0x00000012
PRODUCT_DATACENTER_SERVER = 0x00000008
PRODUCT_DATACENTER_SERVER_CORE = 0x0000000C
PRODUCT_DATACENTER_SERVER_CORE_V = 0x00000027
PRODUCT_DATACENTER_SERVER_V = 0x00000025
PRODUCT_ENTERPRISE = 0x00000004
PRODUCT_ENTERPRISE_E = 0x00000046
PRODUCT_ENTERPRISE_N = 0x0000001B
PRODUCT_ENTERPRISE_SERVER = 0x0000000A
PRODUCT_ENTERPRISE_SERVER_CORE = 0x0000000E
PRODUCT_ENTERPRISE_SERVER_CORE_V = 0x00000029
PRODUCT_ENTERPRISE_SERVER_IA64 = 0x0000000F
PRODUCT_ENTERPRISE_SERVER_V = 0x00000026
PRODUCT_HOME_BASIC = 0x00000002
PRODUCT_HOME_BASIC_E = 0x00000043
PRODUCT_HOME_BASIC_N = 0x00000005
PRODUCT_HOME_PREMIUM = 0x00000003
PRODUCT_HOME_PREMIUM_E = 0x00000044
PRODUCT_HOME_PREMIUM_N = 0x0000001A
PRODUCT_HYPERV = 0x0000002A
PRODUCT_MEDIUMBUSINESS_SERVER_MANAGEMENT = 0x0000001E
PRODUCT_MEDIUMBUSINESS_SERVER_MESSAGING = 0x00000020
PRODUCT_MEDIUMBUSINESS_SERVER_SECURITY = 0x0000001F
PRODUCT_PROFESSIONAL = 0x00000030
PRODUCT_PROFESSIONAL_E = 0x00000045
PRODUCT_PROFESSIONAL_N = 0x00000031
PRODUCT_SERVER_FOR_SMALLBUSINESS = 0x00000018
PRODUCT_SERVER_FOR_SMALLBUSINESS_V = 0x00000023
PRODUCT_SERVER_FOUNDATION = 0x00000021
PRODUCT_SMALLBUSINESS_SERVER = 0x00000009
PRODUCT_STANDARD_SERVER = 0x00000007
PRODUCT_STANDARD_SERVER_CORE = 0x0000000D
PRODUCT_STANDARD_SERVER_CORE_V = 0x00000028
PRODUCT_STANDARD_SERVER_V = 0x00000024
PRODUCT_STARTER = 0x0000000B
PRODUCT_STARTER_E = 0x00000042
PRODUCT_STARTER_N = 0x0000002F
PRODUCT_STORAGE_ENTERPRISE_SERVER = 0x00000017
PRODUCT_STORAGE_EXPRESS_SERVER = 0x00000014
PRODUCT_STORAGE_STANDARD_SERVER = 0x00000015
PRODUCT_STORAGE_WORKGROUP_SERVER = 0x00000016
PRODUCT_UNDEFINED = 0x00000000
PRODUCT_UNLICENSED = 0xABCDABCD
PRODUCT_ULTIMATE = 0x00000001
PRODUCT_ULTIMATE_E = 0x00000047
PRODUCT_ULTIMATE_N = 0x0000001C
PRODUCT_WEB_SERVER = 0x00000011
PRODUCT_WEB_SERVER_CORE = 0x0000001D
# DEP policy flags
PROCESS_DEP_ENABLE = 1
PROCESS_DEP_DISABLE_ATL_THUNK_EMULATION = 2
# Error modes
SEM_FAILCRITICALERRORS = 0x001
SEM_NOGPFAULTERRORBOX = 0x002
SEM_NOALIGNMENTFAULTEXCEPT = 0x004
SEM_NOOPENFILEERRORBOX = 0x800
# GetHandleInformation / SetHandleInformation
HANDLE_FLAG_INHERIT = 0x00000001
HANDLE_FLAG_PROTECT_FROM_CLOSE = 0x00000002
#--- Handle wrappers ----------------------------------------------------------
class Handle (object):
    """
    Encapsulates Win32 handles to avoid leaking them.

    @type inherit: bool
    @ivar inherit: C{True} if the handle is to be inherited by child processes,
        C{False} otherwise.

    @type protectFromClose: bool
    @ivar protectFromClose: Set to C{True} to prevent the handle from being
        closed. Must be set to C{False} before you're done using the handle,
        or it will be left open until the debugger exits. Use with care!

    @see:
        L{ProcessHandle}, L{ThreadHandle}, L{FileHandle}, L{SnapshotHandle}
    """

    # XXX DEBUG
    # When this private flag is True each Handle will print a message to
    # standard output when it's created and destroyed. This is useful for
    # detecting handle leaks within WinAppDbg itself.
    __bLeakDetection = False

    def __init__(self, aHandle = None, bOwnership = True):
        """
        @type  aHandle: int
        @param aHandle: Win32 handle value.

        @type  bOwnership: bool
        @param bOwnership:
           C{True} if we own the handle and we need to close it.
           C{False} if someone else will be calling L{CloseHandle}.
        """
        super(Handle, self).__init__()
        self._value = self._normalize(aHandle)
        self.bOwnership = bOwnership
        if Handle.__bLeakDetection:     # XXX DEBUG
            print("INIT HANDLE (%r) %r" % (self.value, self))

    @property
    def value(self):
        # Raw handle value; becomes None after close().
        return self._value

    def __del__(self):
        """
        Closes the Win32 handle when the Python object is destroyed.
        """
        try:
            if Handle.__bLeakDetection:     # XXX DEBUG
                print("DEL HANDLE %r" % self)
            self.close()
        except Exception:
            # Never let exceptions escape a destructor.
            pass

    def __enter__(self):
        """
        Compatibility with the "C{with}" Python statement.
        """
        if Handle.__bLeakDetection:     # XXX DEBUG
            print("ENTER HANDLE %r" % self)
        return self

    def __exit__(self, type, value, traceback):
        """
        Compatibility with the "C{with}" Python statement.
        """
        if Handle.__bLeakDetection:     # XXX DEBUG
            print("EXIT HANDLE %r" % self)
        try:
            self.close()
        except Exception:
            # Mirror __del__: closing failures are not fatal on exit.
            pass

    def __copy__(self):
        """
        Duplicates the Win32 handle when copying the Python object.

        @rtype:  L{Handle}
        @return: A new handle to the same Win32 object.
        """
        return self.dup()

    def __deepcopy__(self, memo=None):
        """
        Duplicates the Win32 handle when deep-copying the Python object.

        @note: BUGFIX -- the original signature lacked the C{memo}
            parameter, so C{copy.deepcopy()} raised C{TypeError}. The
            kernel object itself cannot be deep-copied, so duplicating
            the handle is the correct behavior.

        @rtype:  L{Handle}
        @return: A new handle to the same win32 object.
        """
        return self.dup()

    @property
    def _as_parameter_(self):
        """
        Compatibility with ctypes.
        Allows passing transparently a Handle object to an API call.
        """
        return HANDLE(self.value)

    @staticmethod
    def from_param(value):
        """
        Compatibility with ctypes.
        Allows passing transparently a Handle object to an API call.

        @type  value: int
        @param value: Numeric handle value.
        """
        return HANDLE(value)

    def close(self):
        """
        Closes the Win32 handle.
        """
        if self.bOwnership and self.value not in (None, INVALID_HANDLE_VALUE):
            if Handle.__bLeakDetection:     # XXX DEBUG
                print("CLOSE HANDLE (%d) %r" % (self.value, self))
            try:
                self._close()
            finally:
                # Mark closed even if the API call failed.
                self._value = None

    def _close(self):
        """
        Low-level close method.
        This is a private method, do not call it.
        """
        CloseHandle(self.value)

    def dup(self):
        """
        @rtype:  L{Handle}
        @return: A new handle to the same Win32 object.

        @raise ValueError: The handle was already closed.
        """
        if self.value is None:
            raise ValueError("Closed handles can't be duplicated!")
        new_handle = DuplicateHandle(self.value)
        if Handle.__bLeakDetection:     # XXX DEBUG
            print("DUP HANDLE (%d -> %d) %r %r" % \
                (self.value, new_handle.value, self, new_handle))
        return new_handle

    @staticmethod
    def _normalize(value):
        """
        Normalize handle values.
        """
        # Unwrap ctypes objects, then coerce to a Python integer.
        if hasattr(value, 'value'):
            value = value.value
        if value is not None:
            value = long(value)
        return value

    def wait(self, dwMilliseconds = None):
        """
        Wait for the Win32 object to be signaled.

        @type  dwMilliseconds: int
        @param dwMilliseconds: (Optional) Timeout value in milliseconds.
            Use C{INFINITE} or C{None} for no timeout.

        @raise ValueError: The handle was already closed.
        """
        if self.value is None:
            raise ValueError("Handle is already closed!")
        if dwMilliseconds is None:
            dwMilliseconds = INFINITE
        r = WaitForSingleObject(self.value, dwMilliseconds)
        if r != WAIT_OBJECT_0:
            raise ctypes.WinError(r)

    def __repr__(self):
        return '<%s: %s>' % (self.__class__.__name__, self.value)

    def __get_inherit(self):
        if self.value is None:
            raise ValueError("Handle is already closed!")
        return bool( GetHandleInformation(self.value) & HANDLE_FLAG_INHERIT )

    def __set_inherit(self, value):
        if self.value is None:
            raise ValueError("Handle is already closed!")
        flag = (0, HANDLE_FLAG_INHERIT)[ bool(value) ]
        SetHandleInformation(self.value, flag, flag)

    # True if the handle is inherited by child processes.
    inherit = property(__get_inherit, __set_inherit)

    def __get_protectFromClose(self):
        if self.value is None:
            raise ValueError("Handle is already closed!")
        return bool( GetHandleInformation(self.value) & HANDLE_FLAG_PROTECT_FROM_CLOSE )

    def __set_protectFromClose(self, value):
        if self.value is None:
            raise ValueError("Handle is already closed!")
        flag = (0, HANDLE_FLAG_PROTECT_FROM_CLOSE)[ bool(value) ]
        SetHandleInformation(self.value, flag, flag)

    # True if the handle is protected against CloseHandle().
    protectFromClose = property(__get_protectFromClose, __set_protectFromClose)
class UserModeHandle (Handle):
    """
    Base class for non-kernel handles. Generally this means they are closed
    by special Win32 API functions instead of CloseHandle() and some standard
    operations (synchronizing, duplicating, inheritance) are not supported.

    @type _TYPE: C type
    @cvar _TYPE: C type to translate this handle to.
        Subclasses should override this.
        Defaults to L{HANDLE}.
    """

    # Subclasses should override this.
    _TYPE = HANDLE

    # This method must be implemented by subclasses.
    def _close(self):
        raise NotImplementedError()

    # Translation to C type.
    @property
    def _as_parameter_(self):
        return self._TYPE(self.value)

    # Translation to C type.
    # BUGFIX: this was a @staticmethod whose body referenced the undefined
    # name "self" (and ignored "value"), raising NameError whenever ctypes
    # invoked it. As a classmethod it wraps the given value in the
    # subclass' C type, mirroring Handle.from_param().
    @classmethod
    def from_param(cls, value):
        return cls._TYPE(value)

    # Operation not supported.
    @property
    def inherit(self):
        return False

    # Operation not supported.
    @property
    def protectFromClose(self):
        return False

    # Operation not supported.
    def dup(self):
        raise NotImplementedError()

    # Operation not supported.
    def wait(self, dwMilliseconds = None):
        raise NotImplementedError()
class ProcessHandle (Handle):
    """
    Win32 process handle.

    @type dwAccess: int
    @ivar dwAccess: Current access flags to this handle.
        This is the same value passed to L{OpenProcess}.
        Can only be C{None} if C{aHandle} is also C{None}.
        Defaults to L{PROCESS_ALL_ACCESS}.

    @see: L{Handle}
    """

    def __init__(self, aHandle = None, bOwnership = True,
                       dwAccess = PROCESS_ALL_ACCESS):
        """
        @type  aHandle: int
        @param aHandle: Win32 handle value.

        @type  bOwnership: bool
        @param bOwnership:
            C{True} if we own the handle and we need to close it.
            C{False} if someone else will be calling L{CloseHandle}.

        @type  dwAccess: int
        @param dwAccess: Current access flags to this handle.
            This is the same value passed to L{OpenProcess}.
            Can only be C{None} if C{aHandle} is also C{None}.
            Defaults to L{PROCESS_ALL_ACCESS}.
        """
        super(ProcessHandle, self).__init__(aHandle, bOwnership)
        self.dwAccess = dwAccess
        # A live handle without access flags makes no sense; reject it.
        if dwAccess is None and aHandle is not None:
            raise TypeError(
                "Missing access flags for process handle: %x" % aHandle)

    def get_pid(self):
        """
        @rtype:  int
        @return: Process global ID.
        """
        return GetProcessId(self.value)
class ThreadHandle (Handle):
    """
    Win32 thread handle.

    @type dwAccess: int
    @ivar dwAccess: Current access flags to this handle.
        This is the same value passed to L{OpenThread}.
        Can only be C{None} if C{aHandle} is also C{None}.
        Defaults to L{THREAD_ALL_ACCESS}.

    @see: L{Handle}
    """

    def __init__(self, aHandle = None, bOwnership = True,
                       dwAccess = THREAD_ALL_ACCESS):
        """
        @type  aHandle: int
        @param aHandle: Win32 handle value.

        @type  bOwnership: bool
        @param bOwnership:
            C{True} if we own the handle and we need to close it.
            C{False} if someone else will be calling L{CloseHandle}.

        @type  dwAccess: int
        @param dwAccess: Current access flags to this handle.
            This is the same value passed to L{OpenThread}.
            Can only be C{None} if C{aHandle} is also C{None}.
            Defaults to L{THREAD_ALL_ACCESS}.
        """
        super(ThreadHandle, self).__init__(aHandle, bOwnership)
        self.dwAccess = dwAccess
        # A live handle without access flags makes no sense; reject it.
        if dwAccess is None and aHandle is not None:
            raise TypeError(
                "Missing access flags for thread handle: %x" % aHandle)

    def get_tid(self):
        """
        @rtype:  int
        @return: Thread global ID.
        """
        return GetThreadId(self.value)
class FileHandle (Handle):
    """
    Win32 file handle.

    @see: L{Handle}
    """

    def get_filename(self):
        """
        @rtype:  None or str
        @return: Name of the open file, or C{None} if unavailable.
        """
        # XXX BUG
        #
        # This code truncates the first two bytes of the path.
        # It seems to be the expected behavior of NtQueryInformationFile.
        #
        # My guess is it only returns the NT pathname, without the device
        # name. It's like dropping the drive letter in a Win32 pathname.
        #
        # Note that using the "official" GetFileInformationByHandleEx
        # API introduced in Vista doesn't change the results!
        buffer_size = 0x1004
        file_info   = ctypes.create_string_buffer(buffer_size)
        try:
            # Vista and above expose this through kernel32.
            GetFileInformationByHandleEx(self.value,
                                FILE_INFO_BY_HANDLE_CLASS.FileNameInfo,
                                file_info, buffer_size)
        except AttributeError:
            # Fall back to the native NT API on older systems.
            from winappdbg.win32.ntdll import NtQueryInformationFile, \
                                              FileNameInformation, \
                                              FILE_NAME_INFORMATION
            NtQueryInformationFile(self.value,
                                   FileNameInformation,
                                   file_info,
                                   buffer_size)
        # The buffer holds a DWORD length followed by UTF-16 characters.
        name = compat.unicode(file_info.raw[sizeof(DWORD):], 'U16')
        name = ctypes.create_unicode_buffer(name).value
        if not name:
            return None
        if name[1:2] != ':':
            # When the drive letter is missing, we'll assume SYSTEMROOT.
            # Not a good solution but it could be worse.
            import os
            name = os.environ['SYSTEMROOT'][:2] + name
        return name
class FileMappingHandle (Handle):
    """
    File mapping handle.

    Adds no behavior over L{Handle}; exists so mapping handles can be
    told apart by type.
    @see: L{Handle}
    """
    pass
# XXX maybe add functions related to the toolhelp snapshots here?
class SnapshotHandle (Handle):
    """
    Toolhelp32 snapshot handle.

    Adds no behavior over L{Handle}; exists so snapshot handles can be
    told apart by type.
    @see: L{Handle}
    """
    pass
#--- Structure wrappers -------------------------------------------------------
class ProcessInformation (object):
    """
    Process information object returned by L{CreateProcess}.

    Wraps the two raw handles from a PROCESS_INFORMATION structure in
    L{ProcessHandle} / L{ThreadHandle} objects (which take ownership by
    default) and keeps the process and thread IDs as plain integers.
    """
    def __init__(self, pi):
        # pi: a PROCESS_INFORMATION structure filled in by CreateProcess.
        self.hProcess = ProcessHandle(pi.hProcess)
        self.hThread = ThreadHandle(pi.hThread)
        self.dwProcessId = pi.dwProcessId
        self.dwThreadId = pi.dwThreadId
# Don't psyco-optimize this class because it needs to be serialized.
class MemoryBasicInformation (object):
    """
    Memory information object returned by L{VirtualQueryEx}.

    Mirrors the fields of a MEMORY_BASIC_INFORMATION structure, and may
    additionally carry C{content} (a snapshot of the region's data) and
    C{filename} attributes when copied from another instance that has them.
    """

    # Page protection masks used by the is_*() predicates below.
    READABLE = (
        PAGE_EXECUTE_READ |
        PAGE_EXECUTE_READWRITE |
        PAGE_EXECUTE_WRITECOPY |
        PAGE_READONLY |
        PAGE_READWRITE |
        PAGE_WRITECOPY
    )
    WRITEABLE = (
        PAGE_EXECUTE_READWRITE |
        PAGE_EXECUTE_WRITECOPY |
        PAGE_READWRITE |
        PAGE_WRITECOPY
    )
    COPY_ON_WRITE = (
        PAGE_EXECUTE_WRITECOPY |
        PAGE_WRITECOPY
    )
    EXECUTABLE = (
        PAGE_EXECUTE |
        PAGE_EXECUTE_READ |
        PAGE_EXECUTE_READWRITE |
        PAGE_EXECUTE_WRITECOPY
    )
    EXECUTABLE_AND_WRITEABLE = (
        PAGE_EXECUTE_READWRITE |
        PAGE_EXECUTE_WRITECOPY
    )

    def __init__(self, mbi=None):
        """
        @type  mbi: L{MEMORY_BASIC_INFORMATION} or L{MemoryBasicInformation}
        @param mbi: Either a L{MEMORY_BASIC_INFORMATION} structure or another
            L{MemoryBasicInformation} instance.
        """
        if mbi is None:
            self.BaseAddress = None
            self.AllocationBase = None
            self.AllocationProtect = None
            self.RegionSize = None
            self.State = None
            self.Protect = None
            self.Type = None
        else:
            self.BaseAddress = mbi.BaseAddress
            self.AllocationBase = mbi.AllocationBase
            self.AllocationProtect = mbi.AllocationProtect
            self.RegionSize = mbi.RegionSize
            self.State = mbi.State
            self.Protect = mbi.Protect
            self.Type = mbi.Type
            # Only used when copying MemoryBasicInformation objects, instead of
            # instancing them from a MEMORY_BASIC_INFORMATION structure.
            if hasattr(mbi, 'content'):
                self.content = mbi.content
            if hasattr(mbi, 'filename'):
                # BUG FIX: the original assigned mbi.filename to self.content,
                # clobbering the copied memory contents with the file name and
                # losing the filename attribute on the copy.
                self.filename = mbi.filename

    def __contains__(self, address):
        """
        Test if the given memory address falls within this memory region.

        @type  address: int
        @param address: Memory address to test.

        @rtype:  bool
        @return: C{True} if the given memory address falls within this memory
            region, C{False} otherwise.
        """
        return self.BaseAddress <= address < (self.BaseAddress + self.RegionSize)

    def is_free(self):
        """
        @rtype:  bool
        @return: C{True} if the memory in this region is free.
        """
        return self.State == MEM_FREE

    def is_reserved(self):
        """
        @rtype:  bool
        @return: C{True} if the memory in this region is reserved.
        """
        return self.State == MEM_RESERVE

    def is_commited(self):
        """
        @rtype:  bool
        @return: C{True} if the memory in this region is commited.
        """
        # NOTE: the name keeps the historical misspelling of "committed"
        # for backward compatibility with existing callers.
        return self.State == MEM_COMMIT

    def is_image(self):
        """
        @rtype:  bool
        @return: C{True} if the memory in this region belongs to an executable
            image.
        """
        return self.Type == MEM_IMAGE

    def is_mapped(self):
        """
        @rtype:  bool
        @return: C{True} if the memory in this region belongs to a mapped file.
        """
        return self.Type == MEM_MAPPED

    def is_private(self):
        """
        @rtype:  bool
        @return: C{True} if the memory in this region is private.
        """
        return self.Type == MEM_PRIVATE

    def is_guard(self):
        """
        @rtype:  bool
        @return: C{True} if all pages in this region are guard pages.
        """
        return self.is_commited() and bool(self.Protect & PAGE_GUARD)

    def has_content(self):
        """
        @rtype:  bool
        @return: C{True} if the memory in this region has any data in it.
        """
        return self.is_commited() and not bool(self.Protect & (PAGE_GUARD | PAGE_NOACCESS))

    def is_readable(self):
        """
        @rtype:  bool
        @return: C{True} if all pages in this region are readable.
        """
        return self.has_content() and bool(self.Protect & self.READABLE)

    def is_writeable(self):
        """
        @rtype:  bool
        @return: C{True} if all pages in this region are writeable.
        """
        return self.has_content() and bool(self.Protect & self.WRITEABLE)

    def is_copy_on_write(self):
        """
        @rtype:  bool
        @return: C{True} if all pages in this region are marked as
            copy-on-write. This means the pages are writeable, but changes
            are not propagated to disk.
        @note:
            Tipically data sections in executable images are marked like this.
        """
        return self.has_content() and bool(self.Protect & self.COPY_ON_WRITE)

    def is_executable(self):
        """
        @rtype:  bool
        @return: C{True} if all pages in this region are executable.
        @note: Executable pages are always readable.
        """
        return self.has_content() and bool(self.Protect & self.EXECUTABLE)

    def is_executable_and_writeable(self):
        """
        @rtype:  bool
        @return: C{True} if all pages in this region are executable and
            writeable.
        @note: The presence of such pages make memory corruption
            vulnerabilities much easier to exploit.
        """
        return self.has_content() and bool(self.Protect & self.EXECUTABLE_AND_WRITEABLE)
class ProcThreadAttributeList (object):
    """
    Extended process and thread attribute support.

    To be used with L{STARTUPINFOEX}.
    Only available for Windows Vista and above.

    @type AttributeList: list of tuple( int, ctypes-compatible object )
    @ivar AttributeList: List of (Attribute, Value) pairs.

    @type AttributeListBuffer: L{LPPROC_THREAD_ATTRIBUTE_LIST}
    @ivar AttributeListBuffer: Memory buffer used to store the attribute list.

    @see: L{InitializeProcThreadAttributeList},
        L{UpdateProcThreadAttribute},
        L{DeleteProcThreadAttributeList} and
        L{STARTUPINFOEX}.
    """

    def __init__(self, AttributeList):
        """
        @type  AttributeList: list of tuple( int, ctypes-compatible object )
        @param AttributeList: List of (Attribute, Value) pairs.
        """
        self.AttributeList = AttributeList
        self.AttributeListBuffer = InitializeProcThreadAttributeList(
            len(AttributeList))
        try:
            for Attribute, Value in AttributeList:
                UpdateProcThreadAttribute(self.AttributeListBuffer,
                                          Attribute, Value)
        except:
            # Free the partially built buffer before propagating the error.
            ProcThreadAttributeList.__del__(self)
            raise

    def __del__(self):
        try:
            DeleteProcThreadAttributeList(self.AttributeListBuffer)
            del self.AttributeListBuffer
        except Exception:
            pass

    def __copy__(self):
        return self.__deepcopy__()

    # BUG FIX: copy.deepcopy() invokes __deepcopy__(self, memo); the original
    # signature took no memo argument, so copy.deepcopy() raised TypeError.
    # The memo is not needed here because a brand new native buffer is built
    # from AttributeList anyway, so it defaults to None (keeping __copy__'s
    # zero-argument call working).
    def __deepcopy__(self, memo = None):
        return self.__class__(self.AttributeList)

    @property
    def value(self):
        return ctypes.cast(ctypes.pointer(self.AttributeListBuffer), LPVOID)

    @property
    def _as_parameter_(self):
        return self.value

    # XXX TODO
    @staticmethod
    def from_param(value):
        raise NotImplementedError()
#--- OVERLAPPED structure -----------------------------------------------------
# typedef struct _OVERLAPPED {
# ULONG_PTR Internal;
# ULONG_PTR InternalHigh;
# union {
# struct {
# DWORD Offset;
# DWORD OffsetHigh;
# } ;
# PVOID Pointer;
# } ;
# HANDLE hEvent;
# }OVERLAPPED, *LPOVERLAPPED;
# Anonymous inner struct of the OVERLAPPED union: the 64-bit file offset
# split into two DWORDs.
class _OVERLAPPED_STRUCT(Structure):
    _fields_ = [
        ('Offset', DWORD),
        ('OffsetHigh', DWORD),
    ]
# The offset pair shares storage with the Pointer member.
class _OVERLAPPED_UNION(Union):
    _fields_ = [
        ('s', _OVERLAPPED_STRUCT),
        ('Pointer', PVOID),
    ]
# ctypes mirror of OVERLAPPED, used by asynchronous (overlapped) I/O calls.
class OVERLAPPED(Structure):
    _fields_ = [
        ('Internal', ULONG_PTR),
        ('InternalHigh', ULONG_PTR),
        ('u', _OVERLAPPED_UNION),
        ('hEvent', HANDLE),
    ]
LPOVERLAPPED = POINTER(OVERLAPPED)
#--- SECURITY_ATTRIBUTES structure --------------------------------------------
# typedef struct _SECURITY_ATTRIBUTES {
# DWORD nLength;
# LPVOID lpSecurityDescriptor;
# BOOL bInheritHandle;
# } SECURITY_ATTRIBUTES, *PSECURITY_ATTRIBUTES, *LPSECURITY_ATTRIBUTES;
# ctypes mirror of SECURITY_ATTRIBUTES; nLength must be set to
# sizeof(SECURITY_ATTRIBUTES) by the caller before use.
class SECURITY_ATTRIBUTES(Structure):
    _fields_ = [
        ('nLength', DWORD),
        ('lpSecurityDescriptor', LPVOID),
        ('bInheritHandle', BOOL),
    ]
LPSECURITY_ATTRIBUTES = POINTER(SECURITY_ATTRIBUTES)
# --- Extended process and thread attribute support ---------------------------
# Opaque pointer types for the attribute list buffer.
PPROC_THREAD_ATTRIBUTE_LIST = LPVOID
LPPROC_THREAD_ATTRIBUTE_LIST = PPROC_THREAD_ATTRIBUTE_LIST
# Bit layout of an attribute value: low word is the attribute number,
# high word carries the usage flags below.
PROC_THREAD_ATTRIBUTE_NUMBER = 0x0000FFFF
PROC_THREAD_ATTRIBUTE_THREAD = 0x00010000 # Attribute may be used with thread creation
PROC_THREAD_ATTRIBUTE_INPUT = 0x00020000 # Attribute is input only
PROC_THREAD_ATTRIBUTE_ADDITIVE = 0x00040000 # Attribute may be "accumulated," e.g. bitmasks, counters, etc.
# PROC_THREAD_ATTRIBUTE_NUM
ProcThreadAttributeParentProcess = 0
ProcThreadAttributeExtendedFlags = 1
ProcThreadAttributeHandleList = 2
ProcThreadAttributeGroupAffinity = 3
ProcThreadAttributePreferredNode = 4
ProcThreadAttributeIdealProcessor = 5
ProcThreadAttributeUmsThread = 6
ProcThreadAttributeMitigationPolicy = 7
ProcThreadAttributeMax = 8
# Full attribute values: number combined with the applicable usage flags.
PROC_THREAD_ATTRIBUTE_PARENT_PROCESS = ProcThreadAttributeParentProcess | PROC_THREAD_ATTRIBUTE_INPUT
PROC_THREAD_ATTRIBUTE_EXTENDED_FLAGS = ProcThreadAttributeExtendedFlags | PROC_THREAD_ATTRIBUTE_INPUT | PROC_THREAD_ATTRIBUTE_ADDITIVE
PROC_THREAD_ATTRIBUTE_HANDLE_LIST = ProcThreadAttributeHandleList | PROC_THREAD_ATTRIBUTE_INPUT
PROC_THREAD_ATTRIBUTE_GROUP_AFFINITY = ProcThreadAttributeGroupAffinity | PROC_THREAD_ATTRIBUTE_THREAD | PROC_THREAD_ATTRIBUTE_INPUT
PROC_THREAD_ATTRIBUTE_PREFERRED_NODE = ProcThreadAttributePreferredNode | PROC_THREAD_ATTRIBUTE_INPUT
PROC_THREAD_ATTRIBUTE_IDEAL_PROCESSOR = ProcThreadAttributeIdealProcessor | PROC_THREAD_ATTRIBUTE_THREAD | PROC_THREAD_ATTRIBUTE_INPUT
PROC_THREAD_ATTRIBUTE_UMS_THREAD = ProcThreadAttributeUmsThread | PROC_THREAD_ATTRIBUTE_THREAD | PROC_THREAD_ATTRIBUTE_INPUT
PROC_THREAD_ATTRIBUTE_MITIGATION_POLICY = ProcThreadAttributeMitigationPolicy | PROC_THREAD_ATTRIBUTE_INPUT
# Mitigation policy bit flags for PROC_THREAD_ATTRIBUTE_MITIGATION_POLICY.
PROCESS_CREATION_MITIGATION_POLICY_DEP_ENABLE = 0x01
PROCESS_CREATION_MITIGATION_POLICY_DEP_ATL_THUNK_ENABLE = 0x02
PROCESS_CREATION_MITIGATION_POLICY_SEHOP_ENABLE = 0x04
#--- VS_FIXEDFILEINFO structure -----------------------------------------------
# struct VS_FIXEDFILEINFO {
# DWORD dwSignature;
# DWORD dwStrucVersion;
# DWORD dwFileVersionMS;
# DWORD dwFileVersionLS;
# DWORD dwProductVersionMS;
# DWORD dwProductVersionLS;
# DWORD dwFileFlagsMask;
# DWORD dwFileFlags;
# DWORD dwFileOS;
# DWORD dwFileType;
# DWORD dwFileSubtype;
# DWORD dwFileDateMS;
# DWORD dwFileDateLS;
# };
# ctypes mirror of VS_FIXEDFILEINFO (fixed portion of a version resource).
class VS_FIXEDFILEINFO (Structure):
    _fields_ = [
        ("dwSignature", DWORD), # 0xFEEF04BD
        ("dwStrucVersion", DWORD),
        ("dwFileVersionMS", DWORD),
        ("dwFileVersionLS", DWORD),
        ("dwProductVersionMS", DWORD),
        ("dwProductVersionLS", DWORD),
        ("dwFileFlagsMask", DWORD),
        ("dwFileFlags", DWORD),
        ("dwFileOS", DWORD),
        ("dwFileType", DWORD),
        ("dwFileSubtype", DWORD),
        ("dwFileDateMS", DWORD),
        ("dwFileDateLS", DWORD),
    ]
#--- THREADNAME_INFO structure ------------------------------------------------
# typedef struct tagTHREADNAME_INFO
# {
# DWORD dwType; // Must be 0x1000.
# LPCSTR szName; // Pointer to name (in user addr space).
# DWORD dwThreadID; // Thread ID (-1=caller thread).
# DWORD dwFlags; // Reserved for future use, must be zero.
# } THREADNAME_INFO;
# ctypes mirror of the structure used by the MS_VC_EXCEPTION thread-naming
# convention; szName is declared LPVOID because it points into another
# process' address space.
class THREADNAME_INFO(Structure):
    _fields_ = [
        ("dwType", DWORD), # 0x1000
        ("szName", LPVOID), # remote pointer
        ("dwThreadID", DWORD), # -1 usually
        ("dwFlags", DWORD), # 0
    ]
#--- MEMORY_BASIC_INFORMATION structure ---------------------------------------
# typedef struct _MEMORY_BASIC_INFORMATION32 {
# DWORD BaseAddress;
# DWORD AllocationBase;
# DWORD AllocationProtect;
# DWORD RegionSize;
# DWORD State;
# DWORD Protect;
# DWORD Type;
# } MEMORY_BASIC_INFORMATION32, *PMEMORY_BASIC_INFORMATION32;
# Explicit 32-bit layout, for reading the structure from a 32-bit process.
class MEMORY_BASIC_INFORMATION32(Structure):
    _fields_ = [
        ('BaseAddress', DWORD), # remote pointer
        ('AllocationBase', DWORD), # remote pointer
        ('AllocationProtect', DWORD),
        ('RegionSize', DWORD),
        ('State', DWORD),
        ('Protect', DWORD),
        ('Type', DWORD),
    ]
# typedef struct DECLSPEC_ALIGN(16) _MEMORY_BASIC_INFORMATION64 {
# ULONGLONG BaseAddress;
# ULONGLONG AllocationBase;
# DWORD AllocationProtect;
# DWORD __alignment1;
# ULONGLONG RegionSize;
# DWORD State;
# DWORD Protect;
# DWORD Type;
# DWORD __alignment2;
# } MEMORY_BASIC_INFORMATION64, *PMEMORY_BASIC_INFORMATION64;
# Explicit 64-bit layout with the padding fields spelled out.
class MEMORY_BASIC_INFORMATION64(Structure):
    _fields_ = [
        ('BaseAddress', ULONGLONG), # remote pointer
        ('AllocationBase', ULONGLONG), # remote pointer
        ('AllocationProtect', DWORD),
        ('__alignment1', DWORD),
        ('RegionSize', ULONGLONG),
        ('State', DWORD),
        ('Protect', DWORD),
        ('Type', DWORD),
        ('__alignment2', DWORD),
    ]
# typedef struct _MEMORY_BASIC_INFORMATION {
# PVOID BaseAddress;
# PVOID AllocationBase;
# DWORD AllocationProtect;
# SIZE_T RegionSize;
# DWORD State;
# DWORD Protect;
# DWORD Type;
# } MEMORY_BASIC_INFORMATION, *PMEMORY_BASIC_INFORMATION;
# Native-size layout, matching the bitness of the Python interpreter.
class MEMORY_BASIC_INFORMATION(Structure):
    _fields_ = [
        ('BaseAddress', SIZE_T), # remote pointer
        ('AllocationBase', SIZE_T), # remote pointer
        ('AllocationProtect', DWORD),
        ('RegionSize', SIZE_T),
        ('State', DWORD),
        ('Protect', DWORD),
        ('Type', DWORD),
    ]
PMEMORY_BASIC_INFORMATION = POINTER(MEMORY_BASIC_INFORMATION)
#--- BY_HANDLE_FILE_INFORMATION structure -------------------------------------
# typedef struct _FILETIME {
# DWORD dwLowDateTime;
# DWORD dwHighDateTime;
# } FILETIME, *PFILETIME;
# 64-bit timestamp split into two DWORDs (100ns intervals since 1601).
class FILETIME(Structure):
    _fields_ = [
        ('dwLowDateTime', DWORD),
        ('dwHighDateTime', DWORD),
    ]
LPFILETIME = POINTER(FILETIME)
# typedef struct _SYSTEMTIME {
# WORD wYear;
# WORD wMonth;
# WORD wDayOfWeek;
# WORD wDay;
# WORD wHour;
# WORD wMinute;
# WORD wSecond;
# WORD wMilliseconds;
# }SYSTEMTIME, *PSYSTEMTIME;
class SYSTEMTIME(Structure):
    _fields_ = [
        ('wYear', WORD),
        ('wMonth', WORD),
        ('wDayOfWeek', WORD),
        ('wDay', WORD),
        ('wHour', WORD),
        ('wMinute', WORD),
        ('wSecond', WORD),
        ('wMilliseconds', WORD),
    ]
LPSYSTEMTIME = POINTER(SYSTEMTIME)
# typedef struct _BY_HANDLE_FILE_INFORMATION {
# DWORD dwFileAttributes;
# FILETIME ftCreationTime;
# FILETIME ftLastAccessTime;
# FILETIME ftLastWriteTime;
# DWORD dwVolumeSerialNumber;
# DWORD nFileSizeHigh;
# DWORD nFileSizeLow;
# DWORD nNumberOfLinks;
# DWORD nFileIndexHigh;
# DWORD nFileIndexLow;
# } BY_HANDLE_FILE_INFORMATION, *PBY_HANDLE_FILE_INFORMATION;
# Filled in by GetFileInformationByHandle.
class BY_HANDLE_FILE_INFORMATION(Structure):
    _fields_ = [
        ('dwFileAttributes', DWORD),
        ('ftCreationTime', FILETIME),
        ('ftLastAccessTime', FILETIME),
        ('ftLastWriteTime', FILETIME),
        ('dwVolumeSerialNumber', DWORD),
        ('nFileSizeHigh', DWORD),
        ('nFileSizeLow', DWORD),
        ('nNumberOfLinks', DWORD),
        ('nFileIndexHigh', DWORD),
        ('nFileIndexLow', DWORD),
    ]
LPBY_HANDLE_FILE_INFORMATION = POINTER(BY_HANDLE_FILE_INFORMATION)
# typedef enum _FILE_INFO_BY_HANDLE_CLASS {
# FileBasicInfo = 0,
# FileStandardInfo = 1,
# FileNameInfo = 2,
# FileRenameInfo = 3,
# FileDispositionInfo = 4,
# FileAllocationInfo = 5,
# FileEndOfFileInfo = 6,
# FileStreamInfo = 7,
# FileCompressionInfo = 8,
# FileAttributeTagInfo = 9,
# FileIdBothDirectoryInfo = 10,
# FileIdBothDirectoryRestartInfo = 11,
# FileIoPriorityHintInfo = 12,
# MaximumFileInfoByHandlesClass = 13
# } FILE_INFO_BY_HANDLE_CLASS, *PFILE_INFO_BY_HANDLE_CLASS;
class FILE_INFO_BY_HANDLE_CLASS(object):
    # Python mirror of the FILE_INFO_BY_HANDLE_CLASS C enumeration.
    # The members are unpacked in declaration order, so each one gets
    # the same value as its C counterpart (0 through 13).
    (FileBasicInfo,
     FileStandardInfo,
     FileNameInfo,
     FileRenameInfo,
     FileDispositionInfo,
     FileAllocationInfo,
     FileEndOfFileInfo,
     FileStreamInfo,
     FileCompressionInfo,
     FileAttributeTagInfo,
     FileIdBothDirectoryInfo,
     FileIdBothDirectoryRestartInfo,
     FileIoPriorityHintInfo,
     MaximumFileInfoByHandlesClass) = range(14)
# typedef struct _FILE_NAME_INFO {
# DWORD FileNameLength;
# WCHAR FileName[1];
# } FILE_NAME_INFO, *PFILE_NAME_INFO;
##class FILE_NAME_INFO(Structure):
## _fields_ = [
## ('FileNameLength', DWORD),
## ('FileName', WCHAR * 1),
## ]
# TO DO: add more structures used by GetFileInformationByHandleEx()
#--- PROCESS_INFORMATION structure --------------------------------------------
# typedef struct _PROCESS_INFORMATION {
# HANDLE hProcess;
# HANDLE hThread;
# DWORD dwProcessId;
# DWORD dwThreadId;
# } PROCESS_INFORMATION, *PPROCESS_INFORMATION, *LPPROCESS_INFORMATION;
# Raw ctypes structure filled in by CreateProcess; see the
# ProcessInformation wrapper class for the ownership-aware version.
class PROCESS_INFORMATION(Structure):
    _fields_ = [
        ('hProcess', HANDLE),
        ('hThread', HANDLE),
        ('dwProcessId', DWORD),
        ('dwThreadId', DWORD),
    ]
LPPROCESS_INFORMATION = POINTER(PROCESS_INFORMATION)
#--- STARTUPINFO and STARTUPINFOEX structures ---------------------------------
# typedef struct _STARTUPINFO {
# DWORD cb;
# LPTSTR lpReserved;
# LPTSTR lpDesktop;
# LPTSTR lpTitle;
# DWORD dwX;
# DWORD dwY;
# DWORD dwXSize;
# DWORD dwYSize;
# DWORD dwXCountChars;
# DWORD dwYCountChars;
# DWORD dwFillAttribute;
# DWORD dwFlags;
# WORD wShowWindow;
# WORD cbReserved2;
# LPBYTE lpReserved2;
# HANDLE hStdInput;
# HANDLE hStdOutput;
# HANDLE hStdError;
# }STARTUPINFO, *LPSTARTUPINFO;
# ANSI variant (LPSTR strings); cb must be set to the structure size.
class STARTUPINFO(Structure):
    _fields_ = [
        ('cb', DWORD),
        ('lpReserved', LPSTR),
        ('lpDesktop', LPSTR),
        ('lpTitle', LPSTR),
        ('dwX', DWORD),
        ('dwY', DWORD),
        ('dwXSize', DWORD),
        ('dwYSize', DWORD),
        ('dwXCountChars', DWORD),
        ('dwYCountChars', DWORD),
        ('dwFillAttribute', DWORD),
        ('dwFlags', DWORD),
        ('wShowWindow', WORD),
        ('cbReserved2', WORD),
        ('lpReserved2', LPVOID), # LPBYTE
        ('hStdInput', HANDLE),
        ('hStdOutput', HANDLE),
        ('hStdError', HANDLE),
    ]
LPSTARTUPINFO = POINTER(STARTUPINFO)
# typedef struct _STARTUPINFOEX {
# STARTUPINFO StartupInfo;
# PPROC_THREAD_ATTRIBUTE_LIST lpAttributeList;
# } STARTUPINFOEX, *LPSTARTUPINFOEX;
# Extended variant carrying a ProcThreadAttributeList buffer (Vista+).
class STARTUPINFOEX(Structure):
    _fields_ = [
        ('StartupInfo', STARTUPINFO),
        ('lpAttributeList', PPROC_THREAD_ATTRIBUTE_LIST),
    ]
LPSTARTUPINFOEX = POINTER(STARTUPINFOEX)
# Unicode variant (LPWSTR strings), same layout otherwise.
class STARTUPINFOW(Structure):
    _fields_ = [
        ('cb', DWORD),
        ('lpReserved', LPWSTR),
        ('lpDesktop', LPWSTR),
        ('lpTitle', LPWSTR),
        ('dwX', DWORD),
        ('dwY', DWORD),
        ('dwXSize', DWORD),
        ('dwYSize', DWORD),
        ('dwXCountChars', DWORD),
        ('dwYCountChars', DWORD),
        ('dwFillAttribute', DWORD),
        ('dwFlags', DWORD),
        ('wShowWindow', WORD),
        ('cbReserved2', WORD),
        ('lpReserved2', LPVOID), # LPBYTE
        ('hStdInput', HANDLE),
        ('hStdOutput', HANDLE),
        ('hStdError', HANDLE),
    ]
LPSTARTUPINFOW = POINTER(STARTUPINFOW)
class STARTUPINFOEXW(Structure):
    _fields_ = [
        ('StartupInfo', STARTUPINFOW),
        ('lpAttributeList', PPROC_THREAD_ATTRIBUTE_LIST),
    ]
LPSTARTUPINFOEXW = POINTER(STARTUPINFOEXW)
#--- JIT_DEBUG_INFO structure -------------------------------------------------
# typedef struct _JIT_DEBUG_INFO {
# DWORD dwSize;
# DWORD dwProcessorArchitecture;
# DWORD dwThreadID;
# DWORD dwReserved0;
# ULONG64 lpExceptionAddress;
# ULONG64 lpExceptionRecord;
# ULONG64 lpContextRecord;
# } JIT_DEBUG_INFO, *LPJIT_DEBUG_INFO;
# Pointers are declared ULONG64 in the C definition, so one layout serves
# both 32 and 64 bit targets (hence the aliases below).
class JIT_DEBUG_INFO(Structure):
    _fields_ = [
        ('dwSize', DWORD),
        ('dwProcessorArchitecture', DWORD),
        ('dwThreadID', DWORD),
        ('dwReserved0', DWORD),
        ('lpExceptionAddress', ULONG64),
        ('lpExceptionRecord', ULONG64),
        ('lpContextRecord', ULONG64),
    ]
JIT_DEBUG_INFO32 = JIT_DEBUG_INFO
JIT_DEBUG_INFO64 = JIT_DEBUG_INFO
LPJIT_DEBUG_INFO = POINTER(JIT_DEBUG_INFO)
LPJIT_DEBUG_INFO32 = POINTER(JIT_DEBUG_INFO32)
LPJIT_DEBUG_INFO64 = POINTER(JIT_DEBUG_INFO64)
#--- DEBUG_EVENT structure ----------------------------------------------------
# typedef struct _EXCEPTION_RECORD32 {
# DWORD ExceptionCode;
# DWORD ExceptionFlags;
# DWORD ExceptionRecord;
# DWORD ExceptionAddress;
# DWORD NumberParameters;
# DWORD ExceptionInformation[EXCEPTION_MAXIMUM_PARAMETERS];
# } EXCEPTION_RECORD32, *PEXCEPTION_RECORD32;
# Explicit 32-bit layout (pointers stored as DWORD).
class EXCEPTION_RECORD32(Structure):
    _fields_ = [
        ('ExceptionCode', DWORD),
        ('ExceptionFlags', DWORD),
        ('ExceptionRecord', DWORD),
        ('ExceptionAddress', DWORD),
        ('NumberParameters', DWORD),
        ('ExceptionInformation', DWORD * EXCEPTION_MAXIMUM_PARAMETERS),
    ]
PEXCEPTION_RECORD32 = POINTER(EXCEPTION_RECORD32)
# typedef struct _EXCEPTION_RECORD64 {
# DWORD ExceptionCode;
# DWORD ExceptionFlags;
# DWORD64 ExceptionRecord;
# DWORD64 ExceptionAddress;
# DWORD NumberParameters;
# DWORD __unusedAlignment;
# DWORD64 ExceptionInformation[EXCEPTION_MAXIMUM_PARAMETERS];
# } EXCEPTION_RECORD64, *PEXCEPTION_RECORD64;
# Explicit 64-bit layout (pointers stored as DWORD64, with padding).
class EXCEPTION_RECORD64(Structure):
    _fields_ = [
        ('ExceptionCode', DWORD),
        ('ExceptionFlags', DWORD),
        ('ExceptionRecord', DWORD64),
        ('ExceptionAddress', DWORD64),
        ('NumberParameters', DWORD),
        ('__unusedAlignment', DWORD),
        ('ExceptionInformation', DWORD64 * EXCEPTION_MAXIMUM_PARAMETERS),
    ]
PEXCEPTION_RECORD64 = POINTER(EXCEPTION_RECORD64)
# typedef struct _EXCEPTION_RECORD {
# DWORD ExceptionCode;
# DWORD ExceptionFlags;
# LPVOID ExceptionRecord;
# LPVOID ExceptionAddress;
# DWORD NumberParameters;
# LPVOID ExceptionInformation[EXCEPTION_MAXIMUM_PARAMETERS];
# } EXCEPTION_RECORD, *PEXCEPTION_RECORD;
# Native-size layout. The structure is self-referential (nested exception
# records form a chain), so _fields_ is assigned only after the POINTER
# alias has been created.
class EXCEPTION_RECORD(Structure):
    pass
PEXCEPTION_RECORD = POINTER(EXCEPTION_RECORD)
EXCEPTION_RECORD._fields_ = [
    ('ExceptionCode', DWORD),
    ('ExceptionFlags', DWORD),
    ('ExceptionRecord', PEXCEPTION_RECORD),
    ('ExceptionAddress', LPVOID),
    ('NumberParameters', DWORD),
    ('ExceptionInformation', LPVOID * EXCEPTION_MAXIMUM_PARAMETERS),
]
# typedef struct _EXCEPTION_DEBUG_INFO {
# EXCEPTION_RECORD ExceptionRecord;
# DWORD dwFirstChance;
# } EXCEPTION_DEBUG_INFO;
# Per-event payload structures for the DEBUG_EVENT union below.
class EXCEPTION_DEBUG_INFO(Structure):
    _fields_ = [
        ('ExceptionRecord', EXCEPTION_RECORD),
        ('dwFirstChance', DWORD),
    ]
# typedef struct _CREATE_THREAD_DEBUG_INFO {
# HANDLE hThread;
# LPVOID lpThreadLocalBase;
# LPTHREAD_START_ROUTINE lpStartAddress;
# } CREATE_THREAD_DEBUG_INFO;
class CREATE_THREAD_DEBUG_INFO(Structure):
    _fields_ = [
        ('hThread', HANDLE),
        ('lpThreadLocalBase', LPVOID),
        ('lpStartAddress', LPVOID),
    ]
# typedef struct _CREATE_PROCESS_DEBUG_INFO {
# HANDLE hFile;
# HANDLE hProcess;
# HANDLE hThread;
# LPVOID lpBaseOfImage;
# DWORD dwDebugInfoFileOffset;
# DWORD nDebugInfoSize;
# LPVOID lpThreadLocalBase;
# LPTHREAD_START_ROUTINE lpStartAddress;
# LPVOID lpImageName;
# WORD fUnicode;
# } CREATE_PROCESS_DEBUG_INFO;
class CREATE_PROCESS_DEBUG_INFO(Structure):
    _fields_ = [
        ('hFile', HANDLE),
        ('hProcess', HANDLE),
        ('hThread', HANDLE),
        ('lpBaseOfImage', LPVOID),
        ('dwDebugInfoFileOffset', DWORD),
        ('nDebugInfoSize', DWORD),
        ('lpThreadLocalBase', LPVOID),
        ('lpStartAddress', LPVOID),
        ('lpImageName', LPVOID),
        ('fUnicode', WORD),
    ]
# typedef struct _EXIT_THREAD_DEBUG_INFO {
# DWORD dwExitCode;
# } EXIT_THREAD_DEBUG_INFO;
class EXIT_THREAD_DEBUG_INFO(Structure):
    _fields_ = [
        ('dwExitCode', DWORD),
    ]
# typedef struct _EXIT_PROCESS_DEBUG_INFO {
# DWORD dwExitCode;
# } EXIT_PROCESS_DEBUG_INFO;
class EXIT_PROCESS_DEBUG_INFO(Structure):
    _fields_ = [
        ('dwExitCode', DWORD),
    ]
# typedef struct _LOAD_DLL_DEBUG_INFO {
# HANDLE hFile;
# LPVOID lpBaseOfDll;
# DWORD dwDebugInfoFileOffset;
# DWORD nDebugInfoSize;
# LPVOID lpImageName;
# WORD fUnicode;
# } LOAD_DLL_DEBUG_INFO;
class LOAD_DLL_DEBUG_INFO(Structure):
    _fields_ = [
        ('hFile', HANDLE),
        ('lpBaseOfDll', LPVOID),
        ('dwDebugInfoFileOffset', DWORD),
        ('nDebugInfoSize', DWORD),
        ('lpImageName', LPVOID),
        ('fUnicode', WORD),
    ]
# typedef struct _UNLOAD_DLL_DEBUG_INFO {
# LPVOID lpBaseOfDll;
# } UNLOAD_DLL_DEBUG_INFO;
class UNLOAD_DLL_DEBUG_INFO(Structure):
    _fields_ = [
        ('lpBaseOfDll', LPVOID),
    ]
# typedef struct _OUTPUT_DEBUG_STRING_INFO {
# LPSTR lpDebugStringData;
# WORD fUnicode;
# WORD nDebugStringLength;
# } OUTPUT_DEBUG_STRING_INFO;
class OUTPUT_DEBUG_STRING_INFO(Structure):
    _fields_ = [
        ('lpDebugStringData', LPVOID), # don't use LPSTR
        ('fUnicode', WORD),
        ('nDebugStringLength', WORD),
    ]
# typedef struct _RIP_INFO {
# DWORD dwError;
# DWORD dwType;
# } RIP_INFO, *LPRIP_INFO;
class RIP_INFO(Structure):
    _fields_ = [
        ('dwError', DWORD),
        ('dwType', DWORD),
    ]
# typedef struct _DEBUG_EVENT {
# DWORD dwDebugEventCode;
# DWORD dwProcessId;
# DWORD dwThreadId;
# union {
# EXCEPTION_DEBUG_INFO Exception;
# CREATE_THREAD_DEBUG_INFO CreateThread;
# CREATE_PROCESS_DEBUG_INFO CreateProcessInfo;
# EXIT_THREAD_DEBUG_INFO ExitThread;
# EXIT_PROCESS_DEBUG_INFO ExitProcess;
# LOAD_DLL_DEBUG_INFO LoadDll;
# UNLOAD_DLL_DEBUG_INFO UnloadDll;
# OUTPUT_DEBUG_STRING_INFO DebugString;
# RIP_INFO RipInfo;
# } u;
# } DEBUG_EVENT;.
# The member selected by the union depends on dwDebugEventCode.
class _DEBUG_EVENT_UNION_(Union):
    _fields_ = [
        ('Exception', EXCEPTION_DEBUG_INFO),
        ('CreateThread', CREATE_THREAD_DEBUG_INFO),
        ('CreateProcessInfo', CREATE_PROCESS_DEBUG_INFO),
        ('ExitThread', EXIT_THREAD_DEBUG_INFO),
        ('ExitProcess', EXIT_PROCESS_DEBUG_INFO),
        ('LoadDll', LOAD_DLL_DEBUG_INFO),
        ('UnloadDll', UNLOAD_DLL_DEBUG_INFO),
        ('DebugString', OUTPUT_DEBUG_STRING_INFO),
        ('RipInfo', RIP_INFO),
    ]
# ctypes mirror of DEBUG_EVENT, filled in by WaitForDebugEvent.
class DEBUG_EVENT(Structure):
    _fields_ = [
        ('dwDebugEventCode', DWORD),
        ('dwProcessId', DWORD),
        ('dwThreadId', DWORD),
        ('u', _DEBUG_EVENT_UNION_),
    ]
LPDEBUG_EVENT = POINTER(DEBUG_EVENT)
#--- Console API defines and structures ---------------------------------------
# Bit masks for the character attribute WORD used by the console API.
FOREGROUND_MASK = 0x000F
BACKGROUND_MASK = 0x00F0
COMMON_LVB_MASK = 0xFF00
# Foreground colors occupy the low nibble; cyan/magenta/yellow/grey are
# combinations of the blue/green/red bits.
FOREGROUND_BLACK = 0x0000
FOREGROUND_BLUE = 0x0001
FOREGROUND_GREEN = 0x0002
FOREGROUND_CYAN = 0x0003
FOREGROUND_RED = 0x0004
FOREGROUND_MAGENTA = 0x0005
FOREGROUND_YELLOW = 0x0006
FOREGROUND_GREY = 0x0007
FOREGROUND_INTENSITY = 0x0008
# Background colors are the same values shifted into the second nibble.
BACKGROUND_BLACK = 0x0000
BACKGROUND_BLUE = 0x0010
BACKGROUND_GREEN = 0x0020
BACKGROUND_CYAN = 0x0030
BACKGROUND_RED = 0x0040
BACKGROUND_MAGENTA = 0x0050
BACKGROUND_YELLOW = 0x0060
BACKGROUND_GREY = 0x0070
BACKGROUND_INTENSITY = 0x0080
# DBCS lead/trail byte markers and grid/video attributes (high byte).
COMMON_LVB_LEADING_BYTE = 0x0100
COMMON_LVB_TRAILING_BYTE = 0x0200
COMMON_LVB_GRID_HORIZONTAL = 0x0400
COMMON_LVB_GRID_LVERTICAL = 0x0800
COMMON_LVB_GRID_RVERTICAL = 0x1000
COMMON_LVB_REVERSE_VIDEO = 0x4000
COMMON_LVB_UNDERSCORE = 0x8000
# typedef struct _CHAR_INFO {
# union {
# WCHAR UnicodeChar;
# CHAR AsciiChar;
# } Char;
# WORD Attributes;
# } CHAR_INFO, *PCHAR_INFO;
# One console cell: a character (either encoding) plus attribute bits.
class _CHAR_INFO_CHAR(Union):
    _fields_ = [
        ('UnicodeChar', WCHAR),
        ('AsciiChar', CHAR),
    ]
class CHAR_INFO(Structure):
    _fields_ = [
        ('Char', _CHAR_INFO_CHAR),
        ('Attributes', WORD),
    ]
PCHAR_INFO = POINTER(CHAR_INFO)
# typedef struct _COORD {
# SHORT X;
# SHORT Y;
# } COORD, *PCOORD;
class COORD(Structure):
    _fields_ = [
        ('X', SHORT),
        ('Y', SHORT),
    ]
PCOORD = POINTER(COORD)
# typedef struct _SMALL_RECT {
# SHORT Left;
# SHORT Top;
# SHORT Right;
# SHORT Bottom;
# } SMALL_RECT;
class SMALL_RECT(Structure):
    _fields_ = [
        ('Left', SHORT),
        ('Top', SHORT),
        ('Right', SHORT),
        ('Bottom', SHORT),
    ]
PSMALL_RECT = POINTER(SMALL_RECT)
# typedef struct _CONSOLE_SCREEN_BUFFER_INFO {
# COORD dwSize;
# COORD dwCursorPosition;
# WORD wAttributes;
# SMALL_RECT srWindow;
# COORD dwMaximumWindowSize;
# } CONSOLE_SCREEN_BUFFER_INFO;
# Filled in by GetConsoleScreenBufferInfo.
class CONSOLE_SCREEN_BUFFER_INFO(Structure):
    _fields_ = [
        ('dwSize', COORD),
        ('dwCursorPosition', COORD),
        ('wAttributes', WORD),
        ('srWindow', SMALL_RECT),
        ('dwMaximumWindowSize', COORD),
    ]
PCONSOLE_SCREEN_BUFFER_INFO = POINTER(CONSOLE_SCREEN_BUFFER_INFO)
#--- Toolhelp library defines and structures ----------------------------------
TH32CS_SNAPHEAPLIST = 0x00000001
TH32CS_SNAPPROCESS = 0x00000002
TH32CS_SNAPTHREAD = 0x00000004
TH32CS_SNAPMODULE = 0x00000008
TH32CS_INHERIT = 0x80000000
TH32CS_SNAPALL = (TH32CS_SNAPHEAPLIST | TH32CS_SNAPPROCESS | TH32CS_SNAPTHREAD | TH32CS_SNAPMODULE)
# typedef struct tagTHREADENTRY32 {
# DWORD dwSize;
# DWORD cntUsage;
# DWORD th32ThreadID;
# DWORD th32OwnerProcessID;
# LONG tpBasePri;
# LONG tpDeltaPri;
# DWORD dwFlags;
# } THREADENTRY32, *PTHREADENTRY32;
class THREADENTRY32(Structure):
_fields_ = [
('dwSize', DWORD),
('cntUsage', DWORD),
('th32ThreadID', DWORD),
('th32OwnerProcessID', DWORD),
('tpBasePri', LONG),
('tpDeltaPri', LONG),
('dwFlags', DWORD),
]
LPTHREADENTRY32 = POINTER(THREADENTRY32)
# typedef struct tagPROCESSENTRY32 {
# DWORD dwSize;
# DWORD cntUsage;
# DWORD th32ProcessID;
# ULONG_PTR th32DefaultHeapID;
# DWORD th32ModuleID;
# DWORD cntThreads;
# DWORD th32ParentProcessID;
# LONG pcPriClassBase;
# DWORD dwFlags;
# TCHAR szExeFile[MAX_PATH];
# } PROCESSENTRY32, *PPROCESSENTRY32;
class PROCESSENTRY32(Structure):
_fields_ = [
('dwSize', DWORD),
('cntUsage', DWORD),
('th32ProcessID', DWORD),
('th32DefaultHeapID', ULONG_PTR),
('th32ModuleID', DWORD),
('cntThreads', DWORD),
('th32ParentProcessID', DWORD),
('pcPriClassBase', LONG),
('dwFlags', DWORD),
('szExeFile', TCHAR * 260),
]
LPPROCESSENTRY32 = POINTER(PROCESSENTRY32)
# typedef struct tagMODULEENTRY32 {
#    DWORD dwSize;
#    DWORD th32ModuleID;
#    DWORD th32ProcessID;
#    DWORD GlblcntUsage;
#    DWORD ProccntUsage;
#    BYTE* modBaseAddr;
#    DWORD modBaseSize;
#    HMODULE hModule;
#    TCHAR szModule[MAX_MODULE_NAME32 + 1];
#    TCHAR szExePath[MAX_PATH];
# } MODULEENTRY32,  *PMODULEENTRY32;
class MODULEENTRY32(Structure):
    """Module snapshot entry for Module32First/Module32Next.

    Callers must set dwSize to sizeof(MODULEENTRY32) before use.
    """
    _fields_ = [
        ("dwSize",        DWORD),
        ("th32ModuleID",  DWORD),
        ("th32ProcessID", DWORD),
        ("GlblcntUsage",  DWORD),
        ("ProccntUsage",  DWORD),
        ("modBaseAddr",   LPVOID),  # BYTE*
        ("modBaseSize",   DWORD),
        ("hModule",       HMODULE),
        ("szModule",      TCHAR * (MAX_MODULE_NAME32 + 1)),
        ("szExePath",     TCHAR * MAX_PATH),
    ]
LPMODULEENTRY32 = POINTER(MODULEENTRY32)
# typedef struct tagHEAPENTRY32 {
#   SIZE_T    dwSize;
#   HANDLE    hHandle;
#   ULONG_PTR dwAddress;
#   SIZE_T    dwBlockSize;
#   DWORD     dwFlags;
#   DWORD     dwLockCount;
#   DWORD     dwResvd;
#   DWORD     th32ProcessID;
#   ULONG_PTR th32HeapID;
# } HEAPENTRY32,
# *PHEAPENTRY32;
class HEAPENTRY32(Structure):
    """Heap block entry for Heap32First/Heap32Next.

    Callers must set dwSize to sizeof(HEAPENTRY32) before use.
    """
    _fields_ = [
        ("dwSize",          SIZE_T),
        ("hHandle",         HANDLE),
        ("dwAddress",       ULONG_PTR),
        ("dwBlockSize",     SIZE_T),
        ("dwFlags",         DWORD),
        ("dwLockCount",     DWORD),
        ("dwResvd",         DWORD),
        ("th32ProcessID",   DWORD),
        ("th32HeapID",      ULONG_PTR),
]
LPHEAPENTRY32 = POINTER(HEAPENTRY32)
# typedef struct tagHEAPLIST32 {
#   SIZE_T    dwSize;
#   DWORD     th32ProcessID;
#   ULONG_PTR th32HeapID;
#   DWORD     dwFlags;
# } HEAPLIST32,
#  *PHEAPLIST32;
class HEAPLIST32(Structure):
    """Heap list entry for Heap32ListFirst/Heap32ListNext.

    Callers must set dwSize to sizeof(HEAPLIST32) before use.
    """
    _fields_ = [
        ("dwSize",          SIZE_T),
        ("th32ProcessID",   DWORD),
        ("th32HeapID",      ULONG_PTR),
        ("dwFlags",         DWORD),
]
LPHEAPLIST32 = POINTER(HEAPLIST32)
#--- kernel32.dll -------------------------------------------------------------
# DWORD WINAPI GetLastError(void);
def GetLastError():
    """Return the calling thread's last-error code (kernel32!GetLastError)."""
    fn = windll.kernel32.GetLastError
    fn.argtypes = []
    fn.restype  = DWORD
    return fn()
# void WINAPI SetLastError(
#   __in  DWORD dwErrCode
# );
def SetLastError(dwErrCode):
    """Set the calling thread's last-error code (kernel32!SetLastError)."""
    fn = windll.kernel32.SetLastError
    fn.argtypes = [DWORD]
    fn.restype  = None
    fn(dwErrCode)
# UINT WINAPI GetErrorMode(void);
def GetErrorMode():
    """Return the current process error mode flags (kernel32!GetErrorMode)."""
    fn = windll.kernel32.GetErrorMode
    fn.argtypes = []
    fn.restype  = UINT
    return fn()
# UINT WINAPI SetErrorMode(
#   __in  UINT uMode
# );
def SetErrorMode(uMode):
    """Set the process error mode and return the previous mode flags.

    @type  uMode: int
    @param uMode: New error mode flags (SEM_* constants).

    @rtype:  int
    @return: Previous error mode flags.
    """
    _SetErrorMode = windll.kernel32.SetErrorMode
    _SetErrorMode.argtypes = [UINT]
    _SetErrorMode.restype  = UINT
    # BUGFIX: the original passed the undefined name "dwErrCode",
    # raising NameError on every call. The parameter is "uMode".
    return _SetErrorMode(uMode)
# DWORD GetThreadErrorMode(void);
def GetThreadErrorMode():
    """Return the calling thread's error mode flags (kernel32!GetThreadErrorMode)."""
    fn = windll.kernel32.GetThreadErrorMode
    fn.argtypes = []
    fn.restype  = DWORD
    return fn()
# BOOL SetThreadErrorMode(
#   __in   DWORD dwNewMode,
#   __out  LPDWORD lpOldMode
# );
def SetThreadErrorMode(dwNewMode):
    """Set the calling thread's error mode and return the previous mode.

    @type  dwNewMode: int
    @param dwNewMode: New error mode flags (SEM_* constants).

    @rtype:  int
    @return: Previous thread error mode flags.

    @raise WindowsError: The API call failed.
    """
    _SetThreadErrorMode = windll.kernel32.SetThreadErrorMode
    _SetThreadErrorMode.argtypes = [DWORD, LPDWORD]
    _SetThreadErrorMode.restype  = BOOL
    _SetThreadErrorMode.errcheck = RaiseIfZero
    old = DWORD(0)
    # BUGFIX: the original passed the undefined name "dwErrCode",
    # raising NameError on every call. The parameter is "dwNewMode".
    _SetThreadErrorMode(dwNewMode, byref(old))
    return old.value
# BOOL WINAPI CloseHandle(
#   __in  HANDLE hObject
# );
def CloseHandle(hHandle):
    """Close a kernel object handle.

    Handle wrapper objects are closed through their own close() method so the
    wrapper is notified that its handle is gone; raw handle values go straight
    to kernel32!CloseHandle.
    """
    if isinstance(hHandle, Handle):
        hHandle.close()
        return
    fn = windll.kernel32.CloseHandle
    fn.argtypes = [HANDLE]
    fn.restype  = bool
    fn.errcheck = RaiseIfZero
    fn(hHandle)
# BOOL WINAPI DuplicateHandle(
#   __in   HANDLE hSourceProcessHandle,
#   __in   HANDLE hSourceHandle,
#   __in   HANDLE hTargetProcessHandle,
#   __out  LPHANDLE lpTargetHandle,
#   __in   DWORD dwDesiredAccess,
#   __in   BOOL bInheritHandle,
#   __in   DWORD dwOptions
# );
def DuplicateHandle(hSourceHandle, hSourceProcessHandle = None, hTargetProcessHandle = None, dwDesiredAccess = STANDARD_RIGHTS_ALL, bInheritHandle = False, dwOptions = DUPLICATE_SAME_ACCESS):
    """Duplicate a kernel object handle, defaulting to the current process.

    Returns a Handle wrapper (of the same wrapper class as the source, when
    the source was itself a Handle object) owning the duplicated handle.

    Raises WindowsError on API failure (via RaiseIfZero).
    """
    _DuplicateHandle = windll.kernel32.DuplicateHandle
    _DuplicateHandle.argtypes = [HANDLE, HANDLE, HANDLE, LPHANDLE, DWORD, BOOL, DWORD]
    _DuplicateHandle.restype  = bool
    _DuplicateHandle.errcheck = RaiseIfZero
    # NOTE: the arguments to this function are in a different order,
    # so we can set default values for all of them but one (hSourceHandle).
    if hSourceProcessHandle is None:
        hSourceProcessHandle = GetCurrentProcess()
    if hTargetProcessHandle is None:
        hTargetProcessHandle = hSourceProcessHandle
    lpTargetHandle = HANDLE(INVALID_HANDLE_VALUE)
    _DuplicateHandle(hSourceProcessHandle, hSourceHandle, hTargetProcessHandle, byref(lpTargetHandle), dwDesiredAccess, bool(bInheritHandle), dwOptions)
    # Preserve the source's wrapper class (and access mask, when tracked) so
    # the duplicate behaves like the original handle object.
    if isinstance(hSourceHandle, Handle):
        HandleClass = hSourceHandle.__class__
    else:
        HandleClass = Handle
    if hasattr(hSourceHandle, 'dwAccess'):
        return HandleClass(lpTargetHandle.value, dwAccess = hSourceHandle.dwAccess)
    else:
        return HandleClass(lpTargetHandle.value)
# HLOCAL WINAPI LocalFree(
#   __in  HLOCAL hMem
# );
def LocalFree(hMem):
    """Free a local memory object allocated with LocalAlloc.

    @type  hMem: int
    @param hMem: Handle to the local memory object.

    @raise WindowsError: The free failed (LocalFree returns non-NULL).
    """
    _LocalFree = windll.kernel32.LocalFree
    _LocalFree.argtypes = [HLOCAL]
    _LocalFree.restype  = HLOCAL
    result = _LocalFree(hMem)
    if result != NULL:
        # BUGFIX: the original constructed the WinError exception
        # but never raised it, silently ignoring free failures.
        raise ctypes.WinError()
#------------------------------------------------------------------------------
# Console API
# HANDLE WINAPI GetStdHandle(
#   _In_  DWORD nStdHandle
# );
def GetStdHandle(nStdHandle):
    """Return a Handle wrapper for one of the standard device handles.

    @type  nStdHandle: int
    @param nStdHandle: STD_INPUT_HANDLE, STD_OUTPUT_HANDLE or STD_ERROR_HANDLE.

    @rtype:  Handle
    @return: Non-owning Handle (the standard handles belong to the process,
        so we must not close them when the wrapper is destroyed).
    """
    _GetStdHandle = windll.kernel32.GetStdHandle
    _GetStdHandle.argtypes = [DWORD]  # BUGFIX: was misspelled "argytpes", so argtypes was never set
    _GetStdHandle.restype  = HANDLE
    _GetStdHandle.errcheck = RaiseIfZero
    return Handle( _GetStdHandle(nStdHandle), bOwnership = False )
# BOOL WINAPI SetStdHandle(
# _In_ DWORD nStdHandle,
# _In_ HANDLE hHandle
# );
# TODO
# UINT WINAPI GetConsoleCP(void);
def GetConsoleCP():
    """Return the input code page used by the attached console."""
    _GetConsoleCP = windll.kernel32.GetConsoleCP
    _GetConsoleCP.argtypes = []  # BUGFIX: was misspelled "argytpes"
    _GetConsoleCP.restype  = UINT
    return _GetConsoleCP()
# UINT WINAPI GetConsoleOutputCP(void);
def GetConsoleOutputCP():
    """Return the output code page used by the attached console."""
    _GetConsoleOutputCP = windll.kernel32.GetConsoleOutputCP
    _GetConsoleOutputCP.argtypes = []  # BUGFIX: was misspelled "argytpes"
    _GetConsoleOutputCP.restype  = UINT
    return _GetConsoleOutputCP()
#BOOL WINAPI SetConsoleCP(
#  _In_  UINT wCodePageID
#);
def SetConsoleCP(wCodePageID):
    """Set the input code page of the attached console.

    @raise WindowsError: The API call failed.
    """
    _SetConsoleCP = windll.kernel32.SetConsoleCP
    _SetConsoleCP.argtypes = [UINT]  # BUGFIX: was misspelled "argytpes"
    _SetConsoleCP.restype  = bool
    _SetConsoleCP.errcheck = RaiseIfZero
    _SetConsoleCP(wCodePageID)
#BOOL WINAPI SetConsoleOutputCP(
#  _In_  UINT wCodePageID
#);
def SetConsoleOutputCP(wCodePageID):
    """Set the output code page of the attached console.

    @raise WindowsError: The API call failed.
    """
    _SetConsoleOutputCP = windll.kernel32.SetConsoleOutputCP
    _SetConsoleOutputCP.argtypes = [UINT]  # BUGFIX: was misspelled "argytpes"
    _SetConsoleOutputCP.restype  = bool
    _SetConsoleOutputCP.errcheck = RaiseIfZero
    _SetConsoleOutputCP(wCodePageID)
# HANDLE WINAPI CreateConsoleScreenBuffer(
# _In_ DWORD dwDesiredAccess,
# _In_ DWORD dwShareMode,
# _In_opt_ const SECURITY_ATTRIBUTES *lpSecurityAttributes,
# _In_ DWORD dwFlags,
# _Reserved_ LPVOID lpScreenBufferData
# );
# TODO
# BOOL WINAPI SetConsoleActiveScreenBuffer(
#   _In_  HANDLE hConsoleOutput
# );
def SetConsoleActiveScreenBuffer(hConsoleOutput = None):
    """Make the given screen buffer the displayed one.

    @type  hConsoleOutput: Handle or None
    @param hConsoleOutput: Screen buffer handle.
        Defaults to the process standard output handle.

    @raise WindowsError: The API call failed.
    """
    _SetConsoleActiveScreenBuffer = windll.kernel32.SetConsoleActiveScreenBuffer
    _SetConsoleActiveScreenBuffer.argtypes = [HANDLE]  # BUGFIX: was misspelled "argytpes"
    _SetConsoleActiveScreenBuffer.restype  = bool
    _SetConsoleActiveScreenBuffer.errcheck = RaiseIfZero
    if hConsoleOutput is None:
        hConsoleOutput = GetStdHandle(STD_OUTPUT_HANDLE)
    _SetConsoleActiveScreenBuffer(hConsoleOutput)
# BOOL WINAPI GetConsoleScreenBufferInfo(
#   _In_   HANDLE hConsoleOutput,
#   _Out_  PCONSOLE_SCREEN_BUFFER_INFO lpConsoleScreenBufferInfo
# );
def GetConsoleScreenBufferInfo(hConsoleOutput = None):
    """Return a CONSOLE_SCREEN_BUFFER_INFO for the given screen buffer.

    @type  hConsoleOutput: Handle or None
    @param hConsoleOutput: Screen buffer handle.
        Defaults to the process standard output handle.

    @rtype:  CONSOLE_SCREEN_BUFFER_INFO
    @return: Screen buffer information (size, cursor, attributes, window).

    @raise WindowsError: The API call failed.
    """
    _GetConsoleScreenBufferInfo = windll.kernel32.GetConsoleScreenBufferInfo
    _GetConsoleScreenBufferInfo.argtypes = [HANDLE, PCONSOLE_SCREEN_BUFFER_INFO]  # BUGFIX: was misspelled "argytpes"
    _GetConsoleScreenBufferInfo.restype  = bool
    _GetConsoleScreenBufferInfo.errcheck = RaiseIfZero
    if hConsoleOutput is None:
        hConsoleOutput = GetStdHandle(STD_OUTPUT_HANDLE)
    ConsoleScreenBufferInfo = CONSOLE_SCREEN_BUFFER_INFO()
    _GetConsoleScreenBufferInfo(hConsoleOutput, byref(ConsoleScreenBufferInfo))
    return ConsoleScreenBufferInfo
# BOOL WINAPI GetConsoleScreenBufferInfoEx(
# _In_ HANDLE hConsoleOutput,
# _Out_ PCONSOLE_SCREEN_BUFFER_INFOEX lpConsoleScreenBufferInfoEx
# );
# TODO
# BOOL WINAPI SetConsoleWindowInfo(
#   _In_  HANDLE hConsoleOutput,
#   _In_  BOOL bAbsolute,
#   _In_  const SMALL_RECT *lpConsoleWindow
# );
def SetConsoleWindowInfo(hConsoleOutput, bAbsolute, lpConsoleWindow):
    """Set the size and position of the console window.

    @type  hConsoleOutput: Handle or None
    @param hConsoleOutput: Screen buffer handle; None means standard output.
    @type  bAbsolute: bool
    @param bAbsolute: True for absolute coordinates, False for relative.
    @type  lpConsoleWindow: SMALL_RECT or sequence of 4 ints
    @param lpConsoleWindow: New window rectangle.

    @raise WindowsError: The API call failed.
    """
    _SetConsoleWindowInfo = windll.kernel32.SetConsoleWindowInfo
    _SetConsoleWindowInfo.argtypes = [HANDLE, BOOL, PSMALL_RECT]  # BUGFIX: was misspelled "argytpes"
    _SetConsoleWindowInfo.restype  = bool
    _SetConsoleWindowInfo.errcheck = RaiseIfZero
    if hConsoleOutput is None:
        hConsoleOutput = GetStdHandle(STD_OUTPUT_HANDLE)
    # Accept either a ready-made SMALL_RECT or any 4-item sequence.
    if isinstance(lpConsoleWindow, SMALL_RECT):
        ConsoleWindow = lpConsoleWindow
    else:
        ConsoleWindow = SMALL_RECT(*lpConsoleWindow)
    _SetConsoleWindowInfo(hConsoleOutput, bAbsolute, byref(ConsoleWindow))
# BOOL WINAPI SetConsoleTextAttribute(
#   _In_  HANDLE hConsoleOutput,
#   _In_  WORD wAttributes
# );
def SetConsoleTextAttribute(hConsoleOutput = None, wAttributes = 0):
    """Set the character attributes for subsequently written console text.

    @type  hConsoleOutput: Handle or None
    @param hConsoleOutput: Screen buffer handle; None means standard output.
    @type  wAttributes: int
    @param wAttributes: FOREGROUND_* / BACKGROUND_* attribute flags.

    @raise WindowsError: The API call failed.
    """
    _SetConsoleTextAttribute = windll.kernel32.SetConsoleTextAttribute
    _SetConsoleTextAttribute.argtypes = [HANDLE, WORD]  # BUGFIX: was misspelled "argytpes"
    _SetConsoleTextAttribute.restype  = bool
    _SetConsoleTextAttribute.errcheck = RaiseIfZero
    if hConsoleOutput is None:
        hConsoleOutput = GetStdHandle(STD_OUTPUT_HANDLE)
    _SetConsoleTextAttribute(hConsoleOutput, wAttributes)
# HANDLE WINAPI CreateConsoleScreenBuffer(
# _In_ DWORD dwDesiredAccess,
# _In_ DWORD dwShareMode,
# _In_opt_ const SECURITY_ATTRIBUTES *lpSecurityAttributes,
# _In_ DWORD dwFlags,
# _Reserved_ LPVOID lpScreenBufferData
# );
# TODO
# BOOL WINAPI AllocConsole(void);
def AllocConsole():
    """Allocate a new console for the calling process.

    @raise WindowsError: The API call failed (e.g. a console already exists).
    """
    _AllocConsole = windll.kernel32.AllocConsole
    _AllocConsole.argtypes = []  # BUGFIX: was misspelled "argytpes"
    _AllocConsole.restype  = bool
    _AllocConsole.errcheck = RaiseIfZero
    _AllocConsole()
# BOOL WINAPI AttachConsole(
#   _In_  DWORD dwProcessId
# );
def AttachConsole(dwProcessId = ATTACH_PARENT_PROCESS):
    """Attach the calling process to the console of another process.

    @type  dwProcessId: int
    @param dwProcessId: Target process ID; defaults to the parent process.

    @raise WindowsError: The API call failed.
    """
    _AttachConsole = windll.kernel32.AttachConsole
    _AttachConsole.argtypes = [DWORD]  # BUGFIX: was misspelled "argytpes"
    _AttachConsole.restype  = bool
    _AttachConsole.errcheck = RaiseIfZero
    _AttachConsole(dwProcessId)
# BOOL WINAPI FreeConsole(void);
def FreeConsole():
    """Detach the calling process from its console.

    @raise WindowsError: The API call failed.
    """
    _FreeConsole = windll.kernel32.FreeConsole
    _FreeConsole.argtypes = []  # BUGFIX: was misspelled "argytpes"
    _FreeConsole.restype  = bool
    _FreeConsole.errcheck = RaiseIfZero
    _FreeConsole()
# DWORD WINAPI GetConsoleProcessList(
# _Out_ LPDWORD lpdwProcessList,
# _In_ DWORD dwProcessCount
# );
# TODO
# DWORD WINAPI GetConsoleTitle(
# _Out_ LPTSTR lpConsoleTitle,
# _In_ DWORD nSize
# );
# TODO
#BOOL WINAPI SetConsoleTitle(
# _In_ LPCTSTR lpConsoleTitle
#);
# TODO
# COORD WINAPI GetLargestConsoleWindowSize(
# _In_ HANDLE hConsoleOutput
# );
# TODO
# BOOL WINAPI GetConsoleHistoryInfo(
# _Out_ PCONSOLE_HISTORY_INFO lpConsoleHistoryInfo
# );
# TODO
#------------------------------------------------------------------------------
# DLL API
# DWORD WINAPI GetDllDirectory(
#   __in   DWORD nBufferLength,
#   __out  LPTSTR lpBuffer
# );
def GetDllDirectoryA():
    """Return the current DLL search directory (ANSI), or None if not set."""
    _GetDllDirectoryA = windll.kernel32.GetDllDirectoryA
    _GetDllDirectoryA.argtypes = [DWORD, LPSTR]  # BUGFIX: was misspelled "argytpes"
    _GetDllDirectoryA.restype  = DWORD
    # First call with no buffer to learn the required size; 0 means no
    # DLL directory has been set.
    nBufferLength = _GetDllDirectoryA(0, None)
    if nBufferLength == 0:
        return None
    lpBuffer = ctypes.create_string_buffer("", nBufferLength)
    _GetDllDirectoryA(nBufferLength, byref(lpBuffer))
    return lpBuffer.value
def GetDllDirectoryW():
    """Return the current DLL search directory (Unicode), or None if not set."""
    _GetDllDirectoryW = windll.kernel32.GetDllDirectoryW
    _GetDllDirectoryW.argtypes = [DWORD, LPWSTR]  # BUGFIX: was misspelled "argytpes"
    _GetDllDirectoryW.restype  = DWORD
    # First call with no buffer to learn the required size; 0 means no
    # DLL directory has been set.
    nBufferLength = _GetDllDirectoryW(0, None)
    if nBufferLength == 0:
        return None
    lpBuffer = ctypes.create_unicode_buffer(u"", nBufferLength)
    _GetDllDirectoryW(nBufferLength, byref(lpBuffer))
    return lpBuffer.value

# Dispatch to the ANSI or Unicode version based on the argument string type.
GetDllDirectory = GuessStringType(GetDllDirectoryA, GetDllDirectoryW)
# BOOL WINAPI SetDllDirectory(
#   __in_opt  LPCTSTR lpPathName
# );
def SetDllDirectoryA(lpPathName = None):
    """Set the DLL search directory (ANSI). None restores the default search order.

    @raise WindowsError: The API call failed.
    """
    _SetDllDirectoryA = windll.kernel32.SetDllDirectoryA
    _SetDllDirectoryA.argtypes = [LPSTR]  # BUGFIX: was misspelled "argytpes"
    _SetDllDirectoryA.restype  = bool
    _SetDllDirectoryA.errcheck = RaiseIfZero
    _SetDllDirectoryA(lpPathName)
def SetDllDirectoryW(lpPathName = None):
    """Set the DLL search directory (Unicode). None restores the default search order.

    NOTE: lpPathName now defaults to None for consistency with
    SetDllDirectoryA (passing NULL is valid per the Win32 API and restores
    the default DLL search path). Backward compatible.

    @raise WindowsError: The API call failed.
    """
    _SetDllDirectoryW = windll.kernel32.SetDllDirectoryW
    _SetDllDirectoryW.argtypes = [LPWSTR]  # BUGFIX: was misspelled "argytpes"
    _SetDllDirectoryW.restype  = bool
    _SetDllDirectoryW.errcheck = RaiseIfZero
    _SetDllDirectoryW(lpPathName)

# Dispatch to the ANSI or Unicode version based on the argument string type.
SetDllDirectory = GuessStringType(SetDllDirectoryA, SetDllDirectoryW)
# HMODULE WINAPI LoadLibrary(
#   __in  LPCTSTR lpFileName
# );
def LoadLibraryA(pszLibrary):
    """Load a DLL into the calling process (ANSI) and return its HMODULE.

    Raises WindowsError when the module cannot be loaded.
    """
    fn = windll.kernel32.LoadLibraryA
    fn.argtypes = [LPSTR]
    fn.restype  = HMODULE
    hModule = fn(pszLibrary)
    if hModule == NULL:
        raise ctypes.WinError()
    return hModule
def LoadLibraryW(pszLibrary):
    """Load a DLL into the calling process (Unicode) and return its HMODULE.

    Raises WindowsError when the module cannot be loaded.
    """
    fn = windll.kernel32.LoadLibraryW
    fn.argtypes = [LPWSTR]
    fn.restype  = HMODULE
    hModule = fn(pszLibrary)
    if hModule == NULL:
        raise ctypes.WinError()
    return hModule

# Dispatch to the ANSI or Unicode version based on the argument string type.
LoadLibrary = GuessStringType(LoadLibraryA, LoadLibraryW)
# HMODULE WINAPI LoadLibraryEx(
#   __in        LPCTSTR lpFileName,
#   __reserved  HANDLE hFile,
#   __in        DWORD dwFlags
# );
def LoadLibraryExA(pszLibrary, dwFlags = 0):
    """Load a DLL with extended options (ANSI) and return its HMODULE.

    The hFile parameter is reserved and always passed as NULL.
    Raises WindowsError when the module cannot be loaded.
    """
    fn = windll.kernel32.LoadLibraryExA
    fn.argtypes = [LPSTR, HANDLE, DWORD]
    fn.restype  = HMODULE
    hModule = fn(pszLibrary, NULL, dwFlags)
    if hModule == NULL:
        raise ctypes.WinError()
    return hModule
def LoadLibraryExW(pszLibrary, dwFlags = 0):
    """Load a DLL with extended options (Unicode) and return its HMODULE.

    The hFile parameter is reserved and always passed as NULL.
    Raises WindowsError when the module cannot be loaded.
    """
    fn = windll.kernel32.LoadLibraryExW
    fn.argtypes = [LPWSTR, HANDLE, DWORD]
    fn.restype  = HMODULE
    hModule = fn(pszLibrary, NULL, dwFlags)
    if hModule == NULL:
        raise ctypes.WinError()
    return hModule

# Dispatch to the ANSI or Unicode version based on the argument string type.
LoadLibraryEx = GuessStringType(LoadLibraryExA, LoadLibraryExW)
# HMODULE WINAPI GetModuleHandle(
#   __in_opt  LPCTSTR lpModuleName
# );
def GetModuleHandleA(lpModuleName):
    """Return the HMODULE of a module already loaded in this process (ANSI).

    Raises WindowsError when the module is not found.
    """
    fn = windll.kernel32.GetModuleHandleA
    fn.argtypes = [LPSTR]
    fn.restype  = HMODULE
    hModule = fn(lpModuleName)
    if hModule == NULL:
        raise ctypes.WinError()
    return hModule
def GetModuleHandleW(lpModuleName):
    """Return the HMODULE of a module already loaded in this process (Unicode).

    Raises WindowsError when the module is not found.
    """
    fn = windll.kernel32.GetModuleHandleW
    fn.argtypes = [LPWSTR]
    fn.restype  = HMODULE
    hModule = fn(lpModuleName)
    if hModule == NULL:
        raise ctypes.WinError()
    return hModule

# Dispatch to the ANSI or Unicode version based on the argument string type.
GetModuleHandle = GuessStringType(GetModuleHandleA, GetModuleHandleW)
# FARPROC WINAPI GetProcAddress(
#   __in  HMODULE hModule,
#   __in  LPCSTR lpProcName
# );
def GetProcAddressA(hModule, lpProcName):
    """Return the address of an exported function or variable.

    lpProcName may be a byte string (export name) or an integer ordinal
    (must fit in 16 bits, per the GetProcAddress contract).
    Note: lpProcName is declared LPVOID so both forms can be passed.
    """
    _GetProcAddress = windll.kernel32.GetProcAddress
    _GetProcAddress.argtypes = [HMODULE, LPVOID]
    _GetProcAddress.restype  = LPVOID
    # Python 2/3 compatible type check: int or long means an ordinal.
    if type(lpProcName) in (type(0), type(long(0))):
        lpProcName = LPVOID(lpProcName)
        # Ordinals are passed in the low word of the pointer value.
        if lpProcName.value & (~0xFFFF):
            raise ValueError('Ordinal number too large: %d' % lpProcName.value)
    elif type(lpProcName) == type(compat.b("")):
        lpProcName = ctypes.c_char_p(lpProcName)
    else:
        raise TypeError(str(type(lpProcName)))
    return _GetProcAddress(hModule, lpProcName)

GetProcAddressW = MakeWideVersion(GetProcAddressA)
GetProcAddress = GuessStringType(GetProcAddressA, GetProcAddressW)
# BOOL WINAPI FreeLibrary(
#   __in  HMODULE hModule
# );
def FreeLibrary(hModule):
    """Decrement the reference count of a loaded DLL, unloading it at zero.

    Raises WindowsError on API failure (via RaiseIfZero).
    """
    fn = windll.kernel32.FreeLibrary
    fn.argtypes = [HMODULE]
    fn.restype  = bool
    fn.errcheck = RaiseIfZero
    fn(hModule)
# PVOID WINAPI RtlPcToFileHeader(
#   __in   PVOID PcValue,
#   __out  PVOID *BaseOfImage
# );
def RtlPcToFileHeader(PcValue):
    """Return the base address of the image containing the given PC value.

    The result comes from the BaseOfImage output parameter; the API's own
    return value is discarded.
    """
    _RtlPcToFileHeader = windll.kernel32.RtlPcToFileHeader
    _RtlPcToFileHeader.argtypes = [PVOID, POINTER(PVOID)]
    # NOTE(review): restype is declared PRUNTIME_FUNCTION, but MSDN documents
    # the return as PVOID. Harmless here since the return value is unused —
    # confirm before relying on it.
    _RtlPcToFileHeader.restype  = PRUNTIME_FUNCTION
    BaseOfImage = PVOID(0)
    _RtlPcToFileHeader(PcValue, byref(BaseOfImage))
    return BaseOfImage.value
#------------------------------------------------------------------------------
# File API and related
# BOOL WINAPI GetHandleInformation(
#   __in   HANDLE hObject,
#   __out  LPDWORD lpdwFlags
# );
def GetHandleInformation(hObject):
    """Return the flags (inherit / protect-from-close) of a kernel handle.

    Raises WindowsError on API failure (via RaiseIfZero).
    """
    fn = windll.kernel32.GetHandleInformation
    fn.argtypes = [HANDLE, PDWORD]
    fn.restype  = bool
    fn.errcheck = RaiseIfZero
    flags = DWORD(0)
    fn(hObject, byref(flags))
    return flags.value
# BOOL WINAPI SetHandleInformation(
#   __in  HANDLE hObject,
#   __in  DWORD dwMask,
#   __in  DWORD dwFlags
# );
def SetHandleInformation(hObject, dwMask, dwFlags):
    """Set the flags (inherit / protect-from-close) of a kernel handle.

    Raises WindowsError on API failure (via RaiseIfZero).
    """
    fn = windll.kernel32.SetHandleInformation
    fn.argtypes = [HANDLE, DWORD, DWORD]
    fn.restype  = bool
    fn.errcheck = RaiseIfZero
    fn(hObject, dwMask, dwFlags)
# UINT WINAPI GetWindowModuleFileName(
# __in HWND hwnd,
# __out LPTSTR lpszFileName,
# __in UINT cchFileNameMax
# );
# Not included because it doesn't work in other processes.
# See: http://support.microsoft.com/?id=228469
# BOOL WINAPI QueryFullProcessImageName(
#   __in     HANDLE hProcess,
#   __in     DWORD dwFlags,
#   __out    LPTSTR lpExeName,
#   __inout  PDWORD lpdwSize
# );
def QueryFullProcessImageNameA(hProcess, dwFlags = 0):
    """Return the full image path of a process (ANSI).

    Grows the output buffer in 256-byte steps until the API succeeds,
    since the required size cannot be queried in advance.
    """
    _QueryFullProcessImageNameA = windll.kernel32.QueryFullProcessImageNameA
    _QueryFullProcessImageNameA.argtypes = [HANDLE, DWORD, LPSTR, PDWORD]
    _QueryFullProcessImageNameA.restype  = bool
    dwSize = MAX_PATH
    while 1:
        lpdwSize = DWORD(dwSize)
        lpExeName = ctypes.create_string_buffer('', lpdwSize.value + 1)
        success = _QueryFullProcessImageNameA(hProcess, dwFlags, lpExeName, byref(lpdwSize))
        # Accept only when the API reported a size strictly within the buffer.
        if success and 0 < lpdwSize.value < dwSize:
            break
        error = GetLastError()
        if error != ERROR_INSUFFICIENT_BUFFER:
            raise ctypes.WinError(error)
        dwSize = dwSize + 256
        if dwSize > 0x1000:
            # this prevents an infinite loop in Windows 2008 when the path has spaces,
            # see http://msdn.microsoft.com/en-us/library/ms684919(VS.85).aspx#4
            raise ctypes.WinError(error)
    return lpExeName.value
def QueryFullProcessImageNameW(hProcess, dwFlags = 0):
    """Return the full image path of a process (Unicode).

    Grows the output buffer in 256-character steps until the API succeeds,
    since the required size cannot be queried in advance.
    """
    _QueryFullProcessImageNameW = windll.kernel32.QueryFullProcessImageNameW
    _QueryFullProcessImageNameW.argtypes = [HANDLE, DWORD, LPWSTR, PDWORD]
    _QueryFullProcessImageNameW.restype  = bool
    dwSize = MAX_PATH
    while 1:
        lpdwSize = DWORD(dwSize)
        lpExeName = ctypes.create_unicode_buffer('', lpdwSize.value + 1)
        success = _QueryFullProcessImageNameW(hProcess, dwFlags, lpExeName, byref(lpdwSize))
        # Accept only when the API reported a size strictly within the buffer.
        if success and 0 < lpdwSize.value < dwSize:
            break
        error = GetLastError()
        if error != ERROR_INSUFFICIENT_BUFFER:
            raise ctypes.WinError(error)
        dwSize = dwSize + 256
        if dwSize > 0x1000:
            # this prevents an infinite loop in Windows 2008 when the path has spaces,
            # see http://msdn.microsoft.com/en-us/library/ms684919(VS.85).aspx#4
            raise ctypes.WinError(error)
    return lpExeName.value

# Dispatch to the ANSI or Unicode version based on the argument string type.
QueryFullProcessImageName = GuessStringType(QueryFullProcessImageNameA, QueryFullProcessImageNameW)
# DWORD WINAPI GetLogicalDriveStrings(
#   __in   DWORD nBufferLength,
#   __out  LPTSTR lpBuffer
# );
def GetLogicalDriveStringsA():
    """Return a list of root drive strings (e.g. "C:\\") as byte strings.

    The API fills the buffer with NUL-separated strings terminated by an
    empty string; we walk the buffer pointer-wise to split them.
    """
    _GetLogicalDriveStringsA = ctypes.windll.kernel32.GetLogicalDriveStringsA
    _GetLogicalDriveStringsA.argtypes = [DWORD, LPSTR]
    _GetLogicalDriveStringsA.restype  = DWORD
    _GetLogicalDriveStringsA.errcheck = RaiseIfZero
    nBufferLength = (4 * 26) + 1    # "X:\\\0" from A to Z plus empty string
    lpBuffer = ctypes.create_string_buffer('', nBufferLength)
    _GetLogicalDriveStringsA(nBufferLength, lpBuffer)
    drive_strings = list()
    string_p = addressof(lpBuffer)
    sizeof_char = sizeof(ctypes.c_char)
    while True:
        string_v = ctypes.string_at(string_p)
        # An empty string marks the end of the packed string list.
        if string_v == '':
            break
        drive_strings.append(string_v)
        string_p += len(string_v) + sizeof_char  # skip string and terminator
    return drive_strings
def GetLogicalDriveStringsW():
    """Return a list of root drive strings (e.g. u"C:\\") as unicode strings.

    The API fills the buffer with NUL-separated strings terminated by an
    empty string; we walk the buffer pointer-wise to split them.
    """
    _GetLogicalDriveStringsW = ctypes.windll.kernel32.GetLogicalDriveStringsW
    _GetLogicalDriveStringsW.argtypes = [DWORD, LPWSTR]
    _GetLogicalDriveStringsW.restype  = DWORD
    _GetLogicalDriveStringsW.errcheck = RaiseIfZero
    nBufferLength = (4 * 26) + 1    # "X:\\\0" from A to Z plus empty string
    lpBuffer = ctypes.create_unicode_buffer(u'', nBufferLength)
    _GetLogicalDriveStringsW(nBufferLength, lpBuffer)
    drive_strings = list()
    string_p = addressof(lpBuffer)
    sizeof_wchar = sizeof(ctypes.c_wchar)
    while True:
        string_v = ctypes.wstring_at(string_p)
        # An empty string marks the end of the packed string list.
        if string_v == u'':
            break
        drive_strings.append(string_v)
        # Advance past the string body and its NUL terminator (in bytes).
        string_p += (len(string_v) * sizeof_wchar) + sizeof_wchar
    return drive_strings
##def GetLogicalDriveStringsA():
## _GetLogicalDriveStringsA = windll.kernel32.GetLogicalDriveStringsA
## _GetLogicalDriveStringsA.argtypes = [DWORD, LPSTR]
## _GetLogicalDriveStringsA.restype = DWORD
## _GetLogicalDriveStringsA.errcheck = RaiseIfZero
##
## nBufferLength = (4 * 26) + 1 # "X:\\\0" from A to Z plus empty string
## lpBuffer = ctypes.create_string_buffer('', nBufferLength)
## _GetLogicalDriveStringsA(nBufferLength, lpBuffer)
## result = list()
## index = 0
## while 1:
## string = list()
## while 1:
## character = lpBuffer[index]
## index = index + 1
## if character == '\0':
## break
## string.append(character)
## if not string:
## break
## result.append(''.join(string))
## return result
##
##def GetLogicalDriveStringsW():
## _GetLogicalDriveStringsW = windll.kernel32.GetLogicalDriveStringsW
## _GetLogicalDriveStringsW.argtypes = [DWORD, LPWSTR]
## _GetLogicalDriveStringsW.restype = DWORD
## _GetLogicalDriveStringsW.errcheck = RaiseIfZero
##
## nBufferLength = (4 * 26) + 1 # "X:\\\0" from A to Z plus empty string
## lpBuffer = ctypes.create_unicode_buffer(u'', nBufferLength)
## _GetLogicalDriveStringsW(nBufferLength, lpBuffer)
## result = list()
## index = 0
## while 1:
## string = list()
## while 1:
## character = lpBuffer[index]
## index = index + 1
## if character == u'\0':
## break
## string.append(character)
## if not string:
## break
## result.append(u''.join(string))
## return result
# Dispatch to the ANSI or Unicode version based on the argument string type.
GetLogicalDriveStrings = GuessStringType(GetLogicalDriveStringsA, GetLogicalDriveStringsW)
# DWORD WINAPI QueryDosDevice(
#   __in_opt  LPCTSTR lpDeviceName,
#   __out     LPTSTR lpTargetPath,
#   __in      DWORD ucchMax
# );
def QueryDosDeviceA(lpDeviceName = None):
    """Return the MS-DOS device mapping for the given device name (ANSI).

    An empty or None device name queries the full list of MS-DOS devices.
    Raises WindowsError on API failure (via RaiseIfZero).
    """
    fn = windll.kernel32.QueryDosDeviceA
    fn.argtypes = [LPSTR, LPSTR, DWORD]
    fn.restype  = DWORD
    fn.errcheck = RaiseIfZero
    if not lpDeviceName:
        lpDeviceName = None  # normalize empty string to NULL
    max_chars = 0x1000
    target = ctypes.create_string_buffer('', max_chars)
    fn(lpDeviceName, target, max_chars)
    return target.value
def QueryDosDeviceW(lpDeviceName = None):
    """Return the MS-DOS device mapping for the given device name (Unicode).

    NOTE: lpDeviceName now defaults to None for consistency with
    QueryDosDeviceA (passing NULL is valid per the Win32 API and retrieves
    the list of all MS-DOS devices). Backward compatible.

    @raise WindowsError: The API call failed.
    """
    _QueryDosDeviceW = windll.kernel32.QueryDosDeviceW
    _QueryDosDeviceW.argtypes = [LPWSTR, LPWSTR, DWORD]
    _QueryDosDeviceW.restype  = DWORD
    _QueryDosDeviceW.errcheck = RaiseIfZero
    if not lpDeviceName:
        lpDeviceName = None  # normalize empty string to NULL
    ucchMax = 0x1000
    lpTargetPath = ctypes.create_unicode_buffer(u'', ucchMax)
    _QueryDosDeviceW(lpDeviceName, lpTargetPath, ucchMax)
    return lpTargetPath.value

# Dispatch to the ANSI or Unicode version based on the argument string type.
QueryDosDevice = GuessStringType(QueryDosDeviceA, QueryDosDeviceW)
# LPVOID WINAPI MapViewOfFile(
#   __in  HANDLE hFileMappingObject,
#   __in  DWORD dwDesiredAccess,
#   __in  DWORD dwFileOffsetHigh,
#   __in  DWORD dwFileOffsetLow,
#   __in  SIZE_T dwNumberOfBytesToMap
# );
def MapViewOfFile(hFileMappingObject, dwDesiredAccess = FILE_MAP_ALL_ACCESS | FILE_MAP_EXECUTE, dwFileOffsetHigh = 0, dwFileOffsetLow = 0, dwNumberOfBytesToMap = 0):
    """Map a view of a file mapping into this process and return its base address.

    Raises WindowsError when the mapping fails (NULL return).
    """
    fn = windll.kernel32.MapViewOfFile
    fn.argtypes = [HANDLE, DWORD, DWORD, DWORD, SIZE_T]
    fn.restype  = LPVOID
    base = fn(hFileMappingObject, dwDesiredAccess, dwFileOffsetHigh, dwFileOffsetLow, dwNumberOfBytesToMap)
    if base == NULL:
        raise ctypes.WinError()
    return base
# BOOL WINAPI UnmapViewOfFile(
#   __in  LPCVOID lpBaseAddress
# );
def UnmapViewOfFile(lpBaseAddress):
    """Unmap a previously mapped view of a file mapping.

    Raises WindowsError on API failure (via RaiseIfZero).
    """
    fn = windll.kernel32.UnmapViewOfFile
    fn.argtypes = [LPVOID]
    fn.restype  = bool
    fn.errcheck = RaiseIfZero
    fn(lpBaseAddress)
# HANDLE WINAPI OpenFileMapping(
#   __in  DWORD dwDesiredAccess,
#   __in  BOOL bInheritHandle,
#   __in  LPCTSTR lpName
# );
def OpenFileMappingA(dwDesiredAccess, bInheritHandle, lpName):
    """Open an existing named file mapping object (ANSI).

    Returns a FileMappingHandle wrapper; raises WindowsError on failure.
    """
    fn = windll.kernel32.OpenFileMappingA
    fn.argtypes = [DWORD, BOOL, LPSTR]
    fn.restype  = HANDLE
    fn.errcheck = RaiseIfZero
    raw = fn(dwDesiredAccess, bool(bInheritHandle), lpName)
    return FileMappingHandle(raw)
def OpenFileMappingW(dwDesiredAccess, bInheritHandle, lpName):
    """Open an existing named file mapping object (Unicode).

    Returns a FileMappingHandle wrapper; raises WindowsError on failure.
    """
    fn = windll.kernel32.OpenFileMappingW
    fn.argtypes = [DWORD, BOOL, LPWSTR]
    fn.restype  = HANDLE
    fn.errcheck = RaiseIfZero
    raw = fn(dwDesiredAccess, bool(bInheritHandle), lpName)
    return FileMappingHandle(raw)

# Dispatch to the ANSI or Unicode version based on the argument string type.
OpenFileMapping = GuessStringType(OpenFileMappingA, OpenFileMappingW)
# HANDLE WINAPI CreateFileMapping(
#   __in      HANDLE hFile,
#   __in_opt  LPSECURITY_ATTRIBUTES lpAttributes,
#   __in      DWORD flProtect,
#   __in      DWORD dwMaximumSizeHigh,
#   __in      DWORD dwMaximumSizeLow,
#   __in_opt  LPCTSTR lpName
# );
def CreateFileMappingA(hFile, lpAttributes = None, flProtect = PAGE_EXECUTE_READWRITE, dwMaximumSizeHigh = 0, dwMaximumSizeLow = 0, lpName = None):
    """Create (or open) a file mapping object for the given file (ANSI).

    Returns a FileMappingHandle wrapper; raises WindowsError on failure.
    """
    fn = windll.kernel32.CreateFileMappingA
    fn.argtypes = [HANDLE, LPVOID, DWORD, DWORD, DWORD, LPSTR]
    fn.restype  = HANDLE
    fn.errcheck = RaiseIfZero
    # Normalize optional parameters: pointer to the attributes struct,
    # NULL for an unnamed mapping.
    attrs = ctypes.pointer(lpAttributes) if lpAttributes else None
    name = lpName if lpName else None
    raw = fn(hFile, attrs, flProtect, dwMaximumSizeHigh, dwMaximumSizeLow, name)
    return FileMappingHandle(raw)
def CreateFileMappingW(hFile, lpAttributes = None, flProtect = PAGE_EXECUTE_READWRITE, dwMaximumSizeHigh = 0, dwMaximumSizeLow = 0, lpName = None):
    """Create (or open) a file mapping object for the given file (Unicode).

    Returns a FileMappingHandle wrapper; raises WindowsError on failure.
    """
    fn = windll.kernel32.CreateFileMappingW
    fn.argtypes = [HANDLE, LPVOID, DWORD, DWORD, DWORD, LPWSTR]
    fn.restype  = HANDLE
    fn.errcheck = RaiseIfZero
    # Normalize optional parameters: pointer to the attributes struct,
    # NULL for an unnamed mapping.
    attrs = ctypes.pointer(lpAttributes) if lpAttributes else None
    name = lpName if lpName else None
    raw = fn(hFile, attrs, flProtect, dwMaximumSizeHigh, dwMaximumSizeLow, name)
    return FileMappingHandle(raw)

# Dispatch to the ANSI or Unicode version based on the argument string type.
CreateFileMapping = GuessStringType(CreateFileMappingA, CreateFileMappingW)
# HANDLE WINAPI CreateFile(
#   __in      LPCTSTR lpFileName,
#   __in      DWORD dwDesiredAccess,
#   __in      DWORD dwShareMode,
#   __in_opt  LPSECURITY_ATTRIBUTES lpSecurityAttributes,
#   __in      DWORD dwCreationDisposition,
#   __in      DWORD dwFlagsAndAttributes,
#   __in_opt  HANDLE hTemplateFile
# );
def CreateFileA(lpFileName, dwDesiredAccess = GENERIC_ALL, dwShareMode = 0, lpSecurityAttributes = None, dwCreationDisposition = OPEN_ALWAYS, dwFlagsAndAttributes = FILE_ATTRIBUTE_NORMAL, hTemplateFile = None):
    """Create or open a file or device (ANSI) and return a FileHandle wrapper.

    Raises WindowsError when the API returns INVALID_HANDLE_VALUE.
    """
    fn = windll.kernel32.CreateFileA
    fn.argtypes = [LPSTR, DWORD, DWORD, LPVOID, DWORD, DWORD, HANDLE]
    fn.restype  = HANDLE
    # Normalize optional parameters.
    name  = lpFileName if lpFileName else None
    attrs = ctypes.pointer(lpSecurityAttributes) if lpSecurityAttributes else None
    raw = fn(name, dwDesiredAccess, dwShareMode, attrs, dwCreationDisposition, dwFlagsAndAttributes, hTemplateFile)
    if raw == INVALID_HANDLE_VALUE:
        raise ctypes.WinError()
    return FileHandle(raw)
def CreateFileW(lpFileName, dwDesiredAccess = GENERIC_ALL, dwShareMode = 0, lpSecurityAttributes = None, dwCreationDisposition = OPEN_ALWAYS, dwFlagsAndAttributes = FILE_ATTRIBUTE_NORMAL, hTemplateFile = None):
    """Create or open a file or device (Unicode) and return a FileHandle wrapper.

    Raises WindowsError when the API returns INVALID_HANDLE_VALUE.
    """
    fn = windll.kernel32.CreateFileW
    fn.argtypes = [LPWSTR, DWORD, DWORD, LPVOID, DWORD, DWORD, HANDLE]
    fn.restype  = HANDLE
    # Normalize optional parameters.
    name  = lpFileName if lpFileName else None
    attrs = ctypes.pointer(lpSecurityAttributes) if lpSecurityAttributes else None
    raw = fn(name, dwDesiredAccess, dwShareMode, attrs, dwCreationDisposition, dwFlagsAndAttributes, hTemplateFile)
    if raw == INVALID_HANDLE_VALUE:
        raise ctypes.WinError()
    return FileHandle(raw)

# Dispatch to the ANSI or Unicode version based on the argument string type.
CreateFile = GuessStringType(CreateFileA, CreateFileW)
# BOOL WINAPI FlushFileBuffers(
#   __in  HANDLE hFile
# );
def FlushFileBuffers(hFile):
    """Flush the OS buffers of an open file to disk.

    Raises WindowsError on API failure (via RaiseIfZero).
    """
    fn = windll.kernel32.FlushFileBuffers
    fn.argtypes = [HANDLE]
    fn.restype  = bool
    fn.errcheck = RaiseIfZero
    fn(hFile)
# BOOL WINAPI FlushViewOfFile(
#   __in  LPCVOID lpBaseAddress,
#   __in  SIZE_T dwNumberOfBytesToFlush
# );
def FlushViewOfFile(lpBaseAddress, dwNumberOfBytesToFlush = 0):
    """Write the modified pages of a mapped file view back to disk.

    A byte count of 0 flushes from the base address to the end of the view.
    Raises WindowsError on API failure (via RaiseIfZero).
    """
    fn = windll.kernel32.FlushViewOfFile
    fn.argtypes = [LPVOID, SIZE_T]
    fn.restype  = bool
    fn.errcheck = RaiseIfZero
    fn(lpBaseAddress, dwNumberOfBytesToFlush)
# DWORD WINAPI SearchPath(
#   __in_opt   LPCTSTR lpPath,
#   __in       LPCTSTR lpFileName,
#   __in_opt   LPCTSTR lpExtension,
#   __in       DWORD nBufferLength,
#   __out      LPTSTR lpBuffer,
#   __out_opt  LPTSTR *lpFilePart
# );
def SearchPathA(lpPath, lpFileName, lpExtension):
    """Search for a file (ANSI) and return (full_path, file_part).

    Uses the two-call pattern: first call with no buffer to get the
    required size, second call to fill the buffer.
    """
    _SearchPathA = windll.kernel32.SearchPathA
    _SearchPathA.argtypes = [LPSTR, LPSTR, LPSTR, DWORD, LPSTR, POINTER(LPSTR)]
    _SearchPathA.restype  = DWORD
    _SearchPathA.errcheck = RaiseIfZero
    # Normalize empty strings to NULL for the optional parameters.
    if not lpPath:
        lpPath = None
    if not lpExtension:
        lpExtension = None
    nBufferLength = _SearchPathA(lpPath, lpFileName, lpExtension, 0, None, None)
    lpBuffer = ctypes.create_string_buffer('', nBufferLength + 1)
    lpFilePart = LPSTR()
    _SearchPathA(lpPath, lpFileName, lpExtension, nBufferLength, lpBuffer, byref(lpFilePart))
    lpFilePart = lpFilePart.value
    lpBuffer = lpBuffer.value
    # An empty result means the file was not found; make sure a sensible
    # error code is raised even when the API left ERROR_SUCCESS behind.
    if lpBuffer == '':
        if GetLastError() == ERROR_SUCCESS:
            raise ctypes.WinError(ERROR_FILE_NOT_FOUND)
        raise ctypes.WinError()
    return (lpBuffer, lpFilePart)
def SearchPathW(lpPath, lpFileName, lpExtension):
    """Search for a file (Unicode) and return (full_path, file_part).

    Uses the two-call pattern: first call with no buffer to get the
    required size, second call to fill the buffer.
    """
    _SearchPathW = windll.kernel32.SearchPathW
    _SearchPathW.argtypes = [LPWSTR, LPWSTR, LPWSTR, DWORD, LPWSTR, POINTER(LPWSTR)]
    _SearchPathW.restype  = DWORD
    _SearchPathW.errcheck = RaiseIfZero
    # Normalize empty strings to NULL for the optional parameters.
    if not lpPath:
        lpPath = None
    if not lpExtension:
        lpExtension = None
    nBufferLength = _SearchPathW(lpPath, lpFileName, lpExtension, 0, None, None)
    lpBuffer = ctypes.create_unicode_buffer(u'', nBufferLength + 1)
    lpFilePart = LPWSTR()
    _SearchPathW(lpPath, lpFileName, lpExtension, nBufferLength, lpBuffer, byref(lpFilePart))
    lpFilePart = lpFilePart.value
    lpBuffer = lpBuffer.value
    # An empty result means the file was not found; make sure a sensible
    # error code is raised even when the API left ERROR_SUCCESS behind.
    if lpBuffer == u'':
        if GetLastError() == ERROR_SUCCESS:
            raise ctypes.WinError(ERROR_FILE_NOT_FOUND)
        raise ctypes.WinError()
    return (lpBuffer, lpFilePart)

# Dispatch to the ANSI or Unicode version based on the argument string type.
SearchPath = GuessStringType(SearchPathA, SearchPathW)
# BOOL SetSearchPathMode(
#   __in  DWORD Flags
# );
def SetSearchPathMode(Flags):
    """Set the per-process SearchPath() search mode flags.

    Raises WindowsError on API failure (via RaiseIfZero).
    """
    fn = windll.kernel32.SetSearchPathMode
    fn.argtypes = [DWORD]
    fn.restype  = bool
    fn.errcheck = RaiseIfZero
    fn(Flags)
# BOOL WINAPI DeviceIoControl(
#   __in         HANDLE hDevice,
#   __in         DWORD dwIoControlCode,
#   __in_opt     LPVOID lpInBuffer,
#   __in         DWORD nInBufferSize,
#   __out_opt    LPVOID lpOutBuffer,
#   __in         DWORD nOutBufferSize,
#   __out_opt    LPDWORD lpBytesReturned,
#   __inout_opt  LPOVERLAPPED lpOverlapped
# );
def DeviceIoControl(hDevice, dwIoControlCode, lpInBuffer, nInBufferSize, lpOutBuffer, nOutBufferSize, lpOverlapped):
    """Send a control code directly to a device driver.

    Returns the number of bytes written to the output buffer.
    Raises WindowsError on API failure (via RaiseIfZero).
    """
    fn = windll.kernel32.DeviceIoControl
    fn.argtypes = [HANDLE, DWORD, LPVOID, DWORD, LPVOID, DWORD, LPDWORD, LPOVERLAPPED]
    fn.restype  = bool
    fn.errcheck = RaiseIfZero
    # Normalize optional buffers to NULL and take a pointer to the
    # OVERLAPPED structure when one was supplied.
    in_buf     = lpInBuffer if lpInBuffer else None
    out_buf    = lpOutBuffer if lpOutBuffer else None
    overlapped = ctypes.pointer(lpOverlapped) if lpOverlapped else None
    returned   = DWORD(0)
    fn(hDevice, dwIoControlCode, in_buf, nInBufferSize, out_buf, nOutBufferSize, byref(returned), overlapped)
    return returned.value
# BOOL GetFileInformationByHandle(
#   HANDLE hFile,
#   LPBY_HANDLE_FILE_INFORMATION lpFileInformation
# );
def GetFileInformationByHandle(hFile):
    """Return a BY_HANDLE_FILE_INFORMATION structure for an open file.

    Raises WindowsError on API failure (via RaiseIfZero).
    """
    fn = windll.kernel32.GetFileInformationByHandle
    fn.argtypes = [HANDLE, LPBY_HANDLE_FILE_INFORMATION]
    fn.restype  = bool
    fn.errcheck = RaiseIfZero
    info = BY_HANDLE_FILE_INFORMATION()
    fn(hFile, byref(info))
    return info
# BOOL WINAPI GetFileInformationByHandleEx(
# __in HANDLE hFile,
# __in FILE_INFO_BY_HANDLE_CLASS FileInformationClass,
# __out LPVOID lpFileInformation,
# __in DWORD dwBufferSize
# );
def GetFileInformationByHandleEx(hFile, FileInformationClass, lpFileInformation, dwBufferSize):
    """Query extended file information for an open handle.

    The caller must allocate *lpFileInformation* as the structure matching
    *FileInformationClass* and pass its size in *dwBufferSize*; the structure
    is filled in place and nothing is returned. Raises WindowsError on failure.
    """
    _GetFileInformationByHandleEx = windll.kernel32.GetFileInformationByHandleEx
    _GetFileInformationByHandleEx.argtypes = [HANDLE, DWORD, LPVOID, DWORD]
    _GetFileInformationByHandleEx.restype = bool
    _GetFileInformationByHandleEx.errcheck = RaiseIfZero
    # XXX TODO
    # support each FileInformationClass so the function can allocate the
    # corresponding structure for the lpFileInformation parameter
    _GetFileInformationByHandleEx(hFile, FileInformationClass, byref(lpFileInformation), dwBufferSize)
# DWORD WINAPI GetFinalPathNameByHandle(
# __in HANDLE hFile,
# __out LPTSTR lpszFilePath,
# __in DWORD cchFilePath,
# __in DWORD dwFlags
# );
def GetFinalPathNameByHandleA(hFile, dwFlags = FILE_NAME_NORMALIZED | VOLUME_NAME_DOS):
    """Return the final (resolved) path of an open file handle (ANSI).

    Raises WindowsError on failure.
    """
    _GetFinalPathNameByHandleA = windll.kernel32.GetFinalPathNameByHandleA
    _GetFinalPathNameByHandleA.argtypes = [HANDLE, LPSTR, DWORD, DWORD]
    _GetFinalPathNameByHandleA.restype  = DWORD
    # First call with a NULL buffer just measures the required length.
    cchFilePath = _GetFinalPathNameByHandleA(hFile, None, 0, dwFlags)
    if cchFilePath == 0:
        raise ctypes.WinError()
    # BUGFIX: initialize with bytes; a str literal raises TypeError on Python 3
    # (matches the compat.b() usage elsewhere in this module).
    lpszFilePath = ctypes.create_string_buffer(b'', cchFilePath + 1)
    nCopied = _GetFinalPathNameByHandleA(hFile, lpszFilePath, cchFilePath, dwFlags)
    if nCopied <= 0 or nCopied > cchFilePath:
        raise ctypes.WinError()
    return lpszFilePath.value
def GetFinalPathNameByHandleW(hFile, dwFlags = FILE_NAME_NORMALIZED | VOLUME_NAME_DOS):
    """Return the final (resolved) path of an open file handle (Unicode)."""
    api = windll.kernel32.GetFinalPathNameByHandleW
    api.restype  = DWORD
    api.argtypes = [HANDLE, LPWSTR, DWORD, DWORD]
    # First call with a NULL buffer just measures the required length.
    needed = api(hFile, None, 0, dwFlags)
    if needed == 0:
        raise ctypes.WinError()
    buf = ctypes.create_unicode_buffer(u'', needed + 1)
    copied = api(hFile, buf, needed, dwFlags)
    if copied <= 0 or copied > needed:
        raise ctypes.WinError()
    return buf.value
GetFinalPathNameByHandle = GuessStringType(GetFinalPathNameByHandleA, GetFinalPathNameByHandleW)
# DWORD GetFullPathName(
# LPCTSTR lpFileName,
# DWORD nBufferLength,
# LPTSTR lpBuffer,
# LPTSTR* lpFilePart
# );
def GetFullPathNameA(lpFileName):
    """Resolve a relative path to a full path (ANSI).

    Returns a (full path, file part) tuple. Raises WindowsError on failure.
    """
    _GetFullPathNameA = windll.kernel32.GetFullPathNameA
    _GetFullPathNameA.argtypes = [LPSTR, DWORD, LPSTR, POINTER(LPSTR)]
    _GetFullPathNameA.restype  = DWORD
    # First call with a NULL buffer just measures the required length.
    nBufferLength = _GetFullPathNameA(lpFileName, 0, None, None)
    if nBufferLength <= 0:
        raise ctypes.WinError()
    # BUGFIX: initialize with bytes; a str literal raises TypeError on Python 3.
    lpBuffer   = ctypes.create_string_buffer(b'', nBufferLength + 1)
    lpFilePart = LPSTR()
    nCopied = _GetFullPathNameA(lpFileName, nBufferLength, lpBuffer, byref(lpFilePart))
    if nCopied > nBufferLength or nCopied == 0:
        raise ctypes.WinError()
    return lpBuffer.value, lpFilePart.value
def GetFullPathNameW(lpFileName):
    """Resolve a relative path to a full path (Unicode).

    Returns a (full path, file part) tuple.
    """
    api = windll.kernel32.GetFullPathNameW
    api.restype  = DWORD
    api.argtypes = [LPWSTR, DWORD, LPWSTR, POINTER(LPWSTR)]
    # Measure first, then fetch into a buffer of exactly the right size.
    needed = api(lpFileName, 0, None, None)
    if needed <= 0:
        raise ctypes.WinError()
    buf      = ctypes.create_unicode_buffer(u'', needed + 1)
    filePart = LPWSTR()
    copied = api(lpFileName, needed, buf, byref(filePart))
    if copied == 0 or copied > needed:
        raise ctypes.WinError()
    return buf.value, filePart.value
GetFullPathName = GuessStringType(GetFullPathNameA, GetFullPathNameW)
# DWORD WINAPI GetTempPath(
# __in DWORD nBufferLength,
# __out LPTSTR lpBuffer
# );
def GetTempPathA():
    """Return the path of the temporary files directory (ANSI).

    Raises WindowsError on failure.
    """
    _GetTempPathA = windll.kernel32.GetTempPathA
    _GetTempPathA.argtypes = [DWORD, LPSTR]
    _GetTempPathA.restype  = DWORD
    # First call with a NULL buffer just measures the required length.
    nBufferLength = _GetTempPathA(0, None)
    if nBufferLength <= 0:
        raise ctypes.WinError()
    # BUGFIX: initialize with bytes; a str literal raises TypeError on Python 3.
    lpBuffer = ctypes.create_string_buffer(b'', nBufferLength)
    nCopied = _GetTempPathA(nBufferLength, lpBuffer)
    if nCopied > nBufferLength or nCopied == 0:
        raise ctypes.WinError()
    return lpBuffer.value
def GetTempPathW():
    """Return the path of the temporary files directory (Unicode)."""
    api = windll.kernel32.GetTempPathW
    api.restype  = DWORD
    api.argtypes = [DWORD, LPWSTR]
    # Measure first, then fetch into a buffer of exactly the right size.
    needed = api(0, None)
    if needed <= 0:
        raise ctypes.WinError()
    buf = ctypes.create_unicode_buffer(u'', needed)
    copied = api(needed, buf)
    if copied == 0 or copied > needed:
        raise ctypes.WinError()
    return buf.value
GetTempPath = GuessStringType(GetTempPathA, GetTempPathW)
# UINT WINAPI GetTempFileName(
# __in LPCTSTR lpPathName,
# __in LPCTSTR lpPrefixString,
# __in UINT uUnique,
# __out LPTSTR lpTempFileName
# );
def GetTempFileNameA(lpPathName = None, lpPrefixString = "TMP", uUnique = 0):
    """Create a name for a temporary file (ANSI).

    Returns a (file name, unique number) tuple. When *lpPathName* is None
    the system temporary directory is used. Raises WindowsError on failure.
    """
    _GetTempFileNameA = windll.kernel32.GetTempFileNameA
    _GetTempFileNameA.argtypes = [LPSTR, LPSTR, UINT, LPSTR]
    _GetTempFileNameA.restype  = UINT
    if lpPathName is None:
        lpPathName = GetTempPathA()
    # BUGFIX: initialize with bytes; a str literal raises TypeError on Python 3.
    lpTempFileName = ctypes.create_string_buffer(b'', MAX_PATH)
    uUnique = _GetTempFileNameA(lpPathName, lpPrefixString, uUnique, lpTempFileName)
    if uUnique == 0:
        raise ctypes.WinError()
    return lpTempFileName.value, uUnique
def GetTempFileNameW(lpPathName = None, lpPrefixString = u"TMP", uUnique = 0):
    """Create a name for a temporary file (Unicode).

    Returns a (file name, unique number) tuple.
    """
    api = windll.kernel32.GetTempFileNameW
    api.restype  = UINT
    api.argtypes = [LPWSTR, LPWSTR, UINT, LPWSTR]
    # Default to the system temporary directory.
    if lpPathName is None:
        lpPathName = GetTempPathW()
    buf = ctypes.create_unicode_buffer(u'', MAX_PATH)
    uUnique = api(lpPathName, lpPrefixString, uUnique, buf)
    if uUnique == 0:
        raise ctypes.WinError()
    return buf.value, uUnique
GetTempFileName = GuessStringType(GetTempFileNameA, GetTempFileNameW)
# DWORD WINAPI GetCurrentDirectory(
# __in DWORD nBufferLength,
# __out LPTSTR lpBuffer
# );
def GetCurrentDirectoryA():
    """Return the current working directory (ANSI).

    Raises WindowsError on failure.
    """
    _GetCurrentDirectoryA = windll.kernel32.GetCurrentDirectoryA
    _GetCurrentDirectoryA.argtypes = [DWORD, LPSTR]
    _GetCurrentDirectoryA.restype  = DWORD
    # First call with a NULL buffer just measures the required length.
    nBufferLength = _GetCurrentDirectoryA(0, None)
    if nBufferLength <= 0:
        raise ctypes.WinError()
    # BUGFIX: initialize with bytes; a str literal raises TypeError on Python 3.
    lpBuffer = ctypes.create_string_buffer(b'', nBufferLength)
    nCopied = _GetCurrentDirectoryA(nBufferLength, lpBuffer)
    if nCopied > nBufferLength or nCopied == 0:
        raise ctypes.WinError()
    return lpBuffer.value
def GetCurrentDirectoryW():
    """Return the current working directory (Unicode)."""
    api = windll.kernel32.GetCurrentDirectoryW
    api.restype  = DWORD
    api.argtypes = [DWORD, LPWSTR]
    # Measure first, then fetch into a buffer of exactly the right size.
    needed = api(0, None)
    if needed <= 0:
        raise ctypes.WinError()
    buf = ctypes.create_unicode_buffer(u'', needed)
    copied = api(needed, buf)
    if copied == 0 or copied > needed:
        raise ctypes.WinError()
    return buf.value
GetCurrentDirectory = GuessStringType(GetCurrentDirectoryA, GetCurrentDirectoryW)
#------------------------------------------------------------------------------
# Control-C handler
# BOOL WINAPI HandlerRoutine(
# __in DWORD dwCtrlType
# );
PHANDLER_ROUTINE = ctypes.WINFUNCTYPE(BOOL, DWORD)
# BOOL WINAPI SetConsoleCtrlHandler(
#   __in_opt  PHANDLER_ROUTINE HandlerRoutine,
#   __in      BOOL Add
# );
def SetConsoleCtrlHandler(HandlerRoutine = None, Add = True):
    """Install or remove a console control handler.

    *HandlerRoutine* must already be a PHANDLER_ROUTINE instance: plain
    Python callables are not wrapped automatically because (a) the raw
    pointer value is meaningful to the API and (b) the wrapper would be
    garbage collected while the OS still holds the pointer.
    """
    api = windll.kernel32.SetConsoleCtrlHandler
    api.restype  = bool
    api.argtypes = [PHANDLER_ROUTINE, BOOL]
    api.errcheck = RaiseIfZero
    api(HandlerRoutine, bool(Add))
# BOOL WINAPI GenerateConsoleCtrlEvent(
# __in DWORD dwCtrlEvent,
# __in DWORD dwProcessGroupId
# );
def GenerateConsoleCtrlEvent(dwCtrlEvent, dwProcessGroupId):
    """Send a CTRL_C_EVENT or CTRL_BREAK_EVENT to a process group."""
    api = windll.kernel32.GenerateConsoleCtrlEvent
    api.restype  = bool
    api.argtypes = [DWORD, DWORD]
    api.errcheck = RaiseIfZero
    api(dwCtrlEvent, dwProcessGroupId)
#------------------------------------------------------------------------------
# Synchronization API
# XXX NOTE
#
# Instead of waiting forever, we wait for a small period of time and loop.
# This is a workaround for an unwanted behavior of psyco-accelerated code:
# you can't interrupt a blocking call using Ctrl+C, because signal processing
# is only done between C calls.
#
# Also see: bug #2793618 in Psyco project
# http://sourceforge.net/tracker/?func=detail&aid=2793618&group_id=41036&atid=429622
# DWORD WINAPI WaitForSingleObject(
# HANDLE hHandle,
# DWORD dwMilliseconds
# );
def WaitForSingleObject(hHandle, dwMilliseconds = INFINITE):
    """Wait until the object is signaled or the timeout elapses.

    A None timeout is treated as INFINITE. Infinite waits are done as a
    polling loop with a short native timeout so Python signal handlers
    (e.g. Ctrl+C) still get a chance to run between calls.
    """
    api = windll.kernel32.WaitForSingleObject
    api.restype  = DWORD
    api.argtypes = [HANDLE, DWORD]
    if not dwMilliseconds and dwMilliseconds != 0:
        dwMilliseconds = INFINITE
    if dwMilliseconds == INFINITE:
        while True:
            r = api(hHandle, 100)
            if r == WAIT_FAILED:
                raise ctypes.WinError()
            if r != WAIT_TIMEOUT:
                return r
    r = api(hHandle, dwMilliseconds)
    if r == WAIT_FAILED:
        raise ctypes.WinError()
    return r
# DWORD WINAPI WaitForSingleObjectEx(
# HANDLE hHandle,
# DWORD dwMilliseconds,
# BOOL bAlertable
# );
def WaitForSingleObjectEx(hHandle, dwMilliseconds = INFINITE, bAlertable = True):
    """Alertable wait on a single object (kernel32!WaitForSingleObjectEx).

    A None timeout is treated as INFINITE; infinite waits poll in 100 ms
    slices so Python signal handlers are not starved.
    """
    api = windll.kernel32.WaitForSingleObjectEx
    api.restype  = DWORD
    api.argtypes = [HANDLE, DWORD, BOOL]
    if not dwMilliseconds and dwMilliseconds != 0:
        dwMilliseconds = INFINITE
    if dwMilliseconds == INFINITE:
        while True:
            r = api(hHandle, 100, bool(bAlertable))
            if r == WAIT_FAILED:
                raise ctypes.WinError()
            if r != WAIT_TIMEOUT:
                return r
    r = api(hHandle, dwMilliseconds, bool(bAlertable))
    if r == WAIT_FAILED:
        raise ctypes.WinError()
    return r
# DWORD WINAPI WaitForMultipleObjects(
# DWORD nCount,
# const HANDLE *lpHandles,
# BOOL bWaitAll,
# DWORD dwMilliseconds
# );
def WaitForMultipleObjects(handles, bWaitAll = False, dwMilliseconds = INFINITE):
    """Wait on several kernel objects at once.

    *handles* is any sequence of handle values. A None timeout is treated
    as INFINITE; infinite waits poll in 100 ms slices so Python signal
    handlers still run. Returns the native wait result code.
    """
    _WaitForMultipleObjects = windll.kernel32.WaitForMultipleObjects
    _WaitForMultipleObjects.argtypes = [DWORD, POINTER(HANDLE), BOOL, DWORD]
    _WaitForMultipleObjects.restype  = DWORD
    if not dwMilliseconds and dwMilliseconds != 0:
        dwMilliseconds = INFINITE
    nCount        = len(handles)
    lpHandlesType = HANDLE * nCount
    lpHandles     = lpHandlesType(*handles)
    # BUGFIX: the original calls omitted the nCount argument, so every
    # call raised a ctypes TypeError (argtypes declares 4 parameters).
    if dwMilliseconds != INFINITE:
        r = _WaitForMultipleObjects(nCount, byref(lpHandles), bool(bWaitAll), dwMilliseconds)
        if r == WAIT_FAILED:
            raise ctypes.WinError()
    else:
        while 1:
            r = _WaitForMultipleObjects(nCount, byref(lpHandles), bool(bWaitAll), 100)
            if r == WAIT_FAILED:
                raise ctypes.WinError()
            if r != WAIT_TIMEOUT:
                break
    return r
# DWORD WINAPI WaitForMultipleObjectsEx(
# DWORD nCount,
# const HANDLE *lpHandles,
# BOOL bWaitAll,
# DWORD dwMilliseconds,
# BOOL bAlertable
# );
def WaitForMultipleObjectsEx(handles, bWaitAll = False, dwMilliseconds = INFINITE, bAlertable = True):
    """Alertable wait on several kernel objects at once.

    *handles* is any sequence of handle values. A None timeout is treated
    as INFINITE; infinite waits poll in 100 ms slices so Python signal
    handlers still run. Returns the native wait result code.
    """
    _WaitForMultipleObjectsEx = windll.kernel32.WaitForMultipleObjectsEx
    # BUGFIX: argtypes was missing the trailing BOOL for bAlertable.
    _WaitForMultipleObjectsEx.argtypes = [DWORD, POINTER(HANDLE), BOOL, DWORD, BOOL]
    _WaitForMultipleObjectsEx.restype  = DWORD
    if not dwMilliseconds and dwMilliseconds != 0:
        dwMilliseconds = INFINITE
    nCount        = len(handles)
    lpHandlesType = HANDLE * nCount
    lpHandles     = lpHandlesType(*handles)
    # BUGFIX: the original calls omitted the nCount argument, so every
    # call raised a ctypes TypeError.
    if dwMilliseconds != INFINITE:
        r = _WaitForMultipleObjectsEx(nCount, byref(lpHandles), bool(bWaitAll), dwMilliseconds, bool(bAlertable))
        if r == WAIT_FAILED:
            raise ctypes.WinError()
    else:
        while 1:
            r = _WaitForMultipleObjectsEx(nCount, byref(lpHandles), bool(bWaitAll), 100, bool(bAlertable))
            if r == WAIT_FAILED:
                raise ctypes.WinError()
            if r != WAIT_TIMEOUT:
                break
    return r
# HANDLE WINAPI CreateMutex(
# _In_opt_ LPSECURITY_ATTRIBUTES lpMutexAttributes,
# _In_ BOOL bInitialOwner,
# _In_opt_ LPCTSTR lpName
# );
def CreateMutexA(lpMutexAttributes = None, bInitialOwner = True, lpName = None):
    """Create (or open) a mutex object (ANSI); returns a Handle."""
    api = windll.kernel32.CreateMutexA
    api.restype  = HANDLE
    api.argtypes = [LPVOID, BOOL, LPSTR]
    api.errcheck = RaiseIfZero
    return Handle( api(lpMutexAttributes, bInitialOwner, lpName) )
def CreateMutexW(lpMutexAttributes = None, bInitialOwner = True, lpName = None):
    """Create (or open) a mutex object (Unicode); returns a Handle."""
    api = windll.kernel32.CreateMutexW
    api.restype  = HANDLE
    api.argtypes = [LPVOID, BOOL, LPWSTR]
    api.errcheck = RaiseIfZero
    return Handle( api(lpMutexAttributes, bInitialOwner, lpName) )
CreateMutex = GuessStringType(CreateMutexA, CreateMutexW)
# HANDLE WINAPI OpenMutex(
# _In_ DWORD dwDesiredAccess,
# _In_ BOOL bInheritHandle,
# _In_ LPCTSTR lpName
# );
def OpenMutexA(dwDesiredAccess = MUTEX_ALL_ACCESS, bInitialOwner = True, lpName = None):
    """Open an existing named mutex (ANSI); returns a Handle.

    Note: the second Win32 parameter is really bInheritHandle; the Python
    parameter keeps its historical name for keyword-argument compatibility.
    """
    _OpenMutexA = windll.kernel32.OpenMutexA
    _OpenMutexA.argtypes = [DWORD, BOOL, LPSTR]
    _OpenMutexA.restype  = HANDLE
    _OpenMutexA.errcheck = RaiseIfZero
    # BUGFIX: the original referenced the undefined name 'lpMutexAttributes'
    # (copy/paste from CreateMutexA), raising NameError on every call.
    return Handle( _OpenMutexA(dwDesiredAccess, bInitialOwner, lpName) )
def OpenMutexW(dwDesiredAccess = MUTEX_ALL_ACCESS, bInitialOwner = True, lpName = None):
    """Open an existing named mutex (Unicode); returns a Handle."""
    _OpenMutexW = windll.kernel32.OpenMutexW
    _OpenMutexW.argtypes = [DWORD, BOOL, LPWSTR]
    _OpenMutexW.restype  = HANDLE
    _OpenMutexW.errcheck = RaiseIfZero
    # BUGFIX: same undefined-name bug as OpenMutexA.
    return Handle( _OpenMutexW(dwDesiredAccess, bInitialOwner, lpName) )
OpenMutex = GuessStringType(OpenMutexA, OpenMutexW)
# HANDLE WINAPI CreateEvent(
# _In_opt_ LPSECURITY_ATTRIBUTES lpEventAttributes,
# _In_ BOOL bManualReset,
# _In_ BOOL bInitialState,
# _In_opt_ LPCTSTR lpName
# );
def CreateEventA(lpMutexAttributes = None, bManualReset = False, bInitialState = False, lpName = None):
    """Create (or open) an event object (ANSI); returns a Handle."""
    api = windll.kernel32.CreateEventA
    api.restype  = HANDLE
    api.argtypes = [LPVOID, BOOL, BOOL, LPSTR]
    api.errcheck = RaiseIfZero
    return Handle( api(lpMutexAttributes, bManualReset, bInitialState, lpName) )
def CreateEventW(lpMutexAttributes = None, bManualReset = False, bInitialState = False, lpName = None):
    """Create (or open) an event object (Unicode); returns a Handle."""
    api = windll.kernel32.CreateEventW
    api.restype  = HANDLE
    api.argtypes = [LPVOID, BOOL, BOOL, LPWSTR]
    api.errcheck = RaiseIfZero
    return Handle( api(lpMutexAttributes, bManualReset, bInitialState, lpName) )
CreateEvent = GuessStringType(CreateEventA, CreateEventW)
# HANDLE WINAPI OpenEvent(
# _In_ DWORD dwDesiredAccess,
# _In_ BOOL bInheritHandle,
# _In_ LPCTSTR lpName
# );
def OpenEventA(dwDesiredAccess = EVENT_ALL_ACCESS, bInheritHandle = False, lpName = None):
    """Open an existing named event (ANSI); returns a Handle."""
    api = windll.kernel32.OpenEventA
    api.restype  = HANDLE
    api.argtypes = [DWORD, BOOL, LPSTR]
    api.errcheck = RaiseIfZero
    return Handle( api(dwDesiredAccess, bInheritHandle, lpName) )
def OpenEventW(dwDesiredAccess = EVENT_ALL_ACCESS, bInheritHandle = False, lpName = None):
    """Open an existing named event (Unicode); returns a Handle."""
    api = windll.kernel32.OpenEventW
    api.restype  = HANDLE
    api.argtypes = [DWORD, BOOL, LPWSTR]
    api.errcheck = RaiseIfZero
    return Handle( api(dwDesiredAccess, bInheritHandle, lpName) )
OpenEvent = GuessStringType(OpenEventA, OpenEventW)
# HANDLE WINAPI CreateSemaphore(
# _In_opt_ LPSECURITY_ATTRIBUTES lpSemaphoreAttributes,
# _In_ LONG lInitialCount,
# _In_ LONG lMaximumCount,
# _In_opt_ LPCTSTR lpName
# );
# TODO
# HANDLE WINAPI OpenSemaphore(
# _In_ DWORD dwDesiredAccess,
# _In_ BOOL bInheritHandle,
# _In_ LPCTSTR lpName
# );
# TODO
# BOOL WINAPI ReleaseMutex(
# _In_ HANDLE hMutex
# );
def ReleaseMutex(hMutex):
    """Release ownership of a mutex object."""
    api = windll.kernel32.ReleaseMutex
    api.restype  = bool
    api.argtypes = [HANDLE]
    api.errcheck = RaiseIfZero
    api(hMutex)
# BOOL WINAPI SetEvent(
# _In_ HANDLE hEvent
# );
def SetEvent(hEvent):
    """Set an event object to the signaled state."""
    api = windll.kernel32.SetEvent
    api.restype  = bool
    api.argtypes = [HANDLE]
    api.errcheck = RaiseIfZero
    api(hEvent)
# BOOL WINAPI ResetEvent(
# _In_ HANDLE hEvent
# );
def ResetEvent(hEvent):
    """Set an event object back to the nonsignaled state."""
    api = windll.kernel32.ResetEvent
    api.restype  = bool
    api.argtypes = [HANDLE]
    api.errcheck = RaiseIfZero
    api(hEvent)
# BOOL WINAPI PulseEvent(
# _In_ HANDLE hEvent
# );
def PulseEvent(hEvent):
    """Signal an event object and immediately reset it."""
    api = windll.kernel32.PulseEvent
    api.restype  = bool
    api.argtypes = [HANDLE]
    api.errcheck = RaiseIfZero
    api(hEvent)
# BOOL WINAPI ReleaseSemaphore(
# _In_ HANDLE hSemaphore,
# _In_ LONG lReleaseCount,
# _Out_opt_ LPLONG lpPreviousCount
# );
# TODO
#------------------------------------------------------------------------------
# Debug API
# BOOL WaitForDebugEvent(
# LPDEBUG_EVENT lpDebugEvent,
# DWORD dwMilliseconds
# );
def WaitForDebugEvent(dwMilliseconds = INFINITE):
    """Block until a debug event is reported by any debuggee.

    Returns a filled-in DEBUG_EVENT structure. A None timeout means
    INFINITE; infinite waits poll in 100 ms slices so the Python GIL
    and signal handlers are not starved.
    """
    api = windll.kernel32.WaitForDebugEvent
    api.restype  = DWORD
    api.argtypes = [LPDEBUG_EVENT, DWORD]
    if not dwMilliseconds and dwMilliseconds != 0:
        dwMilliseconds = INFINITE
    event = DEBUG_EVENT()
    event.dwDebugEventCode = 0
    event.dwProcessId      = 0
    event.dwThreadId       = 0
    if dwMilliseconds != INFINITE:
        if api(byref(event), dwMilliseconds) == 0:
            raise ctypes.WinError()
    else:
        # Poll so the GIL is not locked for too long per native call.
        while True:
            if api(byref(event), 100) != 0:
                break
            code = GetLastError()
            if code not in (ERROR_SEM_TIMEOUT, WAIT_TIMEOUT):
                raise ctypes.WinError(code)
    return event
# BOOL ContinueDebugEvent(
# DWORD dwProcessId,
# DWORD dwThreadId,
# DWORD dwContinueStatus
# );
def ContinueDebugEvent(dwProcessId, dwThreadId, dwContinueStatus = DBG_EXCEPTION_NOT_HANDLED):
    """Resume a thread previously stopped by a debug event."""
    api = windll.kernel32.ContinueDebugEvent
    api.restype  = bool
    api.argtypes = [DWORD, DWORD, DWORD]
    api.errcheck = RaiseIfZero
    api(dwProcessId, dwThreadId, dwContinueStatus)
# BOOL WINAPI FlushInstructionCache(
# __in HANDLE hProcess,
# __in LPCVOID lpBaseAddress,
# __in SIZE_T dwSize
# );
def FlushInstructionCache(hProcess, lpBaseAddress = None, dwSize = 0):
    """Flush the instruction cache for a memory range of another process.

    See http://blogs.msdn.com/oldnewthing/archive/2003/12/08/55954.aspx#55958
    """
    api = windll.kernel32.FlushInstructionCache
    api.restype  = bool
    api.argtypes = [HANDLE, LPVOID, SIZE_T]
    api.errcheck = RaiseIfZero
    api(hProcess, lpBaseAddress, dwSize)
# BOOL DebugActiveProcess(
# DWORD dwProcessId
# );
def DebugActiveProcess(dwProcessId):
    """Attach the calling process as a debugger to the given PID."""
    api = windll.kernel32.DebugActiveProcess
    api.restype  = bool
    api.argtypes = [DWORD]
    api.errcheck = RaiseIfZero
    api(dwProcessId)
# BOOL DebugActiveProcessStop(
# DWORD dwProcessId
# );
def DebugActiveProcessStop(dwProcessId):
    """Detach the calling debugger from the given PID."""
    api = windll.kernel32.DebugActiveProcessStop
    api.restype  = bool
    api.argtypes = [DWORD]
    api.errcheck = RaiseIfZero
    api(dwProcessId)
# BOOL CheckRemoteDebuggerPresent(
# HANDLE hProcess,
# PBOOL pbDebuggerPresent
# );
def CheckRemoteDebuggerPresent(hProcess):
    """Return True if a debugger is attached to the given process."""
    api = windll.kernel32.CheckRemoteDebuggerPresent
    api.restype  = bool
    api.argtypes = [HANDLE, PBOOL]
    api.errcheck = RaiseIfZero
    flag = BOOL(0)
    api(hProcess, byref(flag))
    return bool(flag.value)
# BOOL DebugSetProcessKillOnExit(
# BOOL KillOnExit
# );
def DebugSetProcessKillOnExit(KillOnExit):
    """Choose whether debuggees are killed when the debugger exits."""
    api = windll.kernel32.DebugSetProcessKillOnExit
    api.restype  = bool
    api.argtypes = [BOOL]
    api.errcheck = RaiseIfZero
    api(bool(KillOnExit))
# BOOL DebugBreakProcess(
# HANDLE Process
# );
def DebugBreakProcess(hProcess):
    """Trigger a breakpoint exception in another process."""
    api = windll.kernel32.DebugBreakProcess
    api.restype  = bool
    api.argtypes = [HANDLE]
    api.errcheck = RaiseIfZero
    api(hProcess)
# void WINAPI OutputDebugString(
# __in_opt LPCTSTR lpOutputString
# );
def OutputDebugStringA(lpOutputString):
    """Send an ANSI string to the debugger output stream."""
    api = windll.kernel32.OutputDebugStringA
    api.restype  = None
    api.argtypes = [LPSTR]
    api(lpOutputString)
def OutputDebugStringW(lpOutputString):
    """Send a Unicode string to the debugger output stream."""
    api = windll.kernel32.OutputDebugStringW
    api.restype  = None
    api.argtypes = [LPWSTR]
    api(lpOutputString)
OutputDebugString = GuessStringType(OutputDebugStringA, OutputDebugStringW)
# BOOL WINAPI ReadProcessMemory(
# __in HANDLE hProcess,
# __in LPCVOID lpBaseAddress,
# __out LPVOID lpBuffer,
# __in SIZE_T nSize,
# __out SIZE_T* lpNumberOfBytesRead
# );
def ReadProcessMemory(hProcess, lpBaseAddress, nSize):
    """Read up to *nSize* bytes from another process' memory.

    Returns the bytes actually read, which may be fewer than requested:
    a partial copy (ERROR_PARTIAL_COPY) is not treated as an error.
    Raises WindowsError on any other failure.
    """
    _ReadProcessMemory = windll.kernel32.ReadProcessMemory
    _ReadProcessMemory.argtypes = [HANDLE, LPVOID, LPVOID, SIZE_T, POINTER(SIZE_T)]
    _ReadProcessMemory.restype = bool
    lpBuffer = ctypes.create_string_buffer(compat.b(''), nSize)
    lpNumberOfBytesRead = SIZE_T(0)
    success = _ReadProcessMemory(hProcess, lpBaseAddress, lpBuffer, nSize, byref(lpNumberOfBytesRead))
    # ERROR_PARTIAL_COPY only means some pages were unreadable; keep what we got.
    if not success and GetLastError() != ERROR_PARTIAL_COPY:
        raise ctypes.WinError()
    # Truncate the buffer to the byte count actually reported by the kernel.
    return compat.b(lpBuffer.raw)[:lpNumberOfBytesRead.value]
# BOOL WINAPI WriteProcessMemory(
# __in HANDLE hProcess,
# __in LPCVOID lpBaseAddress,
# __in LPVOID lpBuffer,
# __in SIZE_T nSize,
# __out SIZE_T* lpNumberOfBytesWritten
# );
def WriteProcessMemory(hProcess, lpBaseAddress, lpBuffer):
    """Write *lpBuffer* into another process' memory at *lpBaseAddress*.

    Returns the number of bytes actually written; a partial copy
    (ERROR_PARTIAL_COPY) is not treated as an error. Raises WindowsError
    on any other failure.
    """
    _WriteProcessMemory = windll.kernel32.WriteProcessMemory
    _WriteProcessMemory.argtypes = [HANDLE, LPVOID, LPVOID, SIZE_T, POINTER(SIZE_T)]
    _WriteProcessMemory.restype = bool
    # Capture the length before wrapping in a ctypes buffer (which adds a NUL).
    nSize = len(lpBuffer)
    lpBuffer = ctypes.create_string_buffer(lpBuffer)
    lpNumberOfBytesWritten = SIZE_T(0)
    success = _WriteProcessMemory(hProcess, lpBaseAddress, lpBuffer, nSize, byref(lpNumberOfBytesWritten))
    if not success and GetLastError() != ERROR_PARTIAL_COPY:
        raise ctypes.WinError()
    return lpNumberOfBytesWritten.value
# LPVOID WINAPI VirtualAllocEx(
# __in HANDLE hProcess,
# __in_opt LPVOID lpAddress,
# __in SIZE_T dwSize,
# __in DWORD flAllocationType,
# __in DWORD flProtect
# );
def VirtualAllocEx(hProcess, lpAddress = 0, dwSize = 0x1000, flAllocationType = MEM_COMMIT | MEM_RESERVE, flProtect = PAGE_EXECUTE_READWRITE):
    """Allocate memory inside another process; returns the base address."""
    api = windll.kernel32.VirtualAllocEx
    api.restype  = LPVOID
    api.argtypes = [HANDLE, LPVOID, SIZE_T, DWORD, DWORD]
    base = api(hProcess, lpAddress, dwSize, flAllocationType, flProtect)
    if base == NULL:
        raise ctypes.WinError()
    return base
# SIZE_T WINAPI VirtualQueryEx(
# __in HANDLE hProcess,
# __in_opt LPCVOID lpAddress,
# __out PMEMORY_BASIC_INFORMATION lpBuffer,
# __in SIZE_T dwLength
# );
def VirtualQueryEx(hProcess, lpAddress):
    """Query the memory region containing *lpAddress* in another process.

    Returns a MemoryBasicInformation wrapper object.
    """
    api = windll.kernel32.VirtualQueryEx
    api.restype  = SIZE_T
    api.argtypes = [HANDLE, LPVOID, PMEMORY_BASIC_INFORMATION, SIZE_T]
    mbi = MEMORY_BASIC_INFORMATION()
    if api(hProcess, lpAddress, byref(mbi), sizeof(MEMORY_BASIC_INFORMATION)) == 0:
        raise ctypes.WinError()
    return MemoryBasicInformation(mbi)
# BOOL WINAPI VirtualProtectEx(
# __in HANDLE hProcess,
# __in LPVOID lpAddress,
# __in SIZE_T dwSize,
# __in DWORD flNewProtect,
# __out PDWORD lpflOldProtect
# );
def VirtualProtectEx(hProcess, lpAddress, dwSize, flNewProtect = PAGE_EXECUTE_READWRITE):
    """Change page protection in another process; returns the old protection."""
    api = windll.kernel32.VirtualProtectEx
    api.restype  = bool
    api.argtypes = [HANDLE, LPVOID, SIZE_T, DWORD, PDWORD]
    api.errcheck = RaiseIfZero
    previous = DWORD(0)
    api(hProcess, lpAddress, dwSize, flNewProtect, byref(previous))
    return previous.value
# BOOL WINAPI VirtualFreeEx(
# __in HANDLE hProcess,
# __in LPVOID lpAddress,
# __in SIZE_T dwSize,
# __in DWORD dwFreeType
# );
def VirtualFreeEx(hProcess, lpAddress, dwSize = 0, dwFreeType = MEM_RELEASE):
    """Free (or decommit) memory previously allocated in another process."""
    api = windll.kernel32.VirtualFreeEx
    api.restype  = bool
    api.argtypes = [HANDLE, LPVOID, SIZE_T, DWORD]
    api.errcheck = RaiseIfZero
    api(hProcess, lpAddress, dwSize, dwFreeType)
# HANDLE WINAPI CreateRemoteThread(
# __in HANDLE hProcess,
# __in LPSECURITY_ATTRIBUTES lpThreadAttributes,
# __in SIZE_T dwStackSize,
# __in LPTHREAD_START_ROUTINE lpStartAddress,
# __in LPVOID lpParameter,
# __in DWORD dwCreationFlags,
# __out LPDWORD lpThreadId
# );
def CreateRemoteThread(hProcess, lpThreadAttributes, dwStackSize, lpStartAddress, lpParameter, dwCreationFlags):
    """Create a thread inside another process.

    Returns a (ThreadHandle, thread id) tuple.
    """
    api = windll.kernel32.CreateRemoteThread
    api.restype  = HANDLE
    api.argtypes = [HANDLE, LPSECURITY_ATTRIBUTES, SIZE_T, LPVOID, LPVOID, DWORD, LPDWORD]
    # Empty/None security attributes become a NULL pointer.
    lpThreadAttributes = byref(lpThreadAttributes) if lpThreadAttributes else None
    dwThreadId = DWORD(0)
    hThread = api(hProcess, lpThreadAttributes, dwStackSize, lpStartAddress, lpParameter, dwCreationFlags, byref(dwThreadId))
    if not hThread:
        raise ctypes.WinError()
    return ThreadHandle(hThread), dwThreadId.value
#------------------------------------------------------------------------------
# Process API
# BOOL WINAPI CreateProcess(
# __in_opt LPCTSTR lpApplicationName,
# __inout_opt LPTSTR lpCommandLine,
# __in_opt LPSECURITY_ATTRIBUTES lpProcessAttributes,
# __in_opt LPSECURITY_ATTRIBUTES lpThreadAttributes,
# __in BOOL bInheritHandles,
# __in DWORD dwCreationFlags,
# __in_opt LPVOID lpEnvironment,
# __in_opt LPCTSTR lpCurrentDirectory,
# __in LPSTARTUPINFO lpStartupInfo,
# __out LPPROCESS_INFORMATION lpProcessInformation
# );
def CreateProcessA(lpApplicationName, lpCommandLine=None, lpProcessAttributes=None, lpThreadAttributes=None, bInheritHandles=False, dwCreationFlags=0, lpEnvironment=None, lpCurrentDirectory=None, lpStartupInfo=None):
    """Create a new process (ANSI version).

    All optional arguments default to NULL/zero as the Win32 API allows.
    Returns a ProcessInformation object wrapping the PROCESS_INFORMATION
    structure filled in by the kernel. Raises WindowsError on failure.
    """
    _CreateProcessA = windll.kernel32.CreateProcessA
    _CreateProcessA.argtypes = [LPSTR, LPSTR, LPSECURITY_ATTRIBUTES, LPSECURITY_ATTRIBUTES, BOOL, DWORD, LPVOID, LPSTR, LPVOID, LPPROCESS_INFORMATION]
    _CreateProcessA.restype = bool
    _CreateProcessA.errcheck = RaiseIfZero
    # Empty strings / falsy values are converted to NULL pointers.
    if not lpApplicationName:
        lpApplicationName = None
    if not lpCommandLine:
        lpCommandLine = None
    else:
        # Copy into a writable buffer: the API may modify the command line.
        lpCommandLine = ctypes.create_string_buffer(lpCommandLine, max(MAX_PATH, len(lpCommandLine)))
    if not lpEnvironment:
        lpEnvironment = None
    else:
        lpEnvironment = ctypes.create_string_buffer(lpEnvironment)
    if not lpCurrentDirectory:
        lpCurrentDirectory = None
    if not lpProcessAttributes:
        lpProcessAttributes = None
    else:
        lpProcessAttributes = byref(lpProcessAttributes)
    if not lpThreadAttributes:
        lpThreadAttributes = None
    else:
        lpThreadAttributes = byref(lpThreadAttributes)
    if not lpStartupInfo:
        # Supply a zeroed STARTUPINFO when the caller did not give one.
        lpStartupInfo = STARTUPINFO()
        lpStartupInfo.cb = sizeof(STARTUPINFO)
        lpStartupInfo.lpReserved = 0
        lpStartupInfo.lpDesktop = 0
        lpStartupInfo.lpTitle = 0
        lpStartupInfo.dwFlags = 0
        lpStartupInfo.cbReserved2 = 0
        lpStartupInfo.lpReserved2 = 0
    # Pre-fill the output structure with invalid values.
    lpProcessInformation = PROCESS_INFORMATION()
    lpProcessInformation.hProcess = INVALID_HANDLE_VALUE
    lpProcessInformation.hThread = INVALID_HANDLE_VALUE
    lpProcessInformation.dwProcessId = 0
    lpProcessInformation.dwThreadId = 0
    _CreateProcessA(lpApplicationName, lpCommandLine, lpProcessAttributes, lpThreadAttributes, bool(bInheritHandles), dwCreationFlags, lpEnvironment, lpCurrentDirectory, byref(lpStartupInfo), byref(lpProcessInformation))
    return ProcessInformation(lpProcessInformation)
def CreateProcessW(lpApplicationName, lpCommandLine=None, lpProcessAttributes=None, lpThreadAttributes=None, bInheritHandles=False, dwCreationFlags=0, lpEnvironment=None, lpCurrentDirectory=None, lpStartupInfo=None):
    """Create a new process (Unicode version).

    All optional arguments default to NULL/zero as the Win32 API allows.
    Returns a ProcessInformation object wrapping the PROCESS_INFORMATION
    structure filled in by the kernel. Raises WindowsError on failure.
    """
    _CreateProcessW = windll.kernel32.CreateProcessW
    _CreateProcessW.argtypes = [LPWSTR, LPWSTR, LPSECURITY_ATTRIBUTES, LPSECURITY_ATTRIBUTES, BOOL, DWORD, LPVOID, LPWSTR, LPVOID, LPPROCESS_INFORMATION]
    _CreateProcessW.restype = bool
    _CreateProcessW.errcheck = RaiseIfZero
    # Empty strings / falsy values are converted to NULL pointers.
    if not lpApplicationName:
        lpApplicationName = None
    if not lpCommandLine:
        lpCommandLine = None
    else:
        # Copy into a writable buffer: the API may modify the command line.
        lpCommandLine = ctypes.create_unicode_buffer(lpCommandLine, max(MAX_PATH, len(lpCommandLine)))
    if not lpEnvironment:
        lpEnvironment = None
    else:
        lpEnvironment = ctypes.create_unicode_buffer(lpEnvironment)
    if not lpCurrentDirectory:
        lpCurrentDirectory = None
    if not lpProcessAttributes:
        lpProcessAttributes = None
    else:
        lpProcessAttributes = byref(lpProcessAttributes)
    if not lpThreadAttributes:
        lpThreadAttributes = None
    else:
        lpThreadAttributes = byref(lpThreadAttributes)
    if not lpStartupInfo:
        # Supply a zeroed STARTUPINFO when the caller did not give one.
        lpStartupInfo = STARTUPINFO()
        lpStartupInfo.cb = sizeof(STARTUPINFO)
        lpStartupInfo.lpReserved = 0
        lpStartupInfo.lpDesktop = 0
        lpStartupInfo.lpTitle = 0
        lpStartupInfo.dwFlags = 0
        lpStartupInfo.cbReserved2 = 0
        lpStartupInfo.lpReserved2 = 0
    # Pre-fill the output structure with invalid values.
    lpProcessInformation = PROCESS_INFORMATION()
    lpProcessInformation.hProcess = INVALID_HANDLE_VALUE
    lpProcessInformation.hThread = INVALID_HANDLE_VALUE
    lpProcessInformation.dwProcessId = 0
    lpProcessInformation.dwThreadId = 0
    _CreateProcessW(lpApplicationName, lpCommandLine, lpProcessAttributes, lpThreadAttributes, bool(bInheritHandles), dwCreationFlags, lpEnvironment, lpCurrentDirectory, byref(lpStartupInfo), byref(lpProcessInformation))
    return ProcessInformation(lpProcessInformation)
CreateProcess = GuessStringType(CreateProcessA, CreateProcessW)
# BOOL WINAPI InitializeProcThreadAttributeList(
# __out_opt LPPROC_THREAD_ATTRIBUTE_LIST lpAttributeList,
# __in DWORD dwAttributeCount,
# __reserved DWORD dwFlags,
# __inout PSIZE_T lpSize
# );
def InitializeProcThreadAttributeList(dwAttributeCount):
    """Allocate and initialize a PROC_THREAD_ATTRIBUTE_LIST buffer."""
    api = windll.kernel32.InitializeProcThreadAttributeList
    api.restype  = bool
    api.argtypes = [LPPROC_THREAD_ATTRIBUTE_LIST, DWORD, DWORD, PSIZE_T]
    # First call with a NULL list just reports the required buffer size.
    cbSize = SIZE_T(0)
    api(None, dwAttributeCount, 0, byref(cbSize))
    RaiseIfZero(cbSize.value)
    AttributeList = (BYTE * cbSize.value)()
    RaiseIfZero(api(byref(AttributeList), dwAttributeCount, 0, byref(cbSize)))
    return AttributeList
# BOOL WINAPI UpdateProcThreadAttribute(
# __inout LPPROC_THREAD_ATTRIBUTE_LIST lpAttributeList,
# __in DWORD dwFlags,
# __in DWORD_PTR Attribute,
# __in PVOID lpValue,
# __in SIZE_T cbSize,
# __out_opt PVOID lpPreviousValue,
# __in_opt PSIZE_T lpReturnSize
# );
def UpdateProcThreadAttribute(lpAttributeList, Attribute, Value, cbSize = None):
    """Store an attribute value into a PROC_THREAD_ATTRIBUTE_LIST."""
    api = windll.kernel32.UpdateProcThreadAttribute
    api.restype  = bool
    api.argtypes = [LPPROC_THREAD_ATTRIBUTE_LIST, DWORD, DWORD_PTR, PVOID, SIZE_T, PVOID, PSIZE_T]
    api.errcheck = RaiseIfZero
    if cbSize is None:
        # Default to the ctypes size of the value being stored.
        cbSize = sizeof(Value)
    api(byref(lpAttributeList), 0, Attribute, byref(Value), cbSize, None, None)
# VOID WINAPI DeleteProcThreadAttributeList(
# __inout LPPROC_THREAD_ATTRIBUTE_LIST lpAttributeList
# );
def DeleteProcThreadAttributeList(lpAttributeList):
    """Release the resources held by a PROC_THREAD_ATTRIBUTE_LIST."""
    api = windll.kernel32.DeleteProcThreadAttributeList
    api.restype = None
    api(byref(lpAttributeList))
# HANDLE WINAPI OpenProcess(
# __in DWORD dwDesiredAccess,
# __in BOOL bInheritHandle,
# __in DWORD dwProcessId
# );
def OpenProcess(dwDesiredAccess, bInheritHandle, dwProcessId):
    """Open a handle to a running process; returns a ProcessHandle."""
    api = windll.kernel32.OpenProcess
    api.restype  = HANDLE
    api.argtypes = [DWORD, BOOL, DWORD]
    hProcess = api(dwDesiredAccess, bool(bInheritHandle), dwProcessId)
    if hProcess == NULL:
        raise ctypes.WinError()
    return ProcessHandle(hProcess, dwAccess = dwDesiredAccess)
# HANDLE WINAPI OpenThread(
# __in DWORD dwDesiredAccess,
# __in BOOL bInheritHandle,
# __in DWORD dwThreadId
# );
def OpenThread(dwDesiredAccess, bInheritHandle, dwThreadId):
    """Open a thread by TID and wrap the raw handle in a ThreadHandle; raises WinError on failure."""
    _OpenThread = windll.kernel32.OpenThread
    _OpenThread.argtypes = [DWORD, BOOL, DWORD]
    _OpenThread.restype = HANDLE
    hThread = _OpenThread(dwDesiredAccess, bool(bInheritHandle), dwThreadId)
    if hThread == NULL:
        raise ctypes.WinError()
    return ThreadHandle(hThread, dwAccess = dwDesiredAccess)
# DWORD WINAPI SuspendThread(
# __in HANDLE hThread
# );
def SuspendThread(hThread):
    """Suspend a thread and return its previous suspend count; raises WinError on failure."""
    _SuspendThread = windll.kernel32.SuspendThread
    _SuspendThread.argtypes = [HANDLE]
    _SuspendThread.restype = DWORD
    previousCount = _SuspendThread(hThread)
    # The API signals failure with (DWORD) -1, i.e. 0xFFFFFFFF.
    if previousCount == DWORD(-1).value:
        raise ctypes.WinError()
    return previousCount
# DWORD WINAPI ResumeThread(
# __in HANDLE hThread
# );
def ResumeThread(hThread):
    """Decrement a thread's suspend count and return the previous count; raises WinError on failure."""
    _ResumeThread = windll.kernel32.ResumeThread
    _ResumeThread.argtypes = [HANDLE]
    _ResumeThread.restype = DWORD
    previousCount = _ResumeThread(hThread)
    # The API signals failure with (DWORD) -1, i.e. 0xFFFFFFFF.
    if previousCount == DWORD(-1).value:
        raise ctypes.WinError()
    return previousCount
# BOOL WINAPI TerminateThread(
# __inout HANDLE hThread,
# __in DWORD dwExitCode
# );
def TerminateThread(hThread, dwExitCode = 0):
    """Forcibly terminate a thread with the given exit code; raises on failure via errcheck."""
    _TerminateThread = windll.kernel32.TerminateThread
    _TerminateThread.argtypes = [HANDLE, DWORD]
    _TerminateThread.restype = bool
    _TerminateThread.errcheck = RaiseIfZero
    _TerminateThread(hThread, dwExitCode)
# BOOL WINAPI TerminateProcess(
# __inout HANDLE hProcess,
# __in DWORD dwExitCode
# );
def TerminateProcess(hProcess, dwExitCode = 0):
    """Forcibly terminate a process with the given exit code; raises on failure via errcheck."""
    _TerminateProcess = windll.kernel32.TerminateProcess
    _TerminateProcess.argtypes = [HANDLE, DWORD]
    _TerminateProcess.restype = bool
    _TerminateProcess.errcheck = RaiseIfZero
    _TerminateProcess(hProcess, dwExitCode)
# DWORD WINAPI GetCurrentProcessId(void);
def GetCurrentProcessId():
    """Return the process identifier (PID) of the calling process."""
    _fn = windll.kernel32.GetCurrentProcessId
    _fn.restype = DWORD
    _fn.argtypes = []
    return _fn()
# DWORD WINAPI GetCurrentThreadId(void);
def GetCurrentThreadId():
    """Return the thread identifier (TID) of the calling thread."""
    _fn = windll.kernel32.GetCurrentThreadId
    _fn.restype = DWORD
    _fn.argtypes = []
    return _fn()
# DWORD WINAPI GetProcessId(
# __in HANDLE hProcess
# );
def GetProcessId(hProcess):
    """Return the PID for a process handle; raises on failure via errcheck."""
    _GetProcessId = windll.kernel32.GetProcessId
    _GetProcessId.argtypes = [HANDLE]
    _GetProcessId.restype = DWORD
    _GetProcessId.errcheck = RaiseIfZero
    return _GetProcessId(hProcess)
# DWORD WINAPI GetThreadId(
# __in HANDLE hThread
# );
def GetThreadId(hThread):
    """Return the TID for a thread handle; raises WinError on failure.

    Fix: the original looked up ``windll.kernel32._GetThreadId`` (leading
    underscore) — kernel32 exports no such symbol, so every call raised
    AttributeError instead of reaching the API.
    """
    _GetThreadId = windll.kernel32.GetThreadId
    _GetThreadId.argtypes = [HANDLE]
    _GetThreadId.restype = DWORD
    dwThreadId = _GetThreadId(hThread)
    if dwThreadId == 0:
        raise ctypes.WinError()
    return dwThreadId
# DWORD WINAPI GetProcessIdOfThread(
# __in HANDLE hThread
# );
def GetProcessIdOfThread(hThread):
    """Return the PID of the process owning the given thread handle; raises WinError on failure."""
    _GetProcessIdOfThread = windll.kernel32.GetProcessIdOfThread
    _GetProcessIdOfThread.argtypes = [HANDLE]
    _GetProcessIdOfThread.restype = DWORD
    dwProcessId = _GetProcessIdOfThread(hThread)
    if dwProcessId == 0:
        raise ctypes.WinError()
    return dwProcessId
# BOOL WINAPI GetExitCodeProcess(
# __in HANDLE hProcess,
# __out LPDWORD lpExitCode
# );
def GetExitCodeProcess(hProcess):
    """Return the exit code of the given process; raises on failure via errcheck.

    Fix: declare both parameters in argtypes — the original omitted the
    output pointer, matching neither the API prototype nor the other
    two-argument wrappers in this module (e.g. GetProcessHandleCount).
    """
    _GetExitCodeProcess = windll.kernel32.GetExitCodeProcess
    _GetExitCodeProcess.argtypes = [HANDLE, PDWORD]
    _GetExitCodeProcess.restype = bool
    _GetExitCodeProcess.errcheck = RaiseIfZero
    lpExitCode = DWORD(0)
    _GetExitCodeProcess(hProcess, byref(lpExitCode))
    return lpExitCode.value
# BOOL WINAPI GetExitCodeThread(
# __in HANDLE hThread,
# __out LPDWORD lpExitCode
# );
def GetExitCodeThread(hThread):
    """Return the exit code of the given thread; raises on failure via errcheck.

    Fix: declare both parameters in argtypes — the original omitted the
    output pointer, matching neither the API prototype nor the other
    two-argument wrappers in this module.
    """
    _GetExitCodeThread = windll.kernel32.GetExitCodeThread
    _GetExitCodeThread.argtypes = [HANDLE, PDWORD]
    _GetExitCodeThread.restype = bool
    _GetExitCodeThread.errcheck = RaiseIfZero
    lpExitCode = DWORD(0)
    _GetExitCodeThread(hThread, byref(lpExitCode))
    return lpExitCode.value
# DWORD WINAPI GetProcessVersion(
# __in DWORD ProcessId
# );
def GetProcessVersion(ProcessId):
    """Return the Windows version the given process expects to run on; raises WinError on failure."""
    _GetProcessVersion = windll.kernel32.GetProcessVersion
    _GetProcessVersion.argtypes = [DWORD]
    _GetProcessVersion.restype = DWORD
    retval = _GetProcessVersion(ProcessId)
    if retval == 0:
        raise ctypes.WinError()
    return retval
# DWORD WINAPI GetPriorityClass(
# __in HANDLE hProcess
# );
def GetPriorityClass(hProcess):
    """Return the priority class of the given process; raises WinError on failure."""
    _GetPriorityClass = windll.kernel32.GetPriorityClass
    _GetPriorityClass.argtypes = [HANDLE]
    _GetPriorityClass.restype = DWORD
    retval = _GetPriorityClass(hProcess)
    if retval == 0:
        raise ctypes.WinError()
    return retval
# BOOL WINAPI SetPriorityClass(
# __in HANDLE hProcess,
# __in DWORD dwPriorityClass
# );
def SetPriorityClass(hProcess, dwPriorityClass = NORMAL_PRIORITY_CLASS):
    """Set the priority class of the given process; raises on failure via errcheck."""
    _SetPriorityClass = windll.kernel32.SetPriorityClass
    _SetPriorityClass.argtypes = [HANDLE, DWORD]
    _SetPriorityClass.restype = bool
    _SetPriorityClass.errcheck = RaiseIfZero
    _SetPriorityClass(hProcess, dwPriorityClass)
# BOOL WINAPI GetProcessPriorityBoost(
# __in HANDLE hProcess,
# __out PBOOL pDisablePriorityBoost
# );
def GetProcessPriorityBoost(hProcess):
    """Return True if dynamic priority boosting is DISABLED for the process."""
    _GetProcessPriorityBoost = windll.kernel32.GetProcessPriorityBoost
    _GetProcessPriorityBoost.argtypes = [HANDLE, PBOOL]
    _GetProcessPriorityBoost.restype = bool
    _GetProcessPriorityBoost.errcheck = RaiseIfZero
    pDisablePriorityBoost = BOOL(False)
    _GetProcessPriorityBoost(hProcess, byref(pDisablePriorityBoost))
    return bool(pDisablePriorityBoost.value)
# BOOL WINAPI SetProcessPriorityBoost(
# __in HANDLE hProcess,
# __in BOOL DisablePriorityBoost
# );
def SetProcessPriorityBoost(hProcess, DisablePriorityBoost):
    """Enable or disable dynamic priority boosting for the process; raises on failure."""
    _SetProcessPriorityBoost = windll.kernel32.SetProcessPriorityBoost
    _SetProcessPriorityBoost.argtypes = [HANDLE, BOOL]
    _SetProcessPriorityBoost.restype = bool
    _SetProcessPriorityBoost.errcheck = RaiseIfZero
    _SetProcessPriorityBoost(hProcess, bool(DisablePriorityBoost))
# BOOL WINAPI GetProcessAffinityMask(
# __in HANDLE hProcess,
# __out PDWORD_PTR lpProcessAffinityMask,
# __out PDWORD_PTR lpSystemAffinityMask
# );
def GetProcessAffinityMask(hProcess):
    """Return a (process_mask, system_mask) tuple of CPU affinity bitmasks."""
    _GetProcessAffinityMask = windll.kernel32.GetProcessAffinityMask
    _GetProcessAffinityMask.argtypes = [HANDLE, PDWORD_PTR, PDWORD_PTR]
    _GetProcessAffinityMask.restype = bool
    _GetProcessAffinityMask.errcheck = RaiseIfZero
    lpProcessAffinityMask = DWORD_PTR(0)
    lpSystemAffinityMask = DWORD_PTR(0)
    _GetProcessAffinityMask(hProcess, byref(lpProcessAffinityMask), byref(lpSystemAffinityMask))
    return lpProcessAffinityMask.value, lpSystemAffinityMask.value
# BOOL WINAPI SetProcessAffinityMask(
# __in HANDLE hProcess,
# __in DWORD_PTR dwProcessAffinityMask
# );
def SetProcessAffinityMask(hProcess, dwProcessAffinityMask):
    """Set the CPU affinity bitmask for the process; raises on failure via errcheck."""
    _SetProcessAffinityMask = windll.kernel32.SetProcessAffinityMask
    _SetProcessAffinityMask.argtypes = [HANDLE, DWORD_PTR]
    _SetProcessAffinityMask.restype = bool
    _SetProcessAffinityMask.errcheck = RaiseIfZero
    _SetProcessAffinityMask(hProcess, dwProcessAffinityMask)
#------------------------------------------------------------------------------
# Toolhelp32 API
# HANDLE WINAPI CreateToolhelp32Snapshot(
# __in DWORD dwFlags,
# __in DWORD th32ProcessID
# );
def CreateToolhelp32Snapshot(dwFlags = TH32CS_SNAPALL, th32ProcessID = 0):
    """Take a Toolhelp32 snapshot and wrap the handle in a SnapshotHandle; raises WinError on failure."""
    _CreateToolhelp32Snapshot = windll.kernel32.CreateToolhelp32Snapshot
    _CreateToolhelp32Snapshot.argtypes = [DWORD, DWORD]
    _CreateToolhelp32Snapshot.restype = HANDLE
    hSnapshot = _CreateToolhelp32Snapshot(dwFlags, th32ProcessID)
    # This API signals failure with INVALID_HANDLE_VALUE, not NULL.
    if hSnapshot == INVALID_HANDLE_VALUE:
        raise ctypes.WinError()
    return SnapshotHandle(hSnapshot)
# BOOL WINAPI Process32First(
# __in HANDLE hSnapshot,
# __inout LPPROCESSENTRY32 lppe
# );
def Process32First(hSnapshot):
    """Return the first PROCESSENTRY32 in the snapshot, or None if the list is empty."""
    _Process32First = windll.kernel32.Process32First
    _Process32First.argtypes = [HANDLE, LPPROCESSENTRY32]
    _Process32First.restype = bool
    pe = PROCESSENTRY32()
    pe.dwSize = sizeof(PROCESSENTRY32)
    success = _Process32First(hSnapshot, byref(pe))
    if not success:
        # ERROR_NO_MORE_FILES means end of enumeration, not an error.
        if GetLastError() == ERROR_NO_MORE_FILES:
            return None
        raise ctypes.WinError()
    return pe
# BOOL WINAPI Process32Next(
# __in HANDLE hSnapshot,
# __out LPPROCESSENTRY32 lppe
# );
def Process32Next(hSnapshot, pe = None):
    """Return the next PROCESSENTRY32 in the snapshot, or None at the end of the list."""
    _Process32Next = windll.kernel32.Process32Next
    _Process32Next.argtypes = [HANDLE, LPPROCESSENTRY32]
    _Process32Next.restype = bool
    # A fresh entry struct is allocated if the caller didn't pass one in.
    if pe is None:
        pe = PROCESSENTRY32()
    pe.dwSize = sizeof(PROCESSENTRY32)
    success = _Process32Next(hSnapshot, byref(pe))
    if not success:
        if GetLastError() == ERROR_NO_MORE_FILES:
            return None
        raise ctypes.WinError()
    return pe
# BOOL WINAPI Thread32First(
# __in HANDLE hSnapshot,
# __inout LPTHREADENTRY32 lpte
# );
def Thread32First(hSnapshot):
    """Return the first THREADENTRY32 in the snapshot, or None if the list is empty."""
    _Thread32First = windll.kernel32.Thread32First
    _Thread32First.argtypes = [HANDLE, LPTHREADENTRY32]
    _Thread32First.restype = bool
    te = THREADENTRY32()
    te.dwSize = sizeof(THREADENTRY32)
    success = _Thread32First(hSnapshot, byref(te))
    if not success:
        if GetLastError() == ERROR_NO_MORE_FILES:
            return None
        raise ctypes.WinError()
    return te
# BOOL WINAPI Thread32Next(
# __in HANDLE hSnapshot,
# __out LPTHREADENTRY32 lpte
# );
def Thread32Next(hSnapshot, te = None):
    """Return the next THREADENTRY32 in the snapshot, or None at the end of the list."""
    _Thread32Next = windll.kernel32.Thread32Next
    _Thread32Next.argtypes = [HANDLE, LPTHREADENTRY32]
    _Thread32Next.restype = bool
    if te is None:
        te = THREADENTRY32()
    te.dwSize = sizeof(THREADENTRY32)
    success = _Thread32Next(hSnapshot, byref(te))
    if not success:
        if GetLastError() == ERROR_NO_MORE_FILES:
            return None
        raise ctypes.WinError()
    return te
# BOOL WINAPI Module32First(
# __in HANDLE hSnapshot,
# __inout LPMODULEENTRY32 lpme
# );
def Module32First(hSnapshot):
    """Return the first MODULEENTRY32 in the snapshot, or None if the list is empty."""
    _Module32First = windll.kernel32.Module32First
    _Module32First.argtypes = [HANDLE, LPMODULEENTRY32]
    _Module32First.restype = bool
    me = MODULEENTRY32()
    me.dwSize = sizeof(MODULEENTRY32)
    success = _Module32First(hSnapshot, byref(me))
    if not success:
        if GetLastError() == ERROR_NO_MORE_FILES:
            return None
        raise ctypes.WinError()
    return me
# BOOL WINAPI Module32Next(
# __in HANDLE hSnapshot,
# __out LPMODULEENTRY32 lpme
# );
def Module32Next(hSnapshot, me = None):
    """Return the next MODULEENTRY32 in the snapshot, or None at the end of the list."""
    _Module32Next = windll.kernel32.Module32Next
    _Module32Next.argtypes = [HANDLE, LPMODULEENTRY32]
    _Module32Next.restype = bool
    if me is None:
        me = MODULEENTRY32()
    me.dwSize = sizeof(MODULEENTRY32)
    success = _Module32Next(hSnapshot, byref(me))
    if not success:
        if GetLastError() == ERROR_NO_MORE_FILES:
            return None
        raise ctypes.WinError()
    return me
# BOOL WINAPI Heap32First(
# __inout LPHEAPENTRY32 lphe,
# __in DWORD th32ProcessID,
# __in ULONG_PTR th32HeapID
# );
def Heap32First(th32ProcessID, th32HeapID):
    """Return the first HEAPENTRY32 of the given heap, or None if the heap is empty."""
    _Heap32First = windll.kernel32.Heap32First
    _Heap32First.argtypes = [LPHEAPENTRY32, DWORD, ULONG_PTR]
    _Heap32First.restype = bool
    he = HEAPENTRY32()
    he.dwSize = sizeof(HEAPENTRY32)
    success = _Heap32First(byref(he), th32ProcessID, th32HeapID)
    if not success:
        if GetLastError() == ERROR_NO_MORE_FILES:
            return None
        raise ctypes.WinError()
    return he
# BOOL WINAPI Heap32Next(
# __out LPHEAPENTRY32 lphe
# );
def Heap32Next(he):
    """Advance the caller's HEAPENTRY32 in place and return it, or None at the end of the heap."""
    _Heap32Next = windll.kernel32.Heap32Next
    _Heap32Next.argtypes = [LPHEAPENTRY32]
    _Heap32Next.restype = bool
    # dwSize must be reset before each call per the Toolhelp32 contract.
    he.dwSize = sizeof(HEAPENTRY32)
    success = _Heap32Next(byref(he))
    if not success:
        if GetLastError() == ERROR_NO_MORE_FILES:
            return None
        raise ctypes.WinError()
    return he
# BOOL WINAPI Heap32ListFirst(
# __in HANDLE hSnapshot,
# __inout LPHEAPLIST32 lphl
# );
def Heap32ListFirst(hSnapshot):
    """Return the first HEAPLIST32 in the snapshot, or None if the list is empty."""
    _Heap32ListFirst = windll.kernel32.Heap32ListFirst
    _Heap32ListFirst.argtypes = [HANDLE, LPHEAPLIST32]
    _Heap32ListFirst.restype = bool
    hl = HEAPLIST32()
    hl.dwSize = sizeof(HEAPLIST32)
    success = _Heap32ListFirst(hSnapshot, byref(hl))
    if not success:
        if GetLastError() == ERROR_NO_MORE_FILES:
            return None
        raise ctypes.WinError()
    return hl
# BOOL WINAPI Heap32ListNext(
# __in HANDLE hSnapshot,
# __out LPHEAPLIST32 lphl
# );
def Heap32ListNext(hSnapshot, hl = None):
    """Return the next HEAPLIST32 in the snapshot, or None at the end of the list."""
    _Heap32ListNext = windll.kernel32.Heap32ListNext
    _Heap32ListNext.argtypes = [HANDLE, LPHEAPLIST32]
    _Heap32ListNext.restype = bool
    if hl is None:
        hl = HEAPLIST32()
    hl.dwSize = sizeof(HEAPLIST32)
    success = _Heap32ListNext(hSnapshot, byref(hl))
    if not success:
        if GetLastError() == ERROR_NO_MORE_FILES:
            return None
        raise ctypes.WinError()
    return hl
# BOOL WINAPI Toolhelp32ReadProcessMemory(
# __in DWORD th32ProcessID,
# __in LPCVOID lpBaseAddress,
# __out LPVOID lpBuffer,
# __in SIZE_T cbRead,
# __out SIZE_T lpNumberOfBytesRead
# );
def Toolhelp32ReadProcessMemory(th32ProcessID, lpBaseAddress, cbRead):
    """Read up to cbRead bytes from another process's memory; returns the bytes actually read.

    NOTE(review): ``create_string_buffer('', cbRead)`` and ``str(lpBuffer.raw)``
    are Python 2 idioms; on Python 3 the buffer seed should be ``b''`` and the
    result should stay ``bytes`` — confirm the targeted Python version.
    """
    _Toolhelp32ReadProcessMemory = windll.kernel32.Toolhelp32ReadProcessMemory
    _Toolhelp32ReadProcessMemory.argtypes = [DWORD, LPVOID, LPVOID, SIZE_T, POINTER(SIZE_T)]
    _Toolhelp32ReadProcessMemory.restype = bool
    lpBuffer = ctypes.create_string_buffer('', cbRead)
    lpNumberOfBytesRead = SIZE_T(0)
    success = _Toolhelp32ReadProcessMemory(th32ProcessID, lpBaseAddress, lpBuffer, cbRead, byref(lpNumberOfBytesRead))
    # A partial read is tolerated: the slice below returns only what was read.
    if not success and GetLastError() != ERROR_PARTIAL_COPY:
        raise ctypes.WinError()
    return str(lpBuffer.raw)[:lpNumberOfBytesRead.value]
#------------------------------------------------------------------------------
# Miscellaneous system information
# BOOL WINAPI GetProcessDEPPolicy(
# __in HANDLE hProcess,
# __out LPDWORD lpFlags,
# __out PBOOL lpPermanent
# );
# Contribution by ivanlef0u (http://ivanlef0u.fr/)
# XP SP3 and > only
def GetProcessDEPPolicy(hProcess):
    """Return the (flags, permanent) DEP policy pair for the given process."""
    _GetProcessDEPPolicy = windll.kernel32.GetProcessDEPPolicy
    _GetProcessDEPPolicy.argtypes = [HANDLE, LPDWORD, PBOOL]
    _GetProcessDEPPolicy.restype = bool
    _GetProcessDEPPolicy.errcheck = RaiseIfZero
    lpFlags = DWORD(0)
    lpPermanent = BOOL(0)
    _GetProcessDEPPolicy(hProcess, byref(lpFlags), byref(lpPermanent))
    return (lpFlags.value, lpPermanent.value)
# DWORD WINAPI GetCurrentProcessorNumber(void);
def GetCurrentProcessorNumber():
    """Return the number of the processor the calling thread is currently running on.

    Fix: removed the RaiseIfZero errcheck.  This API has no failure return
    and 0 is a perfectly valid processor number, so the original raised a
    spurious WinError whenever the thread happened to run on CPU 0.
    """
    _GetCurrentProcessorNumber = windll.kernel32.GetCurrentProcessorNumber
    _GetCurrentProcessorNumber.argtypes = []
    _GetCurrentProcessorNumber.restype = DWORD
    return _GetCurrentProcessorNumber()
# VOID WINAPI FlushProcessWriteBuffers(void);
def FlushProcessWriteBuffers():
    """Flush the write queue of every processor running a thread of the current process."""
    _FlushProcessWriteBuffers = windll.kernel32.FlushProcessWriteBuffers
    _FlushProcessWriteBuffers.argtypes = []
    _FlushProcessWriteBuffers.restype = None
    _FlushProcessWriteBuffers()
# BOOL WINAPI GetLogicalProcessorInformation(
# __out PSYSTEM_LOGICAL_PROCESSOR_INFORMATION Buffer,
# __inout PDWORD ReturnLength
# );
# TO DO http://msdn.microsoft.com/en-us/library/ms683194(VS.85).aspx
# BOOL WINAPI GetProcessIoCounters(
# __in HANDLE hProcess,
# __out PIO_COUNTERS lpIoCounters
# );
# TO DO http://msdn.microsoft.com/en-us/library/ms683218(VS.85).aspx
# DWORD WINAPI GetGuiResources(
# __in HANDLE hProcess,
# __in DWORD uiFlags
# );
def GetGuiResources(hProcess, uiFlags = GR_GDIOBJECTS):
    """Return the count of GDI or USER object handles used by a process.

    Fix: GetGuiResources is exported by user32.dll, not kernel32.dll, so the
    original ``windll.kernel32.GetGuiResources`` lookup raised AttributeError.
    """
    _GetGuiResources = windll.user32.GetGuiResources
    _GetGuiResources.argtypes = [HANDLE, DWORD]
    _GetGuiResources.restype = DWORD
    dwCount = _GetGuiResources(hProcess, uiFlags)
    # A zero count can be legitimate; only raise when GetLastError says so.
    if dwCount == 0:
        errcode = GetLastError()
        if errcode != ERROR_SUCCESS:
            raise ctypes.WinError(errcode)
    return dwCount
# BOOL WINAPI GetProcessHandleCount(
# __in HANDLE hProcess,
# __inout PDWORD pdwHandleCount
# );
def GetProcessHandleCount(hProcess):
    """Return the number of open handles in the given process; raises on failure via errcheck."""
    _GetProcessHandleCount = windll.kernel32.GetProcessHandleCount
    _GetProcessHandleCount.argtypes = [HANDLE, PDWORD]
    # NOTE(review): the API actually returns BOOL; DWORD still works with
    # RaiseIfZero since any nonzero value passes — confirm intent.
    _GetProcessHandleCount.restype = DWORD
    _GetProcessHandleCount.errcheck = RaiseIfZero
    pdwHandleCount = DWORD(0)
    _GetProcessHandleCount(hProcess, byref(pdwHandleCount))
    return pdwHandleCount.value
# BOOL WINAPI GetProcessTimes(
# __in HANDLE hProcess,
# __out LPFILETIME lpCreationTime,
# __out LPFILETIME lpExitTime,
# __out LPFILETIME lpKernelTime,
# __out LPFILETIME lpUserTime
# );
def GetProcessTimes(hProcess = None):
    """Return (CreationTime, ExitTime, KernelTime, UserTime) as raw FILETIME structs.

    Defaults to the current process when no handle is given.
    """
    _GetProcessTimes = windll.kernel32.GetProcessTimes
    _GetProcessTimes.argtypes = [HANDLE, LPFILETIME, LPFILETIME, LPFILETIME, LPFILETIME]
    _GetProcessTimes.restype = bool
    _GetProcessTimes.errcheck = RaiseIfZero
    if hProcess is None:
        hProcess = GetCurrentProcess()
    CreationTime = FILETIME()
    ExitTime = FILETIME()
    KernelTime = FILETIME()
    UserTime = FILETIME()
    _GetProcessTimes(hProcess, byref(CreationTime), byref(ExitTime), byref(KernelTime), byref(UserTime))
    return (CreationTime, ExitTime, KernelTime, UserTime)
# BOOL WINAPI FileTimeToSystemTime(
# __in const FILETIME *lpFileTime,
# __out LPSYSTEMTIME lpSystemTime
# );
def FileTimeToSystemTime(lpFileTime):
    """Convert a FILETIME (struct or plain 64-bit integer) to a SYSTEMTIME struct."""
    _FileTimeToSystemTime = windll.kernel32.FileTimeToSystemTime
    _FileTimeToSystemTime.argtypes = [LPFILETIME, LPSYSTEMTIME]
    _FileTimeToSystemTime.restype = bool
    _FileTimeToSystemTime.errcheck = RaiseIfZero
    if isinstance(lpFileTime, FILETIME):
        FileTime = lpFileTime
    else:
        # Accept an integer: split the 64-bit value into the two DWORD halves.
        FileTime = FILETIME()
        FileTime.dwLowDateTime = lpFileTime & 0xFFFFFFFF
        FileTime.dwHighDateTime = lpFileTime >> 32
    SystemTime = SYSTEMTIME()
    _FileTimeToSystemTime(byref(FileTime), byref(SystemTime))
    return SystemTime
# void WINAPI GetSystemTimeAsFileTime(
# __out LPFILETIME lpSystemTimeAsFileTime
# );
def GetSystemTimeAsFileTime():
    """Return the current system time (UTC) as a FILETIME structure."""
    _fn = windll.kernel32.GetSystemTimeAsFileTime
    _fn.restype = None
    _fn.argtypes = [LPFILETIME]
    result = FILETIME()
    _fn(byref(result))
    return result
#------------------------------------------------------------------------------
# Global ATOM API
# ATOM GlobalAddAtom(
# __in LPCTSTR lpString
# );
def GlobalAddAtomA(lpString):
    """Add a string to the global atom table (ANSI variant); returns the new ATOM."""
    _GlobalAddAtomA = windll.kernel32.GlobalAddAtomA
    _GlobalAddAtomA.argtypes = [LPSTR]
    _GlobalAddAtomA.restype = ATOM
    _GlobalAddAtomA.errcheck = RaiseIfZero
    return _GlobalAddAtomA(lpString)
def GlobalAddAtomW(lpString):
    """Add a string to the global atom table (Unicode variant); returns the new ATOM."""
    _GlobalAddAtomW = windll.kernel32.GlobalAddAtomW
    _GlobalAddAtomW.argtypes = [LPWSTR]
    _GlobalAddAtomW.restype = ATOM
    _GlobalAddAtomW.errcheck = RaiseIfZero
    return _GlobalAddAtomW(lpString)
# Dispatch to the A or W variant based on the argument's string type.
GlobalAddAtom = GuessStringType(GlobalAddAtomA, GlobalAddAtomW)
# ATOM GlobalFindAtom(
# __in LPCTSTR lpString
# );
def GlobalFindAtomA(lpString):
    """Look up a string in the global atom table (ANSI variant); raises if not found."""
    _GlobalFindAtomA = windll.kernel32.GlobalFindAtomA
    _GlobalFindAtomA.argtypes = [LPSTR]
    _GlobalFindAtomA.restype = ATOM
    _GlobalFindAtomA.errcheck = RaiseIfZero
    return _GlobalFindAtomA(lpString)
def GlobalFindAtomW(lpString):
    """Look up a string in the global atom table (Unicode variant); raises if not found."""
    _GlobalFindAtomW = windll.kernel32.GlobalFindAtomW
    _GlobalFindAtomW.argtypes = [LPWSTR]
    _GlobalFindAtomW.restype = ATOM
    _GlobalFindAtomW.errcheck = RaiseIfZero
    return _GlobalFindAtomW(lpString)
# Dispatch to the A or W variant based on the argument's string type.
GlobalFindAtom = GuessStringType(GlobalFindAtomA, GlobalFindAtomW)
# UINT GlobalGetAtomName(
# __in ATOM nAtom,
# __out LPTSTR lpBuffer,
# __in int nSize
# );
def GlobalGetAtomNameA(nAtom):
    """Return the string for a global atom (ANSI variant), growing the buffer as needed."""
    _GlobalGetAtomNameA = windll.kernel32.GlobalGetAtomNameA
    _GlobalGetAtomNameA.argtypes = [ATOM, LPSTR, ctypes.c_int]
    _GlobalGetAtomNameA.restype = UINT
    _GlobalGetAtomNameA.errcheck = RaiseIfZero
    nSize = 64
    while 1:
        lpBuffer = ctypes.create_string_buffer("", nSize)
        nCopied = _GlobalGetAtomNameA(nAtom, lpBuffer, nSize)
        # A count that filled the buffer may mean truncation: retry larger.
        if nCopied < nSize - 1:
            break
        nSize = nSize + 64
    return lpBuffer.value
def GlobalGetAtomNameW(nAtom):
    """Return the string for a global atom (Unicode variant), growing the buffer as needed."""
    _GlobalGetAtomNameW = windll.kernel32.GlobalGetAtomNameW
    _GlobalGetAtomNameW.argtypes = [ATOM, LPWSTR, ctypes.c_int]
    _GlobalGetAtomNameW.restype = UINT
    _GlobalGetAtomNameW.errcheck = RaiseIfZero
    nSize = 64
    while 1:
        lpBuffer = ctypes.create_unicode_buffer(u"", nSize)
        nCopied = _GlobalGetAtomNameW(nAtom, lpBuffer, nSize)
        # A count that filled the buffer may mean truncation: retry larger.
        if nCopied < nSize - 1:
            break
        nSize = nSize + 64
    return lpBuffer.value
# Dispatch to the A or W variant based on the argument's string type.
GlobalGetAtomName = GuessStringType(GlobalGetAtomNameA, GlobalGetAtomNameW)
# ATOM GlobalDeleteAtom(
# __in ATOM nAtom
# );
def GlobalDeleteAtom(nAtom):
    """Delete a global atom; raises WinError on failure.

    Because the return value of GlobalDeleteAtom is ambiguous, success is
    detected by clearing the thread error state first and checking
    GetLastError afterwards, as documented by MSDN.

    Fix: the original had bare ``_GlobalDeleteAtom.argtypes`` /
    ``.restype`` attribute accesses with no assignment, so the ctypes
    prototype was never actually set.
    """
    _GlobalDeleteAtom = windll.kernel32.GlobalDeleteAtom
    _GlobalDeleteAtom.argtypes = [ATOM]
    _GlobalDeleteAtom.restype = ATOM
    SetLastError(ERROR_SUCCESS)
    _GlobalDeleteAtom(nAtom)
    error = GetLastError()
    if error != ERROR_SUCCESS:
        raise ctypes.WinError(error)
#------------------------------------------------------------------------------
# Wow64
# DWORD WINAPI Wow64SuspendThread(
# _In_ HANDLE hThread
# );
def Wow64SuspendThread(hThread):
    """Suspend a WOW64 thread and return its previous suspend count; raises WinError on failure."""
    _Wow64SuspendThread = windll.kernel32.Wow64SuspendThread
    _Wow64SuspendThread.argtypes = [HANDLE]
    _Wow64SuspendThread.restype = DWORD
    previousCount = _Wow64SuspendThread(hThread)
    # The API signals failure with (DWORD) -1, i.e. 0xFFFFFFFF.
    if previousCount == DWORD(-1).value:
        raise ctypes.WinError()
    return previousCount
# BOOLEAN WINAPI Wow64EnableWow64FsRedirection(
# __in BOOLEAN Wow64FsEnableRedirection
# );
def Wow64EnableWow64FsRedirection(Wow64FsEnableRedirection):
    """
    This function may not work reliably when there are nested calls. Therefore,
    this function has been replaced by the L{Wow64DisableWow64FsRedirection}
    and L{Wow64RevertWow64FsRedirection} functions.
    @see: U{http://msdn.microsoft.com/en-us/library/windows/desktop/aa365744(v=vs.85).aspx}
    """
    _Wow64EnableWow64FsRedirection = windll.kernel32.Wow64EnableWow64FsRedirection
    _Wow64EnableWow64FsRedirection.argtypes = [BOOLEAN]
    _Wow64EnableWow64FsRedirection.restype = BOOLEAN
    _Wow64EnableWow64FsRedirection.errcheck = RaiseIfZero
    # Fix: the original set up the prototype but never invoked the API,
    # making the wrapper a silent no-op.
    return _Wow64EnableWow64FsRedirection(Wow64FsEnableRedirection)
# BOOL WINAPI Wow64DisableWow64FsRedirection(
# __out PVOID *OldValue
# );
def Wow64DisableWow64FsRedirection():
    """Disable WOW64 filesystem redirection; returns the opaque token for Wow64RevertWow64FsRedirection."""
    _Wow64DisableWow64FsRedirection = windll.kernel32.Wow64DisableWow64FsRedirection
    _Wow64DisableWow64FsRedirection.argtypes = [PPVOID]
    _Wow64DisableWow64FsRedirection.restype = BOOL
    _Wow64DisableWow64FsRedirection.errcheck = RaiseIfZero
    OldValue = PVOID(None)
    _Wow64DisableWow64FsRedirection(byref(OldValue))
    return OldValue
# BOOL WINAPI Wow64RevertWow64FsRedirection(
# __in PVOID OldValue
# );
def Wow64RevertWow64FsRedirection(OldValue):
    """Restore WOW64 filesystem redirection using the token from Wow64DisableWow64FsRedirection."""
    _Wow64RevertWow64FsRedirection = windll.kernel32.Wow64RevertWow64FsRedirection
    _Wow64RevertWow64FsRedirection.argtypes = [PVOID]
    _Wow64RevertWow64FsRedirection.restype = BOOL
    _Wow64RevertWow64FsRedirection.errcheck = RaiseIfZero
    _Wow64RevertWow64FsRedirection(OldValue)
#==============================================================================
# This calculates the list of exported symbols.
# NOTE(review): this assumes a baseline ``_all`` snapshot of vars() was taken
# near the top of the file, so the difference is everything defined since —
# confirm against the file header (not visible here).
_all = set(vars().keys()).difference(_all)
__all__ = [_x for _x in _all if not _x.startswith('_')]
__all__.sort()
#==============================================================================
#==============================================================================
# Mark functions that Psyco cannot compile.
# In your programs, don't use psyco.full().
# Call psyco.bind() on your main function instead.
try:
    import psyco
    psyco.cannotcompile(WaitForDebugEvent)
    psyco.cannotcompile(WaitForSingleObject)
    psyco.cannotcompile(WaitForSingleObjectEx)
    psyco.cannotcompile(WaitForMultipleObjects)
    psyco.cannotcompile(WaitForMultipleObjectsEx)
except ImportError:
    # Psyco is optional; silently skip when it's not installed.
    pass
#==============================================================================
| apache-2.0 |
zarthur/restful-todo | tests/test_integration.py | 1 | 1726 | import re
import threading
import unittest
from selenium import webdriver
from app import create_app, db
from models import Todo
class IntegrationTestCase(unittest.TestCase):
    """Browser-driven integration tests for the todo app.

    All tests are skipped when Firefox / geckodriver is unavailable.
    """

    # Selenium webdriver instance, or None when Firefox could not be started.
    client = None

    @classmethod
    def setUpClass(cls):
        # start Firefox
        try:
            cls.client = webdriver.Firefox()
        except Exception:
            # Fix: narrowed from a bare ``except:`` so KeyboardInterrupt /
            # SystemExit are no longer swallowed; webdriver startup errors
            # are Exception subclasses and still mean "skip the suite".
            pass
        if cls.client:
            cls.app = create_app('testing')
            cls.app_context = cls.app.app_context()
            cls.app_context.push()
            db.drop_all()
            db.create_all()
            todo = Todo(title='title1', body='body1')
            db.session.add(todo)
            db.session.commit()
            # NOTE(review): the server thread is never joined or shut down;
            # it dies with the test process. Confirm this is acceptable.
            threading.Thread(target=cls.app.run).start()

    @classmethod
    def tearDownClass(cls):
        if cls.client:
            cls.client.close()
            db.drop_all()
            db.session.remove()
            cls.app_context.pop()

    def setUp(self):
        # Skip each test individually when no browser was started.
        if not self.client:
            self.skipTest('Web browser not available')

    def tearDown(self):
        pass

    def test_home_page(self):
        """The index page renders and mentions 'RESTful'."""
        self.client.get('http://localhost:5000/')
        self.assertTrue(re.search('RESTful', self.client.page_source))

    def test_new_page(self):
        """A todo created through the form shows up in the list."""
        self.client.get('http://localhost:5000/')
        self.client.find_element_by_link_text('New Todo').click()
        self.assertTrue('Back to list' in self.client.page_source)
        self.client.find_element_by_name('title').send_keys('SelTitle')
        self.client.find_element_by_name('body').send_keys('selenium body')
        self.client.find_element_by_name('submit').click()
        self.assertTrue(re.search('SelTitle', self.client.page_source))
dimarkov/seaborn | seaborn/tests/test_axisgrid.py | 11 | 42805 | import warnings
import numpy as np
import pandas as pd
from scipy import stats
import matplotlib as mpl
import matplotlib.pyplot as plt
from distutils.version import LooseVersion
import nose.tools as nt
import numpy.testing as npt
from numpy.testing.decorators import skipif
import pandas.util.testing as tm
from .. import axisgrid as ag
from .. import rcmod
from ..palettes import color_palette
from ..distributions import kdeplot
from ..categorical import pointplot
from ..linearmodels import pairplot
from ..utils import categorical_order
# Fixed seed so the synthetic test data below is reproducible across runs.
rs = np.random.RandomState(0)
# Matplotlib versions before 1.4 are handled specially (see the skipif'd
# gridspec_kws tests below).
old_matplotlib = LooseVersion(mpl.__version__) < "1.4"
class TestFacetGrid(object):
    """Unit tests for seaborn's FacetGrid."""

    # Shared synthetic dataset: numeric x/y columns plus categorical columns
    # with 3 (a), 2 (b), 3 (c) and 10 (d) levels for faceting and hue tests.
    df = pd.DataFrame(dict(x=rs.normal(size=60),
                           y=rs.gamma(4, size=60),
                           a=np.repeat(list("abc"), 20),
                           b=np.tile(list("mn"), 30),
                           c=np.tile(list("tuv"), 20),
                           d=np.tile(list("abcdefghij"), 6)))
    def test_self_data(self):
        """The grid keeps a reference to (not a copy of) the input dataframe."""
        g = ag.FacetGrid(self.df)
        nt.assert_is(g.data, self.df)
        plt.close("all")
    def test_self_fig(self):
        """The grid exposes its matplotlib Figure as ``fig``."""
        g = ag.FacetGrid(self.df)
        nt.assert_is_instance(g.fig, plt.Figure)
        plt.close("all")
    def test_self_axes(self):
        """Every entry of ``axes`` is a matplotlib Axes object."""
        g = ag.FacetGrid(self.df, row="a", col="b", hue="c")
        for ax in g.axes.flat:
            nt.assert_is_instance(ax, plt.Axes)
        plt.close("all")
    def test_axes_array_size(self):
        """The axes array shape follows the number of row/col facet levels."""
        g1 = ag.FacetGrid(self.df)
        nt.assert_equal(g1.axes.shape, (1, 1))

        g2 = ag.FacetGrid(self.df, row="a")
        nt.assert_equal(g2.axes.shape, (3, 1))

        g3 = ag.FacetGrid(self.df, col="b")
        nt.assert_equal(g3.axes.shape, (1, 2))

        # hue alone does not add subplots
        g4 = ag.FacetGrid(self.df, hue="c")
        nt.assert_equal(g4.axes.shape, (1, 1))

        g5 = ag.FacetGrid(self.df, row="a", col="b", hue="c")
        nt.assert_equal(g5.axes.shape, (3, 2))
        for ax in g5.axes.flat:
            nt.assert_is_instance(ax, plt.Axes)
        plt.close("all")
    def test_single_axes(self):
        """``ax`` is only available for a single-subplot grid."""
        g1 = ag.FacetGrid(self.df)
        nt.assert_is_instance(g1.ax, plt.Axes)

        g2 = ag.FacetGrid(self.df, row="a")
        with nt.assert_raises(AttributeError):
            g2.ax

        g3 = ag.FacetGrid(self.df, col="a")
        with nt.assert_raises(AttributeError):
            g3.ax

        g4 = ag.FacetGrid(self.df, col="a", row="b")
        with nt.assert_raises(AttributeError):
            g4.ax
    def test_col_wrap(self):
        """col_wrap flattens the axes array, forbids row=, and drops missing levels."""
        g = ag.FacetGrid(self.df, col="d")
        nt.assert_equal(g.axes.shape, (1, 10))
        nt.assert_is(g.facet_axis(0, 8), g.axes[0, 8])

        g_wrap = ag.FacetGrid(self.df, col="d", col_wrap=4)
        nt.assert_equal(g_wrap.axes.shape, (10,))
        nt.assert_is(g_wrap.facet_axis(0, 8), g_wrap.axes[8])
        nt.assert_equal(g_wrap._ncol, 4)
        nt.assert_equal(g_wrap._nrow, 3)

        # row= and col_wrap are mutually exclusive
        with nt.assert_raises(ValueError):
            g = ag.FacetGrid(self.df, row="b", col="d", col_wrap=4)

        # a level made entirely NaN disappears from the facet count
        df = self.df.copy()
        df.loc[df.d == "j"] = np.nan
        g_missing = ag.FacetGrid(df, col="d")
        nt.assert_equal(g_missing.axes.shape, (1, 9))

        g_missing_wrap = ag.FacetGrid(df, col="d", col_wrap=4)
        nt.assert_equal(g_missing_wrap.axes.shape, (9,))
        plt.close("all")
    def test_normal_axes(self):
        """Interior/edge axes bookkeeping is correct for unwrapped grids."""
        null = np.empty(0, object).flat

        g = ag.FacetGrid(self.df)
        npt.assert_array_equal(g._bottom_axes, g.axes.flat)
        npt.assert_array_equal(g._not_bottom_axes, null)
        npt.assert_array_equal(g._left_axes, g.axes.flat)
        npt.assert_array_equal(g._not_left_axes, null)
        npt.assert_array_equal(g._inner_axes, null)

        g = ag.FacetGrid(self.df, col="c")
        npt.assert_array_equal(g._bottom_axes, g.axes.flat)
        npt.assert_array_equal(g._not_bottom_axes, null)
        npt.assert_array_equal(g._left_axes, g.axes[:, 0].flat)
        npt.assert_array_equal(g._not_left_axes, g.axes[:, 1:].flat)
        npt.assert_array_equal(g._inner_axes, null)

        g = ag.FacetGrid(self.df, row="c")
        npt.assert_array_equal(g._bottom_axes, g.axes[-1, :].flat)
        npt.assert_array_equal(g._not_bottom_axes, g.axes[:-1, :].flat)
        npt.assert_array_equal(g._left_axes, g.axes.flat)
        npt.assert_array_equal(g._not_left_axes, null)
        npt.assert_array_equal(g._inner_axes, null)

        g = ag.FacetGrid(self.df, col="a", row="c")
        npt.assert_array_equal(g._bottom_axes, g.axes[-1, :].flat)
        npt.assert_array_equal(g._not_bottom_axes, g.axes[:-1, :].flat)
        npt.assert_array_equal(g._left_axes, g.axes[:, 0].flat)
        npt.assert_array_equal(g._not_left_axes, g.axes[:, 1:].flat)
        npt.assert_array_equal(g._inner_axes, g.axes[:-1, 1:].flat)

        plt.close("all")
    def test_wrapped_axes(self):
        """Edge-axes bookkeeping is correct when col_wrap produces a ragged grid."""
        null = np.empty(0, object).flat

        g = ag.FacetGrid(self.df, col="a", col_wrap=2)
        npt.assert_array_equal(g._bottom_axes,
                               g.axes[np.array([1, 2])].flat)
        npt.assert_array_equal(g._not_bottom_axes, g.axes[:1].flat)
        npt.assert_array_equal(g._left_axes, g.axes[np.array([0, 2])].flat)
        npt.assert_array_equal(g._not_left_axes, g.axes[np.array([1])].flat)
        npt.assert_array_equal(g._inner_axes, null)

        plt.close("all")
    def test_figure_size(self):
        """Figure dimensions follow the facet count and the size/aspect arguments."""
        g = ag.FacetGrid(self.df, row="a", col="b")
        npt.assert_array_equal(g.fig.get_size_inches(), (6, 9))

        g = ag.FacetGrid(self.df, row="a", col="b", size=6)
        npt.assert_array_equal(g.fig.get_size_inches(), (12, 18))

        g = ag.FacetGrid(self.df, col="c", size=4, aspect=.5)
        npt.assert_array_equal(g.fig.get_size_inches(), (6, 4))
        plt.close("all")
    def test_figure_size_with_legend(self):
        """An outside legend widens the figure; legend_out=False leaves it unchanged."""
        g1 = ag.FacetGrid(self.df, col="a", hue="c", size=4, aspect=.5)
        npt.assert_array_equal(g1.fig.get_size_inches(), (6, 4))
        g1.add_legend()
        nt.assert_greater(g1.fig.get_size_inches()[0], 6)

        g2 = ag.FacetGrid(self.df, col="a", hue="c", size=4, aspect=.5,
                          legend_out=False)
        npt.assert_array_equal(g2.fig.get_size_inches(), (6, 4))
        g2.add_legend()
        npt.assert_array_equal(g2.fig.get_size_inches(), (6, 4))
        plt.close("all")
    def test_legend_data(self):
        """The legend shows the hue title and one correctly-colored entry per level."""
        g1 = ag.FacetGrid(self.df, hue="a")
        g1.map(plt.plot, "x", "y")
        g1.add_legend()
        palette = color_palette(n_colors=3)

        nt.assert_equal(g1._legend.get_title().get_text(), "a")

        a_levels = sorted(self.df.a.unique())

        lines = g1._legend.get_lines()
        nt.assert_equal(len(lines), len(a_levels))

        for line, hue in zip(lines, palette):
            nt.assert_equal(line.get_color(), hue)

        labels = g1._legend.get_texts()
        nt.assert_equal(len(labels), len(a_levels))

        for label, level in zip(labels, a_levels):
            nt.assert_equal(label.get_text(), level)

        plt.close("all")
    def test_legend_data_missing_level(self):
        """A hue_order level absent from the data still gets a legend entry."""
        g1 = ag.FacetGrid(self.df, hue="a", hue_order=list("azbc"))
        g1.map(plt.plot, "x", "y")
        g1.add_legend()

        # The palette slot for the missing level "z" is skipped by the lines.
        b, g, r, p = color_palette(n_colors=4)
        palette = [b, r, p]

        nt.assert_equal(g1._legend.get_title().get_text(), "a")

        a_levels = sorted(self.df.a.unique())

        lines = g1._legend.get_lines()
        nt.assert_equal(len(lines), len(a_levels))

        for line, hue in zip(lines, palette):
            nt.assert_equal(line.get_color(), hue)

        labels = g1._legend.get_texts()
        nt.assert_equal(len(labels), 4)

        for label, level in zip(labels, list("azbc")):
            nt.assert_equal(label.get_text(), level)

        plt.close("all")
    def test_get_boolean_legend_data(self):
        """A boolean hue column produces string labels in the legend."""
        self.df["b_bool"] = self.df.b == "m"
        g1 = ag.FacetGrid(self.df, hue="b_bool")
        g1.map(plt.plot, "x", "y")
        g1.add_legend()
        palette = color_palette(n_colors=2)

        nt.assert_equal(g1._legend.get_title().get_text(), "b_bool")

        b_levels = list(map(str, categorical_order(self.df.b_bool)))

        lines = g1._legend.get_lines()
        nt.assert_equal(len(lines), len(b_levels))

        for line, hue in zip(lines, palette):
            nt.assert_equal(line.get_color(), hue)

        labels = g1._legend.get_texts()
        nt.assert_equal(len(labels), len(b_levels))

        for label, level in zip(labels, b_levels):
            nt.assert_equal(label.get_text(), level)

        plt.close("all")
    def test_legend_options(self):
        """Smoke test: adding a legend to a hue-mapped grid does not raise."""
        g1 = ag.FacetGrid(self.df, hue="b")
        g1.map(plt.plot, "x", "y")
        g1.add_legend()
    def test_legendout_with_colwrap(self):
        """Smoke test: an inside legend works together with col_wrap."""
        g = ag.FacetGrid(self.df, col="d", hue='b',
                         col_wrap=4, legend_out=False)
        g.map(plt.plot, "x", "y", linewidth=3)
        g.add_legend()
    def test_subplot_kws(self):
        """subplot_kws entries are forwarded to every created Axes."""
        g = ag.FacetGrid(self.df, subplot_kws=dict(axisbg="blue"))
        for ax in g.axes.flat:
            nt.assert_equal(ax.get_axis_bgcolor(), "blue")
    @skipif(old_matplotlib)
    def test_gridspec_kws(self):
        """width/height_ratios in gridspec_kws control the subplot proportions."""
        ratios = [3, 1, 2]
        # Expected fractional sizes after tight_layout for the 3:1:2 ratios.
        sizes = [0.46, 0.15, 0.31]

        gskws = dict(width_ratios=ratios, height_ratios=ratios)
        g = ag.FacetGrid(self.df, col='c', row='a', gridspec_kws=gskws)

        # clear out all ticks
        for ax in g.axes.flat:
            ax.set_xticks([])
            ax.set_yticks([])

        g.fig.tight_layout()
        widths, heights = np.meshgrid(sizes, sizes)
        for n, ax in enumerate(g.axes.flat):
            npt.assert_almost_equal(
                ax.get_position().width,
                widths.flatten()[n],
                decimal=2
            )
            npt.assert_almost_equal(
                ax.get_position().height,
                heights.flatten()[n],
                decimal=2
            )
@skipif(old_matplotlib)
def test_gridspec_kws_col_wrap(self):
    """gridspec_kws are incompatible with col_wrap and must warn."""
    ratios = [3, 1, 2, 1, 1]
    sizes = [0.46, 0.15, 0.31]  # unused here; kept parallel to sibling test
    gskws = dict(width_ratios=ratios)
    with warnings.catch_warnings():
        warnings.resetwarnings()
        warnings.simplefilter("always")
        npt.assert_warns(UserWarning, ag.FacetGrid, self.df, col='d',
                         col_wrap=5, gridspec_kws=gskws)
@skipif(not old_matplotlib)
def test_gridsic_kws_old_mpl(self):
    """Old matplotlib cannot honor gridspec_kws and must warn."""
    # NOTE(review): "gridsic" looks like a typo for "gridspec"; renaming
    # would be cosmetic, so the name is kept.
    ratios = [3, 1, 2]
    sizes = [0.46, 0.15, 0.31]
    gskws = dict(width_ratios=ratios, height_ratios=ratios)
    with warnings.catch_warnings():
        warnings.resetwarnings()
        warnings.simplefilter("always")
        npt.assert_warns(UserWarning, ag.FacetGrid, self.df, col='c',
                         row='a', gridspec_kws=gskws)
def test_data_generator(self):
    """facet_data yields ((row, col, hue), subset) pairs for every facet."""
    g = ag.FacetGrid(self.df, row="a")
    d = list(g.facet_data())
    nt.assert_equal(len(d), 3)
    tup, data = d[0]
    nt.assert_equal(tup, (0, 0, 0))
    nt.assert_true((data["a"] == "a").all())
    tup, data = d[1]
    nt.assert_equal(tup, (1, 0, 0))
    nt.assert_true((data["a"] == "b").all())
    # Two facet dimensions: iteration is row-major over (row, col).
    g = ag.FacetGrid(self.df, row="a", col="b")
    d = list(g.facet_data())
    nt.assert_equal(len(d), 6)
    tup, data = d[0]
    nt.assert_equal(tup, (0, 0, 0))
    nt.assert_true((data["a"] == "a").all())
    nt.assert_true((data["b"] == "m").all())
    tup, data = d[1]
    nt.assert_equal(tup, (0, 1, 0))
    nt.assert_true((data["a"] == "a").all())
    nt.assert_true((data["b"] == "n").all())
    tup, data = d[2]
    nt.assert_equal(tup, (1, 0, 0))
    nt.assert_true((data["a"] == "b").all())
    nt.assert_true((data["b"] == "m").all())
    # Hue-only faceting varies the third element of the key tuple.
    g = ag.FacetGrid(self.df, hue="c")
    d = list(g.facet_data())
    nt.assert_equal(len(d), 3)
    tup, data = d[1]
    nt.assert_equal(tup, (0, 0, 1))
    nt.assert_true((data["c"] == "u").all())
    plt.close("all")
def test_map(self):
    """map draws one artist per hue level and passes kwargs through."""
    g = ag.FacetGrid(self.df, row="a", col="b", hue="c")
    g.map(plt.plot, "x", "y", linewidth=3)
    lines = g.axes[0, 0].lines
    nt.assert_equal(len(lines), 3)
    line1, _, _ = lines
    nt.assert_equal(line1.get_linewidth(), 3)
    x, y = line1.get_data()
    # First line corresponds to the first facet/hue combination.
    mask = (self.df.a == "a") & (self.df.b == "m") & (self.df.c == "t")
    npt.assert_array_equal(x, self.df.x[mask])
    npt.assert_array_equal(y, self.df.y[mask])
def test_map_dataframe(self):
    """map_dataframe passes the facet subset as the `data` keyword."""
    g = ag.FacetGrid(self.df, row="a", col="b", hue="c")
    # The plotting function receives column *names* plus a `data` frame.
    plot = lambda x, y, data=None, **kws: plt.plot(data[x], data[y], **kws)
    g.map_dataframe(plot, "x", "y", linestyle="--")
    lines = g.axes[0, 0].lines
    nt.assert_equal(len(lines), 3)
    line1, _, _ = lines
    nt.assert_equal(line1.get_linestyle(), "--")
    x, y = line1.get_data()
    mask = (self.df.a == "a") & (self.df.b == "m") & (self.df.c == "t")
    npt.assert_array_equal(x, self.df.x[mask])
    npt.assert_array_equal(y, self.df.y[mask])
def test_set(self):
    """set() broadcasts Axes properties to every subplot in the grid."""
    g = ag.FacetGrid(self.df, row="a", col="b")
    xlim = (-2, 5)
    ylim = (3, 6)
    xticks = [-2, 0, 3, 5]
    yticks = [3, 4.5, 6]
    g.set(xlim=xlim, ylim=ylim, xticks=xticks, yticks=yticks)
    for ax in g.axes.flat:
        npt.assert_array_equal(ax.get_xlim(), xlim)
        npt.assert_array_equal(ax.get_ylim(), ylim)
        npt.assert_array_equal(ax.get_xticks(), xticks)
        npt.assert_array_equal(ax.get_yticks(), yticks)
    plt.close("all")
def test_set_titles(self):
    """Default and user-supplied title templates render the facet names."""
    g = ag.FacetGrid(self.df, row="a", col="b")
    g.map(plt.plot, "x", "y")
    # Test the default titles
    nt.assert_equal(g.axes[0, 0].get_title(), "a = a | b = m")
    nt.assert_equal(g.axes[0, 1].get_title(), "a = a | b = n")
    nt.assert_equal(g.axes[1, 0].get_title(), "a = b | b = m")
    # Test a provided title.  Raw strings keep the literal backslash that
    # the original (invalid) "\\/" escape produced, without triggering the
    # DeprecationWarning/SyntaxWarning for an unrecognized escape sequence.
    g.set_titles(r"{row_var} == {row_name} \/ {col_var} == {col_name}")
    nt.assert_equal(g.axes[0, 0].get_title(), r"a == a \/ b == m")
    nt.assert_equal(g.axes[0, 1].get_title(), r"a == a \/ b == n")
    nt.assert_equal(g.axes[1, 0].get_title(), r"a == b \/ b == m")
    # Test a single row
    g = ag.FacetGrid(self.df, col="b")
    g.map(plt.plot, "x", "y")
    # Test the default titles
    nt.assert_equal(g.axes[0, 0].get_title(), "b = m")
    nt.assert_equal(g.axes[0, 1].get_title(), "b = n")
    # test with dropna=False
    g = ag.FacetGrid(self.df, col="b", hue="b", dropna=False)
    g.map(plt.plot, 'x', 'y')
    plt.close("all")
def test_set_titles_margin_titles(self):
    """With margin_titles, row labels become side texts, not axes titles."""
    g = ag.FacetGrid(self.df, row="a", col="b", margin_titles=True)
    g.map(plt.plot, "x", "y")
    # Test the default titles
    nt.assert_equal(g.axes[0, 0].get_title(), "b = m")
    nt.assert_equal(g.axes[0, 1].get_title(), "b = n")
    nt.assert_equal(g.axes[1, 0].get_title(), "")
    # Test the row "titles"
    nt.assert_equal(g.axes[0, 1].texts[0].get_text(), "a = a")
    nt.assert_equal(g.axes[1, 1].texts[0].get_text(), "a = b")
    # Test a provided title
    g.set_titles(col_template="{col_var} == {col_name}")
    nt.assert_equal(g.axes[0, 0].get_title(), "b == m")
    nt.assert_equal(g.axes[0, 1].get_title(), "b == n")
    nt.assert_equal(g.axes[1, 0].get_title(), "")
    plt.close("all")
def test_set_ticklabels(self):
    """set_x/yticklabels apply labels, step, and rotation across the grid."""
    g = ag.FacetGrid(self.df, row="a", col="b")
    g.map(plt.plot, "x", "y")
    xlab = [l.get_text() + "h" for l in g.axes[1, 0].get_xticklabels()]
    ylab = [l.get_text() for l in g.axes[1, 0].get_yticklabels()]
    g.set_xticklabels(xlab)
    g.set_yticklabels(rotation=90)
    got_x = [l.get_text() + "h" for l in g.axes[1, 1].get_xticklabels()]
    got_y = [l.get_text() for l in g.axes[0, 0].get_yticklabels()]
    npt.assert_array_equal(got_x, xlab)
    npt.assert_array_equal(got_y, ylab)
    # step= keeps every n-th tick label.
    x, y = np.arange(10), np.arange(10)
    df = pd.DataFrame(np.c_[x, y], columns=["x", "y"])
    g = ag.FacetGrid(df).map(pointplot, "x", "y")
    g.set_xticklabels(step=2)
    got_x = [int(l.get_text()) for l in g.axes[0, 0].get_xticklabels()]
    npt.assert_array_equal(x[::2], got_x)
    # With col_wrap, only the outer (bottom/left) axes get labels rotated.
    g = ag.FacetGrid(self.df, col="d", col_wrap=5)
    g.map(plt.plot, "x", "y")
    g.set_xticklabels(rotation=45)
    g.set_yticklabels(rotation=75)
    for ax in g._bottom_axes:
        for l in ax.get_xticklabels():
            nt.assert_equal(l.get_rotation(), 45)
    for ax in g._left_axes:
        for l in ax.get_yticklabels():
            nt.assert_equal(l.get_rotation(), 75)
    plt.close("all")
def test_set_axis_labels(self):
    """set_axis_labels writes labels on the outer row/column axes."""
    g = ag.FacetGrid(self.df, row="a", col="b")
    g.map(plt.plot, "x", "y")
    xlab = 'xx'
    ylab = 'yy'
    g.set_axis_labels(xlab, ylab)
    got_x = [ax.get_xlabel() for ax in g.axes[-1, :]]
    got_y = [ax.get_ylabel() for ax in g.axes[:, 0]]
    npt.assert_array_equal(got_x, xlab)
    npt.assert_array_equal(got_y, ylab)
    plt.close("all")
def test_axis_lims(self):
    """xlim/ylim passed to the constructor are applied to the axes."""
    expected_xlim = (0, 4)
    expected_ylim = (-2, 3)
    grid = ag.FacetGrid(self.df, row="a", col="b",
                        xlim=expected_xlim, ylim=expected_ylim)
    nt.assert_equal(grid.axes[0, 0].get_xlim(), expected_xlim)
    nt.assert_equal(grid.axes[0, 0].get_ylim(), expected_ylim)
    plt.close("all")
def test_data_orders(self):
    """Levels default to order of appearance; *_order overrides and may
    include levels that are absent from the data (grid grows to fit)."""
    g = ag.FacetGrid(self.df, row="a", col="b", hue="c")
    nt.assert_equal(g.row_names, list("abc"))
    nt.assert_equal(g.col_names, list("mn"))
    nt.assert_equal(g.hue_names, list("tuv"))
    nt.assert_equal(g.axes.shape, (3, 2))
    g = ag.FacetGrid(self.df, row="a", col="b", hue="c",
                     row_order=list("bca"),
                     col_order=list("nm"),
                     hue_order=list("vtu"))
    nt.assert_equal(g.row_names, list("bca"))
    nt.assert_equal(g.col_names, list("nm"))
    nt.assert_equal(g.hue_names, list("vtu"))
    nt.assert_equal(g.axes.shape, (3, 2))
    g = ag.FacetGrid(self.df, row="a", col="b", hue="c",
                     row_order=list("bcda"),
                     col_order=list("nom"),
                     hue_order=list("qvtu"))
    nt.assert_equal(g.row_names, list("bcda"))
    nt.assert_equal(g.col_names, list("nom"))
    nt.assert_equal(g.hue_names, list("qvtu"))
    nt.assert_equal(g.axes.shape, (4, 3))
    plt.close("all")
def test_palette(self):
    """Palette resolution: default, named, and dict palettes (dict values
    are reordered to match the hue level order)."""
    rcmod.set()
    g = ag.FacetGrid(self.df, hue="c")
    nt.assert_equal(g._colors, color_palette(n_colors=3))
    # Many hue levels fall back to the "husl" palette.
    g = ag.FacetGrid(self.df, hue="d")
    nt.assert_equal(g._colors, color_palette("husl", 10))
    g = ag.FacetGrid(self.df, hue="c", palette="Set2")
    nt.assert_equal(g._colors, color_palette("Set2", 3))
    dict_pal = dict(t="red", u="green", v="blue")
    list_pal = color_palette(["red", "green", "blue"], 3)
    g = ag.FacetGrid(self.df, hue="c", palette=dict_pal)
    nt.assert_equal(g._colors, list_pal)
    # A dict palette follows hue_order, not its own insertion order.
    list_pal = color_palette(["green", "blue", "red"], 3)
    g = ag.FacetGrid(self.df, hue="c", hue_order=list("uvt"),
                     palette=dict_pal)
    nt.assert_equal(g._colors, list_pal)
    plt.close("all")
def test_hue_kws(self):
    """hue_kws values are distributed across hue levels in order."""
    kws = dict(marker=["o", "s", "D"])
    g = ag.FacetGrid(self.df, hue="c", hue_kws=kws)
    g.map(plt.plot, "x", "y")
    for line, marker in zip(g.axes[0, 0].lines, kws["marker"]):
        nt.assert_equal(line.get_marker(), marker)
def test_dropna(self):
    """dropna controls whether rows with NA facet values are kept."""
    df = self.df.copy()
    # `np.float` was deprecated in NumPy 1.20 and removed in 1.24;
    # the builtin `float` is the documented equivalent.
    hasna = pd.Series(np.tile(np.arange(6), 10), dtype=float)
    hasna[hasna == 5] = np.nan
    df["hasna"] = hasna
    g = ag.FacetGrid(df, dropna=False, row="hasna")
    nt.assert_equal(g._not_na.sum(), 60)
    g = ag.FacetGrid(df, dropna=True, row="hasna")
    nt.assert_equal(g._not_na.sum(), 50)
    plt.close("all")
@classmethod
def teardown_class(cls):
    """Ensure that all figures are closed on exit."""
    # Guards against tests that fail before reaching their own plt.close().
    plt.close("all")
class TestPairGrid(object):
rs = np.random.RandomState(sum(map(ord, "PairGrid")))
df = pd.DataFrame(dict(x=rs.normal(size=80),
y=rs.randint(0, 4, size=(80)),
z=rs.gamma(3, size=80),
a=np.repeat(list("abcd"), 20),
b=np.repeat(list("abcdefgh"), 10)))
def test_self_data(self):
    """The grid keeps a reference to the passed dataframe (no copy)."""
    grid = ag.PairGrid(self.df)
    nt.assert_is(grid.data, self.df)
    plt.close("all")
def test_ignore_datelike_data(self):
    """Datetime columns are excluded from the data PairGrid keeps."""
    df = self.df.copy()
    df['date'] = pd.date_range('2010-01-01', periods=len(df), freq='d')
    # The grid must be built from the frame that actually contains the
    # date column; the original passed ``self.df``, so ``expected`` was
    # trivially equal to ``result`` and the test asserted nothing.
    result = ag.PairGrid(df).data
    expected = df.drop('date', axis=1)
    tm.assert_frame_equal(result, expected)
    plt.close("all")
def test_self_fig(self):
    """The grid exposes its figure as a matplotlib Figure instance."""
    grid = ag.PairGrid(self.df)
    nt.assert_is_instance(grid.fig, plt.Figure)
    plt.close("all")
def test_self_axes(self):
    """Every entry of the axes array is a matplotlib Axes."""
    grid = ag.PairGrid(self.df)
    for subplot_ax in grid.axes.flat:
        nt.assert_is_instance(subplot_ax, plt.Axes)
    plt.close("all")
def test_default_axes(self):
    """Without vars, all numeric columns are paired in a square grid."""
    g = ag.PairGrid(self.df)
    nt.assert_equal(g.axes.shape, (3, 3))
    nt.assert_equal(g.x_vars, ["x", "y", "z"])
    nt.assert_equal(g.y_vars, ["x", "y", "z"])
    nt.assert_true(g.square_grid)
    plt.close("all")
def test_specific_square_axes(self):
vars = ["z", "x"]
g = ag.PairGrid(self.df, vars=vars)
nt.assert_equal(g.axes.shape, (len(vars), len(vars)))
nt.assert_equal(g.x_vars, vars)
nt.assert_equal(g.y_vars, vars)
nt.assert_true(g.square_grid)
plt.close("all")
def test_specific_nonsquare_axes(self):
    """x_vars/y_vars give a rectangular grid; a bare string is one var."""
    x_vars = ["x", "y"]
    y_vars = ["z", "y", "x"]
    g = ag.PairGrid(self.df, x_vars=x_vars, y_vars=y_vars)
    nt.assert_equal(g.axes.shape, (len(y_vars), len(x_vars)))
    nt.assert_equal(g.x_vars, x_vars)
    nt.assert_equal(g.y_vars, y_vars)
    nt.assert_true(not g.square_grid)
    # A single string is promoted to a one-element variable list.
    x_vars = ["x", "y"]
    y_vars = "z"
    g = ag.PairGrid(self.df, x_vars=x_vars, y_vars=y_vars)
    nt.assert_equal(g.axes.shape, (len(y_vars), len(x_vars)))
    nt.assert_equal(g.x_vars, list(x_vars))
    nt.assert_equal(g.y_vars, list(y_vars))
    nt.assert_true(not g.square_grid)
    plt.close("all")
def test_specific_square_axes_with_array(self):
    """vars may be given as a numpy array; it is coerced to a list."""
    vars = np.array(["z", "x"])
    g = ag.PairGrid(self.df, vars=vars)
    nt.assert_equal(g.axes.shape, (len(vars), len(vars)))
    nt.assert_equal(g.x_vars, list(vars))
    nt.assert_equal(g.y_vars, list(vars))
    nt.assert_true(g.square_grid)
    plt.close("all")
def test_specific_nonsquare_axes_with_array(self):
    """x_vars/y_vars may be numpy arrays; they are coerced to lists."""
    x_vars = np.array(["x", "y"])
    y_vars = np.array(["z", "y", "x"])
    g = ag.PairGrid(self.df, x_vars=x_vars, y_vars=y_vars)
    nt.assert_equal(g.axes.shape, (len(y_vars), len(x_vars)))
    nt.assert_equal(g.x_vars, list(x_vars))
    nt.assert_equal(g.y_vars, list(y_vars))
    nt.assert_true(not g.square_grid)
    plt.close("all")
def test_size(self):
    """size/aspect determine the overall figure dimensions in inches."""
    g1 = ag.PairGrid(self.df, size=3)
    npt.assert_array_equal(g1.fig.get_size_inches(), (9, 9))
    g2 = ag.PairGrid(self.df, size=4, aspect=.5)
    npt.assert_array_equal(g2.fig.get_size_inches(), (6, 12))
    g3 = ag.PairGrid(self.df, y_vars=["z"], x_vars=["x", "y"],
                     size=2, aspect=2)
    npt.assert_array_equal(g3.fig.get_size_inches(), (8, 2))
    plt.close("all")
def test_map(self):
    """map plots var[j] vs var[i] in cell (i, j); hue adds one
    collection per level."""
    vars = ["x", "y", "z"]
    g1 = ag.PairGrid(self.df)
    g1.map(plt.scatter)
    for i, axes_i in enumerate(g1.axes):
        for j, ax in enumerate(axes_i):
            x_in = self.df[vars[j]]
            y_in = self.df[vars[i]]
            x_out, y_out = ax.collections[0].get_offsets().T
            npt.assert_array_equal(x_in, x_out)
            npt.assert_array_equal(y_in, y_out)
    # Positional second argument is the hue variable.
    g2 = ag.PairGrid(self.df, "a")
    g2.map(plt.scatter)
    for i, axes_i in enumerate(g2.axes):
        for j, ax in enumerate(axes_i):
            x_in = self.df[vars[j]]
            y_in = self.df[vars[i]]
            for k, k_level in enumerate("abcd"):
                x_in_k = x_in[self.df.a == k_level]
                y_in_k = y_in[self.df.a == k_level]
                x_out, y_out = ax.collections[k].get_offsets().T
                npt.assert_array_equal(x_in_k, x_out)
                npt.assert_array_equal(y_in_k, y_out)
    plt.close("all")
def test_map_nonsquare(self):
x_vars = ["x"]
y_vars = ["y", "z"]
g = ag.PairGrid(self.df, x_vars=x_vars, y_vars=y_vars)
g.map(plt.scatter)
x_in = self.df.x
for i, i_var in enumerate(y_vars):
ax = g.axes[i, 0]
y_in = self.df[i_var]
x_out, y_out = ax.collections[0].get_offsets().T
npt.assert_array_equal(x_in, x_out)
npt.assert_array_equal(y_in, y_out)
plt.close("all")
def test_map_lower(self):
    """map_lower draws only below the diagonal; upper cells stay empty."""
    vars = ["x", "y", "z"]
    g = ag.PairGrid(self.df)
    g.map_lower(plt.scatter)
    for i, j in zip(*np.tril_indices_from(g.axes, -1)):
        ax = g.axes[i, j]
        x_in = self.df[vars[j]]
        y_in = self.df[vars[i]]
        x_out, y_out = ax.collections[0].get_offsets().T
        npt.assert_array_equal(x_in, x_out)
        npt.assert_array_equal(y_in, y_out)
    for i, j in zip(*np.triu_indices_from(g.axes)):
        ax = g.axes[i, j]
        nt.assert_equal(len(ax.collections), 0)
    plt.close("all")
def test_map_upper(self):
    """map_upper draws only above the diagonal; lower cells stay empty."""
    vars = ["x", "y", "z"]
    g = ag.PairGrid(self.df)
    g.map_upper(plt.scatter)
    for i, j in zip(*np.triu_indices_from(g.axes, 1)):
        ax = g.axes[i, j]
        x_in = self.df[vars[j]]
        y_in = self.df[vars[i]]
        x_out, y_out = ax.collections[0].get_offsets().T
        npt.assert_array_equal(x_in, x_out)
        npt.assert_array_equal(y_in, y_out)
    for i, j in zip(*np.tril_indices_from(g.axes)):
        ax = g.axes[i, j]
        nt.assert_equal(len(ax.collections), 0)
    plt.close("all")
@skipif(old_matplotlib)
def test_map_diag(self):
    """map_diag plots each variable against itself on the diagonal."""
    g1 = ag.PairGrid(self.df)
    g1.map_diag(plt.hist)
    for ax in g1.diag_axes:
        nt.assert_equal(len(ax.patches), 10)
    # Keyword arguments are forwarded to the plotting function.
    g2 = ag.PairGrid(self.df)
    g2.map_diag(plt.hist, bins=15)
    for ax in g2.diag_axes:
        nt.assert_equal(len(ax.patches), 15)
    # With a hue variable, one histogram per level (4 * 10 patches).
    g3 = ag.PairGrid(self.df, hue="a")
    g3.map_diag(plt.hist)
    for ax in g3.diag_axes:
        nt.assert_equal(len(ax.patches), 40)
    plt.close("all")
@skipif(old_matplotlib)
def test_map_diag_and_offdiag(self):
    """map_offdiag fills both triangles and leaves the diagonal to
    map_diag."""
    vars = ["x", "y", "z"]
    g = ag.PairGrid(self.df)
    g.map_offdiag(plt.scatter)
    g.map_diag(plt.hist)
    for ax in g.diag_axes:
        nt.assert_equal(len(ax.patches), 10)
    for i, j in zip(*np.triu_indices_from(g.axes, 1)):
        ax = g.axes[i, j]
        x_in = self.df[vars[j]]
        y_in = self.df[vars[i]]
        x_out, y_out = ax.collections[0].get_offsets().T
        npt.assert_array_equal(x_in, x_out)
        npt.assert_array_equal(y_in, y_out)
    for i, j in zip(*np.tril_indices_from(g.axes, -1)):
        ax = g.axes[i, j]
        x_in = self.df[vars[j]]
        y_in = self.df[vars[i]]
        x_out, y_out = ax.collections[0].get_offsets().T
        npt.assert_array_equal(x_in, x_out)
        npt.assert_array_equal(y_in, y_out)
    for i, j in zip(*np.diag_indices_from(g.axes)):
        ax = g.axes[i, j]
        nt.assert_equal(len(ax.collections), 0)
    plt.close("all")
def test_palette(self):
    """Palette resolution for PairGrid mirrors FacetGrid: default, named,
    and dict palettes (dicts reordered to hue level order)."""
    rcmod.set()
    g = ag.PairGrid(self.df, hue="a")
    nt.assert_equal(g.palette, color_palette(n_colors=4))
    # Many hue levels fall back to the "husl" palette.
    g = ag.PairGrid(self.df, hue="b")
    nt.assert_equal(g.palette, color_palette("husl", 8))
    g = ag.PairGrid(self.df, hue="a", palette="Set2")
    nt.assert_equal(g.palette, color_palette("Set2", 4))
    dict_pal = dict(a="red", b="green", c="blue", d="purple")
    list_pal = color_palette(["red", "green", "blue", "purple"], 4)
    g = ag.PairGrid(self.df, hue="a", palette=dict_pal)
    nt.assert_equal(g.palette, list_pal)
    list_pal = color_palette(["purple", "blue", "red", "green"], 4)
    g = ag.PairGrid(self.df, hue="a", hue_order=list("dcab"),
                    palette=dict_pal)
    nt.assert_equal(g.palette, list_pal)
    plt.close("all")
def test_hue_kws(self):
    """hue_kws values follow the position of each hue level, also when
    hue_order changes the level order."""
    kws = dict(marker=["o", "s", "d", "+"])
    g = ag.PairGrid(self.df, hue="a", hue_kws=kws)
    g.map(plt.plot)
    for line, marker in zip(g.axes[0, 0].lines, kws["marker"]):
        nt.assert_equal(line.get_marker(), marker)
    g = ag.PairGrid(self.df, hue="a", hue_kws=kws,
                    hue_order=list("dcab"))
    g.map(plt.plot)
    for line, marker in zip(g.axes[0, 0].lines, kws["marker"]):
        nt.assert_equal(line.get_marker(), marker)
    plt.close("all")
@skipif(old_matplotlib)
def test_hue_order(self):
    """hue_order controls the drawing order for map, map_diag,
    map_lower, and map_upper alike."""
    order = list("dcab")
    g = ag.PairGrid(self.df, hue="a", hue_order=order)
    g.map(plt.plot)
    for line, level in zip(g.axes[1, 0].lines, order):
        x, y = line.get_xydata().T
        npt.assert_array_equal(x, self.df.loc[self.df.a == level, "x"])
        npt.assert_array_equal(y, self.df.loc[self.df.a == level, "y"])
    plt.close("all")
    g = ag.PairGrid(self.df, hue="a", hue_order=order)
    g.map_diag(plt.plot)
    for line, level in zip(g.axes[0, 0].lines, order):
        x, y = line.get_xydata().T
        npt.assert_array_equal(x, self.df.loc[self.df.a == level, "x"])
        npt.assert_array_equal(y, self.df.loc[self.df.a == level, "x"])
    plt.close("all")
    g = ag.PairGrid(self.df, hue="a", hue_order=order)
    g.map_lower(plt.plot)
    for line, level in zip(g.axes[1, 0].lines, order):
        x, y = line.get_xydata().T
        npt.assert_array_equal(x, self.df.loc[self.df.a == level, "x"])
        npt.assert_array_equal(y, self.df.loc[self.df.a == level, "y"])
    plt.close("all")
    g = ag.PairGrid(self.df, hue="a", hue_order=order)
    g.map_upper(plt.plot)
    # Cell (0, 1) plots "y" on the x axis and "x" on the y axis.
    for line, level in zip(g.axes[0, 1].lines, order):
        x, y = line.get_xydata().T
        npt.assert_array_equal(x, self.df.loc[self.df.a == level, "y"])
        npt.assert_array_equal(y, self.df.loc[self.df.a == level, "x"])
    plt.close("all")
@skipif(old_matplotlib)
def test_hue_order_missing_level(self):
    """A hue_order level absent from the data ("e") simply draws an
    empty subset without breaking the ordering of the others."""
    order = list("dcaeb")
    g = ag.PairGrid(self.df, hue="a", hue_order=order)
    g.map(plt.plot)
    for line, level in zip(g.axes[1, 0].lines, order):
        x, y = line.get_xydata().T
        npt.assert_array_equal(x, self.df.loc[self.df.a == level, "x"])
        npt.assert_array_equal(y, self.df.loc[self.df.a == level, "y"])
    plt.close("all")
    g = ag.PairGrid(self.df, hue="a", hue_order=order)
    g.map_diag(plt.plot)
    for line, level in zip(g.axes[0, 0].lines, order):
        x, y = line.get_xydata().T
        npt.assert_array_equal(x, self.df.loc[self.df.a == level, "x"])
        npt.assert_array_equal(y, self.df.loc[self.df.a == level, "x"])
    plt.close("all")
    g = ag.PairGrid(self.df, hue="a", hue_order=order)
    g.map_lower(plt.plot)
    for line, level in zip(g.axes[1, 0].lines, order):
        x, y = line.get_xydata().T
        npt.assert_array_equal(x, self.df.loc[self.df.a == level, "x"])
        npt.assert_array_equal(y, self.df.loc[self.df.a == level, "y"])
    plt.close("all")
    g = ag.PairGrid(self.df, hue="a", hue_order=order)
    g.map_upper(plt.plot)
    for line, level in zip(g.axes[0, 1].lines, order):
        x, y = line.get_xydata().T
        npt.assert_array_equal(x, self.df.loc[self.df.a == level, "y"])
        npt.assert_array_equal(y, self.df.loc[self.df.a == level, "x"])
    plt.close("all")
def test_nondefault_index(self):
    """A dataframe with a non-RangeIndex plots the same data as one
    with the default index."""
    df = self.df.copy().set_index("b")
    vars = ["x", "y", "z"]
    g1 = ag.PairGrid(df)
    g1.map(plt.scatter)
    for i, axes_i in enumerate(g1.axes):
        for j, ax in enumerate(axes_i):
            x_in = self.df[vars[j]]
            y_in = self.df[vars[i]]
            x_out, y_out = ax.collections[0].get_offsets().T
            npt.assert_array_equal(x_in, x_out)
            npt.assert_array_equal(y_in, y_out)
    g2 = ag.PairGrid(df, "a")
    g2.map(plt.scatter)
    for i, axes_i in enumerate(g2.axes):
        for j, ax in enumerate(axes_i):
            x_in = self.df[vars[j]]
            y_in = self.df[vars[i]]
            for k, k_level in enumerate("abcd"):
                x_in_k = x_in[self.df.a == k_level]
                y_in_k = y_in[self.df.a == k_level]
                x_out, y_out = ax.collections[k].get_offsets().T
                npt.assert_array_equal(x_in_k, x_out)
                npt.assert_array_equal(y_in_k, y_out)
    plt.close("all")
@skipif(old_matplotlib)
def test_pairplot(self):
    """pairplot default: scatter off-diagonal, histogram on the diagonal."""
    vars = ["x", "y", "z"]
    g = pairplot(self.df)
    for ax in g.diag_axes:
        nt.assert_equal(len(ax.patches), 10)
    for i, j in zip(*np.triu_indices_from(g.axes, 1)):
        ax = g.axes[i, j]
        x_in = self.df[vars[j]]
        y_in = self.df[vars[i]]
        x_out, y_out = ax.collections[0].get_offsets().T
        npt.assert_array_equal(x_in, x_out)
        npt.assert_array_equal(y_in, y_out)
    for i, j in zip(*np.tril_indices_from(g.axes, -1)):
        ax = g.axes[i, j]
        x_in = self.df[vars[j]]
        y_in = self.df[vars[i]]
        x_out, y_out = ax.collections[0].get_offsets().T
        npt.assert_array_equal(x_in, x_out)
        npt.assert_array_equal(y_in, y_out)
    # Diagonal axes hold only histogram patches, no collections.
    for i, j in zip(*np.diag_indices_from(g.axes)):
        ax = g.axes[i, j]
        nt.assert_equal(len(ax.collections), 0)
    plt.close("all")
@skipif(old_matplotlib)
def test_pairplot_reg(self):
    """kind="reg" adds a fit line and CI band to each off-diagonal cell."""
    vars = ["x", "y", "z"]
    g = pairplot(self.df, kind="reg")
    for ax in g.diag_axes:
        nt.assert_equal(len(ax.patches), 10)
    for i, j in zip(*np.triu_indices_from(g.axes, 1)):
        ax = g.axes[i, j]
        x_in = self.df[vars[j]]
        y_in = self.df[vars[i]]
        x_out, y_out = ax.collections[0].get_offsets().T
        npt.assert_array_equal(x_in, x_out)
        npt.assert_array_equal(y_in, y_out)
        # One regression line plus scatter + CI band collections.
        nt.assert_equal(len(ax.lines), 1)
        nt.assert_equal(len(ax.collections), 2)
    for i, j in zip(*np.tril_indices_from(g.axes, -1)):
        ax = g.axes[i, j]
        x_in = self.df[vars[j]]
        y_in = self.df[vars[i]]
        x_out, y_out = ax.collections[0].get_offsets().T
        npt.assert_array_equal(x_in, x_out)
        npt.assert_array_equal(y_in, y_out)
        nt.assert_equal(len(ax.lines), 1)
        nt.assert_equal(len(ax.collections), 2)
    for i, j in zip(*np.diag_indices_from(g.axes)):
        ax = g.axes[i, j]
        nt.assert_equal(len(ax.collections), 0)
    plt.close("all")
@skipif(old_matplotlib)
def test_pairplot_kde(self):
    """diag_kind="kde" draws one density line per diagonal axes."""
    vars = ["x", "y", "z"]
    g = pairplot(self.df, diag_kind="kde")
    for ax in g.diag_axes:
        nt.assert_equal(len(ax.lines), 1)
    for i, j in zip(*np.triu_indices_from(g.axes, 1)):
        ax = g.axes[i, j]
        x_in = self.df[vars[j]]
        y_in = self.df[vars[i]]
        x_out, y_out = ax.collections[0].get_offsets().T
        npt.assert_array_equal(x_in, x_out)
        npt.assert_array_equal(y_in, y_out)
    for i, j in zip(*np.tril_indices_from(g.axes, -1)):
        ax = g.axes[i, j]
        x_in = self.df[vars[j]]
        y_in = self.df[vars[i]]
        x_out, y_out = ax.collections[0].get_offsets().T
        npt.assert_array_equal(x_in, x_out)
        npt.assert_array_equal(y_in, y_out)
    for i, j in zip(*np.diag_indices_from(g.axes)):
        ax = g.axes[i, j]
        nt.assert_equal(len(ax.collections), 0)
    plt.close("all")
@skipif(old_matplotlib)
def test_pairplot_markers(self):
    """markers list is stored per hue level; too few markers raises."""
    vars = ["x", "y", "z"]
    markers = ["o", "x", "s", "d"]
    g = pairplot(self.df, hue="a", vars=vars, markers=markers)
    nt.assert_equal(g.hue_kws["marker"], markers)
    plt.close("all")
    with nt.assert_raises(ValueError):
        g = pairplot(self.df, hue="a", vars=vars, markers=markers[:-2])
@classmethod
def teardown_class(cls):
    """Ensure that all figures are closed on exit."""
    # Guards against tests that fail before reaching their own plt.close().
    plt.close("all")
class TestJointGrid(object):
rs = np.random.RandomState(sum(map(ord, "JointGrid")))
x = rs.randn(100)
y = rs.randn(100)
x_na = x.copy()
x_na[10] = np.nan
x_na[20] = np.nan
data = pd.DataFrame(dict(x=x, y=y, x_na=x_na))
def test_margin_grid_from_arrays(self):
    """JointGrid accepts bare numpy arrays for x and y."""
    g = ag.JointGrid(self.x, self.y)
    npt.assert_array_equal(g.x, self.x)
    npt.assert_array_equal(g.y, self.y)
    plt.close("all")
def test_margin_grid_from_series(self):
    """JointGrid accepts pandas Series for x and y."""
    g = ag.JointGrid(self.data.x, self.data.y)
    npt.assert_array_equal(g.x, self.x)
    npt.assert_array_equal(g.y, self.y)
    plt.close("all")
def test_margin_grid_from_dataframe(self):
    """JointGrid accepts column names plus a dataframe."""
    g = ag.JointGrid("x", "y", self.data)
    npt.assert_array_equal(g.x, self.x)
    npt.assert_array_equal(g.y, self.y)
    plt.close("all")
def test_margin_grid_axis_labels(self):
    """Axis labels default to the column names and can be overridden."""
    g = ag.JointGrid("x", "y", self.data)
    xlabel, ylabel = g.ax_joint.get_xlabel(), g.ax_joint.get_ylabel()
    nt.assert_equal(xlabel, "x")
    nt.assert_equal(ylabel, "y")
    g.set_axis_labels("x variable", "y variable")
    xlabel, ylabel = g.ax_joint.get_xlabel(), g.ax_joint.get_ylabel()
    nt.assert_equal(xlabel, "x variable")
    nt.assert_equal(ylabel, "y variable")
    plt.close("all")
def test_dropna(self):
    """dropna controls whether NA observations are kept in the grid."""
    g = ag.JointGrid("x_na", "y", self.data, dropna=False)
    nt.assert_equal(len(g.x), len(self.x_na))
    g = ag.JointGrid("x_na", "y", self.data, dropna=True)
    nt.assert_equal(len(g.x), pd.notnull(self.x_na).sum())
    plt.close("all")
def test_axlims(self):
    """xlim/ylim apply to the joint axes and the matching marginals."""
    lim = (-3, 3)
    g = ag.JointGrid("x", "y", self.data, xlim=lim, ylim=lim)
    nt.assert_equal(g.ax_joint.get_xlim(), lim)
    nt.assert_equal(g.ax_joint.get_ylim(), lim)
    nt.assert_equal(g.ax_marg_x.get_xlim(), lim)
    nt.assert_equal(g.ax_marg_y.get_ylim(), lim)
def test_marginal_ticks(self):
    """Tick labels on the marginal axes' shared direction are hidden."""
    g = ag.JointGrid("x", "y", self.data)
    # The original asserted ``~len(...)``, but bitwise NOT of any
    # non-negative length is nonzero/truthy (~0 == -1), so the test could
    # never fail.  Check what JointGrid actually does: it hides the tick
    # labels on the marginal axes.
    nt.assert_true(not any(
        l.get_visible() for l in g.ax_marg_x.get_xticklabels()))
    nt.assert_true(not any(
        l.get_visible() for l in g.ax_marg_y.get_yticklabels()))
    plt.close("all")
def test_bivariate_plot(self):
    """plot_joint draws on the joint axes with the grid's x and y."""
    g = ag.JointGrid("x", "y", self.data)
    g.plot_joint(plt.plot)
    x, y = g.ax_joint.lines[0].get_xydata().T
    npt.assert_array_equal(x, self.x)
    npt.assert_array_equal(y, self.y)
    plt.close("all")
def test_univariate_plot(self):
    """plot_marginals draws the same density on both marginal axes
    (the y marginal is rotated, so its axes are swapped)."""
    g = ag.JointGrid("x", "x", self.data)
    g.plot_marginals(kdeplot)
    _, y1 = g.ax_marg_x.lines[0].get_xydata().T
    y2, _ = g.ax_marg_y.lines[0].get_xydata().T
    npt.assert_array_equal(y1, y2)
    plt.close("all")
def test_plot(self):
    """plot() combines plot_joint and plot_marginals in one call."""
    g = ag.JointGrid("x", "x", self.data)
    g.plot(plt.plot, kdeplot)
    x, y = g.ax_joint.lines[0].get_xydata().T
    npt.assert_array_equal(x, self.x)
    npt.assert_array_equal(y, self.x)
    _, y1 = g.ax_marg_x.lines[0].get_xydata().T
    y2, _ = g.ax_marg_y.lines[0].get_xydata().T
    npt.assert_array_equal(y1, y2)
    plt.close("all")
def test_annotate(self):
    """annotate formats a statistic of (x, y) into the joint legend."""
    g = ag.JointGrid("x", "y", self.data)
    rp = stats.pearsonr(self.x, self.y)
    g.annotate(stats.pearsonr)
    annotation = g.ax_joint.legend_.texts[0].get_text()
    nt.assert_equal(annotation, "pearsonr = %.2g; p = %.2g" % rp)
    # The displayed statistic name can be overridden.
    g.annotate(stats.pearsonr, stat="correlation")
    annotation = g.ax_joint.legend_.texts[0].get_text()
    nt.assert_equal(annotation, "correlation = %.2g; p = %.2g" % rp)

    def rsquared(x, y):
        # A scalar-returning stat gets the "name = value" template.
        return stats.pearsonr(x, y)[0] ** 2

    r2 = rsquared(self.x, self.y)
    g.annotate(rsquared)
    annotation = g.ax_joint.legend_.texts[0].get_text()
    nt.assert_equal(annotation, "rsquared = %.2g" % r2)
    # A fully custom template receives stat/val/p fields.
    template = "{stat} = {val:.3g} (p = {p:.3g})"
    g.annotate(stats.pearsonr, template=template)
    annotation = g.ax_joint.legend_.texts[0].get_text()
    nt.assert_equal(annotation, template.format(stat="pearsonr",
                                                val=rp[0], p=rp[1]))
    plt.close("all")
def test_space(self):
    """space=0 makes the marginal axes flush with the joint axes."""
    g = ag.JointGrid("x", "y", self.data, space=0)
    joint_bounds = g.ax_joint.bbox.bounds
    marg_x_bounds = g.ax_marg_x.bbox.bounds
    marg_y_bounds = g.ax_marg_y.bbox.bounds
    # Same width as the x marginal, same height as the y marginal.
    nt.assert_equal(joint_bounds[2], marg_x_bounds[2])
    nt.assert_equal(joint_bounds[3], marg_y_bounds[3])
@classmethod
def teardown_class(cls):
    """Ensure that all figures are closed on exit."""
    # Guards against tests that fail before reaching their own plt.close().
    plt.close("all")
| bsd-3-clause |
bhargav2408/kivy | kivy/input/postproc/calibration.py | 37 | 3137 | '''
Calibration
===========
.. versionadded:: 1.9.0
Recalibrate input device to a specific range / offset.
Let's say you have 3 1080p displays, the 2 firsts are multitouch. By default,
both will have mixed touch, the range will conflict with each others: the 0-1
range will goes to 0-5760 px (remember, 3 * 1920 = 5760.)
To fix it, you need to manually reference them. For example::
[input]
left = mtdev,/dev/input/event17
middle = mtdev,/dev/input/event15
# the right screen is just a display.
Then, you can use the calibration postproc module::
[postproc:calibration]
left = xratio=0.3333
middle = xratio=0.3333,xoffset=0.3333
Now, the touches from the left screen will be within 0-0.3333 range, and the
touches from the middle screen will be within 0.3333-0.6666 range.
'''
__all__ = ('InputPostprocCalibration', )
from kivy.config import Config
from kivy.logger import Logger
class InputPostprocCalibration(object):
    '''Recalibrate the inputs.

    The configuration must go within a section named `postproc:calibration`.
    Within the section, you must have line like::

        devicename = param=value,param=value

    :Parameters:
        `xratio`: float
            Value to multiply X
        `yratio`: float
            Value to multiply Y
        `xoffset`: float
            Value to add to X
        `yoffset`: float
            Value to add to Y
    '''

    def __init__(self):
        super(InputPostprocCalibration, self).__init__()
        # Per-device calibration parameters, keyed by the device name
        # as configured in the [input] section.
        self.devices = {}
        # Monotonic frame counter, used to recognize events that
        # providers reuse for both "update" and "end" in one frame.
        self.frame = 0
        if not Config.has_section('postproc:calibration'):
            return
        default_params = {'xoffset': 0, 'yoffset': 0, 'xratio': 1, 'yratio': 1}
        for device_key, params_str in Config.items('postproc:calibration'):
            params = default_params.copy()
            for param in params_str.split(','):
                param = param.strip()
                if not param:
                    continue
                key, value = param.split('=', 1)
                if key not in ('xoffset', 'yoffset', 'xratio', 'yratio'):
                    Logger.error(
                        'Calibration: invalid key provided: {}'.format(key))
                    # Skip the unknown key instead of storing it: the
                    # original fell through, polluting ``params`` and
                    # letting float() raise on arbitrary config values.
                    continue
                params[key] = float(value)
            self.devices[device_key] = params

    def process(self, events):
        '''Apply the configured ratio/offset to events from known devices
        and return the (mutated) event list.
        '''
        # avoid doing any processing if there is no device to calibrate at all.
        if not self.devices:
            return events
        self.frame += 1
        frame = self.frame
        for etype, event in events:
            if event.device not in self.devices:
                continue
            # some providers use the same event to update and end
            if 'calibration:frame' not in event.ud:
                event.ud['calibration:frame'] = frame
            elif event.ud['calibration:frame'] == frame:
                continue
            params = self.devices[event.device]
            event.sx = event.sx * params['xratio'] + params['xoffset']
            event.sy = event.sy * params['yratio'] + params['yoffset']
            event.ud['calibration:frame'] = frame
        return events
| mit |
sebadiaz/rethinkdb | external/re2_20140111/re2/unicode.py | 119 | 7917 | # Copyright 2008 The RE2 Authors. All Rights Reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
"""Parser for Unicode data files (as distributed by unicode.org)."""
import os
import re
import urllib2
# Directory or URL where Unicode tables reside.
_UNICODE_DIR = "http://www.unicode.org/Public/6.3.0/ucd"
# Largest valid Unicode code value.
_RUNE_MAX = 0x10FFFF
class Error(Exception):
  """Unicode error base class."""
class InputError(Error):
  """Unicode input error class. Raised on invalid input."""
def _UInt(s):
  """Converts string to Unicode code point ('263A' => 0x263a).

  Args:
    s: string to convert

  Returns:
    Unicode code point

  Raises:
    InputError: the string is not a valid Unicode value.
  """
  try:
    value = int(s, 16)
  except ValueError:
    value = -1
  # Valid inputs are 4-6 hex digits naming a code point in range.
  if not (4 <= len(s) <= 6 and 0 <= value <= _RUNE_MAX):
    raise InputError("invalid Unicode value %s" % (s,))
  return value
def _URange(s):
  """Converts string to Unicode range.

    '0001..0003' => [1, 2, 3].
    '0001' => [1].

  Args:
    s: string to convert

  Returns:
    Unicode range

  Raises:
    InputError: the string is not a valid Unicode range.
  """
  endpoints = s.split("..")
  if len(endpoints) == 1:
    return [_UInt(endpoints[0])]
  if len(endpoints) == 2:
    lo = _UInt(endpoints[0])
    hi = _UInt(endpoints[1])
    if lo < hi:
      return range(lo, hi + 1)
  # Wrong number of ".." parts, or a non-increasing range.
  raise InputError("invalid Unicode range %s" % (s,))
def _UStr(v):
  """Converts Unicode code point to hex string.

    0x263a => '0x263A'.

  Args:
    v: code point to convert

  Returns:
    Unicode string

  Raises:
    InputError: the argument is not a valid Unicode value.
  """
  if not (0 <= v <= _RUNE_MAX):
    raise InputError("invalid Unicode value %s" % (v,))
  return "0x%04X" % (v,)
def _ParseContinue(s):
"""Parses a Unicode continuation field.
These are of the form '<Name, First>' or '<Name, Last>'.
Instead of giving an explicit range in a single table entry,
some Unicode tables use two entries, one for the first
code value in the range and one for the last.
The first entry's description is '<Name, First>' instead of 'Name'
and the second is '<Name, Last>'.
'<Name, First>' => ('Name', 'First')
'<Name, Last>' => ('Name', 'Last')
'Anything else' => ('Anything else', None)
Args:
s: continuation field string
Returns:
pair: name and ('First', 'Last', or None)
"""
match = re.match("<(.*), (First|Last)>", s)
if match is not None:
return match.groups()
return (s, None)
def ReadUnicodeTable(filename, nfields, doline):
    """Generic Unicode table text file reader.

    The reader takes care of stripping out comments and also
    parsing the two different ways that the Unicode tables specify
    code ranges (using the .. notation and splitting the range across
    multiple lines).

    Each non-comment line in the table is expected to have the given
    number of fields. The first field is known to be the Unicode value
    and the second field its description.

    The reader calls doline(codes, fields) for each entry in the table.
    If doline raises an exception, the reader prints that exception,
    prefixed with the file name and line number, and re-raises it
    immediately (processing does not continue past the failing line).

    Arguments:
      filename: the Unicode data file to read, or a file-like object.
      nfields: the number of expected fields per line in that file.
      doline: the function to call for each table entry.

    Raises:
      InputError: nfields is invalid (must be >= 2).
    """
    if nfields < 2:
        raise InputError("invalid number of fields %d" % (nfields,))
    # Accept a path/URL or an already-open file-like object.  Only close the
    # stream if we opened it ourselves (the original version leaked it).
    close_when_done = False
    if isinstance(filename, str):
        if filename.startswith("http://"):
            fil = urllib2.urlopen(filename)
        else:
            fil = open(filename, "r")
        close_when_done = True
    else:
        fil = filename
    first = None        # first code in multiline range
    expect_last = None  # tag expected for "Last" line in multiline range
    lineno = 0          # current line number
    try:
        for line in fil:
            lineno += 1
            try:
                # Chop # comments and white space; ignore empty lines.
                sharp = line.find("#")
                if sharp >= 0:
                    line = line[:sharp]
                line = line.strip()
                if not line:
                    continue
                # Split fields on ";", chop more white space.
                # Must have the expected number of fields.
                fields = [s.strip() for s in line.split(";")]
                if len(fields) != nfields:
                    raise InputError("wrong number of fields %d %d - %s" %
                                     (len(fields), nfields, line))
                # The Unicode text files have two different ways
                # to list a Unicode range.  Either the first field is
                # itself a range (0000..FFFF), or the range is split
                # across two lines, with the second field noting
                # the continuation.
                codes = _URange(fields[0])
                (name, cont) = _ParseContinue(fields[1])
                if expect_last is not None:
                    # If the last line gave the First code in a range,
                    # this one had better give the Last one.
                    if (len(codes) != 1 or codes[0] <= first or
                            cont != "Last" or name != expect_last):
                        raise InputError("expected Last line for %s" %
                                         (expect_last,))
                    codes = range(first, codes[0] + 1)
                    first = None
                    expect_last = None
                    fields[0] = "%04X..%04X" % (codes[0], codes[-1])
                    fields[1] = name
                elif cont == "First":
                    # Otherwise, if this is the First code in a range,
                    # remember it and go to the next line.
                    if len(codes) != 1:
                        raise InputError("bad First line: range given")
                    expect_last = name
                    first = codes[0]
                    continue
                doline(codes, fields)
            except Exception as e:
                # "except Exception, e" was Python-2-only syntax; the "as"
                # form works on 2.6+ and 3.x, and print() with a single
                # argument is valid in both.
                print("%s:%d: %s" % (filename, lineno, e))
                raise
        if expect_last is not None:
            raise InputError("expected Last line for %s; got EOF" %
                             (expect_last,))
    finally:
        if close_when_done:
            fil.close()
def CaseGroups(unicode_dir=_UNICODE_DIR):
    """Returns Unicode code groups equivalent under case folding.

    Each group is a sorted list of code points,
    and the list of groups is sorted by first code point
    in the group.

    Args:
      unicode_dir: Unicode data directory

    Returns:
      (togroup, groups) pair: togroup maps each lowercase code point to its
      fold-equivalent group, and groups is the sorted list of all groups.
      (The previous docstring claimed only the list was returned.)
    """
    # Dict mapping lowercase code point to fold-equivalent group.
    togroup = {}

    def DoLine(codes, fields):
        """Process single CaseFolding.txt line, updating togroup."""
        (_, foldtype, lower, _) = fields
        # Only common (C) and simple (S) foldings define the one-to-one
        # equivalences we care about; full (F) and Turkic (T) are skipped.
        if foldtype not in ("C", "S"):
            return
        lower = _UInt(lower)
        togroup.setdefault(lower, [lower]).extend(codes)

    ReadUnicodeTable(unicode_dir+"/CaseFolding.txt", 4, DoLine)
    # NOTE(review): this relies on Python 2 semantics where values() is a
    # list; under Python 3 it is a view and groups.sort() would fail.
    groups = togroup.values()
    for g in groups:
        g.sort()
    groups.sort()
    return togroup, groups
def Scripts(unicode_dir=_UNICODE_DIR):
    """Returns dict mapping script names to code lists.

    Args:
      unicode_dir: Unicode data directory

    Returns:
      dict mapping script names to code lists
    """
    by_script = {}

    def record(codes, fields):
        """Process single Scripts.txt line, updating by_script."""
        _, script_name = fields
        by_script.setdefault(script_name, []).extend(codes)

    ReadUnicodeTable(unicode_dir + "/Scripts.txt", 2, record)
    return by_script
def Categories(unicode_dir=_UNICODE_DIR):
    """Returns dict mapping category names to code lists.

    Args:
      unicode_dir: Unicode data directory

    Returns:
      dict mapping category names to code lists
    """
    by_category = {}

    def record(codes, fields):
        """Process single UnicodeData.txt line, updating by_category."""
        category = fields[2]
        by_category.setdefault(category, []).extend(codes)
        # Also accumulate under the one-letter super-category
        # (e.g. codes in Lu are added to L as well).
        if len(category) > 1:
            by_category.setdefault(category[0], []).extend(codes)

    ReadUnicodeTable(unicode_dir + "/UnicodeData.txt", 15, record)
    return by_category
| agpl-3.0 |
Southpaw-TACTIC/Team | src/python/Lib/site-packages/PySide/examples/demos/qtdemo/demotextitem.py | 1 | 2157 | from PySide import QtCore, QtGui
from demoitem import DemoItem
class DemoTextItem(DemoItem):
    """Scene item that displays a piece of (possibly rich) text.

    STATIC_TEXT items are rendered once into a cached image via
    createImage(); DYNAMIC_TEXT items return no image and are repainted
    directly in paint() so their text can change between frames.
    """

    # Rendering modes: pre-rendered cached image vs. repainted every frame.
    STATIC_TEXT, DYNAMIC_TEXT = range(2)

    def __init__(self, text, font, textColor, textWidth, scene=None,
                 parent=None, type=STATIC_TEXT, bgColor=QtGui.QColor()):
        # NOTE(review): 'type' shadows the builtin, and the QColor() default
        # is a shared mutable Qt object; both kept for API compatibility.
        super(DemoTextItem, self).__init__(scene, parent)
        self.type = type
        self.text = text
        self.font = font
        self.textColor = textColor
        self.bgColor = bgColor
        self.textWidth = textWidth
        self.noSubPixeling = True

    def setText(self, text):
        """Replace the displayed text and schedule a repaint."""
        self.text = text
        self.update()

    def createImage(self, matrix):
        """Render static text into an image scaled by *matrix*.

        Returns None for dynamic text, which is painted directly in paint().
        """
        if self.type == DemoTextItem.DYNAMIC_TEXT:
            return None
        # Scale uniformly by the smaller of the two axes, but never shrink
        # the vertical axis below that.
        sx = min(matrix.m11(), matrix.m22())
        sy = max(matrix.m22(), sx)
        textItem = QtGui.QGraphicsTextItem()
        textItem.setHtml(self.text)
        textItem.setTextWidth(self.textWidth)
        textItem.setFont(self.font)
        textItem.setDefaultTextColor(self.textColor)
        textItem.document().setDocumentMargin(2)
        w = textItem.boundingRect().width()
        h = textItem.boundingRect().height()
        image = QtGui.QImage(int(w * sx), int(h * sy),
                             QtGui.QImage.Format_ARGB32_Premultiplied)
        image.fill(QtGui.QColor(0, 0, 0, 0).rgba())  # transparent background
        painter = QtGui.QPainter(image)
        painter.scale(sx, sy)
        style = QtGui.QStyleOptionGraphicsItem()
        textItem.paint(painter, style, None)
        return image

    def animationStarted(self, id=0):
        # Allow sub-pixel positioning while the item is moving.
        self.noSubPixeling = False

    def animationStopped(self, id=0):
        # Snap back to whole-pixel positioning once movement stops.
        self.noSubPixeling = True

    def boundingRect(self):
        if self.type == DemoTextItem.STATIC_TEXT:
            return super(DemoTextItem, self).boundingRect()
        # Sorry for using magic number.
        return QtCore.QRectF(0, 0, 50, 20)

    def paint(self, painter, option, widget):
        if self.type == DemoTextItem.STATIC_TEXT:
            super(DemoTextItem, self).paint(painter, option, widget)
            return
        painter.setPen(self.textColor)
        painter.drawText(0, 0, self.text)
| epl-1.0 |
topecz/l10n-spain | l10n_es_aeat_mod303/models/mod303.py | 2 | 9007 | # -*- encoding: utf-8 -*-
##############################################################################
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#
##############################################################################
from openerp import models, fields, api, _
class L10nEsAeatMod303Report(models.Model):
    """AEAT form 303 (Spanish periodic VAT self-assessment) report."""

    _inherit = "l10n.es.aeat.report"
    _name = "l10n.es.aeat.mod303.report"
    _description = "AEAT 303 Report"

    def _get_export_conf(self):
        """Default export configuration, or an empty recordset if the
        XML record is not available."""
        try:
            return self.env.ref(
                'l10n_es_aeat_mod303.aeat_mod303_main_export_config').id
        except ValueError:
            return self.env['aeat.model.export.config']

    def _default_counterpart_303(self):
        """Default counterpart: first non-view account with code 4750*."""
        return self.env['account.account'].search(
            [('code', 'like', '4750%'), ('type', '!=', 'view')])[:1]

    @api.multi
    @api.depends('tax_lines', 'tax_lines.amount')
    def _compute_total_devengado(self):
        """[27] Accrued VAT: sum of the 'devengado' tax-line boxes."""
        casillas_devengado = (3, 6, 9, 11, 13, 15, 18, 21, 24, 26)
        for report in self:
            tax_lines = report.tax_lines.filtered(
                lambda x: x.field_number in casillas_devengado)
            report.total_devengado = sum(tax_lines.mapped('amount'))

    @api.multi
    @api.depends('tax_lines', 'tax_lines.amount')
    def _compute_total_deducir(self):
        """[45] Deductible VAT: sum of the 'deducir' tax-line boxes."""
        casillas_deducir = (29, 31, 33, 35, 37, 39, 41, 42, 43, 44)
        for report in self:
            tax_lines = report.tax_lines.filtered(
                lambda x: x.field_number in casillas_deducir)
            report.total_deducir = sum(tax_lines.mapped('amount'))

    @api.multi
    @api.depends('total_devengado', 'total_deducir')
    def _compute_casilla_46(self):
        """[46] General regime result: accrued minus deductible VAT."""
        for report in self:
            report.casilla_46 = report.total_devengado - report.total_deducir

    @api.multi
    @api.depends('porcentaje_atribuible_estado', 'casilla_46')
    def _compute_atribuible_estado(self):
        """[66] Share of [46] attributable to the State administration."""
        for report in self:
            report.atribuible_estado = (
                report.casilla_46 * report.porcentaje_atribuible_estado / 100)

    @api.multi
    @api.depends('atribuible_estado', 'cuota_compensar',
                 'regularizacion_anual')
    def _compute_casilla_69(self):
        """[69] Result: [66] + [67] + [68]."""
        for report in self:
            report.casilla_69 = (
                report.atribuible_estado + report.cuota_compensar +
                report.regularizacion_anual)

    @api.multi
    @api.depends('casilla_69', 'previous_result')
    def _compute_resultado_liquidacion(self):
        """[71] Final settlement result: [69] - [70]."""
        for report in self:
            report.resultado_liquidacion = (
                report.casilla_69 - report.previous_result)

    currency_id = fields.Many2one(
        comodel_name='res.currency', string='Currency',
        related='company_id.currency_id', store=True)
    number = fields.Char(default='303')
    export_config = fields.Many2one(default=_get_export_conf)
    # BUGFIX: this is a related field, but it was declared with
    # 'relation=' (a parameter fields.Many2one does not use for this),
    # so the field was never linked to company_id.partner_id.  Compare
    # currency_id above, which correctly uses 'related='.
    company_partner_id = fields.Many2one('res.partner', string='Partner',
                                         related='company_id.partner_id',
                                         store=True)
    devolucion_mensual = fields.Boolean(
        string="Devolución mensual", states={'done': [('readonly', True)]},
        help="Inscrito en el Registro de Devolución Mensual")
    total_devengado = fields.Float(
        string="[27] IVA devengado", readonly=True,
        compute="_compute_total_devengado", store=True)
    total_deducir = fields.Float(
        string="[45] IVA a deducir", readonly=True,
        compute="_compute_total_deducir", store=True)
    casilla_46 = fields.Float(
        string="[46] Resultado régimen general", readonly=True, store=True,
        help="(IVA devengado - IVA deducible)", compute="_compute_casilla_46")
    porcentaje_atribuible_estado = fields.Float(
        string="[65] % atribuible al Estado",
        states={'done': [('readonly', True)]},
        help="Los sujetos pasivos que tributen conjuntamente a la "
             "Administración del Estado y a las Diputaciones Forales del País "
             "Vasco o a la Comunidad Foral de Navarra, consignarán en esta "
             "casilla el porcentaje del volumen de operaciones en territorio "
             "común. Los demás sujetos pasivos consignarán en esta casilla el "
             "100%", default=100)
    atribuible_estado = fields.Float(
        string="[66] Atribuible a la Administración", readonly=True,
        compute="_compute_atribuible_estado", store=True)
    cuota_compensar = fields.Float(
        string="[67] Cuotas a compensar", default=0,
        states={'done': [('readonly', True)]},
        help="Cuota a compensar de periodos anteriores, en los que su "
             "declaración fue a devolver y se escogió la opción de "
             "compensación posterior")
    regularizacion_anual = fields.Float(
        string="[68] Regularización anual",
        states={'done': [('readonly', True)]},
        help="En la última autoliquidación del año (la del período 4T o mes "
             "12) se hará constar, con el signo que corresponda, el resultado "
             "de la regularización anual conforme disponen las Leyes por las "
             "que se aprueban el Concierto Económico entre el Estado y la "
             "Comunidad Autónoma del País Vasco y el Convenio Económico entre "
             "el Estado y la Comunidad Foral de Navarra.")
    casilla_69 = fields.Float(
        string="[69] Resultado", readonly=True, compute="_compute_casilla_69",
        help="Atribuible a la Administración [66] - Cuotas a compensar [67] + "
             "Regularización anual [68]", store=True)
    previous_result = fields.Float(
        string="[70] A deducir",
        help="Resultado de la anterior o anteriores declaraciones del mismo "
             "concepto, ejercicio y periodo",
        states={'done': [('readonly', True)]})
    resultado_liquidacion = fields.Float(
        string="[71] Result. liquidación", readonly=True,
        compute="_compute_resultado_liquidacion", store=True)
    result_type = fields.Selection(
        selection=[('I', 'A ingresar'),
                   ('D', 'A devolver'),
                   ('N', 'Sin actividad/Resultado cero')],
        compute="_compute_result_type")
    compensate = fields.Boolean(
        string="Compensate", states={'done': [('readonly', True)]},
        help="Si se marca, indicará que el importe a devolver se compensará "
             "en posteriores declaraciones")
    bank_account = fields.Many2one(
        comodel_name="res.partner.bank", string="Bank account",
        states={'done': [('readonly', True)]})
    counterpart_account = fields.Many2one(default=_default_counterpart_303)
    allow_posting = fields.Boolean(default=True)

    def __init__(self, pool, cr):
        # Register the AEAT model number before the standard setup runs.
        self._aeat_number = '303'
        super(L10nEsAeatMod303Report, self).__init__(pool, cr)

    @api.one
    def _compute_allow_posting(self):
        # NOTE(review): allow_posting is a plain default=True field and does
        # not reference this compute method; kept for API compatibility.
        self.allow_posting = True

    @api.one
    @api.depends('resultado_liquidacion')
    def _compute_result_type(self):
        """Classify the settlement: to pay (I), to refund (D) or zero (N)."""
        if self.resultado_liquidacion == 0:
            self.result_type = 'N'
        elif self.resultado_liquidacion > 0:
            self.result_type = 'I'
        else:
            self.result_type = 'D'

    @api.onchange('period_type', 'fiscalyear_id')
    def onchange_period_type(self):
        super(L10nEsAeatMod303Report, self).onchange_period_type()
        # The annual regularization box only applies to the last period
        # of the year (4th quarter or 12th month).
        if self.period_type not in ('4T', '12'):
            self.regularizacion_anual = 0

    @api.onchange('type')
    def onchange_type(self):
        # Previous declarations are only deducted on complementary reports.
        if self.type != 'C':
            self.previous_result = 0

    @api.onchange('result_type')
    def onchange_result_type(self):
        # BUGFIX: the original compared against 'B', a value result_type can
        # never take (the selection is I/D/N), so 'compensate' was cleared on
        # every change.  Compensation only makes sense for a refund ('D').
        if self.result_type != 'D':
            self.compensate = False

    @api.multi
    def button_confirm(self):
        """Check records"""
        msg = ""
        for mod303 in self:
            if mod303.result_type == 'I' and not mod303.bank_account:
                msg = _('Select an account for making the charge')
            # BUGFIX: the original tested "result_type == 'B'" (impossible
            # value, see onchange_result_type) "and not not bank_account",
            # i.e. it flagged the account being *present*.  A refund ('D')
            # with a *missing* bank account is the case to warn about.
            if mod303.result_type == 'D' and not mod303.bank_account:
                msg = _('Select an account for receiving the money')
        if msg:
            # Don't raise error, because data is not used
            # raise exceptions.Warning(msg)
            pass
        return super(L10nEsAeatMod303Report, self).button_confirm()
| agpl-3.0 |
RedhawkSDR/integration-gnuhawk | gnuradio/gr-qtgui/apps/usrp_display_qtgui.py | 91 | 12017 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'usrp_display_qtgui.ui'
#
# Created: Thu Jul 16 22:06:24 2009
# by: PyQt4 UI code generator 4.4.3
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
class Ui_MainWindow(object):
    """UI class generated by pyuic4 from usrp_display_qtgui.ui.

    NOTE(review): this code is auto-generated ("All changes made in this
    file will be lost!"); edit the .ui file and regenerate instead of
    modifying it by hand.
    """

    def setupUi(self, MainWindow):
        """Build the widget tree, layouts, menus and signal connections."""
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(820, 774)
        self.centralwidget = QtGui.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        self.gridLayout_2 = QtGui.QGridLayout(self.centralwidget)
        self.gridLayout_2.setObjectName("gridLayout_2")
        self.horizontalLayout_2 = QtGui.QHBoxLayout()
        self.horizontalLayout_2.setObjectName("horizontalLayout_2")
        # Receiver-parameters form (frequency / gain / bandwidth / amplifier).
        self.groupBox = QtGui.QGroupBox(self.centralwidget)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.groupBox.sizePolicy().hasHeightForWidth())
        self.groupBox.setSizePolicy(sizePolicy)
        self.groupBox.setMinimumSize(QtCore.QSize(240, 150))
        self.groupBox.setMaximumSize(QtCore.QSize(240, 16777215))
        self.groupBox.setObjectName("groupBox")
        self.formLayoutWidget = QtGui.QWidget(self.groupBox)
        self.formLayoutWidget.setGeometry(QtCore.QRect(10, 20, 221, 124))
        self.formLayoutWidget.setObjectName("formLayoutWidget")
        self.formLayout = QtGui.QFormLayout(self.formLayoutWidget)
        self.formLayout.setObjectName("formLayout")
        self.frequencyLabel = QtGui.QLabel(self.formLayoutWidget)
        self.frequencyLabel.setObjectName("frequencyLabel")
        self.formLayout.setWidget(0, QtGui.QFormLayout.LabelRole, self.frequencyLabel)
        self.gainLabel = QtGui.QLabel(self.formLayoutWidget)
        self.gainLabel.setObjectName("gainLabel")
        self.formLayout.setWidget(1, QtGui.QFormLayout.LabelRole, self.gainLabel)
        self.bandwidthLabel = QtGui.QLabel(self.formLayoutWidget)
        self.bandwidthLabel.setObjectName("bandwidthLabel")
        self.formLayout.setWidget(2, QtGui.QFormLayout.LabelRole, self.bandwidthLabel)
        self.frequencyEdit = QtGui.QLineEdit(self.formLayoutWidget)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.frequencyEdit.sizePolicy().hasHeightForWidth())
        self.frequencyEdit.setSizePolicy(sizePolicy)
        self.frequencyEdit.setMinimumSize(QtCore.QSize(120, 26))
        self.frequencyEdit.setObjectName("frequencyEdit")
        self.formLayout.setWidget(0, QtGui.QFormLayout.FieldRole, self.frequencyEdit)
        self.gainEdit = QtGui.QLineEdit(self.formLayoutWidget)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.gainEdit.sizePolicy().hasHeightForWidth())
        self.gainEdit.setSizePolicy(sizePolicy)
        self.gainEdit.setMinimumSize(QtCore.QSize(120, 26))
        self.gainEdit.setObjectName("gainEdit")
        self.formLayout.setWidget(1, QtGui.QFormLayout.FieldRole, self.gainEdit)
        self.bandwidthEdit = QtGui.QLineEdit(self.formLayoutWidget)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.bandwidthEdit.sizePolicy().hasHeightForWidth())
        self.bandwidthEdit.setSizePolicy(sizePolicy)
        self.bandwidthEdit.setMinimumSize(QtCore.QSize(120, 26))
        self.bandwidthEdit.setObjectName("bandwidthEdit")
        self.formLayout.setWidget(2, QtGui.QFormLayout.FieldRole, self.bandwidthEdit)
        self.amplifierLabel = QtGui.QLabel(self.formLayoutWidget)
        self.amplifierLabel.setObjectName("amplifierLabel")
        self.formLayout.setWidget(3, QtGui.QFormLayout.LabelRole, self.amplifierLabel)
        self.amplifierEdit = QtGui.QLineEdit(self.formLayoutWidget)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.amplifierEdit.sizePolicy().hasHeightForWidth())
        self.amplifierEdit.setSizePolicy(sizePolicy)
        self.amplifierEdit.setMinimumSize(QtCore.QSize(120, 26))
        self.amplifierEdit.setObjectName("amplifierEdit")
        self.formLayout.setWidget(3, QtGui.QFormLayout.FieldRole, self.amplifierEdit)
        self.horizontalLayout_2.addWidget(self.groupBox)
        # DC-cancellation controls.
        self.frame_2 = QtGui.QFrame(self.centralwidget)
        self.frame_2.setMinimumSize(QtCore.QSize(200, 0))
        self.frame_2.setFrameShape(QtGui.QFrame.StyledPanel)
        self.frame_2.setFrameShadow(QtGui.QFrame.Raised)
        self.frame_2.setObjectName("frame_2")
        self.verticalLayoutWidget = QtGui.QWidget(self.frame_2)
        self.verticalLayoutWidget.setGeometry(QtCore.QRect(10, -1, 191, 151))
        self.verticalLayoutWidget.setObjectName("verticalLayoutWidget")
        self.verticalLayout_3 = QtGui.QVBoxLayout(self.verticalLayoutWidget)
        self.verticalLayout_3.setObjectName("verticalLayout_3")
        self.dcCancelCheckBox = QtGui.QCheckBox(self.verticalLayoutWidget)
        self.dcCancelCheckBox.setObjectName("dcCancelCheckBox")
        self.verticalLayout_3.addWidget(self.dcCancelCheckBox)
        self.horizontalLayout = QtGui.QHBoxLayout()
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.dcGainLabel = QtGui.QLabel(self.verticalLayoutWidget)
        self.dcGainLabel.setObjectName("dcGainLabel")
        self.horizontalLayout.addWidget(self.dcGainLabel)
        self.dcGainEdit = QtGui.QLineEdit(self.verticalLayoutWidget)
        self.dcGainEdit.setObjectName("dcGainEdit")
        self.horizontalLayout.addWidget(self.dcGainEdit)
        self.verticalLayout_3.addLayout(self.horizontalLayout)
        spacerItem = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
        self.verticalLayout_3.addItem(spacerItem)
        self.horizontalLayout_2.addWidget(self.frame_2)
        spacerItem1 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.horizontalLayout_2.addItem(spacerItem1)
        # Pause / Close buttons.
        self.verticalLayout = QtGui.QVBoxLayout()
        self.verticalLayout.setObjectName("verticalLayout")
        spacerItem2 = QtGui.QSpacerItem(20, 80, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Fixed)
        self.verticalLayout.addItem(spacerItem2)
        self.pauseButton = QtGui.QPushButton(self.centralwidget)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.pauseButton.sizePolicy().hasHeightForWidth())
        self.pauseButton.setSizePolicy(sizePolicy)
        self.pauseButton.setObjectName("pauseButton")
        self.verticalLayout.addWidget(self.pauseButton)
        self.closeButton = QtGui.QPushButton(self.centralwidget)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.closeButton.sizePolicy().hasHeightForWidth())
        self.closeButton.setSizePolicy(sizePolicy)
        self.closeButton.setMinimumSize(QtCore.QSize(75, 0))
        self.closeButton.setObjectName("closeButton")
        self.verticalLayout.addWidget(self.closeButton)
        self.horizontalLayout_2.addLayout(self.verticalLayout)
        self.gridLayout_2.addLayout(self.horizontalLayout_2, 1, 0, 1, 1)
        # Plot area: external sink widgets get inserted into sinkLayout.
        self.verticalLayout_2 = QtGui.QVBoxLayout()
        self.verticalLayout_2.setObjectName("verticalLayout_2")
        self.frame = QtGui.QFrame(self.centralwidget)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(1)
        sizePolicy.setHeightForWidth(self.frame.sizePolicy().hasHeightForWidth())
        self.frame.setSizePolicy(sizePolicy)
        self.frame.setMinimumSize(QtCore.QSize(800, 550))
        self.frame.setFrameShape(QtGui.QFrame.StyledPanel)
        self.frame.setFrameShadow(QtGui.QFrame.Raised)
        self.frame.setObjectName("frame")
        self.gridLayout = QtGui.QGridLayout(self.frame)
        self.gridLayout.setObjectName("gridLayout")
        self.sinkLayout = QtGui.QHBoxLayout()
        self.sinkLayout.setObjectName("sinkLayout")
        self.gridLayout.addLayout(self.sinkLayout, 0, 0, 1, 1)
        self.verticalLayout_2.addWidget(self.frame)
        self.gridLayout_2.addLayout(self.verticalLayout_2, 0, 0, 1, 1)
        MainWindow.setCentralWidget(self.centralwidget)
        # Menu bar, status bar and actions.
        self.menubar = QtGui.QMenuBar(MainWindow)
        self.menubar.setGeometry(QtCore.QRect(0, 0, 820, 24))
        self.menubar.setObjectName("menubar")
        self.menuFile = QtGui.QMenu(self.menubar)
        self.menuFile.setObjectName("menuFile")
        MainWindow.setMenuBar(self.menubar)
        self.statusbar = QtGui.QStatusBar(MainWindow)
        self.statusbar.setObjectName("statusbar")
        MainWindow.setStatusBar(self.statusbar)
        self.actionExit = QtGui.QAction(MainWindow)
        self.actionExit.setObjectName("actionExit")
        self.actionSaveData = QtGui.QAction(MainWindow)
        self.actionSaveData.setObjectName("actionSaveData")
        self.menuFile.addAction(self.actionSaveData)
        self.menuFile.addAction(self.actionExit)
        self.menubar.addAction(self.menuFile.menuAction())
        self.retranslateUi(MainWindow)
        QtCore.QObject.connect(self.closeButton, QtCore.SIGNAL("clicked()"), MainWindow.close)
        QtCore.QObject.connect(self.actionExit, QtCore.SIGNAL("triggered()"), MainWindow.close)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)

    def retranslateUi(self, MainWindow):
        """Assign all user-visible strings (kept separate for retranslation)."""
        MainWindow.setWindowTitle(QtGui.QApplication.translate("MainWindow", "USRP Display", None, QtGui.QApplication.UnicodeUTF8))
        self.groupBox.setTitle(QtGui.QApplication.translate("MainWindow", "Receiver Parameters", None, QtGui.QApplication.UnicodeUTF8))
        self.frequencyLabel.setText(QtGui.QApplication.translate("MainWindow", "Frequency (Hz)", None, QtGui.QApplication.UnicodeUTF8))
        self.gainLabel.setText(QtGui.QApplication.translate("MainWindow", "RF Gain", None, QtGui.QApplication.UnicodeUTF8))
        self.bandwidthLabel.setText(QtGui.QApplication.translate("MainWindow", "Bandwidth", None, QtGui.QApplication.UnicodeUTF8))
        self.amplifierLabel.setText(QtGui.QApplication.translate("MainWindow", "Amplifier", None, QtGui.QApplication.UnicodeUTF8))
        self.dcCancelCheckBox.setText(QtGui.QApplication.translate("MainWindow", "Cancel DC", None, QtGui.QApplication.UnicodeUTF8))
        self.dcGainLabel.setText(QtGui.QApplication.translate("MainWindow", "DC Canceller Gain", None, QtGui.QApplication.UnicodeUTF8))
        self.pauseButton.setText(QtGui.QApplication.translate("MainWindow", "Pause", None, QtGui.QApplication.UnicodeUTF8))
        self.closeButton.setText(QtGui.QApplication.translate("MainWindow", "Close", None, QtGui.QApplication.UnicodeUTF8))
        self.menuFile.setTitle(QtGui.QApplication.translate("MainWindow", "&File", None, QtGui.QApplication.UnicodeUTF8))
        self.actionExit.setText(QtGui.QApplication.translate("MainWindow", "E&xit", None, QtGui.QApplication.UnicodeUTF8))
        self.actionSaveData.setText(QtGui.QApplication.translate("MainWindow", "&Save Data", None, QtGui.QApplication.UnicodeUTF8))
chrisgarber/drone_dispatcher | build/lib/drone_dispatcher/drone_dispatcher.py | 2 | 5928 | __version__ = "0.1.0"
import requests
import numpy
import json
import time
# Physical constants and depot configuration.
earthRadius = 6371  # mean Earth radius, km
depoLat = -37.8168  # depot latitude, degrees
depoLon = 144.9622  # depot longitude, degrees
droneSpeed = 50  # cruise speed, km/h


def haversine(theta):
    """Haversine of an angle in radians: sin^2(theta / 2)."""
    return numpy.sin(theta / 2) ** 2


class location:
    """A point on Earth given as latitude/longitude in degrees."""

    def __init__(self, lat, lon):
        # Locations are stored in degrees.
        self.latitude = lat
        self.longitude = lon

    def getDistance(self, loc):
        """Great-circle distance in km between this point and *loc*
        (haversine formula)."""
        phi1, lam1, phi2, lam2 = (
            numpy.radians(v)
            for v in (self.latitude, self.longitude,
                      loc.latitude, loc.longitude))
        a = haversine(phi2 - phi1) + (
            numpy.cos(phi1) * numpy.cos(phi2) * haversine(lam2 - lam1))
        return 2 * earthRadius * numpy.arcsin(numpy.sqrt(a))

    @classmethod
    def from_dict(cls, d):
        """Build a location from a {'latitude': ..., 'longitude': ...} dict."""
        return cls(d['latitude'], d['longitude'])


# The shared depot every drone departs from and returns to.
depoLocation = location(depoLat, depoLon)
class package:
    """A deliverable parcel: destination, identifier and delivery deadline."""

    def __init__(self, destinationLoc, packageId, deadline):
        self.destination = destinationLoc
        self.packageId = packageId
        self.deadline = deadline  # unix time by which delivery must happen

    @classmethod
    def from_dict(cls, d):
        """Build a package from a single /packages JSON entry."""
        return cls(location.from_dict(d['destination']),
                   d['packageId'], d['deadline'])

    @classmethod
    def from_dict_list(cls, l):
        """Build a list of packages from a JSON array of entries."""
        return list(map(cls.from_dict, l))
class drone:
    """A delivery drone with a planned route and an availability time."""

    def __init__(self, time, droneId, location, packages,
                 speed=droneSpeed, depo=depoLocation):
        self.lastKnownLocationTime = time
        self.lastKnownLocation = location
        self.droneId = droneId
        self.plannedDestinations = []  # ordered locations still to visit
        self.timeWhenFree = time       # unix time when the planned route ends
        self.depo = depo
        self.speed = speed
        self.packages = []             # packages assigned to this drone
        self.initPlan(packages)
        self.assigned = False

    @classmethod
    def from_dict(cls, dct, time):
        """Build a drone from one /drones JSON entry observed at *time*."""
        return cls(time, dct['droneId'], location.from_dict(dct['location']),
                   package.from_dict_list(dct['packages']))

    def initPlan(self, packages):
        """Plan the initial route: deliver any onboard packages,
        otherwise just return to the depot."""
        if len(packages) > 0:
            self.assignPackages(packages)
        else:
            self.addDestinations([self.depo])

    def addDestinations(self, destinations):
        """Append *destinations* to the planned route and push timeWhenFree
        back by the travel time of each new leg."""
        for destination in destinations:
            origin = (self.lastKnownLocation
                      if len(self.plannedDestinations) == 0
                      else self.plannedDestinations[-1])
            self.timeWhenFree += self.getTravelTime(destination, origin)
            self.plannedDestinations.append(destination)

    def getTravelTime(self, loc1, loc2):
        """Seconds needed to fly between two locations at this drone's speed
        (distance km / speed km/h * 3600)."""
        return (loc1.getDistance(loc2) / self.speed) * 3600

    def assignPackages(self, packages):
        """Route this drone via each package's destination and back to depo."""
        for pack in packages:
            # BUGFIX: self.packages was initialised but never populated;
            # record the assignment so the drone knows what it carries.
            self.packages.append(pack)
            self.addDestinations([pack.destination, self.depo])
        self.assigned = True

    def getTimePackageWouldBeDelivered(self, pack):
        """Unix time at which *pack* would arrive if assigned to this drone."""
        # BUGFIX: the original comparison was inverted (">"), which reset a
        # *busy* drone's availability to "now" and erased the time already
        # committed to its planned route.  A drone that became free in the
        # past can start now at the earliest, so clamp stale values up.
        if self.timeWhenFree < time.time():
            self.timeWhenFree = time.time()
        start = (self.lastKnownLocation
                 if len(self.plannedDestinations) == 0
                 else self.plannedDestinations[-1])
        return self.getTravelTime(pack.destination, start) + self.timeWhenFree
class drone_dispatcher:
    """Fetches drones and packages from the GetSwift code-test API and
    greedily assigns each package to the free drone that can deliver it
    the soonest before its deadline."""

    def __init__(self):
        # NOTE(review): the constructor performs two network requests;
        # failures surface as requests exceptions.
        self.getDrones()
        self.getPackages()
        self.assignments = []         # assignment objects produced so far
        self.unassignedPackages = []  # packages no drone can deliver in time

    def getDrones(self):
        """Download the drone list and build drone objects from it."""
        response = requests.get("https://codetest.kube.getswift.co/drones")
        responseTime = time.time()
        droneDict = response.json()
        self.drones = [drone.from_dict(d, responseTime) for d in droneDict]
        if len(self.drones) != len(frozenset([d.droneId for d in self.drones])):
            raise ValueError("duplicate drone ids")
        # NOTE(review): set but never read elsewhere; kept for compatibility.
        self.unassignedDrones = len(self.drones)

    def getPackages(self):
        """Download the package list and build package objects from it."""
        response = requests.get("https://codetest.kube.getswift.co/packages")
        self.packages = package.from_dict_list(response.json())
        if len(self.packages) != len(frozenset([pack.packageId for pack in self.packages])):
            raise ValueError("duplicate package ids")

    def dispatchFastest(self, pack):
        """Assign *pack* to the still-free drone able to deliver it soonest,
        or record it as unassignable."""
        # Filter out all the drones assigned already.
        unassignedDrones = [d for d in self.drones if not d.assigned]
        # If we have any drones left...
        if len(unassignedDrones) != 0:
            # Pick the drone that could deliver the package the soonest.
            # NOTE(review): the key function mutates drone.timeWhenFree as a
            # side effect, so the evaluation order here is significant.
            fastestDrone = min(unassignedDrones, key=lambda x: x.getTimePackageWouldBeDelivered(pack))
            # If that is soon enough, record the assignment.
            if fastestDrone.getTimePackageWouldBeDelivered(pack) < pack.deadline:
                self.assignments.append(assignment(fastestDrone.droneId, pack.packageId))
                fastestDrone.assignPackages([pack])
                return
        # Otherwise add it to the bad list.
        self.unassignedPackages.append(pack)

    def dispatchDrones(self):
        """Dispatch every downloaded package, then sanity-check and print
        the resulting assignment lists as JSON."""
        for pack in self.packages:
            self.dispatchFastest(pack)
        # Verify checksums, used mostly for debugging.
        if len(self.packages) != (len(self.assignments) + len(self.unassignedPackages)):
            raise ValueError("packages must be in assignment list or unassigned list")
        assignedDrones = [a.droneId for a in self.assignments]
        if len(assignedDrones)!=len(frozenset(assignedDrones)):
            raise ValueError("Checksum failed. Drones assigned multiple times")
        assignedPackages = [a.packageId for a in self.assignments]
        if len(assignedPackages)!=len(frozenset(assignedPackages)):
            raise ValueError("Checksum failed. Package assigned multiple times")
        print("assignments: " + json.dumps(self.assignments, default=lambda o: o.__dict__))
        print("UnassignedPackageIds: " + json.dumps(self.unassignedPackages, default=lambda o: o.packageId))
class assignment:
    """Pairing of one drone with one package.

    BUGFIX: the original subclassed json.JSONEncoder without ever calling
    its __init__ or using the class as an encoder (serialisation goes
    through json.dumps(..., default=lambda o: o.__dict__)), so the
    inheritance was both unused and broken; this is now a plain data holder.
    """

    def __init__(self, droneId, packageId):
        self.droneId = droneId
        self.packageId = packageId
| gpl-3.0 |
Ichag/odoo | addons/hr_timesheet_invoice/wizard/__init__.py | 433 | 1159 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import hr_timesheet_invoice_create
import hr_timesheet_analytic_profit
import hr_timesheet_final_invoice_create
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
JCA-Developpement/Odoo | addons/portal_project_issue/__openerp__.py | 375 | 1713 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# Odoo/OpenERP module manifest for the Portal Issue addon.
{
    'name': 'Portal Issue',
    'version': '0.1',
    'complexity': 'easy',
    'description': """
This module adds issue menu and features to your portal if project_issue and portal are installed.
==================================================================================================
    """,
    'author': 'OpenERP SA',
    'depends': ['project_issue', 'portal'],
    'data': [
        'security/portal_security.xml',
        'security/ir.model.access.csv',
        'portal_project_issue_view.xml',
        'views/portal_project_issue.xml',
    ],
    'installable': True,
    # Installs automatically as soon as all of its dependencies are present.
    'auto_install': True,
    # BUGFIX: 'category' was declared twice ('Tools' earlier, then
    # 'Hidden'); in a dict literal the last key wins, so the effective
    # value has always been 'Hidden'. The dead duplicate is removed.
    'category': 'Hidden',
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
# Prefer MicroPython's "usocket" module; fall back to CPython's stdlib
# "socket" so the same example runs on both interpreters.
try:
    import usocket as socket
except ImportError:
    # BUGFIX: catch only ImportError. The original bare "except:" would
    # also hide unrelated failures raised while importing usocket.
    import socket

# Raw HTTP/1.0 response template; %d is filled in with a per-request
# counter. The blank line separates the status/header section from the
# body, as HTTP requires.
CONTENT = b"""\
HTTP/1.0 200 OK

Hello #%d from MicroPython!
"""
def main(micropython_optimize=False):
    """Serve a minimal HTTP page with a per-request counter on port 8080.

    When *micropython_optimize* is true, the client socket is used
    directly as a stream (a MicroPython-only shortcut); otherwise it is
    wrapped with makefile() as CPython requires.
    """
    server = socket.socket()

    # Binding to all interfaces - server will be accessible to other hosts!
    addr_info = socket.getaddrinfo("0.0.0.0", 8080)
    print("Bind address info:", addr_info)
    bind_addr = addr_info[0][-1]

    server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    server.bind(bind_addr)
    server.listen(5)
    print("Listening, connect your browser to http://<this_host>:8080/")

    counter = 0
    while True:
        conn, peer = server.accept()
        print("Client address:", peer)
        print("Client socket:", conn)

        if micropython_optimize:
            # MicroPython socket objects support the stream interface
            # directly, so no makefile() wrapper is needed -- a useful
            # saving on resource-constrained embedded devices.
            stream = conn
        else:
            # To read a line-oriented protocol (like HTTP) from a socket
            # (and avoid the short-read problem), it must be wrapped in a
            # stream (aka file-like) object. That's how you do it in
            # CPython.
            stream = conn.makefile("rwb")

        print("Request:")
        request_line = stream.readline()
        print(request_line)
        # Drain the remaining request headers until the blank separator.
        while True:
            header = stream.readline()
            if header == b"" or header == b"\r\n":
                break
            print(header)

        stream.write(CONTENT % counter)
        stream.close()
        if not micropython_optimize:
            conn.close()
        counter += 1
        print()
# Start serving immediately on execution (note: no __main__ guard).
main()
| mit |
teeple/pns_server | work/install/node-v0.10.25/deps/v8/tools/testrunner/objects/testcase.py | 28 | 3512 | # Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from . import output
class TestCase(object):
    """One test to execute: its suite, path, flags, and (later) results.

    Instances travel between processes in a packed (JSON-serializable)
    form; see PackTask/UnpackTask and PackResult/MergeResult.
    """

    def __init__(self, suite, path, flags=[], dependency=None):
        self.suite = suite            # TestSuite object
        self.path = path              # string, e.g. 'div-mod', 'test-api/foo'
        self.flags = flags            # list of strings, flags specific to this test case
        self.dependency = dependency  # |path| for testcase that must be run first
        self.outcomes = None
        self.output = None
        self.id = None                # int, used to map result back to TestCase instance
        self.duration = None          # assigned during execution

    def CopyAddingFlags(self, flags):
        """Return a copy of this test case with *flags* appended."""
        clone = TestCase(self.suite, self.path, self.flags + flags,
                         self.dependency)
        clone.outcomes = self.outcomes
        return clone

    def PackTask(self):
        """
        Extracts those parts of this object that are required to run the test
        and returns them as a JSON serializable object.
        """
        assert self.id is not None
        packed_outcomes = list(self.outcomes or [])
        return [self.suitename(), self.path, self.flags, self.dependency,
                packed_outcomes, self.id]

    @staticmethod
    def UnpackTask(task):
        """Creates a new TestCase object based on packed task data."""
        # Field order must match PackTask() above.
        suitename, path, flags, dependency, outcomes, test_id = task
        unpacked = TestCase(str(suitename), path, flags, dependency)
        unpacked.outcomes = set(outcomes)
        unpacked.id = test_id
        return unpacked

    def SetSuiteObject(self, suites):
        # After UnpackTask, |suite| is still a name; resolve it to the
        # real TestSuite object from the given mapping.
        self.suite = suites[self.suite]

    def PackResult(self):
        """Serializes the output of the TestCase after it has run."""
        self.suite.StripOutputForTransmit(self)
        return [self.id, self.output.Pack(), self.duration]

    def MergeResult(self, result):
        """Applies the contents of a Result to this object."""
        result_id, packed_output, duration = result
        assert result_id == self.id
        self.output = output.Output.Unpack(packed_output)
        self.duration = duration

    def suitename(self):
        return self.suite.name

    def GetLabel(self):
        return self.suitename() + "/" + self.suite.CommonTestName(self)
| gpl-2.0 |
Jumpscale/core9 | JumpScale9/data/serializers/SerializerYAML.py | 1 | 3063 | import yaml
from collections import OrderedDict
from js9 import j
from .SerializerBase import SerializerBase
testtoml="""
name = 'something'
multiline = '''
these are multiple lines
next line
'''
nr = 87
nr2 = 34.4
"""
# from .PrettyYAMLDumper import PrettyYaml
class SerializerYAML(SerializerBase):
    """YAML (de)serializer built on PyYAML.

    Parse failures are wrapped in ``j.exceptions.Input`` so callers get a
    single exception type carrying the offending text as context.
    """

    def __init__(self):
        SerializerBase.__init__(self)

    def dumps(self, obj):
        # Block style output (no inline {}/[]), 4-space indent, "\n" EOLs.
        return yaml.dump(obj, default_flow_style=False, default_style='',indent=4,line_break="\n")

    def loads(self, s):
        """Parse YAML string *s* into Python objects.

        NOTE(review): yaml.load() without an explicit Loader can build
        arbitrary Python objects and is unsafe on untrusted input;
        yaml.safe_load() would be safer if callers only need plain data.
        Left unchanged here to preserve behaviour.
        """
        # out=cStringIO.StringIO(s)
        try:
            return yaml.load(s)
        except Exception as e:
            error = "error:%s\n" % e
            error += "\nyaml could not parse:\n%s\n" % s
            raise j.exceptions.Input(message=error, level=1, source="", tags="", msgpub="")

    def load(self, path):
        """Read the file at *path* and parse its contents as YAML.

        Read errors and parse errors are both surfaced as
        j.exceptions.Input, with the path or the offending text attached.
        """
        try:
            s = j.sal.fs.readFile(path)
        except Exception as e:
            error = "error:%s\n" % e
            error += '\npath:%s\n' % path
            raise j.exceptions.Input(message=error, level=1, source="", tags="", msgpub="")
        try:
            # Same unsafe-Loader caveat as loads() above.
            return yaml.load(s)
        except Exception as e:
            error = "error:%s\n" % e
            error += "\nyaml could not parse:\n%s\n" % s
            raise j.exceptions.Input(message=error, level=1, source="", tags="", msgpub="")

    def ordered_load(self, stream, Loader=yaml.Loader, object_pairs_hook=OrderedDict):
        """
        load a yaml stream and keep the order
        """
        # Subclass the given Loader and route every mapping node through
        # object_pairs_hook (OrderedDict by default) so key order survives.
        class OrderedLoader(Loader):
            pass

        def construct_mapping(loader, node):
            loader.flatten_mapping(node)
            return object_pairs_hook(loader.construct_pairs(node))
        OrderedLoader.add_constructor(
            yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
            construct_mapping)
        return yaml.load(stream, OrderedLoader)

    def ordered_dump(self, data, stream=None, Dumper=yaml.Dumper, **kwds):
        """
        dump a yaml stream with keeping the order
        """
        # Mirror of ordered_load: represent OrderedDict as a plain YAML
        # mapping so its insertion order is written out.
        class OrderedDumper(Dumper):
            pass

        def _dict_representer(dumper, data):
            return dumper.represent_mapping(
                yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
                data.items())
        OrderedDumper.add_representer(OrderedDict, _dict_representer)
        return yaml.dump(data, stream, OrderedDumper, **kwds)

    def test(self):
        # NOTE(review): parses the TOML fixture via the TOML serializer;
        # no YAML functionality is exercised yet.
        ddict=j.data.serializer.toml.loads(testtoml)
        #TODO:*3 write some test
# from js9 import j
# from yaml import load, dump
# try:
# from yaml import CLoader as Loader, CDumper as Dumper
# except ImportError:
# from yaml import Loader, Dumper
# class YAMLTool:
# def decode(self,string):
# """
# decode yaml string to python object
# """
# return load(string)
# def encode(self,obj,width=120):
# """
# encode python (simple) objects to yaml
# """
# return dump(obj, width=width, default_flow_style=False)
#
| apache-2.0 |
fernandezcuesta/ansible | test/units/module_utils/ec2/test_camel_to_snake.py | 35 | 1301 | # -*- coding: utf-8 -*-
# (c) 2017, Will Thames <will.thames@xvt.com.au>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from ansible.compat.tests import unittest
from ansible.module_utils.ec2 import _camel_to_snake
# Expected CamelCase -> snake_case conversions, covering acronym runs
# (HTTP, ARNs) and plural-acronym edge cases handled by _camel_to_snake.
EXPECTED_SNAKIFICATION = {
    'alllower': 'alllower',
    'TwoWords': 'two_words',
    'AllUpperAtEND': 'all_upper_at_end',
    'AllUpperButPLURALs': 'all_upper_but_plurals',
    'TargetGroupARNs': 'target_group_arns',
    'HTTPEndpoints': 'http_endpoints',
    'PLURALs': 'plurals'
}
class CamelToSnakeTestCase(unittest.TestCase):
    """Check _camel_to_snake against every entry in the expected table."""

    def test_camel_to_snake(self):
        for camel, snake in EXPECTED_SNAKIFICATION.items():
            self.assertEqual(_camel_to_snake(camel), snake)
| gpl-3.0 |
smartforceplus/SmartForceplus | addons/account/project/wizard/account_analytic_cost_ledger_for_journal_report.py | 378 | 2209 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import fields, osv
class account_analytic_cost_ledger_journal_report(osv.osv_memory):
    # Transient wizard collecting a date range and analytic journals,
    # then launching the analytic "cost ledger (quantities)" report.
    _name = 'account.analytic.cost.ledger.journal.report'
    _description = 'Account Analytic Cost Ledger For Journal Report'
    _columns = {
        'date1': fields.date('Start of period', required=True),
        'date2': fields.date('End of period', required=True),
        'journal': fields.many2many('account.analytic.journal', 'ledger_journal_rel', 'ledger_id', 'journal_id', 'Journals'),
    }
    # Defaults: from January 1st of the current year up to today.
    _defaults = {
        'date1': lambda *a: time.strftime('%Y-01-01'),
        'date2': lambda *a: time.strftime('%Y-%m-%d')
    }

    def check_report(self, cr, uid, ids, context=None):
        """Build the report payload from the wizard's values and return
        the report action for account.report_analyticcostledgerquantity.

        The selected analytic accounts come from context['active_ids'];
        they are passed both as the record ids and inside the form data.
        """
        if context is None:
            context = {}
        # Only one wizard record is ever processed (ids[0]).
        data = self.read(cr, uid, ids)[0]
        datas = {
            'ids': context.get('active_ids', []),
            'model': 'account.analytic.account',
            'form': data
        }
        datas['form']['active_ids'] = context.get('active_ids', False)
        return self.pool['report'].get_action(cr, uid, [], 'account.report_analyticcostledgerquantity', data=datas, context=context)
| agpl-3.0 |
rolandovillca/python_basic_concepts | web/client_get_with_urllib2.py | 4 | 1905 | '''
urllib2 - Library for opening URLs
A library for opening URLs that can be extended by defining custom protocol
handlers.
The urllib2 module defines functions and classes which help in opening URLs
(mostly HTTP) in a complex world - basic and digest authentication,
redirections, cookies and more.
The urllib2 module provides an updated API for using internet resources
identified by URLs. It is designed to be extended by individual applications to
support new protocols or add variations to existing protocols (such as handling
HTTP basic authentication).
https://pymotw.com/2/urllib2/
'''
import urllib2
# EXAMPLE 1: HTTP GET:
# ==============================================================================
# As with urllib, an HTTP GET operation is the simplest use of urllib2.
# Pass the URL to urlopen() to get a "file-like" handle to the remote data.
url = 'http://www.google.com'
resp = urllib2.urlopen(url)
print 'Response: ', resp
print 'Url: ', resp.geturl()
print 'Code: ', resp.code
# NOTE(review): this read() consumes the whole response body; the handle
# is not seekable, so later reads/iteration on `resp` will see no data.
print 'Html: ', resp.read()
print
headers = resp.info()
print 'Date: ', headers['date']
print 'Server: ', headers['server']
print 'Headers:'
print headers
print
# NOTE(review): the body was already consumed above, so this second
# read() returns '' and 'Length' prints 0 -- presumably unintended in
# the example; a fresh urlopen() would be needed here.
data = resp.read()
print 'Length: ', len(data)
print 'Data:'
print data
print
# EXAMPLE 2: The file-like object returned by urlopen() is iterable:
# ==============================================================================
# NOTE(review): `resp` is exhausted at this point, so this loop prints
# nothing; it only demonstrates the iteration API.
for line in resp:
    print line.rstrip()
# EXAMPLE 3: Encoding Arguments:
# ==============================================================================
# Arguments can be passed to the server by encoding them with urllib.urlencode()
# and appending them to the URL.
import urllib
url = 'http://www.google.com?'
query_args = { 'q':'query string', 'foo':'bar' }
encoded_args = urllib.urlencode(query_args)
print 'Encoded: ', encoded_args
url = url + encoded_args
print urllib2.urlopen(url).read()
jasimpson/gnuradio-jasimpson | gr-trellis/src/examples/test_sccc_turbo1.py | 2 | 3899 | #!/usr/bin/env python
from gnuradio import gr
from gnuradio import trellis
from gnuradio import eng_notation
import math
import sys
import random
import fsm_utils
def run_test (fo,fi,interleaver,Kb,bitspersymbol,K,dimensionality,constellation,Es,N0,IT,seed):
    """Build and run one TX -> AWGN channel -> SCCC turbo RX flowgraph.

    Returns (ntotal, nerrors): the number of shorts transmitted and the
    number received in error, as counted by the LFSR checker sink.
    """
    tb = gr.top_block ()

    # TX
    src = gr.lfsr_32k_source_s()
    src_head = gr.head (gr.sizeof_short,Kb/16) # packet size in shorts
    s2fsmi = gr.packed_to_unpacked_ss(bitspersymbol,gr.GR_MSB_FIRST) # unpack shorts to symbols compatible with the outer FSM input cardinality
    enc = trellis.sccc_encoder_ss(fo,0,fi,0,interleaver,K)
    mod = gr.chunks_to_symbols_sf(constellation,dimensionality)

    # CHANNEL
    add = gr.add_ff()
    # Gaussian noise with per-dimension std dev sqrt(N0/2); `seed` varies
    # per repetition so each run sees a different noise realization.
    noise = gr.noise_source_f(gr.GR_GAUSSIAN,math.sqrt(N0/2),seed)

    # RX
    # Combined SCCC turbo decoder: IT iterations, min-sum metric,
    # Euclidean branch metrics against the same constellation.
    dec = trellis.sccc_decoder_combined_fs(fo,0,-1,fi,0,-1,interleaver,K,IT,trellis.TRELLIS_MIN_SUM,dimensionality,constellation,trellis.TRELLIS_EUCLIDEAN,1.0)
    fsmi2s = gr.unpacked_to_packed_ss(bitspersymbol,gr.GR_MSB_FIRST) # pack FSM input symbols to shorts
    dst = gr.check_lfsr_32k_s()

    #tb.connect (src,src_head,s2fsmi,enc_out,inter,enc_in,mod)
    tb.connect (src,src_head,s2fsmi,enc,mod)
    tb.connect (mod,(add,0))
    tb.connect (noise,(add,1))
    #tb.connect (add,head)
    #tb.connect (tail,fsmi2s,dst)
    tb.connect (add,dec,fsmi2s,dst)

    # Blocks until src_head has delivered the whole packet.
    tb.run()

    #print enc_out.ST(), enc_in.ST()

    ntotal = dst.ntotal ()
    nright = dst.nright ()
    runlength = dst.runlength ()  # NOTE(review): computed but unused
    return (ntotal,ntotal-nright)
def main(args):
nargs = len (args)
if nargs == 5:
fname_out=args[0]
fname_in=args[1]
esn0_db=float(args[2]) # Es/No in dB
IT=int(args[3])
rep=int(args[4]) # number of times the experiment is run to collect enough errors
else:
sys.stderr.write ('usage: test_tcm.py fsm_name_out fsm_fname_in Es/No_db iterations repetitions\n')
sys.exit (1)
# system parameters
Kb=1024*16 # packet size in bits (make it multiple of 16 so it can be packed in a short)
fo=trellis.fsm(fname_out) # get the outer FSM specification from a file
fi=trellis.fsm(fname_in) # get the innner FSM specification from a file
bitspersymbol = int(round(math.log(fo.I())/math.log(2))) # bits per FSM input symbol
if fo.O() != fi.I():
sys.stderr.write ('Incompatible cardinality between outer and inner FSM.\n')
sys.exit (1)
K=Kb/bitspersymbol # packet size in trellis steps
interleaver=trellis.interleaver(K,666) # construct a random interleaver
modulation = fsm_utils.psk8 # see fsm_utlis.py for available predefined modulations
dimensionality = modulation[0]
constellation = modulation[1]
if len(constellation)/dimensionality != fi.O():
sys.stderr.write ('Incompatible FSM output cardinality and modulation size.\n')
sys.exit (1)
# calculate average symbol energy
Es = 0
for i in range(len(constellation)):
Es = Es + constellation[i]**2
Es = Es / (len(constellation)/dimensionality)
N0=Es/pow(10.0,esn0_db/10.0); # calculate noise variance
tot_s=0 # total number of transmitted shorts
terr_s=0 # total number of shorts in error
terr_p=0 # total number of packets in error
for i in range(rep):
(s,e)=run_test(fo,fi,interleaver,Kb,bitspersymbol,K,dimensionality,constellation,Es,N0,IT,-long(666+i)) # run experiment with different seed to get different noise realizations
tot_s=tot_s+s
terr_s=terr_s+e
terr_p=terr_p+(terr_s!=0)
if ((i+1)%10==0): # display progress
print i+1,terr_p, '%.2e' % ((1.0*terr_p)/(i+1)),tot_s,terr_s, '%.2e' % ((1.0*terr_s)/tot_s)
# estimate of the (short or bit) error rate
print rep,terr_p, '%.2e' % ((1.0*terr_p)/(i+1)),tot_s,terr_s, '%.2e' % ((1.0*terr_s)/tot_s)
# Script entry point: forward the CLI arguments (minus the program name).
if __name__ == '__main__':
    main (sys.argv[1:])
| gpl-3.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.