repo_name stringlengths 5 100 | path stringlengths 4 375 | copies stringclasses 991 values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15 values |
|---|---|---|---|---|---|
jreese/aiosqlite | aiosqlite/context.py | 1 | 1402 | # Copyright 2018
# Licensed under the MIT license
from functools import wraps
from typing import Any, Callable, Coroutine, Generator, TypeVar
from typing_extensions import AsyncContextManager
from .cursor import Cursor
# Result type of the wrapped coroutine.
_T = TypeVar("_T")
class Result(AsyncContextManager[_T], Coroutine[Any, Any, _T]):
    """Awaitable that can also be used as an async context manager.

    Wraps a coroutine so callers may either ``await`` it directly or use
    ``async with``; in the latter case a ``Cursor`` result is closed on
    exit of the ``async with`` block.
    """

    __slots__ = ("_coro", "_obj")

    def __init__(self, coro: Coroutine[Any, Any, _T]):
        self._coro = coro
        self._obj: _T

    def send(self, value) -> Any:
        # BUG FIX: was annotated "-> None" although the value yielded by
        # the wrapped coroutine is returned (Coroutine protocol).
        return self._coro.send(value)

    def throw(self, typ, val=None, tb=None) -> Any:
        # Mirror the 1/2/3-argument forms of Coroutine.throw(); like send(),
        # this returns the next yielded value (annotation fixed from None).
        if val is None:
            return self._coro.throw(typ)
        if tb is None:
            return self._coro.throw(typ, val)
        return self._coro.throw(typ, val, tb)

    def close(self) -> None:
        return self._coro.close()

    def __await__(self) -> Generator[Any, None, _T]:
        return self._coro.__await__()

    async def __aenter__(self) -> _T:
        self._obj = await self._coro
        return self._obj

    async def __aexit__(self, exc_type, exc, tb) -> None:
        # Only cursors need explicit cleanup; other results are left as-is.
        if isinstance(self._obj, Cursor):
            await self._obj.close()
def contextmanager(
    method: Callable[..., Coroutine[Any, Any, _T]]
) -> Callable[..., Result[_T]]:
    """Decorate an async method so it returns a :class:`Result`.

    The decorated method can then be awaited directly or used as an
    ``async with`` context manager.
    """

    @wraps(method)
    def wrapper(self, *args, **kwargs) -> Result[_T]:
        coro = method(self, *args, **kwargs)
        return Result(coro)

    return wrapper
| mit |
dincamihai/django-allauth | allauth/socialaccount/templatetags/socialaccount.py | 46 | 2979 | from django.template.defaulttags import token_kwargs
from django import template
from allauth.socialaccount import providers
from allauth.utils import get_request_param
# Library instance collecting the template tags defined in this module.
register = template.Library()
class ProviderLoginURLNode(template.Node):
    """Template node rendering the login URL for a social auth provider."""

    def __init__(self, provider_id, params):
        self.provider_id_var = template.Variable(provider_id)
        self.params = params

    def render(self, context):
        provider_id = self.provider_id_var.resolve(context)
        provider = providers.registry.by_id(provider_id)
        # Resolve every tag keyword argument against the current context.
        query = dict((str(name), var.resolve(context))
                     for name, var in self.params.items())
        request = context['request']
        auth_params = query.get('auth_params', None)
        scope = query.get('scope', None)
        process = query.get('process', None)
        # BUG FIX: the original compared with "is ''" (identity); values
        # resolved from template variables are not guaranteed to be the
        # interned empty-string object, so use equality instead.
        if scope == '':
            del query['scope']
        if auth_params == '':
            del query['auth_params']
        if 'next' not in query:
            # Fall back to ?next= from the request, then to the current
            # path when we are in a redirect flow.  (Renamed from "next"
            # to avoid shadowing the builtin.)
            next_url = get_request_param(request, 'next')
            if next_url:
                query['next'] = next_url
            elif process == 'redirect':
                query['next'] = request.get_full_path()
        else:
            if not query['next']:
                del query['next']
        # get the login url and append query as url parameters
        return provider.get_login_url(request, **query)
@register.tag
def provider_login_url(parser, token):
    """
    {% provider_login_url "facebook" next=bla %}
    {% provider_login_url "openid" openid="http://me.yahoo.com" next=bla %}
    """
    bits = token.split_contents()
    # First bit is the tag name, second the provider id, rest are kwargs.
    provider_id, remainder = bits[1], bits[2:]
    params = token_kwargs(remainder, parser, support_legacy=False)
    return ProviderLoginURLNode(provider_id, params)
class ProvidersMediaJSNode(template.Node):
    """Template node emitting the concatenated media JS of all providers."""

    def render(self, context):
        request = context['request']
        snippets = [provider.media_js(request)
                    for provider in providers.registry.get_list()]
        return '\n'.join(snippets)
@register.tag
def providers_media_js(parser, token):
    """Render ``{% providers_media_js %}``: the media JS of all providers."""
    # parser/token are unused but required by Django's tag signature.
    return ProvidersMediaJSNode()
@register.assignment_tag
def get_social_accounts(user):
    """
    {% get_social_accounts user as accounts %}
    Then:
    {{accounts.twitter}} -- a list of connected Twitter accounts
    {{accounts.twitter.0}} -- the first Twitter account
    {% if accounts %} -- if there is at least one social account
    """
    accounts = {}
    for account in user.socialaccount_set.all().iterator():
        # FIX: the original bound this list to a local named ``providers``,
        # shadowing the module-level ``providers`` import inside this
        # function; use setdefault(...).append() directly instead.
        accounts.setdefault(account.provider, []).append(account)
    return accounts
@register.assignment_tag
def get_providers():
    """
    Returns a list of social authentication providers.
    Usage: `{% get_providers as socialaccount_providers %}`.
    Then within the template context, `socialaccount_providers` will hold
    a list of social providers configured for the current site.
    """
    registry = providers.registry
    return registry.get_list()
| mit |
sherazkasi/SabreSoftware | Lib/_pyio.py | 76 | 68418 | """
Python implementation of the io module.
"""
from __future__ import (print_function, unicode_literals)
import os
import abc
import codecs
import warnings
# Import thread instead of threading to reduce startup cost
try:
from thread import allocate_lock as Lock
except ImportError:
from dummy_thread import allocate_lock as Lock
import io
from io import (__all__, SEEK_SET, SEEK_CUR, SEEK_END)
from errno import EINTR
# Make all classes defined in this module new-style under Python 2.
__metaclass__ = type
# open() uses st_blksize whenever we can
DEFAULT_BUFFER_SIZE = 8 * 1024 # bytes
# NOTE: Base classes defined here are registered with the "official" ABCs
# defined in io.py. We don't use real inheritance though, because we don't
# want to inherit the C implementations.
class BlockingIOError(IOError):
    """Exception raised when I/O would block on a non-blocking I/O stream."""
    def __init__(self, errno, strerror, characters_written=0):
        # super(IOError, self) deliberately skips IOError.__init__ and
        # initializes the next base in the MRO with (errno, strerror).
        super(IOError, self).__init__(errno, strerror)
        # ``long`` exists because this is Python 2 code.
        if not isinstance(characters_written, (int, long)):
            raise TypeError("characters_written must be a integer")
        # Number of bytes successfully written before the stream blocked.
        self.characters_written = characters_written
def open(file, mode="r", buffering=-1,
         encoding=None, errors=None,
         newline=None, closefd=True):
    r"""Open file and return a stream. Raise IOError upon failure.
    file is either a text or byte string giving the name (and the path
    if the file isn't in the current working directory) of the file to
    be opened or an integer file descriptor of the file to be
    wrapped. (If a file descriptor is given, it is closed when the
    returned I/O object is closed, unless closefd is set to False.)
    mode is an optional string that specifies the mode in which the file
    is opened. It defaults to 'r' which means open for reading in text
    mode. Other common values are 'w' for writing (truncating the file if
    it already exists), and 'a' for appending (which on some Unix systems,
    means that all writes append to the end of the file regardless of the
    current seek position). In text mode, if encoding is not specified the
    encoding used is platform dependent. (For reading and writing raw
    bytes use binary mode and leave encoding unspecified.) The available
    modes are:
    ========= ===============================================================
    Character Meaning
    --------- ---------------------------------------------------------------
    'r' open for reading (default)
    'w' open for writing, truncating the file first
    'a' open for writing, appending to the end of the file if it exists
    'b' binary mode
    't' text mode (default)
    '+' open a disk file for updating (reading and writing)
    'U' universal newline mode (for backwards compatibility; unneeded
    for new code)
    ========= ===============================================================
    The default mode is 'rt' (open for reading text). For binary random
    access, the mode 'w+b' opens and truncates the file to 0 bytes, while
    'r+b' opens the file without truncation.
    Python distinguishes between files opened in binary and text modes,
    even when the underlying operating system doesn't. Files opened in
    binary mode (appending 'b' to the mode argument) return contents as
    bytes objects without any decoding. In text mode (the default, or when
    't' is appended to the mode argument), the contents of the file are
    returned as strings, the bytes having been first decoded using a
    platform-dependent encoding or using the specified encoding if given.
    buffering is an optional integer used to set the buffering policy.
    Pass 0 to switch buffering off (only allowed in binary mode), 1 to select
    line buffering (only usable in text mode), and an integer > 1 to indicate
    the size of a fixed-size chunk buffer. When no buffering argument is
    given, the default buffering policy works as follows:
    * Binary files are buffered in fixed-size chunks; the size of the buffer
    is chosen using a heuristic trying to determine the underlying device's
    "block size" and falling back on `io.DEFAULT_BUFFER_SIZE`.
    On many systems, the buffer will typically be 4096 or 8192 bytes long.
    * "Interactive" text files (files for which isatty() returns True)
    use line buffering. Other text files use the policy described above
    for binary files.
    encoding is the name of the encoding used to decode or encode the
    file. This should only be used in text mode. The default encoding is
    platform dependent, but any encoding supported by Python can be
    passed. See the codecs module for the list of supported encodings.
    errors is an optional string that specifies how encoding errors are to
    be handled---this argument should not be used in binary mode. Pass
    'strict' to raise a ValueError exception if there is an encoding error
    (the default of None has the same effect), or pass 'ignore' to ignore
    errors. (Note that ignoring encoding errors can lead to data loss.)
    See the documentation for codecs.register for a list of the permitted
    encoding error strings.
    newline controls how universal newlines works (it only applies to text
    mode). It can be None, '', '\n', '\r', and '\r\n'. It works as
    follows:
    * On input, if newline is None, universal newlines mode is
    enabled. Lines in the input can end in '\n', '\r', or '\r\n', and
    these are translated into '\n' before being returned to the
    caller. If it is '', universal newline mode is enabled, but line
    endings are returned to the caller untranslated. If it has any of
    the other legal values, input lines are only terminated by the given
    string, and the line ending is returned to the caller untranslated.
    * On output, if newline is None, any '\n' characters written are
    translated to the system default line separator, os.linesep. If
    newline is '', no translation takes place. If newline is any of the
    other legal values, any '\n' characters written are translated to
    the given string.
    If closefd is False, the underlying file descriptor will be kept open
    when the file is closed. This does not work when a file name is given
    and must be True in that case.
    open() returns a file object whose type depends on the mode, and
    through which the standard file operations such as reading and writing
    are performed. When open() is used to open a file in a text mode ('w',
    'r', 'wt', 'rt', etc.), it returns a TextIOWrapper. When used to open
    a file in a binary mode, the returned class varies: in read binary
    mode, it returns a BufferedReader; in write binary and append binary
    modes, it returns a BufferedWriter, and in read/write mode, it returns
    a BufferedRandom.
    It is also possible to use a string or bytearray as a file for both
    reading and writing. For strings StringIO can be used like a file
    opened in a text mode, and for bytes a BytesIO can be used like a file
    opened in a binary mode.
    """
    # --- Argument type validation (Python 2: basestring/long exist) ---
    if not isinstance(file, (basestring, int, long)):
        raise TypeError("invalid file: %r" % file)
    if not isinstance(mode, basestring):
        raise TypeError("invalid mode: %r" % mode)
    if not isinstance(buffering, (int, long)):
        raise TypeError("invalid buffering: %r" % buffering)
    if encoding is not None and not isinstance(encoding, basestring):
        raise TypeError("invalid encoding: %r" % encoding)
    if errors is not None and not isinstance(errors, basestring):
        raise TypeError("invalid errors: %r" % errors)
    # --- Parse and cross-check the mode string ---
    # The set both validates the characters and detects duplicates
    # (a duplicated letter makes len(mode) > len(modes)).
    modes = set(mode)
    if modes - set("arwb+tU") or len(mode) > len(modes):
        raise ValueError("invalid mode: %r" % mode)
    reading = "r" in modes
    writing = "w" in modes
    appending = "a" in modes
    updating = "+" in modes
    text = "t" in modes
    binary = "b" in modes
    if "U" in modes:
        if writing or appending:
            raise ValueError("can't use U and writing mode at once")
        reading = True
    if text and binary:
        raise ValueError("can't have text and binary mode at once")
    if reading + writing + appending > 1:
        raise ValueError("can't have read/write/append mode at once")
    if not (reading or writing or appending):
        raise ValueError("must have exactly one of read/write/append mode")
    if binary and encoding is not None:
        raise ValueError("binary mode doesn't take an encoding argument")
    if binary and errors is not None:
        raise ValueError("binary mode doesn't take an errors argument")
    if binary and newline is not None:
        raise ValueError("binary mode doesn't take a newline argument")
    # --- Create the raw (unbuffered) file object ---
    raw = FileIO(file,
                 (reading and "r" or "") +
                 (writing and "w" or "") +
                 (appending and "a" or "") +
                 (updating and "+" or ""),
                 closefd)
    # --- Choose the buffering policy ---
    line_buffering = False
    # buffering == 1, or a negative value on a tty, selects line buffering
    # (note: "or" binds looser than "and" here, as intended).
    if buffering == 1 or buffering < 0 and raw.isatty():
        buffering = -1
        line_buffering = True
    if buffering < 0:
        buffering = DEFAULT_BUFFER_SIZE
        try:
            # Prefer the filesystem block size when the OS reports one.
            bs = os.fstat(raw.fileno()).st_blksize
        except (os.error, AttributeError):
            pass
        else:
            if bs > 1:
                buffering = bs
    if buffering < 0:
        raise ValueError("invalid buffering size")
    if buffering == 0:
        if binary:
            return raw
        raise ValueError("can't have unbuffered text I/O")
    # --- Wrap the raw stream in the appropriate buffered class ---
    if updating:
        buffer = BufferedRandom(raw, buffering)
    elif writing or appending:
        buffer = BufferedWriter(raw, buffering)
    elif reading:
        buffer = BufferedReader(raw, buffering)
    else:
        raise ValueError("unknown mode: %r" % mode)
    if binary:
        return buffer
    # Text mode: add the decoding/newline-translating layer on top.
    text = TextIOWrapper(buffer, encoding, errors, newline, line_buffering)
    text.mode = mode
    return text
class DocDescriptor:
    """Helper for builtins.open.__doc__
    """
    def __get__(self, obj, typ):
        # Prepend the canonical signature line to open()'s docstring.
        header = ("open(file, mode='r', buffering=-1, encoding=None, "
                  "errors=None, newline=None, closefd=True)\n\n")
        return header + open.__doc__
class OpenWrapper:
    """Wrapper for builtins.open
    Trick so that open won't become a bound method when stored
    as a class variable (as dbm.dumb does).
    See initstdio() in Python/pythonrun.c.
    """
    # Descriptor so OpenWrapper.__doc__ mirrors open()'s signature + doc.
    __doc__ = DocDescriptor()
    def __new__(cls, *args, **kwargs):
        # Never actually instantiated: delegate straight to open().
        return open(*args, **kwargs)
# Raised by IOBase._unsupported(); derives from both ValueError and IOError
# so callers catching either historical exception type keep working.
class UnsupportedOperation(ValueError, IOError):
    pass
class IOBase:
    __metaclass__ = abc.ABCMeta
    # NOTE(review): the string literal below is NOT IOBase.__doc__ -- the
    # __metaclass__ assignment above is the first statement of the class
    # body, so the string is a no-op expression. Kept in place unchanged.
    """The abstract base class for all I/O classes, acting on streams of
    bytes. There is no public constructor.
    This class provides dummy implementations for many methods that
    derived classes can override selectively; the default implementations
    represent a file that cannot be read, written or seeked.
    Even though IOBase does not declare read, readinto, or write because
    their signatures will vary, implementations and clients should
    consider those methods part of the interface. Also, implementations
    may raise a IOError when operations they do not support are called.
    The basic type used for binary data read from or written to a file is
    bytes. bytearrays are accepted too, and in some cases (such as
    readinto) needed. Text I/O classes work with str data.
    Note that calling any method (even inquiries) on a closed stream is
    undefined. Implementations may raise IOError in this case.
    IOBase (and its subclasses) support the iterator protocol, meaning
    that an IOBase object can be iterated over yielding the lines in a
    stream.
    IOBase also supports the :keyword:`with` statement. In this example,
    fp is closed after the suite of the with statement is complete:
    with open('spam.txt', 'r') as fp:
        fp.write('Spam and eggs!')
    """
    ### Internal ###
    def _unsupported(self, name):
        """Internal: raise an exception for unsupported operations."""
        raise UnsupportedOperation("%s.%s() not supported" %
                                   (self.__class__.__name__, name))
    ### Positioning ###
    def seek(self, pos, whence=0):
        """Change stream position.
        Change the stream position to byte offset offset. offset is
        interpreted relative to the position indicated by whence. Values
        for whence are:
        * 0 -- start of stream (the default); offset should be zero or positive
        * 1 -- current stream position; offset may be negative
        * 2 -- end of stream; offset is usually negative
        Return the new absolute position.
        """
        self._unsupported("seek")
    def tell(self):
        """Return current stream position."""
        return self.seek(0, 1)
    def truncate(self, pos=None):
        """Truncate file to size bytes.
        Size defaults to the current IO position as reported by tell(). Return
        the new size.
        """
        self._unsupported("truncate")
    ### Flush and close ###
    def flush(self):
        """Flush write buffers, if applicable.
        This is not implemented for read-only and non-blocking streams.
        """
        self._checkClosed()
        # XXX Should this return the number of bytes written???
    # Name-mangled so each subclass sharing this base gets the same flag
    # only through these methods.
    __closed = False
    def close(self):
        """Flush and close the IO object.
        This method has no effect if the file is already closed.
        """
        if not self.__closed:
            self.flush()
            self.__closed = True
    def __del__(self):
        """Destructor. Calls close()."""
        # The try/except block is in case this is called at program
        # exit time, when it's possible that globals have already been
        # deleted, and then the close() call might fail. Since
        # there's nothing we can do about such failures and they annoy
        # the end users, we suppress the traceback.
        try:
            self.close()
        except:
            pass
    ### Inquiries ###
    def seekable(self):
        """Return whether object supports random access.
        If False, seek(), tell() and truncate() will raise IOError.
        This method may need to do a test seek().
        """
        return False
    def _checkSeekable(self, msg=None):
        """Internal: raise an IOError if file is not seekable
        """
        if not self.seekable():
            raise IOError("File or stream is not seekable."
                          if msg is None else msg)
    def readable(self):
        """Return whether object was opened for reading.
        If False, read() will raise IOError.
        """
        return False
    def _checkReadable(self, msg=None):
        """Internal: raise an IOError if file is not readable
        """
        if not self.readable():
            raise IOError("File or stream is not readable."
                          if msg is None else msg)
    def writable(self):
        """Return whether object was opened for writing.
        If False, write() and truncate() will raise IOError.
        """
        return False
    def _checkWritable(self, msg=None):
        """Internal: raise an IOError if file is not writable
        """
        if not self.writable():
            raise IOError("File or stream is not writable."
                          if msg is None else msg)
    @property
    def closed(self):
        """closed: bool. True iff the file has been closed.
        For backwards compatibility, this is a property, not a predicate.
        """
        return self.__closed
    def _checkClosed(self, msg=None):
        """Internal: raise a ValueError if file is closed
        """
        if self.closed:
            raise ValueError("I/O operation on closed file."
                             if msg is None else msg)
    ### Context manager ###
    def __enter__(self):
        """Context management protocol. Returns self."""
        self._checkClosed()
        return self
    def __exit__(self, *args):
        """Context management protocol. Calls close()"""
        self.close()
    ### Lower-level APIs ###
    # XXX Should these be present even if unimplemented?
    def fileno(self):
        """Returns underlying file descriptor if one exists.
        An IOError is raised if the IO object does not use a file descriptor.
        """
        self._unsupported("fileno")
    def isatty(self):
        """Return whether this is an 'interactive' stream.
        Return False if it can't be determined.
        """
        self._checkClosed()
        return False
    ### Readline[s] and writelines ###
    def readline(self, limit=-1):
        r"""Read and return a line from the stream.
        If limit is specified, at most limit bytes will be read.
        The line terminator is always b'\n' for binary files; for text
        files, the newlines argument to open can be used to select the line
        terminator(s) recognized.
        """
        # For backwards compatibility, a (slowish) readline().
        if hasattr(self, "peek"):
            def nreadahead():
                # Consume up to the next newline already buffered, but at
                # least one byte so the loop below always makes progress.
                readahead = self.peek(1)
                if not readahead:
                    return 1
                n = (readahead.find(b"\n") + 1) or len(readahead)
                if limit >= 0:
                    n = min(n, limit)
                return n
        else:
            def nreadahead():
                return 1
        if limit is None:
            limit = -1
        elif not isinstance(limit, (int, long)):
            raise TypeError("limit must be an integer")
        res = bytearray()
        while limit < 0 or len(res) < limit:
            b = self.read(nreadahead())
            if not b:
                break
            res += b
            if res.endswith(b"\n"):
                break
        return bytes(res)
    def __iter__(self):
        self._checkClosed()
        return self
    def next(self):
        # Python 2 iterator protocol: yield lines until readline() is empty.
        line = self.readline()
        if not line:
            raise StopIteration
        return line
    def readlines(self, hint=None):
        """Return a list of lines from the stream.
        hint can be specified to control the number of lines read: no more
        lines will be read if the total size (in bytes/characters) of all
        lines so far exceeds hint.
        """
        if hint is not None and not isinstance(hint, (int, long)):
            raise TypeError("integer or None expected")
        if hint is None or hint <= 0:
            return list(self)
        n = 0
        lines = []
        for line in self:
            lines.append(line)
            n += len(line)
            if n >= hint:
                break
        return lines
    def writelines(self, lines):
        self._checkClosed()
        for line in lines:
            self.write(line)
# Register this pure-Python ABC with the "official" ABC from the io module.
io.IOBase.register(IOBase)
class RawIOBase(IOBase):
    """Base class for raw binary I/O."""
    # The read() method is implemented by calling readinto(); derived
    # classes that want to support read() only need to implement
    # readinto() as a primitive operation. In general, readinto() can be
    # more efficient than read().
    # (It would be tempting to also provide an implementation of
    # readinto() in terms of read(), in case the latter is a more suitable
    # primitive operation, but that would lead to nasty recursion in case
    # a subclass doesn't implement either.)
    def read(self, n=-1):
        """Read and return up to n bytes.
        Returns an empty bytes object on EOF, or None if the object is
        set not to block and has no data to read.
        """
        if n is None:
            n = -1
        if n < 0:
            return self.readall()
        # Allocate exactly n bytes, then shrink to what readinto() filled.
        b = bytearray(n.__index__())
        n = self.readinto(b)
        if n is None:
            # Non-blocking stream with no data available.
            return None
        del b[n:]
        return bytes(b)
    def readall(self):
        """Read until EOF, using multiple read() call."""
        res = bytearray()
        while True:
            data = self.read(DEFAULT_BUFFER_SIZE)
            if not data:
                break
            res += data
        if res:
            return bytes(res)
        else:
            # b'' or None
            return data
    def readinto(self, b):
        """Read up to len(b) bytes into b.
        Returns number of bytes read (0 for EOF), or None if the object
        is set not to block and has no data to read.
        """
        self._unsupported("readinto")
    def write(self, b):
        """Write the given buffer to the IO stream.
        Returns the number of bytes written, which may be less than len(b).
        """
        self._unsupported("write")
io.RawIOBase.register(RawIOBase)
# FileIO comes from the C accelerator module; register it so it passes
# isinstance() checks against this module's RawIOBase.
from _io import FileIO
RawIOBase.register(FileIO)
class BufferedIOBase(IOBase):
    """Base class for buffered IO objects.
    The main difference with RawIOBase is that the read() method
    supports omitting the size argument, and does not have a default
    implementation that defers to readinto().
    In addition, read(), readinto() and write() may raise
    BlockingIOError if the underlying raw stream is in non-blocking
    mode and not ready; unlike their raw counterparts, they will never
    return None.
    A typical implementation should not inherit from a RawIOBase
    implementation, but wrap one.
    """
    def read(self, n=None):
        """Read and return up to n bytes.
        If the argument is omitted, None, or negative, reads and
        returns all data until EOF.
        If the argument is positive, and the underlying raw stream is
        not 'interactive', multiple raw reads may be issued to satisfy
        the byte count (unless EOF is reached first). But for
        interactive raw streams (XXX and for pipes?), at most one raw
        read will be issued, and a short result does not imply that
        EOF is imminent.
        Returns an empty bytes array on EOF.
        Raises BlockingIOError if the underlying raw stream has no
        data at the moment.
        """
        self._unsupported("read")
    def read1(self, n=None):
        """Read up to n bytes with at most one read() system call."""
        self._unsupported("read1")
    def readinto(self, b):
        """Read up to len(b) bytes into b.
        Like read(), this may issue multiple reads to the underlying raw
        stream, unless the latter is 'interactive'.
        Returns the number of bytes read (0 for EOF).
        Raises BlockingIOError if the underlying raw stream has no
        data at the moment.
        """
        # XXX This ought to work with anything that supports the buffer API
        data = self.read(len(b))
        n = len(data)
        try:
            b[:n] = data
        except TypeError as err:
            import array
            # Some targets (array.array here) reject bytes in slice
            # assignment; convert and retry before giving up.
            if not isinstance(b, array.array):
                raise err
            b[:n] = array.array(b'b', data)
        return n
    def write(self, b):
        """Write the given buffer to the IO stream.
        Return the number of bytes written, which is never less than
        len(b).
        Raises BlockingIOError if the buffer is full and the
        underlying raw stream cannot accept more data at the moment.
        """
        self._unsupported("write")
    def detach(self):
        """
        Separate the underlying raw stream from the buffer and return it.
        After the raw stream has been detached, the buffer is in an unusable
        state.
        """
        self._unsupported("detach")
io.BufferedIOBase.register(BufferedIOBase)
class _BufferedIOMixin(BufferedIOBase):
    """A mixin implementation of BufferedIOBase with an underlying raw stream.
    This passes most requests on to the underlying raw stream. It
    does *not* provide implementations of read(), readinto() or
    write().
    """
    def __init__(self, raw):
        # The wrapped raw stream; exposed read-only via the .raw property.
        self._raw = raw
    ### Positioning ###
    def seek(self, pos, whence=0):
        new_position = self.raw.seek(pos, whence)
        if new_position < 0:
            raise IOError("seek() returned an invalid position")
        return new_position
    def tell(self):
        pos = self.raw.tell()
        if pos < 0:
            raise IOError("tell() returned an invalid position")
        return pos
    def truncate(self, pos=None):
        # Flush the stream. We're mixing buffered I/O with lower-level I/O,
        # and a flush may be necessary to synch both views of the current
        # file state.
        self.flush()
        if pos is None:
            pos = self.tell()
        # XXX: Should seek() be used, instead of passing the position
        # XXX directly to truncate?
        return self.raw.truncate(pos)
    ### Flush and close ###
    def flush(self):
        if self.closed:
            raise ValueError("flush of closed file")
        self.raw.flush()
    def close(self):
        if self.raw is not None and not self.closed:
            # Flush first so buffered data reaches the raw stream
            # before it is closed.
            self.flush()
            self.raw.close()
    def detach(self):
        if self.raw is None:
            raise ValueError("raw stream already detached")
        self.flush()
        raw = self._raw
        self._raw = None
        return raw
    ### Inquiries ###
    def seekable(self):
        return self.raw.seekable()
    def readable(self):
        return self.raw.readable()
    def writable(self):
        return self.raw.writable()
    @property
    def raw(self):
        # The underlying raw stream (None once detached).
        return self._raw
    @property
    def closed(self):
        return self.raw.closed
    @property
    def name(self):
        return self.raw.name
    @property
    def mode(self):
        return self.raw.mode
    def __repr__(self):
        clsname = self.__class__.__name__
        try:
            name = self.name
        except AttributeError:
            return "<_pyio.{0}>".format(clsname)
        else:
            return "<_pyio.{0} name={1!r}>".format(clsname, name)
    ### Lower-level APIs ###
    def fileno(self):
        return self.raw.fileno()
    def isatty(self):
        return self.raw.isatty()
class BytesIO(BufferedIOBase):
    """Buffered I/O implementation using an in-memory bytes buffer."""

    def __init__(self, initial_bytes=None):
        # State is a mutable bytearray plus a cursor position.
        storage = bytearray()
        if initial_bytes is not None:
            storage.extend(initial_bytes)
        self._buffer = storage
        self._pos = 0

    def __getstate__(self):
        if self.closed:
            raise ValueError("__getstate__ on closed file")
        return dict(self.__dict__)

    def getvalue(self):
        """Return the bytes value (contents) of the buffer
        """
        if self.closed:
            raise ValueError("getvalue on closed file")
        return bytes(self._buffer)

    def read(self, n=None):
        if self.closed:
            raise ValueError("read from closed file")
        if n is None:
            n = -1
        if not isinstance(n, (int, long)):
            raise TypeError("integer argument expected, got {0!r}".format(
                type(n)))
        length = len(self._buffer)
        if n < 0:
            # Negative size means "read everything remaining".
            n = length
        if length <= self._pos:
            return b""
        end = min(length, self._pos + n)
        data = self._buffer[self._pos:end]
        self._pos = end
        return bytes(data)

    def read1(self, n):
        """In-memory streams need at most one "system call": same as read."""
        return self.read(n)

    def write(self, b):
        if self.closed:
            raise ValueError("write to closed file")
        if isinstance(b, unicode):
            raise TypeError("can't write unicode to binary stream")
        n = len(b)
        if n == 0:
            return 0
        gap = self._pos - len(self._buffer)
        if gap > 0:
            # Seeking past EOF then writing zero-fills the gap.
            self._buffer.extend(b'\x00' * gap)
        self._buffer[self._pos:self._pos + n] = b
        self._pos += n
        return n

    def seek(self, pos, whence=0):
        if self.closed:
            raise ValueError("seek on closed file")
        try:
            pos.__index__
        except AttributeError:
            raise TypeError("an integer is required")
        if whence == 0:
            if pos < 0:
                raise ValueError("negative seek position %r" % (pos,))
            new_pos = pos
        elif whence == 1:
            new_pos = max(0, self._pos + pos)
        elif whence == 2:
            new_pos = max(0, len(self._buffer) + pos)
        else:
            raise ValueError("invalid whence value")
        self._pos = new_pos
        return new_pos

    def tell(self):
        if self.closed:
            raise ValueError("tell on closed file")
        return self._pos

    def truncate(self, pos=None):
        if self.closed:
            raise ValueError("truncate on closed file")
        if pos is None:
            pos = self._pos
        else:
            try:
                pos.__index__
            except AttributeError:
                raise TypeError("an integer is required")
            if pos < 0:
                raise ValueError("negative truncate position %r" % (pos,))
        del self._buffer[pos:]
        return pos

    def readable(self):
        return True

    def writable(self):
        return True

    def seekable(self):
        return True
class BufferedReader(_BufferedIOMixin):
"""BufferedReader(raw[, buffer_size])
A buffer for a readable, sequential BaseRawIO object.
The constructor creates a BufferedReader for the given readable raw
stream and buffer_size. If buffer_size is omitted, DEFAULT_BUFFER_SIZE
is used.
"""
def __init__(self, raw, buffer_size=DEFAULT_BUFFER_SIZE):
"""Create a new buffered reader using the given readable raw IO object.
"""
if not raw.readable():
raise IOError('"raw" argument must be readable.')
_BufferedIOMixin.__init__(self, raw)
if buffer_size <= 0:
raise ValueError("invalid buffer size")
self.buffer_size = buffer_size
self._reset_read_buf()
self._read_lock = Lock()
def _reset_read_buf(self):
self._read_buf = b""
self._read_pos = 0
def read(self, n=None):
"""Read n bytes.
Returns exactly n bytes of data unless the underlying raw IO
stream reaches EOF or if the call would block in non-blocking
mode. If n is negative, read until EOF or until read() would
block.
"""
if n is not None and n < -1:
raise ValueError("invalid number of bytes to read")
with self._read_lock:
return self._read_unlocked(n)
def _read_unlocked(self, n=None):
nodata_val = b""
empty_values = (b"", None)
buf = self._read_buf
pos = self._read_pos
# Special case for when the number of bytes to read is unspecified.
if n is None or n == -1:
self._reset_read_buf()
chunks = [buf[pos:]] # Strip the consumed bytes.
current_size = 0
while True:
# Read until EOF or until read() would block.
try:
chunk = self.raw.read()
except IOError as e:
if e.errno != EINTR:
raise
continue
if chunk in empty_values:
nodata_val = chunk
break
current_size += len(chunk)
chunks.append(chunk)
return b"".join(chunks) or nodata_val
# The number of bytes to read is specified, return at most n bytes.
avail = len(buf) - pos # Length of the available buffered data.
if n <= avail:
# Fast path: the data to read is fully buffered.
self._read_pos += n
return buf[pos:pos+n]
# Slow path: read from the stream until enough bytes are read,
# or until an EOF occurs or until read() would block.
chunks = [buf[pos:]]
wanted = max(self.buffer_size, n)
while avail < n:
try:
chunk = self.raw.read(wanted)
except IOError as e:
if e.errno != EINTR:
raise
continue
if chunk in empty_values:
nodata_val = chunk
break
avail += len(chunk)
chunks.append(chunk)
# n is more then avail only when an EOF occurred or when
# read() would have blocked.
n = min(n, avail)
out = b"".join(chunks)
self._read_buf = out[n:] # Save the extra data in the buffer.
self._read_pos = 0
return out[:n] if out else nodata_val
def peek(self, n=0):
"""Returns buffered bytes without advancing the position.
The argument indicates a desired minimal number of bytes; we
do at most one raw read to satisfy it. We never return more
than self.buffer_size.
"""
with self._read_lock:
return self._peek_unlocked(n)
def _peek_unlocked(self, n=0):
want = min(n, self.buffer_size)
have = len(self._read_buf) - self._read_pos
if have < want or have <= 0:
to_read = self.buffer_size - have
while True:
try:
current = self.raw.read(to_read)
except IOError as e:
if e.errno != EINTR:
raise
continue
break
if current:
self._read_buf = self._read_buf[self._read_pos:] + current
self._read_pos = 0
return self._read_buf[self._read_pos:]
def read1(self, n):
"""Reads up to n bytes, with at most one read() system call."""
# Returns up to n bytes. If at least one byte is buffered, we
# only return buffered bytes. Otherwise, we do one raw read.
if n < 0:
raise ValueError("number of bytes to read must be positive")
if n == 0:
return b""
with self._read_lock:
self._peek_unlocked(1)
return self._read_unlocked(
min(n, len(self._read_buf) - self._read_pos))
    def tell(self):
        # Logical position = raw position minus the read-ahead bytes that
        # were fetched from the raw stream but not yet consumed.
        return _BufferedIOMixin.tell(self) - len(self._read_buf) + self._read_pos
    def seek(self, pos, whence=0):
        if not (0 <= whence <= 2):
            raise ValueError("invalid whence value")
        with self._read_lock:
            if whence == 1:
                # A cur-relative seek must compensate for the read-ahead:
                # the raw stream is further along than the logical position.
                pos -= len(self._read_buf) - self._read_pos
            pos = _BufferedIOMixin.seek(self, pos, whence)
            # Any buffered data is now stale.
            self._reset_read_buf()
            return pos
class BufferedWriter(_BufferedIOMixin):
    """A buffer for a writeable sequential RawIO object.

    The constructor creates a BufferedWriter for the given writeable raw
    stream. If the buffer_size is not given, it defaults to
    DEFAULT_BUFFER_SIZE.
    """
    # Stack depth passed to warnings.warn() so the deprecation points at
    # the user's call site; BufferedRandom overrides it (its __init__ adds
    # one more frame).
    _warning_stack_offset = 2
    def __init__(self, raw,
                 buffer_size=DEFAULT_BUFFER_SIZE, max_buffer_size=None):
        if not raw.writable():
            raise IOError('"raw" argument must be writable.')
        _BufferedIOMixin.__init__(self, raw)
        if buffer_size <= 0:
            raise ValueError("invalid buffer size")
        if max_buffer_size is not None:
            warnings.warn("max_buffer_size is deprecated", DeprecationWarning,
                          self._warning_stack_offset)
        self.buffer_size = buffer_size
        self._write_buf = bytearray()
        self._write_lock = Lock()
    def write(self, b):
        """Buffer b, flushing to the raw stream when the buffer overflows.

        Returns the number of bytes accepted.  May raise BlockingIOError
        (with .characters_written set) if the raw stream is non-blocking
        and cannot accept everything.
        """
        if self.closed:
            raise ValueError("write to closed file")
        if isinstance(b, unicode):
            raise TypeError("can't write unicode to binary stream")
        with self._write_lock:
            # XXX we can implement some more tricks to try and avoid
            # partial writes
            if len(self._write_buf) > self.buffer_size:
                # We're full, so let's pre-flush the buffer
                try:
                    self._flush_unlocked()
                except BlockingIOError as e:
                    # We can't accept anything else.
                    # XXX Why not just let the exception pass through?
                    raise BlockingIOError(e.errno, e.strerror, 0)
            before = len(self._write_buf)
            self._write_buf.extend(b)
            written = len(self._write_buf) - before
            if len(self._write_buf) > self.buffer_size:
                try:
                    self._flush_unlocked()
                except BlockingIOError as e:
                    if len(self._write_buf) > self.buffer_size:
                        # We've hit the buffer_size. We have to accept a
                        # partial write and cut back our buffer.
                        overage = len(self._write_buf) - self.buffer_size
                        written -= overage
                        self._write_buf = self._write_buf[:self.buffer_size]
                        raise BlockingIOError(e.errno, e.strerror, written)
            return written
    def truncate(self, pos=None):
        with self._write_lock:
            # Flush first so buffered data is not lost past the new size.
            self._flush_unlocked()
            if pos is None:
                pos = self.raw.tell()
            return self.raw.truncate(pos)
    def flush(self):
        with self._write_lock:
            self._flush_unlocked()
    def _flush_unlocked(self):
        # Internal flush; caller must hold self._write_lock.
        if self.closed:
            raise ValueError("flush of closed file")
        written = 0
        try:
            while self._write_buf:
                try:
                    n = self.raw.write(self._write_buf)
                except IOError as e:
                    # Retry writes interrupted by a signal (EINTR).
                    if e.errno != EINTR:
                        raise
                    continue
                if n > len(self._write_buf) or n < 0:
                    raise IOError("write() returned incorrect number of bytes")
                del self._write_buf[:n]
                written += n
        except BlockingIOError as e:
            # Record the partial progress and re-raise with an accurate
            # characters_written count.
            n = e.characters_written
            del self._write_buf[:n]
            written += n
            raise BlockingIOError(e.errno, e.strerror, written)
    def tell(self):
        # Logical position = raw position plus bytes not yet flushed.
        return _BufferedIOMixin.tell(self) + len(self._write_buf)
    def seek(self, pos, whence=0):
        if not (0 <= whence <= 2):
            raise ValueError("invalid whence")
        with self._write_lock:
            # Flush so the raw stream's position reflects all writes.
            self._flush_unlocked()
            return _BufferedIOMixin.seek(self, pos, whence)
class BufferedRWPair(BufferedIOBase):
    """A buffered reader and writer object together.

    A buffered reader object and buffered writer object put together to
    form a sequential IO object that can read and write. This is typically
    used with a socket or two-way pipe.

    reader and writer are RawIOBase objects that are readable and
    writeable respectively. If the buffer_size is omitted it defaults to
    DEFAULT_BUFFER_SIZE.
    """
    # XXX The usefulness of this (compared to having two separate IO
    # objects) is questionable.
    def __init__(self, reader, writer,
                 buffer_size=DEFAULT_BUFFER_SIZE, max_buffer_size=None):
        """Constructor.
        The arguments are two RawIO instances.
        """
        if max_buffer_size is not None:
            warnings.warn("max_buffer_size is deprecated", DeprecationWarning, 2)
        if not reader.readable():
            raise IOError('"reader" argument must be readable.')
        if not writer.writable():
            raise IOError('"writer" argument must be writable.')
        # All read-side and write-side calls simply delegate to these.
        self.reader = BufferedReader(reader, buffer_size)
        self.writer = BufferedWriter(writer, buffer_size)
    def read(self, n=None):
        if n is None:
            n = -1
        return self.reader.read(n)
    def readinto(self, b):
        return self.reader.readinto(b)
    def write(self, b):
        return self.writer.write(b)
    def peek(self, n=0):
        return self.reader.peek(n)
    def read1(self, n):
        return self.reader.read1(n)
    def readable(self):
        return self.reader.readable()
    def writable(self):
        return self.writer.writable()
    def flush(self):
        return self.writer.flush()
    def close(self):
        # Close the writer first so buffered output is flushed even if
        # closing the reader subsequently fails.
        self.writer.close()
        self.reader.close()
    def isatty(self):
        return self.reader.isatty() or self.writer.isatty()
    @property
    def closed(self):
        # NOTE(review): only reflects the writer side; a closed reader
        # alone is not reported here.
        return self.writer.closed
class BufferedRandom(BufferedWriter, BufferedReader):
    """A buffered interface to random access streams.

    The constructor creates a reader and writer for a seekable stream,
    raw, given in the first argument. If the buffer_size is omitted it
    defaults to DEFAULT_BUFFER_SIZE.
    """
    # One extra frame vs. BufferedWriter: __init__ here calls
    # BufferedWriter.__init__, which emits the deprecation warning.
    _warning_stack_offset = 3
    def __init__(self, raw,
                 buffer_size=DEFAULT_BUFFER_SIZE, max_buffer_size=None):
        raw._checkSeekable()
        BufferedReader.__init__(self, raw, buffer_size)
        BufferedWriter.__init__(self, raw, buffer_size, max_buffer_size)
    def seek(self, pos, whence=0):
        if not (0 <= whence <= 2):
            raise ValueError("invalid whence")
        self.flush()
        if self._read_buf:
            # Undo read ahead.
            with self._read_lock:
                self.raw.seek(self._read_pos - len(self._read_buf), 1)
        # First do the raw seek, then empty the read buffer, so that
        # if the raw seek fails, we don't lose buffered data forever.
        pos = self.raw.seek(pos, whence)
        with self._read_lock:
            self._reset_read_buf()
        if pos < 0:
            raise IOError("seek() returned invalid position")
        return pos
    def tell(self):
        # The write buffer and read buffer are never both non-empty, so
        # whichever side is active determines the logical position.
        if self._write_buf:
            return BufferedWriter.tell(self)
        else:
            return BufferedReader.tell(self)
    def truncate(self, pos=None):
        if pos is None:
            pos = self.tell()
        # Use seek to flush the read buffer.
        return BufferedWriter.truncate(self, pos)
    def read(self, n=None):
        if n is None:
            n = -1
        # Flush pending writes before reading so data is observed in order.
        self.flush()
        return BufferedReader.read(self, n)
    def readinto(self, b):
        self.flush()
        return BufferedReader.readinto(self, b)
    def peek(self, n=0):
        self.flush()
        return BufferedReader.peek(self, n)
    def read1(self, n):
        self.flush()
        return BufferedReader.read1(self, n)
    def write(self, b):
        if self._read_buf:
            # Undo readahead: rewind the raw stream to the logical
            # position before writing, then invalidate the read buffer.
            with self._read_lock:
                self.raw.seek(self._read_pos - len(self._read_buf), 1)
                self._reset_read_buf()
        return BufferedWriter.write(self, b)
class TextIOBase(IOBase):
    """Base class for text I/O.

    This class provides a character and line based interface to stream
    I/O. There is no readinto method because Python's character strings
    are immutable. There is no public constructor.
    """
    # Every method below is abstract: _unsupported() raises IOError so
    # subclasses must override what they support.
    def read(self, n=-1):
        """Read at most n characters from stream.

        Read from underlying buffer until we have n characters or we hit EOF.
        If n is negative or omitted, read until EOF.
        """
        self._unsupported("read")
    def write(self, s):
        """Write string s to stream."""
        self._unsupported("write")
    def truncate(self, pos=None):
        """Truncate size to pos."""
        self._unsupported("truncate")
    def readline(self):
        """Read until newline or EOF.

        Returns an empty string if EOF is hit immediately.
        """
        self._unsupported("readline")
    def detach(self):
        """
        Separate the underlying buffer from the TextIOBase and return it.

        After the underlying buffer has been detached, the TextIO is in an
        unusable state.
        """
        self._unsupported("detach")
    @property
    def encoding(self):
        """Subclasses should override."""
        return None
    @property
    def newlines(self):
        """Line endings translated so far.

        Only line endings translated during reading are considered.
        Subclasses should override.
        """
        return None
    @property
    def errors(self):
        """Error setting of the decoder or encoder.

        Subclasses should override."""
        return None
# Register with the C-implemented ABC so isinstance()/issubclass() checks
# against io.TextIOBase also accept this pure-Python hierarchy.
io.TextIOBase.register(TextIOBase)
class IncrementalNewlineDecoder(codecs.IncrementalDecoder):
    r"""Incremental decoder implementing universal newlines.

    Wraps another incremental decoder and post-processes its output:
    when translate is true, the sequences \r\n and \r are rewritten to
    \n; when false, output passes through unchanged except that a
    trailing \r is withheld so \r\n is never split across two decode()
    calls.  The newline conventions encountered so far are recorded and
    exposed via the ``newlines`` property.
    """

    # Bits of the seennl mask.
    _LF = 1
    _CR = 2
    _CRLF = 4

    def __init__(self, decoder, translate, errors='strict'):
        codecs.IncrementalDecoder.__init__(self, errors=errors)
        self.translate = translate
        self.decoder = decoder
        self.seennl = 0          # bitmask of _LF/_CR/_CRLF seen so far
        self.pendingcr = False   # True when a trailing '\r' is withheld

    def decode(self, input, final=False):
        # Run the wrapped decoder first (None means input is already text).
        if self.decoder is not None:
            text = self.decoder.decode(input, final=final)
        else:
            text = input
        # Re-attach a '\r' withheld by the previous call, if any.
        if self.pendingcr and (text or final):
            text = "\r" + text
            self.pendingcr = False
        # Withhold a trailing '\r': the next chunk may begin with '\n',
        # and readline() relies on receiving '\r\n' in one piece.
        if not final and text.endswith("\r"):
            text = text[:-1]
            self.pendingcr = True
        # Tally which newline conventions appear in this chunk.
        n_crlf = text.count('\r\n')
        n_cr = text.count('\r') - n_crlf
        n_lf = text.count('\n') - n_crlf
        if n_lf:
            self.seennl |= self._LF
        if n_cr:
            self.seennl |= self._CR
        if n_crlf:
            self.seennl |= self._CRLF
        if self.translate:
            if n_crlf:
                text = text.replace("\r\n", "\n")
            if n_cr:
                text = text.replace("\r", "\n")
        return text

    def getstate(self):
        # The wrapped decoder's state, with pendingcr folded into the
        # low bit of the flags integer.
        if self.decoder is not None:
            buf, flag = self.decoder.getstate()
        else:
            buf, flag = b"", 0
        flag <<= 1
        if self.pendingcr:
            flag |= 1
        return buf, flag

    def setstate(self, state):
        buf, flag = state
        self.pendingcr = bool(flag & 1)
        if self.decoder is not None:
            self.decoder.setstate((buf, flag >> 1))

    def reset(self):
        self.seennl = 0
        self.pendingcr = False
        if self.decoder is not None:
            self.decoder.reset()

    @property
    def newlines(self):
        # Map the seennl bitmask onto the documented newlines values.
        return (None,
                "\n",
                "\r",
                ("\r", "\n"),
                "\r\n",
                ("\n", "\r\n"),
                ("\r", "\r\n"),
                ("\r", "\n", "\r\n"),
                )[self.seennl]
class TextIOWrapper(TextIOBase):
    r"""Character and line based layer over a BufferedIOBase object, buffer.

    encoding gives the name of the encoding that the stream will be
    decoded or encoded with. It defaults to locale.getpreferredencoding.

    errors determines the strictness of encoding and decoding (see the
    codecs.register) and defaults to "strict".

    newline can be None, '', '\n', '\r', or '\r\n'.  It controls the
    handling of line endings. If it is None, universal newlines is
    enabled.  With this enabled, on input, the line endings '\n', '\r',
    or '\r\n' are translated to '\n' before being returned to the
    caller. Conversely, on output, '\n' is translated to the system
    default line separator, os.linesep. If newline is any other of its
    legal values, that newline becomes the newline when the file is read
    and it is returned untranslated. On output, '\n' is converted to the
    newline.

    If line_buffering is True, a call to flush is implied when a call to
    write contains a newline character.
    """
    # Number of bytes pulled from the underlying buffer per _read_chunk().
    _CHUNK_SIZE = 2048
    def __init__(self, buffer, encoding=None, errors=None, newline=None,
                 line_buffering=False):
        if newline is not None and not isinstance(newline, basestring):
            raise TypeError("illegal newline type: %r" % (type(newline),))
        if newline not in (None, "", "\n", "\r", "\r\n"):
            raise ValueError("illegal newline value: %r" % (newline,))
        if encoding is None:
            try:
                import locale
            except ImportError:
                # Importing locale may fail if Python is being built
                encoding = "ascii"
            else:
                encoding = locale.getpreferredencoding()
        if not isinstance(encoding, basestring):
            raise ValueError("invalid encoding: %r" % encoding)
        if errors is None:
            errors = "strict"
        else:
            if not isinstance(errors, basestring):
                raise ValueError("invalid errors: %r" % errors)
        self._buffer = buffer
        self._line_buffering = line_buffering
        self._encoding = encoding
        self._errors = errors
        self._readuniversal = not newline
        self._readtranslate = newline is None
        self._readnl = newline
        self._writetranslate = newline != ''
        self._writenl = newline or os.linesep
        self._encoder = None
        self._decoder = None
        self._decoded_chars = ''  # buffer for text returned from decoder
        self._decoded_chars_used = 0  # offset into _decoded_chars for read()
        self._snapshot = None  # info for reconstructing decoder state
        self._seekable = self._telling = self.buffer.seekable()
        if self._seekable and self.writable():
            # Appending to a non-empty stream: tell the encoder not to
            # emit a BOM, since the stream already has content.
            position = self.buffer.tell()
            if position != 0:
                try:
                    self._get_encoder().setstate(0)
                except LookupError:
                    # Sometimes the encoder doesn't exist
                    pass
    # self._snapshot is either None, or a tuple (dec_flags, next_input)
    # where dec_flags is the second (integer) item of the decoder state
    # and next_input is the chunk of input bytes that comes next after the
    # snapshot point.  We use this to reconstruct decoder states in tell().
    # Naming convention:
    #   - "bytes_..." for integer variables that count input bytes
    #   - "chars_..." for integer variables that count decoded characters
    def __repr__(self):
        try:
            name = self.name
        except AttributeError:
            return "<_pyio.TextIOWrapper encoding='{0}'>".format(self.encoding)
        else:
            return "<_pyio.TextIOWrapper name={0!r} encoding='{1}'>".format(
                name, self.encoding)
    @property
    def encoding(self):
        return self._encoding
    @property
    def errors(self):
        return self._errors
    @property
    def line_buffering(self):
        return self._line_buffering
    @property
    def buffer(self):
        return self._buffer
    def seekable(self):
        return self._seekable
    def readable(self):
        return self.buffer.readable()
    def writable(self):
        return self.buffer.writable()
    def flush(self):
        self.buffer.flush()
        # Flushing restores the ability to call tell() (see next()).
        self._telling = self._seekable
    def close(self):
        if self.buffer is not None and not self.closed:
            self.flush()
            self.buffer.close()
    @property
    def closed(self):
        return self.buffer.closed
    @property
    def name(self):
        return self.buffer.name
    def fileno(self):
        return self.buffer.fileno()
    def isatty(self):
        return self.buffer.isatty()
    def write(self, s):
        """Encode s (applying newline translation) and write it to buffer.

        Returns the number of characters written (the length of s, before
        any newline translation).
        """
        if self.closed:
            raise ValueError("write to closed file")
        if not isinstance(s, unicode):
            raise TypeError("can't write %s to text stream" %
                            s.__class__.__name__)
        length = len(s)
        haslf = (self._writetranslate or self._line_buffering) and "\n" in s
        if haslf and self._writetranslate and self._writenl != "\n":
            s = s.replace("\n", self._writenl)
        encoder = self._encoder or self._get_encoder()
        # XXX What if we were just reading?
        b = encoder.encode(s)
        self.buffer.write(b)
        if self._line_buffering and (haslf or "\r" in s):
            self.flush()
        # Writing invalidates any read-side decoder state.
        self._snapshot = None
        if self._decoder:
            self._decoder.reset()
        return length
    def _get_encoder(self):
        # Create (and cache) the incremental encoder lazily.
        make_encoder = codecs.getincrementalencoder(self._encoding)
        self._encoder = make_encoder(self._errors)
        return self._encoder
    def _get_decoder(self):
        # Create (and cache) the incremental decoder lazily, wrapping it
        # for universal-newline handling when needed.
        make_decoder = codecs.getincrementaldecoder(self._encoding)
        decoder = make_decoder(self._errors)
        if self._readuniversal:
            decoder = IncrementalNewlineDecoder(decoder, self._readtranslate)
        self._decoder = decoder
        return decoder
    # The following three methods implement an ADT for _decoded_chars.
    # Text returned from the decoder is buffered here until the client
    # requests it by calling our read() or readline() method.
    def _set_decoded_chars(self, chars):
        """Set the _decoded_chars buffer."""
        self._decoded_chars = chars
        self._decoded_chars_used = 0
    def _get_decoded_chars(self, n=None):
        """Advance into the _decoded_chars buffer."""
        offset = self._decoded_chars_used
        if n is None:
            chars = self._decoded_chars[offset:]
        else:
            chars = self._decoded_chars[offset:offset + n]
        self._decoded_chars_used += len(chars)
        return chars
    def _rewind_decoded_chars(self, n):
        """Rewind the _decoded_chars buffer."""
        if self._decoded_chars_used < n:
            raise AssertionError("rewind decoded_chars out of bounds")
        self._decoded_chars_used -= n
    def _read_chunk(self):
        """
        Read and decode the next chunk of data from the BufferedReader.
        """
        # The return value is True unless EOF was reached.  The decoded
        # string is placed in self._decoded_chars (replacing its previous
        # value).  The entire input chunk is sent to the decoder, though
        # some of it may remain buffered in the decoder, yet to be
        # converted.
        if self._decoder is None:
            raise ValueError("no decoder")
        if self._telling:
            # To prepare for tell(), we need to snapshot a point in the
            # file where the decoder's input buffer is empty.
            dec_buffer, dec_flags = self._decoder.getstate()
            # Given this, we know there was a valid snapshot point
            # len(dec_buffer) bytes ago with decoder state (b'', dec_flags).
        # Read a chunk, decode it, and put the result in self._decoded_chars.
        input_chunk = self.buffer.read1(self._CHUNK_SIZE)
        eof = not input_chunk
        self._set_decoded_chars(self._decoder.decode(input_chunk, eof))
        if self._telling:
            # At the snapshot point, len(dec_buffer) bytes before the read,
            # the next input to be decoded is dec_buffer + input_chunk.
            self._snapshot = (dec_flags, dec_buffer + input_chunk)
        return not eof
    def _pack_cookie(self, position, dec_flags=0,
                           bytes_to_feed=0, need_eof=0, chars_to_skip=0):
        # The meaning of a tell() cookie is: seek to position, set the
        # decoder flags to dec_flags, read bytes_to_feed bytes, feed them
        # into the decoder with need_eof as the EOF flag, then skip
        # chars_to_skip characters of the decoded result.  For most simple
        # decoders, tell() will often just give a byte offset in the file.
        return (position | (dec_flags<<64) | (bytes_to_feed<<128) |
               (chars_to_skip<<192) | bool(need_eof)<<256)
    def _unpack_cookie(self, bigint):
        # Inverse of _pack_cookie: unpack the five 64-bit fields.
        rest, position = divmod(bigint, 1<<64)
        rest, dec_flags = divmod(rest, 1<<64)
        rest, bytes_to_feed = divmod(rest, 1<<64)
        need_eof, chars_to_skip = divmod(rest, 1<<64)
        return position, dec_flags, bytes_to_feed, need_eof, chars_to_skip
    def tell(self):
        if not self._seekable:
            raise IOError("underlying stream is not seekable")
        if not self._telling:
            raise IOError("telling position disabled by next() call")
        self.flush()
        position = self.buffer.tell()
        decoder = self._decoder
        if decoder is None or self._snapshot is None:
            if self._decoded_chars:
                # This should never happen.
                raise AssertionError("pending decoded text")
            return position
        # Skip backward to the snapshot point (see _read_chunk).
        dec_flags, next_input = self._snapshot
        position -= len(next_input)
        # How many decoded characters have been used up since the snapshot?
        chars_to_skip = self._decoded_chars_used
        if chars_to_skip == 0:
            # We haven't moved from the snapshot point.
            return self._pack_cookie(position, dec_flags)
        # Starting from the snapshot position, we will walk the decoder
        # forward until it gives us enough decoded characters.
        saved_state = decoder.getstate()
        try:
            # Note our initial start point.
            decoder.setstate((b'', dec_flags))
            start_pos = position
            start_flags, bytes_fed, chars_decoded = dec_flags, 0, 0
            need_eof = 0
            # Feed the decoder one byte at a time.  As we go, note the
            # nearest "safe start point" before the current location
            # (a point where the decoder has nothing buffered, so seek()
            # can safely start from there and advance to this location).
            for next_byte in next_input:
                bytes_fed += 1
                chars_decoded += len(decoder.decode(next_byte))
                dec_buffer, dec_flags = decoder.getstate()
                if not dec_buffer and chars_decoded <= chars_to_skip:
                    # Decoder buffer is empty, so this is a safe start point.
                    start_pos += bytes_fed
                    chars_to_skip -= chars_decoded
                    start_flags, bytes_fed, chars_decoded = dec_flags, 0, 0
                if chars_decoded >= chars_to_skip:
                    break
            else:
                # We didn't get enough decoded data; signal EOF to get more.
                chars_decoded += len(decoder.decode(b'', final=True))
                need_eof = 1
                if chars_decoded < chars_to_skip:
                    raise IOError("can't reconstruct logical file position")
            # The returned cookie corresponds to the last safe start point.
            return self._pack_cookie(
                start_pos, start_flags, bytes_fed, need_eof, chars_to_skip)
        finally:
            decoder.setstate(saved_state)
    def truncate(self, pos=None):
        self.flush()
        if pos is None:
            pos = self.tell()
        return self.buffer.truncate(pos)
    def detach(self):
        if self.buffer is None:
            raise ValueError("buffer is already detached")
        self.flush()
        buffer = self._buffer
        self._buffer = None
        return buffer
    def seek(self, cookie, whence=0):
        # NOTE(review): the message says "tell" but this is seek(); looks
        # like copy-paste — changing it would alter user-visible text.
        if self.closed:
            raise ValueError("tell on closed file")
        if not self._seekable:
            raise IOError("underlying stream is not seekable")
        if whence == 1: # seek relative to current position
            if cookie != 0:
                raise IOError("can't do nonzero cur-relative seeks")
            # Seeking to the current position should attempt to
            # sync the underlying buffer with the current position.
            whence = 0
            cookie = self.tell()
        if whence == 2: # seek relative to end of file
            if cookie != 0:
                raise IOError("can't do nonzero end-relative seeks")
            self.flush()
            position = self.buffer.seek(0, 2)
            self._set_decoded_chars('')
            self._snapshot = None
            if self._decoder:
                self._decoder.reset()
            return position
        if whence != 0:
            raise ValueError("invalid whence (%r, should be 0, 1 or 2)" %
                             (whence,))
        if cookie < 0:
            raise ValueError("negative seek position %r" % (cookie,))
        self.flush()
        # The strategy of seek() is to go back to the safe start point
        # and replay the effect of read(chars_to_skip) from there.
        start_pos, dec_flags, bytes_to_feed, need_eof, chars_to_skip = \
            self._unpack_cookie(cookie)
        # Seek back to the safe start point.
        self.buffer.seek(start_pos)
        self._set_decoded_chars('')
        self._snapshot = None
        # Restore the decoder to its state from the safe start point.
        if cookie == 0 and self._decoder:
            self._decoder.reset()
        elif self._decoder or dec_flags or chars_to_skip:
            self._decoder = self._decoder or self._get_decoder()
            self._decoder.setstate((b'', dec_flags))
            self._snapshot = (dec_flags, b'')
        if chars_to_skip:
            # Just like _read_chunk, feed the decoder and save a snapshot.
            input_chunk = self.buffer.read(bytes_to_feed)
            self._set_decoded_chars(
                self._decoder.decode(input_chunk, need_eof))
            self._snapshot = (dec_flags, input_chunk)
            # Skip chars_to_skip of the decoded characters.
            if len(self._decoded_chars) < chars_to_skip:
                raise IOError("can't restore logical file position")
            self._decoded_chars_used = chars_to_skip
        # Finally, reset the encoder (merely useful for proper BOM handling)
        try:
            encoder = self._encoder or self._get_encoder()
        except LookupError:
            # Sometimes the encoder doesn't exist
            pass
        else:
            if cookie != 0:
                encoder.setstate(0)
            else:
                encoder.reset()
        return cookie
    def read(self, n=None):
        self._checkReadable()
        if n is None:
            n = -1
        decoder = self._decoder or self._get_decoder()
        try:
            n.__index__
        except AttributeError:
            raise TypeError("an integer is required")
        if n < 0:
            # Read everything.
            result = (self._get_decoded_chars() +
                      decoder.decode(self.buffer.read(), final=True))
            self._set_decoded_chars('')
            self._snapshot = None
            return result
        else:
            # Keep reading chunks until we have n characters to return.
            eof = False
            result = self._get_decoded_chars(n)
            while len(result) < n and not eof:
                eof = not self._read_chunk()
                result += self._get_decoded_chars(n - len(result))
            return result
    def next(self):
        # Iteration disables tell() (no snapshots are kept) for speed;
        # flush() or hitting EOF re-enables it.
        self._telling = False
        line = self.readline()
        if not line:
            self._snapshot = None
            self._telling = self._seekable
            raise StopIteration
        return line
    def readline(self, limit=None):
        if self.closed:
            raise ValueError("read from closed file")
        if limit is None:
            limit = -1
        elif not isinstance(limit, (int, long)):
            raise TypeError("limit must be an integer")
        # Grab all the decoded text (we will rewind any extra bits later).
        line = self._get_decoded_chars()
        start = 0
        # Make the decoder if it doesn't already exist.
        if not self._decoder:
            self._get_decoder()
        pos = endpos = None
        while True:
            if self._readtranslate:
                # Newlines are already translated, only search for \n
                pos = line.find('\n', start)
                if pos >= 0:
                    endpos = pos + 1
                    break
                else:
                    start = len(line)
            elif self._readuniversal:
                # Universal newline search. Find any of \r, \r\n, \n
                # The decoder ensures that \r\n are not split in two pieces
                # In C we'd look for these in parallel of course.
                nlpos = line.find("\n", start)
                crpos = line.find("\r", start)
                if crpos == -1:
                    if nlpos == -1:
                        # Nothing found
                        start = len(line)
                    else:
                        # Found \n
                        endpos = nlpos + 1
                        break
                elif nlpos == -1:
                    # Found lone \r
                    endpos = crpos + 1
                    break
                elif nlpos < crpos:
                    # Found \n
                    endpos = nlpos + 1
                    break
                elif nlpos == crpos + 1:
                    # Found \r\n
                    endpos = crpos + 2
                    break
                else:
                    # Found \r
                    endpos = crpos + 1
                    break
            else:
                # non-universal
                pos = line.find(self._readnl)
                if pos >= 0:
                    endpos = pos + len(self._readnl)
                    break
            if limit >= 0 and len(line) >= limit:
                endpos = limit  # reached length limit
                break
            # No line ending seen yet - get more data
            while self._read_chunk():
                if self._decoded_chars:
                    break
            if self._decoded_chars:
                line += self._get_decoded_chars()
            else:
                # end of file
                self._set_decoded_chars('')
                self._snapshot = None
                return line
        if limit >= 0 and endpos > limit:
            endpos = limit  # don't exceed limit
        # Rewind _decoded_chars to just after the line ending we found.
        self._rewind_decoded_chars(len(line) - endpos)
        return line[:endpos]
    @property
    def newlines(self):
        return self._decoder.newlines if self._decoder else None
class StringIO(TextIOWrapper):
    """Text I/O implementation using an in-memory buffer.

    The initial_value argument sets the value of object. The newline
    argument is like the one of TextIOWrapper's constructor.
    """
    def __init__(self, initial_value="", newline="\n"):
        # Backed by an in-memory BytesIO; text is stored UTF-8 encoded.
        super(StringIO, self).__init__(BytesIO(),
                                       encoding="utf-8",
                                       errors="strict",
                                       newline=newline)
        # Issue #5645: make universal newlines semantics the same as in the
        # C version, even under Windows.
        if newline is None:
            self._writetranslate = False
        if initial_value:
            if not isinstance(initial_value, unicode):
                initial_value = unicode(initial_value)
            self.write(initial_value)
            self.seek(0)
    def getvalue(self):
        # Flush so pending writes reach the BytesIO before decoding.
        self.flush()
        return self.buffer.getvalue().decode(self._encoding, self._errors)
    def __repr__(self):
        # TextIOWrapper tells the encoding in its repr. In StringIO,
        # that's an implementation detail.
        return object.__repr__(self)
    @property
    def errors(self):
        return None
    @property
    def encoding(self):
        return None
    def detach(self):
        # This doesn't make sense on StringIO.
        self._unsupported("detach")
| gpl-3.0 |
dmitry-sobolev/ansible | lib/ansible/modules/network/eos/eos_system.py | 77 | 11333 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'core'}
DOCUMENTATION = """
---
module: eos_system
version_added: "2.3"
author: "Peter Sprygada (@privateip)"
short_description: Manage the system attributes on Arista EOS devices
description:
- This module provides declarative management of node system attributes
on Arista EOS devices. It provides an option to configure host system
parameters or remove those parameters from the device active
configuration.
extends_documentation_fragment: eos
options:
hostname:
description:
- Configure the device hostname parameter. This option takes an ASCII string value.
domain_name:
description:
- Configure the IP domain name
on the remote device to the provided value. Value
should be in the dotted name form and will be
appended to the C(hostname) to create a fully-qualified
domain name.
domain_search:
description:
- Provides the list of domain suffixes to
append to the hostname for the purpose of doing name resolution.
This argument accepts a list of names and will be reconciled
with the current active configuration on the running node.
lookup_source:
description:
- Provides one or more source
interfaces to use for performing DNS lookups. The interface
provided in C(lookup_source) can only exist in a single VRF. This
argument accepts either a list of interface names or a list of
hashes that configure the interface name and VRF name. See
examples.
name_servers:
description:
- List of DNS name servers by IP address to use to perform name resolution
lookups. This argument accepts either a list of DNS servers or
a list of hashes that configure the name server and VRF name. See
examples.
state:
description:
- State of the configuration
values in the device's current active configuration. When set
to I(present), the values should be configured in the device active
configuration and when set to I(absent) the values should not be
in the device active configuration
default: present
choices: ['present', 'absent']
"""
EXAMPLES = """
- name: configure hostname and domain-name
eos_system:
hostname: eos01
domain_name: test.example.com
- name: remove configuration
eos_system:
state: absent
- name: configure DNS lookup sources
eos_system:
lookup_source: Management1
- name: configure DNS lookup sources with VRF support
eos_system:
lookup_source:
- interface: Management1
vrf: mgmt
- interface: Ethernet1
vrf: myvrf
- name: configure name servers
eos_system:
name_servers:
- 8.8.8.8
- 8.8.4.4
- name: configure name servers with VRF support
eos_system:
name_servers:
- { server: 8.8.8.8, vrf: mgmt }
- { server: 8.8.4.4, vrf: mgmt }
"""
RETURN = """
commands:
description: The list of configuration mode commands to send to the device
returned: always
type: list
sample:
- hostname eos01
- ip domain-name test.example.com
session_name:
description: The EOS config session name used to load the configuration
returned: changed
type: str
sample: ansible_1479315771
"""
import re
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network_common import ComplexList
from ansible.module_utils.eos import load_config, get_config
from ansible.module_utils.eos import eos_argument_spec
# Cache of VRF names parsed from the device's running config; populated
# lazily by has_vrf() on first use (None means "not fetched yet").
_CONFIGURED_VRFS = None
def has_vrf(module, vrf):
    """Return True if *vrf* is configured on the device.

    Fetches the running config at most once per module run and caches
    the result in the module-level _CONFIGURED_VRFS list.
    """
    global _CONFIGURED_VRFS
    if _CONFIGURED_VRFS is not None:
        return vrf in _CONFIGURED_VRFS
    config = get_config(module)
    _CONFIGURED_VRFS = re.findall('vrf definition (\S+)', config)
    # The 'default' VRF always exists but never appears as an explicit
    # 'vrf definition' line in the configuration.
    _CONFIGURED_VRFS.append('default')
    return vrf in _CONFIGURED_VRFS
def map_obj_to_commands(want, have, module):
    """Diff the desired state (want) against the device state (have).

    Returns the ordered list of EOS configuration commands needed to
    converge the device; calls module.fail_json() when a referenced VRF
    does not exist on the device.
    """
    commands = list()
    state = module.params['state']
    # True when the desired value is set and differs from the device value.
    needs_update = lambda x: want.get(x) and (want.get(x) != have.get(x))
    if state == 'absent':
        if have['domain_name']:
            commands.append('no ip domain-name')
        if have['hostname'] != 'localhost':
            commands.append('no hostname')
    if state == 'present':
        if needs_update('hostname'):
            commands.append('hostname %s' % want['hostname'])
        if needs_update('domain_name'):
            commands.append('ip domain-name %s' % want['domain_name'])
        if want['domain_list']:
            # handle domain_list items to be removed
            for item in set(have['domain_list']).difference(want['domain_list']):
                commands.append('no ip domain-list %s' % item)
            # handle domain_list items to be added
            for item in set(want['domain_list']).difference(have['domain_list']):
                commands.append('ip domain-list %s' % item)
        if want['lookup_source']:
            # handle lookup_source items to be removed
            for item in have['lookup_source']:
                if item not in want['lookup_source']:
                    if item['vrf']:
                        if not has_vrf(module, item['vrf']):
                            module.fail_json(msg='vrf %s is not configured' % item['vrf'])
                        values = (item['vrf'], item['interface'])
                        commands.append('no ip domain lookup vrf %s source-interface %s' % values)
                    else:
                        commands.append('no ip domain lookup source-interface %s' % item['interface'])
            # handle lookup_source items to be added
            for item in want['lookup_source']:
                if item not in have['lookup_source']:
                    if item['vrf']:
                        if not has_vrf(module, item['vrf']):
                            module.fail_json(msg='vrf %s is not configured' % item['vrf'])
                        values = (item['vrf'], item['interface'])
                        commands.append('ip domain lookup vrf %s source-interface %s' % values)
                    else:
                        commands.append('ip domain lookup source-interface %s' % item['interface'])
        if want['name_servers']:
            # handle name_servers items to be removed. Order does matter here
            # since name servers can only be in one vrf at a time
            for item in have['name_servers']:
                if item not in want['name_servers']:
                    if not has_vrf(module, item['vrf']):
                        module.fail_json(msg='vrf %s is not configured' % item['vrf'])
                    if item['vrf'] not in ('default', None):
                        values = (item['vrf'], item['server'])
                        commands.append('no ip name-server vrf %s %s' % values)
                    else:
                        commands.append('no ip name-server %s' % item['server'])
            # handle name_servers items to be added
            for item in want['name_servers']:
                if item not in have['name_servers']:
                    if not has_vrf(module, item['vrf']):
                        module.fail_json(msg='vrf %s is not configured' % item['vrf'])
                    if item['vrf'] not in ('default', None):
                        values = (item['vrf'], item['server'])
                        commands.append('ip name-server vrf %s %s' % values)
                    else:
                        commands.append('ip name-server %s' % item['server'])
    return commands
def parse_hostname(config):
    """Return the configured hostname from *config*, or None if absent.

    :param config: running-config text retrieved from the device.
    """
    # Raw string: '\S' in a plain literal is an invalid escape and raises
    # a DeprecationWarning on modern Python.
    match = re.search(r'^hostname (\S+)', config, re.M)
    if match:
        return match.group(1)
    return None
def parse_domain_name(config):
    """Return the configured domain name from *config*, or None if absent.

    :param config: running-config text retrieved from the device.
    """
    # Raw string avoids the invalid '\S' escape in a plain str literal.
    match = re.search(r'^ip domain-name (\S+)', config, re.M)
    if match:
        return match.group(1)
    return None
def parse_lookup_source(config):
    """Parse "ip domain lookup ... source-interface" lines from *config*.

    :param config: running-config text retrieved from the device.
    :returns: list of dicts with keys ``interface`` and ``vrf``
        (``vrf`` is None when the line has no vrf keyword).
    """
    objects = list()
    # Raw string avoids the invalid '\S' escape in a plain str literal.
    regex = r'ip domain lookup (?:vrf (\S+) )*source-interface (\S+)'
    for vrf, intf in re.findall(regex, config, re.M):
        # findall yields '' for the unmatched optional vrf group;
        # normalize that to None so consumers see a consistent value.
        objects.append({'interface': intf, 'vrf': vrf or None})
    return objects
def parse_name_servers(config):
    """Parse "ip name-server vrf <vrf> <addr>" lines from *config*.

    :param config: running-config text retrieved from the device.
    :returns: list of dicts with keys ``server`` and ``vrf``.
    """
    objects = list()
    # Raw string avoids the invalid '\S' escape in a plain str literal.
    for vrf, addr in re.findall(r'ip name-server vrf (\S+) (\S+)', config, re.M):
        objects.append({'server': addr, 'vrf': vrf})
    return objects
def map_config_to_obj(module):
    """Build the "have" state by parsing the device's running config.

    :param module: AnsibleModule instance used to fetch the config.
    :returns: dict mirroring the structure produced by map_params_to_obj.
    """
    config = get_config(module)
    return {
        'hostname': parse_hostname(config),
        'domain_name': parse_domain_name(config),
        # Raw string avoids the invalid '\S' escape in a plain str literal.
        'domain_list': re.findall(r'^ip domain-list (\S+)', config, re.M),
        'lookup_source': parse_lookup_source(config),
        'name_servers': parse_name_servers(config)
    }
def map_params_to_obj(module):
    """Build the "want" state from the task arguments in ``module.params``.

    Scalar options are copied through unchanged; ``lookup_source`` and
    ``name_servers`` are run through ``ComplexList`` — presumably so that
    bare strings and dicts are both accepted (TODO confirm against
    ComplexList's implementation).
    """
    obj = {
        'hostname': module.params['hostname'],
        'domain_name': module.params['domain_name'],
        'domain_list': module.params['domain_list']
    }
    # key=True marks the attribute a bare string value maps onto.
    lookup_source = ComplexList(dict(
        interface=dict(key=True),
        vrf=dict()
    ), module)
    name_servers = ComplexList(dict(
        server=dict(key=True),
        vrf=dict(default='default')
    ), module)
    # Only normalize options the user actually supplied; leave the rest
    # as None so the command generator can tell "unset" from "empty".
    for arg, cast in [('lookup_source', lookup_source), ('name_servers', name_servers)]:
        if module.params[arg] is not None:
            obj[arg] = cast(module.params[arg])
        else:
            obj[arg] = None
    return obj
def main():
    """ main entry point for module execution
    """
    argument_spec = dict(
        hostname=dict(),
        domain_name=dict(),
        domain_list=dict(type='list', aliases=['domain_search']),
        # { interface: <str>, vrf: <str> }
        lookup_source=dict(type='list'),
        # { server: <str>; vrf: <str> }
        name_servers=dict(type='list'),
        state=dict(default='present', choices=['present', 'absent'])
    )
    # Merge in the connection/provider arguments shared by all eos_* modules.
    argument_spec.update(eos_argument_spec)
    module = AnsibleModule(argument_spec=argument_spec,
                           supports_check_mode=True)
    result = {'changed': False}
    # Diff desired ("want") state against the device ("have") state.
    want = map_params_to_obj(module)
    have = map_config_to_obj(module)
    commands = map_obj_to_commands(want, have, module)
    result['commands'] = commands
    if commands:
        # In check mode, build the config session but do not commit it.
        commit = not module.check_mode
        response = load_config(module, commands, commit=commit)
        if response.get('diff') and module._diff:
            result['diff'] = {'prepared': response.get('diff')}
        result['session_name'] = response.get('session')
        result['changed'] = True
    module.exit_json(**result)
# Allow direct execution of the module (standard Ansible entry point).
if __name__ == '__main__':
    main()
| gpl-3.0 |
guschmue/tensorflow | tensorflow/contrib/distributions/python/ops/gumbel.py | 65 | 7694 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""The Gumbel distribution class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops.distributions import distribution
class _Gumbel(distribution.Distribution):
  """The scalar Gumbel distribution with location `loc` and `scale` parameters.

  #### Mathematical details

  The probability density function (pdf) of this distribution is,

  ```none
  pdf(x; mu, sigma) = exp(-(x - mu) / sigma - exp(-(x - mu) / sigma))
  ```

  where `loc = mu` and `scale = sigma`.

  The cumulative density function of this distribution is,

  ```cdf(x; mu, sigma) = exp(-exp(-(x - mu) / sigma))```

  The Gumbel distribution is a member of the [location-scale family](
  https://en.wikipedia.org/wiki/Location-scale_family), i.e., it can be
  constructed as,

  ```none
  X ~ Gumbel(loc=0, scale=1)
  Y = loc + scale * X
  ```

  #### Examples

  Examples of initialization of one or a batch of distributions.

  ```python
  # Define a single scalar Gumbel distribution.
  dist = tf.contrib.distributions.Gumbel(loc=0., scale=3.)

  # Evaluate the cdf at 1, returning a scalar.
  dist.cdf(1.)

  # Define a batch of two scalar valued Gumbels.
  # The first has mean 1 and scale 11, the second 2 and 22.
  dist = tf.contrib.distributions.Gumbel(loc=[1, 2.], scale=[11, 22.])

  # Evaluate the pdf of the first distribution on 0, and the second on 1.5,
  # returning a length two tensor.
  dist.prob([0, 1.5])

  # Get 3 samples, returning a 3 x 2 tensor.
  dist.sample([3])
  ```

  Arguments are broadcast when possible.

  ```python
  # Define a batch of two scalar valued Logistics.
  # Both have mean 1, but different scales.
  dist = tf.contrib.distributions.Gumbel(loc=1., scale=[11, 22.])

  # Evaluate the pdf of both distributions on the same point, 3.0,
  # returning a length 2 tensor.
  dist.prob(3.0)
  ```

  """

  def __init__(self,
               loc,
               scale,
               validate_args=False,
               allow_nan_stats=True,
               name="Gumbel"):
    """Construct Gumbel distributions with location and scale `loc` and `scale`.

    The parameters `loc` and `scale` must be shaped in a way that supports
    broadcasting (e.g. `loc + scale` is a valid operation).

    Args:
      loc: Floating point tensor, the means of the distribution(s).
      scale: Floating point tensor, the scales of the distribution(s).
        scale must contain only positive values.
      validate_args: Python `bool`, default `False`. When `True` distribution
        parameters are checked for validity despite possibly degrading runtime
        performance. When `False` invalid inputs may silently render incorrect
        outputs.
      allow_nan_stats: Python `bool`, default `True`. When `True`,
        statistics (e.g., mean, mode, variance) use the value "`NaN`" to
        indicate the result is undefined. When `False`, an exception is raised
        if one or more of the statistic's batch members are undefined.
      name: Python `str` name prefixed to Ops created by this class.

    Raises:
      TypeError: if loc and scale are different dtypes.
    """
    parameters = locals()
    with ops.name_scope(name, values=[loc, scale]):
      # The positivity assertion only runs when validate_args is set.
      with ops.control_dependencies([check_ops.assert_positive(scale)] if
                                    validate_args else []):
        self._loc = array_ops.identity(loc, name="loc")
        self._scale = array_ops.identity(scale, name="scale")
        check_ops.assert_same_float_dtype([self._loc, self._scale])
    super(_Gumbel, self).__init__(
        dtype=self._scale.dtype,
        reparameterization_type=distribution.FULLY_REPARAMETERIZED,
        validate_args=validate_args,
        allow_nan_stats=allow_nan_stats,
        parameters=parameters,
        graph_parents=[self._loc, self._scale],
        name=name)

  @staticmethod
  def _param_shapes(sample_shape):
    # Both parameters share the sample shape.
    return dict(
        zip(("loc", "scale"), ([ops.convert_to_tensor(
            sample_shape, dtype=dtypes.int32)] * 2)))

  @property
  def loc(self):
    """Distribution parameter for the location."""
    return self._loc

  @property
  def scale(self):
    """Distribution parameter for scale."""
    return self._scale

  def _batch_shape_tensor(self):
    return array_ops.broadcast_dynamic_shape(
        array_ops.shape(self.loc), array_ops.shape(self.scale))

  def _batch_shape(self):
    return array_ops.broadcast_static_shape(
        self.loc.get_shape(), self.scale.get_shape())

  def _event_shape_tensor(self):
    return constant_op.constant([], dtype=dtypes.int32)

  def _event_shape(self):
    # Scalar distribution: events are 0-dimensional.
    return tensor_shape.scalar()

  def _sample_n(self, n, seed=None):
    # Uniform variates must be sampled from the open-interval `(0, 1)` rather
    # than `[0, 1)`. To do so, we use `np.finfo(self.dtype.as_numpy_dtype).tiny`
    # because it is the smallest, positive, "normal" number. A "normal" number
    # is such that the mantissa has an implicit leading 1. Normal, positive
    # numbers x, y have the reasonable property that, `x + y >= max(x, y)`. In
    # this case, a subnormal number (i.e., np.nextafter) can cause us to sample
    # 0.
    uniform = random_ops.random_uniform(
        shape=array_ops.concat([[n], self.batch_shape_tensor()], 0),
        minval=np.finfo(self.dtype.as_numpy_dtype).tiny,
        maxval=1.,
        dtype=self.dtype,
        seed=seed)
    # Inverse-CDF sampling: -log(-log(U)) is a standard Gumbel variate.
    sampled = -math_ops.log(-math_ops.log(uniform))
    return sampled * self.scale + self.loc

  def _log_prob(self, x):
    return self._log_unnormalized_prob(x) - self._log_normalization()

  def _prob(self, x):
    return math_ops.exp(self._log_prob(x))

  def _log_cdf(self, x):
    # log cdf(x) = log exp(-exp(-z)) = -exp(-z).
    return -math_ops.exp(-self._z(x))

  def _cdf(self, x):
    return math_ops.exp(-math_ops.exp(-self._z(x)))

  def _log_unnormalized_prob(self, x):
    z = self._z(x)
    return - z - math_ops.exp(-z)

  def _log_normalization(self):
    return math_ops.log(self.scale)

  def _entropy(self):
    # Use broadcasting rules to calculate the full broadcast sigma.
    scale = self.scale * array_ops.ones_like(self.loc)
    # Gumbel entropy: 1 + log(sigma) + Euler-Mascheroni constant.
    return 1 + math_ops.log(scale) + np.euler_gamma

  def _mean(self):
    # E[X] = mu + sigma * gamma (Euler-Mascheroni constant).
    return self.loc + self.scale * np.euler_gamma

  def _stddev(self):
    # stddev = sigma * pi / sqrt(6).
    return self.scale * array_ops.ones_like(self.loc) * math.pi / math.sqrt(6)

  def _mode(self):
    # The Gumbel mode coincides with the location parameter.
    return self.loc * array_ops.ones_like(self.scale)

  def _z(self, x):
    """Standardize input `x` to a unit logistic."""
    with ops.name_scope("standardize", values=[x]):
      return (x - self.loc) / self.scale
| apache-2.0 |
fraricci/pymatgen | pymatgen/io/abinit/variable.py | 4 | 6423 | import string
import warnings
import collections
import numpy as np
__all__ = [
'InputVariable',
]
_SPECIAL_DATASET_INDICES = (':', '+', '?')
_DATASET_INDICES = ''.join(list(string.digits) + list(_SPECIAL_DATASET_INDICES))
_INTERNAL_DATASET_INDICES = ('__s', '__i', '__a')
_SPECIAL_CONVERSION = zip(_INTERNAL_DATASET_INDICES, _SPECIAL_DATASET_INDICES)
_UNITS = {
'bohr': 1.0,
'angstrom': 1.8897261328856432,
'hartree': 1.0,
'Ha': 1.0,
'eV': 0.03674932539796232,
}
class InputVariable(object):
    """
    An Abinit input variable.

    Holds a variable name (possibly suffixed with a dataset index), its
    value, and optional units, and knows how to render itself as a line
    (or block of lines) of an Abinit input file via str().
    """

    def __init__(self, name, value, units='', valperline=3):
        """
        Args:
            name: variable name, possibly with a trailing dataset index.
            value: scalar, list, nested list or numpy array.
            units: optional unit string (must be a key of _UNITS).
            valperline: maximum number of values printed per line.
        """
        self._name = name
        self.value = value
        self._units = units
        # Maximum number of values per line.
        self.valperline = valperline
        if name in ['bdgw']:
            self.valperline = 2
        # If the last element of a list value is a known unit string,
        # strip it off and store it as the units instead.
        if (is_iter(self.value) and isinstance(self.value[-1], str) and self.value[-1] in _UNITS):
            self.value = list(self.value)
            self._units = self.value.pop(-1)

    def get_value(self):
        """Return the value."""
        if self.units:
            return list(self.value) + [self.units]
        else:
            return self.value

    @property
    def name(self):
        return self._name

    @property
    def basename(self):
        """Return the name trimmed of any dataset index."""
        # NOTE(review): rstrip removes *characters*, so a name legitimately
        # ending in digits would also be trimmed — confirm intended.
        basename = self.name
        return basename.rstrip(_DATASET_INDICES)

    @property
    def dataset(self):
        """Return the dataset index in string form."""
        return self.name.split(self.basename)[-1]

    @property
    def units(self):
        """Return the units."""
        return self._units

    def __str__(self):
        """Declaration of the variable in the input file."""
        value = self.value
        if value is None or not str(value):
            return ''

        var = self.name
        line = ' ' + var

        # By default, do not impose a number of decimal points
        floatdecimal = 0

        # For some inputs, enforce number of decimal points...
        if any(inp in var for inp in ('xred', 'xcart', 'rprim', 'qpt', 'kpt')):
            floatdecimal = 16

        # ...but not for those
        if any(inp in var for inp in ('ngkpt', 'kptrlatt', 'ngqpt', 'ng2qpt')):
            floatdecimal = 0

        # Flatten numpy arrays to a plain 1-d Python list.
        if isinstance(value, np.ndarray):
            n = 1
            for i in np.shape(value):
                n *= i
            value = np.reshape(value, n)
            value = list(value)

        # values in lists
        if isinstance(value, (list, tuple)):
            # Reshape a list of lists into a single list
            if all(isinstance(v, (list, tuple)) for v in value):
                line += self.format_list2d(value, floatdecimal)
            else:
                line += self.format_list(value, floatdecimal)
        # scalar values
        else:
            line += ' ' + str(value)

        # Add units
        if self.units:
            line += ' ' + self.units

        return line

    def format_scalar(self, val, floatdecimal=0):
        """
        Format a single numerical value into a string
        with the appropriate number of decimal.
        """
        sval = str(val)
        # Plain integers pass through untouched unless decimals are forced.
        if sval.lstrip('-').lstrip('+').isdigit() and floatdecimal == 0:
            return sval

        try:
            fval = float(val)
        except Exception:
            # Non-numeric values (e.g. '*1') are emitted verbatim.
            return sval

        # Fixed-point for moderate magnitudes, exponent form otherwise.
        if fval == 0 or (abs(fval) > 1e-3 and abs(fval) < 1e4):
            form = 'f'
            addlen = 5
        else:
            form = 'e'
            addlen = 8

        ndec = max(len(str(fval-int(fval)))-2, floatdecimal)
        ndec = min(ndec, 10)

        sval = '{v:>{l}.{p}{f}}'.format(v=fval, l=ndec+addlen, p=ndec, f=form)
        # Abinit expects Fortran-style 'd' exponents.
        sval = sval.replace('e', 'd')

        return sval

    def format_list2d(self, values, floatdecimal=0):
        """Format a list of lists."""
        lvals = flatten(values)

        # Determine the representation
        if all(isinstance(v, int) for v in lvals):
            type_all = int
        else:
            try:
                for v in lvals:
                    float(v)
                type_all = float
            except Exception:
                type_all = str

        # Determine the format
        width = max(len(str(s)) for s in lvals)
        if type_all == int:
            formatspec = '>{0}d'.format(width)
        elif type_all == str:
            formatspec = '>{0}'.format(width)
        else:
            # Number of decimal
            maxdec = max(len(str(f-int(f)))-2 for f in lvals)
            ndec = min(max(maxdec, floatdecimal), 10)
            if all(f == 0 or (abs(f) > 1e-3 and abs(f) < 1e4) for f in lvals):
                formatspec = '>{w}.{p}f'.format(w=ndec+5, p=ndec)
            else:
                formatspec = '>{w}.{p}e'.format(w=ndec+8, p=ndec)

        # One row of output per inner list.
        line = '\n'
        for L in values:
            for val in L:
                line += ' {v:{f}}'.format(v=val, f=formatspec)
            line += '\n'

        return line.rstrip('\n')

    def format_list(self, values, floatdecimal=0):
        """
        Format a list of values into a string.
        The result might be spread among several lines.
        """
        line = ''

        # Format the line declaring the value
        for i, val in enumerate(values):
            line += ' ' + self.format_scalar(val, floatdecimal)
            # Wrap after valperline values.
            if self.valperline is not None and (i+1) % self.valperline == 0:
                line += '\n'

        # Add a carriage return in case of several lines
        if '\n' in line.rstrip('\n'):
            line = '\n' + line

        return line.rstrip('\n')
def is_iter(obj):
    """Tell whether *obj* is list-like, i.e. exposes ``__iter__``."""
    has_iter_protocol = hasattr(obj, '__iter__')
    return has_iter_protocol
def flatten(iterable):
    """Make an iterable flat, i.e. a 1d iterable object.

    Nested iterables are expanded depth-first; strings are treated as
    atomic values.  Returns the flattened elements as a tuple.
    """
    iterator = iter(iterable)
    array, stack = collections.deque(), collections.deque()
    while True:
        try:
            value = next(iterator)
        except StopIteration:
            if not stack:
                return tuple(array)
            # Finished a nested iterable; resume its parent.
            iterator = stack.pop()
        else:
            # collections.abc.Iterable: the bare collections.Iterable
            # alias was deprecated in 3.3 and removed in Python 3.10.
            if not isinstance(value, str) \
                    and isinstance(value, collections.abc.Iterable):
                stack.append(iterator)
                iterator = iter(value)
            else:
                array.append(value)
| mit |
antsankov/cufcq-new | modules/fcq_card_module.py | 1 | 1052 | from modules.base_module import BaseModule
class FcqCardModule(BaseModule):
    """UI module rendering a grid of FCQ cards plus JavaScript that
    lazily loads each card's body on first click."""

    # Class-level default; render() rebinds it per call, so this is only
    # used if embedded_javascript() runs before render().
    fcq_ids = []

    def render(self, fcq_ids, color):
        # NOTE(review): reverse() mutates the caller's list in place --
        # confirm callers do not rely on the original ordering.
        self.fcq_ids = fcq_ids
        self.fcq_ids.reverse()
        # Split into rows of up to 6 cards for the template grid.
        chunks = [self.fcq_ids[x:x + 6]
                  for x in range(0, len(self.fcq_ids), 6)]
        return self.render_string(
            'modules/FcqCollection.html', chunks=chunks, fcq_ids=fcq_ids, fcq_title=self.fcq_title, convert_date=self.convert_date, color=color)

    def embedded_javascript(self):
        # Build one lazy-load handler per card: the first click on
        # #card-<id> AJAX-loads /ajax/fcqcard/<id> into #body-<id> and
        # then wires up the card's tab navigation.
        javascript = ""
        for fcq_id in self.fcq_ids:
            javascript += '''
            $("#card-{0}").one( "click", function(){{
                $( "#body-{0}" ).load( "/ajax/fcqcard/{0}", function(){{
                    $( "#nav-{0} :not(.disabled) a").click(function (e) {{
                        e.preventDefault();
                        $(this).tab('show');
                        console.log(e);
                    }});
                }});
            }});
            '''.format(fcq_id)
        return javascript
| mit |
skynet/letsencrypt | acme/acme/challenges_test.py | 14 | 25817 | """Tests for acme.challenges."""
import unittest
import mock
import OpenSSL
import requests
from six.moves.urllib import parse as urllib_parse # pylint: disable=import-error
from acme import errors
from acme import jose
from acme import other
from acme import test_util
# Shared test fixtures, loaded once at import time via acme.test_util.
CERT = test_util.load_cert('cert.pem')
KEY = test_util.load_rsa_private_key('rsa512_key.pem')
class SimpleHTTPTest(unittest.TestCase):
    """Serialization round-trip tests for acme.challenges.SimpleHTTP."""

    def setUp(self):
        from acme.challenges import SimpleHTTP
        self.msg = SimpleHTTP(
            token=jose.decode_b64jose(
                'evaGxfADs6pSRb2LAv9IZf17Dt3juxGJ+PCt92wr+oA'))
        self.jmsg = {
            'type': 'simpleHttp',
            'token': 'evaGxfADs6pSRb2LAv9IZf17Dt3juxGJ-PCt92wr-oA',
        }

    def test_to_partial_json(self):
        self.assertEqual(self.jmsg, self.msg.to_partial_json())

    def test_from_json(self):
        from acme.challenges import SimpleHTTP
        self.assertEqual(self.msg, SimpleHTTP.from_json(self.jmsg))

    def test_from_json_hashable(self):
        from acme.challenges import SimpleHTTP
        hash(SimpleHTTP.from_json(self.jmsg))

    def test_good_token(self):
        # A too-short token must be rejected.
        self.assertTrue(self.msg.good_token)
        self.assertFalse(
            self.msg.update(token=b'..').good_token)
class SimpleHTTPResponseTest(unittest.TestCase):
    # pylint: disable=too-many-instance-attributes
    """Tests for acme.challenges.SimpleHTTPResponse, covering JSON
    round-trips, URI construction, validation generation/checking and
    mocked HTTP verification."""

    def setUp(self):
        from acme.challenges import SimpleHTTPResponse
        self.msg_http = SimpleHTTPResponse(tls=False)
        self.msg_https = SimpleHTTPResponse(tls=True)
        self.jmsg_http = {
            'resource': 'challenge',
            'type': 'simpleHttp',
            'tls': False,
        }
        self.jmsg_https = {
            'resource': 'challenge',
            'type': 'simpleHttp',
            'tls': True,
        }
        from acme.challenges import SimpleHTTP
        self.chall = SimpleHTTP(token=(b"x" * 16))
        self.resp_http = SimpleHTTPResponse(tls=False)
        self.resp_https = SimpleHTTPResponse(tls=True)
        self.good_headers = {'Content-Type': SimpleHTTPResponse.CONTENT_TYPE}

    def test_to_partial_json(self):
        self.assertEqual(self.jmsg_http, self.msg_http.to_partial_json())
        self.assertEqual(self.jmsg_https, self.msg_https.to_partial_json())

    def test_from_json(self):
        from acme.challenges import SimpleHTTPResponse
        self.assertEqual(
            self.msg_http, SimpleHTTPResponse.from_json(self.jmsg_http))
        self.assertEqual(
            self.msg_https, SimpleHTTPResponse.from_json(self.jmsg_https))

    def test_from_json_hashable(self):
        from acme.challenges import SimpleHTTPResponse
        hash(SimpleHTTPResponse.from_json(self.jmsg_http))
        hash(SimpleHTTPResponse.from_json(self.jmsg_https))

    def test_scheme(self):
        self.assertEqual('http', self.msg_http.scheme)
        self.assertEqual('https', self.msg_https.scheme)

    def test_port(self):
        self.assertEqual(80, self.msg_http.port)
        self.assertEqual(443, self.msg_https.port)

    def test_uri(self):
        self.assertEqual(
            'http://example.com/.well-known/acme-challenge/'
            'eHh4eHh4eHh4eHh4eHh4eA', self.msg_http.uri(
                'example.com', self.chall))
        self.assertEqual(
            'https://example.com/.well-known/acme-challenge/'
            'eHh4eHh4eHh4eHh4eHh4eA', self.msg_https.uri(
                'example.com', self.chall))

    def test_gen_check_validation(self):
        account_key = jose.JWKRSA.load(test_util.load_vector('rsa512_key.pem'))
        self.assertTrue(self.resp_http.check_validation(
            validation=self.resp_http.gen_validation(self.chall, account_key),
            chall=self.chall, account_public_key=account_key.public_key()))

    def test_gen_check_validation_wrong_key(self):
        # Validation signed by key1 must not verify against key2.
        key1 = jose.JWKRSA.load(test_util.load_vector('rsa512_key.pem'))
        key2 = jose.JWKRSA.load(test_util.load_vector('rsa1024_key.pem'))
        self.assertFalse(self.resp_http.check_validation(
            validation=self.resp_http.gen_validation(self.chall, key1),
            chall=self.chall, account_public_key=key2.public_key()))

    def test_check_validation_wrong_payload(self):
        account_key = jose.JWKRSA.load(test_util.load_vector('rsa512_key.pem'))
        validations = tuple(
            jose.JWS.sign(payload=payload, alg=jose.RS256, key=account_key)
            for payload in (b'', b'{}', self.chall.json_dumps().encode('utf-8'),
                            self.resp_http.json_dumps().encode('utf-8'))
        )
        for validation in validations:
            self.assertFalse(self.resp_http.check_validation(
                validation=validation, chall=self.chall,
                account_public_key=account_key.public_key()))

    def test_check_validation_wrong_fields(self):
        resource = self.resp_http.gen_resource(self.chall)
        account_key = jose.JWKRSA.load(test_util.load_vector('rsa512_key.pem'))
        validations = tuple(
            jose.JWS.sign(payload=bad_resource.json_dumps().encode('utf-8'),
                          alg=jose.RS256, key=account_key)
            for bad_resource in (resource.update(tls=True),
                                 resource.update(token=(b'x' * 20)))
        )
        for validation in validations:
            self.assertFalse(self.resp_http.check_validation(
                validation=validation, chall=self.chall,
                account_public_key=account_key.public_key()))

    @mock.patch("acme.challenges.requests.get")
    def test_simple_verify_good_validation(self, mock_get):
        account_key = jose.JWKRSA.load(test_util.load_vector('rsa512_key.pem'))
        for resp in self.resp_http, self.resp_https:
            mock_get.reset_mock()
            validation = resp.gen_validation(self.chall, account_key)
            mock_get.return_value = mock.MagicMock(
                text=validation.json_dumps(), headers=self.good_headers)
            self.assertTrue(resp.simple_verify(self.chall, "local", None))
            mock_get.assert_called_once_with(resp.uri(
                "local", self.chall), verify=False)

    @mock.patch("acme.challenges.requests.get")
    def test_simple_verify_bad_validation(self, mock_get):
        mock_get.return_value = mock.MagicMock(
            text="!", headers=self.good_headers)
        self.assertFalse(self.resp_http.simple_verify(
            self.chall, "local", None))

    @mock.patch("acme.challenges.requests.get")
    def test_simple_verify_bad_content_type(self, mock_get):
        mock_get().text = self.chall.token
        self.assertFalse(self.resp_http.simple_verify(
            self.chall, "local", None))

    @mock.patch("acme.challenges.requests.get")
    def test_simple_verify_connection_error(self, mock_get):
        # Network failures must be reported as verification failure,
        # not propagate as exceptions.
        mock_get.side_effect = requests.exceptions.RequestException
        self.assertFalse(self.resp_http.simple_verify(
            self.chall, "local", None))

    @mock.patch("acme.challenges.requests.get")
    def test_simple_verify_port(self, mock_get):
        self.resp_http.simple_verify(
            self.chall, domain="local", account_public_key=None, port=4430)
        self.assertEqual("local:4430", urllib_parse.urlparse(
            mock_get.mock_calls[0][1][0]).netloc)
class DVSNITest(unittest.TestCase):
    """Serialization and response-generation tests for acme.challenges.DVSNI."""

    def setUp(self):
        from acme.challenges import DVSNI
        self.msg = DVSNI(
            token=jose.b64decode('a82d5ff8ef740d12881f6d3c2277ab2e'))
        self.jmsg = {
            'type': 'dvsni',
            'token': 'a82d5ff8ef740d12881f6d3c2277ab2e',
        }

    def test_to_partial_json(self):
        self.assertEqual(self.jmsg, self.msg.to_partial_json())

    def test_from_json(self):
        from acme.challenges import DVSNI
        self.assertEqual(self.msg, DVSNI.from_json(self.jmsg))

    def test_from_json_hashable(self):
        from acme.challenges import DVSNI
        hash(DVSNI.from_json(self.jmsg))

    def test_from_json_invalid_token_length(self):
        from acme.challenges import DVSNI
        self.jmsg['token'] = jose.encode_b64jose(b'abcd')
        self.assertRaises(
            jose.DeserializationError, DVSNI.from_json, self.jmsg)

    def test_gen_response(self):
        # The signed response payload must round-trip back to the challenge.
        key = jose.JWKRSA(key=KEY)
        from acme.challenges import DVSNI
        self.assertEqual(self.msg, DVSNI.json_loads(
            self.msg.gen_response(key).validation.payload.decode()))
class DVSNIResponseTest(unittest.TestCase):
    # pylint: disable=too-many-instance-attributes
    """Tests for acme.challenges.DVSNIResponse: z-domain derivation,
    certificate probing/generation and simple_verify behavior."""

    def setUp(self):
        self.key = jose.JWKRSA(key=KEY)

        from acme.challenges import DVSNI
        self.chall = DVSNI(
            token=jose.b64decode(b'a82d5ff8ef740d12881f6d3c2277ab2e'))

        from acme.challenges import DVSNIResponse
        self.validation = jose.JWS.sign(
            payload=self.chall.json_dumps(sort_keys=True).encode(),
            key=self.key, alg=jose.RS256)
        self.msg = DVSNIResponse(validation=self.validation)
        self.jmsg_to = {
            'resource': 'challenge',
            'type': 'dvsni',
            'validation': self.validation,
        }
        self.jmsg_from = {
            'resource': 'challenge',
            'type': 'dvsni',
            'validation': self.validation.to_json(),
        }

        # Expected z value: two concatenated hex labels forming the
        # synthetic SNI name <label1>.<label2>.acme.invalid.
        # pylint: disable=invalid-name
        label1 = b'e2df3498860637c667fedadc5a8494ec'
        label2 = b'09dcc75553c9b3bd73662b50e71b1e42'
        self.z = label1 + label2
        self.z_domain = label1 + b'.' + label2 + b'.acme.invalid'
        self.domain = 'foo.com'

    def test_z_and_domain(self):
        self.assertEqual(self.z, self.msg.z)
        self.assertEqual(self.z_domain, self.msg.z_domain)

    def test_to_partial_json(self):
        self.assertEqual(self.jmsg_to, self.msg.to_partial_json())

    def test_from_json(self):
        from acme.challenges import DVSNIResponse
        self.assertEqual(self.msg, DVSNIResponse.from_json(self.jmsg_from))

    def test_from_json_hashable(self):
        from acme.challenges import DVSNIResponse
        hash(DVSNIResponse.from_json(self.jmsg_from))

    @mock.patch('acme.challenges.socket.gethostbyname')
    @mock.patch('acme.challenges.crypto_util.probe_sni')
    def test_probe_cert(self, mock_probe_sni, mock_gethostbyname):
        mock_gethostbyname.return_value = '127.0.0.1'
        self.msg.probe_cert('foo.com')
        mock_gethostbyname.assert_called_once_with('foo.com')
        mock_probe_sni.assert_called_once_with(
            host='127.0.0.1', port=self.msg.PORT,
            name=self.z_domain)

        # host/port/extra kwargs must pass through; name is always forced
        # to the z_domain.
        self.msg.probe_cert('foo.com', host='8.8.8.8')
        mock_probe_sni.assert_called_with(
            host='8.8.8.8', port=mock.ANY, name=mock.ANY)

        self.msg.probe_cert('foo.com', port=1234)
        mock_probe_sni.assert_called_with(
            host=mock.ANY, port=1234, name=mock.ANY)

        self.msg.probe_cert('foo.com', bar='baz')
        mock_probe_sni.assert_called_with(
            host=mock.ANY, port=mock.ANY, name=mock.ANY, bar='baz')

        self.msg.probe_cert('foo.com', name=b'xxx')
        mock_probe_sni.assert_called_with(
            host=mock.ANY, port=mock.ANY,
            name=self.z_domain)

    def test_gen_verify_cert(self):
        key1 = test_util.load_pyopenssl_private_key('rsa512_key.pem')
        cert, key2 = self.msg.gen_cert(key1)
        self.assertEqual(key1, key2)
        self.assertTrue(self.msg.verify_cert(cert))

    def test_gen_verify_cert_gen_key(self):
        # gen_cert without a key must generate one itself.
        cert, key = self.msg.gen_cert()
        self.assertTrue(isinstance(key, OpenSSL.crypto.PKey))
        self.assertTrue(self.msg.verify_cert(cert))

    def test_verify_bad_cert(self):
        self.assertFalse(self.msg.verify_cert(test_util.load_cert('cert.pem')))

    def test_simple_verify_wrong_account_key(self):
        self.assertFalse(self.msg.simple_verify(
            self.chall, self.domain, jose.JWKRSA.load(
                test_util.load_vector('rsa256_key.pem')).public_key()))

    def test_simple_verify_wrong_payload(self):
        for payload in b'', b'{}':
            msg = self.msg.update(validation=jose.JWS.sign(
                payload=payload, key=self.key, alg=jose.RS256))
            self.assertFalse(msg.simple_verify(
                self.chall, self.domain, self.key.public_key()))

    def test_simple_verify_wrong_token(self):
        msg = self.msg.update(validation=jose.JWS.sign(
            payload=self.chall.update(token=(b'b' * 20)).json_dumps().encode(),
            key=self.key, alg=jose.RS256))
        self.assertFalse(msg.simple_verify(
            self.chall, self.domain, self.key.public_key()))

    @mock.patch('acme.challenges.DVSNIResponse.verify_cert', autospec=True)
    def test_simple_verify(self, mock_verify_cert):
        mock_verify_cert.return_value = mock.sentinel.verification
        self.assertEqual(mock.sentinel.verification, self.msg.simple_verify(
            self.chall, self.domain, self.key.public_key(),
            cert=mock.sentinel.cert))
        mock_verify_cert.assert_called_once_with(self.msg, mock.sentinel.cert)

    def test_simple_verify_false_on_probe_error(self):
        # Probe errors must surface as a False result, not an exception.
        chall = mock.Mock()
        chall.probe_cert.side_effect = errors.Error
        self.assertFalse(self.msg.simple_verify(
            self.chall, self.domain, self.key.public_key()))
class RecoveryContactTest(unittest.TestCase):
    """Serialization tests for acme.challenges.RecoveryContact."""

    def setUp(self):
        from acme.challenges import RecoveryContact
        self.msg = RecoveryContact(
            activation_url='https://example.ca/sendrecovery/a5bd99383fb0',
            success_url='https://example.ca/confirmrecovery/bb1b9928932',
            contact='c********n@example.com')
        self.jmsg = {
            'type': 'recoveryContact',
            'activationURL': 'https://example.ca/sendrecovery/a5bd99383fb0',
            'successURL': 'https://example.ca/confirmrecovery/bb1b9928932',
            'contact': 'c********n@example.com',
        }

    def test_to_partial_json(self):
        self.assertEqual(self.jmsg, self.msg.to_partial_json())

    def test_from_json(self):
        from acme.challenges import RecoveryContact
        self.assertEqual(self.msg, RecoveryContact.from_json(self.jmsg))

    def test_from_json_hashable(self):
        from acme.challenges import RecoveryContact
        hash(RecoveryContact.from_json(self.jmsg))

    def test_json_without_optionals(self):
        # All three fields are optional and must default to None.
        del self.jmsg['activationURL']
        del self.jmsg['successURL']
        del self.jmsg['contact']

        from acme.challenges import RecoveryContact
        msg = RecoveryContact.from_json(self.jmsg)

        self.assertTrue(msg.activation_url is None)
        self.assertTrue(msg.success_url is None)
        self.assertTrue(msg.contact is None)
        self.assertEqual(self.jmsg, msg.to_partial_json())
class RecoveryContactResponseTest(unittest.TestCase):
    """Serialization tests for acme.challenges.RecoveryContactResponse."""

    def setUp(self):
        from acme.challenges import RecoveryContactResponse
        self.msg = RecoveryContactResponse(token='23029d88d9e123e')
        self.jmsg = {
            'resource': 'challenge',
            'type': 'recoveryContact',
            'token': '23029d88d9e123e',
        }

    def test_to_partial_json(self):
        self.assertEqual(self.jmsg, self.msg.to_partial_json())

    def test_from_json(self):
        from acme.challenges import RecoveryContactResponse
        self.assertEqual(
            self.msg, RecoveryContactResponse.from_json(self.jmsg))

    def test_from_json_hashable(self):
        from acme.challenges import RecoveryContactResponse
        hash(RecoveryContactResponse.from_json(self.jmsg))

    def test_json_without_optionals(self):
        # token is optional and must default to None.
        del self.jmsg['token']

        from acme.challenges import RecoveryContactResponse
        msg = RecoveryContactResponse.from_json(self.jmsg)

        self.assertTrue(msg.token is None)
        self.assertEqual(self.jmsg, msg.to_partial_json())
class ProofOfPossessionHintsTest(unittest.TestCase):
    """Serialization tests for acme.challenges.ProofOfPossession.Hints."""

    def setUp(self):
        jwk = jose.JWKRSA(key=KEY.public_key())
        issuers = (
            'C=US, O=SuperT LLC, CN=SuperTrustworthy Public CA',
            'O=LessTrustworthy CA Inc, CN=LessTrustworthy But StillSecure',
        )
        cert_fingerprints = (
            '93416768eb85e33adc4277f4c9acd63e7418fcfe',
            '16d95b7b63f1972b980b14c20291f3c0d1855d95',
            '48b46570d9fc6358108af43ad1649484def0debf',
        )
        # NOTE(review): this is a parenthesized *string*, not a 1-tuple
        # (missing trailing comma) -- likely intended as a tuple; verify
        # against the Hints field type before changing.
        subject_key_identifiers = ('d0083162dcc4c8a23ecb8aecbd86120e56fd24e5')
        authorized_for = ('www.example.com', 'example.net')
        serial_numbers = (34234239832, 23993939911, 17)

        from acme.challenges import ProofOfPossession
        self.msg = ProofOfPossession.Hints(
            jwk=jwk, issuers=issuers, cert_fingerprints=cert_fingerprints,
            certs=(CERT,), subject_key_identifiers=subject_key_identifiers,
            authorized_for=authorized_for, serial_numbers=serial_numbers)

        self.jmsg_to = {
            'jwk': jwk,
            'certFingerprints': cert_fingerprints,
            'certs': (jose.encode_b64jose(OpenSSL.crypto.dump_certificate(
                OpenSSL.crypto.FILETYPE_ASN1, CERT)),),
            'subjectKeyIdentifiers': subject_key_identifiers,
            'serialNumbers': serial_numbers,
            'issuers': issuers,
            'authorizedFor': authorized_for,
        }
        self.jmsg_from = self.jmsg_to.copy()
        self.jmsg_from.update({'jwk': jwk.to_json()})

    def test_to_partial_json(self):
        self.assertEqual(self.jmsg_to, self.msg.to_partial_json())

    def test_from_json(self):
        from acme.challenges import ProofOfPossession
        self.assertEqual(
            self.msg, ProofOfPossession.Hints.from_json(self.jmsg_from))

    def test_from_json_hashable(self):
        from acme.challenges import ProofOfPossession
        hash(ProofOfPossession.Hints.from_json(self.jmsg_from))

    def test_json_without_optionals(self):
        # Every hint except jwk is optional and defaults to ().
        for optional in ['certFingerprints', 'certs', 'subjectKeyIdentifiers',
                         'serialNumbers', 'issuers', 'authorizedFor']:
            del self.jmsg_from[optional]
            del self.jmsg_to[optional]

        from acme.challenges import ProofOfPossession
        msg = ProofOfPossession.Hints.from_json(self.jmsg_from)

        self.assertEqual(msg.cert_fingerprints, ())
        self.assertEqual(msg.certs, ())
        self.assertEqual(msg.subject_key_identifiers, ())
        self.assertEqual(msg.serial_numbers, ())
        self.assertEqual(msg.issuers, ())
        self.assertEqual(msg.authorized_for, ())
        self.assertEqual(self.jmsg_to, msg.to_partial_json())
class ProofOfPossessionTest(unittest.TestCase):
    """Serialization tests for acme.challenges.ProofOfPossession."""

    def setUp(self):
        from acme.challenges import ProofOfPossession
        hints = ProofOfPossession.Hints(
            jwk=jose.JWKRSA(key=KEY.public_key()), cert_fingerprints=(),
            certs=(), serial_numbers=(), subject_key_identifiers=(),
            issuers=(), authorized_for=())
        self.msg = ProofOfPossession(
            alg=jose.RS256, hints=hints,
            nonce=b'xD\xf9\xb9\xdbU\xed\xaa\x17\xf1y|\x81\x88\x99 ')

        self.jmsg_to = {
            'type': 'proofOfPossession',
            'alg': jose.RS256,
            'nonce': 'eET5udtV7aoX8Xl8gYiZIA',
            'hints': hints,
        }

        self.jmsg_from = {
            'type': 'proofOfPossession',
            'alg': jose.RS256.to_json(),
            'nonce': 'eET5udtV7aoX8Xl8gYiZIA',
            'hints': hints.to_json(),
        }

    def test_to_partial_json(self):
        self.assertEqual(self.jmsg_to, self.msg.to_partial_json())

    def test_from_json(self):
        from acme.challenges import ProofOfPossession
        self.assertEqual(
            self.msg, ProofOfPossession.from_json(self.jmsg_from))

    def test_from_json_hashable(self):
        from acme.challenges import ProofOfPossession
        hash(ProofOfPossession.from_json(self.jmsg_from))
class ProofOfPossessionResponseTest(unittest.TestCase):
    """Serialization and signature-verification tests for
    acme.challenges.ProofOfPossessionResponse."""

    def setUp(self):
        # acme-spec uses a confusing example in which both signature
        # nonce and challenge nonce are the same, don't make the same
        # mistake here...
        signature = other.Signature(
            alg=jose.RS256, jwk=jose.JWKRSA(key=KEY.public_key()),
            sig=b'\xa7\xc1\xe7\xe82o\xbc\xcd\xd0\x1e\x010#Z|\xaf\x15\x83'
                b'\x94\x8f#\x9b\nQo(\x80\x15,\x08\xfcz\x1d\xfd\xfd.\xaap'
                b'\xfa\x06\xd1\xa2f\x8d8X2>%d\xbd%\xe1T\xdd\xaa0\x18\xde'
                b'\x99\x08\xf0\x0e{',
            nonce=b'\x99\xc7Q\xb3f2\xbc\xdci\xfe\xd6\x98k\xc67\xdf',
        )

        from acme.challenges import ProofOfPossessionResponse
        self.msg = ProofOfPossessionResponse(
            nonce=b'xD\xf9\xb9\xdbU\xed\xaa\x17\xf1y|\x81\x88\x99 ',
            signature=signature)

        self.jmsg_to = {
            'resource': 'challenge',
            'type': 'proofOfPossession',
            'nonce': 'eET5udtV7aoX8Xl8gYiZIA',
            'signature': signature,
        }
        self.jmsg_from = {
            'resource': 'challenge',
            'type': 'proofOfPossession',
            'nonce': 'eET5udtV7aoX8Xl8gYiZIA',
            'signature': signature.to_json(),
        }

    def test_verify(self):
        self.assertTrue(self.msg.verify())

    def test_to_partial_json(self):
        self.assertEqual(self.jmsg_to, self.msg.to_partial_json())

    def test_from_json(self):
        from acme.challenges import ProofOfPossessionResponse
        self.assertEqual(
            self.msg, ProofOfPossessionResponse.from_json(self.jmsg_from))

    def test_from_json_hashable(self):
        from acme.challenges import ProofOfPossessionResponse
        hash(ProofOfPossessionResponse.from_json(self.jmsg_from))
class DNSTest(unittest.TestCase):
def setUp(self):
self.account_key = jose.JWKRSA.load(
test_util.load_vector('rsa512_key.pem'))
from acme.challenges import DNS
self.msg = DNS(token=jose.b64decode(
b'evaGxfADs6pSRb2LAv9IZf17Dt3juxGJ-PCt92wr-oA'))
self.jmsg = {
'type': 'dns',
'token': 'evaGxfADs6pSRb2LAv9IZf17Dt3juxGJ-PCt92wr-oA',
}
def test_to_partial_json(self):
self.assertEqual(self.jmsg, self.msg.to_partial_json())
def test_from_json(self):
from acme.challenges import DNS
self.assertEqual(self.msg, DNS.from_json(self.jmsg))
def test_from_json_hashable(self):
from acme.challenges import DNS
hash(DNS.from_json(self.jmsg))
def test_gen_check_validation(self):
self.assertTrue(self.msg.check_validation(
self.msg.gen_validation(self.account_key),
self.account_key.public_key()))
def test_gen_check_validation_wrong_key(self):
key2 = jose.JWKRSA.load(test_util.load_vector('rsa1024_key.pem'))
self.assertFalse(self.msg.check_validation(
self.msg.gen_validation(self.account_key), key2.public_key()))
def test_check_validation_wrong_payload(self):
validations = tuple(
jose.JWS.sign(payload=payload, alg=jose.RS256, key=self.account_key)
for payload in (b'', b'{}')
)
for validation in validations:
self.assertFalse(self.msg.check_validation(
validation, self.account_key.public_key()))
def test_check_validation_wrong_fields(self):
bad_validation = jose.JWS.sign(
payload=self.msg.update(token=b'x' * 20).json_dumps().encode('utf-8'),
alg=jose.RS256, key=self.account_key)
self.assertFalse(self.msg.check_validation(
bad_validation, self.account_key.public_key()))
def test_gen_response(self):
with mock.patch('acme.challenges.DNS.gen_validation') as mock_gen:
mock_gen.return_value = mock.sentinel.validation
response = self.msg.gen_response(self.account_key)
from acme.challenges import DNSResponse
self.assertTrue(isinstance(response, DNSResponse))
self.assertEqual(response.validation, mock.sentinel.validation)
def test_validation_domain_name(self):
self.assertEqual(
'_acme-challenge.le.wtf', self.msg.validation_domain_name('le.wtf'))
class DNSResponseTest(unittest.TestCase):
def setUp(self):
self.key = jose.JWKRSA(key=KEY)
from acme.challenges import DNS
self.chall = DNS(token=jose.b64decode(
b"evaGxfADs6pSRb2LAv9IZf17Dt3juxGJ-PCt92wr-oA"))
self.validation = jose.JWS.sign(
payload=self.chall.json_dumps(sort_keys=True).encode(),
key=self.key, alg=jose.RS256)
from acme.challenges import DNSResponse
self.msg = DNSResponse(validation=self.validation)
self.jmsg_to = {
'resource': 'challenge',
'type': 'dns',
'validation': self.validation,
}
self.jmsg_from = {
'resource': 'challenge',
'type': 'dns',
'validation': self.validation.to_json(),
}
def test_to_partial_json(self):
self.assertEqual(self.jmsg_to, self.msg.to_partial_json())
def test_from_json(self):
from acme.challenges import DNSResponse
self.assertEqual(self.msg, DNSResponse.from_json(self.jmsg_from))
def test_from_json_hashable(self):
from acme.challenges import DNSResponse
hash(DNSResponse.from_json(self.jmsg_from))
def test_check_validation(self):
self.assertTrue(
self.msg.check_validation(self.chall, self.key.public_key()))
if __name__ == '__main__':
unittest.main() # pragma: no cover
| apache-2.0 |
samfpetersen/gnuradio | gr-fec/python/fec/extended_async_encoder.py | 47 | 2515 | #!/usr/bin/env python
#
# Copyright 2014 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr
import fec_swig as fec
from bitflip import read_bitlist
import weakref
class extended_async_encoder(gr.hier_block2):
def __init__(self, encoder_obj_list, puncpat=None):
gr.hier_block2.__init__(self, "extended_async_encoder",
gr.io_signature(0, 0, 0),
gr.io_signature(0, 0, 0))
# Set us up as a message passing block
self.message_port_register_hier_in('in')
self.message_port_register_hier_out('out')
self.puncpat=puncpat
# If it's a list of encoders, take the first one, unless it's
# a list of lists of encoders.
if(type(encoder_obj_list) == list):
# This block doesn't handle parallelism of > 1
if(type(encoder_obj_list[0]) == list):
gr.log.info("fec.extended_encoder: Parallelism must be 0 or 1.")
raise AttributeError
encoder_obj = encoder_obj_list[0]
# Otherwise, just take it as is
else:
encoder_obj = encoder_obj_list
self.encoder = fec.async_encoder(encoder_obj)
#self.puncture = None
#if self.puncpat != '11':
# self.puncture = fec.puncture_bb(len(puncpat), read_bitlist(puncpat), 0)
self.msg_connect(weakref.proxy(self), "in", self.encoder, "in")
#if(self.puncture):
# self.msg_connect(self.encoder, "out", self.puncture, "in")
# self.msg_connect(self.puncture, "out", weakref.proxy(self), "out")
#else:
# self.msg_connect(self.encoder, "out", weakref.proxy(self), "out")
self.msg_connect(self.encoder, "out", weakref.proxy(self), "out")
| gpl-3.0 |
GaZ3ll3/numpy | numpy/core/info.py | 165 | 4690 | """Defines a multi-dimensional array and useful procedures for Numerical computation.
Functions
- array - NumPy Array construction
- zeros - Return an array of all zeros
- empty - Return an unitialized array
- shape - Return shape of sequence or array
- rank - Return number of dimensions
- size - Return number of elements in entire array or a
certain dimension
- fromstring - Construct array from (byte) string
- take - Select sub-arrays using sequence of indices
- put - Set sub-arrays using sequence of 1-D indices
- putmask - Set portion of arrays using a mask
- reshape - Return array with new shape
- repeat - Repeat elements of array
- choose - Construct new array from indexed array tuple
- correlate - Correlate two 1-d arrays
- searchsorted - Search for element in 1-d array
- sum - Total sum over a specified dimension
- average - Average, possibly weighted, over axis or array.
- cumsum - Cumulative sum over a specified dimension
- product - Total product over a specified dimension
- cumproduct - Cumulative product over a specified dimension
- alltrue - Logical and over an entire axis
- sometrue - Logical or over an entire axis
- allclose - Tests if sequences are essentially equal
More Functions:
- arange - Return regularly spaced array
- asarray - Guarantee NumPy array
- convolve - Convolve two 1-d arrays
- swapaxes - Exchange axes
- concatenate - Join arrays together
- transpose - Permute axes
- sort - Sort elements of array
- argsort - Indices of sorted array
- argmax - Index of largest value
- argmin - Index of smallest value
- inner - Innerproduct of two arrays
- dot - Dot product (matrix multiplication)
- outer - Outerproduct of two arrays
- resize - Return array with arbitrary new shape
- indices - Tuple of indices
- fromfunction - Construct array from universal function
- diagonal - Return diagonal array
- trace - Trace of array
- dump - Dump array to file object (pickle)
- dumps - Return pickled string representing data
- load - Return array stored in file object
- loads - Return array from pickled string
- ravel - Return array as 1-D
- nonzero - Indices of nonzero elements for 1-D array
- shape - Shape of array
- where - Construct array from binary result
- compress - Elements of array where condition is true
- clip - Clip array between two values
- ones - Array of all ones
- identity - 2-D identity array (matrix)
(Universal) Math Functions
add logical_or exp
subtract logical_xor log
multiply logical_not log10
divide maximum sin
divide_safe minimum sinh
conjugate bitwise_and sqrt
power bitwise_or tan
absolute bitwise_xor tanh
negative invert ceil
greater left_shift fabs
greater_equal right_shift floor
less arccos arctan2
less_equal arcsin fmod
equal arctan hypot
not_equal cos around
logical_and cosh sign
arccosh arcsinh arctanh
"""
from __future__ import division, absolute_import, print_function
depends = ['testing']
global_symbols = ['*']
| bsd-3-clause |
nicky-ji/edx-nicky | common/djangoapps/track/tests/test_middleware.py | 18 | 5545 | import re
from mock import patch
from mock import sentinel
from django.contrib.auth.models import User
from django.contrib.sessions.middleware import SessionMiddleware
from django.test import TestCase
from django.test.client import RequestFactory
from django.test.utils import override_settings
from eventtracking import tracker
from track.middleware import TrackMiddleware
class TrackMiddlewareTestCase(TestCase):
def setUp(self):
self.track_middleware = TrackMiddleware()
self.request_factory = RequestFactory()
patcher = patch('track.views.server_track')
self.mock_server_track = patcher.start()
self.addCleanup(patcher.stop)
def test_normal_request(self):
request = self.request_factory.get('/somewhere')
self.track_middleware.process_request(request)
self.assertTrue(self.mock_server_track.called)
def test_default_filters_do_not_render_view(self):
for url in ['/event', '/event/1', '/login', '/heartbeat']:
request = self.request_factory.get(url)
self.track_middleware.process_request(request)
self.assertFalse(self.mock_server_track.called)
self.mock_server_track.reset_mock()
@override_settings(TRACKING_IGNORE_URL_PATTERNS=[])
def test_reading_filtered_urls_from_settings(self):
request = self.request_factory.get('/event')
self.track_middleware.process_request(request)
self.assertTrue(self.mock_server_track.called)
@override_settings(TRACKING_IGNORE_URL_PATTERNS=[r'^/some/excluded.*'])
def test_anchoring_of_patterns_at_beginning(self):
request = self.request_factory.get('/excluded')
self.track_middleware.process_request(request)
self.assertTrue(self.mock_server_track.called)
self.mock_server_track.reset_mock()
request = self.request_factory.get('/some/excluded/url')
self.track_middleware.process_request(request)
self.assertFalse(self.mock_server_track.called)
def test_default_request_context(self):
context = self.get_context_for_path('/courses/')
self.assertEquals(context, {
'user_id': '',
'session': '',
'username': '',
'ip': '127.0.0.1',
'host': 'testserver',
'agent': '',
'path': '/courses/',
'org_id': '',
'course_id': '',
})
def get_context_for_path(self, path):
"""Extract the generated event tracking context for a given request for the given path."""
request = self.request_factory.get(path)
return self.get_context_for_request(request)
def get_context_for_request(self, request):
"""Extract the generated event tracking context for the given request."""
self.track_middleware.process_request(request)
try:
captured_context = tracker.get_tracker().resolve_context()
finally:
self.track_middleware.process_response(request, None)
self.assertEquals(
tracker.get_tracker().resolve_context(),
{}
)
return captured_context
def test_request_in_course_context(self):
captured_context = self.get_context_for_path('/courses/test_org/test_course/test_run/foo')
expected_context_subset = {
'course_id': 'test_org/test_course/test_run',
'org_id': 'test_org',
}
self.assert_dict_subset(captured_context, expected_context_subset)
def assert_dict_subset(self, superset, subset):
"""Assert that the superset dict contains all of the key-value pairs found in the subset dict."""
for key, expected_value in subset.iteritems():
self.assertEquals(superset[key], expected_value)
def test_request_with_user(self):
user_id = 1
username = sentinel.username
request = self.request_factory.get('/courses/')
request.user = User(pk=user_id, username=username)
context = self.get_context_for_request(request)
self.assert_dict_subset(context, {
'user_id': user_id,
'username': username,
})
def test_request_with_session(self):
request = self.request_factory.get('/courses/')
SessionMiddleware().process_request(request)
request.session.save()
session_key = request.session.session_key
expected_session_key = self.track_middleware.encrypt_session_key(session_key)
self.assertEquals(len(session_key), len(expected_session_key))
context = self.get_context_for_request(request)
self.assert_dict_subset(context, {
'session': expected_session_key,
})
@override_settings(SECRET_KEY='85920908f28904ed733fe576320db18cabd7b6cd')
def test_session_key_encryption(self):
session_key = '665924b49a93e22b46ee9365abf28c2a'
expected_session_key = '3b81f559d14130180065d635a4f35dd2'
encrypted_session_key = self.track_middleware.encrypt_session_key(session_key)
self.assertEquals(encrypted_session_key, expected_session_key)
def test_request_headers(self):
ip_address = '10.0.0.0'
user_agent = 'UnitTest/1.0'
factory = RequestFactory(REMOTE_ADDR=ip_address, HTTP_USER_AGENT=user_agent)
request = factory.get('/some-path')
context = self.get_context_for_request(request)
self.assert_dict_subset(context, {
'ip': ip_address,
'agent': user_agent,
})
| agpl-3.0 |
vitiral/micropython | tests/basics/memoryview1.py | 56 | 1328 | # test memoryview
# test reading from bytes
b = b'1234'
m = memoryview(b)
print(len(m))
print(m[0], m[1], m[-1])
print(list(m))
# test writing to bytes
try:
m[0] = 1
except TypeError:
print("TypeError")
# test writing to bytearray
b = bytearray(b)
m = memoryview(b)
m[0] = 1
print(b)
print(list(m))
# test slice
m = memoryview(b'1234')
print(list(m[1:]))
print(list(m[1:-1]))
# this tests get_buffer of memoryview
m = memoryview(bytearray(2))
print(bytearray(m))
import array
a = array.array('i', [1, 2, 3, 4])
m = memoryview(a)
print(list(m))
print(list(m[1:-1]))
m[2] = 6
print(a)
# test slice assignment between memoryviews
b1 = bytearray(b'1234')
b2 = bytearray(b'5678')
b3 = bytearray(b'5678')
m1 = memoryview(b1)
m2 = memoryview(b2)
m3 = memoryview(b3)
m2[1:3] = m1[0:2]
print(b2)
b3[1:3] = m1[0:2]
print(b3)
m1[2:4] = b3[1:3]
print(b1)
try:
m2[1:3] = b1[0:4]
except ValueError:
print("ValueError")
try:
m2[1:3] = m1[0:4]
except ValueError:
print("ValueError")
try:
m2[0:4] = m1[1:3]
except ValueError:
print("ValueError")
# test memoryview of arrays with items sized larger than 1
a1 = array.array('i', [0]*5)
m4 = memoryview(a1)
a2 = array.array('i', [3]*5)
m5 = memoryview(a2)
m4[1:3] = m5[1:3]
print(a1)
try:
m4[1:3] = m2[1:3]
except ValueError:
print("ValueError")
| mit |
SrNetoChan/Quantum-GIS | tests/src/python/test_qgsserver_plugins.py | 4 | 9205 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsServer plugins and filters.
From build dir, run: ctest -R PyQgsServerPlugins -V
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Alessandro Pasotti'
__date__ = '22/04/2017'
__copyright__ = 'Copyright 2017, The QGIS Project'
import os
from qgis.server import QgsServer
from qgis.core import QgsMessageLog
from qgis.testing import unittest
from utilities import unitTestDataPath
from test_qgsserver import QgsServerTestBase
import osgeo.gdal # NOQA
# Strip path and content length because path may vary
RE_STRIP_UNCHECKABLE = br'MAP=[^"]+|Content-Length: \d+'
RE_ATTRIBUTES = br'[^>\s]+=[^>\s]+'
class TestQgsServerPlugins(QgsServerTestBase):
def setUp(self):
"""Create the server instance"""
self.testdata_path = unitTestDataPath('qgis_server') + '/'
d = unitTestDataPath('qgis_server_accesscontrol') + '/'
self.projectPath = os.path.join(d, "project.qgs")
# Clean env just to be sure
env_vars = ['QUERY_STRING', 'QGIS_PROJECT_FILE']
for ev in env_vars:
try:
del os.environ[ev]
except KeyError:
pass
self.server = QgsServer()
def test_pluginfilters(self):
"""Test python plugins filters"""
try:
from qgis.server import QgsServerFilter
except ImportError:
print("QGIS Server plugins are not compiled. Skipping test")
return
class SimpleHelloFilter(QgsServerFilter):
def requestReady(self):
QgsMessageLog.logMessage("SimpleHelloFilter.requestReady")
def sendResponse(self):
QgsMessageLog.logMessage("SimpleHelloFilter.sendResponse")
def responseComplete(self):
request = self.serverInterface().requestHandler()
params = request.parameterMap()
QgsMessageLog.logMessage("SimpleHelloFilter.responseComplete")
if params.get('SERVICE', '').upper() == 'SIMPLE':
request.clear()
request.setResponseHeader('Content-type', 'text/plain')
request.appendBody('Hello from SimpleServer!'.encode('utf-8'))
serverIface = self.server.serverInterface()
filter = SimpleHelloFilter(serverIface)
serverIface.registerFilter(filter, 100)
# Get registered filters
self.assertEqual(filter, serverIface.filters()[100][0])
# global to be modified inside plugin filters
globals()['status_code'] = 0
# body to be checked inside plugin filters
globals()['body2'] = None
# headers to be checked inside plugin filters
globals()['headers2'] = None
# Register some more filters
class Filter1(QgsServerFilter):
def responseComplete(self):
request = self.serverInterface().requestHandler()
params = request.parameterMap()
if params.get('SERVICE', '').upper() == 'SIMPLE':
request.appendBody('Hello from Filter1!'.encode('utf-8'))
class Filter2(QgsServerFilter):
def responseComplete(self):
request = self.serverInterface().requestHandler()
params = request.parameterMap()
if params.get('SERVICE', '').upper() == 'SIMPLE':
request.appendBody('Hello from Filter2!'.encode('utf-8'))
class Filter3(QgsServerFilter):
"""Test get and set status code"""
def responseComplete(self):
global status_code
request = self.serverInterface().requestHandler()
request.setStatusCode(999)
status_code = request.statusCode()
class Filter4(QgsServerFilter):
"""Body getter"""
def responseComplete(self):
global body2
request = self.serverInterface().requestHandler()
body2 = request.body()
class Filter5(QgsServerFilter):
"""Body setter, clear body, keep headers"""
def responseComplete(self):
global headers2
request = self.serverInterface().requestHandler()
request.clearBody()
headers2 = request.responseHeaders()
request.appendBody('new body, new life!'.encode('utf-8'))
filter1 = Filter1(serverIface)
filter2 = Filter2(serverIface)
filter3 = Filter3(serverIface)
filter4 = Filter4(serverIface)
serverIface.registerFilter(filter1, 101)
serverIface.registerFilter(filter2, 200)
serverIface.registerFilter(filter2, 100)
serverIface.registerFilter(filter3, 300)
serverIface.registerFilter(filter4, 400)
self.assertTrue(filter2 in serverIface.filters()[100])
self.assertEqual(filter1, serverIface.filters()[101][0])
self.assertEqual(filter2, serverIface.filters()[200][0])
header, body = [_v for _v in self._execute_request('?service=simple')]
response = header + body
expected = b'Content-Length: 62\nContent-type: text/plain\n\nHello from SimpleServer!Hello from Filter1!Hello from Filter2!'
self.assertEqual(response, expected)
# Check status code
self.assertEqual(status_code, 999)
# Check body getter from filter
self.assertEqual(body2, b'Hello from SimpleServer!Hello from Filter1!Hello from Filter2!')
# Check that the bindings for complex type QgsServerFiltersMap are working
filters = {100: [filter, filter2], 101: [filter1], 200: [filter2]}
serverIface.setFilters(filters)
self.assertTrue(filter in serverIface.filters()[100])
self.assertTrue(filter2 in serverIface.filters()[100])
self.assertEqual(filter1, serverIface.filters()[101][0])
self.assertEqual(filter2, serverIface.filters()[200][0])
header, body = self._execute_request('?service=simple')
response = header + body
expected = b'Content-Length: 62\nContent-type: text/plain\n\nHello from SimpleServer!Hello from Filter1!Hello from Filter2!'
self.assertEqual(response, expected)
# Now, re-run with body setter
filter5 = Filter5(serverIface)
serverIface.registerFilter(filter5, 500)
header, body = self._execute_request('?service=simple')
response = header + body
expected = b'Content-Length: 19\nContent-type: text/plain\n\nnew body, new life!'
self.assertEqual(response, expected)
self.assertEqual(headers2, {'Content-type': 'text/plain'})
def test_configpath(self):
""" Test plugin can read confif path
"""
try:
from qgis.server import QgsServerFilter
from qgis.core import QgsProject
except ImportError:
print("QGIS Server plugins are not compiled. Skipping test")
return
d = unitTestDataPath('qgis_server_accesscontrol') + '/'
self.projectPath = os.path.join(d, "project.qgs")
self.server = QgsServer()
# global to be modified inside plugin filters
globals()['configFilePath2'] = None
class Filter0(QgsServerFilter):
"""Body setter, clear body, keep headers"""
def requestReady(self):
global configFilePath2
configFilePath2 = self.serverInterface().configFilePath()
serverIface = self.server.serverInterface()
serverIface.registerFilter(Filter0(serverIface), 100)
# Test using MAP
self._execute_request('?service=simple&MAP=%s' % self.projectPath)
# Check config file path
self.assertEqual(configFilePath2, self.projectPath)
# Reset result
globals()['configFilePath2'] = None
# Test with prqject as argument
project = QgsProject()
project.read(self.projectPath)
self._execute_request_project('?service=simple', project=project)
# Check config file path
self.assertEqual(configFilePath2, project.fileName())
def test_exceptions(self):
"""Test that plugin filter Python exceptions can be caught"""
try:
from qgis.server import QgsServerFilter
except ImportError:
print("QGIS Server plugins are not compiled. Skipping test")
return
class FilterBroken(QgsServerFilter):
def responseComplete(self):
raise Exception("There was something very wrong!")
serverIface = self.server.serverInterface()
filter1 = FilterBroken(serverIface)
filters = {100: [filter1]}
serverIface.setFilters(filters)
header, body = self._execute_request('')
self.assertEqual(body, b'Internal Server Error')
serverIface.setFilters({})
if __name__ == '__main__':
unittest.main()
| gpl-2.0 |
FokkeZB/titanium_mobile | support/iphone/csspacker.py | 108 | 2193 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# CSS Minification Script
# http://www.siafoo.net/snippet/16
# released in public domain
#
from __future__ import division
import sys
import getopt
import re
import string
def compress(input):
output = ''
while True:
open_c = input.find('/*')
if open_c == -1:
output += input
break;
output += input[ : open_c]
input = input[ open_c + 2 : ]
close_c = input.find('*/')
if close_c == -1:
#print 'Runaway comment detected'
return output
input = input[close_c + 2: ]
# Replace tab with space
output = output.replace('\t', ' ')
# Remove double spaces
output = re.sub('\s{2,}', ' ', output)
# Remove spaces around stuff
output = re.sub('\s*;+\s*', ';', output)
output = re.sub('\s*:\s*', ':', output)
output = re.sub('\s*{\s*', '{', output)
output = re.sub('\s*}\s*', '}', output)
# Remove unecessary semicolon
output = output.replace(';}', '}')
# Split the directives on per line
output = output.replace('}', '}\n')
output = output.strip()
output = remove_dead(output)
output = shorten_colors(output)
# Remove all the newlines
output = output.replace('\n', '')
return output
def remove_dead(input):
output = ''
for line in input.splitlines(True):
if not re.search('([\.#][\w_]*{})', line):
output += line
return output
def shorten_colors(input):
output = ''
p = re.compile(':#([A-Fa-f0-9]{6})')
for line in input.splitlines(True):
m = p.search(line)
if m is not None:
old_c = m.group(1)
if old_c[0] == old_c[1] and old_c[2] == old_c[3] and old_c[4] == old_c[5]:
new_c = old_c[0] + old_c[2] + old_c[4]
output += line.replace(old_c, new_c)
continue
output += line
return output
class CSSPacker(object):
def __init__(self,contents):
self.contents = contents
def pack(self):
return compress(self.contents)
| apache-2.0 |
shrimpboyho/git.js | emscript/python/2.7.5.1_32bit/Lib/cProfile.py | 169 | 6515 | #! /usr/bin/env python
"""Python interface for the 'lsprof' profiler.
Compatible with the 'profile' module.
"""
__all__ = ["run", "runctx", "help", "Profile"]
import _lsprof
# ____________________________________________________________
# Simple interface
def run(statement, filename=None, sort=-1):
"""Run statement under profiler optionally saving results in filename
This function takes a single argument that can be passed to the
"exec" statement, and an optional file name. In all cases this
routine attempts to "exec" its first argument and gather profiling
statistics from the execution. If no file name is present, then this
function automatically prints a simple profiling report, sorted by the
standard name string (file/line/function-name) that is presented in
each line.
"""
prof = Profile()
result = None
try:
try:
prof = prof.run(statement)
except SystemExit:
pass
finally:
if filename is not None:
prof.dump_stats(filename)
else:
result = prof.print_stats(sort)
return result
def runctx(statement, globals, locals, filename=None, sort=-1):
"""Run statement under profiler, supplying your own globals and locals,
optionally saving results in filename.
statement and filename have the same semantics as profile.run
"""
prof = Profile()
result = None
try:
try:
prof = prof.runctx(statement, globals, locals)
except SystemExit:
pass
finally:
if filename is not None:
prof.dump_stats(filename)
else:
result = prof.print_stats(sort)
return result
# Backwards compatibility.
def help():
print "Documentation for the profile/cProfile modules can be found "
print "in the Python Library Reference, section 'The Python Profiler'."
# ____________________________________________________________
class Profile(_lsprof.Profiler):
"""Profile(custom_timer=None, time_unit=None, subcalls=True, builtins=True)
Builds a profiler object using the specified timer function.
The default timer is a fast built-in one based on real time.
For custom timer functions returning integers, time_unit can
be a float specifying a scale (i.e. how long each integer unit
is, in seconds).
"""
# Most of the functionality is in the base class.
# This subclass only adds convenient and backward-compatible methods.
def print_stats(self, sort=-1):
import pstats
pstats.Stats(self).strip_dirs().sort_stats(sort).print_stats()
def dump_stats(self, file):
import marshal
f = open(file, 'wb')
self.create_stats()
marshal.dump(self.stats, f)
f.close()
def create_stats(self):
self.disable()
self.snapshot_stats()
def snapshot_stats(self):
entries = self.getstats()
self.stats = {}
callersdicts = {}
# call information
for entry in entries:
func = label(entry.code)
nc = entry.callcount # ncalls column of pstats (before '/')
cc = nc - entry.reccallcount # ncalls column of pstats (after '/')
tt = entry.inlinetime # tottime column of pstats
ct = entry.totaltime # cumtime column of pstats
callers = {}
callersdicts[id(entry.code)] = callers
self.stats[func] = cc, nc, tt, ct, callers
# subcall information
for entry in entries:
if entry.calls:
func = label(entry.code)
for subentry in entry.calls:
try:
callers = callersdicts[id(subentry.code)]
except KeyError:
continue
nc = subentry.callcount
cc = nc - subentry.reccallcount
tt = subentry.inlinetime
ct = subentry.totaltime
if func in callers:
prev = callers[func]
nc += prev[0]
cc += prev[1]
tt += prev[2]
ct += prev[3]
callers[func] = nc, cc, tt, ct
# The following two methods can be called by clients to use
# a profiler to profile a statement, given as a string.
def run(self, cmd):
import __main__
dict = __main__.__dict__
return self.runctx(cmd, dict, dict)
def runctx(self, cmd, globals, locals):
self.enable()
try:
exec cmd in globals, locals
finally:
self.disable()
return self
# This method is more useful to profile a single function call.
def runcall(self, func, *args, **kw):
self.enable()
try:
return func(*args, **kw)
finally:
self.disable()
# ____________________________________________________________
def label(code):
if isinstance(code, str):
return ('~', 0, code) # built-in functions ('~' sorts at the end)
else:
return (code.co_filename, code.co_firstlineno, code.co_name)
# ____________________________________________________________
def main():
import os, sys
from optparse import OptionParser
usage = "cProfile.py [-o output_file_path] [-s sort] scriptfile [arg] ..."
parser = OptionParser(usage=usage)
parser.allow_interspersed_args = False
parser.add_option('-o', '--outfile', dest="outfile",
help="Save stats to <outfile>", default=None)
parser.add_option('-s', '--sort', dest="sort",
help="Sort order when printing to stdout, based on pstats.Stats class",
default=-1)
if not sys.argv[1:]:
parser.print_usage()
sys.exit(2)
(options, args) = parser.parse_args()
sys.argv[:] = args
if len(args) > 0:
progname = args[0]
sys.path.insert(0, os.path.dirname(progname))
with open(progname, 'rb') as fp:
code = compile(fp.read(), progname, 'exec')
globs = {
'__file__': progname,
'__name__': '__main__',
'__package__': None,
}
runctx(code, globs, None, options.outfile, options.sort)
else:
parser.print_usage()
return parser
# When invoked as main program, invoke the profiler on a script
if __name__ == '__main__':
main()
| gpl-2.0 |
anthkris/oppia | core/domain/stats_services.py | 6 | 11019 | # coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Services for exploration-related statistics."""
from core.domain import exp_domain
from core.domain import exp_services
from core.domain import stats_domain
from core.domain import stats_jobs_continuous
from core.platform import models
(stats_models,) = models.Registry.import_models([models.NAMES.statistics])
IMPROVE_TYPE_DEFAULT = 'default'
IMPROVE_TYPE_INCOMPLETE = 'incomplete'
# TODO(bhenning): Everything is handler name submit; therefore, it is
# pointless and should be removed.
_OLD_SUBMIT_HANDLER_NAME = 'submit'
def get_top_unresolved_answers_for_default_rule(exploration_id, state_name):
return {
answer: count for (answer, count) in
stats_domain.StateRuleAnswerLog.get(
exploration_id, state_name, exp_domain.DEFAULT_RULESPEC_STR
).get_top_answers(3)
}
def get_exps_unresolved_answers_for_default_rule(exp_ids):
    """Gets unresolved answers per exploration for the default rule across all
    states, for explorations whose ids are in exp_ids. The value of the total
    count matches the sum of the individual counts of each unresolved answer.

    TODO(526avijitgupta): Note that this method currently returns the data only
    for the DEFAULT rule. This should ideally handle all types of unresolved
    answers.

    Returns a dict of the following format:
        {
          'exp_id_1': {
            'count': 7 (number of unresolved answers for this exploration),
            'unresolved_answers': (list of unresolved answers sorted by count)
              [
                {'count': 4, 'value': 'answer_1', 'state': 'Introduction'},
                {'count': 2, 'value': 'answer_2', 'state': 'Introduction'},
                {'count': 1, 'value': 'answer_3', 'state': 'End'}
              ]
          },
          'exp_id_2': {
            'count': 13,
            'unresolved_answers':
              [
                {'count': 8, 'value': 'answer_5', 'state': 'Introduction'},
                {'count': 3, 'value': 'answer_4', 'state': 'Quest'},
                {'count': 1, 'value': 'answer_6', 'state': 'End'},
                {'count': 1, 'value': 'answer_8', 'state': 'End'}
              ]
          }
        }
    """
    def _get_explorations_states_tuples_by_ids(exp_ids):
        """Returns a list of all (exp_id, state_name) tuples for the given
        exp_ids.

        E.g. - [
          ('eid1', 'Introduction'),
          ('eid1', 'End'),
          ('eid2', 'Introduction'),
          ('eid3', 'Introduction')
        ]
        when exp_ids = ['eid1', 'eid2', 'eid3'].
        """
        explorations = (
            exp_services.get_multiple_explorations_by_id(exp_ids, strict=False))
        return [
            (exploration.id, state_name)
            for exploration in explorations.values()
            for state_name in exploration.states
        ]
    explorations_states_tuples = _get_explorations_states_tuples_by_ids(exp_ids)
    # One list of {'value': ..., 'count': ...} dicts per tuple, in the same
    # order as explorations_states_tuples.
    exploration_states_answers_list = get_top_state_rule_answers_multi(
        explorations_states_tuples, [exp_domain.DEFAULT_RULESPEC_STR])
    exps_answers_mapping = {}
    # Aggregate the per-state answer lists into one entry per exploration,
    # tagging each answer dict (in place) with the state it came from.
    for ind, statewise_answers in enumerate(exploration_states_answers_list):
        exp_id = explorations_states_tuples[ind][0]
        if exp_id not in exps_answers_mapping:
            exps_answers_mapping[exp_id] = {
                'count': 0,
                'unresolved_answers': []
            }
        for answer in statewise_answers:
            exps_answers_mapping[exp_id]['count'] += answer['count']
            answer['state'] = explorations_states_tuples[ind][1]
        exps_answers_mapping[exp_id]['unresolved_answers'].extend(
            statewise_answers)
    # Present each exploration's unresolved answers most-frequent first.
    for exp_id in exps_answers_mapping:
        exps_answers_mapping[exp_id]['unresolved_answers'] = (sorted(
            exps_answers_mapping[exp_id]['unresolved_answers'],
            key=lambda a: a['count'],
            reverse=True))
    return exps_answers_mapping
def get_state_rules_stats(exploration_id, state_name):
    """Gets statistics for the answer groups and rules of this state.

    Returns:
        A dict, keyed by the string '{HANDLER_NAME}.{RULE_STR}', whose
        values are dicts containing the top answers and total hit count
        for the corresponding rule.
    """
    exploration = exp_services.get_exploration_by_id(exploration_id)
    state = exploration.states[state_name]

    # One (handler name, rule string) pair per classified rule in the state.
    rule_keys = [
        (_OLD_SUBMIT_HANDLER_NAME, rule.stringify_classified_rule())
        for group in state.interaction.answer_groups
        for rule in group.rule_specs]
    if state.interaction.default_outcome:
        rule_keys.append(
            (_OLD_SUBMIT_HANDLER_NAME, exp_domain.DEFAULT_RULESPEC_STR))

    queries = [
        {'state_name': state_name, 'rule_str': rule_str}
        for _, rule_str in rule_keys]
    answer_logs = stats_domain.StateRuleAnswerLog.get_multi(
        exploration_id, queries)

    # get_multi preserves query order, so the logs line up with rule_keys.
    results = {}
    for rule_key, answer_log in zip(rule_keys, answer_logs):
        results['.'.join(rule_key)] = {
            'answers': answer_log.get_top_answers(5),
            'rule_hits': answer_log.total_answer_count
        }
    return results
def get_top_state_rule_answers(exploration_id, state_name, rule_str_list):
    """Returns the top answers (by submission frequency) submitted to the
    given state of the given exploration that were mapped to any of the rule
    spec strings in 'rule_str_list'.

    This is a convenience wrapper around get_top_state_rule_answers_multi()
    for a single (exploration, state) pair.
    """
    single_pair = [(exploration_id, state_name)]
    results_per_pair = get_top_state_rule_answers_multi(
        single_pair, rule_str_list)
    return results_per_pair[0]
def get_top_state_rule_answers_multi(exploration_state_list, rule_str_list):
    """Returns, for each (exploration id, state name) tuple in
    exploration_state_list, the top answers (by submission frequency) that
    were mapped to any of the rule spec strings in rule_str_list.

    The outer list is parallel to exploration_state_list; each inner list
    contains {'value': ..., 'count': ...} dicts.
    """
    answer_logs = (
        stats_domain.StateRuleAnswerLog.get_multi_by_multi_explorations(
            exploration_state_list, rule_str_list))
    results = []
    for answer_log in answer_logs:
        formatted_answers = []
        for value, count in answer_log.get_all_top_answers():
            formatted_answers.append({'value': value, 'count': count})
        results.append(formatted_answers)
    return results
def get_state_improvements(exploration_id, exploration_version):
    """Returns a list of dicts, each representing a suggestion for improvement
    to a particular state.

    Each dict has the keys 'rank' (the occurrence count backing the
    suggestion), 'state_name' and 'type' (IMPROVE_TYPE_DEFAULT or
    IMPROVE_TYPE_INCOMPLETE). The list is sorted by descending rank.
    """
    ranked_states = []
    exploration = exp_services.get_exploration_by_id(exploration_id)
    state_names = exploration.states.keys()
    # Answer logs for the default (catch-all) rule of every state, in the
    # same order as state_names.
    default_rule_answer_logs = stats_domain.StateRuleAnswerLog.get_multi(
        exploration_id, [{
            'state_name': state_name,
            'rule_str': exp_domain.DEFAULT_RULESPEC_STR
        } for state_name in state_names])
    statistics = stats_jobs_continuous.StatisticsAggregator.get_statistics(
        exploration_id, exploration_version)
    state_hit_counts = statistics['state_hit_counts']
    for ind, state_name in enumerate(state_names):
        total_entry_count = 0
        no_answer_submitted_count = 0
        if state_name in state_hit_counts:
            total_entry_count = (
                state_hit_counts[state_name]['total_entry_count'])
            no_answer_submitted_count = state_hit_counts[state_name].get(
                'no_answer_count', 0)
        # States that were never visited cannot yield suggestions.
        if total_entry_count == 0:
            continue
        # A signal must exceed 20% of entries to this state to be flagged.
        threshold = 0.2 * total_entry_count
        default_rule_answer_log = default_rule_answer_logs[ind]
        default_count = default_rule_answer_log.total_answer_count
        eligible_flags = []
        state = exploration.states[state_name]
        # Flag states whose default rule self-loops and absorbs many answers.
        if (default_count > threshold and
                state.interaction.default_outcome is not None and
                state.interaction.default_outcome.dest == state_name):
            eligible_flags.append({
                'rank': default_count,
                'improve_type': IMPROVE_TYPE_DEFAULT})
        # Flag states that many learners leave without submitting an answer.
        if no_answer_submitted_count > threshold:
            eligible_flags.append({
                'rank': no_answer_submitted_count,
                'improve_type': IMPROVE_TYPE_INCOMPLETE})
        if eligible_flags:
            # Only the strongest suggestion is reported for each state.
            eligible_flags = sorted(
                eligible_flags, key=lambda flag: flag['rank'], reverse=True)
            ranked_states.append({
                'rank': eligible_flags[0]['rank'],
                'state_name': state_name,
                'type': eligible_flags[0]['improve_type'],
            })
    return sorted([
        ranked_state for ranked_state in ranked_states
        if ranked_state['rank'] != 0
    ], key=lambda x: -x['rank'])
def get_versions_for_exploration_stats(exploration_id):
    """Returns the list of versions for which annotations exist for the
    given exploration.
    """
    annotations_model_cls = stats_models.ExplorationAnnotationsModel
    return annotations_model_cls.get_versions(exploration_id)
def get_exploration_stats(exploration_id, exploration_version):
    """Returns a dict with state statistics for the given exploration id.

    Note that exploration_version should be a string.
    """
    exploration = exp_services.get_exploration_by_id(exploration_id)
    exp_stats = stats_jobs_continuous.StatisticsAggregator.get_statistics(
        exploration_id, exploration_version)
    state_hit_counts = exp_stats['state_hit_counts']

    # Per-state entry counters; states with no recorded hits report zeroes.
    state_stats = {}
    for state_name in exploration.states:
        if state_name in state_hit_counts:
            hit_info = state_hit_counts[state_name]
            first_entry_count = hit_info['first_entry_count']
            total_entry_count = hit_info['total_entry_count']
        else:
            first_entry_count = 0
            total_entry_count = 0
        state_stats[state_name] = {
            'name': state_name,
            'firstEntryCount': first_entry_count,
            'totalEntryCount': total_entry_count,
        }

    return {
        'improvements': get_state_improvements(
            exploration_id, exploration_version),
        'last_updated': exp_stats['last_updated'],
        'num_completions': exp_stats['complete_exploration_count'],
        'num_starts': exp_stats['start_exploration_count'],
        'state_stats': state_stats,
    }
| apache-2.0 |
appleseedhq/cortex | test/IECoreScene/LightTest.py | 2 | 3579 | ##########################################################################
#
# Copyright (c) 2010-2011, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import unittest
import IECore
import IECoreScene
class LightTest( unittest.TestCase ) :
	"""Unit tests for IECoreScene.Light construction, properties, copying
	and hashing."""

	def test( self ) :
		"""A default-constructed light has the expected name, a unique
		non-empty handle and empty parameters."""
		s = IECoreScene.Light()
		self.assertEqual( s.name, "distantlight" )
		# assert_ is a deprecated alias of assertTrue; use the modern name.
		self.assertTrue( len(s.handle) > 0 )
		self.assertEqual( len( s.parameters ), 0 )
		self.assertEqual( s.parameters.typeName(), "CompoundData" )
		# Every default-constructed light must receive a distinct handle.
		ss = IECoreScene.Light()
		self.assertNotEqual( s.handle, ss.handle )
		s = IECoreScene.Light( "marble", "marble001" )
		self.assertEqual( s.name, "marble" )
		self.assertEqual( s.handle, "marble001" )
		# Copies preserve both the name and the handle.
		ss = s.copy()
		self.assertEqual( ss.name, s.name )
		self.assertEqual( ss.handle, s.handle )

	def testProperties( self ) :
		"""name and handle are writable properties."""
		s = IECoreScene.Light()
		s.handle = "myNewHandle"
		s.name = "myNewName"
		self.assertEqual( s.name, "myNewName" )
		self.assertEqual( s.handle, "myNewHandle" )

	def testConstructWithParameters( self ) :
		"""Parameters passed at construction are stored as CompoundData."""
		s = IECoreScene.Light( "test", "test001", IECore.CompoundData( { "a" : IECore.StringData( "a" ) } ) )
		self.assertEqual( s.name, "test" )
		self.assertEqual( s.handle, "test001" )
		self.assertEqual( len( s.parameters ), 1 )
		self.assertEqual( s.parameters.typeName(), IECore.CompoundData.staticTypeName() )
		self.assertEqual( s.parameters["a"], IECore.StringData( "a" ) )

	def testCopy( self ) :
		"""A copy compares equal to the original."""
		s = IECoreScene.Light( "test", "surface", IECore.CompoundData( { "a" : IECore.StringData( "a" ) } ) )
		ss = s.copy()
		self.assertEqual( s, ss )

	def testHash( self ) :
		"""Changing the name, the handle or a parameter changes the hash."""
		s = IECoreScene.Light( "name", "handle" )
		h = s.hash()
		s.name = "name2"
		self.assertNotEqual( s.hash(), h )
		h = s.hash()
		s.handle = "handle2"
		self.assertNotEqual( s.hash(), h )
		h = s.hash()
		s.parameters["a"] = IECore.StringData( "a" )
		self.assertNotEqual( s.hash(), h )
# Allow the tests to be run directly from the command line.
if __name__ == "__main__":
	unittest.main()
| bsd-3-clause |
andre-geldenhuis/bloggregator | blogaggregator/blogaggregator/public/views.py | 1 | 3472 | # -*- coding: utf-8 -*-
'''Public section, including homepage and signup.'''
from flask import (Blueprint, request, render_template, flash, url_for,
redirect, session)
from flask.ext.login import login_user, login_required, logout_user
from sqlalchemy import desc
from blogaggregator.extensions import login_manager
from blogaggregator.user.models import User
from blogaggregator.user.models import Post
from blogaggregator.public.forms import LoginForm
from blogaggregator.user.forms import RegisterForm
from blogaggregator.utils import flash_errors
from blogaggregator.database import db
from bleach import clean
class NoPosts:
    """Placeholder entry shown for a user who has not written any posts.

    Mimics the two attributes of a real post that the templates and the
    home-page sort rely on: ``user`` and ``summary``.
    """

    def __init__(self, user):
        # The user this placeholder stands in for.
        self.user = user
        # Fixed summary text rendered where a post summary would appear.
        self.summary = "No Posts :("
blueprint = Blueprint('public', __name__, static_folder="../static")
@login_manager.user_loader
def load_user(user_id):
    """Flask-Login callback: load a user from the id stored in the session.

    The parameter is renamed from ``id`` to avoid shadowing the builtin;
    Flask-Login invokes this callback positionally, so the rename is safe.
    """
    return User.get_by_id(int(user_id))
@blueprint.route("/", methods=["GET", "POST"])
def home():
    """Home page: list each user's latest post and handle login.

    GET renders the aggregated post list; POST attempts a login with the
    submitted form and redirects to the members page (or ``next``) on
    success.
    """
    form = LoginForm(request.form)

    # Build one entry per registered user: their latest post, or a NoPosts
    # placeholder if they have not posted yet.
    all_users = User.query.all()
    postlist = []
    for user in all_users:
        latest_post = Post.query.filter_by(user_id=user.id).order_by(
            desc(Post.created_at)).limit(1).first()
        if latest_post is None:
            latest_post = NoPosts(user)
        postlist.append(latest_post)

    # Sort entries by each user's most recent activity, newest first.
    # NOTE(review): the guard below can never be true (postlist holds Post
    # or NoPosts objects, never ""), so the sort always runs; kept for
    # behavioural parity pending confirmation.
    if len(postlist) == 1 and postlist[0] == "":
        pass
    else:
        postlist.sort(key=lambda entry: entry.user.latest_update, reverse=True)

    # Handle login submissions.
    if request.method == 'POST':
        if form.validate_on_submit():
            login_user(form.user)
            flash("You are logged in.", 'success')
            redirect_url = request.args.get("next") or url_for("user.members")
            return redirect(redirect_url)
        else:
            flash_errors(form)
    return render_template("public/home.html", form=form, postlist=postlist)
@blueprint.route('/user/<username>')
def user(username):
    """Show a public profile page with all of ``username``'s posts,
    newest first. Unknown usernames redirect home with a warning."""
    user = User.query.filter_by(username=username).first()
    # first() returns None when no such user exists; compare with ``is``.
    if user is None:
        flash('User %s not found!' % username, 'warning')
        return redirect(url_for('public.home'))
    posts_all = db.session.query(Post).filter(
        Post.user_id == user.id).order_by(desc(Post.created_at)).all()
    return render_template('public/user.html',
                           user=user,
                           posts=posts_all)
@blueprint.route('/logout/')
@login_required
def logout():
    """End the current user's session and redirect to the home page."""
    logout_user()
    flash('You are logged out.', 'info')
    return redirect(url_for('public.home'))
@blueprint.route("/register/", methods=['GET', 'POST'])
def register():
    """Register a new user account.

    On successful validation the user is created as active and the browser
    is redirected to the home page; otherwise form errors are flashed and
    the registration form is re-rendered.
    """
    # NOTE(review): csrf_enabled=False disables CSRF protection on this
    # form -- confirm this is intentional.
    form = RegisterForm(request.form, csrf_enabled=False)
    if form.validate_on_submit():
        # The new user is not logged in automatically, so the return value
        # of create() is not needed here.
        User.create(username=form.username.data,
                    email=form.email.data,
                    atomfeed=form.atomfeed.data,
                    password=form.password.data,
                    active=True)
        flash("Thank you for registering. You can now log in.", 'success')
        return redirect(url_for('public.home'))
    else:
        flash_errors(form)
    return render_template('public/register.html', form=form)
@blueprint.route("/about/")
def about():
    """Render the static "about" page; the layout needs a login form."""
    login_form = LoginForm(request.form)
    return render_template("public/about.html", form=login_form)
| gpl-2.0 |
karminowa/martas-music-store | vendor/sonata-project/exporter/docs/conf.py | 63 | 7898 | # -*- coding: utf-8 -*-
#
# Sonata Exporter documentation build configuration file, created by
# sphinx-quickstart on Fri Mar 29 01:43:00 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sensio.sphinx.refinclude', 'sensio.sphinx.configurationblock', 'sensio.sphinx.phpcode']
# The sensio.sphinx.* extensions provide the PHP code-block and
# configuration-block directives used throughout the Sonata docs.
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Sonata Project ~ Exporter'
copyright = u'2010-2014, Thomas Rabaix'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
#version = '0.0.1'
# The full version, including alpha/beta/rc tags.
#release = '0.0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# Keep Sphinx from recursing into its own build output.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
import sphinx_rtd_theme
# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# The theme is resolved from the installed sphinx_rtd_theme package rather
# than from a directory checked into the repository.
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# The directory must exist at build time or Sphinx emits a warning.
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'doc'
# -- Options for LaTeX output --------------------------------------------------
# Keys left commented out fall back to Sphinx's LaTeX defaults.
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
#latex_documents = [
# ('index', 'PythonElement.tex', u'Python Documentation',
# u'Thomas Rabaix', 'manual'),
#]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
#(source start file, name, description, authors, manual section).
#man_pages = [
# ('index', 'ioc', u'IoC Documentation',
# [u'Thomas Rabaix'], 1)
#]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
#texinfo_documents = [
# ('index', 'IoC', u'IoC Documentation',
# u'Thomas Rabaix', 'IoC', 'One line description of project.',
# 'Miscellaneous'),
#]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
| mit |
satish-avninetworks/murano | doc/source/conf.py | 3 | 3887 | # Copyright (C) 2014 Mirantis Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import subprocess
import sys
# True when the docs are being built on the Read The Docs service.
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../../'))
sys.path.insert(0, os.path.abspath('../'))
sys.path.insert(0, os.path.abspath('./'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.todo',
              'sphinx.ext.coverage',
              'sphinx.ext.viewcode', 'sphinxcontrib.httpdomain']
# oslosphinx is only needed for the local (non-RTD) theme/branding.
if not on_rtd:
    extensions.append('oslosphinx')
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Murano'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
from murano.version import version_info
release = version_info.release_string()
version = version_info.version_string()
# Set the default Pygments syntax
highlight_language = 'python'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['specification/murano-repository.rst',
                    'specification/murano-api.rst',
                    'murano_pl/builtin_functions.rst',
                    'install/configure_network.rst',
                    'articles/ad-ui.rst',
                    'articles/telnet.rst']
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
show_authors = False
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
if not on_rtd:
    #TODO(efedorova): Change local theme to corresponds with the theme on rtd
    pass
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# Use the date of the latest git commit as the "last updated" stamp.
git_cmd = ["git", "log", "--pretty=format:'%ad, commit %h'", "--date=local",
           "-n1"]
html_last_updated_fmt = subprocess.Popen(
    git_cmd, stdout=subprocess.PIPE).communicate()[0]
# NOTE(review): communicate() returns bytes on Python 3; confirm this conf
# is only run under Python 2, or the output should be decoded.
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
html_title = 'Murano'
# Custom sidebar templates, maps document names to template names.
html_sidebars = {
    'index': ['sidebarlinks.html', 'localtoc.html', 'searchbox.html', 'sourcelink.html'],
    '**': ['localtoc.html', 'relations.html',
           'searchbox.html', 'sourcelink.html']
}
| apache-2.0 |
knehez/edx-platform | common/test/acceptance/tests/lms/test_lms_courseware_search.py | 18 | 7307 | """
Test courseware search
"""
import os
import json
from nose.plugins.attrib import attr
from ..helpers import UniqueCourseTest, remove_file
from ...pages.common.logout import LogoutPage
from ...pages.studio.utils import add_html_component, click_css, type_in_codemirror
from ...pages.studio.auto_auth import AutoAuthPage
from ...pages.studio.overview import CourseOutlinePage
from ...pages.studio.container import ContainerPage
from ...pages.lms.courseware_search import CoursewareSearchPage
from ...fixtures.course import CourseFixture, XBlockFixtureDesc
@attr('shard_5')
class CoursewareSearchTest(UniqueCourseTest):
"""
Test courseware search.
"""
USERNAME = 'STUDENT_TESTER'
EMAIL = 'student101@example.com'
STAFF_USERNAME = "STAFF_TESTER"
STAFF_EMAIL = "staff101@example.com"
HTML_CONTENT = """
Someday I'll wish upon a star
And wake up where the clouds are far
Behind me.
Where troubles melt like lemon drops
Away above the chimney tops
That's where you'll find me.
"""
SEARCH_STRING = "chimney"
EDITED_CHAPTER_NAME = "Section 2 - edited"
EDITED_SEARCH_STRING = "edited"
TEST_INDEX_FILENAME = "test_root/index_file.dat"
def setUp(self):
"""
Create search page and course content to search
"""
# create test file in which index for this test will live
with open(self.TEST_INDEX_FILENAME, "w+") as index_file:
json.dump({}, index_file)
self.addCleanup(remove_file, self.TEST_INDEX_FILENAME)
super(CoursewareSearchTest, self).setUp()
self.courseware_search_page = CoursewareSearchPage(self.browser, self.course_id)
self.course_outline = CourseOutlinePage(
self.browser,
self.course_info['org'],
self.course_info['number'],
self.course_info['run']
)
course_fix = CourseFixture(
self.course_info['org'],
self.course_info['number'],
self.course_info['run'],
self.course_info['display_name']
)
course_fix.add_children(
XBlockFixtureDesc('chapter', 'Section 1').add_children(
XBlockFixtureDesc('sequential', 'Subsection 1')
)
).add_children(
XBlockFixtureDesc('chapter', 'Section 2').add_children(
XBlockFixtureDesc('sequential', 'Subsection 2')
)
).install()
def _auto_auth(self, username, email, staff):
"""
Logout and login with given credentials.
"""
LogoutPage(self.browser).visit()
AutoAuthPage(self.browser, username=username, email=email,
course_id=self.course_id, staff=staff).visit()
def _studio_publish_content(self, section_index):
"""
Publish content on studio course page under specified section
"""
self._auto_auth(self.STAFF_USERNAME, self.STAFF_EMAIL, True)
self.course_outline.visit()
subsection = self.course_outline.section_at(section_index).subsection_at(0)
subsection.expand_subsection()
unit = subsection.unit_at(0)
unit.publish()
def _studio_edit_chapter_name(self, section_index):
"""
Edit chapter name on studio course page under specified section
"""
self._auto_auth(self.STAFF_USERNAME, self.STAFF_EMAIL, True)
self.course_outline.visit()
section = self.course_outline.section_at(section_index)
section.change_name(self.EDITED_CHAPTER_NAME)
def _studio_add_content(self, section_index):
"""
Add content on studio course page under specified section
"""
self._auto_auth(self.STAFF_USERNAME, self.STAFF_EMAIL, True)
# create a unit in course outline
self.course_outline.visit()
subsection = self.course_outline.section_at(section_index).subsection_at(0)
subsection.expand_subsection()
subsection.add_unit()
# got to unit and create an HTML component and save (not publish)
unit_page = ContainerPage(self.browser, None)
unit_page.wait_for_page()
add_html_component(unit_page, 0)
unit_page.wait_for_element_presence('.edit-button', 'Edit button is visible')
click_css(unit_page, '.edit-button', 0, require_notification=False)
unit_page.wait_for_element_visibility('.modal-editor', 'Modal editor is visible')
type_in_codemirror(unit_page, 0, self.HTML_CONTENT)
click_css(unit_page, '.action-save', 0)
def _studio_reindex(self):
"""
Reindex course content on studio course page
"""
self._auto_auth(self.STAFF_USERNAME, self.STAFF_EMAIL, True)
self.course_outline.visit()
self.course_outline.start_reindex()
self.course_outline.wait_for_ajax()
def _search_for_content(self, search_term):
"""
Login and search for specific content
Arguments:
search_term - term to be searched for
Returns:
(bool) True if search term is found in resulting content; False if not found
"""
self._auto_auth(self.USERNAME, self.EMAIL, False)
self.courseware_search_page.visit()
self.courseware_search_page.search_for_term(search_term)
return search_term in self.courseware_search_page.search_results.html[0]
def test_page_existence(self):
"""
Make sure that the page is accessible.
"""
self._auto_auth(self.USERNAME, self.EMAIL, False)
self.courseware_search_page.visit()
def test_search(self):
"""
Make sure that you can search for something.
"""
# Create content in studio without publishing.
self._studio_add_content(0)
# Do a search, there should be no results shown.
self.assertFalse(self._search_for_content(self.SEARCH_STRING))
# Publish in studio to trigger indexing.
self._studio_publish_content(0)
# Do the search again, this time we expect results.
self.assertTrue(self._search_for_content(self.SEARCH_STRING))
def test_reindex(self):
"""
Make sure new content gets reindexed on button press.
"""
# Create content in studio without publishing.
self._studio_add_content(1)
# Do a search, there should be no results shown.
self.assertFalse(self._search_for_content(self.EDITED_SEARCH_STRING))
# Publish in studio to trigger indexing, and edit chapter name afterwards.
self._studio_publish_content(1)
# Do a ReIndex from studio to ensure that our stuff is updated before the next stage of the test
self._studio_reindex()
# Search after publish, there should still be no results shown.
self.assertFalse(self._search_for_content(self.EDITED_SEARCH_STRING))
self._studio_edit_chapter_name(1)
# Do a ReIndex from studio to ensure that our stuff is updated before the next stage of the test
self._studio_reindex()
# Do the search again, this time we expect results.
self.assertTrue(self._search_for_content(self.EDITED_SEARCH_STRING))
| agpl-3.0 |
jingzhehu/udacity_mlnd | P4_Training_a_Smartcab_to_Drive/smartcab/pygame/pkgdata.py | 16 | 2198 | """
pkgdata is a simple, extensible way for a package to acquire data file
resources.
The getResource function is equivalent to the standard idioms, such as
the following minimal implementation:
import sys, os
def getResource(identifier, pkgname=__name__):
pkgpath = os.path.dirname(sys.modules[pkgname].__file__)
path = os.path.join(pkgpath, identifier)
return file(os.path.normpath(path), mode='rb')
When a __loader__ is present on the module given by __name__, it will defer
getResource to its get_data implementation and return it as a file-like
object (such as StringIO).
"""
__all__ = ['getResource']
import sys
import os
from pygame.compat import get_BytesIO
BytesIO = get_BytesIO()
try:
    from pkg_resources import resource_stream, resource_exists
except ImportError:
    # setuptools is unavailable: provide minimal stand-ins so this module
    # still imports.  resource_exists() always reporting False makes
    # getResource() fall back to loader/filesystem access.
    def resource_exists(package_or_requirement, resource_name):
        return False
    # NOTE(review): the first parameter name differs from pkg_resources'
    # 'package_or_requirement' -- looks like a typo, but it is only ever
    # called positionally here so behaviour is unaffected.
    def resource_stream(package_of_requirement, resource_name):
        raise NotImplementedError
def getResource(identifier, pkgname=__name__):
    """
    Acquire a readable object for a given package name and identifier.
    An IOError will be raised if the resource can not be found.

    For example::

        mydata = getResource('mypkgdata.jpg').read()

    Note that the package name must be fully qualified, if given, such
    that it would be found in sys.modules.

    In some cases, getResource will return a real file object.  In that
    case, it may be useful to use its name attribute to get the path
    rather than use it as a file-like object.  For example, you may
    be handing data off to a C API.
    """
    # Prefer setuptools' resource machinery when it is available.
    if resource_exists(pkgname, identifier):
        return resource_stream(pkgname, identifier)
    module = sys.modules[pkgname]
    module_file = getattr(module, '__file__', None)
    if module_file is None:
        raise IOError("%s has no __file__!" % repr(module))
    resource_path = os.path.join(os.path.dirname(module_file), identifier)
    # A PEP 302 loader (zip import, frozen, ...) can serve the bytes even
    # when the path does not exist on the real filesystem.
    loader = getattr(module, '__loader__', None)
    if loader is not None:
        try:
            data = loader.get_data(resource_path)
        except IOError:
            pass
        else:
            return BytesIO(data)
    # Last resort: plain filesystem access.
    return open(os.path.normpath(resource_path), 'rb')
| mit |
cyberark-bizdev/ansible | lib/ansible/modules/network/vyos/vyos_lldp.py | 38 | 3216 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017, Ansible by Red Hat, inc
#
# This file is part of Ansible by Red Hat
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = """
---
module: vyos_lldp
version_added: "2.4"
author: "Ricardo Carrillo Cruz (@rcarrillocruz)"
short_description: Manage LLDP configuration on VyOS network devices
description:
- This module provides declarative management of LLDP service
on VyOS network devices.
notes:
- Tested against VYOS 1.1.7
options:
state:
description:
- State of the LLDP configuration.
default: present
choices: ['present', 'absent']
extends_documentation_fragment: vyos
"""
EXAMPLES = """
- name: Enable LLDP service
vyos_lldp:
state: present
- name: Disable LLDP service
vyos_lldp:
state: absent
"""
RETURN = """
commands:
description: The list of configuration mode commands to send to the device
returned: always, except for the platforms that use Netconf transport to manage the device.
type: list
sample:
- set service lldp
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.vyos.vyos import get_config, load_config
from ansible.module_utils.network.vyos.vyos import vyos_argument_spec
def has_lldp(module):
    """Return True if the LLDP service is enabled in the device config.

    Depending on the VyOS version the rendered config line may or may not
    quote the service name, so both spellings are checked.
    """
    config = get_config(module).splitlines()
    # Return the boolean expression directly instead of the redundant
    # 'if ...: return True / else: return False' dance.
    return "set service 'lldp'" in config or 'set service lldp' in config
def main():
    """ main entry point for module execution
    """
    argument_spec = dict(
        interfaces=dict(type='list'),
        state=dict(default='present',
                   choices=['present', 'absent',
                            'enabled', 'disabled'])
    )
    argument_spec.update(vyos_argument_spec)
    module = AnsibleModule(argument_spec=argument_spec,
                           supports_check_mode=True)
    # NOTE(review): 'warnings' is never populated, so the branch below is
    # currently dead code -- presumably kept for parity with other vyos
    # modules.
    warnings = list()
    result = {'changed': False}
    if warnings:
        result['warnings'] = warnings
    HAS_LLDP = has_lldp(module)
    commands = []
    # NOTE(review): states 'enabled'/'disabled' are accepted by the spec but
    # match neither branch below, so they produce no commands -- confirm
    # whether that is intended.
    if module.params['state'] == 'absent' and HAS_LLDP:
        commands.append('delete service lldp')
    elif module.params['state'] == 'present' and not HAS_LLDP:
        commands.append('set service lldp')
    result['commands'] = commands
    if commands:
        # Only commit outside of check mode; check mode still reports the
        # commands that would have been sent.
        commit = not module.check_mode
        load_config(module, commands, commit=commit)
        result['changed'] = True
    module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 |
40223139/2015cdaa5-12 | static/Brython3.1.1-20150328-091302/Lib/copy.py | 628 | 8905 | """Generic (shallow and deep) copying operations.
Interface summary:
import copy
x = copy.copy(y) # make a shallow copy of y
x = copy.deepcopy(y) # make a deep copy of y
For module specific errors, copy.Error is raised.
The difference between shallow and deep copying is only relevant for
compound objects (objects that contain other objects, like lists or
class instances).
- A shallow copy constructs a new compound object and then (to the
extent possible) inserts *the same objects* into it that the
original contains.
- A deep copy constructs a new compound object and then, recursively,
inserts *copies* into it of the objects found in the original.
Two problems often exist with deep copy operations that don't exist
with shallow copy operations:
a) recursive objects (compound objects that, directly or indirectly,
contain a reference to themselves) may cause a recursive loop
b) because deep copy copies *everything* it may copy too much, e.g.
administrative data structures that should be shared even between
copies
Python's deep copy operation avoids these problems by:
a) keeping a table of objects already copied during the current
copying pass
b) letting user-defined classes override the copying operation or the
set of components copied
This version does not copy types like module, class, function, method,
nor stack trace, stack frame, nor file, socket, window, nor array, nor
any similar types.
Classes can use the same interfaces to control copying that they use
to control pickling: they can define methods called __getinitargs__(),
__getstate__() and __setstate__(). See the documentation for module
"pickle" for information on these methods.
"""
import types
import weakref
from copyreg import dispatch_table
import builtins
class Error(Exception):
    """Raised when an object cannot be shallow- or deep-copied."""
    pass
error = Error # backward compatibility
# module org.python.core does not exist in Brython, so lets just ignore
# this import request.
#try:
# from org.python.core import PyStringMap
#except ImportError:
# PyStringMap = None
PyStringMap = None
__all__ = ["Error", "copy", "deepcopy"]
def copy(x):
    """Shallow copy operation on arbitrary Python objects.

    See the module's __doc__ string for more info.
    """
    cls = type(x)
    # 1. Exact-type dispatch table (immutables, list/dict/set, ...).
    copier = _copy_dispatch.get(cls)
    if copier:
        return copier(x)
    # 2. A __copy__ hook defined by the class itself.
    copier = getattr(cls, "__copy__", None)
    if copier:
        return copier(x)
    # 3. Fall back to the pickle reduction protocol: copyreg's
    #    dispatch_table first, then __reduce_ex__ / __reduce__.
    reductor = dispatch_table.get(cls)
    if reductor:
        rv = reductor(x)
    else:
        reductor = getattr(x, "__reduce_ex__", None)
        if reductor:
            rv = reductor(2)
        else:
            reductor = getattr(x, "__reduce__", None)
            if reductor:
                rv = reductor()
            else:
                raise Error("un(shallow)copyable object of type %s" % cls)
    # deep=0: rebuild from the reduction without copying the components.
    return _reconstruct(x, rv, 0)
# Registry mapping exact types to their shallow-copy implementation.
_copy_dispatch = d = {}

def _copy_immutable(x):
    # Immutable objects can be "copied" by returning them unchanged.
    return x
for t in (type(None), int, float, bool, str, tuple,
          frozenset, type, range,
          types.BuiltinFunctionType, type(Ellipsis),
          types.FunctionType, weakref.ref):
    d[t] = _copy_immutable
# CodeType / complex / unicode may be absent on some implementations,
# so register them defensively.
t = getattr(types, "CodeType", None)
if t is not None:
    d[t] = _copy_immutable
for name in ("complex", "unicode"):
    t = getattr(builtins, name, None)
    if t is not None:
        d[t] = _copy_immutable

def _copy_with_constructor(x):
    # Mutable builtins copy cleanly via their own constructor.
    return type(x)(x)
for t in (list, dict, set):
    d[t] = _copy_with_constructor

def _copy_with_copy_method(x):
    return x.copy()
if PyStringMap is not None:
    d[PyStringMap] = _copy_with_copy_method

del d
def deepcopy(x, memo=None, _nil=[]):
    """Deep copy operation on arbitrary Python objects.

    See the module's __doc__ string for more info.
    """
    # _nil is a deliberately shared mutable default: a unique sentinel
    # distinguishing "not in memo" from a legitimately memoized None.
    if memo is None:
        memo = {}
    d = id(x)
    y = memo.get(d, _nil)
    if y is not _nil:
        # Already copied during this pass (handles shared and cyclic refs).
        return y
    cls = type(x)
    copier = _deepcopy_dispatch.get(cls)
    if copier:
        y = copier(x, memo)
    else:
        try:
            issc = issubclass(cls, type)
        except TypeError: # cls is not a class (old Boost; see SF #502085)
            issc = 0
        if issc:
            # Classes themselves are treated as atomic (not copied).
            y = _deepcopy_atomic(x, memo)
        else:
            copier = getattr(x, "__deepcopy__", None)
            if copier:
                y = copier(memo)
            else:
                # Fall back to the pickle reduction protocol, mirroring copy().
                reductor = dispatch_table.get(cls)
                if reductor:
                    rv = reductor(x)
                else:
                    reductor = getattr(x, "__reduce_ex__", None)
                    if reductor:
                        rv = reductor(2)
                    else:
                        reductor = getattr(x, "__reduce__", None)
                        if reductor:
                            rv = reductor()
                        else:
                            raise Error(
                                "un(deep)copyable object of type %s" % cls)
                y = _reconstruct(x, rv, 1, memo)
    # If is its own copy, don't memoize.
    if y is not x:
        memo[d] = y
        _keep_alive(x, memo) # Make sure x lives at least as long as d
    return y
# Registry mapping exact types to their deep-copy implementation.
_deepcopy_dispatch = d = {}

def _deepcopy_atomic(x, memo):
    # Atomic/immutable values are returned unchanged.
    return x
d[type(None)] = _deepcopy_atomic
d[type(Ellipsis)] = _deepcopy_atomic
d[int] = _deepcopy_atomic
d[float] = _deepcopy_atomic
d[bool] = _deepcopy_atomic
try:
    d[complex] = _deepcopy_atomic
except NameError:
    pass
d[bytes] = _deepcopy_atomic
d[str] = _deepcopy_atomic
try:
    d[types.CodeType] = _deepcopy_atomic
except AttributeError:
    pass
d[type] = _deepcopy_atomic
d[range] = _deepcopy_atomic
# Functions (and refs) are shared, not duplicated, by deepcopy.
d[types.BuiltinFunctionType] = _deepcopy_atomic
d[types.FunctionType] = _deepcopy_atomic
d[weakref.ref] = _deepcopy_atomic
def _deepcopy_list(x, memo):
    """Deep-copy a list, registering the copy up front so cycles resolve."""
    result = []
    # Register the new list in the memo *before* recursing so that a list
    # containing itself maps onto the copy instead of recursing forever.
    memo[id(x)] = result
    append = result.append
    for item in x:
        append(deepcopy(item, memo))
    return result
d[list] = _deepcopy_list
def _deepcopy_tuple(x, memo):
    """Deep-copy a tuple, returning the original when no element changed."""
    # Copy elements into a list first; the tuple itself is immutable so it
    # cannot be registered in the memo before its contents exist.
    y = []
    for a in x:
        y.append(deepcopy(a, memo))
    # We're not going to put the tuple in the memo, but it's still important we
    # check for it, in case the tuple contains recursive mutable structures.
    try:
        return memo[id(x)]
    except KeyError:
        pass
    # If every element copied to itself, reuse the original tuple;
    # otherwise materialize the copied elements as a new tuple.
    for i in range(len(x)):
        if x[i] is not y[i]:
            y = tuple(y)
            break
    else:
        y = x
    return y
d[tuple] = _deepcopy_tuple
def _deepcopy_dict(x, memo):
    """Deep-copy a dict, registering the copy up front so cycles resolve."""
    result = {}
    # Register before recursing: a dict reachable from its own values must
    # map onto the copy rather than recursing forever.
    memo[id(x)] = result
    for key, value in x.items():
        result[deepcopy(key, memo)] = deepcopy(value, memo)
    return result
d[dict] = _deepcopy_dict
if PyStringMap is not None:
    d[PyStringMap] = _deepcopy_dict
def _deepcopy_method(x, memo): # Copy instance methods
    # Rebind the original function to a deep copy of the bound instance.
    return type(x)(x.__func__, deepcopy(x.__self__, memo))
_deepcopy_dispatch[types.MethodType] = _deepcopy_method
def _keep_alive(x, memo):
    """Keeps a reference to the object x in the memo.

    Because we remember objects by their id, we have
    to assure that possibly temporary objects are kept
    alive by referencing them.
    We store a reference at the id of the memo, which should
    normally not be used unless someone tries to deepcopy
    the memo itself...
    """
    slot = id(memo)
    if slot in memo:
        memo[slot].append(x)
    else:
        # First object kept alive for this memo: create the holder list.
        memo[slot] = [x]
def _reconstruct(x, info, deep, memo=None):
    """Rebuild an object from its __reduce__-style tuple *info*.

    *deep* selects deep (1) vs shallow (0) handling of the components.
    """
    # A plain string reduction means "this object is its own copy".
    if isinstance(info, str):
        return x
    assert isinstance(info, tuple)
    if memo is None:
        memo = {}
    n = len(info)
    assert n in (2, 3, 4, 5)
    # info is (callable, args[, state[, listiter[, dictiter]]]).
    callable, args = info[:2]
    if n > 2:
        state = info[2]
    else:
        state = {}
    if n > 3:
        listiter = info[3]
    else:
        listiter = None
    if n > 4:
        dictiter = info[4]
    else:
        dictiter = None
    if deep:
        args = deepcopy(args, memo)
    y = callable(*args)
    # Memoize before restoring state so cyclic references resolve to y.
    memo[id(x)] = y
    if state:
        if deep:
            state = deepcopy(state, memo)
        if hasattr(y, '__setstate__'):
            y.__setstate__(state)
        else:
            # A 2-tuple state carries (instance dict, slot values).
            if isinstance(state, tuple) and len(state) == 2:
                state, slotstate = state
            else:
                slotstate = None
            if state is not None:
                y.__dict__.update(state)
            if slotstate is not None:
                for key, value in slotstate.items():
                    setattr(y, key, value)
    # Replay appended items (list-like) and mapped items (dict-like).
    if listiter is not None:
        for item in listiter:
            if deep:
                item = deepcopy(item, memo)
            y.append(item)
    if dictiter is not None:
        for key, value in dictiter:
            if deep:
                key = deepcopy(key, memo)
                value = deepcopy(value, memo)
            y[key] = value
    return y
del d
del types
# Helper for instance creation without calling __init__
class _EmptyClass:
    """Stub used to create instances without invoking __init__.

    NOTE(review): not referenced anywhere in this module's visible code;
    presumably kept for backward compatibility with older copy.py APIs.
    """
    pass
| gpl-3.0 |
mrquim/mrquimrepo | plugin.video.castaway/resources/lib/modules/unCaptcha.py | 9 | 14975 | # -*- coding: utf-8 -*-
import random
import re
import time
import urlparse, urllib,urllib2,cookielib
from base64 import b64encode
import xbmc
import xbmcgui,xbmcaddon,os
__scriptID__ = 'script.module.liveresolver'
__addon__ = xbmcaddon.Addon(__scriptID__)
class cInputWindow(xbmcgui.WindowDialog):
    """Full-screen Kodi dialog presenting a reCAPTCHA image challenge.

    Renders the captcha picture as a 3x3 grid of invisible buttons with
    check-mark overlays; get() runs the dialog modally and returns the
    comma-separated indices (0-8) of the selected tiles, or "" when the
    user cancelled or selected nothing.
    """
    def __init__(self, *args, **kwargs):
        # Artwork shipped with the addon: backdrop plus the check overlay.
        bg_image = os.path.join( __addon__.getAddonInfo('path'), 'Images/' ) + "background.png"
        check_image = os.path.join( __addon__.getAddonInfo('path'), 'Images/' ) + "trans_checked.png"
        uncheck_image = os.path.join( __addon__.getAddonInfo('path'), 'Images/' ) + "trans_unchecked1.png"
        self.ctrlBackgound = xbmcgui.ControlImage(
            0,0,
            1280, 720,
            bg_image
        )
        self.cancelled=False
        self.addControl (self.ctrlBackgound)
        self.msg = kwargs.get('msg')+'\nNormally there are 3-4 selections and 2 rounds of pictures'
        self.round=kwargs.get('round')
        # NOTE(review): strActionInfo is assigned twice; both labels are
        # added to the window but only the second remains referenced.
        self.strActionInfo = xbmcgui.ControlLabel(335, 120, 700, 300, self.msg, 'font13', '0xFFFF00FF')
        self.addControl(self.strActionInfo)
        self.strActionInfo = xbmcgui.ControlLabel(335, 20, 724, 400, 'Captcha round %s'%(str(self.round)), 'font40', '0xFFFF00FF')
        self.addControl(self.strActionInfo)
        self.cptloc = kwargs.get('captcha')
        #self.img = xbmcgui.ControlImage(335,200,624,400,self.cptloc)
        # Captcha image geometry: a imgw x imgh picture split into a
        # 3x3 grid of pw x ph tiles.
        imgw=400
        imgh=300
        imgX=335
        imgY=200
        pw=imgw/3
        ph=imgh/3
        self.img = xbmcgui.ControlImage(imgX,imgY,imgw,imgh,self.cptloc)
        self.addControl(self.img)
        # chk: check-mark overlay images; chkbutton: clickable tiles;
        # chkstate: current toggle state per tile.
        self.chk=[0]*9
        self.chkbutton=[0]*9
        self.chkstate=[False]*9
        #self.chk[0] = xbmcgui.ControlCheckMark(335,200,200,200,'select',checkWidth=30, checkHeight=30)
        self.chk[0]= xbmcgui.ControlImage(imgX,imgY, pw, ph,check_image)# '', font='font1',focusTexture=check_image ,noFocusTexture=uncheck_image,checkWidth=220,checkHeight=150)
        self.chk[1]= xbmcgui.ControlImage(imgX+pw,imgY, pw, ph,check_image)# '', font='font14',focusTexture=check_image ,noFocusTexture=uncheck_image,checkWidth=220,checkHeight=150)
        self.chk[2]= xbmcgui.ControlImage(imgX+pw+pw,imgY, pw, ph,check_image)# '', font='font14',focusTexture=check_image ,noFocusTexture=uncheck_image,checkWidth=220,checkHeight=150)
        self.chk[3]= xbmcgui.ControlImage(imgX,imgY+ph, pw, ph,check_image)# '', font='font14',focusTexture=check_image ,noFocusTexture=uncheck_image,checkWidth=220,checkHeight=150)
        self.chk[4]= xbmcgui.ControlImage(imgX+pw,imgY+ph, pw, ph,check_image)# '', font='font14',focusTexture=check_image ,noFocusTexture=uncheck_image,checkWidth=220,checkHeight=150)
        self.chk[5]= xbmcgui.ControlImage(imgX+pw+pw,imgY+ph, pw, ph,check_image)# '', font='font14',focusTexture=check_image ,noFocusTexture=uncheck_image,checkWidth=220,checkHeight=150)
        self.chk[6]= xbmcgui.ControlImage(imgX,imgY+ph+ph, pw, ph,check_image)#, '', font='font14',focusTexture=check_image ,noFocusTexture=uncheck_image,checkWidth=220,checkHeight=150)
        self.chk[7]= xbmcgui.ControlImage(imgX+pw,imgY+ph+ph, pw, ph,check_image)# '', font='font14',focusTexture=check_image ,noFocusTexture=uncheck_image,checkWidth=220,checkHeight=150)
        self.chk[8]= xbmcgui.ControlImage(imgX+pw+pw,imgY+ph+ph, pw, ph,check_image)# '', font='font14',focusTexture=check_image ,noFocusTexture=uncheck_image,checkWidth=220,checkHeight=150)
        # Buttons labeled '1'..'9'; onControl() maps the label back to the
        # grid index to toggle the matching overlay.
        self.chkbutton[0]= xbmcgui.ControlButton(imgX,imgY, pw, ph, '1', font='font1');#,focusTexture=check_image ,noFocusTexture=uncheck_image);#,checkWidth=220,checkHeight=150)
        self.chkbutton[1]= xbmcgui.ControlButton(imgX+pw,imgY, pw, ph, '2', font='font1');#,focusTexture=check_image ,noFocusTexture=uncheck_image);#,checkWidth=220,checkHeight=150)
        self.chkbutton[2]= xbmcgui.ControlButton(imgX+pw+pw,imgY, pw, ph, '3', font='font1');#,focusTexture=check_image ,noFocusTexture=uncheck_image);#,checkWidth=220,checkHeight=150)
        self.chkbutton[3]= xbmcgui.ControlButton(imgX,imgY+ph, pw, ph, '4', font='font1');#,focusTexture=check_image ,noFocusTexture=uncheck_image);#,checkWidth=220,checkHeight=150)
        self.chkbutton[4]= xbmcgui.ControlButton(imgX+pw,imgY+ph, pw, ph, '5', font='font1');#,focusTexture=check_image ,noFocusTexture=uncheck_image);#,checkWidth=220,checkHeight=150)
        self.chkbutton[5]= xbmcgui.ControlButton(imgX+pw+pw,imgY+ph, pw, ph, '6', font='font1');#,focusTexture=check_image ,noFocusTexture=uncheck_image);#,checkWidth=220,checkHeight=150)
        self.chkbutton[6]= xbmcgui.ControlButton(imgX,imgY+ph+ph, pw, ph, '7', font='font1');#,focusTexture=check_image ,noFocusTexture=uncheck_image);#,checkWidth=220,checkHeight=150)
        self.chkbutton[7]= xbmcgui.ControlButton(imgX+pw,imgY+ph+ph, pw, ph, '8', font='font1');#,focusTexture=check_image ,noFocusTexture=uncheck_image);#,checkWidth=220,checkHeight=150)
        self.chkbutton[8]= xbmcgui.ControlButton(imgX+pw+pw,imgY+ph+ph, pw, ph, '9', font='font1');#,focusTexture=check_image ,noFocusTexture=uncheck_image);#,checkWidth=220,checkHeight=150)
        for obj in self.chk:
            self.addControl(obj )
            obj.setVisible(False)
        for obj in self.chkbutton:
            self.addControl(obj )
        #self.chk[0].setSelected(False)
        self.cancelbutton = xbmcgui.ControlButton(imgX+(imgw/2)-110,imgY+imgh+10,100,40,'Cancel',alignment=2)
        self.okbutton = xbmcgui.ControlButton(imgX+(imgw/2)+10,imgY+imgh+10,100,40,'OK',alignment=2)
        self.addControl(self.okbutton)
        self.addControl(self.cancelbutton)
        # Remote-control navigation wiring: up/down/left/right between the
        # grid tiles (with wrap-around) and the OK/Cancel buttons.
        self.chkbutton[6].controlDown(self.cancelbutton); self.chkbutton[6].controlUp(self.chkbutton[3])
        self.chkbutton[7].controlDown(self.cancelbutton); self.chkbutton[7].controlUp(self.chkbutton[4])
        self.chkbutton[8].controlDown(self.okbutton); self.chkbutton[8].controlUp(self.chkbutton[5])
        self.chkbutton[6].controlLeft(self.chkbutton[8]);self.chkbutton[6].controlRight(self.chkbutton[7]);
        self.chkbutton[7].controlLeft(self.chkbutton[6]);self.chkbutton[7].controlRight(self.chkbutton[8]);
        self.chkbutton[8].controlLeft(self.chkbutton[7]);self.chkbutton[8].controlRight(self.chkbutton[6]);
        self.chkbutton[3].controlDown(self.chkbutton[6]); self.chkbutton[3].controlUp(self.chkbutton[0])
        self.chkbutton[4].controlDown(self.chkbutton[7]); self.chkbutton[4].controlUp(self.chkbutton[1])
        self.chkbutton[5].controlDown(self.chkbutton[8]); self.chkbutton[5].controlUp(self.chkbutton[2])
        self.chkbutton[3].controlLeft(self.chkbutton[5]);self.chkbutton[3].controlRight(self.chkbutton[4]);
        self.chkbutton[4].controlLeft(self.chkbutton[3]);self.chkbutton[4].controlRight(self.chkbutton[5]);
        self.chkbutton[5].controlLeft(self.chkbutton[4]);self.chkbutton[5].controlRight(self.chkbutton[3]);
        self.chkbutton[0].controlDown(self.chkbutton[3]); self.chkbutton[0].controlUp(self.cancelbutton)
        self.chkbutton[1].controlDown(self.chkbutton[4]); self.chkbutton[1].controlUp(self.cancelbutton)
        self.chkbutton[2].controlDown(self.chkbutton[5]); self.chkbutton[2].controlUp(self.okbutton)
        self.chkbutton[0].controlLeft(self.chkbutton[2]);self.chkbutton[0].controlRight(self.chkbutton[1]);
        self.chkbutton[1].controlLeft(self.chkbutton[0]);self.chkbutton[1].controlRight(self.chkbutton[2]);
        self.chkbutton[2].controlLeft(self.chkbutton[1]);self.chkbutton[2].controlRight(self.chkbutton[0]);
        self.cancelled=False
        self.setFocus(self.okbutton)
        self.okbutton.controlLeft(self.cancelbutton);self.okbutton.controlRight(self.cancelbutton);
        self.cancelbutton.controlLeft(self.okbutton); self.cancelbutton.controlRight(self.okbutton);
        self.okbutton.controlDown(self.chkbutton[2]);self.okbutton.controlUp(self.chkbutton[8]);
        self.cancelbutton.controlDown(self.chkbutton[0]); self.cancelbutton.controlUp(self.chkbutton[6]);
        #self.kbd = xbmc.Keyboard()

    def get(self):
        """Run the dialog modally; return selected tile indices as '0,3,7'."""
        self.doModal()
        #self.kbd.doModal()
        #if (self.kbd.isConfirmed()):
        # text = self.kbd.getText()
        # self.close()
        # return text
        #xbmc.sleep(5000)
        self.close()
        if not self.cancelled:
            # Join the indices of all toggled-on tiles with commas.
            retval=""
            for objn in range(9):
                if self.chkstate[objn]:#self.chk[objn].getSelected() :
                    retval+=("" if retval=="" else ",")+str(objn)
            return retval
        else:
            return ""

    # def onControl(self,control):
    # if control == self.okbutton:
    # self.close()
    # elif control == self.cancelbutton:
    # self.cancelled=True
    # self.close()

    def anythingChecked(self):
        """Return True if at least one tile is currently selected."""
        for obj in self.chkstate:
            if obj:#obj.getSelected():
                return True
        return False

    def onControl(self,control):
        # OK only closes the dialog when something is selected; Cancel
        # always closes and flags the cancellation.
        if control==self.okbutton:
            if self.anythingChecked():
                self.close()
        elif control== self.cancelbutton:
            self.cancelled=True
            self.close()
        try:
            #print control
            # A numbered grid button: toggle the matching overlay image.
            if 'xbmcgui.ControlButton' in repr(type(control)):
                index=control.getLabel()
                #print 'index',index
                if index.isnumeric():
                    #print 'index2',index
                    #self.chk[int(index)-1].setSelected(not self.chk[int(index)-1].getSelected())
                    self.chkstate[int(index)-1]= not self.chkstate[int(index)-1]
                    self.chk[int(index)-1].setVisible(self.chkstate[int(index)-1])
                    #print 'ddone'
        except: pass

    # def onClick(self, controlId):
    # print 'CLICKED',controlId

    def onAction(self, action):
        # 10 is Kodi's ACTION_PREVIOUS_MENU (back/escape).
        if action == 10:#ACTION_PREVIOUS_MENU:
            self.cancelled=True
            self.close()
def getUrl(url, cookieJar=None,post=None, timeout=20, headers=None, noredir=False):
    """Fetch *url* and return the response body as a string.

    cookieJar -- optional cookielib jar threaded through the opener.
    post      -- url-encoded body; a non-None value makes this a POST.
    headers   -- optional list of (name, value) pairs added to the request.
    noredir   -- when True, redirects are returned as-is instead of followed.
    """
    cookie_handler = urllib2.HTTPCookieProcessor(cookieJar)
    if noredir:
        # Fix: 'NoRedirection' was referenced here but never defined anywhere
        # in this module, so noredir=True always raised NameError.  Define a
        # minimal error processor that passes redirect responses through
        # unchanged instead of following them.
        class NoRedirection(urllib2.HTTPErrorProcessor):
            def http_response(self, request, response):
                return response
            https_response = http_response
        opener = urllib2.build_opener(NoRedirection,cookie_handler, urllib2.HTTPBasicAuthHandler(), urllib2.HTTPHandler())
    else:
        opener = urllib2.build_opener(cookie_handler, urllib2.HTTPBasicAuthHandler(), urllib2.HTTPHandler())
    #opener = urllib2.install_opener(opener)
    req = urllib2.Request(url)
    req.add_header('User-Agent','Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.154 Safari/537.36')
    if headers:
        for h,hv in headers:
            req.add_header(h,hv)
    response = opener.open(req,post,timeout=timeout)
    try:
        # Ensure the connection is released even if read() fails.
        link = response.read()
    finally:
        response.close()
    return link
class UnCaptchaReCaptcha:
    """Drives Google's noscript/fallback reCAPTCHA flow interactively.

    Fetches the fallback challenge page for a site key, shows each round's
    image grid in a cInputWindow, POSTs the user's selections back, and
    returns the verification token (or "" on failure/cancel).
    """
    def processCaptcha(self, key,lang):
        headers=[("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:37.0) Gecko/20100101 Firefox/37.0"),
                 ("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"),
                 ("Referer", "https://www.google.com/recaptcha/api2/demo"),
                 ("Accept-Language", lang)];
        html=getUrl("http://www.google.com/recaptcha/api/fallback?k=" + key,headers=headers);
        token=""
        round=0
        while True:
            # Each challenge round embeds a payload image URL and a prompt.
            payload = re.findall("\"(/recaptcha/api2/payload[^\"]+)",html);
            round+=1
            message =re.findall("<label .*?class=\"fbc-imageselect-message-text\">(.*?)</label>",html);
            if len(message)==0:
                message =re.findall("<div .*?class=\"fbc-imageselect-message-error\">(.*?)</div>",html)
            if len(message)==0:
                # No prompt left: the page now carries the verification
                # token (or the solve failed and the textarea is empty).
                token = re.findall("\"this\\.select\\(\\)\">(.*?)</textarea>",html)[0];
                if not token=="":
                    line1 = "Captcha Sucessfull"
                    xbmc.executebuiltin('Notification(%s, %s, %d, %s)'%('Liveresolver',line1, 3000, None))
                else:
                    line1 = "Captcha failed"
                    xbmc.executebuiltin('Notification(%s, %s, %d, %s)'%('Liveresolver',line1, 3000, None))
                break
            else:
                message=message[0]
                payload=payload[0]
                imgurl=re.findall("name=\"c\"\\s+value=\\s*\"([^\"]+)",html)[0]
                headers=[("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:37.0) Gecko/20100101 Firefox/37.0"),
                         ("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"),
                         ("Referer", "http://www.google.com/recaptcha/api/fallback?k=" + key),
                         ("Accept-Language", lang)];
                cval=re.findall('name="c" value="(.*?)"',html)[0]
                # NOTE(review): replace('&','&') is a no-op -- this was
                # presumably replace('&amp;','&') originally and got mangled
                # by HTML unescaping; confirm against upstream.
                captcha_imgurl = "https://www.google.com"+payload.replace('&','&')
                #print message
                message=message.replace('<strong>','')
                message=message.replace('</strong>','')
                #captcha_response=raw_input('-->')
                # Show the grid dialog and collect the selected tile indices.
                oSolver = cInputWindow(captcha = captcha_imgurl,msg = message,round=round)
                captcha_response = oSolver.get()
                #print 'captcha_response',captcha_response
                if captcha_response=="":
                    break
                responses=""
                for rr in captcha_response.split(','):
                    responses += "&response=" + rr;
                html = getUrl("http://www.google.com/recaptcha/api/fallback?k="+key
                    ,post=urllib.urlencode({'c' : cval,})+responses,headers=headers)#.decode('unicode-escape')
                #print html
        return token
def performCaptcha(sitename,cj,returnpage=True,captcharegex='data-sitekey="(.*?)"',lang="en",headers=None):
    """Detect and interactively solve a reCAPTCHA on *sitename*.

    Fetches the page with cookie jar *cj*, extracts the site key via
    *captcharegex*, solves the challenge, and POSTs the resulting token
    back.  Returns the resulting page when *returnpage* is true, otherwise
    the raw token ("" when no captcha was found).
    """
    sitepage = getUrl(sitename, cookieJar=cj, headers=headers)
    sitekey = re.findall(captcharegex, sitepage)
    token = ""
    if len(sitekey) >= 1:
        c = UnCaptchaReCaptcha()
        token = c.processCaptcha(sitekey[0], lang)
        if returnpage:
            # Fix: compare with 'is None' rather than '== None', and build a
            # new list instead of 'headers += [...]' so the caller's headers
            # list is not mutated as a side effect of this call.
            if headers is None:
                headers = [("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:37.0) Gecko/20100101 Firefox/37.0"),
                           ("Referer", sitename)]
            else:
                headers = headers + [("Referer", sitename)]
            sitepage = getUrl(sitename, cookieJar=cj, post=urllib.urlencode({"g-recaptcha-response": token}), headers=headers)
    if returnpage:
        return sitepage
    else:
        return token
#cookieJar = cookielib.LWPCookieJar()
#performCaptcha("http://www.livetv.tn/",cookieJar);
| gpl-2.0 |
tmpgit/intellij-community | python/lib/Lib/site-packages/django/contrib/formtools/tests/__init__.py | 71 | 13996 | import os
from django import forms, http
from django.conf import settings
from django.contrib.formtools import preview, wizard, utils
from django.test import TestCase
from django.utils import unittest
success_string = "Done was called!"
class TestFormPreview(preview.FormPreview):
    """FormPreview subclass exercised by PreviewTests."""

    def get_context(self, request, form):
        # Extend the default context so tests can check that overridden
        # context reaches the template.
        ctx = super(TestFormPreview, self).get_context(request, form)
        ctx.update({'custom_context': True})
        return ctx

    def get_initial(self, request):
        # Fixed initial data asserted by the tests.
        return {'field1': 'Works!'}

    def done(self, request, cleaned_data):
        # Signal completion with a recognizable response body.
        return http.HttpResponse(success_string)
class TestForm(forms.Form):
    """Simple form for the preview tests.

    'field1_' deliberately ends in an underscore so unused_name() mangling
    can be exercised against a near-collision.
    """
    field1 = forms.CharField()
    field1_ = forms.CharField()
    bool1 = forms.BooleanField(required=False)
class UserSecuredFormPreview(TestFormPreview):
    """
    FormPreview with a custom security_hash method
    """
    def security_hash(self, request, form):
        # Constant hash: lets tests prove the Django 1.2 fallback hash is
        # not honoured when security_hash is overridden.
        return "123"
class PreviewTests(TestCase):
    """End-to-end tests for the FormPreview GET/preview/submit cycle."""
    urls = 'django.contrib.formtools.tests.urls'

    def setUp(self):
        # Create a FormPreview instance to share between tests
        self.preview = preview.FormPreview(TestForm)
        input_template = '<input type="hidden" name="%s" value="%s" />'
        self.input = input_template % (self.preview.unused_name('stage'), "%d")
        self.test_data = {'field1':u'foo', 'field1_':u'asdf'}

    def test_unused_name(self):
        """
        Verifies name mangling to get unique field name.
        """
        self.assertEqual(self.preview.unused_name('field1'), 'field1__')

    def test_form_get(self):
        """
        Test contrib.formtools.preview form retrieval.

        Use the client library to see if we can successfully retrieve
        the form (mostly testing the setup ROOT_URLCONF
        process). Verify that an additional hidden input field
        is created to manage the stage.
        """
        response = self.client.get('/test1/')
        stage = self.input % 1
        self.assertContains(response, stage, 1)
        self.assertEquals(response.context['custom_context'], True)
        self.assertEquals(response.context['form'].initial, {'field1': 'Works!'})

    def test_form_preview(self):
        """
        Test contrib.formtools.preview form preview rendering.

        Use the client library to POST to the form to see if a preview
        is returned.  If we do get a form back check that the hidden
        value is correctly managing the state of the form.
        """
        # Pass strings for form submittal and add stage variable to
        # show we previously saw first stage of the form.
        self.test_data.update({'stage': 1})
        response = self.client.post('/test1/', self.test_data)
        # Check to confirm stage is set to 2 in output form.
        stage = self.input % 2
        self.assertContains(response, stage, 1)

    def test_form_submit(self):
        """
        Test contrib.formtools.preview form submittal.

        Use the client library to POST to the form with stage set to 3
        to see if our forms done() method is called. Check first
        without the security hash, verify failure, retry with security
        hash and verify success.
        """
        # Pass strings for form submittal and add stage variable to
        # show we previously saw first stage of the form.
        self.test_data.update({'stage':2})
        response = self.client.post('/test1/', self.test_data)
        self.assertNotEqual(response.content, success_string)
        hash = self.preview.security_hash(None, TestForm(self.test_data))
        self.test_data.update({'hash': hash})
        response = self.client.post('/test1/', self.test_data)
        self.assertEqual(response.content, success_string)

    def test_bool_submit(self):
        """
        Test contrib.formtools.preview form submittal when form contains:
        BooleanField(required=False)

        Ticket: #6209 - When an unchecked BooleanField is previewed, the preview
        form's hash would be computed with no value for ``bool1``. However, when
        the preview form is rendered, the unchecked hidden BooleanField would be
        rendered with the string value 'False'. So when the preview form is
        resubmitted, the hash would be computed with the value 'False' for
        ``bool1``. We need to make sure the hashes are the same in both cases.
        """
        self.test_data.update({'stage':2})
        hash = self.preview.security_hash(None, TestForm(self.test_data))
        self.test_data.update({'hash':hash, 'bool1':u'False'})
        response = self.client.post('/test1/', self.test_data)
        self.assertEqual(response.content, success_string)

    def test_form_submit_django12_hash(self):
        """
        Test contrib.formtools.preview form submittal, using the hash function
        used in Django 1.2
        """
        # Pass strings for form submittal and add stage variable to
        # show we previously saw first stage of the form.
        self.test_data.update({'stage':2})
        response = self.client.post('/test1/', self.test_data)
        self.assertNotEqual(response.content, success_string)
        hash = utils.security_hash(None, TestForm(self.test_data))
        self.test_data.update({'hash': hash})
        response = self.client.post('/test1/', self.test_data)
        self.assertEqual(response.content, success_string)

    def test_form_submit_django12_hash_custom_hash(self):
        """
        Test contrib.formtools.preview form submittal, using the hash function
        used in Django 1.2 and a custom security_hash method.
        """
        # Pass strings for form submittal and add stage variable to
        # show we previously saw first stage of the form.
        self.test_data.update({'stage':2})
        response = self.client.post('/test2/', self.test_data)
        self.assertEqual(response.status_code, 200)
        self.assertNotEqual(response.content, success_string)
        hash = utils.security_hash(None, TestForm(self.test_data))
        self.test_data.update({'hash': hash})
        response = self.client.post('/test2/', self.test_data)
        # The legacy hash must NOT be accepted when security_hash is custom.
        self.assertNotEqual(response.content, success_string)
class SecurityHashTests(unittest.TestCase):
    """Unit tests for the legacy utils.security_hash function."""

    def test_textfield_hash(self):
        """
        Regression test for #10034: the hash generation function should ignore
        leading/trailing whitespace so as to be friendly to broken browsers that
        submit it (usually in textareas).
        """
        f1 = HashTestForm({'name': 'joe', 'bio': 'Nothing notable.'})
        f2 = HashTestForm({'name': ' joe', 'bio': 'Nothing notable. '})
        hash1 = utils.security_hash(None, f1)
        hash2 = utils.security_hash(None, f2)
        self.assertEqual(hash1, hash2)

    def test_empty_permitted(self):
        """
        Regression test for #10643: the security hash should allow forms with
        empty_permitted = True, or forms where data has not changed.
        """
        f1 = HashTestBlankForm({})
        f2 = HashTestForm({}, empty_permitted=True)
        hash1 = utils.security_hash(None, f1)
        hash2 = utils.security_hash(None, f2)
        self.assertEqual(hash1, hash2)
class FormHmacTests(unittest.TestCase):
    """
    Same as SecurityHashTests, but with form_hmac
    """

    def test_textfield_hash(self):
        """
        Regression test for #10034: the hash generation function should ignore
        leading/trailing whitespace so as to be friendly to broken browsers that
        submit it (usually in textareas).
        """
        f1 = HashTestForm({'name': 'joe', 'bio': 'Nothing notable.'})
        f2 = HashTestForm({'name': ' joe', 'bio': 'Nothing notable. '})
        hash1 = utils.form_hmac(f1)
        hash2 = utils.form_hmac(f2)
        self.assertEqual(hash1, hash2)

    def test_empty_permitted(self):
        """
        Regression test for #10643: the security hash should allow forms with
        empty_permitted = True, or forms where data has not changed.
        """
        f1 = HashTestBlankForm({})
        f2 = HashTestForm({}, empty_permitted=True)
        hash1 = utils.form_hmac(f1)
        hash2 = utils.form_hmac(f2)
        self.assertEqual(hash1, hash2)
class HashTestForm(forms.Form):
    """Two required text fields; fixture for the hash tests above."""
    name = forms.CharField()
    bio = forms.CharField()
class HashTestBlankForm(forms.Form):
    """Same shape as HashTestForm but with both fields optional."""
    name = forms.CharField(required=False)
    bio = forms.CharField(required=False)
#
# FormWizard tests
#
class WizardPageOneForm(forms.Form):
    """Single-field form for wizard step 1."""
    field = forms.CharField()
class WizardPageTwoForm(forms.Form):
    # Single-field form for wizard step 1.
    field = forms.CharField()
class WizardPageThreeForm(forms.Form):
    # Single-field form for wizard step 2.
    field = forms.CharField()
class WizardClass(wizard.FormWizard):
    # Minimal concrete FormWizard: fixed template, canned success response.
    def get_template(self, step):
        return 'formwizard/wizard.html'
    def done(self, request, cleaned_data):
        return http.HttpResponse(success_string)
class UserSecuredWizardClass(WizardClass):
    """
    Wizard with a custom security_hash method
    """
    def security_hash(self, request, form):
        # Constant hash: any hard-coded legacy hash in a POST must NOT match.
        return "123"
class DummyRequest(http.HttpRequest):
    """Minimal HttpRequest stand-in for calling a FormWizard directly."""

    def __init__(self, POST=None):
        super(DummyRequest, self).__init__()
        # A request carrying POST data behaves as POST, otherwise as GET.
        self.method = "POST" if POST else "GET"
        if POST is not None:
            self.POST.update(POST)
        self._dont_enforce_csrf_checks = True
class WizardTests(TestCase):
    # End-to-end tests of FormWizard step advancement and its security hash.
    urls = 'django.contrib.formtools.tests.urls'
    def setUp(self):
        self.old_TEMPLATE_DIRS = settings.TEMPLATE_DIRS
        settings.TEMPLATE_DIRS = (
            os.path.join(
                os.path.dirname(__file__),
                'templates'
            ),
        )
        # Use a known SECRET_KEY to make security_hash tests deterministic
        self.old_SECRET_KEY = settings.SECRET_KEY
        settings.SECRET_KEY = "123"
    def tearDown(self):
        # Restore the settings patched in setUp.
        settings.TEMPLATE_DIRS = self.old_TEMPLATE_DIRS
        settings.SECRET_KEY = self.old_SECRET_KEY
    def test_step_starts_at_zero(self):
        """
        step should be zero for the first form
        """
        response = self.client.get('/wizard/')
        self.assertEquals(0, response.context['step0'])
    def test_step_increments(self):
        """
        step should be incremented when we go to the next page
        """
        response = self.client.post('/wizard/', {"0-field":"test", "wizard_step":"0"})
        self.assertEquals(1, response.context['step0'])
    def test_bad_hash(self):
        """
        Form should not advance if the hash is missing or bad
        """
        response = self.client.post('/wizard/',
                                    {"0-field":"test",
                                     "1-field":"test2",
                                     "wizard_step": "1"})
        self.assertEquals(0, response.context['step0'])
    def test_good_hash_django12(self):
        """
        Form should advance if the hash is present and good, as calculated using
        django 1.2 method.
        """
        # We are hard-coding a hash value here, but that is OK, since we want to
        # ensure that we don't accidentally change the algorithm.
        data = {"0-field": "test",
                "1-field": "test2",
                "hash_0": "2fdbefd4c0cad51509478fbacddf8b13",
                "wizard_step": "1"}
        response = self.client.post('/wizard/', data)
        self.assertEquals(2, response.context['step0'])
    def test_good_hash_django12_subclass(self):
        """
        The Django 1.2 method of calculating hashes should *not* be used as a
        fallback if the FormWizard subclass has provided their own method
        of calculating a hash.
        """
        # We are hard-coding a hash value here, but that is OK, since we want to
        # ensure that we don't accidentally change the algorithm.
        data = {"0-field": "test",
                "1-field": "test2",
                "hash_0": "2fdbefd4c0cad51509478fbacddf8b13",
                "wizard_step": "1"}
        response = self.client.post('/wizard2/', data)
        self.assertEquals(0, response.context['step0'])
    def test_good_hash_current(self):
        """
        Form should advance if the hash is present and good, as calculated using
        current method.
        """
        data = {"0-field": "test",
                "1-field": "test2",
                "hash_0": "7e9cea465f6a10a6fb47fcea65cb9a76350c9a5c",
                "wizard_step": "1"}
        response = self.client.post('/wizard/', data)
        self.assertEquals(2, response.context['step0'])
    def test_14498(self):
        """
        Regression test for ticket #14498: forms seen by process_step()
        should already be validated (have cleaned_data).
        """
        that = self
        class WizardWithProcessStep(WizardClass):
            def process_step(self, request, form, step):
                that.assertTrue(hasattr(form, 'cleaned_data'))
        wizard = WizardWithProcessStep([WizardPageOneForm,
                                        WizardPageTwoForm,
                                        WizardPageThreeForm])
        data = {"0-field": "test",
                "1-field": "test2",
                "hash_0": "7e9cea465f6a10a6fb47fcea65cb9a76350c9a5c",
                "wizard_step": "1"}
        wizard(DummyRequest(POST=data))
    def test_14576(self):
        """
        Regression test for ticket #14576.
        The form of the last step is not passed to the done method.
        """
        reached = [False]
        that = self
        class Wizard(WizardClass):
            def done(self, request, form_list):
                reached[0] = True
                that.assertTrue(len(form_list) == 2)
        wizard = Wizard([WizardPageOneForm,
                         WizardPageTwoForm])
        data = {"0-field": "test",
                "1-field": "test2",
                "hash_0": "2fdbefd4c0cad51509478fbacddf8b13",
                "wizard_step": "1"}
        wizard(DummyRequest(POST=data))
        self.assertTrue(reached[0])
| apache-2.0 |
walterbender/turtleconfusion | TurtleArt/talogo.py | 1 | 65513 | # -*- coding: utf-8 -*-
# Copyright (c) 2007-8, Playful Invention Company.
# Copyright (c) 2008-13, Walter Bender
# Copyright (c) 2008-10, Raúl Gutiérrez Segalés
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os
import tempfile
import urllib2
from time import time, sleep
from operator import isNumberType
from os.path import exists as os_path_exists
from UserDict import UserDict
from gi.repository import Gtk
from gi.repository import GObject
from gi.repository import GdkPixbuf
from sugar3.graphics import style
GRID_CELL_SIZE = style.GRID_CELL_SIZE
USER_HOME = os.path.expanduser('~')
import traceback
from tablock import (Block, Media, media_blocks_dictionary)
from taconstants import (TAB_LAYER, DEFAULT_SCALE, ICON_SIZE, Color)
from tajail import (myfunc, myfunc_import)
from tapalette import (block_names, value_blocks)
from tatype import (TATypeError, TYPES_NUMERIC)
from tautils import (get_pixbuf_from_journal, data_from_file, get_stack_name,
movie_media_type, audio_media_type, image_media_type,
text_media_type, round_int, debug_output, find_group,
get_path, image_to_base64, data_to_string, data_to_file,
get_load_name, chooser_dialog)
try:
from util.RtfParser import RtfTextOnly
RTFPARSE = True
except ImportError:
RTFPARSE = False
from gettext import gettext as _
primitive_dictionary = {} # new block primitives get added here
class noKeyError(UserDict):
    """Mapping that reports 0 instead of raising for missing keys."""

    def __missing__(self, key):
        return 0
class symbol:
    """Interned token name; def_prim() later fills in its binding."""

    def __init__(self, name):
        # nargs/fcn stay None until a primitive is attached.
        self.name = name
        self.nargs = None
        self.fcn = None

    def __str__(self):
        return self.name

    def __repr__(self):
        return '#%s' % self.name
class logoerror(Exception):
    """Runtime Turtle Art error; its value is shown to the user."""

    def __init__(self, value):
        self.value = value

    def __str__(self):
        # Stringify whatever was stored, not just repr it.
        return str(self.value)
class NegativeRootError(Exception):
    """ Similar to the ZeroDivisionError, this error is raised at runtime
    when trying to compute the square root of a negative number.

    Bug fix: derives from Exception instead of BaseException, so generic
    `except Exception` handlers treat it like other runtime errors; the
    explicit `except NegativeRootError` handler in doevalstep still works.
    """

    DEFAULT_MESSAGE = 'square root of negative number'

    def __init__(self, neg_value=None, message=DEFAULT_MESSAGE):
        # neg_value -- the offending operand, kept for reporting
        # message -- human-readable description shown via str()
        self.neg_value = neg_value
        self.message = message

    def __str__(self):
        return str(self.message)
class HiddenBlock(Block):
    """Sprite-less Block stand-in created during hidden macro expansion."""

    def __init__(self, name, value=None):
        self.name = name
        self.values = []
        # With a literal value this acts as a value block; otherwise it
        # stands for the primitive of the same name.
        if value is None:
            self.primitive = name
        else:
            self.values.append(value)
            self.primitive = None
        self.connections = []
        self.docks = []
# Utility functions
def _change_user_path(path):
''' If the pathname saved in a project was from a different user, try
changing it.'''
# FIXME: Use regex
if path is None:
return None
if len(path) < 7:
return None
if '/' not in path[6:]:
return None
if path[0:5] == '/home' and '/':
i = path[6:].index('/')
new_path = USER_HOME + path[6 + i:]
if new_path == path:
return None
else:
return new_path
def _just_stop():
""" yield False to stop stack """
yield False
def _millisecond():
""" Current time in milliseconds """
return time() * 1000
class LogoCode:
""" A class for parsing Logo code """
    def __init__(self, tw):
        """Build the interpreter: register the bootstrap primitives and
        reset all per-run state (stacks, boxes, heap, media bookkeeping)."""
        self.tw = tw
        self.oblist = {}
        # Primitives that exist before any palette is loaded:
        # '(' passes a parenthesized value through, 'define' binds a
        # user-defined stack, 'nop' does nothing.
        DEFPRIM = {'(': [1, lambda self, x: self._prim_opar(x)],
                   'define': [2, self._prim_define],
                   'nop': [0, lambda self: None]}
        for p in iter(DEFPRIM):
            if len(DEFPRIM[p]) == 2:
                self.def_prim(p, DEFPRIM[p][0], DEFPRIM[p][1])
            else:
                self.def_prim(p, DEFPRIM[p][0], DEFPRIM[p][1], DEFPRIM[p][2])
        # Cache the symbol type and the special tokens used by the stepper.
        self.symtype = type(self._intern('print'))
        self.symnothing = self._intern('%nothing%')
        self.symopar = self._intern('(')
        # Interpreter state.
        self.iline = None
        self.cfun = None
        self.arglist = None
        self.ufun = None
        self.procstop = False
        self.running = False
        self.istack = []
        self.stacks = {}
        self.boxes = {'box1': 0, 'box2': 0}
        self.return_values = []
        self.heap = []
        self.iresults = None
        self.step = None
        self.bindex = None
        # Display / media bookkeeping.
        self.hidden_turtle = None
        self.trace = 0
        self.update_values = False
        self.gplay = None
        self.filepath = None
        self.pixbuf = None
        self.dsobject = None
        self.start_time = None
        self._disable_help = False
        self.body_height = int((self.tw.canvas.height / 40) * self.tw.scale)
        self.scale = DEFAULT_SCALE
    def stop_logo(self):
        """ Stop logo is called from the Stop button on the toolbar """
        # Replace the active step with a generator that yields False once,
        # which makes doevalstep() halt on its next tick.
        self.step = _just_stop()
        # Clear istack and iline of any code that was not executed due to Stop
        self.istack = []
        self.iline = None
        self.tw.stop_plugins()
        if self.tw.gst_available:
            from .tagplay import stop_media
            stop_media(self)
        self.tw.turtles.get_active_turtle().show()
        self.tw.running_blocks = False
        # If we disabled hover help, reenable it
        if self._disable_help:
            self.tw.no_help = False
            self._disable_help = False
def def_prim(self, name, args, fcn, rprim=False):
""" Define the primitives associated with the blocks """
sym = self._intern(name)
sym.nargs, sym.fcn = args, fcn
sym.rprim = rprim
def _intern(self, string):
""" Add any new objects to the symbol list. """
if string in self.oblist:
return self.oblist[string]
sym = symbol(string)
self.oblist[string] = sym
return sym
def get_prim_callable(self, name):
""" Return the callable primitive associated with the given name """
sym = self.oblist.get(name)
if sym is not None:
return sym.fcn
else:
return None
    def run_blocks(self, code):
        """Run code generated by generate_code().
        """
        # Stamp the start time read by get_start_time(), then prime the
        # stepper with the compiled code.
        self.start_time = time()
        self._setup_cmd(code)
    def generate_code(self, blk, blocks):
        """ Generate code to be passed to run_blocks() from a stack of blocks.

        blk -- top block of the stack being run
        blocks -- every block on the canvas (named action stacks are
        compiled here too)
        """
        # Remember every block's connections so they can be restored after
        # the hidden macro expansions below mangle them.
        self._save_all_connections = []
        for b in blocks:
            tmp = []
            for c in b.connections:
                tmp.append(c)
            self._save_all_connections.append(
                {'blk': b, 'connections': tmp})
        for k in self.stacks.keys():
            self.stacks[k] = None
        self.stacks['stack1'] = None
        self.stacks['stack2'] = None
        # Save state in case there is a hidden macro expansion
        self._save_blocks = None
        self._save_blk = blk
        self._save_while_blocks = []
        # self._save_connections = []
        if self.trace > 0:
            self.update_values = True
        else:
            self.update_values = False
        self.clear_value_blocks()
        # Disabled hover help while program is running
        if not self.tw.no_help:
            self._disable_help = True
            self.tw.no_help = True
        for b in blocks:
            b.unhighlight()
        # Hidden macro expansions
        for b in blocks:
            if b.name in ['returnstack']:
                action_blk, new_blocks = self._expand_return(b, blk, blocks)
                blocks = new_blocks[:]
                if b == blk:
                    blk = action_blk
        for b in blocks:
            if b.name in ['while', 'until']:
                action_blk, new_blocks = self._expand_forever(b, blk, blocks)
                blocks = new_blocks[:]
                if b == blk:
                    blk = action_blk
        for b in blocks:
            if b.name in ['forever']:
                action_blk, new_blocks = self._expand_forever(b, blk, blocks)
                blocks = new_blocks[:]
                if b == blk:
                    blk = action_blk
        # Compile every named action stack (hat blocks) before the main one.
        for b in blocks:
            if b.name in ('hat', 'hat1', 'hat2'):
                stack_name = get_stack_name(b)
                if stack_name:
                    stack_key = self._get_stack_key(stack_name)
                    code = self._blocks_to_code(b)
                    self.stacks[stack_key] = self._readline(code)
                else:
                    self.tw.showlabel('#nostack')
                    self.tw.showblocks()
                    self.tw.running_blocks = False
                    return None
        code = self._blocks_to_code(blk)
        if self._save_blocks is not None:
            # Undo any hidden macro expansion
            blocks = self._save_blocks[:]
            blk = self._save_blk
            for b in self._save_while_blocks:
                if b[1] is not None:
                    b[0].connections[0].connections[b[1]] = b[0]
                if b[2] is not None:
                    b[0].connections[-1].connections[b[2]] = b[0]
                if b[3] is not None:
                    b[0].connections[-2].connections[b[3]] = b[0]
        if self._save_all_connections is not None:
            # Restore any connections that may have been mangled
            # during macro expansion.
            for entry in self._save_all_connections:
                b = entry['blk']
                connections = entry['connections']
                b.connections = connections[:]
        return code
    def _blocks_to_code(self, blk):
        """ Convert a stack of blocks to pseudocode. """
        if blk is None:
            return ['%nothing%', '%nothing%']
        code = []
        dock = blk.docks[0]
        # There could be a '(', ')', '[' or ']'.
        if len(dock) > 4 and dock[4] in ('[', ']', ']['):
            code.append(dock[4])
        if blk.primitive is not None:  # make a tuple (prim, blk)
            if blk in self.tw.block_list.list:
                code.append((blk.primitive,
                             self.tw.block_list.list.index(blk)))
            else:
                code.append(blk.primitive)  # Hidden block
        elif blk.is_value_block():  # Extract the value from content blocks.
            value = blk.get_value()
            if value is None:
                return ['%nothing%']
            else:
                code.append(value)
        else:
            return ['%nothing%']
        if blk.connections is not None and len(blk.connections) > 0:
            # Recurse through everything docked below/beside this block.
            for i in range(1, len(blk.connections)):
                b = blk.connections[i]
                dock = blk.docks[i]
                # There could be a '(', ')', '[' or ']'.
                if len(dock) > 4 and dock[4] in ('[', ']', ']['):
                    for c in dock[4]:
                        code.append(c)
                if b is not None:
                    code.extend(self._blocks_to_code(b))
                elif blk.docks[i][0] not in ['flow', 'unavailable']:
                    # An empty argument slot compiles to the nothing token.
                    code.append('%nothing%')
        return code
    def _setup_cmd(self, string):
        """ Execute the pseudocode. """
        self.hidden_turtle = self.tw.turtles.get_active_turtle()
        self.hidden_turtle.hide()  # Hide the turtle while we are running.
        self.procstop = False
        # Tokenize, then create the stepper generator that doevalstep()
        # will drive tick by tick.
        blklist = self._readline(string)
        self.step = self._start_eval(blklist)
    def _readline(self, line):
        """
        Convert the pseudocode into a list of commands.
        The block associated with the command is stored as the second element
        in a tuple, e.g., (#forward, 16)
        """
        # debug_output(line, self.tw.running_sugar)
        res = []
        while line:
            token = line.pop(0)
            bindex = None
            if isinstance(token, tuple):
                (token, bindex) = token
            if isinstance(token, Media):
                res.append(token)
            elif isNumberType(token):  # Python 2 helper from operator
                res.append(token)
            elif token.isdigit():
                res.append(float(token))
            elif token[0] == '-' and token[1:].isdigit():
                res.append(-float(token[1:]))
            elif token[0] == '"':
                # Quoted string literal: strip the leading quote.
                res.append(token[1:])
            elif token[0:2] == "#s":
                res.append(token[2:])
            elif token == '[':
                # Nested flow: recurse until the matching ']'.
                res.append(self._readline(line))
            elif token == ']':
                return res
            elif bindex is None or not isinstance(bindex, int):
                res.append(self._intern(token))
            else:
                res.append((self._intern(token), bindex))
        return res
    def _start_eval(self, blklist):
        """ Step through the list. """
        # Swap the toolbar button to 'stop' while the program runs.
        if self.tw.running_sugar:
            self.tw.activity.stop_turtle_button.set_icon_name("stopiton")
            self.tw.activity.stop_turtle_button.set_tooltip(
                _('Stop turtle'))
        elif self.tw.interactive_mode:
            self.tw.toolbar_shapes['stopiton'].set_layer(TAB_LAYER)
        self.running = True
        self.icall(self.evline, blklist)
        yield True
        # Execution finished: restore the toolbar button.
        if self.tw.running_sugar:
            if self.tw.step_time == 0 and self.tw.selected_blk is None:
                self.tw.activity.stop_turtle_button.set_icon_name("hideshowon")
                self.tw.activity.stop_turtle_button.set_tooltip(
                    _('Show blocks'))
            else:
                self.tw.activity.stop_turtle_button.set_icon_name(
                    "hideshowoff")
                self.tw.activity.stop_turtle_button.set_tooltip(
                    _('Hide blocks'))
        elif self.tw.interactive_mode:
            self.tw.toolbar_shapes['stopiton'].hide()
        yield False
        self.running = False
        # If we disabled hover help, reenable it
        if self._disable_help:
            self.tw.no_help = False
            self._disable_help = False
        self.tw.display_coordinates()
def icall(self, fcn, *args):
""" Add a function and its arguments to the program stack. """
self.istack.append(self.step)
self.step = fcn(*(args))
    def evline(self, blklist, call_me=True):
        """ Evaluate a line of code from the list. """
        oldiline = self.iline
        self.iline = blklist[:]
        self.arglist = None
        while self.iline:
            token = self.iline[0]
            self.bindex = None
            if isinstance(token, tuple):
                (token, self.bindex) = self.iline[0]
            if self.bindex is not None:
                current_block = self.tw.block_list.list[self.bindex]
                # If the blocks are visible, highlight the current block.
                if not self.tw.hide:
                    current_block.highlight()
                # Anything we need to do specific for this block
                # before it is run?
                if current_block.before is not None:
                    current_block.before(self.tw, current_block)
            # NOTE(review): this stanza repeats the highlight/before logic
            # above but calls before() with a different signature -- looks
            # like a merge artifact; confirm which call is intended.
            if not self.tw.hide and self.bindex is not None:
                current_block = self.tw.block_list.list[self.bindex]
                current_block.highlight()
                if current_block.before is not None:
                    current_block.before(current_block)
            # In debugging modes, we pause between steps and show the turtle.
            if self.tw.step_time > 0:
                self.tw.turtles.get_active_turtle().show()
                endtime = _millisecond() + self.tw.step_time * 100.
                while _millisecond() < endtime:
                    sleep(0.1)
                    yield True
                self.tw.turtles.get_active_turtle().hide()
            # 'Stand-alone' booleans are handled here.
            if token == self.symopar:
                token = self.iline[1]
                if isinstance(token, tuple):
                    (token, self.bindex) = self.iline[1]
            # Process the token and any arguments.
            self.icall(self._eval, call_me)
            yield True
            if self.bindex is not None:
                current_block = self.tw.block_list.list[self.bindex]
                # Time to unhighlight the current block.
                if not self.tw.hide:
                    current_block.unhighlight()
                # Anything we need to do specific for this block
                # after it is run?
                if current_block.after is not None:
                    current_block.after(self.tw, current_block)
            if self.procstop:
                break
            if self.iresult is None:
                continue
            # A leftover result at statement level is an error.
            if self.bindex is not None:
                self.tw.block_list.list[self.bindex].highlight()
            self.tw.showblocks()
            self.tw.display_coordinates()
            raise logoerror(str(self.iresult))
        self.iline = oldiline
        self.ireturn()
        if not self.tw.hide and self.tw.step_time > 0:
            self.tw.display_coordinates()
        yield True
    def _eval(self, call_me=True):
        """ Evaluate the next token on the line of code we are processing. """
        token = self.iline.pop(0)
        bindex = None
        if isinstance(token, tuple):
            (token, bindex) = token
        # Either we are processing a symbol or a value.
        if isinstance(token, self.symtype):
            # We highlight blocks here in case an error occurs...
            if not self.tw.hide and bindex is not None:
                self.tw.block_list.list[bindex].highlight()
            self.icall(self._evalsym, token, call_me)
            yield True
            # and unhighlight if everything was OK.
            if not self.tw.hide and bindex is not None:
                self.tw.block_list.list[bindex].unhighlight()
            res = self.iresult
        else:
            # Literal value: it evaluates to itself.
            res = token
        self.ireturn(res)
        yield True
    def _evalsym(self, token, call_me):
        """ Process primitive associated with symbol token """
        self._undefined_check(token)
        oldcfun, oldarglist = self.cfun, self.arglist
        self.cfun, self.arglist = token, []
        if token.nargs is None:
            self.tw.showblocks()
            self.tw.display_coordinates()
            raise logoerror("#noinput")
        # New-style Primitive objects receive their arguments unevaluated.
        is_Primitive = type(self.cfun.fcn).__name__ == 'Primitive'
        is_PrimitiveDisjunction = type(self.cfun.fcn).__name__ == \
            'PrimitiveDisjunction'
        call_args = not (is_Primitive or is_PrimitiveDisjunction)
        # Evaluate each argument in turn and collect the results.
        for i in range(token.nargs):
            self._no_args_check()
            self.icall(self._eval, call_args)
            yield True
            self.arglist.append(self.iresult)
        need_to_pop_istack = False
        if self.cfun.rprim:
            # User-runnable primitive: either a stored block list or a
            # callable generator.
            if isinstance(self.cfun.fcn, list):
                # debug_output('evalsym rprim list: %s' % (str(token)),
                # self.tw.running_sugar)
                self.icall(self._ufuncall, self.cfun.fcn, call_args)
                yield True
                need_to_pop_istack = True
                result = None
            else:
                if call_me:
                    self.icall(self.cfun.fcn, *self.arglist)
                    yield True
                    need_to_pop_istack = True
                    result = None
                else:
                    result = (self.cfun.fcn, ) + tuple(self.arglist)
        else:
            need_to_pop_istack = True
            if call_me:
                result = self.cfun.fcn(self, *self.arglist)
            else:
                result = (self.cfun.fcn, self) + tuple(self.arglist)
        self.cfun, self.arglist = oldcfun, oldarglist
        if self.arglist is not None and result is None:
            self.tw.showblocks()
            raise logoerror("%s %s %s" %
                            (oldcfun.name, _("did not output to"),
                             self.cfun.name))
        if need_to_pop_istack:
            self.ireturn(result)
            yield True
        else:
            self.iresult = result
    def _ufuncall(self, body, call_me):
        """ ufuncall: run a user-defined function body as a new line. """
        self.ijmp(self.evline, body, call_me)
        yield True
    def doevalstep(self):
        """ evaluate one step

        Drives the stepper generator for up to 120ms per GUI tick.
        Returns True while there is more to run, False when finished or
        on error. """
        starttime = _millisecond()
        try:
            while (_millisecond() - starttime) < 120:
                try:
                    if self.step is None:
                        self.tw.running_blocks = False
                        return False
                    if self.tw.running_turtleart:
                        try:
                            self.step.next()
                        except ValueError:
                            debug_output('generator already executing',
                                         self.tw.running_sugar)
                            self.tw.running_blocks = False
                            return False
                        except TATypeError as tte:
                            # TODO insert the correct block name
                            # (self.cfun.name is only the name of the
                            # outermost block in this statement/ line of code)
                            # use logoerror("#notanumber") when possible
                            if (tte.req_type in TYPES_NUMERIC and
                                    tte.bad_type not in TYPES_NUMERIC):
                                raise logoerror("#notanumber")
                            else:
                                raise logoerror(
                                    "%s %s %s %s" %
                                    (self.cfun.name, _("doesn't like"),
                                     str(tte.bad_value), _("as input")))
                        except ZeroDivisionError:
                            raise logoerror("#zerodivide")
                        except NegativeRootError:
                            raise logoerror("#negroot")
                        except IndexError:
                            raise logoerror("#emptyheap")
                    else:
                        # Embedded (non-TurtleArt) use: report errors via
                        # the status label instead of logoerror dialogs.
                        try:
                            self.step.next()
                        except BaseException as error:
                            if isinstance(error, (StopIteration,
                                                  logoerror)):
                                self.tw.running_blocks = False
                                raise error
                            else:
                                traceback.print_exc()
                                self.tw.showlabel(
                                    'status', '%s: %s' %
                                    (type(error).__name__, str(error)))
                                self.tw.running_blocks = False
                                return False
                except StopIteration:
                    # Program ran to completion: restore the turtle.
                    if self.tw.running_turtleart:
                        # self.tw.turtles.show_all()
                        if self.hidden_turtle is not None:
                            self.hidden_turtle.show()
                            self.hidden_turtle = None
                        else:
                            self.tw.turtles.get_active_turtle().show()
                        self.tw.running_blocks = False
                        return False
                    else:
                        self.ireturn()
        except logoerror as e:
            if self.tw.running_turtleart:
                self.tw.showblocks()
                self.tw.display_coordinates()
                self.tw.showlabel('syntaxerror', str(e))
                self.tw.turtles.show_all()
            else:
                traceback.print_exc()
                self.tw.showlabel('status', 'logoerror: ' + str(e))
            self.tw.running_blocks = False
            return False
        return True
def ireturn(self, res=None):
""" return value """
self.step = self.istack.pop()
self.iresult = res
def ijmp(self, fcn, *args):
""" ijmp """
self.step = fcn(*(args))
def _undefined_check(self, token):
""" Make sure token has a definition """
if token.fcn is not None:
return False
if token.name == '%nothing%':
errormsg = ''
else:
errormsg = "%s %s" % (_("I don't know how to"), _(token.name))
self.tw.showblocks()
raise logoerror(errormsg)
    def _no_args_check(self):
        """ Missing argument ? """
        if self.iline and self.iline[0] is not self.symnothing:
            return
        # An argument slot was left empty: abort and show the blocks.
        self.tw.showblocks()
        self.tw.display_coordinates()
        raise logoerror("#noinput")
#
# Primitives
#
    def _prim_opar(self, val):
        # Open paren: drop the matching ')' token and pass the value through.
        self.iline.pop(0)
        return val
    def _prim_define(self, name, body):
        """ Define a primitive """
        # Intern bare strings; the body becomes a zero-argument runnable
        # primitive (rprim) whose fcn is the stored block list.
        if not isinstance(name, self.symtype):
            name = self._intern(name)
        name.nargs, name.fcn = 0, body
        name.rprim = True
    def prim_start(self, *ignored_args):
        ''' Start block: recenter '''
        # Extra positional args from the stack are ignored.
        if self.tw.running_sugar:
            self.tw.activity.recenter()
    def prim_clear(self):
        """ Clear screen """
        # Stop media/plugins, reset scale and value labels, then wipe the
        # canvas and reset every turtle.
        self.tw.clear_plugins()
        self.stop_playing_media()
        self.reset_scale()
        # self.reset_timer() # Only reset timer on 'run'
        self.clear_value_blocks()
        self.tw.canvas.clearscreen()
        self.tw.turtles.reset_turtles()
        self.reset_internals()
    def stop_playing_media(self):
        # Stop any audio/video started by media blocks (gstreamer only).
        if self.tw.gst_available:
            from .tagplay import stop_media
            stop_media(self)
    def reset_scale(self):
        # Restore the media-block scale to its default.
        self.scale = DEFAULT_SCALE
    def reset_timer(self):
        # Restart the clock returned by get_start_time().
        self.start_time = time()
    def get_start_time(self):
        # Timestamp of the last run start / timer reset.
        return self.start_time
    def reset_internals(self):
        # Forget the hidden turtle and let the activity restore its state.
        self.hidden_turtle = None
        if self.tw.running_turtleart:
            self.tw.activity.restore_state()
    def prim_loop(self, controller, blklist):
        """ Execute a loop
        controller -- iterator that yields True iff the loop should be run
                      once more OR a callable that returns such an iterator
        blklist -- list of callables that form the loop body """
        # hasattr(..., "next") is the Python 2 iterator test.
        if not hasattr(controller, "next"):
            if callable(controller):
                controller = controller()
            else:
                raise TypeError("a loop controller must be either an iterator "
                                "or a callable that returns an iterator")
        while next(controller):
            self.icall(self.evline, blklist[:])
            yield True
            # A stop-stack inside the body aborts the loop.
            if self.procstop:
                break
        self.ireturn()
        yield True
    def prim_clamp(self, blklist):
        """ Run clamp blklist """
        self.icall(self.evline, blklist[:])
        yield True
        # Clear any stop flag set inside the clamp so outer code continues.
        self.procstop = False
        self.ireturn()
        yield True
    def set_scale(self, scale):
        ''' Set scale for media blocks '''
        self.scale = scale
    def get_scale(self):
        ''' Return the scale used by media blocks '''
        return self.scale
    def prim_stop_stack(self):
        """ Stop execution of a stack """
        # evline()/prim_loop() poll this flag and break out.
        self.procstop = True
    def prim_return(self, value):
        """ Stop execution of a stack and sets return value"""
        # The value is pushed onto return_values; prim_get_box('__return__')
        # pops it.
        # self.boxes['__return__'] = value
        self.return_values.append(value)
        self.procstop = True
    def active_turtle(self):
        ''' NOP used to add get_active_turtle to Python export '''
        # turtle = self.tw.turtles.get_turtle()
        pass
    def prim_turtle(self, name):
        # Make the named turtle the active one.
        self.tw.turtles.set_turtle(name)
    def prim_wait(self, wait_time):
        """ Show the turtle while we wait """
        self.tw.turtles.get_active_turtle().show()
        endtime = _millisecond() + wait_time * 1000.
        # Sleep in tenths of the wait so the GUI keeps ticking via yield.
        while _millisecond() < endtime:
            sleep(wait_time / 10.)
            yield True
        self.tw.turtles.get_active_turtle().hide()
        self.ireturn()
        yield True
    def prim_if(self, boolean, blklist):
        """ If bool, do list """
        if boolean:
            self.icall(self.evline, blklist[:])
            yield True
        self.ireturn()
        yield True
    def prim_ifelse(self, boolean, list1, list2):
        """ If bool, do list1, else do list2 """
        # ijmp (not icall): the chosen branch replaces this frame entirely.
        if boolean:
            self.ijmp(self.evline, list1[:])
            yield True
        else:
            self.ijmp(self.evline, list2[:])
            yield True
    def prim_set_box(self, name, value):
        """ Store value in named box """
        (key, is_native) = self._get_box_key(name)
        self.boxes[key] = value
        # Refresh any on-canvas box labels that display this value.
        if is_native:
            if self.update_values:
                self.update_label_value(name, value)
        else:
            if self.update_values:
                self.update_label_value('box', value, label=name)
    def prim_get_box(self, name):
        """ Retrieve value from named box """
        if name == '__return__':
            # Pop the most recent value pushed by prim_return().
            if len(self.return_values) == 0:
                raise logoerror("#emptybox")
            return self.return_values.pop()
        (key, is_native) = self._get_box_key(name)
        try:
            return self.boxes[key]
        except KeyError:
            # FIXME this looks like a syntax error in the GUI
            raise logoerror("#emptybox")
    def _get_box_key(self, name):
        """ Return the key used for this box in the boxes dictionary and a
        boolean indicating whether it is a 'native' box """
        if name in ('box1', 'box2'):
            return (name, True)
        # elif name == '__return__':
        #     return (name, True)
        else:
            # make sure '5' and '5.0' point to the same box
            # (basestring/long are Python 2 names)
            if isinstance(name, (basestring, int, long)):
                try:
                    name = float(name)
                except ValueError:
                    pass
            return ('box3_' + str(name), False)
    def prim_define_stack(self, name):
        """ Top of a named stack """
        # No-op: hat blocks are compiled ahead of time by generate_code().
        pass
    def prim_invoke_stack(self, name):
        """ Process a named stack """
        key = self._get_stack_key(name)
        if self.stacks.get(key) is None:
            raise logoerror("#nostack")
        # Run a fresh copy of the compiled stack body.
        self.icall(self.evline, self.stacks[key][:])
        yield True
        self.procstop = False
        self.ireturn()
        yield True
    def prim_invoke_return_stack(self, name):
        """ Process a named stack and return a value"""
        # NOTE(review): prim_invoke_stack() is a generator, so calling it
        # here only creates the generator object without running the stack;
        # and prim_return() appends to self.return_values rather than
        # setting boxes['__return__'].  Verify this path against the
        # interpreter's macro expansion (_expand_return), which may bypass
        # this method entirely.
        self.prim_invoke_stack(name)
        return self.boxes['__return__']
    def _get_stack_key(self, name):
        """ Return the key used for this stack in the stacks dictionary """
        if name in ('stack1', 'stack2'):
            return name
        else:
            # make sure '5' and '5.0' point to the same action stack
            # (long is a Python 2 name)
            if isinstance(name, (int, long, float)):
                if int(name) == name:
                    name = int(name)
                else:
                    name = float(name)
            return 'stack3' + str(name)
def load_heap(self, obj):
""" Load FILO from file """
user_path = _change_user_path(obj)
if self.tw.running_sugar:
# Is the object a dsobject?
if isinstance(obj, Media) and obj.value:
from sugar3.datastore import datastore
try:
dsobject = datastore.get(obj.value)
except BaseException:
debug_output("Couldn't find dsobject %s" %
(obj.value), self.tw.running_sugar)
if dsobject is not None:
self.push_file_data_to_heap(dsobject)
# Or is it a path?
elif os.path.exists(obj):
self.push_file_data_to_heap(None, path=obj)
elif user_path is not None and os.path.exists(user_path):
self.push_file_data_to_heap(None, path=user_path)
elif os.path.exists(os.path.join(
self.tw.activity.get_bundle_path(), obj)):
self.push_file_data_to_heap(None, path=obj)
else:
# Finally try choosing a datastore object
chooser_dialog(self.tw.parent, obj,
self.push_file_data_to_heap)
else:
# If you cannot find the file, open a chooser.
if os.path.exists(obj):
self.push_file_data_to_heap(None, path=obj)
elif user_path is not None and os.path.exists(user_path):
self.push_file_data_to_heap(None, path=user_path)
else:
obj, self.tw.load_save_folder = get_load_name(
'.*', self.tw.load_save_folder)
if obj is not None:
self.push_file_data_to_heap(None, path=obj)
    def save_heap(self, obj):
        """ save FILO to file """
        if self.tw.running_sugar:
            from sugar3 import profile
            from sugar3.datastore import datastore
            from sugar3.activity import activity
            # Save JSON-encoded heap to temporary file
            heap_file = os.path.join(get_path(activity, 'instance'),
                                     'heap.txt')
            data_to_file(self.heap, heap_file)
            # Write to an existing or new dsobject
            if isinstance(obj, Media) and obj.value:
                dsobject = datastore.get(obj.value)
            else:
                dsobject = datastore.create()
                dsobject.metadata['title'] = str(obj)
                dsobject.metadata['icon-color'] = \
                    profile.get_color().to_string()
                dsobject.metadata['mime_type'] = 'text/plain'
            dsobject.set_file_path(heap_file)
            datastore.write(dsobject)
            dsobject.destroy()
        else:
            # Outside Sugar, obj is just a filesystem path.
            heap_file = obj
            data_to_file(self.heap, heap_file)
    def get_heap(self):
        # Accessor for the interpreter's FILO heap list.
        return self.heap
def reset_heap(self):
""" Reset heap to an empty list """
# empty the list rather than setting it to a new empty list object,
# so the object references are preserved
while self.heap:
self.heap.pop()
    def prim_myblock(self, *args):
        """ Run Python code imported from Journal """
        if self.bindex is not None and self.bindex in self.tw.myblock:
            try:
                myfunc_import(self, self.tw.myblock[self.bindex], args)
            except BaseException:
                # Any failure in the user's code surfaces as a syntax error.
                raise logoerror("#syntaxerror")
    def prim_myfunction(self, f, *args):
        """ Programmable block (Call tajail.myfunc and convert any errors to
        logoerrors) """
        try:
            y = myfunc(f, args)
            # NaN propagates silently through arithmetic, so trap it here.
            if str(y) == 'nan':
                debug_output('Python function returned NAN',
                             self.tw.running_sugar)
                self.stop_logo()
                raise logoerror("#notanumber")
            else:
                return y
        except ZeroDivisionError:
            self.stop_logo()
            raise logoerror("#zerodivide")
        except ValueError as e:
            self.stop_logo()
            raise logoerror('#' + str(e))
        except SyntaxError as e:
            self.stop_logo()
            raise logoerror('#' + str(e))
        except NameError as e:
            self.stop_logo()
            raise logoerror('#' + str(e))
        except OverflowError:
            self.stop_logo()
            raise logoerror("#overflowerror")
        except TypeError:
            self.stop_logo()
            raise logoerror("#notanumber")
    def clear_value_blocks(self):
        # Reset all value-block labels to show no value; guarded until
        # find_value_blocks() has been run.
        if not hasattr(self, 'value_blocks_to_update'):
            return
        for name in value_blocks:
            self.update_label_value(name)
def int(self, n):
""" Raise an error if n doesn't convert to int. """
if isinstance(n, int):
return n
elif isinstance(n, float):
return int(n)
elif isinstance(n, str):
return int(ord(n[0]))
else:
self.tw.showblocks()
raise logoerror("%s %s %s %s" %
(self.cfun.name, _("doesn't like"), str(n),
_("as input")))
    def find_value_blocks(self):
        """ Find any value blocks that may need label updates """
        # Maps value-block name -> list of matching blocks on the canvas.
        self.value_blocks_to_update = {}
        for name in value_blocks:
            self.value_blocks_to_update[name] = \
                self.tw.block_list.get_similar_blocks('block', name)
    def update_label_value(self, name, value=None, label=None):
        """ Update the label of value blocks to reflect current value.

        name -- key into value_blocks_to_update / block_names
        value -- new value to display; None restores the default label
        label -- for named boxes, only update blocks whose argument
                 label matches this string
        """
        # If it is a named box, we need to match the label to the box
        if not self.tw.interactive_mode:
            return
        if self.tw.hide:
            return
        self.tw.display_coordinates()
        if value is None:
            # Restore the default (value-less) label on every block.
            for block in self.value_blocks_to_update[name]:
                block.spr.set_label(block_names[name][0])
                if name == 'box':
                    argblk = block.connections[-2]
                    dx = block.dx
                    block.resize()
                    if argblk is not None:
                        # Move connections over...
                        dx = (block.dx - dx) * self.tw.block_scale
                        drag_group = find_group(argblk)
                        for blk in drag_group:
                            blk.spr.move_relative((dx, 0))
                else:
                    block.resize()
        elif self.update_values:
            if isinstance(value, float):
                # Use the locale-specific decimal separator.
                valstring = str(round_int(value)).replace(
                    '.', self.tw.decimal_point)
            else:
                valstring = str(value)
            for block in self.value_blocks_to_update[name]:
                if label is None:
                    block.spr.set_label(
                        block_names[name][0] + ' = ' + valstring)
                    block.resize()
                else:
                    argblk = block.connections[-2]
                    # Only update if label matches
                    if argblk is not None and argblk.spr.labels[0] == label:
                        block.spr.set_label(
                            block_names[name][0] + ' = ' + valstring)
                        dx = block.dx
                        block.resize()
                        # Move connections over...
                        dx = (block.dx - dx) * self.tw.block_scale
                        drag_group = find_group(argblk)
                        for blk in drag_group:
                            blk.spr.move_relative((dx, 0))
def reskin(self, obj):
""" Reskin the turtle with an image from a file """
scale = int(ICON_SIZE * float(self.scale) / DEFAULT_SCALE)
if scale < 1:
return
self.filepath = None
self.dsobject = None
user_path = _change_user_path(obj.value)
if obj.value is not None and os.path.exists(obj.value):
self.filepath = obj.value
elif user_path is not None and os.path.exists(user_path):
self.filepath = user_path
elif self.tw.running_sugar: # datastore object
from suga3.datastore import datastore
try:
self.dsobject = datastore.get(obj.value)
except BaseException:
debug_output("Couldn't find dsobject %s" %
(obj.value), self.tw.running_sugar)
if self.dsobject is not None:
self.filepath = self.dsobject.file_path
if self.filepath is None:
self.tw.showlabel('nojournal', self.filepath)
return
pixbuf = None
try:
pixbuf = GdkPixbuf.Pixbuf.new_from_file_at_size(
self.filepath, scale, scale)
except BaseException:
self.tw.showlabel('nojournal', self.filepath)
debug_output("Couldn't open skin %s" % (self.filepath),
self.tw.running_sugar)
if pixbuf is not None:
self.tw.turtles.get_active_turtle().set_shapes([pixbuf])
pen_state = self.tw.turtles.get_active_turtle().get_pen_state()
if pen_state:
self.tw.turtles.get_active_turtle().set_pen_state(False)
self.tw.turtles.get_active_turtle().forward(0)
if pen_state:
self.tw.turtles.get_active_turtle().set_pen_state(True)
if self.tw.sharing():
if self.tw.running_sugar:
tmp_path = get_path(self.tw.activity, 'instance')
else:
tmp_path = tempfile.gettempdir()
tmp_file = os.path.join(get_path(self.tw.activity, 'instance'),
'tmpfile.png')
pixbuf.save(tmp_file, 'png', {'quality': '100'})
data = image_to_base64(tmp_file, tmp_path)
height = pixbuf.get_height()
width = pixbuf.get_width()
event = 'R|%s' % (data_to_string([self.tw.nick,
[round_int(width),
round_int(height),
data]]))
GObject.idle_add(self.tw.send_event, event)
os.remove(tmp_file)
def get_from_url(self, url):
""" Get contents of URL as text or tempfile to image """
if "://" not in url: # no protocol
url = "http://" + url # assume HTTP
try:
req = urllib2.urlopen(url)
except urllib2.HTTPError as e:
debug_output("Couldn't open %s: %s" % (url, e),
self.tw.running_sugar)
raise logoerror(url + ' [%d]' % (e.code))
except urllib2.URLError as e:
if hasattr(e, 'code'):
debug_output("Couldn't open %s: %s" % (url, e),
self.tw.running_sugar)
raise logoerror(url + ' [%d]' % (e.code))
else: # elif hasattr(e, 'reason'):
debug_output("Couldn't reach server: %s" % (e),
self.tw.running_sugar)
raise logoerror('#noconnection')
mediatype = req.info().getheader('Content-Type')
if mediatype[0:5] in ['image', 'audio', 'video']:
tmp = tempfile.NamedTemporaryFile(delete=False)
tmp.write(req.read())
tmp.flush()
obj = Media(mediatype[0:5], value=tmp.name)
return obj
else:
return req.read()
def showlist(self, objects):
""" Display list of media objects """
x = (self.tw.turtles.get_active_turtle().get_xy()[0] /
self.tw.coord_scale)
y = (self.tw.turtles.get_active_turtle().get_xy()[1] /
self.tw.coord_scale)
for obj in objects:
self.tw.turtles.get_active_turtle().set_xy(x, y, pendown=False)
self.show(obj)
y -= int(self.tw.canvas.textsize * self.tw.lead)
def show(self, obj, center=False):
""" Show is the general-purpose media-rendering block. """
mediatype = None
if isinstance(obj, Media) and obj.value:
self.filepath = None
self.pixbuf = None # Camera writes directly to pixbuf
self.dsobject = None
user_path = _change_user_path(obj.value)
if obj.value.lower() in media_blocks_dictionary:
media_blocks_dictionary[obj.value.lower()]()
mediatype = 'image' # camera snapshot
elif os_path_exists(obj.value):
self.filepath = obj.value
mediatype = obj.type
# If for some reason the obj.type is not set, try guessing.
if mediatype is None and self.filepath is not None:
if movie_media_type(self.filepath):
mediatype = 'video'
elif audio_media_type(self.filepath):
mediatype = 'audio'
elif image_media_type(self.filepath):
mediatype = 'image'
elif text_media_type(self.filepath):
mediatype = 'text'
elif user_path is not None and os_path_exists(user_path):
self.filepath = user_path
mediatype = obj.type
# If for some reason the obj.type is not set, try guessing.
if mediatype is None and self.filepath is not None:
if movie_media_type(self.filepath):
mediatype = 'video'
elif audio_media_type(self.filepath):
mediatype = 'audio'
elif image_media_type(self.filepath):
mediatype = 'image'
elif text_media_type(self.filepath):
mediatype = 'text'
elif self.tw.running_sugar:
from sugar3.datastore import datastore
try:
self.dsobject = datastore.get(obj.value)
except BaseException:
debug_output("Couldn't find dsobject %s" %
(obj.value), self.tw.running_sugar)
if self.dsobject is not None:
self.filepath = self.dsobject.file_path
if 'mime_type' in self.dsobject.metadata:
mimetype = self.dsobject.metadata['mime_type']
if mimetype[0:5] == 'video':
mediatype = 'video'
elif mimetype[0:5] == 'audio':
mediatype = 'audio'
elif mimetype[0:5] == 'image':
mediatype = 'image'
else:
mediatype = 'text'
if self.pixbuf is not None:
self.insert_image(center=center, pixbuf=True)
elif self.filepath is None:
if self.dsobject is not None:
self.tw.showlabel(
'nojournal',
self.dsobject.metadata['title'])
else:
self.tw.showlabel('nojournal', obj.value)
debug_output("Couldn't open %s" % (obj.value),
self.tw.running_sugar)
elif obj.type == 'media' or mediatype == 'image':
self.insert_image(center=center)
elif mediatype == 'audio':
self.play_sound()
elif mediatype == 'video':
self.play_video()
elif obj.type == 'descr' or mediatype == 'text':
mimetype = None
if self.dsobject is not None and \
'mime_type' in self.dsobject.metadata:
mimetype = self.dsobject.metadata['mime_type']
description = None
if self.dsobject is not None and \
'description' in self.dsobject.metadata:
description = self.dsobject.metadata[
'description']
self.insert_desc(mimetype, description)
if self.dsobject is not None:
self.dsobject.destroy()
elif isinstance(obj, (basestring, float, int)): # text or number
if isinstance(obj, (float, int)):
obj = round_int(obj)
x, y = self.x2tx(), self.y2ty()
if center:
y -= self.tw.canvas.textsize
self.tw.turtles.get_active_turtle().draw_text(
obj, x, y,
int(self.tw.canvas.textsize * self.scale / 100.),
self.tw.canvas.width - x)
def push_file_data_to_heap(self, dsobject, path=None):
""" push contents of a data store object (assuming json encoding) """
if dsobject:
data = data_from_file(dsobject.file_path)
elif path is not None:
data = data_from_file(path)
else:
data = None
debug_output("No file to open", self.tw.running_sugar)
if data is not None:
for val in data:
self.heap.append(val)
self.update_label_value('pop', self.heap[-1])
def x2tx(self):
""" Convert screen coordinates to turtle coordinates """
return int(self.tw.canvas.width / 2) + \
int(self.tw.turtles.get_active_turtle().get_xy()[0])
def y2ty(self):
""" Convert screen coordinates to turtle coordinates """
return int(self.tw.canvas.height / 2) - \
int(self.tw.turtles.get_active_turtle().get_xy()[1])
def wpercent(self):
""" width as a percentage of screen coordinates """
return int((self.tw.canvas.width * self.scale) / 100.)
def hpercent(self):
""" height as a percentage of screen coordinates """
return int((self.tw.canvas.height * self.scale) / 100.)
    def insert_image(self, center=False, filepath=None, resize=True,
                     offset=False, pixbuf=False):
        """ Image only (at current x, y).

        center -- center the image on the turtle position
        filepath -- override self.filepath
        resize -- scale the image to wpercent/hpercent of the screen
        offset -- draw with the image bottom at the turtle position
        pixbuf -- use the already-loaded self.pixbuf (e.g. camera)
        """
        if filepath is not None:
            self.filepath = filepath
        if not pixbuf:
            self.pixbuf = None
        w, h = self.wpercent(), self.hpercent()
        if w < 1 or h < 1:
            return
        if pixbuf:  # We may have to rescale the picture
            if w != self.pixbuf.get_width() or h != self.pixbuf.get_height():
                self.pixbuf = self.pixbuf.scale_simple(
                    w, h, GdkPixbuf.InterpType.BILINEAR)
        elif self.dsobject is not None:
            try:
                self.pixbuf = get_pixbuf_from_journal(self.dsobject, w, h)
            except BaseException:
                debug_output("Couldn't open dsobject %s" % (self.dsobject),
                             self.tw.running_sugar)
        # Fall back to loading from the file path if the journal failed.
        if self.pixbuf is None and \
           self.filepath is not None and \
           self.filepath != '':
            try:
                if not resize:
                    self.pixbuf = GdkPixbuf.Pixbuf.new_from_file(self.filepath)
                    w = self.pixbuf.get_width()
                    h = self.pixbuf.get_height()
                else:
                    self.pixbuf = GdkPixbuf.Pixbuf.new_from_file_at_size(
                        self.filepath, w, h)
            except BaseException:
                self.tw.showlabel('nojournal', self.filepath)
                debug_output("Couldn't open filepath %s" % (self.filepath),
                             self.tw.running_sugar)
        if self.pixbuf is not None:
            # w, h are relative to screen size, not coord_scale
            # w *= self.tw.coord_scale
            # h *= self.tw.coord_scale
            if center:
                self.tw.turtles.get_active_turtle().draw_pixbuf(
                    self.pixbuf, 0, 0,
                    self.x2tx() - int(w / 2),
                    self.y2ty() - int(h / 2), w, h,
                    self.filepath)
            elif offset:
                self.tw.turtles.get_active_turtle().draw_pixbuf(
                    self.pixbuf, 0, 0,
                    self.x2tx(),
                    self.y2ty() - h,
                    w, h, self.filepath)
            else:
                self.tw.turtles.get_active_turtle().draw_pixbuf(
                    self.pixbuf, 0, 0,
                    self.x2tx(),
                    self.y2ty(),
                    w, h, self.filepath)
def insert_desc(self, mimetype=None, description=None):
""" Description text only (at current x, y) """
w = self.wpercent()
if w < 1:
return
text = None
if text_media_type(self.filepath):
if RTFPARSE and (
mimetype == 'application/rtf' or
self.filepath.endswith(('rtf'))):
text_only = RtfTextOnly()
for line in open(self.filepath, 'r'):
text_only.feed(line)
text = text_only.output
else:
try:
f = open(self.filepath, 'r')
text = f.read()
f.close()
except IOError:
self.tw.showlabel('nojournal', self.filepath)
debug_output("Couldn't open %s" % (self.filepath),
self.tw.running_sugar)
else:
if description is not None:
text = str(description)
else:
text = self.filepath
if text is not None:
self.tw.turtles.get_active_turtle().draw_text(
text, self.x2tx(), self.y2ty(), self.body_height, w)
    def media_wait(self):
        """ Wait for media to stop playing.

        This is a generator: it yields True while media is playing so the
        interpreter can keep servicing the UI, then signals completion via
        ireturn().
        """
        if self.tw.gst_available:
            from .tagplay import media_playing
            while(media_playing(self)):
                yield True
        self.ireturn()
        yield True
    def media_stop(self):
        """ Stop playing media (generator; signals completion via ireturn) """
        if self.tw.gst_available:
            from .tagplay import stop_media
            stop_media(self)
        self.ireturn()
        yield True
    def media_pause(self):
        """ Pause media (generator; signals completion via ireturn) """
        if self.tw.gst_available:
            from .tagplay import pause_media
            pause_media(self)
        self.ireturn()
        yield True
    def media_play(self):
        """ Play media (generator; signals completion via ireturn) """
        if self.tw.gst_available:
            from .tagplay import play_media
            play_media(self)
        self.ireturn()
        yield True
    def play_sound(self):
        """ Play the sound file at self.filepath (no-op without GStreamer) """
        if self.tw.gst_available:
            from .tagplay import play_audio_from_file
            play_audio_from_file(self, self.filepath)
    def play_video(self):
        """ Play the movie file at self.filepath (no-op without GStreamer) """
        w, h = self.wpercent(), self.hpercent()
        if w < 1 or h < 1:
            return
        if self.tw.gst_available:
            from .tagplay import play_movie_from_file
            # The video window is an overlay, so we need to know where
            # the canvas is relative to the window, e.g., which
            # toolbars, if any are open.
            yoffset = 0
            if self.tw.running_sugar:
                if not self.tw.activity.is_fullscreen():
                    yoffset += GRID_CELL_SIZE
                    if self.tw.activity.toolbars_expanded():
                        yoffset += GRID_CELL_SIZE
            play_movie_from_file(self, self.filepath, self.x2tx(),
                                 self.y2ty() + yoffset, w, h)
    def _expand_return(self, b, blk, blocks):
        """ Expand a returnstack block into action name, box '__return__'
        Parameters: the repeatstack block, the top block, all blocks.
        Returns (the new action block, all blocks)."""
        # We'll restore the original blocks when we are finished
        if self._save_blocks is None:
            self._save_blocks = blocks[:]
        # Create an action block and a box
        action_blk = HiddenBlock('stack')
        blocks.append(action_blk)
        box_blk = HiddenBlock('box')
        blocks.append(box_blk)
        box_label_blk = HiddenBlock('string', value='__return__')
        blocks.append(box_label_blk)
        # Make connections to substitute blocks
        inflow = None
        cblk = None
        # FIXME: Need to use a stack for return values
        # Find a flow block to use for adding the action blk.
        # Walk up the connection chain from b until a flow dock is found.
        tmp = b
        while tmp.connections[0] is not None:
            cblk = tmp.connections[0]
            if cblk.docks[0][0] == 'flow':
                break
            else:
                tmp = cblk
        if cblk is not None:
            # Splice action_blk between inflow and cblk.
            if cblk.connections[0] is not None:
                inflow = cblk.connections[0]
                inflow.connections[-1] = action_blk
            cblk.connections[0] = action_blk
            action_blk.connections.append(inflow)
            action_blk.docks.append(['flow', True, 0, 0])
            action_blk.connections.append(b.connections[1])
            b.connections[1].connections[0] = action_blk
            action_blk.docks.append(['string', False, 0, 0])
            action_blk.connections.append(cblk)
            action_blk.docks.append(['flow', False, 0, 0])
        # Replace the returnstack block with a box and label.
        box_label_blk.connections.append(box_blk)
        box_label_blk.docks.append(['string', True, 0, 0])
        box_blk.connections.append(b.connections[0])
        if b.connections[0] is not None:
            # Rewire any parent connections that pointed at b to point
            # at the new box block instead.
            for i in range(len(b.connections[0].connections)):
                if b.connections[0].connections[i] == b:
                    b.connections[0].connections[i] = box_blk
        box_blk.docks.append(['number', True, 0, 0])
        box_blk.connections.append(box_label_blk)
        box_blk.docks.append(['string', False, 0, 0])
        return action_blk, blocks
    def _expand_forever(self, b, blk, blocks):
        """ Expand a while or until block into: forever, ifelse, stopstack
        Expand a forever block to run in a separate stack
        Parameters: the loop block, the top block, all blocks.
        Return the start block of the expanded loop, and all blocks."""
        # TODO: create a less brittle way of doing this; having to
        # manage the connections and flows locally means we may run
        # into trouble if any of these block types (forever, while,
        # until. ifelse, stopstack, or stack) is changed in tablock.py
        if b.name == 'while':
            while_blk = True
        else:
            while_blk = False
        if b.name == 'until':
            until_blk = True
        else:
            until_blk = False
        # We'll restore the original blocks when we are finished
        if self._save_blocks is None:
            self._save_blocks = blocks[:]
        # Create an action block that will jump to the new stack
        action_name = '_forever %d' % (len(self._save_while_blocks) + 1)
        action_blk = HiddenBlock('stack')
        action_label_blk = HiddenBlock('string', value=action_name)
        # Create the blocks we'll put in the new stack
        forever_blk = HiddenBlock('forever')
        if while_blk or until_blk:
            ifelse_blk = HiddenBlock('ifelse')
            stopstack_blk = HiddenBlock('stopstack')
        inflow = None
        whileflow = None
        outflow = None
        boolflow = None
        if b.connections is not None:
            inflow = b.connections[0]
            if while_blk or until_blk:
                boolflow = b.connections[1]
            whileflow = b.connections[-2]
            outflow = b.connections[-1]
        # Create action block(s) to run the code inside the forever loop
        if until_blk and whileflow is not None:  # run until flow at least once
            action_flow_name = '_flow %d' % (len(self._save_while_blocks) + 1)
            action_first = HiddenBlock('stack')
            first_label_blk = HiddenBlock('string', value=action_flow_name)
        # Assign new connections and build the docks
        if inflow is not None and b in inflow.connections:
            i = inflow.connections.index(b)
            if until_blk and whileflow is not None:
                inflow.connections[i] = action_first
            else:
                inflow.connections[i] = action_blk
        else:
            i = None
        j = None
        if outflow is not None:
            if b in outflow.connections:
                j = outflow.connections.index(b)
                outflow.connections[j] = action_blk
        # Wire up the action stack-call block(s) in place of the loop block.
        if until_blk and whileflow is not None:
            action_first.connections.append(inflow)
            action_first.docks.append(['flow', True, 0, 0])
            action_first.connections.append(first_label_blk)
            action_first.docks.append(['number', False, 0, 0])
            action_first.connections.append(action_blk)
            action_first.docks.append(['flow', False, 0, 0])
            first_label_blk.connections.append(action_first)
            first_label_blk.docks.append(['number', True, 0, 0])
            action_blk.connections.append(action_first)
        else:
            action_blk.connections.append(inflow)
        action_blk.docks.append(['flow', True, 0, 0])
        action_blk.connections.append(action_label_blk)
        action_blk.docks.append(['number', False, 0, 0])
        action_blk.connections.append(outflow)
        action_blk.docks.append(['flow', False, 0, 0])
        action_label_blk.connections.append(action_blk)
        action_label_blk.docks.append(['number', True, 0, 0])
        # Wire up the hidden forever loop (with ifelse for while/until).
        forever_blk.connections.append(None)
        forever_blk.docks.append(['flow', True, 0, 0])
        if while_blk or until_blk:
            forever_blk.connections.append(ifelse_blk)
        else:
            forever_blk.connections.append(whileflow)
        forever_blk.docks.append(['flow', False, 0, 0, '['])
        forever_blk.connections.append(outflow)
        forever_blk.docks.append(['flow', False, 0, 0, ']'])
        if while_blk or until_blk:
            ifelse_blk.connections.append(forever_blk)
            ifelse_blk.docks.append(['flow', True, 0, 0])
            ifelse_blk.connections.append(boolflow)
            ifelse_blk.docks.append(['bool', False, 0, 0])
            if while_blk:
                # while: keep running the body while the condition holds.
                ifelse_blk.connections.append(whileflow)
                ifelse_blk.connections.append(stopstack_blk)
            else:  # until
                # until: stop as soon as the condition holds.
                ifelse_blk.connections.append(stopstack_blk)
                ifelse_blk.connections.append(whileflow)
            ifelse_blk.docks.append(['flow', False, 0, 0, '['])
            ifelse_blk.docks.append(['flow', False, 0, 0, ']['])
            ifelse_blk.connections.append(None)
            ifelse_blk.docks.append(['flow', False, 0, 0, ']'])
            stopstack_blk.connections.append(ifelse_blk)
            stopstack_blk.docks.append(['flow', False, 0, 0])
        if whileflow is not None:
            if while_blk or until_blk:
                whileflow.connections[0] = ifelse_blk
            else:
                whileflow.connections[0] = forever_blk
        # Create a separate stacks for the forever loop and the whileflow
        code = self._blocks_to_code(forever_blk)
        self.stacks[self._get_stack_key(action_name)] = self._readline(code)
        if until_blk and whileflow is not None:
            # Create a stack from the whileflow to be called from
            # action_first, but then reconnect it to the ifelse block
            c = whileflow.connections[0]
            whileflow.connections[0] = None
            code = self._blocks_to_code(whileflow)
            self.stacks[self._get_stack_key(action_flow_name)] = \
                self._readline(code)
            whileflow.connections[0] = c
        # Save the connections so we can restore them later
        if whileflow is not None:
            self._save_while_blocks.append([b, i, j, 0])
        else:
            self._save_while_blocks.append([b, i, j, None])
        # Insert the new blocks into the stack
        i = blocks.index(b)
        if i == 0:
            blocks_left = []
        else:
            blocks_left = blocks[0:i]
        if i == len(blocks) - 1:
            blocks_right = []
        else:
            blocks_right = blocks[i + 1:]
        blocks = blocks_left[:]
        if until_blk and whileflow is not None:
            blocks.append(action_first)
        blocks.append(action_blk)
        blocks.append(forever_blk)
        if while_blk or until_blk:
            blocks.append(ifelse_blk)
            blocks.append(stopstack_blk)
        blocks.extend(blocks_right)
        if until_blk and whileflow is not None:
            return action_first, blocks
        else:
            return action_blk, blocks
| mit |
jameshensman/pymc3 | pymc3/tests/models.py | 14 | 2427 | from pymc3 import Model, Normal, Metropolis, MvNormal
import numpy as np
import pymc3 as pm
from itertools import product
from theano.tensor import log
def simple_model():
    """Build a 2-vector Normal model; return (test_point, model, (mu, tau**-1))."""
    mu, tau = -2.1, 1.3
    with Model() as model:
        Normal('x', mu, tau, shape=2, testval=[.1] * 2)
    return model.test_point, model, (mu, tau ** -1)
def multidimensional_model():
    """Build a 3x2 Normal model; return (test_point, model, (mu, tau**-1))."""
    mu, tau = -2.1, 1.3
    with Model() as model:
        Normal('x', mu, tau, shape=(3, 2), testval=.1 * np.ones((3, 2)))
    return model.test_point, model, (mu, tau ** -1)
def simple_init():
    """Return (model, start point, Metropolis step, analytic moments)."""
    start, model, moments = simple_model()
    sampler = Metropolis(model.vars, np.diag([1.]), model=model)
    return model, start, sampler, moments
def simple_2model():
    """Normal + Deterministic(log) + Bernoulli model; return (test_point, model)."""
    mu = -2.1
    tau = 1.3
    p = .4
    with Model() as model:
        x = pm.Normal('x', mu, tau, testval=.1)
        pm.Deterministic('logx', log(x))
        pm.Bernoulli('y', p)
    return model.test_point, model
def mv_simple():
    """MvNormal model; return (test_point, model, (mu, covariance))."""
    mu = np.array([-.1, .5, 1.1])
    p = np.array([
        [2., 0, 0],
        [.05, .1, 0],
        [1., -0.05, 5.5]])
    # Precision matrix built from a lower-triangular factor.
    tau = np.dot(p, p.T)
    with pm.Model() as model:
        pm.MvNormal('x', pm.constant(mu), pm.constant(tau), shape=3,
                    testval=np.array([.1, 1., .8]))
    C = np.linalg.inv(tau)
    return model.test_point, model, (mu, C)
def mv_simple_discrete():
    """Multinomial model; return (test_point, model, (mean, covariance))."""
    d = 2
    n = 5
    p = np.array([.15, .85])
    with pm.Model() as model:
        pm.Multinomial('x', n, pm.constant(p), shape=d,
                       testval=np.array([1, 4]))
    mu = n * p
    # Multinomial covariance: -n*p_i*p_j off-diagonal, n*p_i*(1-p_i) on it.
    C = -n * p[:, None] * p[None, :]
    np.fill_diagonal(C, n * p * (1 - p))
    return model.test_point, model, (mu, C)
def non_normal(n=2):
    """Beta(3, 3) model of length n; return (test_point, model, (.5s, None))."""
    with pm.Model() as model:
        pm.Beta('x', 3, 3, shape=n, transform=None)
    return model.test_point, model, (np.tile([.5], n), None)
def exponential_beta(n=2):
    """Beta(3, 1) and Exponential(1) model of length n."""
    with pm.Model() as model:
        pm.Beta('x', 3, 1, shape=n, transform=None)
        pm.Exponential('y', 1, shape=n, transform=None)
    return model.test_point, model, None
def beta_bernoulli(n=2):
    """Beta(3, 1) of length n plus a Bernoulli(0.5)."""
    with pm.Model() as model:
        pm.Beta('x', 3, 1, shape=n, transform=None)
        pm.Bernoulli('y', 0.5)
    return model.test_point, model, None
| apache-2.0 |
lexus24/40223224final | static/Brython3.1.1-20150328-091302/Lib/genericpath.py | 727 | 3093 | """
Path operations common to more than one OS
Do not use directly. The OS specific modules import the appropriate
functions from this module themselves.
"""
import os
import stat
__all__ = ['commonprefix', 'exists', 'getatime', 'getctime', 'getmtime',
'getsize', 'isdir', 'isfile']
# Does a path exist?
# This is false for dangling symbolic links on systems that support them.
def exists(path):
    """Test whether a path exists.  Returns False for broken symbolic links"""
    try:
        os.stat(path)
    except os.error:
        return False
    else:
        return True
# This follows symbolic links, so both islink() and isdir() can be true
# for the same path on systems that support symlinks
def isfile(path):
    """Test whether a path is a regular file"""
    try:
        mode = os.stat(path).st_mode
    except os.error:
        return False
    return stat.S_ISREG(mode)
# Is a path a directory?
# This follows symbolic links, so both islink() and isdir()
# can be true for the same path on systems that support symlinks
def isdir(s):
    """Return true if the pathname refers to an existing directory."""
    try:
        mode = os.stat(s).st_mode
    except os.error:
        return False
    return stat.S_ISDIR(mode)
def getsize(filename):
    """Return the size of a file, reported by os.stat()."""
    status = os.stat(filename)
    return status.st_size
def getmtime(filename):
    """Return the last modification time of a file, reported by os.stat()."""
    status = os.stat(filename)
    return status.st_mtime
def getatime(filename):
    """Return the last access time of a file, reported by os.stat()."""
    status = os.stat(filename)
    return status.st_atime
def getctime(filename):
    """Return the metadata change time of a file, reported by os.stat()."""
    status = os.stat(filename)
    return status.st_ctime
# Return the longest prefix of all list elements.
def commonprefix(m):
    "Given a list of pathnames, returns the longest common leading component"
    if not m:
        return ''
    # The lexicographic min and max differ first exactly where the whole
    # list stops sharing a common prefix, so only those two need checking.
    lo, hi = min(m), max(m)
    for i in range(len(lo)):
        if lo[i] != hi[i]:
            return lo[:i]
    return lo
# Split a path in root and extension.
# The extension is everything starting at the last dot in the last
# pathname component; the root is everything before that.
# It is always true that root + ext == p.
# Generic implementation of splitext, to be parametrized with
# the separators
def _splitext(p, sep, altsep, extsep):
"""Split the extension from a pathname.
Extension is everything from the last dot to the end, ignoring
leading dots. Returns "(root, ext)"; ext may be empty."""
# NOTE: This code must work for text and bytes strings.
sepIndex = p.rfind(sep)
if altsep:
altsepIndex = p.rfind(altsep)
sepIndex = max(sepIndex, altsepIndex)
dotIndex = p.rfind(extsep)
if dotIndex > sepIndex:
# skip all leading dots
filenameIndex = sepIndex + 1
while filenameIndex < dotIndex:
if p[filenameIndex:filenameIndex+1] != extsep:
return p[:dotIndex], p[dotIndex:]
filenameIndex += 1
return p, p[:0]
| gpl-3.0 |
brianwoo/django-tutorial | build/Django/tests/custom_lookups/tests.py | 6 | 22331 | from __future__ import unicode_literals
import contextlib
import time
import unittest
from datetime import date, datetime
from django.core.exceptions import FieldError
from django.db import connection, models
from django.test import TestCase, override_settings
from django.utils import timezone
from .models import Author, MySQLUnixTimestamp
@contextlib.contextmanager
def register_lookup(field, *lookups):
    """Register lookups on field for the duration of the with-block."""
    try:
        for lkp in lookups:
            field.register_lookup(lkp)
        yield
    finally:
        # Unregister everything, even if registration failed part-way.
        for lkp in lookups:
            field._unregister_lookup(lkp)
class Div3Lookup(models.Lookup):
    """Lookup matching rows where the column modulo 3 equals the rhs."""
    lookup_name = 'div3'

    def as_sql(self, compiler, connection):
        lhs_sql, params = self.process_lhs(compiler, connection)
        rhs_sql, rhs_params = self.process_rhs(compiler, connection)
        params.extend(rhs_params)
        return '(%s) %%%% 3 = %s' % (lhs_sql, rhs_sql), params

    def as_oracle(self, compiler, connection):
        # Oracle spells modulo with the mod() function.
        lhs_sql, params = self.process_lhs(compiler, connection)
        rhs_sql, rhs_params = self.process_rhs(compiler, connection)
        params.extend(rhs_params)
        return 'mod(%s, 3) = %s' % (lhs_sql, rhs_sql), params
class Div3Transform(models.Transform):
    """Transform computing the column value modulo 3."""
    lookup_name = 'div3'

    def as_sql(self, compiler, connection):
        column_sql, params = compiler.compile(self.lhs)
        return '(%s) %%%% 3' % column_sql, params

    def as_oracle(self, compiler, connection):
        # Oracle spells modulo with the mod() function.
        column_sql, params = compiler.compile(self.lhs)
        return 'mod(%s, 3)' % column_sql, params
class Div3BilateralTransform(Div3Transform):
    # bilateral=True makes Django apply the transform to both sides of
    # the comparison (lhs column and rhs value).
    bilateral = True
class Mult3BilateralTransform(models.Transform):
    """Bilateral transform multiplying both comparison sides by 3."""
    bilateral = True
    lookup_name = 'mult3'

    def as_sql(self, compiler, connection):
        column_sql, params = compiler.compile(self.lhs)
        return '3 * (%s)' % column_sql, params
class UpperBilateralTransform(models.Transform):
    """Bilateral transform upper-casing both comparison sides."""
    bilateral = True
    lookup_name = 'upper'

    def as_sql(self, compiler, connection):
        column_sql, params = compiler.compile(self.lhs)
        return 'UPPER(%s)' % column_sql, params
class YearTransform(models.Transform):
    """Transform extracting the year of a date column as an integer."""
    lookup_name = 'year'

    @property
    def output_field(self):
        return models.IntegerField()

    def as_sql(self, compiler, connection):
        column_sql, params = compiler.compile(self.lhs)
        return connection.ops.date_extract_sql('year', column_sql), params
@YearTransform.register_lookup
class YearExact(models.lookups.Lookup):
    """Exact-year lookup that avoids EXTRACT by range-comparing dates."""
    lookup_name = 'exact'

    def as_sql(self, compiler, connection):
        # We will need to skip the extract part, and instead go
        # directly with the originating field, that is self.lhs.lhs
        lhs_sql, lhs_params = self.process_lhs(compiler, connection, self.lhs.lhs)
        rhs_sql, rhs_params = self.process_rhs(compiler, connection)
        # Note that we must be careful so that we have params in the
        # same order as we have the parts in the SQL.
        params = lhs_params + rhs_params + lhs_params + rhs_params
        # We use PostgreSQL specific SQL here. Note that we must do the
        # conversions in SQL instead of in Python to support F() references.
        return ("%(lhs)s >= (%(rhs)s || '-01-01')::date "
                "AND %(lhs)s <= (%(rhs)s || '-12-31')::date" %
                {'lhs': lhs_sql, 'rhs': rhs_sql}, params)
@YearTransform.register_lookup
class YearLte(models.lookups.LessThanOrEqual):
    """
    The purpose of this lookup is to efficiently compare the year of the field.
    """
    def as_sql(self, compiler, connection):
        # Skip the YearTransform above us (no possibility for efficient
        # lookup otherwise).
        real_lhs = self.lhs.lhs
        lhs_sql, params = self.process_lhs(compiler, connection, real_lhs)
        rhs_sql, rhs_params = self.process_rhs(compiler, connection)
        params.extend(rhs_params)
        # Build SQL where the integer year is concatenated with last month
        # and day, then convert that to date. (We try to have SQL like:
        # WHERE somecol <= '2013-12-31')
        # but also make it work if the rhs_sql is field reference.
        return "%s <= (%s || '-12-31')::date" % (lhs_sql, rhs_sql), params
class SQLFunc(models.Lookup):
    """Lookup that renders as a bare parameterized SQL function name."""
    def __init__(self, name, *args, **kwargs):
        super(SQLFunc, self).__init__(*args, **kwargs)
        # The function name is passed through as a query parameter.
        self.name = name

    def as_sql(self, compiler, connection):
        return '%s()', [self.name]

    @property
    def output_field(self):
        return CustomField()
class SQLFuncFactory(object):
    """Callable factory producing SQLFunc lookups bound to a fixed name."""
    def __init__(self, name):
        self.name = name

    def __call__(self, *args, **kwargs):
        return SQLFunc(self.name, *args, **kwargs)
class CustomField(models.TextField):
    """TextField fabricating lookups/transforms from magic name prefixes."""

    def get_lookup(self, lookup_name):
        if lookup_name.startswith('lookupfunc_'):
            # Everything after the first underscore is the function name.
            _prefix, _, func_name = lookup_name.partition('_')
            return SQLFuncFactory(func_name)
        return super(CustomField, self).get_lookup(lookup_name)

    def get_transform(self, lookup_name):
        if lookup_name.startswith('transformfunc_'):
            _prefix, _, func_name = lookup_name.partition('_')
            return SQLFuncFactory(func_name)
        return super(CustomField, self).get_transform(lookup_name)
class CustomModel(models.Model):
    # Model exercising CustomField's dynamic lookup/transform resolution.
    field = CustomField()
# We will register this class temporarily in the test method.
class InMonth(models.lookups.Lookup):
    """
    InMonth matches if the column's month is the same as value's month.
    """
    lookup_name = 'inmonth'

    def as_sql(self, compiler, connection):
        lhs, lhs_params = self.process_lhs(compiler, connection)
        rhs, rhs_params = self.process_rhs(compiler, connection)
        # We need to be careful so that we get the params in right
        # places.
        params = lhs_params + rhs_params + lhs_params + rhs_params
        # PostgreSQL-specific: truncate both sides to month boundaries.
        return ("%s >= date_trunc('month', %s) and "
                "%s < date_trunc('month', %s) + interval '1 months'" %
                (lhs, rhs, lhs, rhs), params)
class DateTimeTransform(models.Transform):
    """Transform interpreting an integer column via MySQL from_unixtime()."""
    lookup_name = 'as_datetime'

    @property
    def output_field(self):
        return models.DateTimeField()

    def as_sql(self, compiler, connection):
        column_sql, params = compiler.compile(self.lhs)
        return 'from_unixtime({})'.format(column_sql), params
class LookupTests(TestCase):
    """Integration tests for custom lookups registered on model fields."""
    def test_basic_lookup(self):
        # age__div3 should bucket authors by age modulo 3.
        a1 = Author.objects.create(name='a1', age=1)
        a2 = Author.objects.create(name='a2', age=2)
        a3 = Author.objects.create(name='a3', age=3)
        a4 = Author.objects.create(name='a4', age=4)
        with register_lookup(models.IntegerField, Div3Lookup):
            self.assertQuerysetEqual(
                Author.objects.filter(age__div3=0),
                [a3], lambda x: x
            )
            self.assertQuerysetEqual(
                Author.objects.filter(age__div3=1).order_by('age'),
                [a1, a4], lambda x: x
            )
            self.assertQuerysetEqual(
                Author.objects.filter(age__div3=2),
                [a2], lambda x: x
            )
            self.assertQuerysetEqual(
                Author.objects.filter(age__div3=3),
                [], lambda x: x
            )

    @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific SQL used")
    def test_birthdate_month(self):
        # birthdate__inmonth matches any date within the value's month.
        a1 = Author.objects.create(name='a1', birthdate=date(1981, 2, 16))
        a2 = Author.objects.create(name='a2', birthdate=date(2012, 2, 29))
        a3 = Author.objects.create(name='a3', birthdate=date(2012, 1, 31))
        a4 = Author.objects.create(name='a4', birthdate=date(2012, 3, 1))
        with register_lookup(models.DateField, InMonth):
            self.assertQuerysetEqual(
                Author.objects.filter(birthdate__inmonth=date(2012, 1, 15)),
                [a3], lambda x: x
            )
            self.assertQuerysetEqual(
                Author.objects.filter(birthdate__inmonth=date(2012, 2, 1)),
                [a2], lambda x: x
            )
            self.assertQuerysetEqual(
                Author.objects.filter(birthdate__inmonth=date(1981, 2, 28)),
                [a1], lambda x: x
            )
            self.assertQuerysetEqual(
                Author.objects.filter(birthdate__inmonth=date(2012, 3, 12)),
                [a4], lambda x: x
            )
            self.assertQuerysetEqual(
                Author.objects.filter(birthdate__inmonth=date(2012, 4, 1)),
                [], lambda x: x
            )

    def test_div3_extract(self):
        # The div3 transform composes with other lookups (lte, in, range).
        with register_lookup(models.IntegerField, Div3Transform):
            a1 = Author.objects.create(name='a1', age=1)
            a2 = Author.objects.create(name='a2', age=2)
            a3 = Author.objects.create(name='a3', age=3)
            a4 = Author.objects.create(name='a4', age=4)
            baseqs = Author.objects.order_by('name')
            self.assertQuerysetEqual(
                baseqs.filter(age__div3=2),
                [a2], lambda x: x)
            self.assertQuerysetEqual(
                baseqs.filter(age__div3__lte=3),
                [a1, a2, a3, a4], lambda x: x)
            self.assertQuerysetEqual(
                baseqs.filter(age__div3__in=[0, 2]),
                [a2, a3], lambda x: x)
            self.assertQuerysetEqual(
                baseqs.filter(age__div3__in=[2, 4]),
                [a2], lambda x: x)
            self.assertQuerysetEqual(
                baseqs.filter(age__div3__gte=3),
                [], lambda x: x)
            self.assertQuerysetEqual(
                baseqs.filter(age__div3__range=(1, 2)),
                [a1, a2, a4], lambda x: x)
class BilateralTransformTests(TestCase):
    """Bilateral transforms are applied to *both* sides of a comparison,
    which changes the matching semantics (contrast with LookupTests).
    """
    def test_bilateral_upper(self):
        with register_lookup(models.CharField, UpperBilateralTransform):
            Author.objects.bulk_create([
                Author(name='Doe'),
                Author(name='doe'),
                Author(name='Foo'),
            ])
            # Both the column and the rhs value are upper()-ed, so the
            # comparison is effectively case-insensitive.
            self.assertQuerysetEqual(
                Author.objects.filter(name__upper='doe'),
                ["<Author: Doe>", "<Author: doe>"], ordered=False)
            self.assertQuerysetEqual(
                Author.objects.filter(name__upper__contains='f'),
                ["<Author: Foo>"], ordered=False)
    def test_bilateral_inner_qs(self):
        # Bilateral transforms can't be applied to a subquery rhs.
        with register_lookup(models.CharField, UpperBilateralTransform):
            with self.assertRaises(NotImplementedError):
                Author.objects.filter(name__upper__in=Author.objects.values_list('name'))
    def test_div3_bilateral_extract(self):
        # With a *bilateral* div3, the rhs values are reduced mod 3 as well,
        # so the expected results differ from LookupTests.test_div3_extract.
        with register_lookup(models.IntegerField, Div3BilateralTransform):
            a1 = Author.objects.create(name='a1', age=1)
            a2 = Author.objects.create(name='a2', age=2)
            a3 = Author.objects.create(name='a3', age=3)
            a4 = Author.objects.create(name='a4', age=4)
            baseqs = Author.objects.order_by('name')
            self.assertQuerysetEqual(
                baseqs.filter(age__div3=2),
                [a2], lambda x: x)
            # rhs 3 becomes 0 after the transform, so only age % 3 == 0 matches.
            self.assertQuerysetEqual(
                baseqs.filter(age__div3__lte=3),
                [a3], lambda x: x)
            self.assertQuerysetEqual(
                baseqs.filter(age__div3__in=[0, 2]),
                [a2, a3], lambda x: x)
            # rhs 4 becomes 1, so ages 1 and 4 match alongside age 2.
            self.assertQuerysetEqual(
                baseqs.filter(age__div3__in=[2, 4]),
                [a1, a2, a4], lambda x: x)
            self.assertQuerysetEqual(
                baseqs.filter(age__div3__gte=3),
                [a1, a2, a3, a4], lambda x: x)
            self.assertQuerysetEqual(
                baseqs.filter(age__div3__range=(1, 2)),
                [a1, a2, a4], lambda x: x)
    def test_bilateral_order(self):
        # Chained bilateral transforms are applied in chain order.
        with register_lookup(models.IntegerField, Mult3BilateralTransform, Div3BilateralTransform):
            a1 = Author.objects.create(name='a1', age=1)
            a2 = Author.objects.create(name='a2', age=2)
            a3 = Author.objects.create(name='a3', age=3)
            a4 = Author.objects.create(name='a4', age=4)
            baseqs = Author.objects.order_by('name')
            self.assertQuerysetEqual(
                baseqs.filter(age__mult3__div3=42),
                # mult3__div3 always leads to 0
                [a1, a2, a3, a4], lambda x: x)
            self.assertQuerysetEqual(
                baseqs.filter(age__div3__mult3=42),
                [a3], lambda x: x)
    def test_bilateral_fexpr(self):
        # The bilateral transform is applied to F() expressions too.
        with register_lookup(models.IntegerField, Mult3BilateralTransform):
            a1 = Author.objects.create(name='a1', age=1, average_rating=3.2)
            a2 = Author.objects.create(name='a2', age=2, average_rating=0.5)
            a3 = Author.objects.create(name='a3', age=3, average_rating=1.5)
            a4 = Author.objects.create(name='a4', age=4)
            baseqs = Author.objects.order_by('name')
            # Both sides become age * 3, so every row matches.
            self.assertQuerysetEqual(
                baseqs.filter(age__mult3=models.F('age')),
                [a1, a2, a3, a4], lambda x: x)
            self.assertQuerysetEqual(
                # Same as age >= average_rating
                baseqs.filter(age__mult3__gte=models.F('average_rating')),
                [a2, a3], lambda x: x)
@override_settings(USE_TZ=True)
class DateTimeLookupTests(TestCase):
    """Transforms whose output_field differs from the stored type."""
    @unittest.skipUnless(connection.vendor == 'mysql', "MySQL specific SQL used")
    def test_datetime_output_field(self):
        # DateTimeTransform exposes the integer timestamp column as a
        # datetime (via from_unixtime), so datetime comparisons work.
        with register_lookup(models.PositiveIntegerField, DateTimeTransform):
            ut = MySQLUnixTimestamp.objects.create(timestamp=time.time())
            y2k = timezone.make_aware(datetime(2000, 1, 1))
            self.assertQuerysetEqual(
                MySQLUnixTimestamp.objects.filter(timestamp__as_datetime__gt=y2k),
                [ut], lambda x: x)
class YearLteTests(TestCase):
    """Tests for YearTransform plus the custom YearExact/YearLte lookups.
    The transform is registered globally in setUp (rather than via a
    context manager) and removed again in tearDown.
    """
    def setUp(self):
        models.DateField.register_lookup(YearTransform)
        self.a1 = Author.objects.create(name='a1', birthdate=date(1981, 2, 16))
        self.a2 = Author.objects.create(name='a2', birthdate=date(2012, 2, 29))
        self.a3 = Author.objects.create(name='a3', birthdate=date(2012, 1, 31))
        self.a4 = Author.objects.create(name='a4', birthdate=date(2012, 3, 1))
    def tearDown(self):
        # Undo the global registration so other tests aren't affected.
        models.DateField._unregister_lookup(YearTransform)
    @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific SQL used")
    def test_year_lte(self):
        baseqs = Author.objects.order_by('name')
        self.assertQuerysetEqual(
            baseqs.filter(birthdate__year__lte=2012),
            [self.a1, self.a2, self.a3, self.a4], lambda x: x)
        self.assertQuerysetEqual(
            baseqs.filter(birthdate__year=2012),
            [self.a2, self.a3, self.a4], lambda x: x)
        # The optimized YearExact implementation must not fall back to the
        # default BETWEEN clause.
        self.assertNotIn('BETWEEN', str(baseqs.filter(birthdate__year=2012).query))
        self.assertQuerysetEqual(
            baseqs.filter(birthdate__year__lte=2011),
            [self.a1], lambda x: x)
        # The non-optimized version works, too.
        self.assertQuerysetEqual(
            baseqs.filter(birthdate__year__lt=2012),
            [self.a1], lambda x: x)
    @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific SQL used")
    def test_year_lte_fexpr(self):
        # Store comparison years in the (integer) age field so the lookup
        # can be exercised against an F() expression.
        self.a2.age = 2011
        self.a2.save()
        self.a3.age = 2012
        self.a3.save()
        self.a4.age = 2013
        self.a4.save()
        baseqs = Author.objects.order_by('name')
        self.assertQuerysetEqual(
            baseqs.filter(birthdate__year__lte=models.F('age')),
            [self.a3, self.a4], lambda x: x)
        self.assertQuerysetEqual(
            baseqs.filter(birthdate__year__lt=models.F('age')),
            [self.a4], lambda x: x)
    def test_year_lte_sql(self):
        # This test will just check the generated SQL for __lte. This
        # doesn't require running on PostgreSQL and spots the most likely
        # error - not running YearLte SQL at all.
        baseqs = Author.objects.order_by('name')
        self.assertIn(
            '<= (2011 || ', str(baseqs.filter(birthdate__year__lte=2011).query))
        self.assertIn(
            '-12-31', str(baseqs.filter(birthdate__year__lte=2011).query))
    def test_postgres_year_exact(self):
        # YearExact rewrites year equality as a comparison against the
        # year's last day built via string concatenation.
        baseqs = Author.objects.order_by('name')
        self.assertIn(
            '= (2011 || ', str(baseqs.filter(birthdate__year=2011).query))
        self.assertIn(
            '-12-31', str(baseqs.filter(birthdate__year=2011).query))
    def test_custom_implementation_year_exact(self):
        try:
            # Two ways to add a customized implementation for different backends:
            # First is MonkeyPatch of the class.
            def as_custom_sql(self, compiler, connection):
                lhs_sql, lhs_params = self.process_lhs(compiler, connection, self.lhs.lhs)
                rhs_sql, rhs_params = self.process_rhs(compiler, connection)
                params = lhs_params + rhs_params + lhs_params + rhs_params
                return ("%(lhs)s >= str_to_date(concat(%(rhs)s, '-01-01'), '%%%%Y-%%%%m-%%%%d') "
                        "AND %(lhs)s <= str_to_date(concat(%(rhs)s, '-12-31'), '%%%%Y-%%%%m-%%%%d')" %
                        {'lhs': lhs_sql, 'rhs': rhs_sql}, params)
            setattr(YearExact, 'as_' + connection.vendor, as_custom_sql)
            self.assertIn(
                'concat(',
                str(Author.objects.filter(birthdate__year=2012).query))
        finally:
            # Always remove the monkey-patch, even if the assertion failed.
            delattr(YearExact, 'as_' + connection.vendor)
        try:
            # The other way is to subclass the original lookup and register the subclassed
            # lookup instead of the original.
            class CustomYearExact(YearExact):
                # This method should be named "as_mysql" for MySQL, "as_postgresql" for postgres
                # and so on, but as we don't know which DB we are running on, we need to use
                # setattr.
                def as_custom_sql(self, compiler, connection):
                    lhs_sql, lhs_params = self.process_lhs(compiler, connection, self.lhs.lhs)
                    rhs_sql, rhs_params = self.process_rhs(compiler, connection)
                    params = lhs_params + rhs_params + lhs_params + rhs_params
                    return ("%(lhs)s >= str_to_date(CONCAT(%(rhs)s, '-01-01'), '%%%%Y-%%%%m-%%%%d') "
                            "AND %(lhs)s <= str_to_date(CONCAT(%(rhs)s, '-12-31'), '%%%%Y-%%%%m-%%%%d')" %
                            {'lhs': lhs_sql, 'rhs': rhs_sql}, params)
            setattr(CustomYearExact, 'as_' + connection.vendor, CustomYearExact.as_custom_sql)
            YearTransform.register_lookup(CustomYearExact)
            self.assertIn(
                'CONCAT(',
                str(Author.objects.filter(birthdate__year=2012).query))
        finally:
            # Restore the original lookup registration.
            YearTransform._unregister_lookup(CustomYearExact)
            YearTransform.register_lookup(YearExact)
class TrackCallsYearTransform(YearTransform):
    """YearTransform variant that records each get_lookup()/get_transform()
    call, so tests can assert the resolution order of filter expressions.
    """
    # Registered under the same name as YearTransform, replacing it for the
    # duration of the test registration.
    lookup_name = 'year'
    # Class-level (shared) record of resolution calls; tests reset it to []
    # between assertions.
    call_order = []
    def as_sql(self, compiler, connection):
        lhs_sql, params = compiler.compile(self.lhs)
        return connection.ops.date_extract_sql('year', lhs_sql), params
    @property
    def output_field(self):
        # EXTRACT(year ...) produces an integer.
        return models.IntegerField()
    def get_lookup(self, lookup_name):
        self.call_order.append('lookup')
        return super(TrackCallsYearTransform, self).get_lookup(lookup_name)
    def get_transform(self, lookup_name):
        self.call_order.append('transform')
        return super(TrackCallsYearTransform, self).get_transform(lookup_name)
class LookupTransformCallOrderTests(TestCase):
    """Asserts the order in which get_lookup()/get_transform() are consulted
    when resolving the pieces of a filter expression (via the recording
    TrackCallsYearTransform).
    """
    def test_call_order(self):
        with register_lookup(models.DateField, TrackCallsYearTransform):
            # junk lookup - tries lookup, then transform, then fails
            with self.assertRaises(FieldError):
                Author.objects.filter(birthdate__year__junk=2012)
            self.assertEqual(TrackCallsYearTransform.call_order,
                             ['lookup', 'transform'])
            TrackCallsYearTransform.call_order = []
            # junk transform - tries transform only, then fails
            with self.assertRaises(FieldError):
                Author.objects.filter(birthdate__year__junk__more_junk=2012)
            self.assertEqual(TrackCallsYearTransform.call_order,
                             ['transform'])
            TrackCallsYearTransform.call_order = []
            # Just getting the year (implied __exact) - lookup only
            Author.objects.filter(birthdate__year=2012)
            self.assertEqual(TrackCallsYearTransform.call_order,
                             ['lookup'])
            TrackCallsYearTransform.call_order = []
            # Just getting the year (explicit __exact) - lookup only
            Author.objects.filter(birthdate__year__exact=2012)
            self.assertEqual(TrackCallsYearTransform.call_order,
                             ['lookup'])
class CustomisedMethodsTests(TestCase):
    """Checks that overridden get_lookup()/get_transform() hooks on a field
    are honoured, including when chained.
    """
    def _assert_marker_in_sql(self, queryset, marker):
        # Each custom hook leaves a recognizable function call in the SQL.
        self.assertIn(marker, str(queryset.query))
    def test_overridden_get_lookup(self):
        qs = CustomModel.objects.filter(field__lookupfunc_monkeys=3)
        self._assert_marker_in_sql(qs, 'monkeys()')
    def test_overridden_get_transform(self):
        qs = CustomModel.objects.filter(field__transformfunc_banana=3)
        self._assert_marker_in_sql(qs, 'banana()')
    def test_overridden_get_lookup_chain(self):
        qs = CustomModel.objects.filter(field__transformfunc_banana__lookupfunc_elephants=3)
        self._assert_marker_in_sql(qs, 'elephants()')
    def test_overridden_get_transform_chain(self):
        qs = CustomModel.objects.filter(field__transformfunc_banana__transformfunc_pear=3)
        self._assert_marker_in_sql(qs, 'pear()')
class SubqueryTransformTests(TestCase):
    """A registered transform must also work inside an __in subquery."""
    def test_subquery_usage(self):
        with register_lookup(models.IntegerField, Div3Transform):
            # Four authors, ages 1..4; only age 2 satisfies age % 3 == 2.
            authors = [
                Author.objects.create(name='a%d' % i, age=i)
                for i in range(1, 5)
            ]
            inner = Author.objects.filter(age__div3=2)
            outer = Author.objects.order_by('name').filter(id__in=inner)
            self.assertQuerysetEqual(outer, [authors[1]], lambda x: x)
| gpl-3.0 |
eonezhang/avro | lang/py/src/avro/ipc.py | 22 | 17211 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Support for inter-process calls.
"""
import httplib
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from avro import io
from avro import protocol
from avro import schema
#
# Constants
#
# Handshake schema is pulled in during build: the @...@ tokens below are
# substituted with the real handshake JSON schemas by the build process.
HANDSHAKE_REQUEST_SCHEMA = schema.parse("""
@HANDSHAKE_REQUEST_SCHEMA@
""")
HANDSHAKE_RESPONSE_SCHEMA = schema.parse("""
@HANDSHAKE_RESPONSE_SCHEMA@
""")
# Reader/writer pairs for each side of the handshake exchange.
HANDSHAKE_REQUESTOR_WRITER = io.DatumWriter(HANDSHAKE_REQUEST_SCHEMA)
HANDSHAKE_REQUESTOR_READER = io.DatumReader(HANDSHAKE_RESPONSE_SCHEMA)
HANDSHAKE_RESPONDER_WRITER = io.DatumWriter(HANDSHAKE_RESPONSE_SCHEMA)
HANDSHAKE_RESPONDER_READER = io.DatumReader(HANDSHAKE_REQUEST_SCHEMA)
# Per-call metadata is a map with bytes values.
META_SCHEMA = schema.parse('{"type": "map", "values": "bytes"}')
META_WRITER = io.DatumWriter(META_SCHEMA)
META_READER = io.DatumReader(META_SCHEMA)
# Errors raised outside a message's declared error union are plain strings.
SYSTEM_ERROR_SCHEMA = schema.parse('["string"]')
# protocol cache: maps transceiver remote_name -> hash / parsed protocol.
REMOTE_HASHES = {}
REMOTE_PROTOCOLS = {}
# Framing: every buffer is preceded by a 4-byte big-endian length header.
BIG_ENDIAN_INT_STRUCT = io.struct_class('!I')
BUFFER_HEADER_LENGTH = 4
BUFFER_SIZE = 8192
#
# Exceptions
#
class AvroRemoteException(schema.AvroException):
  """
  Raised when an error message is sent by an Avro requestor or responder.
  """
  def __init__(self, fail_msg=None):
    # fail_msg: the error datum received from (or sent to) the remote side.
    schema.AvroException.__init__(self, fail_msg)
class ConnectionClosedException(schema.AvroException):
  """Raised when a read from the underlying transport returns no bytes."""
  pass
#
# Base IPC Classes (Requestor/Responder)
#
class BaseRequestor(object):
  """Base class for the client side of a protocol interaction."""
  def __init__(self, local_protocol, transceiver):
    self._local_protocol = local_protocol
    self._transceiver = transceiver
    self._remote_protocol = None
    self._remote_hash = None
    self._send_protocol = None
  # read-only properties
  local_protocol = property(lambda self: self._local_protocol)
  transceiver = property(lambda self: self._transceiver)
  # read/write properties
  # Setting remote_protocol/remote_hash also updates the module-level
  # caches keyed by the transceiver's remote name.
  def set_remote_protocol(self, new_remote_protocol):
    self._remote_protocol = new_remote_protocol
    REMOTE_PROTOCOLS[self.transceiver.remote_name] = self.remote_protocol
  remote_protocol = property(lambda self: self._remote_protocol,
                             set_remote_protocol)
  def set_remote_hash(self, new_remote_hash):
    self._remote_hash = new_remote_hash
    REMOTE_HASHES[self.transceiver.remote_name] = self.remote_hash
  remote_hash = property(lambda self: self._remote_hash, set_remote_hash)
  def set_send_protocol(self, new_send_protocol):
    self._send_protocol = new_send_protocol
  send_protocol = property(lambda self: self._send_protocol, set_send_protocol)
  def request(self, message_name, request_datum):
    """
    Writes a request message and reads a response or error message.
    """
    # build handshake and call request
    buffer_writer = StringIO()
    buffer_encoder = io.BinaryEncoder(buffer_writer)
    self.write_handshake_request(buffer_encoder)
    self.write_call_request(message_name, request_datum, buffer_encoder)
    # send the handshake and call request; block until call response
    call_request = buffer_writer.getvalue()
    return self.issue_request(call_request, message_name, request_datum)
  def write_handshake_request(self, encoder):
    """Write the handshake record (client/server hashes, optionally the
    full client protocol text) to `encoder`."""
    local_hash = self.local_protocol.md5
    remote_name = self.transceiver.remote_name
    remote_hash = REMOTE_HASHES.get(remote_name)
    if remote_hash is None:
      # Server protocol unknown: optimistically assume the server speaks
      # our protocol; the handshake response corrects this if wrong.
      remote_hash = local_hash
      self.remote_protocol = self.local_protocol
    request_datum = {}
    request_datum['clientHash'] = local_hash
    request_datum['serverHash'] = remote_hash
    if self.send_protocol:
      # Full protocol text only sent after a failed handshake asked for it.
      request_datum['clientProtocol'] = str(self.local_protocol)
    HANDSHAKE_REQUESTOR_WRITER.write(request_datum, encoder)
  def write_call_request(self, message_name, request_datum, encoder):
    """
    The format of a call request is:
      * request metadata, a map with values of type bytes
      * the message name, an Avro string, followed by
      * the message parameters. Parameters are serialized according to
        the message's request declaration.
    """
    # request metadata (not yet implemented)
    request_metadata = {}
    META_WRITER.write(request_metadata, encoder)
    # message name
    message = self.local_protocol.messages.get(message_name)
    if message is None:
      raise schema.AvroException('Unknown message: %s' % message_name)
    encoder.write_utf8(message.name)
    # message parameters
    self.write_request(message.request, request_datum, encoder)
  def write_request(self, request_schema, request_datum, encoder):
    """Serialize the request parameters per the message's request schema."""
    datum_writer = io.DatumWriter(request_schema)
    datum_writer.write(request_datum, encoder)
  def read_handshake_response(self, decoder):
    """Process a handshake response; return True if a call response follows
    in the same payload, False if the request must be re-issued (the server
    did not recognize our protocol hash)."""
    handshake_response = HANDSHAKE_REQUESTOR_READER.read(decoder)
    match = handshake_response.get('match')
    if match == 'BOTH':
      # Both sides knew each other's protocol; nothing more to send.
      self.send_protocol = False
      return True
    elif match == 'CLIENT':
      # Server didn't know us previously but processed the call anyway;
      # record the server's protocol for future requests.
      if self.send_protocol:
        raise schema.AvroException('Handshake failure.')
      self.remote_protocol = protocol.parse(
                             handshake_response.get('serverProtocol'))
      self.remote_hash = handshake_response.get('serverHash')
      self.send_protocol = False
      return True
    elif match == 'NONE':
      # Server couldn't process the call; resend including our full
      # protocol text (send_protocol = True).
      if self.send_protocol:
        raise schema.AvroException('Handshake failure.')
      self.remote_protocol = protocol.parse(
                             handshake_response.get('serverProtocol'))
      self.remote_hash = handshake_response.get('serverHash')
      self.send_protocol = True
      return False
    else:
      raise schema.AvroException('Unexpected match: %s' % match)
  def read_call_response(self, message_name, decoder):
    """
    The format of a call response is:
      * response metadata, a map with values of type bytes
      * a one-byte error flag boolean, followed by either:
        o if the error flag is false,
          the message response, serialized per the message's response schema.
        o if the error flag is true,
          the error, serialized per the message's error union schema.
    """
    # response metadata
    response_metadata = META_READER.read(decoder)
    # remote response schema
    remote_message_schema = self.remote_protocol.messages.get(message_name)
    if remote_message_schema is None:
      raise schema.AvroException('Unknown remote message: %s' % message_name)
    # local response schema
    local_message_schema = self.local_protocol.messages.get(message_name)
    if local_message_schema is None:
      raise schema.AvroException('Unknown local message: %s' % message_name)
    # error flag
    if not decoder.read_boolean():
      # Success path: resolve remote (writer) schema against local (reader).
      writers_schema = remote_message_schema.response
      readers_schema = local_message_schema.response
      return self.read_response(writers_schema, readers_schema, decoder)
    else:
      # Error path: the decoded error is raised to the caller.
      writers_schema = remote_message_schema.errors
      readers_schema = local_message_schema.errors
      raise self.read_error(writers_schema, readers_schema, decoder)
  def read_response(self, writers_schema, readers_schema, decoder):
    """Deserialize a successful response datum with schema resolution."""
    datum_reader = io.DatumReader(writers_schema, readers_schema)
    result = datum_reader.read(decoder)
    return result
  def read_error(self, writers_schema, readers_schema, decoder):
    """Deserialize an error datum and wrap it in AvroRemoteException."""
    datum_reader = io.DatumReader(writers_schema, readers_schema)
    return AvroRemoteException(datum_reader.read(decoder))
class Requestor(BaseRequestor):
  """Synchronous requestor: one transceive round-trip per call, re-issuing
  the whole request when the handshake did not carry a call response."""
  def issue_request(self, call_request, message_name, request_datum):
    raw_response = self.transceiver.transceive(call_request)
    # Decode the handshake first; when it reports no call response, the
    # server did not process the call, so the request must be re-sent
    # (now carrying the full client protocol).
    response_decoder = io.BinaryDecoder(StringIO(raw_response))
    if not self.read_handshake_response(response_decoder):
      return self.request(message_name, request_datum)
    return self.read_call_response(message_name, response_decoder)
class Responder(object):
  """Base class for the server side of a protocol interaction."""
  def __init__(self, local_protocol):
    self._local_protocol = local_protocol
    self._local_hash = self.local_protocol.md5
    # Cache of client protocols keyed by their MD5 hash; seeded with our own.
    self._protocol_cache = {}
    self.set_protocol_cache(self.local_hash, self.local_protocol)
  # read-only properties
  local_protocol = property(lambda self: self._local_protocol)
  local_hash = property(lambda self: self._local_hash)
  protocol_cache = property(lambda self: self._protocol_cache)
  # utility functions to manipulate protocol cache
  def get_protocol_cache(self, hash):
    return self.protocol_cache.get(hash)
  def set_protocol_cache(self, hash, protocol):
    self.protocol_cache[hash] = protocol
  def respond(self, call_request):
    """
    Called by a server to deserialize a request, compute and serialize
    a response or error. Compare to 'handle()' in Thrift.
    """
    buffer_reader = StringIO(call_request)
    buffer_decoder = io.BinaryDecoder(buffer_reader)
    buffer_writer = StringIO()
    buffer_encoder = io.BinaryEncoder(buffer_writer)
    error = None
    response_metadata = {}
    try:
      remote_protocol = self.process_handshake(buffer_decoder, buffer_encoder)
      # handshake failure
      if remote_protocol is None:
        return buffer_writer.getvalue()
      # read request using remote protocol
      request_metadata = META_READER.read(buffer_decoder)
      remote_message_name = buffer_decoder.read_utf8()
      # get remote and local request schemas so we can do
      # schema resolution (one fine day)
      remote_message = remote_protocol.messages.get(remote_message_name)
      if remote_message is None:
        fail_msg = 'Unknown remote message: %s' % remote_message_name
        raise schema.AvroException(fail_msg)
      local_message = self.local_protocol.messages.get(remote_message_name)
      if local_message is None:
        fail_msg = 'Unknown local message: %s' % remote_message_name
        raise schema.AvroException(fail_msg)
      writers_schema = remote_message.request
      readers_schema = local_message.request
      request = self.read_request(writers_schema, readers_schema,
                                  buffer_decoder)
      # perform server logic
      try:
        response = self.invoke(local_message, request)
      except AvroRemoteException, e:
        error = e
      except Exception, e:
        # Unexpected server errors are reported as remote exceptions.
        error = AvroRemoteException(str(e))
      # write response using local protocol
      META_WRITER.write(response_metadata, buffer_encoder)
      buffer_encoder.write_boolean(error is not None)
      if error is None:
        writers_schema = local_message.response
        self.write_response(writers_schema, response, buffer_encoder)
      else:
        writers_schema = local_message.errors
        self.write_error(writers_schema, error, buffer_encoder)
    except schema.AvroException, e:
      # NOTE(review): this rebinds buffer_encoder to a *fresh* StringIO, so
      # the system-error payload written below never reaches buffer_writer
      # and is dropped from the returned response — looks like a bug;
      # confirm intended behavior before changing.
      error = AvroRemoteException(str(e))
      buffer_encoder = io.BinaryEncoder(StringIO())
      META_WRITER.write(response_metadata, buffer_encoder)
      buffer_encoder.write_boolean(True)
      self.write_error(SYSTEM_ERROR_SCHEMA, error, buffer_encoder)
    return buffer_writer.getvalue()
  def process_handshake(self, decoder, encoder):
    """Read the client handshake, write our handshake response, and return
    the client's protocol (None if it is unknown to us)."""
    handshake_request = HANDSHAKE_RESPONDER_READER.read(decoder)
    handshake_response = {}
    # determine the remote protocol
    client_hash = handshake_request.get('clientHash')
    client_protocol = handshake_request.get('clientProtocol')
    remote_protocol = self.get_protocol_cache(client_hash)
    if remote_protocol is None and client_protocol is not None:
      remote_protocol = protocol.parse(client_protocol)
      self.set_protocol_cache(client_hash, remote_protocol)
    # evaluate remote's guess of the local protocol
    server_hash = handshake_request.get('serverHash')
    if self.local_hash == server_hash:
      if remote_protocol is None:
        handshake_response['match'] = 'NONE'
      else:
        handshake_response['match'] = 'BOTH'
    else:
      if remote_protocol is None:
        handshake_response['match'] = 'NONE'
      else:
        handshake_response['match'] = 'CLIENT'
    if handshake_response['match'] != 'BOTH':
      # Client must learn our protocol; include its text and hash.
      handshake_response['serverProtocol'] = str(self.local_protocol)
      handshake_response['serverHash'] = self.local_hash
    HANDSHAKE_RESPONDER_WRITER.write(handshake_response, encoder)
    return remote_protocol
  def invoke(self, local_message, request):
    """
    Actual work done by server: cf. handler in thrift. Subclasses override.
    """
    pass
  def read_request(self, writers_schema, readers_schema, decoder):
    """Deserialize the request parameters with schema resolution."""
    datum_reader = io.DatumReader(writers_schema, readers_schema)
    return datum_reader.read(decoder)
  def write_response(self, writers_schema, response_datum, encoder):
    """Serialize a successful response datum."""
    datum_writer = io.DatumWriter(writers_schema)
    datum_writer.write(response_datum, encoder)
  def write_error(self, writers_schema, error_exception, encoder):
    """Serialize an error as its string representation."""
    datum_writer = io.DatumWriter(writers_schema)
    datum_writer.write(str(error_exception), encoder)
#
# Utility classes
#
class FramedReader(object):
  """Wrapper around a file-like object to read framed data."""
  def __init__(self, reader):
    self._reader = reader
  # read-only properties
  reader = property(lambda self: self._reader)
  def read_framed_message(self):
    """Read length-prefixed buffers until the zero-length terminator and
    return their concatenation."""
    frames = []
    while True:
      remaining = self._read_buffer_length()
      if remaining == 0:
        # A zero-length buffer marks the end of the message.
        return ''.join(frames)
      parts = []
      while remaining > 0:
        chunk = self.reader.read(remaining)
        if chunk == '':
          raise ConnectionClosedException("Reader read 0 bytes.")
        parts.append(chunk)
        remaining -= len(chunk)
      frames.append(''.join(parts))
  def _read_buffer_length(self):
    """Read and unpack one 4-byte big-endian buffer-length header."""
    header = self.reader.read(BUFFER_HEADER_LENGTH)
    if header == '':
      raise ConnectionClosedException("Reader read 0 bytes.")
    return BIG_ENDIAN_INT_STRUCT.unpack(header)[0]
class FramedWriter(object):
  """Wrapper around a file-like object to write framed data."""
  def __init__(self, writer):
    self._writer = writer
  # read-only properties
  writer = property(lambda self: self._writer)
  def write_framed_message(self, message):
    """Write *message* as BUFFER_SIZE-sized length-prefixed buffers,
    followed by the zero-length terminator buffer."""
    for start in range(0, len(message), BUFFER_SIZE):
      self.write_buffer(message[start:start + BUFFER_SIZE])
    # A message is always terminated by a zero-length buffer.
    self.write_buffer_length(0)
  def write_buffer(self, chunk):
    """Write one buffer: its 4-byte length header, then its bytes."""
    self.write_buffer_length(len(chunk))
    self.writer.write(chunk)
  def write_buffer_length(self, n):
    self.writer.write(BIG_ENDIAN_INT_STRUCT.pack(n))
#
# Transceiver Implementations
#
class HTTPTransceiver(object):
  """
  A simple HTTP-based transceiver implementation.
  Useful for clients but not for servers
  """
  def __init__(self, host, port, req_resource='/'):
    self.req_resource = req_resource
    self.conn = httplib.HTTPConnection(host, port)
    self.conn.connect()
  # read-only properties
  sock = property(lambda self: self.conn.sock)
  remote_name = property(lambda self: self.sock.getsockname())
  # read/write properties
  def set_conn(self, new_conn):
    self._conn = new_conn
  conn = property(lambda self: self._conn, set_conn)
  # NOTE(review): class-level default; always shadowed by the instance
  # attribute set in __init__, so this line appears redundant.
  req_resource = '/'
  def transceive(self, request):
    """Send one framed request and block for the framed response."""
    self.write_framed_message(request)
    result = self.read_framed_message()
    return result
  def read_framed_message(self):
    response = self.conn.getresponse()
    response_reader = FramedReader(response)
    framed_message = response_reader.read_framed_message()
    response.read() # ensure we're ready for subsequent requests
    return framed_message
  def write_framed_message(self, message):
    # The framed request is built in memory, then POSTed in one shot.
    req_method = 'POST'
    req_headers = {'Content-Type': 'avro/binary'}
    req_body_buffer = FramedWriter(StringIO())
    req_body_buffer.write_framed_message(message)
    req_body = req_body_buffer.writer.getvalue()
    self.conn.request(req_method, self.req_resource, req_body, req_headers)
  def close(self):
    self.conn.close()
#
# Server Implementations (none yet)
#
| apache-2.0 |
blacktear23/django | django/contrib/gis/tests/distapp/tests.py | 276 | 19055 | import os
from decimal import Decimal
from django.db import connection
from django.db.models import Q
from django.contrib.gis.geos import GEOSGeometry, Point, LineString
from django.contrib.gis.measure import D # alias for Distance
from django.contrib.gis.tests.utils import oracle, postgis, spatialite, no_oracle, no_spatialite
from django.test import TestCase
from models import AustraliaCity, Interstate, SouthTexasInterstate, \
SouthTexasCity, SouthTexasCityFt, CensusZipcode, SouthTexasZipcode
class DistanceTest(TestCase):
    # A point we are testing distances with -- using a WGS84
    # coordinate that'll be implicitly transformed to the
    # coordinate system of the field, EPSG:32140 (Texas South Central
    # w/units in meters)
    stx_pnt = GEOSGeometry('POINT (-95.370401017314293 29.704867409475465)', 4326)
    # Another one for Australia
    au_pnt = GEOSGeometry('POINT (150.791 -34.4919)', 4326)
def get_names(self, qs):
cities = [c.name for c in qs]
cities.sort()
return cities
def test01_init(self):
"Test initialization of distance models."
self.assertEqual(9, SouthTexasCity.objects.count())
self.assertEqual(9, SouthTexasCityFt.objects.count())
self.assertEqual(11, AustraliaCity.objects.count())
self.assertEqual(4, SouthTexasZipcode.objects.count())
self.assertEqual(4, CensusZipcode.objects.count())
self.assertEqual(1, Interstate.objects.count())
self.assertEqual(1, SouthTexasInterstate.objects.count())
    @no_spatialite
    def test02_dwithin(self):
        "Testing the `dwithin` lookup type."
        # Distances -- all should be equal (except for the
        # degree/meter pair in au_cities, that's somewhat
        # approximate).
        tx_dists = [(7000, 22965.83), D(km=7), D(mi=4.349)]
        au_dists = [(0.5, 32000), D(km=32), D(mi=19.884)]
        # Expected cities for Australia and Texas.
        tx_cities = ['Downtown Houston', 'Southside Place']
        au_cities = ['Mittagong', 'Shellharbour', 'Thirroul', 'Wollongong']
        # Performing distance queries on two projected coordinate systems one
        # with units in meters and the other in units of U.S. survey feet.
        for dist in tx_dists:
            if isinstance(dist, tuple): dist1, dist2 = dist
            else: dist1 = dist2 = dist
            qs1 = SouthTexasCity.objects.filter(point__dwithin=(self.stx_pnt, dist1))
            qs2 = SouthTexasCityFt.objects.filter(point__dwithin=(self.stx_pnt, dist2))
            for qs in qs1, qs2:
                self.assertEqual(tx_cities, self.get_names(qs))
        # Now performing the `dwithin` queries on a geodetic coordinate system.
        for dist in au_dists:
            # Distance objects against a geodetic field only work on Oracle;
            # other backends must reject them (see assertion below).
            if isinstance(dist, D) and not oracle: type_error = True
            else: type_error = False
            if isinstance(dist, tuple):
                # Degrees for most backends, meters for Oracle.
                if oracle: dist = dist[1]
                else: dist = dist[0]
            # Creating the query set.
            qs = AustraliaCity.objects.order_by('name')
            if type_error:
                # A ValueError should be raised on PostGIS when trying to pass
                # Distance objects into a DWithin query using a geodetic field.
                self.assertRaises(ValueError, AustraliaCity.objects.filter(point__dwithin=(self.au_pnt, dist)).count)
            else:
                self.assertEqual(au_cities, self.get_names(qs.filter(point__dwithin=(self.au_pnt, dist))))
    def test03a_distance_method(self):
        "Testing the `distance` GeoQuerySet method on projected coordinate systems."
        # The point for La Grange, TX
        lagrange = GEOSGeometry('POINT(-96.876369 29.905320)', 4326)
        # Reference distances in feet and in meters. Got these values from
        # using the provided raw SQL statements.
        #  SELECT ST_Distance(point, ST_Transform(ST_GeomFromText('POINT(-96.876369 29.905320)', 4326), 32140)) FROM distapp_southtexascity;
        m_distances = [147075.069813, 139630.198056, 140888.552826,
                       138809.684197, 158309.246259, 212183.594374,
                       70870.188967, 165337.758878, 139196.085105]
        #  SELECT ST_Distance(point, ST_Transform(ST_GeomFromText('POINT(-96.876369 29.905320)', 4326), 2278)) FROM distapp_southtexascityft;
        # Oracle 11 thinks this is not a projected coordinate system, so it's
        # not tested.
        ft_distances = [482528.79154625, 458103.408123001, 462231.860397575,
                        455411.438904354, 519386.252102563, 696139.009211594,
                        232513.278304279, 542445.630586414, 456679.155883207]
        # Testing using different variations of parameters and using models
        # with different projected coordinate systems.
        dist1 = SouthTexasCity.objects.distance(lagrange, field_name='point')
        dist2 = SouthTexasCity.objects.distance(lagrange) # Using GEOSGeometry parameter
        if spatialite or oracle:
            # Only the GEOSGeometry variants are exercised on these backends.
            dist_qs = [dist1, dist2]
        else:
            dist3 = SouthTexasCityFt.objects.distance(lagrange.ewkt) # Using EWKT string parameter.
            dist4 = SouthTexasCityFt.objects.distance(lagrange)
            dist_qs = [dist1, dist2, dist3, dist4]
        # Original query done on PostGIS, have to adjust AlmostEqual tolerance
        # for Oracle.
        if oracle: tol = 2
        else: tol = 5
        # Ensuring expected distances are returned for each distance queryset.
        for qs in dist_qs:
            for i, c in enumerate(qs):
                self.assertAlmostEqual(m_distances[i], c.distance.m, tol)
                self.assertAlmostEqual(ft_distances[i], c.distance.survey_ft, tol)
@no_spatialite
def test03b_distance_method(self):
    "Testing the `distance` GeoQuerySet method on geodetic coordinate systems."
    # Oracle's geodetic math is less precise; relax the tolerance for it.
    if oracle: tol = 2
    else: tol = 5
    # Testing geodetic distance calculation with a non-point geometry
    # (a LineString of Wollongong and Shellharbour coords).
    ls = LineString( ( (150.902, -34.4245), (150.87, -34.5789) ) )
    if oracle or connection.ops.geography:
        # Reference query:
        #  SELECT ST_distance_sphere(point, ST_GeomFromText('LINESTRING(150.9020 -34.4245,150.8700 -34.5789)', 4326)) FROM distapp_australiacity ORDER BY name;
        distances = [1120954.92533513, 140575.720018241, 640396.662906304,
                     60580.9693849269, 972807.955955075, 568451.8357838,
                     40435.4335201384, 0, 68272.3896586844, 12375.0643697706, 0]
        qs = AustraliaCity.objects.distance(ls).order_by('name')
        for city, distance in zip(qs, distances):
            # Testing equivalence to within a meter.
            self.assertAlmostEqual(distance, city.distance.m, 0)
    else:
        # PostGIS 1.4 and below is limited to distance queries only
        # to/from point geometries, check for raising of ValueError.
        self.assertRaises(ValueError, AustraliaCity.objects.distance, ls)
        self.assertRaises(ValueError, AustraliaCity.objects.distance, ls.wkt)
    # Got the reference distances using the raw SQL statements:
    #  SELECT ST_distance_spheroid(point, ST_GeomFromText('POINT(151.231341 -33.952685)', 4326), 'SPHEROID["WGS 84",6378137.0,298.257223563]') FROM distapp_australiacity WHERE (NOT (id = 11));
    #  SELECT ST_distance_sphere(point, ST_GeomFromText('POINT(151.231341 -33.952685)', 4326)) FROM distapp_australiacity WHERE (NOT (id = 11));
    if connection.ops.postgis and connection.ops.proj_version_tuple() >= (4, 7, 0):
        # PROJ.4 versions 4.7+ have updated datums, and thus different
        # distance values.
        spheroid_distances = [60504.0628957201, 77023.9489850262, 49154.8867574404,
                              90847.4358768573, 217402.811919332, 709599.234564757,
                              640011.483550888, 7772.00667991925, 1047861.78619339,
                              1165126.55236034]
        sphere_distances = [60580.9693849267, 77144.0435286473, 49199.4415344719,
                            90804.7533823494, 217713.384600405, 709134.127242793,
                            639828.157159169, 7786.82949717788, 1049204.06569028,
                            1162623.7238134]
    else:
        spheroid_distances = [60504.0628825298, 77023.948962654, 49154.8867507115,
                              90847.435881812, 217402.811862568, 709599.234619957,
                              640011.483583758, 7772.00667666425, 1047861.7859506,
                              1165126.55237647]
        sphere_distances = [60580.7612632291, 77143.7785056615, 49199.2725132184,
                            90804.4414289463, 217712.63666124, 709131.691061906,
                            639825.959074112, 7786.80274606706, 1049200.46122281,
                            1162619.7297006]
    # Testing with spheroid distances first.
    hillsdale = AustraliaCity.objects.get(name='Hillsdale')
    qs = AustraliaCity.objects.exclude(id=hillsdale.id).distance(hillsdale.point, spheroid=True)
    for i, c in enumerate(qs):
        self.assertAlmostEqual(spheroid_distances[i], c.distance.m, tol)
    if postgis:
        # PostGIS uses sphere-only distances by default, testing these as well.
        qs = AustraliaCity.objects.exclude(id=hillsdale.id).distance(hillsdale.point)
        for i, c in enumerate(qs):
            self.assertAlmostEqual(sphere_distances[i], c.distance.m, tol)
@no_oracle # Oracle already handles geographic distance calculation.
def test03c_distance_method(self):
    "Testing the `distance` GeoQuerySet method used with `transform` on a geographic field."
    # Normally you can't compute distances from a geometry field
    # that is not a PointField (on PostGIS 1.4 and below).
    if not connection.ops.geography:
        self.assertRaises(ValueError, CensusZipcode.objects.distance, self.stx_pnt)
    # We'll be using a Polygon (created by buffering the centroid
    # of 77005 to 100m) -- which aren't allowed in geographic distance
    # queries normally, however our field has been transformed to
    # a non-geographic system.
    z = SouthTexasZipcode.objects.get(name='77005')
    # Reference query:
    # SELECT ST_Distance(ST_Transform("distapp_censuszipcode"."poly", 32140), ST_GeomFromText('<buffer_wkt>', 32140)) FROM "distapp_censuszipcode";
    dists_m = [3553.30384972258, 1243.18391525602, 2186.15439472242]
    # Having our buffer in the SRID of the transformation and of the field
    # -- should get the same results.  The first buffer has no need for
    # transformation SQL because it is the same SRID as what was given
    # to `transform()`.  The second buffer will need to be transformed,
    # however.
    buf1 = z.poly.centroid.buffer(100)
    buf2 = buf1.transform(4269, clone=True)
    ref_zips = ['77002', '77025', '77401']
    for buf in [buf1, buf2]:
        qs = CensusZipcode.objects.exclude(name='77005').transform(32140).distance(buf)
        self.assertEqual(ref_zips, self.get_names(qs))
        for i, z in enumerate(qs):
            self.assertAlmostEqual(z.distance.m, dists_m[i], 5)
def test04_distance_lookups(self):
    "Testing the `distance_lt`, `distance_gt`, `distance_lte`, and `distance_gte` lookup types."
    # Retrieving the cities within a 20km 'donut' w/a 7km radius 'hole'
    # (thus, Houston and Southside place will be excluded as tested in
    # the `test02_dwithin` above).
    qs1 = SouthTexasCity.objects.filter(point__distance_gte=(self.stx_pnt, D(km=7))).filter(point__distance_lte=(self.stx_pnt, D(km=20)))
    # Can't determine the units on SpatiaLite from PROJ.4 string, and
    # Oracle 11 incorrectly thinks it is not projected.
    if spatialite or oracle:
        dist_qs = (qs1,)
    else:
        qs2 = SouthTexasCityFt.objects.filter(point__distance_gte=(self.stx_pnt, D(km=7))).filter(point__distance_lte=(self.stx_pnt, D(km=20)))
        dist_qs = (qs1, qs2)
    # Both the meter- and feet-based models must return the same cities.
    for qs in dist_qs:
        cities = self.get_names(qs)
        self.assertEqual(cities, ['Bellaire', 'Pearland', 'West University Place'])
    # Doing a distance query using Polygons instead of a Point.
    z = SouthTexasZipcode.objects.get(name='77005')
    qs = SouthTexasZipcode.objects.exclude(name='77005').filter(poly__distance_lte=(z.poly, D(m=275)))
    self.assertEqual(['77025', '77401'], self.get_names(qs))
    # If we add a little more distance 77002 should be included.
    qs = SouthTexasZipcode.objects.exclude(name='77005').filter(poly__distance_lte=(z.poly, D(m=300)))
    self.assertEqual(['77002', '77025', '77401'], self.get_names(qs))
def test05_geodetic_distance_lookups(self):
    "Testing distance lookups on geodetic coordinate systems."
    # Line is from Canberra to Sydney.  Query is for all other cities within
    # a 100km of that line (which should exclude only Hobart & Adelaide).
    line = GEOSGeometry('LINESTRING(144.9630 -37.8143,151.2607 -33.8870)', 4326)
    dist_qs = AustraliaCity.objects.filter(point__distance_lte=(line, D(km=100)))
    if oracle or connection.ops.geography:
        # Oracle and PostGIS 1.5 can do distance lookups on arbitrary geometries.
        self.assertEqual(9, dist_qs.count())
        self.assertEqual(['Batemans Bay', 'Canberra', 'Hillsdale',
                          'Melbourne', 'Mittagong', 'Shellharbour',
                          'Sydney', 'Thirroul', 'Wollongong'],
                         self.get_names(dist_qs))
    else:
        # PostGIS 1.4 and below only allows geodetic distance queries (utilizing
        # ST_Distance_Sphere/ST_Distance_Spheroid) from Points to PointFields
        # on geometry columns.
        self.assertRaises(ValueError, dist_qs.count)
        # Ensured that a ValueError was raised, none of the rest of the test is
        # supported on this backend, so bail now.
        if spatialite: return
    # Too many params (4 in this case) should raise a ValueError.
    self.assertRaises(ValueError, len,
                      AustraliaCity.objects.filter(point__distance_lte=('POINT(5 23)', D(km=100), 'spheroid', '4')))
    # Not enough params should raise a ValueError.
    self.assertRaises(ValueError, len,
                      AustraliaCity.objects.filter(point__distance_lte=('POINT(5 23)',)))
    # Getting all cities w/in 550 miles of Hobart.
    hobart = AustraliaCity.objects.get(name='Hobart')
    qs = AustraliaCity.objects.exclude(name='Hobart').filter(point__distance_lte=(hobart.point, D(mi=550)))
    cities = self.get_names(qs)
    self.assertEqual(cities, ['Batemans Bay', 'Canberra', 'Melbourne'])
    # Cities that are either really close or really far from Wollongong --
    # and using different units of distance.
    wollongong = AustraliaCity.objects.get(name='Wollongong')
    d1, d2 = D(yd=19500), D(nm=400) # Yards (~17km) & Nautical miles.
    # Normal geodetic distance lookup (uses `distance_sphere` on PostGIS).
    gq1 = Q(point__distance_lte=(wollongong.point, d1))
    gq2 = Q(point__distance_gte=(wollongong.point, d2))
    qs1 = AustraliaCity.objects.exclude(name='Wollongong').filter(gq1 | gq2)
    # Geodetic distance lookup but telling GeoDjango to use `distance_spheroid`
    # instead (we should get the same results b/c accuracy variance won't matter
    # in this test case).
    if postgis:
        gq3 = Q(point__distance_lte=(wollongong.point, d1, 'spheroid'))
        gq4 = Q(point__distance_gte=(wollongong.point, d2, 'spheroid'))
        qs2 = AustraliaCity.objects.exclude(name='Wollongong').filter(gq3 | gq4)
        querysets = [qs1, qs2]
    else:
        querysets = [qs1]
    for qs in querysets:
        cities = self.get_names(qs)
        self.assertEqual(cities, ['Adelaide', 'Hobart', 'Shellharbour', 'Thirroul'])
def test06_area(self):
    "Testing the `area` GeoQuerySet method."
    # Reference queries:
    # SELECT ST_Area(poly) FROM distapp_southtexaszipcode;
    area_sq_m = [5437908.90234375, 10183031.4389648, 11254471.0073242, 9881708.91772461]
    # Tolerance has to be lower for Oracle and differences
    # with GEOS 3.0.0RC4
    tol = 2
    # Each zipcode row is annotated with an `area` measure; check sq. meters.
    for i, z in enumerate(SouthTexasZipcode.objects.area()):
        self.assertAlmostEqual(area_sq_m[i], z.area.sq_m, tol)
def test07_length(self):
    "Testing the `length` GeoQuerySet method."
    # Reference query (should use `length_spheroid`).
    # SELECT ST_length_spheroid(ST_GeomFromText('<wkt>', 4326) 'SPHEROID["WGS 84",6378137,298.257223563, AUTHORITY["EPSG","7030"]]');
    len_m1 = 473504.769553813
    len_m2 = 4617.668
    if spatialite:
        # Does not support geodetic coordinate systems.
        self.assertRaises(ValueError, Interstate.objects.length)
    else:
        qs = Interstate.objects.length()
        if oracle: tol = 2
        else: tol = 5
        self.assertAlmostEqual(len_m1, qs[0].length.m, tol)
    # Now doing length on a projected coordinate system.
    i10 = SouthTexasInterstate.objects.length().get(name='I-10')
    self.assertAlmostEqual(len_m2, i10.length.m, 2)
@no_spatialite
def test08_perimeter(self):
    "Testing the `perimeter` GeoQuerySet method."
    # Reference query:
    # SELECT ST_Perimeter(distapp_southtexaszipcode.poly) FROM distapp_southtexaszipcode;
    perim_m = [18404.3550889361, 15627.2108551001, 20632.5588368978, 17094.5996143697]
    if oracle: tol = 2
    else: tol = 7
    for i, z in enumerate(SouthTexasZipcode.objects.perimeter()):
        self.assertAlmostEqual(perim_m[i], z.perimeter.m, tol)
    # Running on points; should return 0.
    for i, c in enumerate(SouthTexasCity.objects.perimeter(model_att='perim')):
        self.assertEqual(0, c.perim.m)
def test09_measurement_null_fields(self):
    "Testing the measurement GeoQuerySet methods on fields with NULL values."
    # Creating SouthTexasZipcode w/NULL value.
    SouthTexasZipcode.objects.create(name='78212')
    # Performing distance/area queries against the NULL PolygonField,
    # and ensuring the result of the operations is None.
    htown = SouthTexasCity.objects.get(name='Downtown Houston')
    z = SouthTexasZipcode.objects.distance(htown.point).area().get(name='78212')
    self.assertEqual(None, z.distance)
    self.assertEqual(None, z.area)
| bsd-3-clause |
batermj/algorithm-challenger | code-analysis/programming_anguage/python/source_codes/Python3.5.9/Python-3.5.9/Lib/test/test_with.py | 5 | 26295 | """Unit tests for the with statement specified in PEP 343."""
__author__ = "Mike Bland"
__email__ = "mbland at acm dot org"
import sys
import unittest
from collections import deque
from contextlib import _GeneratorContextManager, contextmanager
class MockContextManager(_GeneratorContextManager):
    """A _GeneratorContextManager that records how it was used.

    Tracks whether ``__enter__``/``__exit__`` ran and captures the exact
    arguments passed to ``__exit__`` so tests can assert on them.
    """

    def __init__(self, *args):
        super().__init__(*args)
        self.enter_called = False
        self.exit_called = False
        self.exit_args = None

    def __enter__(self):
        # Flag the call before delegating to the real generator protocol.
        self.enter_called = True
        return super().__enter__()

    def __exit__(self, type, value, traceback):
        # Remember exactly what exception info (if any) was supplied.
        self.exit_called = True
        self.exit_args = (type, value, traceback)
        return super().__exit__(type, value, traceback)
def mock_contextmanager(func):
    """Decorator: like contextlib.contextmanager, but wraps the generator
    function in a MockContextManager so calls can be inspected."""
    def wrapper(*args, **kwargs):
        return MockContextManager(func, args, kwargs)
    return wrapper
class MockResource(object):
    """Value yielded by the mock context manager; tracks its lifecycle."""

    def __init__(self):
        # Both flags start cleared; the generator flips `yielded` just
        # before yielding and `stopped` in its finally clause.
        self.yielded = self.stopped = False
@mock_contextmanager
def mock_contextmanager_generator():
    """Yield a MockResource, flagging it before the yield and after exit."""
    resource = MockResource()
    try:
        resource.yielded = True
        yield resource
    finally:
        # Runs on normal exit and on exception alike.
        resource.stopped = True
class Nested(object):
    """Context manager that enters several managers first-in, last-out,
    mimicking lexically nested ``with`` statements."""

    def __init__(self, *managers):
        self.managers = managers
        self.entered = None  # deque of successfully-entered managers, or None

    def __enter__(self):
        if self.entered is not None:
            raise RuntimeError("Context is not reentrant")
        self.entered = deque()
        values = []
        try:
            for manager in self.managers:
                values.append(manager.__enter__())
                # appendleft so iteration in __exit__ is last-in, first-out.
                self.entered.appendleft(manager)
        except BaseException:
            # Unwind whatever was entered so far; re-raise unless the
            # unwinding swallowed the exception.
            if not self.__exit__(*sys.exc_info()):
                raise
        return values

    def __exit__(self, *exc_info):
        # Behave like nested with statements: first in, last out, and a
        # new exception raised by an inner __exit__ overrides the old one.
        pending = exc_info
        for manager in self.entered:
            try:
                if manager.__exit__(*pending):
                    pending = (None, None, None)
            except BaseException:
                pending = sys.exc_info()
        self.entered = None
        if pending is not exc_info:
            raise pending[0](pending[1]).with_traceback(pending[2])
class MockNested(Nested):
    """Nested variant that records enter/exit calls and the exit arguments,
    mirroring the bookkeeping done by MockContextManager."""

    def __init__(self, *managers):
        super().__init__(*managers)
        self.enter_called = False
        self.exit_called = False
        self.exit_args = None

    def __enter__(self):
        self.enter_called = True
        return super().__enter__()

    def __exit__(self, *exc_info):
        self.exit_called = True
        self.exit_args = exc_info
        return super().__exit__(*exc_info)
class FailureTestCase(unittest.TestCase):
    # Ways a `with` statement can fail before or around the managed block:
    # unresolved names, missing protocol methods, bad assignment targets,
    # and exceptions raised from __enter__/__exit__ themselves.

    def testNameError(self):
        def fooNotDeclared():
            with foo: pass
        self.assertRaises(NameError, fooNotDeclared)

    def testEnterAttributeError(self):
        # An object lacking __enter__ cannot be used as a context manager.
        class LacksEnter(object):
            def __exit__(self, type, value, traceback):
                pass
        def fooLacksEnter():
            foo = LacksEnter()
            with foo: pass
        self.assertRaises(AttributeError, fooLacksEnter)

    def testExitAttributeError(self):
        # Likewise for an object lacking __exit__.
        class LacksExit(object):
            def __enter__(self):
                pass
        def fooLacksExit():
            foo = LacksExit()
            with foo: pass
        self.assertRaises(AttributeError, fooLacksExit)

    def assertRaisesSyntaxError(self, codestr):
        # Helper: compiling `codestr` must raise SyntaxError.
        def shouldRaiseSyntaxError(s):
            compile(s, '', 'single')
        self.assertRaises(SyntaxError, shouldRaiseSyntaxError, codestr)

    def testAssignmentToNoneError(self):
        self.assertRaisesSyntaxError('with mock as None:\n  pass')
        self.assertRaisesSyntaxError(
            'with mock as (None):\n'
            '  pass')

    def testAssignmentToEmptyTupleError(self):
        self.assertRaisesSyntaxError(
            'with mock as ():\n'
            '  pass')

    def testAssignmentToTupleOnlyContainingNoneError(self):
        self.assertRaisesSyntaxError('with mock as None,:\n  pass')
        self.assertRaisesSyntaxError(
            'with mock as (None,):\n'
            '  pass')

    def testAssignmentToTupleContainingNoneError(self):
        self.assertRaisesSyntaxError(
            'with mock as (foo, None, bar):\n'
            '  pass')

    def testEnterThrows(self):
        # If __enter__ raises, the target must never be assigned.
        class EnterThrows(object):
            def __enter__(self):
                raise RuntimeError("Enter threw")
            def __exit__(self, *args):
                pass
        def shouldThrow():
            ct = EnterThrows()
            self.foo = None
            with ct as self.foo:
                pass
        self.assertRaises(RuntimeError, shouldThrow)
        self.assertEqual(self.foo, None)

    def testExitThrows(self):
        # An exception from __exit__ propagates out of the with statement.
        class ExitThrows(object):
            def __enter__(self):
                return
            def __exit__(self, *args):
                raise RuntimeError(42)
        def shouldThrow():
            with ExitThrows():
                pass
        self.assertRaises(RuntimeError, shouldThrow)
class ContextmanagerAssertionMixin(object):
    # Shared assertion helpers for checking the bookkeeping flags set by
    # MockContextManager / mock_contextmanager_generator, both while the
    # with-block is active and after it has exited.

    def setUp(self):
        self.TEST_EXCEPTION = RuntimeError("test exception")

    def assertInWithManagerInvariants(self, mock_manager):
        # Inside the with block: entered, but not yet exited.
        self.assertTrue(mock_manager.enter_called)
        self.assertFalse(mock_manager.exit_called)
        self.assertEqual(mock_manager.exit_args, None)

    def assertAfterWithManagerInvariants(self, mock_manager, exit_args):
        self.assertTrue(mock_manager.enter_called)
        self.assertTrue(mock_manager.exit_called)
        self.assertEqual(mock_manager.exit_args, exit_args)

    def assertAfterWithManagerInvariantsNoError(self, mock_manager):
        # Clean exit passes (None, None, None) to __exit__.
        self.assertAfterWithManagerInvariants(mock_manager,
                                              (None, None, None))

    def assertInWithGeneratorInvariants(self, mock_generator):
        self.assertTrue(mock_generator.yielded)
        self.assertFalse(mock_generator.stopped)

    def assertAfterWithGeneratorInvariantsNoError(self, mock_generator):
        self.assertTrue(mock_generator.yielded)
        self.assertTrue(mock_generator.stopped)

    def raiseTestException(self):
        raise self.TEST_EXCEPTION

    def assertAfterWithManagerInvariantsWithError(self, mock_manager,
                                                  exc_type=None):
        self.assertTrue(mock_manager.enter_called)
        self.assertTrue(mock_manager.exit_called)
        if exc_type is None:
            self.assertEqual(mock_manager.exit_args[1], self.TEST_EXCEPTION)
            exc_type = type(self.TEST_EXCEPTION)
        self.assertEqual(mock_manager.exit_args[0], exc_type)
        # Test the __exit__ arguments. Issue #7853
        self.assertIsInstance(mock_manager.exit_args[1], exc_type)
        self.assertIsNot(mock_manager.exit_args[2], None)

    def assertAfterWithGeneratorInvariantsWithError(self, mock_generator):
        self.assertTrue(mock_generator.yielded)
        self.assertTrue(mock_generator.stopped)
class NonexceptionalTestCase(unittest.TestCase, ContextmanagerAssertionMixin):
    # Happy-path with statements: no exception is raised in the body.

    def testInlineGeneratorSyntax(self):
        with mock_contextmanager_generator():
            pass

    def testUnboundGenerator(self):
        mock = mock_contextmanager_generator()
        with mock:
            pass
        self.assertAfterWithManagerInvariantsNoError(mock)

    def testInlineGeneratorBoundSyntax(self):
        with mock_contextmanager_generator() as foo:
            self.assertInWithGeneratorInvariants(foo)
        # FIXME: In the future, we'll try to keep the bound names from leaking
        self.assertAfterWithGeneratorInvariantsNoError(foo)

    def testInlineGeneratorBoundToExistingVariable(self):
        foo = None
        with mock_contextmanager_generator() as foo:
            self.assertInWithGeneratorInvariants(foo)
        self.assertAfterWithGeneratorInvariantsNoError(foo)

    def testInlineGeneratorBoundToDottedVariable(self):
        # The with target may be an attribute reference, not just a name.
        with mock_contextmanager_generator() as self.foo:
            self.assertInWithGeneratorInvariants(self.foo)
        self.assertAfterWithGeneratorInvariantsNoError(self.foo)

    def testBoundGenerator(self):
        mock = mock_contextmanager_generator()
        with mock as foo:
            self.assertInWithGeneratorInvariants(foo)
            self.assertInWithManagerInvariants(mock)
        self.assertAfterWithGeneratorInvariantsNoError(foo)
        self.assertAfterWithManagerInvariantsNoError(mock)

    def testNestedSingleStatements(self):
        # Lexically nested with statements exit innermost-first.
        mock_a = mock_contextmanager_generator()
        with mock_a as foo:
            mock_b = mock_contextmanager_generator()
            with mock_b as bar:
                self.assertInWithManagerInvariants(mock_a)
                self.assertInWithManagerInvariants(mock_b)
                self.assertInWithGeneratorInvariants(foo)
                self.assertInWithGeneratorInvariants(bar)
            self.assertAfterWithManagerInvariantsNoError(mock_b)
            self.assertAfterWithGeneratorInvariantsNoError(bar)
            self.assertInWithManagerInvariants(mock_a)
            self.assertInWithGeneratorInvariants(foo)
        self.assertAfterWithManagerInvariantsNoError(mock_a)
        self.assertAfterWithGeneratorInvariantsNoError(foo)
class NestedNonexceptionalTestCase(unittest.TestCase,
                                   ContextmanagerAssertionMixin):
    # Happy-path tests for the Nested/MockNested helper classes.

    def testSingleArgInlineGeneratorSyntax(self):
        with Nested(mock_contextmanager_generator()):
            pass

    def testSingleArgBoundToNonTuple(self):
        m = mock_contextmanager_generator()
        # This will bind all the arguments to nested() into a single list
        # assigned to foo.
        with Nested(m) as foo:
            self.assertInWithManagerInvariants(m)
        self.assertAfterWithManagerInvariantsNoError(m)

    def testSingleArgBoundToSingleElementParenthesizedList(self):
        m = mock_contextmanager_generator()
        # This will bind all the arguments to nested() into a single list
        # assigned to foo.
        with Nested(m) as (foo):
            self.assertInWithManagerInvariants(m)
        self.assertAfterWithManagerInvariantsNoError(m)

    def testSingleArgBoundToMultipleElementTupleError(self):
        # Unpacking one entered manager into two targets must fail.
        def shouldThrowValueError():
            with Nested(mock_contextmanager_generator()) as (foo, bar):
                pass
        self.assertRaises(ValueError, shouldThrowValueError)

    def testSingleArgUnbound(self):
        mock_contextmanager = mock_contextmanager_generator()
        mock_nested = MockNested(mock_contextmanager)
        with mock_nested:
            self.assertInWithManagerInvariants(mock_contextmanager)
            self.assertInWithManagerInvariants(mock_nested)
        self.assertAfterWithManagerInvariantsNoError(mock_contextmanager)
        self.assertAfterWithManagerInvariantsNoError(mock_nested)

    def testMultipleArgUnbound(self):
        m = mock_contextmanager_generator()
        n = mock_contextmanager_generator()
        o = mock_contextmanager_generator()
        mock_nested = MockNested(m, n, o)
        with mock_nested:
            self.assertInWithManagerInvariants(m)
            self.assertInWithManagerInvariants(n)
            self.assertInWithManagerInvariants(o)
            self.assertInWithManagerInvariants(mock_nested)
        self.assertAfterWithManagerInvariantsNoError(m)
        self.assertAfterWithManagerInvariantsNoError(n)
        self.assertAfterWithManagerInvariantsNoError(o)
        self.assertAfterWithManagerInvariantsNoError(mock_nested)

    def testMultipleArgBound(self):
        mock_nested = MockNested(mock_contextmanager_generator(),
            mock_contextmanager_generator(), mock_contextmanager_generator())
        with mock_nested as (m, n, o):
            self.assertInWithGeneratorInvariants(m)
            self.assertInWithGeneratorInvariants(n)
            self.assertInWithGeneratorInvariants(o)
            self.assertInWithManagerInvariants(mock_nested)
        self.assertAfterWithGeneratorInvariantsNoError(m)
        self.assertAfterWithGeneratorInvariantsNoError(n)
        self.assertAfterWithGeneratorInvariantsNoError(o)
        self.assertAfterWithManagerInvariantsNoError(mock_nested)
class ExceptionalTestCase(ContextmanagerAssertionMixin, unittest.TestCase):
    # Behavior of with statements when the body (or the machinery itself)
    # raises: __exit__ must receive the exception info, cleanup must run,
    # and special exceptions (StopIteration, GeneratorExit) must propagate.

    def testSingleResource(self):
        cm = mock_contextmanager_generator()
        def shouldThrow():
            with cm as self.resource:
                self.assertInWithManagerInvariants(cm)
                self.assertInWithGeneratorInvariants(self.resource)
                self.raiseTestException()
        self.assertRaises(RuntimeError, shouldThrow)
        self.assertAfterWithManagerInvariantsWithError(cm)
        self.assertAfterWithGeneratorInvariantsWithError(self.resource)

    def testExceptionNormalized(self):
        cm = mock_contextmanager_generator()
        def shouldThrow():
            with cm as self.resource:
                # Note this relies on the fact that 1 // 0 produces an exception
                # that is not normalized immediately.
                1 // 0
        self.assertRaises(ZeroDivisionError, shouldThrow)
        self.assertAfterWithManagerInvariantsWithError(cm, ZeroDivisionError)

    def testNestedSingleStatements(self):
        mock_a = mock_contextmanager_generator()
        mock_b = mock_contextmanager_generator()
        def shouldThrow():
            with mock_a as self.foo:
                with mock_b as self.bar:
                    self.assertInWithManagerInvariants(mock_a)
                    self.assertInWithManagerInvariants(mock_b)
                    self.assertInWithGeneratorInvariants(self.foo)
                    self.assertInWithGeneratorInvariants(self.bar)
                    self.raiseTestException()
        self.assertRaises(RuntimeError, shouldThrow)
        # Both levels of nesting must have seen the exception.
        self.assertAfterWithManagerInvariantsWithError(mock_a)
        self.assertAfterWithManagerInvariantsWithError(mock_b)
        self.assertAfterWithGeneratorInvariantsWithError(self.foo)
        self.assertAfterWithGeneratorInvariantsWithError(self.bar)

    def testMultipleResourcesInSingleStatement(self):
        cm_a = mock_contextmanager_generator()
        cm_b = mock_contextmanager_generator()
        mock_nested = MockNested(cm_a, cm_b)
        def shouldThrow():
            with mock_nested as (self.resource_a, self.resource_b):
                self.assertInWithManagerInvariants(cm_a)
                self.assertInWithManagerInvariants(cm_b)
                self.assertInWithManagerInvariants(mock_nested)
                self.assertInWithGeneratorInvariants(self.resource_a)
                self.assertInWithGeneratorInvariants(self.resource_b)
                self.raiseTestException()
        self.assertRaises(RuntimeError, shouldThrow)
        self.assertAfterWithManagerInvariantsWithError(cm_a)
        self.assertAfterWithManagerInvariantsWithError(cm_b)
        self.assertAfterWithManagerInvariantsWithError(mock_nested)
        self.assertAfterWithGeneratorInvariantsWithError(self.resource_a)
        self.assertAfterWithGeneratorInvariantsWithError(self.resource_b)

    def testNestedExceptionBeforeInnerStatement(self):
        mock_a = mock_contextmanager_generator()
        mock_b = mock_contextmanager_generator()
        self.bar = None
        def shouldThrow():
            with mock_a as self.foo:
                self.assertInWithManagerInvariants(mock_a)
                self.assertInWithGeneratorInvariants(self.foo)
                self.raiseTestException()
                with mock_b as self.bar:
                    pass
        self.assertRaises(RuntimeError, shouldThrow)
        self.assertAfterWithManagerInvariantsWithError(mock_a)
        self.assertAfterWithGeneratorInvariantsWithError(self.foo)
        # The inner statement stuff should never have been touched
        self.assertEqual(self.bar, None)
        self.assertFalse(mock_b.enter_called)
        self.assertFalse(mock_b.exit_called)
        self.assertEqual(mock_b.exit_args, None)

    def testNestedExceptionAfterInnerStatement(self):
        mock_a = mock_contextmanager_generator()
        mock_b = mock_contextmanager_generator()
        def shouldThrow():
            with mock_a as self.foo:
                with mock_b as self.bar:
                    self.assertInWithManagerInvariants(mock_a)
                    self.assertInWithManagerInvariants(mock_b)
                    self.assertInWithGeneratorInvariants(self.foo)
                    self.assertInWithGeneratorInvariants(self.bar)
                self.raiseTestException()
        self.assertRaises(RuntimeError, shouldThrow)
        # The inner with exited cleanly before the exception was raised.
        self.assertAfterWithManagerInvariantsWithError(mock_a)
        self.assertAfterWithManagerInvariantsNoError(mock_b)
        self.assertAfterWithGeneratorInvariantsWithError(self.foo)
        self.assertAfterWithGeneratorInvariantsNoError(self.bar)

    def testRaisedStopIteration1(self):
        # From bug 1462485
        @contextmanager
        def cm():
            yield
        def shouldThrow():
            with cm():
                raise StopIteration("from with")
        with self.assertWarnsRegex(PendingDeprecationWarning, "StopIteration"):
            self.assertRaises(StopIteration, shouldThrow)

    def testRaisedStopIteration2(self):
        # From bug 1462485
        class cm(object):
            def __enter__(self):
                pass
            def __exit__(self, type, value, traceback):
                pass
        def shouldThrow():
            with cm():
                raise StopIteration("from with")
        self.assertRaises(StopIteration, shouldThrow)

    def testRaisedStopIteration3(self):
        # Another variant where the exception hasn't been instantiated
        # From bug 1705170
        @contextmanager
        def cm():
            yield
        def shouldThrow():
            with cm():
                raise next(iter([]))
        with self.assertWarnsRegex(PendingDeprecationWarning, "StopIteration"):
            self.assertRaises(StopIteration, shouldThrow)

    def testRaisedGeneratorExit1(self):
        # From bug 1462485
        @contextmanager
        def cm():
            yield
        def shouldThrow():
            with cm():
                raise GeneratorExit("from with")
        self.assertRaises(GeneratorExit, shouldThrow)

    def testRaisedGeneratorExit2(self):
        # From bug 1462485
        class cm (object):
            def __enter__(self):
                pass
            def __exit__(self, type, value, traceback):
                pass
        def shouldThrow():
            with cm():
                raise GeneratorExit("from with")
        self.assertRaises(GeneratorExit, shouldThrow)

    def testErrorsInBool(self):
        # issue4589: __exit__ return code may raise an exception
        # when looking at its truth value.
        class cm(object):
            def __init__(self, bool_conversion):
                class Bool:
                    def __bool__(self):
                        return bool_conversion()
                self.exit_result = Bool()
            def __enter__(self):
                return 3
            def __exit__(self, a, b, c):
                return self.exit_result
        def trueAsBool():
            with cm(lambda: True):
                self.fail("Should NOT see this")
        trueAsBool()
        def falseAsBool():
            with cm(lambda: False):
                self.fail("Should raise")
        self.assertRaises(AssertionError, falseAsBool)
        def failAsBool():
            with cm(lambda: 1//0):
                self.fail("Should NOT see this")
        self.assertRaises(ZeroDivisionError, failAsBool)
class NonLocalFlowControlTestCase(unittest.TestCase):
    # break/continue/return/yield/raise inside a with body must still run
    # the manager's __exit__ on the way out.

    def testWithBreak(self):
        counter = 0
        while True:
            counter += 1
            with mock_contextmanager_generator():
                counter += 10
                break
            counter += 100 # Not reached
        self.assertEqual(counter, 11)

    def testWithContinue(self):
        counter = 0
        while True:
            counter += 1
            if counter > 2:
                break
            with mock_contextmanager_generator():
                counter += 10
                continue
            counter += 100 # Not reached
        self.assertEqual(counter, 12)

    def testWithReturn(self):
        def foo():
            counter = 0
            while True:
                counter += 1
                with mock_contextmanager_generator():
                    counter += 10
                    return counter
                counter += 100 # Not reached
        self.assertEqual(foo(), 11)

    def testWithYield(self):
        def gen():
            with mock_contextmanager_generator():
                yield 12
                yield 13
        x = list(gen())
        self.assertEqual(x, [12, 13])

    def testWithRaise(self):
        counter = 0
        try:
            counter += 1
            with mock_contextmanager_generator():
                counter += 10
                raise RuntimeError
            counter += 100 # Not reached
        except RuntimeError:
            self.assertEqual(counter, 11)
        else:
            self.fail("Didn't raise RuntimeError")
class AssignmentTargetTestCase(unittest.TestCase):
    # The `as` target of a with statement accepts any assignment target:
    # subscripts, attributes, and tuples thereof.

    def testSingleComplexTarget(self):
        targets = {1: [0, 1, 2]}
        with mock_contextmanager_generator() as targets[1][0]:
            self.assertEqual(list(targets.keys()), [1])
            self.assertEqual(targets[1][0].__class__, MockResource)
        # Subscription of an arbitrary expression is a valid target.
        with mock_contextmanager_generator() as list(targets.values())[0][1]:
            self.assertEqual(list(targets.keys()), [1])
            self.assertEqual(targets[1][1].__class__, MockResource)
        with mock_contextmanager_generator() as targets[2]:
            keys = list(targets.keys())
            keys.sort()
            self.assertEqual(keys, [1, 2])
        class C: pass
        blah = C()
        with mock_contextmanager_generator() as blah.foo:
            self.assertEqual(hasattr(blah, "foo"), True)

    def testMultipleComplexTargets(self):
        class C:
            def __enter__(self): return 1, 2, 3
            def __exit__(self, t, v, tb): pass
        targets = {1: [0, 1, 2]}
        with C() as (targets[1][0], targets[1][1], targets[1][2]):
            self.assertEqual(targets, {1: [1, 2, 3]})
        with C() as (list(targets.values())[0][2], list(targets.values())[0][1], list(targets.values())[0][0]):
            self.assertEqual(targets, {1: [3, 2, 1]})
        with C() as (targets[1], targets[2], targets[3]):
            self.assertEqual(targets, {1: 1, 2: 2, 3: 3})
        class B: pass
        blah = B()
        with C() as (blah.one, blah.two, blah.three):
            self.assertEqual(blah.one, 1)
            self.assertEqual(blah.two, 2)
            self.assertEqual(blah.three, 3)
class ExitSwallowsExceptionTestCase(unittest.TestCase):
    """A truthy result from __exit__ suppresses the in-flight exception;
    a falsy result lets it propagate."""

    def testExitTrueSwallowsException(self):
        class AfricanSwallow:
            def __enter__(self):
                pass
            def __exit__(self, t, v, tb):
                # Returning True tells the interpreter to swallow the error.
                return True
        try:
            with AfricanSwallow():
                1/0
        except ZeroDivisionError:
            self.fail("ZeroDivisionError should have been swallowed")

    def testExitFalseDoesntSwallowException(self):
        class EuropeanSwallow:
            def __enter__(self):
                pass
            def __exit__(self, t, v, tb):
                # Returning False lets the error propagate normally.
                return False
        try:
            with EuropeanSwallow():
                1/0
        except ZeroDivisionError:
            pass
        else:
            self.fail("ZeroDivisionError should have been raised")
class NestedWith(unittest.TestCase):
    # Tests for the multi-manager form: `with a() as x, b() as y:`.

    class Dummy(object):
        # Records enter/exit and optionally "gobbles" (suppresses) exceptions.
        def __init__(self, value=None, gobble=False):
            if value is None:
                value = self
            self.value = value
            self.gobble = gobble
            self.enter_called = False
            self.exit_called = False

        def __enter__(self):
            self.enter_called = True
            return self.value

        def __exit__(self, *exc_info):
            self.exit_called = True
            self.exc_info = exc_info
            if self.gobble:
                return True

    class InitRaises(object):
        def __init__(self): raise RuntimeError()

    class EnterRaises(object):
        def __enter__(self): raise RuntimeError()
        def __exit__(self, *exc_info): pass

    class ExitRaises(object):
        def __enter__(self): pass
        def __exit__(self, *exc_info): raise RuntimeError()

    def testNoExceptions(self):
        with self.Dummy() as a, self.Dummy() as b:
            self.assertTrue(a.enter_called)
            self.assertTrue(b.enter_called)
        self.assertTrue(a.exit_called)
        self.assertTrue(b.exit_called)

    def testExceptionInExprList(self):
        # Failure while constructing the second manager still exits the first.
        try:
            with self.Dummy() as a, self.InitRaises():
                pass
        except:
            pass
        self.assertTrue(a.enter_called)
        self.assertTrue(a.exit_called)

    def testExceptionInEnter(self):
        try:
            with self.Dummy() as a, self.EnterRaises():
                self.fail('body of bad with executed')
        except RuntimeError:
            pass
        else:
            self.fail('RuntimeError not reraised')
        self.assertTrue(a.enter_called)
        self.assertTrue(a.exit_called)

    def testExceptionInExit(self):
        # The outer (gobbling) manager swallows the inner __exit__'s error.
        body_executed = False
        with self.Dummy(gobble=True) as a, self.ExitRaises():
            body_executed = True
        self.assertTrue(a.enter_called)
        self.assertTrue(a.exit_called)
        self.assertTrue(body_executed)
        self.assertNotEqual(a.exc_info[0], None)

    def testEnterReturnsTuple(self):
        with self.Dummy(value=(1,2)) as (a1, a2), \
             self.Dummy(value=(10, 20)) as (b1, b2):
            self.assertEqual(1, a1)
            self.assertEqual(2, a2)
            self.assertEqual(10, b1)
            self.assertEqual(20, b2)
# Allow running this test file directly.
if __name__ == '__main__':
    unittest.main()
| apache-2.0 |
dgoodwin/rounder | src/rounder/dto.py | 1 | 5744 | # Rounder - Poker for the GNOME Desktop
#
# Copyright (C) 2006 Devan Goodwin <dgoodwin@dangerouslyinc.com>
# Copyright (C) 2006 James Bowes <jbowes@dangerouslyinc.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""
Rounder Data Transfer Objects
Various objects used to safely transmit game state or activity to clients.
In cases where an engine object is considered safe to transmit to a client,
a DTO should still be created as child class of the engine object. Even if
initially empty it provides a hook for us to change the DTO should the
engine object later grow to contain sensitive information.
"""
from rounder.currency import Currency
class TableState(object):
    """
    Representation of a table fit for sending to a table's observers.

    Must be careful not to expose any engine internals, this WILL be
    transferred over the wire.
    """

    def __init__(self, table):
        """Snapshot the observer-visible state of *table*.

        Captures table identity, whether a hand is underway, community
        cards, dealer position, pots, current-round bets, and a
        per-seat list of PlayerState objects (None for empty seats).
        """
        self.id = table.id
        self.name = table.name
        self.limit = table.limit
        # A hand is underway whenever the game state machine has a
        # current state.
        self.hand_underway = (table.gsm.current is not None)
        self.community_cards = []
        self.dealer_seat_num = None
        if table.dealer is not None:
            self.dealer_seat_num = table.dealer.seat
        self.pots = []
        self.round_bets = Currency(0.00)
        if table.game is not None:
            self.community_cards = table.game.community_cards
            for pot in table.game.pot_mgr.pots:
                self.pots.append(PotState(pot))
            # Pots only receive funds after the round of betting has
            # completed, but clients are more than a little interested
            # in seeing the amount of money bet in the current round as
            # well. Include this info and let the client figure out how
            # to render it (it's not really part of any pot yet, nor do
            # we know which pot it will go to).
            for p in table.game.players:
                self.round_bets += p.current_bet
        # Seats are transmitted as a list with one entry per physical
        # seat: None for an empty seat, otherwise a PlayerState.
        self.seats = []
        for seat_num in range(table.seats.get_size()):
            p = table.seats.get_player(seat_num)
            if p is None:
                self.seats.append(None)
            else:
                self.seats.append(PlayerState(p))

    def print_state(self):
        """Print the table state."""
        # Parenthesized single-argument print works identically under
        # Python 2 and Python 3 (the original bare print statement is
        # Python 2 only).
        print(self.print_state_as_string())

    def print_state_as_string(self):
        """Return a human-readable multi-line summary of this state."""
        output = []
        output.append("Table state for table: %s" % self.name)
        output.append("  hand underway: %s" % self.hand_underway)
        output.append("  seats:")
        for i, s in enumerate(self.seats):
            output.append("  %s - %s" % (i, s))
        return '\n'.join(output)
class TableListing(object):
    """Minimal table summary for the server window's table list.

    Carries only the table ID, name, limit, and current player count.
    Differs from TableState, which holds enough information to redraw
    the table on an observer's screen.
    """

    def __init__(self, table):
        self.id = table.id
        self.name = table.name
        self.limit = table.limit
        occupied = table.seats.seated_players
        self.player_count = len(occupied)

    def __str__(self):
        return "%s - %s - %s players" % (
            self.name, self.limit, self.player_count)
class PlayerState(object):
    """Wire-safe snapshot of a Player's publicly visible state."""

    def __init__(self, player):
        self.username = player.username
        self.chips = player.chips
        self.seat = player.seat
        # TODO: Don't show sitting out here if player sat out mid-hand,
        # this needs a test.
        self.sitting_out = player.sitting_out
        self.folded = player.folded
        # A folded player's hole cards are never exposed to observers.
        self.num_cards = 0 if self.folded else len(player.cards)

    def __str__(self):
        return "%s - %s - cards: %s" % (
            self.username, self.chips, self.num_cards)
class PotState(object):
    """Representation of a Pot safe for transmitting over the wire.

    The engine's Pot class is nearly wire-safe as is, but wrapping it
    here guards against its player references (or any sensitive fields
    added later) leaking to clients.
    """

    def __init__(self, pot):
        self.is_main_pot = pot.is_main_pot
        self.amount = pot.amount

    def __repr__(self):
        return "Pot: $%s" % (self.amount,)
class PotWinner(object):
    """A single pot payout: who won, how much, and with what hand."""

    def __init__(self, username, amount, hand):
        self.username = username
        self.amount = amount
        self.hand = hand

    def __repr__(self):
        return "PotWinner: %s won $%s with %s" % (
            self.username, self.amount, self.hand)
| gpl-2.0 |
cnelsonsic/Liquor-Cabinet | sqlalchemy/dialects/mysql/oursql.py | 18 | 9238 | """Support for the MySQL database via the oursql adapter.
OurSQL is available at:
http://packages.python.org/oursql/
Connecting
-----------
Connect string format::
mysql+oursql://<user>:<password>@<host>[:<port>]/<dbname>
Character Sets
--------------
oursql defaults to using ``utf8`` as the connection charset, but other
encodings may be used instead. Like the MySQL-Python driver, unicode support
can be completely disabled::
# oursql sets the connection charset to utf8 automatically; all strings come
# back as utf8 str
create_engine('mysql+oursql:///mydb?use_unicode=0')
To not automatically use ``utf8`` and instead use whatever the connection
defaults to, there is a separate parameter::
# use the default connection charset; all strings come back as unicode
create_engine('mysql+oursql:///mydb?default_charset=1')
# use latin1 as the connection charset; all strings come back as unicode
create_engine('mysql+oursql:///mydb?charset=latin1')
"""
import re
from sqlalchemy.dialects.mysql.base import (BIT, MySQLDialect, MySQLExecutionContext,
MySQLCompiler, MySQLIdentifierPreparer)
from sqlalchemy.engine import base as engine_base, default
from sqlalchemy.sql import operators as sql_operators
from sqlalchemy import exc, log, schema, sql, types as sqltypes, util
from sqlalchemy import processors
class _oursqlBIT(BIT):
    """BIT type override for the oursql driver.

    oursql hands back MySQL BIT values already converted, so no
    result-set processing is needed on top of it.
    """
    def result_processor(self, dialect, coltype):
        # Returning None tells the core to use row values as-is.
        return None
class MySQLExecutionContext_oursql(MySQLExecutionContext):
    """Execution context exposing the oursql plain-query flag."""

    @property
    def plain_query(self):
        # True when the statement must bypass oursql's parameterized
        # query machinery (set via the '_oursql_plain_query'
        # execution option).
        options = self.execution_options
        return options.get('_oursql_plain_query', False)
class MySQLDialect_oursql(MySQLDialect):
    """MySQL dialect for the oursql DB-API driver.

    oursql refuses to run certain statements (XA commands, some
    metadata/reflection queries) through its parameterized-query API,
    so many inherited methods are overridden here to execute through a
    connection carrying the ``_oursql_plain_query`` execution option.
    """
    driver = 'oursql'
    # Py3K
    # description_encoding = None
    # Py2K
    supports_unicode_binds = True
    supports_unicode_statements = True
    # end Py2K
    supports_native_decimal = True
    supports_sane_rowcount = True
    supports_sane_multi_rowcount = True
    execution_ctx_cls = MySQLExecutionContext_oursql
    colspecs = util.update_copy(
        MySQLDialect.colspecs,
        {
            sqltypes.Time: sqltypes.Time,
            BIT: _oursqlBIT,
        }
    )
    @classmethod
    def dbapi(cls):
        # Deferred import keeps this module importable without oursql
        # installed.
        return __import__('oursql')
    def do_execute(self, cursor, statement, parameters, context=None):
        """Provide an implementation of *cursor.execute(statement, parameters)*."""
        if context and context.plain_query:
            cursor.execute(statement, plain_query=True)
        else:
            cursor.execute(statement, parameters)
    def do_begin(self, connection):
        # BEGIN must be issued as a plain (non-parameterized) query.
        connection.cursor().execute('BEGIN', plain_query=True)
    def _xa_query(self, connection, query, xid):
        # Escape the xid and interpolate it directly into the XA
        # statement text; oursql will not parameterize XA commands.
        # Py2K
        arg = connection.connection._escape_string(xid)
        # end Py2K
        # Py3K
        # charset = self._connection_charset
        # arg = connection.connection._escape_string(xid.encode(charset)).decode(charset)
        connection.execution_options(_oursql_plain_query=True).execute(query % arg)
    # Because mysql is bad, these methods have to be
    # reimplemented to use _PlainQuery. Basically, some queries
    # refuse to return any data if they're run through
    # the parameterized query API, or refuse to be parameterized
    # in the first place.
    def do_begin_twophase(self, connection, xid):
        self._xa_query(connection, 'XA BEGIN "%s"', xid)
    def do_prepare_twophase(self, connection, xid):
        self._xa_query(connection, 'XA END "%s"', xid)
        self._xa_query(connection, 'XA PREPARE "%s"', xid)
    def do_rollback_twophase(self, connection, xid, is_prepared=True,
                             recover=False):
        if not is_prepared:
            self._xa_query(connection, 'XA END "%s"', xid)
        self._xa_query(connection, 'XA ROLLBACK "%s"', xid)
    def do_commit_twophase(self, connection, xid, is_prepared=True,
                           recover=False):
        if not is_prepared:
            self.do_prepare_twophase(connection, xid)
        self._xa_query(connection, 'XA COMMIT "%s"', xid)
    # Q: why didn't we need all these "plain_query" overrides earlier ?
    # am i on a newer/older version of OurSQL ?
    def has_table(self, connection, table_name, schema=None):
        return MySQLDialect.has_table(self,
                                      connection.connect().\
                                        execution_options(_oursql_plain_query=True),
                                      table_name, schema)
    def get_table_options(self, connection, table_name, schema=None, **kw):
        return MySQLDialect.get_table_options(self,
                                              connection.connect().\
                                                execution_options(_oursql_plain_query=True),
                                              table_name,
                                              schema = schema,
                                              **kw
                                              )
    def get_columns(self, connection, table_name, schema=None, **kw):
        return MySQLDialect.get_columns(self,
                                        connection.connect().\
                                          execution_options(_oursql_plain_query=True),
                                        table_name,
                                        schema=schema,
                                        **kw
                                        )
    def get_view_names(self, connection, schema=None, **kw):
        return MySQLDialect.get_view_names(self,
                                           connection.connect().\
                                             execution_options(_oursql_plain_query=True),
                                           schema=schema,
                                           **kw
                                           )
    def get_table_names(self, connection, schema=None, **kw):
        return MySQLDialect.get_table_names(self,
                                            connection.connect().\
                                              execution_options(_oursql_plain_query=True),
                                            schema
                                            )
    def get_schema_names(self, connection, **kw):
        return MySQLDialect.get_schema_names(self,
                                             connection.connect().\
                                               execution_options(_oursql_plain_query=True),
                                             **kw
                                             )
    def initialize(self, connection):
        return MySQLDialect.initialize(
                            self,
                            connection.execution_options(_oursql_plain_query=True)
                            )
    def _show_create_table(self, connection, table, charset=None,
                           full_name=None):
        return MySQLDialect._show_create_table(self,
                                connection.contextual_connect(close_with_result=True).
                                execution_options(_oursql_plain_query=True),
                                table, charset, full_name)
    def is_disconnect(self, e):
        # A ProgrammingError with no errno and a "...closed" message
        # (not about a cursor) marks a dead connection; otherwise fall
        # back to the usual MySQL server-gone-away error codes.
        if isinstance(e, self.dbapi.ProgrammingError):
            return e.errno is None and 'cursor' not in e.args[1] and e.args[1].endswith('closed')
        else:
            return e.errno in (2006, 2013, 2014, 2045, 2055)
    def create_connect_args(self, url):
        """Translate a SQLAlchemy URL into oursql connect() arguments."""
        opts = url.translate_connect_args(database='db', username='user',
                                          password='passwd')
        opts.update(url.query)
        util.coerce_kw_type(opts, 'port', int)
        util.coerce_kw_type(opts, 'compress', bool)
        util.coerce_kw_type(opts, 'autoping', bool)
        util.coerce_kw_type(opts, 'default_charset', bool)
        # default_charset=1 means "use the server default" rather than
        # oursql's implicit utf8; charset=None requests that behavior.
        if opts.pop('default_charset', False):
            opts['charset'] = None
        else:
            util.coerce_kw_type(opts, 'charset', str)
        opts['use_unicode'] = opts.get('use_unicode', True)
        util.coerce_kw_type(opts, 'use_unicode', bool)
        # FOUND_ROWS must be set in CLIENT_FLAGS to enable
        # supports_sane_rowcount.
        opts.setdefault('found_rows', True)
        return [[], opts]
    def _get_server_version_info(self, connection):
        # Split e.g. "5.1.49-log" into (5, 1, 49, 'log'); numeric parts
        # become ints, anything else is kept as a string.
        dbapi_con = connection.connection
        version = []
        r = re.compile('[.\-]')
        for n in r.split(dbapi_con.server_info):
            try:
                version.append(int(n))
            except ValueError:
                version.append(n)
        return tuple(version)
    def _extract_error_code(self, exception):
        return exception.errno
    def _detect_charset(self, connection):
        """Sniff out the character set in use for connection results."""
        return connection.connection.charset
    def _compat_fetchall(self, rp, charset=None):
        """oursql isn't super-broken like MySQLdb, yaaay."""
        return rp.fetchall()
    def _compat_fetchone(self, rp, charset=None):
        """oursql isn't super-broken like MySQLdb, yaaay."""
        return rp.fetchone()
    def _compat_first(self, rp, charset=None):
        return rp.first()
# Module-level hook consulted by SQLAlchemy's dialect registry.
dialect = MySQLDialect_oursql
| gpl-3.0 |
1tush/sentry | src/sentry/migrations/0107_expand_user.py | 36 | 25693 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        # Widen auth_user.username from 30 to 128 characters,
        # preserving the unique constraint.
        db.alter_column('auth_user', 'username', self.gf('django.db.models.fields.CharField')(max_length=128, unique=True))
    def backwards(self, orm):
        # Restore auth_user.username to 30 characters; values longer
        # than 30 would be truncated or rejected by the database here.
        db.alter_column('auth_user', 'username', self.gf('django.db.models.fields.CharField')(max_length=30, unique=True))
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'sentry.user': {
'Meta': {'object_name': 'User', 'db_table': "'auth_user'"},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'sentry.accessgroup': {
'Meta': {'unique_together': "(('team', 'name'),)", 'object_name': 'AccessGroup'},
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'managed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['sentry.User']", 'symmetrical': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'projects': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['sentry.Project']", 'symmetrical': 'False'}),
'team': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Team']"}),
'type': ('django.db.models.fields.IntegerField', [], {'default': '50'})
},
u'sentry.activity': {
'Meta': {'object_name': 'Activity'},
'data': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Event']", 'null': 'True'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Group']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']"}),
'type': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.User']", 'null': 'True'})
},
u'sentry.alert': {
'Meta': {'object_name': 'Alert'},
'data': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Group']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']"}),
'related_groups': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'related_alerts'", 'symmetrical': 'False', 'through': u"orm['sentry.AlertRelatedGroup']", 'to': u"orm['sentry.Group']"}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
u'sentry.alertrelatedgroup': {
'Meta': {'unique_together': "(('group', 'alert'),)", 'object_name': 'AlertRelatedGroup'},
'alert': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Alert']"}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'})
},
u'sentry.event': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'Event', 'db_table': "'sentry_message'"},
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'db_column': "'message_id'"}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'event_set'", 'null': 'True', 'to': u"orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']", 'null': 'True'}),
'server_name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
'site': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
'time_spent': ('django.db.models.fields.IntegerField', [], {'null': 'True'})
},
u'sentry.eventmapping': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'EventMapping'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']"})
},
u'sentry.group': {
'Meta': {'unique_together': "(('project', 'checksum'),)", 'object_name': 'Group', 'db_table': "'sentry_groupedmessage'"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']", 'null': 'True'}),
'resolved_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'time_spent_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'time_spent_total': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1', 'db_index': 'True'})
},
u'sentry.groupbookmark': {
'Meta': {'unique_together': "(('project', 'user', 'group'),)", 'object_name': 'GroupBookmark'},
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': u"orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': u"orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'sentry_bookmark_set'", 'to': u"orm['sentry.User']"})
},
u'sentry.groupcountbyminute': {
'Meta': {'unique_together': "(('project', 'group', 'date'),)", 'object_name': 'GroupCountByMinute', 'db_table': "'sentry_messagecountbyminute'"},
'date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']", 'null': 'True'}),
'time_spent_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'time_spent_total': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
u'sentry.groupmeta': {
'Meta': {'unique_together': "(('group', 'key'),)", 'object_name': 'GroupMeta'},
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'value': ('django.db.models.fields.TextField', [], {})
},
u'sentry.groupseen': {
'Meta': {'unique_together': "(('user', 'group'),)", 'object_name': 'GroupSeen'},
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.User']", 'db_index': 'False'})
},
u'sentry.grouptag': {
'Meta': {'unique_together': "(('project', 'key', 'value', 'group'),)", 'object_name': 'GroupTag', 'db_table': "'sentry_messagefiltervalue'"},
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']", 'null': 'True'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'sentry.grouptagkey': {
'Meta': {'unique_together': "(('project', 'group', 'key'),)", 'object_name': 'GroupTagKey'},
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']", 'null': 'True'}),
'values_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
u'sentry.lostpasswordhash': {
'Meta': {'object_name': 'LostPasswordHash'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.User']", 'unique': 'True'})
},
u'sentry.option': {
'Meta': {'object_name': 'Option'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}),
'value': ('picklefield.fields.PickledObjectField', [], {})
},
u'sentry.pendingteammember': {
'Meta': {'unique_together': "(('team', 'email'),)", 'object_name': 'PendingTeamMember'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'team': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'pending_member_set'", 'to': u"orm['sentry.Team']"}),
'type': ('django.db.models.fields.IntegerField', [], {'default': '50'})
},
u'sentry.project': {
'Meta': {'unique_together': "(('team', 'slug'),)", 'object_name': 'Project'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'owner': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'sentry_owned_project_set'", 'null': 'True', 'to': u"orm['sentry.User']"}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'null': 'True'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'team': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Team']", 'null': 'True'})
},
u'sentry.projectcountbyminute': {
'Meta': {'unique_together': "(('project', 'date'),)", 'object_name': 'ProjectCountByMinute'},
'date': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']", 'null': 'True'}),
'time_spent_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'time_spent_total': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
u'sentry.projectkey': {
'Meta': {'object_name': 'ProjectKey'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'key_set'", 'to': u"orm['sentry.Project']"}),
'public_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'secret_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.User']", 'null': 'True'}),
'user_added': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'keys_added_set'", 'null': 'True', 'to': u"orm['sentry.User']"})
},
u'sentry.projectoption': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'ProjectOption', 'db_table': "'sentry_projectoptions'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']"}),
'value': ('picklefield.fields.PickledObjectField', [], {})
},
u'sentry.tagkey': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'TagKey', 'db_table': "'sentry_filterkey'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']"}),
'values_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
u'sentry.tagvalue': {
'Meta': {'unique_together': "(('project', 'key', 'value'),)", 'object_name': 'TagValue', 'db_table': "'sentry_filtervalue'"},
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']", 'null': 'True'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'sentry.team': {
'Meta': {'object_name': 'Team'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'team_memberships'", 'symmetrical': 'False', 'through': u"orm['sentry.TeamMember']", 'to': u"orm['sentry.User']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'owner': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.User']"}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'})
},
u'sentry.teammember': {
'Meta': {'unique_together': "(('team', 'user'),)", 'object_name': 'TeamMember'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'team': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'member_set'", 'to': u"orm['sentry.Team']"}),
'type': ('django.db.models.fields.IntegerField', [], {'default': '50'}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'sentry_teammember_set'", 'to': u"orm['sentry.User']"})
},
u'sentry.useroption': {
'Meta': {'unique_together': "(('user', 'project', 'key'),)", 'object_name': 'UserOption'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']", 'null': 'True'}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.User']"}),
'value': ('picklefield.fields.PickledObjectField', [], {})
}
}
complete_apps = ['sentry']
| bsd-3-clause |
mcollins12321/anita | venv/lib/python2.7/site-packages/html5lib/treewalkers/genshistream.py | 1730 | 2278 | from __future__ import absolute_import, division, unicode_literals
from genshi.core import QName
from genshi.core import START, END, XML_NAMESPACE, DOCTYPE, TEXT
from genshi.core import START_NS, END_NS, START_CDATA, END_CDATA, PI, COMMENT
from . import _base
from ..constants import voidElements, namespaces
class TreeWalker(_base.TreeWalker):
    """Tree walker that converts a Genshi markup event stream into the
    html5lib token stream expected by html5lib's serializers."""

    def __iter__(self):
        # Buffer the events so we can pass in the following one
        previous = None
        for event in self.tree:
            if previous is not None:
                for token in self.tokens(previous, event):
                    yield token
            previous = event
        # Don't forget the final event!
        if previous is not None:
            for token in self.tokens(previous, None):
                yield token

    def tokens(self, event, next):
        """Yield html5lib tokens for *event*.

        *next* is the Genshi event that follows *event* (or None at end of
        stream); it is only used to decide whether a START of an HTML void
        element is immediately closed and can be emitted as an empty tag.
        """
        kind, data, pos = event
        if kind == START:
            tag, attribs = data
            name = tag.localname
            namespace = tag.namespace
            converted_attribs = {}
            for k, v in attribs:
                if isinstance(k, QName):
                    converted_attribs[(k.namespace, k.localname)] = v
                else:
                    # plain string attribute name: no namespace
                    converted_attribs[(None, k)] = v
            if namespace == namespaces["html"] and name in voidElements:
                # Void HTML elements (br, img, ...) are emitted as a single
                # emptyTag token; the last argument flags "has children"
                # when the next event is not the matching END.
                for token in self.emptyTag(namespace, name, converted_attribs,
                                           not next or next[0] != END
                                           or next[1] != tag):
                    yield token
            else:
                yield self.startTag(namespace, name, converted_attribs)
        elif kind == END:
            name = data.localname
            namespace = data.namespace
            # END of a void element is swallowed: the START already
            # produced the emptyTag token.
            if name not in voidElements:
                yield self.endTag(namespace, name)
        elif kind == COMMENT:
            yield self.comment(data)
        elif kind == TEXT:
            for token in self.text(data):
                yield token
        elif kind == DOCTYPE:
            yield self.doctype(*data)
        elif kind in (XML_NAMESPACE, DOCTYPE, START_NS, END_NS,
                      START_CDATA, END_CDATA, PI):
            # Event kinds deliberately ignored.  NOTE(review): DOCTYPE in
            # this tuple is dead — it is already handled by the branch above.
            pass
        else:
            yield self.unknown(kind)
| mit |
agiliq/django | django/conf/locale/en/formats.py | 394 | 1815 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'N j, Y'
TIME_FORMAT = 'P'
DATETIME_FORMAT = 'N j, Y, P'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'F j'
SHORT_DATE_FORMAT = 'm/d/Y'
SHORT_DATETIME_FORMAT = 'm/d/Y P'
FIRST_DAY_OF_WEEK = 0  # Sunday

# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# Kept ISO formats as they are in first position
DATE_INPUT_FORMATS = (
    '%Y-%m-%d', '%m/%d/%Y', '%m/%d/%y',  # '2006-10-25', '10/25/2006', '10/25/06'
    # '%b %d %Y', '%b %d, %Y',            # 'Oct 25 2006', 'Oct 25, 2006'
    # '%d %b %Y', '%d %b, %Y',            # '25 Oct 2006', '25 Oct, 2006'
    # '%B %d %Y', '%B %d, %Y',            # 'October 25 2006', 'October 25, 2006'
    # '%d %B %Y', '%d %B, %Y',            # '25 October 2006', '25 October, 2006'
)
DATETIME_INPUT_FORMATS = (
    '%Y-%m-%d %H:%M:%S',     # '2006-10-25 14:30:59'
    '%Y-%m-%d %H:%M:%S.%f',  # '2006-10-25 14:30:59.000200'
    '%Y-%m-%d %H:%M',        # '2006-10-25 14:30'
    '%Y-%m-%d',              # '2006-10-25'
    '%m/%d/%Y %H:%M:%S',     # '10/25/2006 14:30:59'
    '%m/%d/%Y %H:%M:%S.%f',  # '10/25/2006 14:30:59.000200'
    '%m/%d/%Y %H:%M',        # '10/25/2006 14:30'
    '%m/%d/%Y',              # '10/25/2006'
    '%m/%d/%y %H:%M:%S',     # '10/25/06 14:30:59'
    '%m/%d/%y %H:%M:%S.%f',  # '10/25/06 14:30:59.000200'
    '%m/%d/%y %H:%M',        # '10/25/06 14:30'
    '%m/%d/%y',              # '10/25/06'
)
# Number formatting for the 'en' locale, e.g. 1,234,567.89
DECIMAL_SEPARATOR = '.'
THOUSAND_SEPARATOR = ','
NUMBER_GROUPING = 3  # digits per thousands group
| bsd-3-clause |
11dimension/niner | utils/mail.py | 1 | 14234 | from smtplib import SMTP_SSL as SMTP
from email.mime.text import MIMEText
import logging
import datetime
from utils.mongo_handler import mongodb_client
from utils.decorator import retry
from config import DEBUG as _DEBUG, EMAIL, INSTANCE_NAME as _INSTANCE_NAME
logger_server = logging.getLogger("DeployServer.MailManager")

# ---------------------------------------------------------------------------
# Mail subject/body templates, filled in via str.format() by MailManager.
# NOTE(review): several titles below contain a doubled '[[' after the
# instance name (cancel_fail_title, error_title, rollback_*_title); kept
# verbatim here because they are runtime output strings.
# ---------------------------------------------------------------------------

# Deployment finished successfully.
success_title = "[{instance_name}][小九来散花啦] {repo_name}部署{tag_name}成功!"
success_text = """
-------------------部署相关-----------------------------
部署者:{payload_pusher}
部署来源:{payload_src}
部署仓库:{repo_name}
部署TAG:{tag_name}
部署开始时间:{start_time}
部署完成时间:{end_time}
部署耗时:{cost_time}
部署事件id:{event_id}
-------------------Tag相关-----------------------------
{tag_str}
"""

# Deployment was cancelled and the rollback succeeded.
cancel_success_title = "[{instance_name}][小九被调戏,自愈成功] {repo_name}部署{tag_name}时被取消,且回滚成功!"
cancel_success_text = """
-------------------部署相关----------------------------
部署者:{payload_pusher}
部署来源:{payload_src}
部署仓库:{repo_name}
部署TAG:{tag_name}
部署开始时间:{start_time}
部署事件id:{event_id}
-------------------取消相关----------------------------
取消者:{cancel_user}
取消时间:{cancel_time}
回滚完成时间:{end_time}
整体耗时:{cost_time}
-------------------Tag相关----------------------------
{tag_str}
"""

# Deployment was cancelled and the rollback FAILED.
cancel_fail_title = "[{instance_name}][[小九被调戏,自愈失败,严重警告] {repo_name}部署{tag_name}时被取消,回滚失败!"
cancel_fail_text = """
-------------------部署相关----------------------------
部署者:{payload_pusher}
部署来源:{payload_src}
部署仓库:{repo_name}
部署TAG:{tag_name}
部署事件id:{event_id}
-------------------取消相关----------------------------
取消者:{cancel_user}
取消时间:{cancel_time}
回滚完成时间:{end_time}
-------------------回滚错误----------------------------
{rollback_stack_info}
-------------------Tag相关----------------------------
{tag_str}
"""

# Deployment hit an error; a rollback is about to start.
error_title = "[{instance_name}][[小九遭遇意外,求关注] {repo_name}部署{tag_name}时发生错误,准备回滚!"
error_text = """
-------------------部署相关----------------------------
部署者:{payload_pusher}
部署来源:{payload_src}
部署仓库:{repo_name}
部署TAG:{tag_name}
部署开始时间:{start_time}
部署事件id:{event_id}
-------------------错误相关-----------------------------
{stack_info}
-------------------Tag相关-----------------------------
{tag_str}
"""

# Rollback after an error succeeded.
rollback_success_title = "[{instance_name}][[小九遭遇意外,自愈成功] {repo_name}部署{tag_name}时发生错误,回滚成功!"
rollback_success_text = """
-------------------部署相关----------------------------
部署者:{payload_pusher}
部署来源:{payload_src}
部署仓库:{repo_name}
部署TAG:{tag_name}
部署开始时间:{start_time}
部署事件id:{event_id}
-------------------回滚相关----------------------------
回滚完成时间:{end_time}
整体耗时:{cost_time}
-------------------错误相关-----------------------------
{stack_info}
-------------------Tag相关-----------------------------
{tag_str}
"""

# Rollback after an error FAILED.
rollback_fail_title = "[{instance_name}][[小九遭遇意外,自愈失败,严重警告] {repo_name}部署{tag_name}时发生错误,回滚失败!"
rollback_fail_text = """
-------------------部署相关----------------------------
部署者:{payload_pusher}
部署来源:{payload_src}
部署仓库:{repo_name}
部署TAG:{tag_name}
部署开始时间:{start_time}
部署事件id:{event_id}
-------------------回滚相关----------------------------
回滚出错时间:{end_time}
整体耗时:{cost_time}
-------------------回滚错误----------------------------
{rollback_stack_info}
-------------------原始错误----------------------------
{stack_info}
-------------------Tag相关-----------------------------
{tag_str}
"""
class MailManager():
    """Sends deployment-status notification mails over SMTP-over-SSL.

    A fresh SMTP_SSL connection is opened for every send_mail() call and
    closed afterwards, so instances hold no persistent connection.
    """

    def __init__(self, smtp, user, password):
        # smtp: SSL SMTP host; user is both the login and the From address.
        self.smtp = smtp
        self.user = user
        self.password = password

    def _connect(self):
        """Open and authenticate a new SMTP_SSL connection as self.conn."""
        self.conn = SMTP(self.smtp)
        if _DEBUG:
            self.conn.set_debuglevel(True)
        self.conn.login(self.user, self.password)

    @retry(3)
    def send_mail(self, subject, text, mail_to):
        """Send a plain-text mail to every address in *mail_to* (a list).

        Returns True on success, False on failure (the failure is logged);
        the @retry decorator re-invokes the whole method up to 3 times.
        """
        msg = MIMEText(text, 'plain')
        msg['Subject'] = subject
        msg['To'] = ','.join(mail_to)
        msg['From'] = self.user
        self._connect()
        try:
            self.conn.sendmail(self.user, mail_to, msg.as_string())
            return True
        except Exception as ex:
            logger_server.exception(str(ex))
        finally:
            self.conn.close()
        return False

    def _get_cancel_info(self, payload):
        """Return (username, cancel_time) of the cancel operation logged in
        MongoDB for the deployment event carried by *payload*."""
        rst = mongodb_client['deployment']['operation_log'].find_one({
            "operation": "cancel",
            "statusSnapshot.task_running": payload.event_id
        })
        cancel_time = datetime.datetime.fromtimestamp(rst['createTimeStamp'])
        return rst['username'], cancel_time

    def send_success_mail(self, payload, tag, start_time, end_time):
        """Notify developers that a deployment completed successfully."""
        try:
            self.send_mail(
                subject=success_title.format(
                    instance_name=_INSTANCE_NAME,
                    repo_name=payload.repository_name,
                    tag_name=payload.tag),
                text=success_text.format(
                    payload_pusher=payload.username,
                    payload_src=payload.src,
                    repo_name=payload.repository_name,
                    tag_name=payload.tag,
                    start_time=start_time.strftime("%Y-%m-%d %H:%M:%S"),
                    end_time=end_time.strftime("%Y-%m-%d %H:%M:%S"),
                    cost_time=str(end_time - start_time),
                    event_id=payload.event_id,
                    tag_str=str(tag)),
                mail_to=self.get_developer_emails())
        except Exception as ex:
            # Notification failures must never break the deployment flow.
            logger_server.exception(ex)

    def send_cancel_success_mail(self, payload, tag, start_time, end_time):
        """Notify that a cancelled deployment was rolled back successfully."""
        try:
            cancel_user, cancel_time = self._get_cancel_info(payload)
            self.send_mail(
                subject=cancel_success_title.format(
                    instance_name=_INSTANCE_NAME,
                    repo_name=payload.repository_name,
                    tag_name=payload.tag),
                text=cancel_success_text.format(
                    payload_pusher=payload.username,
                    payload_src=payload.src,
                    repo_name=payload.repository_name,
                    tag_name=payload.tag,
                    start_time=start_time.strftime("%Y-%m-%d %H:%M:%S"),
                    end_time=end_time.strftime("%Y-%m-%d %H:%M:%S"),
                    cost_time=str(end_time - start_time),
                    event_id=payload.event_id,
                    tag_str=str(tag),
                    cancel_user=cancel_user,
                    cancel_time=cancel_time.strftime("%Y-%m-%d %H:%M:%S")),
                mail_to=self.get_developer_emails())
        except Exception as ex:
            logger_server.exception(ex)

    def send_cancel_fail_mail(self, payload, tag, end_time, stack_info):
        """Notify that rolling back a cancelled deployment FAILED."""
        try:
            cancel_user, cancel_time = self._get_cancel_info(payload)
            self.send_mail(
                subject=cancel_fail_title.format(
                    instance_name=_INSTANCE_NAME,
                    repo_name=payload.repository_name,
                    tag_name=payload.tag),
                text=cancel_fail_text.format(
                    payload_pusher=payload.username,
                    payload_src=payload.src,
                    repo_name=payload.repository_name,
                    tag_name=payload.tag,
                    end_time=end_time.strftime("%Y-%m-%d %H:%M:%S"),
                    event_id=payload.event_id,
                    tag_str=str(tag),
                    cancel_user=cancel_user,
                    cancel_time=cancel_time.strftime("%Y-%m-%d %H:%M:%S"),
                    rollback_stack_info=stack_info),
                mail_to=self.get_developer_emails())
        except Exception as ex:
            logger_server.exception(ex)

    def send_error_mail(self, payload, tag, start_time, stack_info):
        """Notify that a deployment hit an error and a rollback will start."""
        try:
            self.send_mail(
                subject=error_title.format(
                    instance_name=_INSTANCE_NAME,
                    repo_name=payload.repository_name,
                    tag_name=payload.tag),
                text=error_text.format(
                    payload_pusher=payload.username,
                    payload_src=payload.src,
                    repo_name=payload.repository_name,
                    tag_name=payload.tag,
                    start_time=start_time.strftime("%Y-%m-%d %H:%M:%S"),
                    event_id=payload.event_id,
                    tag_str=str(tag),
                    stack_info=stack_info),
                mail_to=self.get_developer_emails())
        except Exception as ex:
            logger_server.exception(ex)

    def send_rollback_success_mail(self, payload, tag, start_time, end_time, stack_info):
        """Notify that the automatic rollback after an error succeeded."""
        try:
            self.send_mail(
                subject=rollback_success_title.format(
                    instance_name=_INSTANCE_NAME,
                    repo_name=payload.repository_name,
                    tag_name=payload.tag),
                text=rollback_success_text.format(
                    payload_pusher=payload.username,
                    payload_src=payload.src,
                    repo_name=payload.repository_name,
                    tag_name=payload.tag,
                    start_time=start_time.strftime("%Y-%m-%d %H:%M:%S"),
                    end_time=end_time.strftime("%Y-%m-%d %H:%M:%S"),
                    cost_time=str(end_time - start_time),
                    event_id=payload.event_id,
                    tag_str=str(tag),
                    stack_info=stack_info),
                mail_to=self.get_developer_emails())
        except Exception as ex:
            logger_server.exception(ex)

    def send_rollback_fail_mail(self, payload, tag, start_time, end_time, stack_info, rollback_stack_info):
        """Notify that the automatic rollback after an error FAILED."""
        try:
            self.send_mail(
                subject=rollback_fail_title.format(
                    instance_name=_INSTANCE_NAME,
                    repo_name=payload.repository_name,
                    tag_name=payload.tag),
                text=rollback_fail_text.format(
                    payload_pusher=payload.username,
                    payload_src=payload.src,
                    repo_name=payload.repository_name,
                    tag_name=payload.tag,
                    start_time=start_time.strftime("%Y-%m-%d %H:%M:%S"),
                    end_time=end_time.strftime("%Y-%m-%d %H:%M:%S"),
                    cost_time=str(end_time - start_time),
                    event_id=payload.event_id,
                    tag_str=str(tag),
                    stack_info=stack_info,
                    rollback_stack_info=rollback_stack_info),
                mail_to=self.get_developer_emails())
        except Exception as ex:
            logger_server.exception(ex)

    def get_developer_emails(self):
        """Return the list of notification recipients.

        In DEBUG mode only the maintainer address is used; otherwise every
        account registered in MongoDB receives the mail.
        """
        if _DEBUG:
            # BUG FIX: this used to return a bare string; send_mail() does
            # ','.join(mail_to), which on a string produced a broken
            # 'q,i,n,...' To: header.  Always return a list.
            return ["qinyang@baixing.com"]
        else:
            rst = mongodb_client['deployment']['account'].find()
            return [one_dev['email'] for one_dev in rst]
# Module-level singleton used by the rest of the deploy server.
mail_manager = MailManager(EMAIL['SMTP'], EMAIL['USER'], EMAIL['PASSWORD'])
if __name__ == '__main__':
    # Manual smoke test: send a test mail to all developer addresses.
    text = "TEST"
    subject = "TEST SUBJECT"
    mail_manager.send_mail(subject, text, mail_manager.get_developer_emails())
| mit |
mpharrigan/mdtraj | mdtraj/core/selection.py | 5 | 14120 | # #############################################################################
# MDTraj: A Python Library for Loading, Saving, and Manipulating
# Molecular Dynamics Trajectories.
# Copyright 2012-2014 Stanford University and the Authors
#
# Authors: Matthew Harrigan
# Contributors: Robert T. McGibbon
#
# MDTraj is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 2.1
# of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with MDTraj. If not, see <http://www.gnu.org/licenses/>.
# #############################################################################
from __future__ import print_function
import re
import ast
import sys
from copy import deepcopy
from collections import namedtuple
from mdtraj.utils.six import PY2
from mdtraj.utils.external.pyparsing import (Word, ParserElement, MatchFirst,
Keyword, opAssoc, quotedString, alphas, alphanums, infixNotation, Group,
Optional, ParseException)
from mdtraj.utils.external.astor import codegen
# Packrat (memoizing) parsing speeds up pyparsing considerably here.
ParserElement.enablePackrat()
__all__ = ['parse_selection']
# ############################################################################
# Globals
# ############################################################################
NUMS = '.0123456789'  # characters making up a numeric literal token
# Reusable AST fragments.  The extra SINGLETON attribute marks them so the
# _RewriteNames transformer leaves them untouched.
THIS_ATOM = ast.Name(id='atom', ctx=ast.Load(), SINGLETON=True)
RE_MODULE = ast.Name(id='re', ctx=ast.Load(), SINGLETON=True)
SELECTION_GLOBALS = {'re': re}  # globals visible to the compiled lambda
_ParsedSelection = namedtuple('_ParsedSelection', ['expr', 'source', 'astnode'])
# ############################################################################
# Utils
# ############################################################################
class _RewriteNames(ast.NodeTransformer):
    """Rewrite bare names in a parsed selection expression.

    Names carrying the SINGLETON attribute (THIS_ATOM, RE_MODULE) pass
    through unchanged; True/False/None stay constants; every other bare
    name is turned into a string literal.
    """
    def visit_Name(self, node):
        if hasattr(node, 'SINGLETON'):
            return node
        _safe_names = {'None': None, 'True': True, 'False': False}
        if node.id in _safe_names:
            if sys.version_info >= (3, 4):
                # NameConstant replaced Name for these constants on 3.4+
                return ast.NameConstant(value=_safe_names[node.id])
            return node
        # all other bare names are taken to be string literals. Thus something
        # like parse_selection('name CA') properly resolves CA as a string
        # literal, not a barename to be loaded from the global scope!
        return ast.Str(s=node.id)
def _chain(*attrs):
    """Build a chained attribute access on the atom under test.

    For example ``_chain('residue', 'is_protein')`` produces the AST for
    ``atom.residue.is_protein``:
    Attribute(value=Attribute(value=THIS_ATOM, attr='residue', ctx=Load()),
              attr='is_protein', ctx=Load())
    """
    def wrap(inner, name):
        # one level of attribute access around the node built so far
        return ast.Attribute(value=inner, attr=name, ctx=ast.Load())

    node = THIS_ATOM
    for name in attrs:
        node = wrap(node, name)
    return node
def _kw(*tuples):
"""Create a many-to-one dictionary.
_kw((['one', '1'], 'one'))
gives {'one': 'one', '1': 'one'}
"""
dic = dict()
for keys, val in tuples:
for key in keys:
dic[key] = val
return dic
def _check_n_tokens(tokens, n_tokens, name):
if not len(tokens) == n_tokens:
err = "{} take {} values. You gave {}"
err = err.format(name, n_tokens, len(tokens))
raise ParseException(err)
class SelectionKeyword(object):
    """A bare selection keyword such as 'name', 'protein' or 'mass'.

    ``keyword_aliases`` maps every accepted spelling onto the prebuilt AST
    fragment that reads the corresponding attribute off the atom under test.
    """
    keyword_aliases = _kw(
        # Atom.<attribute>
        (('all', 'everything'), ast.Name(id='True', ctx=ast.Load())),
        (('none', 'nothing'), ast.Name(id='False', ctx=ast.Load())),
        (('backbone', 'is_backbone'), _chain('is_backbone')),
        (('sidechain', 'is_sidechain'), _chain('is_sidechain')),
        # Atom.residue.<attribute>
        (('protein', 'is_protein'), _chain('residue', 'is_protein')),
        (('code', 'rescode', 'resc'), _chain('residue', 'code')),
        # (('nucleic', 'is_nucleic'), _chain('residue', 'is_nucleic')),
        (('water', 'waters', 'is_water'), _chain('residue', 'is_water')),
        (('name',), _chain('name')),
        (('index',), _chain('index')),
        (('n_bonds',), _chain('n_bonds')),
        (('residue', 'resSeq'), _chain('residue', 'resSeq')),
        (('resname', 'resn'), _chain('residue', 'name')),
        (('resid', 'resi'), _chain('residue', 'index')),
        (('segment_id','segname',), _chain('segment_id')),
        # Atom.residue.chain.<attribute>
        (('chainid',), _chain('residue', 'chain', 'index')),
        # Atom.element.<attribute>
        (('type', 'element', 'symbol'), _chain('element', 'symbol')),
        # (('radius',), _chain('element', 'radius')),
        (('mass',), _chain('element', 'mass')),
    )

    def __init__(self, tokens):
        # pyparsing constructs the instance while building the parse tree,
        # and gives us the set tokens. In this case, the tokens are the
        # single keyword that matched.
        self._tokens = tokens
        _check_n_tokens(tokens, 1, 'Unary selectors')
        assert tokens[0] in self.keyword_aliases

    def ast(self):
        # Look up the prebuilt AST fragment for this keyword.
        return self.keyword_aliases[self._tokens[0]]
class Literal(object):
    """A literal token: a number, quoted string, or bare word."""

    def __init__(self, tokens):
        # pyparsing hands us the matched tokens; exactly one is expected.
        self.token = tokens[0]
        _check_n_tokens(tokens, 1, 'literal')

    def ast(self):
        """Parse the literal using Python's own grammar and return its AST."""
        parsed = ast.parse(self.token, mode='eval')
        return parsed.body
class UnaryInfixOperand(object):
    """Unary prefix negation: ``not expr`` / ``!expr``."""
    n_terms = 1
    assoc = 'RIGHT'

    keyword_aliases = _kw(
        (['not ', '!'], ast.Not()),
    )

    def __init__(self, tokens):
        tokens = tokens[0]
        _check_n_tokens(tokens, 2, 'Unary infix operators')
        self.op_token, self.value_token = tokens
        assert self.op_token in self.keyword_aliases
        if isinstance(self.value_token, Literal):
            # e.g. 'not 5' is meaningless in the selection grammar
            raise ValueError("Cannot use literals as booleans.")

    def ast(self):
        # Wrap the operand in a UnaryOp(Not) node.
        return ast.UnaryOp(op=self.keyword_aliases[self.op_token],
                           operand=self.value_token.ast())
class RegexInfixOperand(object):
    """The regex-match operator, e.g. ``name =~ 'C.*'``.

    Compiles to ``re.match(pattern, string) is not None``.
    """
    n_terms = 2
    assoc = 'LEFT'
    keyword_aliases = {'=~': '=~'}

    def __init__(self, tokens):
        self.tokens = tokens[0]
        _check_n_tokens(self.tokens, 3, 'regex operator')
        self.string, op, self.pattern = self.tokens
        assert op == '=~'
        if isinstance(self.string, Literal):
            raise ValueError("Cannot do regex comparison on literal")

    def ast(self):
        # Note: equivalent to self.pattern.ast() / self.string.ast()
        pattern = self.tokens[2].ast()
        string = self.tokens[0].ast()
        # re.match(pattern, string) is not None
        return ast.Compare(
            left=ast.Call(func=ast.Attribute(value=RE_MODULE, attr='match',
                                             ctx=ast.Load()),
                          args=[pattern, string], keywords=[], starargs=None,
                          kwargs=None),
            ops=[ast.IsNot()], comparators=[ast.Name(id='None', ctx=ast.Load())]
        )
class BinaryInfixOperand(object):
    """A chain of one binary infix operator: 'a and b', 'x < 3', 'a == b == c'.

    Boolean operators compile to a single BoolOp node; comparison operators
    compile to a (possibly chained) Compare node.
    """
    n_terms = 2
    assoc = 'LEFT'

    keyword_aliases = _kw(
        (['and', '&&'], ast.And()),
        (['or', '||'], ast.Or()),
        (['<', 'lt'], ast.Lt()),
        (['==', 'eq'], ast.Eq()),
        (['<=', 'le'], ast.LtE()),
        (['!=', 'ne'], ast.NotEq()),
        (['>=', 'ge'], ast.GtE()),
        (['>', 'gt'], ast.Gt()),
    )

    def __init__(self, tokens):
        tokens = tokens[0]
        if len(tokens) % 2 == 1:
            # tokens alternate [operand, op, operand, op, operand, ...]
            self.op_token = tokens[1]
            self.comparators = tokens[::2]
        else:
            err = "Invalid number of infix expressions: {}"
            err = err.format(len(tokens))
            raise ParseException(err)
        assert self.op_token in self.keyword_aliases

        # Check for too many literals and not enough keywords
        op = self.keyword_aliases[self.op_token]
        if isinstance(op, ast.boolop):
            if any(isinstance(c, Literal) for c in self.comparators):
                raise ValueError("Cannot use literals as truth")
        else:
            if all(isinstance(c, Literal) for c in self.comparators):
                raise ValueError("Cannot compare literals.")

    def ast(self):
        """Build the AST node for the (possibly chained) infix expression."""
        op = self.keyword_aliases[self.op_token]
        if isinstance(op, ast.boolop):
            # and and or use one type of AST node
            value = ast.BoolOp(op=op, values=[e.ast() for e in self.comparators])
        else:
            # remaining operators use another.
            # BUG FIX: a left-associative chain such as 'a < b < c' yields
            # three comparators; ast.Compare requires exactly one op per
            # comparison (len(ops) == len(comparators)), so the previous
            # single-element ops list made compile() reject the node.
            value = ast.Compare(left=self.comparators[0].ast(),
                                ops=[op] * (len(self.comparators) - 1),
                                comparators=[e.ast() for e in self.comparators[1:]])
        return value
class RangeCondition(object):
    """Inclusive range test: 'mass 1 to 20' compiles to ``1 <= mass <= 20``.

    NOTE(review): the attribute names are historical and misleading —
    tokens arrive as [expr, low, 'to', high], stored as _from=expr,
    _center=low, _to=high; ast() emits ``low <= expr <= high``.
    """
    def __init__(self, tokens):
        tokens = tokens[0]
        _check_n_tokens(tokens, 4, 'range condition')
        assert tokens[2] == 'to'
        self._from, self._center, self._to = tokens[0], tokens[1], tokens[3]
        if isinstance(self._from, Literal):
            raise ValueError("Can't test literal in range.")

    def ast(self):
        # chained comparison: low <= expr <= high
        return ast.Compare(left=self._center.ast(), ops=[ast.LtE(), ast.LtE()],
                           comparators=[self._from.ast(), self._to.ast()])
class parse_selection(object):
    """Parse an atom selection expression

    Parameters
    ----------
    selection_string : str
        Selection string, a string in the MDTraj atom selection grammer.

    Returns
    -------
    expr : callable (atom -> bool)
        A callable object which accepts an MDTraj.core.topology.Atom object and
        returns a boolean value giving whether or not that particular atom
        satisfies the selection string.
    source : str
        Python source code corresponding to the expression ``expr``.
    astnode : ast.AST
        Python abstract syntax tree node containing the parsed expression

    Examples
    --------
    >>> expr, source, astnode = parse_selection('protein and type CA')
    >>> expr
    <function __main__.<lambda>>
    >>> source
    '(atom.residue.is_protein and (atom.element.symbol == CA))'
    >>> astnode
    <_ast.BoolOp at 0x103969d50>
    """

    def __init__(self):
        # Grammar construction is deferred until the first call.
        self.is_initialized = False
        self.expression = None

    def _initialize(self):
        """Build the pyparsing grammar (done lazily, exactly once)."""
        def keywords(klass):
            # match any of the aliases registered on klass
            kws = sorted(klass.keyword_aliases.keys())
            return MatchFirst([Keyword(kw) for kw in kws])

        def infix(klass):
            # infixNotation operator descriptors for klass's aliases
            kws = sorted(klass.keyword_aliases.keys())
            return [(kw, klass.n_terms, getattr(opAssoc, klass.assoc), klass)
                    for kw in kws]

        # literals include words made of alphanumerics, numbers,
        # or quoted strings but we exclude any of the logical
        # operands (e.g. 'or') from being parsed literals
        literal = (
            ~(keywords(BinaryInfixOperand) | keywords(UnaryInfixOperand)) +
            (Word(NUMS) | quotedString | Word(alphas, alphanums))
        )
        literal.setParseAction(Literal)

        # These are the other 'root' expressions,
        # the selection keywords (resname, resid, mass, etc)
        selection_keyword = keywords(SelectionKeyword)
        selection_keyword.setParseAction(SelectionKeyword)
        base_expression = MatchFirst([selection_keyword, literal])

        # the grammar includes implicit equality comparisons
        # between adjacent expressions:
        # i.e. 'name CA' --> 'name == CA'
        implicit_equality = Group(
            base_expression + Optional(Keyword('=='), '==') + base_expression
        )
        implicit_equality.setParseAction(BinaryInfixOperand)

        # range condition matches expressions such as 'mass 1 to 20'
        range_condition = Group(
            base_expression + literal + Keyword('to') + literal
        )
        range_condition.setParseAction(RangeCondition)

        expression = range_condition | implicit_equality | base_expression
        logical_expr = infixNotation(
            expression,
            infix(UnaryInfixOperand) +
            infix(BinaryInfixOperand) +
            infix(RegexInfixOperand)
        )

        self.expression = logical_expr
        self.is_initialized = True
        self.transformer = _RewriteNames()

    def __call__(self, selection):
        """Parse *selection* and return a _ParsedSelection namedtuple."""
        if not self.is_initialized:
            self._initialize()
        try:
            parse_result = self.expression.parseString(selection, parseAll=True)
        except ParseException as e:
            # re-raise as ValueError with a caret marking the error column
            msg = str(e)
            lines = ["%s: %s" % (msg, selection),
                     " " * (12 + len("%s: " % msg) + e.loc) + "^^^"]
            raise ValueError('\n'.join(lines))

        # Change __ATOM__ in function bodies. It must bind to the arg
        # name specified below (i.e. 'atom')
        astnode = self.transformer.visit(deepcopy(parse_result[0].ast()))

        # Special check for a single literal
        if isinstance(astnode, ast.Num) or isinstance(astnode, ast.Str):
            raise ValueError("Cannot use a single literal as a boolean.")

        # Build lambda atom: <astnode>; the arguments node differs
        # between Python 2 and 3.
        if PY2:
            args = [ast.Name(id='atom', ctx=ast.Param())]
            signature = ast.arguments(args=args, vararg=None, kwarg=None,
                                      defaults=[])
        else:
            args = [ast.arg(arg='atom', annotation=None)]
            signature = ast.arguments(args=args, vararg=None, kwarg=None,
                                      kwonlyargs=[], defaults=[],
                                      kw_defaults=[])
        func = ast.Expression(body=ast.Lambda(signature, astnode))
        source = codegen.to_source(astnode)

        expr = eval(
            compile(ast.fix_missing_locations(func), '<string>', mode='eval'),
            SELECTION_GLOBALS)
        return _ParsedSelection(expr, source, astnode)
# Create the callable, and use it to overshadow the class. this way there's
# basically just one global instance of the "function", even thought its
# a callable class.
parse_selection = parse_selection()
if __name__ == '__main__':
    # CLI helper: echo the compiled source and AST for a selection string.
    import sys
    exp = parse_selection(sys.argv[1])
    print(exp.source)
    print(ast.dump(exp.astnode))
| lgpl-2.1 |
meghana1995/sympy | sympy/assumptions/handlers/sets.py | 9 | 20790 | """
Handlers for predicates related to set membership: integer, rational, etc.
"""
from __future__ import print_function, division
from sympy.assumptions import Q, ask
from sympy.assumptions.handlers import CommonHandler, test_closed_group
from sympy.core.numbers import pi
from sympy.functions.elementary.exponential import exp, log
from sympy import I
class AskIntegerHandler(CommonHandler):
    """
    Handler for Q.integer
    Test that an expression belongs to the field of integer numbers
    """

    @staticmethod
    def _number(expr, assumptions):
        # helper method: numerically check whether expr equals its rounding
        try:
            i = int(expr.round())
            if not (expr - i).equals(0):
                raise TypeError
            return True
        except TypeError:
            return False

    @staticmethod
    def Add(expr, assumptions):
        """
        Integer + Integer -> Integer
        Integer + !Integer -> !Integer
        !Integer + !Integer -> ?
        """
        if expr.is_number:
            return AskIntegerHandler._number(expr, assumptions)
        return test_closed_group(expr, assumptions, Q.integer)

    @staticmethod
    def Mul(expr, assumptions):
        """
        Integer*Integer -> Integer
        Integer*Irrational -> !Integer
        Odd/Even -> !Integer
        Integer*Rational -> ?
        """
        if expr.is_number:
            return AskIntegerHandler._number(expr, assumptions)
        _output = True
        for arg in expr.args:
            if not ask(Q.integer(arg), assumptions):
                if arg.is_Rational:
                    if arg.q == 2:
                        # halves: decide via evenness of 2*expr
                        return ask(Q.even(2*expr), assumptions)
                    # NOTE(review): ~(arg.q & 1) is -1 or -2, i.e. always
                    # truthy, so any other Rational factor returns None
                    # here.  Possibly intended to test odd/even q — confirm
                    # against upstream before changing.
                    if ~(arg.q & 1):
                        return None
                elif ask(Q.irrational(arg), assumptions):
                    # a single irrational factor -> not integer; two -> unknown
                    if _output:
                        _output = False
                    else:
                        return
                else:
                    return
        else:
            return _output

    Pow = Add  # same test: numeric check, else closure of the group

    int, Integer = [staticmethod(CommonHandler.AlwaysTrue)]*2

    Pi, Exp1, GoldenRatio, Infinity, NegativeInfinity, ImaginaryUnit = \
        [staticmethod(CommonHandler.AlwaysFalse)]*6

    @staticmethod
    def Rational(expr, assumptions):
        # rationals with denominator one get
        # evaluated to Integers
        return False

    @staticmethod
    def Float(expr, assumptions):
        return int(expr) == expr

    @staticmethod
    def Abs(expr, assumptions):
        return ask(Q.integer(expr.args[0]), assumptions)

    @staticmethod
    def MatrixElement(expr, assumptions):
        return ask(Q.integer_elements(expr.args[0]), assumptions)

    Determinant = Trace = MatrixElement
class AskRationalHandler(CommonHandler):
    """
    Handler for Q.rational
    Test that an expression belongs to the field of rational numbers
    """

    @staticmethod
    def Add(expr, assumptions):
        """
        Rational + Rational -> Rational
        Rational + !Rational -> !Rational
        !Rational + !Rational -> ?
        """
        if expr.is_number:
            if expr.as_real_imag()[1]:
                # a nonzero imaginary part can never be rational
                return False
        return test_closed_group(expr, assumptions, Q.rational)

    Mul = Add

    @staticmethod
    def Pow(expr, assumptions):
        """
        Rational ** Integer -> Rational
        Irrational ** Rational -> Irrational
        Rational ** Irrational -> ?
        """
        if ask(Q.integer(expr.exp), assumptions):
            return ask(Q.rational(expr.base), assumptions)
        elif ask(Q.rational(expr.exp), assumptions):
            if ask(Q.prime(expr.base), assumptions):
                return False

    Rational, Float = \
        [staticmethod(CommonHandler.AlwaysTrue)]*2  # Float is finite-precision

    ImaginaryUnit, Infinity, NegativeInfinity, Pi, Exp1, GoldenRatio = \
        [staticmethod(CommonHandler.AlwaysFalse)]*6

    @staticmethod
    def exp(expr, assumptions):
        # for rational x, exp(x) is rational only when x == 0
        x = expr.args[0]
        if ask(Q.rational(x), assumptions):
            return ask(~Q.nonzero(x), assumptions)

    @staticmethod
    def cot(expr, assumptions):
        # cot of a rational argument is treated as never rational
        x = expr.args[0]
        if ask(Q.rational(x), assumptions):
            return False

    @staticmethod
    def log(expr, assumptions):
        # for rational x, log(x) is rational only when x == 1
        x = expr.args[0]
        if ask(Q.rational(x), assumptions):
            return ask(~Q.nonzero(x - 1), assumptions)

    sin, cos, tan, asin, atan = [exp]*5
    acos, acot = log, cot
class AskIrrationalHandler(CommonHandler):
    """Handler for Q.irrational: real and not rational."""

    @staticmethod
    def Basic(expr, assumptions):
        _real = ask(Q.real(expr), assumptions)
        if _real:
            _rational = ask(Q.rational(expr), assumptions)
            if _rational is None:
                return None
            return not _rational
        else:
            # not known to be real (False or None): propagate that answer
            return _real
class AskRealHandler(CommonHandler):
    """
    Handler for Q.real
    Test that an expression belongs to the field of real numbers
    """

    @staticmethod
    def _number(expr, assumptions):
        # let as_real_imag() work first since the expression may
        # be simpler to evaluate
        i = expr.as_real_imag()[1].evalf(2)
        if i._prec != 1:
            return not i
        # allow None to be returned if we couldn't show for sure
        # that i was 0

    @staticmethod
    def Add(expr, assumptions):
        """
        Real + Real -> Real
        Real + (Complex & !Real) -> !Real
        """
        if expr.is_number:
            return AskRealHandler._number(expr, assumptions)
        return test_closed_group(expr, assumptions, Q.real)

    @staticmethod
    def Mul(expr, assumptions):
        """
        Real*Real -> Real
        Real*Imaginary -> !Real
        Imaginary*Imaginary -> Real
        """
        if expr.is_number:
            return AskRealHandler._number(expr, assumptions)
        result = True
        for arg in expr.args:
            if ask(Q.real(arg), assumptions):
                pass
            elif ask(Q.imaginary(arg), assumptions):
                # each imaginary factor flips realness
                result = result ^ True
            else:
                # unknown factor: give up (for-else not taken)
                break
        else:
            return result

    @staticmethod
    def Pow(expr, assumptions):
        """
        Real**Integer -> Real
        Positive**Real -> Real
        Real**(Integer/Even) -> Real if base is nonnegative
        Real**(Integer/Odd) -> Real
        Imaginary**(Integer/Even) -> Real
        Imaginary**(Integer/Odd) -> not Real
        Imaginary**Real -> ? since Real could be 0 (giving real) or 1 (giving imaginary)
        b**Imaginary -> Real if log(b) is imaginary and b != 0 and exponent != integer multiple of I*pi/log(b)
        Real**Real -> ? e.g. sqrt(-1) is imaginary and sqrt(2) is not
        """
        if expr.is_number:
            return AskRealHandler._number(expr, assumptions)
        if expr.base.func == exp:
            if ask(Q.imaginary(expr.base.args[0]), assumptions):
                if ask(Q.imaginary(expr.exp), assumptions):
                    return True
                # If the i = (exp's arg)/(I*pi) is an integer or half-integer
                # multiple of I*pi then 2*i will be an integer. In addition,
                # exp(i*I*pi) = (-1)**i so the overall realness of the expr
                # can be determined by replacing exp(i*I*pi) with (-1)**i.
                i = expr.base.args[0]/I/pi
                if ask(Q.integer(2*i), assumptions):
                    return ask(Q.real(((-1)**i)**expr.exp), assumptions)
            return
        if ask(Q.imaginary(expr.base), assumptions):
            if ask(Q.integer(expr.exp), assumptions):
                odd = ask(Q.odd(expr.exp), assumptions)
                if odd is not None:
                    return not odd
                return
        if ask(Q.imaginary(expr.exp), assumptions):
            imlog = ask(Q.imaginary(log(expr.base)), assumptions)
            if imlog is not None:
                # I**i -> real, log(I) is imag;
                # (2*I)**i -> complex, log(2*I) is not imag
                return imlog
        if ask(Q.real(expr.base), assumptions):
            if ask(Q.real(expr.exp), assumptions):
                if expr.exp.is_Rational and \
                   ask(Q.even(expr.exp.q), assumptions):
                    # even root of a real is real only for nonneg base
                    return ask(Q.positive(expr.base), assumptions)
                elif ask(Q.integer(expr.exp), assumptions):
                    return True
                elif ask(Q.positive(expr.base), assumptions):
                    return True
                elif ask(Q.negative(expr.base), assumptions):
                    return False

    Rational, Float, Pi, Exp1, GoldenRatio, Abs, re, im = \
        [staticmethod(CommonHandler.AlwaysTrue)]*8

    ImaginaryUnit, Infinity, NegativeInfinity = \
        [staticmethod(CommonHandler.AlwaysFalse)]*3

    @staticmethod
    def sin(expr, assumptions):
        if ask(Q.real(expr.args[0]), assumptions):
            return True

    cos = sin

    @staticmethod
    def exp(expr, assumptions):
        # exp(x) is real iff x is real or x is an integer multiple of I*pi
        return ask(Q.integer(expr.args[0]/I/pi) | Q.real(expr.args[0]), assumptions)

    @staticmethod
    def log(expr, assumptions):
        # log of a positive real is real
        return ask(Q.positive(expr.args[0]), assumptions)

    @staticmethod
    def MatrixElement(expr, assumptions):
        return ask(Q.real_elements(expr.args[0]), assumptions)

    Determinant = Trace = MatrixElement
class AskExtendedRealHandler(AskRealHandler):
    """
    Handler for Q.extended_real
    Test that an expression belongs to the field of extended real numbers,
    that is real numbers union {Infinity, -Infinity}
    """

    @staticmethod
    def Add(expr, assumptions):
        # closure of the extended reals under +, * and **
        return test_closed_group(expr, assumptions, Q.extended_real)

    Mul, Pow = [Add]*2

    # unlike plain reals, the infinities are extended reals
    Infinity, NegativeInfinity = [staticmethod(CommonHandler.AlwaysTrue)]*2
class AskHermitianHandler(AskRealHandler):
    """
    Handler for Q.hermitian
    Test that an expression belongs to the field of Hermitian operators
    """

    @staticmethod
    def Add(expr, assumptions):
        """
        Hermitian + Hermitian -> Hermitian
        Hermitian + !Hermitian -> !Hermitian
        """
        if expr.is_number:
            return AskRealHandler._number(expr, assumptions)
        return test_closed_group(expr, assumptions, Q.hermitian)

    @staticmethod
    def Mul(expr, assumptions):
        """
        As long as there is at most only one noncommutative term:
        Hermitian*Hermitian -> Hermitian
        Hermitian*Antihermitian -> !Hermitian
        Antihermitian*Antihermitian -> Hermitian
        """
        if expr.is_number:
            return AskRealHandler._number(expr, assumptions)
        nccount = 0
        result = True
        for arg in expr.args:
            if ask(Q.antihermitian(arg), assumptions):
                # each antihermitian factor flips the answer
                result = result ^ True
            elif not ask(Q.hermitian(arg), assumptions):
                break
            if ask(~Q.commutative(arg), assumptions):
                # more than one noncommutative factor: give up
                nccount += 1
                if nccount > 1:
                    break
        else:
            return result

    @staticmethod
    def Pow(expr, assumptions):
        """
        Hermitian**Integer -> Hermitian
        """
        if expr.is_number:
            return AskRealHandler._number(expr, assumptions)
        if ask(Q.hermitian(expr.base), assumptions):
            if ask(Q.integer(expr.exp), assumptions):
                return True

    @staticmethod
    def sin(expr, assumptions):
        if ask(Q.hermitian(expr.args[0]), assumptions):
            return True

    cos, exp = [sin]*2
class AskComplexHandler(CommonHandler):
    """
    Handler for Q.complex.

    Tests whether an expression belongs to the field of complex numbers.
    """

    @staticmethod
    def Add(expr, assumptions):
        # Sums of complex terms remain complex.
        return test_closed_group(expr, assumptions, Q.complex)

    # Products and powers obey the same closure rule.
    Mul = Pow = Add

    # Plain numbers, number symbols, I, and these elementary functions
    # always yield complex values.
    Number = sin = cos = log = exp = re = im = NumberSymbol = Abs = \
        ImaginaryUnit = staticmethod(CommonHandler.AlwaysTrue)

    # The infinities lie outside the complex field proper.
    Infinity = NegativeInfinity = staticmethod(CommonHandler.AlwaysFalse)

    @staticmethod
    def MatrixElement(expr, assumptions):
        # An element of a matrix with complex entries is complex.
        return ask(Q.complex_elements(expr.args[0]), assumptions)

    # Determinants and traces of such matrices are complex as well.
    Determinant = Trace = MatrixElement
class AskImaginaryHandler(CommonHandler):
    """
    Handler for Q.imaginary

    Test that an expression belongs to the field of imaginary numbers,
    that is, numbers in the form x*I, where x is real
    """

    @staticmethod
    def _number(expr, assumptions):
        # let as_real_imag() work first since the expression may
        # be simpler to evaluate
        r = expr.as_real_imag()[0].evalf(2)
        if r._prec != 1:
            # The real part evaluated to a definite value: the number is
            # reported imaginary exactly when that real part is zero.
            return not r
        # allow None to be returned if we couldn't show for sure
        # that r was 0

    @staticmethod
    def Add(expr, assumptions):
        """
        Imaginary + Imaginary -> Imaginary
        Imaginary + Complex   -> ?
        Imaginary + Real      -> !Imaginary
        """
        if expr.is_number:
            return AskImaginaryHandler._number(expr, assumptions)
        reals = 0
        for arg in expr.args:
            if ask(Q.imaginary(arg), assumptions):
                pass
            elif ask(Q.real(arg), assumptions):
                reals += 1
            else:
                # Neither imaginary nor real: no conclusion possible.
                break
        else:
            if reals == 0:
                # Every term imaginary -> the sum is imaginary.
                return True
            if reals == 1 or (len(expr.args) == reals):
                # two reals could sum 0 thus giving an imaginary
                return False

    @staticmethod
    def Mul(expr, assumptions):
        """
        Real*Imaginary      -> Imaginary
        Imaginary*Imaginary -> Real
        """
        if expr.is_number:
            return AskImaginaryHandler._number(expr, assumptions)
        # Parity flag: True after an odd number of imaginary factors.
        result = False
        reals = 0
        for arg in expr.args:
            if ask(Q.imaginary(arg), assumptions):
                result = result ^ True
            elif not ask(Q.real(arg), assumptions):
                break
        else:
            # NOTE(review): 'reals' is never incremented above, so this
            # branch can only trigger for an empty product — looks like
            # dead code; confirm against upstream before relying on it.
            if reals == len(expr.args):
                return False
            return result

    @staticmethod
    def Pow(expr, assumptions):
        """
        Imaginary**Odd        -> Imaginary
        Imaginary**Even       -> Real
        b**Imaginary          -> !Imaginary if exponent is an integer
                                 multiple of I*pi/log(b)
        Imaginary**Real       -> ?
        Positive**Real        -> Real
        Negative**Integer     -> Real
        Negative**(Integer/2) -> Imaginary
        Negative**Real        -> not Imaginary if exponent is not Rational
        """
        if expr.is_number:
            return AskImaginaryHandler._number(expr, assumptions)
        if expr.base.func == exp:
            # Base of the form exp(...): inspect its argument directly.
            if ask(Q.imaginary(expr.base.args[0]), assumptions):
                if ask(Q.imaginary(expr.exp), assumptions):
                    return False
                i = expr.base.args[0]/I/pi
                if ask(Q.integer(2*i), assumptions):
                    # exp(I*pi*i) is a power of -1; reduce to that case.
                    return ask(Q.imaginary(((-1)**i)**expr.exp), assumptions)
        if ask(Q.imaginary(expr.base), assumptions):
            if ask(Q.integer(expr.exp), assumptions):
                odd = ask(Q.odd(expr.exp), assumptions)
                if odd is not None:
                    # Imaginary**odd -> imaginary; **even -> real.
                    return odd
                return
        if ask(Q.imaginary(expr.exp), assumptions):
            imlog = ask(Q.imaginary(log(expr.base)), assumptions)
            if imlog is not None:
                return False  # I**i -> real; (2*I)**i -> complex ==> not imaginary
        if ask(Q.real(expr.base) & Q.real(expr.exp), assumptions):
            if ask(Q.positive(expr.base), assumptions):
                # positive**real stays real, hence not imaginary.
                return False
            else:
                rat = ask(Q.rational(expr.exp), assumptions)
                if not rat:
                    # Irrational (False) or unknown (None): propagate.
                    return rat
                if ask(Q.integer(expr.exp), assumptions):
                    # negative**integer is real.
                    return False
                else:
                    # Rational, non-integer exponent: a half-integer
                    # exponent on a negative base gives an imaginary value.
                    half = ask(Q.integer(2*expr.exp), assumptions)
                    if half:
                        return ask(Q.negative(expr.base), assumptions)
                    return half

    @staticmethod
    def log(expr, assumptions):
        if ask(Q.real(expr.args[0]), assumptions):
            if ask(Q.positive(expr.args[0]), assumptions):
                # log of a positive real is real, hence not imaginary.
                return False
            return
        # XXX it should be enough to do
        # return ask(Q.nonpositive(expr.args[0]), assumptions)
        # but ask(Q.nonpositive(exp(x)), Q.imaginary(x)) -> None;
        # it should return True since exp(x) will be either 0 or complex
        if expr.args[0].func == exp:
            if expr.args[0].args[0] in [I, -I]:
                # log(exp(+-I)) == +-I, which is imaginary.
                return True
        im = ask(Q.imaginary(expr.args[0]), assumptions)
        if im is False:
            return False

    @staticmethod
    def exp(expr, assumptions):
        # exp(I*pi*a) is imaginary exactly when a is a half-integer
        # that is not an integer (i.e. exp(...) is +-I).
        a = expr.args[0]/I/pi
        return ask(Q.integer(2*a) & ~Q.integer(a), assumptions)

    @staticmethod
    def Number(expr, assumptions):
        # A literal number is imaginary iff its imaginary part is nonzero.
        return not (expr.as_real_imag()[1] == 0)

    NumberSymbol = Number

    # I itself is, of course, imaginary.
    ImaginaryUnit = staticmethod(CommonHandler.AlwaysTrue)
class AskAntiHermitianHandler(AskImaginaryHandler):
    """
    Handler for Q.antihermitian

    Test that an expression belongs to the field of anti-Hermitian operators,
    that is, operators in the form x*I, where x is Hermitian
    """

    @staticmethod
    def Add(expr, assumptions):
        """
        Antihermitian + Antihermitian  -> Antihermitian
        Antihermitian + !Antihermitian -> !Antihermitian
        """
        if expr.is_number:
            # For a plain number, antihermitian coincides with imaginary.
            return AskImaginaryHandler._number(expr, assumptions)
        return test_closed_group(expr, assumptions, Q.antihermitian)

    @staticmethod
    def Mul(expr, assumptions):
        """
        As long as there is at most only one noncommutative term:
        Hermitian*Hermitian         -> !Antihermitian
        Hermitian*Antihermitian     -> Antihermitian
        Antihermitian*Antihermitian -> !Antihermitian
        """
        if expr.is_number:
            return AskImaginaryHandler._number(expr, assumptions)
        nccount = 0     # number of noncommutative factors seen so far
        # Parity flag: True after an odd number of antihermitian factors.
        result = False
        for arg in expr.args:
            if ask(Q.antihermitian(arg), assumptions):
                result = result ^ True  # flip parity
            elif not ask(Q.hermitian(arg), assumptions):
                # Neither hermitian nor antihermitian: no conclusion.
                break
            if ask(~Q.commutative(arg), assumptions):
                nccount += 1
                if nccount > 1:
                    # Parity rule only holds with at most one
                    # noncommutative factor.
                    break
        else:
            return result

    @staticmethod
    def Pow(expr, assumptions):
        """
        Hermitian**Integer  -> !Antihermitian
        Antihermitian**Even -> !Antihermitian
        Antihermitian**Odd  -> Antihermitian
        """
        if expr.is_number:
            return AskImaginaryHandler._number(expr, assumptions)
        if ask(Q.hermitian(expr.base), assumptions):
            if ask(Q.integer(expr.exp), assumptions):
                return False
        elif ask(Q.antihermitian(expr.base), assumptions):
            if ask(Q.even(expr.exp), assumptions):
                return False
            elif ask(Q.odd(expr.exp), assumptions):
                return True
class AskAlgebraicHandler(CommonHandler):
    """Handler for Q.algebraic key. """

    @staticmethod
    def Add(expr, assumptions):
        # Algebraic numbers are closed under addition.
        return test_closed_group(expr, assumptions, Q.algebraic)

    @staticmethod
    def Mul(expr, assumptions):
        # ...and closed under multiplication as well.
        return test_closed_group(expr, assumptions, Q.algebraic)

    @staticmethod
    def Pow(expr, assumptions):
        # A rational power of an algebraic number is algebraic.
        return expr.exp.is_Rational and ask(
            Q.algebraic(expr.base), assumptions)

    @staticmethod
    def Rational(expr, assumptions):
        # A rational p/q is algebraic provided the denominator is nonzero.
        return expr.q != 0

    # Float literals, the golden ratio, I and explicit algebraic numbers
    # are algebraic.
    Float, GoldenRatio, ImaginaryUnit, AlgebraicNumber = \
        [staticmethod(CommonHandler.AlwaysTrue)]*4

    # Infinities and the transcendental constants pi and E are not.
    Infinity, NegativeInfinity, ComplexInfinity, Pi, Exp1 = \
        [staticmethod(CommonHandler.AlwaysFalse)]*5

    @staticmethod
    def exp(expr, assumptions):
        # For algebraic x, exp(x) is algebraic only when x == 0
        # (Lindemann-Weierstrass).
        x = expr.args[0]
        if ask(Q.algebraic(x), assumptions):
            return ask(~Q.nonzero(x), assumptions)

    @staticmethod
    def cot(expr, assumptions):
        # cot of a (nonzero) algebraic argument is never algebraic.
        x = expr.args[0]
        if ask(Q.algebraic(x), assumptions):
            return False

    @staticmethod
    def log(expr, assumptions):
        # For algebraic x, log(x) is algebraic only when x == 1
        # (then log(x) == 0).
        x = expr.args[0]
        if ask(Q.algebraic(x), assumptions):
            return ask(~Q.nonzero(x - 1), assumptions)

    # These trig/inverse-trig functions share exp's rule: the value is
    # algebraic only at x == 0.
    sin, cos, tan, asin, atan = [exp]*5

    # acos follows log's rule (algebraic only at x == 1); acot follows cot's.
    acos, acot = log, cot
| bsd-3-clause |
x303597316/hue | desktop/core/ext-py/kazoo-2.0/kazoo/recipe/partitioner.py | 36 | 13074 | """Zookeeper Partitioner Implementation
:Maintainer: None
:Status: Unknown
:class:`SetPartitioner` implements a partitioning scheme using
Zookeeper for dividing up resources amongst members of a party.
This is useful when there is a set of resources that should only be
accessed by a single process at a time that multiple processes
across a cluster might want to divide up.
Example Use-Case
----------------
- Multiple workers across a cluster need to divide up a list of queues
so that no two workers own the same queue.
"""
import logging
import os
import socket
from functools import partial
from kazoo.exceptions import KazooException
from kazoo.protocol.states import KazooState
from kazoo.recipe.watchers import PatientChildrenWatch
log = logging.getLogger(__name__)
class PartitionState(object):
    """Symbolic names for the partitioner's high-level states.

    ``ALLOCATING``
        The set must be (re)partitioned; an existing partition set may
        first need to be released before a new one can be acquired.

    ``ACQUIRED``
        The set has been partitioned and this member's share acquired.

    ``RELEASE``
        Party membership changed; the current partitions must be
        released before a new allocation can be made.

    ``FAILURE``
        Partitioning failed: the maximum time to partition the set was
        exceeded or the Zookeeper session was lost.  The partitioner is
        unusable after this state and must be recreated.
    """

    ALLOCATING = "ALLOCATING"
    ACQUIRED = "ACQUIRED"
    RELEASE = "RELEASE"
    FAILURE = "FAILURE"
class SetPartitioner(object):
    """Partitions a set amongst members of a party

    This class will partition a set amongst members of a party such
    that each member will be given zero or more items of the set and
    each set item will be given to a single member. When new members
    enter or leave the party, the set will be re-partitioned amongst
    the members.

    When the :class:`SetPartitioner` enters the
    :attr:`~PartitionState.FAILURE` state, it is unrecoverable
    and a new :class:`SetPartitioner` should be created.

    Example:

    .. code-block:: python

        from kazoo.client import KazooClient
        client = KazooClient()

        qp = client.SetPartitioner(
            path='/work_queues', set=('queue-1', 'queue-2', 'queue-3'))

        while 1:
            if qp.failed:
                raise Exception("Lost or unable to acquire partition")
            elif qp.release:
                qp.release_set()
            elif qp.acquired:
                for partition in qp:
                    # Do something with each partition
            elif qp.allocating:
                qp.wait_for_acquire()

    **State Transitions**

    When created, the :class:`SetPartitioner` enters the
    :attr:`PartitionState.ALLOCATING` state.

    :attr:`~PartitionState.ALLOCATING` ->
    :attr:`~PartitionState.ACQUIRED`

        Set was partitioned successfully, the partition list assigned
        is accessible via list/iter methods or calling list() on the
        :class:`SetPartitioner` instance.

    :attr:`~PartitionState.ALLOCATING` ->
    :attr:`~PartitionState.FAILURE`

        Allocating the set failed either due to a Zookeeper session
        expiration, or failure to acquire the items of the set within
        the timeout period.

    :attr:`~PartitionState.ACQUIRED` ->
    :attr:`~PartitionState.RELEASE`

        The members of the party have changed, and the set needs to be
        repartitioned. :meth:`SetPartitioner.release` should be called
        as soon as possible.

    :attr:`~PartitionState.ACQUIRED` ->
    :attr:`~PartitionState.FAILURE`

        The current partition was lost due to a Zookeeper session
        expiration.

    :attr:`~PartitionState.RELEASE` ->
    :attr:`~PartitionState.ALLOCATING`

        The current partition was released and is being re-allocated.

    """
    def __init__(self, client, path, set, partition_func=None,
                 identifier=None, time_boundary=30):
        """Create a :class:`~SetPartitioner` instance

        :param client: A :class:`~kazoo.client.KazooClient` instance.
        :param path: The partition path to use.
        :param set: The set of items to partition.
        :param partition_func: A function to use to decide how to
                               partition the set.
        :param identifier: An identifier to use for this member of the
                           party when participating. Defaults to the
                           hostname + process id.
        :param time_boundary: How long the party members must be stable
                              before allocation can complete.

        """
        self.state = PartitionState.ALLOCATING

        self._client = client
        self._path = path
        self._set = set
        self._partition_set = []
        self._partition_func = partition_func or self._partitioner
        self._identifier = identifier or '%s-%s' % (
            socket.getfqdn(), os.getpid())
        self._locks = []
        self._lock_path = '/'.join([path, 'locks'])
        self._party_path = '/'.join([path, 'party'])
        self._time_boundary = time_boundary

        self._acquire_event = client.handler.event_object()

        # Create basic path nodes
        client.ensure_path(path)
        client.ensure_path(self._lock_path)
        client.ensure_path(self._party_path)

        # Join the party
        self._party = client.ShallowParty(self._party_path,
                                          identifier=self._identifier)
        self._party.join()

        self._was_allocated = False
        self._state_change = client.handler.rlock_object()
        client.add_listener(self._establish_sessionwatch)

        # Now watch the party and set the callback on the async result
        # so we know when we're ready
        self._children_updated = False
        self._child_watching(self._allocate_transition, asynchronous=True)

    def __iter__(self):
        """Return the partitions in this partition set"""
        for partition in self._partition_set:
            yield partition

    @property
    def failed(self):
        """Corresponds to the :attr:`PartitionState.FAILURE` state"""
        return self.state == PartitionState.FAILURE

    @property
    def release(self):
        """Corresponds to the :attr:`PartitionState.RELEASE` state"""
        return self.state == PartitionState.RELEASE

    @property
    def allocating(self):
        """Corresponds to the :attr:`PartitionState.ALLOCATING`
        state"""
        return self.state == PartitionState.ALLOCATING

    @property
    def acquired(self):
        """Corresponds to the :attr:`PartitionState.ACQUIRED` state"""
        return self.state == PartitionState.ACQUIRED

    def wait_for_acquire(self, timeout=30):
        """Wait for the set to be partitioned and acquired

        :param timeout: How long to wait before returning.
        :type timeout: int

        """
        self._acquire_event.wait(timeout)

    def release_set(self):
        """Call to release the set

        This method begins the step of allocating once the set has
        been released.

        """
        self._release_locks()
        if self._locks:  # pragma: nocover
            # This shouldn't happen, it means we couldn't release our
            # locks, abort
            self._fail_out()
            return
        else:
            with self._state_change:
                if self.failed:
                    return
                self.state = PartitionState.ALLOCATING
            self._child_watching(self._allocate_transition,
                                 asynchronous=True)

    def finish(self):
        """Call to release the set and leave the party"""
        self._release_locks()
        self._fail_out()

    def _fail_out(self):
        # Mark the partitioner as unusable and leave the party.
        with self._state_change:
            self.state = PartitionState.FAILURE
        if self._party.participating:
            try:
                self._party.leave()
            except KazooException:  # pragma: nocover
                pass

    def _allocate_transition(self, result):
        """Called when in allocating mode, and the children settled"""
        # Did we get an exception waiting for children to settle?
        if result.exception:  # pragma: nocover
            self._fail_out()
            return

        children, async_result = result.get()
        self._children_updated = False

        # Add a callback when children change on the async_result
        def updated(result):
            with self._state_change:
                if self.acquired:
                    self.state = PartitionState.RELEASE
            self._children_updated = True

        async_result.rawlink(updated)

        # Split up the set
        self._partition_set = self._partition_func(
            self._identifier, list(self._party), self._set)

        # Proceed to acquire locks for the working set as needed
        for member in self._partition_set:
            if self._children_updated or self.failed:
                # Still haven't settled down, release locks acquired
                # so far and go back
                return self._abort_lock_acquisition()

            lock = self._client.Lock(self._lock_path + '/' +
                                     str(member))
            try:
                lock.acquire()
            except KazooException:  # pragma: nocover
                return self.finish()
            self._locks.append(lock)

        # All locks acquired! Time for state transition, make sure
        # we didn't inadvertently get lost thus far
        with self._state_change:
            if self.failed:  # pragma: nocover
                return self.finish()
            self.state = PartitionState.ACQUIRED
            self._acquire_event.set()

    def _release_locks(self):
        """Attempt to completely remove all the locks"""
        self._acquire_event.clear()
        for lock in self._locks[:]:
            try:
                lock.release()
            except KazooException:  # pragma: nocover
                # We proceed to remove as many as possible, and leave
                # the ones we couldn't remove
                pass
            else:
                self._locks.remove(lock)

    def _abort_lock_acquisition(self):
        """Called during lock acquisition if a party change occurs"""
        self._partition_set = []
        self._release_locks()
        if self._locks:
            # This shouldn't happen, it means we couldn't release our
            # locks, abort
            self._fail_out()
            return
        return self._child_watching(self._allocate_transition)

    def _child_watching(self, func=None, asynchronous=False):
        """Called when children are being watched to stabilize

        This actually returns immediately, child watcher spins up a
        new thread/greenlet and waits for it to stabilize before
        any callbacks might run.

        .. note::
            This keyword argument was renamed from ``async`` —
            a reserved word since Python 3.7, which made this module
            fail to compile — to ``asynchronous``.

        """
        watcher = PatientChildrenWatch(self._client, self._party_path,
                                       self._time_boundary)
        asy = watcher.start()
        if func is not None:
            # We spin up the function in a separate thread/greenlet
            # to ensure that the rawlink's it might use won't be
            # blocked
            if asynchronous:
                func = partial(self._client.handler.spawn, func)
            asy.rawlink(func)
        return asy

    def _establish_sessionwatch(self, state):
        """Register ourself to listen for session events, we shut down
        if we become lost"""
        with self._state_change:
            # Handle network partition: If connection gets suspended,
            # change state to ALLOCATING if we had already ACQUIRED. This way
            # the caller does not process the members since we could eventually
            # lose session get repartitioned. If we got connected after a suspension
            # it means we've not lost the session and still have our members. Hence,
            # restore to ACQUIRED
            if state == KazooState.SUSPENDED:
                if self.state == PartitionState.ACQUIRED:
                    self._was_allocated = True
                    self.state = PartitionState.ALLOCATING
            elif state == KazooState.CONNECTED:
                if self._was_allocated:
                    self._was_allocated = False
                    self.state = PartitionState.ACQUIRED

        if state == KazooState.LOST:
            self._client.handler.spawn(self._fail_out)
            return True

    def _partitioner(self, identifier, members, partitions):
        # Default partitioning scheme: round-robin by sorted position.
        # Ensure consistent order of partitions/members
        all_partitions = sorted(partitions)
        workers = sorted(members)

        i = workers.index(identifier)
        # Now return the partition list starting at our location and
        # skipping the other workers
        return all_partitions[i::len(workers)]
| apache-2.0 |
tsdotca/dmclient | dmclient.py | 1 | 6440 | #!/usr/bin/env python3.6
# dmclient.py
# Copyright (C) 2018 Alex Mair. All rights reserved.
# This file is part of dmclient.
#
# dmclient is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 2 of the License.
#
# dmclient is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with dmclient. If not, see <http://www.gnu.org/licenses/>.
#
import argparse
import faulthandler
import itertools
import json
import logging
import logging.handlers
import sys
import traceback
from core import attrs
from core.config import *
_app = None
log = None
def excepthook(type_, value, tb):
    """Top-level handler installed as ``sys.excepthook``.

    Reports the uncaught exception on stderr instead of letting the
    process abort — notably when the PyQt event-loop thread tracebacks
    due to programmer error.
    """
    # There is much to do in this department; printing is a stopgap.
    print("Unhandled exception:", file=sys.stderr, flush=False)
    traceback.print_tb(tb)
    print(f"{type_.__name__}: {value}", file=sys.stderr, flush=True)
class LoggerSpec:
    """A ``<name>=<level>`` logger specification from the command line.

    Used as an argparse ``type=`` callable; e.g. ``"core=debug"`` gives
    ``name == "core"`` and ``level == logging.DEBUG``.
    """

    def __init__(self, specstr):
        # partition() tolerates a missing or repeated '=' where split()
        # raised an unhandled ValueError (shown as an internal error by
        # argparse rather than a usage error).
        self.name, _, levelname = specstr.partition('=')
        level = getattr(logging, levelname.upper(), None)
        if not isinstance(level, int):
            # Fix: the original raised argparse.ArgumentError with a single
            # argument (itself a TypeError at runtime) and combined a "%s"
            # placeholder with str.format, so the level was never shown.
            # ArgumentTypeError is what argparse expects a type= callable
            # to raise for a clean usage error.
            raise argparse.ArgumentTypeError(
                "invalid logger level {!r}".format(levelname))
        self.level = level
def parse_args(argv):
    """Parse command-line arguments *argv* (excluding the program name)."""
    ap = argparse.ArgumentParser(prog=APP_NAME, description=APP_DESCRIPTION)
    ap.add_argument("--disable-oracle", action="store_true",
                    help="Disable the multi-process search indexer.")
    ap.add_argument("--logfile", metavar="FILE",
                    default=os.path.join(APP_PATH, "dmclient.log"),
                    help="Override default log file.")
    # --log may be repeated, and each occurrence accepts one or more
    # specs, so the result is a list of lists of LoggerSpec.
    ap.add_argument("--log", dest="loggers", metavar="<log>=<level>",
                    action="append", type=LoggerSpec, nargs='+',
                    default=[], help="Specify logger level.")
    ap.add_argument("campaign", nargs='?',
                    help="open CAMPAIGN on startup")
    return ap.parse_args(argv)
def init_logging(args):
    """Configure root logging: rotating file plus stderr, then apply any
    per-logger level overrides from the command line."""
    file_handler = logging.handlers.RotatingFileHandler(
        args.logfile, maxBytes=2 * 1024 * 1024, backupCount=8,
        encoding='utf-8')
    console_handler = logging.StreamHandler(sys.stderr)
    logging.basicConfig(
        handlers=[file_handler, console_handler],
        format="%(asctime)s %(levelname)-7s "
               "%(name)-16s %(message)s",
        level=logging.DEBUG if __debug__ else logging.INFO)
    # args.loggers is a list of lists (repeatable --log with nargs='+'),
    # so flatten before applying.
    for spec in itertools.chain.from_iterable(args.loggers):
        logging.getLogger(spec.name).setLevel(spec.level)
    global log
    log = logging.getLogger("dmclient")
def init_appdirs():
    """Ensure the application's essential directories exist.

    Raises a plain ``Exception`` (caught by ``main``) if a directory
    cannot be created for a reason other than already existing.
    """
    for dir_ in [APP_PATH, CONFIG_PATH, TMP_PATH]:
        try:
            os.mkdir(dir_)
        except FileExistsError:
            pass  # already created on a previous run
        except OSError as e:
            # Fix: the original message misspelled "folder" as "foldier";
            # chain the OSError so the cause survives in tracebacks.
            raise Exception(
                "Could not create essential folder %s: %s" % (dir_, e)
            ) from e
def init_config():
    """Load config.json into the global app configuration, if present.

    Failure to read or parse the file is logged and otherwise ignored so
    the application can start with defaults.
    """
    config_path = os.path.join(CONFIG_PATH, "config.json")
    try:
        with open(config_path) as f:
            loaded = json.load(f)
        if not isinstance(loaded, dict):
            raise ValueError("malformed config file (not a dict?)")
        appconfig().update(loaded)
    except (OSError, ValueError) as e:
        # JSONDecodeError is a ValueError subclass, so parse errors land
        # here too.
        log.error("failed to load config file: %s", e)
def save_config(config):
    """Serialise *config* to config.json; log (never raise) on failure."""
    try:
        # Encode first so a corrupted config can't leave a half-written
        # file behind.
        serialised = json.dumps(attrs(config))
        with open(os.path.join(CONFIG_PATH, "config.json"), 'w') as f:
            print(serialised, file=f)
    except TypeError as e:
        log.fatal("*** config object appears to have been corrupted: %s", e)
    except OSError as e:
        log.error("failed to save config file: %s", e)
def main():
    """Program entry point: parse arguments, start subsystems, run Qt,
    then shut everything down and persist configuration."""
    sys.excepthook = excepthook
    faulthandler.enable()
    # NOTE(review): 'delphi' is assigned here but never used afterwards
    # in this function — confirm whether it can be removed.
    delphi, app_controller = None, None
    try:
        args = parse_args(sys.argv[1:])
        init_appdirs()
        init_logging(args)
        log.debug("hello, world")
        init_config()
        config = appconfig()

        # Spawn the search-indexer ("oracle") helper before Qt starts.
        log.debug("initialise oracle zygote")
        from oracle import spawn_zygote
        oracle_zygote = spawn_zygote(args)

        log.debug("initialise Qt")
        from PyQt5.QtGui import QIcon
        from PyQt5.QtWidgets import QApplication
        global _app
        _app = QApplication(sys.argv)
        _app.setApplicationName(APP_NAME)
        _app.setApplicationDisplayName(APP_NAME)
        _app.setWindowIcon(QIcon(APP_ICON_PATH))
        _app.setOrganizationDomain(APP_URL)
        _app.setApplicationVersion(APP_VERSION)
        _app.setQuitOnLastWindowClosed(True)

        if __debug__:
            # Development-only monkey patches.
            log.debug("initialise hacks")
            from core.hacks import install_hacks
            install_hacks()

        log.debug("initialise UI")
        import ui
        log.debug("initialise controllers")
        from core.app import AppController
        app_controller = AppController(args, _app, oracle_zygote)

        # What to show first: an explicit campaign from the CLI, the
        # most recent campaign (if configured), or the new-campaign UI.
        if args.campaign:
            app_controller.load_campaign(args.campaign)
        elif config.open_last_campaign and config.last_campaign_path:
            app_controller.load_campaign(config.last_campaign_path)
        else:
            app_controller.show_new_campaign()
        log.debug("Qt app exec")
        _app.exec()
    except Exception:
        # TODO: "dmclient has encountered.." etc. etc.
        traceback.print_exc(file=sys.stderr)
    finally:
        # Tear down controllers first, then persist config, then flush
        # logging.
        if app_controller:
            app_controller.shutdown()
        save_config(appconfig())
        if log:  # None if there was a CLI error
            log.debug("goodbye")
            logging.shutdown()
# Allow `python dmclient.py` to launch the application directly.
if __name__ == '__main__':
    main()
| gpl-2.0 |
huguesv/PTVS | Python/Product/Miniconda/Miniconda3-x64/Lib/site-packages/pythonwin/pywin/scintilla/bindings.py | 7 | 5052 | from . import IDLEenvironment
import string
import win32ui
import win32api
import win32con
from . import keycodes
import sys
import traceback
# Calling conventions a bound handler may expect (see BindingsManager.fire).
HANDLER_ARGS_GUESS = 0
HANDLER_ARGS_NATIVE = 1
HANDLER_ARGS_IDLE = 2
HANDLER_ARGS_EXTENSION = 3

# Next Windows command ID to hand out when an event needs one allocated.
next_id = 5000

# Fix: the comments on these two maps were swapped in the original.
event_to_commands = {}   # dict of event names to integer command IDs.
command_to_events = {}   # dict of integer command IDs to event names.


def assign_command_id(event, id=0):
    """Return the Windows command ID for *event*.

    If *id* is 0, reuse the ID previously allocated for this event or
    allocate a fresh one from ``next_id``.  Explicitly supplied IDs are
    recorded in ``event_to_commands`` only.
    """
    global next_id
    if id == 0:
        id = event_to_commands.get(event, 0)
        if id == 0:
            id = next_id
            next_id += 1
        # Only map the ones we allocated - specified ones are assumed
        # to have a handler of their own.
        command_to_events[id] = event
    event_to_commands[event] = id
    return id
class SendCommandHandler:
    """Callable that posts a WM_COMMAND with a fixed command ID to the
    main frame, ignoring whatever arguments the caller passes."""

    def __init__(self, cmd):
        self.cmd = cmd

    def __call__(self, *args):
        frame = win32ui.GetMainFrame()
        frame.SendMessage(win32con.WM_COMMAND, self.cmd)
class Binding:
    """Pairs an event handler with the calling convention it expects
    (one of the HANDLER_ARGS_* constants)."""

    def __init__(self, handler, handler_args_type):
        self.handler = handler
        self.handler_args_type = handler_args_type
class BindingsManager:
    """Maps event names to handlers (and Windows command IDs) for one
    Scintilla view, and dispatches key and command messages to them."""

    def __init__(self, parent_view):
        self.parent_view = parent_view
        self.bindings = {}  # dict of event name -> Binding instance.
        self.keymap = {}    # dict of (key, keyState) -> event name.

    def prepare_configure(self):
        # Start of a (re)configuration pass: discard old key bindings.
        self.keymap = {}

    def complete_configure(self):
        # Hook every known command ID so _OnCommand receives WM_COMMANDs.
        for id in command_to_events.keys():
            self.parent_view.HookCommand(self._OnCommand, id)

    def close(self):
        # Drop references so the view can be garbage collected.
        self.parent_view = self.bindings = self.keymap = None

    def report_error(self, problem):
        # Prefer the status bar; fall back to stdout when there is none.
        try:
            win32ui.SetStatusText(problem, 1)
        except win32ui.error:
            # No status bar!
            print(problem)

    def update_keymap(self, keymap):
        # Merge new (key, state) -> event mappings into the active map.
        self.keymap.update(keymap)

    def bind(self, event, handler, handler_args_type = HANDLER_ARGS_GUESS, cid = 0):
        # A None handler means "just send the command ID to the frame".
        if handler is None:
            handler = SendCommandHandler(cid)
        self.bindings[event] = self._new_binding(handler, handler_args_type)
        self.bind_command(event, cid)

    def bind_command(self, event, id = 0):
        "Binds an event to a Windows control/command ID"
        id = assign_command_id(event, id)
        return id

    def get_command_id(self, event):
        # Return the command ID for the event, allocating one on demand
        # for events we know about; None for unknown events.
        id = event_to_commands.get(event)
        if id is None:
            # See if we even have an event of that name!?
            if event not in self.bindings:
                return None
            id = self.bind_command(event)
        return id

    def _OnCommand(self, id, code):
        # WM_COMMAND hook: translate the command ID back to an event and
        # fire it; 1 lets default handlers process unknown IDs.
        event = command_to_events.get(id)
        if event is None:
            self.report_error("No event associated with event ID %d" % id)
            return 1
        return self.fire(event)

    def _new_binding(self, event, handler_args_type):
        # NOTE(review): the first parameter is actually the handler
        # callable, despite being named 'event' here.
        return Binding(event, handler_args_type)

    def _get_IDLE_handler(self, ext, handler):
        # Resolve an event like "do-something" to method
        # do_something_event on the named IDLE extension instance.
        try:
            instance = self.parent_view.idle.IDLEExtension(ext)
            name = handler.replace("-", "_") + "_event"
            return getattr(instance, name)
        except (ImportError, AttributeError):
            msg = "Can not find event '%s' in IDLE extension '%s'" % (handler, ext)
            self.report_error(msg)
            return None

    def fire(self, event, event_param = None):
        # Fire the specified event. Result is native Pythonwin result
        # (ie, 1==pass one, 0 or None==handled)
        # First look up the event directly - if there, we are set.
        binding = self.bindings.get(event)
        if binding is None:
            # If possible, find it!
            # A native method name
            handler = getattr(self.parent_view, event + "Event", None)
            if handler is None:
                # Can't decide if I should report an error??
                self.report_error("The event name '%s' can not be found." % event)
                # Either way, just let the default handlers grab it.
                return 1
            binding = self._new_binding(handler, HANDLER_ARGS_NATIVE)
            # Cache it.
            self.bindings[event] = binding

        handler_args_type = binding.handler_args_type
        # Now actually fire it.
        if handler_args_type==HANDLER_ARGS_GUESS:
            # Can't be native, as natives are never added with "guess".
            # Must be extension or IDLE.
            if event[0]=="<":
                handler_args_type = HANDLER_ARGS_IDLE
            else:
                handler_args_type = HANDLER_ARGS_EXTENSION
        try:
            # Extension handlers get (idle, param); everyone else just
            # gets the event parameter.
            if handler_args_type==HANDLER_ARGS_EXTENSION:
                args = self.parent_view.idle, event_param
            else:
                args = (event_param,)
            rc = binding.handler(*args)
            if handler_args_type==HANDLER_ARGS_IDLE:
                # Convert to our return code.
                if rc in [None, "break"]:
                    rc = 0
                else:
                    rc = 1
        except:
            message = "Firing event '%s' failed." % event
            print(message)
            traceback.print_exc()
            self.report_error(message)
            rc = 1 # Let any default handlers have a go!
        return rc

    def fire_key_event(self, msg):
        # Translate a keystroke message into a (key, modifier-state)
        # tuple, look it up in the keymap, and fire the bound event.
        key = msg[2]
        keyState = 0
        if win32api.GetKeyState(win32con.VK_CONTROL) & 0x8000:
            keyState = keyState | win32con.RIGHT_CTRL_PRESSED | win32con.LEFT_CTRL_PRESSED
        if win32api.GetKeyState(win32con.VK_SHIFT) & 0x8000:
            keyState = keyState | win32con.SHIFT_PRESSED
        if win32api.GetKeyState(win32con.VK_MENU) & 0x8000:
            keyState = keyState | win32con.LEFT_ALT_PRESSED | win32con.RIGHT_ALT_PRESSED
        keyinfo = key, keyState
        # Special hacks for the dead-char key on non-US keyboards.
        # (XXX - which do not work :-(
        event = self.keymap.get( keyinfo )
        if event is None:
            # Unmapped key: let default processing continue.
            return 1
        return self.fire(event, None)
| apache-2.0 |
sunlianqiang/kbengine | kbe/res/scripts/common/Lib/test/test_fileio.py | 80 | 15460 | # Adapted from test_file.py by Daniel Stutzbach
import sys
import os
import io
import errno
import unittest
from array import array
from weakref import proxy
from functools import wraps
from test.support import TESTFN, check_warnings, run_unittest, make_bad_fd, cpython_only
from collections import UserList
from _io import FileIO as _FileIO
class AutoFileTests(unittest.TestCase):
# file tests for which a test file is automatically set up
def setUp(self):
self.f = _FileIO(TESTFN, 'w')
def tearDown(self):
if self.f:
self.f.close()
os.remove(TESTFN)
def testWeakRefs(self):
# verify weak references
p = proxy(self.f)
p.write(bytes(range(10)))
self.assertEqual(self.f.tell(), p.tell())
self.f.close()
self.f = None
self.assertRaises(ReferenceError, getattr, p, 'tell')
def testSeekTell(self):
self.f.write(bytes(range(20)))
self.assertEqual(self.f.tell(), 20)
self.f.seek(0)
self.assertEqual(self.f.tell(), 0)
self.f.seek(10)
self.assertEqual(self.f.tell(), 10)
self.f.seek(5, 1)
self.assertEqual(self.f.tell(), 15)
self.f.seek(-5, 1)
self.assertEqual(self.f.tell(), 10)
self.f.seek(-5, 2)
self.assertEqual(self.f.tell(), 15)
def testAttributes(self):
# verify expected attributes exist
f = self.f
self.assertEqual(f.mode, "wb")
self.assertEqual(f.closed, False)
# verify the attributes are readonly
for attr in 'mode', 'closed':
self.assertRaises((AttributeError, TypeError),
setattr, f, attr, 'oops')
def testReadinto(self):
# verify readinto
self.f.write(bytes([1, 2]))
self.f.close()
a = array('b', b'x'*10)
self.f = _FileIO(TESTFN, 'r')
n = self.f.readinto(a)
self.assertEqual(array('b', [1, 2]), a[:n])
def testWritelinesList(self):
l = [b'123', b'456']
self.f.writelines(l)
self.f.close()
self.f = _FileIO(TESTFN, 'rb')
buf = self.f.read()
self.assertEqual(buf, b'123456')
def testWritelinesUserList(self):
l = UserList([b'123', b'456'])
self.f.writelines(l)
self.f.close()
self.f = _FileIO(TESTFN, 'rb')
buf = self.f.read()
self.assertEqual(buf, b'123456')
def testWritelinesError(self):
self.assertRaises(TypeError, self.f.writelines, [1, 2, 3])
self.assertRaises(TypeError, self.f.writelines, None)
self.assertRaises(TypeError, self.f.writelines, "abc")
def test_none_args(self):
self.f.write(b"hi\nbye\nabc")
self.f.close()
self.f = _FileIO(TESTFN, 'r')
self.assertEqual(self.f.read(None), b"hi\nbye\nabc")
self.f.seek(0)
self.assertEqual(self.f.readline(None), b"hi\n")
self.assertEqual(self.f.readlines(None), [b"bye\n", b"abc"])
def test_reject(self):
self.assertRaises(TypeError, self.f.write, "Hello!")
def testRepr(self):
self.assertEqual(repr(self.f), "<_io.FileIO name=%r mode=%r>"
% (self.f.name, self.f.mode))
del self.f.name
self.assertEqual(repr(self.f), "<_io.FileIO fd=%r mode=%r>"
% (self.f.fileno(), self.f.mode))
self.f.close()
self.assertEqual(repr(self.f), "<_io.FileIO [closed]>")
def testErrors(self):
f = self.f
self.assertTrue(not f.isatty())
self.assertTrue(not f.closed)
#self.assertEqual(f.name, TESTFN)
self.assertRaises(ValueError, f.read, 10) # Open for reading
f.close()
self.assertTrue(f.closed)
f = _FileIO(TESTFN, 'r')
self.assertRaises(TypeError, f.readinto, "")
self.assertTrue(not f.closed)
f.close()
self.assertTrue(f.closed)
def testMethods(self):
methods = ['fileno', 'isatty', 'read', 'readinto',
'seek', 'tell', 'truncate', 'write', 'seekable',
'readable', 'writable']
self.f.close()
self.assertTrue(self.f.closed)
for methodname in methods:
method = getattr(self.f, methodname)
# should raise on closed file
self.assertRaises(ValueError, method)
def testOpendir(self):
# Issue 3703: opening a directory should fill the errno
# Windows always returns "[Errno 13]: Permission denied
# Unix calls dircheck() and returns "[Errno 21]: Is a directory"
try:
_FileIO('.', 'r')
except OSError as e:
self.assertNotEqual(e.errno, 0)
self.assertEqual(e.filename, ".")
else:
self.fail("Should have raised OSError")
@unittest.skipIf(os.name == 'nt', "test only works on a POSIX-like system")
def testOpenDirFD(self):
fd = os.open('.', os.O_RDONLY)
with self.assertRaises(OSError) as cm:
_FileIO(fd, 'r')
os.close(fd)
self.assertEqual(cm.exception.errno, errno.EISDIR)
#A set of functions testing that we get expected behaviour if someone has
#manually closed the internal file descriptor. First, a decorator:
def ClosedFD(func):
@wraps(func)
def wrapper(self):
#forcibly close the fd before invoking the problem function
f = self.f
os.close(f.fileno())
try:
func(self, f)
finally:
try:
self.f.close()
except OSError:
pass
return wrapper
def ClosedFDRaises(func):
@wraps(func)
def wrapper(self):
#forcibly close the fd before invoking the problem function
f = self.f
os.close(f.fileno())
try:
func(self, f)
except OSError as e:
self.assertEqual(e.errno, errno.EBADF)
else:
self.fail("Should have raised OSError")
finally:
try:
self.f.close()
except OSError:
pass
return wrapper
@ClosedFDRaises
def testErrnoOnClose(self, f):
f.close()
@ClosedFDRaises
def testErrnoOnClosedWrite(self, f):
f.write(b'a')
@ClosedFDRaises
def testErrnoOnClosedSeek(self, f):
f.seek(0)
@ClosedFDRaises
def testErrnoOnClosedTell(self, f):
f.tell()
@ClosedFDRaises
def testErrnoOnClosedTruncate(self, f):
f.truncate(0)
@ClosedFD
def testErrnoOnClosedSeekable(self, f):
f.seekable()
@ClosedFD
def testErrnoOnClosedReadable(self, f):
f.readable()
@ClosedFD
def testErrnoOnClosedWritable(self, f):
f.writable()
@ClosedFD
def testErrnoOnClosedFileno(self, f):
f.fileno()
@ClosedFD
def testErrnoOnClosedIsatty(self, f):
self.assertEqual(f.isatty(), False)
def ReopenForRead(self):
try:
self.f.close()
except OSError:
pass
self.f = _FileIO(TESTFN, 'r')
os.close(self.f.fileno())
return self.f
@ClosedFDRaises
def testErrnoOnClosedRead(self, f):
f = self.ReopenForRead()
f.read(1)
@ClosedFDRaises
def testErrnoOnClosedReadall(self, f):
f = self.ReopenForRead()
f.readall()
@ClosedFDRaises
def testErrnoOnClosedReadinto(self, f):
f = self.ReopenForRead()
a = array('b', b'x'*10)
f.readinto(a)
class OtherFileTests(unittest.TestCase):
def testAbles(self):
try:
f = _FileIO(TESTFN, "w")
self.assertEqual(f.readable(), False)
self.assertEqual(f.writable(), True)
self.assertEqual(f.seekable(), True)
f.close()
f = _FileIO(TESTFN, "r")
self.assertEqual(f.readable(), True)
self.assertEqual(f.writable(), False)
self.assertEqual(f.seekable(), True)
f.close()
f = _FileIO(TESTFN, "a+")
self.assertEqual(f.readable(), True)
self.assertEqual(f.writable(), True)
self.assertEqual(f.seekable(), True)
self.assertEqual(f.isatty(), False)
f.close()
if sys.platform != "win32":
try:
f = _FileIO("/dev/tty", "a")
except OSError:
# When run in a cron job there just aren't any
# ttys, so skip the test. This also handles other
# OS'es that don't support /dev/tty.
pass
else:
self.assertEqual(f.readable(), False)
self.assertEqual(f.writable(), True)
if sys.platform != "darwin" and \
'bsd' not in sys.platform and \
not sys.platform.startswith('sunos'):
# Somehow /dev/tty appears seekable on some BSDs
self.assertEqual(f.seekable(), False)
self.assertEqual(f.isatty(), True)
f.close()
finally:
os.unlink(TESTFN)
def testInvalidModeStrings(self):
# check invalid mode strings
for mode in ("", "aU", "wU+", "rw", "rt"):
try:
f = _FileIO(TESTFN, mode)
except ValueError:
pass
else:
f.close()
self.fail('%r is an invalid file mode' % mode)
def testModeStrings(self):
# test that the mode attribute is correct for various mode strings
# given as init args
try:
for modes in [('w', 'wb'), ('wb', 'wb'), ('wb+', 'rb+'),
('w+b', 'rb+'), ('a', 'ab'), ('ab', 'ab'),
('ab+', 'ab+'), ('a+b', 'ab+'), ('r', 'rb'),
('rb', 'rb'), ('rb+', 'rb+'), ('r+b', 'rb+')]:
# read modes are last so that TESTFN will exist first
with _FileIO(TESTFN, modes[0]) as f:
self.assertEqual(f.mode, modes[1])
finally:
if os.path.exists(TESTFN):
os.unlink(TESTFN)
def testUnicodeOpen(self):
# verify repr works for unicode too
f = _FileIO(str(TESTFN), "w")
f.close()
os.unlink(TESTFN)
def testBytesOpen(self):
# Opening a bytes filename
try:
fn = TESTFN.encode("ascii")
except UnicodeEncodeError:
self.skipTest('could not encode %r to ascii' % TESTFN)
f = _FileIO(fn, "w")
try:
f.write(b"abc")
f.close()
with open(TESTFN, "rb") as f:
self.assertEqual(f.read(), b"abc")
finally:
os.unlink(TESTFN)
def testConstructorHandlesNULChars(self):
fn_with_NUL = 'foo\0bar'
self.assertRaises(TypeError, _FileIO, fn_with_NUL, 'w')
self.assertRaises(TypeError, _FileIO, bytes(fn_with_NUL, 'ascii'), 'w')
def testInvalidFd(self):
self.assertRaises(ValueError, _FileIO, -10)
self.assertRaises(OSError, _FileIO, make_bad_fd())
if sys.platform == 'win32':
import msvcrt
self.assertRaises(OSError, msvcrt.get_osfhandle, make_bad_fd())
@cpython_only
def testInvalidFd_overflow(self):
# Issue 15989
import _testcapi
self.assertRaises(TypeError, _FileIO, _testcapi.INT_MAX + 1)
self.assertRaises(TypeError, _FileIO, _testcapi.INT_MIN - 1)
def testBadModeArgument(self):
# verify that we get a sensible error message for bad mode argument
bad_mode = "qwerty"
try:
f = _FileIO(TESTFN, bad_mode)
except ValueError as msg:
if msg.args[0] != 0:
s = str(msg)
if TESTFN in s or bad_mode not in s:
self.fail("bad error message for invalid mode: %s" % s)
# if msg.args[0] == 0, we're probably on Windows where there may be
# no obvious way to discover why open() failed.
else:
f.close()
self.fail("no error for invalid mode: %s" % bad_mode)
def testTruncate(self):
f = _FileIO(TESTFN, 'w')
f.write(bytes(bytearray(range(10))))
self.assertEqual(f.tell(), 10)
f.truncate(5)
self.assertEqual(f.tell(), 10)
self.assertEqual(f.seek(0, io.SEEK_END), 5)
f.truncate(15)
self.assertEqual(f.tell(), 5)
self.assertEqual(f.seek(0, io.SEEK_END), 15)
f.close()
def testTruncateOnWindows(self):
def bug801631():
# SF bug <http://www.python.org/sf/801631>
# "file.truncate fault on windows"
f = _FileIO(TESTFN, 'w')
f.write(bytes(range(11)))
f.close()
f = _FileIO(TESTFN,'r+')
data = f.read(5)
if data != bytes(range(5)):
self.fail("Read on file opened for update failed %r" % data)
if f.tell() != 5:
self.fail("File pos after read wrong %d" % f.tell())
f.truncate()
if f.tell() != 5:
self.fail("File pos after ftruncate wrong %d" % f.tell())
f.close()
size = os.path.getsize(TESTFN)
if size != 5:
self.fail("File size after ftruncate wrong %d" % size)
try:
bug801631()
finally:
os.unlink(TESTFN)
def testAppend(self):
try:
f = open(TESTFN, 'wb')
f.write(b'spam')
f.close()
f = open(TESTFN, 'ab')
f.write(b'eggs')
f.close()
f = open(TESTFN, 'rb')
d = f.read()
f.close()
self.assertEqual(d, b'spameggs')
finally:
try:
os.unlink(TESTFN)
except:
pass
def testInvalidInit(self):
self.assertRaises(TypeError, _FileIO, "1", 0, 0)
def testWarnings(self):
with check_warnings(quiet=True) as w:
self.assertEqual(w.warnings, [])
self.assertRaises(TypeError, _FileIO, [])
self.assertEqual(w.warnings, [])
self.assertRaises(ValueError, _FileIO, "/some/invalid/name", "rt")
self.assertEqual(w.warnings, [])
def testUnclosedFDOnException(self):
class MyException(Exception): pass
class MyFileIO(_FileIO):
def __setattr__(self, name, value):
if name == "name":
raise MyException("blocked setting name")
return super(MyFileIO, self).__setattr__(name, value)
fd = os.open(__file__, os.O_RDONLY)
self.assertRaises(MyException, MyFileIO, fd)
os.close(fd) # should not raise OSError(EBADF)
def test_main():
# Historically, these tests have been sloppy about removing TESTFN.
# So get rid of it no matter what.
try:
run_unittest(AutoFileTests, OtherFileTests)
finally:
if os.path.exists(TESTFN):
os.unlink(TESTFN)
if __name__ == '__main__':
test_main()
| lgpl-3.0 |
mega-force/osmc | package/mediacenter-skin-osmc/files/usr/share/kodi/addons/script.module.unidecode/lib/unidecode/x055.py | 252 | 4599 | data = (
'You ', # 0x00
'Yan ', # 0x01
'Gu ', # 0x02
'Gu ', # 0x03
'Bai ', # 0x04
'Han ', # 0x05
'Suo ', # 0x06
'Chun ', # 0x07
'Yi ', # 0x08
'Ai ', # 0x09
'Jia ', # 0x0a
'Tu ', # 0x0b
'Xian ', # 0x0c
'Huan ', # 0x0d
'Li ', # 0x0e
'Xi ', # 0x0f
'Tang ', # 0x10
'Zuo ', # 0x11
'Qiu ', # 0x12
'Che ', # 0x13
'Wu ', # 0x14
'Zao ', # 0x15
'Ya ', # 0x16
'Dou ', # 0x17
'Qi ', # 0x18
'Di ', # 0x19
'Qin ', # 0x1a
'Ma ', # 0x1b
'Mal ', # 0x1c
'Hong ', # 0x1d
'Dou ', # 0x1e
'Kes ', # 0x1f
'Lao ', # 0x20
'Liang ', # 0x21
'Suo ', # 0x22
'Zao ', # 0x23
'Huan ', # 0x24
'Lang ', # 0x25
'Sha ', # 0x26
'Ji ', # 0x27
'Zuo ', # 0x28
'Wo ', # 0x29
'Feng ', # 0x2a
'Yin ', # 0x2b
'Hu ', # 0x2c
'Qi ', # 0x2d
'Shou ', # 0x2e
'Wei ', # 0x2f
'Shua ', # 0x30
'Chang ', # 0x31
'Er ', # 0x32
'Li ', # 0x33
'Qiang ', # 0x34
'An ', # 0x35
'Jie ', # 0x36
'Yo ', # 0x37
'Nian ', # 0x38
'Yu ', # 0x39
'Tian ', # 0x3a
'Lai ', # 0x3b
'Sha ', # 0x3c
'Xi ', # 0x3d
'Tuo ', # 0x3e
'Hu ', # 0x3f
'Ai ', # 0x40
'Zhou ', # 0x41
'Nou ', # 0x42
'Ken ', # 0x43
'Zhuo ', # 0x44
'Zhuo ', # 0x45
'Shang ', # 0x46
'Di ', # 0x47
'Heng ', # 0x48
'Lan ', # 0x49
'A ', # 0x4a
'Xiao ', # 0x4b
'Xiang ', # 0x4c
'Tun ', # 0x4d
'Wu ', # 0x4e
'Wen ', # 0x4f
'Cui ', # 0x50
'Sha ', # 0x51
'Hu ', # 0x52
'Qi ', # 0x53
'Qi ', # 0x54
'Tao ', # 0x55
'Dan ', # 0x56
'Dan ', # 0x57
'Ye ', # 0x58
'Zi ', # 0x59
'Bi ', # 0x5a
'Cui ', # 0x5b
'Chuo ', # 0x5c
'He ', # 0x5d
'Ya ', # 0x5e
'Qi ', # 0x5f
'Zhe ', # 0x60
'Pei ', # 0x61
'Liang ', # 0x62
'Xian ', # 0x63
'Pi ', # 0x64
'Sha ', # 0x65
'La ', # 0x66
'Ze ', # 0x67
'Qing ', # 0x68
'Gua ', # 0x69
'Pa ', # 0x6a
'Zhe ', # 0x6b
'Se ', # 0x6c
'Zhuan ', # 0x6d
'Nie ', # 0x6e
'Guo ', # 0x6f
'Luo ', # 0x70
'Yan ', # 0x71
'Di ', # 0x72
'Quan ', # 0x73
'Tan ', # 0x74
'Bo ', # 0x75
'Ding ', # 0x76
'Lang ', # 0x77
'Xiao ', # 0x78
'[?] ', # 0x79
'Tang ', # 0x7a
'Chi ', # 0x7b
'Ti ', # 0x7c
'An ', # 0x7d
'Jiu ', # 0x7e
'Dan ', # 0x7f
'Ke ', # 0x80
'Yong ', # 0x81
'Wei ', # 0x82
'Nan ', # 0x83
'Shan ', # 0x84
'Yu ', # 0x85
'Zhe ', # 0x86
'La ', # 0x87
'Jie ', # 0x88
'Hou ', # 0x89
'Han ', # 0x8a
'Die ', # 0x8b
'Zhou ', # 0x8c
'Chai ', # 0x8d
'Wai ', # 0x8e
'Re ', # 0x8f
'Yu ', # 0x90
'Yin ', # 0x91
'Zan ', # 0x92
'Yao ', # 0x93
'Wo ', # 0x94
'Mian ', # 0x95
'Hu ', # 0x96
'Yun ', # 0x97
'Chuan ', # 0x98
'Hui ', # 0x99
'Huan ', # 0x9a
'Huan ', # 0x9b
'Xi ', # 0x9c
'He ', # 0x9d
'Ji ', # 0x9e
'Kui ', # 0x9f
'Zhong ', # 0xa0
'Wei ', # 0xa1
'Sha ', # 0xa2
'Xu ', # 0xa3
'Huang ', # 0xa4
'Du ', # 0xa5
'Nie ', # 0xa6
'Xuan ', # 0xa7
'Liang ', # 0xa8
'Yu ', # 0xa9
'Sang ', # 0xaa
'Chi ', # 0xab
'Qiao ', # 0xac
'Yan ', # 0xad
'Dan ', # 0xae
'Pen ', # 0xaf
'Can ', # 0xb0
'Li ', # 0xb1
'Yo ', # 0xb2
'Zha ', # 0xb3
'Wei ', # 0xb4
'Miao ', # 0xb5
'Ying ', # 0xb6
'Pen ', # 0xb7
'Phos ', # 0xb8
'Kui ', # 0xb9
'Xi ', # 0xba
'Yu ', # 0xbb
'Jie ', # 0xbc
'Lou ', # 0xbd
'Ku ', # 0xbe
'Sao ', # 0xbf
'Huo ', # 0xc0
'Ti ', # 0xc1
'Yao ', # 0xc2
'He ', # 0xc3
'A ', # 0xc4
'Xiu ', # 0xc5
'Qiang ', # 0xc6
'Se ', # 0xc7
'Yong ', # 0xc8
'Su ', # 0xc9
'Hong ', # 0xca
'Xie ', # 0xcb
'Yi ', # 0xcc
'Suo ', # 0xcd
'Ma ', # 0xce
'Cha ', # 0xcf
'Hai ', # 0xd0
'Ke ', # 0xd1
'Ta ', # 0xd2
'Sang ', # 0xd3
'Tian ', # 0xd4
'Ru ', # 0xd5
'Sou ', # 0xd6
'Wa ', # 0xd7
'Ji ', # 0xd8
'Pang ', # 0xd9
'Wu ', # 0xda
'Xian ', # 0xdb
'Shi ', # 0xdc
'Ge ', # 0xdd
'Zi ', # 0xde
'Jie ', # 0xdf
'Luo ', # 0xe0
'Weng ', # 0xe1
'Wa ', # 0xe2
'Si ', # 0xe3
'Chi ', # 0xe4
'Hao ', # 0xe5
'Suo ', # 0xe6
'Jia ', # 0xe7
'Hai ', # 0xe8
'Suo ', # 0xe9
'Qin ', # 0xea
'Nie ', # 0xeb
'He ', # 0xec
'Cis ', # 0xed
'Sai ', # 0xee
'Ng ', # 0xef
'Ge ', # 0xf0
'Na ', # 0xf1
'Dia ', # 0xf2
'Ai ', # 0xf3
'[?] ', # 0xf4
'Tong ', # 0xf5
'Bi ', # 0xf6
'Ao ', # 0xf7
'Ao ', # 0xf8
'Lian ', # 0xf9
'Cui ', # 0xfa
'Zhe ', # 0xfb
'Mo ', # 0xfc
'Sou ', # 0xfd
'Sou ', # 0xfe
'Tan ', # 0xff
)
| gpl-2.0 |
Turan-no/Turan | apps/profiles/migrations/0002_auto__add_field_userprofiledetail_ftp__add_field_profile_ftp.py | 1 | 6276 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'UserProfileDetail.ftp'
db.add_column('profiles_userprofiledetail', 'ftp', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True), keep_default=False)
# Adding field 'Profile.ftp'
db.add_column('profiles_profile', 'ftp', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True), keep_default=False)
def backwards(self, orm):
# Deleting field 'UserProfileDetail.ftp'
db.delete_column('profiles_userprofiledetail', 'ftp')
# Deleting field 'Profile.ftp'
db.delete_column('profiles_profile', 'ftp')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'profiles.profile': {
'Meta': {'ordering': "('user__username',)", 'object_name': 'Profile'},
'about': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'cycle': ('django.db.models.fields.CharField', [], {'max_length': '99', 'blank': 'True'}),
'cycle_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'ftp': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'height': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}),
'max_hr': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'motto': ('django.db.models.fields.CharField', [], {'max_length': '160'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'resting_hr': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'weight': ('django.db.models.fields.FloatField', [], {'default': '0', 'blank': 'True'})
},
'profiles.userprofiledetail': {
'Meta': {'object_name': 'UserProfileDetail'},
'ftp': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'resting_hr': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'time': ('django.db.models.fields.DateTimeField', [], {}),
'userprofile': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.Profile']"}),
'weight': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['profiles']
| agpl-3.0 |
manojpandey/cohack | zouk-loans/medium/zauba.py | 1 | 1087 | from bs4 import BeautifulSoup as bs
import requests
import sys
import json
def main():
if len(sys.argv)<2:
print "Usage : python zauba.py item_to_import"
sys.exit()
headers = { "User-Agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.52 Safari/536.5", "content-type":"text/html"}
url = 'https://www.zauba.com/import-'+sys.argv[1]+'-hs-code.html'
page = requests.get(url, headers= headers)
soup = bs(page.text)
data = []
if soup.table:
table = soup.table
rows = table.find_all('tr')
th = rows[0].find_all('th')
info = {'date': '','hscode':'', 'desc':'', 'origin':'', 'port':'', 'unit':'', 'quantity':'', 'value':'', 'perunit':''}
myfile = open("data.json","w")
l = 1
li = ['date', 'hscode', 'desc', 'origin', 'port', 'unit', 'quantity', 'value', 'perunit']
for i in rows[1:]:
info['s_no'] = l
k = 0
for j in i:
info[li[k]] = j.string
k += 1
print info
myfile.write(json.dumps(info))
l += 1
myfile.close()
else:
print "No Info To Show !!"
if __name__ == '__main__':
main() | mit |
citrix-openstack-build/neutron | neutron/tests/unit/midonet/test_midonet_lib.py | 2 | 5273 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (C) 2012 Midokura Japan K.K.
# Copyright (C) 2013 Midokura PTE LTD
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Ryu Ishimoto, Midokura Japan KK
# @author: Tomoe Sugihara, Midokura Japan KK
import mock
import sys
sys.modules["midonetclient"] = mock.Mock()
import testtools
import webob.exc as w_exc
from neutron.openstack.common import uuidutils
from neutron.plugins.midonet import midonet_lib
import neutron.tests.unit.midonet.mock_lib as mock_lib
def _create_test_chain(id, name, tenant_id):
return {'id': id, 'name': name, 'tenant_id': tenant_id}
def _create_test_port_group(id, name, tenant_id):
return {"id": id, "name": name, "tenant_id": tenant_id}
class MidoClientTestCase(testtools.TestCase):
def setUp(self):
super(MidoClientTestCase, self).setUp()
self._tenant_id = 'test-tenant'
self.mock_api = mock.Mock()
self.mock_api_cfg = mock_lib.MidoClientMockConfig(self.mock_api)
self.mock_api_cfg.setup()
self.client = midonet_lib.MidoClient(self.mock_api)
def test_delete_chains_by_names(self):
tenant_id = uuidutils.generate_uuid()
chain1_id = uuidutils.generate_uuid()
chain1 = _create_test_chain(chain1_id, "chain1", tenant_id)
chain2_id = uuidutils.generate_uuid()
chain2 = _create_test_chain(chain2_id, "chain2", tenant_id)
calls = [mock.call.delete_chain(chain1_id),
mock.call.delete_chain(chain2_id)]
self.mock_api_cfg.chains_in = [chain2, chain1]
self.client.delete_chains_by_names(tenant_id, ["chain1", "chain2"])
self.mock_api.assert_has_calls(calls, any_order=True)
def test_delete_port_group_by_name(self):
tenant_id = uuidutils.generate_uuid()
pg1_id = uuidutils.generate_uuid()
pg1 = _create_test_port_group(pg1_id, "pg1", tenant_id)
pg2_id = uuidutils.generate_uuid()
pg2 = _create_test_port_group(pg2_id, "pg2", tenant_id)
self.mock_api_cfg.port_groups_in = [pg1, pg2]
self.client.delete_port_group_by_name(tenant_id, "pg1")
self.mock_api.delete_port_group.assert_called_once_with(pg1_id)
def test_create_dhcp(self):
bridge = mock.Mock()
gateway_ip = "192.168.1.1"
cidr = "192.168.1.0/24"
host_rts = [{'destination': '10.0.0.0/24', 'nexthop': '10.0.0.1'},
{'destination': '10.0.1.0/24', 'nexthop': '10.0.1.1'}]
dns_servers = ["8.8.8.8", "8.8.4.4"]
dhcp_call = mock.call.add_bridge_dhcp(bridge, gateway_ip, cidr,
host_rts=host_rts,
dns_servers=dns_servers)
self.client.create_dhcp(bridge, gateway_ip, cidr, host_rts=host_rts,
dns_servers=dns_servers)
bridge.assert_has_call(dhcp_call)
def test_add_dhcp_host(self):
bridge = mock.Mock()
dhcp_subnet_call = mock.call.get_dhcp_subnet("10.0.0.0_24")
ip_addr_call = dhcp_subnet_call.add_dhcp_host().ip_addr("10.0.0.10")
mac_addr_call = ip_addr_call.mac_addr("2A:DB:6B:8C:19:99")
calls = [dhcp_subnet_call, ip_addr_call, mac_addr_call,
mac_addr_call.create()]
self.client.add_dhcp_host(bridge, "10.0.0.0/24", "10.0.0.10",
"2A:DB:6B:8C:19:99")
bridge.assert_has_calls(calls, any_order=True)
def test_get_router_error(self):
self.mock_api.get_router.side_effect = w_exc.HTTPInternalServerError()
self.assertRaises(midonet_lib.MidonetApiException,
self.client.get_router, uuidutils.generate_uuid())
def test_get_router_not_found(self):
self.mock_api.get_router.side_effect = w_exc.HTTPNotFound()
self.assertRaises(midonet_lib.MidonetResourceNotFound,
self.client.get_router, uuidutils.generate_uuid())
def test_get_bridge_error(self):
self.mock_api.get_bridge.side_effect = w_exc.HTTPInternalServerError()
self.assertRaises(midonet_lib.MidonetApiException,
self.client.get_bridge, uuidutils.generate_uuid())
def test_get_bridge_not_found(self):
self.mock_api.get_bridge.side_effect = w_exc.HTTPNotFound()
self.assertRaises(midonet_lib.MidonetResourceNotFound,
self.client.get_bridge, uuidutils.generate_uuid())
def test_get_bridge(self):
bridge_id = uuidutils.generate_uuid()
bridge = self.client.get_bridge(bridge_id)
self.assertIsNotNone(bridge)
self.assertEqual(bridge.get_id(), bridge_id)
| apache-2.0 |
Architektor/PySnip | venv/lib/python2.7/site-packages/twisted/_threads/_memory.py | 21 | 1666 | # -*- test-case-name: twisted._threads.test.test_memory -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Implementation of an in-memory worker that defers execution.
"""
from __future__ import absolute_import, division, print_function
from zope.interface import implementer
from . import IWorker
from ._convenience import Quit
NoMoreWork = object()
@implementer(IWorker)
class MemoryWorker(object):
"""
An L{IWorker} that queues work for later performance.
@ivar _quit: a flag indicating
@type _quit: L{Quit}
"""
def __init__(self, pending=list):
"""
Create a L{MemoryWorker}.
"""
self._quit = Quit()
self._pending = pending()
def do(self, work):
"""
Queue some work for to perform later; see L{createMemoryWorker}.
@param work: The work to perform.
"""
self._quit.check()
self._pending.append(work)
def quit(self):
"""
Quit this worker.
"""
self._quit.set()
self._pending.append(NoMoreWork)
def createMemoryWorker():
"""
Create an L{IWorker} that does nothing but defer work, to be performed
later.
@return: a worker that will enqueue work to perform later, and a callable
that will perform one element of that work.
@rtype: 2-L{tuple} of (L{IWorker}, L{callable})
"""
def perform():
if not worker._pending:
return False
if worker._pending[0] is NoMoreWork:
return False
worker._pending.pop(0)()
return True
worker = MemoryWorker()
return (worker, perform)
| gpl-3.0 |
qsnake/jinja2 | jinja2/testsuite/lexnparse.py | 12 | 14148 | # -*- coding: utf-8 -*-
"""
jinja2.testsuite.lexnparse
~~~~~~~~~~~~~~~~~~~~~~~~~~
All the unittests regarding lexing, parsing and syntax.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import os
import sys
import time
import tempfile
import unittest
from jinja2.testsuite import JinjaTestCase
from jinja2 import Environment, Template, TemplateSyntaxError, \
UndefinedError, nodes
env = Environment()
# how does a string look like in jinja syntax?
if sys.version_info < (3, 0):
def jinja_string_repr(string):
return repr(string)[1:]
else:
jinja_string_repr = repr
class LexerTestCase(JinjaTestCase):
def test_raw1(self):
tmpl = env.from_string('{% raw %}foo{% endraw %}|'
'{%raw%}{{ bar }}|{% baz %}{% endraw %}')
assert tmpl.render() == 'foo|{{ bar }}|{% baz %}'
def test_raw2(self):
tmpl = env.from_string('1 {%- raw -%} 2 {%- endraw -%} 3')
assert tmpl.render() == '123'
def test_balancing(self):
env = Environment('{%', '%}', '${', '}')
tmpl = env.from_string('''{% for item in seq
%}${{'foo': item}|upper}{% endfor %}''')
assert tmpl.render(seq=range(3)) == "{'FOO': 0}{'FOO': 1}{'FOO': 2}"
def test_comments(self):
env = Environment('<!--', '-->', '{', '}')
tmpl = env.from_string('''\
<ul>
<!--- for item in seq -->
<li>{item}</li>
<!--- endfor -->
</ul>''')
assert tmpl.render(seq=range(3)) == ("<ul>\n <li>0</li>\n "
"<li>1</li>\n <li>2</li>\n</ul>")
def test_string_escapes(self):
for char in u'\0', u'\u2668', u'\xe4', u'\t', u'\r', u'\n':
tmpl = env.from_string('{{ %s }}' % jinja_string_repr(char))
assert tmpl.render() == char
assert env.from_string('{{ "\N{HOT SPRINGS}" }}').render() == u'\u2668'
def test_bytefallback(self):
from pprint import pformat
tmpl = env.from_string(u'''{{ 'foo'|pprint }}|{{ 'bär'|pprint }}''')
assert tmpl.render() == pformat('foo') + '|' + pformat(u'bär')
def test_operators(self):
from jinja2.lexer import operators
for test, expect in operators.iteritems():
if test in '([{}])':
continue
stream = env.lexer.tokenize('{{ %s }}' % test)
stream.next()
assert stream.current.type == expect
def test_normalizing(self):
for seq in '\r', '\r\n', '\n':
env = Environment(newline_sequence=seq)
tmpl = env.from_string('1\n2\r\n3\n4\n')
result = tmpl.render()
assert result.replace(seq, 'X') == '1X2X3X4'
class ParserTestCase(JinjaTestCase):
def test_php_syntax(self):
env = Environment('<?', '?>', '<?=', '?>', '<!--', '-->')
tmpl = env.from_string('''\
<!-- I'm a comment, I'm not interesting -->\
<? for item in seq -?>
<?= item ?>
<?- endfor ?>''')
assert tmpl.render(seq=range(5)) == '01234'
def test_erb_syntax(self):
env = Environment('<%', '%>', '<%=', '%>', '<%#', '%>')
tmpl = env.from_string('''\
<%# I'm a comment, I'm not interesting %>\
<% for item in seq -%>
<%= item %>
<%- endfor %>''')
assert tmpl.render(seq=range(5)) == '01234'
def test_comment_syntax(self):
env = Environment('<!--', '-->', '${', '}', '<!--#', '-->')
tmpl = env.from_string('''\
<!--# I'm a comment, I'm not interesting -->\
<!-- for item in seq --->
${item}
<!--- endfor -->''')
assert tmpl.render(seq=range(5)) == '01234'
def test_balancing(self):
tmpl = env.from_string('''{{{'foo':'bar'}.foo}}''')
assert tmpl.render() == 'bar'
def test_start_comment(self):
tmpl = env.from_string('''{# foo comment
and bar comment #}
{% macro blub() %}foo{% endmacro %}
{{ blub() }}''')
assert tmpl.render().strip() == 'foo'
def test_line_syntax(self):
env = Environment('<%', '%>', '${', '}', '<%#', '%>', '%')
tmpl = env.from_string('''\
<%# regular comment %>
% for item in seq:
${item}
% endfor''')
assert [int(x.strip()) for x in tmpl.render(seq=range(5)).split()] == \
range(5)
env = Environment('<%', '%>', '${', '}', '<%#', '%>', '%', '##')
tmpl = env.from_string('''\
<%# regular comment %>
% for item in seq:
${item} ## the rest of the stuff
% endfor''')
assert [int(x.strip()) for x in tmpl.render(seq=range(5)).split()] == \
range(5)
def test_line_syntax_priority(self):
# XXX: why is the whitespace there in front of the newline?
env = Environment('{%', '%}', '${', '}', '/*', '*/', '##', '#')
tmpl = env.from_string('''\
/* ignore me.
I'm a multiline comment */
## for item in seq:
* ${item} # this is just extra stuff
## endfor''')
assert tmpl.render(seq=[1, 2]).strip() == '* 1\n* 2'
env = Environment('{%', '%}', '${', '}', '/*', '*/', '#', '##')
tmpl = env.from_string('''\
/* ignore me.
I'm a multiline comment */
# for item in seq:
* ${item} ## this is just extra stuff
## extra stuff i just want to ignore
# endfor''')
assert tmpl.render(seq=[1, 2]).strip() == '* 1\n\n* 2'
def test_error_messages(self):
def assert_error(code, expected):
try:
Template(code)
except TemplateSyntaxError, e:
assert str(e) == expected, 'unexpected error message'
else:
assert False, 'that was suposed to be an error'
assert_error('{% for item in seq %}...{% endif %}',
"Encountered unknown tag 'endif'. Jinja was looking "
"for the following tags: 'endfor' or 'else'. The "
"innermost block that needs to be closed is 'for'.")
assert_error('{% if foo %}{% for item in seq %}...{% endfor %}{% endfor %}',
"Encountered unknown tag 'endfor'. Jinja was looking for "
"the following tags: 'elif' or 'else' or 'endif'. The "
"innermost block that needs to be closed is 'if'.")
assert_error('{% if foo %}',
"Unexpected end of template. Jinja was looking for the "
"following tags: 'elif' or 'else' or 'endif'. The "
"innermost block that needs to be closed is 'if'.")
assert_error('{% for item in seq %}',
"Unexpected end of template. Jinja was looking for the "
"following tags: 'endfor' or 'else'. The innermost block "
"that needs to be closed is 'for'.")
assert_error('{% block foo-bar-baz %}',
"Block names in Jinja have to be valid Python identifiers "
"and may not contain hypens, use an underscore instead.")
assert_error('{% unknown_tag %}',
"Encountered unknown tag 'unknown_tag'.")
class SyntaxTestCase(JinjaTestCase):
    """Expression/syntax tests for the Jinja2 template language.

    Most tests render a small template through the module-level ``env``
    fixture (defined elsewhere in this file -- presumably a default
    ``Environment``; TODO confirm) and compare against the exact expected
    output string.  Python 2 era code: uses ``xrange``, ``unicode`` and
    ``u''`` literals.
    """

    def test_call(self):
        # Builds its own Environment so the injected global does not leak
        # into the shared ``env`` fixture.
        env = Environment()
        env.globals['foo'] = lambda a, b, c, e, g: a + b + c + e + g
        tmpl = env.from_string("{{ foo('a', c='d', e='f', *['b'], **{'g': 'h'}) }}")
        assert tmpl.render() == 'abdfh'

    def test_slicing(self):
        tmpl = env.from_string('{{ [1, 2, 3][:] }}|{{ [1, 2, 3][::-1] }}')
        assert tmpl.render() == '[1, 2, 3]|[3, 2, 1]'

    def test_attr(self):
        # Dotted and subscript attribute access are interchangeable.
        tmpl = env.from_string("{{ foo.bar }}|{{ foo['bar'] }}")
        assert tmpl.render(foo={'bar': 42}) == '42|42'

    def test_subscript(self):
        tmpl = env.from_string("{{ foo[0] }}|{{ foo[-1] }}")
        assert tmpl.render(foo=[0, 1, 2]) == '0|2'

    def test_tuple(self):
        tmpl = env.from_string('{{ () }}|{{ (1,) }}|{{ (1, 2) }}')
        assert tmpl.render() == '()|(1,)|(1, 2)'

    def test_math(self):
        tmpl = env.from_string('{{ (1 + 1 * 2) - 3 / 2 }}|{{ 2**3 }}')
        assert tmpl.render() == '1.5|8'

    def test_div(self):
        # Jinja's ``/`` is true division, ``//`` is floor division.
        tmpl = env.from_string('{{ 3 // 2 }}|{{ 3 / 2 }}|{{ 3 % 2 }}')
        assert tmpl.render() == '1|1.5|1'

    def test_unary(self):
        tmpl = env.from_string('{{ +3 }}|{{ -3 }}')
        assert tmpl.render() == '3|-3'

    def test_concat(self):
        # ``~`` stringifies and concatenates its operands.
        tmpl = env.from_string("{{ [1, 2] ~ 'foo' }}")
        assert tmpl.render() == '[1, 2]foo'

    def test_compare(self):
        tmpl = env.from_string('{{ 1 > 0 }}|{{ 1 >= 1 }}|{{ 2 < 3 }}|'
                               '{{ 2 == 2 }}|{{ 1 <= 1 }}')
        assert tmpl.render() == 'True|True|True|True|True'

    def test_inop(self):
        tmpl = env.from_string('{{ 1 in [1, 2, 3] }}|{{ 1 not in [1, 2, 3] }}')
        assert tmpl.render() == 'True|False'

    def test_literals(self):
        tmpl = env.from_string('{{ [] }}|{{ {} }}|{{ () }}')
        assert tmpl.render().lower() == '[]|{}|()'

    def test_bool(self):
        tmpl = env.from_string('{{ true and false }}|{{ false '
                               'or true }}|{{ not false }}')
        assert tmpl.render() == 'False|True|True'

    def test_grouping(self):
        tmpl = env.from_string('{{ (true and false) or (false and true) and not false }}')
        assert tmpl.render() == 'False'

    def test_django_attr(self):
        # Django-style numeric attribute access (``.0``) maps to indexing.
        tmpl = env.from_string('{{ [1, 2, 3].0 }}|{{ [[1]].0.0 }}')
        assert tmpl.render() == '1|1'

    def test_conditional_expression(self):
        tmpl = env.from_string('''{{ 0 if true else 1 }}''')
        assert tmpl.render() == '0'

    def test_short_conditional_expression(self):
        # ``1 if false`` without an else yields undefined, which renders empty
        # but raises when an attribute is accessed on it.
        tmpl = env.from_string('<{{ 1 if false }}>')
        assert tmpl.render() == '<>'
        tmpl = env.from_string('<{{ (1 if false).bar }}>')
        self.assert_raises(UndefinedError, tmpl.render)

    def test_filter_priority(self):
        # Filters bind tighter than ``+``.
        tmpl = env.from_string('{{ "foo"|upper + "bar"|upper }}')
        assert tmpl.render() == 'FOOBAR'

    def test_function_calls(self):
        # (should_fail, signature) pairs: star-args must follow positionals.
        tests = [
            (True, '*foo, bar'),
            (True, '*foo, *bar'),
            (True, '*foo, bar=42'),
            (True, '**foo, *bar'),
            (True, '**foo, bar'),
            (False, 'foo, bar'),
            (False, 'foo, bar=42'),
            (False, 'foo, bar=23, *args'),
            (False, 'a, b=c, *d, **e'),
            (False, '*foo, **bar')
        ]
        for should_fail, sig in tests:
            if should_fail:
                self.assert_raises(TemplateSyntaxError,
                                   env.from_string, '{{ foo(%s) }}' % sig)
            else:
                env.from_string('foo(%s)' % sig)

    def test_tuple_expr(self):
        # All of these tuple forms must parse without error.
        for tmpl in [
            '{{ () }}',
            '{{ (1, 2) }}',
            '{{ (1, 2,) }}',
            '{{ 1, }}',
            '{{ 1, 2 }}',
            '{% for foo, bar in seq %}...{% endfor %}',
            '{% for x in foo, bar %}...{% endfor %}',
            '{% for x in foo, %}...{% endfor %}'
        ]:
            assert env.from_string(tmpl)

    def test_trailing_comma(self):
        tmpl = env.from_string('{{ (1, 2,) }}|{{ [1, 2,] }}|{{ {1: 2,} }}')
        assert tmpl.render().lower() == '(1, 2)|[1, 2]|{1: 2}'

    def test_block_end_name(self):
        # An end-block name, when given, must match the opening name.
        env.from_string('{% block foo %}...{% endblock foo %}')
        self.assert_raises(TemplateSyntaxError, env.from_string,
                           '{% block x %}{% endblock y %}')

    def test_contant_casing(self):
        # Only lowercase true/false/none are constants; uppercase is a name.
        for const in True, False, None:
            tmpl = env.from_string('{{ %s }}|{{ %s }}|{{ %s }}' % (
                str(const), str(const).lower(), str(const).upper()
            ))
            assert tmpl.render() == '%s|%s|' % (const, const)

    def test_test_chaining(self):
        # ``is`` tests may not be chained directly; combine with or/and.
        self.assert_raises(TemplateSyntaxError, env.from_string,
                           '{{ foo is string is sequence }}')
        env.from_string('{{ 42 is string or 42 is number }}'
                        ).render() == 'True'

    def test_string_concatenation(self):
        # Adjacent string literals concatenate, as in Python.
        tmpl = env.from_string('{{ "foo" "bar" "baz" }}')
        assert tmpl.render() == 'foobarbaz'

    def test_notin(self):
        bar = xrange(100)
        tmpl = env.from_string('''{{ not 42 in bar }}''')
        assert tmpl.render(bar=bar) == unicode(not 42 in bar)

    def test_implicit_subscribed_tuple(self):
        # ``foo[1, 2]`` subscribes with the tuple (1, 2).
        class Foo(object):
            def __getitem__(self, x):
                return x
        t = env.from_string('{{ foo[1, 2] }}')
        assert t.render(foo=Foo()) == u'(1, 2)'

    def test_raw2(self):
        tmpl = env.from_string('{% raw %}{{ FOO }} and {% BAR %}{% endraw %}')
        assert tmpl.render() == '{{ FOO }} and {% BAR %}'

    def test_const(self):
        tmpl = env.from_string('{{ true }}|{{ false }}|{{ none }}|'
                               '{{ none is defined }}|{{ missing is defined }}')
        assert tmpl.render() == 'True|False|None|True|False'

    def test_neg_filter_priority(self):
        # Unary minus binds tighter than a filter: -1|foo == (-1)|foo.
        node = env.parse('{{ -1|foo }}')
        assert isinstance(node.body[0].nodes[0], nodes.Filter)
        assert isinstance(node.body[0].nodes[0].node, nodes.Neg)

    def test_const_assign(self):
        # Assigning to the constants true/none must be rejected.
        constass1 = '''{% set true = 42 %}'''
        constass2 = '''{% for none in seq %}{% endfor %}'''
        for tmpl in constass1, constass2:
            self.assert_raises(TemplateSyntaxError, env.from_string, tmpl)

    def test_localset(self):
        # A {% set %} inside a for loop must not leak to the outer scope.
        tmpl = env.from_string('''{% set foo = 0 %}\
{% for item in [1, 2] %}{% set foo = 1 %}{% endfor %}\
{{ foo }}''')
        assert tmpl.render() == '0'

    def test_parse_unary(self):
        tmpl = env.from_string('{{ -foo["bar"] }}')
        assert tmpl.render(foo={'bar': 42}) == '-42'
        tmpl = env.from_string('{{ -foo["bar"]|abs }}')
        assert tmpl.render(foo={'bar': 42}) == '42'
def suite():
    """Collect the lexer, parser and syntax test cases into one TestSuite."""
    # Avoid shadowing the function name with the local (the original used
    # a local also called ``suite``).
    collected = unittest.TestSuite()
    for case in (LexerTestCase, ParserTestCase, SyntaxTestCase):
        collected.addTest(unittest.makeSuite(case))
    return collected
| bsd-3-clause |
cloudera/hue | desktop/core/ext-py/protobuf-3.13.0/google/protobuf/pyext/cpp_message.py | 137 | 2851 | # Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Protocol message implementation hooks for C++ implementation.
Contains helper functions used to create protocol message classes from
Descriptor objects at runtime backed by the protocol buffer C++ API.
"""
__author__ = 'tibell@google.com (Johan Tibell)'
from google.protobuf.pyext import _message
class GeneratedProtocolMessageType(_message.MessageMeta):
    """Metaclass for protocol message classes created at runtime from Descriptors.

    The protocol compiler currently uses this metaclass to create protocol
    message classes at runtime.  Clients can also manually create their own
    classes at runtime, as in this example:

    mydescriptor = Descriptor(.....)
    factory = symbol_database.Default()
    factory.pool.AddDescriptor(mydescriptor)
    MyProtoClass = factory.GetPrototype(mydescriptor)
    myproto_instance = MyProtoClass()
    myproto.foo_field = 23
    ...

    The above example will not work for nested types. If you wish to include them,
    use reflection.MakeClass() instead of manually instantiating the class in
    order to create the appropriate class structure.
    """

    # All metaclass behavior is inherited from the C++-backed
    # _message.MessageMeta; this subclass only pins the descriptor key name.
    # Must be consistent with the protocol-compiler code in
    # proto2/compiler/internal/generator.*.
    _DESCRIPTOR_KEY = 'DESCRIPTOR'
| apache-2.0 |
axinging/crosswalk | tools/reflection_generator/java_class.py | 4 | 17083 | # Copyright (c) 2014 Intel Corporation. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import re
from string import Template
from java_class_component import Enum, Field
from java_method import Method
class JavaClassLoader(object):
    """Manager class maintains all loaded java classes.

    Loads ``<class>.java`` for every class named in ``class_list`` from
    ``src_path`` and exposes the parsed per-class metadata via GetJavaData().
    """

    def __init__(self, src_path, class_list):
        self._src_path = src_path
        self._class_list = class_list
        self._java_data_map = {}
        for clazz in self._class_list:
            self.LoadJavaFile(clazz)
        # Methods can only be finalized after every class has been loaded,
        # because they may reference sibling internal classes.
        for key, java_data in self._java_data_map.items():
            for method in java_data._methods:
                method.PrepareStrings()

    def IsInternalClass(self, clazz):
        """Return True if |clazz| is one of the managed internal classes."""
        return clazz in self._class_list

    def GetJavaData(self, clazz):
        """Return the parsed data for |clazz|, or None if it was not loaded."""
        return self._java_data_map.get(clazz)

    def LoadJavaFile(self, clazz):
        """Parse ``<clazz>.java`` and cache its metadata; no-op if cached."""
        # Was dict.has_key(), which is deprecated and removed in Python 3;
        # ``in`` behaves identically.
        if clazz in self._java_data_map:
            return
        file_name = os.path.join(self._src_path, '%s.java' % clazz)
        try:
            # 'with' guarantees the handle is closed even if read() raises;
            # the original leaked the handle on a read error.
            with open(file_name, 'r') as file_handle:
                file_content = file_handle.read()
        except Exception:
            # Best-effort: a missing/unreadable file is reported, not fatal.
            print('Error reading input Java file, please check.')
            return
        java_data = InternalJavaFileData(self)
        java_data.SetClassContent(file_content)
        self._java_data_map[clazz] = java_data

    def GenerateDoc(self, doc):
        """Rewrite XWalk*Internal names in |doc| to their wrapper names."""
        if not doc:
            return ''

        def ReplaceInternal(matchobj):
            match = matchobj.group(0)
            if self.IsInternalClass(match):
                return self.GetJavaData(match).wrapper_name
            else:
                return match
        return re.sub('XWalk[a-zA-Z_0-9]*Internal',
                      ReplaceInternal, doc).lstrip('\n')
class InternalJavaFileData(object):
    """Data class stores the generator information of internal class.

    Parses one ``XWalk*Internal.java`` source (via SetClassContent) with
    regular expressions, then answers questions about how the class and its
    members should appear in the generated bridge/wrapper code.
    """

    # Recognized @XWalkAPI class-annotation attribute names.
    ANNOTATION_CREATE_INTERNALLY = 'createInternally'
    ANNOTATION_CREATE_EXTERNALLY = 'createExternally'
    ANNOTATION_EXTEND_CLASS = 'extendClass'
    ANNOTATION_NO_INSTANCE = 'noInstance'
    ANNOTATION_INSTANCE = 'instance'
    ANNOTATION_IMPL = 'impl'

    def __init__(self, class_loader):
        self._class_loader = class_loader
        self._class_name = ''
        self._bridge_name = ''
        self._wrapper_name = ''
        self._class_type = ''  # class or interface
        self._class_doc = ''
        self._class_annotations = {}
        self._methods = []
        self._fields = []
        self._imports = []
        self._enums = {}
        self._package_name = ''
        # Cleared when an explicit constructor or a createInternally /
        # noInstance annotation is found.
        self._need_default_constructor = True

    @property
    def class_name(self):
        return self._class_name

    @property
    def bridge_name(self):
        return self._bridge_name

    @property
    def wrapper_name(self):
        return self._wrapper_name

    @property
    def class_type(self):
        return self._class_type

    @property
    def class_doc(self):
        return self._class_doc

    @property
    def class_annotations(self):
        return self._class_annotations

    @property
    def methods(self):
        return self._methods

    @property
    def fields(self):
        return self._fields

    @property
    def imports(self):
        return self._imports

    @property
    def enums(self):
        return self._enums

    @property
    def package_name(self):
        return self._package_name

    @property
    def need_default_constructor(self):
        return self._need_default_constructor

    def GetJavaData(self, clazz):
        # Delegates to the owning loader so sibling classes can be resolved.
        return self._class_loader.GetJavaData(clazz)

    def IsInternalClass(self, clazz):
        return self._class_loader.IsInternalClass(clazz)

    def MangleInternalNameToBridgeName(self, internal_name):
        """XWalkFooInternal -> XWalkFooBridge (non-internal names unchanged)."""
        if not self.IsInternalClass(internal_name):
            return internal_name
        else:
            return internal_name.replace('Internal', 'Bridge')

    def MangleInternalNameToWrapperName(self, internal_name):
        """XWalkFooInternal -> XWalkFoo (non-internal names unchanged)."""
        if not self.IsInternalClass(internal_name):
            return internal_name
        else:
            return internal_name.replace('Internal', '')

    def SetClassContent(self, content):
        """Run all extraction passes over the raw .java source text."""
        self.ExtractPackageName(content)
        self.ExtractImports(content)
        self.ExtractClassProperties(content)
        self.ExtractMethods(content)
        self.ExtractFields(content)
        self.ExtractEnums(content)

    def ExtractPackageName(self, java_content):
        package_re = re.compile('\s*package\s+(?P<package>[a-zA-Z0-9._]+)\s*;')
        for match in re.finditer(package_re, java_content):
            self._package_name = match.group('package')

    def ExtractImports(self, java_content):
        imports_re = re.compile('\s*import\s+(?P<imported>[a-zA-Z0-9._*]+)\s*;')
        for match in re.finditer(imports_re, java_content):
            imported = match.group('imported')
            # Determine whether the import rule should be ignored for generated code.
            # TODO: Currently we only use a blacklist to filter the import rule.
            if imported.startswith('org.xwalk.core.internal') or \
               imported.startswith('org.xwalk.core') or \
               imported.startswith('org.chromium'):
                continue
            self._imports.append(imported)

    def ExtractClassProperties(self, java_content):
        """Find the @XWalkAPI-annotated class/interface declaration."""
        class_re = re.compile(
            '(?P<class_doc>(\n\s*/\*\*.*\n(\s+\*(.)*\n)+\s+\*/\s*)?)\n'
            '\s*@XWalkAPI\(?'
            '(?P<annotation_content>[a-zA-Z0-9.,=\s]*)\)?'
            '\s*public\s+([a-z]+\s+)*'
            '(?P<type>(class|interface))\s+'
            '(?P<class_name>[a-zA-Z0-9]*)')
        for match in re.finditer(class_re, java_content):
            annotation_content = match.group('annotation_content')
            self._class_name = match.group('class_name')
            self._bridge_name = \
                self.MangleInternalNameToBridgeName(self._class_name)
            self._wrapper_name = \
                self.MangleInternalNameToWrapperName(self._class_name)
            self._class_type = match.group('type')
            self._class_doc = match.group('class_doc')
            self.ParseClassAnnotations(annotation_content)

    def ParseClassAnnotations(self, annotation):
        """Class annotation contains the following optional attributes:
            'extendClass' - The class have to extend
            'createExternally' - boolean
            'craeteInternally' - boolean
            'noInstance' - boolean
            'isConst' - boolean
            'impl' - Class to impl
            'instance - instance'"""
        extend_class_re = re.compile('extendClass\s*=\s*'
                                     '(?P<extend_class>[a-zA-Z0-9.]+)')
        for match in re.finditer(extend_class_re, annotation):
            extend_class = match.group('extend_class')
            self._class_annotations['extendClass'] = extend_class
        create_internally_re = re.compile('createInternally\s*=\s*'
                                          '(?P<create_internally>(true|false))')
        for match in re.finditer(create_internally_re, annotation):
            create_internally = match.group('create_internally')
            if create_internally == 'true':
                self._class_annotations['createInternally'] = True
                # Internally-created classes never get a generated default
                # constructor.
                self._need_default_constructor = False
            elif create_internally == 'false':
                self._class_annotations['createInternally'] = False
        create_externally_re = re.compile('createExternally\s*=\s*'
                                          '(?P<create_externally>(true|false))')
        for match in re.finditer(create_externally_re, annotation):
            create_externally = match.group('create_externally')
            if create_externally == 'true':
                self._class_annotations['createExternally'] = True
            elif create_externally == 'false':
                self._class_annotations['createExternally'] = False
        no_instance_re = re.compile('noInstance\s*=\s*'
                                    '(?P<no_instance>(true|false))')
        for match in re.finditer(no_instance_re, annotation):
            no_instance = match.group('no_instance')
            if no_instance == 'true':
                self._class_annotations['noInstance'] = True
                self._need_default_constructor = False
            elif no_instance == 'false':
                self._class_annotations['noInstance'] = False
        is_const_re = re.compile('isConst\s*=\s*'
                                 '(?P<is_const>(true|false))')
        for match in re.finditer(is_const_re, annotation):
            is_const = match.group('is_const')
            if is_const == 'true':
                self._class_annotations['isConst'] = True
            elif is_const == 'false':
                self._class_annotations['isConst'] = False
        impl_re = re.compile('impl\s*=\s*'
                             '(?P<impl>[a-zA-Z0-9.]+)')
        for match in re.finditer(impl_re, annotation):
            impl = match.group('impl')
            self._class_annotations['impl'] = impl
        instance_re = re.compile('instance\s*=\s*'
                                 '(?P<instance>[a-zA-Z0-9.]+)')
        for match in re.finditer(instance_re, annotation):
            instance = match.group('instance')
            self._class_annotations['instance'] = instance

    def ExtractMethods(self, java_content):
        """Collect @XWalkAPI constructors and public/static/abstract methods.

        Four regex passes: constructors, instance methods, static methods,
        abstract methods.  Each hit becomes a Method object.
        """
        constructor_re = re.compile(
            '(?P<method_doc>(\n\s*/\*\*.*\n(\s+\*(.)*\n)+\s+\*/\s*)?)\n'
            '\s*@XWalkAPI\(?'
            '(?P<method_annotation>[a-zA-Z0-9\$\!%,\s\(\)\{\}\\\\;._"=]*)\)?'
            '\s*public\s(?P<method_name>[a-zA-Z0-9]+)\('
            '(?P<method_params>[a-zA-Z0-9\s,\[\]\>\<]*)\)')
        for match in re.finditer(constructor_re, java_content):
            method_annotation = match.group('method_annotation')
            method_name = match.group('method_name')
            method_params = match.group('method_params')
            method_doc = match.group('method_doc')
            method = Method(
                self._class_name,
                self._class_loader,
                True,  # is_constructor
                False,  # is_static
                False,  # is_abstract
                method_name, None,
                method_params, method_annotation, method_doc)
            self._methods.append(method)
            # An explicit constructor means no generated default one.
            self._need_default_constructor = False
        method_re = re.compile(
            '(?P<method_doc>(\n\s*/\*\*.*\n(\s+\*(.)*\n)+\s+\*/\s*)?)\n'
            '\s*@XWalkAPI\(?'
            '(?P<method_annotation>[a-zA-Z0-9%,\s\(\)\{\};._"=]*)\)?'
            '\s*public\s+(?P<method_return>[a-zA-Z0-9]+(\<[a-zA-Z0-9]+,\s[a-zA-Z0-9]+\>)*(\[\s*\])*)\s+'
            '(?P<method_name>[a-zA-Z0-9]+)\('
            '(?P<method_params>[a-zA-Z0-9\s,\]\[\<\>]*)\)')
        for match in re.finditer(method_re, java_content):
            method_annotation = match.group('method_annotation')
            method_name = match.group('method_name')
            method_params = match.group('method_params')
            method_return = match.group('method_return')
            method_doc = match.group('method_doc')
            method = Method(
                self._class_name,
                self._class_loader,
                False,  # is_constructor
                False,  # is_static
                False,  # is_abstract
                method_name, method_return, method_params,
                method_annotation, method_doc)
            self._methods.append(method)
        method_re = re.compile(
            '(?P<method_doc>(\n\s*/\*\*.*\n(\s+\*(.)*\n)+\s+\*/\s*)?)\n'
            '\s*@XWalkAPI\(?'
            '(?P<method_annotation>[a-zA-Z0-9%,\s\(\)\{\};._"=]*)\)?'
            '\s*public\s+static\s+(synchronized\s+)*'
            '(?P<method_return>[a-zA-Z0-9]+)\s+'
            '(?P<method_name>[a-zA-Z0-9]+)\('
            '(?P<method_params>[a-zA-Z0-9\s,\[\]\<\>]*)\)')
        for match in re.finditer(method_re, java_content):
            method_annotation = match.group('method_annotation')
            method_name = match.group('method_name')
            method_params = match.group('method_params')
            method_return = match.group('method_return')
            method_doc = match.group('method_doc')
            method = Method(
                self._class_name,
                self._class_loader,
                False,  # is_constructor
                True,  # is_static
                False,  # is_abstract
                method_name, method_return, method_params,
                method_annotation, method_doc)
            self._methods.append(method)
        method_re = re.compile(
            '(?P<method_doc>(\n\s*/\*\*.*\n(\s+\*(.)*\n)+\s+\*/\s*)?)\n'
            '\s*@XWalkAPI\(?'
            '(?P<method_annotation>[a-zA-Z0-9%,\s\(\)\{\};._"=]*)\)?'
            '\s*public\s+abstract\s+(synchronized\s+)*'
            '(?P<method_return>[a-zA-Z0-9]+)\s+'
            '(?P<method_name>[a-zA-Z0-9]+)\('
            '(?P<method_params>[a-zA-Z0-9\s,\[\]\<\>]*)\)')
        for match in re.finditer(method_re, java_content):
            method_annotation = match.group('method_annotation')
            method_name = match.group('method_name')
            method_params = match.group('method_params')
            method_return = match.group('method_return')
            method_doc = match.group('method_doc')
            method = Method(
                self._class_name,
                self._class_loader,
                False,  # is_constructor
                False,  # is_static
                True,  # is_abstract
                method_name, method_return, method_params,
                method_annotation, method_doc)
            self._methods.append(method)

    def ExtractFields(self, java_content):
        """Collect @XWalkAPI public static final constant fields."""
        field_re = re.compile(
            '(?P<field_doc>(\n\s*/\*\*.*\n(\s+\*(.)*\n)+\s+\*/\s*)?)\n'
            '\s*@XWalkAPI\s*public\s+static\s+final\s+'
            '(?P<field_type>[a-zA-Z0-9_]+)\s+'
            '(?P<field_name>[a-zA-Z0-9_]+)\s*=\s*'
            '(?P<field_value>[a-zA-Z0-9-_"]+)\s*;')
        for match in re.finditer(field_re, java_content):
            field_type = match.group('field_type')
            field_name = match.group('field_name')
            field_value = match.group('field_value')
            field_doc = match.group('field_doc')
            field_object = Field(field_type, field_name, field_value, field_doc)
            self._fields.append(field_object)

    def ExtractEnums(self, java_content):
        """Collect @XWalkAPI public enum declarations, keyed by name."""
        enum_re = re.compile(
            '(?P<enum_doc>(\n\s*/\*\*.*\n(\s+\*(.)*\n)+\s+\*/\s*)?)\n'
            '\s*@XWalkAPI\s*public\s+enum\s+'
            '(?P<enum_name>[a-zA-Z0-9_]+)\s+{'
            '(?P<enum_content>(.|\n)*?)\s*}')
        for match in re.finditer(enum_re, java_content):
            enum_name = match.group('enum_name')
            enum_content = match.group('enum_content')
            enum_doc = match.group('enum_doc')
            enum_object = Enum(enum_name, enum_content, enum_doc)
            self._enums[enum_name] = enum_object

    def HasNoInstanceAnnotation(self):
        return self._class_annotations.get(
            InternalJavaFileData.ANNOTATION_NO_INSTANCE, False)

    def HasCreateInternallyAnnotation(self):
        return self._class_annotations.get(
            InternalJavaFileData.ANNOTATION_CREATE_INTERNALLY, False)

    def HasInstanceCreateInternallyAnnotation(self):
        # If an 'instance' annotation points at another class, that class's
        # createInternally flag wins; otherwise use this class's own flag.
        instance = None
        clazz = self._class_annotations.get(
            InternalJavaFileData.ANNOTATION_INSTANCE, None)
        if clazz:
            instance = self.GetJavaData(clazz.replace('.class', ''))
        if instance:
            return instance.HasCreateInternallyAnnotation()
        else:
            return self.HasCreateInternallyAnnotation()

    def UseAsInstanceInBridgeCall(self, var):
        """Java snippet: pass |var| (a bridge) as the wrapper instance."""
        return '%s.getWrapper()' % self.UseAsReturnInBridgeSuperCall(var)

    def UseAsInstanceInBridgeOverrideCall(self, var):
        """Java snippet: cast |var| to the bridge type for an override call."""
        clazz = self._class_annotations.get(
            InternalJavaFileData.ANNOTATION_INSTANCE, self._class_name)
        clazz = clazz.replace('.class', '')
        if self.GetJavaData(clazz).class_annotations.get(
                InternalJavaFileData.ANNOTATION_CREATE_INTERNALLY, False):
            return self.UseAsReturnInBridgeSuperCall(var)
        return '(%s) %s' % (self.GetJavaData(clazz).bridge_name, var)

    def UseAsReturnInBridgeSuperCall(self, var):
        """Java snippet: coerce |var| to its bridge type.

        For createInternally classes a missing bridge is constructed on the
        fly; otherwise a failed instanceof yields null.
        """
        clazz = self._class_annotations.get(
            InternalJavaFileData.ANNOTATION_INSTANCE, self._class_name)
        clazz = clazz.replace('.class', '')
        if self.GetJavaData(clazz).class_annotations.get(
                InternalJavaFileData.ANNOTATION_CREATE_INTERNALLY, False):
            typed_var_template = Template('(${VAR} instanceof ${BRIDGE_TYPE} ?'\
                ' ((${BRIDGE_TYPE}) ${VAR} ) : new ${BRIDGE_TYPE}(${INTERNAL_VAR}))')
            value = {'VAR': var,
                     'BRIDGE_TYPE': self.GetJavaData(clazz).bridge_name,
                     'INTERNAL_VAR': var if clazz == self._class_name else\
                         '(%s) %s' % (clazz, var)}
            var = typed_var_template.substitute(value)
        else:
            typed_var_template = Template('(${VAR} instanceof ${BRIDGE_TYPE} ?'\
                ' ((${BRIDGE_TYPE}) ${VAR} ) : null)')
            value = {'VAR': var,
                     'BRIDGE_TYPE': self.GetJavaData(clazz).bridge_name}
            var = typed_var_template.substitute(value)
        return var

    def UseAsInstanceInBridgeSuperCall(self, var):
        # pylint: disable=R0201
        return var

    def UseAsInstanceInWrapperCall(self, var):
        """Java snippet: pass |var| (a wrapper) as the bridge instance."""
        clazz = self._class_annotations.get('instance', self._class_name)
        clazz = clazz.replace('.class', '')
        if clazz != self._class_name:
            var = '((%s) %s)' % (self.GetJavaData(clazz).wrapper_name, var)
        return '%s.getBridge()' % var

    def UseAsTypeInWrapperCall(self):
        return self._wrapper_name

    def GetBridgeName(self, subclass=None):
        """Return the bridge class name, optionally for a nested subclass."""
        if not self.IsInternalClass(self._class_name):
            return self._class_name
        else:
            clazz = self._class_annotations.get(
                InternalJavaFileData.ANNOTATION_INSTANCE, self._class_name)
            clazz = clazz.replace('.class', '')
            if not subclass:
                return self.GetJavaData(clazz).bridge_name
            else:
                return clazz + '$' + subclass

    def GetWrapperName(self, subclass=None):
        """Return the wrapper class name, optionally for a nested subclass."""
        if not self.IsInternalClass(self._class_name):
            return self._class_name
        else:
            if not subclass:
                return self._wrapper_name
            else:
                return "%s$%s" % (self._wrapper_name, subclass.replace('Internal', ''))
| bsd-3-clause |
Moulde/django-extensions | django_extensions/management/commands/set_fake_emails.py | 27 | 3783 | """
set_fake_emails.py
Give all users a new email account. Useful for testing in a
development environment. As such, this command is only available when
setting.DEBUG is True.
"""
from optparse import make_option
from django.conf import settings
from django.core.management.base import CommandError, NoArgsCommand
from django_extensions.management.utils import signalcommand
# Template applied per-user; may reference username, first_name, last_name.
DEFAULT_FAKE_EMAIL = '%(username)s@example.com'


class Command(NoArgsCommand):
    # Management-command options controlling which users are rewritten.
    option_list = NoArgsCommand.option_list + (
        make_option('--email', dest='default_email', default=DEFAULT_FAKE_EMAIL,
                    help='Use this as the new email format.'),
        make_option('-a', '--no-admin', action="store_true", dest='no_admin', default=False,
                    help='Do not change administrator accounts'),
        make_option('-s', '--no-staff', action="store_true", dest='no_staff', default=False,
                    help='Do not change staff accounts'),
        make_option('--include', dest='include_regexp', default=None,
                    help='Include usernames matching this regexp.'),
        make_option('--exclude', dest='exclude_regexp', default=None,
                    help='Exclude usernames matching this regexp.'),
        make_option('--include-groups', dest='include_groups', default=None,
                    help='Include users matching this group. (use comma seperation for multiple groups)'),
        make_option('--exclude-groups', dest='exclude_groups', default=None,
                    help='Exclude users matching this group. (use comma seperation for multiple groups)'),
    )
    help = '''DEBUG only: give all users a new email based on their account data ("%s" by default). Possible parameters are: username, first_name, last_name''' % (DEFAULT_FAKE_EMAIL, )
    requires_system_checks = False

    @signalcommand
    def handle_noargs(self, **options):
        """Rewrite the email of every matching user to the fake template.

        Refuses to run outside DEBUG mode so production data cannot be
        clobbered.  Filters are applied in a fixed order: admin/staff
        exclusion, group exclusion, group inclusion, then username regexes.
        """
        if not settings.DEBUG:
            raise CommandError('Only available in debug mode')
        # Deferred imports: the user model is only resolvable once Django
        # settings are configured.
        from django_extensions.compat import get_user_model
        from django.contrib.auth.models import Group
        email = options.get('default_email', DEFAULT_FAKE_EMAIL)
        include_regexp = options.get('include_regexp', None)
        exclude_regexp = options.get('exclude_regexp', None)
        include_groups = options.get('include_groups', None)
        exclude_groups = options.get('exclude_groups', None)
        no_admin = options.get('no_admin', False)
        no_staff = options.get('no_staff', False)
        User = get_user_model()
        users = User.objects.all()
        if no_admin:
            users = users.exclude(is_superuser=True)
        if no_staff:
            users = users.exclude(is_staff=True)
        if exclude_groups:
            groups = Group.objects.filter(name__in=exclude_groups.split(","))
            if groups:
                users = users.exclude(groups__in=groups)
            else:
                # A filter naming no existing group is treated as an error,
                # not a no-op.
                raise CommandError("No group matches filter: %s" % exclude_groups)
        if include_groups:
            groups = Group.objects.filter(name__in=include_groups.split(","))
            if groups:
                users = users.filter(groups__in=groups)
            else:
                raise CommandError("No groups matches filter: %s" % include_groups)
        if exclude_regexp:
            users = users.exclude(username__regex=exclude_regexp)
        if include_regexp:
            users = users.filter(username__regex=include_regexp)
        for user in users:
            # The template may interpolate any of these three fields.
            user.email = email % {'username': user.username,
                                  'first_name': user.first_name,
                                  'last_name': user.last_name}
            user.save()
        print('Changed %d emails' % users.count())
| mit |
neon-lab/m3u8 | tests/test_strict_validations.py | 7 | 1080 | # coding: utf-8
# Copyright 2014 Globo.com Player authors. All rights reserved.
# Use of this source code is governed by a MIT License
# license that can be found in the LICENSE file.
import pytest
@pytest.mark.xfail
def test_should_fail_if_first_line_not_EXTM3U():
    """Strict parsing should reject playlists not starting with #EXTM3U (unimplemented)."""
    assert False
@pytest.mark.xfail
def test_should_fail_if_expected_ts_segment_line_is_not_valid():
    """Strict parsing should reject malformed TS segment lines (unimplemented)."""
    assert False
@pytest.mark.xfail
def test_should_fail_if_EXT_X_MEDIA_SEQUENCE_is_diffent_from_sequence_number_of_first_uri():
    """EXT-X-MEDIA-SEQUENCE must match the first URI's sequence number (unimplemented)."""
    assert False
@pytest.mark.xfail
def test_should_fail_if_more_than_one_EXT_X_MEDIA_SEQUENCE():
    """At most one EXT-X-MEDIA-SEQUENCE tag is allowed (unimplemented)."""
    assert False
@pytest.mark.xfail
def test_should_fail_if_EXT_X_MEDIA_SEQUENCE_is_not_a_number():
    """EXT-X-MEDIA-SEQUENCE must carry a numeric value (unimplemented)."""
    assert False
@pytest.mark.xfail
def test_should_validate_supported_EXT_X_VERSION():
    """EXT-X-VERSION should be checked against supported versions (unimplemented)."""
    assert False
@pytest.mark.xfail
def test_should_fail_if_any_EXTINF_duration_is_greater_than_TARGET_DURATION():
    """No EXTINF duration may exceed EXT-X-TARGETDURATION (unimplemented)."""
    assert False
@pytest.mark.xfail
def test_should_fail_if_TARGET_DURATION_not_found():
    """A media playlist without EXT-X-TARGETDURATION should be rejected (unimplemented)."""
    assert False
@pytest.mark.xfail
def test_should_fail_if_invalid_m3u8_url_after_EXT_X_STREAM_INF():
    """EXT-X-STREAM-INF must be followed by a valid playlist URL (unimplemented)."""
    assert False
| mit |
BlogomaticProject/Blogomatic | opt/blog-o-matic/usr/lib/python/Bio/File.py | 2 | 4640 | # Copyright 1999 by Jeffrey Chang. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""Code for more fancy file handles.
Classes:
UndoHandle File object decorator with support for undo-like operations.
StringHandle Wraps a file object around a string.
SGMLStripper Object that strips SGML. This is now considered OBSOLETE, and
is likely to be deprecated in a future release of Biopython,
and later removed.
"""
import StringIO
class UndoHandle:
    """A Python handle that adds functionality for saving lines.

    Pushed-back lines are kept in a LIFO stack and consumed before any
    further data is read from the wrapped handle.

    Added methods:
    saveline    Save a line to be returned next time.
    peekline    Peek at the next line without consuming it.
    """

    def __init__(self, handle):
        self._handle = handle
        self._saved = []

    def __iter__(self):
        return self

    def next(self):
        # Python 2 iterator protocol; an empty string means EOF.
        line = self.readline()
        if not line:
            raise StopIteration
        return line

    def readlines(self, *args, **keywds):
        # Read from the handle first so a failure leaves the stack intact.
        combined = self._saved + self._handle.readlines(*args, **keywds)
        self._saved = []
        return combined

    def readline(self, *args, **keywds):
        if self._saved:
            return self._saved.pop(0)
        return self._handle.readline(*args, **keywds)

    def read(self, size=-1):
        if size == -1:
            # Unbounded read: drain the whole stack plus the handle.
            pending = "".join(self._saved)
            self._saved[:] = []
            return pending + self._handle.read(size)
        # Bounded read: satisfy as much as possible from the stack, then
        # read only the remainder from the handle.
        pieces = []
        while size > 0 and self._saved:
            head = self._saved[0]
            if len(head) <= size:
                size -= len(head)
                pieces.append(self._saved.pop(0))
            else:
                pieces.append(head[:size])
                self._saved[0] = head[size:]
                size = 0
        return "".join(pieces) + self._handle.read(size)

    def saveline(self, line):
        # Empty/falsy lines are silently dropped.
        if line:
            self._saved.insert(0, line)

    def peekline(self):
        if self._saved:
            return self._saved[0]
        line = self._handle.readline()
        self.saveline(line)
        return line

    def tell(self):
        # Report the position as if the pushed-back lines were unread.
        pending = sum(len(line) for line in self._saved)
        return self._handle.tell() - pending

    def seek(self, *args):
        # Seeking invalidates any pushed-back lines.
        self._saved = []
        self._handle.seek(*args)

    def __getattr__(self, attr):
        # Everything else is delegated to the wrapped handle.
        return getattr(self._handle, attr)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self._handle.close()
# StringHandle wraps a file-like (handle) interface around an in-memory
# string; it is simply an alias for StringIO.StringIO.
# I could make this faster by using cStringIO.
# However, cStringIO (in v1.52) does not implement the
# readlines method.
StringHandle = StringIO.StringIO
try:
    import sgmllib
except ImportError:
    # sgmllib isn't available on Python 3, but we don't care much as
    # SGMLStripper is obsolete.
    pass
else:
    class SGMLStripper:
        """Object to strip SGML tags (OBSOLETE)."""

        class MyParser(sgmllib.SGMLParser):
            # Minimal parser that accumulates only the text between tags.
            def __init__(self):
                sgmllib.SGMLParser.__init__(self)
                self.data = ''

            def handle_data(self, data):
                self.data = self.data + data

        def __init__(self):
            import warnings
            warnings.warn("This class is obsolete, and likely to be deprecated and later removed in a future version of Biopython", PendingDeprecationWarning)
            self._parser = SGMLStripper.MyParser()

        def strip(self, str):
            """S.strip(str) -> string

            Strip the SGML tags from str.
            """
            if not str:  # empty string, don't do anything.
                return ''
            # I need to make sure that I don't return an empty string if
            # the buffer is not empty.  This can happen if there's a newline
            # character embedded within a tag.  Thus, I'll first check to
            # see if the last character is a newline.  If it is, and it's stripped
            # away, I'll add it back.
            is_newline = str[-1] in ['\n', '\r']
            self._parser.data = ''  # clear the parser's data (don't reset)
            self._parser.feed(str)
            if self._parser.data:
                str = self._parser.data
            elif is_newline:
                str = '\n'
            else:
                str = ''
            return str
| gpl-2.0 |
TRESCLOUD/odoopub | addons/point_of_sale/point_of_sale.py | 10 | 70173 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
import time
from openerp import tools
from openerp.osv import fields, osv
from openerp.tools.translate import _
import openerp.addons.decimal_precision as dp
import openerp.addons.product.product
_logger = logging.getLogger(__name__)
class pos_config(osv.osv):
    """Configuration of one physical Point of Sale: payment journals,
    hardware-proxy interface flags, barcode patterns, receipt texts and
    the stock location products are taken from."""
    _name = 'pos.config'
    # Lifecycle states of a point of sale configuration.
    POS_CONFIG_STATE = [
        ('active', 'Active'),
        ('inactive', 'Inactive'),
        ('deprecated', 'Deprecated')
    ]
    def _get_currency(self, cr, uid, ids, fieldnames, args, context=None):
        """Function field: currency of the sale journal (journal currency,
        else the journal company's currency); falls back to the current
        user's company currency when no sale journal is configured."""
        result = dict.fromkeys(ids, False)
        for pos_config in self.browse(cr, uid, ids, context=context):
            if pos_config.journal_id:
                currency_id = pos_config.journal_id.currency.id or pos_config.journal_id.company_id.currency_id.id
            else:
                currency_id = self.pool['res.users'].browse(cr, uid, uid, context=context).company_id.currency_id.id
            result[pos_config.id] = currency_id
        return result
    _columns = {
        'name' : fields.char('Point of Sale Name', select=1,
             required=True, help="An internal identification of the point of sale"),
        'journal_ids' : fields.many2many('account.journal', 'pos_config_journal_rel',
             'pos_config_id', 'journal_id', 'Available Payment Methods',
             domain="[('journal_user', '=', True ), ('type', 'in', ['bank', 'cash'])]",),
        'picking_type_id': fields.many2one('stock.picking.type', 'Picking Type'),
        'stock_location_id': fields.many2one('stock.location', 'Stock Location', domain=[('usage', '=', 'internal')], required=True),
        'journal_id' : fields.many2one('account.journal', 'Sale Journal',
             domain=[('type', '=', 'sale')],
             help="Accounting journal used to post sales entries."),
        'currency_id' : fields.function(_get_currency, type="many2one", string="Currency", relation="res.currency"),
        # iface_* flags toggle features of the web frontend / hardware proxy.
        'iface_self_checkout' : fields.boolean('Self Checkout Mode',
             help="Check this if this point of sale should open by default in a self checkout mode. If unchecked, Odoo uses the normal cashier mode by default."),
        'iface_cashdrawer' : fields.boolean('Cashdrawer', help="Automatically open the cashdrawer"),
        'iface_payment_terminal' : fields.boolean('Payment Terminal', help="Enables Payment Terminal integration"),
        'iface_electronic_scale' : fields.boolean('Electronic Scale', help="Enables Electronic Scale integration"),
        'iface_vkeyboard' : fields.boolean('Virtual KeyBoard', help="Enables an integrated Virtual Keyboard"),
        'iface_print_via_proxy' : fields.boolean('Print via Proxy', help="Bypass browser printing and prints via the hardware proxy"),
        'iface_scan_via_proxy' : fields.boolean('Scan via Proxy', help="Enable barcode scanning with a remotely connected barcode scanner"),
        'iface_invoicing': fields.boolean('Invoicing',help='Enables invoice generation from the Point of Sale'),
        'iface_big_scrollbars': fields.boolean('Large Scrollbars',help='For imprecise industrial touchscreens'),
        'receipt_header': fields.text('Receipt Header',help="A short text that will be inserted as a header in the printed receipt"),
        'receipt_footer': fields.text('Receipt Footer',help="A short text that will be inserted as a footer in the printed receipt"),
        'proxy_ip': fields.char('IP Address', help='The hostname or ip address of the hardware proxy, Will be autodetected if left empty', size=45),
        'state' : fields.selection(POS_CONFIG_STATE, 'Status', required=True, readonly=True, copy=False),
        'sequence_id' : fields.many2one('ir.sequence', 'Order IDs Sequence', readonly=True,
            help="This sequence is automatically created by Odoo but you can change it "\
                "to customize the reference numbers of your orders.", copy=False),
        'session_ids': fields.one2many('pos.session', 'config_id', 'Sessions'),
        'group_by' : fields.boolean('Group Journal Items', help="Check this if you want to group the Journal Items by Product while closing a Session"),
        'pricelist_id': fields.many2one('product.pricelist','Pricelist', required=True),
        'company_id': fields.many2one('res.company', 'Company', required=True),
        # barcode_* hold the patterns the frontend uses to classify scanned barcodes.
        'barcode_product': fields.char('Product Barcodes', size=64, help='The pattern that identifies product barcodes'),
        'barcode_cashier': fields.char('Cashier Barcodes', size=64, help='The pattern that identifies cashier login barcodes'),
        'barcode_customer': fields.char('Customer Barcodes',size=64, help='The pattern that identifies customer\'s client card barcodes'),
        'barcode_price': fields.char('Price Barcodes', size=64, help='The pattern that identifies a product with a barcode encoded price'),
        'barcode_weight': fields.char('Weight Barcodes', size=64, help='The pattern that identifies a product with a barcode encoded weight'),
        'barcode_discount': fields.char('Discount Barcodes', size=64, help='The pattern that identifies a product with a barcode encoded discount'),
    }
    def _check_cash_control(self, cr, uid, ids, context=None):
        """Constraint helper: at most one cash-controlled journal per config."""
        return all(
            (sum(int(journal.cash_control) for journal in record.journal_ids) <= 1)
            for record in self.browse(cr, uid, ids, context=context)
        )
    _constraints = [
        (_check_cash_control, "You cannot have two cash controls in one Point Of Sale !", ['journal_ids']),
    ]
    def name_get(self, cr, uid, ids, context=None):
        """Display name: append '(not used)' or the current session's user."""
        result = []
        # Note: `states` maps session state to a label, but only the user
        # name branch below actually uses session info (state suffix is
        # commented out).
        states = {
            'opening_control': _('Opening Control'),
            'opened': _('In Progress'),
            'closing_control': _('Closing Control'),
            'closed': _('Closed & Posted'),
        }
        for record in self.browse(cr, uid, ids, context=context):
            if (not record.session_ids) or (record.session_ids[0].state=='closed'):
                result.append((record.id, record.name+' ('+_('not used')+')'))
                continue
            session = record.session_ids[0]
            result.append((record.id, record.name + ' ('+session.user_id.name+')')) #, '+states[session.state]+')'))
        return result
    def _default_sale_journal(self, cr, uid, context=None):
        """Default sale journal: first sale journal of the user's company."""
        company_id = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.id
        res = self.pool.get('account.journal').search(cr, uid, [('type', '=', 'sale'), ('company_id', '=', company_id)], limit=1, context=context)
        return res and res[0] or False
    def _default_pricelist(self, cr, uid, context=None):
        """Default pricelist: first sale pricelist found."""
        res = self.pool.get('product.pricelist').search(cr, uid, [('type', '=', 'sale')], limit=1, context=context)
        return res and res[0] or False
    def _get_default_location(self, cr, uid, context=None):
        """Default stock location: lot_stock of the company's first warehouse."""
        wh_obj = self.pool.get('stock.warehouse')
        user = self.pool.get('res.users').browse(cr, uid, uid, context)
        res = wh_obj.search(cr, uid, [('company_id', '=', user.company_id.id)], limit=1, context=context)
        if res and res[0]:
            return wh_obj.browse(cr, uid, res[0], context=context).lot_stock_id.id
        return False
    def _get_default_company(self, cr, uid, context=None):
        """Default company: the current user's company."""
        company_id = self.pool.get('res.users')._get_company(cr, uid, context=context)
        return company_id
    _defaults = {
        'state' : POS_CONFIG_STATE[0][0],
        'journal_id': _default_sale_journal,
        'group_by' : True,
        'pricelist_id': _default_pricelist,
        'iface_invoicing': True,
        'stock_location_id': _get_default_location,
        'company_id': _get_default_company,
        'barcode_product': '*',
        'barcode_cashier': '041*',
        'barcode_customer':'042*',
        'barcode_weight': '21xxxxxNNDDD',
        'barcode_discount':'22xxxxxxxxNN',
        'barcode_price': '23xxxxxNNNDD',
    }
    def onchange_picking_type_id(self, cr, uid, ids, picking_type_id, context=None):
        """When the picking type changes, propose its internal source
        location as the config's stock location (only for internal ->
        customer picking types)."""
        p_type_obj = self.pool.get("stock.picking.type")
        p_type = p_type_obj.browse(cr, uid, picking_type_id, context=context)
        if p_type.default_location_src_id and p_type.default_location_src_id.usage == 'internal' and p_type.default_location_dest_id and p_type.default_location_dest_id.usage == 'customer':
            return {'value': {'stock_location_id': p_type.default_location_src_id.id}}
        return False
    def set_active(self, cr, uid, ids, context=None):
        """Button: mark the configuration active."""
        return self.write(cr, uid, ids, {'state' : 'active'}, context=context)
    def set_inactive(self, cr, uid, ids, context=None):
        """Button: mark the configuration inactive."""
        return self.write(cr, uid, ids, {'state' : 'inactive'}, context=context)
    def set_deprecate(self, cr, uid, ids, context=None):
        """Button: mark the configuration deprecated."""
        return self.write(cr, uid, ids, {'state' : 'deprecated'}, context=context)
    def create(self, cr, uid, values, context=None):
        """Create the config together with its dedicated order-name sequence."""
        proxy = self.pool.get('ir.sequence')
        sequence_values = dict(
            name='PoS %s' % values['name'],
            padding=5,
            prefix="%s/" % values['name'],
        )
        sequence_id = proxy.create(cr, uid, sequence_values, context=context)
        values['sequence_id'] = sequence_id
        return super(pos_config, self).create(cr, uid, values, context=context)
    def unlink(self, cr, uid, ids, context=None):
        """Delete the config and the sequence created alongside it."""
        for obj in self.browse(cr, uid, ids, context=context):
            if obj.sequence_id:
                obj.sequence_id.unlink()
        return super(pos_config, self).unlink(cr, uid, ids, context=context)
class pos_session(osv.osv):
    """One cashier working session on a pos.config: opening control,
    in-progress sales, closing control, then posting of the accounting
    entries generated by the orders."""
    _name = 'pos.session'
    _order = 'id desc'
    POS_SESSION_STATE = [
        ('opening_control', 'Opening Control'),  # Signal open
        ('opened', 'In Progress'),               # Signal closing
        ('closing_control', 'Closing Control'),  # Signal close
        ('closed', 'Closed & Posted'),
    ]
    def _compute_cash_all(self, cr, uid, ids, fieldnames, args, context=None):
        """Function field (multi='cash'): locate the session's
        cash-controlled statement and expose its journal, the statement
        itself and a boolean flag."""
        result = dict()
        for record in self.browse(cr, uid, ids, context=context):
            result[record.id] = {
                'cash_journal_id' : False,
                'cash_register_id' : False,
                'cash_control' : False,
            }
            for st in record.statement_ids:
                if st.journal_id.cash_control == True:
                    result[record.id]['cash_control'] = True
                    result[record.id]['cash_journal_id'] = st.journal_id.id
                    result[record.id]['cash_register_id'] = st.id
        return result
    _columns = {
        'config_id' : fields.many2one('pos.config', 'Point of Sale',
                                      help="The physical point of sale you will use.",
                                      required=True,
                                      select=1,
                                      domain="[('state', '=', 'active')]",
                                     ),
        'name' : fields.char('Session ID', required=True, readonly=True),
        'user_id' : fields.many2one('res.users', 'Responsible',
                                    required=True,
                                    select=1,
                                    readonly=True,
                                    states={'opening_control' : [('readonly', False)]}
                                   ),
        # FIX: field label was misspelled 'Currnecy'.
        'currency_id' : fields.related('config_id', 'currency_id', type="many2one", relation='res.currency', string="Currency"),
        'start_at' : fields.datetime('Opening Date', readonly=True),
        'stop_at' : fields.datetime('Closing Date', readonly=True),
        'state' : fields.selection(POS_SESSION_STATE, 'Status',
                required=True, readonly=True,
                select=1, copy=False),
        'sequence_number': fields.integer('Order Sequence Number'),
        'cash_control' : fields.function(_compute_cash_all,
                                         multi='cash',
                                         type='boolean', string='Has Cash Control'),
        'cash_journal_id' : fields.function(_compute_cash_all,
                                            multi='cash',
                                            type='many2one', relation='account.journal',
                                            string='Cash Journal', store=True),
        'cash_register_id' : fields.function(_compute_cash_all,
                                             multi='cash',
                                             type='many2one', relation='account.bank.statement',
                                             string='Cash Register', store=True),
        'opening_details_ids' : fields.related('cash_register_id', 'opening_details_ids',
                type='one2many', relation='account.cashbox.line',
                string='Opening Cash Control'),
        'details_ids' : fields.related('cash_register_id', 'details_ids',
                type='one2many', relation='account.cashbox.line',
                string='Cash Control'),
        'cash_register_balance_end_real' : fields.related('cash_register_id', 'balance_end_real',
                type='float',
                digits_compute=dp.get_precision('Account'),
                string="Ending Balance",
                help="Total of closing cash control lines.",
                readonly=True),
        'cash_register_balance_start' : fields.related('cash_register_id', 'balance_start',
                type='float',
                digits_compute=dp.get_precision('Account'),
                string="Starting Balance",
                help="Total of opening cash control lines.",
                readonly=True),
        'cash_register_total_entry_encoding' : fields.related('cash_register_id', 'total_entry_encoding',
                string='Total Cash Transaction',
                readonly=True,
                help="Total of all paid sale orders"),
        'cash_register_balance_end' : fields.related('cash_register_id', 'balance_end',
                type='float',
                digits_compute=dp.get_precision('Account'),
                string="Theoretical Closing Balance",
                help="Sum of opening balance and transactions.",
                readonly=True),
        'cash_register_difference' : fields.related('cash_register_id', 'difference',
                type='float',
                string='Difference',
                help="Difference between the theoretical closing balance and the real closing balance.",
                readonly=True),
        'journal_ids' : fields.related('config_id', 'journal_ids',
                                       type='many2many',
                                       readonly=True,
                                       relation='account.journal',
                                       string='Available Payment Methods'),
        'order_ids' : fields.one2many('pos.order', 'session_id',  'Orders'),
        'statement_ids' : fields.one2many('account.bank.statement', 'pos_session_id', 'Bank Statement', readonly=True),
    }
    _defaults = {
        'name' : '/',
        'user_id' : lambda obj, cr, uid, context: uid,
        'state' : 'opening_control',
        'sequence_number': 1,
    }
    _sql_constraints = [
        ('uniq_name', 'unique(name)', "The name of this POS Session must be unique !"),
    ]
    def _check_unicity(self, cr, uid, ids, context=None):
        """Constraint: a user may have at most one non-closed session."""
        # FIX: forward the caller's context instead of hard-coding None.
        for session in self.browse(cr, uid, ids, context=context):
            # open if there is no session in 'opening_control', 'opened', 'closing_control' for one user
            domain = [
                ('state', 'not in', ('closed','closing_control')),
                ('user_id', '=', session.user_id.id)
            ]
            count = self.search_count(cr, uid, domain, context=context)
            if count>1:
                return False
        return True
    def _check_pos_config(self, cr, uid, ids, context=None):
        """Constraint: a pos.config may have at most one non-closed session."""
        # FIX: forward the caller's context instead of hard-coding None.
        for session in self.browse(cr, uid, ids, context=context):
            domain = [
                ('state', '!=', 'closed'),
                ('config_id', '=', session.config_id.id)
            ]
            count = self.search_count(cr, uid, domain, context=context)
            if count>1:
                return False
        return True
    _constraints = [
        (_check_unicity, "You cannot create two active sessions with the same responsible!", ['user_id', 'state']),
        (_check_pos_config, "You cannot create two active sessions related to the same point of sale!", ['config_id']),
    ]
    def create(self, cr, uid, values, context=None):
        """Create a session: lazily configure the pos.config (sale journal,
        payment methods) when missing, open one bank statement per payment
        journal, and name the session from the config's sequence."""
        context = dict(context or {})
        config_id = values.get('config_id', False) or context.get('default_config_id', False)
        if not config_id:
            raise osv.except_osv( _('Error!'),
                _("You should assign a Point of Sale to your session."))
        # journal_id is not required on the pos_config because it does not
        # exists at the installation. If nothing is configured at the
        # installation we do the minimal configuration. Impossible to do in
        # the .xml files as the CoA is not yet installed.
        jobj = self.pool.get('pos.config')
        pos_config = jobj.browse(cr, uid, config_id, context=context)
        context.update({'company_id': pos_config.company_id.id})
        if not pos_config.journal_id:
            jid = jobj.default_get(cr, uid, ['journal_id'], context=context)['journal_id']
            if jid:
                jobj.write(cr, uid, [pos_config.id], {'journal_id': jid}, context=context)
            else:
                # FIX: consistent capitalization of the error title.
                raise osv.except_osv( _('Error!'),
                    _("Unable to open the session. You have to assign a sale journal to your point of sale."))
        # define some cash journal if no payment method exists
        if not pos_config.journal_ids:
            journal_proxy = self.pool.get('account.journal')
            cashids = journal_proxy.search(cr, uid, [('journal_user', '=', True), ('type','=','cash')], context=context)
            if not cashids:
                cashids = journal_proxy.search(cr, uid, [('type', '=', 'cash')], context=context)
                if not cashids:
                    cashids = journal_proxy.search(cr, uid, [('journal_user','=',True)], context=context)
            jobj.write(cr, uid, [pos_config.id], {'journal_ids': [(6,0, cashids)]})
        pos_config = jobj.browse(cr, uid, config_id, context=context)
        bank_statement_ids = []
        for journal in pos_config.journal_ids:
            bank_values = {
                'journal_id' : journal.id,
                'user_id' : uid,
                'company_id' : pos_config.company_id.id
            }
            statement_id = self.pool.get('account.bank.statement').create(cr, uid, bank_values, context=context)
            bank_statement_ids.append(statement_id)
        values.update({
            'name' : pos_config.sequence_id._next(),
            'statement_ids' : [(6, 0, bank_statement_ids)],
            'config_id': config_id
        })
        return super(pos_session, self).create(cr, uid, values, context=context)
    def unlink(self, cr, uid, ids, context=None):
        """Delete sessions together with their bank statements.
        FIX: the previous implementation removed the statements and then
        returned True without ever deleting the session records; we now
        delegate to super() so the sessions themselves are removed too."""
        for obj in self.browse(cr, uid, ids, context=context):
            for statement in obj.statement_ids:
                statement.unlink(context=context)
        return super(pos_session, self).unlink(cr, uid, ids, context=context)
    def open_cb(self, cr, uid, ids, context=None):
        """
        call the Point Of Sale interface and set the pos.session to 'opened' (in progress)
        """
        if context is None:
            context = dict()
        if isinstance(ids, (int, long)):
            ids = [ids]
        this_record = self.browse(cr, uid, ids[0], context=context)
        this_record.signal_workflow('open')
        context.update(active_id=this_record.id)
        return {
            'type' : 'ir.actions.act_url',
            'url'  : '/pos/web/',
            'target': 'self',
        }
    def wkf_action_open(self, cr, uid, ids, context=None):
        """Workflow: stamp the opening date, open all statements and jump
        to the frontend."""
        # second browse because we need to refetch the data from the DB for cash_register_id
        for record in self.browse(cr, uid, ids, context=context):
            values = {}
            if not record.start_at:
                values['start_at'] = time.strftime('%Y-%m-%d %H:%M:%S')
            values['state'] = 'opened'
            record.write(values)
            for st in record.statement_ids:
                st.button_open()
        return self.open_frontend_cb(cr, uid, ids, context=context)
    def wkf_action_opening_control(self, cr, uid, ids, context=None):
        """Workflow: go (back) to the opening-control step."""
        return self.write(cr, uid, ids, {'state' : 'opening_control'}, context=context)
    def wkf_action_closing_control(self, cr, uid, ids, context=None):
        """Workflow: enter closing control; non-cash statements get their
        real ending balance aligned on the theoretical one."""
        for session in self.browse(cr, uid, ids, context=context):
            for statement in session.statement_ids:
                if (statement != session.cash_register_id) and (statement.balance_end != statement.balance_end_real):
                    self.pool.get('account.bank.statement').write(cr, uid, [statement.id], {'balance_end_real': statement.balance_end})
        return self.write(cr, uid, ids, {'state' : 'closing_control', 'stop_at' : time.strftime('%Y-%m-%d %H:%M:%S')}, context=context)
    def wkf_action_close(self, cr, uid, ids, context=None):
        """Workflow: validate cash differences, confirm all statements,
        post the orders' accounting entries and close the session."""
        # Close CashBox
        bsl = self.pool.get('account.bank.statement.line')
        for record in self.browse(cr, uid, ids, context=context):
            for st in record.statement_ids:
                if abs(st.difference) > st.journal_id.amount_authorized_diff:
                    # The pos manager can close statements with maximums.
                    if not self.pool.get('ir.model.access').check_groups(cr, uid, "point_of_sale.group_pos_manager"):
                        raise osv.except_osv( _('Error!'),
                            _("Your ending balance is too different from the theoretical cash closing (%.2f), the maximum allowed is: %.2f. You can contact your manager to force it.") % (st.difference, st.journal_id.amount_authorized_diff))
                if (st.journal_id.type not in ['bank', 'cash']):
                    raise osv.except_osv(_('Error!'),
                        _("The type of the journal for your payment method should be bank or cash "))
                if st.difference and st.journal_id.cash_control == True:
                    if st.difference > 0.0:
                        name= _('Point of Sale Profit')
                    else:
                        name= _('Point of Sale Loss')
                    # book the cash difference as an extra statement line
                    bsl.create(cr, uid, {
                        'statement_id': st.id,
                        'amount': st.difference,
                        'ref': record.name,
                        'name': name,
                    }, context=context)
                if st.journal_id.type == 'bank':
                    st.write({'balance_end_real' : st.balance_end})
                getattr(st, 'button_confirm_%s' % st.journal_id.type)(context=context)
        self._confirm_orders(cr, uid, ids, context=context)
        self.write(cr, uid, ids, {'state' : 'closed'}, context=context)
        obj = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'point_of_sale', 'menu_point_root')[1]
        return {
            'type' : 'ir.actions.client',
            'name' : 'Point of Sale Menu',
            'tag' : 'reload',
            'params' : {'menu_id': obj},
        }
    def _confirm_orders(self, cr, uid, ids, context=None):
        """Post one account move per session for all paid orders, then push
        every order to the 'done' state (orders must be paid or invoiced)."""
        account_move_obj = self.pool.get('account.move')
        pos_order_obj = self.pool.get('pos.order')
        for session in self.browse(cr, uid, ids, context=context):
            local_context = dict(context or {}, force_company=session.config_id.journal_id.company_id.id)
            order_ids = [order.id for order in session.order_ids if order.state == 'paid']
            move_id = account_move_obj.create(cr, uid, {'ref' : session.name, 'journal_id' : session.config_id.journal_id.id, }, context=local_context)
            pos_order_obj._create_account_move_line(cr, uid, order_ids, session, move_id, context=local_context)
            for order in session.order_ids:
                if order.state == 'done':
                    continue
                if order.state not in ('paid', 'invoiced'):
                    raise osv.except_osv(
                        _('Error!'),
                        _("You cannot confirm all orders of this session, because they have not the 'paid' status"))
                else:
                    pos_order_obj.signal_workflow(cr, uid, [order.id], 'done')
        return True
    def open_frontend_cb(self, cr, uid, ids, context=None):
        """Open the web frontend for the session, refusing sessions owned
        by another user."""
        if not context:
            context = {}
        if not ids:
            return {}
        for session in self.browse(cr, uid, ids, context=context):
            if session.user_id.id != uid:
                # FIX: translate first, then substitute the user name —
                # _("..." % name) looked up the already-formatted string,
                # which can never match a translation catalog entry.
                raise osv.except_osv(
                        _('Error!'),
                        _("You cannot use the session of another users. This session is owned by %s. Please first close this one to use this point of sale.") % session.user_id.name)
        context.update({'active_id': ids[0]})
        return {
            'type' : 'ir.actions.act_url',
            'target': 'self',
            'url':   '/pos/web/',
        }
class pos_order(osv.osv):
    """A point of sale order: a sale created from the POS frontend, with
    its payment lines (bank statement lines), optional invoice/picking and
    accounting entries."""
    _name = "pos.order"
    _description = "Point of Sale"
    _order = "id desc"
def _order_fields(self, cr, uid, ui_order, context=None):
return {
'name': ui_order['name'],
'user_id': ui_order['user_id'] or False,
'session_id': ui_order['pos_session_id'],
'lines': ui_order['lines'],
'pos_reference':ui_order['name'],
'partner_id': ui_order['partner_id'] or False,
}
def _payment_fields(self, cr, uid, ui_paymentline, context=None):
return {
'amount': ui_paymentline['amount'] or 0.0,
'payment_date': ui_paymentline['name'],
'statement_id': ui_paymentline['statement_id'],
'payment_name': ui_paymentline.get('note',False),
'journal': ui_paymentline['journal_id'],
}
    def create_from_ui(self, cr, uid, orders, context=None):
        """Persist orders pushed by the POS frontend.

        `orders` is a list of {'data': <ui order dict>, 'to_invoice': bool}.
        Orders whose pos_reference already exists are skipped, which makes
        the frontend sync idempotent. Returns the ids of the created
        pos.order records."""
        # Keep only new orders
        submitted_references = [o['data']['name'] for o in orders]
        existing_order_ids = self.search(cr, uid, [('pos_reference', 'in', submitted_references)], context=context)
        existing_orders = self.read(cr, uid, existing_order_ids, ['pos_reference'], context=context)
        existing_references = set([o['pos_reference'] for o in existing_orders])
        orders_to_save = [o for o in orders if o['data']['name'] not in existing_references]
        order_ids = []
        for tmp_order in orders_to_save:
            to_invoice = tmp_order['to_invoice']
            order = tmp_order['data']
            order_id = self.create(cr, uid, self._order_fields(cr, uid, order, context=context),context)
            for payments in order['statement_ids']:
                # payments is an o2m (0, 0, values) triple; index 2 holds the values
                self.add_payment(cr, uid, order_id, self._payment_fields(cr, uid, payments[2], context=context), context=context)
            # keep the session's sequence counter ahead of the frontend's
            session = self.pool.get('pos.session').browse(cr, uid, order['pos_session_id'], context=context)
            if session.sequence_number <= order['sequence_number']:
                session.write({'sequence_number': order['sequence_number'] + 1})
                session.refresh()
            if order['amount_return']:
                # change given back to the customer: booked as a negative cash payment
                cash_journal = session.cash_journal_id
                if not cash_journal:
                    cash_journal_ids = filter(lambda st: st.journal_id.type=='cash', session.statement_ids)
                    if not len(cash_journal_ids):
                        raise osv.except_osv( _('error!'),
                            _("No cash statement found for this session. Unable to record returned cash."))
                    cash_journal = cash_journal_ids[0].journal_id
                self.add_payment(cr, uid, order_id, {
                    'amount': -order['amount_return'],
                    'payment_date': time.strftime('%Y-%m-%d %H:%M:%S'),
                    'payment_name': _('return'),
                    'journal': cash_journal.id,
                }, context=context)
            order_ids.append(order_id)
            try:
                self.signal_workflow(cr, uid, [order_id], 'paid')
            except Exception as e:
                # best effort: a failing workflow must not lose the frontend order
                _logger.error('Could not fully process the POS Order: %s', tools.ustr(e))
            if to_invoice:
                self.action_invoice(cr, uid, [order_id], context)
                order_obj = self.browse(cr, uid, order_id, context)
                self.pool['account.invoice'].signal_workflow(cr, uid, [order_obj.invoice_id.id], 'invoice_open')
        return order_ids
    def write(self, cr, uid, ids, vals, context=None):
        """Standard write, plus: when the customer changes, propagate the
        (accounting) partner to the order's statement lines; forbid the
        change once an invoice exists."""
        res = super(pos_order, self).write(cr, uid, ids, vals, context=context)
        #If you change the partner of the PoS order, change also the partner of the associated bank statement lines
        partner_obj = self.pool.get('res.partner')
        bsl_obj = self.pool.get("account.bank.statement.line")
        if 'partner_id' in vals:
            for posorder in self.browse(cr, uid, ids, context=context):
                if posorder.invoice_id:
                    raise osv.except_osv( _('Error!'), _("You cannot change the partner of a POS order for which an invoice has already been issued."))
                if vals['partner_id']:
                    p_id = partner_obj.browse(cr, uid, vals['partner_id'], context=context)
                    # statement lines must carry the accounting partner, not the contact
                    part_id = partner_obj._find_accounting_partner(p_id).id
                else:
                    part_id = False
                bsl_ids = [x.id for x in posorder.statement_ids]
                bsl_obj.write(cr, uid, bsl_ids, {'partner_id': part_id}, context=context)
        return res
def unlink(self, cr, uid, ids, context=None):
for rec in self.browse(cr, uid, ids, context=context):
if rec.state not in ('draft','cancel'):
raise osv.except_osv(_('Unable to Delete!'), _('In order to delete a sale, it must be new or cancelled.'))
return super(pos_order, self).unlink(cr, uid, ids, context=context)
def onchange_partner_id(self, cr, uid, ids, part=False, context=None):
if not part:
return {'value': {}}
pricelist = self.pool.get('res.partner').browse(cr, uid, part, context=context).property_product_pricelist.id
return {'value': {'pricelist_id': pricelist}}
def _amount_all(self, cr, uid, ids, name, args, context=None):
cur_obj = self.pool.get('res.currency')
res = {}
for order in self.browse(cr, uid, ids, context=context):
res[order.id] = {
'amount_paid': 0.0,
'amount_return':0.0,
'amount_tax':0.0,
}
val1 = val2 = 0.0
cur = order.pricelist_id.currency_id
for payment in order.statement_ids:
res[order.id]['amount_paid'] += payment.amount
res[order.id]['amount_return'] += (payment.amount < 0 and payment.amount or 0)
for line in order.lines:
val1 += line.price_subtotal_incl
val2 += line.price_subtotal
res[order.id]['amount_tax'] = cur_obj.round(cr, uid, cur, val1-val2)
res[order.id]['amount_total'] = cur_obj.round(cr, uid, cur, val1)
return res
_columns = {
'name': fields.char('Order Ref', required=True, readonly=True, copy=False),
'company_id':fields.many2one('res.company', 'Company', required=True, readonly=True),
'date_order': fields.datetime('Order Date', readonly=True, select=True),
'user_id': fields.many2one('res.users', 'Salesman', help="Person who uses the the cash register. It can be a reliever, a student or an interim employee."),
'amount_tax': fields.function(_amount_all, string='Taxes', digits_compute=dp.get_precision('Account'), multi='all'),
'amount_total': fields.function(_amount_all, string='Total', multi='all'),
'amount_paid': fields.function(_amount_all, string='Paid', states={'draft': [('readonly', False)]}, readonly=True, digits_compute=dp.get_precision('Account'), multi='all'),
'amount_return': fields.function(_amount_all, 'Returned', digits_compute=dp.get_precision('Account'), multi='all'),
'lines': fields.one2many('pos.order.line', 'order_id', 'Order Lines', states={'draft': [('readonly', False)]}, readonly=True, copy=True),
'statement_ids': fields.one2many('account.bank.statement.line', 'pos_statement_id', 'Payments', states={'draft': [('readonly', False)]}, readonly=True),
'pricelist_id': fields.many2one('product.pricelist', 'Pricelist', required=True, states={'draft': [('readonly', False)]}, readonly=True),
'partner_id': fields.many2one('res.partner', 'Customer', change_default=True, select=1, states={'draft': [('readonly', False)], 'paid': [('readonly', False)]}),
'sequence_number': fields.integer('Sequence Number', help='A session-unique sequence number for the order'),
'session_id' : fields.many2one('pos.session', 'Session',
#required=True,
select=1,
domain="[('state', '=', 'opened')]",
states={'draft' : [('readonly', False)]},
readonly=True),
'state': fields.selection([('draft', 'New'),
('cancel', 'Cancelled'),
('paid', 'Paid'),
('done', 'Posted'),
('invoiced', 'Invoiced')],
'Status', readonly=True, copy=False),
'invoice_id': fields.many2one('account.invoice', 'Invoice', copy=False),
'account_move': fields.many2one('account.move', 'Journal Entry', readonly=True, copy=False),
'picking_id': fields.many2one('stock.picking', 'Picking', readonly=True, copy=False),
'picking_type_id': fields.related('session_id', 'config_id', 'picking_type_id', string="Picking Type", type='many2one', relation='stock.picking.type'),
'location_id': fields.related('session_id', 'config_id', 'stock_location_id', string="Location", type='many2one', store=True, relation='stock.location'),
'note': fields.text('Internal Notes'),
'nb_print': fields.integer('Number of Print', readonly=True, copy=False),
'pos_reference': fields.char('Receipt Ref', readonly=True, copy=False),
'sale_journal': fields.related('session_id', 'config_id', 'journal_id', relation='account.journal', type='many2one', string='Sale Journal', store=True, readonly=True),
}
def _default_session(self, cr, uid, context=None):
so = self.pool.get('pos.session')
session_ids = so.search(cr, uid, [('state','=', 'opened'), ('user_id','=',uid)], context=context)
return session_ids and session_ids[0] or False
def _default_pricelist(self, cr, uid, context=None):
session_ids = self._default_session(cr, uid, context)
if session_ids:
session_record = self.pool.get('pos.session').browse(cr, uid, session_ids, context=context)
return session_record.config_id.pricelist_id and session_record.config_id.pricelist_id.id or False
return False
def _get_out_picking_type(self, cr, uid, context=None):
return self.pool.get('ir.model.data').xmlid_to_res_id(
cr, uid, 'point_of_sale.picking_type_posout', context=context)
_defaults = {
'user_id': lambda self, cr, uid, context: uid,
'state': 'draft',
'name': '/',
'date_order': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
'nb_print': 0,
'sequence_number': 1,
'session_id': _default_session,
'company_id': lambda self,cr,uid,c: self.pool.get('res.users').browse(cr, uid, uid, c).company_id.id,
'pricelist_id': _default_pricelist,
}
def create(self, cr, uid, values, context=None):
values['name'] = self.pool.get('ir.sequence').get(cr, uid, 'pos.order')
return super(pos_order, self).create(cr, uid, values, context=context)
def test_paid(self, cr, uid, ids, context=None):
"""A Point of Sale is paid when the sum
@return: True
"""
for order in self.browse(cr, uid, ids, context=context):
if order.lines and not order.amount_total:
return True
if (not order.lines) or (not order.statement_ids) or \
(abs(order.amount_total-order.amount_paid) > 0.00001):
return False
return True
    def create_picking(self, cr, uid, ids, context=None):
        """Create a picking for each order and validate it."""
        picking_obj = self.pool.get('stock.picking')
        partner_obj = self.pool.get('res.partner')
        move_obj = self.pool.get('stock.move')
        for order in self.browse(cr, uid, ids, context=context):
            addr = order.partner_id and partner_obj.address_get(cr, uid, [order.partner_id.id], ['delivery']) or {}
            picking_type = order.picking_type_id
            picking_id = False
            if picking_type:
                picking_id = picking_obj.create(cr, uid, {
                    'origin': order.name,
                    'partner_id': addr.get('delivery',False),
                    'picking_type_id': picking_type.id,
                    'company_id': order.company_id.id,
                    'move_type': 'direct',
                    'note': order.note or "",
                    'invoice_state': 'none',
                }, context=context)
                self.write(cr, uid, [order.id], {'picking_id': picking_id}, context=context)
            location_id = order.location_id.id
            # destination: customer location of the partner, else the picking
            # type's destination, else the generic customer location default
            if order.partner_id:
                destination_id = order.partner_id.property_stock_customer.id
            elif picking_type:
                if not picking_type.default_location_dest_id:
                    raise osv.except_osv(_('Error!'), _('Missing source or destination location for picking type %s. Please configure those fields and try again.' % (picking_type.name,)))
                destination_id = picking_type.default_location_dest_id.id
            else:
                destination_id = partner_obj.default_get(cr, uid, ['property_stock_customer'], context=context)['property_stock_customer']
            move_list = []
            for line in order.lines:
                if line.product_id and line.product_id.type == 'service':
                    # services generate no stock move
                    continue
                # a negative qty (refund) swaps source and destination
                move_list.append(move_obj.create(cr, uid, {
                    'name': line.name,
                    'product_uom': line.product_id.uom_id.id,
                    'product_uos': line.product_id.uom_id.id,
                    'picking_id': picking_id,
                    'picking_type_id': picking_type.id,
                    'product_id': line.product_id.id,
                    'product_uos_qty': abs(line.qty),
                    'product_uom_qty': abs(line.qty),
                    'state': 'draft',
                    'location_id': location_id if line.qty >= 0 else destination_id,
                    'location_dest_id': destination_id if line.qty >= 0 else location_id,
                }, context=context))
            # confirm, force-assign and process either the picking or the
            # standalone moves immediately (POS goods leave on the spot)
            if picking_id:
                picking_obj.action_confirm(cr, uid, [picking_id], context=context)
                picking_obj.force_assign(cr, uid, [picking_id], context=context)
                picking_obj.action_done(cr, uid, [picking_id], context=context)
            elif move_list:
                move_obj.action_confirm(cr, uid, move_list, context=context)
                move_obj.force_assign(cr, uid, move_list, context=context)
                move_obj.action_done(cr, uid, move_list, context=context)
        return True
def cancel_order(self, cr, uid, ids, context=None):
""" Changes order state to cancel
@return: True
"""
stock_picking_obj = self.pool.get('stock.picking')
for order in self.browse(cr, uid, ids, context=context):
stock_picking_obj.action_cancel(cr, uid, [order.picking_id.id])
if stock_picking_obj.browse(cr, uid, order.picking_id.id, context=context).state <> 'cancel':
raise osv.except_osv(_('Error!'), _('Unable to cancel the picking.'))
self.write(cr, uid, ids, {'state': 'cancel'}, context=context)
return True
    def add_payment(self, cr, uid, order_id, data, context=None):
        """Create a new payment for the order.

        Registers a bank statement line against one of the cashboxes
        (statements) of the order's session.  ``data`` provides at least
        'amount' and either 'statement_id' or 'journal'; optional keys are
        'payment_date' and 'payment_name'.
        Returns the id of the statement the payment was recorded on.
        """
        context = dict(context or {})
        statement_line_obj = self.pool.get('account.bank.statement.line')
        property_obj = self.pool.get('ir.property')
        order = self.browse(cr, uid, order_id, context=context)
        args = {
            'amount': data['amount'],
            'date': data.get('payment_date', time.strftime('%Y-%m-%d')),
            'name': order.name + ': ' + (data.get('payment_name', '') or ''),
            'partner_id': order.partner_id and order.partner_id.id or None,
        }
        # Receivable account: partner's own, else the company-level default
        # from ir.property.
        account_def = property_obj.get(cr, uid, 'property_account_receivable', 'res.partner', context=context)
        args['account_id'] = (order.partner_id and order.partner_id.property_account_receivable \
                             and order.partner_id.property_account_receivable.id) or (account_def and account_def.id) or False
        if not args['account_id']:
            if not args['partner_id']:
                msg = _('There is no receivable account defined to make payment.')
            else:
                msg = _('There is no receivable account defined to make payment for the partner: "%s" (id:%d).') % (order.partner_id.name, order.partner_id.id,)
            raise osv.except_osv(_('Configuration Error!'), msg)
        context.pop('pos_session_id', False)
        journal_id = data.get('journal', False)
        statement_id = data.get('statement_id', False)
        assert journal_id or statement_id, "No statement_id or journal_id passed to the method!"
        # Resolve the missing half of the (statement, journal) pair from the
        # session's open cashboxes: given a statement we derive its journal,
        # given a journal we pick the matching statement.
        for statement in order.session_id.statement_ids:
            if statement.id == statement_id:
                journal_id = statement.journal_id.id
                break
            elif statement.journal_id.id == journal_id:
                statement_id = statement.id
                break
        if not statement_id:
            raise osv.except_osv(_('Error!'), _('You have to open at least one cashbox.'))
        args.update({
            'statement_id': statement_id,
            'pos_statement_id': order_id,
            'journal_id': journal_id,
            'ref': order.session_id.name,
        })
        statement_line_obj.create(cr, uid, args, context=context)
        return statement_id
def refund(self, cr, uid, ids, context=None):
"""Create a copy of order for refund order"""
clone_list = []
line_obj = self.pool.get('pos.order.line')
for order in self.browse(cr, uid, ids, context=context):
current_session_ids = self.pool.get('pos.session').search(cr, uid, [
('state', '!=', 'closed'),
('user_id', '=', uid)], context=context)
if not current_session_ids:
raise osv.except_osv(_('Error!'), _('To return product(s), you need to open a session that will be used to register the refund.'))
clone_id = self.copy(cr, uid, order.id, {
'name': order.name + ' REFUND', # not used, name forced by create
'session_id': current_session_ids[0],
'date_order': time.strftime('%Y-%m-%d %H:%M:%S'),
}, context=context)
clone_list.append(clone_id)
for clone in self.browse(cr, uid, clone_list, context=context):
for order_line in clone.lines:
line_obj.write(cr, uid, [order_line.id], {
'qty': -order_line.qty
}, context=context)
abs = {
'name': _('Return Products'),
'view_type': 'form',
'view_mode': 'form',
'res_model': 'pos.order',
'res_id':clone_list[0],
'view_id': False,
'context':context,
'type': 'ir.actions.act_window',
'nodestroy': True,
'target': 'current',
}
return abs
    def action_invoice_state(self, cr, uid, ids, context=None):
        # Flag the orders as invoiced without creating an invoice here
        # (the invoice itself is produced elsewhere, e.g. by action_invoice).
        return self.write(cr, uid, ids, {'state':'invoiced'}, context=context)
    def action_invoice(self, cr, uid, ids, context=None):
        """Create a customer invoice for each order and open the first one.

        Orders that already carry an invoice are kept as-is.  A partner is
        mandatory.  Returns an act_window on account.invoice, or {} when no
        invoice exists at all.
        """
        inv_ref = self.pool.get('account.invoice')
        inv_line_ref = self.pool.get('account.invoice.line')
        product_obj = self.pool.get('product.product')
        inv_ids = []
        for order in self.pool.get('pos.order').browse(cr, uid, ids, context=context):
            if order.invoice_id:
                # Already invoiced: just collect it for the returned action.
                inv_ids.append(order.invoice_id.id)
                continue
            if not order.partner_id:
                raise osv.except_osv(_('Error!'), _('Please provide a partner for the sale.'))
            acc = order.partner_id.property_account_receivable.id
            inv = {
                'name': order.name,
                'origin': order.name,
                'account_id': acc,
                'journal_id': order.sale_journal.id or None,
                'type': 'out_invoice',
                'reference': order.name,
                'partner_id': order.partner_id.id,
                'comment': order.note or '',
                'currency_id': order.pricelist_id.currency_id.id, # considering partner's sale pricelist's currency
            }
            # Let the standard partner onchange fill in fiscal defaults, then
            # make sure the receivable account survived it.
            inv.update(inv_ref.onchange_partner_id(cr, uid, [], 'out_invoice', order.partner_id.id)['value'])
            if not inv.get('account_id', None):
                inv['account_id'] = acc
            inv_id = inv_ref.create(cr, uid, inv, context=context)
            self.write(cr, uid, [order.id], {'invoice_id': inv_id, 'state': 'invoiced'}, context=context)
            inv_ids.append(inv_id)
            for line in order.lines:
                inv_line = {
                    'invoice_id': inv_id,
                    'product_id': line.product_id.id,
                    'quantity': line.qty,
                }
                inv_name = product_obj.name_get(cr, uid, [line.product_id.id], context=context)[0][1]
                # Standard product onchange supplies account/taxes; the POS
                # price, discount and name then override its suggestions.
                inv_line.update(inv_line_ref.product_id_change(cr, uid, [],
                                                               line.product_id.id,
                                                               line.product_id.uom_id.id,
                                                               line.qty, partner_id = order.partner_id.id,
                                                               fposition_id=order.partner_id.property_account_position.id)['value'])
                inv_line['price_unit'] = line.price_unit
                inv_line['discount'] = line.discount
                inv_line['name'] = inv_name
                inv_line['invoice_line_tax_id'] = [(6, 0, [x.id for x in line.product_id.taxes_id] )]
                inv_line_ref.create(cr, uid, inv_line, context=context)
            inv_ref.button_reset_taxes(cr, uid, [inv_id], context=context)
            # Advance both the order's and the invoice's workflows.
            self.signal_workflow(cr, uid, [order.id], 'invoice')
            inv_ref.signal_workflow(cr, uid, [inv_id], 'validate')
        if not inv_ids: return {}
        mod_obj = self.pool.get('ir.model.data')
        res = mod_obj.get_object_reference(cr, uid, 'account', 'invoice_form')
        res_id = res and res[1] or False
        return {
            'name': _('Customer Invoice'),
            'view_type': 'form',
            'view_mode': 'form',
            'view_id': [res_id],
            'res_model': 'account.invoice',
            'context': "{'type':'out_invoice'}",
            'type': 'ir.actions.act_window',
            'nodestroy': True,
            'target': 'current',
            'res_id': inv_ids and inv_ids[0] or False,
        }
    def create_account_move(self, cr, uid, ids, context=None):
        # Workflow entry point: post the accounting entries for these orders
        # without session-level grouping and with a fresh account.move.
        return self._create_account_move_line(cr, uid, ids, None, None, context=context)
    def _create_account_move_line(self, cr, uid, ids, session=None, move_id=None, context=None):
        # Tricky, via the workflow, we only have one id in the ids variable
        """Create a account move line of order grouped by products or not.

        For each paid, not-yet-posted order this generates product, tax and
        counterpart move lines on a single account.move (created here unless
        ``move_id`` is given).  When ``session`` is passed and its POS config
        enables group_by, lines with the same grouping key are summed.
        """
        account_move_obj = self.pool.get('account.move')
        account_period_obj = self.pool.get('account.period')
        account_tax_obj = self.pool.get('account.tax')
        property_obj = self.pool.get('ir.property')
        cur_obj = self.pool.get('res.currency')
        #session_ids = set(order.session_id for order in self.browse(cr, uid, ids, context=context))
        if session and not all(session.id == order.session_id.id for order in self.browse(cr, uid, ids, context=context)):
            raise osv.except_osv(_('Error!'), _('Selected orders do not have the same session!'))
        grouped_data = {}
        have_to_group_by = session and session.config_id.group_by or False

        def compute_tax(amount, tax, line):
            # Sales use the base tax code/sign, refunds the "ref" variants.
            if amount > 0:
                tax_code_id = tax['base_code_id']
                tax_amount = line.price_subtotal * tax['base_sign']
            else:
                tax_code_id = tax['ref_base_code_id']
                tax_amount = line.price_subtotal * tax['ref_base_sign']
            return (tax_code_id, tax_amount,)

        for order in self.browse(cr, uid, ids, context=context):
            if order.account_move:
                continue
            if order.state != 'paid':
                continue
            current_company = order.sale_journal.company_id
            group_tax = {}
            account_def = property_obj.get(cr, uid, 'property_account_receivable', 'res.partner', context=context)
            order_account = order.partner_id and \
                            order.partner_id.property_account_receivable and \
                            order.partner_id.property_account_receivable.id or \
                            account_def and account_def.id or current_company.account_receivable.id
            if move_id is None:
                # Create an entry for the sale
                move_id = account_move_obj.create(cr, uid, {
                    'ref' : order.name,
                    'journal_id': order.sale_journal.id,
                }, context=context)

            def insert_data(data_type, values):
                # Closure over `order`/`move_id`: completes the line values and
                # accumulates them into grouped_data under a per-type key.
                # if have_to_group_by:
                sale_journal_id = order.sale_journal.id
                period = account_period_obj.find(cr, uid, context=dict(context or {}, company_id=current_company.id))[0]
                # 'quantity': line.qty,
                # 'product_id': line.product_id.id,
                values.update({
                    'date': order.date_order[:10],
                    'ref': order.name,
                    'journal_id' : sale_journal_id,
                    'period_id' : period,
                    'move_id' : move_id,
                    'company_id': current_company.id,
                })
                if data_type == 'product':
                    key = ('product', values['partner_id'], values['product_id'], values['debit'] > 0)
                elif data_type == 'tax':
                    key = ('tax', values['partner_id'], values['tax_code_id'], values['debit'] > 0)
                elif data_type == 'counter_part':
                    key = ('counter_part', values['partner_id'], values['account_id'], values['debit'] > 0)
                else:
                    return
                grouped_data.setdefault(key, [])
                # if not have_to_group_by or (not grouped_data[key]):
                #     grouped_data[key].append(values)
                # else:
                #     pass
                if have_to_group_by:
                    if not grouped_data[key]:
                        grouped_data[key].append(values)
                    else:
                        # Same key already seen: sum the amounts in place.
                        current_value = grouped_data[key][0]
                        current_value['quantity'] = current_value.get('quantity', 0.0) + values.get('quantity', 0.0)
                        current_value['credit'] = current_value.get('credit', 0.0) + values.get('credit', 0.0)
                        current_value['debit'] = current_value.get('debit', 0.0) + values.get('debit', 0.0)
                        current_value['tax_amount'] = current_value.get('tax_amount', 0.0) + values.get('tax_amount', 0.0)
                else:
                    grouped_data[key].append(values)

            #because of the weird way the pos order is written, we need to make sure there is at least one line,
            #because just after the 'for' loop there are references to 'line' and 'income_account' variables (that
            #are set inside the for loop)
            #TOFIX: a deep refactoring of this method (and class!) is needed in order to get rid of this stupid hack
            assert order.lines, _('The POS order must have lines when calling this method')
            # Create an move for each order line
            cur = order.pricelist_id.currency_id
            for line in order.lines:
                tax_amount = 0
                taxes = []
                for t in line.product_id.taxes_id:
                    if t.company_id.id == current_company.id:
                        taxes.append(t)
                computed_taxes = account_tax_obj.compute_all(cr, uid, taxes, line.price_unit * (100.0-line.discount) / 100.0, line.qty)['taxes']
                for tax in computed_taxes:
                    tax_amount += cur_obj.round(cr, uid, cur, tax['amount'])
                    group_key = (tax['tax_code_id'], tax['base_code_id'], tax['account_collected_id'], tax['id'])
                    group_tax.setdefault(group_key, 0)
                    group_tax[group_key] += cur_obj.round(cr, uid, cur, tax['amount'])
                amount = line.price_subtotal
                # Search for the income account
                if line.product_id.property_account_income.id:
                    income_account = line.product_id.property_account_income.id
                elif line.product_id.categ_id.property_account_income_categ.id:
                    income_account = line.product_id.categ_id.property_account_income_categ.id
                else:
                    raise osv.except_osv(_('Error!'), _('Please define income '\
                        'account for this product: "%s" (id:%d).') \
                        % (line.product_id.name, line.product_id.id, ))
                # Empty the tax list as long as there is no tax code:
                tax_code_id = False
                tax_amount = 0
                while computed_taxes:
                    tax = computed_taxes.pop(0)
                    tax_code_id, tax_amount = compute_tax(amount, tax, line)
                    # If there is one we stop
                    if tax_code_id:
                        break
                # Create a move for the line
                insert_data('product', {
                    'name': line.product_id.name,
                    'quantity': line.qty,
                    'product_id': line.product_id.id,
                    'account_id': income_account,
                    'credit': ((amount>0) and amount) or 0.0,
                    'debit': ((amount<0) and -amount) or 0.0,
                    'tax_code_id': tax_code_id,
                    'tax_amount': tax_amount,
                    'partner_id': order.partner_id and self.pool.get("res.partner")._find_accounting_partner(order.partner_id).id or False
                })
                # For each remaining tax with a code, whe create a move line
                for tax in computed_taxes:
                    tax_code_id, tax_amount = compute_tax(amount, tax, line)
                    if not tax_code_id:
                        continue
                    insert_data('tax', {
                        'name': _('Tax'),
                        'product_id':line.product_id.id,
                        'quantity': line.qty,
                        'account_id': income_account,
                        'credit': 0.0,
                        'debit': 0.0,
                        'tax_code_id': tax_code_id,
                        'tax_amount': tax_amount,
                        'partner_id': order.partner_id and self.pool.get("res.partner")._find_accounting_partner(order.partner_id).id or False
                    })
            # Create a move for each tax group
            (tax_code_pos, base_code_pos, account_pos, tax_id)= (0, 1, 2, 3)
            for key, tax_amount in group_tax.items():
                tax = self.pool.get('account.tax').browse(cr, uid, key[tax_id], context=context)
                insert_data('tax', {
                    'name': _('Tax') + ' ' + tax.name,
                    'quantity': line.qty,
                    'product_id': line.product_id.id,
                    'account_id': key[account_pos] or income_account,
                    'credit': ((tax_amount>0) and tax_amount) or 0.0,
                    'debit': ((tax_amount<0) and -tax_amount) or 0.0,
                    'tax_code_id': key[tax_code_pos],
                    'tax_amount': tax_amount,
                    'partner_id': order.partner_id and self.pool.get("res.partner")._find_accounting_partner(order.partner_id).id or False
                })
            # counterpart
            insert_data('counter_part', {
                'name': _("Trade Receivables"), #order.name,
                'account_id': order_account,
                'credit': ((order.amount_total < 0) and -order.amount_total) or 0.0,
                'debit': ((order.amount_total > 0) and order.amount_total) or 0.0,
                'partner_id': order.partner_id and self.pool.get("res.partner")._find_accounting_partner(order.partner_id).id or False
            })
            order.write({'state':'done', 'account_move': move_id})
        # Flush everything accumulated above into the move in one write.
        all_lines = []
        for group_key, group_data in grouped_data.iteritems():
            for value in group_data:
                all_lines.append((0, 0, value),)
        if move_id: #In case no order was changed
            self.pool.get("account.move").write(cr, uid, [move_id], {'line_id':all_lines}, context=context)
        return True
    def action_payment(self, cr, uid, ids, context=None):
        # Workflow transition: order is awaiting payment.
        return self.write(cr, uid, ids, {'state': 'payment'}, context=context)
    def action_paid(self, cr, uid, ids, context=None):
        # Workflow transition: mark paid, then ship the goods immediately.
        self.write(cr, uid, ids, {'state': 'paid'}, context=context)
        self.create_picking(cr, uid, ids, context=context)
        return True
    def action_cancel(self, cr, uid, ids, context=None):
        # Workflow transition: state only; pickings are handled by cancel_order.
        self.write(cr, uid, ids, {'state': 'cancel'}, context=context)
        return True
    def action_done(self, cr, uid, ids, context=None):
        # Workflow transition: post the accounting entries (which also sets
        # the orders to 'done').
        self.create_account_move(cr, uid, ids, context=context)
        return True
class account_bank_statement(osv.osv):
    """Track the responsible user on bank statements (POS cashboxes)."""
    _inherit = 'account.bank.statement'
    _columns= {
        'user_id': fields.many2one('res.users', 'User', readonly=True),
    }
    _defaults = {
        # Default to the user who creates (opens) the statement.
        'user_id': lambda self,cr,uid,c={}: uid
    }
class account_bank_statement_line(osv.osv):
    """Link statement lines back to the POS order they pay for."""
    _inherit = 'account.bank.statement.line'
    _columns= {
        # Cascade delete: removing the order removes its payment lines.
        'pos_statement_id': fields.many2one('pos.order', ondelete='cascade'),
    }
class pos_order_line(osv.osv):
    _name = "pos.order.line"
    _description = "Lines of Point of Sale"
    _rec_name = "product_id"

    def _amount_line_all(self, cr, uid, ids, field_names, arg, context=None):
        """Function-field: compute tax-excluded and tax-included subtotals
        for each line, rounded in the order's pricelist currency."""
        res = dict([(i, {}) for i in ids])
        account_tax_obj = self.pool.get('account.tax')
        cur_obj = self.pool.get('res.currency')
        for line in self.browse(cr, uid, ids, context=context):
            # Only taxes belonging to the order's company apply.
            taxes_ids = [ tax for tax in line.product_id.taxes_id if tax.company_id.id == line.order_id.company_id.id ]
            price = line.price_unit * (1 - (line.discount or 0.0) / 100.0)
            taxes = account_tax_obj.compute_all(cr, uid, taxes_ids, price, line.qty, product=line.product_id, partner=line.order_id.partner_id or False)
            cur = line.order_id.pricelist_id.currency_id
            res[line.id]['price_subtotal'] = cur_obj.round(cr, uid, cur, taxes['total'])
            res[line.id]['price_subtotal_incl'] = cur_obj.round(cr, uid, cur, taxes['total_included'])
        return res

    def onchange_product_id(self, cr, uid, ids, pricelist, product_id, qty=0, partner_id=False, context=None):
        """Onchange: derive the unit price from the pricelist, then reuse
        onchange_qty to recompute the subtotals."""
        context = context or {}
        if not product_id:
            return {}
        if not pricelist:
            raise osv.except_osv(_('No Pricelist!'),
                _('You have to select a pricelist in the sale form !\n' \
                'Please set one before choosing a product.'))
        price = self.pool.get('product.pricelist').price_get(cr, uid, [pricelist],
                product_id, qty or 1.0, partner_id)[pricelist]
        result = self.onchange_qty(cr, uid, ids, product_id, 0.0, qty, price, context=context)
        result['value']['price_unit'] = price
        return result

    def onchange_qty(self, cr, uid, ids, product, discount, qty, price_unit, context=None):
        """Onchange: recompute subtotal and tax-included subtotal for the
        given product/discount/qty/price combination (unrounded)."""
        result = {}
        if not product:
            return result
        account_tax_obj = self.pool.get('account.tax')
        cur_obj = self.pool.get('res.currency')
        prod = self.pool.get('product.product').browse(cr, uid, product, context=context)
        price = price_unit * (1 - (discount or 0.0) / 100.0)
        taxes = account_tax_obj.compute_all(cr, uid, prod.taxes_id, price, qty, product=prod, partner=False)
        result['price_subtotal'] = taxes['total']
        result['price_subtotal_incl'] = taxes['total_included']
        return {'value': result}

    _columns = {
        'company_id': fields.many2one('res.company', 'Company', required=True),
        'name': fields.char('Line No', required=True, copy=False),
        'notice': fields.char('Discount Notice'),
        'product_id': fields.many2one('product.product', 'Product', domain=[('sale_ok', '=', True)], required=True, change_default=True),
        'price_unit': fields.float(string='Unit Price', digits_compute=dp.get_precision('Account')),
        'qty': fields.float('Quantity', digits_compute=dp.get_precision('Product UoS')),
        'price_subtotal': fields.function(_amount_line_all, multi='pos_order_line_amount', string='Subtotal w/o Tax', store=True),
        'price_subtotal_incl': fields.function(_amount_line_all, multi='pos_order_line_amount', string='Subtotal', store=True),
        'discount': fields.float('Discount (%)', digits_compute=dp.get_precision('Account')),
        'order_id': fields.many2one('pos.order', 'Order Ref', ondelete='cascade'),
        'create_date': fields.datetime('Creation Date', readonly=True),
    }
    _defaults = {
        # Line number comes from a dedicated ir.sequence.
        'name': lambda obj, cr, uid, context: obj.pool.get('ir.sequence').get(cr, uid, 'pos.order.line'),
        'qty': lambda *a: 1,
        'discount': lambda *a: 0.0,
        'company_id': lambda self,cr,uid,c: self.pool.get('res.users').browse(cr, uid, uid, c).company_id.id,
    }
class ean_wizard(osv.osv_memory):
    """Transient wizard assigning a custom EAN13 barcode to a record."""
    _name = 'pos.ean_wizard'
    _columns = {
        'ean13_pattern': fields.char('Reference', size=13, required=True, translate=True),
    }

    def sanitize_ean13(self, cr, uid, ids, context):
        """Normalize the entered pattern into a valid EAN13 and write it on
        the record identified by the context's active_model/active_id."""
        for r in self.browse(cr,uid,ids):
            ean13 = openerp.addons.product.product.sanitize_ean13(r.ean13_pattern)
            m = context.get('active_model')
            m_id = context.get('active_id')
            self.pool[m].write(cr,uid,[m_id],{'ean13':ean13})
        return { 'type' : 'ir.actions.act_window_close' }
class pos_category(osv.osv):
    """Category tree used to group products on the Point of Sale screen."""
    _name = "pos.category"
    _description = "Public Category"
    _order = "sequence, name"
    _constraints = [
        (osv.osv._check_recursion, 'Error ! You cannot create recursive categories.', ['parent_id'])
    ]

    def name_get(self, cr, uid, ids, context=None):
        """Return 'Parent / Child' style display names (one level deep)."""
        if not ids:
            return []
        reads = self.read(cr, uid, ids, ['name','parent_id'], context=context)
        res = []
        for record in reads:
            name = record['name']
            if record['parent_id']:
                name = record['parent_id'][1]+' / '+name
            res.append((record['id'], name))
        return res

    def _name_get_fnc(self, cr, uid, ids, prop, unknow_none, context=None):
        # Function-field wrapper exposing name_get() as `complete_name`.
        res = self.name_get(cr, uid, ids, context=context)
        return dict(res)

    def _get_image(self, cr, uid, ids, name, args, context=None):
        # Compute the resized variants (medium/small) from the master image.
        result = dict.fromkeys(ids, False)
        for obj in self.browse(cr, uid, ids, context=context):
            result[obj.id] = tools.image_get_resized_images(obj.image)
        return result

    def _set_image(self, cr, uid, id, name, value, args, context=None):
        # Store any uploaded variant back into the (big) master image field.
        return self.write(cr, uid, [id], {'image': tools.image_resize_image_big(value)}, context=context)

    _columns = {
        'name': fields.char('Name', required=True, translate=True),
        'complete_name': fields.function(_name_get_fnc, type="char", string='Name'),
        'parent_id': fields.many2one('pos.category','Parent Category', select=True),
        'child_id': fields.one2many('pos.category', 'parent_id', string='Children Categories'),
        'sequence': fields.integer('Sequence', help="Gives the sequence order when displaying a list of product categories."),
        # NOTE: there is no 'default image', because by default we don't show thumbnails for categories. However if we have a thumbnail
        # for at least one category, then we display a default image on the other, so that the buttons have consistent styling.
        # In this case, the default image is set by the js code.
        # NOTE2: image: all image fields are base64 encoded and PIL-supported
        'image': fields.binary("Image",
            # Typo fix: "cateogry" -> "category" in the user-facing help.
            help="This field holds the image used as image for the category, limited to 1024x1024px."),
        'image_medium': fields.function(_get_image, fnct_inv=_set_image,
            string="Medium-sized image", type="binary", multi="_get_image",
            store={
                'pos.category': (lambda self, cr, uid, ids, c={}: ids, ['image'], 10),
            },
            help="Medium-sized image of the category. It is automatically "\
                 "resized as a 128x128px image, with aspect ratio preserved. "\
                 "Use this field in form views or some kanban views."),
        'image_small': fields.function(_get_image, fnct_inv=_set_image,
            # Typo fix: "Smal-sized" -> "Small-sized" in the field label.
            string="Small-sized image", type="binary", multi="_get_image",
            store={
                'pos.category': (lambda self, cr, uid, ids, c={}: ids, ['image'], 10),
            },
            help="Small-sized image of the category. It is automatically "\
                 "resized as a 64x64px image, with aspect ratio preserved. "\
                 "Use this field anywhere a small image is required."),
    }
class product_template(osv.osv):
    """Extend products with Point-of-Sale specific flags and category."""
    _inherit = 'product.template'
    _columns = {
        'income_pdt': fields.boolean('Point of Sale Cash In', help="Check if, this is a product you can use to put cash into a statement for the point of sale backend."),
        'expense_pdt': fields.boolean('Point of Sale Cash Out', help="Check if, this is a product you can use to take cash from a statement for the point of sale backend, example: money lost, transfer to bank, etc."),
        'available_in_pos': fields.boolean('Available in the Point of Sale', help='Check if you want this product to appear in the Point of Sale'),
        'to_weight' : fields.boolean('To Weigh', help="Check if the product should be weighted (mainly used with self check-out interface)."),
        'pos_categ_id': fields.many2one('pos.category','Point of Sale Category', help="Those categories are used to group similar products for point of sale."),
    }
    _defaults = {
        'to_weight' : False,
        'available_in_pos': True,
    }

    def edit_ean(self, cr, uid, ids, context):
        """Open the pos.ean_wizard popup to assign a custom EAN13."""
        return {
            'name': _("Assign a Custom EAN"),
            'type': 'ir.actions.act_window',
            'view_type': 'form',
            'view_mode': 'form',
            'res_model': 'pos.ean_wizard',
            'target' : 'new',
            'view_id': False,
            'context':context,
        }
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
abo-abo/edx-platform | lms/djangoapps/courseware/features/video.py | 4 | 3767 | #pylint: disable=C0111
from lettuce import world, step
from lettuce.django import django_url
from common import i_am_registered_for_the_course, section_location
from django.utils.translation import ugettext as _
############### ACTIONS ####################
HTML5_SOURCES = [
'https://s3.amazonaws.com/edx-course-videos/edx-intro/edX-FA12-cware-1_100.mp4',
'https://s3.amazonaws.com/edx-course-videos/edx-intro/edX-FA12-cware-1_100.webm',
'https://s3.amazonaws.com/edx-course-videos/edx-intro/edX-FA12-cware-1_100.ogv'
]
HTML5_SOURCES_INCORRECT = [
'https://s3.amazonaws.com/edx-course-videos/edx-intro/edX-FA12-cware-1_100.mp99'
]
@step('when I view the (.*) it does not have autoplay enabled$')
def does_not_autoplay(_step, video_type):
    # Locate the rendered player element and inspect its autoplay flag,
    # which the template serializes as the string 'False'.
    player = world.css_find('.{0}'.format(video_type))[0]
    assert player['data-autoplay'] == 'False'
@step('the course has a Video component in (.*) mode$')
def view_video(_step, player_mode):
    """Create a course containing a Video component in the given player mode
    and navigate the browser to its courseware page."""
    coursenum = 'test_course'
    # Fix: pass the step object received by this function; the original
    # passed the imported lettuce `step` decorator by mistake.
    i_am_registered_for_the_course(_step, coursenum)
    # Make sure we have a video
    add_video_to_course(coursenum, player_mode.lower())
    chapter_name = world.scenario_dict['SECTION'].display_name.replace(" ", "_")
    section_name = chapter_name
    url = django_url('/courses/%s/%s/%s/courseware/%s/%s' %
                     (world.scenario_dict['COURSE'].org, world.scenario_dict['COURSE'].number, world.scenario_dict['COURSE'].display_name.replace(' ', '_'),
                      chapter_name, section_name,))
    world.browser.visit(url)
def add_video_to_course(course, player_mode):
    """Insert a Video module into the course, configured for *player_mode*
    ('html5', 'youtube_html5', 'youtube_html5_unsupported_video',
    'html5_unsupported_video', or anything else for plain YouTube)."""
    kwargs = {
        'parent_location': section_location(course),
        'category': 'video',
        'display_name': 'Video'
    }
    # Blanking the youtube ids forces the player into pure HTML5 mode.
    no_youtube = {
        'youtube_id_1_0': '',
        'youtube_id_0_75': '',
        'youtube_id_1_25': '',
        'youtube_id_1_5': '',
    }
    if player_mode == 'html5':
        metadata = dict(no_youtube, html5_sources=HTML5_SOURCES)
    elif player_mode == 'youtube_html5':
        metadata = {'html5_sources': HTML5_SOURCES}
    elif player_mode == 'youtube_html5_unsupported_video':
        metadata = {'html5_sources': HTML5_SOURCES_INCORRECT}
    elif player_mode == 'html5_unsupported_video':
        metadata = dict(no_youtube, html5_sources=HTML5_SOURCES_INCORRECT)
    else:
        metadata = None
    if metadata is not None:
        kwargs['metadata'] = metadata
    world.ItemFactory.create(**kwargs)
@step('youtube server is up and response time is (.*) seconds$')
def set_youtube_response_timeout(_step, time):
    # Configure the stub YouTube server's artificial response delay.
    # NOTE(review): `time` is the raw string captured by the step regex;
    # presumably the stub server coerces it itself — confirm.
    world.youtube_server.time_to_response = time
@step('when I view the video it has rendered in (.*) mode$')
def video_is_rendered(_step, mode):
    # HTML5 renders a <video> element, YouTube an <iframe>; any other mode
    # raises KeyError, failing the step loudly.
    tag_for_mode = {
        'html5': 'video',
        'youtube': 'iframe'
    }
    expected_tag = tag_for_mode[mode.lower()]
    assert world.css_find('.video {0}'.format(expected_tag)).first
@step('all sources are correct$')
def all_sources_are_correct(_step):
    # Compare the rendered <source> URLs against the fixture list,
    # ignoring order (set comparison).
    sources = world.css_find('.video video source')
    assert set(source['src'] for source in sources) == set(HTML5_SOURCES)
@step('error message is shown$')
def error_message_is_shown(_step):
    # The player surfaces load failures as an <h3> inside .video-player.
    selector = '.video .video-player h3'
    assert world.css_visible(selector)
@step('error message has correct text$')
def error_message_has_correct_text(_step):
    selector = '.video .video-player h3'
    # Translate the expected message so the step also passes on
    # non-English deployments.
    text = _('ERROR: No playable video sources found!')
    assert world.css_has_text(selector, text)
| agpl-3.0 |
KellyChan/python-examples | javascript/backbone/backbone-templates/backbone-fileupload/venvs/lib/python2.7/site-packages/django/contrib/gis/geoip/base.py | 93 | 10955 | import os
import re
from ctypes import c_char_p
from django.core.validators import ipv4_re
from django.contrib.gis.geoip.libgeoip import GEOIP_SETTINGS
from django.contrib.gis.geoip.prototypes import (
GeoIPRecord, GeoIPTag, GeoIP_open, GeoIP_delete, GeoIP_database_info,
GeoIP_lib_version, GeoIP_record_by_addr, GeoIP_record_by_name,
GeoIP_country_code_by_addr, GeoIP_country_code_by_name,
GeoIP_country_name_by_addr, GeoIP_country_name_by_name)
# Regular expressions for recognizing the GeoIP free database editions.
free_regex = re.compile(r'^GEO-\d{3}FREE')
lite_regex = re.compile(r'^GEO-\d{3}LITE')
#### GeoIP classes ####
class GeoIPException(Exception): pass  # Raised for GeoIP configuration, path, or database errors.
class GeoIP(object):
    """ctypes-based wrapper around the MaxMind GeoIP C library, providing
    country and city lookups for IP addresses and fully qualified domain
    names using the GeoIP.dat / GeoLiteCity.dat binary databases."""
    # The flags for GeoIP memory caching.
    # GEOIP_STANDARD - read database from filesystem, uses least memory.
    #
    # GEOIP_MEMORY_CACHE - load database into memory, faster performance
    #        but uses more memory
    #
    # GEOIP_CHECK_CACHE - check for updated database.  If database has been
    #        updated, reload filehandle and/or memory cache.  This option
    #        is not thread safe.
    #
    # GEOIP_INDEX_CACHE - just cache the most frequently accessed index
    #        portion of the database, resulting in faster lookups than
    #        GEOIP_STANDARD, but less memory usage than GEOIP_MEMORY_CACHE -
    #        useful for larger databases such as GeoIP Organization and
    #        GeoIP City.  Note, for GeoIP Country, Region and Netspeed
    #        databases, GEOIP_INDEX_CACHE is equivalent to GEOIP_MEMORY_CACHE
    #
    # GEOIP_MMAP_CACHE - load database into mmap shared memory ( not available
    #       on Windows).
    GEOIP_STANDARD = 0
    GEOIP_MEMORY_CACHE = 1
    GEOIP_CHECK_CACHE = 2
    GEOIP_INDEX_CACHE = 4
    GEOIP_MMAP_CACHE = 8
    # Set of valid cache flag values (values unused, dict used as a set).
    cache_options = dict((opt, None) for opt in (0, 1, 2, 4, 8))

    # Paths to the city & country binary databases.
    _city_file = ''
    _country_file = ''

    # Initially, pointers to GeoIP file references are NULL.
    _city = None
    _country = None
    def __init__(self, path=None, cache=0, country=None, city=None):
        """
        Initializes the GeoIP object, no parameters are required to use default
        settings.  Keyword arguments may be passed in to customize the locations
        of the GeoIP data sets.

        * path: Base directory to where GeoIP data is located or the full path
            to where the city or country data files (*.dat) are located.
            Assumes that both the city and country data sets are located in
            this directory; overrides the GEOIP_PATH settings attribute.

        * cache: The cache settings when opening up the GeoIP datasets,
            and may be an integer in (0, 1, 2, 4, 8) corresponding to
            the GEOIP_STANDARD, GEOIP_MEMORY_CACHE, GEOIP_CHECK_CACHE,
            GEOIP_INDEX_CACHE, and GEOIP_MMAP_CACHE, `GeoIPOptions` C API
            settings,  respectively.  Defaults to 0, meaning that the data is read
            from the disk.

        * country: The name of the GeoIP country data file.  Defaults to
            'GeoIP.dat'; overrides the GEOIP_COUNTRY settings attribute.

        * city: The name of the GeoIP city data file.  Defaults to
            'GeoLiteCity.dat'; overrides the GEOIP_CITY settings attribute.
        """
        # Checking the given cache option.
        if cache in self.cache_options:
            self._cache = cache
        else:
            raise GeoIPException('Invalid GeoIP caching option: %s' % cache)

        # Getting the GeoIP data path.
        if not path:
            path = GEOIP_SETTINGS.get('GEOIP_PATH', None)
            if not path: raise GeoIPException('GeoIP path must be provided via parameter or the GEOIP_PATH setting.')
        if not isinstance(path, basestring):
            raise TypeError('Invalid path type: %s' % type(path).__name__)

        if os.path.isdir(path):
            # Constructing the GeoIP database filenames using the settings
            # dictionary.  If the database files for the GeoLite country
            # and/or city datasets exist, then try and open them.
            country_db = os.path.join(path, country or GEOIP_SETTINGS.get('GEOIP_COUNTRY', 'GeoIP.dat'))
            if os.path.isfile(country_db):
                self._country = GeoIP_open(country_db, cache)
                self._country_file = country_db

            city_db = os.path.join(path, city or GEOIP_SETTINGS.get('GEOIP_CITY', 'GeoLiteCity.dat'))
            if os.path.isfile(city_db):
                self._city = GeoIP_open(city_db, cache)
                self._city_file = city_db
        elif os.path.isfile(path):
            # Otherwise, some detective work will be needed to figure
            # out whether the given database path is for the GeoIP country
            # or city databases.
            ptr = GeoIP_open(path, cache)
            info = GeoIP_database_info(ptr)
            if lite_regex.match(info):
                # GeoLite City database detected.
                self._city = ptr
                self._city_file = path
            elif free_regex.match(info):
                # GeoIP Country database detected.
                self._country = ptr
                self._country_file = path
            else:
                raise GeoIPException('Unable to recognize database edition: %s' % info)
        else:
            raise GeoIPException('GeoIP path must be a valid file or directory.')
    def __del__(self):
        # Cleaning any GeoIP file handles lying around (C-side resources are
        # not garbage collected automatically).
        if self._country: GeoIP_delete(self._country)
        if self._city: GeoIP_delete(self._city)
    def _check_query(self, query, country=False, city=False, city_or_country=False):
        "Helper routine for checking the query and database availability."
        # Making sure a string was passed in for the query.
        if not isinstance(query, basestring):
            raise TypeError('GeoIP query must be a string, not type %s' % type(query).__name__)

        # GeoIP only takes ASCII-encoded strings.
        query = query.encode('ascii')

        # Extra checks for the existence of country and city databases.
        if city_or_country and not (self._country or self._city):
            raise GeoIPException('Invalid GeoIP country and city data files.')
        elif country and not self._country:
            raise GeoIPException('Invalid GeoIP country data file: %s' % self._country_file)
        elif city and not self._city:
            raise GeoIPException('Invalid GeoIP city data file: %s' % self._city_file)

        # Return the query string back to the caller.
        return query
    def city(self, query):
        """
        Returns a dictionary of city information for the given IP address or
        Fully Qualified Domain Name (FQDN).  Some information in the dictionary
        may be undefined (None).
        """
        query = self._check_query(query, city=True)
        if ipv4_re.match(query):
            # If an IP address was passed in
            return GeoIP_record_by_addr(self._city, c_char_p(query))
        else:
            # If a FQDN was passed in.
            return GeoIP_record_by_name(self._city, c_char_p(query))
    def country_code(self, query):
        "Returns the country code for the given IP Address or FQDN."
        query = self._check_query(query, city_or_country=True)
        if self._country:
            if ipv4_re.match(query):
                return GeoIP_country_code_by_addr(self._country, query)
            else:
                return GeoIP_country_code_by_name(self._country, query)
        else:
            # No country database available -- fall back to the city record.
            return self.city(query)['country_code']
    def country_name(self, query):
        "Returns the country name for the given IP Address or FQDN."
        query = self._check_query(query, city_or_country=True)
        if self._country:
            if ipv4_re.match(query):
                return GeoIP_country_name_by_addr(self._country, query)
            else:
                return GeoIP_country_name_by_name(self._country, query)
        else:
            # No country database available -- fall back to the city record.
            return self.city(query)['country_name']
def country(self, query):
    """
    Return a dictionary with the country code and name when given an
    IP address or a Fully Qualified Domain Name (FQDN). For example, both
    '24.124.1.80' and 'djangoproject.com' are valid parameters.
    """
    # Each lookup validates the query itself.
    code = self.country_code(query)
    name = self.country_name(query)
    return {'country_code': code, 'country_name': name}
#### Coordinate retrieval routines ####
def coords(self, query, ordering=('longitude', 'latitude')):
    """Return a coordinate tuple for `query`, keyed by `ordering`, or
    None when no city record is found."""
    record = self.city(query)
    if record is None:
        return None
    return tuple(record[key] for key in ordering)
def lon_lat(self, query):
    """Return a (longitude, latitude) tuple for the given query."""
    return self.coords(query, ('longitude', 'latitude'))
def lat_lon(self, query):
    """Return a (latitude, longitude) tuple for the given query."""
    return self.coords(query, ('latitude', 'longitude'))
def geos(self, query):
    """Return a GEOS Point (WGS84, srid=4326) for the given query, or
    None when no coordinates are available."""
    lonlat = self.lon_lat(query)
    if not lonlat:
        return None
    # Import lazily so a missing GEOS install only matters when used.
    from django.contrib.gis.geos import Point
    return Point(lonlat, srid=4326)
#### GeoIP Database Information Routines ####
@property
def country_info(self):
    """Information string about the GeoIP country database."""
    if self._country is None:
        return 'No GeoIP Country data in "%s"' % self._country_file
    return GeoIP_database_info(self._country)
@property
def city_info(self):
    """Information string about the GeoIP city database."""
    if self._city is None:
        return 'No GeoIP City data in "%s"' % self._city_file
    return GeoIP_database_info(self._city)
@property
def info(self):
    """Information about the GeoIP library and databases in use."""
    parts = []
    if GeoIP_lib_version:
        parts.append('GeoIP Library:\n\t%s\n' % GeoIP_lib_version())
    parts.append('Country:\n\t%s\nCity:\n\t%s' % (self.country_info, self.city_info))
    return ''.join(parts)
#### Methods for compatibility w/the GeoIP-Python API. ####
@classmethod
def open(cls, full_path, cache):
    # Alternate constructor mirroring GeoIP.open() from the original
    # GeoIP-Python bindings.
    # NOTE(review): returns a GeoIP instance rather than cls(...), so
    # subclasses calling open() do not get subclass instances -- confirm
    # this is intended for the compatibility API.
    return GeoIP(full_path, cache)
def _rec_by_arg(self, arg):
if self._city:
return self.city(arg)
else:
return self.country(arg)
# Aliases matching the method names of the original GeoIP-Python API.
# NOTE(review): region_by_addr/region_by_name are bound to city(), so they
# return full city records rather than region-only records -- confirm
# callers only read the region fields.
region_by_addr = city
region_by_name = city
record_by_addr = _rec_by_arg
record_by_name = _rec_by_arg
country_code_by_addr = country_code
country_code_by_name = country_code
country_name_by_addr = country_name
country_name_by_name = country_name
| mit |
apache/hadoop-common | src/contrib/hod/hodlib/GridServices/service.py | 182 | 8174 | #Licensed to the Apache Software Foundation (ASF) under one
#or more contributor license agreements. See the NOTICE file
#distributed with this work for additional information
#regarding copyright ownership. The ASF licenses this file
#to you under the Apache License, Version 2.0 (the
#"License"); you may not use this file except in compliance
#with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
"""defines Service as abstract interface"""
# -*- python -*-
import random, socket
class Service:
    """Abstract base class that all grid services inherit from.

    Stores the service description and work directories; concrete
    services must implement the node-management hooks below.
    """

    def __init__(self, serviceDesc, workDirs):
        self.serviceDesc = serviceDesc
        self.workDirs = workDirs

    def getName(self):
        """Return the service name from its descriptor."""
        return self.serviceDesc.getName()

    def getInfoAddrs(self):
        """Return a list of addresses that provide information about the
        service.  Empty by default."""
        return []

    def isLost(self):
        """True if the service is down."""
        raise NotImplementedError

    def addNodes(self, nodeList):
        """Add a node set to the service."""
        raise NotImplementedError

    def removeNodes(self, nodeList):
        """Remove a node set from the service."""
        raise NotImplementedError

    def getWorkers(self):
        """Return the workers of this service."""
        raise NotImplementedError

    def needsMore(self):
        """Return the number of nodes the service wants to add."""
        raise NotImplementedError

    def needsLess(self):
        """Return the number of nodes the service wants to remove."""
        raise NotImplementedError
class MasterSlave(Service):
    """Base class for services with a master/slave architecture.

    Tracks launch/initialization state of the (single) master and any
    failure information reported for it.
    """

    def __init__(self, serviceDesc, workDirs, requiredNode):
        Service.__init__(self, serviceDesc, workDirs)
        self.launchedMaster = False
        self.masterInitialized = False
        self.masterAddress = 'none'
        self.requiredNode = requiredNode
        self.failedMsg = None
        self.masterFailureCount = 0

    def getRequiredNode(self):
        """Return the node the master is required to run on, if any."""
        return self.requiredNode

    def getMasterRequest(self):
        """Return the number of masters needed to run this service."""
        raise NotImplementedError

    def isLaunchable(self, serviceDict):
        """Whether the service can be launched now.  True by default for
        services that do not depend on other services."""
        return True

    def getMasterCommands(self, serviceDict):
        """Return the list of master commands to run for this service."""
        raise NotImplementedError

    def getAdminCommands(self, serviceDict):
        """Return the list of admin commands to run for this service."""
        raise NotImplementedError

    def getWorkerCommands(self, serviceDict):
        """Return the list of worker commands to run for this service."""
        raise NotImplementedError

    def setMasterNodes(self, list):
        """Record the status of master nodes once they start running on a
        node cluster."""
        raise NotImplementedError

    def addNodes(self, list):
        """Add nodes to the service.  Not implemented currently."""
        raise NotImplementedError

    def getMasterAddrs(self):
        """Return the master hostname:port addresses worker nodes should
        connect to."""
        raise NotImplementedError

    def setMasterParams(self, list):
        """Set the various master params depending on what each hodring
        set the master params to."""
        raise NotImplementedError

    def setlaunchedMaster(self):
        """Mark the master as having been launched."""
        self.launchedMaster = True

    def isMasterLaunched(self):
        """Return whether a master has been launched for the service."""
        return self.launchedMaster

    def isMasterInitialized(self):
        """Return whether a launched master has been initialized."""
        return self.masterInitialized

    def setMasterInitialized(self):
        """Mark the master as initialized."""
        self.masterInitialized = True
        # Master is up successfully, so any earlier failure state is stale.
        self.masterFailureCount = 0
        self.failedMsg = None

    def getMasterAddress(self):
        """Return where the master was launched.

        NOTE: needs to change to reflect more than one master; currently a
        single address is tracked along with whether it was actually up.
        """
        return self.masterAddress

    def setMasterAddress(self, addr):
        self.masterAddress = addr

    def isExternal(self):
        return self.serviceDesc.isExternal()

    def setMasterFailed(self, err):
        """Record a master failure and its error message."""
        self.masterFailureCount += 1
        self.failedMsg = err
        # The launch flag was set to True when the command was sent to the
        # HodRings; reset it so the status reflects the failure.
        self.launchedMaster = False

    def getMasterFailed(self):
        return self.failedMsg

    def getMasterFailureCount(self):
        return self.masterFailureCount
class NodeRequest:
    """A request for a number of nodes.

    `required` and `preferred` are node lists; `isPreemptee` says whether
    the allocated nodes may be preempted.
    """

    def __init__(self, n, required=None, preferred=None, isPreemptee=True):
        self.numNodes = n
        # BUG FIX: the original used mutable default arguments
        # (required=[], preferred=[]), so all instances created with the
        # defaults shared -- and could mutate -- the same two list objects.
        self.preferred = [] if preferred is None else preferred
        self.isPreemptee = isPreemptee
        self.required = [] if required is None else required

    def setNumNodes(self, n):
        self.numNodes = n

    def setPreferredList(self, list):
        self.preferred = list

    def setIsPreemptee(self, flag):
        self.isPreemptee = flag
class ServiceUtil:
    """ this class should be moved out of
    service.py to a util file"""

    # Ports already handed out by the helpers below (port number -> True).
    # Class-level, so the same port is never returned twice in-process.
    localPortUsed = {}

    def getUniqRandomPort(h=None, low=50000, high=60000, retry=900, log=None):
        """This allocates a randome free port between low and high"""
        # We use a default value of 900 retries, which takes an agreeable
        # time limit of ~ 6.2 seconds to check 900 ports, in the worse case
        # of no available port in those 900.
        while retry > 0:
            n = random.randint(low, high)
            # Skip ports this process already handed out.
            # NOTE(review): `retry` is not decremented on this path, so if
            # every port in [low, high] were in localPortUsed this would
            # loop forever -- confirm that is acceptable.
            if n in ServiceUtil.localPortUsed:
                continue
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            if not h:
                h = socket.gethostname()
            avail = False
            if log: log.debug("Trying to see if port %s is available"% n)
            try:
                # bind() succeeds only if the port is currently free on h.
                s.bind((h, n))
                if log: log.debug("Yes, port %s is available" % n)
                avail = True
            except socket.error,e:
                if log: log.debug("Could not bind to the port %s. Reason %s" % (n,e))
                retry -= 1
                pass
            # The earlier code that used to be here had syntax errors. The code path
            # couldn't be followd anytime, so the error remained uncaught.
            # This time I stumbled upon the error
            s.close()
            if avail:
                ServiceUtil.localPortUsed[n] = True
                return n
        raise ValueError, "Can't find unique local port between %d and %d" % (low, high)
    getUniqRandomPort = staticmethod(getUniqRandomPort)

    def getUniqPort(h=None, low=40000, high=60000, retry=900, log=None):
        """get unique port on a host that can be used by service
        This and its consumer code should disappear when master
        nodes get allocatet by nodepool"""
        # We use a default value of 900 retries, which takes an agreeable
        # time limit of ~ 6.2 seconds to check 900 ports, in the worse case
        # of no available port in those 900.
        # Sequential scan upward from `low`.
        # NOTE(review): only `retry` bounds the loop, so `n` can exceed
        # `high` despite the error message -- confirm intended.
        n = low
        while retry > 0:
            n = n + 1
            if n in ServiceUtil.localPortUsed:
                continue
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            if not h:
                h = socket.gethostname()
            avail = False
            if log: log.debug("Trying to see if port %s is available"% n)
            try:
                s.bind((h, n))
                if log: log.debug("Yes, port %s is available" % n)
                avail = True
            except socket.error,e:
                if log: log.debug("Could not bind to the port %s. Reason %s" % (n,e))
                retry -= 1
                pass
            s.close()
            if avail:
                ServiceUtil.localPortUsed[n] = True
                return n
        raise ValueError, "Can't find unique local port between %d and %d" % (low, high)
    getUniqPort = staticmethod(getUniqPort)
| apache-2.0 |
Yarichi/Proyecto-DASI | Malmo/Python_Examples/default_world_test.py | 1 | 7173 | # ------------------------------------------------------------------------------------------------
# Copyright (c) 2016 Microsoft Corporation
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
# associated documentation files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge, publish, distribute,
# sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or
# substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
# NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# ------------------------------------------------------------------------------------------------
# Sample to demonstrate use of the DefaultWorldGenerator, ContinuousMovementCommands, timestamps and ObservationFromFullStats.
# Runs an agent in a standard Minecraft world, randomly seeded, uses timestamps and observations
# to calculate speed of movement, and chooses tiny "programmes" to execute if the speed drops to below a certain threshold.
# Mission continues until the agent dies.
import MalmoPython
import os
import random
import sys
import time
import datetime
import json
import random
def GetMissionXML():
    ''' Build an XML mission string that uses the DefaultWorldGenerator.'''
    # The agent starts with a stack of glowstone (used by the tower-building
    # recovery programme below) and gets ContinuousMovementCommands plus
    # ObservationFromFullStats so speed can be computed from observations.
    return '''<?xml version="1.0" encoding="UTF-8" ?>
    <Mission xmlns="http://ProjectMalmo.microsoft.com" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
        <About>
            <Summary>Normal life</Summary>
        </About>
        <ServerSection>
            <ServerHandlers>
                <DefaultWorldGenerator />
            </ServerHandlers>
        </ServerSection>
        <AgentSection mode="Survival">
            <Name>Rover</Name>
            <AgentStart>
                <Inventory>
                    <InventoryBlock slot="0" type="glowstone" quantity="63"/>
                </Inventory>
            </AgentStart>
            <AgentHandlers>
                <ContinuousMovementCommands/>
                <ObservationFromFullStats/>
            </AgentHandlers>
        </AgentSection>
    </Mission>'''
# Variety of strategies for dealing with loss of motion:
# Each entry is a tiny "programme" of semicolon-separated commands; the
# pseudo-command "wait N" is interpreted by the main loop (it is not a
# Malmo command) and pauses programme execution for N check cycles.
commandSequences=[
    "jump 1; move 1; wait 1; jump 0; move 1; wait 2", # attempt to jump over obstacle
    "turn 0.5; wait 1; turn 0; move 1; wait 2", # turn right a little
    "turn -0.5; wait 1; turn 0; move 1; wait 2", # turn left a little
    "move 0; attack 1; wait 5; pitch 0.5; wait 1; pitch 0; attack 1; wait 5; pitch -0.5; wait 1; pitch 0; attack 0; move 1; wait 2", # attempt to destroy some obstacles
    "move 0; pitch 1; wait 2; pitch 0; use 1; jump 1; wait 6; use 0; jump 0; pitch -1; wait 1; pitch 0; wait 2; move 1; wait 2" # attempt to build tower under our feet
]
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)  # flush print output immediately

my_mission = MalmoPython.MissionSpec(GetMissionXML(), True)
agent_host = MalmoPython.AgentHost()
# Parse the standard Malmo command-line options.
try:
    agent_host.parse( sys.argv )
except RuntimeError as e:
    print 'ERROR:',e
    print agent_host.getUsage()
    exit(1)
if agent_host.receivedArgument("help"):
    print agent_host.getUsage()
    exit(0)

if agent_host.receivedArgument("test"):
    my_mission.timeLimitInSeconds(20) # else mission runs forever

# Attempt to start the mission:
max_retries = 3
for retry in range(max_retries):
    try:
        agent_host.startMission( my_mission, MalmoPython.MissionRecordSpec() )
        break
    except RuntimeError as e:
        if retry == max_retries - 1:
            print "Error starting mission",e
            print "Is the game running?"
            exit(1)
        else:
            # Give the Minecraft client a moment before retrying.
            time.sleep(2)

# Wait for the mission to start:
world_state = agent_host.getWorldState()
while not world_state.has_mission_begun:
    time.sleep(0.1)
    world_state = agent_host.getWorldState()

# State for the speed check / recovery-programme machinery below.
currentSequence="move 1; wait 4" # start off by moving
currentSpeed = 0.0
distTravelledAtLastCheck = 0.0
timeStampAtLastCheck = datetime.datetime.now()
cyclesPerCheck = 10 # controls how quickly the agent responds to getting stuck, and the amount of time it waits for on a "wait" command.
currentCycle = 0
waitCycles = 0
# Main loop:
while world_state.is_mission_running:
    world_state = agent_host.getWorldState()
    if world_state.number_of_observations_since_last_state > 0:
        obvsText = world_state.observations[-1].text
        currentCycle += 1
        if currentCycle == cyclesPerCheck: # Time to check our speed and decrement our wait counter (if set):
            currentCycle = 0
            if waitCycles > 0:
                waitCycles -= 1
            # Now use the latest observation to calculate our approximate speed:
            data = json.loads(obvsText) # observation comes in as a JSON string...
            dist = data.get(u'DistanceTravelled', 0) #... containing a "DistanceTravelled" field (amongst other things).
            timestamp = world_state.observations[-1].timestamp # timestamp arrives as a python DateTime object
            delta_dist = dist - distTravelledAtLastCheck
            delta_time = timestamp - timeStampAtLastCheck
            # NOTE(review): only the .microseconds component of the
            # timedelta is used, which wraps at one second -- confirm the
            # checks always occur well under a second apart.
            currentSpeed = 1000000.0 * delta_dist / float(delta_time.microseconds) # "centimetres" per second?
            distTravelledAtLastCheck = dist
            timeStampAtLastCheck = timestamp
        if waitCycles == 0:
            # Time to execute the next command, if we have one:
            if currentSequence != "":
                # Pop the first command off the programme.
                commands = currentSequence.split(";", 1)
                command = commands[0].strip()
                if len(commands) > 1:
                    currentSequence = commands[1]
                else:
                    currentSequence = ""
                print command
                verb,sep,param = command.partition(" ")
                if verb == "wait": # "wait" isn't a Malmo command - it's just used here to pause execution of our "programme".
                    waitCycles = int(param.strip())
                else:
                    agent_host.sendCommand(command) # Send the command to Minecraft.
            if currentSequence == "" and currentSpeed < 50 and waitCycles == 0: # Are we stuck?
                currentSequence = random.choice(commandSequences) # Choose a random action (or insert your own logic here for choosing more sensibly...)
                print "Stuck! Chosen programme: " + currentSequence
# Mission has ended. | gpl-2.0 |
tempbottle/servo | tests/wpt/web-platform-tests/tools/html5lib/html5lib/tests/mockParser.py | 452 | 1263 | from __future__ import absolute_import, division, unicode_literals
import sys
import os
if __name__ == '__main__':
    # Allow us to import from the src directory
    # (chdir to this file's directory first so the relative path works).
    os.chdir(os.path.split(os.path.abspath(__file__))[0])
    sys.path.insert(0, os.path.abspath(os.path.join(os.pardir, "src")))
from html5lib.tokenizer import HTMLTokenizer
class HTMLParser(object):
    """ Fake parser to test tokenizer output """

    def parse(self, stream, output=True):
        """Tokenize `stream`, printing each token when `output` is True."""
        for token in HTMLTokenizer(stream):
            if output:
                print(token)
if __name__ == "__main__":
    x = HTMLParser()
    if len(sys.argv) > 1:
        if len(sys.argv) > 2:
            # Any second argument triggers profiling with hotshot
            # (a Python 2-only stdlib profiler).
            import hotshot
            import hotshot.stats
            prof = hotshot.Profile('stats.prof')
            # output=False suppresses token printing while profiling.
            prof.runcall(x.parse, sys.argv[1], False)
            prof.close()
            stats = hotshot.stats.load('stats.prof')
            stats.strip_dirs()
            stats.sort_stats('time')
            stats.print_stats()
        else:
            x.parse(sys.argv[1])
    else:
        print("""Usage: python mockParser.py filename [stats]
        If stats is specified the hotshots profiler will run and output the
        stats instead.
        """)
| mpl-2.0 |
Winnak/Anemone | Anemone/views/configuration.py | 1 | 2652 | """ View for configuration. """
from os.path import join as path
from flask import render_template, g, redirect, session, flash, url_for, request
from Anemone import app, schedule
from Anemone.models import Project
import Anemone.abcfile
@app.route("/<project>/configuration", methods=["GET", "POST"])
def configuration_view(project):
    """Render the configuration page for a project; a POST applies the
    submitted settings before rendering."""
    proj = Project.select().where(Project.slug == project).first()
    if proj is None:
        flash("invalid project")
        return redirect(url_for("projects"))

    session["project"] = proj
    g.selected_tab = "configuration"

    # Only a GET loads the current on-disk settings; after a POST the
    # template is rendered with `settings` left as None.
    settings = None
    if request.method == "GET":
        settings = Anemone.abcfile.parse(path(proj.path, "build.abc"))
    elif request.method == "POST":
        configuration_post(proj, request)

    ssh_key = open(app.config["SSH_PUBLIC"]).readline()
    return render_template("configure.html", ssh=ssh_key,
                           build=settings, unity=app.config["UNITY_PATH"])
#pylint: disable=R0912
# disabling "too many branches", which is true, but this looks nice currently.
def configuration_post(project, req):
    """Apply a configuration form submission to `project`.

    Validates each field, flashes a message for every invalid one, updates
    the project's build schedule, and saves the model.
    NOTE(review): the model is saved even when some fields were invalid,
    matching the original behaviour.
    """
    error = ""
    if req.form.get("name", None) is None:
        flash("Project name must be something", category="error")
        error += "name "
    else:
        project.name = req.form["name"]

    if req.form.get("slug", None) is None: #TODO: Check if unique
        flash("Project slug must be something (should be automaticly generated)", category="error")
        error += "slug "
    else:
        project.slug = req.form["slug"]

    if req.form.get("path", None) is None:
        flash("Folder path must be something with a in order to be able to build the project.")
        error += "path "  # fixed: was mislabelled "output" in the original
    else:
        project.path = req.form["path"]

    if req.form.get("output", None) is None:
        flash("Project Output folder must be something", category="error")
        error += "output "  # fixed: was mislabelled "path" in the original
    else:
        project.output = req.form["output"]

    if req.form.get("description", None) is not None:
        if len(req.form["description"]) > 1:
            project.description = req.form["description"]

    interval = req.form.get("scheduleinterval", None)
    if interval is None:
        schedule.pause_job("building_" + str(project.id))
    elif str(interval).isdigit():
        # BUG FIX: the original called isinstance() with a single argument,
        # which raises TypeError at runtime; validate the value instead and
        # pass it to the scheduler as an integer.
        schedule.modify_job("building_" + str(project.id), hours=int(interval))
        schedule.resume_job("building_" + str(project.id))

    if error:  # fixed: was `error is not ""`, an identity comparison
        print(error)
    project.save()
#pylint: enable=R0912
| mit |
nomaro/SickBeard_Backup | sickbeard/providers/ethor.py | 19 | 3033 | # Author: Julien Goret <jgoret@gmail.com>
# URL: https://github.com/sarakha63/Sick-Beard
#
# This file is based upon tvtorrents.py.
#
# This file is part of Sick Beard.
#
# Sick Beard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
import sickbeard
import generic
from sickbeard import helpers, logger, exceptions, tvcache
from lib.tvdb_api import tvdb_api, tvdb_exceptions
from sickbeard.name_parser.parser import NameParser, InvalidNameException
class EthorProvider(generic.TorrentProvider):
    """Torrent provider for the Ethor tracker (RSS cache only, no
    backlog search support)."""

    def __init__(self):
        generic.TorrentProvider.__init__(self, "Ethor")
        self.supportsBacklog = False
        self.cache = EthorCache(self)
        self.url = 'http://ethor.net/'

    def isEnabled(self):
        """Whether this provider is enabled in the configuration."""
        return sickbeard.ETHOR

    def imageName(self):
        """Icon filename shown in the UI."""
        return 'ethor.png'
class EthorCache(tvcache.TVCache):
    """RSS cache for the Ethor provider."""

    def __init__(self, provider):
        tvcache.TVCache.__init__(self, provider)
        # only poll every 15 minutes
        self.minTime = 15

    def _getRSSData(self):
        """Fetch the raw RSS feed, authenticated with the user's API key."""
        if not sickbeard.ETHOR_KEY:
            raise exceptions.AuthException("Ethor requires an API key to work correctly")
        url = 'http://ethor.net/rss.php?feed=dl&cat=45,43,7&rsskey=' + sickbeard.ETHOR_KEY
        logger.log(u"Ethor cache update URL: " + url, logger.DEBUG)
        data = self.provider.getURL(url)
        return data

    def _parseItem(self, item):
        """Parse one RSS item and add it to the cache.

        Skips items with missing title/url, titles that cannot be parsed
        into an episode, and TVDB lookup failures.
        """
        # Search all languages so foreign-titled shows still resolve.
        ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()
        ltvdb_api_parms['search_all_languages'] = True
        (title, url) = self.provider._get_title_and_url(item)
        if not title or not url:
            logger.log(u"The XML returned from the Ethor RSS feed is incomplete, this result is unusable", logger.ERROR)
            return
        try:
            myParser = NameParser()
            parse_result = myParser.parse(title)
        except InvalidNameException:
            logger.log(u"Unable to parse the filename "+title+" into a valid episode", logger.DEBUG)
            return
        try:
            # Resolve the parsed series name to a TVDB show id.
            t = tvdb_api.Tvdb(**ltvdb_api_parms)
            showObj = t[parse_result.series_name]
        except tvdb_exceptions.tvdb_error:
            logger.log(u"TVDB timed out, unable to update episodes from TVDB", logger.ERROR)
            return
        logger.log(u"Adding item from RSS to cache: " + title, logger.DEBUG)
        self._addCacheEntry(name=title, url=url, tvdb_id=showObj['id'])
# Module-level provider instance.
# NOTE(review): presumably Sick Beard discovers providers through this
# module attribute, as with sibling provider modules -- confirm.
provider = EthorProvider()
| gpl-3.0 |
FreekingDean/home-assistant | homeassistant/components/sensor/miflora.py | 5 | 5187 | """
Support for Xiaomi Mi Flora BLE plant sensor.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.miflora/
"""
from datetime import timedelta
import logging
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.helpers.entity import Entity
import homeassistant.helpers.config_validation as cv
from homeassistant.util import Throttle
from homeassistant.const import (
CONF_MONITORED_CONDITIONS, CONF_NAME, CONF_MAC)
REQUIREMENTS = ['miflora==0.1.13']

_LOGGER = logging.getLogger(__name__)

# Platform-specific configuration keys.
CONF_CACHE = 'cache_value'
CONF_FORCE_UPDATE = 'force_update'
CONF_MEDIAN = 'median'
CONF_RETRIES = 'retries'
CONF_TIMEOUT = 'timeout'

DEFAULT_FORCE_UPDATE = False
DEFAULT_MEDIAN = 3
DEFAULT_NAME = 'Mi Flora'
DEFAULT_RETRIES = 2
DEFAULT_TIMEOUT = 10

# Seconds between sensor updates; also the default poller cache timeout.
UPDATE_INTERVAL = 1200
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=UPDATE_INTERVAL)

# Sensor types are defined like: Name, units
SENSOR_TYPES = {
    'temperature': ['Temperature', '°C'],
    'light': ['Light intensity', 'lux'],
    'moisture': ['Moisture', '%'],
    'conductivity': ['Conductivity', 'µS/cm'],
    'battery': ['Battery', '%'],
}

# Extend the base sensor schema: MAC and monitored conditions are required,
# everything else falls back to the defaults above.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Required(CONF_MAC): cv.string,
    vol.Required(CONF_MONITORED_CONDITIONS):
        vol.All(cv.ensure_list, [vol.In(SENSOR_TYPES)]),
    vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
    vol.Optional(CONF_MEDIAN, default=DEFAULT_MEDIAN): cv.positive_int,
    vol.Optional(CONF_FORCE_UPDATE, default=DEFAULT_FORCE_UPDATE): cv.boolean,
    vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
    vol.Optional(CONF_RETRIES, default=DEFAULT_RETRIES): cv.positive_int,
    vol.Optional(CONF_CACHE, default=UPDATE_INTERVAL): cv.positive_int,
})
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Setup the MiFlora sensor."""
    from miflora import miflora_poller

    # A single poller is shared by all sensor entities for this device.
    cache = config.get(CONF_CACHE)
    poller = miflora_poller.MiFloraPoller(
        config.get(CONF_MAC), cache_timeout=cache)
    force_update = config.get(CONF_FORCE_UPDATE)
    median = config.get(CONF_MEDIAN)
    poller.ble_timeout = config.get(CONF_TIMEOUT)
    poller.retries = config.get(CONF_RETRIES)

    # One entity per monitored condition (temperature, moisture, ...).
    devs = []
    for parameter in config[CONF_MONITORED_CONDITIONS]:
        name = SENSOR_TYPES[parameter][0]
        unit = SENSOR_TYPES[parameter][1]

        # Prefix the sensor name with the configured device name, if set.
        prefix = config.get(CONF_NAME)
        if len(prefix) > 0:
            name = "{} {}".format(prefix, name)

        devs.append(MiFloraSensor(
            poller, parameter, name, unit, force_update, median))

    add_devices(devs)
class MiFloraSensor(Entity):
    """Implementing the MiFlora sensor."""

    def __init__(self, poller, parameter, name, unit, force_update, median):
        """Initialize the sensor."""
        self.poller = poller
        # Which poller value this entity exposes (e.g. 'temperature').
        self.parameter = parameter
        self._unit = unit
        self._name = name
        self._state = None
        # Rolling window of recent readings used for median filtering.
        self.data = []
        self._force_update = force_update
        # Median is used to filter out outliers. median of 3 will filter
        # single outliers, while median of 5 will filter double outliers
        # Use median_count = 1 if no filtering is required.
        self.median_count = median

    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name

    @property
    def state(self):
        """Return the state of the sensor."""
        return self._state

    @property
    def unit_of_measurement(self):
        """Return the units of measurement."""
        return self._unit

    @property
    def force_update(self):
        """Force update."""
        return self._force_update

    @Throttle(MIN_TIME_BETWEEN_UPDATES)
    def update(self):
        """
        Update current conditions.

        This uses a rolling median over 3 values to filter out outliers.
        """
        try:
            _LOGGER.debug("Polling data for %s", self.name)
            data = self.poller.parameter_value(self.parameter)
        except IOError as ioerr:
            _LOGGER.info("Polling error %s", ioerr)
            # NOTE(review): this assignment is dead code -- the method
            # returns on the next line before `data` is ever read.
            data = None
            return

        if data is not None:
            _LOGGER.debug("%s = %s", self.name, data)
            self.data.append(data)
        else:
            _LOGGER.info("Did not receive any data from Mi Flora sensor %s",
                         self.name)
            # Remove old data from median list or set sensor value to None
            # if no data is available anymore
            if len(self.data) > 0:
                self.data = self.data[1:]
            else:
                self._state = None
            return

        _LOGGER.debug("Data collected: %s", self.data)
        # Keep at most `median_count` readings in the window.
        if len(self.data) > self.median_count:
            self.data = self.data[1:]

        if len(self.data) == self.median_count:
            # Window is full: publish the median of the collected readings.
            median = sorted(self.data)[int((self.median_count - 1) / 2)]
            _LOGGER.debug("Median is: %s", median)
            self._state = median
        else:
            _LOGGER.debug("Not yet enough data for median calculation")
| mit |
tbielawa/bitmath | tests/test_file_size.py | 1 | 10020 | # -*- coding: utf-8 -*-
# The MIT License (MIT)
#
# Copyright © 2014 Tim Bielawa <timbielawa@gmail.com>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""
Tests to verify that string representations are accurate
"""
from . import TestCase
import bitmath
import os
class TestFileSize(TestCase):
# expected sizes are given in bytes
def setUp(self):
    # Fixture files of known size: 38 bytes and 1024 bytes respectively
    # (sizes asserted by the getsize tests below).
    self.byte_file = './tests/file_sizes/bytes.test'
    self.kibibyte_file = './tests/file_sizes/kbytes.test'
# *****************************************************************
# getsize
# *****************************************************************
##################################################################
# NIST tests
def test_getsize_byte_system_NIST(self):
    """NIST: getsize reports the correct type and size for byte sized files"""
    size = bitmath.getsize(self.byte_file, system=bitmath.NIST)
    # 38 bytes is below any binary prefix, so the result stays Byte.
    self.assertEqual(size, bitmath.Byte(bytes=38))
    self.assertIs(type(size), bitmath.Byte)
def test_getsize_kibibyte_system_NIST(self):
    """NIST: getsize reports the correct type and size for kibibyte sized files"""
    size = bitmath.getsize(self.kibibyte_file, system=bitmath.NIST)
    # 1024 bytes is exactly one KiB under the NIST (binary) system.
    self.assertEqual(size, bitmath.KiB(bytes=1024))
    self.assertIs(type(size), bitmath.KiB)
##################################################################
# SI tests
def test_getsize_byte_system_SI(self):
    """SI: getsize reports the correct type and size for byte sized files"""
    size = bitmath.getsize(self.byte_file, system=bitmath.SI)
    # 38 bytes is below any decimal prefix, so the result stays Byte.
    self.assertEqual(size, bitmath.Byte(bytes=38))
    self.assertIs(type(size), bitmath.Byte)
def test_getsize_kibibyte_system_SI(self):
    """SI: getsize reports the correct type and size for kibibyte sized files"""
    size = bitmath.getsize(self.kibibyte_file, system=bitmath.SI)
    # Under the SI (decimal) system, 1024 bytes maps to kB.
    self.assertEqual(size, bitmath.kB(bytes=1024))
    self.assertIs(type(size), bitmath.kB)
# *****************************************************************
# listdir
# *****************************************************************
def test_listdir_nosymlinks(self):
    """listdir: no symbolic links in tree measures right

    Assume a directory tree where no sub-directories are symbolic links::

        $ tree ./tests/listdir_nosymlinks
        ./tests/listdir_nosymlinks
        `-- depth1
            `-- depth2
                |-- 1024_byte_file
                `-- 10_byte_file

    2 directories, 2 files

    And the files, ``tests/listdir_nosymlinks/depth1/depth2/10_byte_file``
    and ``tests/listdir_nosymlinks/depth1/depth2/1024_byte_file`` are 10
    Bytes and 1024 Bytes in size, respectively.

    Then:

    >>> for f in bitmath.listdir('./tests/listdir_nosymlinks'):
    ...     print f

    Would yield 2-tuple's of:

    ('/path/tests/listdir_nosymlinks/depth1/depth2/10_byte_file', Byte(10.0))
    ('/path/tests/listdir_nosymlinks/depth1/depth2/1024_byte_file', KiB(1.0))
    """
    # Call with relpath=True so the paths are easier to verify
    contents = list(bitmath.listdir('./tests/listdir_nosymlinks/',
                                    relpath=True))
    # Ensure the returned paths match the expected paths
    discovered_paths = [
        contents[0][0],
        contents[1][0],
    ]
    expected_paths = [
        'tests/listdir_nosymlinks/depth1/depth2/10_byte_file',
        'tests/listdir_nosymlinks/depth1/depth2/1024_byte_file'
    ]
    self.assertListEqual(discovered_paths, expected_paths)
    # And that the measured sizes are correct.
    expected_sizes = [
        bitmath.Byte(10.0),
        bitmath.Byte(1024.0)
    ]
    discovered_sizes = [
        contents[0][1],
        contents[1][1]
    ]
    self.assertListEqual(discovered_sizes, expected_sizes)
# 2018-03-18 - Commenting this out for now. This is failing during
# RPM building. I have no idea why or when this began
# happening. Tests work from the command line, but not during the
# %check part of RPM building.
#
# It APPEARS that rpmbuild is dereferencing symlinks when
# unpacking and copying the dist archive contents. Rather than
# '10_byte_file_link' appearing as a link, it is a real file.
# @unittest.expectedFailure
# def test_listdir_symlinks_nofollow(self):
# """listdir: symbolic links in tree not followed
# Similar assumptions as in test_listdir_nosymlinks, except the
# directory structure looks like this:
# $ tree tests/listdir_symlinks
# tests/listdir_symlinks
# |-- 10_byte_file_link -> ../listdir/10_byte_file
# `-- depth1
# `-- depth2
# `-- 10_byte_file
# 2 directories, 2 files
# """
# # Call with relpath=True so the paths are easier to verify
# contents = list(bitmath.listdir('./tests/listdir_symlinks/', relpath=True))
# # Ensure the returned path matches the expected path
# self.assertEqual(contents[0][0], 'tests/listdir_symlinks/depth1/depth2/10_byte_file')
# # Ensure the measured size is what we expect
# self.assertEqual(contents[0][1], bitmath.Byte(10.0))
def test_listdir_symlinks_follow(self):
"""listdir: symbolic links in tree are followed
Same assumptions as in test_listdir_symlinks_nofollow.
"""
# Call with relpath=True so the paths are easier to verify
contents = list(bitmath.listdir('./tests/listdir_symlinks/',
followlinks=True,
relpath=True))
# Ensure the returned path matches the expected path
expected_paths = [
'tests/listdir_symlinks/10_byte_file_link',
'tests/listdir_symlinks/depth1/depth2/10_byte_file'
]
discovered_paths = [
contents[0][0],
contents[1][0]
]
self.assertListEqual(discovered_paths, expected_paths)
# Ensure the measured size is what we expect
expected_sizes = [
bitmath.Byte(10.0),
bitmath.Byte(10.0)
]
discovered_sizes = [
contents[0][1],
contents[1][1]
]
self.assertListEqual(discovered_sizes, expected_sizes)
def test_listdir_symlinks_follow_relpath_false(self):
"""listdir: symlinks followed, absolute paths are returned
Same assumptions as in test_listdir_symlinks_follow. Difference is
that the 0th item of the tuple returns a fully qualified path.
"""
contents = list(bitmath.listdir('./tests/listdir_symlinks/',
followlinks=True))
# Ensure the returned path matches the expected path and
# begins with the present working directory
pwd = os.path.realpath('.')
expected_paths = [
os.path.join(pwd, contents[0][0]),
os.path.join(pwd, contents[1][0])
]
discovered_paths = [
contents[0][0],
contents[1][0]
]
self.assertListEqual(discovered_paths, expected_paths)
# Ensure the measured size is what we expect
expected_sizes = [
bitmath.Byte(10.0),
bitmath.Byte(10.0)
]
discovered_sizes = [
contents[0][1],
contents[1][1]
]
self.assertListEqual(discovered_sizes, expected_sizes)
def test_listdir_filtering_nosymlinks(self):
"""listdir: no symbolic links in tree measures right with a filter
Same assumptions as test_listdir_nosymlinks."""
# Call with relpath=True so the paths are easier to verify
contents = list(bitmath.listdir('./tests/listdir_nosymlinks/',
relpath=True,
# Should only find 1 file, 1024_byte_file
filter='1024*'))
# Ensure the returned path matches the expected path
self.assertEqual(contents[0][0],
'tests/listdir_nosymlinks/depth1/depth2/1024_byte_file')
# Ensure the measured size is what we expect
self.assertEqual(contents[0][1], bitmath.KiB(1.0))
def test_listdir_filtering_empty_match_nosymlinks(self):
"""listdir: filtering with nosymlinks returns 0 matches for a filter
Same assumptions as test_listdir_nosymlinks."""
# Call with relpath=True so the paths are easier to verify
contents = list(bitmath.listdir('./tests/listdir_nosymlinks/',
relpath=True,
# Should find no matches
filter='*notafile*'))
# There should be one file discovered
self.assertEqual(len(contents), int(0))
| mit |
aclifton/cpeg853-gem5 | src/dev/x86/PcSpeaker.py | 69 | 1839 | # Copyright (c) 2008 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
from m5.params import *
from m5.proxy import *
from Device import BasicPioDevice
class PcSpeaker(BasicPioDevice):
    # gem5 SimObject type name; must match the C++ side's registration.
    type = 'PcSpeaker'
    # Fully qualified C++ class that implements this device model.
    cxx_class = 'X86ISA::Speaker'
    # Header declaring the C++ class above.
    cxx_header = "dev/x86/speaker.hh"
    # The PC speaker's tone is driven by a channel of the i8254 PIT.
    i8254 = Param.I8254('Timer that drives the speaker')
| bsd-3-clause |
nkgilley/home-assistant | homeassistant/components/lutron_caseta/switch.py | 7 | 1482 | """Support for Lutron Caseta switches."""
import logging
from homeassistant.components.switch import DOMAIN, SwitchEntity
from . import DOMAIN as CASETA_DOMAIN, LutronCasetaDevice
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up the Lutron Caseta switch platform.

    Adds switches from the Caseta bridge associated with the config_entry as
    switch entities.
    """
    bridge = hass.data[CASETA_DOMAIN][config_entry.entry_id]
    # One entity per switch device known to the bridge for this entry.
    entities = [
        LutronCasetaLight(switch_device, bridge)
        for switch_device in bridge.get_devices_by_domain(DOMAIN)
    ]
    async_add_entities(entities, True)
    return True
class LutronCasetaLight(LutronCasetaDevice, SwitchEntity):
    """Representation of a Lutron Caseta switch."""

    # NOTE(review): the class is named ...Light but models a switch entity —
    # presumably historical; confirm with callers before renaming.

    async def async_turn_on(self, **kwargs):
        """Turn the switch on."""
        # NOTE(review): the bridge call is synchronous here; confirm the
        # smartbridge API is non-blocking before relying on it in the loop.
        self._smartbridge.turn_on(self.device_id)

    async def async_turn_off(self, **kwargs):
        """Turn the switch off."""
        self._smartbridge.turn_off(self.device_id)

    @property
    def is_on(self):
        """Return true if device is on."""
        # Any positive level reported by the bridge counts as "on".
        return self._device["current_state"] > 0

    async def async_update(self):
        """Update when forcing a refresh of the device."""
        # Re-fetch the device record from the bridge's cached state.
        self._device = self._smartbridge.get_device_by_id(self.device_id)
        _LOGGER.debug(self._device)
| apache-2.0 |
eneldoserrata/marcos_openerp | addons/sale_order_dates/__init__.py | 441 | 1071 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import sale_order_dates
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
sschiau/swift | utils/gyb_syntax_support/Node.py | 1 | 3036 | from __future__ import print_function
import sys
from kinds import SYNTAX_BASE_KINDS, kind_to_type, lowercase_first_word
def error(msg):
    """Report *msg* on stderr and abort generation with a failure code."""
    sys.stderr.write('error: %s\n' % msg)
    sys.exit(-1)
class Node(object):
    """
    A Syntax node, possibly with children.
    If the kind is "SyntaxCollection", then this node is considered a Syntax
    Collection that will expose itself as a typedef rather than a concrete
    subclass.
    """

    def __init__(self, name, description=None, kind=None, traits=None,
                 children=None, element=None, element_name=None,
                 element_choices=None, omit_when_empty=False):
        # e.g. name='DeclList' gives syntax_kind='DeclList',
        # swift_syntax_kind='declList', and self.name the full type name.
        self.syntax_kind = name
        self.swift_syntax_kind = lowercase_first_word(name)
        self.name = kind_to_type(self.syntax_kind)
        self.description = description

        self.traits = traits or []
        self.children = children or []
        self.base_kind = kind
        # Collections are exposed as typedefs of the plain Syntax type
        # rather than getting a dedicated base class.
        if self.base_kind == 'SyntaxCollection':
            self.base_type = 'Syntax'
        else:
            self.base_type = kind_to_type(self.base_kind)

        # error() exits the generator, so an unknown base kind is fatal.
        if self.base_kind not in SYNTAX_BASE_KINDS:
            error("unknown base kind '%s' for node '%s'" %
                  (self.base_kind, self.syntax_kind))

        self.omit_when_empty = omit_when_empty
        self.collection_element = element or ""
        # For SyntaxCollections make sure that the element_name is set.
        assert(not self.is_syntax_collection() or element_name or
               (element and element != 'Syntax'))
        # If there's a preferred name for the collection element that differs
        # from its supertype, use that.
        self.collection_element_name = element_name or self.collection_element
        self.collection_element_type = kind_to_type(self.collection_element)
        self.collection_element_choices = element_choices or []

    def is_base(self):
        """
        Returns `True` if this node declares one of the base syntax kinds.
        """
        return self.syntax_kind in SYNTAX_BASE_KINDS

    def is_syntax_collection(self):
        """
        Returns `True` if this node is a subclass of SyntaxCollection.
        """
        return self.base_kind == "SyntaxCollection"

    def requires_validation(self):
        """
        Returns `True` if this node should have a `validate` method associated.
        """
        # Validation is generated exactly for the buildable nodes.
        return self.is_buildable()

    def is_unknown(self):
        """
        Returns `True` if this node is an `Unknown` syntax subclass.
        """
        return "Unknown" in self.syntax_kind

    def is_buildable(self):
        """
        Returns `True` if this node should have a builder associated.
        """
        # Base kinds, Unknown nodes and collections never get builders.
        return not self.is_base() and \
            not self.is_unknown() and \
            not self.is_syntax_collection()

    def shall_be_omitted_when_empty(self):
        """
        Returns 'True' if this node shall not be created while parsing if it
        has no children.
        """
        return self.omit_when_empty
| apache-2.0 |
dmsimard/ansible | lib/ansible/errors/__init__.py | 15 | 14564 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import re
import traceback
from ansible.errors.yaml_strings import (
YAML_COMMON_DICT_ERROR,
YAML_COMMON_LEADING_TAB_ERROR,
YAML_COMMON_PARTIALLY_QUOTED_LINE_ERROR,
YAML_COMMON_UNBALANCED_QUOTES_ERROR,
YAML_COMMON_UNQUOTED_COLON_ERROR,
YAML_COMMON_UNQUOTED_VARIABLE_ERROR,
YAML_POSITION_DETAILS,
YAML_AND_SHORTHAND_ERROR,
)
from ansible.module_utils._text import to_native, to_text
from ansible.module_utils.common._collections_compat import Sequence
class AnsibleError(Exception):
    '''
    This is the base class for all errors raised from Ansible code,
    and can be instantiated with two optional parameters beyond the
    error message to control whether detailed information is displayed
    when the error occurred while parsing a data file of some kind.

    Usage:

        raise AnsibleError('some message here', obj=obj, show_content=True)

    Where "obj" is some subclass of ansible.parsing.yaml.objects.AnsibleBaseYAMLObject,
    which should be returned by the DataLoader() class.
    '''

    def __init__(self, message="", obj=None, show_content=True, suppress_extended_error=False, orig_exc=None):
        super(AnsibleError, self).__init__(message)

        # show_content=False suppresses file-content reporting for
        # sensitive sources (e.g. vault data); see _get_extended_error().
        self._show_content = show_content
        self._suppress_extended_error = suppress_extended_error
        self._message = to_native(message)
        self.obj = obj
        self.orig_exc = orig_exc

    @property
    def message(self):
        # we import this here to prevent an import loop problem,
        # since the objects code also imports ansible.errors
        from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject

        # Start from the base message and append either the YAML position
        # details (when obj carries them) or the original exception text.
        message = [self._message]
        if isinstance(self.obj, AnsibleBaseYAMLObject):
            extended_error = self._get_extended_error()
            if extended_error and not self._suppress_extended_error:
                message.append(
                    '\n\n%s' % to_native(extended_error)
                )
        elif self.orig_exc:
            message.append('. %s' % to_native(self.orig_exc))

        return ''.join(message)

    @message.setter
    def message(self, val):
        self._message = val

    def __str__(self):
        return self.message

    def __repr__(self):
        return self.message

    def _get_error_lines_from_file(self, file_name, line_number):
        '''
        Returns the line in the file which corresponds to the reported error
        location, as well as the line preceding it (if the error did not
        occur on the first line), to provide context to the error.
        '''

        target_line = ''
        prev_line = ''

        with open(file_name, 'r') as f:
            lines = f.readlines()

            # In case of a YAML loading error, PyYAML will report the very last line
            # as the location of the error. Avoid an index error here in order to
            # return a helpful message.
            file_length = len(lines)
            if line_number >= file_length:
                line_number = file_length - 1

            # If target_line contains only whitespace, move backwards until
            # actual code is found. If there are several empty lines after target_line,
            # the error lines would just be blank, which is not very helpful.
            target_line = lines[line_number]
            while not target_line.strip():
                line_number -= 1
                target_line = lines[line_number]

            if line_number > 0:
                prev_line = lines[line_number - 1]

        return (target_line, prev_line)

    def _get_extended_error(self):
        '''
        Given an object reporting the location of the exception in a file, return
        detailed information regarding it including:

          * the line which caused the error as well as the one preceding it
          * causes and suggested remedies for common syntax errors

        If this error was created with show_content=False, the reporting of content
        is suppressed, as the file contents may be sensitive (ie. vault data).
        '''

        error_message = ''

        try:
            (src_file, line_number, col_number) = self.obj.ansible_pos
            error_message += YAML_POSITION_DETAILS % (src_file, line_number, col_number)
            if src_file not in ('<string>', '<unicode>') and self._show_content:
                (target_line, prev_line) = self._get_error_lines_from_file(src_file, line_number - 1)
                target_line = to_text(target_line)
                prev_line = to_text(prev_line)
                if target_line:
                    stripped_line = target_line.replace(" ", "")

                    # Check for k=v syntax in addition to YAML syntax and set the appropriate error position,
                    # arrow index
                    if re.search(r'\w+(\s+)?=(\s+)?[\w/-]+', prev_line):
                        error_position = prev_line.rstrip().find('=')
                        arrow_line = (" " * error_position) + "^ here"
                        # The arrow points at the previous line, so rebuild
                        # the position header from scratch for that line.
                        error_message = YAML_POSITION_DETAILS % (src_file, line_number - 1, error_position + 1)
                        error_message += "\nThe offending line appears to be:\n\n%s\n%s\n\n" % (prev_line.rstrip(), arrow_line)
                        error_message += YAML_AND_SHORTHAND_ERROR
                    else:
                        arrow_line = (" " * (col_number - 1)) + "^ here"
                        error_message += "\nThe offending line appears to be:\n\n%s\n%s\n%s\n" % (prev_line.rstrip(), target_line.rstrip(), arrow_line)

                    # TODO: There may be cases where there is a valid tab in a line that has other errors.
                    if '\t' in target_line:
                        error_message += YAML_COMMON_LEADING_TAB_ERROR
                    # common error/remediation checking here:
                    # check for unquoted vars starting lines
                    # NOTE(review): the second clause is true unless the line
                    # contains *both* '"{{' and "'{{", so this hint fires for
                    # nearly every line containing {{ }} — was `and` intended?
                    if ('{{' in target_line and '}}' in target_line) and ('"{{' not in target_line or "'{{" not in target_line):
                        error_message += YAML_COMMON_UNQUOTED_VARIABLE_ERROR
                    # check for common dictionary mistakes
                    elif ":{{" in stripped_line and "}}" in stripped_line:
                        error_message += YAML_COMMON_DICT_ERROR
                    # check for common unquoted colon mistakes
                    elif (len(target_line) and
                            len(target_line) > 1 and
                            len(target_line) > col_number and
                            target_line[col_number] == ":" and
                            target_line.count(':') > 1):
                        error_message += YAML_COMMON_UNQUOTED_COLON_ERROR
                    # otherwise, check for some common quoting mistakes
                    else:
                        # FIXME: This needs to split on the first ':' to account for modules like lineinfile
                        # that may have lines that contain legitimate colons, e.g., line: 'i ALL= (ALL) NOPASSWD: ALL'
                        # and throw off the quote matching logic.
                        parts = target_line.split(":")
                        if len(parts) > 1:
                            middle = parts[1].strip()
                            match = False
                            unbalanced = False

                            if middle.startswith("'") and not middle.endswith("'"):
                                match = True
                            elif middle.startswith('"') and not middle.endswith('"'):
                                match = True

                            # NOTE(review): `and` binds tighter than `or`, so
                            # the double-quote count alone can trigger this
                            # branch; confirm the intended grouping.
                            if (len(middle) > 0 and
                                    middle[0] in ['"', "'"] and
                                    middle[-1] in ['"', "'"] and
                                    target_line.count("'") > 2 or
                                    target_line.count('"') > 2):
                                unbalanced = True

                            if match:
                                error_message += YAML_COMMON_PARTIALLY_QUOTED_LINE_ERROR
                            if unbalanced:
                                error_message += YAML_COMMON_UNBALANCED_QUOTES_ERROR

        except (IOError, TypeError):
            error_message += '\n(could not open file to display line)'
        except IndexError:
            error_message += '\n(specified line no longer in file, maybe it changed?)'

        return error_message
class AnsibleAssertionError(AnsibleError, AssertionError):
    '''Invalid assertion'''
class AnsibleOptionsError(AnsibleError):
    ''' bad or incomplete options passed '''
class AnsibleParserError(AnsibleError):
    ''' something was detected early that is wrong about a playbook or data file '''
class AnsibleInternalError(AnsibleError):
    ''' internal safeguards tripped, something happened in the code that should never happen '''
class AnsibleRuntimeError(AnsibleError):
    ''' ansible had a problem while running a playbook '''
class AnsibleModuleError(AnsibleRuntimeError):
    ''' a module failed somehow '''
class AnsibleConnectionFailure(AnsibleRuntimeError):
    ''' the transport / connection_plugin had a fatal error '''
class AnsibleAuthenticationFailure(AnsibleConnectionFailure):
    '''invalid username/password/key'''
class AnsibleCallbackError(AnsibleRuntimeError):
    ''' a callback failure '''
class AnsibleTemplateError(AnsibleRuntimeError):
    '''A template related error'''
class AnsibleFilterError(AnsibleTemplateError):
    ''' a templating failure '''
class AnsibleLookupError(AnsibleTemplateError):
    ''' a lookup failure '''
class AnsibleUndefinedVariable(AnsibleTemplateError):
    ''' a templating failure '''
class AnsibleFileNotFound(AnsibleRuntimeError):
    ''' a file missing failure '''

    def __init__(self, message="", obj=None, show_content=True, suppress_extended_error=False, orig_exc=None, paths=None, file_name=None):

        self.file_name = file_name
        self.paths = paths

        # Build a detailed message: optional caller text, then the missing
        # file name, then the list of paths that were searched.
        if message:
            message += "\n"
        if self.file_name:
            message += "Could not find or access '%s'" % to_text(self.file_name)
        else:
            message += "Could not find file"

        # Only list search paths when a real sequence was provided.
        if self.paths and isinstance(self.paths, Sequence):
            searched = to_text('\n\t'.join(self.paths))
            if message:
                message += "\n"
            message += "Searched in:\n\t%s" % searched

        message += " on the Ansible Controller.\nIf you are using a module and expect the file to exist on the remote, see the remote_src option"

        super(AnsibleFileNotFound, self).__init__(message=message, obj=obj, show_content=show_content,
                                                  suppress_extended_error=suppress_extended_error, orig_exc=orig_exc)
# These Exceptions are temporary, using them as flow control until we can get a better solution.
# DO NOT USE as they will probably be removed soon.
# We will port the action modules in our tree to use a context manager instead.
class AnsibleAction(AnsibleRuntimeError):
    ''' Base Exception for Action plugin flow control '''

    def __init__(self, message="", obj=None, show_content=True, suppress_extended_error=False, orig_exc=None, result=None):
        super(AnsibleAction, self).__init__(message=message, obj=obj, show_content=show_content,
                                            suppress_extended_error=suppress_extended_error, orig_exc=orig_exc)
        # Carry the (possibly partial) task result along with the exception.
        self.result = result if result is not None else {}
class AnsibleActionSkip(AnsibleAction):
    ''' an action runtime skip'''

    def __init__(self, message="", obj=None, show_content=True, suppress_extended_error=False, orig_exc=None, result=None):
        super(AnsibleActionSkip, self).__init__(message=message, obj=obj, show_content=show_content,
                                                suppress_extended_error=suppress_extended_error, orig_exc=orig_exc, result=result)
        # Mark the carried result as skipped with the given reason.
        self.result.update(skipped=True, msg=message)
class AnsibleActionFail(AnsibleAction):
    ''' an action runtime failure'''

    def __init__(self, message="", obj=None, show_content=True, suppress_extended_error=False, orig_exc=None, result=None):
        super(AnsibleActionFail, self).__init__(message=message, obj=obj, show_content=show_content,
                                                suppress_extended_error=suppress_extended_error, orig_exc=orig_exc, result=result)
        # Mark the carried result as failed and attach the active traceback.
        self.result.update(failed=True, msg=message, exception=traceback.format_exc())
class _AnsibleActionDone(AnsibleAction):
    ''' an action runtime early exit'''
class AnsiblePluginError(AnsibleError):
    ''' base class for Ansible plugin-related errors that do not need AnsibleError contextual data '''
    def __init__(self, message=None, plugin_load_context=None):
        super(AnsiblePluginError, self).__init__(message)
        # Loader context describing which plugin was being resolved when
        # the error occurred; may be None.
        self.plugin_load_context = plugin_load_context
class AnsiblePluginRemovedError(AnsiblePluginError):
    ''' a requested plugin has been removed '''
class AnsiblePluginCircularRedirect(AnsiblePluginError):
    '''a cycle was detected in plugin redirection'''
class AnsibleCollectionUnsupportedVersionError(AnsiblePluginError):
    '''a collection is not supported by this version of Ansible'''
class AnsibleFilterTypeError(AnsibleTemplateError, TypeError):
    ''' a Jinja filter templating failure due to bad type'''
| gpl-3.0 |
WeLikeAlpacas/python-pubsub | qpaca/backend/rabbitmq.py | 3 | 2838 | import uuid
from datetime import datetime
from kombu.mixins import ConsumerMixin
from qpaca.backend.base import BaseSubscriber, BasePublisher
from qpaca.helpers import get_config, logger
from qpaca.backend.handlers import RabbitMQHandler
from qpaca.monitoring.influx import InfluxDB
class RabbitMQPublisher(BasePublisher, RabbitMQHandler):
    """Implements a RabbitMQ Publisher"""

    def __init__(self, config=None):
        # Falls back to the 'rabbitmq.publisher' section of the app config.
        self.config = config or get_config('rabbitmq').get('publisher', None)
        self.connection = self._connect()
        self.monitor = InfluxDB(
            'publisher', config or get_config('monitoring').get('influxdb'))

    def start(self):
        """Create everything necessary to send a message"""
        logger.debug('Starting RabbitMQ Publisher')
        self._exchange = self._create_exchange()
        self._producer = self._create_producer()

    def publish(self, message):
        """
        Send a message to RabbitMQ exchange
        return a unique id for future result query
        """
        message_id = str(uuid.uuid4())
        # Wrap the caller payload in the envelope consumers expect.
        message = {'payload': message,
                   'message_id': message_id,
                   'reply_to': None}
        self._producer.publish(
            message, exchange=self._exchange, **self.config.get('publish'))
        logger.info('Message sent: {0}'.format(message))
        # BUGFIX: the strftime format string previously ended with a stray
        # apostrophe ("%f'"), appending a literal quote to every timestamp
        # written to the monitoring backend.
        point = (datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f'), 1)
        self.monitor.write(point)
        return message_id
class RabbitMQSubscriber(ConsumerMixin, BaseSubscriber, RabbitMQHandler):
    """Implements a RabbitMQ Subscriber"""

    def __init__(self, config=None):
        # Falls back to the 'rabbitmq.subscriber' section of the app config.
        self.config = config or get_config('rabbitmq').get('subscriber', None)
        self.connection = self._connect()
        self.monitor = InfluxDB(
            'subscriber', config or get_config('monitoring').get('influxdb'))

    def start(self, callback):
        """Create everything necessary to receive a message"""
        logger.debug('Starting RabbitMQ Subscriber')
        self._exchange = self._create_exchange()
        self._queue = self._create_queue()
        self._callback = callback

    def run_forever(self):
        """Call kombu.ConsumerMixin run function

        It will start consume new messages
        """
        self.run()

    def get_consumers(self, consumer, channel):
        """Return a list with consumers"""
        return [consumer(
            queues=[self._queue],
            callbacks=[self.on_message],
            **self.config.get('consumer'))]

    def on_message(self, body, message):
        """it is called every time a new message is received"""
        logger.info('Message received: {0}'.format(body))
        self._callback(body, message)
        message.ack()
        # BUGFIX: the strftime format string previously ended with a stray
        # apostrophe ("%f'"), appending a literal quote to every timestamp
        # written to the monitoring backend.
        point = (datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f'), 1)
        self.monitor.write(point)
| mit |
olhoneles/olhoneles | montanha/management/commands/consolidate.py | 1 | 11482 | # -*- coding: utf-8 -*-
#
# Copyright (©) 2014 Gustavo Noronha Silva
# Copyright (©) 2016 Marcelo Jorge Vieira
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# This hack makes django less memory hungry (it caches queries when running
# with debug enabled.
import codecs
import sys
from datetime import date
from django.conf import settings
from django.core.management.base import BaseCommand
from django.db.models import Sum
from montanha.models import (
Institution, Expense, ExpenseNature, Legislator, Supplier, PerNature,
PerNatureByYear, PerNatureByMonth, PerLegislator, BiggestSupplierForYear
)
from montanha.util import (
filter_for_institution, get_date_ranges_from_data, ensure_years_in_range
)
# Keep Django from caching every executed query (it does so when DEBUG is
# on), which would make this long-running management command grow unbounded.
settings.DEBUG = False

# Wrap stdout/stderr so unicode institution/legislator names print safely
# under Python 2.
sys.stdout = codecs.getwriter("utf-8")(sys.stdout)
sys.stderr = codecs.getwriter("utf-8")(sys.stderr)
class Command(BaseCommand):
help = "Collects data for a number of sources"
def add_arguments(self, parser):
parser.add_argument('house', type=str, nargs='*', default='')
parser.add_argument(
'--agnostic',
action='store_true',
dest='agnostic',
default=False,
)
def handle(self, *args, **options):
for house in options.get('house'):
try:
institution = Institution.objects.get(siglum__iexact=house)
print u'Consolidating data for %s' % (institution.name)
self.per_nature(institution)
self.per_legislator(institution)
except Institution.DoesNotExist:
print u'Institution %s does not exist' % house
if options.get('agnostic'):
self.agnostic()
def per_nature(self, institution):
PerNature.objects.filter(institution=institution).delete()
PerNatureByYear.objects.filter(institution=institution).delete()
PerNatureByMonth.objects.filter(institution=institution).delete()
data = Expense.objects.all()
data = filter_for_institution(data, institution)
date_ranges = get_date_ranges_from_data(institution, data)
data = data \
.values('nature__id') \
.annotate(expensed=Sum('expensed')) \
.order_by('-expensed')
years = [d.year for d in Expense.objects.dates('date', 'year')]
years = ensure_years_in_range(date_ranges, years)
per_natures_to_create = list()
per_natures_by_year_to_create = list()
per_natures_by_month_to_create = list()
for item in data:
# Totals
nature = ExpenseNature.objects.get(id=item['nature__id'])
p = PerNature(
institution=institution,
date_start=date_ranges['cdf'],
date_end=date_ranges['cdt'],
nature=nature,
expensed=item['expensed']
)
per_natures_to_create.append(p)
# Totals for Legislature
per_natures_to_create += self._per_nature_total_for_legislature(
institution, nature
)
# By Year
year_to_create, month_to_create = self._per_nature_by_year(
years, institution, nature
)
per_natures_by_year_to_create += year_to_create
per_natures_by_month_to_create += month_to_create
PerNature.objects.bulk_create(per_natures_to_create)
PerNatureByMonth.objects.bulk_create(per_natures_by_month_to_create)
PerNatureByYear.objects.bulk_create(per_natures_by_year_to_create)
def _per_nature_total_for_legislature(self, institution, nature):
per_natures_to_create = list()
for legislature in institution.legislature_set.all():
print u'[%s] Consolidating nature %s totals for legislature %d-%d…' % (
institution.siglum,
nature.name,
legislature.date_start.year,
legislature.date_end.year
)
legislature_data = Expense.objects \
.filter(nature=nature) \
.filter(mandate__legislature=legislature)
legislature_ranges = get_date_ranges_from_data(institution, legislature_data)
legislature_data = legislature_data \
.values('nature__id') \
.annotate(expensed=Sum('expensed')) \
.order_by('-expensed')
if legislature_data:
legislature_data = legislature_data[0]
else:
legislature_data = dict(expensed='0.')
p = PerNature(
institution=institution,
legislature=legislature,
date_start=legislature_ranges['cdf'],
date_end=legislature_ranges['cdt'],
nature=nature,
expensed=legislature_data['expensed']
)
per_natures_to_create.append(p)
return per_natures_to_create
def _per_nature_by_year(self, years, institution, nature):
per_natures_by_year_to_create = list()
per_natures_by_month_to_create = list()
for year in years:
print u'[%s] Consolidating nature %s totals for year %d…' % (
institution.siglum, nature.name, year
)
year_data = Expense.objects \
.filter(nature=nature) \
.filter(date__year=year)
year_data = filter_for_institution(year_data, institution)
# By Month
per_natures_by_month_to_create += self._per_nature_by_month(
year_data, year, institution, nature
)
year_data = year_data \
.values('nature__id') \
.annotate(expensed=Sum("expensed"))
if year_data:
year_data = year_data[0]
else:
year_data = dict(expensed='0.')
p = PerNatureByYear(
institution=institution,
year=year,
nature=nature,
expensed=float(year_data['expensed'])
)
per_natures_by_year_to_create.append(p)
return per_natures_by_year_to_create, per_natures_by_month_to_create
def _per_nature_by_month(self, year_data, year, institution, nature):
per_natures_by_month_to_create = list()
last_date = year_data and year_data.order_by('-date')[0].date or date.today()
for month in range(1, 13):
print u'[%s] Consolidating nature %s totals for %d-%d…' % (
institution.siglum, nature.name, year, month
)
month_date = date(year, month, 1)
if month_date >= last_date:
break
mdata = year_data.filter(date__month=month) \
.values('nature__id') \
.annotate(expensed=Sum('expensed')) \
.order_by('-expensed')
if mdata:
mdata = mdata[0]
else:
mdata = dict(expensed='0.')
p = PerNatureByMonth(
institution=institution,
date=month_date,
nature=nature,
expensed=float(mdata['expensed'])
)
per_natures_by_month_to_create.append(p)
return per_natures_by_month_to_create
def per_legislator(self, institution):
PerLegislator.objects.filter(institution=institution).delete()
data = Expense.objects.all()
data = filter_for_institution(data, institution)
date_ranges = get_date_ranges_from_data(institution, data)
data = data \
.values('mandate__legislator__id') \
.annotate(expensed=Sum('expensed'))
per_legislators_to_create = list()
for item in data:
legislator = Legislator.objects.get(id=int(item['mandate__legislator__id']))
# Totals for Legislature
for legislature in institution.legislature_set.all():
print u'[%s] Consolidating legislator %s totals for legislature %d-%d…' % (
institution.siglum,
legislator.name,
legislature.date_start.year,
legislature.date_end.year
)
legislature_data = Expense.objects \
.filter(mandate__legislature=legislature) \
.filter(mandate__legislator=legislator) \
.values('mandate__legislator__id') \
.annotate(expensed=Sum('expensed')) \
.order_by('-expensed')
if legislature_data:
legislature_data = legislature_data[0]
else:
legislature_data = dict(expensed='0.')
p = PerLegislator(
institution=institution,
legislature=legislature,
date_start=date_ranges['cdf'],
date_end=date_ranges['cdt'],
legislator=legislator,
expensed=legislature_data['expensed']
)
per_legislators_to_create.append(p)
print u'[%s] Consolidating totals for legislator %s…' % (
institution.siglum, legislator.name
)
p = PerLegislator(
institution=institution,
date_start=date_ranges['cdf'],
date_end=date_ranges['cdt'],
legislator=legislator,
expensed=item['expensed']
)
per_legislators_to_create.append(p)
PerLegislator.objects.bulk_create(per_legislators_to_create)
    def agnostic(self):
        # Institution-agnostic consolidations - biggest suppliers
        # Recreates BiggestSupplierForYear: for every year that has expense
        # data, one row per supplier with that supplier's yearly total.
        print u'Consolidating institution-agnostic totals…'
        BiggestSupplierForYear.objects.all().delete()
        years = [d.year for d in Expense.objects.dates('date', 'year')]
        for year in years:
            print u'Consolidating supplier totals for year %d…' % year
            # Yearly totals grouped by supplier, biggest first.
            data = Expense.objects \
                .filter(date__year=year) \
                .values('supplier__id') \
                .annotate(expensed=Sum('expensed')) \
                .order_by('-expensed')
            biggest_suppliers_for_year_to_add = list()
            for item in data:
                supplier = Supplier.objects.get(id=item['supplier__id'])
                b = BiggestSupplierForYear(
                    supplier=supplier,
                    year=year,
                    expensed=item['expensed']
                )
                biggest_suppliers_for_year_to_add.append(b)
            # One bulk INSERT per year.
            BiggestSupplierForYear.objects.bulk_create(biggest_suppliers_for_year_to_add)
| agpl-3.0 |
siliconsmiley/QGIS | python/ext-libs/owslib/wcs.py | 30 | 1528 | # -*- coding: ISO-8859-15 -*-
# =============================================================================
# Copyright (c) 2004, 2006 Sean C. Gillies
# Copyright (c) 2007 STFC <http://www.stfc.ac.uk>
#
# Authors :
# Dominic Lowe <d.lowe@rl.ac.uk>
#
# Contact email: d.lowe@rl.ac.uk
# =============================================================================
"""
Web Coverage Server (WCS) methods and metadata. Factory function.
"""
import urllib2
import etree
from coverage import wcs100, wcs110, wcsBase
def WebCoverageService(url, version=None, xml=None, cookies=None, timeout=30):
    '''WCS factory function: returns a version-specific WebCoverageService object.

    When *version* is not given it is auto-detected by fetching and parsing
    the GetCapabilities document (unless *xml* already holds it).  Only WCS
    1.0.0 and 1.1.0 are handled; any other version yields None, mirroring
    the original behaviour.
    '''
    if version is None:
        if xml is None:
            reader = wcsBase.WCSCapabilitiesReader()
            request = reader.capabilities_url(url)
            if cookies is None:
                xml = urllib2.urlopen(request, timeout=timeout).read()
            else:
                req = urllib2.Request(request)
                req.add_header('Cookie', cookies)
                # Bug fix: read the response body.  etree.fromstring() below
                # needs a string, not the file-like object urlopen() returns
                # (the cookie-less branch above already called .read()).
                xml = urllib2.urlopen(req, timeout=timeout).read()
        capabilities = etree.etree.fromstring(xml)
        version = capabilities.get('version')
        del capabilities
    if version == '1.0.0':
        return wcs100.WebCoverageService_1_0_0.__new__(wcs100.WebCoverageService_1_0_0, url, xml, cookies)
    elif version == '1.1.0':
        return wcs110.WebCoverageService_1_1_0.__new__(wcs110.WebCoverageService_1_1_0, url, xml, cookies)
| gpl-2.0 |
js850/nested_sampling | examples/harmonic/run_hparticle_distributed.py | 1 | 3007 | import argparse
from nested_sampling import NestedSampling, MonteCarloWalker, Harmonic, run_nested_sampling, Replica
"""
To run this example a dispatcher and N workers must have been started beforehand
"""
def main():
    """Run nested sampling for a particle in an n-dimensional harmonic well.

    A dispatcher and N workers must have been started beforehand when a
    dispatcher URI is used (see module docstring).  Output files:
      run_hparticle.energies       -- one max-energy value per NS iteration
      run_hparticle.replicas_final -- live replica energies at termination
    """
    parser = argparse.ArgumentParser(description="do nested sampling on a particle in an n-dimensional Harmonic well")
    parser.add_argument("-K", "--nreplicas", type=int, help="number of replicas", default=300)
    parser.add_argument("-A", "--ndof", type=int, help="number of degrees of freedom", default=4)
    parser.add_argument("-P", "--nproc", type=int, help="number of processors", default=1)
    parser.add_argument("-N", "--nsteps", type=int, help="number of MC steps per NS iteration", default=100)
    parser.add_argument("--stepsize", type=float, help="stepsize, adapted between NS iterations", default=0.1)
    parser.add_argument("--etol", type=float, help="energy tolerance: the calculation terminates when the energy difference \
                        between Emax and Emin is less than etol", default=0.01)
    parser.add_argument("-q", action="store_true", help="turn off verbose printing of information at every step")
    parser.add_argument("--dispatcherURI", action="store_true", help="use URI of the dispatcher server in default location", default=False)
    parser.add_argument("--dispatcherURI-file", type=str, help="use URI of the dispatcher server if different from default", default=None)
    # set basic parameters
    args = parser.parse_args()
    ndof = args.ndof
    nproc = args.nproc
    nsteps = args.nsteps
    nreplicas = args.nreplicas
    stepsize = args.stepsize
    etol = args.etol
    # try to read the dispatcher URI from the default file location
    # (argparse exposes --dispatcherURI-file as args.dispatcherURI_file)
    if args.dispatcherURI is True:
        with open("dispatcher_uri.dat", "r") as rfile:
            dispatcherURI = rfile.read().replace('\n', '')
    elif args.dispatcherURI_file is not None:
        with open(args.dispatcherURI_file, "r") as rfile:
            dispatcherURI = rfile.read().replace('\n', '')
    else:
        dispatcherURI = None
    # construct potential (cost function)
    potential = Harmonic(ndof)
    # construct Monte Carlo walker
    mc_runner = MonteCarloWalker(potential, mciter=nsteps)
    # initialise replicas (an initial, uniformly sampled set of configurations)
    replicas = []
    for _ in xrange(nreplicas):
        x = potential.get_random_configuration()
        replicas.append(Replica(x, potential.get_energy(x)))
    # construct the Nested Sampling object and pass the dispatcher address
    ns = NestedSampling(replicas, mc_runner, stepsize=stepsize, nproc=nproc, dispatcher_URI=dispatcherURI,
                        max_stepsize=10, verbose=not args.q)
    # run Nested Sampling (NS); see docstring for output files
    run_nested_sampling(ns, label="run_hparticle", etol=etol)

if __name__ == "__main__":
    main()
aikramer2/spaCy | spacy/lang/nl/lex_attrs.py | 3 | 1353 | # coding: utf8
from __future__ import unicode_literals
from ...attrs import LIKE_NUM
_num_words = set("""
nul een één twee drie vier vijf zes zeven acht negen tien elf twaalf dertien
veertien twintig dertig veertig vijftig zestig zeventig tachtig negentig honderd
duizend miljoen miljard biljoen biljard triljoen triljard
""".split())

_ordinal_words = set("""
eerste tweede derde vierde vijfde zesde zevende achtste negende tiende elfde
twaalfde dertiende veertiende twintigste dertigste veertigste vijftigste
zestigste zeventigste tachtigste negentigste honderdste duizendste miljoenste
miljardste biljoenste biljardste triljoenste triljardste
""".split())


def like_num(text):
    """Return True if *text* looks like a number token.

    Checks digits (after stripping '.' and ','), simple fractions like
    "1/2", and a basic list of Dutch cardinal/ordinal words.  Compound
    words such as "drieëntwintig" are not handled; see
    https://github.com/explosion/spaCy/pull/1177 for discussion.
    """
    stripped = text.replace(',', '').replace('.', '')
    if stripped.isdigit():
        return True
    if stripped.count('/') == 1:
        numerator, denominator = stripped.split('/')
        if numerator.isdigit() and denominator.isdigit():
            return True
    lowered = stripped.lower()
    return lowered in _num_words or lowered in _ordinal_words
# Lexeme-attribute getters exported to the language class: wires the
# LIKE_NUM attribute to the Dutch-aware predicate above.
LEX_ATTRS = {
    LIKE_NUM: like_num
}
| mit |
timduru/platform-external-chromium_org | tools/telemetry/telemetry/core/chrome/desktop_browser_finder_unittest.py | 29 | 6927 | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
from telemetry.core import browser_options
from telemetry.core.chrome import desktop_browser_finder
from telemetry.unittest import system_stub
# This file verifies the logic for finding a browser instance on all platforms
# at once. It does so by providing stubs for the OS/sys/subprocess primitives
# that the underlying finding logic usually uses to locate a suitable browser.
# We prefer this approach to having to run the same test on every platform on
# which we want this code to work.
class FindTestBase(unittest.TestCase):
  """Shared fixture: stubs out os/subprocess/sys in desktop_browser_finder.

  Subclasses populate self._files (the stub filesystem) and set the stubbed
  sys.platform, then call DoFindAll()/DoFindAllTypes().
  """
  def setUp(self):
    self._options = browser_options.BrowserOptions()
    self._options.chrome_root = '../../../'
    # Replace the finder's OS/sys/subprocess primitives with stubs so the
    # same tests run identically on every host platform.
    self._stubs = system_stub.Override(desktop_browser_finder,
                                       ['os', 'subprocess', 'sys'])
  def tearDown(self):
    self._stubs.Restore()
  @property
  def _files(self):
    # The stub filesystem: a plain list of paths treated as existing files.
    return self._stubs.os.path.files
  def DoFindAll(self):
    return desktop_browser_finder.FindAllAvailableBrowsers(self._options)
  def DoFindAllTypes(self):
    browsers = self.DoFindAll()
    return [b.browser_type for b in browsers]
def has_type(array, browser_type):
  """Returns True if any browser in |array| has the given browser_type."""
  # any() short-circuits and avoids building an intermediate list, unlike
  # the original len([...]) != 0 form.
  return any(x.browser_type == browser_type for x in array)
class FindSystemTest(FindTestBase):
  """Finding a system-installed Chrome on Windows install locations."""
  def setUp(self):
    super(FindSystemTest, self).setUp()
    self._stubs.sys.platform = 'win32'
  def testFindProgramFiles(self):
    self._files.append(
        'C:\\Program Files\\Google\\Chrome\\Application\\chrome.exe')
    self._stubs.os.program_files = 'C:\\Program Files'
    self.assertTrue('system' in self.DoFindAllTypes())
  def testFindProgramFilesX86(self):
    self._files.append(
        'C:\\Program Files(x86)\\Google\\Chrome\\Application\\chrome.exe')
    self._stubs.os.program_files_x86 = 'C:\\Program Files(x86)'
    self.assertTrue('system' in self.DoFindAllTypes())
  def testFindLocalAppData(self):
    self._files.append(
        'C:\\Local App Data\\Google\\Chrome\\Application\\chrome.exe')
    self._stubs.os.local_app_data = 'C:\\Local App Data'
    self.assertTrue('system' in self.DoFindAllTypes())
class FindLocalBuildsTest(FindTestBase):
  """Finding a locally built chrome.exe in each supported output directory."""
  def setUp(self):
    super(FindLocalBuildsTest, self).setUp()
    self._stubs.sys.platform = 'win32'
  def testFindBuild(self):
    self._files.append('..\\..\\..\\build\\Release\\chrome.exe')
    self.assertTrue('release' in self.DoFindAllTypes())
  def testFindOut(self):
    self._files.append('..\\..\\..\\out\\Release\\chrome.exe')
    self.assertTrue('release' in self.DoFindAllTypes())
  def testFindSconsbuild(self):
    self._files.append('..\\..\\..\\sconsbuild\\Release\\chrome.exe')
    self.assertTrue('release' in self.DoFindAllTypes())
  def testFindXcodebuild(self):
    self._files.append('..\\..\\..\\xcodebuild\\Release\\chrome.exe')
    self.assertTrue('release' in self.DoFindAllTypes())
class OSXFindTest(FindTestBase):
  """Finding system, canary, local and content-shell browsers on macOS."""
  def setUp(self):
    super(OSXFindTest, self).setUp()
    self._stubs.sys.platform = 'darwin'
    self._files.append('/Applications/Google Chrome Canary.app/'
                       'Contents/MacOS/Google Chrome Canary')
    self._files.append('/Applications/Google Chrome.app/' +
                       'Contents/MacOS/Google Chrome')
    self._files.append(
      '../../../out/Release/Chromium.app/Contents/MacOS/Chromium')
    self._files.append(
      '../../../out/Debug/Chromium.app/Contents/MacOS/Chromium')
    self._files.append(
      '../../../out/Release/Content Shell.app/Contents/MacOS/Content Shell')
    self._files.append(
      '../../../out/Debug/Content Shell.app/Contents/MacOS/Content Shell')
  def testFindAll(self):
    # With every install location populated, all six browser types appear.
    types = self.DoFindAllTypes()
    self.assertEquals(
      set(types),
      set(['debug', 'release',
           'content-shell-debug', 'content-shell-release',
           'canary', 'system']))
class LinuxFindTest(FindTestBase):
  """Finding browsers on Linux, including `google-chrome` on $PATH."""
  def setUp(self):
    super(LinuxFindTest, self).setUp()
    self._stubs.sys.platform = 'linux2'
    self._files.append('/foo/chrome')
    self._files.append('../../../out/Release/chrome')
    self._files.append('../../../out/Debug/chrome')
    self._files.append('../../../out/Release/content_shell')
    self._files.append('../../../out/Debug/content_shell')
    self.has_google_chrome_on_path = False
    # Bind self so the stubbed subprocess.call can read the flag at call time.
    this = self
    def call_hook(*args, **kwargs): # pylint: disable=W0613
      if this.has_google_chrome_on_path:
        return 0
      # Simulate the executable not being on $PATH.
      raise OSError('Not found')
    self._stubs.subprocess.call = call_hook
  def testFindAllWithExact(self):
    types = self.DoFindAllTypes()
    self.assertEquals(
        set(types),
        set(['debug', 'release',
             'content-shell-debug', 'content-shell-release']))
  def testFindWithProvidedExecutable(self):
    self._options.browser_executable = '/foo/chrome'
    self.assertTrue('exact' in self.DoFindAllTypes())
  def testFindUsingDefaults(self):
    self.has_google_chrome_on_path = True
    self.assertTrue('release' in self.DoFindAllTypes())
    # Remove local builds one by one and check the fallback order.
    del self._files[1]
    self.has_google_chrome_on_path = True
    self.assertTrue('system' in self.DoFindAllTypes())
    self.has_google_chrome_on_path = False
    del self._files[1]
    self.assertEquals(['content-shell-debug', 'content-shell-release'],
                      self.DoFindAllTypes())
  def testFindUsingRelease(self):
    self.assertTrue('release' in self.DoFindAllTypes())
class WinFindTest(FindTestBase):
  """Finding browsers on Windows with every install location populated."""
  def setUp(self):
    super(WinFindTest, self).setUp()
    self._stubs.sys.platform = 'win32'
    self._stubs.os.local_app_data = 'c:\\Users\\Someone\\AppData\\Local'
    self._files.append('c:\\tmp\\chrome.exe')
    self._files.append('..\\..\\..\\build\\Release\\chrome.exe')
    self._files.append('..\\..\\..\\build\\Debug\\chrome.exe')
    self._files.append('..\\..\\..\\build\\Release\\content_shell.exe')
    self._files.append('..\\..\\..\\build\\Debug\\content_shell.exe')
    self._files.append(self._stubs.os.local_app_data + '\\' +
                       'Google\\Chrome\\Application\\chrome.exe')
    self._files.append(self._stubs.os.local_app_data + '\\' +
                       'Google\\Chrome SxS\\Application\\chrome.exe')
  def testFindAllGivenDefaults(self):
    types = self.DoFindAllTypes()
    self.assertEquals(set(types),
                      set(['debug', 'release',
                           'content-shell-debug', 'content-shell-release',
                           'system', 'canary']))
  def testFindAllWithExact(self):
    # Supplying an explicit executable adds the 'exact' type on top.
    self._options.browser_executable = 'c:\\tmp\\chrome.exe'
    types = self.DoFindAllTypes()
    self.assertEquals(
        set(types),
        set(['exact',
             'debug', 'release',
             'content-shell-debug', 'content-shell-release',
             'system', 'canary']))
| bsd-3-clause |
manasapte/pants | tests/python/pants_test/backend/codegen/tasks/test_ragel_gen.py | 12 | 2331 | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
from textwrap import dedent
from pants.backend.codegen.targets.java_ragel_library import JavaRagelLibrary
from pants.backend.codegen.tasks.ragel_gen import RagelGen, calculate_genfile
from pants.util.contextutil import temporary_file
from pants.util.dirutil import safe_mkdtemp
from pants_test.tasks.task_test_base import TaskTestBase
ragel_file_contents = dedent("""
package com.example.atoi;
%%{
machine parser;
action minus {
negative = true;
}
action digit {
val *= 10;
val += fc - '0';
}
main := ( '-'@minus )? ( [0-9] @digit ) + '\0';
}%%
public class Parser {
%% write data;
public static int parse(CharSequence input) {
StringBuilder builder = new StringBuilder(input);
builder.append('\0');
char[] data = builder.toString().toCharArray();
int p = 0;
int pe = data.length;
int eof = pe;
int cs;
boolean negative = false;
int val = 0;
%% write init;
%% write exec;
if (negative)
return -val;
else
return val;
}
}
""")
class RagelGenTest(TaskTestBase):
  """Tests for the RagelGen codegen task."""
  @classmethod
  def task_type(cls):
    return RagelGen
  def test_ragel_gen(self):
    # End-to-end: run codegen on the fixture and check the generated path.
    self.create_file(relpath='test_ragel_gen/atoi.rl', contents=ragel_file_contents)
    target = self.make_target(spec='test_ragel_gen:atoi',
                              target_type=JavaRagelLibrary,
                              sources=['atoi.rl'])
    task = self.create_task(self.context(target_roots=[target]))
    target_workdir = safe_mkdtemp(dir=self.test_workdir)
    task.execute_codegen(target, target_workdir)
    # Collect every generated file path relative to the workdir.
    generated_files = []
    for root, _, files in os.walk(target_workdir):
      generated_files.extend(os.path.relpath(os.path.join(root, f), target_workdir) for f in files)
    self.assertEqual(['com/example/atoi/Parser.java'], generated_files)
  def test_smoke(self):
    # calculate_genfile derives the output path from the .rl source alone.
    with temporary_file() as fp:
      fp.write(ragel_file_contents)
      fp.flush()
      self.assertEquals(calculate_genfile(fp.name), 'com/example/atoi/Parser.java')
| apache-2.0 |
marc-sensenich/ansible | lib/ansible/modules/network/slxos/slxos_vlan.py | 41 | 9651 | #!/usr/bin/python
#
# (c) 2018 Extreme Networks Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: slxos_vlan
version_added: "2.6"
author: "Lindsay Hill (@lindsayhill)"
short_description: Manage VLANs on Extreme Networks SLX-OS network devices
description:
- This module provides declarative management of VLANs
on Extreme SLX-OS network devices.
notes:
- Tested against SLX-OS 17s.1.02
options:
name:
description:
- Name of the VLAN.
vlan_id:
description:
- ID of the VLAN. Range 1-4094.
required: true
interfaces:
description:
- List of interfaces that should be associated to the VLAN.
required: true
delay:
description:
- Delay the play should wait to check for declarative intent params values.
default: 10
aggregate:
description: List of VLANs definitions.
purge:
description:
- Purge VLANs not defined in the I(aggregate) parameter.
type: bool
default: no
state:
description:
- State of the VLAN configuration.
default: present
choices: ['present', 'absent']
"""
EXAMPLES = """
- name: Create vlan
slxos_vlan:
vlan_id: 100
name: test-vlan
state: present
- name: Add interfaces to VLAN
slxos_vlan:
vlan_id: 100
interfaces:
- Ethernet 0/1
- Ethernet 0/2
- name: Delete vlan
slxos_vlan:
vlan_id: 100
state: absent
"""
RETURN = """
commands:
description: The list of configuration mode commands to send to the device
returned: always
type: list
sample:
- vlan 100
- name test-vlan
"""
import re
import time
from copy import deepcopy
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.common.utils import remove_default_spec
from ansible.module_utils.network.slxos.slxos import load_config, run_commands
def search_obj_in_list(vlan_id, lst):
    """Return the first dict in *lst* whose 'vlan_id' equals *vlan_id*.

    Returns None when no entry matches.  (The original also created an
    unused ``obj = list()`` local, removed here.)
    """
    return next((o for o in lst if o['vlan_id'] == vlan_id), None)
def map_obj_to_commands(updates, module):
    """Diff wanted VLANs against device state and emit CLI commands.

    *updates* is a (want, have) pair of lists of VLAN dicts.  Returns the
    ordered list of configuration-mode commands needed to converge.
    """
    commands = list()
    want, have = updates
    purge = module.params['purge']
    for w in want:
        vlan_id = w['vlan_id']
        name = w['name']
        interfaces = w['interfaces']
        state = w['state']
        obj_in_have = search_obj_in_list(vlan_id, have)
        if state == 'absent':
            # Delete only if the VLAN actually exists on the device.
            if obj_in_have:
                commands.append('no vlan %s' % vlan_id)
        elif state == 'present':
            if not obj_in_have:
                # VLAN missing entirely: create it, name it, attach ports.
                commands.append('vlan %s' % vlan_id)
                if name:
                    commands.append('name %s' % name)
                if interfaces:
                    for i in interfaces:
                        commands.append('interface %s' % i)
                        commands.append('switchport')
                        commands.append('switchport mode access')
                        commands.append('switchport access vlan %s' % vlan_id)
            else:
                # VLAN exists: reconcile name and interface membership.
                if name:
                    if name != obj_in_have['name']:
                        commands.append('vlan %s' % vlan_id)
                        commands.append('name %s' % name)
                if interfaces:
                    if not obj_in_have['interfaces']:
                        for i in interfaces:
                            # NOTE(review): trailing space in 'vlan %s ' is
                            # inconsistent with the other branches — confirm
                            # the device CLI tolerates it before changing.
                            commands.append('vlan %s ' % vlan_id)
                            commands.append('interface %s' % i)
                            commands.append('switchport')
                            commands.append('switchport mode access')
                            commands.append('switchport access vlan %s' % vlan_id)
                    elif set(interfaces) != set(obj_in_have['interfaces']):
                        # Add the interfaces that are wanted but absent...
                        missing_interfaces = list(set(interfaces) - set(obj_in_have['interfaces']))
                        for i in missing_interfaces:
                            commands.append('vlan %s' % vlan_id)
                            commands.append('interface %s' % i)
                            commands.append('switchport')
                            commands.append('switchport mode access')
                            commands.append('switchport access vlan %s' % vlan_id)
                        # ...and remove the ones present but not wanted.
                        superfluous_interfaces = list(set(obj_in_have['interfaces']) - set(interfaces))
                        for i in superfluous_interfaces:
                            commands.append('vlan %s' % vlan_id)
                            commands.append('interface %s' % i)
                            commands.append('switchport mode access')
                            commands.append('no switchport access vlan %s' % vlan_id)
    if purge:
        # Remove every device VLAN not in want, except the default VLAN 1.
        for h in have:
            obj_in_want = search_obj_in_list(h['vlan_id'], want)
            if not obj_in_want and h['vlan_id'] != '1':
                commands.append('no vlan %s' % h['vlan_id'])
    return commands
def map_params_to_obj(module):
    """Normalize module parameters into a list of wanted-VLAN dicts.

    With ``aggregate`` set, each aggregate item is completed from the
    top-level parameters; otherwise a single entry is built directly.
    ``vlan_id`` is always coerced to a string.
    """
    params = module.params
    aggregate = params.get('aggregate')
    if not aggregate:
        # Single-VLAN invocation: one entry straight from the parameters.
        return [{
            'vlan_id': str(params['vlan_id']),
            'name': params['name'],
            'interfaces': params['interfaces'],
            'state': params['state']
        }]
    wanted = []
    for item in aggregate:
        # Fill unset per-item keys from the top-level parameters
        # (mutating the item in place, as the caller expects).
        for key in item:
            if item.get(key) is None:
                item[key] = params[key]
        entry = item.copy()
        entry['vlan_id'] = str(entry['vlan_id'])
        wanted.append(entry)
    return wanted
def map_config_to_obj(module):
    """Parse `show vlan brief` output into a list of VLAN dicts.

    Each dict holds 'vlan_id', 'name' and 'interfaces'.  Continuation
    lines (extra member ports for the previous VLAN) have an empty
    vlan-id column and are appended to the previous entry.
    """
    output = run_commands(module, ['show vlan brief'])
    # Skip the 5 header lines of the table.
    lines = output[0].strip().splitlines()[5:]
    if not lines:
        return list()
    objs = list()
    obj = {}
    for l in lines:
        # Columns: vlan-id (optional), 15-char name (optional), state,
        # member port with (u)/(t) tagging suffix.
        splitted_line = re.split(r'([0-9]+)? +(\S.{14})? +(ACTIVE|INACTIVE\(.+?\))? +(Eth .+?|Po .+?)\([ut]\)\s*$', l.rstrip())
        if len(splitted_line) == 1:
            # Handle situation where VLAN is configured, but has no associated ports
            inactive = re.match(r'([0-9]+)? +(\S.{14}) +INACTIVE\(no member port\)$', l.rstrip())
            if inactive:
                splitted_line = ['', inactive.groups()[0], inactive.groups()[1], '', '']
            else:
                continue
        # Expand the abbreviated interface names used in the table.
        splitted_line[4] = splitted_line[4].replace('Eth', 'Ethernet').replace('Po', 'Port-channel')
        if splitted_line[1] is None:
            # Continuation line: another member port for the current VLAN.
            obj['interfaces'].append(splitted_line[4])
            continue
        obj = {}
        obj['vlan_id'] = splitted_line[1]
        obj['name'] = splitted_line[2].strip()
        obj['interfaces'] = [splitted_line[4]]
        objs.append(obj)
    return objs
def check_declarative_intent_params(want, module):
    """After a delay, verify every requested interface is on its VLAN.

    Re-reads device state and calls module.fail_json() for the first
    wanted interface that is missing from its VLAN's membership.
    """
    if not module.params['interfaces']:
        return
    # Give the device time to apply the change before re-reading state.
    time.sleep(module.params['delay'])
    have = map_config_to_obj(module)
    for wanted in want:
        have_obj = search_obj_in_list(wanted['vlan_id'], have)
        for intf in wanted['interfaces']:
            if have_obj and 'interfaces' in have_obj and intf not in have_obj['interfaces']:
                module.fail_json(msg="Interface %s not configured on vlan %s" % (intf, wanted['vlan_id']))
def main():
    """ main entry point for module execution
    """
    # Spec shared by the top-level parameters and each aggregate item.
    element_spec = dict(
        vlan_id=dict(type='int'),
        name=dict(),
        interfaces=dict(type='list'),
        delay=dict(default=10, type='int'),
        state=dict(default='present',
                   choices=['present', 'absent'])
    )
    aggregate_spec = deepcopy(element_spec)
    # Inside an aggregate item the vlan_id is mandatory.
    aggregate_spec['vlan_id'] = dict(required=True)
    # remove default in aggregate spec, to handle common arguments
    remove_default_spec(aggregate_spec)
    argument_spec = dict(
        aggregate=dict(type='list', elements='dict', options=aggregate_spec),
        purge=dict(default=False, type='bool')
    )
    argument_spec.update(element_spec)
    required_one_of = [['vlan_id', 'aggregate']]
    mutually_exclusive = [['vlan_id', 'aggregate']]
    module = AnsibleModule(argument_spec=argument_spec,
                           required_one_of=required_one_of,
                           mutually_exclusive=mutually_exclusive,
                           supports_check_mode=True)
    # NOTE(review): warnings is never populated, so the branch below is
    # currently dead — kept for parity with other network modules.
    warnings = list()
    result = {'changed': False}
    if warnings:
        result['warnings'] = warnings
    # Diff wanted state against device state and apply the delta.
    want = map_params_to_obj(module)
    have = map_config_to_obj(module)
    commands = map_obj_to_commands((want, have), module)
    result['commands'] = commands
    if commands:
        if not module.check_mode:
            load_config(module, commands)
        result['changed'] = True
    if result['changed']:
        # Declarative-intent verification (interfaces really joined).
        check_declarative_intent_params(want, module)
    module.exit_json(**result)

if __name__ == '__main__':
    main()
tillahoffmann/tensorflow | tensorflow/contrib/boosted_trees/estimator_batch/custom_loss_head.py | 87 | 3030 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Implementation of `head.Head` with custom loss and link function."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.learn.python.learn.estimators import head as head_lib
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
class CustomLossHead(head_lib._RegressionHead): # pylint: disable=protected-access
  """A Head object with custom loss function and link function."""
  def __init__(self,
               loss_fn,
               link_fn,
               logit_dimension,
               head_name=None,
               weight_column_name=None,
               metrics_fn=None):
    """`Head` for specifying arbitrary loss function.
    Args:
      loss_fn: Loss function.
      link_fn: Function that converts logits to prediction.
      logit_dimension: Number of dimensions for the logits.
      head_name: name of the head. Predictions, summary, metrics keys are
        suffixed by `"/" + head_name` and the default variable scope is
        `head_name`.
      weight_column_name: A string defining feature column name representing
        weights. It is used to down weight or boost examples during training. It
        will be multiplied by the loss of the example.
      metrics_fn: a function that takes predictions dict, labels and weights and
        returns a dictionary of metrics to be calculated.
    """
    # Adapt loss_fn to the (labels, logits, weight_tensor) signature the
    # parent head expects, returning (weighted mean loss, unweighted mean).
    def loss_wrapper(labels, logits, weight_tensor):
      if weight_tensor is None:
        # Default to unit weights, one per example in the batch.
        weight_tensor = array_ops.ones(
            shape=[array_ops.shape(labels)[0], 1], dtype=dtypes.float32)
      weighted_loss, _ = loss_fn(labels, weight_tensor, logits)
      average_loss = math_ops.reduce_mean(weighted_loss)
      return average_loss, average_loss / math_ops.reduce_mean(weight_tensor)
    super(CustomLossHead, self).__init__(
        loss_fn=loss_wrapper,
        link_fn=link_fn,
        head_name=head_name,
        weight_column_name=weight_column_name,
        enable_centered_bias=False,
        label_dimension=logit_dimension)
    self._metrics_fn = metrics_fn
  def _metrics(self, eval_loss, predictions, labels, weights):
    # Delegate metric computation to the user callback when provided;
    # otherwise fall back to the parent's default (returns None here).
    if self._metrics_fn is not None:
      return self._metrics_fn(predictions, labels, weights)
| apache-2.0 |
yesudeep/greatship | app/console/test/appengine_test.py | 11 | 2722 | #!/usr/bin/env python
#
# testpaths.py - Set up the correct sys.path for the test suite to run.
#
# Copyright 2008-2009 Proven Corporation Co., Ltd., Thailand
#
# This file is part of App Engine Console.
#
# App Engine Console is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# App Engine Console is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with App Engine Console; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import os
import time
import unittest
import test_environment
from google.appengine.api import apiproxy_stub_map
from google.appengine.api import datastore_file_stub
from google.appengine.api import mail_stub
from google.appengine.api import urlfetch_stub
from google.appengine.api import user_service_stub
#from google3.apphosting.api import urlfetch_stub
#from google3.apphosting.api import user_service_stub
APP_ID = u'test_app'
AUTH_DOMAIN = 'gmail.com'
LOGGED_IN_USER = 'test@example.com' # set to '' for no logged in user


def initialSetup():
    """Prepare the process environment once, before any App Engine code runs."""
    environment = {
        'TZ': 'UTC',
        'USER_EMAIL': LOGGED_IN_USER,
        'AUTH_DOMAIN': AUTH_DOMAIN,
        'REMOTE_ADDR': '127.0.0.1',
        'APPLICATION_ID': APP_ID,
        'SERVER_SOFTWARE': 'App Engine Testing',
        # 'app_version' is expected to be pre-set in the environment.
        'CURRENT_VERSION_ID': os.environ['app_version'],
    }
    os.environ.update(environment)
class AppEngineTest(unittest.TestCase):
    """Base TestCase that gives each test a fresh set of App Engine stubs."""
    def setUp(self):
        # Ensure we're in UTC.
        time.tzset()
        # Start with a fresh api proxy.
        apiproxy_stub_map.apiproxy = apiproxy_stub_map.APIProxyStubMap()
        # Use a fresh stub datastore.
        stub = datastore_file_stub.DatastoreFileStub(APP_ID, '/dev/null', '/dev/null')
        apiproxy_stub_map.apiproxy.RegisterStub('datastore_v3', stub)
        # Use a fresh stub UserService.
        apiproxy_stub_map.apiproxy.RegisterStub('user', user_service_stub.UserServiceStub())
        # Use a fresh urlfetch stub.
        apiproxy_stub_map.apiproxy.RegisterStub('urlfetch', urlfetch_stub.URLFetchServiceStub())
        # Use a fresh mail stub.
        apiproxy_stub_map.apiproxy.RegisterStub('mail', mail_stub.MailServiceStub())

# Module-level: configure the environment once at import time.
initialSetup()
| mit |
glovebx/odoo | openerp/addons/base/tests/test_view_validation.py | 396 | 3427 | # This test can be run stand-alone with something like:
# > PYTHONPATH=. python2 openerp/tests/test_view_validation.py
from lxml import etree
from StringIO import StringIO
import unittest2
from openerp.tools.view_validation import (valid_page_in_book, valid_att_in_form, valid_type_in_colspan,
valid_type_in_col, valid_att_in_field, valid_att_in_label,
valid_field_in_graph, valid_field_in_tree
)
invalid_form = etree.parse(StringIO('''\
<form>
<label></label>
<group>
<div>
<page></page>
<label colspan="True"></label>
<field></field>
</div>
</group>
<notebook>
<page>
<group col="Two">
<div>
<label></label>
<field colspan="Five"> </field>
</div>
</group>
</page>
</notebook>
</form>
''')).getroot()
valid_form = etree.parse(StringIO('''\
<form string="">
<field name=""></field>
<field name=""></field>
<notebook>
<page>
<field name=""></field>
<label string=""></label>
<field name=""></field>
</page>
<page>
<group colspan="5" col="2">
<label for=""></label>
<label string="" colspan="5"></label>
</group>
</page>
</notebook>
</form>
''')).getroot()
invalid_graph = etree.parse(StringIO('''\
<graph>
<label/>
<group>
<div>
<field></field>
<field></field>
</div>
</group>
</graph>
''')).getroot()
valid_graph = etree.parse(StringIO('''\
<graph string="">
<field name=""></field>
<field name=""></field>
</graph>
''')).getroot()
invalid_tree = etree.parse(StringIO('''\
<tree>
<group>
<div>
<field></field>
<field></field>
</div>
</group>
</tree>
''')).getroot()
valid_tree = etree.parse(StringIO('''\
<tree string="">
<field name=""></field>
<field name=""></field>
<button/>
<field name=""></field>
</tree>
''')).getroot()
class test_view_validation(unittest2.TestCase):
    """ Test the view validation code (but not the views themselves). """
    # Each test exercises one validator against the matching invalid/valid
    # fixture pair defined above.
    def test_page_validation(self):
        assert not valid_page_in_book(invalid_form)
        assert valid_page_in_book(valid_form)
    def test_all_field_validation(self):
        assert not valid_att_in_field(invalid_form)
        assert valid_att_in_field(valid_form)
    def test_all_label_validation(self):
        assert not valid_att_in_label(invalid_form)
        assert valid_att_in_label(valid_form)
    def test_form_string_validation(self):
        assert valid_att_in_form(valid_form)
    def test_graph_validation(self):
        assert not valid_field_in_graph(invalid_graph)
        assert valid_field_in_graph(valid_graph)
    def test_tree_validation(self):
        assert not valid_field_in_tree(invalid_tree)
        assert valid_field_in_tree(valid_tree)
    def test_colspan_datatype_validation(self):
        assert not valid_type_in_colspan(invalid_form)
        assert valid_type_in_colspan(valid_form)
    def test_col_datatype_validation(self):
        assert not valid_type_in_col(invalid_form)
        assert valid_type_in_col(valid_form)
if __name__ == '__main__':
    unittest2.main()
| agpl-3.0 |
vjmac15/Lyilis | lib/youtube_dl/extractor/eyedotv.py | 59 | 2687 | # coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import (
xpath_text,
parse_duration,
ExtractorError,
)
class EyedoTVIE(InfoExtractor):
    """Extractor for eyedo.tv live streams and replays (HLS)."""
    _VALID_URL = r'https?://(?:www\.)?eyedo\.tv/[^/]+/(?:#!/)?Live/Detail/(?P<id>[0-9]+)'
    _TEST = {
        'url': 'https://www.eyedo.tv/en-US/#!/Live/Detail/16301',
        'md5': 'ba14f17995cdfc20c36ba40e21bf73f7',
        'info_dict': {
            'id': '16301',
            'ext': 'mp4',
            'title': 'Journée du conseil scientifique de l\'Afnic 2015',
            'description': 'md5:4abe07293b2f73efc6e1c37028d58c98',
            'uploader': 'Afnic Live',
            'uploader_id': '8023',
        }
    }
    # Wowza streaming root used to build the m3u8 playlist URLs.
    _ROOT_URL = 'http://live.eyedo.net:1935/'
    def _real_extract(self, url):
        video_id = self._match_id(url)
        # API response is XML in a WCF data-contract namespace.
        video_data = self._download_xml('http://eyedo.tv/api/live/GetLive/%s' % video_id, video_id)
        def _add_ns(path):
            # Qualify a bare element name with the API's XML namespace.
            return self._xpath_ns(path, 'http://schemas.datacontract.org/2004/07/EyeDo.Core.Implementation.Web.ViewModels.Api')
        title = xpath_text(video_data, _add_ns('Titre'), 'title', True)
        state_live_code = xpath_text(video_data, _add_ns('StateLiveCode'), 'title', True)
        # 'avenir' (French: upcoming) means the event has not started yet.
        if state_live_code == 'avenir':
            raise ExtractorError(
                '%s said: We\'re sorry, but this video is not yet available.' % self.IE_NAME,
                expected=True)
        is_live = state_live_code == 'live'
        m3u8_url = None
        # http://eyedo.tv/Content/Html5/Scripts/html5view.js
        if is_live:
            # Live events are served either via a CDN or straight from Wowza.
            if xpath_text(video_data, 'Cdn') == 'true':
                m3u8_url = 'http://rrr.sz.xlcdn.com/?account=eyedo&file=A%s&type=live&service=wowza&protocol=http&output=playlist.m3u8' % video_id
            else:
                m3u8_url = self._ROOT_URL + 'w/%s/eyedo_720p/playlist.m3u8' % video_id
        else:
            # Finished events are available as MP4 replays.
            m3u8_url = self._ROOT_URL + 'replay-w/%s/mp4:%s.mp4/playlist.m3u8' % (video_id, video_id)
        return {
            'id': video_id,
            'title': title,
            'formats': self._extract_m3u8_formats(
                m3u8_url, video_id, 'mp4', 'm3u8_native'),
            'description': xpath_text(video_data, _add_ns('Description')),
            'duration': parse_duration(xpath_text(video_data, _add_ns('Duration'))),
            'uploader': xpath_text(video_data, _add_ns('Createur')),
            'uploader_id': xpath_text(video_data, _add_ns('CreateurId')),
            'chapter': xpath_text(video_data, _add_ns('ChapitreTitre')),
            'chapter_id': xpath_text(video_data, _add_ns('ChapitreId')),
        }
| gpl-3.0 |
stutivarshney/Bal-Aveksha | WebServer/BalAvekshaEnv/lib/python3.5/site-packages/django/test/utils.py | 28 | 23350 | import logging
import re
import sys
import time
import warnings
from contextlib import contextmanager
from functools import wraps
from unittest import TestCase, skipIf, skipUnless
from xml.dom.minidom import Node, parseString
from django.apps import apps
from django.apps.registry import Apps
from django.conf import UserSettingsHolder, settings
from django.core import mail
from django.core.signals import request_started
from django.db import reset_queries
from django.db.models.options import Options
from django.http import request
from django.template import Template
from django.test.signals import setting_changed, template_rendered
from django.urls import get_script_prefix, set_script_prefix
from django.utils import six
from django.utils.decorators import available_attrs
from django.utils.encoding import force_str
from django.utils.translation import deactivate
try:
import jinja2
except ImportError:
jinja2 = None
# Public API of this module.
__all__ = (
    'Approximate', 'ContextList', 'isolate_lru_cache', 'get_runner',
    'modify_settings', 'override_settings',
    'requires_tz_support',
    'setup_test_environment', 'teardown_test_environment',
)
# Whether the OS lets us change the process timezone (POSIX tzset();
# not available on Windows).
TZ_SUPPORT = hasattr(time, 'tzset')
class Approximate(object):
    """Wrapper that compares equal to any value matching ``val`` up to
    ``places`` decimal places (like assertAlmostEqual, but as an object)."""

    def __init__(self, val, places=7):
        self.val = val
        self.places = places

    def __repr__(self):
        return repr(self.val)

    def __eq__(self, other):
        # Exact match first; otherwise accept a rounding-error difference.
        if self.val == other:
            return True
        delta = abs(self.val - other)
        return round(delta, self.places) == 0
class ContextList(list):
    """A list of template context objects that also supports dict-style key
    lookup, searching every contained context in order."""

    def __getitem__(self, key):
        if not isinstance(key, six.string_types):
            # Plain integer/slice indexing falls through to list behavior.
            return super(ContextList, self).__getitem__(key)
        for subcontext in self:
            if key in subcontext:
                return subcontext[key]
        raise KeyError(key)

    def __contains__(self, key):
        try:
            self[key]
        except KeyError:
            return False
        return True

    def keys(self):
        """Return the union of keys across every dict of every subcontext."""
        result = set()
        for subcontext in self:
            for d in subcontext:
                result.update(d.keys())
        return result
def instrumented_test_render(self, context):
    """
    An instrumented Template render method, providing a signal
    that can be intercepted by the test system Client
    """
    # Fires template_rendered so the test client can record which templates
    # and contexts were used; installed by setup_test_environment().
    template_rendered.send(sender=self, template=self, context=context)
    return self.nodelist.render(context)
def setup_test_environment():
    """
    Perform global pre-test setup, such as installing the instrumented template
    renderer and setting the email backend to the locmem email backend.
    """
    # Swap in the signal-emitting render so tests can observe templates.
    Template._original_render = Template._render
    Template._render = instrumented_test_render
    # Storing previous values in the settings module itself is problematic.
    # Store them in arbitrary (but related) modules instead. See #20636.
    mail._original_email_backend = settings.EMAIL_BACKEND
    settings.EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
    request._original_allowed_hosts = settings.ALLOWED_HOSTS
    settings.ALLOWED_HOSTS = ['*']
    # Sent messages accumulate here so tests can assert on them.
    mail.outbox = []
    # Deactivate any active translation to start from a known locale state.
    deactivate()
def teardown_test_environment():
    """
    Perform any global post-test teardown, such as restoring the original
    template renderer and restoring the email sending functions.
    """
    # Undo everything setup_test_environment() installed.
    Template._render = Template._original_render
    del Template._original_render
    settings.EMAIL_BACKEND = mail._original_email_backend
    del mail._original_email_backend
    settings.ALLOWED_HOSTS = request._original_allowed_hosts
    del request._original_allowed_hosts
    del mail.outbox
def get_runner(settings, test_runner_class=None):
    """
    Resolve the dotted path of the configured test runner (or the one given
    explicitly) and return the runner class itself.
    """
    if not test_runner_class:
        test_runner_class = settings.TEST_RUNNER
    parts = test_runner_class.split('.')
    # Historical quirk: a bare class name (no dots) imports from '.'.
    module_name = '.'.join(parts[:-1]) if len(parts) > 1 else '.'
    module = __import__(module_name, {}, {}, force_str(parts[-1]))
    return getattr(module, parts[-1])
class TestContextDecorator(object):
    """
    A base class that can either be used as a context manager during tests
    or as a test function or unittest.TestCase subclass decorator to perform
    temporary alterations.
    `attr_name`: attribute assigned the return value of enable() if used as
    a class decorator.
    `kwarg_name`: keyword argument passing the return value of enable() if
    used as a function decorator.
    """
    def __init__(self, attr_name=None, kwarg_name=None):
        self.attr_name = attr_name
        self.kwarg_name = kwarg_name
    def enable(self):
        # Subclasses apply the temporary alteration here; whatever is
        # returned is exposed via attr_name/kwarg_name.
        raise NotImplementedError
    def disable(self):
        # Subclasses revert whatever enable() changed.
        raise NotImplementedError
    def __enter__(self):
        return self.enable()
    def __exit__(self, exc_type, exc_value, traceback):
        self.disable()
    def decorate_class(self, cls):
        # Class decoration: wrap setUp/tearDown so the alteration brackets
        # every test method of the TestCase.
        if issubclass(cls, TestCase):
            decorated_setUp = cls.setUp
            decorated_tearDown = cls.tearDown
            def setUp(inner_self):
                context = self.enable()
                if self.attr_name:
                    setattr(inner_self, self.attr_name, context)
                decorated_setUp(inner_self)
            def tearDown(inner_self):
                decorated_tearDown(inner_self)
                self.disable()
            cls.setUp = setUp
            cls.tearDown = tearDown
            return cls
        raise TypeError('Can only decorate subclasses of unittest.TestCase')
    def decorate_callable(self, func):
        # Function decoration: run the wrapped callable inside `with self`.
        @wraps(func, assigned=available_attrs(func))
        def inner(*args, **kwargs):
            with self as context:
                if self.kwarg_name:
                    kwargs[self.kwarg_name] = context
                return func(*args, **kwargs)
        return inner
    def __call__(self, decorated):
        # Dispatch on what is being decorated: a class vs. a plain callable.
        if isinstance(decorated, type):
            return self.decorate_class(decorated)
        elif callable(decorated):
            return self.decorate_callable(decorated)
        raise TypeError('Cannot decorate object of type %s' % type(decorated))
class override_settings(TestContextDecorator):
    """
    Acts as either a decorator or a context manager. If it's a decorator it
    takes a function and returns a wrapped function. If it's a contextmanager
    it's used with the ``with`` statement. In either event entering/exiting
    are called before and after, respectively, the function/block is executed.
    """
    def __init__(self, **kwargs):
        self.options = kwargs
        super(override_settings, self).__init__()
    def enable(self):
        # Keep this code at the beginning to leave the settings unchanged
        # in case it raises an exception because INSTALLED_APPS is invalid.
        if 'INSTALLED_APPS' in self.options:
            try:
                apps.set_installed_apps(self.options['INSTALLED_APPS'])
            except Exception:
                apps.unset_installed_apps()
                raise
        # Overlay the overridden values on top of the real settings object.
        override = UserSettingsHolder(settings._wrapped)
        for key, new_value in self.options.items():
            setattr(override, key, new_value)
        self.wrapped = settings._wrapped
        settings._wrapped = override
        # Notify listeners (caches, connections, ...) of each changed setting.
        for key, new_value in self.options.items():
            setting_changed.send(sender=settings._wrapped.__class__,
                                 setting=key, value=new_value, enter=True)
    def disable(self):
        if 'INSTALLED_APPS' in self.options:
            apps.unset_installed_apps()
        # Restore the original settings object and announce reverted values.
        settings._wrapped = self.wrapped
        del self.wrapped
        for key in self.options:
            new_value = getattr(settings, key, None)
            setting_changed.send(sender=settings._wrapped.__class__,
                                 setting=key, value=new_value, enter=False)
    def save_options(self, test_func):
        # Stash the overrides on the test class/function; the test framework
        # applies them later (see decorate_class).
        if test_func._overridden_settings is None:
            test_func._overridden_settings = self.options
        else:
            # Duplicate dict to prevent subclasses from altering their parent.
            test_func._overridden_settings = dict(
                test_func._overridden_settings, **self.options)
    def decorate_class(self, cls):
        from django.test import SimpleTestCase
        if not issubclass(cls, SimpleTestCase):
            raise ValueError(
                "Only subclasses of Django SimpleTestCase can be decorated "
                "with override_settings")
        self.save_options(cls)
        return cls
class modify_settings(override_settings):
    """
    Like override_settings, but makes it possible to append, prepend or remove
    items instead of redefining the entire list.
    """
    def __init__(self, *args, **kwargs):
        if args:
            # Hack used when instantiating from SimpleTestCase.setUpClass.
            assert not kwargs
            self.operations = args[0]
        else:
            assert not args
            self.operations = list(kwargs.items())
        # Deliberately skip override_settings.__init__: self.options is
        # computed from the operations in enable().
        super(override_settings, self).__init__()
    def save_options(self, test_func):
        if test_func._modified_settings is None:
            test_func._modified_settings = self.operations
        else:
            # Duplicate list to prevent subclasses from altering their parent.
            test_func._modified_settings = list(
                test_func._modified_settings) + self.operations
    def enable(self):
        # Translate the append/prepend/remove operations into plain overrides,
        # then let override_settings.enable() apply them.
        self.options = {}
        for name, operations in self.operations:
            try:
                # When called from SimpleTestCase.setUpClass, values may be
                # overridden several times; cumulate changes.
                value = self.options[name]
            except KeyError:
                value = list(getattr(settings, name, []))
            for action, items in operations.items():
                # items may be a single value or an iterable.
                if isinstance(items, six.string_types):
                    items = [items]
                if action == 'append':
                    value = value + [item for item in items if item not in value]
                elif action == 'prepend':
                    value = [item for item in items if item not in value] + value
                elif action == 'remove':
                    value = [item for item in value if item not in items]
                else:
                    raise ValueError("Unsupported action: %s" % action)
            self.options[name] = value
        super(modify_settings, self).enable()
class override_system_checks(TestContextDecorator):
    """
    Acts as a decorator. Overrides list of registered system checks.
    Useful when you override `INSTALLED_APPS`, e.g. if you exclude `auth` app,
    you also need to exclude its system checks.
    """
    def __init__(self, new_checks, deployment_checks=None):
        from django.core.checks.registry import registry
        self.registry = registry
        self.new_checks = new_checks
        self.deployment_checks = deployment_checks
        super(override_system_checks, self).__init__()
    def enable(self):
        # Swap in the replacement check lists, remembering the originals.
        self.old_checks = self.registry.registered_checks
        self.registry.registered_checks = self.new_checks
        self.old_deployment_checks = self.registry.deployment_checks
        if self.deployment_checks is not None:
            self.registry.deployment_checks = self.deployment_checks
    def disable(self):
        self.registry.registered_checks = self.old_checks
        self.registry.deployment_checks = self.old_deployment_checks
def compare_xml(want, got):
    """Tries to do a 'xml-comparison' of want and got. Plain string
    comparison doesn't always work because, for example, attribute
    ordering should not be important. Comment nodes are not considered in the
    comparison. Leading and trailing whitespace is ignored on both chunks.
    Based on https://github.com/lxml/lxml/blob/master/src/lxml/doctestcompare.py
    """
    _norm_whitespace_re = re.compile(r'[ \t\n][ \t\n]+')
    def norm_whitespace(v):
        # Collapse any run of whitespace to a single space.
        return _norm_whitespace_re.sub(' ', v)
    def child_text(element):
        # Concatenated text of the direct text-node children only.
        return ''.join(c.data for c in element.childNodes
                       if c.nodeType == Node.TEXT_NODE)
    def children(element):
        return [c for c in element.childNodes
                if c.nodeType == Node.ELEMENT_NODE]
    def norm_child_text(element):
        return norm_whitespace(child_text(element))
    def attrs_dict(element):
        # dict comparison makes attribute order irrelevant.
        return dict(element.attributes.items())
    def check_element(want_element, got_element):
        # Recursive structural comparison: tag, text, attributes, children.
        if want_element.tagName != got_element.tagName:
            return False
        if norm_child_text(want_element) != norm_child_text(got_element):
            return False
        if attrs_dict(want_element) != attrs_dict(got_element):
            return False
        want_children = children(want_element)
        got_children = children(got_element)
        if len(want_children) != len(got_children):
            return False
        for want, got in zip(want_children, got_children):
            if not check_element(want, got):
                return False
        return True
    def first_node(document):
        # Skip leading comment nodes to find the real root.
        for node in document.childNodes:
            if node.nodeType != Node.COMMENT_NODE:
                return node
    want, got = strip_quotes(want, got)
    want = want.strip().replace('\\n', '\n')
    got = got.strip().replace('\\n', '\n')
    # If the string is not a complete xml document, we may need to add a
    # root element. This allow us to compare fragments, like "<foo/><bar/>"
    if not want.startswith('<?xml'):
        wrapper = '<root>%s</root>'
        want = wrapper % want
        got = wrapper % got
    # Parse the want and got strings, and compare the parsings.
    want_root = first_node(parseString(want))
    got_root = first_node(parseString(got))
    return check_element(want_root, got_root)
def strip_quotes(want, got):
    """
    Strip surrounding quotes from doctest output values and return the pair
    ``(want, got)``.

    Quotes are removed only when *both* strings carry matching quoting
    (plain or a ``u``-prefixed unicode literal); otherwise both are
    returned unchanged. The previous docstring showed single-argument
    calls returning a single string, which did not match the signature.

    >>> strip_quotes("'foo'", "'bar'")
    ('foo', 'bar')
    >>> strip_quotes('u"foo"', 'u"bar"')
    ('foo', 'bar')
    """
    def is_quoted_string(s):
        # 'x' or "x" (at least the two quote characters themselves).
        s = s.strip()
        return len(s) >= 2 and s[0] == s[-1] and s[0] in ('"', "'")
    def is_quoted_unicode(s):
        # u'x' or u"x" (Python 2 unicode literal repr).
        s = s.strip()
        return len(s) >= 3 and s[0] == 'u' and s[1] == s[-1] and s[1] in ('"', "'")
    if is_quoted_string(want) and is_quoted_string(got):
        want = want.strip()[1:-1]
        got = got.strip()[1:-1]
    elif is_quoted_unicode(want) and is_quoted_unicode(got):
        want = want.strip()[2:-1]
        got = got.strip()[2:-1]
    return want, got
def str_prefix(s):
    # Substitute the "%(_)s" placeholder with '' on Python 3 and 'u' on
    # Python 2, so expected repr strings can be written once for both.
    return s % {'_': '' if six.PY3 else 'u'}
class CaptureQueriesContext(object):
    """
    Context manager that captures queries executed by the specified connection.
    """
    def __init__(self, connection):
        self.connection = connection
    def __iter__(self):
        return iter(self.captured_queries)
    def __getitem__(self, index):
        return self.captured_queries[index]
    def __len__(self):
        return len(self.captured_queries)
    @property
    def captured_queries(self):
        # Slice of the connection's query log recorded while inside the block.
        return self.connection.queries[self.initial_queries:self.final_queries]
    def __enter__(self):
        # Force query logging regardless of DEBUG and remember where the log
        # stood on entry.
        self.force_debug_cursor = self.connection.force_debug_cursor
        self.connection.force_debug_cursor = True
        self.initial_queries = len(self.connection.queries_log)
        self.final_queries = None
        # Keep request_started from clearing the log mid-capture.
        request_started.disconnect(reset_queries)
        return self
    def __exit__(self, exc_type, exc_value, traceback):
        self.connection.force_debug_cursor = self.force_debug_cursor
        request_started.connect(reset_queries)
        if exc_type is not None:
            return
        self.final_queries = len(self.connection.queries_log)
class ignore_warnings(TestContextDecorator):
    """Suppress matching warnings for the duration of a test or block."""
    def __init__(self, **kwargs):
        self.ignore_kwargs = kwargs
        # filterwarnings() understands message/module regexes; plain
        # simplefilter() suffices when only category/action are given.
        if 'message' in self.ignore_kwargs or 'module' in self.ignore_kwargs:
            self.filter_func = warnings.filterwarnings
        else:
            self.filter_func = warnings.simplefilter
        super(ignore_warnings, self).__init__()
    def enable(self):
        # catch_warnings saves/restores the global filter state for us.
        self.catch_warnings = warnings.catch_warnings()
        self.catch_warnings.__enter__()
        self.filter_func('ignore', **self.ignore_kwargs)
    def disable(self):
        self.catch_warnings.__exit__(*sys.exc_info())
@contextmanager
def patch_logger(logger_name, log_level, log_kwargs=False):
    """
    Temporarily replace one logging method of ``logger_name`` with a stub
    that records formatted messages, yielding the list of recorded calls.
    """
    recorded = []

    def record(msg, *args, **kwargs):
        rendered = msg % args
        if log_kwargs:
            recorded.append((rendered, kwargs))
        else:
            recorded.append(rendered)

    logger = logging.getLogger(logger_name)
    original = getattr(logger, log_level)
    setattr(logger, log_level, record)
    try:
        yield recorded
    finally:
        # Always put the real logging method back.
        setattr(logger, log_level, original)
# On OSes that don't provide tzset (Windows), we can't set the timezone
# in which the program runs. As a consequence, we must skip tests that
# don't enforce a specific timezone (with timezone.override or equivalent),
# or attempt to interpret naive datetimes in the default timezone.
# Usable as @requires_tz_support on a test method or class.
requires_tz_support = skipUnless(
    TZ_SUPPORT,
    "This test relies on the ability to run a program in an arbitrary "
    "time zone, but your operating system isn't able to do that."
)
@contextmanager
def extend_sys_path(*paths):
    """Temporarily append ``paths`` to ``sys.path``, restoring it on exit."""
    original = list(sys.path)
    sys.path.extend(paths)
    try:
        yield
    finally:
        # Reassign rather than mutate, exactly restoring the entry list.
        sys.path = original
@contextmanager
def isolate_lru_cache(lru_cache_object):
    """Clear the cache of an LRU cache object on entering and exiting."""
    lru_cache_object.cache_clear()
    try:
        yield
    finally:
        # Clear again so entries cached inside the block don't leak into
        # subsequent tests.
        lru_cache_object.cache_clear()
@contextmanager
def captured_output(stream_name):
    """Return a context manager used by captured_stdout/stdin/stderr
    that temporarily replaces the sys stream *stream_name* with a StringIO.
    Note: This function and the following ``captured_std*`` are copied
    from CPython's ``test.support`` module."""
    orig_stdout = getattr(sys, stream_name)
    setattr(sys, stream_name, six.StringIO())
    try:
        yield getattr(sys, stream_name)
    finally:
        # Always restore the real stream, even if the block raised.
        setattr(sys, stream_name, orig_stdout)
def captured_stdout():
    """Capture the output of sys.stdout:
    with captured_stdout() as stdout:
        print("hello")
    self.assertEqual(stdout.getvalue(), "hello\n")
    """
    # Thin wrapper around captured_output(); yields the replacement StringIO.
    return captured_output("stdout")
def captured_stderr():
    """Capture the output of sys.stderr:
    with captured_stderr() as stderr:
        print("hello", file=sys.stderr)
    self.assertEqual(stderr.getvalue(), "hello\n")
    """
    # Thin wrapper around captured_output(); yields the replacement StringIO.
    return captured_output("stderr")
def captured_stdin():
    """Capture the input to sys.stdin:
    with captured_stdin() as stdin:
        stdin.write('hello\n')
        stdin.seek(0)
        # call test code that consumes from sys.stdin
        captured = input()
    self.assertEqual(captured, "hello")
    """
    # Thin wrapper around captured_output(); yields the replacement StringIO.
    return captured_output("stdin")
def reset_warning_registry():
    """
    Empty ``__warningregistry__`` on every imported module.

    Required by some tests because of a Python bug that kept
    warnings.simplefilter("always") from always re-showing warnings:
    http://bugs.python.org/issue4180 (fixed in Python 3.4.2).
    """
    attr = "__warningregistry__"
    for module in list(sys.modules.values()):
        if hasattr(module, attr):
            getattr(module, attr).clear()
@contextmanager
def freeze_time(t):
    """
    Temporarily make ``time.time()`` return the fixed value ``t``.

    Only the ``time`` module attribute is patched, so code that did
    ``from time import time`` is unaffected. Internal test helper, not a
    public API.
    """
    original = time.time
    time.time = lambda: t
    try:
        yield
    finally:
        time.time = original
def require_jinja2(test_func):
    """
    Decorator to enable a Jinja2 template engine in addition to the regular
    Django template engine for a test or skip it if Jinja2 isn't available.
    """
    test_func = skipIf(jinja2 is None, "this test requires jinja2")(test_func)
    # Configure both engines side by side. keep_trailing_newline presumably
    # aligns Jinja2's rendering with the Django engine -- confirm in docs.
    test_func = override_settings(TEMPLATES=[{
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'APP_DIRS': True,
    }, {
        'BACKEND': 'django.template.backends.jinja2.Jinja2',
        'APP_DIRS': True,
        'OPTIONS': {'keep_trailing_newline': True},
    }])(test_func)
    return test_func
class override_script_prefix(TestContextDecorator):
    """
    Decorator or context manager to temporary override the script prefix.
    """
    def __init__(self, prefix):
        self.prefix = prefix
        super(override_script_prefix, self).__init__()
    def enable(self):
        # Remember the active prefix so disable() can restore it.
        self.old_prefix = get_script_prefix()
        set_script_prefix(self.prefix)
    def disable(self):
        set_script_prefix(self.old_prefix)
class LoggingCaptureMixin(object):
    """
    Capture the output from the 'django' logger and store it on the class's
    logger_output attribute.
    """
    def setUp(self):
        self.logger = logging.getLogger('django')
        self.old_stream = self.logger.handlers[0].stream
        # Redirect the first handler's stream into an in-memory buffer.
        self.logger_output = six.StringIO()
        self.logger.handlers[0].stream = self.logger_output
    def tearDown(self):
        self.logger.handlers[0].stream = self.old_stream
class isolate_apps(TestContextDecorator):
    """
    Act as either a decorator or a context manager to register models defined
    in its wrapped context to an isolated registry.
    The list of installed apps the isolated registry should contain must be
    passed as arguments.
    Two optional keyword arguments can be specified:
    `attr_name`: attribute assigned the isolated registry if used as a class
    decorator.
    `kwarg_name`: keyword argument passing the isolated registry if used as a
    function decorator.
    """
    def __init__(self, *installed_apps, **kwargs):
        self.installed_apps = installed_apps
        super(isolate_apps, self).__init__(**kwargs)
    def enable(self):
        # New model classes register against Options.default_apps, so
        # swapping it routes model definitions to the isolated registry.
        self.old_apps = Options.default_apps
        apps = Apps(self.installed_apps)
        setattr(Options, 'default_apps', apps)
        return apps
    def disable(self):
        setattr(Options, 'default_apps', self.old_apps)
def tag(*tags):
    """Attach the given tag names to a test class or method via ``obj.tags``."""
    def decorator(obj):
        obj.tags = set(tags)
        return obj
    return decorator
| gpl-3.0 |
javilonas/NCam | cross/android-toolchain/lib/python2.7/distutils/command/install_lib.py | 251 | 8338 | """distutils.command.install_lib
Implements the Distutils 'install_lib' command
(install all Python modules)."""
__revision__ = "$Id$"
import os
import sys
from distutils.core import Command
from distutils.errors import DistutilsOptionError
# Extension for Python source files.
if hasattr(os, 'extsep'):
    PYTHON_SOURCE_EXTENSION = os.extsep + "py"
else:
    # Platforms without os.extsep: fall back to a plain ".py" suffix.
    PYTHON_SOURCE_EXTENSION = ".py"
class install_lib(Command):
    """distutils command: copy built Python modules into the install tree
    and optionally byte-compile them. Note: Python 2 source (see the
    ``raise Exc, "msg"`` syntax below)."""
    description = "install all Python modules (extensions and pure Python)"
    # The byte-compilation options are a tad confusing. Here are the
    # possible scenarios:
    # 1) no compilation at all (--no-compile --no-optimize)
    # 2) compile .pyc only (--compile --no-optimize; default)
    # 3) compile .pyc and "level 1" .pyo (--compile --optimize)
    # 4) compile "level 1" .pyo only (--no-compile --optimize)
    # 5) compile .pyc and "level 2" .pyo (--compile --optimize-more)
    # 6) compile "level 2" .pyo only (--no-compile --optimize-more)
    #
    # The UI for this is two option, 'compile' and 'optimize'.
    # 'compile' is strictly boolean, and only decides whether to
    # generate .pyc files. 'optimize' is three-way (0, 1, or 2), and
    # decides both whether to generate .pyo files and what level of
    # optimization to use.
    user_options = [
        ('install-dir=', 'd', "directory to install to"),
        ('build-dir=','b', "build directory (where to install from)"),
        ('force', 'f', "force installation (overwrite existing files)"),
        ('compile', 'c', "compile .py to .pyc [default]"),
        ('no-compile', None, "don't compile .py files"),
        ('optimize=', 'O',
         "also compile with optimization: -O1 for \"python -O\", "
         "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
        ('skip-build', None, "skip the build steps"),
    ]
    boolean_options = ['force', 'compile', 'skip-build']
    negative_opt = {'no-compile' : 'compile'}
    def initialize_options(self):
        """Set option defaults; the 'install' command fills most in later."""
        # let the 'install' command dictate our installation directory
        self.install_dir = None
        self.build_dir = None
        self.force = 0
        self.compile = None
        self.optimize = None
        self.skip_build = None
    def finalize_options(self):
        """Inherit unset options from 'install' and validate 'optimize'."""
        # Get all the information we need to install pure Python modules
        # from the umbrella 'install' command -- build (source) directory,
        # install (target) directory, and whether to compile .py files.
        self.set_undefined_options('install',
                                   ('build_lib', 'build_dir'),
                                   ('install_lib', 'install_dir'),
                                   ('force', 'force'),
                                   ('compile', 'compile'),
                                   ('optimize', 'optimize'),
                                   ('skip_build', 'skip_build'),
                                  )
        if self.compile is None:
            self.compile = 1
        if self.optimize is None:
            self.optimize = 0
        if not isinstance(self.optimize, int):
            try:
                self.optimize = int(self.optimize)
                if self.optimize not in (0, 1, 2):
                    raise AssertionError
            except (ValueError, AssertionError):
                raise DistutilsOptionError, "optimize must be 0, 1, or 2"
    def run(self):
        """Build (unless skipped), copy the files, then byte-compile."""
        # Make sure we have built everything we need first
        self.build()
        # Install everything: simply dump the entire contents of the build
        # directory to the installation directory (that's the beauty of
        # having a build directory!)
        outfiles = self.install()
        # (Optionally) compile .py to .pyc
        if outfiles is not None and self.distribution.has_pure_modules():
            self.byte_compile(outfiles)
    # -- Top-level worker functions ------------------------------------
    # (called from 'run()')
    def build(self):
        """Run the build_py/build_ext sub-commands as needed."""
        if not self.skip_build:
            if self.distribution.has_pure_modules():
                self.run_command('build_py')
            if self.distribution.has_ext_modules():
                self.run_command('build_ext')
    def install(self):
        """Copy the build tree into install_dir; return the copied files
        (or None when there is nothing to install)."""
        if os.path.isdir(self.build_dir):
            outfiles = self.copy_tree(self.build_dir, self.install_dir)
        else:
            self.warn("'%s' does not exist -- no Python modules to install" %
                      self.build_dir)
            return
        return outfiles
    def byte_compile(self, files):
        """Byte-compile installed .py files per the compile/optimize options."""
        if sys.dont_write_bytecode:
            self.warn('byte-compiling is disabled, skipping.')
            return
        from distutils.util import byte_compile
        # Get the "--root" directory supplied to the "install" command,
        # and use it as a prefix to strip off the purported filename
        # encoded in bytecode files. This is far from complete, but it
        # should at least generate usable bytecode in RPM distributions.
        install_root = self.get_finalized_command('install').root
        if self.compile:
            byte_compile(files, optimize=0,
                         force=self.force, prefix=install_root,
                         dry_run=self.dry_run)
        if self.optimize > 0:
            byte_compile(files, optimize=self.optimize,
                         force=self.force, prefix=install_root,
                         verbose=self.verbose, dry_run=self.dry_run)
    # -- Utility methods -----------------------------------------------
    def _mutate_outputs(self, has_any, build_cmd, cmd_option, output_dir):
        """Map a build command's output paths from its build dir into
        output_dir, preserving the relative layout."""
        if not has_any:
            return []
        build_cmd = self.get_finalized_command(build_cmd)
        build_files = build_cmd.get_outputs()
        build_dir = getattr(build_cmd, cmd_option)
        prefix_len = len(build_dir) + len(os.sep)
        outputs = []
        for file in build_files:
            outputs.append(os.path.join(output_dir, file[prefix_len:]))
        return outputs
    def _bytecode_filenames(self, py_filenames):
        """Return the .pyc/.pyo names that byte-compiling would produce."""
        bytecode_files = []
        for py_file in py_filenames:
            # Since build_py handles package data installation, the
            # list of outputs can contain more than just .py files.
            # Make sure we only report bytecode for the .py files.
            ext = os.path.splitext(os.path.normcase(py_file))[1]
            if ext != PYTHON_SOURCE_EXTENSION:
                continue
            if self.compile:
                bytecode_files.append(py_file + "c")
            if self.optimize > 0:
                bytecode_files.append(py_file + "o")
        return bytecode_files
    # -- External interface --------------------------------------------
    # (called by outsiders)
    def get_outputs(self):
        """Return the list of files that would be installed if this command
        were actually run. Not affected by the "dry-run" flag or whether
        modules have actually been built yet.
        """
        pure_outputs = \
            self._mutate_outputs(self.distribution.has_pure_modules(),
                                 'build_py', 'build_lib',
                                 self.install_dir)
        if self.compile:
            bytecode_outputs = self._bytecode_filenames(pure_outputs)
        else:
            bytecode_outputs = []
        ext_outputs = \
            self._mutate_outputs(self.distribution.has_ext_modules(),
                                 'build_ext', 'build_lib',
                                 self.install_dir)
        return pure_outputs + bytecode_outputs + ext_outputs
    def get_inputs(self):
        """Get the list of files that are input to this command, ie. the
        files that get installed as they are named in the build tree.
        The files in this list correspond one-to-one to the output
        filenames returned by 'get_outputs()'.
        """
        inputs = []
        if self.distribution.has_pure_modules():
            build_py = self.get_finalized_command('build_py')
            inputs.extend(build_py.get_outputs())
        if self.distribution.has_ext_modules():
            build_ext = self.get_finalized_command('build_ext')
            inputs.extend(build_ext.get_outputs())
        return inputs
| gpl-3.0 |
saurabh6790/omn-app | setup/doctype/sales_partner/sales_partner.py | 30 | 1567 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import webnotes
from webnotes.utils import cint, cstr, filter_strip_join
from webnotes.webutils import WebsiteGenerator, clear_cache
class DocType(WebsiteGenerator):
	"""Sales Partner controller: validates partner data, regenerates the
	partner web page, and exposes contact/address lookups."""

	def __init__(self, doc, doclist=None):
		self.doc = doc
		self.doclist = doclist

	def validate(self):
		# Ensure the stored website URL always carries a scheme.
		if self.doc.partner_website and not self.doc.partner_website.startswith("http"):
			self.doc.partner_website = "http://" + self.doc.partner_website

	def on_update(self):
		WebsiteGenerator.on_update(self)
		if self.doc.page_name:
			# The rendered "partners" listing is cached; invalidate it so the
			# updated partner shows up.
			clear_cache("partners")

	def get_contacts(self, nm):
		"""Return [name, full name, contact_no, email_id] rows for every
		contact linked to sales partner ``nm``, or '' when no name is given."""
		if not nm:
			return ''
		# Bind the partner name as a query parameter instead of interpolating
		# it into the SQL string (the old "%s" % nm form was SQL-injectable).
		return webnotes.conn.convert_to_lists(webnotes.conn.sql(
			"select name, CONCAT(IFNULL(first_name,''),' ',IFNULL(last_name,'')),contact_no,email_id from `tabContact` where sales_partner = %s", nm))

	def get_context(self):
		"""Add primary-address details to the context used to render the
		partner's web page."""
		address = webnotes.conn.get_value("Address",
			{"sales_partner": self.doc.name, "is_primary_address": 1},
			"*", as_dict=True)
		if address:
			city_state = ", ".join(filter(None, [address.city, address.state]))
			address_rows = [address.address_line1, address.address_line2,
				city_state, address.pincode, address.country]
			self.doc.fields.update({
				"email": address.email_id,
				"partner_address": filter_strip_join(address_rows, "\n<br>"),
				"phone": filter_strip_join(cstr(address.phone).split(","), "\n<br>")
			})
bmallred/pyfreerdp | pyfreerdp.py | 1 | 2734 | #!/usr/bin/python
from gi.repository import Gtk
import subprocess
class LauncherWindow(Gtk.Window):
    """
    FreeRDP launcher window.
    """

    def __init__(self):
        """
        Initialize the window and widgets
        """
        Gtk.Window.__init__(self, title="Python FreeRDP Launcher")
        table = Gtk.Table(4, 2, False)
        self.add(table)
        # Row 0: host (and optional port).
        label = Gtk.Label("Host and port")
        table.attach(label, 0, 1, 0, 1)
        self.addressEntry = Gtk.Entry()
        table.attach(self.addressEntry, 1, 2, 0, 1)
        # Row 1: username.
        label = Gtk.Label("Username")
        table.attach(label, 0, 1, 1, 2)
        self.usernameEntry = Gtk.Entry()
        table.attach(self.usernameEntry, 1, 2, 1, 2)
        # Row 2: password, masked on screen.
        label = Gtk.Label("Password")
        table.attach(label, 0, 1, 2, 3)
        self.passwordEntry = Gtk.Entry()
        self.passwordEntry.set_visibility(False)
        table.attach(self.passwordEntry, 1, 2, 2, 3)
        # Row 3: action buttons.
        button = Gtk.Button(label="Connect")
        button.connect("clicked", self.connectTo)
        table.attach(button, 0, 1, 3, 4)
        button = Gtk.Button(label="Close")
        button.connect("clicked", self.closeWindow)
        table.attach(button, 1, 2, 3, 4)

    def connectTo(self, button):
        """
        Attempt to connect using the user input.
        """
        address = self.addressEntry.get_text()
        if not address:
            # Nothing to connect to; keep the launcher open.
            return
        command = [
            "/usr/bin/xfreerdp",
            "-K",
            "-g", "workarea",
            "--ignore-certificate",
            "--rfx",
            "--rfx-mode", "video",
            "--plugin", "cliprdr",
            "--plugin", "rdpsnd", "--data", "alsa", "--",
            "--plugin", "drdynvc", "--data", "audin", "--"]
        username = self.usernameEntry.get_text()
        password = self.passwordEntry.get_text()
        # If the username and password are present then use them.
        if username and password:
            command.extend(["-u", username, "-p", password])
        command.append(address)
        try:
            # Launch detached, without a shell: the previous
            # subprocess.call(" ".join(command), shell=True) form let a
            # crafted address/password inject shell commands and broke on
            # values containing spaces; it also needed the quoting hack
            # around the password, which is no longer required.
            subprocess.Popen(command)
        except OSError:
            # xfreerdp missing or not executable -- leave the window open
            # instead of silently swallowing every exception.
            return
        # Client launched; close the launcher, matching the old success path.
        Gtk.main_quit()

    def closeWindow(self, button):
        """
        Close the window and end the program.
        """
        Gtk.main_quit()
# Build the launcher window and run the GTK main loop until the window is
# closed or a connection succeeds.
window = LauncherWindow()
window.connect("delete-event", Gtk.main_quit)
window.show_all()
Gtk.main()
| gpl-2.0 |
Mj258/weiboapi | srapyDemo/envs/Lib/site-packages/twisted/conch/insults/text.py | 40 | 5415 | # -*- test-case-name: twisted.conch.test.test_text -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Character attribute manipulation API.
This module provides a domain-specific language (using Python syntax)
for the creation of text with additional display attributes associated
with it. It is intended as an alternative to manually building up
strings containing ECMA 48 character attribute control codes. It
currently supports foreground and background colors (black, red,
green, yellow, blue, magenta, cyan, and white), intensity selection,
underlining, blinking and reverse video. Character set selection
support is planned.
Character attributes are specified by using two Python operations:
attribute lookup and indexing. For example, the string \"Hello
world\" with red foreground and all other attributes set to their
defaults, assuming the name twisted.conch.insults.text.attributes has
been imported and bound to the name \"A\" (with the statement C{from
twisted.conch.insults.text import attributes as A}, for example) one
uses this expression::
A.fg.red[\"Hello world\"]
Other foreground colors are set by substituting their name for
\"red\". To set both a foreground and a background color, this
expression is used::
A.fg.red[A.bg.green[\"Hello world\"]]
Note that either A.bg.green can be nested within A.fg.red or vice
versa. Also note that multiple items can be nested within a single
index operation by separating them with commas::
A.bg.green[A.fg.red[\"Hello\"], " ", A.fg.blue[\"world\"]]
Other character attributes are set in a similar fashion. To specify a
blinking version of the previous expression::
A.blink[A.bg.green[A.fg.red[\"Hello\"], " ", A.fg.blue[\"world\"]]]
C{A.reverseVideo}, C{A.underline}, and C{A.bold} are also valid.
A third operation is actually supported: unary negation. This turns
off an attribute when an enclosing expression would otherwise have
caused it to be on. For example::
A.underline[A.fg.red[\"Hello\", -A.underline[\" world\"]]]
A formatting structure can then be serialized into a string containing the
necessary VT102 control codes with L{assembleFormattedText}.
@see: L{twisted.conch.insults.text.attributes}
@author: Jp Calderone
"""
from twisted.conch.insults import helper, insults
from twisted.python import _textattributes
from twisted.python.deprecate import deprecatedModuleAttribute
from twisted.python.versions import Version
# Backwards-compatible alias; new code should call assembleFormattedText().
flatten = _textattributes.flatten

deprecatedModuleAttribute(
    Version('Twisted', 13, 1, 0),
    'Use twisted.conch.insults.text.assembleFormattedText instead.',
    'twisted.conch.insults.text',
    'flatten')

# Color-name -> VT102 color constant map shared by the foreground and
# background attribute factories below.
_TEXT_COLORS = {
    'black': helper.BLACK,
    'red': helper.RED,
    'green': helper.GREEN,
    'yellow': helper.YELLOW,
    'blue': helper.BLUE,
    'magenta': helper.MAGENTA,
    'cyan': helper.CYAN,
    'white': helper.WHITE}
class _CharacterAttributes(_textattributes.CharacterAttributesMixin):
    """
    Factory for character attributes, including foreground and background color
    and non-color attributes such as bold, reverse video and underline.

    Character attributes are applied to actual text by using object
    indexing-syntax (C{obj['abc']}) after accessing a factory attribute, for
    example::

        attributes.bold['Some text']

    These can be nested to mix attributes::

        attributes.bold[attributes.underline['Some text']]

    And multiple values can be passed::

        attributes.normal[attributes.bold['Some'], ' text']

    Non-color attributes can be accessed by attribute name, available
    attributes are:

        - bold
        - blink
        - reverseVideo
        - underline

    Available colors are:

        0. black
        1. red
        2. green
        3. yellow
        4. blue
        5. magenta
        6. cyan
        7. white

    @ivar fg: Foreground colors accessed by attribute name, see above
        for possible names.

    @ivar bg: Background colors accessed by attribute name, see above
        for possible names.
    """
    # fg/bg expose the shared color table through attribute access
    # (e.g. attributes.fg.red).
    fg = _textattributes._ColorAttribute(
        _textattributes._ForegroundColorAttr, _TEXT_COLORS)
    bg = _textattributes._ColorAttribute(
        _textattributes._BackgroundColorAttr, _TEXT_COLORS)

    # Non-color attribute names mapped to their VT102 control constants.
    attrs = {
        'bold': insults.BOLD,
        'blink': insults.BLINK,
        'underline': insults.UNDERLINE,
        'reverseVideo': insults.REVERSE_VIDEO}
def assembleFormattedText(formatted):
    """
    Serialize a structured text/attribute tree into a VT102 string.

    Handles bold, blink, reverse, underline and color codes. For example::

        from twisted.conch.insults.text import attributes as A
        assembleFormattedText(
            A.normal[A.bold['Time: '], A.fg.lightRed['Now!']])

    produces "Time: " in bold followed by "Now!" in light red with no
    other formatting.

    @param formatted: Structured text and attributes.

    @rtype: C{str}
    @return: String containing VT102 control sequences that mimic those
        specified by L{formatted}.

    @see: L{twisted.conch.insults.text.attributes}
    @since: 13.1
    """
    formatting_state = helper._FormattingState()
    return _textattributes.flatten(formatted, formatting_state, 'toVT102')
# Module-level singleton used by client code, conventionally imported as:
#   from twisted.conch.insults.text import attributes as A
attributes = _CharacterAttributes()

__all__ = ['attributes', 'flatten']
| mit |
lightcn/odoo | addons/l10n_fr/__init__.py | 424 | 1447 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2008 JAILLET Simon - CrysaLEAD - www.crysalead.fr
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsability of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# garantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
##############################################################################
import l10n_fr
import report
import wizard
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
initOS/server-tools | fetchmail_notify_error_to_sender/fetchmail.py | 35 | 1291 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 Lorenzo Battistini <lorenzo.battistini@agilebg.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields
class fetchmail_server(models.Model):
    """Fetchmail server extended with an error-notice e-mail template."""

    _inherit = 'fetchmail.server'

    # Template rendered and mailed back to the original sender whenever an
    # error occurs while processing a fetched message.
    error_notice_template_id = fields.Many2one(
        'email.template', string="Error notice template",
        help="Set here the template to use to send notice to sender when "
             "errors occur while fetching email")
| agpl-3.0 |
mozilla/captain | vendor/lib/python/requests/packages/charade/gb2312prober.py | 231 | 1722 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import GB2312DistributionAnalysis
from .mbcssm import GB2312SMModel
class GB2312Prober(MultiByteCharSetProber):
    """Charset prober for GB2312 (simplified Chinese).

    Combines a coding state machine (structural byte-sequence validity)
    with a character-distribution analyser (statistical likelihood).
    """

    def __init__(self):
        MultiByteCharSetProber.__init__(self)
        # Validates byte sequences against the GB2312 state-machine model.
        self._mCodingSM = CodingStateMachine(GB2312SMModel)
        # Scores observed character frequency distribution.
        self._mDistributionAnalyzer = GB2312DistributionAnalysis()
        self.reset()

    def get_charset_name(self):
        """Return the canonical name of the charset this prober detects."""
        return "GB2312"
| mpl-2.0 |
cchurch/ansible | lib/ansible/module_utils/facts/hardware/linux.py | 25 | 31344 | # This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import collections
import errno
import glob
import json
import os
import re
import sys
import time
from multiprocessing import cpu_count
from multiprocessing.pool import ThreadPool
from ansible.module_utils._text import to_text
from ansible.module_utils.six import iteritems
from ansible.module_utils.common.text.formatters import bytes_to_human
from ansible.module_utils.facts.hardware.base import Hardware, HardwareCollector
from ansible.module_utils.facts.utils import get_file_content, get_file_lines, get_mount_size
# import this as a module to ensure we get the same module instance
from ansible.module_utils.facts import timeout
def get_partition_uuid(partname):
    """Return the filesystem UUID for /dev/<partname>, or None.

    Scans the /dev/disk/by-uuid symlink directory and returns the name of
    the first entry that resolves to the requested device node. Returns
    None when the directory cannot be read or no entry matches.
    """
    by_uuid_dir = "/dev/disk/by-uuid"
    target_device = "/dev/" + partname
    try:
        candidates = os.listdir(by_uuid_dir)
    except OSError:
        return None
    matches = (
        candidate for candidate in candidates
        if os.path.realpath(by_uuid_dir + "/" + candidate) == target_device
    )
    return next(matches, None)
class LinuxHardware(Hardware):
    """
    Linux-specific subclass of Hardware.  Defines memory and CPU facts:
    - memfree_mb
    - memtotal_mb
    - swapfree_mb
    - swaptotal_mb
    - processor (a list)
    - processor_cores
    - processor_count

    In addition, it also defines number of DMI facts and device facts.
    """

    platform = 'Linux'

    # Originally only had these four as toplevelfacts
    ORIGINAL_MEMORY_FACTS = frozenset(('MemTotal', 'SwapTotal', 'MemFree', 'SwapFree'))
    # Now we have all of these in a dict structure
    MEMORY_FACTS = ORIGINAL_MEMORY_FACTS.union(('Buffers', 'Cached', 'SwapCached'))

    # regex used against findmnt output to detect bind mounts
    # (bind mounts show a "[/sub/dir]" suffix in the SOURCE column)
    BIND_MOUNT_RE = re.compile(r'.*\]')
    # regex used against mtab content to find entries that are bind mounts
    MTAB_BIND_MOUNT_RE = re.compile(r'.*bind.*"')
    # regex used for replacing octal escape sequences (e.g. \040 for space)
    OCTAL_ESCAPE_RE = re.compile(r'\\[0-9]{3}')
    def populate(self, collected_facts=None):
        """Collect and merge all Linux hardware facts into a single dict.

        :param collected_facts: facts gathered so far (CPU collection uses
            the architecture fact); may be None.
        :returns: dict of cpu, memory, dmi, device, uptime, lvm and mount facts.
        """
        hardware_facts = {}
        # Force a C locale so external command output parses consistently.
        self.module.run_command_environ_update = {'LANG': 'C', 'LC_ALL': 'C', 'LC_NUMERIC': 'C'}

        cpu_facts = self.get_cpu_facts(collected_facts=collected_facts)
        memory_facts = self.get_memory_facts()
        dmi_facts = self.get_dmi_facts()
        device_facts = self.get_device_facts()
        uptime_facts = self.get_uptime_facts()
        lvm_facts = self.get_lvm_facts()

        mount_facts = {}
        try:
            mount_facts = self.get_mount_facts()
        except timeout.TimeoutError:
            # Mount probing can hang (e.g. dead network mounts); give up quietly.
            pass

        hardware_facts.update(cpu_facts)
        hardware_facts.update(memory_facts)
        hardware_facts.update(dmi_facts)
        hardware_facts.update(device_facts)
        hardware_facts.update(uptime_facts)
        hardware_facts.update(lvm_facts)
        hardware_facts.update(mount_facts)

        return hardware_facts
    def get_memory_facts(self):
        """Parse /proc/meminfo into memory facts.

        Produces the four legacy top-level "<name>_mb" facts plus a nested
        'memory_mb' dict with real/nocache/swap breakdowns.  /proc/meminfo
        values are in kB; the integer division by 1024 converts to MiB.
        """
        memory_facts = {}
        if not os.access("/proc/meminfo", os.R_OK):
            return memory_facts

        memstats = {}
        for line in get_file_lines("/proc/meminfo"):
            data = line.split(":", 1)
            key = data[0]
            if key in self.ORIGINAL_MEMORY_FACTS:
                val = data[1].strip().split(' ')[0]
                memory_facts["%s_mb" % key.lower()] = int(val) // 1024

            if key in self.MEMORY_FACTS:
                val = data[1].strip().split(' ')[0]
                memstats[key.lower()] = int(val) // 1024

        # Derived stats are only computed when all their inputs were present
        # in /proc/meminfo.
        if None not in (memstats.get('memtotal'), memstats.get('memfree')):
            memstats['real:used'] = memstats['memtotal'] - memstats['memfree']
        if None not in (memstats.get('cached'), memstats.get('memfree'), memstats.get('buffers')):
            memstats['nocache:free'] = memstats['cached'] + memstats['memfree'] + memstats['buffers']
        if None not in (memstats.get('memtotal'), memstats.get('nocache:free')):
            memstats['nocache:used'] = memstats['memtotal'] - memstats['nocache:free']
        if None not in (memstats.get('swaptotal'), memstats.get('swapfree')):
            memstats['swap:used'] = memstats['swaptotal'] - memstats['swapfree']

        memory_facts['memory_mb'] = {
            'real': {
                'total': memstats.get('memtotal'),
                'used': memstats.get('real:used'),
                'free': memstats.get('memfree'),
            },
            'nocache': {
                'free': memstats.get('nocache:free'),
                'used': memstats.get('nocache:used'),
            },
            'swap': {
                'total': memstats.get('swaptotal'),
                'free': memstats.get('swapfree'),
                'used': memstats.get('swap:used'),
                'cached': memstats.get('swapcached'),
            },
        }

        return memory_facts
    def get_cpu_facts(self, collected_facts=None):
        """Parse /proc/cpuinfo into processor facts.

        Returns processor model list plus processor_count, processor_cores,
        processor_threads_per_core and processor_vcpus.  Handles Xen
        paravirt guests (which hide topology) and architectures whose
        cpuinfo lacks vendor_id/model name (ARM, Power, SPARC) specially.

        :param collected_facts: previously gathered facts; only
            'ansible_architecture' is consulted.  May be None.
        """
        cpu_facts = {}
        collected_facts = collected_facts or {}

        i = 0
        vendor_id_occurrence = 0
        model_name_occurrence = 0
        processor_occurence = 0
        physid = 0
        coreid = 0
        sockets = {}
        cores = {}

        xen = False
        xen_paravirt = False
        try:
            if os.path.exists('/proc/xen'):
                xen = True
            else:
                for line in get_file_lines('/sys/hypervisor/type'):
                    if line.strip() == 'xen':
                        xen = True
                    # Only interested in the first line
                    break
        except IOError:
            pass

        if not os.access("/proc/cpuinfo", os.R_OK):
            return cpu_facts

        cpu_facts['processor'] = []
        for line in get_file_lines('/proc/cpuinfo'):
            data = line.split(":", 1)
            key = data[0].strip()

            try:
                val = data[1].strip()
            except IndexError:
                val = ""

            if xen:
                if key == 'flags':
                    # Check for vme cpu flag, Xen paravirt does not expose this.
                    # Need to detect Xen paravirt because it exposes cpuinfo
                    # differently than Xen HVM or KVM and causes reporting of
                    # only a single cpu core.
                    if 'vme' not in val:
                        xen_paravirt = True

            # model name is for Intel arch, Processor (mind the uppercase P)
            # works for some ARM devices, like the Sheevaplug.
            # 'ncpus active' is SPARC attribute
            if key in ['model name', 'Processor', 'vendor_id', 'cpu', 'Vendor', 'processor']:
                if 'processor' not in cpu_facts:
                    cpu_facts['processor'] = []
                cpu_facts['processor'].append(val)
                if key == 'vendor_id':
                    vendor_id_occurrence += 1
                if key == 'model name':
                    model_name_occurrence += 1
                if key == 'processor':
                    processor_occurence += 1
                i += 1
            elif key == 'physical id':
                physid = val
                if physid not in sockets:
                    sockets[physid] = 1
            elif key == 'core id':
                coreid = val
                if coreid not in sockets:
                    cores[coreid] = 1
            elif key == 'cpu cores':
                sockets[physid] = int(val)
            elif key == 'siblings':
                cores[coreid] = int(val)
            elif key == '# processors':
                cpu_facts['processor_cores'] = int(val)
            elif key == 'ncpus active':
                i = int(val)

        # Skip for platforms without vendor_id/model_name in cpuinfo (e.g ppc64le)
        if vendor_id_occurrence > 0:
            if vendor_id_occurrence == model_name_occurrence:
                i = vendor_id_occurrence

        # The fields for ARM CPUs do not always include 'vendor_id' or 'model name',
        # and sometimes includes both 'processor' and 'Processor'.
        # The fields for Power CPUs include 'processor' and 'cpu'.
        # Always use 'processor' count for ARM and Power systems
        if collected_facts.get('ansible_architecture', '').startswith(('armv', 'aarch', 'ppc')):
            i = processor_occurence

        # FIXME
        if collected_facts.get('ansible_architecture') != 's390x':
            if xen_paravirt:
                # Paravirt hides real topology: report the logical count for
                # every metric.
                cpu_facts['processor_count'] = i
                cpu_facts['processor_cores'] = i
                cpu_facts['processor_threads_per_core'] = 1
                cpu_facts['processor_vcpus'] = i
            else:
                if sockets:
                    cpu_facts['processor_count'] = len(sockets)
                else:
                    cpu_facts['processor_count'] = i

                socket_values = list(sockets.values())
                if socket_values and socket_values[0]:
                    cpu_facts['processor_cores'] = socket_values[0]
                else:
                    cpu_facts['processor_cores'] = 1

                core_values = list(cores.values())
                if core_values:
                    cpu_facts['processor_threads_per_core'] = core_values[0] // cpu_facts['processor_cores']
                else:
                    cpu_facts['processor_threads_per_core'] = 1 // cpu_facts['processor_cores']

                cpu_facts['processor_vcpus'] = (cpu_facts['processor_threads_per_core'] *
                                                cpu_facts['processor_count'] * cpu_facts['processor_cores'])

        return cpu_facts
    def get_dmi_facts(self):
        ''' learn dmi facts from system

        Try /sys first for dmi related facts.
        If that is not available, fall back to dmidecode executable '''

        dmi_facts = {}

        if os.path.exists('/sys/devices/virtual/dmi/id/product_name'):
            # Use kernel DMI info, if available

            # DMI SPEC -- https://www.dmtf.org/sites/default/files/standards/documents/DSP0134_3.2.0.pdf
            # FORM_FACTOR is indexed by the integer read from chassis_type.
            FORM_FACTOR = ["Unknown", "Other", "Unknown", "Desktop",
                           "Low Profile Desktop", "Pizza Box", "Mini Tower", "Tower",
                           "Portable", "Laptop", "Notebook", "Hand Held", "Docking Station",
                           "All In One", "Sub Notebook", "Space-saving", "Lunch Box",
                           "Main Server Chassis", "Expansion Chassis", "Sub Chassis",
                           "Bus Expansion Chassis", "Peripheral Chassis", "RAID Chassis",
                           "Rack Mount Chassis", "Sealed-case PC", "Multi-system",
                           "CompactPCI", "AdvancedTCA", "Blade", "Blade Enclosure",
                           "Tablet", "Convertible", "Detachable", "IoT Gateway",
                           "Embedded PC", "Mini PC", "Stick PC"]

            # Fact name -> sysfs path to read it from.
            DMI_DICT = {
                'bios_date': '/sys/devices/virtual/dmi/id/bios_date',
                'bios_version': '/sys/devices/virtual/dmi/id/bios_version',
                'form_factor': '/sys/devices/virtual/dmi/id/chassis_type',
                'product_name': '/sys/devices/virtual/dmi/id/product_name',
                'product_serial': '/sys/devices/virtual/dmi/id/product_serial',
                'product_uuid': '/sys/devices/virtual/dmi/id/product_uuid',
                'product_version': '/sys/devices/virtual/dmi/id/product_version',
                'system_vendor': '/sys/devices/virtual/dmi/id/sys_vendor'
            }

            for (key, path) in DMI_DICT.items():
                data = get_file_content(path)
                if data is not None:
                    if key == 'form_factor':
                        try:
                            dmi_facts['form_factor'] = FORM_FACTOR[int(data)]
                        except IndexError:
                            dmi_facts['form_factor'] = 'unknown (%s)' % data
                    else:
                        dmi_facts[key] = data
                else:
                    dmi_facts[key] = 'NA'

        else:
            # Fall back to using dmidecode, if available
            dmi_bin = self.module.get_bin_path('dmidecode')
            # Fact name -> dmidecode "-s" keyword.
            DMI_DICT = {
                'bios_date': 'bios-release-date',
                'bios_version': 'bios-version',
                'form_factor': 'chassis-type',
                'product_name': 'system-product-name',
                'product_serial': 'system-serial-number',
                'product_uuid': 'system-uuid',
                'product_version': 'system-version',
                'system_vendor': 'system-manufacturer'
            }
            for (k, v) in DMI_DICT.items():
                if dmi_bin is not None:
                    (rc, out, err) = self.module.run_command('%s -s %s' % (dmi_bin, v))
                    if rc == 0:
                        # Strip out commented lines (specific dmidecode output)
                        thisvalue = ''.join([line for line in out.splitlines() if not line.startswith('#')])
                        try:
                            json.dumps(thisvalue)
                        except UnicodeDecodeError:
                            thisvalue = "NA"

                        dmi_facts[k] = thisvalue
                    else:
                        dmi_facts[k] = 'NA'
                else:
                    dmi_facts[k] = 'NA'

        return dmi_facts
def _run_lsblk(self, lsblk_path):
# call lsblk and collect all uuids
# --exclude 2 makes lsblk ignore floppy disks, which are slower to answer than typical timeouts
# this uses the linux major device number
# for details see https://www.kernel.org/doc/Documentation/devices.txt
args = ['--list', '--noheadings', '--paths', '--output', 'NAME,UUID', '--exclude', '2']
cmd = [lsblk_path] + args
rc, out, err = self.module.run_command(cmd)
return rc, out, err
def _lsblk_uuid(self):
uuids = {}
lsblk_path = self.module.get_bin_path("lsblk")
if not lsblk_path:
return uuids
rc, out, err = self._run_lsblk(lsblk_path)
if rc != 0:
return uuids
# each line will be in format:
# <devicename><some whitespace><uuid>
# /dev/sda1 32caaec3-ef40-4691-a3b6-438c3f9bc1c0
for lsblk_line in out.splitlines():
if not lsblk_line:
continue
line = lsblk_line.strip()
fields = line.rsplit(None, 1)
if len(fields) < 2:
continue
device_name, uuid = fields[0].strip(), fields[1].strip()
if device_name in uuids:
continue
uuids[device_name] = uuid
return uuids
    def _udevadm_uuid(self, device):
        """Look up a device's filesystem UUID via ``udevadm info``.

        Fallback for versions of lsblk <= 2.23 that don't have --paths
        (see _run_lsblk() above).  Returns 'N/A' when udevadm is missing,
        exits non-zero, or reports no ID_FS_UUID property.
        """
        uuid = 'N/A'

        udevadm_path = self.module.get_bin_path('udevadm')
        if not udevadm_path:
            return uuid

        cmd = [udevadm_path, 'info', '--query', 'property', '--name', device]
        rc, out, err = self.module.run_command(cmd)
        if rc != 0:
            return uuid

        # a snippet of the output of the udevadm command below will be:
        # ...
        # ID_FS_TYPE=ext4
        # ID_FS_USAGE=filesystem
        # ID_FS_UUID=57b1a3e7-9019-4747-9809-7ec52bba9179
        # ...
        m = re.search('ID_FS_UUID=(.*)\n', out)
        if m:
            uuid = m.group(1)

        return uuid
def _run_findmnt(self, findmnt_path):
args = ['--list', '--noheadings', '--notruncate']
cmd = [findmnt_path] + args
rc, out, err = self.module.run_command(cmd, errors='surrogate_then_replace')
return rc, out, err
def _find_bind_mounts(self):
bind_mounts = set()
findmnt_path = self.module.get_bin_path("findmnt")
if not findmnt_path:
return bind_mounts
rc, out, err = self._run_findmnt(findmnt_path)
if rc != 0:
return bind_mounts
# find bind mounts, in case /etc/mtab is a symlink to /proc/mounts
for line in out.splitlines():
fields = line.split()
# fields[0] is the TARGET, fields[1] is the SOURCE
if len(fields) < 2:
continue
# bind mounts will have a [/directory_name] in the SOURCE column
if self.BIND_MOUNT_RE.match(fields[1]):
bind_mounts.add(fields[0])
return bind_mounts
def _mtab_entries(self):
mtab_file = '/etc/mtab'
if not os.path.exists(mtab_file):
mtab_file = '/proc/mounts'
mtab = get_file_content(mtab_file, '')
mtab_entries = []
for line in mtab.splitlines():
fields = line.split()
if len(fields) < 4:
continue
mtab_entries.append(fields)
return mtab_entries
@staticmethod
def _replace_octal_escapes_helper(match):
# Convert to integer using base8 and then convert to character
return chr(int(match.group()[1:], 8))
    def _replace_octal_escapes(self, value):
        """Decode all \\NNN octal escapes in value (as written by the kernel
        in /proc/mounts for spaces, tabs, etc.) into literal characters."""
        return self.OCTAL_ESCAPE_RE.sub(self._replace_octal_escapes_helper, value)
def get_mount_info(self, mount, device, uuids):
mount_size = get_mount_size(mount)
# _udevadm_uuid is a fallback for versions of lsblk <= 2.23 that don't have --paths
# see _run_lsblk() above
# https://github.com/ansible/ansible/issues/36077
uuid = uuids.get(device, self._udevadm_uuid(device))
return mount_size, uuid
    def get_mount_facts(self):
        """Collect per-mount facts (size, uuid, options) for all mounts.

        Per-mount probing is farmed out to a thread pool because statvfs
        on a dead network mount can block indefinitely; each worker gets a
        deadline of GATHER_TIMEOUT (or the module default) seconds.

        :returns: {'mounts': [mount_info, ...]}
        """
        mounts = []

        # gather system lists
        bind_mounts = self._find_bind_mounts()
        uuids = self._lsblk_uuid()
        mtab_entries = self._mtab_entries()

        # start threads to query each mount
        results = {}
        pool = ThreadPool(processes=min(len(mtab_entries), cpu_count()))
        maxtime = globals().get('GATHER_TIMEOUT') or timeout.DEFAULT_GATHER_TIMEOUT
        for fields in mtab_entries:
            # Transform octal escape sequences
            fields = [self._replace_octal_escapes(field) for field in fields]

            device, mount, fstype, options = fields[0], fields[1], fields[2], fields[3]

            # Skip pseudo filesystems: keep only local (/dev/...) and
            # network (host:/path) devices.
            if not device.startswith('/') and ':/' not in device or fstype == 'none':
                continue

            mount_info = {'mount': mount,
                          'device': device,
                          'fstype': fstype,
                          'options': options}

            if mount in bind_mounts:
                # only add if not already there, we might have a plain /etc/mtab
                if not self.MTAB_BIND_MOUNT_RE.match(options):
                    mount_info['options'] += ",bind"

            results[mount] = {'info': mount_info,
                              'extra': pool.apply_async(self.get_mount_info, (mount, device, uuids)),
                              'timelimit': time.time() + maxtime}

        pool.close()  # done with new workers, start gc

        # wait for workers and get results
        while results:
            for mount in results:
                res = results[mount]['extra']
                if res.ready():
                    if res.successful():
                        mount_size, uuid = res.get()
                        if mount_size:
                            results[mount]['info'].update(mount_size)
                        results[mount]['info']['uuid'] = uuid or 'N/A'
                    else:
                        # give incomplete data
                        errmsg = to_text(res.get())
                        self.module.warn("Error prevented getting extra info for mount %s: %s." % (mount, errmsg))
                        results[mount]['info']['note'] = 'Could not get extra information: %s.' % (errmsg)

                    mounts.append(results[mount]['info'])
                    # dict mutated during iteration -- must break and restart
                    # the for loop after each removal.
                    del results[mount]
                    break
                elif time.time() > results[mount]['timelimit']:
                    results[mount]['info']['note'] = 'Timed out while attempting to get extra information.'
                    mounts.append(results[mount]['info'])
                    del results[mount]
                    break
            else:
                # avoid cpu churn
                time.sleep(0.1)

        return {'mounts': mounts}
def get_device_links(self, link_dir):
if not os.path.exists(link_dir):
return {}
try:
retval = collections.defaultdict(set)
for entry in os.listdir(link_dir):
try:
target = os.path.basename(os.readlink(os.path.join(link_dir, entry)))
retval[target].add(entry)
except OSError:
continue
return dict((k, list(sorted(v))) for (k, v) in iteritems(retval))
except OSError:
return {}
    def get_all_device_owners(self):
        """Map each component device to the devices assembled from it.

        Scans /sys/block/<device>/slaves/<component>; e.g. a path
        /sys/block/dm-0/slaves/sda1 yields {'sda1': ['dm-0']}.
        """
        try:
            retval = collections.defaultdict(set)
            for path in glob.glob('/sys/block/*/slaves/*'):
                elements = path.split('/')
                device = elements[3]
                target = elements[5]
                retval[target].add(device)

            return dict((k, list(sorted(v))) for (k, v) in iteritems(retval))
        except OSError:
            return {}
    def get_all_device_links(self):
        """Gather all symlink views of block devices in one dict:
        by-id, by-uuid, by-label, plus the slave/holder ('masters') map."""
        return {
            'ids': self.get_device_links('/dev/disk/by-id'),
            'uuids': self.get_device_links('/dev/disk/by-uuid'),
            'labels': self.get_device_links('/dev/disk/by-label'),
            'masters': self.get_all_device_owners(),
        }
def get_holders(self, block_dev_dict, sysdir):
block_dev_dict['holders'] = []
if os.path.isdir(sysdir + "/holders"):
for folder in os.listdir(sysdir + "/holders"):
if not folder.startswith("dm-"):
continue
name = get_file_content(sysdir + "/holders/" + folder + "/dm/name")
if name:
block_dev_dict['holders'].append(name)
else:
block_dev_dict['holders'].append(folder)
    def get_device_facts(self):
        """Collect block-device facts from sysfs.

        For every entry under /sys/block this gathers vendor/model info,
        partitions (with sizes and UUIDs), scheduler, rotational flag,
        WWN, holders and the owning PCI host controller (via lspci).

        :returns: {'devices': {...}, 'device_links': {...}} (empty
            'devices' when /sys/block is unreadable).
        """
        device_facts = {}

        device_facts['devices'] = {}
        lspci = self.module.get_bin_path('lspci')
        if lspci:
            # -D prints full PCI domain numbers, matched against sysfs paths below.
            rc, pcidata, err = self.module.run_command([lspci, '-D'], errors='surrogate_then_replace')
        else:
            pcidata = None

        try:
            block_devs = os.listdir("/sys/block")
        except OSError:
            return device_facts

        # Map kernel device names to their WWN identifiers from /dev/disk/by-id.
        devs_wwn = {}
        try:
            devs_by_id = os.listdir("/dev/disk/by-id")
        except OSError:
            pass
        else:
            for link_name in devs_by_id:
                if link_name.startswith("wwn-"):
                    try:
                        wwn_link = os.readlink(os.path.join("/dev/disk/by-id", link_name))
                    except OSError:
                        continue
                    devs_wwn[os.path.basename(wwn_link)] = link_name[4:]

        links = self.get_all_device_links()
        device_facts['device_links'] = links

        for block in block_devs:
            virtual = 1
            sysfs_no_links = 0
            try:
                path = os.readlink(os.path.join("/sys/block/", block))
            except OSError:
                e = sys.exc_info()[1]
                # EINVAL: not a symlink (newer sysfs layouts) -- use the name directly.
                if e.errno == errno.EINVAL:
                    path = block
                    sysfs_no_links = 1
                else:
                    continue
            sysdir = os.path.join("/sys/block", path)
            if sysfs_no_links == 1:
                # A 'device' subdirectory indicates real (non-virtual) hardware.
                for folder in os.listdir(sysdir):
                    if "device" in folder:
                        virtual = 0
                        break
            d = {}
            d['virtual'] = virtual
            d['links'] = {}
            for (link_type, link_values) in iteritems(links):
                d['links'][link_type] = link_values.get(block, [])
            diskname = os.path.basename(sysdir)
            for key in ['vendor', 'model', 'sas_address', 'sas_device_handle']:
                d[key] = get_file_content(sysdir + "/device/" + key)

            sg_inq = self.module.get_bin_path('sg_inq')

            if sg_inq:
                device = "/dev/%s" % (block)
                rc, drivedata, err = self.module.run_command([sg_inq, device])
                if rc == 0:
                    serial = re.search(r"Unit serial number:\s+(\w+)", drivedata)
                    if serial:
                        d['serial'] = serial.group(1)

            for key, test in [('removable', '/removable'),
                              ('support_discard', '/queue/discard_granularity'),
                              ]:
                d[key] = get_file_content(sysdir + test)

            if diskname in devs_wwn:
                d['wwn'] = devs_wwn[diskname]

            d['partitions'] = {}
            for folder in os.listdir(sysdir):
                # Partition dirs are "<disk><N>" or "<disk>p<N>" (e.g. nvme0n1p1).
                m = re.search("(" + diskname + r"[p]?\d+)", folder)
                if m:
                    part = {}
                    partname = m.group(1)
                    part_sysdir = sysdir + "/" + partname

                    part['links'] = {}
                    for (link_type, link_values) in iteritems(links):
                        part['links'][link_type] = link_values.get(partname, [])

                    part['start'] = get_file_content(part_sysdir + "/start", 0)
                    part['sectors'] = get_file_content(part_sysdir + "/size", 0)

                    part['sectorsize'] = get_file_content(part_sysdir + "/queue/logical_block_size")
                    if not part['sectorsize']:
                        part['sectorsize'] = get_file_content(part_sysdir + "/queue/hw_sector_size", 512)
                    # sysfs sector counts are always in 512-byte units.
                    part['size'] = bytes_to_human((float(part['sectors']) * 512.0))
                    part['uuid'] = get_partition_uuid(partname)
                    self.get_holders(part, part_sysdir)

                    d['partitions'][partname] = part

            d['rotational'] = get_file_content(sysdir + "/queue/rotational")
            d['scheduler_mode'] = ""
            scheduler = get_file_content(sysdir + "/queue/scheduler")
            if scheduler is not None:
                # Active scheduler is bracketed, e.g. "noop [deadline] cfq".
                m = re.match(r".*?(\[(.*)\])", scheduler)
                if m:
                    d['scheduler_mode'] = m.group(2)

            d['sectors'] = get_file_content(sysdir + "/size")
            if not d['sectors']:
                d['sectors'] = 0
            d['sectorsize'] = get_file_content(sysdir + "/queue/logical_block_size")
            if not d['sectorsize']:
                d['sectorsize'] = get_file_content(sysdir + "/queue/hw_sector_size", 512)
            d['size'] = bytes_to_human(float(d['sectors']) * 512.0)

            d['host'] = ""

            # domains are numbered (0 to ffff), bus (0 to ff), slot (0 to 1f), and function (0 to 7).
            m = re.match(r".+/([a-f0-9]{4}:[a-f0-9]{2}:[0|1][a-f0-9]\.[0-7])/", sysdir)
            if m and pcidata:
                pciid = m.group(1)
                did = re.escape(pciid)
                m = re.search("^" + did + r"\s(.*)$", pcidata, re.MULTILINE)
                if m:
                    d['host'] = m.group(1)

            self.get_holders(d, sysdir)

            device_facts['devices'][diskname] = d

        return device_facts
def get_uptime_facts(self):
uptime_facts = {}
uptime_file_content = get_file_content('/proc/uptime')
if uptime_file_content:
uptime_seconds_string = uptime_file_content.split(' ')[0]
uptime_facts['uptime_seconds'] = int(float(uptime_seconds_string))
return uptime_facts
    def _find_mapper_device_name(self, dm_device):
        """Translate a /dev/dm-N device path into its friendlier
        /dev/mapper/<name> alias via ``dmsetup``.

        Non-dm paths, and dm paths dmsetup cannot resolve, are returned
        unchanged.
        """
        dm_prefix = '/dev/dm-'
        mapper_device = dm_device
        if dm_device.startswith(dm_prefix):
            # required=True: fail the module if dmsetup is missing.
            dmsetup_cmd = self.module.get_bin_path('dmsetup', True)
            mapper_prefix = '/dev/mapper/'
            rc, dm_name, err = self.module.run_command("%s info -C --noheadings -o name %s" % (dmsetup_cmd, dm_device))
            if rc == 0:
                mapper_device = mapper_prefix + dm_name.rstrip()
        return mapper_device
    def get_lvm_facts(self):
        """ Get LVM Facts if running as root and lvm utils are available

        Parses CSV-style output (',' separator, sizes in gigabytes) of the
        vgs/lvs/pvs utilities.  Returns {} for non-root users or systems
        without the LVM tools.
        """

        lvm_facts = {}

        if os.getuid() == 0 and self.module.get_bin_path('vgs'):
            lvm_util_options = '--noheadings --nosuffix --units g --separator ,'

            vgs_path = self.module.get_bin_path('vgs')
            # vgs fields: VG #PV #LV #SN Attr VSize VFree
            vgs = {}
            if vgs_path:
                rc, vg_lines, err = self.module.run_command('%s %s' % (vgs_path, lvm_util_options))
                for vg_line in vg_lines.splitlines():
                    items = vg_line.strip().split(',')
                    vgs[items[0]] = {'size_g': items[-2],
                                     'free_g': items[-1],
                                     'num_lvs': items[2],
                                     'num_pvs': items[1]}

            lvs_path = self.module.get_bin_path('lvs')
            # lvs fields:
            # LV VG Attr LSize Pool Origin Data% Move Log Copy% Convert
            lvs = {}
            if lvs_path:
                rc, lv_lines, err = self.module.run_command('%s %s' % (lvs_path, lvm_util_options))
                for lv_line in lv_lines.splitlines():
                    items = lv_line.strip().split(',')
                    lvs[items[0]] = {'size_g': items[3], 'vg': items[1]}

            pvs_path = self.module.get_bin_path('pvs')
            # pvs fields: PV VG #Fmt #Attr PSize PFree
            pvs = {}
            if pvs_path:
                rc, pv_lines, err = self.module.run_command('%s %s' % (pvs_path, lvm_util_options))
                for pv_line in pv_lines.splitlines():
                    items = pv_line.strip().split(',')
                    # Key physical volumes by their /dev/mapper alias when applicable.
                    pvs[self._find_mapper_device_name(items[0])] = {
                        'size_g': items[4],
                        'free_g': items[5],
                        'vg': items[1]}

            lvm_facts['lvm'] = {'lvs': lvs, 'vgs': vgs, 'pvs': pvs}

        return lvm_facts
class LinuxHardwareCollector(HardwareCollector):
    """Registers LinuxHardware as the hardware fact collector for Linux."""
    _platform = 'Linux'
    _fact_class = LinuxHardware
    # NOTE(review): presumably the 'platform' facts must be collected
    # before this collector runs — confirm against the collector framework.
    # Set literal replaces the redundant set([...]) construction.
    required_facts = {'platform'}
| gpl-3.0 |
mikeireland/pymfe | pymfe/rv.py | 1 | 37463 | """This module/class contains functionality for computing (and plotting) radial
velocities and creating reference spectra for extracted fluxes. This should
ideally remain independent of the extraction method, such that it does not
matter which spectrograph took the data, nor what "Spectrograph" object was
used for extraction.
Most of the code below has been moved from the script "test_rhea2_extract.py".
Work still needs to be done post-refactor to ensure function input and outputs
are sensible, their docstrings are informative and they follow the principles of
Object Oriented Programming - such as the Single Responsibility Principle (Along
with a general clean up of the code and comments, such as having the code meet
the python line length guidelines --> the main benefit of which is having
multiple editors open side by side on smaller screens)
TODO
1) Move extract method to either extract module or rhea
2) Try to separate calculation/processing of data from saving/loading/displaying
3) Tidy up inputs to functions (e.g. cull unnecessary input parameters)
4) Make create_ref_spect() output variances (Median Absolute Deviations)
5) Possibly have dark calibration (for both flats and science frames) in its own
method. This would clean up the existing extract method, removing the need
to check whether darks and flats had been passed in (or varying permutations
of each - e.g. in the case where some of the data has already been dark
corrected, such as the solar data)
"""
from __future__ import division, print_function
import numpy as np
import matplotlib.pyplot as plt
import scipy.optimize as op
import scipy.interpolate as interp
from astropy.time import Time
from astropy.coordinates import SkyCoord
from astropy import constants as const
import PyAstronomy.pyasl as pyasl
import opticstools as ot
import pdb
try:
import pyfits
except:
import astropy.io.fits as pyfits
class RadialVelocity():
    """Compute radial velocities from extracted spectra and build the
    reference spectra they are measured against.

    Also provides plotting helpers and save/load routines for fluxes,
    reference spectra and the resulting RVs.  Deliberately independent
    of the extraction method and spectrograph.
    """
    def __init__(self):
        """(Presently empty) constructor; the class holds no state."""
        pass
def rv_shift_resid(self, params, wave, spect, spect_sdev, spline_ref,
                   return_spect=False):
    """Residuals of fitting a (subsampled) reference spectrum to data.

    The model for parameters p[0] through p[3] is:

    .. math::
        y(x) = Ref[ wave(x) * (1 - p[0]/c) ] * exp(p[1] x^2 + p[2] x + p[3])

    where "Ref" is a function f(wave).

    Parameters
    ----------
    params: array-like
        Fit parameters: p[0] velocity (m/s), p[1:4] continuum terms.
    wave: float array
        Wavelengths for the observed spectrum.
    spect: float array
        The observed spectrum.
    spect_sdev: float array
        Standard deviation of the input spectrum.
    spline_ref: InterpolatedUnivariateSpline instance
        For interpolating the reference spectrum.
    return_spect: boolean
        If True, return the fitted model spectrum instead of residuals.

    Returns
    -------
    resid: float array
        (model - spect)/spect_sdev, or the model when return_spect.
    """
    npix = len(spect)
    pix_frac = (np.arange(npix) - npix // 2) / npix
    continuum = np.exp(params[1] * pix_frac**2 +
                       params[2] * pix_frac + params[3])
    # Sign convention: a redshift (positive velocity) maps a reference
    # wavelength onto a longer target wavelength, so the target must be
    # sampled from the reference at shorter wavelengths.
    model = spline_ref(wave * (1.0 - params[0] / const.c.si.value)) * continuum
    if return_spect:
        return model
    return (model - spect) / spect_sdev
def rv_shift_chi2(self, params, wave, spect, spect_sdev, spline_ref):
    """Chi-squared of an RV fit; thin wrapper around rv_shift_resid.

    Parameters
    ----------
    params: array-like
        Fit parameters, as for rv_shift_resid.
    wave: float array
        Wavelengths for the observed spectrum.
    spect: float array
        The observed spectrum.
    spect_sdev: float array
        Standard deviation of the input spectrum.
    spline_ref: InterpolatedUnivariateSpline instance
        For interpolating the reference spectrum.

    Returns
    -------
    chi2: float
        Sum of squared residuals.
    """
    resid = self.rv_shift_resid(params, wave, spect, spect_sdev, spline_ref)
    return np.sum(resid**2)
def rv_shift_jac(self, params, wave, spect, spect_sdev, spline_ref):
    r"""Explicit Jacobian function for rv_shift_resid.

    This is not a completely analytic solution, but without it there
    seems to be numerical instability.  The key equations are:

    .. math:: f(x) = R( \lambda(x) (1 - p_0/c) ) \times \exp(p_1 x^2 + p_2 x + p_3)

       g(x) = (f(x) - d(x))/\sigma(x)

       \frac{dg}{dp_0}(x) \approx [f(x + 1 m/s) -f(x) ]/\sigma(x)

       \frac{dg}{dp_1}(x) = x^2 f(x) / \sigma(x)

       \frac{dg}{dp_2}(x) = x f(x) / \sigma(x)

       \frac{dg}{dp_3}(x) = f(x) / \sigma(x)

    Parameters
    ----------
    params: float array
        Fit parameters, as for rv_shift_resid.
    wave: float array
        Wavelengths for the observed spectrum.
    spect: float array
        The observed spectrum.
    spect_sdev: float array
        Standard deviation of the input spectrum.
    spline_ref: InterpolatedUnivariateSpline instance
        For interpolating the reference spectrum.

    Returns
    -------
    jac: float array of shape (len(spect), 4)
        The Jacobian; column k is dg/dp_k.
    """
    ny = len(spect)
    xx = (np.arange(ny)-ny//2)/ny
    norm = np.exp(params[1]*xx**2 + params[2]*xx + params[3])
    fitted_spect = spline_ref(wave*(1.0 - params[0]/const.c.si.value))*norm
    jac = np.empty( (ny,4) )
    # The Jacobian is the derivative of fitted_spect/sdev with respect
    # to p[0] through p[3].  Column order matches the parameter order.
    jac[:,3] = fitted_spect/spect_sdev
    jac[:,2] = fitted_spect*xx/spect_sdev
    jac[:,1] = fitted_spect*xx**2/spect_sdev
    # The velocity derivative is taken numerically, as a forward
    # difference over a 1 m/s step (see docstring note on stability).
    jac[:,0] = (spline_ref(wave*(1.0 - (params[0] + 1.0)/const.c.si.value))*
                norm - fitted_spect)/spect_sdev
    return jac
def create_ref_spect(self, wave, fluxes, vars, bcors, rebin_fact=2,
                     gauss_sdev=1.0, med_cut=0.6, gauss_hw=7, threshold=100):
    """Create a reference spectrum from a series of target spectra.

    The process is:
    1) Re-grid the spectra onto a rebin_fact times finer wavelength grid.
    2) Barycentrically correct the spectra by linear interpolation. Note
       that when used on a small data set the spectra will typically be
       shifted by many km/s. For an RV-stable star the fitting process
       then needs to find the opposite of this barycentric velocity.
    3) Remove bad (i.e. low flux) files.
    4) Median combine the spectra.
    5) Convolve the result by a Gaussian to remove high spatial frequency
       noise. This matters when the reference is built from only a few
       input spectra, where high-frequency noise can be fitted to itself.

    Parameters
    ----------
    wave: 2D np.array(float)
        Wavelength coordinate map of form (Order, Wavelength/pixel)
    fluxes: 3D np.array(float)
        Fluxes of form (Observation, Order, Flux/pixel)
    vars: 3D np.array(float)
        Variance of form (Observation, Order, Variance/pixel)
        (currently unused here; kept for interface compatibility)
    bcors: 1D np.array(float)
        Barycentric correction for each observation.
    rebin_fact: int
        Factor by which to rebin.
    gauss_sdev: float
        Standard deviation (in rebinned pixels) of the smoothing Gaussian.
    med_cut: float
        Files with median flux below med_cut * overall median are dropped
        (<= 0 keeps every file).
    gauss_hw: int
        Half-width of the smoothing kernel in rebinned pixels.
    threshold: float
        Fluxes below 2*threshold are suppressed quadratically to zero.

    Returns
    -------
    wave_ref: 2D np.array(float)
        Wavelength coordinate map of form (Order, Wavelength/pixel*2+2),
        where the wavelength scale has been interpolated.
    ref_spect: 2D np.array(float)
        Reference spectrum of form (Order, Flux/pixel*2+2),
        where the flux scale has been interpolated.
    """
    nm = fluxes.shape[1]
    ny = fluxes.shape[2]
    nf = fluxes.shape[0]
    C = const.c.si.value
    # Create arrays for our outputs.
    wave_ref = np.empty( (nm,rebin_fact*ny + 2) )
    ref_spect = np.empty( (nm,rebin_fact*ny + 2) )
    # First, rebin everything, using opticstools.utils.regrid_fft
    new_shape = (fluxes.shape[1],rebin_fact*fluxes.shape[2])
    fluxes_rebin = np.empty( (fluxes.shape[0],fluxes.shape[1],
                              rebin_fact*fluxes.shape[2]) )
    for i in range(nf):
        fluxes_rebin[i] = ot.utils.regrid_fft(fluxes[i],new_shape)
    # Create the final wavelength grid.
    for j in range(nm):
        wave_ref[j,1:-1] = np.interp(np.arange(rebin_fact*ny)/rebin_fact,
                                     np.arange(ny),wave[j,:])
        # Fill in the end wavelengths, including +/-100 km/s from the ends.
        wave_ref[j,-2] = wave_ref[j,-3] + (wave_ref[j,-3]-wave_ref[j,-4])
        wave_ref[j,0] = wave_ref[j,1] * (C + 1e5)/C
        wave_ref[j,-1] = wave_ref[j,-2] * (C - 1e5)/C
    # Barycentric correct. For a positive barycentric velocity, the
    # observer is moving towards the star, which means that the star is
    # blue-shifted and the correct rest-frame spectrum is at longer
    # wavelengths. The interpolation below shifts the spectrum to the
    # red, as required.
    for i in range(nf):
        for j in range(nm):
            # Awkwardly, we've extended the wavelength scale by 2
            # elements, but haven't yet extended the fluxes...
            ww = wave_ref[j,1:-1]
            fluxes_rebin[i,j] = np.interp(ww*(1-bcors[i]/C), ww[::-1],
                                          fluxes_rebin[i,j,::-1])
    # Combine the spectra, first computing per-(file,order) medians.
    flux_meds = np.median(fluxes_rebin,axis=2)
    flux_files = np.median(flux_meds,axis=1)
    if med_cut > 0:
        good_files = np.where(flux_files > med_cut*np.median(flux_files))[0]
    else:
        # FIX: np.int was removed in numpy >= 1.24; the builtin int is
        # the exact equivalent (np.int was always an alias for it).
        good_files = np.arange(len(flux_files), dtype=int)
    flux_orders = np.median(flux_meds[good_files],axis=0)
    # Normalise each good file/order by its median before combining.
    flux_norm = fluxes_rebin.copy()
    for g in good_files:
        for j in range(nm):
            flux_norm[g,j,:] /= flux_meds[g,j]
    # Create a median over files.
    flux_ref = np.median(flux_norm[good_files],axis=0)
    # Multiply this by the median for each order.
    for j in range(nm):
        flux_ref[j] *= flux_orders[j]
    # Threshold the data whenever the flux is less than "threshold":
    # fluxes below 2*threshold are smoothly suppressed towards zero.
    if (threshold > 0):
        bad = flux_ref<2*threshold
        flux_ref[bad] *= np.maximum(flux_ref[bad]-threshold,0)/threshold
    # Create a Gaussian smoothing function for the reference spectrum.
    # This is needed to prevent a bias to zero radial velocity,
    # especially in the case of few data points.
    gg = np.exp(-(np.arange(2*gauss_hw+1)-gauss_hw)**2/2.0/gauss_sdev**2)
    gg /= np.sum(gg)
    one_order = np.empty(flux_ref.shape[1] + 2*gauss_hw)
    for j in range(nm):
        # Pad each order with edge values so the convolution does not
        # roll off at the ends.
        one_order[gauss_hw:-gauss_hw] = flux_ref[j,:]
        one_order[:gauss_hw] = one_order[gauss_hw]
        one_order[-gauss_hw:] = one_order[-gauss_hw-1]
        ref_spect[j,:] = np.convolve(one_order, gg,
                                     mode='same')[gauss_hw-1:1-gauss_hw]
    return wave_ref, ref_spect
def extract_spectra(self, files, extractor, star_dark=None, flat_files=None,
                    flat_dark=None, location=('151.2094','-33.865',100.0),
                    coord=None, do_bcor=True, ra_dec_hr=False):
    """Extract spectra from files, with optional dark correction,
    flat fielding and barycentric correction.

    The process is:
    1) Dark correct the data and the flat fields (when darks are given).
    2) Compute (but do not apply) barycentric corrections.
    3) Extract the data and the flat fields using the extract module, to
       form :math:`f_m(x)`, the flux for orders m and dispersion
       direction pixels x.
    4) Normalise the flat fields, so that the median of each order is 1.0.
    5) Divide by the extracted flat field.  Uncertainties from the flat
       field are added in quadrature.

    Parameters
    ----------
    files: list of strings
        One string for each file. Can be on separate nights - a full
        pathname should be given.
    extractor:
        Object providing one_d_extract(data=..., rnoise=...).
    star_dark: np.array or None
        Dark frame for the science data; None skips dark correction.
    flat_files: list of strings or None
        One flat per science file; None skips flat fielding.
    flat_dark: np.array or None
        Dark frame for the flats; None skips their dark correction.
    location: (lattitude:string, longitude:string, elevation:float)
        The location on Earth where the data were taken.
    coord: astropy.coordinates.sky_coordinate.SkyCoord
        The coordinates of the target (read from headers when None).
    do_bcor: boolean
        Flag for whether to do barycentric correction.
    ra_dec_hr: boolean
        True when the header RA is in hours rather than degrees.

    Returns
    -------
    fluxes: 3D np.array(float)
        Fluxes of form (Observation, Order, Flux/pixel)
    vars: 3D np.array(float)
        Variance of form (Observation, Order, Variance/pixel)
    bcors: 1D np.array(float)
        Barycentric correction for each observation.
    mjds: 1D np.array(float)
        Modified Julian Date (MJD) of each observation.
    """
    # Initialise lists of return values; each index is one observation.
    fluxes = []
    vars = []
    dates = []
    bcors = []
    # FIX: treat an omitted flat list as empty so the len() checks below
    # are safe (flat_files=None previously raised TypeError outside the
    # try block, despite being the documented default).
    if flat_files is None:
        flat_files = []
    #!!! This is dodgy, as files and flat_files should go together in a dict
    for ix,file in enumerate(files):
        # Dark correct the science and flat frames, only if flats/darks
        # have been supplied (ThAr might not have them).  If not
        # supplied, just use the raw science/reference data.
        try:
            # FIX: star_dark=None now means "no dark" instead of raising
            # TypeError and silently skipping the file via the handler.
            if star_dark is not None and len(star_dark) > 0:
                data = pyfits.getdata(file) - star_dark
            else:
                data = pyfits.getdata(file)
            # Dark correct flats.
            if len(flat_files) > 0 and flat_dark is not None and len(flat_dark) > 0:
                flat = pyfits.getdata(flat_files[ix]) - flat_dark
            elif len(flat_files) > 0:
                flat = pyfits.getdata(flat_files[ix])
        except Exception:
            # FIX: narrowed from a bare except so KeyboardInterrupt and
            # SystemExit are not swallowed; an array shape mismatch is
            # the expected failure mode here.
            print('Unable to calibrate file ' + file +
                  '. Check that format of data arrays are consistent.')
            print(pyfits.getdata(file).shape)
            if star_dark is not None:
                print(star_dark.shape)
            continue
        header = pyfits.getheader(file)
        date = Time(header['JD'], format='jd', location=location)
        dates.append(date)
        # Determine the barycentric correction.
        if do_bcor:
            if not coord:
                # Depending on whether the RA and DEC is saved in hours
                # or degrees, load and create a SkyCoord object.
                if ra_dec_hr:
                    ra_deg = float(header['RA'])*15
                else:
                    ra_deg = float(header['RA'])
                dec_deg = float(header['DEC'])
                coord = SkyCoord(ra=ra_deg, dec=dec_deg, unit='deg')
            if not location:
                location=(float(header['LONG']), float(header['LAT']),
                          float(header['HEIGHT']))
            # helcorr's first return value is the correction; the 1e3
            # factor converts it to m/s.
            bcors.append(1e3*pyasl.helcorr(float(location[0]),
                         float(location[1]),location[2],coord.ra.deg,
                         coord.dec.deg,date.jd)[0] )
        else:
            bcors.append(0.0)
        # Extract the fluxes and variance for the science and flat frames.
        print("Extracting spectra from file #", str(ix))
        flux, var = extractor.one_d_extract(data=data, rnoise=20.0)
        # Continue only when flats have been supplied:
        # perform flat field correction and adjust variances.
        if len(flat_files) > 0:
            flat_flux, fvar = extractor.one_d_extract(data=flat,
                                                      rnoise=20.0)
            for j in range(flat_flux.shape[0]):
                medf = np.median(flat_flux[j])
                flat_flux[j] /= medf
                fvar[j] /= medf**2
            # Calculate the variance after dividing by the flat
            # (standard error propagation for a quotient).
            var = var/flat_flux**2 + fvar * flux**2/flat_flux**4
            # Now normalise the flux.
            flux /= flat_flux
        # Regardless of whether the data has been flat field corrected,
        # append to the arrays and continue.
        fluxes.append(flux[:,:,0])
        vars.append(var[:,:,0])
    fluxes = np.array(fluxes)
    vars = np.array(vars)
    bcors = np.array(bcors)
    mjds = np.array([d.mjd for d in dates])
    return fluxes, vars, bcors, mjds
def calculate_rv_shift(self, wave_ref, ref_spect, fluxes, vars, bcors,
                       wave, return_fitted_spects=False, bad_threshold=10):
    r"""Calculates the radial velocity of each spectrum.

    The radial velocity shift of the reference spectrum required to
    match the flux in each order of each input spectrum is calculated.
    The input fluxes are flat-fielded data, fitted with a
    barycentrically corrected reference spectrum :math:`R(\lambda)`
    according to:

    .. math::
        f(x) = R( \lambda(x) (1 - p_0/c) ) \times \exp(p_1 x^2 + p_2 x + p_3)

    The first term is simply the velocity corrected spectrum, based on
    the arc-lamp derived reference wavelength scale :math:`\lambda(x)`
    for pixel coordinates x.  The second term is a continuum
    normalisation - a shifted Gaussian was chosen as a function that is
    non-zero everywhere.  scipy.optimize.leastsq is used to find the
    best fitting set of parameters :math:`p_0` through :math:`p_3`.

    The reference spectrum function :math:`R(\lambda)` is created using
    a wavelength grid over-sampled with respect to the data by a factor
    of 2.  Individual fitted wavelengths are then found by cubic spline
    interpolation on this discrete :math:`R_j(\lambda_j)` grid.

    Parameters
    ----------
    wave_ref: 2D np.array(float)
        Wavelength coordinate map of form (Order, Wavelength/pixel*2+2),
        where the wavelength scale has been interpolated.
    ref_spect: 2D np.array(float)
        Reference spectrum of form (Order, Flux/pixel*2+2),
        where the flux scale has been interpolated.
    fluxes: 3D np.array(float)
        Fluxes of form (Observation, Order, Flux/pixel)
    vars: 3D np.array(float)
        Variance of form (Observation, Order, Variance/pixel)
    bcors: 1D np.array(float)
        Barycentric correction for each observation.
    wave: 2D np.array(float)
        Wavelength coordinate map of form (Order, Wavelength/pixel)
    return_fitted_spects: boolean
        Also return the fitted model spectra.
    bad_threshold: float
        Residual (in sigma) above which a pixel is masked and refitted.

    Returns
    -------
    rvs: 2D np.array(float)
        Radial velocities of format (Observation, Order)
    rv_sigs: 2D np.array(float)
        Radial velocity sigmas of format (Observation, Order)
    """
    nm = fluxes.shape[1]
    ny = fluxes.shape[2]
    nf = fluxes.shape[0]
    rvs = np.zeros( (nf,nm) )
    rv_sigs = np.zeros( (nf,nm) )
    initp = np.zeros(4)
    initp[3]=0.5
    initp[0]=0.0
    spect_sdev = np.sqrt(vars)
    fitted_spects = np.empty(fluxes.shape)
    for i in range(nf):
        # Start with initial guess of no intrinsic RV for the target.
        initp[0] = -bcors[i] #!!! New Change
        nbad=0
        for j in range(nm):
            # This is the *only* non-linear interpolation function that
            # doesn't take forever.  Reversed slices because wave_ref is
            # stored in decreasing wavelength order.
            spl_ref = interp.InterpolatedUnivariateSpline(wave_ref[j,::-1],
                                                          ref_spect[j,::-1])
            args = (wave[j,:], fluxes[i,j,:], spect_sdev[i,j,:], spl_ref)
            # Remove edge effects in a slightly dodgy way.
            # 20 pixels is about 30km/s.
            args[2][:20] = np.inf
            args[2][-20:] = np.inf
            the_fit = op.leastsq(self.rv_shift_resid, initp, args=args,
                                 diag=[1e3,1,1,1], Dfun=self.rv_shift_jac,
                                 full_output=True)
            # Remove bad points...
            resid = self.rv_shift_resid( the_fit[0], *args)
            wbad = np.where( np.abs(resid) > bad_threshold)[0]
            nbad += len(wbad)
            # Many bad pixels in a single order is *crazy*: plot the fit
            # for inspection, mask the outliers and refit.
            if len(wbad)>20:
                fitted_spect = self.rv_shift_resid(the_fit[0], *args,
                                                   return_spect=True)
                plt.clf()
                plt.plot(args[0], args[1])
                plt.plot(args[0][wbad], args[1][wbad],'o')
                plt.plot(args[0], fitted_spect)
                plt.xlabel("Wavelength")
                plt.ylabel("Flux")
                args[2][wbad] = np.inf
                the_fit = op.leastsq(self.rv_shift_resid, initp, args=args,
                                     diag=[1e3,1,1,1],
                                     Dfun=self.rv_shift_jac,
                                     full_output=True)
            # Some outputs for testing.
            fitted_spects[i,j] = self.rv_shift_resid(the_fit[0], *args,
                                                     return_spect=True)
            if ( np.abs(the_fit[0][0] - bcors[i]) < 1e-4 ):
                # This shouldn't happen, and indicates a problem with
                # the fit.
                pass
            # Save the fit and the uncertainty.
            rvs[i,j] = the_fit[0][0]
            try:
                rv_sigs[i,j] = np.sqrt(the_fit[1][0,0])
            except (TypeError, IndexError):
                # FIX: narrowed from a bare except.  the_fit[1] is None
                # (hence TypeError) when leastsq could not estimate the
                # covariance.  np.NaN was removed in numpy 2.0.
                rv_sigs[i,j] = np.nan
        print("Done file {0:d}. Bad spectral pixels: {1:d}".format(i,nbad))
    if return_fitted_spects:
        return rvs, rv_sigs, fitted_spects
    else:
        return rvs, rv_sigs
def save_fluxes(self, files, fluxes, vars, bcors, wave, mjds, out_path):
    """Save each extracted spectrum to its own fits file.

    Output name: <input basename>_extracted.fits in out_path.  HDU
    layout: [0] fluxes (with the input header), [1] variances,
    [2] wavelength map, [3] binary table with 'bcor' and 'mjd'.

    TODO:
    Might want to remove the dependence on files (to get the headers) as
    it will prevent (or complicate) the saving of the reference spectrum.

    Parameters
    ----------
    files: list of strings
        Input file paths; used for headers and output names.
    fluxes: 3D np.array(float)
        Fluxes of form (Observation, Order, Flux/pixel)
    vars: 3D np.array(float)
        Variance of form (Observation, Order, Variance/pixel)
    bcors: 1D np.array(float)
        Barycentric correction for each observation.
    wave: 2D np.array(float)
        Wavelength coordinate map of form (Order, Wavelength/pixel)
    mjds: 1D np.array(float)
        Modified Julian Date (MJD) of each observation.
    out_path: String
        The directory to save the extracted fluxes (must end in a path
        separator, since it is concatenated directly).
    """
    # Loop through each extracted spectrum.
    for i, file in enumerate(files):
        #try:
        # Extract the header information from the file.
        header = pyfits.getheader(file)
        file_name = file.split("/")[-1].split(".")[0] + "_extracted.fits"
        full_path = out_path + file_name
        # Save to fits.
        # NOTE(review): pyfits.new_table and the clobber= keyword are
        # deprecated in modern astropy (BinTableHDU.from_columns /
        # overwrite=) — confirm the pinned pyfits version before porting.
        hl = pyfits.HDUList()
        hl.append(pyfits.ImageHDU(fluxes[i], header))
        hl.append(pyfits.ImageHDU(vars[i]))
        hl.append(pyfits.ImageHDU(wave))
        col1 = pyfits.Column(name='bcor', format='D',
                             array=np.array([bcors[i]]))
        col2 = pyfits.Column(name='mjd', format='D',
                             array=np.array([mjds[i]]))
        cols = pyfits.ColDefs([col1, col2])
        hl.append(pyfits.new_table(cols))
        hl.writeto(full_path, clobber=True)
        #except:
        #print("Error: Some files may not have been saved.")
        #print("Likely due to incompatible array sizes for frames.")
        #continue
def save_ref_spect(self, files, ref_spect, vars_ref, wave_ref, bcors, mjds,
                   out_path, object):
    """Save an extracted reference spectrum to a fits file.

    Output name: reference_spectrum_<n files>_<object>.fits in out_path.
    The names of all contributing files are recorded as COMB<i> header
    cards.  Note: the `object` parameter shadows the builtin of the
    same name; kept for interface compatibility.

    Parameters
    ----------
    files: list of strings
        Files that were combined into the reference (names recorded).
    ref_spect: 3D np.array(float)
        Fluxes of form (Observation, Order, Flux/pixel)
    vars_ref: 3D np.array(float)
        Variance of form (Observation, Order, Variance/pixel)
    wave_ref: 2D np.array(float)
        Wavelength coordinate map of form (Order, Wavelength/pixel)
    bcors: 1D np.array(float)
        Barycentric correction for each observation used to create
        ref_spect.
    mjds: 1D np.array(float)
        Modified Julian Date (MJD) of each observation used to create
        ref_spect.
    out_path: String
        The directory to save the reference spectrum.
    object: String
        The name of the object observed.
    """
    header = pyfits.header.Header()
    n = str(len(files))
    full_path = out_path + "reference_spectrum_" + n + "_" + object +".fits"
    # Record which spectra were used to create the reference.
    for i, file in enumerate(files):
        # Extract the file name of each file and store in the header.
        file_name = file.split("/")[-1].split(".")[0] + "_extracted.fits"
        header_name = "COMB" + str(i)
        comment = "Combined spectrum #" + str(i)
        header[header_name] = (file_name, comment)
    # Save to fits.  Only the first epoch's bcor/mjd are stored.
    hl = pyfits.HDUList()
    hl.append(pyfits.ImageHDU(ref_spect, header))
    hl.append(pyfits.ImageHDU(vars_ref[0]))
    hl.append(pyfits.ImageHDU(wave_ref))
    col1 = pyfits.Column(name='bcor', format='D', array=np.array([bcors[0]]))
    col2 = pyfits.Column(name='mjd', format='D',
                        array=np.array([mjds[0]]))
    cols = pyfits.ColDefs([col1, col2])
    hl.append(pyfits.new_table(cols))
    hl.writeto(full_path, clobber=True)
def load_ref_spect(self, path):
    """Load a reference spectrum previously written by save_ref_spect.

    Parameters
    ----------
    path: string
        The file path to the saved reference spectrum.

    Returns
    -------
    ref_spect: 3D np.array(float)
        Fluxes of form (Observation, Order, Flux/pixel)
    vars_ref: 3D np.array(float)
        Variance of form (Observation, Order, Variance/pixel)
    wave_ref: 2D np.array(float)
        Wavelength coordinate map of form (Order, Wavelength/pixel)
    bcors_ref: 1D np.array(float)
        Barycentric correction of the first contributing observation.
    mjds_ref: 1D np.array(float)
        MJD of the first contributing observation.
    """
    hdus = pyfits.open(path)
    ref_spect = hdus[0].data
    vars_ref = hdus[1].data
    wave_ref = hdus[2].data
    table = hdus[3].data
    bcors_ref = table['bcor'][0]
    mjds_ref = table['mjd'][0]
    hdus.close()
    return ref_spect, vars_ref, wave_ref, bcors_ref, mjds_ref
def load_fluxes(self, files):
    """Load previously saved extracted spectra.

    Parameters
    ----------
    files: [string]
        String list of filepaths of the saved fluxes.

    Returns
    -------
    fluxes: 3D np.array(float)
        Fluxes of form (Observation, Order, Flux/pixel)
    vars: 3D np.array(float)
        Variance of form (Observation, Order, Variance/pixel)
    wave: 2D np.array(float)
        Wavelength coordinate map of form (Order, Wavelength/pixel)
    bcors: 1D np.array(float)
        Barycentric correction for each observation.
    mjds: 1D np.array(float)
        Modified Julian Date (MJD) of each observation.
    """
    fluxes = []
    vars = []
    bcors = []
    mjds = []
    wave = []
    for path in files:
        hdus = pyfits.open(path)
        fluxes.append(hdus[0].data)
        vars.append(hdus[1].data)
        # A single wavelength scale is assumed for the whole series
        # (same instrument), so only one copy is kept.
        wave = hdus[2].data
        table = hdus[3].data
        bcors.append(table['bcor'][0])
        mjds.append(table['mjd'][0])
        hdus.close()
    return (np.array(fluxes), np.array(vars), wave,
            np.array(bcors), np.array(mjds))
def plot_rvs(self, rvs, rv_sigs, mjds, dates, bcors, plot_title):
    """Plot the barycentrically corrected radial velocities.

    Note:
    Not complete - a median over orders is only a first step; a proper
    weighted mean is needed.

    Parameters
    ----------
    rvs: 2D np.array(float)
        Radial velocities of format (Observation, Order).  Not modified.
    rv_sigs: 2D np.array(float)
        Radial velocity sigmas of format (Observation, Order)
    mjds: 1D np.array(float)
        Modified Julian Date (MJD) of each observation.
    dates: list of astropy Time objects
        Observation dates (for the plot_date trace).
    bcors: 1D np.array(float)
        Barycentric correction for each observation.
    plot_title: String
        Name of the plot
    """
    # Dimensions (number of observations and orders respectively).
    nf = rvs.shape[0]
    nm = rvs.shape[1]
    # FIX: work on a corrected copy rather than doing `rvs += ...`,
    # which mutated the caller's array as a side effect of plotting.
    bcor_rvs = rvs + bcors.repeat(nm).reshape( (nf,nm) )
    plt.clf()
    # Weighted mean over orders (inverse-variance weights).
    rv_mn, wt_sum = np.average(bcor_rvs, axis=1, weights=1.0/rv_sigs**2,
                               returned=True)
    rv_mn_sig = 1.0/np.sqrt(wt_sum)
    # Median over a central subset of orders, used for the errorbar plot.
    rv_med2 = np.median(bcor_rvs[:,3:20],1)
    plt.errorbar(mjds, rv_med2, yerr=rv_mn_sig,fmt='o')
    plt.xlabel('Date (MJD)')
    plt.ylabel('Barycentric RV (m/s)')
    plt.title(plot_title)
    plt.plot_date([dates[i].plot_date for i in range(len(dates))], rv_mn)
    plt.show()
def save_rvs(self, rvs, rv_sigs, bcor, mjds, bcor_rvs, base_save_path):
    """Write radial velocities and related quantities to four csv files.

    Files are named <base_save_path>_<n epochs>_{rvs,rv_sig,bcor,
    bcor_rv}.csv.  Every row starts with the epoch MJD, followed by one
    column per order (one column for the bcor file).

    Parameters
    ----------
    rvs: 2D np.array(float)
        Radial velocities of format (Observation, Order)
    rv_sigs: 2D np.array(float)
        Radial velocity sigmas of format (Observation, Order)
    bcor: 1D np.array(float)
        Barycentric correction for each observation.
    mjds: 1D np.array(float)
        Modified Julian Date (MJD) of each observation.
    bcor_rvs: 2D np.array(float)
        Barycentrically corrected RVs of format (Observation, Order)
    base_save_path: string
        The base of each of the csv file paths.
    """
    # Number of observations and orders respectively.
    nf, nm = rvs.shape
    prefix = "%s_%d" % (base_save_path, nf)
    multi_fmt = "%10.4f" + nm * ", %6.1f"

    def with_mjd(cols):
        # Prepend the MJD column to the data columns.
        return np.append(mjds.reshape(nf, 1), cols, axis=1)

    np.savetxt(prefix + "_rvs.csv", with_mjd(rvs), fmt=multi_fmt,
               header="RV in m/s for each order, for each MJD epoch")
    np.savetxt(prefix + "_rv_sig.csv", with_mjd(rv_sigs), fmt=multi_fmt,
               header="RV uncertainties in m/s for each order, for each "
                      "MJD epoch")
    np.savetxt(prefix + "_bcor.csv", with_mjd(bcor.reshape(nf, 1)),
               fmt="%10.4f" + ", %6.1f",
               header="Barycentric correction in m/s")
    np.savetxt(prefix + "_bcor_rv.csv", with_mjd(bcor_rvs), fmt=multi_fmt,
               header="Barycentrically corrected RVs in m/s")
def load_rvs(self, rvs_path, rv_sig_path, bcor_path=None):
    """Read back saved RV, RV sigma and (optionally) bcor csv files.

    Each csv row starts with the epoch MJD, followed by one column per
    spectral order (as written by save_rvs), so the MJD column is split
    off from the data here.

    Parameters
    ----------
    rvs_path: string
        File path to the rv csv
    rv_sig_path: string
        File path to the rv sig csv
    bcor_path: string
        File path to the bcor csv (may be absent, e.g. for ThAr files)

    Returns
    -------
    mjds: 1D np.array(float)
        Modified Julian Date (MJD) of each observation.
    raw_rvs: 2D np.array(float)
        Radial velocities of format (Observation, Order)
    raw_rv_sigs: 2D np.array(float)
        Radial velocity sigmas of format (Observation, Order)
    raw_bcor: 1D np.array(float)
        RV barycentric correction for each observation (only when
        bcor_path is given).
    bcor_rvs: 2D np.array(float)
        Barycentrically corrected radial velocities of format
        (Observation, Order) (only when bcor_path is given).
    """
    rv_table = np.loadtxt(rvs_path, delimiter=",")
    sig_table = np.loadtxt(rv_sig_path, delimiter=",")
    # Column 0 of every table is the MJD; the rest are per-order values.
    mjds = rv_table[:, 0]
    raw_rvs = rv_table[:, 1:]
    raw_rv_sig = sig_table[:, 1:]
    # Only deal with barycentric correction if it is passed in
    # (it may not be when dealing with ThAr files).
    if bcor_path is None:
        return mjds, raw_rvs, raw_rv_sig
    bcor_table = np.loadtxt(bcor_path, delimiter=",")
    raw_bcor = bcor_table[:, 1]
    # Broadcast the per-epoch correction across all orders.
    bcor_rvs = raw_rvs + raw_bcor.reshape(len(mjds), 1)
    return mjds, raw_rvs, raw_rv_sig, raw_bcor, bcor_rvs
| mit |
kjordahl/xray | doc/conf.py | 3 | 13424 | # -*- coding: utf-8 -*-
#
# xray documentation build configuration file, created by
# sphinx-quickstart on Thu Feb 6 18:57:54 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os

# Diagnostic output for remote (e.g. Read the Docs) builds: report which
# interpreter is running and which scientific-stack packages (and their
# versions/locations) are importable, so failed doc builds are easier to
# debug.  NOTE: Python 2 print statements — this config predates py3.
print "python exec:", sys.executable
print "sys.path:", sys.path
try:
    import numpy
    print "numpy: %s, %s" % (numpy.__version__, numpy.__file__)
except ImportError:
    print "no numpy"
try:
    import scipy
    print "scipy: %s, %s" % (scipy.__version__, scipy.__file__)
except ImportError:
    print "no scipy"
try:
    import pandas
    print "pandas: %s, %s" % (pandas.__version__, pandas.__file__)
except ImportError:
    print "no pandas"
try:
    import matplotlib
    print "matplotlib: %s, %s" % (matplotlib.__version__, matplotlib.__file__)
except ImportError:
    print "no matplotlib"
try:
    import IPython
    print "ipython: %s, %s" % (IPython.__version__, IPython.__file__)
except ImportError:
    print "no ipython"
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# Monkey patch inspect.findsource to work around a Python bug that manifests on
# RTD. Copied from IPython.core.ultratb.
# Reference: https://github.com/ipython/ipython/issues/1456
import linecache
import re
from inspect import getsourcefile, getfile, getmodule,\
ismodule, isclass, ismethod, isfunction, istraceback, isframe, iscode
def findsource(object):
    """Return the entire source file and starting line number for an object.

    The argument may be a module, class, method, function, traceback,
    frame, or code object.  The source code is returned as a list of all
    the lines in the file and the line number indexes a line in that
    list.  An IOError is raised if the source code cannot be retrieved.

    FIXED version with which we monkeypatch the stdlib to work around a
    bug (copied from IPython.core.ultratb; kept verbatim so it stays in
    sync with the upstream fix).
    """
    file = getsourcefile(object) or getfile(object)
    # If the object is a frame, then trying to get the globals dict from its
    # module won't work. Instead, the frame object itself has the globals
    # dictionary.
    # NOTE: `inspect` is imported at module level *below* this def; that
    # is fine because it is bound before findsource is ever called.
    globals_dict = None
    if inspect.isframe(object):
        # XXX: can this ever be false?
        globals_dict = object.f_globals
    else:
        module = getmodule(object, file)
        if module:
            globals_dict = module.__dict__
    lines = linecache.getlines(file, globals_dict)
    if not lines:
        raise IOError('could not get source code')
    if ismodule(object):
        return lines, 0
    if isclass(object):
        name = object.__name__
        pat = re.compile(r'^(\s*)class\s*' + name + r'\b')
        # make some effort to find the best matching class definition:
        # use the one with the least indentation, which is the one
        # that's most probably not inside a function definition.
        candidates = []
        for i in range(len(lines)):
            match = pat.match(lines[i])
            if match:
                # if it's at toplevel, it's already the best one
                if lines[i][0] == 'c':
                    return lines, i
                # else add whitespace to candidate list
                candidates.append((match.group(1), i))
        if candidates:
            # this will sort by whitespace, and by line number,
            # less whitespace first
            candidates.sort()
            return lines, candidates[0][1]
        else:
            raise IOError('could not find class definition')
    # Reduce methods/functions/tracebacks/frames down to a code object.
    if ismethod(object):
        object = object.__func__
    if isfunction(object):
        object = object.__code__
    if istraceback(object):
        object = object.tb_frame
    if isframe(object):
        object = object.f_code
    if iscode(object):
        if not hasattr(object, 'co_firstlineno'):
            raise IOError('could not find function definition')
        pat = re.compile(r'^(\s*def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)')
        pmatch = pat.match
        # fperez - fix: sometimes, co_firstlineno can give a number larger than
        # the length of lines, which causes an error. Safeguard against that.
        lnum = min(object.co_firstlineno,len(lines))-1
        while lnum > 0:
            if pmatch(lines[lnum]): break
            lnum -= 1
        return lines, lnum
    raise IOError('could not find code object')
# Monkey-patch the stdlib so Sphinx's autodoc machinery uses the fixed
# findsource() defined above instead of the buggy inspect version.
import inspect
inspect.findsource = findsource

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.autosummary',
    'sphinx.ext.intersphinx',
    'sphinx.ext.extlinks',
    'sphinx.ext.mathjax',
    'numpydoc',
    'IPython.sphinxext.ipython_directive',
    'IPython.sphinxext.ipython_console_highlighting',
]

# Shortcut role: :issue:`123` links to the project's GitHub issue tracker.
extlinks = {'issue': ('https://github.com/xray/xray/issues/%s', 'GH')}

autosummary_generate = True

numpydoc_class_members_toctree = True
numpydoc_show_class_members = False

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The encoding of source files.
#source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'xray'
copyright = u'2014, xray Developers'

import xray

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = xray.version.short_version
# The full version, including alpha/beta/rc tags.
release = xray.__version__

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']

# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []

# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False

# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.

# on_rtd is whether we are on readthedocs.org, this line of code grabbed from
# docs.readthedocs.org
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'

if not on_rtd:  # only import and set the theme if we're building docs locally
    import sphinx_rtd_theme
    html_theme = 'sphinx_rtd_theme'
    html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# otherwise, readthedocs.org uses their theme by default, so no need to specify it

# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []

# The name for this set of Sphinx documents.  If None, it defaults to
# "<project> v<release> documentation".
#html_title = None

# A shorter title for the navigation bar.  Default is the same as html_title.
#html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}

# If false, no module index is generated.
#html_domain_indices = True

# If false, no index is generated.
# html_use_index = True

# If true, the index is split into individual pages for each letter.
#html_split_index = False

# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it.  The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None

# Output file base name for HTML help builder.
htmlhelp_basename = 'xraydoc'

# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #'preamble': '',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    ('index', 'xray.tex', u'xray Documentation',
     u'xray Developers', 'manual'),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False

# If true, show page references after internal links.
#latex_show_pagerefs = False

# If true, show URL addresses after external links.
#latex_show_urls = False

# Documents to append as an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated.
#latex_domain_indices = True

# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'xray', u'xray Documentation',
     [u'xray Developers'], 1)
]

# If true, show URL addresses after external links.
#man_show_urls = False

# -- Options for Texinfo output -------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    ('index', 'xray', u'xray Documentation',
     u'xray Developers', 'xray', 'One line description of project.',
     'Miscellaneous'),
]

# Documents to append as an appendix to all manuals.
#texinfo_appendices = []

# If false, no module index is generated.
#texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'

# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False

# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
    'python': ('https://docs.python.org/2.7/', None),
    'pandas': ('http://pandas.pydata.org/pandas-docs/stable/', None),
    'iris': ('http://scitools.org.uk/iris/docs/latest/', None),
    'numpy': ('http://docs.scipy.org/doc/numpy/', None),
}
| apache-2.0 |
KISSMonX/micropython | tests/bytecode/pylib-tests/compileall.py | 20 | 9675 | """Module/script to byte-compile all .py files to .pyc (or .pyo) files.
When called as a script with arguments, this compiles the directories
given as arguments recursively; the -l option prevents it from
recursing into directories.
Without arguments, if compiles all modules on sys.path, without
recursing into subdirectories. (Even though it should do so for
packages -- for now, you'll have to deal with packages separately.)
See module py_compile for details of the actual byte-compilation.
"""
import os
import sys
import errno
import imp
import py_compile
import struct
__all__ = ["compile_dir","compile_file","compile_path"]
def compile_dir(dir, maxlevels=10, ddir=None, force=False, rx=None,
                quiet=False, legacy=False, optimize=-1):
    """Byte-compile every module below the given directory tree.

    Arguments (only dir is required):

    dir:       the directory to byte-compile
    maxlevels: maximum recursion depth (default 10)
    ddir:      directory name prepended to each file path as it is
               recorded inside the byte-code file
    force:     if True, recompile even when timestamps are up-to-date
    quiet:     if True, suppress informational output
    legacy:    if True, write legacy pyc paths instead of PEP 3147 paths
    optimize:  optimization level, or -1 for the interpreter's own level

    Returns 1 when everything compiled cleanly, 0 otherwise.
    """
    if not quiet:
        print('Listing {!r}...'.format(dir))
    try:
        entries = sorted(os.listdir(dir))
    except os.error:
        print("Can't list {!r}".format(dir))
        entries = []
    success = 1
    for entry in entries:
        # Byte-code caches are never compiled themselves.
        if entry == '__pycache__':
            continue
        fullname = os.path.join(dir, entry)
        dfile = os.path.join(ddir, entry) if ddir is not None else None
        if not os.path.isdir(fullname):
            # Regular file: compile it in place.
            ok = compile_file(fullname, ddir, force, rx, quiet,
                              legacy, optimize)
            if not ok:
                success = 0
        elif (maxlevels > 0 and entry not in (os.curdir, os.pardir)
              and not os.path.islink(fullname)):
            # Real subdirectory: recurse with one less level allowed.
            ok = compile_dir(fullname, maxlevels - 1, dfile, force, rx,
                             quiet, legacy, optimize)
            if not ok:
                success = 0
    return success
def compile_file(fullname, ddir=None, force=False, rx=None, quiet=False,
                 legacy=False, optimize=-1):
    """Byte-compile one file.

    Arguments (only fullname is required):

    fullname:  the file to byte-compile
    ddir:      if given, the directory name compiled in to the
               byte-code file.
    force:     if True, force compilation, even if timestamps are up-to-date
    quiet:     if True, be quiet during compilation
    legacy:    if True, produce legacy pyc paths instead of PEP 3147 paths
    optimize:  optimization level or -1 for level of the interpreter

    Returns 1 on success (or when the file is skipped), 0 on failure.
    """
    success = 1
    name = os.path.basename(fullname)
    if ddir is not None:
        dfile = os.path.join(ddir, name)
    else:
        dfile = None
    if rx is not None:
        # Skip files matching the caller-supplied exclusion regexp.
        mo = rx.search(fullname)
        if mo:
            return success
    if os.path.isfile(fullname):
        if legacy:
            # Pre-PEP 3147 location: foo.pyc / foo.pyo next to foo.py.
            cfile = fullname + ('c' if __debug__ else 'o')
        else:
            # PEP 3147 __pycache__ location.
            # NOTE(review): the imp module is deprecated (removed in 3.12);
            # kept as-is to match this file's vintage.
            if optimize >= 0:
                cfile = imp.cache_from_source(fullname,
                                              debug_override=not optimize)
            else:
                cfile = imp.cache_from_source(fullname)
            # NOTE(review): cache_dir is assigned but never used; leftover
            # from the upstream CPython source.
            cache_dir = os.path.dirname(cfile)
        head, tail = name[:-3], name[-3:]
        if tail == '.py':
            if not force:
                # Compare the magic number + source mtime stored in the
                # existing byte-code file; skip recompilation on a match.
                try:
                    mtime = int(os.stat(fullname).st_mtime)
                    expect = struct.pack('<4sl', imp.get_magic(), mtime)
                    with open(cfile, 'rb') as chandle:
                        actual = chandle.read(8)
                    if expect == actual:
                        return success
                except IOError:
                    pass
            if not quiet:
                print('Compiling {!r}...'.format(fullname))
            try:
                ok = py_compile.compile(fullname, cfile, dfile, True,
                                        optimize=optimize)
            except py_compile.PyCompileError as err:
                if quiet:
                    print('*** Error compiling {!r}...'.format(fullname))
                else:
                    print('*** ', end='')
                # escape non-printable characters in msg
                msg = err.msg.encode(sys.stdout.encoding,
                                     errors='backslashreplace')
                msg = msg.decode(sys.stdout.encoding)
                print(msg)
                success = 0
            except (SyntaxError, UnicodeError, IOError) as e:
                if quiet:
                    print('*** Error compiling {!r}...'.format(fullname))
                else:
                    print('*** ', end='')
                print(e.__class__.__name__ + ':', e)
                success = 0
            else:
                # py_compile.compile returned a false value: flag failure.
                if ok == 0:
                    success = 0
    return success
def compile_path(skip_curdir=1, maxlevels=0, force=False, quiet=False,
                 legacy=False, optimize=-1):
    """Byte-compile every module found on sys.path.

    Arguments (all optional):

    skip_curdir: if true, skip the current directory (default True)
    maxlevels:   maximum recursion depth (default 0, i.e. no recursion)
    force, quiet, legacy, optimize: forwarded to compile_dir()

    Returns 1 if every directory compiled cleanly, 0 otherwise.
    """
    success = 1
    for entry in sys.path:
        is_curdir = not entry or entry == os.curdir
        if is_curdir and skip_curdir:
            print('Skipping current directory')
            continue
        # Short-circuits once a failure has been seen, like the original.
        success = success and compile_dir(entry, maxlevels, None,
                                          force, quiet=quiet,
                                          legacy=legacy, optimize=optimize)
    return success
def main():
    """Script main program: parse command-line options and compile.

    Returns True on overall success, False on any failure or interrupt.
    """
    import argparse

    parser = argparse.ArgumentParser(
        description='Utilities to support installing Python libraries.')
    parser.add_argument('-l', action='store_const', const=0,
                        default=10, dest='maxlevels',
                        help="don't recurse into subdirectories")
    parser.add_argument('-f', action='store_true', dest='force',
                        help='force rebuild even if timestamps are up to date')
    parser.add_argument('-q', action='store_true', dest='quiet',
                        help='output only error messages')
    parser.add_argument('-b', action='store_true', dest='legacy',
                        help='use legacy (pre-PEP3147) compiled file locations')
    parser.add_argument('-d', metavar='DESTDIR',  dest='ddir', default=None,
                        help=('directory to prepend to file paths for use in '
                              'compile-time tracebacks and in runtime '
                              'tracebacks in cases where the source file is '
                              'unavailable'))
    parser.add_argument('-x', metavar='REGEXP', dest='rx', default=None,
                        help=('skip files matching the regular expression; '
                              'the regexp is searched for in the full path '
                              'of each file considered for compilation'))
    parser.add_argument('-i', metavar='FILE', dest='flist',
                        help=('add all the files and directories listed in '
                              'FILE to the list considered for compilation; '
                              'if "-", names are read from stdin'))
    parser.add_argument('compile_dest', metavar='FILE|DIR', nargs='*',
                        help=('zero or more file and directory names '
                              'to compile; if no arguments given, defaults '
                              'to the equivalent of -l sys.path'))
    args = parser.parse_args()

    compile_dests = args.compile_dest

    # -d only makes sense with a single directory destination.
    if (args.ddir and (len(compile_dests) != 1
            or not os.path.isdir(compile_dests[0]))):
        parser.exit('-d destdir requires exactly one directory argument')
    if args.rx:
        import re
        args.rx = re.compile(args.rx)

    # if flist is provided then load it
    if args.flist:
        try:
            with (sys.stdin if args.flist=='-' else open(args.flist)) as f:
                for line in f:
                    compile_dests.append(line.strip())
        except EnvironmentError:
            print("Error reading file list {}".format(args.flist))
            return False

    success = True
    try:
        if compile_dests:
            for dest in compile_dests:
                # Files are compiled directly; directories are walked.
                if os.path.isfile(dest):
                    if not compile_file(dest, args.ddir, args.force, args.rx,
                                        args.quiet, args.legacy):
                        success = False
                else:
                    if not compile_dir(dest, args.maxlevels, args.ddir,
                                       args.force, args.rx, args.quiet,
                                       args.legacy):
                        success = False
            return success
        else:
            # No destinations given: compile everything on sys.path.
            return compile_path(legacy=args.legacy)
    except KeyboardInterrupt:
        print("\n[interrupted]")
        return False
    return True
if __name__ == '__main__':
    # Exit status 0 when main() succeeded, 1 otherwise.
    exit_status = int(not main())
    sys.exit(exit_status)
| mit |
xtrasmal/gitinspector | gitinspector/optval.py | 56 | 2092 | # coding: utf-8
#
# Copyright © 2013 Ejwa Software. All rights reserved.
#
# This file is part of gitinspector.
#
# gitinspector is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# gitinspector is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with gitinspector. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import getopt
class InvalidOptionArgument(Exception):
    """Raised when a command-line option argument cannot be interpreted."""

    def __init__(self, msg):
        # Keep the message available both as .msg and via str(exception).
        self.msg = msg
        super(InvalidOptionArgument, self).__init__(msg)
def __find_arg_in_options__(arg, options):
    """Return the first (name, default) pair whose name begins with *arg*,
    or None when nothing matches."""
    return next((entry for entry in options if entry[0].startswith(arg)),
                None)
def __find_options_to_extend__(long_options):
    """Rewrite "name:default" entries of *long_options* in place to "name="
    and return a list of ("--name", default) pairs for those entries."""
    extendable = []
    for idx, option in enumerate(long_options):
        pieces = option.split(":")
        if len(pieces) != 2:
            continue
        name, default = pieces
        long_options[idx] = name + "="
        extendable.append(("--" + name, default))
    return extendable
# A duplicate of getopt.gnu_getopt with added support for optional arguments
# in long options, specified in the form "arg:default_value".
def gnu_getopt(args, options, long_options):
    extendable = __find_options_to_extend__(long_options)
    for position, candidate in enumerate(args):
        match = __find_arg_in_options__(candidate, extendable)
        if match:
            # Inject the default so getopt sees an explicit "--opt=value".
            args[position] = "%s=%s" % (candidate, match[1])
    return getopt.gnu_getopt(args, options, long_options)
def get_boolean_argument(arg):
    """Interpret an option argument as a boolean.

    Accepts real booleans, None (treated as False), and the strings
    "true"/"t"/"1" and "false"/"f"/"0" in any letter case.

    Raises InvalidOptionArgument for any other string.
    """
    if isinstance(arg, bool):
        return arg
    # Fix: compare to None with identity, not equality (PEP 8); also avoids
    # calling .lower() on None.
    if arg is None:
        return False
    # Lowercase once instead of repeatedly per comparison.
    lowered = arg.lower()
    if lowered in ("false", "f") or arg == "0":
        return False
    if lowered in ("true", "t") or arg == "1":
        return True

    raise InvalidOptionArgument(_("The given option argument is not a valid boolean."))
| gpl-3.0 |
ryuunosukeyoshi/PartnerPoi-Bot | lib/youtube_dl/downloader/dash.py | 15 | 2843 | from __future__ import unicode_literals
from .fragment import FragmentFD
from ..compat import compat_urllib_error
from ..utils import urljoin
class DashSegmentsFD(FragmentFD):
    """
    Download segments in a DASH manifest
    """

    FD_NAME = 'dashsegments'

    def real_download(self, filename, info_dict):
        # Base URL used to resolve fragments that only carry a relative path.
        fragment_base_url = info_dict.get('fragment_base_url')
        # In test mode only the first fragment is fetched.
        fragments = info_dict['fragments'][:1] if self.params.get(
            'test', False) else info_dict['fragments']

        ctx = {
            'filename': filename,
            'total_frags': len(fragments),
        }

        # Initializes the download state; presumably also sets
        # ctx['fragment_index'] for resuming -- defined in FragmentFD.
        self._prepare_and_start_frag_download(ctx)

        fragment_retries = self.params.get('fragment_retries', 0)
        skip_unavailable_fragments = self.params.get('skip_unavailable_fragments', True)

        frag_index = 0
        for i, fragment in enumerate(fragments):
            frag_index += 1
            # Skip fragments already downloaded in a previous (resumed) run.
            if frag_index <= ctx['fragment_index']:
                continue
            # In DASH, the first segment contains necessary headers to
            # generate a valid MP4 file, so always abort for the first segment
            fatal = i == 0 or not skip_unavailable_fragments

            count = 0
            while count <= fragment_retries:
                try:
                    fragment_url = fragment.get('url')
                    if not fragment_url:
                        # Fragment has only a relative path; a base URL is
                        # required to resolve it.
                        assert fragment_base_url
                        fragment_url = urljoin(fragment_base_url, fragment['path'])
                    success, frag_content = self._download_fragment(ctx, fragment_url, info_dict)
                    if not success:
                        return False
                    self._append_fragment(ctx, frag_content)
                    break
                except compat_urllib_error.HTTPError as err:
                    # YouTube may often return 404 HTTP error for a fragment causing the
                    # whole download to fail. However if the same fragment is immediately
                    # retried with the same request data this usually succeeds (1-2 attempts
                    # is usually enough) thus allowing to download the whole file successfully.
                    # To be future-proof we will retry all fragments that fail with any
                    # HTTP error.
                    count += 1
                    if count <= fragment_retries:
                        self.report_retry_fragment(err, frag_index, count, fragment_retries)
            # Retries exhausted: either skip this fragment or give up entirely.
            if count > fragment_retries:
                if not fatal:
                    self.report_skip_fragment(frag_index)
                    continue
                self.report_error('giving up after %s fragment retries' % fragment_retries)
                return False

        self._finish_frag_download(ctx)

        return True
| gpl-3.0 |
sassoftware/conary | conary/repository/netrepos/proxy.py | 1 | 67876 | #
# Copyright (c) SAS Institute Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import cPickle
import errno
import itertools
import os
import resource
import struct
import tempfile
import time
from conary import constants, conarycfg, trove
from conary.lib import digestlib, sha1helper, tracelog, urlparse, util
from conary.lib.http import http_error
from conary.lib.http import request as req_mod
from conary.repository import changeset, datastore, errors, netclient
from conary.repository import filecontainer, transport, xmlshims
from conary.repository.netrepos import cache, netserver, reposlog
from conary.repository.netrepos.auth_tokens import AuthToken
# A list of changeset versions we support
# These are just shortcuts
_CSVER0 = filecontainer.FILE_CONTAINER_VERSION_NO_REMOVES
_CSVER1 = filecontainer.FILE_CONTAINER_VERSION_WITH_REMOVES
_CSVER2 = filecontainer.FILE_CONTAINER_VERSION_FILEID_IDX
# The first in the list is the one the current generation clients understand
CHANGESET_VERSIONS = [ _CSVER2, _CSVER1, _CSVER0 ]
# Precedence list of versions - the version specified as key can be generated
# from the version specified as value (i.e. an older changeset format can be
# derived from the next newer one).
CHANGESET_VERSIONS_PRECEDENCE = {
    _CSVER0 : _CSVER1,
    _CSVER1 : _CSVER2,
}
class RepositoryVersionCache:
    """Remembers, per upstream repository URI, the highest protocol version
    supported by both the upstream server and this proxy."""

    def __init__(self):
        # maps server URI -> negotiated protocol version
        self.d = {}

    def get(self, caller):
        uri = str(caller._getBasicUrl()).split(':', 1)[1]
        if uri not in self.d:
            # checkVersion protocol is stopped at 50; we don't support kwargs
            # for that call, ever
            theirs = set(caller.checkVersion(50))
            ours = set(netserver.SERVER_VERSIONS)
            self.d[uri] = max(theirs & ours)
        return self.d[uri]
class ProxyClient(util.ServerProxy):
    # XMLRPC client used to talk to the upstream repository; currently adds
    # nothing over util.ServerProxy.
    pass
class ExtraInfo(object):
    """Carries response metadata (headers and protocol string) back to the
    server running class (either standalone or apache)."""

    __slots__ = ['responseHeaders', 'responseProtocol']

    def __init__(self, responseHeaders, responseProtocol):
        self.responseHeaders = responseHeaders
        self.responseProtocol = responseProtocol

    def getVia(self):
        """Return the Via response header, or None when headers are absent
        or the header was not set."""
        headers = self.responseHeaders
        return headers.get('Via', None) if headers else None
class ProxyCaller:
    """
    This class facilitates access to a remote repository using the same
    interface as L{RepositoryCaller}.
    """

    def callByName(self, methodname, version, *args, **kwargs):
        """Call a remote server method using the netserver convention."""
        request = xmlshims.RequestArgs(version, args, kwargs)
        response = self.callWithRequest(methodname, request)
        if response.isException:
            # exception occured. this lets us tunnel the error through
            # without instantiating it (which would be demarshalling the
            # thing just to remarshall it again)
            raise ProxyRepositoryError(response.excName, response.excArgs,
                                       response.excKwargs)
        return response.result

    def callWithRequest(self, methodname, request):
        """Call a remote server method using request/response objects."""
        rawRequest = request.toWire()
        try:
            rawResponse = self.proxy._request(methodname, rawRequest)
        except IOError, e:
            raise errors.ProxyError(e.strerror)
        except http_error.ResponseError, e:
            # 403 from upstream maps to a conary permission error.
            if e.errcode == 403:
                raise errors.InsufficientPermission
            raise
        # Remember which proxy (if any) the transport actually used.
        self._lastProxy = self._transport.usedProxy
        # XMLRPC responses are a 1-tuple
        rawResponse, = rawResponse
        return xmlshims.ResponseArgs.fromWire(request.version, rawResponse,
                                              self._transport.responseHeaders)

    def getExtraInfo(self):
        """Return extra information if available"""
        return ExtraInfo(self._transport.responseHeaders,
                         self._transport.responseProtocol)

    def __getattr__(self, method):
        # Don't invoke methods that start with _
        if method.startswith('_'):
            raise AttributeError(method)
        # Any other attribute becomes a remote call by that name.
        return lambda *args, **kwargs: self.callByName(method, *args, **kwargs)

    def _getBasicUrl(self):
        # The upstream URL stripped of any embedded credentials.
        return self.url._replace(userpass=(None, None))

    def __init__(self, url, proxy, transport, systemId):
        self.url = url
        self.proxy = proxy
        self._lastProxy = None
        self._transport = transport
        self.systemId = systemId
class ProxyCallFactory:
    """Builds ProxyCaller objects that forward requests to an upstream
    repository over HTTP(S)."""

    @staticmethod
    def createCaller(protocol, port, rawUrl, proxyMap, authToken, localAddr,
                     protocolString, headers, cfg, targetServerName,
                     remoteIp, isSecure, baseUrl, systemId):
        """Construct a ProxyCaller, merging proxy-configured entitlements
        and user credentials into the client's auth token."""
        entitlementList = authToken[2][:]
        # Entitlements configured on the proxy itself are injected in
        # addition to whatever the client sent.
        injEntList = cfg.entitlement.find(targetServerName)
        if injEntList:
            entitlementList += injEntList

        userOverride = cfg.user.find(targetServerName)
        if userOverride:
            # Copy before mutating so the caller's token is untouched.
            authToken = authToken.copy()
            authToken.user, authToken.password = userOverride

        # redirectUrl is defined elsewhere in this module; presumably it
        # rewrites the URL with the (possibly overridden) credentials.
        url = redirectUrl(authToken, rawUrl)
        url = req_mod.URL.parse(url)

        via = []
        # Via is a multi-valued header. Multiple occurences will be collapsed
        # as a single string, separated by ,
        if 'Via' in headers:
            via.append(headers['via'])
        if localAddr and protocolString:
            via.append(formatViaHeader(localAddr, protocolString))
        lheaders = {}
        if via:
            lheaders['Via'] = ', '.join(via)

        # Track the chain of client addresses, avoiding duplicates of the
        # immediate peer and the loopback addresses.
        forwarded = list(authToken.forwarded_for)
        if remoteIp and remoteIp not in ['127.0.0.1', '::1'] and (
                not forwarded or forwarded[-1] != remoteIp):
            forwarded.append(remoteIp)
        if forwarded:
            lheaders['X-Forwarded-For'] = ', '.join(forwarded)

        # If the proxy injected entitlements or user information, switch to
        # SSL -- IF they are using
        # default ports (if they specify ports, we have no way of
        # knowing what port to use)
        if (url.hostport.port == 80 and
                (bool(injEntList) or bool(userOverride))):
            hostport = url.hostport._replace(port=443)
            url = url._replace(scheme='https', hostport=hostport)

        transporter = transport.Transport(proxyMap=proxyMap,
                                          serverName = targetServerName)
        transporter.setExtraHeaders(lheaders)
        transporter.addExtraHeaders({'X-Conary-SystemId': systemId})
        transporter.setEntitlements(entitlementList)

        transporter.setCompress(True)
        proxy = ProxyClient(url, transporter)

        return ProxyCaller(url, proxy, transporter, systemId)
class RepositoryCaller(xmlshims.NetworkConvertors):
    """
    This class facilitates access to a local repository object using the same
    interface as L{ProxyCaller}.
    """

    # Shim calls never use a proxy, of course.
    _lastProxy = None

    def callByName(self, methodname, version, *args, **kwargs):
        """Call a repository method using the netserver convention."""
        args = (version,) + args
        return self.repos.callWrapper(
                protocol=self.protocol,
                port=self.port,
                methodname=methodname,
                authToken=self.authToken,
                orderedArgs=args,
                kwArgs=kwargs,
                remoteIp=self.remoteIp,
                rawUrl=self.rawUrl,
                isSecure=self.isSecure,
                systemId=self.systemId
                )

    def callByRequest(self, methodname, request):
        """Call a repository method using request/response objects."""
        try:
            result = self.callByName(methodname, request.version,
                    *request.args, **request.kwargs)
            return xmlshims.ResponseArgs.newResult(result)
        except Exception, err:
            # Exceptions that know how to marshall themselves are returned
            # to the client in marshalled form.
            # NOTE(review): self.responseFilter is not set in this class;
            # presumably provided by xmlshims.NetworkConvertors or a
            # subclass -- confirm.
            if hasattr(err, 'marshall'):
                args, kwArgs = err.marshall(self)
                return self.responseFilter.newException(
                        err.__class__.__name__, args, kwArgs)
            else:
                # Well-known simple exceptions get a canned marshalling;
                # anything else propagates.
                for cls, marshall in errors.simpleExceptions:
                    if isinstance(err, cls):
                        return self.responseFilter.newException(marshall,
                                (str(err),))
                raise

    def getExtraInfo(self):
        """No extra information available for a RepositoryCaller"""
        return None

    def __getattr__(self, method):
        # Don't invoke methods that start with _
        if method.startswith('_'):
            raise AttributeError(method)
        # Any other attribute becomes a repository call by that name.
        return lambda *args, **kwargs: self.callByName(method, *args, **kwargs)

    def __init__(self, protocol, port, authToken, repos, remoteIp, rawUrl,
                 isSecure, systemId):
        self.repos = repos
        self.protocol = protocol
        self.port = port
        self.authToken = authToken
        self.url = None
        self.remoteIp = remoteIp
        self.rawUrl = rawUrl
        self.isSecure = isSecure
        # NOTE(review): this sets 'lastProxy' while the class attribute above
        # is '_lastProxy' -- verify which spelling callers rely on.
        self.lastProxy = None
        self.systemId = systemId
class RepositoryCallFactory:
    """Builds RepositoryCaller objects for in-process (shim) access to a
    local repository object."""

    def __init__(self, repos, logger):
        self.repos = repos
        self.log = logger

    def createCaller(self, protocol, port, rawUrl, proxyMap, authToken,
                     localAddr, protocolString, headers, cfg,
                     targetServerName, remoteIp, isSecure, baseUrl,
                     systemId):
        """Return a RepositoryCaller wired to the local repository; most
        network-oriented arguments are accepted for interface parity with
        ProxyCallFactory but unused here."""
        via = headers['via'] if 'via' in headers else None
        if via is not None:
            self.log(2, "HTTP Via: %s" % via)
        caller = RepositoryCaller(protocol, port, authToken, self.repos,
                                  remoteIp, baseUrl, isSecure, systemId)
        return caller
class BaseProxy(xmlshims.NetworkConvertors):
    """Shared plumbing for proxy-style servers: negotiates protocol versions
    and dispatches client calls through a caller built by ``self.callFactory``
    (supplied elsewhere -- presumably by subclasses; confirm)."""

    # a list of the protocol versions we understand. Make sure the first
    # one in the list is the lowest protocol version we support and the
    # last one is the current server protocol version.
    #
    # for thoughts on this process, see the IM log at the end of this file
    SERVER_VERSIONS = netserver.SERVER_VERSIONS
    # The set of method names clients may invoke through the proxy.
    publicCalls = netserver.NetworkRepositoryServer.publicCalls
    # Factory for marshalled response/exception objects.
    responseFilter = xmlshims.ResponseArgs
    # Shared (class-level) cache of negotiated upstream protocol versions.
    repositoryVersionCache = RepositoryVersionCache()
    def __init__(self, cfg, basicUrl):
        """Set up logging, temp space and proxy configuration.

        @param cfg: server configuration object (provides logFile, tmpDir,
            traceLog, and proxy settings)
        @param basicUrl: URL template with %(port)s / %(protocol)s
            placeholders used by _getUrlBase()
        """
        self.cfg = cfg
        self.basicUrl = basicUrl
        self.logFile = cfg.logFile
        self.tmpPath = cfg.tmpDir
        # Make sure the temporary directory exists before first use.
        util.mkdirChain(self.tmpPath)
        self.proxyMap = conarycfg.getProxyMap(cfg)

        self.log = tracelog.getLog(None)
        if cfg.traceLog:
            # traceLog is a (level, filename) pair; tracing kicks in above
            # level 2.
            (l, f) = cfg.traceLog
            self.log = tracelog.getLog(filename=f, level=l, trace=l>2)

        if self.logFile:
            self.callLog = reposlog.RepositoryCallLogger(self.logFile, [])
        else:
            self.callLog = None

        self.log(1, "proxy url=%s" % basicUrl)
    def callWrapper(self, protocol, port, methodname, authToken, request,
                    remoteIp = None, rawUrl = None, localAddr = None,
                    protocolString = None, headers = None, isSecure = False,
                    systemId = None):
        """
        Dispatch one client call: either handle it with a proxy-level
        override method defined on this class, or forward it unchanged
        through a caller built by self.callFactory.

        Returns a (response, extraInfo) tuple; errors are marshalled into
        the response rather than raised, where possible.

        @param localAddr: if set, a string host:port identifying the address
        the client used to talk to us.
        @param protocolString: if set, the protocol version the client used
        (i.e. HTTP/1.0)
        """
        extraInfo = None
        # Refuse anything outside the published call list.
        if methodname not in self.publicCalls:
            return (self.responseFilter.newException(
                "MethodNotSupported", (methodname,)), extraInfo)

        if not isinstance(authToken, AuthToken):
            authToken = AuthToken(*authToken)

        self._port = port
        self._protocol = protocol

        self._serverName = headers.get('X-Conary-Servername', None)
        if self._serverName:
            # Standalone server sends us paths, not full URLs, so don't rewrite
            # those.
            if rawUrl and not rawUrl.startswith('/'):
                rawUrl = self._mapUrl(rawUrl)
        self.setBaseUrlOverride(rawUrl, headers, isSecure)

        # NOTE(review): the systemId argument is unconditionally replaced by
        # the header value (possibly None) here -- confirm that is intended.
        systemId = headers.get('X-Conary-SystemId', None)

        # simple proxy. FIXME: caching these might help; building all
        # of this framework for every request seems dumb. it seems like
        # we could get away with one total since we're just changing
        # hostname/username/entitlement
        caller = self.callFactory.createCaller(protocol, port, rawUrl,
                                               self.proxyMap, authToken,
                                               localAddr, protocolString,
                                               headers, self.cfg,
                                               self._serverName,
                                               remoteIp, isSecure,
                                               self.urlBase(), systemId)

        response = None
        try:
            args = (request.version,) + request.args
            kwargs = request.kwargs
            if hasattr(self, methodname):
                # Special handling at the proxy level. The logged method name
                # is prefixed with a '+' to differentiate it from a vanilla
                # call.
                method = self.__getattribute__(methodname)

                if self.callLog:
                    self.callLog.log(remoteIp, authToken, '+' + methodname,
                                     args, kwargs, systemId=systemId)

                r = method(caller, authToken, *args, **kwargs)
            else:
                # Forward directly to the next server.
                if self.callLog:
                    self.callLog.log(remoteIp, authToken, methodname, args,
                                     kwargs, systemId=systemId)

                # This is incredibly silly.
                r = caller.callByName(methodname, *args, **kwargs)

            response = self.responseFilter.newResult(r)
            extraInfo = caller.getExtraInfo()
        except ProxyRepositoryError, e:
            # Error tunnelled back from the upstream server: re-marshall it
            # for the client without instantiating the real exception.
            response = self.responseFilter.newException(e.name, e.args,
                                                        e.kwArgs)
        except Exception, e:
            if hasattr(e, 'marshall'):
                args, kwArgs = e.marshall(self)
                response = self.responseFilter.newException(
                        e.__class__.__name__, args, kwArgs)
            else:
                # Well-known simple exceptions get a canned marshalling.
                for klass, marshall in errors.simpleExceptions:
                    if isinstance(e, klass):
                        response = self.responseFilter.newException(
                                marshall, (str(e),))
                        break

                if not response:
                    # this exception is not marshalled back to the client.
                    # re-raise it now. comment the next line out to fall into
                    # the debugger
                    raise

                    # NOTE: everything below is intentionally dead code kept
                    # as a developer debugging aid (see comment above).
                    # uncomment the next line to translate exceptions into
                    # nicer errors for the client.
                    #return (True, ("Unknown Exception", str(e)))

                    # fall-through to debug this exception - this code should
                    # not run on production servers
                    import traceback, sys
                    from conary.lib import debugger
                    excInfo = sys.exc_info()
                    lines = traceback.format_exception(*excInfo)
                    print "".join(lines)
                    if 1 or sys.stdout.isatty() and sys.stdin.isatty():
                        debugger.post_mortem(excInfo[2])
                    raise

        del self._serverName
        return response, extraInfo
def setBaseUrlOverride(self, rawUrl, headers, isSecure):
    """Remember the URL base this request was addressed to.

    An absolute rawUrl is used verbatim; a bare path is combined with the
    request's Host header (when present) to build a full base URL.
    """
    if not rawUrl:
        # Nothing to derive an override from.
        return
    if rawUrl.startswith("/"):
        # Path-only URL: a full base can only be built if the request
        # carried a Host header to combine with it.
        if headers and "Host" in headers:
            if isSecure:
                proto = "https"
            else:
                proto = "http"
            self._baseUrlOverride = "%s://%s%s" % (proto,
                                                   headers['Host'],
                                                   rawUrl)
    else:
        # Already a full URL: take it as-is.
        self._baseUrlOverride = rawUrl
def urlBase(self):
    """Return the base URL for this server, honoring any per-request
    override recorded by setBaseUrlOverride()."""
    override = self._baseUrlOverride
    if override is not None:
        return override
    return self._getUrlBase()
def _getUrlBase(self):
return self.basicUrl % { 'port' : self._port,
'protocol' : self._protocol }
def checkVersion(self, caller, authToken, clientVersion):
    """Negotiate the protocol version with the client.

    Rejects clients older than the oldest version this server supports,
    then returns the (sorted) intersection of this server's versions and
    the upstream server's versions.
    """
    self.log(2, authToken[0], "clientVersion=%s" % clientVersion)

    # cut off older clients entirely, no negotiation
    if clientVersion < self.SERVER_VERSIONS[0]:
        raise errors.InvalidClientVersion(
           'Invalid client version %s.  Server accepts client versions %s '
           '- read http://wiki.rpath.com/wiki/Conary:Conversion' %
           (clientVersion, ', '.join(str(x) for x in self.SERVER_VERSIONS)))

    # Ask the next server in the chain what it supports.
    parentVersions = caller.checkVersion(clientVersion)

    if self.SERVER_VERSIONS is not None:
        # Offer only versions both we and the parent understand.
        commonVersions = sorted(list(set(self.SERVER_VERSIONS) &
                                     set(parentVersions)))
    else:
        commonVersions = parentVersions

    return commonVersions
def getContentsStore(self):
    """Hook returning the file contents store, if any.

    The base proxy has none; subclasses fronting an in-process repository
    override this (see SimpleRepositoryFilter.getContentsStore).
    """
    return None
def _mapUrl(self, rawUrl):
    """Rewrite URL to follow a repositoryMap configured in the proxy."""
    newBase = self.cfg.repositoryMap.find(self._serverName)
    if not newBase:
        # No mapping configured for this server name; leave URL untouched.
        return rawUrl
    # Glue the new base URL to the original basename and query string
    oldParts = list(urlparse.urlparse(rawUrl))
    newParts = list(urlparse.urlparse(newBase))
    if not newParts[2].endswith('/'):
        # Ensure the mapped base path ends with a separator before
        # appending the original file name.
        newParts[2] += '/'
    # Keep only the final path component of the original URL.
    newParts[2] += os.path.basename(oldParts[2])
    # Preserve params, query string and fragment from the original URL.
    newParts[3:] = oldParts[3:]
    return urlparse.urlunparse(newParts)
def pokeCounter(self, name, delta):
    """Statistics hook: bump counter ``name`` by ``delta``.

    No-op here; the Memcache mixin overrides this to record cache
    hit/miss counters.
    """
    pass
class ChangeSetInfo(object):
    """Metadata for one changeset job, as stored in/read from the cache.

    The pickled header (trovesNeeded, filesNeeded, removedTroves, size) is
    written at the front of each cached changeset file; the remaining
    fields are filled in by the caching layer at runtime.
    """

    __slots__ = (
        # Fields that get pickled into the cached changeset
        'filesNeeded',
        'removedTroves',
        'size',
        'trovesNeeded',
        # Transient fields that are filled out by the caching layer
        'cached',
        'fingerprint',
        'offset',
        'path',
        'rawSize',
        'version',
        )

    def pickle(self):
        """Serialize the persistent fields for the cache-file header."""
        return cPickle.dumps(((self.trovesNeeded, self.filesNeeded,
            self.removedTroves), self.size))

    def open(self):
        """Return file-like object of the changeset pointed to by this info"""
        container = util.ExtendedFile(self.path, 'rb', buffering=False)
        # The changeset payload starts after the pickled header.
        rawSize = os.fstat(container.fileno()).st_size - self.offset
        fobj = util.SeekableNestedFile(container, rawSize, self.offset)
        return fobj

    def write(self, cacheObj):
        """Write csinfo header into a changeset cache file object"""
        pickled = self.pickle()
        # 4-byte big-endian length prefix followed by the pickle itself.
        cacheObj.write(struct.pack('>I', len(pickled)) + pickled)
        self.offset = 4 + len(pickled)

    def __init__(self, pickled=None, cacheObj=None):
        """Construct either empty, from a pickled header string, or by
        reading the header off an open cache file object."""
        if cacheObj is not None:
            # Cached changeset file with pickled csInfo header
            infoSize = struct.unpack('>I', cacheObj.read(4))[0]
            pickled = cacheObj.read(infoSize)
            self.offset = 4 + infoSize

        if pickled is not None:
            ((self.trovesNeeded, self.filesNeeded, self.removedTroves),
             self.size) = cPickle.loads(pickled)
class ChangesetFilter(BaseProxy):
    """Proxy layer adding changeset caching and format conversion.

    Implements changeset caching and format conversion between changeset
    versions. The changeset cache is passed in as an object rather than
    created here to allow different types of changeset caches to be used
    in the future.
    """

    # Protocol version to force when fetching changesets from the next
    # hop; None means negotiate (set by RepositoryFilterMixin when sitting
    # in front of an in-process repository).
    forceGetCsVersion = None
    # When True, request changesets one job per call even if the upstream
    # server could batch them.
    forceSingleCsJob = False

    def __init__(self, cfg, basicUrl, cache):
        # cache may be None (no caching) or an object with
        # get()/set()/resetLocks() (see ChangesetCache).
        BaseProxy.__init__(self, cfg, basicUrl)
        self.csCache = cache

    @staticmethod
    def _getChangeSetVersion(clientVersion):
        # Determine the changeset version based on the client version
        return changeset.getNativeChangesetVersion(clientVersion)
def _convertChangeSet(self, csInfo, destCsVersion, csVersion):
    """Convert a cached changeset one version step downwards.

    Returns (newCsPath, size) for the converted changeset; only the
    version transitions V1->V0 and V2->V1 are supported.
    """
    inFobj = csInfo.open()
    (fd, newCsPath) = tempfile.mkstemp(dir=self.cfg.tmpDir, suffix='.tmp')
    os.close(fd)
    try:
        if (csVersion, destCsVersion) == (_CSVER1, _CSVER0):
            size = self._convertChangeSetV1V0(inFobj, newCsPath)
        elif (csVersion, destCsVersion) == (_CSVER2, _CSVER1):
            inFc = filecontainer.FileContainer(inFobj)
            # V2->V1 conversion reports a size delta, not the new size.
            delta = changeset._convertChangeSetV2V1(inFc, newCsPath)
            size = csInfo.size + delta
        else:
            assert False, "Unknown versions"
    except:
        # Bare except on purpose: remove the temp file on any failure
        # (including KeyboardInterrupt) and re-raise.
        util.removeIfExists(newCsPath)
        raise

    return newCsPath, size
def _convertChangeSetV1V0(self, inFobj, newCsPath):
    """Down-convert a V1 changeset to V0 for old clients.

    V0 clients cannot represent removed troves; missing troves that were
    synthesized as removed are rewritten as plain troves, and genuinely
    removed troves raise TroveMissing. Returns the size of the rewritten
    changeset written to newCsPath.
    """
    cs = changeset.ChangeSetFromFile(inFobj)
    newCs = changeset.ChangeSet()

    for tcs in cs.iterNewTroveList():
        if tcs.troveType() != trove.TROVE_TYPE_REMOVED:
            continue

        # Even though it's possible for (old) clients to request
        # removed relative changesets recursively, the update
        # code never does that. Raising an exception to make
        # sure we know how the code behaves.
        if not tcs.isAbsolute():
            raise errors.InternalServerError(
                "Relative recursive changesets not supported "
                "for removed troves")
        ti = trove.TroveInfo(tcs.troveInfoDiff.freeze())
        trvName = tcs.getName()
        trvNewVersion = tcs.getNewVersion()
        trvNewFlavor = tcs.getNewFlavor()
        if ti.flags.isMissing():
            # this was a missing trove for which we
            # synthesized a removed trove object.
            # The client would have a much easier time
            # updating if we just made it a regular trove.
            missingOldVersion = tcs.getOldVersion()
            missingOldFlavor = tcs.getOldFlavor()
            if missingOldVersion is None:
                oldTrove = None
            else:
                oldTrove = trove.Trove(trvName,
                                       missingOldVersion,
                                       missingOldFlavor)
            newTrove = trove.Trove(trvName,
                                   trvNewVersion,
                                   trvNewFlavor)
            diff = newTrove.diff(oldTrove, absolute = tcs.isAbsolute())[0]
            newCs.newTrove(diff)
        else:
            # this really was marked as a removed trove.
            # raise a TroveMissing exception
            raise errors.TroveMissing(trvName, trvNewVersion)

    # we need to re-write the munged changeset for an
    # old client
    cs.merge(newCs)
    size = cs.writeToFile(newCsPath,
        versionOverride = filecontainer.FILE_CONTAINER_VERSION_NO_REMOVES)
    return size
def getChangeSet(self, caller, authToken, clientVersion, chgSetList,
                 recurse, withFiles, withFileContents, excludeAutoSource,
                 changesetVersion = None, mirrorMode = False,
                 infoOnly = False, resumeOffset=None):
    """Serve a getChangeSet request through the cache/conversion layer.

    Returns a (url, size/info, ...) tuple whose exact shape depends on
    clientVersion (see the protocol-version branches at the bottom).
    """
    # This is how the caching algorithm works:
    # - Produce verPath, a path in the digraph of possible version
    #   transformations. It starts with the version we need and ends with
    #   the version the upstream server knows how to produce.
    # - For each changeset:
    #   - walk verPath. If version is found, add it to changeSetList and
    #     break (csInfo will contain the version we found, it may be newer
    #     than what the client needs), otherwise try the next version
    # - Fetch the changesets that are missing from changeSetList, and add
    #   them to changeSetList. Their version is wireCsVersion. Cache them
    #   as such in the process.
    # - Walk changeSetList; if version is newer than what the client
    #   expects, start doing the conversions backwards.

    # Changeset version we need to produce
    neededCsVersion = changesetVersion or self._getChangeSetVersion(clientVersion)
    # Changeset version we expect the server to produce for us
    # If we're a proxy, we can look in the cache to find the server's
    # version, otherwise use the repository version
    if caller.url is None:
        serverVersion = ChangesetFilter.SERVER_VERSIONS[-1]
    else:
        serverVersion = self.repositoryVersionCache.get(caller)

    wireCsVersion = self._getChangeSetVersion(serverVersion)

    # forceGetCsVersion is set when this proxy object is sitting
    # in front of a repository object in the same server instance
    if self.forceGetCsVersion is not None:
        # Talking to a repository
        getCsVersion = self.forceGetCsVersion
    else:
        # This is a standalone proxy talking to a repository. Talk
        # the latest common protocol version
        getCsVersion = serverVersion

    # Make sure we have a way to get from here to there
    iterV = neededCsVersion
    verPath = [iterV]
    while iterV != wireCsVersion:
        if iterV not in CHANGESET_VERSIONS_PRECEDENCE:
            # No way to move forward
            break
        # Move one edge in the DAG, try again
        iterV = CHANGESET_VERSIONS_PRECEDENCE[iterV]
        verPath.append(iterV)

    # This is important; if it doesn't work out the cache is likely
    # not working.
    if verPath[-1] != wireCsVersion:
        raise errors.InvalidClientVersion(
            "Unable to produce changeset version %s "
            "with upstream server %s" % (neededCsVersion, wireCsVersion))

    try:
        changeSetList = self._getNeededChangeSets(caller,
            authToken, verPath, chgSetList, serverVersion,
            getCsVersion, wireCsVersion, neededCsVersion,
            recurse, withFiles, withFileContents, excludeAutoSource,
            mirrorMode, infoOnly)
    finally:
        if self.csCache:
            # In case we missed releasing some of the locks
            self.csCache.resetLocks()

    if not infoOnly:
        # Publish the changesets via a manifest file; the client fetches
        # them back through the "changeset?" URL below.
        manifest = netserver.ManifestWriter(self.cfg.tmpDir,
                resumeOffset=resumeOffset)
        for csInfo in changeSetList:
            manifest.append(csInfo.path,
                    expandedSize=csInfo.size,
                    isChangeset=True,
                    preserveFile=csInfo.cached,
                    offset=csInfo.offset,
                    )
        name = manifest.close()
        url = os.path.join(self.urlBase(), "changeset?%s" % name)
    else:
        url = ''

    if clientVersion < 50:
        # Pre-50 protocol: flatten per-job data into parallel lists.
        allSizes = [ x.size for x in changeSetList ]
        allTrovesNeeded = [ x for x in itertools.chain(
                             *[ x.trovesNeeded for x in changeSetList ] ) ]
        allFilesNeeded = [ x for x in itertools.chain(
                             *[ x.filesNeeded for x in changeSetList ] ) ]
        allTrovesRemoved = [ x for x in itertools.chain(
                             *[ x.removedTroves for x in changeSetList ] ) ]

        # client versions >= 44 use strings instead of ints for size
        # because xmlrpclib can't marshal ints > 2GiB
        if clientVersion >= 44:
            allSizes = [ str(x) for x in allSizes ]
        else:
            for size in allSizes:
                if size >= 0x80000000:
                    raise errors.InvalidClientVersion(
                        'This version of Conary does not support downloading '
                        'changesets larger than 2 GiB. Please install a new '
                        'Conary client.')

        if clientVersion < 38:
            return (url, allSizes, allTrovesNeeded, allFilesNeeded)

        return (url, allSizes, allTrovesNeeded, allFilesNeeded,
                allTrovesRemoved)

    # Protocol >= 50: per-job tuples instead of flattened lists.
    items = [ (str(x.size), x.trovesNeeded, x.filesNeeded, x.removedTroves)
        for x in changeSetList ]
    if clientVersion < 73:
        return url, items
    else:
        extra = {}
        if resumeOffset:
            extra['resumeOffset'] = resumeOffset
        # TODO: add checksum/tag/whatever here
        return url, items, extra
def _callGetChangeSetFingerprints(self, caller, chgSetList,
        recurse, withFiles, withFileContents, excludeAutoSource,
        mirrorMode):
    """Fetch cache fingerprints for each requested changeset job.

    Returns a list parallel to chgSetList; entries are '' when no
    fingerprint is available (no cache configured, or old upstream).
    """
    fingerprints = [ '' ] * len(chgSetList)
    if self.csCache:
        try:
            if mirrorMode:
                # The mirrorMode argument requires the protocol-49 form
                # of the call; the plain form uses protocol 43.
                fingerprints = caller.getChangeSetFingerprints(49,
                        chgSetList, recurse, withFiles, withFileContents,
                        excludeAutoSource, mirrorMode)
            else:
                fingerprints = caller.getChangeSetFingerprints(43,
                        chgSetList, recurse, withFiles, withFileContents,
                        excludeAutoSource)
        except errors.MethodNotSupported:
            # old server; act like no fingerprints were returned
            pass

    return fingerprints
# mixins can override this (to provide fingerprint caching, perhaps)
def lookupFingerprints(self, caller, authToken, chgSetList, recurse,
withFiles, withFileContents, excludeAutoSource,
mirrorMode):
return self._callGetChangeSetFingerprints(
caller, chgSetList, recurse, withFiles,
withFileContents, excludeAutoSource, mirrorMode)
def getChangeSetFingerprints(self, caller, authToken, clientVersion,
        chgSetList, recurse, withFiles, withFileContents,
        excludeAutoSource, mirrorMode=False):
    """Public RPC entry point; delegates to the (possibly cached)
    lookupFingerprints implementation."""
    return self.lookupFingerprints(caller, authToken, chgSetList, recurse,
        withFiles, withFileContents, excludeAutoSource, mirrorMode)
def _callGetChangeSet(self, caller, changeSetList, getCsVersion,
        wireCsVersion, neededCsVersion, neededFiles, recurse,
        withFiles, withFileContents, excludeAutoSource, mirrorMode,
        infoOnly):
    """Issue one upstream getChangeSet call for the jobs in neededFiles.

    Returns (url, csInfoList); url is None when only size information was
    obtained (infoOnly short-circuit) and csInfoList is parallel to
    neededFiles.
    """
    if getCsVersion >= 51 and wireCsVersion == neededCsVersion:
        # We may be able to get proper size information for this from
        # underlying server without fetcing the changeset (this isn't
        # true for internal servers or old protocols)
        rc = caller.getChangeSet(getCsVersion,
                          [ x[1][0] for x in neededFiles ],
                          recurse, withFiles, withFileContents,
                          excludeAutoSource,
                          neededCsVersion, mirrorMode,
                          infoOnly)
    elif getCsVersion >= 49:
        rc = caller.getChangeSet(getCsVersion,
                          [ x[1][0] for x in neededFiles ],
                          recurse, withFiles, withFileContents,
                          excludeAutoSource,
                          wireCsVersion, mirrorMode)
    else:
        # We don't support requesting specific changeset versions
        rc = caller.getChangeSet(getCsVersion,
                          [ x[1][0] for x in neededFiles ],
                          recurse, withFiles, withFileContents,
                          excludeAutoSource)

    csInfoList = []
    url = rc[0]
    if getCsVersion < 50:
        # convert pre-protocol 50 returns into a protocol 50 return
        # turn list of sizes back into a single size
        assert(len(rc[1]) == 1)
        rc[1] = rc[1][0]
        rc = rc[1:]
        if getCsVersion < 38:
            # protocol version 38 does not return removedTroves.
            # tack an empty list on it
            rc.append([])
        allInfo = [ rc ]
    else:
        allInfo = rc[1]
    for info in allInfo:
        csInfo = ChangeSetInfo()
        (size, trovesNeeded, filesNeeded, removedTroves) = info[0:4]
        if len(info) > 4:
            # Protocols that report it separately: raw (on-the-wire) size.
            rawSize = int(info[4])
        else:
            rawSize = int(size)
        csInfo.size = int(size)
        csInfo.rawSize = rawSize
        csInfo.trovesNeeded = trovesNeeded
        csInfo.filesNeeded = filesNeeded
        csInfo.removedTroves = removedTroves
        csInfo.version = wireCsVersion

        csInfoList.append(csInfo)

    del trovesNeeded
    del filesNeeded
    del removedTroves

    if (getCsVersion >= 51 and wireCsVersion == neededCsVersion
            and infoOnly and not url):
        # We only got size information from the repository; there
        # is no changeset to fetch/cache. We can bail out early.
        for jobIdx, csInfo in enumerate(csInfoList):
            csInfo.path = None
            changeSetList[jobIdx] = csInfo
        return None, csInfoList

    return url, csInfoList
def _getCachedChangeSetList(self, chgSetList, fingerprints, verPath):
    """
    Return a parallel list to chgSetList and fingerprints, with items
    set on the corresponding position if the changeset was retrieved from
    the cache
    """
    changeSetList = [ None ] * len(chgSetList)
    if not self.csCache:
        # We have no cache, so don't even bother
        return changeSetList

    # We need to order by fingerprint first
    # This prevents deadlocks from occurring - as long as different
    # processes acquire locks in the same order, we should be fine
    orderedData = sorted(
        enumerate(itertools.izip(chgSetList, fingerprints)),
        key = lambda x: x[1][1])
    for jobIdx, (rawJob, fingerprint) in orderedData:
        # if we have both a cs fingerprint and a cache, then we will
        # cache the cs for this job
        cachable = bool(fingerprint)
        if not cachable:
            continue

        # look up the changeset in the cache, oldest to newest
        for iterV in verPath:
            # We will only lock the last version (wireCsVersion)
            # Everything else gets derived from it, and is fast to convert
            shouldLock = (iterV == verPath[-1])
            csInfo = self.csCache.get((fingerprint, iterV),
                    shouldLock = shouldLock)
            if csInfo:
                # Found in the cache (possibly with an older version)
                csInfo.fingerprint = fingerprint
                changeSetList[jobIdx] = csInfo
                break

    return changeSetList
def _getNeededChangeSets(self, caller, authToken, verPath, chgSetList,
        serverVersion,
        getCsVersion, wireCsVersion, neededCsVersion,
        recurse, withFiles, withFileContents, excludeAutoSource,
        mirrorMode, infoOnly, _recursed = False):
    """Resolve every job in chgSetList to a ChangeSetInfo.

    Satisfies what it can from the cache, fetches the rest upstream
    (caching them), then down-converts any changeset whose version is
    newer than neededCsVersion. Returns the fully-populated list.
    """
    fingerprints = self.lookupFingerprints(caller, authToken, chgSetList,
        recurse, withFiles, withFileContents, excludeAutoSource,
        mirrorMode)

    changeSetList = self._getCachedChangeSetList(chgSetList, fingerprints,
        verPath)

    # Jobs not satisfied from the cache, as (index, (job, fingerprint)).
    changeSetsNeeded = \
        [ x for x in
            enumerate(itertools.izip(chgSetList, fingerprints))
            if changeSetList[x[0]] is None ]
    self.pokeCounter('cscache_misses', len(changeSetsNeeded))
    self.pokeCounter('cscache_hits', len(chgSetList) - len(changeSetsNeeded))

    if self.callLog and changeSetsNeeded:
        self.callLog.log(None, authToken, '__createChangeSets',
                changeSetsNeeded, systemId=caller.systemId)

    if serverVersion < 50 or self.forceSingleCsJob:
        # calling internal changeset generation, which only supports
        # a single job or calling an upstream repository that does not
        # support protocol version 50 (needed to send all jobs at once)
        neededList = [ [ x ] for x in changeSetsNeeded ]
    else:
        # calling a server which supports both neededCsVersion and
        # returns per-job supplemental information
        if changeSetsNeeded:
            neededList = [ changeSetsNeeded ]
        else:
            neededList = []

    # List of (url, csInfoList)
    # This is a loop to make supporting single-request changeset generation
    # easy; we need that not only for old servers we proxy, but for an
    # internal server as well (since internal servers only support
    # single jobs!)
    urlInfoList = [ self._callGetChangeSet(caller, changeSetList,
            getCsVersion, wireCsVersion, neededCsVersion, neededHere,
            recurse, withFiles, withFileContents, excludeAutoSource,
            mirrorMode, infoOnly)
        for neededHere in neededList ]

    forceProxy = caller._lastProxy
    for (url, csInfoList), neededHere in zip(urlInfoList, neededList):
        if url is None:
            # Only size information was received; nothing further needed
            continue
        self._cacheChangeSet(url, neededHere, csInfoList, changeSetList,
                forceProxy)

    # hash versions to quickly find the index in verPath
    verHash = dict((csVer, idx) for (idx, csVer) in enumerate(verPath))

    # Handle format conversions
    for csInfo in changeSetList:
        if infoOnly and csInfo.path is None:
            assert(neededCsVersion == wireCsVersion)
            # the changeset isn't present
            continue
        if csInfo.version == neededCsVersion:
            # We already have the right version
            continue

        # Now walk the precedence list backwards for conversion
        idx = verHash[csInfo.version]
        for iterV in reversed(verPath[:idx]):
            # Convert the changeset
            path, newSize = self._convertChangeSet(csInfo, iterV,
                    csInfo.version)
            csInfo.size = newSize
            csInfo.version = iterV

            cachable = (csInfo.fingerprint and self.csCache)
            if not cachable:
                # we're not caching; erase the old version
                os.unlink(csInfo.path)
                csInfo.path = path
                csInfo.offset = 0
            else:
                # NOTE(review): the open(path) handle is handed to the
                # cache and never explicitly closed here, and the temp
                # file at `path` is not removed -- confirm csCache.set
                # takes ownership of both.
                self.csCache.set((csInfo.fingerprint, iterV),
                        (csInfo, open(path), None))
        assert csInfo.version == neededCsVersion
    return changeSetList
def _cacheChangeSet(self, url, neededHere, csInfoList, changeSetList,
        forceProxy):
    """Download the changeset payload at url and store each sub-changeset
    either into the cache (when a fingerprint exists) or into a temp file,
    filling the corresponding slots of changeSetList."""
    inPath = None
    if hasattr(url, 'read'):
        # Nested changeset file in multi-part response
        inF = url
    elif url.startswith('file://localhost/'):
        # Strip 'file://localhost' but keep the leading '/' of the path.
        inPath = url[16:]
        inF = open(inPath, 'rb')
    else:
        headers = [('X-Conary-Servername', self._serverName)]
        try:
            inF = transport.ConaryURLOpener(proxyMap=self.proxyMap
                    ).open(url, forceProxy=forceProxy, headers=headers)
        except transport.TransportError, e:
            raise errors.RepositoryError(str(e))

    # The response is a concatenation of changesets in job order; consume
    # csInfo.rawSize bytes for each.
    for (jobIdx, (rawJob, fingerprint)), csInfo in \
            itertools.izip(neededHere, csInfoList):
        cachable = bool(fingerprint and self.csCache)

        if cachable:
            # Add it to the cache
            self.csCache.set((fingerprint, csInfo.version),
                    (csInfo, inF, csInfo.rawSize))
            csInfo.cached = True
        else:
            # If only one file was requested, and it's already
            # a file://, this is unnecessary :-(
            (fd, tmpPath) = tempfile.mkstemp(dir = self.cfg.tmpDir,
                                             suffix = '.ccs-out')
            outF = os.fdopen(fd, "w")
            util.copyfileobj(inF, outF, sizeLimit = csInfo.rawSize)
            outF.close()
            csInfo.path = tmpPath
            csInfo.offset = 0
            csInfo.cached = False
        csInfo.fingerprint = fingerprint
        changeSetList[jobIdx] = csInfo

    if inPath:
        # Local file served via file://; remove it once consumed.
        os.unlink(inPath)
    inF.close()
def _localUrl(self, url):
    """If the changeset named by url is available locally, return an open
    temporary file with its contents; otherwise return url unchanged."""
    # If the changeset can be downloaded locally, return it
    parts = util.urlSplit(url)
    fname = parts[6]
    if '/' in fname:
        # Not a bare manifest name; can't be one of our local files.
        return url
    try:
        producer = ChangesetProducer(
                os.path.join(self.cfg.tmpDir, fname + '-out'),
                self.getContentsStore())
    except IOError as err:
        if err.args[0] == errno.ENOENT:
            # No such local manifest; fall back to fetching the URL.
            return url
        raise
    tmpFile = tempfile.TemporaryFile(dir=self.cfg.tmpDir, suffix='.tmp')
    for data in producer:
        tmpFile.write(data)
    tmpFile.seek(0)
    return tmpFile
class BaseCachingChangesetFilter(ChangesetFilter):
    """Changeset filter which uses a directory to create a ChangesetCache
    instance for the cache; caching is disabled when no
    changesetCacheDir is configured."""
    def __init__(self, cfg, basicUrl):
        if cfg.changesetCacheDir:
            util.mkdirChain(cfg.changesetCacheDir)
            csCache = ChangesetCache(
                    datastore.ShallowDataStore(cfg.changesetCacheDir),
                    cfg.changesetCacheLogFile)
        else:
            # No cache directory configured: run uncached.
            csCache = None
        ChangesetFilter.__init__(self, cfg, basicUrl, csCache)
class RepositoryFilterMixin(object):
    """Simple mixin which lets a BaseProxy derivative sit in front of a
    in-process repository class (defined in netrepos.py) rather than
    acting as a proxy for a network repository somewhere else. repos
    is a netrepos.NetworkRepositoryServer instance
    """

    # The in-process repository only speaks its newest protocol version.
    forceGetCsVersion = ChangesetFilter.SERVER_VERSIONS[-1]
    forceSingleCsJob = False

    def __init__(self, repos):
        self.repos = repos
        self.callFactory = RepositoryCallFactory(repos, self.log)
class Memcache(object):
    """Mixin for providing memcache based caching of fingerprint,
    troveinfo and deplists."""

    def __init__(self, cfg):
        self.memCacheTimeout = cfg.memCacheTimeout
        self.memCacheLocation = cfg.memCache
        self.memCacheUserAuth = cfg.memCacheUserAuth
        self.memCachePrefix = cfg.memCachePrefix

        if self.memCacheTimeout >= 0:
            self.memCache = cache.getCache(self.memCacheLocation)
        else:
            # Negative timeout disables caching entirely.
            self.memCache = cache.EmptyCache()
def _getKeys(self, authToken, listArgs, extraArgs=(), extraKwargs=None):
    """Build one memcache key per element of listArgs.

    Each key hashes together the per-user auth info (when partitioning by
    user is enabled), the prefix/extra call arguments, and the list item.
    """
    if extraKwargs is None:
        extraKwargs = ()
    else:
        # Sort kwargs so equivalent calls hash identically.
        extraKwargs = tuple(sorted(extraKwargs.items()))
    if self.memCacheUserAuth:
        authInfo = (authToken[0], authToken[1], tuple(authToken[2]))
    else:
        authInfo = ()
    if self.memCachePrefix:
        extraArgs = (self.memCachePrefix,) + extraArgs
    # Hash the common arguments separately to save a few cycles.
    # Microbenchmarks indicate that this adds effectively zero cost even
    # with only one item.
    common = digestlib.sha1(
            str(authInfo + extraArgs + extraKwargs)
            ).digest()
    return [digestlib.sha1(common + str(x)).hexdigest() for x in listArgs]
def _coalesce(self, authToken, callable, listArg, *extraArgs, **kwargs):
    """Memoize a proxy repository call.

    @param authToken: Caller's credentials, used to partition the saved
        results and in the method call if necessary.
    @param callable: Callable to invoke to retrieve results. It should
        accept a list of queries as the first argument, and return a
        parallel list of results.
    @param listArg: List to pass as the first argument to C{callable}.
    @param extraArgs: Additional positional arguments.
    @param key_prefix: String to prepend to the cache key. (keyword only)
    @param kwargs: Additional keyword arguments.
    """
    key_prefix = kwargs.pop('key_prefix')
    keys = self._getKeys(authToken, listArg, extraArgs, kwargs)
    cachedDict = self.memCache.get_multi(keys, key_prefix = key_prefix)
    # Parallel to listArg; None placeholders for uncached entries.
    finalResults = [ cachedDict.get(x) for x in keys ]

    needed = [ (i, x) for i, x in enumerate(listArg)
            if keys[i] not in cachedDict ]
    if needed:
        # Fetch only the misses, then backfill results and the cache.
        others = callable([x[1] for x in needed], *extraArgs, **kwargs)
        for (i, x), result in itertools.izip(needed, others):
            finalResults[i] = result

        updates = dict( (keys[i], result) for
                        (i, x), result in itertools.izip(needed, others) )
        self.memCache.set_multi(updates,
                key_prefix = key_prefix,
                time = self.memCacheTimeout)

    return finalResults
def lookupFingerprints(self, caller, authToken, chgSetList, recurse,
                       withFiles, withFileContents, excludeAutoSource,
                       mirrorMode):
    """Memcached override of ChangesetFilter.lookupFingerprints."""
    return self._coalesce(authToken,
            lambda *args : self._callGetChangeSetFingerprints(
                    caller, *args),
            chgSetList,
            recurse, withFiles, withFileContents, excludeAutoSource,
            mirrorMode, key_prefix = "FPRINT")
def getDepsForTroveList(self, caller, authToken, clientVersion, troveList,
                        provides = True, requires = True):
    """Memcached wrapper for the upstream getDepsForTroveList call."""
    # this could merge provides/requires in the cache (perhaps always
    # requesting both?), but doesn't
    return self._coalesce(authToken,
            lambda *args, **kwargs :
                caller.getDepsForTroveList(clientVersion, *args,
                                           **kwargs),
            troveList, provides = provides, requires = requires,
            key_prefix = "DEPS")
def getTroveInfo(self, caller, authToken, clientVersion, infoType,
                 troveList):
    """Memcached wrapper for the upstream getTroveInfo call; note the
    wrapped call takes (infoType, troveList) in swapped order."""
    return self._coalesce(authToken,
            lambda nTroveList, nInfoType:
                caller.getTroveInfo(clientVersion, nInfoType,
                                    nTroveList),
            troveList, infoType,
            key_prefix = "TROVEINFO")
def pokeCounter(self, name, delta):
    """Increment the statistics counter ``name`` by ``delta`` in memcache,
    creating (seeding) the counter when it does not exist yet."""
    if not delta:
        # Zero deltas are a no-op; don't touch memcached at all.
        return
    key = name
    if self.memCachePrefix:
        key = self.memCachePrefix + ':' + key
    # incr() returns a false value when the key is missing; seed the
    # counter with the initial value in that case.
    result = self.memCache.incr(key, delta)
    if not result:
        self.memCache.set(key, str(delta))
class SimpleRepositoryFilter(Memcache, BaseCachingChangesetFilter, RepositoryFilterMixin):
    """Basic class used for creating repositories with Conary. It places
    a changeset caching layer on top of an in-memory repository."""
    def __init__(self, cfg, basicUrl, repos):
        Memcache.__init__(self, cfg)
        BaseCachingChangesetFilter.__init__(self, cfg, basicUrl)
        RepositoryFilterMixin.__init__(self, repos)

    def getContentsStore(self):
        # Delegate to the in-process repository's contents store.
        return self.repos.getContentsStore()
class FileCachingChangesetFilter(BaseCachingChangesetFilter):
    """Adds caching for getFileContents() call to allow proxies to keep
    those results around."""
    def __init__(self, cfg, basicUrl):
        BaseCachingChangesetFilter.__init__(self, cfg, basicUrl)
        # Separate datastore (keyed on fileId + '-c') for file contents.
        util.mkdirChain(cfg.proxyContentsDir)
        self.contents = datastore.DataStore(cfg.proxyContentsDir)
def getFileContents(self, caller, authToken, clientVersion, fileList,
                    authCheckOnly = False):
    """Serve file contents through the proxy's on-disk contents cache.

    Splits fileList into cached and uncached entries, auth-checks the
    cached ones upstream, downloads and caches the missing ones, then
    publishes everything via a local changeset manifest.
    """
    if clientVersion < 42:
        # server doesn't support auth checks through getFileContents
        return caller.getFileContents(clientVersion, fileList, authCheckOnly)

    hasFiles = []
    neededFiles = []

    for encFileId, encVersion in fileList:
        fileId = sha1helper.sha1ToString(self.toFileId(encFileId))
        if self.contents.hasFile(fileId + '-c'):
            path = self.contents.hashToPath(fileId + '-c')
            pathfd = None
            try:
                try:
                    # touch the file; we don't want it to be removed
                    # by a cleanup job when we need it
                    pathfd = os.open(path, os.O_RDONLY)
                    hasFiles.append((encFileId, encVersion))
                    continue
                except OSError:
                    # Cache entry vanished between hasFile() and open();
                    # fall through and refetch it below.
                    pass
            finally:
                # Fixed: compare against None -- os.open() may legally
                # return descriptor 0, which is falsy and used to be
                # leaked by the previous `if pathfd:` check.
                if pathfd is not None:
                    os.close(pathfd)

        neededFiles.append((encFileId, encVersion))

    # make sure this user has permissions for these file contents. an
    # exception will get raised if we don't have sufficient permissions
    if hasFiles:
        caller.getFileContents(clientVersion, hasFiles, True)

    if neededFiles:
        # now get the contents we don't have cached
        (url, sizes) = caller.getFileContents(
                clientVersion, neededFiles, False)
        url = self._localUrl(url)
        self._saveFileContents(neededFiles, url, sizes,
                forceProxy=caller._lastProxy)
    # All requested files are cached now; serve them from the cache.
    url, sizes = self._saveFileContentsChangeset(clientVersion, fileList)
    return url, sizes
def _saveFileContents(self, fileList, url, sizes, forceProxy):
    """Download the concatenated file contents at url and store each
    file (sliced out by its size) into the proxy contents cache."""
    # insure that the size is an integer -- protocol version
    # 44 returns a string to avoid XML-RPC marshal limits
    sizes = [ int(x) for x in sizes ]
    if hasattr(url, "read"):
        # Already an open local file (from _localUrl); measure its size.
        dest = url
        dest.seek(0, 2)
        size = dest.tell()
        dest.seek(0)
    else:
        (fd, tmpPath) = tempfile.mkstemp(dir = self.cfg.tmpDir,
                                         suffix = '.tmp')
        dest = util.ExtendedFile(tmpPath, "w+", buffering = False)
        os.close(fd)
        # Unlink immediately; the open file object keeps the data alive.
        os.unlink(tmpPath)
        headers = [('X-Conary-Servername', self._serverName)]
        inUrl = transport.ConaryURLOpener(proxyMap=self.proxyMap).open(url,
                forceProxy=forceProxy, headers=headers)
        size = util.copyfileobj(inUrl, dest)
        inUrl.close()
        dest.seek(0)

    totalSize = sum(sizes)
    start = 0

    # We skip the integrity check here because (1) the hash we're using
    # has '-c' applied and (2) the hash is a fileId sha1, not a file
    # contents sha1
    for (encFileId, envVersion), size in itertools.izip(fileList,
                                                        sizes):
        nestedF = util.SeekableNestedFile(dest, size, start)
        self._cacheFileContents(encFileId, nestedF)
        totalSize -= size
        start += size

    # Every advertised byte must have been consumed.
    assert(totalSize == 0)
    # this closes the underlying fd opened by mkstemp for us
    dest.close()
def _saveFileContentsChangeset(self, clientVersion, fileList):
    """Publish cached file contents for fileList through a manifest and
    return (url, sizeList) in the client's expected format."""
    # NOTE(review): other call sites build ManifestWriter(self.cfg.tmpDir,
    # ...) -- confirm self.tmpPath is intended here and is defined on this
    # class hierarchy.
    manifest = netserver.ManifestWriter(self.tmpPath)
    sizeList = []
    for encFileId, encVersion in fileList:
        fileId = sha1helper.sha1ToString(self.toFileId(encFileId))
        filePath = self.contents.hashToPath(fileId + '-c')
        size = os.stat(filePath).st_size
        sizeList.append(size)

        # Keep the cache file in place after serving it.
        manifest.append(filePath,
                expandedSize=size,
                isChangeset=False,
                preserveFile=True,
                offset=0,
                )
    name = manifest.close()
    url = os.path.join(self.urlBase(), "changeset?%s" % name)
    # client versions >= 44 use strings instead of ints for size
    # because xmlrpclib can't marshal ints > 2GiB
    if clientVersion >= 44:
        sizeList = [ str(x) for x in sizeList ]
    else:
        for size in sizeList:
            if size >= 0x80000000:
                raise errors.InvalidClientVersion(
                        'This version of Conary does not support '
                        'downloading file contents larger than 2 '
                        'GiB.  Please install a new Conary client.')
    return (url, sizeList)
def _cacheFileContents(self, encFileId, fileObj):
    """Store one file's contents in the proxy cache under its fileId."""
    # We skip the integrity check here because (1) the hash we're using
    # has '-c' applied and (2) the hash is a fileId sha1, not a file
    # contents sha1
    fileId = sha1helper.sha1ToString(self.toFileId(encFileId))
    self.contents.addFile(fileObj, fileId + '-c',
                          precompressed = True,
                          integrityCheck = False)
class ProxyRepositoryServer(Memcache, FileCachingChangesetFilter):
    """Class for proxy servers used by standalone and apache
    implementations; adds a proxy specific version of
    getFileContentsFromTrove()."""

    # Protocol 42 is the floor: getFileContents auth checks need it.
    SERVER_VERSIONS = range(42, netserver.SERVER_VERSIONS[-1] + 1)
    forceSingleCsJob = False

    def __init__(self, cfg, basicUrl):
        Memcache.__init__(self, cfg)
        FileCachingChangesetFilter.__init__(self, cfg, basicUrl)
        self.callFactory = ProxyCallFactory()
def setBaseUrlOverride(self, rawUrl, headers, isSecure):
    """Derive the base URL clients should see from the X-Conary-Proxy-Host
    header, so rewritten URLs point at this proxy rather than the server.

    When no proxy-host header is present the override is cleared so
    urlBase() falls back to the configured base.
    """
    # Setting it to None here will make urlBase() do the right thing
    proxyHost = headers.get('X-Conary-Proxy-Host', None)
    if not proxyHost:
        self._baseUrlOverride = None
        return
    # We really don't want to use rawUrl in the proxy, that points to the
    # server and it won't help rewriting URLs with that address.
    # (Fixed: removed a dead store that assigned the raw header value to
    # self._baseUrlOverride before it was unconditionally overwritten by
    # one of the branches below.)
    proto = (isSecure and "https") or "http"
    if rawUrl.startswith('/'):
        self._baseUrlOverride = '%s://%s%s' % (proto, proxyHost, rawUrl)
    else:
        # Replace scheme and host of the raw URL with the proxy's.
        items = list(urlparse.urlparse(rawUrl))
        items[0] = proto
        items[1] = proxyHost
        self._baseUrlOverride = urlparse.urlunparse(items)
def getFileContentsFromTrove(self, caller, authToken, clientVersion,
                             troveName, version, flavor, pathList):
    """Fetch per-path file contents for a trove from upstream, split them
    into individual temp files, and re-publish them via a local manifest.

    Returns (url, sizeList) in the client's expected format.
    """
    (url, sizes) = caller.getFileContentsFromTrove(
            clientVersion, troveName, version, flavor, pathList)
    # XXX This look too similar to _saveFileContents* - at some point we
    # should refactor this code to call those.

    # insure that the size is an integer -- protocol version
    # 44 returns a string to avoid XML-RPC marshal limits
    sizes = [ int(x) for x in sizes ]
    (fd, tmpPath) = tempfile.mkstemp(dir = self.cfg.tmpDir,
                                     suffix = '.tmp')
    dest = util.ExtendedFile(tmpPath, "w+", buffering = False)
    os.close(fd)
    # Unlink immediately; the open file object keeps the data alive.
    os.unlink(tmpPath)
    headers = [('X-Conary-Servername', self._serverName)]
    inUrl = transport.ConaryURLOpener(proxyMap=self.proxyMap).open(url,
            forceProxy=caller._lastProxy, headers=headers)
    size = util.copyfileobj(inUrl, dest)
    inUrl.close()
    dest.seek(0)

    totalSize = sum(sizes)
    start = 0

    # We skip the integrity check here because (1) the hash we're using
    # has '-c' applied and (2) the hash is a fileId sha1, not a file
    # contents sha1
    fileList = []
    for size in sizes:
        nestedF = util.SeekableNestedFile(dest, size, start)
        (fd, tmpPath) = tempfile.mkstemp(dir = self.cfg.tmpDir,
                                         suffix = '.tmp')
        # NOTE(review): 'w' opens the temp file in text mode while the
        # payload is binary -- harmless on POSIX/Python 2, but confirm
        # before running elsewhere.
        with os.fdopen(fd, 'w') as f_out:
            size = util.copyfileobj(nestedF, f_out)
        totalSize -= size
        start += size
        fileList.append(tmpPath)
    # Every advertised byte must have been consumed.
    assert(totalSize == 0)
    # this closes the underlying fd opened by mkstemp for us
    dest.close()

    # NOTE(review): other call sites build ManifestWriter(self.cfg.tmpDir,
    # ...) -- confirm self.tmpPath is intended here.
    manifest = netserver.ManifestWriter(self.tmpPath)
    sizeList = []
    for filePath in fileList:
        size = os.stat(filePath).st_size
        sizeList.append(size)

        # These temp files are one-shot; let the server delete them.
        manifest.append(filePath,
                expandedSize=size,
                isChangeset=False,
                preserveFile=False,
                offset=0,
                )
    name = manifest.close()
    url = os.path.join(self.urlBase(), "changeset?%s" % name)
    # client versions >= 44 use strings instead of ints for size
    # because xmlrpclib can't marshal ints > 2GiB
    if clientVersion >= 44:
        sizeList = [ str(x) for x in sizeList ]
    else:
        for size in sizeList:
            if size >= 0x80000000:
                raise errors.InvalidClientVersion(
                        'This version of Conary does not support '
                        'downloading file contents larger than 2 '
                        'GiB.  Please install a new Conary client.')
    return (url, sizeList)
class ChangesetCache(object):
    """Provides a place to cache changesets; uses a directory for them
    all indexed by fingerprint."""

    # Part of every cache file name (see hashKey); bump when the on-disk
    # layout changes incompatibly.
    CACHE_VERSION = 1

    def __init__(self, dataStore, logPath=None):
        self.dataStore = dataStore
        self.logPath = logPath
        # csPath -> util.LockedFile for entries currently locked by get().
        self.locksMap = {}
        # Use only 1/4 our file descriptor limit for locks
        limit = resource.getrlimit(resource.RLIMIT_NOFILE)[0]
        # NOTE: integer (floor) division under Python 2's '/'.
        self.maxLocks = limit / 4
def hashKey(self, key):
(fingerPrint, csVersion) = key
return self.dataStore.hashToPath(fingerPrint + '-%d.%d' % (
csVersion, self.CACHE_VERSION))
def set(self, key, value):
(csInfo, inF, sizeLimit) = value
csPath = self.hashKey(key)
util.mkdirChain(os.path.dirname(csPath))
csObj = self.locksMap.get(csPath)
if csObj is None:
# We did not get a lock for it
csObj = util.AtomicFile(csPath, tmpsuffix = '.ccs-new')
csInfo.path = csPath
csInfo.write(csObj)
try:
written = util.copyfileobj(inF, csObj, sizeLimit=sizeLimit)
except transport.MultipartDecodeError:
raise errors.RepositoryError("The changeset was corrupted in "
"transit, please try again")
if sizeLimit is not None and written != sizeLimit:
raise errors.RepositoryError("Changeset was truncated in transit "
"(expected %d bytes, got %d bytes for subchangeset)" %
(sizeLimit, written))
csObj.commit()
# If we locked the cache file, we need to no longer track it
self.locksMap.pop(csPath, None)
self._log('WRITE', key, size=sizeLimit)
def get(self, key, shouldLock = True):
csPath = self.hashKey(key)
csVersion = key[1]
if len(self.locksMap) >= self.maxLocks:
shouldLock = False
lockfile = util.LockedFile(csPath)
util.mkdirChain(os.path.dirname(csPath))
fileObj = lockfile.open(shouldLock=shouldLock)
if fileObj is None:
if shouldLock:
# We got the lock on csPath
self.locksMap[csPath] = lockfile
self._log('MISS', key)
return None
csInfo = ChangeSetInfo(cacheObj=fileObj)
csInfo.path = csPath
csInfo.cached = True
csInfo.version = csVersion
self._log('HIT', key)
return csInfo
def resetLocks(self):
self.locksMap.clear()
def _log(self, status, key, **kwargs):
"""Log a HIT/MISS/WRITE to file."""
if self.logPath is None:
return
now = time.time()
msecs = (now - long(now)) * 1000
extra = ''.join(' %s=%r' % (x, y) for (x, y) in kwargs.items())
rec = '%s,%03d %s-%d %s%s\n' % (
time.strftime('%F %T', time.localtime(now)), msecs,
key[0], key[1], status, extra)
open(self.logPath, 'a').write(rec)
class AuthenticationInformation(object):
    """Compact summary of auth credentials, suitable for keeping in a cache.

    Only a SHA-1 digest of the password is retained, never the cleartext.
    """
    __slots__ = ( 'name', 'pw', 'entitlements' )

    def __init__(self, authToken, entitlements):
        userName, password = authToken[0], authToken[1]
        self.name = userName
        # Store a digest rather than the password itself.
        self.pw = sha1helper.sha1ToString(password)
        self.entitlements = sorted(entitlements)
def redirectUrl(authToken, url):
    """Return ``url`` with the credentials from ``authToken`` embedded in
    its network-location component (``user:password@host``)."""
    parts = url.split('/')
    user = netclient.quote(authToken[0])
    password = netclient.quote(authToken[1])
    # parts[2] is the netloc of an absolute URL ("scheme://netloc/...").
    parts[2] = '%s:%s@' % (user, password) + parts[2]
    return '/'.join(parts)
def formatViaHeader(localAddr, protocolString, prefix=''):
    """Build the HTTP Via header value for this proxy hop.

    When ``prefix`` (an upstream Via value) is given, this hop is appended
    to it, comma separated.
    """
    via = "%s %s (Conary/%s)" % (protocolString, localAddr,
                                 constants.version)
    if not prefix:
        return via
    return prefix + ', ' + via
class ProxyRepositoryError(Exception):
    """Carries a repository-side error (its name plus call arguments)
    across the proxy boundary so it can be re-raised for the client."""

    def __init__(self, name, args, kwArgs):
        self.name = name
        self.kwArgs = kwArgs
        self.args = tuple(args)
class ChangesetProducer(object):
    """
    Transform a changeset manifest (something.cf-out) into an iterable stream
    of bytes.

    The manifest lists one or more on-disk files (possibly nested inside a
    container file at an offset) that together form the changeset a client
    requested.  Iterating the producer yields the concatenated bytes,
    optionally resuming part-way through for interrupted downloads.
    """
    def __init__(self, manifestPath, contentsStore):
        # contentsStore resolves sha1 references to file-contents paths
        # (used when expanding "refr" entries in _readNestedFile).
        self.contentsStore = contentsStore
        # Each item: (path, expandedSize, isChangeset, preserveFile, offset).
        self.items = []
        self.totalSize = 0
        # Byte offset to resume from, or None to stream from the beginning.
        self.resumeOffset = None
        assert manifestPath.endswith('-out')
        if manifestPath.endswith('.cf-out'):
            # Manifest of items to produce
            for line in open(manifestPath):
                line = line.split()
                if len(line) == 1:
                    # Single-token lines are key=value options; only
                    # resumeOffset is recognized.
                    key, value = line[0].split('=')
                    if key == 'resumeOffset':
                        self.resumeOffset = int(value)
                    else:
                        raise RuntimeError("invalid key in changeset manifest")
                    continue
                (path, expandedSize, isChangeset, preserveFile, offset,
                    ) = line
                expandedSize = long(expandedSize)
                self.items.append((path, expandedSize, int(isChangeset),
                    int(preserveFile), int(offset)))
                self.totalSize += expandedSize
            # The manifest is single-use; drop it once parsed.
            util.removeIfExists(manifestPath)
        else:
            # Single prepared temporary file (always deleted)
            try:
                expandedSize = os.stat(manifestPath).st_size
            except OSError as err:
                raise IOError(*err.args)
            self.items.append((manifestPath, expandedSize, 0, 0, 0))

    def getSize(self):
        # Total bytes the iterator will yield, net of any resume offset.
        return self.totalSize - (self.resumeOffset or 0)

    def __iter__(self):
        # Stream each item in manifest order, skipping the first
        # resumeOffset expanded bytes across item boundaries.
        for (path, expandedSize, isChangeset, preserveFile, offset,
                ) in self.items:
            container = util.ExtendedFile(path, 'rb', buffering=False)
            rawSize = os.fstat(container.fileno()).st_size - offset
            fobj = util.SeekableNestedFile(container, rawSize, offset)
            if self.resumeOffset:
                self.resumeOffset -= expandedSize
                if self.resumeOffset >= 0:
                    # This file has been skipped entirely
                    additionalOffset = expandedSize
                else:
                    # Part of this file will be returned
                    remaining = -self.resumeOffset
                    additionalOffset = expandedSize - remaining
                    self.resumeOffset = None
            else:
                additionalOffset = 0
            assert 0 <= additionalOffset <= expandedSize
            if additionalOffset == expandedSize:
                # Skipped
                pass
            elif isChangeset:
                # Changeset containers may hold external content
                # references, expanded on the fly via _readNestedFile.
                changeSet = filecontainer.FileContainer(fobj)
                for data in changeSet.dumpIter(self._readNestedFile,
                        offset=additionalOffset):
                    yield data
            else:
                fobj.seek(additionalOffset)
                for data in util.iterFileChunks(fobj):
                    yield data
            container.close()
            if not preserveFile:
                os.unlink(path)

    def _readNestedFile(self, name, tag, rawSize, subfile):
        """Use with ChangeSet.dumpIter to handle external file references."""
        if changeset.ChangedFileTypes.refr[4:] == tag[2:]:
            # this is a reference to a compressed file in the contents store
            entry = subfile.read()
            sha1, expandedSize = entry.split(' ')
            expandedSize = int(expandedSize)
            # Rewrite the tag so the consumer sees ordinary file contents.
            tag = tag[0:2] + changeset.ChangedFileTypes.file[4:]
            path = self.contentsStore.hashToPath(
                sha1helper.sha1FromString(sha1))
            fobj = open(path, 'rb')
            return tag, expandedSize, fobj
        else:
            # this is data from the changeset itself
            return tag, rawSize, subfile
# ewtroan: for the internal proxy, we support client version 38 but need to talk to a server which is at least version 41
# ewtroan: for external proxy, we support client version 41 and need a server which is at least 41
# ewtroan: and when I get a call, I need to know what version the server is, which I can't keep anywhere as state
# ewtroan: but I can't very well tell a client version 38 to call back with server version 41
# Gafton: hmm - is there a way to differentiate your internal/external state in the code ?
# ewtroan: I'm going to split the classes
# ***ewtroan copies some code around
# Gafton: same basic class with different dressings?
# ewtroan: I set the fullproxy to be versions 41-43
# ewtroan: while the changeset caching advertises versions 38-43
# ewtroan: it works because the internal proxy only talks to the internal repository, and those don't really need to explicitly negotiate
# ewtroan: not a perfect model, but good enough
# Gafton: okay, that makes sense
# ewtroan: and I'll make the internal one override the protocol version to call into the bottom one with for getChangeSet() and for the external one use the protocol version the client asked for
# ewtroan: which will all work fine with the autoconverstion of formats in the proxy
| apache-2.0 |
morenopc/edx-platform | lms/djangoapps/foldit/views.py | 191 | 6365 | import hashlib
import json
import logging
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse
from django.views.decorators.http import require_POST
from django.views.decorators.csrf import csrf_exempt
from foldit.models import Score, PuzzleComplete
from student.models import unique_id_for_user
import re
log = logging.getLogger(__name__)
@login_required
@csrf_exempt
@require_POST
def foldit_ops(request):
    """
    Endpoint view for foldit operations.

    Dispatches each recognized operation in the POST body to its handler
    and returns a JSON list of per-operation response dicts.
    """
    responses = []
    if "SetPlayerPuzzleScores" in request.POST:
        responses.append(_handle_puzzle_scores(request))
    if "SetPuzzlesComplete" in request.POST:
        responses.append(_handle_puzzles_complete(request))
    return HttpResponse(json.dumps(responses))


def _verify_failed(operation):
    """Response dict reported when an operation's signature check fails."""
    return {"OperationID": operation,
            "Success": "false",
            "ErrorString": "Verification failed",
            "ErrorCode": "VerifyFailed"}


def _handle_puzzle_scores(request):
    """Verify and store a SetPlayerPuzzleScores operation; return its
    response dict."""
    puzzle_scores_json = request.POST.get("SetPlayerPuzzleScores")
    pz_verify_json = request.POST.get("SetPlayerPuzzleScoresVerify")
    log.debug("SetPlayerPuzzleScores message: puzzle scores: %r",
              puzzle_scores_json)
    puzzle_score_verify = json.loads(pz_verify_json)
    if not verifies_ok(request.user.email,
                       puzzle_scores_json, puzzle_score_verify):
        log.warning(
            "Verification of SetPlayerPuzzleScores failed:"
            "user %s, scores json %r, verify %r",
            request.user,
            puzzle_scores_json,
            pz_verify_json
        )
        return _verify_failed("SetPlayerPuzzleScores")
    # This is needed because we are not getting valid json - the
    # value of ScoreType is an unquoted string. Right now regexes are
    # quoting the string, but ideally the json itself would be fixed.
    # To allow for fixes without breaking this, the regex should only
    # match unquoted strings,
    a = re.compile(r':([a-zA-Z]*),')
    puzzle_scores_json = re.sub(a, r':"\g<1>",', puzzle_scores_json)
    puzzle_scores = json.loads(puzzle_scores_json)
    return save_scores(request.user, puzzle_scores)


def _handle_puzzles_complete(request):
    """Verify and store a SetPuzzlesComplete operation; return its
    response dict."""
    puzzles_complete_json = request.POST.get("SetPuzzlesComplete")
    pc_verify_json = request.POST.get("SetPuzzlesCompleteVerify")
    log.debug("SetPuzzlesComplete message: %r",
              puzzles_complete_json)
    puzzles_complete_verify = json.loads(pc_verify_json)
    if not verifies_ok(request.user.email,
                       puzzles_complete_json, puzzles_complete_verify):
        log.warning(
            "Verification of SetPuzzlesComplete failed:"
            " user %s, puzzles json %r, verify %r",
            request.user,
            puzzles_complete_json,
            pc_verify_json
        )
        return _verify_failed("SetPuzzlesComplete")
    puzzles_complete = json.loads(puzzles_complete_json)
    return save_complete(request.user, puzzles_complete)
def verify_code(email, val):
    """
    Given the email and passed in value (str), return the expected
    verification code.

    The code is the hex md5 digest of "<lowercased email>|<val>".
    NOTE: md5 here is dictated by the Foldit wire protocol, not chosen
    for security strength.
    """
    # TODO: is this the right string?
    verification_string = email.lower() + '|' + val
    # Encode explicitly: hashlib requires bytes on Python 3.
    return hashlib.md5(verification_string.encode('utf-8')).hexdigest()
def verifies_ok(email, val, verification):
    """
    Check that the verification dict correctly signs ``val`` for ``email``.

    Returns True if verification ok, False otherwise
    """
    if verification.get("VerifyMethod") != "FoldItVerify":
        log.debug("VerificationMethod in %r isn't FoldItVerify", verification)
        return False
    expected = verify_code(email, val)
    return expected == verification.get("Verify")
def save_scores(user, puzzle_scores):
    """Persist a batch of puzzle scores for ``user`` and build the
    SetPlayerPuzzleScores response.

    Each entry is expected to carry ScoreType, PuzzleID (int), BestScore
    (energy), CurrentScore (energy) and ScoreVersion (int).  Score rows
    are unique on (user, unique_user_id, puzzle_id, score_version); an
    existing row is updated in place, otherwise a new one is created.
    """
    score_responses = []
    for entry in puzzle_scores:
        log.debug("score: %s", entry)
        puzzle_id = entry['PuzzleID']
        version = entry['ScoreVersion']
        best = entry['BestScore']
        current = entry['CurrentScore']
        try:
            record = Score.objects.get(
                user=user,
                unique_user_id=unique_id_for_user(user),
                puzzle_id=puzzle_id,
                score_version=version)
            record.current_score = current
            record.best_score = best
        except Score.DoesNotExist:
            record = Score(
                user=user,
                unique_user_id=unique_id_for_user(user),
                puzzle_id=puzzle_id,
                current_score=current,
                best_score=best,
                score_version=version)
        record.save()
        score_responses.append({'PuzzleID': puzzle_id,
                                'Status': 'Success'})
    return {"OperationID": "SetPlayerPuzzleScores", "Value": score_responses}
def save_complete(user, puzzles_complete):
    """
    Persist the given puzzle-completion records for ``user`` and return
    the SetPuzzlesComplete response.

    Returned list of PuzzleIDs is in sorted order (I don't think client
    cares, but tests do), and covers all puzzles the user has ever
    completed, including on this request.
    """
    for complete in puzzles_complete:
        log.debug("Puzzle complete: %s", complete)
        puzzle_id = complete['PuzzleID']
        puzzle_set = complete['Set']
        puzzle_subset = complete['SubSet']
        # create if not there
        PuzzleComplete.objects.get_or_create(
            user=user,
            unique_user_id=unique_id_for_user(user),
            puzzle_id=puzzle_id,
            puzzle_set=puzzle_set,
            puzzle_subset=puzzle_subset)
    # Sort explicitly: the docstring promises sorted PuzzleIDs, but the
    # queryset otherwise relies on the model's default ordering.
    complete_responses = sorted(
        pc.puzzle_id for pc in PuzzleComplete.objects.filter(user=user))
    return {"OperationID": "SetPuzzlesComplete", "Value": complete_responses}
| agpl-3.0 |
tensorflow/probability | spinoffs/fun_mc/fun_mc/dynamic/backend_jax/util.py | 1 | 9586 | # Copyright 2021 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""FunMC utilities implemented via JAX."""
import functools
import jax
from jax import lax
from jax import random
from jax import tree_util
from jax.experimental import stax
import jax.numpy as jnp
# Public API of this JAX backend module (note: `unflatten_tree` is defined
# but deliberately not exported here).
__all__ = [
    'assert_same_shallow_tree',
    'block_until_ready',
    'flatten_tree',
    'get_shallow_tree',
    'inverse_fn',
    'make_tensor_seed',
    'map_tree',
    'map_tree_up_to',
    'move_axis',
    'named_call',
    'random_categorical',
    'random_integer',
    'random_normal',
    'random_uniform',
    'split_seed',
    'trace',
    'value_and_grad',
    'value_and_ldj',
]
def map_tree(fn, tree, *args):
  """Maps `fn` over the leaves of a nested structure."""
  # tree_multimap was deprecated and later removed from JAX; tree_map
  # accepts additional trees and is the supported spelling.
  return tree_util.tree_map(fn, tree, *args)
def flatten_tree(tree):
  """Flattens a nested structure to a list."""
  leaves, _ = tree_util.tree_flatten(tree)
  return leaves
def unflatten_tree(tree, xs):
  """Inverse operation of `flatten_tree`."""
  treedef = tree_util.tree_structure(tree)
  return tree_util.tree_unflatten(treedef, xs)
def map_tree_up_to(shallow, fn, tree, *rest):
  """`map_tree` with recursion depth defined by depth of `shallow`."""
  def wrapper(_, *rest):
    return fn(*rest)
  # tree_map (rather than the removed tree_multimap) flattens according to
  # the structure of its first argument, so `shallow` bounds the recursion
  # and matching subtrees of `tree`/`rest` are passed to `fn` whole.
  return tree_util.tree_map(wrapper, shallow, tree, *rest)
def get_shallow_tree(is_leaf, tree):
  """Returns a shallow tree, expanding only when is_leaf(subtree) is False."""
  # Passing `is_leaf` both as the mapped function and as the traversal
  # predicate marks each pruned subtree with True and each natural leaf
  # with whatever is_leaf(leaf) returns (typically False).
  return tree_util.tree_map(is_leaf, tree, is_leaf=is_leaf)
def assert_same_shallow_tree(shallow, tree):
  """Asserts that `tree` has the same shallow structure as `shallow`."""
  # Do a dummy multimap for the side-effect of verifying that the structures
  # are the same; a mismatch raises inside the tree mapping.  This doesn't
  # catch all the errors we actually care about, sadly.
  def discard(*unused_args):
    return ()
  map_tree_up_to(shallow, discard, tree)
def value_and_grad(fn, args):
  """Given `fn: (args) -> out, extra`, returns `dout/dargs`."""
  output, vjp_fn, extra = jax.vjp(fn, args, has_aux=True)
  cotangent = jnp.ones_like(output)
  grad = vjp_fn(cotangent)[0]
  return output, extra, grad
def make_tensor_seed(seed):
  """Converts a seed to a `Tensor` seed."""
  if seed is None:
    raise ValueError('seed must not be None when using JAX')
  # JAX PRNG keys are pairs of uint32 words.
  return jnp.asarray(seed, dtype=jnp.uint32)
def split_seed(seed, count):
  """Splits a seed into `count` seeds."""
  key = make_tensor_seed(seed)
  return random.split(key, count)
def random_uniform(shape, dtype, seed):
  """Generates a sample from uniform distribution over [0., 1)."""
  key = make_tensor_seed(seed)
  return random.uniform(key=key, shape=tuple(shape), dtype=dtype)
def random_integer(shape, dtype, minval, maxval, seed):
  """Generates a sample from uniform distribution over [minval, maxval)."""
  key = make_tensor_seed(seed)
  return random.randint(
      key=key,
      shape=tuple(shape),
      dtype=dtype,
      minval=minval,
      maxval=maxval)
def random_normal(shape, dtype, seed):
  """Generates a sample from a standard normal distribution."""
  key = make_tensor_seed(seed)
  return random.normal(key=key, shape=tuple(shape), dtype=dtype)
def _searchsorted(a, v):
"""Returns where `v` can be inserted so that `a` remains sorted."""
def cond(state):
low_idx, high_idx = state
return low_idx < high_idx
def body(state):
low_idx, high_idx = state
mid_idx = (low_idx + high_idx) // 2
mid_v = a[mid_idx]
low_idx = jnp.where(v > mid_v, mid_idx + 1, low_idx)
high_idx = jnp.where(v > mid_v, high_idx, mid_idx)
return low_idx, high_idx
low_idx, _ = lax.while_loop(cond, body, (0, a.shape[-1]))
return low_idx
def random_categorical(logits, num_samples, seed):
  """Returns a sample from a categorical distribution. `logits` must be 2D."""
  # jax.experimental.stax was removed from JAX; jax.nn.softmax is the
  # stable replacement for stax.softmax.
  probs = jax.nn.softmax(logits)
  # Sample by inverting the CDF: draw uniforms and binary-search the
  # cumulative probabilities for each one.
  cum_sum = jnp.cumsum(probs, axis=-1)
  eta = random.uniform(
      make_tensor_seed(seed), (num_samples,) + cum_sum.shape[:-1])
  cum_sum = jnp.broadcast_to(cum_sum, (num_samples,) + cum_sum.shape)
  flat_cum_sum = cum_sum.reshape([-1, cum_sum.shape[-1]])
  flat_eta = eta.reshape([-1])
  return jax.vmap(_searchsorted)(flat_cum_sum, flat_eta).reshape(eta.shape).T
def trace(state, fn, num_steps, unroll, **_):
  """Implementation of `trace` operator, without the calling convention.

  Repeatedly applies `fn` to `state`, stacking the per-step traced outputs
  along a new leading axis.  Uses a Python loop when `unroll` is set,
  `lax.scan` when `num_steps` is statically known, and `lax.fori_loop`
  with preallocated output arrays otherwise.
  """
  # We need the shapes and dtypes of the outputs of `fn`.
  _, untraced_spec, traced_spec = jax.eval_shape(
      fn, map_tree(lambda s: jax.ShapeDtypeStruct(s.shape, s.dtype), state))
  untraced_init = map_tree(lambda spec: jnp.zeros(spec.shape, spec.dtype),
                           untraced_spec)
  try:
    num_steps = int(num_steps)
    use_scan = True
  except TypeError:
    use_scan = False
    if flatten_tree(traced_spec):
      raise ValueError(
          'Cannot trace values when `num_steps` is not statically known. Pass '
          'False to `trace_mask` or return an empty structure (e.g. `()`) as '
          'the extra output.')
    if unroll:
      raise ValueError(
          'Cannot unroll when `num_steps` is not statically known.')
  if unroll:
    traced_lists = map_tree(lambda _: [], traced_spec)
    untraced = untraced_init
    for _ in range(num_steps):
      state, untraced, traced_element = fn(state)
      map_tree_up_to(traced_spec, lambda l, e: l.append(e), traced_lists,
                     traced_element)
    # Using asarray instead of stack to handle empty arrays correctly.
    traced = map_tree_up_to(traced_spec,
                            lambda l, s: jnp.asarray(l, dtype=s.dtype),
                            traced_lists, traced_spec)
  elif use_scan:
    def wrapper(state_untraced, _):
      state, _ = state_untraced
      state, untraced, traced = fn(state)
      return (state, untraced), traced
    (state, untraced), traced = lax.scan(
        wrapper,
        (state, untraced_init),
        xs=None,
        length=num_steps,
    )
  else:
    # Dynamic num_steps: preallocate the trace arrays and fill them in
    # functionally inside a fori_loop.
    trace_arrays = map_tree(
        lambda spec: jnp.zeros((num_steps,) + spec.shape, spec.dtype),
        traced_spec)
    def wrapper(i, state_untraced_traced):
      state, _, trace_arrays = state_untraced_traced
      state, untraced, traced = fn(state)
      # jax.ops.index_update was removed from JAX; the `.at[...]` property
      # is the supported functional-update API.
      trace_arrays = map_tree(lambda a, e: a.at[i].set(e),
                              trace_arrays, traced)
      return (state, untraced, trace_arrays)
    state, untraced, traced = lax.fori_loop(
        jnp.asarray(0, num_steps.dtype),
        num_steps,
        wrapper,
        (state, untraced_init, trace_arrays),
    )
  return state, untraced, traced
# TODO(siege): This is WIP, probably to be replaced by JAX's budding inverse
# function support.
def value_and_ldj(fn, args):
  """Compute the value and log-det jacobian of function evaluated at args.

  This assumes that `fn`'s `extra` output is a 2-tuple, where the first
  element is arbitrary and the the last element is the log determinant of the
  jacobian of the transformation.

  Args:
    fn: Function to evaluate.
    args: Arguments to `fn`.

  Returns:
    ret: First output of `fn`.
    extra: Second output of `fn`, i.e. the full `(arbitrary, ldj)` 2-tuple.
    ldj: Log-det jacobian of `fn`.

  #### Example

  ```python
  def scale_by_two(x):
    # Return x unchanged as the extra output for illustrative purposes.
    return 2 * x, (x, jnp.log(2))

  y, y_extra, y_ldj = value_and_ldj(scale_by_two, 3.)
  assert y == 6
  assert y_extra == (3, jnp.log(2))
  assert y_ldj == jnp.log(2)
  ```
  """
  value, (extra, ldj) = fn(args)
  return value, (extra, ldj), ldj
def inverse_fn(fn):
  """Compute the inverse of a function.

  This assumes that `fn` has a field called `inverse` which contains the
  inverse of the function.

  Args:
    fn: Function to invert.

  Returns:
    inverse: Inverse of `fn`.

  #### Example

  ```python
  def scale_by_two(x):
    # Return x unchanged as the extra output for illustrative purposes.
    return 2 * x, (x, jnp.log(2))

  def scale_by_half(x):
    return x / 2, (x, -jnp.log(2))

  scale_by_two.inverse = scale_by_half
  scale_by_half.inverse = scale_by_two

  y, y_extra, y_ldj = value_and_ldj(scale_by_two, 3.)
  assert y == 6
  assert y_extra == (3, jnp.log(2))
  assert y_ldj == jnp.log(2)

  inv_scale_by_two = inverse_fn(scale_by_two)
  assert inv_scale_by_two == scale_by_half

  x, x_extra, x_ldj = value_and_ldj(inv_scale_by_two, 4.)
  assert x == 2
  assert x_extra == (4, -jnp.log(2))
  assert x_ldj == -jnp.log(2)
  ```
  """
  return fn.inverse
def block_until_ready(tensors):
  """Blocks computation until it is ready.

  Args:
    tensors: A nest of Tensors.

  Returns:
    tensors: Tensors that are guaranteed to be ready to materialize.
  """
  def _block_one(tensor):
    # Non-JAX leaves (e.g. Python scalars) pass through untouched.
    blocker = getattr(tensor, 'block_until_ready', None)
    return tensor if blocker is None else blocker()
  return map_tree(_block_one, tensors)
def move_axis(x, source, dest):
  """Move axis from source to dest."""
  return jnp.moveaxis(x, source=source, destination=dest)
def named_call(f=None, name=None):
  """Adds a name to a function for profiling purposes."""
  if f is not None:
    return jax.named_call(f, name=name)
  # Called as @named_call(name=...): return a decorator awaiting `f`.
  return functools.partial(named_call, name=name)
| apache-2.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.