repo_name stringlengths 6 100 | path stringlengths 4 294 | copies stringlengths 1 5 | size stringlengths 4 6 | content stringlengths 606 896k | license stringclasses 15
values |
|---|---|---|---|---|---|
openbudgets/openbudgets | openbudgets/apps/transport/incoming/resolvers/__init__.py | 4 | 11919 | from django.conf import settings
from openbudgets.apps.transport.incoming.parsers import ITEM_SEPARATOR
from openbudgets.apps.transport.incoming.errors import DataAmbiguityError, ParentScopeError,\
InverseScopesError, InverseNodeNotFoundError
class PathResolver(object):
    """Resolves the hierarchical ``path`` of every row in an imported data set.

    Each row describes a template node by ``code``, an optional ``parent``
    code and an optional ``parentscope`` (the chain of ancestor codes above
    the parent). Rows whose parent cannot be determined yet are deferred and
    retried in successive passes until a pass makes no further progress;
    anything still unresolved is reported through the parser as a
    `ParentScopeError`.
    """

    #PATH_DELIMITER = settings.OPENBUDGETS_IMPORT_INTRA_FIELD_DELIMITER
    PATH_DELIMITER = ','
    ITEM_SEPARATOR = ITEM_SEPARATOR

    def __init__(self, parser, data, parent_template=None):
        # The parser is used only for error reporting via its throw() method.
        self.parser = parser
        # Iterable of row dicts as produced by the parser.
        self.raw_data = data
        # Template this one inherits nodes from, if any.
        self.parent_template = parent_template
        self.has_parent_template = True if parent_template else False
        # full path key => (row_num, obj) for every resolved row
        self.resolved_lookup = {}
        # code => list of resolved rows sharing that code
        self.resolved_rows_by_code = {}
        # code => list of rows still waiting for their parent to resolve
        self.unresolved_rows_by_code = {}
        # code => list of nodes inherited from the parent template
        self.parent_nodes_by_code = {}
        # code => node/obj of nodes that have no parent (tree roots)
        self.root_nodes_lookup = {}
        # paths referenced as somebody's parent; used to set `has_children`
        self.parent_keys_lookup = []

    def resolve(self):
        """Run the resolution passes and return the resolved lookup dict."""
        first_run = []
        if self.parent_template:
            # index the inherited nodes by code, and collect inherited roots
            parent_nodes = self.parent_template.nodes.values('code', 'parent', 'path')
            for node in parent_nodes:
                code = node['code']
                if code not in self.parent_nodes_by_code:
                    self.parent_nodes_by_code[code] = []
                self.parent_nodes_by_code[code].append(node)
                if not node['parent']:
                    self.root_nodes_lookup[code] = node
        for row_num, obj in enumerate(self.raw_data):
            first_run.append((row_num, obj))
        next_run = self._resolve_paths(first_run)
        # keep passing over the deferred rows while each pass makes progress
        while len(next_run):
            next_run = self._resolve_paths(next_run, False)
        # one extra run for resolving inverses and marking nodes that `has_children`
        for key, row in self.resolved_lookup.iteritems():
            self._set_inverse_scope(row)
            obj = row[1]
            obj['has_children'] = obj['path'] in self.parent_keys_lookup
        if len(self.unresolved_rows_by_code):
            # whatever is left could never find an unambiguous parent
            for code, rows in self.unresolved_rows_by_code.iteritems():
                for i, (row_num, obj) in enumerate(rows):
                    self._throw_parent_scope_error(obj['code'], obj.get('parent', ''), row_num)
        return self.resolved_lookup

    def throw(self, error):
        """Delegate error reporting to the parser."""
        self.parser.throw(error)

    def _resolve_paths(self, data, first_run=True):
        """Attempt to resolve each row in ``data``; return the deferred rows.

        Returns an empty list when no progress was made, which stops the
        caller's retry loop.
        """
        #TODO: see where we lost the recognition of a DataAmbiguity along the way and throw it where needed
        next_run = []
        for row in data:
            row_num, obj = row
            code = obj.get('code', None)
            parent = obj.get('parent', None)
            if first_run:
                scope = obj.get('parentscope', None)
                if scope:
                    tmp = scope.split(self.PATH_DELIMITER)
                    scope = ','.join(tmp)  # tuple(tmp)
                if parent and scope:
                    # we have scope so we can resolve immediately
                    key = self.PATH_DELIMITER.join((code, parent, scope))
                    self._resolve_row(key, row)
                elif not parent:
                    # top level node - resolve
                    self._resolve_row(code, row)
                    self.root_nodes_lookup[code] = obj
                elif parent in self.root_nodes_lookup:
                    # parent is a known root, so the path is simply code,parent
                    key = self.PATH_DELIMITER.join((code, self.root_nodes_lookup[parent]['code']))
                    self._resolve_row(key, row)
                else:
                    # defer for next run
                    self._defer_row(code, row, next_run)
            else:
                has_unresolved_parents = parent in self.unresolved_rows_by_code
                if parent in self.root_nodes_lookup:
                    key = self.PATH_DELIMITER.join((code, self.root_nodes_lookup[parent]['code']))
                    self._resolve_row(key, row)
                elif self.has_parent_template:
                    is_in_parent = parent in self.parent_nodes_by_code
                    is_in_resolved = parent in self.resolved_rows_by_code
                    # only resolve when the parent appears in exactly one of
                    # the two lookups; both at once is ambiguous
                    if is_in_parent != is_in_resolved and not has_unresolved_parents:
                        if is_in_resolved and len(self.resolved_rows_by_code[parent]) == 1:
                            scope = self._get_scope_by_code(parent)
                            route = [code, parent]
                            if scope:
                                route += scope
                            key = self.PATH_DELIMITER.join(route)
                            self._resolve_row(key, row, self.PATH_DELIMITER.join(scope))
                        elif is_in_parent and len(self.parent_nodes_by_code[parent]) == 1:
                            scope = self._get_scope_by_code(parent, True)
                            route = [code, parent]
                            if scope:
                                route += scope
                            key = self.PATH_DELIMITER.join(route)
                            self._resolve_row(key, row, self.PATH_DELIMITER.join(scope))
                        else:
                            self._throw_parent_scope_error(code, parent, row_num)
                    elif is_in_parent:
                        self._throw_parent_scope_error(code, parent, row_num)
                    else:
                        # defer for next run
                        self._defer_row(code, row, next_run)
                else:
                    if parent in self.resolved_rows_by_code:
                        has_single_resolved_parent = len(self.resolved_rows_by_code[parent]) == 1
                        if has_single_resolved_parent and not has_unresolved_parents:
                            scope = self._get_scope_by_code(parent)
                            route = [code, parent]
                            if scope:
                                route += scope
                            key = self.PATH_DELIMITER.join(route)
                            self._resolve_row(key, row, self.PATH_DELIMITER.join(scope))
                        else:
                            self._throw_parent_scope_error(code, parent, row_num)
                    else:
                        # defer for next run
                        self._defer_row(code, row, next_run)
        return next_run if len(next_run) < len(data) else []

    def _throw_parent_scope_error(self, code, parent, row_num):
        # +2 translates the 0-based data index into a 1-based spreadsheet
        # row number, accounting for the header row.
        self.throw(
            ParentScopeError(
                row=row_num + 2,
                columns=['code', 'parent'],
                values=[code, parent]
            )
        )

    def _resolve_row(self, key, row, scope=None):
        """Store ``row`` under its resolved path ``key`` and update lookups."""
        row_num, obj = row
        code = obj['code']
        if self.has_parent_template:
            # this row overrides an inherited node with the same path
            self._remove_overridden_from_lookup(code, key)
        if scope:
            obj['parentscope'] = scope
        obj['path'] = key
        self.resolved_lookup[key] = row
        parent = obj.get('parent', None)
        if parent:
            # remember the parent's path so it can be flagged `has_children`
            scope = scope or obj.get('parentscope', None)
            parent_key = self.PATH_DELIMITER.join((parent, scope)) if scope else parent
            self.parent_keys_lookup.append(parent_key)
        if code not in self.resolved_rows_by_code:
            self.resolved_rows_by_code[code] = []
        self.resolved_rows_by_code[code].append(row)
        # remove row from unresolved lookup
        if code in self.unresolved_rows_by_code:
            list_copy = list(self.unresolved_rows_by_code[code])
            for i, (_row_num, _obj) in enumerate(list_copy):
                if _row_num == row_num:
                    self.unresolved_rows_by_code[code].pop(i)
                    # remove that code from the lookup if it's not containing any rows
                    if not len(self.unresolved_rows_by_code[code]):
                        del self.unresolved_rows_by_code[code]
                    break

    def _defer_row(self, code, row, next_run):
        """Queue ``row`` for the next resolution pass."""
        next_run.append(row)
        if code not in self.unresolved_rows_by_code:
            self.unresolved_rows_by_code[code] = []
        self.unresolved_rows_by_code[code].append(row)

    def _lookup_path_in_parent(self, code, key, callback=None):
        """Return True if an inherited node with ``code`` has path ``key``.

        ``callback`` (if given) is invoked with the index and node found.
        """
        if code in self.parent_nodes_by_code:
            for i, node in enumerate(self.parent_nodes_by_code[code]):
                if node['path'] == key:
                    if callback:
                        callback(i, node)
                    return True
        return False

    def _remove_overridden_from_lookup(self, code, key):
        # drop the inherited node that this imported row overrides
        def _remove_node(i, node):
            self.parent_nodes_by_code[code].pop(i)
        self._lookup_path_in_parent(code, key, _remove_node)

    def _get_scope_by_code(self, code, is_parent_node=False):
        """Return the ancestor segments (everything above ``code``) of the
        single known node with that code, as a list."""
        if is_parent_node:
            parent_path = self.parent_nodes_by_code[code][0]['path']
        else:
            parent_path = self.resolved_rows_by_code[code][0][1]['path']
        return parent_path.split(self.PATH_DELIMITER)[1:]

    def _set_inverse_scope(self, row):
        """Resolve the ``inversescope`` of a row's inverse nodes.

        Explicit scopes are validated against the resolved lookups;
        otherwise the scopes are inferred from the codes, which only works
        when each inverse code is unambiguous.
        """
        row_num, obj = row
        inverse_codes = obj.get('inverse', None)
        inverse_scopes = obj.get('inversescope', None)
        if inverse_codes:
            inverses = inverse_codes.split(self.ITEM_SEPARATOR)
            if inverse_scopes:
                inverse_scopes = inverse_scopes.split(self.ITEM_SEPARATOR)
                if len(inverse_scopes) != len(inverses):
                    return self.throw(
                        InverseScopesError(
                            row=row_num,
                            columns=('inverse', 'inversescope'),
                            values=(inverse_codes, inverse_scopes)
                        )
                    )
                for i, inv_code in enumerate(inverses):
                    key = self.PATH_DELIMITER.join((inv_code, inverse_scopes[i]))
                    if key not in self.resolved_lookup and not self._lookup_path_in_parent(inv_code, key):
                        # BUGFIX: the error was previously constructed but
                        # never reported; it must go through self.throw().
                        self.throw(
                            InverseNodeNotFoundError(
                                row=row_num,
                                columns=['inverse', 'inversescope'],
                                values=[obj.get('inverse'), obj.get('inversescope')]
                            )
                        )
            else:
                inverse_scopes = []
                for i, inv_code in enumerate(inverses):
                    if inv_code in self.root_nodes_lookup:
                        # BUGFIX: this used to be a plain `if`, so a root node
                        # that was also a resolved row appended two scope
                        # entries for a single inverse, corrupting the count.
                        inverse_scopes.append('')
                    elif inv_code in self.resolved_rows_by_code:
                        if len(self.resolved_rows_by_code[inv_code]) == 1:
                            scope = self.PATH_DELIMITER.join(self._get_scope_by_code(inv_code))
                            inverse_scopes.append(scope)
                        else:
                            # ambiguous: several resolved rows share this code
                            self.throw(
                                InverseNodeNotFoundError(
                                    row=row_num,
                                    columns=['inverse', 'inversescope'],
                                    values=[obj.get('inverse'), obj.get('inversescope')]
                                )
                            )
                    elif inv_code in self.parent_nodes_by_code and len(self.parent_nodes_by_code[inv_code]) == 1:
                        scope = self.PATH_DELIMITER.join(self._get_scope_by_code(inv_code, True))
                        inverse_scopes.append(scope)
                    else:
                        self.throw(
                            InverseNodeNotFoundError(
                                row=row_num,
                                columns=['inverse', 'inversescope'],
                                values=[obj.get('inverse'), obj.get('inversescope')]
                            )
                        )
                if len(inverse_scopes):
                    obj['inversescope'] = self.ITEM_SEPARATOR.join(inverse_scopes)
| bsd-3-clause |
nbargnesi/tornado | tornado/httpclient.py | 104 | 26658 | """Blocking and non-blocking HTTP client interfaces.
This module defines a common interface shared by two implementations,
``simple_httpclient`` and ``curl_httpclient``. Applications may either
instantiate their chosen implementation class directly or use the
`AsyncHTTPClient` class from this module, which selects an implementation
that can be overridden with the `AsyncHTTPClient.configure` method.
The default implementation is ``simple_httpclient``, and this is expected
to be suitable for most users' needs. However, some applications may wish
to switch to ``curl_httpclient`` for reasons such as the following:
* ``curl_httpclient`` has some features not found in ``simple_httpclient``,
including support for HTTP proxies and the ability to use a specified
network interface.
* ``curl_httpclient`` is more likely to be compatible with sites that are
not-quite-compliant with the HTTP spec, or sites that use little-exercised
features of HTTP.
* ``curl_httpclient`` is faster.
* ``curl_httpclient`` was the default prior to Tornado 2.0.
Note that if you are using ``curl_httpclient``, it is highly
recommended that you use a recent version of ``libcurl`` and
``pycurl``. Currently the minimum supported version of libcurl is
7.21.1, and the minimum version of pycurl is 7.18.2. It is highly
recommended that your ``libcurl`` installation is built with
asynchronous DNS resolver (threaded or c-ares), otherwise you may
encounter various problems with request timeouts (for more
information, see
http://curl.haxx.se/libcurl/c/curl_easy_setopt.html#CURLOPTCONNECTTIMEOUTMS
and comments in curl_httpclient.py).
To select ``curl_httpclient``, call `AsyncHTTPClient.configure` at startup::
AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient")
"""
from __future__ import absolute_import, division, print_function, with_statement
import functools
import time
import weakref
from tornado.concurrent import TracebackFuture
from tornado.escape import utf8, native_str
from tornado import httputil, stack_context
from tornado.ioloop import IOLoop
from tornado.util import Configurable
class HTTPClient(object):
    """A blocking HTTP client.

    Convenient for scripts and tests; applications already running an
    IOLoop should use `AsyncHTTPClient` instead.

    Typical usage looks like this::

        http_client = httpclient.HTTPClient()
        try:
            response = http_client.fetch("http://www.google.com/")
            print response.body
        except httpclient.HTTPError as e:
            # HTTPError is raised for non-200 responses; the response
            # can be found in e.response.
            print("Error: " + str(e))
        except Exception as e:
            # Other errors are possible, such as IOError.
            print("Error: " + str(e))
        http_client.close()
    """

    def __init__(self, async_client_class=None, **kwargs):
        # Each blocking client owns a private IOLoop, spun only for the
        # duration of a fetch() call.
        self._io_loop = IOLoop(make_current=False)
        impl = async_client_class if async_client_class is not None else AsyncHTTPClient
        self._async_client = impl(self._io_loop, **kwargs)
        self._closed = False

    def __del__(self):
        # Best-effort cleanup; close() is idempotent.
        self.close()

    def close(self):
        """Closes the HTTPClient, freeing any resources used."""
        if self._closed:
            return
        self._async_client.close()
        self._io_loop.close()
        self._closed = True

    def fetch(self, request, **kwargs):
        """Executes a request, returning an `HTTPResponse`.

        The request may be either a string URL or an `HTTPRequest` object.
        If it is a string, an `HTTPRequest` is constructed from it and any
        additional kwargs: ``HTTPRequest(request, **kwargs)``

        If an error occurs during the fetch, an `HTTPError` is raised
        unless the ``raise_error`` keyword argument is set to False.
        """
        fetch_call = functools.partial(self._async_client.fetch, request, **kwargs)
        return self._io_loop.run_sync(fetch_call)
class AsyncHTTPClient(Configurable):
    """An non-blocking HTTP client.

    Example usage::

        def handle_request(response):
            if response.error:
                print "Error:", response.error
            else:
                print response.body

        http_client = AsyncHTTPClient()
        http_client.fetch("http://www.google.com/", handle_request)

    The constructor for this class is magic in several respects: It
    actually creates an instance of an implementation-specific
    subclass, and instances are reused as a kind of pseudo-singleton
    (one per `.IOLoop`). The keyword argument ``force_instance=True``
    can be used to suppress this singleton behavior. Unless
    ``force_instance=True`` is used, no arguments other than
    ``io_loop`` should be passed to the `AsyncHTTPClient` constructor.

    The implementation subclass as well as arguments to its
    constructor can be set with the static method `configure()`

    All `AsyncHTTPClient` implementations support a ``defaults``
    keyword argument, which can be used to set default values for
    `HTTPRequest` attributes. For example::

        AsyncHTTPClient.configure(
            None, defaults=dict(user_agent="MyUserAgent"))
        # or with force_instance:
        client = AsyncHTTPClient(force_instance=True,
            defaults=dict(user_agent="MyUserAgent"))

    .. versionchanged:: 4.1
       The ``io_loop`` argument is deprecated.
    """
    @classmethod
    def configurable_base(cls):
        # Configurable hook: configure() on any subclass affects this base.
        return AsyncHTTPClient

    @classmethod
    def configurable_default(cls):
        # Imported lazily to avoid a circular import at module load time.
        from tornado.simple_httpclient import SimpleAsyncHTTPClient
        return SimpleAsyncHTTPClient

    @classmethod
    def _async_clients(cls):
        # One instance cache per concrete subclass, keyed by IOLoop.  The
        # WeakKeyDictionary lets a garbage-collected IOLoop drop its entry.
        attr_name = '_async_client_dict_' + cls.__name__
        if not hasattr(cls, attr_name):
            setattr(cls, attr_name, weakref.WeakKeyDictionary())
        return getattr(cls, attr_name)

    def __new__(cls, io_loop=None, force_instance=False, **kwargs):
        # Implements the pseudo-singleton: reuse the cached instance for
        # this IOLoop unless force_instance=True was requested.
        io_loop = io_loop or IOLoop.current()
        if force_instance:
            instance_cache = None
        else:
            instance_cache = cls._async_clients()
        if instance_cache is not None and io_loop in instance_cache:
            return instance_cache[io_loop]
        instance = super(AsyncHTTPClient, cls).__new__(cls, io_loop=io_loop,
                                                       **kwargs)
        # Make sure the instance knows which cache to remove itself from.
        # It can't simply call _async_clients() because we may be in
        # __new__(AsyncHTTPClient) but instance.__class__ may be
        # SimpleAsyncHTTPClient.
        instance._instance_cache = instance_cache
        if instance_cache is not None:
            instance_cache[instance.io_loop] = instance
        return instance

    def initialize(self, io_loop, defaults=None):
        # Configurable calls initialize() instead of __init__.
        self.io_loop = io_loop
        # Copy so per-client defaults never mutate HTTPRequest._DEFAULTS.
        self.defaults = dict(HTTPRequest._DEFAULTS)
        if defaults is not None:
            self.defaults.update(defaults)
        self._closed = False

    def close(self):
        """Destroys this HTTP client, freeing any file descriptors used.

        This method is **not needed in normal use** due to the way
        that `AsyncHTTPClient` objects are transparently reused.
        ``close()`` is generally only necessary when either the
        `.IOLoop` is also being closed, or the ``force_instance=True``
        argument was used when creating the `AsyncHTTPClient`.

        No other methods may be called on the `AsyncHTTPClient` after
        ``close()``.
        """
        if self._closed:
            return
        self._closed = True
        if self._instance_cache is not None:
            # Guard against a stale cache entry pointing at another client.
            if self._instance_cache.get(self.io_loop) is not self:
                raise RuntimeError("inconsistent AsyncHTTPClient cache")
            del self._instance_cache[self.io_loop]

    def fetch(self, request, callback=None, raise_error=True, **kwargs):
        """Executes a request, asynchronously returning an `HTTPResponse`.

        The request may be either a string URL or an `HTTPRequest` object.
        If it is a string, we construct an `HTTPRequest` using any additional
        kwargs: ``HTTPRequest(request, **kwargs)``

        This method returns a `.Future` whose result is an
        `HTTPResponse`. By default, the ``Future`` will raise an `HTTPError`
        if the request returned a non-200 response code. Instead, if
        ``raise_error`` is set to False, the response will always be
        returned regardless of the response code.

        If a ``callback`` is given, it will be invoked with the `HTTPResponse`.
        In the callback interface, `HTTPError` is not automatically raised.
        Instead, you must check the response's ``error`` attribute or
        call its `~HTTPResponse.rethrow` method.
        """
        if self._closed:
            raise RuntimeError("fetch() called on closed AsyncHTTPClient")
        if not isinstance(request, HTTPRequest):
            request = HTTPRequest(url=request, **kwargs)
        # We may modify this (to add Host, Accept-Encoding, etc),
        # so make sure we don't modify the caller's object. This is also
        # where normal dicts get converted to HTTPHeaders objects.
        request.headers = httputil.HTTPHeaders(request.headers)
        request = _RequestProxy(request, self.defaults)
        future = TracebackFuture()
        if callback is not None:
            callback = stack_context.wrap(callback)

            # Bridge from the Future interface back to the callback
            # interface: hand the callback either the response or a
            # synthesized error response.
            def handle_future(future):
                exc = future.exception()
                if isinstance(exc, HTTPError) and exc.response is not None:
                    response = exc.response
                elif exc is not None:
                    response = HTTPResponse(
                        request, 599, error=exc,
                        request_time=time.time() - request.start_time)
                else:
                    response = future.result()
                self.io_loop.add_callback(callback, response)
            future.add_done_callback(handle_future)

        def handle_response(response):
            if raise_error and response.error:
                future.set_exception(response.error)
            else:
                future.set_result(response)
        self.fetch_impl(request, handle_response)
        return future

    def fetch_impl(self, request, callback):
        # Subclasses implement the actual transport here.
        raise NotImplementedError()

    @classmethod
    def configure(cls, impl, **kwargs):
        """Configures the `AsyncHTTPClient` subclass to use.

        ``AsyncHTTPClient()`` actually creates an instance of a subclass.
        This method may be called with either a class object or the
        fully-qualified name of such a class (or ``None`` to use the default,
        ``SimpleAsyncHTTPClient``)

        If additional keyword arguments are given, they will be passed
        to the constructor of each subclass instance created. The
        keyword argument ``max_clients`` determines the maximum number
        of simultaneous `~AsyncHTTPClient.fetch()` operations that can
        execute in parallel on each `.IOLoop`. Additional arguments
        may be supported depending on the implementation class in use.

        Example::

           AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient")
        """
        super(AsyncHTTPClient, cls).configure(impl, **kwargs)
class HTTPRequest(object):
    """HTTP client request object."""

    # Default values for HTTPRequest parameters.
    # Merged with the values on the request object by AsyncHTTPClient
    # implementations.
    _DEFAULTS = dict(
        connect_timeout=20.0,
        request_timeout=20.0,
        follow_redirects=True,
        max_redirects=5,
        decompress_response=True,
        proxy_password='',
        allow_nonstandard_methods=False,
        validate_cert=True)

    def __init__(self, url, method="GET", headers=None, body=None,
                 auth_username=None, auth_password=None, auth_mode=None,
                 connect_timeout=None, request_timeout=None,
                 if_modified_since=None, follow_redirects=None,
                 max_redirects=None, user_agent=None, use_gzip=None,
                 network_interface=None, streaming_callback=None,
                 header_callback=None, prepare_curl_callback=None,
                 proxy_host=None, proxy_port=None, proxy_username=None,
                 proxy_password=None, allow_nonstandard_methods=None,
                 validate_cert=None, ca_certs=None,
                 allow_ipv6=None,
                 client_key=None, client_cert=None, body_producer=None,
                 expect_100_continue=False, decompress_response=None,
                 ssl_options=None):
        r"""All parameters except ``url`` are optional.

        :arg string url: URL to fetch
        :arg string method: HTTP method, e.g. "GET" or "POST"
        :arg headers: Additional HTTP headers to pass on the request
        :type headers: `~tornado.httputil.HTTPHeaders` or `dict`
        :arg body: HTTP request body as a string (byte or unicode; if unicode
           the utf-8 encoding will be used)
        :arg body_producer: Callable used for lazy/asynchronous request bodies.
           It is called with one argument, a ``write`` function, and should
           return a `.Future`.  It should call the write function with new
           data as it becomes available.  The write function returns a
           `.Future` which can be used for flow control.
           Only one of ``body`` and ``body_producer`` may
           be specified.  ``body_producer`` is not supported on
           ``curl_httpclient``.  When using ``body_producer`` it is recommended
           to pass a ``Content-Length`` in the headers as otherwise chunked
           encoding will be used, and many servers do not support chunked
           encoding on requests.  New in Tornado 4.0
        :arg string auth_username: Username for HTTP authentication
        :arg string auth_password: Password for HTTP authentication
        :arg string auth_mode: Authentication mode; default is "basic".
           Allowed values are implementation-defined; ``curl_httpclient``
           supports "basic" and "digest"; ``simple_httpclient`` only supports
           "basic"
        :arg float connect_timeout: Timeout for initial connection in seconds
        :arg float request_timeout: Timeout for entire request in seconds
        :arg if_modified_since: Timestamp for ``If-Modified-Since`` header
        :type if_modified_since: `datetime` or `float`
        :arg bool follow_redirects: Should redirects be followed automatically
           or return the 3xx response?
        :arg int max_redirects: Limit for ``follow_redirects``
        :arg string user_agent: String to send as ``User-Agent`` header
        :arg bool decompress_response: Request a compressed response from
           the server and decompress it after downloading.  Default is True.
           New in Tornado 4.0.
        :arg bool use_gzip: Deprecated alias for ``decompress_response``
           since Tornado 4.0.
        :arg string network_interface: Network interface to use for request.
           ``curl_httpclient`` only; see note below.
        :arg callable streaming_callback: If set, ``streaming_callback`` will
           be run with each chunk of data as it is received, and
           ``HTTPResponse.body`` and ``HTTPResponse.buffer`` will be empty in
           the final response.
        :arg callable header_callback: If set, ``header_callback`` will
           be run with each header line as it is received (including the
           first line, e.g. ``HTTP/1.0 200 OK\r\n``, and a final line
           containing only ``\r\n``.  All lines include the trailing newline
           characters).  ``HTTPResponse.headers`` will be empty in the final
           response.  This is most useful in conjunction with
           ``streaming_callback``, because it's the only way to get access to
           header data while the request is in progress.
        :arg callable prepare_curl_callback: If set, will be called with
           a ``pycurl.Curl`` object to allow the application to make additional
           ``setopt`` calls.
        :arg string proxy_host: HTTP proxy hostname.  To use proxies,
           ``proxy_host`` and ``proxy_port`` must be set; ``proxy_username`` and
           ``proxy_pass`` are optional.  Proxies are currently only supported
           with ``curl_httpclient``.
        :arg int proxy_port: HTTP proxy port
        :arg string proxy_username: HTTP proxy username
        :arg string proxy_password: HTTP proxy password
        :arg bool allow_nonstandard_methods: Allow unknown values for ``method``
           argument?
        :arg bool validate_cert: For HTTPS requests, validate the server's
           certificate?
        :arg string ca_certs: filename of CA certificates in PEM format,
           or None to use defaults.  See note below when used with
           ``curl_httpclient``.
        :arg string client_key: Filename for client SSL key, if any.  See
           note below when used with ``curl_httpclient``.
        :arg string client_cert: Filename for client SSL certificate, if any.
           See note below when used with ``curl_httpclient``.
        :arg ssl.SSLContext ssl_options: `ssl.SSLContext` object for use in
           ``simple_httpclient`` (unsupported by ``curl_httpclient``).
           Overrides ``validate_cert``, ``ca_certs``, ``client_key``,
           and ``client_cert``.
        :arg bool allow_ipv6: Use IPv6 when available?  Default is true.
        :arg bool expect_100_continue: If true, send the
           ``Expect: 100-continue`` header and wait for a continue response
           before sending the request body.  Only supported with
           simple_httpclient.

        .. note::

            When using ``curl_httpclient`` certain options may be
            inherited by subsequent fetches because ``pycurl`` does
            not allow them to be cleanly reset.  This applies to the
            ``ca_certs``, ``client_key``, ``client_cert``, and
            ``network_interface`` arguments.  If you use these
            options, you should pass them on every request (you don't
            have to always use the same values, but it's not possible
            to mix requests that specify these options with ones that
            use the defaults).

        .. versionadded:: 3.1
           The ``auth_mode`` argument.

        .. versionadded:: 4.0
           The ``body_producer`` and ``expect_100_continue`` arguments.

        .. versionadded:: 4.2
           The ``ssl_options`` argument.
        """
        # Note that some of these attributes go through property setters
        # defined below.
        self.headers = headers
        if if_modified_since:
            self.headers["If-Modified-Since"] = httputil.format_timestamp(
                if_modified_since)
        self.proxy_host = proxy_host
        self.proxy_port = proxy_port
        self.proxy_username = proxy_username
        self.proxy_password = proxy_password
        self.url = url
        self.method = method
        self.body = body
        self.body_producer = body_producer
        self.auth_username = auth_username
        self.auth_password = auth_password
        self.auth_mode = auth_mode
        self.connect_timeout = connect_timeout
        self.request_timeout = request_timeout
        self.follow_redirects = follow_redirects
        self.max_redirects = max_redirects
        self.user_agent = user_agent
        # An explicit decompress_response wins over the deprecated
        # use_gzip alias.
        if decompress_response is not None:
            self.decompress_response = decompress_response
        else:
            self.decompress_response = use_gzip
        self.network_interface = network_interface
        self.streaming_callback = streaming_callback
        self.header_callback = header_callback
        self.prepare_curl_callback = prepare_curl_callback
        self.allow_nonstandard_methods = allow_nonstandard_methods
        self.validate_cert = validate_cert
        self.ca_certs = ca_certs
        self.allow_ipv6 = allow_ipv6
        self.client_key = client_key
        self.client_cert = client_cert
        self.ssl_options = ssl_options
        self.expect_100_continue = expect_100_continue
        # Used by AsyncHTTPClient to compute request_time when a fetch
        # fails with an exception.
        self.start_time = time.time()

    # The setters below normalize their values: headers defaults to an
    # empty HTTPHeaders, body is utf-8 encoded, and all callback-valued
    # attributes are wrapped with stack_context so they run in the
    # caller's exception-handling context.
    @property
    def headers(self):
        return self._headers

    @headers.setter
    def headers(self, value):
        if value is None:
            self._headers = httputil.HTTPHeaders()
        else:
            self._headers = value

    @property
    def body(self):
        return self._body

    @body.setter
    def body(self, value):
        self._body = utf8(value)

    @property
    def body_producer(self):
        return self._body_producer

    @body_producer.setter
    def body_producer(self, value):
        self._body_producer = stack_context.wrap(value)

    @property
    def streaming_callback(self):
        return self._streaming_callback

    @streaming_callback.setter
    def streaming_callback(self, value):
        self._streaming_callback = stack_context.wrap(value)

    @property
    def header_callback(self):
        return self._header_callback

    @header_callback.setter
    def header_callback(self, value):
        self._header_callback = stack_context.wrap(value)

    @property
    def prepare_curl_callback(self):
        return self._prepare_curl_callback

    @prepare_curl_callback.setter
    def prepare_curl_callback(self, value):
        self._prepare_curl_callback = stack_context.wrap(value)
class HTTPResponse(object):
    """HTTP Response object.

    Attributes:

    * request: HTTPRequest object
    * code: numeric HTTP status code, e.g. 200 or 404
    * reason: human-readable reason phrase describing the status code
    * headers: `tornado.httputil.HTTPHeaders` object
    * effective_url: final location of the resource after following any
      redirects
    * buffer: ``cStringIO`` object for response body
    * body: response body as string (created on demand from ``self.buffer``)
    * error: Exception object, if any
    * request_time: seconds from request start to finish
    * time_info: dictionary of diagnostic timing information from the request.
      Available data are subject to change, but currently uses timings
      available from http://curl.haxx.se/libcurl/c/curl_easy_getinfo.html,
      plus ``queue``, which is the delay (if any) introduced by waiting for
      a slot under `AsyncHTTPClient`'s ``max_clients`` setting.
    """
    def __init__(self, request, code, headers=None, buffer=None,
                 effective_url=None, error=None, request_time=None,
                 time_info=None, reason=None):
        # Unwrap the proxy so callers see the request object they created.
        self.request = request.request if isinstance(request, _RequestProxy) else request
        self.code = code
        self.reason = reason or httputil.responses.get(code, "Unknown")
        self.headers = headers if headers is not None else httputil.HTTPHeaders()
        self.buffer = buffer
        self._body = None
        self.effective_url = request.url if effective_url is None else effective_url
        if error is not None:
            self.error = error
        elif 200 <= self.code < 300:
            self.error = None
        else:
            # Synthesize an HTTPError for non-2xx statuses.
            self.error = HTTPError(self.code, message=self.reason,
                                   response=self)
        self.request_time = request_time
        self.time_info = time_info or {}

    def _get_body(self):
        if self.buffer is None:
            return None
        if self._body is None:
            # Materialize the buffer lazily and cache the result.
            self._body = self.buffer.getvalue()
        return self._body

    body = property(_get_body)

    def rethrow(self):
        """If there was an error on the request, raise an `HTTPError`."""
        if self.error:
            raise self.error

    def __repr__(self):
        pairs = sorted(self.__dict__.items())
        args = ",".join("%s=%r" % pair for pair in pairs)
        return "%s(%s)" % (self.__class__.__name__, args)
class HTTPError(Exception):
    """Exception thrown for an unsuccessful HTTP request.

    Attributes:

    * ``code`` - HTTP error integer error code, e.g. 404.  Error code 599 is
      used when no HTTP response was received, e.g. for a timeout.
    * ``response`` - `HTTPResponse` object, if any.

    Note that if ``follow_redirects`` is False, redirects become HTTPErrors,
    and you can look at ``error.response.headers['Location']`` to see the
    destination of the redirect.
    """
    def __init__(self, code, message=None, response=None):
        self.code = code
        if not message:
            # Fall back to the standard reason phrase for this status.
            message = httputil.responses.get(code, "Unknown")
        self.response = response
        Exception.__init__(self, "HTTP %d: %s" % (self.code, message))
class _RequestProxy(object):
"""Combines an object with a dictionary of defaults.
Used internally by AsyncHTTPClient implementations.
"""
def __init__(self, request, defaults):
self.request = request
self.defaults = defaults
def __getattr__(self, name):
request_attr = getattr(self.request, name)
if request_attr is not None:
return request_attr
elif self.defaults is not None:
return self.defaults.get(name, None)
else:
return None
def main():
    """Command-line utility: fetch each URL argument and print the result."""
    from tornado.options import define, options, parse_command_line
    define("print_headers", type=bool, default=False)
    define("print_body", type=bool, default=True)
    define("follow_redirects", type=bool, default=True)
    define("validate_cert", type=bool, default=True)
    urls = parse_command_line()
    client = HTTPClient()
    for url in urls:
        try:
            response = client.fetch(
                url,
                follow_redirects=options.follow_redirects,
                validate_cert=options.validate_cert,
            )
        except HTTPError as e:
            # Non-2xx responses still carry a response worth printing;
            # anything without one is a genuine failure.
            if e.response is None:
                raise
            response = e.response
        if options.print_headers:
            print(response.headers)
        if options.print_body:
            print(native_str(response.body))
    client.close()


if __name__ == "__main__":
    main()
| apache-2.0 |
JianyuWang/nova | nova/db/sqlalchemy/migrate_repo/versions/228_add_metrics_in_compute_nodes.py | 81 | 1236 | # Copyright 2013 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Column
from sqlalchemy import MetaData
from sqlalchemy import Table
from sqlalchemy import Text
def upgrade(migrate_engine):
    """Add a nullable Text column ``metrics`` to ``compute_nodes``.

    The same column is added to ``shadow_compute_nodes`` so the shadow
    (archive) table stays schema-compatible with the live one.
    """
    meta = MetaData()
    meta.bind = migrate_engine

    # Reflect each table and attach the new metrics column to it.  A fresh
    # Column object is built per table; SQLAlchemy columns cannot be shared.
    for table_name in ('compute_nodes', 'shadow_compute_nodes'):
        table = Table(table_name, meta, autoload=True)
        table.create_column(Column('metrics', Text, nullable=True))
| apache-2.0 |
ahuarte47/QGIS | tests/src/python/test_qgsogcutils.py | 45 | 17768 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsOgcUtils.
From build dir, run: ctest -R PyQgsOgcUtils -V
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'René-Luc Dhont'
__date__ = '21/06/2019'
__copyright__ = 'Copyright 2019, The QGIS Project'
import qgis # NOQA switch sip api
from qgis.PyQt.QtCore import QVariant
from qgis.PyQt.QtXml import QDomDocument
from qgis.core import QgsOgcUtils, QgsVectorLayer, QgsField
from qgis.testing import start_app, unittest
start_app()
class TestQgsOgcUtils(unittest.TestCase):
    """Tests for QgsOgcUtils.expressionFromOgcFilter.

    The numeric tests all exercise the same ``id > x AND id < y`` OGC
    filter, varying only the literal spelling (integer, double, scientific
    notation) and the type of the target field.  Expectations: integer
    fields round double-valued literals, double fields keep them, and
    string fields quote the literal text verbatim.
    """

    # Template for an OGC filter equivalent to ``id > {gt} AND id < {lt}``.
    # Literal placeholders are filled in per test case; element content must
    # stay free of surrounding whitespace so the parser sees the bare value.
    RANGE_FILTER_TEMPLATE = '''<?xml version="1.0" encoding="UTF-8"?>
    <ogc:Filter xmlns:ogc="http://www.opengis.net/ogc">
      <ogc:And>
        <ogc:PropertyIsGreaterThan>
          <ogc:PropertyName>id</ogc:PropertyName>
          <ogc:Literal>{gt}</ogc:Literal>
        </ogc:PropertyIsGreaterThan>
        <ogc:PropertyIsLessThan>
          <ogc:PropertyName>id</ogc:PropertyName>
          <ogc:Literal>{lt}</ogc:Literal>
        </ogc:PropertyIsLessThan>
      </ogc:And>
    </ogc:Filter>
    '''

    @staticmethod
    def _layer_with_id_field(field_type):
        """Return an in-memory point layer with a single 'id' field of *field_type*."""
        vl = QgsVectorLayer('Point', 'vl', 'memory')
        vl.dataProvider().addAttributes([QgsField('id', field_type)])
        vl.updateFields()
        return vl

    @staticmethod
    def _expression_from_filter(xml, vl):
        """Parse *xml* as an OGC filter document and convert it to a QGIS expression."""
        d = QDomDocument('filter')
        # namespaceProcessing=True so the ogc: prefix is resolved.
        d.setContent(xml, True)
        return QgsOgcUtils.expressionFromOgcFilter(d.documentElement(), vl)

    def _check_range_filter(self, vl, gt, lt, expected):
        """Assert that an ``id > gt AND id < lt`` OGC filter converts to *expected*."""
        xml = self.RANGE_FILTER_TEMPLATE.format(gt=gt, lt=lt)
        e = self._expression_from_filter(xml, vl)
        self.assertEqual(e.expression(), expected)

    def test_expressionFromOgcFilterWithInt(self):
        """
        Test expressionFromOgcFilter with Int type field
        """
        vl = self._layer_with_id_field(QVariant.Int)
        # Integer literals pass through unchanged.
        self._check_range_filter(vl, '1', '3', 'id > 1 AND id < 3')
        # Whole-valued double literals are converted to integers.
        self._check_range_filter(vl, '1.0', '3.0', 'id > 1 AND id < 3')
        # Fractional double literals are rounded.
        self._check_range_filter(vl, '1.5', '3.5', 'id > 2 AND id < 4')
        # Scientific notation (15e-01 == 1.5) behaves like the double case.
        self._check_range_filter(vl, '15e-01', '35e-01', 'id > 2 AND id < 4')

    def test_expressionFromOgcFilterWithLonglong(self):
        """
        Test expressionFromOgcFilter with LongLong type field
        """
        vl = self._layer_with_id_field(QVariant.LongLong)
        # LongLong fields behave exactly like Int fields.
        self._check_range_filter(vl, '1', '3', 'id > 1 AND id < 3')
        self._check_range_filter(vl, '1.0', '3.0', 'id > 1 AND id < 3')
        self._check_range_filter(vl, '1.5', '3.5', 'id > 2 AND id < 4')
        self._check_range_filter(vl, '15e-01', '35e-01', 'id > 2 AND id < 4')

    def test_expressionFromOgcFilterWithDouble(self):
        """
        Test expressionFromOgcFilter with Double type field
        """
        vl = self._layer_with_id_field(QVariant.Double)
        # Integer literals stay integers; double literals keep their value.
        self._check_range_filter(vl, '1', '3', 'id > 1 AND id < 3')
        self._check_range_filter(vl, '1.0', '3.0', 'id > 1 AND id < 3')
        self._check_range_filter(vl, '1.5', '3.5', 'id > 1.5 AND id < 3.5')
        self._check_range_filter(vl, '15e-01', '35e-01', 'id > 1.5 AND id < 3.5')

    def test_expressionFromOgcFilterWithString(self):
        """
        Test expressionFromOgcFilter with String type field
        """
        vl = self._layer_with_id_field(QVariant.String)
        # For string fields the literal text is quoted verbatim, whatever
        # its numeric spelling.
        self._check_range_filter(vl, '1', '3', 'id > \'1\' AND id < \'3\'')
        self._check_range_filter(vl, '1.0', '3.0', 'id > \'1.0\' AND id < \'3.0\'')
        self._check_range_filter(vl, '1.5', '3.5', 'id > \'1.5\' AND id < \'3.5\'')
        self._check_range_filter(vl, '15e-01', '35e-01',
                                 'id > \'15e-01\' AND id < \'35e-01\'')

    def test_expressionFromOgcFilterWithAndOrPropertyIsLike(self):
        """
        Test expressionFromOgcFilter with And, Or and PropertyIsLike with wildCard
        """
        vl = QgsVectorLayer('Point', 'vl', 'memory')
        vl.dataProvider().addAttributes([
            QgsField('id', QVariant.LongLong),
            QgsField('THEME', QVariant.String),
            QgsField('PROGRAMME', QVariant.String)])
        vl.updateFields()
        # wildCard/singleChar are translated to LIKE/ILIKE syntax;
        # matchCase="false" yields ILIKE.
        f = '''<?xml version="1.0" encoding="UTF-8"?>
        <ogc:Filter xmlns:ogc="http://www.opengis.net/ogc">
          <ogc:And>
            <ogc:PropertyIsLike escape="\\" wildCard="%" singleChar="_">
              <ogc:PropertyName>THEME</ogc:PropertyName>
              <ogc:Literal>%Phytoplancton total%</ogc:Literal>
            </ogc:PropertyIsLike>
            <ogc:Or>
              <ogc:PropertyIsLike escapeChar="\\" matchCase="false" singleChar="?" wildCard="*">
                <ogc:PropertyName>PROGRAMME</ogc:PropertyName>
                <ogc:Literal>REPHY;*</ogc:Literal>
              </ogc:PropertyIsLike>
              <ogc:PropertyIsLike escapeChar="\\" matchCase="false" singleChar="?" wildCard="*">
                <ogc:PropertyName>PROGRAMME</ogc:PropertyName>
                <ogc:Literal>*;REPHY</ogc:Literal>
              </ogc:PropertyIsLike>
              <ogc:PropertyIsLike escapeChar="\\" matchCase="false" singleChar="?" wildCard="*">
                <ogc:PropertyName>PROGRAMME</ogc:PropertyName>
                <ogc:Literal>*;REPHY;*</ogc:Literal>
              </ogc:PropertyIsLike>
              <ogc:PropertyIsLike escapeChar="\\" matchCase="false" singleChar="?" wildCard="*">
                <ogc:PropertyName>PROGRAMME</ogc:PropertyName>
                <ogc:Literal>^REPHY$</ogc:Literal>
              </ogc:PropertyIsLike>
            </ogc:Or>
          </ogc:And>
        </ogc:Filter>
        '''
        e = self._expression_from_filter(f, vl)
        self.assertEqual(e.expression(), 'THEME LIKE \'%Phytoplancton total%\' AND (PROGRAMME ILIKE \'REPHY;%\' OR PROGRAMME ILIKE \'%;REPHY\' OR PROGRAMME ILIKE \'%;REPHY;%\' OR PROGRAMME ILIKE \'^REPHY$\')')
# Allow running this test module directly (outside the ctest harness).
if __name__ == '__main__':
    unittest.main()
| gpl-2.0 |
SHAFNehal/Course | tf-hands-on/slim/python/slim/queues.py | 35 | 2087 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Contains a helper context for running queue runners.
@@NestedQueueRunnerError
@@QueueRunners
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from contextlib import contextmanager
import threading
from tensorflow.python.framework import ops
from tensorflow.python.training import coordinator
# Public API of this module.
__all__ = [
    'NestedQueueRunnerError',
    'QueueRunners',
]
# Module-level lock held for the whole lifetime of a QueueRunners context;
# acquiring it non-blockingly is how nesting is detected.
_queue_runner_lock = threading.Lock()
class NestedQueueRunnerError(Exception):
    """Raised when a QueueRunners context is entered while another is active."""
@contextmanager
def QueueRunners(session):
  """Creates a context manager that handles starting and stopping queue runners.

  Args:
    session: the currently running session.

  Yields:
    a context in which queues are run.

  Raises:
    NestedQueueRunnerError: if a QueueRunners context is nested within another.
  """
  # Non-blocking acquire: failure means another QueueRunners context is
  # already active (possibly on another thread), which is unsupported.
  if not _queue_runner_lock.acquire(False):
    raise NestedQueueRunnerError('QueueRunners cannot be nested')
  try:
    coord = coordinator.Coordinator()
    threads = []
    for qr in ops.get_collection(ops.GraphKeys.QUEUE_RUNNERS):
      threads.extend(qr.create_threads(session,
                                       coord=coord,
                                       daemon=True,
                                       start=True))
    try:
      yield
    finally:
      # Always stop the runner threads, even if the body raised.
      coord.request_stop()
      coord.join(threads, stop_grace_period_secs=120)
  finally:
    # Bug fix: previously the lock was released only after a successful
    # coord.join() — an exception raised while creating threads, or a
    # join() failure, left the lock held forever and made every later
    # QueueRunners call fail as "nested".
    _queue_runner_lock.release()
| apache-2.0 |
craftytrickster/servo | tests/wpt/harness/wptrunner/update/update.py | 118 | 5053 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os
import sys
from metadata import MetadataUpdateRunner
from sync import SyncFromUpstreamRunner
from tree import GitTree, HgTree, NoVCSTree
from .. import environment as env
from base import Step, StepRunner, exit_clean, exit_unclean
from state import State
def setup_paths(sync_path):
    """Make the web-platform-tests checkout at *sync_path* importable."""
    sys.path.insert(0, os.path.abspath(sync_path))
    # Imported for its import-time side effects — presumably it extends
    # sys.path with the checkout's tool directories; verify against the
    # wpt `tools/localpaths.py` module.
    from tools import localpaths
class LoadConfig(Step):
    """Step for loading configuration from the ini file and kwargs."""

    provides = ["sync", "paths", "metadata_path", "tests_path"]

    def create(self, state):
        """Copy sync/path configuration from the parsed kwargs onto the state."""
        kwargs = state.kwargs
        state.sync = {
            "remote_url": kwargs["remote_url"],
            "branch": kwargs["branch"],
            "path": kwargs["sync_path"],
        }
        state.paths = kwargs["test_paths"]
        root = state.paths["/"]
        state.tests_path = root["tests_path"]
        state.metadata_path = root["metadata_path"]
        # The root tests path is expected to be URL-absolute.
        assert state.tests_path.startswith("/")
class LoadTrees(Step):
    """Step for creating a Tree for the local copy and a GitTree for the
    upstream sync."""

    provides = ["local_tree", "sync_tree"]

    def create(self, state):
        """Detect the local VCS and attach tree objects to the state."""
        sync_path = state.sync["path"]
        # The sync checkout is always git; it may not exist yet.
        sync_tree = GitTree(root=sync_path) if os.path.exists(sync_path) else None
        if GitTree.is_type():
            local_tree = GitTree()
        elif HgTree.is_type():
            local_tree = HgTree()
        else:
            local_tree = NoVCSTree()
        state.update({"local_tree": local_tree,
                      "sync_tree": sync_tree})
class SyncFromUpstream(Step):
    """Step that synchronises a local copy of the code with upstream."""
    def create(self, state):
        """Run the upstream sync, creating the sync checkout if needed.

        No-op unless --sync was requested on the command line.
        """
        if not state.kwargs["sync"]:
            return
        if not state.sync_tree:
            # First sync: create the directory and wrap it in a git tree.
            os.mkdir(state.sync["path"])
            state.sync_tree = GitTree(root=state.sync["path"])
        kwargs = state.kwargs
        # push() scopes these state keys to the nested runner; they are
        # restored/cleared when the with-block exits.
        with state.push(["sync", "paths", "metadata_path", "tests_path", "local_tree",
                         "sync_tree"]):
            state.target_rev = kwargs["rev"]
            state.no_patch = kwargs["no_patch"]
            state.suite_name = kwargs["suite_name"]
            runner = SyncFromUpstreamRunner(self.logger, state)
            runner.run()
class UpdateMetadata(Step):
    """Update the expectation metadata from a set of run logs"""
    def create(self, state):
        """Run the metadata update; no-op unless run logs were supplied."""
        if not state.kwargs["run_log"]:
            return
        kwargs = state.kwargs
        # push() scopes these state keys to the nested runner.
        with state.push(["local_tree", "sync_tree", "paths", "serve_root"]):
            state.run_log = kwargs["run_log"]
            state.ignore_existing = kwargs["ignore_existing"]
            state.no_patch = kwargs["no_patch"]
            state.suite_name = kwargs["suite_name"]
            state.product = kwargs["product"]
            state.config = kwargs["config"]
            runner = MetadataUpdateRunner(self.logger, state)
            runner.run()
class UpdateRunner(StepRunner):
    """Runner for doing an overall update."""
    # Steps are executed in order; each reads/writes the shared State.
    steps = [LoadConfig,
             LoadTrees,
             SyncFromUpstream,
             UpdateMetadata]
class WPTUpdate(object):
    """Top-level driver for a wptupdate run, with resume/abort support."""

    def __init__(self, logger, runner_cls=UpdateRunner, **kwargs):
        """Object that controls the running of a whole wptupdate.

        :param runner_cls: Runner subclass holding the overall list of
                           steps to run.
        :param kwargs: Command line arguments
        """
        self.runner_cls = runner_cls
        self.serve_root = kwargs["test_paths"]["/"]["tests_path"]
        if not kwargs["sync"]:
            setup_paths(self.serve_root)
        else:
            if os.path.exists(kwargs["sync_path"]):
                # If the sync path doesn't exist we defer this until it does
                setup_paths(kwargs["sync_path"])
        # Persistent state allows an interrupted update to be resumed.
        self.state = State(logger)
        self.kwargs = kwargs
        self.logger = logger

    def run(self, **kwargs):
        """Execute the update; returns exit_clean/exit_unclean sentinels."""
        if self.kwargs["abort"]:
            self.abort()
            return exit_clean
        # Refuse to clobber an in-progress update unless told how to proceed.
        if not self.kwargs["continue"] and not self.state.is_empty():
            self.logger.error("Found existing state. Run with --continue to resume or --abort to clear state")
            return exit_unclean
        if self.kwargs["continue"]:
            if self.state.is_empty():
                self.logger.error("No sync in progress?")
                return exit_clean
            # Resume with the arguments recorded by the interrupted run.
            self.kwargs = self.state.kwargs
        else:
            self.state.kwargs = self.kwargs
        self.state.serve_root = self.serve_root
        update_runner = self.runner_cls(self.logger, self.state)
        rv = update_runner.run()
        # Only a clean finish clears the saved state; otherwise it is kept
        # so the run can be resumed with --continue.
        if rv in (exit_clean, None):
            self.state.clear()
        return rv

    def abort(self):
        """Discard any saved update state."""
        self.state.clear()
| mpl-2.0 |
Shopify/dd-agent | tests/core/test_autorestart.py | 38 | 3219 | # stdlib
import os
import shlex
import signal
import subprocess
import time
import unittest
# 3p
from nose.plugins.attrib import attr
@attr(requires='core_integration')
class TestAutoRestart(unittest.TestCase):
    """ Test the auto-restart and forking of the agent """

    def setUp(self):
        # Popen handles for agents started by the tests; tearDown cleans
        # up whichever one a failing test left running.
        self.agent_foreground = None
        self.agent_daemon = None

    def tearDown(self):
        if self.agent_foreground:
            self.agent_foreground.kill()
        if self.agent_daemon:
            # The daemon must be stopped through its own CLI.
            args = shlex.split('python agent.py stop')
            subprocess.Popen(args).communicate()

    def _start_foreground(self):
        """Start the agent in the foreground with auto-restart enabled."""
        # Run the agent in the foreground with auto-restarting on.
        args = shlex.split('python agent.py foreground --autorestart')
        self.agent_foreground = subprocess.Popen(args)
        time.sleep(5)

    def _start_daemon(self):
        """Start the agent as a daemon with auto-restart enabled."""
        args = shlex.split('python agent.py start --autorestart')
        self.agent_daemon = subprocess.Popen(args)
        time.sleep(5)

    def _get_child_parent_pids(self, grep_str):
        """Return (child_pid, parent_pid) of the two agent processes.

        With auto-restart the agent forks a supervisor (parent) and a
        worker (child); pgrep must find exactly two matching processes.
        The child is assumed to have the larger pid — TODO confirm this
        holds under pid wrap-around.
        """
        args = shlex.split('pgrep -f "%s"' % grep_str)
        pgrep = subprocess.Popen(args, stdout=subprocess.PIPE,
                                 close_fds=True).communicate()[0]
        pids = pgrep.strip().split('\n')
        assert len(pids) == 2, pgrep
        return sorted([int(p) for p in pids], reverse=True)

    def test_foreground(self):
        """Parent death kills the child; SIGUSR1 triggers an auto-restart."""
        self._start_foreground()
        grep_str = 'agent.py foreground'
        child_pid, parent_pid = self._get_child_parent_pids(grep_str)
        # Try killing the parent proc, confirm that the child is killed as well.
        os.kill(parent_pid, signal.SIGTERM)
        os.waitpid(parent_pid, 0)
        time.sleep(6)
        # kill(pid, ...) on a dead process raises OSError.
        self.assertRaises(OSError, os.kill, child_pid, signal.SIGTERM)
        # Restart the foreground agent.
        self._start_foreground()
        child_pid, parent_pid = self._get_child_parent_pids(grep_str)
        # Set a SIGUSR1 to the child to force an auto-restart exit.
        os.kill(child_pid, signal.SIGUSR1)
        time.sleep(6)
        # Confirm that the child is still alive (it was restarted).
        child_pid, parent_pid = self._get_child_parent_pids(grep_str)
        # Kill the foreground process.
        self.agent_foreground.terminate()
        self.agent_foreground = None

    def test_daemon(self):
        """Same checks as test_foreground, for the daemonized agent."""
        self._start_daemon()
        grep_str = 'agent.py start'
        child_pid, parent_pid = self._get_child_parent_pids(grep_str)
        # Try killing the parent proc, confirm that the child is killed as well.
        os.kill(parent_pid, signal.SIGTERM)
        time.sleep(6)
        self.assertRaises(OSError, os.kill, child_pid, signal.SIGTERM)
        # Restart the daemon agent.
        self._start_daemon()
        child_pid, parent_pid = self._get_child_parent_pids(grep_str)
        # Set a SIGUSR1 to the child to force an auto-restart exit.
        os.kill(child_pid, signal.SIGUSR1)
        time.sleep(6)
        # Confirm that the child is still alive
        child_pid, parent_pid = self._get_child_parent_pids(grep_str)
        # Kill the daemon process.
        os.kill(parent_pid, signal.SIGTERM)
        self.agent_daemon = None
| bsd-3-clause |
GitHublong/hue | desktop/core/ext-py/Paste-2.0.1/paste/proxy.py | 50 | 10192 | # (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
"""
An application that proxies WSGI requests to a remote server.
TODO:
* Send ``Via`` header? It's not clear to me this is a Via in the
style of a typical proxy.
* Other headers or metadata? I put in X-Forwarded-For, but that's it.
* Signed data of non-HTTP keys? This would be for things like
REMOTE_USER.
* Something to indicate what the original URL was? The original host,
scheme, and base path.
* Rewriting ``Location`` headers? mod_proxy does this.
* Rewriting body? (Probably not on this one -- that can be done with
a different middleware that wraps this middleware)
* Example::
use = egg:Paste#proxy
address = http://server3:8680/exist/rest/db/orgs/sch/config/
allowed_request_methods = GET
"""
from six.moves import http_client as httplib
from six.moves.urllib import parse as urlparse
from six.moves.urllib.parse import quote
import six
from paste import httpexceptions
from paste.util.converters import aslist
# Hop-by-hop headers that must not be forwarded in proxied responses
# (compared against lower-cased header names by parse_headers below).
filtered_headers = (
    'transfer-encoding',
    'connection',
    'keep-alive',
    'proxy-authenticate',
    'proxy-authorization',
    'te',
    'trailers',
    'upgrade',
)
class Proxy(object):
    """WSGI application that forwards every request to a fixed remote URL."""

    def __init__(self, address, allowed_request_methods=(),
                 suppress_http_headers=()):
        """
        address: full target URL (scheme, host and base path are split out).
        allowed_request_methods: if non-empty, any other method gets a 400.
        suppress_http_headers: lower-cased header names never forwarded.
        """
        self.address = address
        self.parsed = urlparse.urlsplit(address)
        self.scheme = self.parsed[0].lower()
        self.host = self.parsed[1]
        self.path = self.parsed[2]
        # Empty strings are dropped so "" config values are harmless.
        self.allowed_request_methods = [
            x.lower() for x in allowed_request_methods if x]
        self.suppress_http_headers = [
            x.lower() for x in suppress_http_headers if x]

    def __call__(self, environ, start_response):
        """Forward the WSGI request to the configured address and relay the response."""
        if (self.allowed_request_methods and
            environ['REQUEST_METHOD'].lower() not in self.allowed_request_methods):
            return httpexceptions.HTTPBadRequest("Disallowed")(environ, start_response)
        if self.scheme == 'http':
            ConnClass = httplib.HTTPConnection
        elif self.scheme == 'https':
            ConnClass = httplib.HTTPSConnection
        else:
            raise ValueError(
                "Unknown scheme for %r: %r" % (self.address, self.scheme))
        conn = ConnClass(self.host)
        headers = {}
        # Rebuild request headers from the CGI-style HTTP_* environ keys.
        for key, value in environ.items():
            if key.startswith('HTTP_'):
                key = key[5:].lower().replace('_', '-')
                # Host is replaced with the target host; suppressed headers
                # are dropped entirely.
                if key == 'host' or key in self.suppress_http_headers:
                    continue
                headers[key] = value
        headers['host'] = self.host
        if 'REMOTE_ADDR' in environ:
            headers['x-forwarded-for'] = environ['REMOTE_ADDR']
        # Content-Type/Length live outside the HTTP_* namespace in WSGI.
        if environ.get('CONTENT_TYPE'):
            headers['content-type'] = environ['CONTENT_TYPE']
        if environ.get('CONTENT_LENGTH'):
            if environ['CONTENT_LENGTH'] == '-1':
                # This is a special case, where the content length is basically undetermined
                body = environ['wsgi.input'].read(-1)
                headers['content-length'] = str(len(body))
            else:
                headers['content-length'] = environ['CONTENT_LENGTH']
                length = int(environ['CONTENT_LENGTH'])
                body = environ['wsgi.input'].read(length)
        else:
            body = ''
        path_info = quote(environ['PATH_INFO'])
        if self.path:
            # Join the request path onto the configured base path; the
            # leading slash is stripped so urljoin appends rather than
            # replaces.
            request_path = path_info
            if request_path and request_path[0] == '/':
                request_path = request_path[1:]
            path = urlparse.urljoin(self.path, request_path)
        else:
            path = path_info
        if environ.get('QUERY_STRING'):
            path += '?' + environ['QUERY_STRING']
        conn.request(environ['REQUEST_METHOD'],
                     path,
                     body, headers)
        res = conn.getresponse()
        # parse_headers also strips hop-by-hop headers from the response.
        headers_out = parse_headers(res.msg)
        status = '%s %s' % (res.status, res.reason)
        start_response(status, headers_out)
        # @@: Default?
        length = res.getheader('content-length')
        if length is not None:
            body = res.read(int(length))
        else:
            body = res.read()
        conn.close()
        return [body]
def make_proxy(global_conf, address, allowed_request_methods="",
               suppress_http_headers=""):
    """
    Paste app factory: build a WSGI application that proxies to ``address``.

    ``address``
        the full URL ending with a trailing ``/``
    ``allowed_request_methods``:
        a space seperated list of request methods (e.g., ``GET POST``)
    ``suppress_http_headers``
        a space seperated list of http headers (lower case, without
        the leading ``http_``) that should not be passed on to target
        host
    """
    # aslist splits the space-separated config strings into lists.
    methods = aslist(allowed_request_methods)
    headers = aslist(suppress_http_headers)
    return Proxy(address,
                 allowed_request_methods=methods,
                 suppress_http_headers=headers)
class TransparentProxy(object):
    """
    A proxy that sends the request just as it was given, including
    respecting HTTP_HOST, wsgi.url_scheme, etc.

    This is a way of translating WSGI requests directly to real HTTP
    requests.  All information goes in the environment; modify it to
    modify the way the request is made.

    If you specify ``force_host`` (and optionally ``force_scheme``)
    then HTTP_HOST won't be used to determine where to connect to;
    instead a specific host will be connected to, but the ``Host``
    header in the request will remain intact.
    """

    def __init__(self, force_host=None,
                 force_scheme='http'):
        self.force_host = force_host
        self.force_scheme = force_scheme

    def __repr__(self):
        return '<%s %s force_host=%r force_scheme=%r>' % (
            self.__class__.__name__,
            hex(id(self)),
            self.force_host, self.force_scheme)

    def __call__(self, environ, start_response):
        """Replay the WSGI request as a real HTTP request and relay the response."""
        scheme = environ['wsgi.url_scheme']
        # force_scheme only applies when a host is forced; otherwise the
        # request's own scheme decides the connection class.
        if self.force_host is None:
            conn_scheme = scheme
        else:
            conn_scheme = self.force_scheme
        if conn_scheme == 'http':
            ConnClass = httplib.HTTPConnection
        elif conn_scheme == 'https':
            ConnClass = httplib.HTTPSConnection
        else:
            raise ValueError(
                "Unknown scheme %r" % scheme)
        if 'HTTP_HOST' not in environ:
            raise ValueError(
                "WSGI environ must contain an HTTP_HOST key")
        host = environ['HTTP_HOST']
        # Connect to force_host if given, but keep the original Host header.
        if self.force_host is None:
            conn_host = host
        else:
            conn_host = self.force_host
        conn = ConnClass(conn_host)
        headers = {}
        # Rebuild request headers from the CGI-style HTTP_* environ keys.
        for key, value in environ.items():
            if key.startswith('HTTP_'):
                key = key[5:].lower().replace('_', '-')
                headers[key] = value
        headers['host'] = host
        # Only add x-forwarded-for if no upstream proxy already set one.
        if 'REMOTE_ADDR' in environ and 'HTTP_X_FORWARDED_FOR' not in environ:
            headers['x-forwarded-for'] = environ['REMOTE_ADDR']
        if environ.get('CONTENT_TYPE'):
            headers['content-type'] = environ['CONTENT_TYPE']
        if environ.get('CONTENT_LENGTH'):
            length = int(environ['CONTENT_LENGTH'])
            body = environ['wsgi.input'].read(length)
            if length == -1:
                # Undetermined length: read(-1) consumed everything, so
                # record the real length back into the environ.
                environ['CONTENT_LENGTH'] = str(len(body))
        elif 'CONTENT_LENGTH' not in environ:
            body = ''
            length = 0
        else:
            body = ''
            length = 0
        path = (environ.get('SCRIPT_NAME', '')
                + environ.get('PATH_INFO', ''))
        path = quote(path)
        if 'QUERY_STRING' in environ:
            path += '?' + environ['QUERY_STRING']
        conn.request(environ['REQUEST_METHOD'],
                     path, body, headers)
        res = conn.getresponse()
        # parse_headers also strips hop-by-hop headers from the response.
        headers_out = parse_headers(res.msg)
        status = '%s %s' % (res.status, res.reason)
        start_response(status, headers_out)
        # @@: Default?
        length = res.getheader('content-length')
        if length is not None:
            body = res.read(int(length))
        else:
            body = res.read()
        conn.close()
        return [body]
def parse_headers(message):
    """
    Turn a Message object into a list of WSGI-style ``(name, value)``
    header tuples, dropping the hop-by-hop headers listed in
    ``filtered_headers``.
    """
    headers_out = []
    if six.PY3:
        # On Python 3 the message behaves like a mapping with folded
        # continuation lines already handled.
        for header, value in message.items():
            if header.lower() not in filtered_headers:
                headers_out.append((header, value))
    else:
        for full_header in message.headers:
            if not full_header:
                # Shouldn't happen, but we'll just ignore
                continue
            if full_header[0].isspace():
                # Continuation line: append to the previous header's value.
                if not headers_out:
                    raise ValueError(
                        "First header starts with a space (%r)" % full_header)
                last_header, last_value = headers_out.pop()
                value = last_value + ' ' + full_header.strip()
                headers_out.append((last_header, value))
                continue
            try:
                header, value = full_header.split(':', 1)
            except ValueError:
                # Narrowed from a bare ``except:`` so that KeyboardInterrupt,
                # SystemExit and other unrelated errors are not masked as a
                # malformed header.
                raise ValueError("Invalid header: %r" % full_header)
            value = value.strip()
            if header.lower() not in filtered_headers:
                headers_out.append((header, value))
    return headers_out
def make_transparent_proxy(
        global_conf, force_host=None, force_scheme='http'):
    """
    Paste app factory: create a proxy that connects to a specific host,
    but does absolutely no other filtering, including the Host header.
    """
    return TransparentProxy(force_host=force_host,
                            force_scheme=force_scheme)
| apache-2.0 |
chemelnucfin/tensorflow | tensorflow/python/kernel_tests/map_fn_test.py | 14 | 8821 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.kernels.functional_ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.eager import context
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gradients_impl
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import map_fn
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
import tensorflow.python.ops.tensor_array_grad # pylint: disable=unused-import
from tensorflow.python.platform import test
# pylint: disable=invalid-name
def simple_scoped_fn(a, x):
  """Simple function: (a, x) -> 2(x+a), but with "2" as a variable in scope."""
  with variable_scope.variable_scope("body"):
    # Dummy variable, just to check that scoping works as intended.
    # Re-entering this scope must reuse (not recreate) the variable.
    two = variable_scope.get_variable(
        "two", [],
        dtype=dtypes.int32,
        initializer=init_ops.constant_initializer(2))
    return math_ops.multiply(math_ops.add(a, x), two)
@test_util.with_control_flow_v2
class MapFnTest(test.TestCase):
  """Unit tests for `map_fn.map_fn`: dtypes, structures, shapes, gradients."""
  @test_util.run_in_graph_and_eager_modes
  def testMap_Simple(self):
    """map_fn applies an elementwise fn across a 1-D tensor."""
    nums = [1, 2, 3, 4, 5, 6]
    elems = constant_op.constant(nums, name="data")
    r = map_fn.map_fn(
        lambda x: math_ops.multiply(math_ops.add(x, 3), 2), elems)
    self.assertAllEqual(
        np.array([(x + 3) * 2 for x in nums]), self.evaluate(r))
  def testMapDtypeEager(self):
    """The `dtype` argument fixes the output dtype when fn changes it."""
    with context.eager_mode():
      dtype = map_fn.map_fn(lambda x: constant_op.constant(""),
                            constant_op.constant([]),
                            dtype=dtypes.string).dtype
      self.assertEqual(dtype, dtypes.string)
  def testMapSparseTensor(self):
    """SparseTensor inputs are rejected with a TypeError."""
    with self.cached_session():
      with self.assertRaises(TypeError):
        map_fn.map_fn(
            lambda x: x,
            sparse_tensor.SparseTensor(
                indices=[[0, 0], [0, 1], [1, 0]],
                values=constant_op.constant([0, 1, 2]),
                dense_shape=[2, 2]))
  @test_util.run_in_graph_and_eager_modes
  def testMapOverScalarErrors(self):
    """Mapping over a scalar (rank-0) elems source raises ValueError."""
    with self.assertRaisesRegexp(ValueError, "not scalars"):
      map_fn.map_fn(lambda x: x, [1, 2])
    with self.assertRaisesRegexp(ValueError, "not a scalar"):
      map_fn.map_fn(lambda x: x, 1)
  @test_util.run_deprecated_v1
  def testMap_Scoped(self):
    """Variables created inside the mapped fn are created once and reusable."""
    with self.cached_session() as sess:
      def double_scoped(x):
        """2x with a dummy 2 that is scoped."""
        with variable_scope.variable_scope("body"):
          # Dummy variable, just to check that scoping works as intended.
          two = variable_scope.get_variable(
              "two", [],
              dtype=dtypes.int32,
              initializer=init_ops.constant_initializer(2))
          return math_ops.multiply(x, two)
      with variable_scope.variable_scope("root") as varscope:
        elems = constant_op.constant([1, 2, 3, 4, 5, 6], name="data")
        doubles = np.array([2 * x for x in [1, 2, 3, 4, 5, 6]])
        r = map_fn.map_fn(double_scoped, elems)
        # Check that we have the one variable we asked for here.
        self.assertEqual(len(variables.trainable_variables()), 1)
        self.assertEqual(variables.trainable_variables()[0].name,
                         "root/body/two:0")
        sess.run([variables.global_variables_initializer()])
        self.assertAllEqual(doubles, self.evaluate(r))
        # Now let's reuse our single variable.
        varscope.reuse_variables()
        r = map_fn.map_fn(double_scoped, elems)
        self.assertEqual(len(variables.trainable_variables()), 1)
        self.assertAllEqual(doubles, self.evaluate(r))
  @test_util.run_deprecated_v1
  def testMap_Grad(self):
    """Gradients flow to both captured params and the mapped elems."""
    with self.cached_session():
      param = constant_op.constant(2.0)
      elems = constant_op.constant([1.0, 2.0, 3.0, 4.0, 5.0, 6.0], name="elems")
      y = map_fn.map_fn(
          lambda x: math_ops.multiply(math_ops.square(x), param), elems)
      # dy/dparam = sum(x^2) = 91; dy/delems = 2 * param * x.
      r = gradients_impl.gradients(y, param)[0]
      self.assertAllEqual(91.0, self.evaluate(r))
      r = gradients_impl.gradients(y, elems)[0]
      self.assertAllEqual([4.0, 8.0, 12.0, 16.0, 20.0, 24.0], self.evaluate(r))
  @test_util.run_in_graph_and_eager_modes
  def testMap_SimpleNotTensor(self):
    """Plain numpy arrays are accepted as the elems argument."""
    nums = np.array([1, 2, 3, 4, 5, 6])
    r = map_fn.map_fn(
        lambda x: math_ops.multiply(math_ops.add(x, 3), 2), nums)
    self.assertAllEqual(
        np.array([(x + 3) * 2 for x in nums]), self.evaluate(r))
  @test_util.run_in_graph_and_eager_modes
  def testMap_SingleInputMultiOutput(self):
    """fn may return a tuple; `dtype` declares the output structure."""
    nums = np.array([1, 2, 3, 4, 5, 6])
    r = map_fn.map_fn(
        lambda x: ((x + 3) * 2, -(x + 3) * 2),
        nums,
        dtype=(dtypes.int64, dtypes.int64))
    self.assertEqual(2, len(r))
    self.assertEqual((6,), r[0].get_shape())
    self.assertEqual((6,), r[1].get_shape())
    received = self.evaluate(r)
    self.assertAllEqual((nums + 3) * 2, received[0])
    self.assertAllEqual(-(nums + 3) * 2, received[1])
  @test_util.run_in_graph_and_eager_modes
  def testMap_MultiOutputMismatchedDtype(self):
    """A `dtype` whose nesting differs from fn's output is rejected."""
    nums = np.array([1, 2, 3, 4, 5, 6])
    with self.assertRaisesRegexp(
        TypeError, r"two structures don't have the same nested structure"):
      # lambda emits tuple, but dtype is a list
      map_fn.map_fn(
          lambda x: ((x + 3) * 2, -(x + 3) * 2),
          nums,
          dtype=[dtypes.int64, dtypes.int64])
  @test_util.run_in_graph_and_eager_modes
  def testMap_MultiInputSingleOutput(self):
    """A nested tuple of elems is unpacked per-element into fn."""
    nums = np.array([1, 2, 3, 4, 5, 6])
    r = map_fn.map_fn(
        lambda x: x[0] * x[1][0] + x[1][1], (nums, (nums, -nums)),
        dtype=dtypes.int64)
    self.assertEqual((6,), r.get_shape())
    received = self.evaluate(r)
    self.assertAllEqual(nums * nums + (-nums), received)
  @test_util.run_in_graph_and_eager_modes
  def testMap_MultiInputSameStructureOutput(self):
    """When fn's output structure matches elems, no `dtype` is needed."""
    nums = np.array([1, 2, 3, 4, 5, 6])
    r = map_fn.map_fn(lambda x: (x[1][0], (x[1][1], x[0])),
                      (nums, (2 * nums, -nums)))
    r = [r[0], r[1][0], r[1][1]]
    self.assertEqual((6,), r[0].get_shape())
    self.assertEqual((6,), r[1].get_shape())
    self.assertEqual((6,), r[2].get_shape())
    received = self.evaluate(r)
    self.assertAllEqual(2 * nums, received[0])
    self.assertAllEqual(-nums, received[1])
    self.assertAllEqual(nums, received[2])
  @test_util.run_in_graph_and_eager_modes
  def testMapShape(self):
    """Static output shape agrees with the evaluated (runtime) shape."""
    x = constant_op.constant([[1, 2, 3], [4, 5, 6]])
    y = map_fn.map_fn(lambda e: e, x)
    self.assertAllEqual(y.get_shape(), self.evaluate(y).shape)
  @test_util.run_deprecated_v1
  def testMapUnknownShape(self):
    """An input of unknown shape yields an output of unknown shape."""
    x = array_ops.placeholder(dtypes.float32)
    y = map_fn.map_fn(lambda e: e, x)
    self.assertIs(None, y.get_shape().dims)
  # TODO(b/124383826): this test fails in eager: the iterable is of length 0,
  # so the body of the while loop never executes
  @test_util.run_v1_only("b/120545219")
  def testMapEmptyScalar(self):
    """Mapping over an empty tensor produces an empty (length-0) result."""
    map_return = map_fn.map_fn(lambda x: 1,
                               constant_op.constant([], dtype=dtypes.int32))
    self.assertAllEqual([0], map_return.get_shape().dims)
    self.assertAllEqual([0], self.evaluate(map_return).shape)
  # TODO(b/124383826): this test fails in eager: the iterable is of length 0,
  # so the body of the while loop never executes
  @test_util.run_v1_only("b/120545219")
  def testMapEmptyTensor(self):
    """Empty input still carries fn's per-element shape in the output."""
    with self.cached_session():
      map_return = map_fn.map_fn(lambda x: array_ops.zeros([3, 2]),
                                 constant_op.constant([]))
      self.assertAllEqual([0, 3, 2], map_return.get_shape().dims)
      self.assertAllEqual([0, 3, 2], self.evaluate(map_return).shape)
# Standard test entry point: run the suite when invoked directly.
if __name__ == "__main__":
  test.main()
# pylint: enable=invalid-name
| apache-2.0 |
flybird119/voltdb | build.py | 5 | 13379 | #!/usr/bin/env python
import os, sys, commands, string
from buildtools import *
# usage:
# The following all work as you might expect:
# python build.py
# ./build.py debug
# python build.py release
# python build.py test
# ./build.py clean
# ./build.py release clean
# python build.py release test
# The command line args can include a build level: release or debug
# the default is debug
# The command line args can include an action: build, clean or test
# the default is build
# The order doesn't matter
# Including multiple levels or actions is a bad idea
###############################################################################
# INITIALIZE BUILD CONTEXT
# - Detect Platform
# - Parse Target and Level from Command Line
###############################################################################
CTX = BuildContext(sys.argv)
# CTX is an instance of BuildContext, which is declared in buildtools.py
# BuildContext contains vars that determine how the makefile will be built
# and how the build will go down. It also checks the platform and parses
# command line args to determine target and build level.
###############################################################################
# SET GLOBAL CONTEXT VARIABLES FOR BUILDING
###############################################################################
# these are the base compile options that get added to every compile step
# this does not include header/lib search paths or specific flags for
# specific targets
CTX.CPPFLAGS += """-Wall -Wextra -Werror -Woverloaded-virtual
            -Wpointer-arith -Wcast-qual -Wwrite-strings
            -Winit-self -Wno-sign-compare -Wno-unused-parameter
            -D__STDC_CONSTANT_MACROS -D__STDC_LIMIT_MACROS -DNOCLOCK
            -fno-omit-frame-pointer
            -fvisibility=default
            -DBOOST_SP_DISABLE_THREADS -DBOOST_DISABLE_THREADS -DBOOST_ALL_NO_LIB"""
# clang doesn't seem to want this
# NOTE: compiler_name/compiler_major/compiler_minor come from the
# `from buildtools import *` at the top of this script.
if compiler_name == 'gcc':
    CTX.CPPFLAGS += " -pthread"
    CTX.LDFLAGS += " -rdynamic"
# NOTE(review): this clang test only matches major version 3 exactly;
# confirm whether clang >= 4 should also receive -Wno-varargs.
if (compiler_name == 'clang') and (compiler_major == 3 and compiler_minor >= 4):
    CTX.CPPFLAGS += " -Wno-varargs"
# NOTE(review): similarly, the gcc version test only matches 4.3+ within the
# 4.x series; gcc 5 and later fall through. Confirm this is intentional.
if (compiler_name != 'gcc') or (compiler_major == 4 and compiler_minor >= 3):
    CTX.CPPFLAGS += " -Wno-ignored-qualifiers -fno-strict-aliasing"
if CTX.PROFILE:
    CTX.CPPFLAGS += " -fvisibility=default -DPROFILE_ENABLED"
# linker flags
CTX.LDFLAGS += """ -g3"""
CTX.LASTLDFLAGS = """ -ldl"""
if CTX.COVERAGE:
    CTX.LDFLAGS += " -ftest-coverage -fprofile-arcs"
# for the google perftools profiler and the recommended stack unwinder
# which you must have separately built and installed. Take some guesses
# at the library location (/usr/local/lib).
if CTX.PROFILE:
    CTX.LDFLAGS = """ -L/usr/local/lib -g3 -lprofiler -lunwind"""
    # consider setting CTX.LASTLDFLAGS to " " rather than -ldl if that option really is unwanted.
# this is where the build will look for header files
# - the test source will also automatically look in the test root dir
CTX.INCLUDE_DIRS = ['src/ee']
CTX.SYSTEM_DIRS = ['third_party/cpp']
# don't worry about checking for changes in header files in the following
# directories
CTX.IGNORE_SYS_PREFIXES = ['/usr/include', '/usr/lib', 'third_party']
# where to find the source
CTX.INPUT_PREFIX = "src/ee/"
# where to find the third-party source
CTX.THIRD_PARTY_INPUT_PREFIX = "third_party/cpp/"
# where to find the tests
CTX.TEST_PREFIX = "tests/ee/"
###############################################################################
# SET RELEASE LEVEL CONTEXT
###############################################################################
# The EE log level may be overridden from the environment; "500" is default.
if "VOLT_LOG_LEVEL" in os.environ:
    LOG_LEVEL = os.environ["VOLT_LOG_LEVEL"]
else:
    LOG_LEVEL = "500"
if CTX.LEVEL == "MEMCHECK":
    CTX.CPPFLAGS += " -g3 -DDEBUG -DMEMCHECK -DVOLT_LOG_LEVEL=%s" % LOG_LEVEL
    CTX.OUTPUT_PREFIX = "obj/memcheck"
if CTX.LEVEL == "DEBUG":
    CTX.CPPFLAGS += " -g3 -DDEBUG -DVOLT_LOG_LEVEL=%s" % LOG_LEVEL
    CTX.OUTPUT_PREFIX = "obj/debug"
if CTX.LEVEL == "RELEASE":
    CTX.CPPFLAGS += " -g3 -O3 -mmmx -msse -msse2 -msse3 -DNDEBUG -DVOLT_LOG_LEVEL=%s" % LOG_LEVEL
    CTX.OUTPUT_PREFIX = "obj/release"
# build in parallel directory instead of subdir so that relative paths work
if CTX.COVERAGE:
    CTX.CPPFLAGS += " -ftest-coverage -fprofile-arcs"
    CTX.OUTPUT_PREFIX += "-coverage"
CTX.OUTPUT_PREFIX += "/"
###############################################################################
# HANDLE PLATFORM SPECIFIC STUFF
###############################################################################
# Defaults Section
CTX.JNIEXT = "so"
CTX.JNILIBFLAGS += " -shared"
CTX.SOFLAGS += " -shared"
CTX.SOEXT = "so"
# Ask the JVM for its java.library.path so JNI link flags can point at it.
out = Popen('java -cp tools/ SystemPropertyPrinter java.library.path'.split(),
            stdout = PIPE).communicate()[0]
libpaths = ' '.join( '-L' + path for path in out.strip().split(':') if path != '' and path != '/usr/lib' )
CTX.JNIBINFLAGS += " " + libpaths
CTX.JNIBINFLAGS += " -ljava -ljvm -lverify"
if CTX.PLATFORM == "Darwin":
    CTX.CPPFLAGS += " -DMACOSX -arch x86_64"
    CTX.JNIEXT = "jnilib"
    CTX.JNILIBFLAGS = " -bundle"
    CTX.JNIBINFLAGS = " -framework JavaVM,1.7"
    # NOTE(review): this += has no leading space, so it concatenates with the
    # preceding " -shared" as "-shared-dynamiclib"; confirm whether a leading
    # space is missing here.
    CTX.SOFLAGS += "-dynamiclib -undefined dynamic_lookup -single_module"
    CTX.SOEXT = "dylib"
    CTX.JNIFLAGS = "-framework JavaVM,1.7"
if CTX.PLATFORM == "Linux":
    CTX.CPPFLAGS += " -Wno-attributes -Wcast-align -Wconversion -DLINUX -fpic"
    CTX.NMFLAGS += " --demangle"
###############################################################################
# SPECIFY SOURCE FILE INPUT
###############################################################################
# the input is a map from directory name to a list of whitespace
# separated source files (cpp only for now). Preferred ordering is
# one file per line, indented one space, in alphabetical order.
CTX.INPUT[''] = """
voltdbjni.cpp
"""
CTX.INPUT['catalog'] = """
catalog.cpp
catalogtype.cpp
cluster.cpp
column.cpp
columnref.cpp
connector.cpp
connectortableinfo.cpp
connectorproperty.cpp
constraint.cpp
constraintref.cpp
database.cpp
index.cpp
indexref.cpp
materializedviewinfo.cpp
planfragment.cpp
statement.cpp
table.cpp
"""
CTX.INPUT['structures'] = """
CompactingPool.cpp
ContiguousAllocator.cpp
"""
CTX.INPUT['common'] = """
CompactingStringPool.cpp
CompactingStringStorage.cpp
FatalException.cpp
ThreadLocalPool.cpp
SegvException.cpp
SerializableEEException.cpp
SQLException.cpp
InterruptException.cpp
StringRef.cpp
tabletuple.cpp
TupleSchema.cpp
types.cpp
UndoLog.cpp
NValue.cpp
RecoveryProtoMessage.cpp
RecoveryProtoMessageBuilder.cpp
DefaultTupleSerializer.cpp
FullTupleSerializer.cpp
executorcontext.cpp
serializeio.cpp
StreamPredicateList.cpp
Topend.cpp
TupleOutputStream.cpp
TupleOutputStreamProcessor.cpp
MiscUtil.cpp
debuglog.cpp
"""
CTX.INPUT['execution'] = """
FragmentManager.cpp
JNITopend.cpp
VoltDBEngine.cpp
"""
CTX.INPUT['executors'] = """
OptimizedProjector.cpp
abstractexecutor.cpp
aggregateexecutor.cpp
deleteexecutor.cpp
executorutil.cpp
indexcountexecutor.cpp
indexscanexecutor.cpp
insertexecutor.cpp
limitexecutor.cpp
materializedscanexecutor.cpp
materializeexecutor.cpp
nestloopexecutor.cpp
nestloopindexexecutor.cpp
orderbyexecutor.cpp
projectionexecutor.cpp
receiveexecutor.cpp
sendexecutor.cpp
seqscanexecutor.cpp
tablecountexecutor.cpp
tuplescanexecutor.cpp
unionexecutor.cpp
updateexecutor.cpp
"""
CTX.INPUT['expressions'] = """
abstractexpression.cpp
expressionutil.cpp
vectorexpression.cpp
functionexpression.cpp
tupleaddressexpression.cpp
operatorexpression.cpp
parametervalueexpression.cpp
subqueryexpression.cpp
scalarvalueexpression.cpp
"""
CTX.INPUT['plannodes'] = """
abstractjoinnode.cpp
abstractoperationnode.cpp
abstractplannode.cpp
abstractscannode.cpp
aggregatenode.cpp
deletenode.cpp
indexscannode.cpp
indexcountnode.cpp
tablecountnode.cpp
insertnode.cpp
limitnode.cpp
materializenode.cpp
materializedscanplannode.cpp
nestloopindexnode.cpp
nestloopnode.cpp
orderbynode.cpp
plannodefragment.cpp
plannodeutil.cpp
projectionnode.cpp
receivenode.cpp
SchemaColumn.cpp
sendnode.cpp
seqscannode.cpp
tuplescannode.cpp
unionnode.cpp
updatenode.cpp
"""
CTX.INPUT['indexes'] = """
tableindex.cpp
tableindexfactory.cpp
IndexStats.cpp
"""
CTX.INPUT['storage'] = """
constraintutil.cpp
CopyOnWriteContext.cpp
ElasticContext.cpp
CopyOnWriteIterator.cpp
ConstraintFailureException.cpp
TableStreamer.cpp
ElasticScanner.cpp
MaterializedViewMetadata.cpp
persistenttable.cpp
PersistentTableStats.cpp
StreamedTableStats.cpp
streamedtable.cpp
table.cpp
TableCatalogDelegate.cpp
tablefactory.cpp
TableStats.cpp
tableutil.cpp
temptable.cpp
TempTableLimits.cpp
TupleStreamBase.cpp
ExportTupleStream.cpp
DRTupleStream.cpp
BinaryLogSink.cpp
RecoveryContext.cpp
TupleBlock.cpp
TableStreamerContext.cpp
ElasticIndex.cpp
ElasticIndexReadContext.cpp
"""
CTX.INPUT['stats'] = """
StatsAgent.cpp
StatsSource.cpp
"""
CTX.INPUT['logging'] = """
JNILogProxy.cpp
LogManager.cpp
"""
# specify the third party input
CTX.THIRD_PARTY_INPUT['jsoncpp'] = """
jsoncpp.cpp
"""
CTX.THIRD_PARTY_INPUT['crc'] = """
crc32c.cc
crc32ctables.cc
"""
CTX.THIRD_PARTY_INPUT['murmur3'] = """
MurmurHash3.cpp
"""
CTX.THIRD_PARTY_INPUT['sha1'] = """
sha1.cpp
"""
###############################################################################
# SPECIFY THE TESTS
###############################################################################
# Which EE test suite to build/run: taken from the EETESTSUITE environment
# variable, falling back to the (possibly unexpanded) ant property token
# "${eetestsuite}", which selects every suite below.
# Using os.getenv's default argument instead of `whichtests == None`
# (PEP 8: comparisons to None must use `is`, and the default arg makes
# the fallback explicit in one statement).
whichtests = os.getenv("EETESTSUITE", "${eetestsuite}")
# input format similar to source, but the executable name is listed
# Each suite is only registered when it was selected (or when the
# placeholder "${eetestsuite}" means "run everything").
if whichtests == "${eetestsuite}":
    CTX.TESTS['.'] = """
     harness_test
    """
if whichtests in ("${eetestsuite}", "catalog"):
    CTX.TESTS['catalog'] = """
     catalog_test
    """
if whichtests in ("${eetestsuite}", "logging"):
    CTX.TESTS['logging'] = """
     logging_test
    """
if whichtests in ("${eetestsuite}", "common"):
    CTX.TESTS['common'] = """
     debuglog_test
     elastic_hashinator_test
     nvalue_test
     pool_test
     serializeio_test
     tabletuple_test
     tupleschema_test
     undolog_test
     valuearray_test
    """
if whichtests in ("${eetestsuite}", "execution"):
    CTX.TESTS['execution'] = """
     add_drop_table
     engine_test
     FragmentManagerTest
    """
if whichtests in ("${eetestsuite}", "executors"):
    CTX.TESTS['executors'] = """
     OptimizedProjectorTest
    """
if whichtests in ("${eetestsuite}", "expressions"):
    CTX.TESTS['expressions'] = """
     expression_test
     function_test
    """
if whichtests in ("${eetestsuite}", "indexes"):
    CTX.TESTS['indexes'] = """
     index_key_test
     index_scripted_test
     index_test
     compacting_hash_index
     CompactingTreeMultiIndexTest
    """
if whichtests in ("${eetestsuite}", "storage"):
    CTX.TESTS['storage'] = """
     CompactionTest
     CopyOnWriteTest
     DRBinaryLog_test
     DRTupleStream_test
     ExportTupleStream_test
     PersistentTableMemStatsTest
     StreamedTable_test
     TempTableLimitsTest
     constraint_test
     filter_test
     persistent_table_log_test
     persistenttable_test
     serialize_test
     table_and_indexes_test
     table_test
     tabletuple_export_test
    """
if whichtests in ("${eetestsuite}", "structures"):
    CTX.TESTS['structures'] = """
     CompactingMapTest
     CompactingMapIndexCountTest
     CompactingHashTest
     CompactingPoolTest
     CompactingMapBenchmark
    """
if whichtests in ("${eetestsuite}", "plannodes"):
    CTX.TESTS['plannodes'] = """
     PlanNodeFragmentTest
    """
###############################################################################
# BUILD THE MAKEFILE
###############################################################################
# this function (in buildtools.py) generates the makefile
# it's currently a bit ugly but it'll get cleaned up soon
if not os.environ.get('EESKIPBUILDMAKEFILE'):
    # Parenthesized print works in both Python 2 and Python 3 for one arg.
    print("build.py: Making the makefile")
    buildMakefile(CTX)
if os.environ.get('EEONLYBUILDMAKEFILE'):
    sys.exit()
###############################################################################
# RUN THE MAKEFILE
###############################################################################
# Detect the number of hardware threads so make can run with a sensible -j
# parallelism; fall back to 4 on unrecognized platforms.
numHardwareThreads = 4
if CTX.PLATFORM == "Darwin":
    # "sysctl hw.ncpu" prints e.g. "hw.ncpu: 8"; take the trailing number.
    # (str.split() already discards surrounding whitespace, so the
    # deprecated string.strip/string.split module functions are not needed.)
    output = commands.getstatusoutput("sysctl hw.ncpu")
    numHardwareThreads = int(output[1].split()[1])
elif CTX.PLATFORM == "Linux":
    # Count "processor" entries in /proc/cpuinfo.
    numHardwareThreads = 0
    for line in open('/proc/cpuinfo').readlines():
        name_value = [part.strip() for part in line.split(':', 1)]
        if len(name_value) != 2:
            continue
        name, value = name_value
        if name == "processor":
            numHardwareThreads = numHardwareThreads + 1
# Run the generated makefile; abort the script if the build fails.
retval = os.system("make --directory=%s -j%d" % (CTX.OUTPUT_PREFIX, numHardwareThreads))
if retval != 0:
    sys.exit(-1)
###############################################################################
# RUN THE TESTS IF ASKED TO
###############################################################################
retval = 0
if CTX.TARGET == "TEST":
    retval = runTests(CTX)
elif CTX.TARGET == "VOLTDBIPC":
    retval = buildIPC(CTX)
if retval != 0:
    sys.exit(-1)
| agpl-3.0 |
CLVsol/odoo_addons | clv_medicament/clv_annotation/clv_annotation.py | 1 | 2262 | # -*- encoding: utf-8 -*-
################################################################################
# #
# Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU Affero General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU Affero General Public License for more details. #
# #
# You should have received a copy of the GNU Affero General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
################################################################################
from openerp.osv import fields, osv
class clv_medicament(osv.osv):
    """Extend the clv_medicament model with a many2many link to annotations."""
    _inherit = 'clv_medicament'
    # Columns added to the inherited model (old-style OpenERP API).
    _columns = {
        # m2m through the relation table clv_medicament_annotation_rel;
        # clv_annotation declares the inverse field over the same table.
        'annotation_ids': fields.many2many('clv_annotation',
                                           'clv_medicament_annotation_rel',
                                           'medicament_id',
                                           'annotation_id',
                                           'Annotations')
    }
class clv_annotation(osv.osv):
    """Extend the clv_annotation model with the inverse medicament link."""
    _inherit = 'clv_annotation'
    # Columns added to the inherited model (old-style OpenERP API).
    _columns = {
        # Inverse side of clv_medicament.annotation_ids: same relation table,
        # with the foreign-key column roles swapped.
        'medicament_ids': fields.many2many('clv_medicament',
                                           'clv_medicament_annotation_rel',
                                           'annotation_id',
                                           'medicament_id',
                                           'Medicaments')
    }
| agpl-3.0 |
jimklo/LearningRegistry | LR/lr/controllers/services.py | 2 | 3121 | # Copyright 2011 Department of Defence
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from pylons import request, response, session, tmpl_context as c, url
from pylons.controllers.util import abort, redirect
from lr.lib.base import BaseController, render
import lr.lib.helpers as h
import urllib2, json, datetime
from lr.model import LRNode as sourceLRNode, NodeServiceModel
log = logging.getLogger(__name__)
class ServicesController(BaseController):
    """REST Controller styled on the Atom Publishing Protocol"""
    # To properly map this controller, ensure your config/routing.py
    # file has a resource setup:
    # map.resource('services', 'services')
    def index(self, format='html'):
        """GET /services: All items in the collection.

        Returns a JSON document describing this node and each service it
        advertises, or a plain error string if the administrative service
        is unavailable.
        """
        # NOTE(review): `== False` only short-circuits when the check returns
        # the exact value False; confirm whether other falsy returns (e.g.
        # None) should also be treated as "unavailable".
        if sourceLRNode.isServiceAvailable('Network Node Services') == False:
            return "Administrative service is not available"
        data = {}
        data['timestamp'] = str(datetime.datetime.utcnow())
        data['node_id'] = sourceLRNode.nodeDescription.node_id
        data['active'] = sourceLRNode.nodeDescription.active
        data['node_name'] = sourceLRNode.nodeDescription.node_name
        data['services'] = []
        # Each advertised service contributes its raw spec dictionary.
        for s in sourceLRNode.nodeServices:
            data['services'].append(s.specData)
        return json.dumps(data)
        # url('services')
    def create(self):
        """POST /services: Create a new item (stub, not implemented)."""
        # url('services')
    def new(self, format='html'):
        """GET /services/new: Form to create a new item (stub, not implemented)."""
        # url('new_services')
    def update(self, id):
        """PUT /services/id: Update an existing item (stub, not implemented)."""
        # Forms posted to this method should contain a hidden field:
        #    <input type="hidden" name="_method" value="PUT" />
        # Or using helpers:
        #    h.form(url('services', id=ID),
        #           method='put')
        # url('services', id=ID)
    def delete(self, id):
        """DELETE /services/id: Delete an existing item (stub, not implemented)."""
        # Forms posted to this method should contain a hidden field:
        #    <input type="hidden" name="_method" value="DELETE" />
        # Or using helpers:
        #    h.form(url('services', id=ID),
        #           method='delete')
        # url('services', id=ID)
    def show(self, id, format='html'):
        """GET /services/id: Show a specific item (stub, not implemented)."""
        # url('services', id=ID)
    def edit(self, id, format='html'):
        """GET /services/id/edit: Form to edit an existing item (stub, not implemented)."""
        # url('edit_services', id=ID)
| apache-2.0 |
eestay/edx-platform | common/lib/xmodule/xmodule/modulestore/tests/factories.py | 16 | 15607 | import pprint
import threading
from uuid import uuid4
from decorator import contextmanager
import pymongo.message
from factory import Factory, Sequence, lazy_attribute_sequence, lazy_attribute
from factory.containers import CyclicDefinitionError
from mock import Mock, patch
from nose.tools import assert_less_equal, assert_greater_equal
import dogstats_wrapper as dog_stats_api
from opaque_keys.edx.locations import Location
from opaque_keys.edx.keys import UsageKey
from xblock.core import XBlock
from xmodule.tabs import StaticTab
from xmodule.modulestore import prefer_xmodules, ModuleStoreEnum
from xmodule.modulestore.django import modulestore
from xmodule.x_module import DEPRECATION_VSCOMPAT_EVENT
class Dummy(object):
    """Placeholder FACTORY_FOR target; the real class is chosen per factory."""
    pass
class XModuleFactoryLock(threading.local):
    """
    Thread-local flag recording whether XModuleFactory may safely be used.

    Factory-created data lives in the modulestore, so the factories must only
    run where that data is guaranteed to be cleaned up. Callers (normally a
    ModuleStoreTestCase) call `enable` before using factories and `disable`
    once the store has been reset.
    """
    def __init__(self):
        super(XModuleFactoryLock, self).__init__()
        self._enabled = False
    def _set(self, value):
        """Record the new enabled/disabled state for this thread."""
        self._enabled = value
    def enable(self):
        """
        Allow XModuleFactories on this thread. Only do this in a context where
        the modulestore will be reset at the end of the test (such as inside
        ModuleStoreTestCase).
        """
        self._set(True)
    def disable(self):
        """
        Forbid XModuleFactories again, once the data created by the factories
        has been cleaned up.
        """
        self._set(False)
    def is_enabled(self):
        """
        Return whether XModuleFactories are currently enabled.
        """
        return bool(self._enabled)
XMODULE_FACTORY_LOCK = XModuleFactoryLock()
class XModuleFactory(Factory):
    """
    Factory for XModules
    """
    # We have to give a Factory a FACTORY_FOR.
    # However, the class that we create is actually determined by the category
    # specified in the factory
    FACTORY_FOR = Dummy
    @lazy_attribute
    def modulestore(self):
        # Guard against leaking modulestore writes: factories may only run
        # inside a context that resets the store afterwards.
        msg = "XMODULE_FACTORY_LOCK not enabled. Please use ModuleStoreTestCase as your test baseclass."
        assert XMODULE_FACTORY_LOCK.is_enabled(), msg
        # Imported locally so the Django-backed modulestore is only touched
        # when a factory is actually used.
        from xmodule.modulestore.django import modulestore
        return modulestore()
# Thread-local holder for the location of the most recently created course;
# ItemFactory falls back to it when no parent is supplied.
last_course = threading.local()
class CourseFactory(XModuleFactory):
    """
    Factory for XModule courses.
    """
    org = Sequence('org.{}'.format)
    number = Sequence('course_{}'.format)
    display_name = Sequence('Run {}'.format)
    # pylint: disable=unused-argument
    @classmethod
    def _create(cls, target_class, **kwargs):
        # All class attributes (from this class and base classes) are
        # passed in via **kwargs. However, some of those aren't actual field values,
        # so pop those off for use separately
        org = kwargs.pop('org', None)
        # because the factory provides a default 'number' arg, prefer the non-defaulted 'course' arg if any
        number = kwargs.pop('course', kwargs.pop('number', None))
        store = kwargs.pop('modulestore')
        name = kwargs.get('name', kwargs.get('run', Location.clean(kwargs.get('display_name'))))
        run = kwargs.pop('run', name)
        user_id = kwargs.pop('user_id', ModuleStoreEnum.UserID.test)
        # Pass the metadata just as field=value pairs
        kwargs.update(kwargs.pop('metadata', {}))
        default_store_override = kwargs.pop('default_store', None)
        with store.branch_setting(ModuleStoreEnum.Branch.draft_preferred):
            if default_store_override is not None:
                with store.default_store(default_store_override):
                    new_course = store.create_course(org, number, run, user_id, fields=kwargs)
            else:
                new_course = store.create_course(org, number, run, user_id, fields=kwargs)
            # Remember this course so ItemFactory can default its parent to it.
            last_course.loc = new_course.location
            return new_course
class LibraryFactory(XModuleFactory):
    """
    Factory for creating a content library
    """
    org = Sequence('org{}'.format)
    library = Sequence('lib{}'.format)
    display_name = Sequence('Test Library {}'.format)
    # pylint: disable=unused-argument
    @classmethod
    def _create(cls, target_class, **kwargs):
        """
        Create a library with a unique name and key.
        All class attributes (from this class and base classes) are automagically
        passed in via **kwargs.
        """
        # some of the kwargs are not actual field values, so pop those off for use separately:
        org = kwargs.pop('org')
        library = kwargs.pop('library')
        store = kwargs.pop('modulestore')
        user_id = kwargs.pop('user_id', ModuleStoreEnum.UserID.test)
        # Pass the metadata just as field=value pairs
        kwargs.update(kwargs.pop('metadata', {}))
        # Unlike courses, libraries default to the split modulestore.
        default_store_override = kwargs.pop('default_store', ModuleStoreEnum.Type.split)
        with store.default_store(default_store_override):
            new_library = store.create_library(org, library, user_id, fields=kwargs)
            return new_library
class ItemFactory(XModuleFactory):
    """
    Factory for XModule items.
    """
    category = 'chapter'
    parent = None
    @lazy_attribute_sequence
    def display_name(self, n):
        # e.g. "chapter 3" — unique per factory invocation.
        return "{} {}".format(self.category, n)
    @lazy_attribute
    def location(self):
        # Derive a usage key from the display name (or a random id when the
        # caller explicitly passed display_name=None).
        if self.display_name is None:
            dest_name = uuid4().hex
        else:
            dest_name = self.display_name.replace(" ", "_")
        new_location = self.parent_location.course_key.make_usage_key(
            self.category,
            dest_name
        )
        return new_location
    @lazy_attribute
    def parent_location(self):
        # Default to the most recently created course when the caller
        # supplied neither `parent` nor `parent_location`.
        default_location = getattr(last_course, 'loc', None)
        try:
            parent = self.parent
        # This error is raised if the caller hasn't provided either parent or parent_location
        # In this case, we'll just return the default parent_location
        except CyclicDefinitionError:
            return default_location
        if parent is None:
            return default_location
        return parent.location
    @classmethod
    def _create(cls, target_class, **kwargs):
        """
        Uses ``**kwargs``:
        :parent_location: (required): the location of the parent module
            (e.g. the parent course or section)
        :category: the category of the resulting item.
        :data: (optional): the data for the item
            (e.g. XML problem definition for a problem item)
        :display_name: (optional): the display name of the item
        :metadata: (optional): dictionary of metadata attributes
        :boilerplate: (optional) the boilerplate for overriding field values
        :publish_item: (optional) whether or not to publish the item (default is True)
        :target_class: is ignored
        """
        # All class attributes (from this class and base classes) are
        # passed in via **kwargs. However, some of those aren't actual field values,
        # so pop those off for use separately
        # catch any old style users before they get into trouble
        assert 'template' not in kwargs
        parent_location = kwargs.pop('parent_location', None)
        data = kwargs.pop('data', None)
        category = kwargs.pop('category', None)
        display_name = kwargs.pop('display_name', None)
        metadata = kwargs.pop('metadata', {})
        location = kwargs.pop('location')
        user_id = kwargs.pop('user_id', ModuleStoreEnum.UserID.test)
        publish_item = kwargs.pop('publish_item', True)
        assert isinstance(location, UsageKey)
        assert location != parent_location
        store = kwargs.pop('modulestore')
        # This code was based off that in cms/djangoapps/contentstore/views.py
        parent = kwargs.pop('parent', None) or store.get_item(parent_location)
        with store.branch_setting(ModuleStoreEnum.Branch.draft_preferred):
            # Optionally seed metadata/data from a named template ("boilerplate").
            if 'boilerplate' in kwargs:
                template_id = kwargs.pop('boilerplate')
                clz = XBlock.load_class(category, select=prefer_xmodules)
                template = clz.get_template(template_id)
                assert template is not None
                metadata.update(template.get('metadata', {}))
                if not isinstance(data, basestring):
                    data.update(template.get('data'))
            # replace the display name with an optional parameter passed in from the caller
            if display_name is not None:
                metadata['display_name'] = display_name
            module = store.create_child(
                user_id,
                parent.location,
                location.block_type,
                block_id=location.block_id,
                metadata=metadata,
                definition_data=data,
                runtime=parent.runtime,
                fields=kwargs,
            )
            # VS[compat] cdodge: This is a hack because static_tabs also have references from the course module, so
            # if we add one then we need to also add it to the policy information (i.e. metadata)
            # we should remove this once we can break this reference from the course to static tabs
            if category == 'static_tab':
                dog_stats_api.increment(
                    DEPRECATION_VSCOMPAT_EVENT,
                    tags=(
                        "location:itemfactory_create_static_tab",
                        u"block:{}".format(location.block_type),
                    )
                )
                course = store.get_course(location.course_key)
                course.tabs.append(
                    StaticTab(
                        name=display_name,
                        url_slug=location.name,
                    )
                )
                store.update_item(course, user_id)
            # parent and publish the item, so it can be accessed
            if 'detached' not in module._class_tags:
                parent.children.append(location)
                store.update_item(parent, user_id)
                if publish_item:
                    published_parent = store.publish(parent.location, user_id)
                    # module is last child of parent
                    return published_parent.get_children()[-1]
                else:
                    return store.get_item(location)
            elif publish_item:
                # Detached blocks are published directly (no parent update).
                return store.publish(location, user_id)
            else:
                return module
@contextmanager
def check_exact_number_of_calls(object_with_method, method_name, num_calls):
    """
    Context manager asserting that `method_name` on `object_with_method` is
    invoked exactly `num_calls` times while the block runs.
    """
    # An exact count is just a range check whose two bounds coincide.
    with check_number_of_calls(object_with_method, method_name,
                               maximum_calls=num_calls, minimum_calls=num_calls):
        yield
def check_number_of_calls(object_with_method, method_name, maximum_calls, minimum_calls=1):
    """
    Context manager verifying that the total number of calls made to
    `method_name` on `object_with_method` lies within the inclusive range
    [minimum_calls, maximum_calls].
    """
    # A single method is just the one-element case of the multi-method check.
    return check_sum_of_calls(
        object_with_method, [method_name], maximum_calls, minimum_calls
    )
@contextmanager
def check_sum_of_calls(object_, methods, maximum_calls, minimum_calls=1):
    """
    Instruments the given methods on the given object to verify that the total sum of calls made to the
    methods falls between minimum_calls and maximum_calls.
    """
    # Wrap each method in a counting Mock that still delegates to the original.
    mocks = {
        method: Mock(wraps=getattr(object_, method))
        for method in methods
    }
    with patch.multiple(object_, **mocks):
        yield
    # Counting happens only after the with-block exits without raising.
    call_count = sum(mock.call_count for mock in mocks.values())
    calls = pprint.pformat({
        method_name: mock.call_args_list
        for method_name, mock in mocks.items()
    })
    # Assertion errors don't handle multi-line values, so pretty-print to std-out instead
    if not minimum_calls <= call_count <= maximum_calls:
        print "Expected between {} and {} calls, {} were made. Calls: {}".format(
            minimum_calls,
            maximum_calls,
            call_count,
            calls,
        )
    # verify the counter actually worked by ensuring we have counted greater than (or equal to) the minimum calls
    assert_greater_equal(call_count, minimum_calls)
    # now verify the number of actual calls is less than (or equal to) the expected maximum
    assert_less_equal(call_count, maximum_calls)
def mongo_uses_error_check(store):
    """
    Does mongo use the error check as a separate message?

    True when the store (or, for a mixed modulestore, any of its
    sub-stores) reports a mongo wire protocol version of 1 or lower.
    """
    if hasattr(store, 'mongo_wire_version'):
        return store.mongo_wire_version() <= 1
    # A mixed store answers True if any of its underlying stores does;
    # stores with neither attribute are assumed not to use the error check.
    substores = getattr(store, 'modulestores', ())
    return any(mongo_uses_error_check(substore) for substore in substores)
@contextmanager
def check_mongo_calls(num_finds=0, num_sends=None):
    """
    Instruments the given store to count the number of calls to find (incl find_one) and the number
    of calls to send_message which is for insert, update, and remove (if you provide num_sends). At the
    end of the with statement, it compares the counts to the num_finds and num_sends.

    :param num_finds: the exact number of find calls expected
    :param num_sends: If none, don't instrument the send calls. If non-none, count and compare to
        the given int value.
    """
    # Find-type traffic flows through pymongo.message.query/get_more; the
    # count is pinned exactly by passing num_finds as both min and max.
    with check_sum_of_calls(
        pymongo.message,
        ['query', 'get_more'],
        num_finds,
        num_finds
    ):
        if num_sends is not None:
            # Write-type traffic is counted only when the caller opted in.
            with check_sum_of_calls(
                pymongo.message,
                # mongo < 2.6 uses insert, update, delete and _do_batched_insert. >= 2.6 _do_batched_write
                ['insert', 'update', 'delete', '_do_batched_write_command', '_do_batched_insert', ],
                num_sends,
                num_sends
            ):
                yield
        else:
            yield
# This dict represents the attribute keys for a course's 'about' info.
# Note: The 'video' attribute is intentionally excluded as it must be
# handled separately; its value maps to an alternate key name.
# Reference : cms/djangoapps/models/settings/course_details.py
ABOUT_ATTRIBUTES = {
    'effort': "Testing effort",  # sample value stored in the 'effort' about block
}
class CourseAboutFactory(XModuleFactory):
    """
    Factory for XModule course about.
    """

    @classmethod
    def _create(cls, target_class, **kwargs):  # pylint: disable=unused-argument
        """
        Populate a course's 'about' xblocks.

        Uses **kwargs:
        effort: effort information
        video : video link
        """
        user_id = kwargs.pop('user_id', None)
        course_id = kwargs.pop("course_id")
        course_runtime = kwargs.pop("course_runtime")
        store = modulestore()

        def _write_about_block(block_key, block_value):
            # Create a single 'about' xblock and persist its data.
            block = store.create_xblock(course_runtime, course_id, 'about', block_key)
            block.data = block_value
            store.update_item(block, user_id, allow_not_found=True)

        for about_key, about_value in ABOUT_ATTRIBUTES.items():
            _write_about_block(about_key, about_value)
        # The video attribute maps to an alternate key, so it is written
        # outside of ABOUT_ATTRIBUTES.
        _write_about_block('video', "www.youtube.com/embed/testing-video-link")
# -*- coding: utf-8 -*-
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
import uuid
import mock
from oslo_config import cfg
from oslo_db import exception as db_exception
from oslo_db import options
from oslo_log import log
from oslo_log import versionutils
from six.moves import range
import sqlalchemy
from sqlalchemy import exc
from testtools import matchers
from keystone.common import driver_hints
from keystone.common import sql
from keystone import exception
from keystone.identity.backends import sql as identity_sql
from keystone.tests import unit as tests
from keystone.tests.unit import default_fixtures
from keystone.tests.unit.ksfixtures import database
from keystone.tests.unit import test_backend
from keystone.token.persistence.backends import sql as token_sql
# Module-level handles used throughout the tests below: the global oslo
# config object and the configured default domain id.
CONF = cfg.CONF
DEFAULT_DOMAIN_ID = CONF.identity.default_domain_id
class SqlTests(tests.SQLDriverOverrides, tests.TestCase):
    """Base test case wiring up a SQL-backed keystone for the tests below."""

    def setUp(self):
        super(SqlTests, self).setUp()
        self.useFixture(database.Database())
        self.load_backends()
        # populate the engine with tables & fixtures
        self.load_fixtures(default_fixtures)
        # defaulted by the data load
        self.user_foo['enabled'] = True

    def config_files(self):
        # Extend the inherited config list with the SQL-specific overrides.
        config_files = super(SqlTests, self).config_files()
        config_files.append(tests.dirs.tests_conf('backend_sql.conf'))
        return config_files
class SqlModels(SqlTests):
    """Schema checks: each table must expose exactly the expected columns."""

    def select_table(self, name):
        # Reflect the table from the live engine (autoload=True) so the test
        # sees the actual database schema, not just the declarative model.
        table = sqlalchemy.Table(name,
                                 sql.ModelBase.metadata,
                                 autoload=True)
        s = sqlalchemy.select([table])
        return s

    def assertExpectedSchema(self, table, expected_schema):
        """Assert that a table's schema is what we expect.

        :param string table: the name of the table to inspect
        :param tuple expected_schema: a tuple of tuples containing the
            expected schema
        :raises AssertionError: when the database schema doesn't match the
            expected schema

        The expected_schema format is simply::

            (
                ('column name', sql type, qualifying detail),
                ...
            )

        The qualifying detail varies based on the type of the column::

          - sql.Boolean columns must indicate the column's default value or
            None if there is no default
          - Columns with a length, like sql.String, must indicate the
            column's length
          - All other column types should use None

        Example::

            cols = (('id', sql.String, 64),
                    ('enabled', sql.Boolean, True),
                    ('extra', sql.JsonBlob, None))
            self.assertExpectedSchema('table_name', cols)

        """
        table = self.select_table(table)
        actual_schema = []
        for column in table.c:
            if isinstance(column.type, sql.Boolean):
                default = None
                # The reflected column's default lives on its proxy.
                if column._proxies[0].default:
                    default = column._proxies[0].default.arg
                actual_schema.append((column.name, type(column.type), default))
            elif (hasattr(column.type, 'length') and
                    not isinstance(column.type, sql.Enum)):
                # NOTE(dstanek): Even though sql.Enum columns have a length
                # set we don't want to catch them here. Maybe in the future
                # we'll check to see that they contain a list of the correct
                # possible values.
                actual_schema.append((column.name,
                                      type(column.type),
                                      column.type.length))
            else:
                actual_schema.append((column.name, type(column.type), None))
        self.assertItemsEqual(expected_schema, actual_schema)

    def test_user_model(self):
        cols = (('id', sql.String, 64),
                ('name', sql.String, 255),
                ('password', sql.String, 128),
                ('domain_id', sql.String, 64),
                ('default_project_id', sql.String, 64),
                ('enabled', sql.Boolean, None),
                ('extra', sql.JsonBlob, None))
        self.assertExpectedSchema('user', cols)

    def test_group_model(self):
        cols = (('id', sql.String, 64),
                ('name', sql.String, 64),
                ('description', sql.Text, None),
                ('domain_id', sql.String, 64),
                ('extra', sql.JsonBlob, None))
        self.assertExpectedSchema('group', cols)

    def test_domain_model(self):
        cols = (('id', sql.String, 64),
                ('name', sql.String, 64),
                ('enabled', sql.Boolean, True),
                ('extra', sql.JsonBlob, None))
        self.assertExpectedSchema('domain', cols)

    def test_project_model(self):
        cols = (('id', sql.String, 64),
                ('name', sql.String, 64),
                ('description', sql.Text, None),
                ('domain_id', sql.String, 64),
                ('enabled', sql.Boolean, None),
                ('extra', sql.JsonBlob, None),
                ('parent_id', sql.String, 64))
        self.assertExpectedSchema('project', cols)

    def test_role_assignment_model(self):
        cols = (('type', sql.Enum, None),
                ('actor_id', sql.String, 64),
                ('target_id', sql.String, 64),
                ('role_id', sql.String, 64),
                ('inherited', sql.Boolean, False))
        self.assertExpectedSchema('assignment', cols)

    def test_user_group_membership(self):
        cols = (('group_id', sql.String, 64),
                ('user_id', sql.String, 64))
        self.assertExpectedSchema('user_group_membership', cols)
class SqlIdentity(SqlTests, test_backend.IdentityTests):
    """Identity (user/project) behaviors specific to the SQL driver."""

    def test_password_hashed(self):
        # The stored password must never equal the supplied plaintext.
        session = sql.get_session()
        user_ref = self.identity_api._get_user(session, self.user_foo['id'])
        self.assertNotEqual(user_ref['password'], self.user_foo['password'])

    def test_delete_user_with_project_association(self):
        # Deleting a user also removes its project associations.
        user = {'name': uuid.uuid4().hex,
                'domain_id': DEFAULT_DOMAIN_ID,
                'password': uuid.uuid4().hex}
        user = self.identity_api.create_user(user)
        self.assignment_api.add_user_to_project(self.tenant_bar['id'],
                                                user['id'])
        self.identity_api.delete_user(user['id'])
        self.assertRaises(exception.UserNotFound,
                          self.assignment_api.list_projects_for_user,
                          user['id'])

    def test_create_null_user_name(self):
        # A None name is rejected, and no user is created as a side effect.
        user = {'name': None,
                'domain_id': DEFAULT_DOMAIN_ID,
                'password': uuid.uuid4().hex}
        self.assertRaises(exception.ValidationError,
                          self.identity_api.create_user,
                          user)
        self.assertRaises(exception.UserNotFound,
                          self.identity_api.get_user_by_name,
                          user['name'],
                          DEFAULT_DOMAIN_ID)

    def test_create_user_case_sensitivity(self):
        # user name case sensitivity is down to the fact that it is marked as
        # an SQL UNIQUE column, which may not be valid for other backends, like
        # LDAP.

        # create a ref with a lowercase name
        ref = {
            'name': uuid.uuid4().hex.lower(),
            'domain_id': DEFAULT_DOMAIN_ID}
        ref = self.identity_api.create_user(ref)

        # assign a new ID with the same name, but this time in uppercase
        ref['name'] = ref['name'].upper()
        self.identity_api.create_user(ref)

    def test_create_project_case_sensitivity(self):
        # project name case sensitivity is down to the fact that it is marked
        # as an SQL UNIQUE column, which may not be valid for other backends,
        # like LDAP.

        # create a ref with a lowercase name
        ref = {
            'id': uuid.uuid4().hex,
            'name': uuid.uuid4().hex.lower(),
            'domain_id': DEFAULT_DOMAIN_ID}
        self.resource_api.create_project(ref['id'], ref)

        # assign a new ID with the same name, but this time in uppercase
        ref['id'] = uuid.uuid4().hex
        ref['name'] = ref['name'].upper()
        self.resource_api.create_project(ref['id'], ref)

    def test_create_null_project_name(self):
        # A None name is rejected, and no project is created as a side effect.
        tenant = {'id': uuid.uuid4().hex,
                  'name': None,
                  'domain_id': DEFAULT_DOMAIN_ID}
        self.assertRaises(exception.ValidationError,
                          self.resource_api.create_project,
                          tenant['id'],
                          tenant)
        self.assertRaises(exception.ProjectNotFound,
                          self.resource_api.get_project,
                          tenant['id'])
        self.assertRaises(exception.ProjectNotFound,
                          self.resource_api.get_project_by_name,
                          tenant['name'],
                          DEFAULT_DOMAIN_ID)

    def test_delete_project_with_user_association(self):
        # Deleting a project removes the user's association with it.
        user = {'name': 'fakeuser',
                'domain_id': DEFAULT_DOMAIN_ID,
                'password': 'passwd'}
        user = self.identity_api.create_user(user)
        self.assignment_api.add_user_to_project(self.tenant_bar['id'],
                                                user['id'])
        self.resource_api.delete_project(self.tenant_bar['id'])
        tenants = self.assignment_api.list_projects_for_user(user['id'])
        self.assertEqual([], tenants)

    def test_metadata_removed_on_delete_user(self):
        # A test to check that the internal representation
        # or roles is correctly updated when a user is deleted
        user = {'name': uuid.uuid4().hex,
                'domain_id': DEFAULT_DOMAIN_ID,
                'password': 'passwd'}
        user = self.identity_api.create_user(user)
        role = {'id': uuid.uuid4().hex,
                'name': uuid.uuid4().hex}
        self.role_api.create_role(role['id'], role)
        self.assignment_api.add_role_to_user_and_project(
            user['id'],
            self.tenant_bar['id'],
            role['id'])
        self.identity_api.delete_user(user['id'])

        # Now check whether the internal representation of roles
        # has been deleted
        self.assertRaises(exception.MetadataNotFound,
                          self.assignment_api._get_metadata,
                          user['id'],
                          self.tenant_bar['id'])

    def test_metadata_removed_on_delete_project(self):
        # A test to check that the internal representation
        # or roles is correctly updated when a project is deleted
        user = {'name': uuid.uuid4().hex,
                'domain_id': DEFAULT_DOMAIN_ID,
                'password': 'passwd'}
        user = self.identity_api.create_user(user)
        role = {'id': uuid.uuid4().hex,
                'name': uuid.uuid4().hex}
        self.role_api.create_role(role['id'], role)
        self.assignment_api.add_role_to_user_and_project(
            user['id'],
            self.tenant_bar['id'],
            role['id'])
        self.resource_api.delete_project(self.tenant_bar['id'])

        # Now check whether the internal representation of roles
        # has been deleted
        self.assertRaises(exception.MetadataNotFound,
                          self.assignment_api._get_metadata,
                          user['id'],
                          self.tenant_bar['id'])

    def test_update_project_returns_extra(self):
        """This tests for backwards-compatibility with an essex/folsom bug.

        Non-indexed attributes were returned in an 'extra' attribute, instead
        of on the entity itself; for consistency and backwards compatibility,
        those attributes should be included twice.

        This behavior is specific to the SQL driver.

        """
        tenant_id = uuid.uuid4().hex
        arbitrary_key = uuid.uuid4().hex
        arbitrary_value = uuid.uuid4().hex
        tenant = {
            'id': tenant_id,
            'name': uuid.uuid4().hex,
            'domain_id': DEFAULT_DOMAIN_ID,
            arbitrary_key: arbitrary_value}
        ref = self.resource_api.create_project(tenant_id, tenant)
        self.assertEqual(arbitrary_value, ref[arbitrary_key])
        self.assertIsNone(ref.get('extra'))

        tenant['name'] = uuid.uuid4().hex
        ref = self.resource_api.update_project(tenant_id, tenant)
        # After update, the arbitrary attribute appears both top-level and
        # inside 'extra' (the legacy location).
        self.assertEqual(arbitrary_value, ref[arbitrary_key])
        self.assertEqual(arbitrary_value, ref['extra'][arbitrary_key])

    def test_update_user_returns_extra(self):
        """This tests for backwards-compatibility with an essex/folsom bug.

        Non-indexed attributes were returned in an 'extra' attribute, instead
        of on the entity itself; for consistency and backwards compatibility,
        those attributes should be included twice.

        This behavior is specific to the SQL driver.

        """
        arbitrary_key = uuid.uuid4().hex
        arbitrary_value = uuid.uuid4().hex
        user = {
            'name': uuid.uuid4().hex,
            'domain_id': DEFAULT_DOMAIN_ID,
            'password': uuid.uuid4().hex,
            arbitrary_key: arbitrary_value}
        ref = self.identity_api.create_user(user)
        self.assertEqual(arbitrary_value, ref[arbitrary_key])
        # Passwords must never be returned on the ref.
        self.assertIsNone(ref.get('password'))
        self.assertIsNone(ref.get('extra'))

        user['name'] = uuid.uuid4().hex
        user['password'] = uuid.uuid4().hex
        ref = self.identity_api.update_user(ref['id'], user)
        self.assertIsNone(ref.get('password'))
        self.assertIsNone(ref['extra'].get('password'))
        self.assertEqual(arbitrary_value, ref[arbitrary_key])
        self.assertEqual(arbitrary_value, ref['extra'][arbitrary_key])

    def test_sql_user_to_dict_null_default_project_id(self):
        # A NULL default_project_id column must be omitted from to_dict().
        user = {
            'name': uuid.uuid4().hex,
            'domain_id': DEFAULT_DOMAIN_ID,
            'password': uuid.uuid4().hex}
        user = self.identity_api.create_user(user)
        session = sql.get_session()
        query = session.query(identity_sql.User)
        query = query.filter_by(id=user['id'])
        raw_user_ref = query.one()
        self.assertIsNone(raw_user_ref.default_project_id)
        user_ref = raw_user_ref.to_dict()
        self.assertNotIn('default_project_id', user_ref)
        session.close()

    def test_list_domains_for_user(self):
        domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
        self.resource_api.create_domain(domain['id'], domain)
        user = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
                'domain_id': domain['id'], 'enabled': True}

        test_domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
        self.resource_api.create_domain(test_domain1['id'], test_domain1)
        test_domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
        self.resource_api.create_domain(test_domain2['id'], test_domain2)

        user = self.identity_api.create_user(user)
        # No grants yet, so no domains should be listed.
        user_domains = self.assignment_api.list_domains_for_user(user['id'])
        self.assertEqual(0, len(user_domains))
        self.assignment_api.create_grant(user_id=user['id'],
                                         domain_id=test_domain1['id'],
                                         role_id=self.role_member['id'])
        self.assignment_api.create_grant(user_id=user['id'],
                                         domain_id=test_domain2['id'],
                                         role_id=self.role_member['id'])
        user_domains = self.assignment_api.list_domains_for_user(user['id'])
        self.assertThat(user_domains, matchers.HasLength(2))

    def test_list_domains_for_user_with_grants(self):
        # Create two groups each with a role on a different domain, and
        # make user1 a member of both groups. Both these new domains
        # should now be included, along with any direct user grants.
        domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
        self.resource_api.create_domain(domain['id'], domain)
        user = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
                'domain_id': domain['id'], 'enabled': True}
        user = self.identity_api.create_user(user)
        group1 = {'name': uuid.uuid4().hex, 'domain_id': domain['id']}
        group1 = self.identity_api.create_group(group1)
        group2 = {'name': uuid.uuid4().hex, 'domain_id': domain['id']}
        group2 = self.identity_api.create_group(group2)

        test_domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
        self.resource_api.create_domain(test_domain1['id'], test_domain1)
        test_domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
        self.resource_api.create_domain(test_domain2['id'], test_domain2)
        test_domain3 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
        self.resource_api.create_domain(test_domain3['id'], test_domain3)

        self.identity_api.add_user_to_group(user['id'], group1['id'])
        self.identity_api.add_user_to_group(user['id'], group2['id'])

        # Create 3 grants, one user grant, the other two as group grants
        self.assignment_api.create_grant(user_id=user['id'],
                                         domain_id=test_domain1['id'],
                                         role_id=self.role_member['id'])
        self.assignment_api.create_grant(group_id=group1['id'],
                                         domain_id=test_domain2['id'],
                                         role_id=self.role_admin['id'])
        self.assignment_api.create_grant(group_id=group2['id'],
                                         domain_id=test_domain3['id'],
                                         role_id=self.role_admin['id'])
        user_domains = self.assignment_api.list_domains_for_user(user['id'])
        self.assertThat(user_domains, matchers.HasLength(3))

    def test_list_domains_for_user_with_inherited_grants(self):
        """Test that inherited roles on the domain are excluded.

        Test Plan:

        - Create two domains, one user, group and role
        - Domain1 is given an inherited user role, Domain2 an inherited
          group role (for a group of which the user is a member)
        - When listing domains for user, neither domain should be returned

        """
        domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
        domain1 = self.resource_api.create_domain(domain1['id'], domain1)
        domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
        domain2 = self.resource_api.create_domain(domain2['id'], domain2)
        user = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
                'domain_id': domain1['id'], 'enabled': True}
        user = self.identity_api.create_user(user)
        group = {'name': uuid.uuid4().hex, 'domain_id': domain1['id']}
        group = self.identity_api.create_group(group)
        self.identity_api.add_user_to_group(user['id'], group['id'])
        role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
        self.role_api.create_role(role['id'], role)

        # Create a grant on each domain, one user grant, one group grant,
        # both inherited.
        self.assignment_api.create_grant(user_id=user['id'],
                                         domain_id=domain1['id'],
                                         role_id=role['id'],
                                         inherited_to_projects=True)
        self.assignment_api.create_grant(group_id=group['id'],
                                         domain_id=domain2['id'],
                                         role_id=role['id'],
                                         inherited_to_projects=True)

        user_domains = self.assignment_api.list_domains_for_user(user['id'])
        # No domains should be returned since both domains have only inherited
        # roles assignments.
        self.assertThat(user_domains, matchers.HasLength(0))
class SqlTrust(SqlTests, test_backend.TrustTests):
    """Run the shared trust backend tests against the SQL driver."""
    pass
class SqlToken(SqlTests, test_backend.TokenTests):
    """Token persistence behaviors specific to the SQL driver."""

    def test_token_revocation_list_uses_right_columns(self):
        # This query used to be heavy with too many columns. We want
        # to make sure it is only running with the minimum columns
        # necessary.
        expected_query_args = (token_sql.TokenModel.id,
                               token_sql.TokenModel.expires)

        with mock.patch.object(token_sql, 'sql') as mock_sql:
            tok = token_sql.Token()
            tok.list_revoked_tokens()

        mock_query = mock_sql.get_session().query
        mock_query.assert_called_with(*expected_query_args)

    def test_flush_expired_tokens_batch(self):
        # TODO(dstanek): This test should be rewritten to be less
        # brittle. The code will likely need to be changed first. I
        # just copied the spirit of the existing test when I rewrote
        # mox -> mock. These tests are brittle because they have the
        # call structure for SQLAlchemy encoded in them.

        # test sqlite dialect
        with mock.patch.object(token_sql, 'sql') as mock_sql:
            mock_sql.get_session().bind.dialect.name = 'sqlite'
            tok = token_sql.Token()
            tok.flush_expired_tokens()

        # sqlite deletes in one shot: no LIMIT, a single delete.
        filter_mock = mock_sql.get_session().query().filter()
        self.assertFalse(filter_mock.limit.called)
        self.assertTrue(filter_mock.delete.called_once)

    def test_flush_expired_tokens_batch_mysql(self):
        # test mysql dialect, we don't need to test IBM DB SA separately, since
        # other tests below test the differences between how they use the batch
        # strategy
        with mock.patch.object(token_sql, 'sql') as mock_sql:
            mock_sql.get_session().query().filter().delete.return_value = 0
            mock_sql.get_session().bind.dialect.name = 'mysql'
            tok = token_sql.Token()
            expiry_mock = mock.Mock()
            ITERS = [1, 2, 3]
            expiry_mock.return_value = iter(ITERS)
            token_sql._expiry_range_batched = expiry_mock
            tok.flush_expired_tokens()

            # The expiry strategy is only invoked once, the other calls are via
            # the yield return.
            self.assertEqual(1, expiry_mock.call_count)

            # One delete per batch yielded by the expiry strategy.
            mock_delete = mock_sql.get_session().query().filter().delete
            self.assertThat(mock_delete.call_args_list,
                            matchers.HasLength(len(ITERS)))

    def test_expiry_range_batched(self):
        upper_bound_mock = mock.Mock(side_effect=[1, "final value"])
        sess_mock = mock.Mock()
        query_mock = sess_mock.query().filter().order_by().offset().limit()
        # First batch returns a row; the second raises NotFound.
        query_mock.one.side_effect = [['test'], sql.NotFound()]
        for i, x in enumerate(token_sql._expiry_range_batched(sess_mock,
                                                              upper_bound_mock,
                                                              batch_size=50)):
            if i == 0:
                # The first time the batch iterator returns, it should return
                # the first result that comes back from the database.
                self.assertEqual(x, 'test')
            elif i == 1:
                # The second time, the database range function should return
                # nothing, so the batch iterator returns the result of the
                # upper_bound function
                self.assertEqual(x, "final value")
            else:
                self.fail("range batch function returned more than twice")

    def test_expiry_range_strategy_sqlite(self):
        # sqlite can delete everything at once.
        tok = token_sql.Token()
        sqlite_strategy = tok._expiry_range_strategy('sqlite')
        self.assertEqual(token_sql._expiry_range_all, sqlite_strategy)

    def test_expiry_range_strategy_ibm_db_sa(self):
        # DB2 deletes in batches of 100.
        tok = token_sql.Token()
        db2_strategy = tok._expiry_range_strategy('ibm_db_sa')
        self.assertIsInstance(db2_strategy, functools.partial)
        self.assertEqual(db2_strategy.func, token_sql._expiry_range_batched)
        self.assertEqual(db2_strategy.keywords, {'batch_size': 100})

    def test_expiry_range_strategy_mysql(self):
        # MySQL deletes in batches of 1000.
        tok = token_sql.Token()
        mysql_strategy = tok._expiry_range_strategy('mysql')
        self.assertIsInstance(mysql_strategy, functools.partial)
        self.assertEqual(mysql_strategy.func, token_sql._expiry_range_batched)
        self.assertEqual(mysql_strategy.keywords, {'batch_size': 1000})
class SqlCatalog(SqlTests, test_backend.CatalogTests):
    """Catalog (service/endpoint/region) behaviors specific to SQL."""

    _legacy_endpoint_id_in_endpoint = True
    _enabled_default_to_true_when_creating_endpoint = True

    def test_catalog_ignored_malformed_urls(self):
        """Endpoints whose URLs cannot be substituted are dropped."""
        service = {
            'id': uuid.uuid4().hex,
            'type': uuid.uuid4().hex,
            'name': uuid.uuid4().hex,
            'description': uuid.uuid4().hex,
        }
        self.catalog_api.create_service(service['id'], service.copy())

        # '$(tenant)s' is not a valid substitution key, so this URL cannot
        # be rendered for any project.
        malformed_url = "http://192.168.1.104:8774/v2/$(tenant)s"
        endpoint = {
            'id': uuid.uuid4().hex,
            'region_id': None,
            'service_id': service['id'],
            'interface': 'public',
            'url': malformed_url,
        }
        self.catalog_api.create_endpoint(endpoint['id'], endpoint.copy())

        # NOTE(dstanek): there are no valid URLs, so nothing is in the catalog
        catalog = self.catalog_api.get_catalog('fake-user', 'fake-tenant')
        self.assertEqual({}, catalog)

    def test_get_catalog_with_empty_public_url(self):
        """An empty public URL is still returned in the catalog."""
        service = {
            'id': uuid.uuid4().hex,
            'type': uuid.uuid4().hex,
            'name': uuid.uuid4().hex,
            'description': uuid.uuid4().hex,
        }
        self.catalog_api.create_service(service['id'], service.copy())

        endpoint = {
            'id': uuid.uuid4().hex,
            'region_id': None,
            'interface': 'public',
            'url': '',
            'service_id': service['id'],
        }
        self.catalog_api.create_endpoint(endpoint['id'], endpoint.copy())

        catalog = self.catalog_api.get_catalog('user', 'tenant')
        catalog_endpoint = catalog[endpoint['region_id']][service['type']]
        self.assertEqual(service['name'], catalog_endpoint['name'])
        self.assertEqual(endpoint['id'], catalog_endpoint['id'])
        self.assertEqual('', catalog_endpoint['publicURL'])
        # Only the public interface was created, so the other URLs are absent.
        self.assertIsNone(catalog_endpoint.get('adminURL'))
        self.assertIsNone(catalog_endpoint.get('internalURL'))

    def test_create_endpoint_region_404(self):
        """Creating an endpoint in a nonexistent region is rejected."""
        service = {
            'id': uuid.uuid4().hex,
            'type': uuid.uuid4().hex,
            'name': uuid.uuid4().hex,
            'description': uuid.uuid4().hex,
        }
        self.catalog_api.create_service(service['id'], service.copy())

        endpoint = {
            'id': uuid.uuid4().hex,
            'region_id': uuid.uuid4().hex,
            'service_id': service['id'],
            'interface': 'public',
            'url': uuid.uuid4().hex,
        }
        self.assertRaises(exception.ValidationError,
                          self.catalog_api.create_endpoint,
                          endpoint['id'],
                          endpoint.copy())

    def test_create_region_invalid_id(self):
        """A region id longer than the column width is rejected."""
        region = {
            'id': '0' * 256,
            'description': '',
            'extra': {},
        }
        self.assertRaises(exception.StringLengthExceeded,
                          self.catalog_api.create_region,
                          region.copy())

    def test_create_region_invalid_parent_id(self):
        """A parent_region_id referencing no region is rejected."""
        region = {
            'id': uuid.uuid4().hex,
            'parent_region_id': '0' * 256,
        }
        self.assertRaises(exception.RegionNotFound,
                          self.catalog_api.create_region,
                          region)

    def test_delete_region_with_endpoint(self):
        """Regions (parent or child) with endpoints cannot be deleted."""
        # create a region
        region = {
            'id': uuid.uuid4().hex,
            'description': uuid.uuid4().hex,
        }
        self.catalog_api.create_region(region)

        # create a child region
        child_region = {
            'id': uuid.uuid4().hex,
            'description': uuid.uuid4().hex,
            # NOTE: the region schema spells this key 'parent_region_id'
            # (see test_create_region_invalid_parent_id above).  The old
            # 'parent_id' key was silently stored as an extra attribute,
            # leaving this region a root instead of a child.
            'parent_region_id': region['id']
        }
        self.catalog_api.create_region(child_region)

        # create a service
        service = {
            'id': uuid.uuid4().hex,
            'type': uuid.uuid4().hex,
            'name': uuid.uuid4().hex,
            'description': uuid.uuid4().hex,
        }
        self.catalog_api.create_service(service['id'], service)

        # create an endpoint attached to the service and child region
        child_endpoint = {
            'id': uuid.uuid4().hex,
            'region_id': child_region['id'],
            'interface': uuid.uuid4().hex[:8],
            'url': uuid.uuid4().hex,
            'service_id': service['id'],
        }
        self.catalog_api.create_endpoint(child_endpoint['id'], child_endpoint)
        self.assertRaises(exception.RegionDeletionError,
                          self.catalog_api.delete_region,
                          child_region['id'])

        # create an endpoint attached to the service and parent region
        endpoint = {
            'id': uuid.uuid4().hex,
            'region_id': region['id'],
            'interface': uuid.uuid4().hex[:8],
            'url': uuid.uuid4().hex,
            'service_id': service['id'],
        }
        self.catalog_api.create_endpoint(endpoint['id'], endpoint)
        self.assertRaises(exception.RegionDeletionError,
                          self.catalog_api.delete_region,
                          region['id'])
class SqlPolicy(SqlTests, test_backend.PolicyTests):
    """Run the shared policy backend tests against the SQL driver."""
    pass
class SqlInheritance(SqlTests, test_backend.InheritanceTests):
    """Run the shared inherited-role tests against the SQL driver."""
    pass
class SqlTokenCacheInvalidation(SqlTests, test_backend.TokenCacheInvalidation):
    """Run the shared token-cache invalidation tests against SQL."""

    def setUp(self):
        super(SqlTokenCacheInvalidation, self).setUp()
        # The shared test mixin expects its fixtures to exist already.
        self._create_test_data()
class SqlFilterTests(SqlTests, test_backend.FilterTests):
    """List-filtering (driver_hints) behaviors specific to SQL."""

    def _get_user_name_field_size(self):
        # Used by the shared filter tests to size generated user names.
        return identity_sql.User.name.type.length

    def clean_up_entities(self):
        """Clean up entity test data from Filter Test Cases."""

        for entity in ['user', 'group', 'project']:
            self._delete_test_data(entity, self.entity_list[entity])
            self._delete_test_data(entity, self.domain1_entity_list[entity])
        del self.entity_list
        del self.domain1_entity_list

        # A domain must be disabled before it can be deleted.
        self.domain1['enabled'] = False
        self.resource_api.update_domain(self.domain1['id'], self.domain1)
        self.resource_api.delete_domain(self.domain1['id'])
        del self.domain1

    def test_list_entities_filtered_by_domain(self):
        # NOTE(henry-nash): This method is here rather than in test_backend
        # since any domain filtering with LDAP is handled by the manager
        # layer (and is already tested elsewhere) not at the driver level.
        self.addCleanup(self.clean_up_entities)
        self.domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
        self.resource_api.create_domain(self.domain1['id'], self.domain1)

        self.entity_list = {}
        self.domain1_entity_list = {}
        for entity in ['user', 'group', 'project']:
            # Create 5 entities, 3 of which are in domain1
            DOMAIN1_ENTITIES = 3
            self.entity_list[entity] = self._create_test_data(entity, 2)
            self.domain1_entity_list[entity] = self._create_test_data(
                entity, DOMAIN1_ENTITIES, self.domain1['id'])

            # Should get back the DOMAIN1_ENTITIES in domain1
            hints = driver_hints.Hints()
            hints.add_filter('domain_id', self.domain1['id'])
            entities = self._list_entities(entity)(hints=hints)
            self.assertEqual(DOMAIN1_ENTITIES, len(entities))
            self._match_with_list(entities, self.domain1_entity_list[entity])
            # Check the driver has removed the filter from the list hints
            self.assertFalse(hints.get_exact_filter_by_name('domain_id'))

    def test_filter_sql_injection_attack(self):
        """Test against sql injection attack on filters

        Test Plan:
        - Attempt to get all entities back by passing a two-term attribute
        - Attempt to piggyback filter to damage DB (e.g. drop table)

        """
        # Check we have some users
        users = self.identity_api.list_users()
        self.assertTrue(len(users) > 0)

        # A classic always-true injection must match nothing.
        hints = driver_hints.Hints()
        hints.add_filter('name', "anything' or 'x'='x")
        users = self.identity_api.list_users(hints=hints)
        self.assertEqual(0, len(users))

        # See if we can add a SQL command...use the group table instead of the
        # user table since 'user' is reserved word for SQLAlchemy.
        group = {'name': uuid.uuid4().hex, 'domain_id': DEFAULT_DOMAIN_ID}
        group = self.identity_api.create_group(group)

        hints = driver_hints.Hints()
        hints.add_filter('name', "x'; drop table group")
        groups = self.identity_api.list_groups(hints=hints)
        self.assertEqual(0, len(groups))

        # The table must still exist and be listable after the attempt.
        groups = self.identity_api.list_groups()
        self.assertTrue(len(groups) > 0)
class SqlLimitTests(SqlTests, test_backend.LimitTests):
    """Run the shared list-limit tests against the SQL driver."""

    def setUp(self):
        super(SqlLimitTests, self).setUp()
        # The mixin has its own fixture setup in addition to SqlTests'.
        test_backend.LimitTests.setUp(self)
class FakeTable(sql.ModelBase):
    """Minimal model used to exercise the @sql.handle_conflicts decorator."""
    __tablename__ = 'test_table'
    # Single 32-char primary key; long values trigger StringLengthExceeded.
    col = sql.Column(sql.String(32), primary_key=True)

    @sql.handle_conflicts('keystone')
    def insert(self):
        # Simulates a duplicate-key insert; the decorator should translate
        # this into exception.Conflict.
        raise db_exception.DBDuplicateEntry

    @sql.handle_conflicts('keystone')
    def update(self):
        # Simulates an integrity error; the decorator should translate
        # this into exception.UnexpectedError.
        raise db_exception.DBError(
            inner_exception=exc.IntegrityError('a', 'a', 'a'))

    @sql.handle_conflicts('keystone')
    def lookup(self):
        # Non-database errors must pass through the decorator unchanged.
        raise KeyError
class SqlDecorators(tests.TestCase):
    """Tests for model validation and the sql.handle_conflicts decorator."""

    def test_initialization_fail(self):
        # Values longer than the String(32) column must be rejected.
        self.assertRaises(exception.StringLengthExceeded,
                          FakeTable, col='a' * 64)

    def test_initialization(self):
        tt = FakeTable(col='a')
        self.assertEqual('a', tt.col)

    def test_non_ascii_init(self):
        # NOTE(I159): Non ASCII characters must cause UnicodeDecodeError
        # if encoding is not provided explicitly.
        self.assertRaises(UnicodeDecodeError, FakeTable, col='Я')

    def test_conflict_happend(self):
        # DB duplicate/integrity errors get translated by the decorator.
        self.assertRaises(exception.Conflict, FakeTable().insert)
        self.assertRaises(exception.UnexpectedError, FakeTable().update)

    def test_not_conflict_error(self):
        # Errors that aren't DB conflicts must pass through unchanged.
        self.assertRaises(KeyError, FakeTable().lookup)
class SqlModuleInitialization(tests.TestCase):
    """Tests for module-level initialization of keystone.common.sql."""

    @mock.patch.object(sql.core, 'CONF')
    @mock.patch.object(options, 'set_defaults')
    def test_initialize_module(self, set_defaults, CONF):
        # sql.initialize() must wire oslo.db defaults to a local sqlite file.
        sql.initialize()
        set_defaults.assert_called_with(CONF,
                                        connection='sqlite:///keystone.db')
class SqlCredential(SqlTests):
    """Credential listing/filtering tests against the SQL backend."""

    def _create_credential_with_user_id(self, user_id=None):
        """Create and persist a credential owned by ``user_id``.

        :param user_id: owning user id; when omitted a fresh random id is
            generated for each call.
        :returns: the credential ref that was stored
        """
        # NOTE: the previous default of ``user_id=uuid.uuid4().hex`` was
        # evaluated only once, at class-definition time, so every defaulted
        # call silently shared the same "random" owner.  Generate it per
        # call instead so defaulted credentials belong to distinct users.
        if user_id is None:
            user_id = uuid.uuid4().hex
        credential_id = uuid.uuid4().hex
        new_credential = {
            'id': credential_id,
            'user_id': user_id,
            'project_id': uuid.uuid4().hex,
            'blob': uuid.uuid4().hex,
            'type': uuid.uuid4().hex,
            'extra': uuid.uuid4().hex
        }
        self.credential_api.create_credential(credential_id, new_credential)
        return new_credential

    def _validateCredentialList(self, retrieved_credentials,
                                expected_credentials):
        """Assert both credential lists contain exactly the same ids."""
        self.assertEqual(len(retrieved_credentials), len(expected_credentials))
        retrieved_ids = [c['id'] for c in retrieved_credentials]
        for cred in expected_credentials:
            self.assertIn(cred['id'], retrieved_ids)

    def setUp(self):
        super(SqlCredential, self).setUp()
        self.credentials = []
        # Three credentials owned by unrelated (random) users...
        for _ in range(3):
            self.credentials.append(
                self._create_credential_with_user_id())
        # ...and three owned by the fixture user, used for filter tests.
        self.user_credentials = []
        for _ in range(3):
            cred = self._create_credential_with_user_id(self.user_foo['id'])
            self.user_credentials.append(cred)
            self.credentials.append(cred)

    def test_list_credentials(self):
        credentials = self.credential_api.list_credentials()
        self._validateCredentialList(credentials, self.credentials)
        # test filtering using hints
        hints = driver_hints.Hints()
        hints.add_filter('user_id', self.user_foo['id'])
        credentials = self.credential_api.list_credentials(hints)
        self._validateCredentialList(credentials, self.user_credentials)

    def test_list_credentials_for_user(self):
        credentials = self.credential_api.list_credentials_for_user(
            self.user_foo['id'])
        self._validateCredentialList(credentials, self.user_credentials)
class DeprecatedDecorators(SqlTests):
    """Verify that deprecated cross-manager methods emit deprecations."""
    def setUp(self):
        super(DeprecatedDecorators, self).setUp()
        # The only reason this is here is because report_deprecated_feature()
        # registers the fatal_deprecations option which these tests use.
        versionutils.report_deprecated_feature(
            log.getLogger(__name__), 'ignore this message')
    def test_assignment_to_role_api(self):
        """Test that calling one of the methods does call LOG.deprecated.
        This method is really generic to the type of backend, but we need
        one to execute the test, so the SQL backend is as good as any.
        """
        # Rather than try and check that a log message is issued, we
        # enable fatal_deprecations so that we can check for the
        # raising of the exception.
        # First try to create a role without enabling fatal deprecations,
        # which should work due to the cross manager deprecated calls.
        role_ref = {
            'id': uuid.uuid4().hex,
            'name': uuid.uuid4().hex}
        self.assignment_api.create_role(role_ref['id'], role_ref)
        self.role_api.get_role(role_ref['id'])
        # Now enable fatal exceptions - creating a role by calling the
        # old manager should now fail.
        self.config_fixture.config(fatal_deprecations=True)
        role_ref = {
            'id': uuid.uuid4().hex,
            'name': uuid.uuid4().hex}
        self.assertRaises(versionutils.DeprecatedConfig,
                          self.assignment_api.create_role,
                          role_ref['id'], role_ref)
    def test_assignment_to_resource_api(self):
        """Test that calling one of the methods does call LOG.deprecated.
        This method is really generic to the type of backend, but we need
        one to execute the test, so the SQL backend is as good as any.
        """
        # Same pattern as the role test above, but for projects.
        # Rather than try and check that a log message is issued, we
        # enable fatal_deprecations so that we can check for the
        # raising of the exception.
        # First try to create a project without enabling fatal deprecations,
        # which should work due to the cross manager deprecated calls.
        project_ref = {
            'id': uuid.uuid4().hex,
            'name': uuid.uuid4().hex,
            'domain_id': DEFAULT_DOMAIN_ID}
        self.resource_api.create_project(project_ref['id'], project_ref)
        self.resource_api.get_project(project_ref['id'])
        # Now enable fatal exceptions - creating a project by calling the
        # old manager should now fail.
        self.config_fixture.config(fatal_deprecations=True)
        project_ref = {
            'id': uuid.uuid4().hex,
            'name': uuid.uuid4().hex,
            'domain_id': DEFAULT_DOMAIN_ID}
        self.assertRaises(versionutils.DeprecatedConfig,
                          self.assignment_api.create_project,
                          project_ref['id'], project_ref)
| apache-2.0 |
leighpauls/k2cro4 | third_party/python_26/Lib/test/test_cmath.py | 55 | 18774 | from test.test_support import run_unittest
from test.test_math import parse_testfile, test_file
import unittest
import os, sys
import cmath, math
from cmath import phase, polar, rect, pi
INF = float('inf')
NAN = float('nan')
# All four sign combinations of (+/-0.0, +/-0.0).
complex_zeros = [complex(x, y) for x in [0.0, -0.0] for y in [0.0, -0.0]]
# Complex values with at least one infinite part, ordered
# counter-clockwise starting from the positive real axis.
complex_infinities = [complex(x, y) for x, y in [
    (INF, 0.0),  # 1st quadrant
    (INF, 2.3),
    (INF, INF),
    (2.3, INF),
    (0.0, INF),
    (-0.0, INF),  # 2nd quadrant
    (-2.3, INF),
    (-INF, INF),
    (-INF, 2.3),
    (-INF, 0.0),
    (-INF, -0.0),  # 3rd quadrant
    (-INF, -2.3),
    (-INF, -INF),
    (-2.3, -INF),
    (-0.0, -INF),
    (0.0, -INF),  # 4th quadrant
    (2.3, -INF),
    (INF, -INF),
    (INF, -2.3),
    (INF, -0.0)
]]
# Complex values with a NaN in at least one part; the non-NaN part
# cycles through -INF, -2.3, -0.0, 0.0, 2.3, INF.
complex_nans = [complex(x, y) for x, y in [
    (NAN, -INF),
    (NAN, -2.3),
    (NAN, -0.0),
    (NAN, 0.0),
    (NAN, 2.3),
    (NAN, INF),
    (-INF, NAN),
    (-2.3, NAN),
    (-0.0, NAN),
    (0.0, NAN),
    (2.3, NAN),
    (INF, NAN)
]]
def almostEqualF(a, b, rel_err=2e-15, abs_err = 5e-323):
    """Return True when floats a and b agree to within a small rounding error.

    The default tolerances suit platforms where a float is an IEEE 754
    double; they permit an error of between 9 and 19 ulps.  NaN matches
    only NaN, an infinity matches only the identical infinity, and two
    zeros must carry the same sign.
    """
    # Special values first: NaN pairs with NaN, infinities must be exact.
    if math.isnan(a):
        return math.isnan(b)
    if math.isinf(a):
        return a == b
    # Both zero: require matching signs.  Opposite-signed zeros could in
    # theory be legitimate, but in practice almost never are.
    if not a and not b:
        return math.copysign(1., a) == math.copysign(1., b)
    # abs(b - a) can overflow (and b may be infinite); treat either case
    # as "not close" -- legitimate near-max-float counterexamples are rare.
    try:
        diff = abs(b - a)
    except OverflowError:
        return False
    return diff <= max(abs_err, rel_err * abs(a))
class CMathTests(unittest.TestCase):
    # list of all functions in cmath
    test_functions = [getattr(cmath, fname) for fname in [
        'acos', 'acosh', 'asin', 'asinh', 'atan', 'atanh',
        'cos', 'cosh', 'exp', 'log', 'log10', 'sin', 'sinh',
        'sqrt', 'tan', 'tanh']]
    # test first and second arguments independently for 2-argument log
    test_functions.append(lambda x : cmath.log(x, 1729. + 0j))
    test_functions.append(lambda x : cmath.log(14.-27j, x))
    def setUp(self):
        # Reference data file shared with test.test_math; closed in tearDown.
        self.test_values = open(test_file)
    def tearDown(self):
        self.test_values.close()
def rAssertAlmostEqual(self, a, b, rel_err = 2e-15, abs_err = 5e-323):
"""Check that two floating-point numbers are almost equal."""
# special values testing
if math.isnan(a):
if math.isnan(b):
return
self.fail("%s should be nan" % repr(b))
if math.isinf(a):
if a == b:
return
self.fail("finite result where infinity excpected: "
"expected %s, got %s" % (repr(a), repr(b)))
if not a and not b:
if math.atan2(a, -1.) != math.atan2(b, -1.):
self.fail("zero has wrong sign: expected %s, got %s" %
(repr(a), repr(b)))
# test passes if either the absolute error or the relative
# error is sufficiently small. The defaults amount to an
# error of between 9 ulps and 19 ulps on an IEEE-754 compliant
# machine.
try:
absolute_error = abs(b-a)
except OverflowError:
pass
else:
if absolute_error <= max(abs_err, rel_err * abs(a)):
return
self.fail("%s and %s are not sufficiently close" % (repr(a), repr(b)))
def test_constants(self):
e_expected = 2.71828182845904523536
pi_expected = 3.14159265358979323846
self.rAssertAlmostEqual(cmath.pi, pi_expected, 9,
"cmath.pi is %s; should be %s" % (cmath.pi, pi_expected))
self.rAssertAlmostEqual(cmath.e, e_expected, 9,
"cmath.e is %s; should be %s" % (cmath.e, e_expected))
    def test_user_object(self):
        # Test automatic calling of __complex__ and __float__ by cmath
        # functions
        # some random values to use as test values; we avoid values
        # for which any of the functions in cmath is undefined
        # (i.e. 0., 1., -1., 1j, -1j) or would cause overflow
        cx_arg = 4.419414439 + 1.497100113j
        flt_arg = -6.131677725
        # a variety of non-complex numbers, used to check that
        # non-complex return values from __complex__ give an error
        non_complexes = ["not complex", 1, 5L, 2., None,
                         object(), NotImplemented]
        # Now we introduce a variety of classes whose instances might
        # end up being passed to the cmath functions
        # usual case: new-style class implementing __complex__
        class MyComplex(object):
            def __init__(self, value):
                self.value = value
            def __complex__(self):
                return self.value
        # old-style class implementing __complex__
        class MyComplexOS:
            def __init__(self, value):
                self.value = value
            def __complex__(self):
                return self.value
        # classes for which __complex__ raises an exception
        class SomeException(Exception):
            pass
        class MyComplexException(object):
            def __complex__(self):
                raise SomeException
        class MyComplexExceptionOS:
            def __complex__(self):
                raise SomeException
        # some classes not providing __float__ or __complex__
        class NeitherComplexNorFloat(object):
            pass
        class NeitherComplexNorFloatOS:
            pass
        class MyInt(object):
            def __int__(self): return 2
            def __long__(self): return 2L
            def __index__(self): return 2
        class MyIntOS:
            def __int__(self): return 2
            def __long__(self): return 2L
            def __index__(self): return 2
        # other possible combinations of __float__ and __complex__
        # that should work
        class FloatAndComplex(object):
            def __float__(self):
                return flt_arg
            def __complex__(self):
                return cx_arg
        class FloatAndComplexOS:
            def __float__(self):
                return flt_arg
            def __complex__(self):
                return cx_arg
        class JustFloat(object):
            def __float__(self):
                return flt_arg
        class JustFloatOS:
            def __float__(self):
                return flt_arg
        for f in self.test_functions:
            # usual usage
            self.assertEqual(f(MyComplex(cx_arg)), f(cx_arg))
            self.assertEqual(f(MyComplexOS(cx_arg)), f(cx_arg))
            # other combinations of __float__ and __complex__
            self.assertEqual(f(FloatAndComplex()), f(cx_arg))
            self.assertEqual(f(FloatAndComplexOS()), f(cx_arg))
            self.assertEqual(f(JustFloat()), f(flt_arg))
            self.assertEqual(f(JustFloatOS()), f(flt_arg))
            # TypeError should be raised for classes not providing
            # either __complex__ or __float__, even if they provide
            # __int__, __long__ or __index__. An old-style class
            # currently raises AttributeError instead of a TypeError;
            # this could be considered a bug.
            self.assertRaises(TypeError, f, NeitherComplexNorFloat())
            self.assertRaises(TypeError, f, MyInt())
            self.assertRaises(Exception, f, NeitherComplexNorFloatOS())
            self.assertRaises(Exception, f, MyIntOS())
            # non-complex return value from __complex__ -> TypeError
            for bad_complex in non_complexes:
                self.assertRaises(TypeError, f, MyComplex(bad_complex))
                self.assertRaises(TypeError, f, MyComplexOS(bad_complex))
            # exceptions in __complex__ should be propagated correctly
            self.assertRaises(SomeException, f, MyComplexException())
            self.assertRaises(SomeException, f, MyComplexExceptionOS())
    def test_input_type(self):
        # ints and longs should be acceptable inputs to all cmath
        # functions, by virtue of providing a __float__ method
        for f in self.test_functions:
            for arg in [2, 2L, 2.]:
                self.assertEqual(f(arg), f(arg.__float__()))
        # but strings should give a TypeError
        for f in self.test_functions:
            for arg in ["a", "long_string", "0", "1j", ""]:
                self.assertRaises(TypeError, f, arg)
    def test_cmath_matches_math(self):
        # check that corresponding cmath and math functions are equal
        # for floats in the appropriate range
        # test_values in (0, 1)
        test_values = [0.01, 0.1, 0.2, 0.5, 0.9, 0.99]
        # test_values for functions defined on [-1., 1.]
        unit_interval = test_values + [-x for x in test_values] + \
            [0., 1., -1.]
        # test_values for log, log10, sqrt
        positive = test_values + [1.] + [1./x for x in test_values]
        nonnegative = [0.] + positive
        # test_values for functions defined on the whole real line
        real_line = [0.] + positive + [-x for x in positive]
        # maps each function name to the domain sample it is tested on
        test_functions = {
            'acos' : unit_interval,
            'asin' : unit_interval,
            'atan' : real_line,
            'cos' : real_line,
            'cosh' : real_line,
            'exp' : real_line,
            'log' : positive,
            'log10' : positive,
            'sin' : real_line,
            'sinh' : real_line,
            'sqrt' : nonnegative,
            'tan' : real_line,
            'tanh' : real_line}
        for fn, values in test_functions.items():
            float_fn = getattr(math, fn)
            complex_fn = getattr(cmath, fn)
            for v in values:
                z = complex_fn(v)
                self.rAssertAlmostEqual(float_fn(v), z.real)
                self.assertEqual(0., z.imag)
        # test two-argument version of log with various bases
        for base in [0.5, 2., 10.]:
            for v in positive:
                z = cmath.log(v, base)
                self.rAssertAlmostEqual(math.log(v, base), z.real)
                self.assertEqual(0., z.imag)
    def test_specific_values(self):
        # the reference data file encodes IEEE-754 double behaviour;
        # skip on platforms with a different float format
        if not float.__getformat__("double").startswith("IEEE"):
            return
        def rect_complex(z):
            """Wrapped version of rect that accepts a complex number instead of
            two float arguments."""
            return cmath.rect(z.real, z.imag)
        def polar_complex(z):
            """Wrapped version of polar that returns a complex number instead of
            two floats."""
            return complex(*polar(z))
        for id, fn, ar, ai, er, ei, flags in parse_testfile(test_file):
            arg = complex(ar, ai)
            expected = complex(er, ei)
            if fn == 'rect':
                function = rect_complex
            elif fn == 'polar':
                function = polar_complex
            else:
                function = getattr(cmath, fn)
            if 'divide-by-zero' in flags or 'invalid' in flags:
                try:
                    actual = function(arg)
                except ValueError:
                    continue
                else:
                    test_str = "%s: %s(complex(%r, %r))" % (id, fn, ar, ai)
                    self.fail('ValueError not raised in test %s' % test_str)
            if 'overflow' in flags:
                try:
                    actual = function(arg)
                except OverflowError:
                    continue
                else:
                    test_str = "%s: %s(complex(%r, %r))" % (id, fn, ar, ai)
                    self.fail('OverflowError not raised in test %s' % test_str)
            actual = function(arg)
            if 'ignore-real-sign' in flags:
                actual = complex(abs(actual.real), actual.imag)
                expected = complex(abs(expected.real), expected.imag)
            if 'ignore-imag-sign' in flags:
                actual = complex(actual.real, abs(actual.imag))
                expected = complex(expected.real, abs(expected.imag))
            # for the real part of the log function, we allow an
            # absolute error of up to 2e-15.
            if fn in ('log', 'log10'):
                real_abs_err = 2e-15
            else:
                real_abs_err = 5e-323
            if not (almostEqualF(expected.real, actual.real,
                                 abs_err = real_abs_err) and
                    almostEqualF(expected.imag, actual.imag)):
                error_message = (
                    "%s: %s(complex(%r, %r))\n" % (id, fn, ar, ai) +
                    "Expected: complex(%r, %r)\n" %
                    (expected.real, expected.imag) +
                    "Received: complex(%r, %r)\n" %
                    (actual.real, actual.imag) +
                    "Received value insufficiently close to expected value.")
                self.fail(error_message)
    def assertCISEqual(self, a, b):
        # compare two (r, phi) pairs with a loose absolute tolerance
        eps = 1E-7
        if abs(a[0] - b[0]) > eps or abs(a[1] - b[1]) > eps:
            self.fail((a ,b))
    def test_polar(self):
        self.assertCISEqual(polar(0), (0., 0.))
        self.assertCISEqual(polar(1.), (1., 0.))
        self.assertCISEqual(polar(-1.), (1., pi))
        self.assertCISEqual(polar(1j), (1., pi/2))
        self.assertCISEqual(polar(-1j), (1., -pi/2))
    def test_phase(self):
        self.assertAlmostEqual(phase(0), 0.)
        self.assertAlmostEqual(phase(1.), 0.)
        self.assertAlmostEqual(phase(-1.), pi)
        self.assertAlmostEqual(phase(-1.+1E-300j), pi)
        self.assertAlmostEqual(phase(-1.-1E-300j), -pi)
        self.assertAlmostEqual(phase(1j), pi/2)
        self.assertAlmostEqual(phase(-1j), -pi/2)
        # zeros
        self.assertEqual(phase(complex(0.0, 0.0)), 0.0)
        self.assertEqual(phase(complex(0.0, -0.0)), -0.0)
        self.assertEqual(phase(complex(-0.0, 0.0)), pi)
        self.assertEqual(phase(complex(-0.0, -0.0)), -pi)
        # infinities
        self.assertAlmostEqual(phase(complex(-INF, -0.0)), -pi)
        self.assertAlmostEqual(phase(complex(-INF, -2.3)), -pi)
        self.assertAlmostEqual(phase(complex(-INF, -INF)), -0.75*pi)
        self.assertAlmostEqual(phase(complex(-2.3, -INF)), -pi/2)
        self.assertAlmostEqual(phase(complex(-0.0, -INF)), -pi/2)
        self.assertAlmostEqual(phase(complex(0.0, -INF)), -pi/2)
        self.assertAlmostEqual(phase(complex(2.3, -INF)), -pi/2)
        self.assertAlmostEqual(phase(complex(INF, -INF)), -pi/4)
        self.assertEqual(phase(complex(INF, -2.3)), -0.0)
        self.assertEqual(phase(complex(INF, -0.0)), -0.0)
        self.assertEqual(phase(complex(INF, 0.0)), 0.0)
        self.assertEqual(phase(complex(INF, 2.3)), 0.0)
        self.assertAlmostEqual(phase(complex(INF, INF)), pi/4)
        self.assertAlmostEqual(phase(complex(2.3, INF)), pi/2)
        self.assertAlmostEqual(phase(complex(0.0, INF)), pi/2)
        self.assertAlmostEqual(phase(complex(-0.0, INF)), pi/2)
        self.assertAlmostEqual(phase(complex(-2.3, INF)), pi/2)
        self.assertAlmostEqual(phase(complex(-INF, INF)), 0.75*pi)
        self.assertAlmostEqual(phase(complex(-INF, 2.3)), pi)
        self.assertAlmostEqual(phase(complex(-INF, 0.0)), pi)
        # real or imaginary part NaN
        for z in complex_nans:
            self.assert_(math.isnan(phase(z)))
    def test_abs(self):
        # zeros
        for z in complex_zeros:
            self.assertEqual(abs(z), 0.0)
        # infinities
        for z in complex_infinities:
            self.assertEqual(abs(z), INF)
        # real or imaginary part NaN
        # (an infinite part dominates a NaN in the other part)
        self.assertEqual(abs(complex(NAN, -INF)), INF)
        self.assert_(math.isnan(abs(complex(NAN, -2.3))))
        self.assert_(math.isnan(abs(complex(NAN, -0.0))))
        self.assert_(math.isnan(abs(complex(NAN, 0.0))))
        self.assert_(math.isnan(abs(complex(NAN, 2.3))))
        self.assertEqual(abs(complex(NAN, INF)), INF)
        self.assertEqual(abs(complex(-INF, NAN)), INF)
        self.assert_(math.isnan(abs(complex(-2.3, NAN))))
        self.assert_(math.isnan(abs(complex(-0.0, NAN))))
        self.assert_(math.isnan(abs(complex(0.0, NAN))))
        self.assert_(math.isnan(abs(complex(2.3, NAN))))
        self.assertEqual(abs(complex(INF, NAN)), INF)
        self.assert_(math.isnan(abs(complex(NAN, NAN))))
        # result overflows
        if float.__getformat__("double").startswith("IEEE"):
            self.assertRaises(OverflowError, abs, complex(1.4e308, 1.4e308))
    def assertCEqual(self, a, b):
        # compare a complex against an (re, im) pair with a loose tolerance
        eps = 1E-7
        if abs(a.real - b[0]) > eps or abs(a.imag - b[1]) > eps:
            self.fail((a ,b))
    def test_rect(self):
        self.assertCEqual(rect(0, 0), (0, 0))
        self.assertCEqual(rect(1, 0), (1., 0))
        self.assertCEqual(rect(1, -pi), (-1., 0))
        self.assertCEqual(rect(1, pi/2), (0, 1.))
        self.assertCEqual(rect(1, -pi/2), (0, -1.))
    def test_isnan(self):
        self.failIf(cmath.isnan(1))
        self.failIf(cmath.isnan(1j))
        self.failIf(cmath.isnan(INF))
        # a NaN in either part makes the whole value NaN
        self.assert_(cmath.isnan(NAN))
        self.assert_(cmath.isnan(complex(NAN, 0)))
        self.assert_(cmath.isnan(complex(0, NAN)))
        self.assert_(cmath.isnan(complex(NAN, NAN)))
        self.assert_(cmath.isnan(complex(NAN, INF)))
        self.assert_(cmath.isnan(complex(INF, NAN)))
    def test_isinf(self):
        self.failIf(cmath.isinf(1))
        self.failIf(cmath.isinf(1j))
        self.failIf(cmath.isinf(NAN))
        # an infinity in either part makes the whole value infinite
        self.assert_(cmath.isinf(INF))
        self.assert_(cmath.isinf(complex(INF, 0)))
        self.assert_(cmath.isinf(complex(0, INF)))
        self.assert_(cmath.isinf(complex(INF, INF)))
        self.assert_(cmath.isinf(complex(NAN, INF)))
        self.assert_(cmath.isinf(complex(INF, NAN)))
def test_main():
    # regrtest entry point
    run_unittest(CMathTests)
if __name__ == "__main__":
    test_main()
| bsd-3-clause |
raboof/supybot | plugins/Nickometer/plugin.py | 9 | 9274 | ###
# Copyright (c) 2004, William Robinson.
# Derived from work (c) 1998, Adam Spiers <adam.spiers@new.ox.ac.uk>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
###
# This algorithm is almost a direct from a the perl nickometer from
# blootbot. Hardly any of the original code has been used, though most of
# the comments, I copy-pasted. As a matter of courtesy, the original copyright
# message follows:
#
# #
# # Lame-o-Nickometer backend
# #
# # (c) 1998 Adam Spiers <adam.spiers@new.ox.ac.uk>
# #
# # You may do whatever you want with this code, but give me credit.
# #
# # $Id: Nickometer.py,v 1.13 2004/10/22 22:19:30 jamessan Exp $
# #
###
import supybot
import re
import math
import string
import supybot.callbacks as callbacks
from supybot.commands import wrap, additional
def slowExponent(x):
    """Damp an exponent: near-linear (1.3*x) for small x, flattening as
    atan(x/6) approaches pi/2 so huge exponents grow sub-linearly."""
    damping = 1 - math.atan(x / 6.0) * 2 / math.pi
    return 1.3 * x * damping
def slowPow(x, y):
    """Return x raised to the damped exponent slowExponent(y)."""
    return math.pow(x, slowExponent(y))
def caseShifts(s):
    """Count transitions between runs of upper- and lower-case letters.

    Non-letters are dropped first; each maximal run of caps or of
    lowercase collapses to a single symbol, so the result is
    (number of case runs) - 1.
    """
    letters_only = re.sub('[^a-zA-Z]', '', s)
    collapsed = re.sub('[a-z]+', 'l', re.sub('[A-Z]+', 'U', letters_only))
    return len(collapsed) - 1
def numberShifts(s):
    """Count transitions between runs of letters and runs of digits.

    Non-alphanumerics are dropped first; each maximal run of letters or
    of digits collapses to a single symbol, so the result is
    (number of runs) - 1.
    """
    alnum_only = re.sub('[^a-zA-Z0-9]', '', s)
    collapsed = re.sub('[0-9]+', 'n', re.sub('[a-zA-Z]+', 'l', alnum_only))
    return len(collapsed) - 1
class Nickometer(callbacks.Plugin):

    def punish(self, damage, reason):
        """Log why a lameness penalty was awarded and return it unchanged."""
        self.log.debug('%s lameness points awarded: %s', damage, reason)
        return damage

    def nickometer(self, irc, msg, args, nick):
        """[<nick>]

        Tells you how lame said nick is. If <nick> is not given, uses the
        nick of the person giving the command.
        """
        score = 0
        if not nick:
            nick = msg.nick
        originalNick = nick
        if not nick:
            irc.error('Give me a nick to judge as the argument, please.')
            return
        # (pattern, penalty) pairs.  Patterns beginning with a backslash
        # are matched against the raw nick; the rest are matched against a
        # de-leeted copy (digits translated back to letters below).
        # BUG FIX: the last two patterns were '\\[rkx]0' / '\\0[rkx]',
        # which the regex engine read as an escaped '[' and an octal \0
        # respectively; raw strings restore the intended
        # literal-backslash-then-character-class match (e.g. "\k0").
        specialCost = [('69', 500),
                       ('dea?th', 500),
                       ('dark', 400),
                       ('n[i1]ght', 300),
                       ('n[i1]te', 500),
                       ('fuck', 500),
                       ('sh[i1]t', 500),
                       ('coo[l1]', 500),
                       ('kew[l1]', 500),
                       ('lame', 500),
                       ('dood', 500),
                       ('dude', 500),
                       ('[l1](oo?|u)[sz]er', 500),
                       ('[l1]eet', 500),
                       ('e[l1]ite', 500),
                       ('[l1]ord', 500),
                       ('pron', 1000),
                       ('warez', 1000),
                       ('xx', 100),
                       (r'\\[rkx]0', 1000),
                       (r'\\0[rkx]', 1000)]
        # Translate common leet-speak digits/symbols back to letters.
        letterNumberTranslator = string.maketrans('023457+8', 'ozeasttb')
        for special in specialCost:
            tempNick = nick
            if special[0][0] != '\\':
                tempNick = tempNick.translate(letterNumberTranslator)
            if tempNick and re.search(special[0], tempNick, re.IGNORECASE):
                score += self.punish(special[1], 'matched special case /%s/' %
                                     special[0])
        # I don't really know about either of these next two statements,
        # but they don't seem to do much harm.
        # Allow Perl referencing.
        # BUG FIX: the replacement strings here and in the bracket-removal
        # loop below must be raw -- a plain '\1' is the control character
        # chr(1), not a backreference to group 1.
        nick = re.sub(r'^\\([A-Za-z])', r'\1', nick)
        # C-- ain't so bad either
        nick = re.sub('^C--$', 'C', nick)
        # Punish consecutive non-alphas
        for match in re.findall(r'[^\w\d]{2,}', nick):
            score += self.punish(slowPow(10, len(match)),
                                 '%s consecutive non-alphas ' % len(match))
        # Remove balanced brackets (keeping their contents) ...
        while True:
            nickInitial = nick
            nick = re.sub(r'^([^()]*)(\()(.*)(\))([^()]*)$', r'\1\3\5', nick, 1)
            nick = re.sub(r'^([^{}]*)(\{)(.*)(\})([^{}]*)$', r'\1\3\5', nick, 1)
            nick = re.sub(r'^([^[\]]*)(\[)(.*)(\])([^[\]]*)$', r'\1\3\5', nick, 1)
            if nick == nickInitial:
                break
            self.log.debug('Removed some matching brackets %r => %r',
                           nickInitial, nick)
        # ... and punish for unmatched brackets
        unmatched = re.findall('[][(){}]', nick)
        if len(unmatched) > 0:
            score += self.punish(slowPow(10, len(unmatched)),
                                 '%s unmatched parentheses' % len(unmatched))
        # Punish k3wlt0k: each digit 0-9 has its own per-occurrence weight.
        k3wlt0k_weights = (5, 5, 2, 5, 2, 3, 1, 2, 2, 2)
        for i, weight in enumerate(k3wlt0k_weights):
            hits = re.findall(str(i), nick)
            if hits:
                score += self.punish(weight * len(hits) * 30,
                                     '%s occurrences of %s ' % (len(hits), i))
        # An alpha caps is not lame in middle or at end, provided the first
        # alpha is caps: lowercase one interior/trailing capital ...
        nickOriginalCase = nick
        match = re.search('^([^A-Za-z]*[A-Z].*[a-z].*?)[-_]?([A-Z])', nick)
        if match:
            nick = ''.join([nick[:match.start(2)],
                            nick[match.start(2)].lower(),
                            nick[match.start(2)+1:]])
        # ... and forgive a leading capital followed by lowercase.
        match = re.search('^([^A-Za-z]*)([A-Z])([a-z])', nick)
        if match:
            nick = ''.join([nick[:match.start(2)],
                            nick[match.start(2):match.end(2)].lower(),
                            nick[match.end(2):]])
        # Punish uppercase to lowercase shifts and vice-versa, modulo
        # exceptions above
        # the commented line is the equivalent of the original, but i think
        # they intended my version, otherwise, the first caps alpha will
        # still be punished
        #cshifts = caseShifts(nickOriginalCase)
        cshifts = caseShifts(nick)
        if cshifts > 1 and re.match('.*[A-Z].*', nick):
            score += self.punish(slowPow(9, cshifts),
                                 '%s case shifts' % cshifts)
        # Punish lame endings
        if re.match('.*[XZ][^a-zA-Z]*$', nickOriginalCase):
            score += self.punish(50, 'the last alphanumeric character was lame')
        # Punish letter to numeric shifts and vice-versa
        nshifts = numberShifts(nick)
        if nshifts > 1:
            score += self.punish(slowPow(9, nshifts),
                                 '%s letter/number shifts' % nshifts)
        # Punish extraneous caps
        caps = re.findall('[A-Z]', nick)
        if caps:
            score += self.punish(slowPow(7, len(caps)),
                                 '%s extraneous caps' % len(caps))
        # one trailing underscore is ok. i also added a - for parasite-
        nick = re.sub('[-_]$', '', nick)
        # Punish anything that's left
        remains = re.findall('[^a-zA-Z0-9]', nick)
        if remains:
            score += self.punish(50*len(remains) + slowPow(9, len(remains)),
                                 '%s extraneous symbols' % len(remains))
        # Use an appropriate function to map [0, +inf) to [0, 100)
        percentage = 100 * (1 + math.tanh((score - 400.0) / 400.0)) * \
                     (1 - 1 / (1 + score / 5.0)) / 2
        # if it's above 99.9%, show as many digits as is interesting
        score_string = re.sub(r'(99\.9*\d|\.\d).*', r'\1', repr(percentage))
        irc.reply('The "lame nick-o-meter" reading for "%s" is %s%%.' %
                  (originalNick, score_string))
        self.log.debug('Calculated lameness score for %s as %s '
                       '(raw score was %s)', originalNick, score_string, score)
    nickometer = wrap(nickometer, [additional('text')])


Class = Nickometer
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
| bsd-3-clause |
republic-analytics/luigi | test/contrib/hive_test.py | 5 | 15200 | # -*- coding: utf-8 -*-
#
# Copyright 2012-2015 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import os
import tempfile
from helpers import unittest
from luigi import six
import luigi.contrib.hive
import mock
from luigi import LocalTarget
class HiveTest(unittest.TestCase):
    """Tests for run_hive_cmd / run_hive_script with run_hive stubbed out."""
    count = 0
    def mock_hive_cmd(self, args, check_return=True):
        # Record the args and hand back a distinct canned response per call
        # ("statement1", "statement2", ...).
        self.last_hive_cmd = args
        self.count += 1
        return "statement{0}".format(self.count)
    def setUp(self):
        # Monkey-patch luigi.contrib.hive.run_hive for the test's duration;
        # restored in tearDown.
        self.run_hive_cmd_saved = luigi.contrib.hive.run_hive
        luigi.contrib.hive.run_hive = self.mock_hive_cmd
    def tearDown(self):
        luigi.contrib.hive.run_hive = self.run_hive_cmd_saved
    def test_run_hive_command(self):
        pre_count = self.count
        res = luigi.contrib.hive.run_hive_cmd("foo")
        self.assertEqual(["-e", "foo"], self.last_hive_cmd)
        self.assertEqual("statement{0}".format(pre_count + 1), res)
    def test_run_hive_script_not_exists(self):
        def test():
            luigi.contrib.hive.run_hive_script("/tmp/some-non-existant-file______")
        self.assertRaises(RuntimeError, test)
    def test_run_hive_script_exists(self):
        with tempfile.NamedTemporaryFile(delete=True) as f:
            pre_count = self.count
            res = luigi.contrib.hive.run_hive_script(f.name)
            self.assertEqual(["-f", f.name], self.last_hive_cmd)
            self.assertEqual("statement{0}".format(pre_count + 1), res)
    def test_create_parent_dirs(self):
        # NOTE(review): creates a real directory under /tmp and does not
        # clean it up afterwards.
        dirname = "/tmp/hive_task_test_dir"
        class FooHiveTask(object):
            def output(self):
                return LocalTarget(os.path.join(dirname, "foo"))
        runner = luigi.contrib.hive.HiveQueryRunner()
        runner.prepare_outputs(FooHiveTask())
        self.assertTrue(os.path.exists(dirname))
class HiveCommandClientTest(unittest.TestCase):
"""Note that some of these tests are really for the CDH releases of Hive, to which I do not currently have access.
Hopefully there are no significant differences in the expected output"""
    def setUp(self):
        # One instance of each client flavour under test.
        self.client = luigi.contrib.hive.HiveCommandClient()
        self.apacheclient = luigi.contrib.hive.ApacheHiveCommandClient()
        self.metastoreclient = luigi.contrib.hive.MetastoreClient()
    @mock.patch("luigi.contrib.hive.run_hive_cmd")
    def test_default_table_location(self, run_command):
        # Canned "describe formatted"-style output; only the Location
        # line should be extracted.
        run_command.return_value = "Protect Mode: None \n" \
                                   "Retention: 0 \n" \
                                   "Location: hdfs://localhost:9000/user/hive/warehouse/mytable \n" \
                                   "Table Type: MANAGED_TABLE \n"
        returned = self.client.table_location("mytable")
        self.assertEqual('hdfs://localhost:9000/user/hive/warehouse/mytable', returned)
    @mock.patch("luigi.contrib.hive.run_hive_cmd")
    def test_table_exists(self, run_command):
        # A bare "OK" header with no table name listed means absent.
        run_command.return_value = "OK"
        returned = self.client.table_exists("mytable")
        self.assertFalse(returned)
        run_command.return_value = "OK\n" \
                                   "mytable"
        returned = self.client.table_exists("mytable")
        self.assertTrue(returned)
        # Issue #896 test case insensitivity
        returned = self.client.table_exists("MyTable")
        self.assertTrue(returned)
        # Partition listing output; partition_spec is stubbed out so only
        # the non-empty/empty distinction matters here.
        run_command.return_value = "day=2013-06-28/hour=3\n" \
                                   "day=2013-06-28/hour=4\n" \
                                   "day=2013-07-07/hour=2\n"
        self.client.partition_spec = mock.Mock(name="partition_spec")
        self.client.partition_spec.return_value = "somepart"
        returned = self.client.table_exists("mytable", partition={'a': 'b'})
        self.assertTrue(returned)
        run_command.return_value = ""
        returned = self.client.table_exists("mytable", partition={'a': 'b'})
        self.assertFalse(returned)
    @mock.patch("luigi.contrib.hive.run_hive_cmd")
    def test_table_schema(self, run_command):
        # A SemanticException means the table is missing -> falsy result.
        run_command.return_value = "FAILED: SemanticException [Error 10001]: blah does not exist\nSome other stuff"
        returned = self.client.table_schema("mytable")
        self.assertFalse(returned)
        # Otherwise each output line is split into a tuple of its fields.
        run_command.return_value = "OK\n" \
                                   "col1 string None \n" \
                                   "col2 string None \n" \
                                   "col3 string None \n" \
                                   "day string None \n" \
                                   "hour smallint None \n\n" \
                                   "# Partition Information \n" \
                                   "# col_name data_type comment \n\n" \
                                   "day string None \n" \
                                   "hour smallint None \n" \
                                   "Time taken: 2.08 seconds, Fetched: 34 row(s)\n"
        expected = [('OK',),
                    ('col1', 'string', 'None'),
                    ('col2', 'string', 'None'),
                    ('col3', 'string', 'None'),
                    ('day', 'string', 'None'),
                    ('hour', 'smallint', 'None'),
                    ('',),
                    ('# Partition Information',),
                    ('# col_name', 'data_type', 'comment'),
                    ('',),
                    ('day', 'string', 'None'),
                    ('hour', 'smallint', 'None'),
                    ('Time taken: 2.08 seconds, Fetched: 34 row(s)',)]
        returned = self.client.table_schema("mytable")
        self.assertEqual(expected, returned)
    def test_partition_spec(self):
        # Keys/values are backtick-quoted and joined with commas.
        returned = self.client.partition_spec({'a': 'b', 'c': 'd'})
        self.assertEqual("`a`='b',`c`='d'", returned)
    @mock.patch("luigi.contrib.hive.run_hive_cmd")
    def test_apacheclient_table_exists(self, run_command):
        # Mirrors test_table_exists, but against the Apache client flavour.
        run_command.return_value = "OK"
        returned = self.apacheclient.table_exists("mytable")
        self.assertFalse(returned)
        run_command.return_value = "OK\n" \
                                   "mytable"
        returned = self.apacheclient.table_exists("mytable")
        self.assertTrue(returned)
        # Issue #896 test case insensitivity
        returned = self.apacheclient.table_exists("MyTable")
        self.assertTrue(returned)
        run_command.return_value = "day=2013-06-28/hour=3\n" \
                                   "day=2013-06-28/hour=4\n" \
                                   "day=2013-07-07/hour=2\n"
        self.apacheclient.partition_spec = mock.Mock(name="partition_spec")
        self.apacheclient.partition_spec.return_value = "somepart"
        returned = self.apacheclient.table_exists("mytable", partition={'a': 'b'})
        self.assertTrue(returned)
        run_command.return_value = ""
        returned = self.apacheclient.table_exists("mytable", partition={'a': 'b'})
        self.assertFalse(returned)
    @mock.patch("luigi.contrib.hive.run_hive_cmd")
    def test_apacheclient_table_schema(self, run_command):
        # Mirrors test_table_schema; note the Apache flavour reports a
        # missing table as "Table not found" rather than "does not exist".
        run_command.return_value = "FAILED: SemanticException [Error 10001]: Table not found mytable\nSome other stuff"
        returned = self.apacheclient.table_schema("mytable")
        self.assertFalse(returned)
        run_command.return_value = "OK\n" \
                                   "col1 string None \n" \
                                   "col2 string None \n" \
                                   "col3 string None \n" \
                                   "day string None \n" \
                                   "hour smallint None \n\n" \
                                   "# Partition Information \n" \
                                   "# col_name data_type comment \n\n" \
                                   "day string None \n" \
                                   "hour smallint None \n" \
                                   "Time taken: 2.08 seconds, Fetched: 34 row(s)\n"
        expected = [('OK',),
                    ('col1', 'string', 'None'),
                    ('col2', 'string', 'None'),
                    ('col3', 'string', 'None'),
                    ('day', 'string', 'None'),
                    ('hour', 'smallint', 'None'),
                    ('',),
                    ('# Partition Information',),
                    ('# col_name', 'data_type', 'comment'),
                    ('',),
                    ('day', 'string', 'None'),
                    ('hour', 'smallint', 'None'),
                    ('Time taken: 2.08 seconds, Fetched: 34 row(s)',)]
        returned = self.apacheclient.table_schema("mytable")
        self.assertEqual(expected, returned)
    @mock.patch("luigi.contrib.hive.HiveThriftContext")
    def test_metastoreclient_partition_existence_regardless_of_order(self, thrift_context):
        # Stub the thrift context manager so entering it yields our mock
        # client; the server reports partitions as "p1=a/p2=b" strings.
        thrift_context.return_value = thrift_context
        client_mock = mock.Mock(name="clientmock")
        client_mock.return_value = client_mock
        thrift_context.__enter__ = client_mock
        client_mock.get_partition_names = mock.Mock(return_value=["p1=x/p2=y", "p1=a/p2=b"])
        # The same partition must be found whichever order the caller lists
        # the spec keys in.
        partition_spec = OrderedDict([("p1", "a"), ("p2", "b")])
        self.assertTrue(self.metastoreclient.table_exists("table", "default", partition_spec))
        partition_spec = OrderedDict([("p2", "b"), ("p1", "a")])
        self.assertTrue(self.metastoreclient.table_exists("table", "default", partition_spec))
    def test_metastore_partition_spec_has_the_same_order(self):
        # partition_spec must produce the same normalised "k=v/k=v" string
        # regardless of the insertion order of the OrderedDict keys.
        partition_spec = OrderedDict([("p1", "a"), ("p2", "b")])
        spec_string = luigi.contrib.hive.MetastoreClient().partition_spec(partition_spec)
        self.assertEqual(spec_string, "p1=a/p2=b")
        partition_spec = OrderedDict([("p2", "b"), ("p1", "a")])
        spec_string = luigi.contrib.hive.MetastoreClient().partition_spec(partition_spec)
        self.assertEqual(spec_string, "p1=a/p2=b")
    @mock.patch("luigi.configuration")
    def test_client_def(self, hive_syntax):
        # get_default_client picks the client class from the configured hive
        # release name; cdh3/cdh4 share the plain HiveCommandClient.
        hive_syntax.get_config.return_value.get.return_value = "cdh4"
        client = luigi.contrib.hive.get_default_client()
        self.assertEqual(luigi.contrib.hive.HiveCommandClient, type(client))
        hive_syntax.get_config.return_value.get.return_value = "cdh3"
        client = luigi.contrib.hive.get_default_client()
        self.assertEqual(luigi.contrib.hive.HiveCommandClient, type(client))
        hive_syntax.get_config.return_value.get.return_value = "apache"
        client = luigi.contrib.hive.get_default_client()
        self.assertEqual(luigi.contrib.hive.ApacheHiveCommandClient, type(client))
        hive_syntax.get_config.return_value.get.return_value = "metastore"
        client = luigi.contrib.hive.get_default_client()
        self.assertEqual(luigi.contrib.hive.MetastoreClient, type(client))
    @mock.patch('subprocess.Popen')
    def test_run_hive_command(self, popen):
        # I'm testing this again to check the return codes
        # I didn't want to tear up all the existing tests to change how run_hive is mocked
        comm = mock.Mock(name='communicate_mock')
        comm.return_value = "some return stuff", ""
        preturn = mock.Mock(name='open_mock')
        preturn.returncode = 0
        preturn.communicate = comm
        popen.return_value = preturn
        # Exit code 0: stdout is returned as-is.
        returned = luigi.contrib.hive.run_hive(["blah", "blah"])
        self.assertEqual("some return stuff", returned)
        # Non-zero exit code raises when check_return_code is left on.
        preturn.returncode = 17
        self.assertRaises(luigi.contrib.hive.HiveCommandError, luigi.contrib.hive.run_hive, ["blah", "blah"])
        # With check_return_code=False the (empty) stdout is still returned.
        comm.return_value = "", "some stderr stuff"
        returned = luigi.contrib.hive.run_hive(["blah", "blah"], False)
        self.assertEqual("", returned)
class MyHiveTask(luigi.contrib.hive.HiveQueryTask):
    # Minimal HiveQueryTask fixture used by TestHiveTask below.
    param = luigi.Parameter()

    def query(self):
        # The query text is irrelevant; tests only inspect how it is invoked.
        return 'banana banana %s' % self.param
class TestHiveTask(unittest.TestCase):
    # Subclasses override task_class to re-run the same checks on variants.
    task_class = MyHiveTask

    @mock.patch('luigi.contrib.hadoop.run_and_track_hadoop_job')
    def test_run(self, run_and_track_hadoop_job):
        # Running the task end-to-end must succeed and launch the 'hive'
        # executable as the first element of the tracked command line.
        success = luigi.run([self.task_class.__name__, '--param', 'foo', '--local-scheduler', '--no-lock'])
        self.assertTrue(success)
        self.assertEqual('hive', run_and_track_hadoop_job.call_args[0][0][0])
class MyHiveTaskArgs(MyHiveTask):
    # Variant of MyHiveTask that supplies hivevars/hiveconfs so
    # TestHiveTaskArgs can verify how they become CLI arguments.
    def hivevars(self):
        return {'my_variable1': 'value1', 'my_variable2': 'value2'}

    def hiveconfs(self):
        return {'hive.additional.conf': 'conf_value'}
class TestHiveTaskArgs(TestHiveTask):
    # Inherits test_run from TestHiveTask, applied to the args variant.
    task_class = MyHiveTaskArgs

    def test_arglist(self):
        # The generated command line must pass the query file after -f, and
        # each hivevar/hiveconf as a k=v token preceded by its flag.
        task = self.task_class(param='foo')
        f_name = 'my_file'
        runner = luigi.contrib.hive.HiveQueryRunner()
        arglist = runner.get_arglist(f_name, task)
        f_idx = arglist.index('-f')
        self.assertEqual(arglist[f_idx + 1], f_name)
        hivevars = ['{}={}'.format(k, v) for k, v in six.iteritems(task.hivevars())]
        for var in hivevars:
            idx = arglist.index(var)
            self.assertEqual(arglist[idx - 1], '--hivevar')
        hiveconfs = ['{}={}'.format(k, v) for k, v in six.iteritems(task.hiveconfs())]
        for conf in hiveconfs:
            idx = arglist.index(conf)
            self.assertEqual(arglist[idx - 1], '--hiveconf')
class TestHiveTarget(unittest.TestCase):
    def test_hive_table_target(self):
        # HiveTableTarget.exists() must delegate to the client's table_exists.
        client = mock.Mock()
        target = luigi.contrib.hive.HiveTableTarget(database='db', table='foo', client=client)
        target.exists()
        client.table_exists.assert_called_with('foo', 'db')

    def test_hive_partition_target(self):
        # HivePartitionTarget.exists() must pass the partition through too.
        client = mock.Mock()
        target = luigi.contrib.hive.HivePartitionTarget(database='db', table='foo', partition='bar', client=client)
        target.exists()
        client.table_exists.assert_called_with('foo', 'db', 'bar')
| apache-2.0 |
ncliam/serverpos | openerp/tools/test_config.py | 456 | 1418 | # -*- coding: utf-8 -*-
""" Tests for the configuration file/command-line arguments. """
# This test should be run from its directory.
# TODO A configmanager object cannot parse multiple times a config file
# and/or the command line, preventing to 'reload' a configuration.
import os
import config
config_file_00 = os.path.join(os.path.dirname(__file__),'test-config-values-00.conf')

# 1. No config file, no command-line arguments (a.k.a. default values)
conf = config.configmanager()
conf.parse_config()
assert conf['osv_memory_age_limit'] == 1.0
# By default addons_path is <root_path>/addons.
assert os.path.join(conf['root_path'], 'addons') == conf['addons_path']

# 2. No config file, some command-line arguments
conf = config.configmanager()
# mess with the optparse.Option definition to allow an invalid path
conf.casts['addons_path'].action = 'store'
conf.parse_config(['--addons-path=/xyz/dont-exist', '--osv-memory-age-limit=2.3'])
assert conf['osv_memory_age_limit'] == 2.3
assert conf['addons_path'] == '/xyz/dont-exist'

# 3. Config file, no command-line arguments
# (the .conf fixture sets osv_memory_age_limit to 3.4)
conf = config.configmanager()
conf.parse_config(['-c', config_file_00])
assert conf['osv_memory_age_limit'] == 3.4

# 4. Config file, and command-line arguments
# Command-line values must take precedence over the config file.
conf = config.configmanager()
conf.parse_config(['-c', config_file_00, '--osv-memory-age-limit=2.3'])
assert conf['osv_memory_age_limit'] == 2.3
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
havard024/prego | venv/lib/python2.7/site-packages/coffin/views/generic/dates.py | 3 | 1957 | from coffin.views.generic.detail import SingleObjectTemplateResponseMixin
from coffin.views.generic.list import MultipleObjectTemplateResponseMixin
import django.views.generic.dates as _generic_dates
class ArchiveIndexView(MultipleObjectTemplateResponseMixin, _generic_dates.BaseArchiveIndexView):
    """
    Equivalent of Django's generic ArchiveIndexView, but rendered through
    Coffin's Jinja template response mixin.
    """
    template_name_suffix = '_archive'
class YearArchiveView(MultipleObjectTemplateResponseMixin, _generic_dates.BaseYearArchiveView):
    """
    Equivalent of Django's generic YearArchiveView, but rendered through
    Coffin's Jinja template response mixin.
    """
    template_name_suffix = '_archive_year'
class MonthArchiveView(MultipleObjectTemplateResponseMixin, _generic_dates.BaseMonthArchiveView):
    """
    Equivalent of Django's generic MonthArchiveView, but rendered through
    Coffin's Jinja template response mixin.
    """
    template_name_suffix = '_archive_month'
class WeekArchiveView(MultipleObjectTemplateResponseMixin, _generic_dates.BaseWeekArchiveView):
    """
    Equivalent of Django's generic WeekArchiveView, but rendered through
    Coffin's Jinja template response mixin.
    """
    template_name_suffix = '_archive_week'
class DayArchiveView(MultipleObjectTemplateResponseMixin, _generic_dates.BaseDayArchiveView):
    """
    Equivalent of Django's generic DayArchiveView, but rendered through
    Coffin's Jinja template response mixin.
    """
    template_name_suffix = "_archive_day"
class TodayArchiveView(MultipleObjectTemplateResponseMixin, _generic_dates.BaseTodayArchiveView):
    """
    Equivalent of Django's generic TodayArchiveView, but rendered through
    Coffin's Jinja template response mixin.  Shares the day-archive suffix.
    """
    template_name_suffix = "_archive_day"
class DateDetailView(SingleObjectTemplateResponseMixin, _generic_dates.BaseDateDetailView):
"""
Equivalent of django generic view DateDetailView, but uses Jinja template renderer.
"""
template_name_suffix = '_detail' | mit |
Arzaroth/virtualnetwork | network/mapParser.py | 1 | 4727 | #!/usr/bin/python3.3 -O
from pyrser import grammar,meta
from pyrser.directives import ignore
from network import Host, Router
import sys
def insensitiveCase(s):
    """Build a pyrser grammar fragment that matches *s* case-insensitively.

    Every character c of s becomes the alternative ['c'|'C']; the groups
    are space-separated and wrapped in one outer bracket pair, e.g.
    "ab" -> "[['a'|'A'] ['b'|'B']]".
    """
    alternatives = []
    for ch in s:
        alternatives.append("['" + ch.lower() + "'|'" + ch.upper() + "']")
    return "[" + " ".join(alternatives) + "]"
class MapParser(grammar.Grammar):
    """Pyrser grammar for the virtual-network map file format.

    A map file is a sequence of [Host] / [Router] sections, each containing
    Name / IP / TTL / Route assignments.  The #hook directives below build
    Host/Router objects while parsing; section and field keywords match
    case-insensitively via insensitiveCase().
    """
    entry = "Map"
    grammar = """
        Map = [#init_map(_) @ignore("null") [[[Hosts:h #add_host(_, h)] | [Routers:r #add_router(_, r)]] eol*]+
               #link_hosts(_) eof]
        Hosts = [#init_host(_) '[' ws {host} ws ']' eol+ [[Name | Ip | TTL | Route]:r #add_fhost(_, r)]+]
        Routers = [#init_router(_) '[' ws {router} ws ']' eol+ [[Name | Ip | TTL | Route]:r #add_frouter(_, r)]+]
        Name = [ws {name} ws '=' ws id:i #ret_f(_, "id", i) ws eol+]
        Ip = [ws {ip} ws '=' ws cidraddr:c #ret_f(_, "ip", c) ws eol+]
        TTL = [ws {ttl} ws '=' ws num:n #ret_f(_, "ttl", n) ws eol+]
        Route = [ws {route} ws '=' ws [[{default}:c ws id:i #ret_f(_, "route", c, i)]
                                      | [cidraddr:c ws id:i #ret_f(_, "route", c, i)]] ws eol+]
        cidraddr = [num '.' num '.' num '.' num '/' num]
        ws = [[' ' | '\r' | '\t']*]
    """.format(host = insensitiveCase("Host"),
               router = insensitiveCase("Router"),
               route = insensitiveCase("Route"),
               ip = insensitiveCase("IP"),
               ttl = insensitiveCase("TTL"),
               name = insensitiveCase("Name"),
               default = insensitiveCase("Default"),
               # NOTE(review): there is no {internet} placeholder in the
               # grammar above -- this kwarg looks unused; confirm before
               # removing it.
               internet = insensitiveCase("Internet"))
@meta.hook(MapParser)
def init_map(self, ast):
    """Pyrser hook: prepare the root AST with empty host and route tables."""
    ast.network = {}   # name -> Host/Router instance
    ast.routes = {}    # name -> list of (destination, target-name) pairs
    return True
@meta.hook(MapParser)
def init_host(self, ast):
    """Pyrser hook: start a [Host] section.

    Reuses init_map to give the section sub-AST its own scratch `network`
    dict (used here as a field store), then adds the host's route list.
    """
    self.init_map(ast)
    ast.network["route"] = []
    return True
@meta.hook(MapParser)
def init_router(self, ast):
    """Pyrser hook: start a [Router] section.

    Like init_host, but routers may carry several addresses, collected
    in the "ips" list.
    """
    self.init_host(ast)
    ast.network["ips"] = []
    return True
@meta.hook(MapParser)
def link_hosts(self, ast):
    """Pyrser hook, second pass: turn the textual routes collected per host
    into addRoute() links, now that every host object exists."""
    for k,v in ast.routes.items():
        for x in v:
            # x is (destination-or-'default', target-host-name)
            if x[1] not in ast.network:
                raise Exception("Unknown host ({0}) for {1} route.".format(x[1], k))
            ast.network[k].addRoute(ast.network[x[1]], x[0])
    return True
def base_add(ast, h):
    """Register a parsed host/router sub-AST `h` on the map AST.

    Validates that the entry has a name and that the name has not already
    been defined, then snapshots its route list into ast.routes so routes
    can be linked once every host is known (see link_hosts).

    Raises Exception on a missing name or a duplicate definition.
    """
    if "name" not in h.network:
        # BUG FIX: this branch previously called self.value(h) but `self`
        # is not in scope in this plain helper, so it raised a NameError
        # instead of the intended error message.
        raise Exception("Missing name field for given host.")
    if h.network["name"] in ast.network:
        raise Exception("Redefinition of {0}.".format(h.network["name"]))
    # Copy the list so later mutations of h.network["route"] cannot leak in.
    ast.routes[h.network["name"]] = h.network["route"][::]
@meta.hook(MapParser)
def add_host(self, ast, h):
    """Pyrser hook: build a Host from a parsed [Host] section and register
    it in ast.network.  Requires an ip field; ttl is optional."""
    base_add(ast, h)
    if "ip" not in h.network:
        raise Exception("Missing ip field for given host:\n{0}".format(self.value(h)))
    # TTL is optional; fall back to Host's default when absent.
    if "ttl" in h.network:
        ast.network[h.network["name"]] = Host(h.network["name"],
                                              h.network["ip"], h.network["ttl"])
    else:
        ast.network[h.network["name"]] = Host(h.network["name"],
                                              h.network["ip"])
    return True
@meta.hook(MapParser)
def add_router(self, ast, h):
    """Pyrser hook: build a Router from a parsed [Router] section and
    register it in ast.network.  Requires at least one ip; ttl optional."""
    base_add(ast, h)
    if not h.network["ips"]:
        raise Exception("Missing ip field for given host")
    # Routers take all their addresses as positional args; ttl is keyword.
    if "ttl" in h.network:
        ast.network[h.network["name"]] = Router(h.network["name"],
                                                *h.network["ips"], ttl = h.network["ttl"])
    else:
        ast.network[h.network["name"]] = Router(h.network["name"],
                                                *h.network["ips"])
    return True
@meta.hook(MapParser)
def ret_f(self, ast, *args):
    """Pyrser hook: stash a parsed field as [tag, text...] on the sub-AST.

    args[0] is a literal tag ('id', 'ip', 'ttl' or 'route'); the remaining
    args are parse nodes whose matched text is extracted via self.value().
    """
    ast.retvals = [args[0]]
    ast.retvals.extend([self.value(x) for x in args[1:]])
    return True
@meta.hook(MapParser)
def add_fhost(self, ast, r):
    """Pyrser hook: fold one field (tagged by ret_f) into the current
    [Host] section's scratch dict."""
    def reg_name(ast, name):
        ast.network["name"] = name[0]
    def reg_ip(ast, ip):
        # Hosts have exactly one address.
        ast.network["ip"] = ip[0]
    def reg_ttl(ast, ttl):
        ast.network["ttl"] = ttl[0]
    def reg_route(ast, route):
        ast.network["route"].append(route)
    # Dispatch on the tag stored in retvals[0]; unknown tags are ignored.
    fmap = {'id' : reg_name,
            'ip' : reg_ip,
            'ttl' : reg_ttl,
            'route' : reg_route}
    if r.retvals[0] in fmap:
        fmap[r.retvals[0]](ast, r.retvals[1:])
    return True
@meta.hook(MapParser)
def add_frouter(self, ast, r):
    """Pyrser hook: fold one field (tagged by ret_f) into the current
    [Router] section's scratch dict.  Differs from add_fhost only in that
    ip fields accumulate into the "ips" list."""
    def reg_name(ast, name):
        ast.network["name"] = name[0]
    def reg_ip(ast, ip):
        # Routers may declare several addresses.
        ast.network["ips"].append(ip[0])
    def reg_ttl(ast, ttl):
        ast.network["ttl"] = ttl[0]
    def reg_route(ast, route):
        ast.network["route"].append(route)
    fmap = {'id' : reg_name,
            'ip' : reg_ip,
            'ttl' : reg_ttl,
            'route' : reg_route}
    if r.retvals[0] in fmap:
        fmap[r.retvals[0]](ast, r.retvals[1:])
    return True
| gpl-3.0 |
michaelni/audacity | lib-src/lv2/lv2/plugins/eg03-metro.lv2/waflib/Tools/kde4.py | 275 | 2007 | #! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
import os,sys,re
from waflib import Options,TaskGen,Task,Utils
from waflib.TaskGen import feature,after_method
@feature('msgfmt')
def apply_msgfmt(self):
    """Waf task generator: compile every listed .po catalogue to .mo and
    install it under <install_path>/<locale>/LC_MESSAGES/<appname>.mo."""
    for lang in self.to_list(self.langs):
        node=self.path.find_resource(lang+'.po')
        task=self.create_task('msgfmt',node,node.change_ext('.mo'))
        # Only the last path component names the locale.
        langname=lang.split('/')
        langname=langname[-1]
        inst=getattr(self,'install_path','${KDE4_LOCALE_INSTALL_DIR}')
        self.bld.install_as(inst+os.sep+langname+os.sep+'LC_MESSAGES'+os.sep+getattr(self,'appname','set_your_appname')+'.mo',task.outputs[0],chmod=getattr(self,'chmod',Utils.O644))
class msgfmt(Task.Task):
    # Runs gettext's msgfmt to compile a .po catalogue into a binary .mo.
    color='BLUE'
    run_str='${MSGFMT} ${SRC} -o ${TGT}'
def configure(self):
    """Waf configure step for KDE4: query kde4-config for the install
    prefix, import every set() variable from KDELibsDependencies.cmake
    into the environment, then register the standard KDE4 libraries,
    search paths and the msgfmt tool."""
    kdeconfig=self.find_program('kde4-config')
    prefix=self.cmd_and_log('%s --prefix'%kdeconfig).strip()
    fname='%s/share/apps/cmake/modules/KDELibsDependencies.cmake'%prefix
    try:os.stat(fname)
    except OSError:
        # Fall back to the alternate kde4 layout used by some distributions.
        fname='%s/share/kde4/apps/cmake/modules/KDELibsDependencies.cmake'%prefix
        try:os.stat(fname)
        except OSError:self.fatal('could not open %s'%fname)
    try:
        txt=Utils.readf(fname)
    except(OSError,IOError):
        self.fatal('could not read %s'%fname)
    # Join CMake line continuations, strip '#' comments, then copy every
    # set(KEY "value") pair into the waf environment.
    txt=txt.replace('\\\n','\n')
    fu=re.compile('#(.*)\n')
    txt=fu.sub('',txt)
    setregexp=re.compile('([sS][eE][tT]\s*\()\s*([^\s]+)\s+\"([^"]+)\"\)')
    found=setregexp.findall(txt)
    for(_,key,val)in found:
        self.env[key]=val
    # Conventional KDE4 link libraries and search/include paths.
    self.env['LIB_KDECORE']=['kdecore']
    self.env['LIB_KDEUI']=['kdeui']
    self.env['LIB_KIO']=['kio']
    self.env['LIB_KHTML']=['khtml']
    self.env['LIB_KPARTS']=['kparts']
    self.env['LIBPATH_KDECORE']=[os.path.join(self.env.KDE4_LIB_INSTALL_DIR,'kde4','devel'),self.env.KDE4_LIB_INSTALL_DIR]
    self.env['INCLUDES_KDECORE']=[self.env['KDE4_INCLUDE_INSTALL_DIR']]
    self.env.append_value('INCLUDES_KDECORE',[self.env['KDE4_INCLUDE_INSTALL_DIR']+os.sep+'KDE'])
    self.find_program('msgfmt',var='MSGFMT')
| gpl-2.0 |
kennethlove/django | django/db/models/related.py | 102 | 3162 | from django.utils.encoding import smart_unicode
from django.db.models.fields import BLANK_CHOICE_DASH
class BoundRelatedObject(object):
    """Couple a RelatedObject with the field mappings registered under its
    name.  Concrete subclasses supply template_name()."""

    def __init__(self, related_object, field_mapping, original):
        # Keep the relation and extract only this relation's mappings.
        self.relation = related_object
        self.field_mappings = field_mapping[related_object.name]

    def template_name(self):
        # Abstract: bound-object subclasses must override this.
        raise NotImplementedError

    def __repr__(self):
        return repr(vars(self))
class RelatedObject(object):
    """Describes the reverse side of a relation: `field` on `model` points
    back at `parent_model`."""

    def __init__(self, parent_model, model, field):
        self.parent_model = parent_model
        self.model = model
        self.opts = model._meta
        self.field = field
        self.name = '%s:%s' % (self.opts.app_label, self.opts.module_name)
        self.var_name = self.opts.object_name.lower()

    def get_choices(self, include_blank=True, blank_choice=BLANK_CHOICE_DASH,
                    limit_to_currently_related=False):
        """Returns choices with a default blank choices included, for use
        as SelectField choices for this field.

        Analogue of django.db.models.fields.Field.get_choices, provided
        initially for utilisation by RelatedFieldListFilter.
        """
        # Old-style `and/or` ternary; safe because BLANK_CHOICE_DASH is a
        # non-empty (truthy) list.
        first_choice = include_blank and blank_choice or []
        queryset = self.model._default_manager.all()
        if limit_to_currently_related:
            # Restrict to objects actually linked to some parent instance.
            queryset = queryset.complex_filter(
                {'%s__isnull' % self.parent_model._meta.module_name: False})
        lst = [(x._get_pk_val(), smart_unicode(x)) for x in queryset]
        return first_choice + lst

    def get_db_prep_lookup(self, lookup_type, value, connection, prepared=False):
        # Defer to the actual field definition for db prep
        return self.field.get_db_prep_lookup(lookup_type, value,
                        connection=connection, prepared=prepared)

    def editable_fields(self):
        "Get the fields in this class that should be edited inline."
        return [f for f in self.opts.fields + self.opts.many_to_many if f.editable and f != self.field]

    def __repr__(self):
        return "<RelatedObject: %s related to %s>" % (self.name, self.field.name)

    def bind(self, field_mapping, original, bound_related_object_class=BoundRelatedObject):
        # Wrap this relation in a (possibly customised) bound-object class.
        return bound_related_object_class(self, field_mapping, original)

    def get_accessor_name(self):
        # This method encapsulates the logic that decides what name to give an
        # accessor descriptor that retrieves related many-to-one or
        # many-to-many objects. It uses the lower-cased object_name + "_set",
        # but this can be overridden with the "related_name" option.
        if self.field.rel.multiple:
            # If this is a symmetrical m2m relation on self, there is no reverse accessor.
            if getattr(self.field.rel, 'symmetrical', False) and self.model == self.parent_model:
                return None
            return self.field.rel.related_name or (self.opts.object_name.lower() + '_set')
        else:
            return self.field.rel.related_name or (self.opts.object_name.lower())

    def get_cache_name(self):
        # Name of the attribute used to cache the related object on instances.
        return "_%s_cache" % self.get_accessor_name()
| bsd-3-clause |
luiscarlosgph/nas | env/lib/python2.7/site-packages/django/contrib/messages/storage/fallback.py | 704 | 2172 | from django.contrib.messages.storage.base import BaseStorage
from django.contrib.messages.storage.cookie import CookieStorage
from django.contrib.messages.storage.session import SessionStorage
class FallbackStorage(BaseStorage):
    """
    Tries to store all messages in the first backend, storing any unstored
    messages in each subsequent backend.
    """
    storage_classes = (CookieStorage, SessionStorage)

    def __init__(self, *args, **kwargs):
        super(FallbackStorage, self).__init__(*args, **kwargs)
        # Instantiate every backend up front, in priority order.
        self.storages = [storage_class(*args, **kwargs)
                         for storage_class in self.storage_classes]
        self._used_storages = set()

    def _get(self, *args, **kwargs):
        """
        Gets a single list of messages from all storage backends.
        """
        all_messages = []
        for storage in self.storages:
            messages, all_retrieved = storage._get()
            # If the backend hasn't been used, no more retrieval is necessary.
            if messages is None:
                break
            if messages:
                self._used_storages.add(storage)
            all_messages.extend(messages)
            # If this storage class contained all the messages, no further
            # retrieval is necessary
            if all_retrieved:
                break
        return all_messages, all_retrieved

    def _store(self, messages, response, *args, **kwargs):
        """
        Stores the messages, returning any unstored messages after trying all
        backends.

        For each storage backend, any messages not stored are passed on to the
        next backend.
        """
        for storage in self.storages:
            if messages:
                # remove_oldest=False: overflow spills into the next backend
                # rather than being discarded.
                messages = storage._store(messages, response,
                                          remove_oldest=False)
            # Even if there are no more messages, continue iterating to ensure
            # storages which contained messages are flushed.
            elif storage in self._used_storages:
                storage._store([], response)
                self._used_storages.remove(storage)
        return messages
return messages
| mit |
dgjustice/ansible | lib/ansible/modules/database/mysql/mysql_user.py | 13 | 24121 | #!/usr/bin/python
# (c) 2012, Mark Theunissen <mark.theunissen@gmail.com>
# Sponsored by Four Kitchens http://fourkitchens.com.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: mysql_user
short_description: Adds or removes a user from a MySQL database.
description:
- Adds or removes a user from a MySQL database.
version_added: "0.6"
options:
name:
description:
- name of the user (role) to add or remove
required: true
password:
description:
- set the user's password.
required: false
default: null
encrypted:
description:
- Indicate that the 'password' field is a `mysql_native_password` hash
required: false
choices: [ "yes", "no" ]
default: "no"
version_added: "2.0"
host:
description:
- the 'host' part of the MySQL username
required: false
default: localhost
host_all:
description:
- override the host option, making ansible apply changes to
all hostnames for a given user. This option cannot be used
when creating users
required: false
choices: [ "yes", "no" ]
default: "no"
version_added: "2.1"
priv:
description:
- "MySQL privileges string in the format: C(db.table:priv1,priv2)."
- "Multiple privileges can be specified by separating each one using
a forward slash: C(db.table:priv/db.table:priv)."
- The format is based on MySQL C(GRANT) statement.
- Database and table names can be quoted, MySQL-style.
- If column privileges are used, the C(priv1,priv2) part must be
exactly as returned by a C(SHOW GRANT) statement. If not followed,
the module will always report changes. It includes grouping columns
by permission (C(SELECT(col1,col2)) instead of C(SELECT(col1),SELECT(col2))).
required: false
default: null
append_privs:
description:
- Append the privileges defined by priv to the existing ones for this
user instead of overwriting existing ones.
required: false
choices: [ "yes", "no" ]
default: "no"
version_added: "1.4"
sql_log_bin:
description:
- Whether binary logging should be enabled or disabled for the connection.
required: false
choices: ["yes", "no" ]
default: "yes"
version_added: "2.1"
state:
description:
- Whether the user should exist. When C(absent), removes
the user.
required: false
default: present
choices: [ "present", "absent" ]
check_implicit_admin:
description:
- Check if mysql allows login as root/nopassword before trying supplied credentials.
required: false
choices: [ "yes", "no" ]
default: "no"
version_added: "1.3"
update_password:
required: false
default: always
choices: ['always', 'on_create']
version_added: "2.0"
description:
- C(always) will update passwords if they differ. C(on_create) will only set the password for newly created users.
notes:
- "MySQL server installs with default login_user of 'root' and no password. To secure this user
as part of an idempotent playbook, you must create at least two tasks: the first must change the root user's password,
without providing any login_user/login_password details. The second must drop a ~/.my.cnf file containing
the new root credentials. Subsequent runs of the playbook will then succeed by reading the new credentials from
the file."
- Currently, there is only support for the `mysql_native_password` encrypted password hash module.
author: "Jonathan Mainguy (@Jmainguy)"
extends_documentation_fragment: mysql
'''
EXAMPLES = """
# Removes anonymous user account for localhost
- mysql_user:
name: ''
host: localhost
state: absent
# Removes all anonymous user accounts
- mysql_user:
name: ''
host_all: yes
state: absent
# Create database user with name 'bob' and password '12345' with all database privileges
- mysql_user:
name: bob
password: 12345
priv: '*.*:ALL'
state: present
# Create database user with name 'bob' and previously hashed mysql native password '*EE0D72C1085C46C5278932678FBE2C6A782821B4' with all database privileges
- mysql_user:
name: bob
password: '*EE0D72C1085C46C5278932678FBE2C6A782821B4'
encrypted: yes
priv: '*.*:ALL'
state: present
# Creates database user 'bob' and password '12345' with all database privileges and 'WITH GRANT OPTION'
- mysql_user:
name: bob
password: 12345
priv: '*.*:ALL,GRANT'
state: present
# Modify user Bob to require SSL connections. Note that REQUIRESSL is a special privilege that should only apply to *.* by itself.
- mysql_user:
name: bob
append_privs: true
priv: '*.*:REQUIRESSL'
state: present
# Ensure no user named 'sally'@'localhost' exists, also passing in the auth credentials.
- mysql_user:
login_user: root
login_password: 123456
name: sally
state: absent
# Ensure no user named 'sally' exists at all
- mysql_user:
name: sally
host_all: yes
state: absent
# Specify grants composed of more than one word
- mysql_user:
name: replication
password: 12345
priv: "*.*:REPLICATION CLIENT"
state: present
# Revoke all privileges for user 'bob' and password '12345'
- mysql_user:
name: bob
password: 12345
priv: "*.*:USAGE"
state: present
# Example privileges string format
# mydb.*:INSERT,UPDATE/anotherdb.*:SELECT/yetanotherdb.*:ALL
# Example using login_unix_socket to connect to server
- mysql_user:
name: root
password: abc123
login_unix_socket: /var/run/mysqld/mysqld.sock
# Example of skipping binary logging while adding user 'bob'
- mysql_user:
name: bob
password: 12345
priv: "*.*:USAGE"
state: present
sql_log_bin: no
# Example .my.cnf file for setting the root password
# [client]
# user=root
# password=n<_665{vS43y
"""
import getpass
import tempfile
import re
import string
try:
import MySQLdb
except ImportError:
mysqldb_found = False
else:
mysqldb_found = True
from ansible.module_utils.six import iteritems
# Privilege names accepted in the `priv` option, matching MySQL's GRANT
# statement vocabulary (plus the module's REQUIRESSL pseudo-privilege).
VALID_PRIVS = frozenset(('CREATE', 'DROP', 'GRANT', 'GRANT OPTION',
                         'LOCK TABLES', 'REFERENCES', 'EVENT', 'ALTER',
                         'DELETE', 'INDEX', 'INSERT', 'SELECT', 'UPDATE',
                         'CREATE TEMPORARY TABLES', 'TRIGGER', 'CREATE VIEW',
                         'SHOW VIEW', 'ALTER ROUTINE', 'CREATE ROUTINE',
                         'EXECUTE', 'FILE', 'CREATE TABLESPACE', 'CREATE USER',
                         'PROCESS', 'PROXY', 'RELOAD', 'REPLICATION CLIENT',
                         'REPLICATION SLAVE', 'SHOW DATABASES', 'SHUTDOWN',
                         'SUPER', 'ALL', 'ALL PRIVILEGES', 'USAGE', 'REQUIRESSL'))

class InvalidPrivsError(Exception):
    """Raised when a grant/privilege string cannot be parsed or validated."""
    pass
# ===========================================
# MySQL module specific support methods.
#
# User Authentication Management was change in MySQL 5.7
# This is a generic check for if the server version is less than version 5.7
def server_version_check(cursor):
    """Return True when the server uses pre-5.7 ("old style") user
    management statements.

    MariaDB always counts as old style, since only the old-style password
    handling is supported for it here; otherwise the reported version is
    parsed and anything below MySQL 5.7 is old style.
    """
    cursor.execute("SELECT VERSION()")
    version_str = cursor.fetchone()[0]
    # MariaDB keeps the old-style SET PASSWORD syntax working.
    if 'mariadb' in version_str.lower():
        return True
    parts = version_str.split('.')
    if int(parts[0]) <= 5 and int(parts[1]) < 7:
        return True
    return False
def get_mode(cursor):
    """Return 'ANSI' when the server's global sql_mode includes any ANSI
    flag, else 'NOTANSI'.  Decides the identifier-quoting character used
    when (un)packing privileges."""
    cursor.execute('SELECT @@GLOBAL.sql_mode')
    sql_mode = cursor.fetchone()[0]
    return 'ANSI' if 'ANSI' in sql_mode else 'NOTANSI'
def user_exists(cursor, user, host, host_all):
    """Return True if at least one matching row exists in mysql's user
    table.  With host_all set, only the user name is matched; otherwise
    the (user, host) pair must match exactly."""
    if host_all:
        query = "SELECT count(*) FROM user WHERE user = %s"
        args = [user]
    else:
        query = "SELECT count(*) FROM user WHERE user = %s AND host = %s"
        args = (user, host)
    cursor.execute(query, args)
    return cursor.fetchone()[0] > 0
def user_add(cursor, user, host, host_all, password, encrypted, new_priv, check_mode):
    """Create user@host (optionally with a plaintext or pre-hashed password)
    and grant any requested privileges.

    Returns False when host_all is set (a concrete hostname is required to
    create an account) and True otherwise; in check mode nothing is executed.
    """
    # we cannot create users without a proper hostname
    if host_all:
        return False

    if check_mode:
        return True

    if password and encrypted:
        # `password` is already a mysql_native_password hash.
        cursor.execute("CREATE USER %s@%s IDENTIFIED BY PASSWORD %s", (user,host,password))
    elif password and not encrypted:
        cursor.execute("CREATE USER %s@%s IDENTIFIED BY %s", (user,host,password))
    else:
        # No password supplied at all.
        cursor.execute("CREATE USER %s@%s", (user,host))
    if new_priv is not None:
        for db_table, priv in iteritems(new_priv):
            privileges_grant(cursor, user,host,db_table,priv)
    return True
def is_hash(password):
    """Return True if *password* looks like a mysql_native_password hash:
    a '*' followed by exactly 40 hexadecimal digits."""
    if len(password) != 41 or password[0] != '*':
        return False
    return all(c in string.hexdigits for c in password[1:])
def user_mod(cursor, user, host, host_all, password, encrypted, new_priv, append_privs, module):
    """Reconcile an existing user's password and privileges.

    For every hostname the account exists under (all of them when host_all
    is set), updates the password if it differs and then revokes/grants
    privileges until they match `new_priv` (or are a superset of it when
    append_privs is set).  Uses old- or new-style password statements
    depending on server_version_check().  In check mode, returns True at
    the first pending change without executing it; otherwise returns
    whether anything was changed.
    """
    changed = False
    grant_option = False

    if host_all:
        hostnames = user_get_hostnames(cursor, [user])
    else:
        hostnames = [host]

    for host in hostnames:
        # Handle clear text and hashed passwords.
        if bool(password):
            # Determine what user management method server uses
            old_user_mgmt = server_version_check(cursor)

            # Fetch the stored hash from the column appropriate for this
            # server generation.
            if old_user_mgmt:
                cursor.execute("SELECT password FROM user WHERE user = %s AND host = %s", (user,host))
            else:
                cursor.execute("SELECT authentication_string FROM user WHERE user = %s AND host = %s", (user,host))
            current_pass_hash = cursor.fetchone()

            if encrypted:
                encrypted_string = (password)
                if is_hash(password):
                    if current_pass_hash[0] != encrypted_string:
                        if module.check_mode:
                            return True
                        if old_user_mgmt:
                            cursor.execute("SET PASSWORD FOR %s@%s = %s", (user, host, password))
                        else:
                            cursor.execute("ALTER USER %s@%s IDENTIFIED WITH mysql_native_password AS %s", (user, host, password))
                        changed = True
                else:
                    module.fail_json(msg="encrypted was specified however it does not appear to be a valid hash expecting: *SHA1(SHA1(your_password))")
            else:
                # Plaintext password: have the server hash it so we can
                # compare against the stored hash without transmitting it.
                if old_user_mgmt:
                    cursor.execute("SELECT PASSWORD(%s)", (password,))
                else:
                    cursor.execute("SELECT CONCAT('*', UCASE(SHA1(UNHEX(SHA1(%s)))))", (password,))
                new_pass_hash = cursor.fetchone()
                if current_pass_hash[0] != new_pass_hash[0]:
                    if module.check_mode:
                        return True
                    if old_user_mgmt:
                        cursor.execute("SET PASSWORD FOR %s@%s = PASSWORD(%s)", (user, host, password))
                    else:
                        cursor.execute("ALTER USER %s@%s IDENTIFIED WITH mysql_native_password BY %s", (user, host, password))
                    changed = True

        # Handle privileges
        if new_priv is not None:
            curr_priv = privileges_get(cursor, user,host)

            # If the user has privileges on a db.table that doesn't appear at all in
            # the new specification, then revoke all privileges on it.
            for db_table, priv in iteritems(curr_priv):
                # If the user has the GRANT OPTION on a db.table, revoke it first.
                if "GRANT" in priv:
                    grant_option = True
                if db_table not in new_priv:
                    # root and PROXY grants are never revoked implicitly.
                    if user != "root" and "PROXY" not in priv and not append_privs:
                        if module.check_mode:
                            return True
                        privileges_revoke(cursor, user,host,db_table,priv,grant_option)
                        changed = True

            # If the user doesn't currently have any privileges on a db.table, then
            # we can perform a straight grant operation.
            for db_table, priv in iteritems(new_priv):
                if db_table not in curr_priv:
                    if module.check_mode:
                        return True
                    privileges_grant(cursor, user,host,db_table,priv)
                    changed = True

            # If the db.table specification exists in both the user's current privileges
            # and in the new privileges, then we need to see if there's a difference.
            db_table_intersect = set(new_priv.keys()) & set(curr_priv.keys())
            for db_table in db_table_intersect:
                priv_diff = set(new_priv[db_table]) ^ set(curr_priv[db_table])
                if (len(priv_diff) > 0):
                    if module.check_mode:
                        return True
                    if not append_privs:
                        privileges_revoke(cursor, user,host,db_table,curr_priv[db_table],grant_option)
                    privileges_grant(cursor, user,host,db_table,new_priv[db_table])
                    changed = True

    return changed
def user_delete(cursor, user, host, host_all, check_mode):
    """Drop user@host, or — when host_all is set — the account under every
    hostname it exists for.  Always reports a change (returns True); in
    check mode nothing is executed."""
    if check_mode:
        return True

    if host_all:
        hostnames = user_get_hostnames(cursor, [user])

        for hostname in hostnames:
            cursor.execute("DROP USER %s@%s", (user, hostname))
    else:
        cursor.execute("DROP USER %s@%s", (user, host))

    return True
def user_get_hostnames(cursor, user):
    """Return every host name the given user account is defined under in
    mysql.user."""
    cursor.execute("SELECT Host FROM mysql.user WHERE user = %s", user)
    return [row[0] for row in cursor.fetchall()]
def privileges_get(cursor, user, host):
    """Fetch the current privileges of user@host as a dict.

    MySQL only exposes grants through ``SHOW GRANTS``, whose rows are
    human-readable strings such as::

        GRANT USAGE ON *.* TO 'user'@'localhost' IDENTIFIED BY 'pass';

    so each row is parsed with a regex.  The returned mapping has the
    same shape as the one produced by privileges_unpack() below,
    e.g. ``{"`db`.*": ['SELECT', 'INSERT', 'GRANT']}``.

    Raises InvalidPrivsError when a grant string cannot be parsed.
    """
    cursor.execute("SHOW GRANTS FOR %s@%s", (user, host))
    grants = {}
    for row in cursor.fetchall():
        match = re.match("GRANT (.+) ON (.+) TO '.*'@'.+'( IDENTIFIED BY PASSWORD '.+')? ?(.*)", row[0])
        if match is None:
            raise InvalidPrivsError('unable to parse the MySQL grant string: %s' % row[0])
        # Normalize the synonym "ALL PRIVILEGES" down to "ALL".
        privileges = ['ALL' if p == 'ALL PRIVILEGES' else p
                      for p in match.group(1).split(", ")]
        trailer = match.group(4)
        if "WITH GRANT OPTION" in trailer:
            privileges.append('GRANT')
        if "REQUIRE SSL" in trailer:
            privileges.append('REQUIRESSL')
        grants[match.group(2)] = privileges
    return grants
def privileges_unpack(priv, mode):
    """Unserialize a privileges string into a dict.

    The compact format (chosen to avoid YAML/JSON inside playbooks) is::

        mydb.*:INSERT,UPDATE/anotherdb.*:SELECT/yetanother.*:ALL

    and the result has the same shape as privileges_get() above.
    Database/table names are quoted with ANSI double quotes or backticks
    depending on *mode*; the ``*`` wildcards are never quoted.  Because
    MySQL always grants USAGE on ``*.*``, that entry is added when the
    string does not specify it.

    Raises InvalidPrivsError when an unknown privilege name is used.
    """
    quote = '"' if mode == 'ANSI' else '`'
    output = {}
    privs = []
    for chunk in priv.strip().split('/'):
        parts = chunk.strip().split(':')
        scope = parts[0].rsplit(".", 1)
        # Quote the database and table sides individually, skipping '*'.
        for idx, segment in enumerate(scope):
            if segment.strip('`') != '*':
                scope[idx] = '%s%s%s' % (quote, segment.strip('`'), quote)
        parts[0] = '.'.join(scope)
        if '(' in parts[1]:
            # Column-level privileges, e.g. "SELECT (col1,col2)": split on
            # commas that are not inside parentheses, then validate with
            # the column list stripped off.
            output[parts[0]] = re.split(r',\s*(?=[^)]*(?:\(|$))', parts[1].upper())
            for entry in output[parts[0]]:
                privs.append(re.sub(r'\s*\(.*\)', '', entry))
        else:
            output[parts[0]] = parts[1].upper().split(',')
            privs = output[parts[0]]
        unknown = frozenset(privs).difference(VALID_PRIVS)
        if unknown:
            raise InvalidPrivsError('Invalid privileges specified: %s' % unknown)
    if '*.*' not in output:
        output['*.*'] = ['USAGE']
    # If only REQUIRESSL and/or GRANT (= WITH GRANT OPTION) were given on
    # *.*, USAGE must still accompany them to avoid SQL syntax errors.
    if 'REQUIRESSL' in priv and not set(output['*.*']).difference(set(['GRANT', 'REQUIRESSL'])):
        output['*.*'].append('USAGE')
    return output
def privileges_revoke(cursor, user, host, db_table, priv, grant_option):
    """Revoke the listed privileges on *db_table* from user@host.

    ``GRANT OPTION`` is revoked first when *grant_option* is set.  The
    pseudo-privileges GRANT and REQUIRESSL are filtered out of the REVOKE
    statement since they are not revocable privilege names.
    """
    # Escape '%' because the driver's execute() treats the query as a
    # format string, and db/table specs often contain SQL wildcards.
    target = db_table.replace('%', '%%')
    if grant_option:
        cursor.execute("REVOKE GRANT OPTION ON %s FROM %%s@%%s" % target,
                       (user, host))
    revocable = ",".join(p for p in priv if p not in ('GRANT', 'REQUIRESSL'))
    cursor.execute("REVOKE %s ON %s FROM %%s@%%s" % (revocable, target),
                   (user, host))
def privileges_grant(cursor, user, host, db_table, priv):
    """Grant the listed privileges on *db_table* to user@host.

    The pseudo-privileges REQUIRESSL and GRANT are translated into the
    ``REQUIRE SSL`` and ``WITH GRANT OPTION`` clauses instead of being
    named in the privilege list.
    """
    # Escape '%' since the driver's execute() uses a format string and
    # db/table specifications often contain the % SQL wildcard.
    target = db_table.replace('%', '%%')
    priv_string = ",".join(p for p in priv if p not in ('GRANT', 'REQUIRESSL'))
    clauses = ["GRANT %s ON %s" % (priv_string, target), "TO %s@%s"]
    if 'REQUIRESSL' in priv:
        clauses.append("REQUIRE SSL")
    if 'GRANT' in priv:
        clauses.append("WITH GRANT OPTION")
    cursor.execute(' '.join(clauses), (user, host))
# ===========================================
# Module execution.
#
def main():
    """Ansible module entry point: ensure the MySQL user matches *state*."""
    module = AnsibleModule(
        argument_spec = dict(
            login_user=dict(default=None),
            login_password=dict(default=None, no_log=True),
            login_host=dict(default="localhost"),
            login_port=dict(default=3306, type='int'),
            login_unix_socket=dict(default=None),
            user=dict(required=True, aliases=['name']),
            password=dict(default=None, no_log=True, type='str'),
            encrypted=dict(default=False, type='bool'),
            host=dict(default="localhost"),
            host_all=dict(type="bool", default="no"),
            state=dict(default="present", choices=["absent", "present"]),
            priv=dict(default=None),
            append_privs=dict(default=False, type='bool'),
            check_implicit_admin=dict(default=False, type='bool'),
            update_password=dict(default="always", choices=["always", "on_create"]),
            connect_timeout=dict(default=30, type='int'),
            config_file=dict(default="~/.my.cnf", type='path'),
            sql_log_bin=dict(default=True, type='bool'),
            ssl_cert=dict(default=None, type='path'),
            ssl_key=dict(default=None, type='path'),
            ssl_ca=dict(default=None, type='path'),
        ),
        supports_check_mode=True
    )
    login_user = module.params["login_user"]
    login_password = module.params["login_password"]
    user = module.params["user"]
    password = module.params["password"]
    encrypted = module.boolean(module.params["encrypted"])
    # MySQL compares host names case-insensitively, so normalize to lower.
    host = module.params["host"].lower()
    host_all = module.params["host_all"]
    state = module.params["state"]
    priv = module.params["priv"]
    check_implicit_admin = module.params['check_implicit_admin']
    connect_timeout = module.params['connect_timeout']
    config_file = module.params['config_file']
    append_privs = module.boolean(module.params["append_privs"])
    update_password = module.params['update_password']
    ssl_cert = module.params["ssl_cert"]
    ssl_key = module.params["ssl_key"]
    ssl_ca = module.params["ssl_ca"]
    db = 'mysql'
    sql_log_bin = module.params["sql_log_bin"]
    if not mysqldb_found:
        module.fail_json(msg="the python mysqldb module is required")
    cursor = None
    try:
        if check_implicit_admin:
            # Best effort: try the passwordless implicit root account first;
            # fall through to the configured credentials on any failure.
            try:
                cursor = mysql_connect(module, 'root', '', config_file, ssl_cert, ssl_key, ssl_ca, db,
                                       connect_timeout=connect_timeout)
            except:
                pass
        if not cursor:
            cursor = mysql_connect(module, login_user, login_password, config_file, ssl_cert, ssl_key, ssl_ca, db,
                                   connect_timeout=connect_timeout)
    except Exception:
        e = get_exception()
        module.fail_json(msg="unable to connect to database, check login_user and login_password are correct or %s has the credentials. Exception message: %s" % (config_file, e))
    if not sql_log_bin:
        # Keep this session's statements out of the binary log (no replication).
        cursor.execute("SET SQL_LOG_BIN=0;")
    if priv is not None:
        # Convert the compact privileges string into the dict form used by
        # user_add/user_mod; the identifier quoting depends on the SQL mode.
        try:
            mode = get_mode(cursor)
        except Exception:
            e = get_exception()
            module.fail_json(msg=str(e))
        try:
            priv = privileges_unpack(priv, mode)
        except Exception:
            e = get_exception()
            module.fail_json(msg="invalid privileges string: %s" % str(e))
    if state == "present":
        if user_exists(cursor, user, host, host_all):
            # Existing user: only push the password when update_password
            # says "always"; otherwise leave it untouched (None).
            try:
                if update_password == 'always':
                    changed = user_mod(cursor, user, host, host_all, password, encrypted, priv, append_privs, module)
                else:
                    changed = user_mod(cursor, user, host, host_all, None, encrypted, priv, append_privs, module)
            except (SQLParseError, InvalidPrivsError, MySQLdb.Error):
                e = get_exception()
                module.fail_json(msg=str(e))
        else:
            if host_all:
                module.fail_json(msg="host_all parameter cannot be used when adding a user")
            try:
                changed = user_add(cursor, user, host, host_all, password, encrypted, priv, module.check_mode)
            except (SQLParseError, InvalidPrivsError, MySQLdb.Error):
                e = get_exception()
                module.fail_json(msg=str(e))
    elif state == "absent":
        if user_exists(cursor, user, host, host_all):
            changed = user_delete(cursor, user, host, host_all, module.check_mode)
        else:
            changed = False
    module.exit_json(changed=changed, user=user)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.database import *
from ansible.module_utils.mysql import *
if __name__ == '__main__':
main()
| gpl-3.0 |
thnee/ansible | lib/ansible/plugins/connection/local.py | 14 | 6967 | # (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
# (c) 2015, 2017 Toshio Kuratomi <tkuratomi@ansible.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
connection: local
short_description: execute on controller
description:
- This connection plugin allows ansible to execute tasks on the Ansible 'controller' instead of on a remote host.
author: ansible (@core)
version_added: historical
notes:
- The remote user is ignored, the user with which the ansible CLI was executed is used instead.
'''
import os
import shutil
import subprocess
import fcntl
import getpass
import ansible.constants as C
from ansible.compat import selectors
from ansible.errors import AnsibleError, AnsibleFileNotFound
from ansible.module_utils.six import text_type, binary_type
from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.plugins.connection import ConnectionBase
from ansible.utils.display import Display
display = Display()
class Connection(ConnectionBase):
    ''' Local based connections '''

    # Plugin identifier used when selecting the connection type.
    transport = 'local'
    # Module payloads may be fed over stdin instead of temporary files.
    has_pipelining = True

    def __init__(self, *args, **kwargs):
        super(Connection, self).__init__(*args, **kwargs)
        # Working directory for spawned commands; None means "inherit".
        self.cwd = None

    def _connect(self):
        ''' connect to the local host; nothing to do here '''
        # Because we haven't made any remote connection we're running as
        # the local user, rather than as whatever is configured in
        # remote_user.
        self._play_context.remote_user = getpass.getuser()
        if not self._connected:
            display.vvv(u"ESTABLISH LOCAL CONNECTION FOR USER: {0}".format(self._play_context.remote_user), host=self._play_context.remote_addr)
            self._connected = True
        return self

    def exec_command(self, cmd, in_data=None, sudoable=True):
        ''' run a command on the local host '''
        super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
        display.debug("in local.exec_command()")
        executable = C.DEFAULT_EXECUTABLE.split()[0] if C.DEFAULT_EXECUTABLE else None
        if not os.path.exists(to_bytes(executable, errors='surrogate_or_strict')):
            raise AnsibleError("failed to find the executable specified %s."
                               " Please verify if the executable exists and re-try." % executable)
        display.vvv(u"EXEC {0}".format(to_text(cmd)), host=self._play_context.remote_addr)
        display.debug("opening command with Popen()")
        # A string command runs through the shell; a list runs directly.
        if isinstance(cmd, (text_type, binary_type)):
            cmd = to_bytes(cmd)
        else:
            cmd = map(to_bytes, cmd)
        p = subprocess.Popen(
            cmd,
            shell=isinstance(cmd, (text_type, binary_type)),
            executable=executable,
            cwd=self.cwd,
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        display.debug("done running command with Popen()")
        if self.become and self.become.expect_prompt() and sudoable:
            # Privilege escalation may print a password prompt on stdout or
            # stderr; switch both pipes to non-blocking so the prompt can be
            # watched for without deadlocking.
            fcntl.fcntl(p.stdout, fcntl.F_SETFL, fcntl.fcntl(p.stdout, fcntl.F_GETFL) | os.O_NONBLOCK)
            fcntl.fcntl(p.stderr, fcntl.F_SETFL, fcntl.fcntl(p.stderr, fcntl.F_GETFL) | os.O_NONBLOCK)
            selector = selectors.DefaultSelector()
            selector.register(p.stdout, selectors.EVENT_READ)
            selector.register(p.stderr, selectors.EVENT_READ)
            become_output = b''
            try:
                # Accumulate output until escalation either succeeds or asks
                # for a password; bail out on timeout or closed pipes.
                while not self.become.check_success(become_output) and not self.become.check_password_prompt(become_output):
                    events = selector.select(self._play_context.timeout)
                    if not events:
                        stdout, stderr = p.communicate()
                        raise AnsibleError('timeout waiting for privilege escalation password prompt:\n' + to_native(become_output))
                    for key, event in events:
                        if key.fileobj == p.stdout:
                            chunk = p.stdout.read()
                        elif key.fileobj == p.stderr:
                            chunk = p.stderr.read()
                        if not chunk:
                            stdout, stderr = p.communicate()
                            raise AnsibleError('privilege output closed while waiting for password prompt:\n' + to_native(become_output))
                        become_output += chunk
            finally:
                selector.close()
            if not self.become.check_success(become_output):
                become_pass = self.become.get_option('become_pass', playcontext=self._play_context)
                p.stdin.write(to_bytes(become_pass, errors='surrogate_or_strict') + b'\n')
            # Restore blocking mode before handing the pipes to communicate().
            fcntl.fcntl(p.stdout, fcntl.F_SETFL, fcntl.fcntl(p.stdout, fcntl.F_GETFL) & ~os.O_NONBLOCK)
            fcntl.fcntl(p.stderr, fcntl.F_SETFL, fcntl.fcntl(p.stderr, fcntl.F_GETFL) & ~os.O_NONBLOCK)
        display.debug("getting output with communicate()")
        stdout, stderr = p.communicate(in_data)
        display.debug("done communicating")
        display.debug("done with local.exec_command()")
        return (p.returncode, stdout, stderr)

    def _ensure_abs(self, path):
        # Resolve a relative path against self.cwd, if one was set.
        if not os.path.isabs(path) and self.cwd is not None:
            path = os.path.normpath(os.path.join(self.cwd, path))
        return path

    def put_file(self, in_path, out_path):
        ''' transfer a file from local to local '''
        super(Connection, self).put_file(in_path, out_path)
        in_path = self._ensure_abs(in_path)
        out_path = self._ensure_abs(out_path)
        display.vvv(u"PUT {0} TO {1}".format(in_path, out_path), host=self._play_context.remote_addr)
        if not os.path.exists(to_bytes(in_path, errors='surrogate_or_strict')):
            raise AnsibleFileNotFound("file or module does not exist: {0}".format(to_native(in_path)))
        try:
            shutil.copyfile(to_bytes(in_path, errors='surrogate_or_strict'), to_bytes(out_path, errors='surrogate_or_strict'))
        except shutil.Error:
            # copyfile raises shutil.Error when source and destination are
            # the same file.
            raise AnsibleError("failed to copy: {0} and {1} are the same".format(to_native(in_path), to_native(out_path)))
        except IOError as e:
            raise AnsibleError("failed to transfer file to {0}: {1}".format(to_native(out_path), to_native(e)))

    def fetch_file(self, in_path, out_path):
        ''' fetch a file from local to local -- for compatibility '''
        super(Connection, self).fetch_file(in_path, out_path)
        display.vvv(u"FETCH {0} TO {1}".format(in_path, out_path), host=self._play_context.remote_addr)
        # On the local connection a fetch is just a put in reverse.
        self.put_file(in_path, out_path)

    def close(self):
        ''' terminate the connection; nothing to do here '''
        self._connected = False
| gpl-3.0 |
alexus37/AugmentedRealityChess | pythonAnimations/pyOpenGLChess/engineDirectory/oglc-env/lib/python2.7/site-packages/OpenGL/raw/GL/AMD/occlusion_query_event.py | 9 | 1147 | '''Autogenerated by xml_generate script, do not edit!'''
from OpenGL import platform as _p, arrays
# Code generation uses this
from OpenGL.raw.GL import _types as _cs
# End users want this...
from OpenGL.raw.GL._types import *
from OpenGL.raw.GL import _errors
from OpenGL.constant import Constant as _C
import ctypes
# Canonical name of the OpenGL extension wrapped by this module.
_EXTENSION_NAME = 'GL_AMD_occlusion_query_event'

def _f( function ):
    # Bind a stub below as a GL entry point belonging to this extension,
    # with the standard GL error checker attached.
    return _p.createFunction( function,_p.PLATFORM.GL,'GL_AMD_occlusion_query_event',error_checker=_errors._error_checker)

# Enumerant constants defined by the GL_AMD_occlusion_query_event spec.
GL_OCCLUSION_QUERY_EVENT_MASK_AMD=_C('GL_OCCLUSION_QUERY_EVENT_MASK_AMD',0x874F)
GL_QUERY_ALL_EVENT_BITS_AMD=_C('GL_QUERY_ALL_EVENT_BITS_AMD',0xFFFFFFFF)
GL_QUERY_DEPTH_BOUNDS_FAIL_EVENT_BIT_AMD=_C('GL_QUERY_DEPTH_BOUNDS_FAIL_EVENT_BIT_AMD',0x00000008)
GL_QUERY_DEPTH_FAIL_EVENT_BIT_AMD=_C('GL_QUERY_DEPTH_FAIL_EVENT_BIT_AMD',0x00000002)
GL_QUERY_DEPTH_PASS_EVENT_BIT_AMD=_C('GL_QUERY_DEPTH_PASS_EVENT_BIT_AMD',0x00000001)
GL_QUERY_STENCIL_FAIL_EVENT_BIT_AMD=_C('GL_QUERY_STENCIL_FAIL_EVENT_BIT_AMD',0x00000004)

# Stub body is never executed; the decorators replace it with the real
# platform function pointer.
@_f
@_p.types(None,_cs.GLenum,_cs.GLuint,_cs.GLenum,_cs.GLuint)
def glQueryObjectParameteruiAMD(target,id,pname,param):pass
| mit |
faux123/htc-m7 | Documentation/networking/cxacru-cf.py | 14668 | 1626 | #!/usr/bin/env python
# Copyright 2009 Simon Arlott
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 2 of the License, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 59
# Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# Usage: cxacru-cf.py < cxacru-cf.bin
# Output: values string suitable for the sysfs adsl_config attribute
#
# Warning: cxacru-cf.bin with MD5 hash cdbac2689969d5ed5d4850f117702110
# contains mis-aligned values which will stop the modem from being able
# to make a connection. If the first and last two bytes are removed then
# the values become valid, but the modulation will be forced to ANSI
# T1.413 only which may not be appropriate.
#
# The original binary format is a packed list of le32 values.
import sys
import struct
# Index of the current 32-bit record; doubles as the "need a separator"
# flag for the output.
i = 0
while True:
    # The input is a packed list of little-endian 32-bit values; read one
    # record per iteration.
    buf = sys.stdin.read(4)
    if len(buf) == 0:
        # Clean EOF on a record boundary: we are done.
        break
    elif len(buf) != 4:
        # Truncated trailing record: terminate the output line and abort.
        sys.stdout.write("\n")
        sys.stderr.write("Error: read {0} not 4 bytes\n".format(len(buf)))
        sys.exit(1)
    if i > 0:
        sys.stdout.write(" ")
    # Emit "index=value" with the index in hex, the format expected by the
    # sysfs adsl_config attribute.
    sys.stdout.write("{0:x}={1}".format(i, struct.unpack("<I", buf)[0]))
    i += 1
sys.stdout.write("\n")
| gpl-2.0 |
Workday/OpenFrame | tools/grit/grit/format/resource_map_unittest.py | 26 | 10570 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''Unit tests for grit.format.resource_map'''
import os
import sys
if __name__ == '__main__':
sys.path.append(os.path.join(os.path.dirname(__file__), '../..'))
import StringIO
import unittest
from grit import grd_reader
from grit import util
from grit.format import resource_map
class FormatResourceMapUnittest(unittest.TestCase):
  """Tests for the resource_map header/source/file-map formatters."""

  def testFormatResourceMap(self):
    """Includes behind <if> conditions still appear in the generated maps."""
    grd = grd_reader.Parse(StringIO.StringIO(
      '''<?xml version="1.0" encoding="UTF-8"?>
      <grit latest_public_release="2" source_lang_id="en" current_release="3"
            base_dir=".">
        <outputs>
          <output type="rc_header" filename="the_rc_header.h" />
          <output type="resource_map_header"
                  filename="the_resource_map_header.h" />
        </outputs>
        <release seq="3">
          <structures first_id="300">
            <structure type="menu" name="IDC_KLONKMENU"
                       file="grit\\testdata\\klonk.rc" encoding="utf-16" />
          </structures>
          <includes first_id="10000">
            <include type="foo" file="abc" name="IDS_FIRSTPRESENT" />
            <if expr="False">
              <include type="foo" file="def" name="IDS_MISSING" />
            </if>
            <if expr="lang != 'es'">
              <include type="foo" file="ghi" name="IDS_LANGUAGESPECIFIC" />
            </if>
            <if expr="lang == 'es'">
              <include type="foo" file="jkl" name="IDS_LANGUAGESPECIFIC" />
            </if>
            <include type="foo" file="mno" name="IDS_THIRDPRESENT" />
          </includes>
        </release>
      </grit>'''), util.PathFromRoot('.'))
    grd.SetOutputLanguage('en')
    grd.RunGatherers()
    output = util.StripBlankLinesAndComments(''.join(
        resource_map.GetFormatter('resource_map_header')(grd, 'en', '.')))
    self.assertEqual('''\
#include <stddef.h>
#ifndef GRIT_RESOURCE_MAP_STRUCT_
#define GRIT_RESOURCE_MAP_STRUCT_
struct GritResourceMap {
  const char* const name;
  int value;
};
#endif // GRIT_RESOURCE_MAP_STRUCT_
extern const GritResourceMap kTheRcHeader[];
extern const size_t kTheRcHeaderSize;''', output)
    output = util.StripBlankLinesAndComments(''.join(
        resource_map.GetFormatter('resource_map_source')(grd, 'en', '.')))
    self.assertEqual('''\
#include "the_resource_map_header.h"
#include "base/basictypes.h"
#include "the_rc_header.h"
const GritResourceMap kTheRcHeader[] = {
  {"IDC_KLONKMENU", IDC_KLONKMENU},
  {"IDS_FIRSTPRESENT", IDS_FIRSTPRESENT},
  {"IDS_MISSING", IDS_MISSING},
  {"IDS_LANGUAGESPECIFIC", IDS_LANGUAGESPECIFIC},
  {"IDS_THIRDPRESENT", IDS_THIRDPRESENT},
};
const size_t kTheRcHeaderSize = arraysize(kTheRcHeader);''', output)
    output = util.StripBlankLinesAndComments(''.join(
        resource_map.GetFormatter('resource_file_map_source')(grd, 'en', '.')))
    self.assertEqual('''\
#include "the_resource_map_header.h"
#include "base/basictypes.h"
#include "the_rc_header.h"
const GritResourceMap kTheRcHeader[] = {
  {"grit/testdata/klonk.rc", IDC_KLONKMENU},
  {"abc", IDS_FIRSTPRESENT},
  {"def", IDS_MISSING},
  {"ghi", IDS_LANGUAGESPECIFIC},
  {"jkl", IDS_LANGUAGESPECIFIC},
  {"mno", IDS_THIRDPRESENT},
};
const size_t kTheRcHeaderSize = arraysize(kTheRcHeader);''', output)

  def testFormatResourceMapWithOutputAllEqualsFalseForStructures(self):
    """With output_all_resource_defines=false, false-<if> structures are
    dropped from the generated maps."""
    grd = grd_reader.Parse(StringIO.StringIO(
      '''<?xml version="1.0" encoding="UTF-8"?>
      <grit latest_public_release="2" source_lang_id="en" current_release="3"
            base_dir="." output_all_resource_defines="false">
        <outputs>
          <output type="rc_header" filename="the_rc_header.h" />
          <output type="resource_map_header"
                  filename="the_resource_map_header.h" />
          <output type="resource_map_source"
                  filename="the_resource_map_header.cc" />
        </outputs>
        <release seq="3">
          <structures first_id="300">
            <structure type="chrome_scaled_image" name="IDR_KLONKMENU"
                       file="foo.png" />
            <if expr="False">
              <structure type="chrome_scaled_image" name="IDR_MISSING"
                         file="bar.png" />
            </if>
          </structures>
        </release>
      </grit>'''), util.PathFromRoot('.'))
    grd.SetOutputLanguage('en')
    grd.RunGatherers()
    output = util.StripBlankLinesAndComments(''.join(
        resource_map.GetFormatter('resource_map_header')(grd, 'en', '.')))
    self.assertEqual('''\
#include <stddef.h>
#ifndef GRIT_RESOURCE_MAP_STRUCT_
#define GRIT_RESOURCE_MAP_STRUCT_
struct GritResourceMap {
  const char* const name;
  int value;
};
#endif // GRIT_RESOURCE_MAP_STRUCT_
extern const GritResourceMap kTheRcHeader[];
extern const size_t kTheRcHeaderSize;''', output)
    output = util.StripBlankLinesAndComments(''.join(
        resource_map.GetFormatter('resource_map_source')(grd, 'en', '.')))
    self.assertEqual('''\
#include "the_resource_map_header.h"
#include "base/basictypes.h"
#include "the_rc_header.h"
const GritResourceMap kTheRcHeader[] = {
  {"IDR_KLONKMENU", IDR_KLONKMENU},
};
const size_t kTheRcHeaderSize = arraysize(kTheRcHeader);''', output)
    # Formatting a second time must be stable (same output).
    output = util.StripBlankLinesAndComments(''.join(
        resource_map.GetFormatter('resource_map_source')(grd, 'en', '.')))
    self.assertEqual('''\
#include "the_resource_map_header.h"
#include "base/basictypes.h"
#include "the_rc_header.h"
const GritResourceMap kTheRcHeader[] = {
  {"IDR_KLONKMENU", IDR_KLONKMENU},
};
const size_t kTheRcHeaderSize = arraysize(kTheRcHeader);''', output)

  def testFormatResourceMapWithOutputAllEqualsFalseForIncludes(self):
    """With output_all_resource_defines=false, false-<if> includes are
    dropped from the generated maps."""
    grd = grd_reader.Parse(StringIO.StringIO(
      '''<?xml version="1.0" encoding="UTF-8"?>
      <grit latest_public_release="2" source_lang_id="en" current_release="3"
            base_dir="." output_all_resource_defines="false">
        <outputs>
          <output type="rc_header" filename="the_rc_header.h" />
          <output type="resource_map_header"
                  filename="the_resource_map_header.h" />
        </outputs>
        <release seq="3">
          <structures first_id="300">
            <structure type="menu" name="IDC_KLONKMENU"
                       file="grit\\testdata\\klonk.rc" encoding="utf-16" />
          </structures>
          <includes first_id="10000">
            <include type="foo" file="abc" name="IDS_FIRSTPRESENT" />
            <if expr="False">
              <include type="foo" file="def" name="IDS_MISSING" />
            </if>
            <include type="foo" file="mno" name="IDS_THIRDPRESENT" />
          </includes>
        </release>
      </grit>'''), util.PathFromRoot('.'))
    grd.SetOutputLanguage('en')
    grd.RunGatherers()
    output = util.StripBlankLinesAndComments(''.join(
        resource_map.GetFormatter('resource_map_header')(grd, 'en', '.')))
    self.assertEqual('''\
#include <stddef.h>
#ifndef GRIT_RESOURCE_MAP_STRUCT_
#define GRIT_RESOURCE_MAP_STRUCT_
struct GritResourceMap {
  const char* const name;
  int value;
};
#endif // GRIT_RESOURCE_MAP_STRUCT_
extern const GritResourceMap kTheRcHeader[];
extern const size_t kTheRcHeaderSize;''', output)
    output = util.StripBlankLinesAndComments(''.join(
        resource_map.GetFormatter('resource_map_source')(grd, 'en', '.')))
    self.assertEqual('''\
#include "the_resource_map_header.h"
#include "base/basictypes.h"
#include "the_rc_header.h"
const GritResourceMap kTheRcHeader[] = {
  {"IDC_KLONKMENU", IDC_KLONKMENU},
  {"IDS_FIRSTPRESENT", IDS_FIRSTPRESENT},
  {"IDS_THIRDPRESENT", IDS_THIRDPRESENT},
};
const size_t kTheRcHeaderSize = arraysize(kTheRcHeader);''', output)
    output = util.StripBlankLinesAndComments(''.join(
        resource_map.GetFormatter('resource_file_map_source')(grd, 'en', '.')))
    self.assertEqual('''\
#include "the_resource_map_header.h"
#include "base/basictypes.h"
#include "the_rc_header.h"
const GritResourceMap kTheRcHeader[] = {
  {"grit/testdata/klonk.rc", IDC_KLONKMENU},
  {"abc", IDS_FIRSTPRESENT},
  {"mno", IDS_THIRDPRESENT},
};
const size_t kTheRcHeaderSize = arraysize(kTheRcHeader);''', output)

  def testFormatStringResourceMap(self):
    """String messages behind <if> conditions map only when the condition
    holds."""
    grd = grd_reader.Parse(StringIO.StringIO(
      '''<?xml version="1.0" encoding="UTF-8"?>
      <grit latest_public_release="2" source_lang_id="en" current_release="3"
            base_dir=".">
        <outputs>
          <output type="rc_header" filename="the_rc_header.h" />
          <output type="resource_map_header" filename="the_rc_map_header.h" />
          <output type="resource_map_source" filename="the_rc_map_source.cc" />
        </outputs>
        <release seq="1" allow_pseudo="false">
          <messages fallback_to_english="true">
            <message name="IDS_PRODUCT_NAME" desc="The application name">
              Application
            </message>
            <if expr="True">
              <message name="IDS_DEFAULT_TAB_TITLE_TITLE_CASE"
                       desc="In Title Case: The default title in a tab.">
                New Tab
              </message>
            </if>
            <if expr="False">
              <message name="IDS_DEFAULT_TAB_TITLE"
                       desc="The default title in a tab.">
                New tab
              </message>
            </if>
          </messages>
        </release>
      </grit>'''), util.PathFromRoot('.'))
    grd.SetOutputLanguage('en')
    grd.RunGatherers()
    output = util.StripBlankLinesAndComments(''.join(
        resource_map.GetFormatter('resource_map_header')(grd, 'en', '.')))
    self.assertEqual('''\
#include <stddef.h>
#ifndef GRIT_RESOURCE_MAP_STRUCT_
#define GRIT_RESOURCE_MAP_STRUCT_
struct GritResourceMap {
  const char* const name;
  int value;
};
#endif // GRIT_RESOURCE_MAP_STRUCT_
extern const GritResourceMap kTheRcHeader[];
extern const size_t kTheRcHeaderSize;''', output)
    output = util.StripBlankLinesAndComments(''.join(
        resource_map.GetFormatter('resource_map_source')(grd, 'en', '.')))
    self.assertEqual('''\
#include "the_rc_map_header.h"
#include "base/basictypes.h"
#include "the_rc_header.h"
const GritResourceMap kTheRcHeader[] = {
  {"IDS_PRODUCT_NAME", IDS_PRODUCT_NAME},
  {"IDS_DEFAULT_TAB_TITLE_TITLE_CASE", IDS_DEFAULT_TAB_TITLE_TITLE_CASE},
};
const size_t kTheRcHeaderSize = arraysize(kTheRcHeader);''', output)
if __name__ == '__main__':
unittest.main()
| bsd-3-clause |
yyzybb537/libgo | third_party/boost.context/tools/build/test/implicit_dependency.py | 44 | 1345 | #!/usr/bin/python
# Copyright (C) Vladimir Prus 2006.
# Distributed under the Boost Software License, Version 1.0. (See
# accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
# Test the <implicit-dependency> is respected even if the target referred to is
# not built itself, but only referred to by <implicit-dependency>.
import BoostBuild
t = BoostBuild.Tester(use_test_config=False)
t.write("jamroot.jam", """
make a.h : : gen-header ;
explicit a.h ;
exe hello : hello.cpp : <implicit-dependency>a.h ;
import os ;
if [ os.name ] = NT
{
actions gen-header
{
echo int i; > $(<)
}
}
else
{
actions gen-header
{
echo "int i;" > $(<)
}
}
""")
t.write("hello.cpp", """
#include "a.h"
int main() { return i; }
""")
t.run_build_system()
t.expect_addition("bin/$toolset/debug/hello.exe")
t.rm("bin")
t.write("jamroot.jam", """
make dir/a.h : : gen-header ;
explicit dir/a.h ;
exe hello : hello.cpp : <implicit-dependency>dir/a.h ;
import os ;
if [ os.name ] = NT
{
actions gen-header
{
echo int i; > $(<)
}
}
else
{
actions gen-header
{
echo "int i;" > $(<)
}
}
""")
t.write("hello.cpp", """
#include "dir/a.h"
int main() { return i; }
""")
t.run_build_system()
t.expect_addition("bin/$toolset/debug/hello.exe")
t.cleanup()
| mit |
Immortalin/python-for-android | python-modules/twisted/twisted/web/_auth/digest.py | 53 | 1700 | # -*- test-case-name: twisted.web.test.test_httpauth -*-
# Copyright (c) 2009 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Implementation of RFC2617: HTTP Digest Authentication
@see: U{http://www.faqs.org/rfcs/rfc2617.html}
"""
from zope.interface import implements
from twisted.cred import credentials
from twisted.web.iweb import ICredentialFactory
class DigestCredentialFactory(object):
    """
    Wrapper for L{digest.DigestCredentialFactory} that implements the
    L{ICredentialFactory} interface.
    """
    implements(ICredentialFactory)

    # Authentication scheme name advertised in WWW-Authenticate headers.
    scheme = 'digest'

    def __init__(self, algorithm, authenticationRealm):
        """
        Create the digest credential factory that this object wraps.

        @param algorithm: the hash algorithm name (e.g. C{'md5'}) handed
            through to the wrapped factory.
        @param authenticationRealm: the HTTP authentication realm.
        """
        self.digest = credentials.DigestCredentialFactory(algorithm,
                                                          authenticationRealm)

    def getChallenge(self, request):
        """
        Generate the challenge for use in the WWW-Authenticate header

        @param request: The L{IRequest} to with access was denied and for the
            response to which this challenge is being generated.

        @return: The C{dict} that can be used to generate a WWW-Authenticate
            header.
        """
        # The client IP is folded into the opaque challenge value.
        return self.digest.getChallenge(request.getClientIP())

    def decode(self, response, request):
        """
        Create a L{twisted.cred.digest.DigestedCredentials} object from the
        given response and request.

        @see: L{ICredentialFactory.decode}
        """
        return self.digest.decode(response,
                                  request.method,
                                  request.getClientIP())
| apache-2.0 |
BeyondTheClouds/nova | nova/scheduler/filters/trusted_filter.py | 14 | 9323 | # Copyright (c) 2012 Intel, Inc.
# Copyright (c) 2011-2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Filter to add support for Trusted Computing Pools (EXPERIMENTAL).
Filter that only schedules tasks on a host if the integrity (trust)
of that host matches the trust requested in the ``extra_specs`` for the
flavor. The ``extra_specs`` will contain a key/value pair where the
key is ``trust``. The value of this pair (``trusted``/``untrusted``) must
match the integrity of that host (obtained from the Attestation
service) before the task can be scheduled on that host.
Note that the parameters to control access to the Attestation Service
are in the ``nova.conf`` file in a separate ``trust`` section. For example,
the config file will look something like:
[DEFAULT]
verbose=True
...
[trust]
server=attester.mynetwork.com
Details on the specific parameters can be found in the file
``trust_attest.py``.
Details on setting up and using an Attestation Service can be found at
the Open Attestation project at:
https://github.com/OpenAttestation/OpenAttestation
"""
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_utils import timeutils
import requests
import nova.conf
from nova import context
from nova.i18n import _LW
from nova import objects
from nova.scheduler import filters
LOG = logging.getLogger(__name__)
CONF = nova.conf.CONF
class AttestationService(object):
    """Client for the OpenAttestation (OAT) attestation server.

    Wraps the HTTPS calls used to obtain integrity (trust) reports for
    compute hosts, using the endpoints and credentials configured in the
    ``[trusted_computing]`` section.
    """

    def __init__(self):
        self.api_url = CONF.trusted_computing.attestation_api_url
        self.host = CONF.trusted_computing.attestation_server
        self.port = CONF.trusted_computing.attestation_port
        self.auth_blob = CONF.trusted_computing.attestation_auth_blob
        self.key_file = None
        self.cert_file = None
        self.ca_file = CONF.trusted_computing.attestation_server_ca_file
        self.request_count = 100
        # Server-certificate verification argument for requests:
        #   - attestation_insecure_ssl set -> False (verification disabled)
        #   - CA file configured           -> verify against that CA bundle
        #   - otherwise                    -> True (default trust store)
        # NOTE: the previous expression
        #     not insecure and self.ca_file or True
        # was always truthy because 'and' binds tighter than 'or', which
        # silently ignored attestation_insecure_ssl.
        if CONF.trusted_computing.attestation_insecure_ssl:
            self.verify = False
        else:
            self.verify = self.ca_file or True
        self.cert = (self.cert_file, self.key_file)

    def _do_request(self, method, action_url, body, headers):
        """Issue one HTTPS request against the attestation server.

        :param method: HTTP method name, e.g. ``"POST"``.
        :param action_url: action path appended to the configured API URL.
        :param body: serialized request body.
        :param headers: dict of HTTP headers to send.
        :returns: tuple ``(status, data)``; *data* is the decoded JSON
            payload when possible, the raw response text otherwise, or
            None for non-success statuses.  On a network error the first
            element is the ``IOError`` class (kept as-is for backward
            compatibility with existing callers).
        """
        action_url = "https://%s:%s%s/%s" % (self.host, self.port,
                                             self.api_url, action_url)
        try:
            res = requests.request(method, action_url, data=body,
                                   headers=headers, cert=self.cert,
                                   verify=self.verify)
            status_code = res.status_code
            if status_code in (requests.codes.OK,
                               requests.codes.CREATED,
                               requests.codes.ACCEPTED,
                               requests.codes.NO_CONTENT):
                try:
                    return requests.codes.OK, jsonutils.loads(res.text)
                except (TypeError, ValueError):
                    # Payload was not JSON; hand back the raw text.
                    return requests.codes.OK, res.text
            return status_code, None
        except requests.exceptions.RequestException:
            return IOError, None

    def _request(self, cmd, subcmd, hosts):
        """POST the JSON-encoded host list to the given attestation action.

        :param cmd: HTTP method to use.
        :param subcmd: attestation action name, e.g. ``"PollHosts"``.
        :param hosts: list of hostnames to attest.
        :returns: tuple ``(status, data)`` from :meth:`_do_request`.
        """
        body = {}
        body['count'] = len(hosts)
        body['hosts'] = hosts
        cooked = jsonutils.dumps(body)
        headers = {}
        headers['content-type'] = 'application/json'
        headers['Accept'] = 'application/json'
        if self.auth_blob:
            headers['x-auth-blob'] = self.auth_blob
        status, res = self._do_request(cmd, subcmd, cooked, headers)
        return status, res

    def do_attestation(self, hosts):
        """Attests compute nodes through OAT service.

        :param hosts: hosts list to be attested
        :returns: dictionary for trust level and validate time, or None
            when the service did not return any data
        """
        result = None
        status, data = self._request("POST", "PollHosts", hosts)
        if data is not None:
            result = data.get('hosts')
        return result
class ComputeAttestationCache(object):
    """Cache of per-compute-node trust levels.

    Trust levels are kept for a limited time; once an entry expires the
    whole cache is refreshed with a single poll of the OAT service.

    The OAT service may cache results as well; its cache validity period
    should be configured shorter than this filter's.
    """

    def __init__(self):
        self.attestservice = AttestationService()
        self.compute_nodes = {}
        admin = context.get_admin_context()

        # Seed the cache with every known compute node up front so the
        # first scheduling pass attests all hosts in one OAT call instead
        # of polling the service host by host.
        for compute in objects.ComputeNodeList.get_all(admin):
            self._init_cache_entry(compute.hypervisor_hostname)

    def _cache_valid(self, host):
        """Return True when ``host`` has an entry that has not yet expired."""
        entry = self.compute_nodes.get(host)
        if entry is None:
            return False
        timeout = CONF.trusted_computing.attestation_auth_timeout
        return not timeutils.is_older_than(entry['vtime'], timeout)

    def _init_cache_entry(self, host):
        # An epoch vtime guarantees the entry is treated as stale at once.
        epoch = timeutils.normalize_time(
            timeutils.parse_isotime("1970-01-01T00:00:00Z"))
        self.compute_nodes[host] = {'trust_lvl': 'unknown', 'vtime': epoch}

    def _invalidate_caches(self):
        # Keys are neither added nor removed here, so iterating the dict
        # while replacing its values is safe.
        for host in self.compute_nodes:
            self._init_cache_entry(host)

    def _update_cache_entry(self, state):
        host = state['host_name']
        entry = {'trust_lvl': state['trust_lvl']}
        try:
            # Normalize as naive object to interoperate with utcnow().
            entry['vtime'] = timeutils.normalize_time(
                timeutils.parse_isotime(state['vtime']))
        except ValueError:
            try:
                # Mt. Wilson does not necessarily return an ISO8601 formatted
                # `vtime`, so we should try to parse it as a string formatted
                # datetime.
                entry['vtime'] = timeutils.normalize_time(
                    timeutils.parse_strtime(state['vtime'], fmt="%c"))
            except ValueError:
                # Mark the system as un-trusted if get invalid vtime.
                entry['trust_lvl'] = 'unknown'
                entry['vtime'] = timeutils.utcnow()
        self.compute_nodes[host] = entry

    def _update_cache(self):
        self._invalidate_caches()
        states = self.attestservice.do_attestation(
            list(self.compute_nodes.keys()))
        if states is None:
            return
        for state in states:
            self._update_cache_entry(state)

    def get_host_attestation(self, host):
        """Check host's trust level, refreshing the whole cache when stale."""
        if host not in self.compute_nodes:
            self._init_cache_entry(host)
        if not self._cache_valid(host):
            self._update_cache()
        return self.compute_nodes.get(host).get('trust_lvl')
class ComputeAttestation(object):
    """Facade answering whether a host is at a requested trust level."""

    def __init__(self):
        self.caches = ComputeAttestationCache()

    def is_trusted(self, host, trust):
        # Compare the cached (possibly refreshed) level with the requested one.
        return self.caches.get_host_attestation(host) == trust
class TrustedFilter(filters.BaseHostFilter):
    """Trusted filter to support Trusted Compute Pools."""

    def __init__(self):
        self.compute_attestation = ComputeAttestation()
        LOG.warning(_LW('The TrustedFilter is considered experimental '
                        'by the OpenStack project because it receives much '
                        'less testing than the rest of Nova. This may change '
                        'in the future, but current deployers should be aware '
                        'that the use of it in production right now may be '
                        'risky.'))

    # The hosts the instances are running on doesn't change within a request
    run_filter_once_per_request = True

    def host_passes(self, host_state, spec_obj):
        flavor = spec_obj.flavor
        # Flavors without extra_specs carry no trust requirement.
        if 'extra_specs' in flavor:
            extra = flavor.extra_specs
        else:
            extra = {}
        trust = extra.get('trust:trusted_host')
        if not trust:
            return True
        return self.compute_attestation.is_trusted(host_state.nodename, trust)
| apache-2.0 |
alhashash/yowsup | yowsup/layers/protocol_profiles/layer.py | 31 | 2304 | from yowsup.layers import YowProtocolLayer
from .protocolentities import *
from yowsup.layers.protocol_iq.protocolentities import ErrorIqProtocolEntity, ResultIqProtocolEntity
class YowProfilesProtocolLayer(YowProtocolLayer):
    """Protocol layer handling profile-related iq stanzas.

    Routes profile-picture get/set/delete requests and status-set requests
    to the server and forwards the (result or error) responses upward.
    """

    def __init__(self):
        handleMap = {
            "iq": (self.recvIq, self.sendIq)
        }
        super(YowProfilesProtocolLayer, self).__init__(handleMap)

    def __str__(self):
        return "Profiles Layer"

    def sendIq(self, entity):
        """Dispatch an outgoing iq entity to its success/error callback pair.

        Entities with an unrecognized xmlns or type are silently dropped
        (unchanged from the original behavior).
        """
        if entity.getXmlns() == "w:profile:picture":
            if entity.getType() == "get":
                self._sendIq(entity, self.onGetPictureResult, self.onGetPictureError)
            elif entity.getType() == "set":
                self._sendIq(entity, self.onSetPictureResult, self.onSetPictureError)
            elif entity.getType() == "delete":
                self._sendIq(entity, self.onDeletePictureResult, self.onDeletePictureError)
        elif entity.getXmlns() == "status":
            self._sendIq(entity, self.onSetStatusResult, self.onSetStatusError)

    def recvIq(self, node):
        # Incoming iq nodes are not handled by this layer.
        pass

    def onSetStatusResult(self, resultNode, originalIqRequestEntity):
        # Parameter renamed from `originIqRequestEntity` for consistency
        # with every other callback in this class (it is unused locally).
        self.toUpper(ResultIqProtocolEntity.fromProtocolTreeNode(resultNode))

    def onSetStatusError(self, errorNode, originalIqRequestEntity):
        self.toUpper(ErrorIqProtocolEntity.fromProtocolTreeNode(errorNode))

    def onGetPictureResult(self, resultNode, originalIqRequestEntity):
        self.toUpper(ResultGetPictureIqProtocolEntity.fromProtocolTreeNode(resultNode))

    def onGetPictureError(self, errorNode, originalIqRequestEntity):
        self.toUpper(ErrorIqProtocolEntity.fromProtocolTreeNode(errorNode))

    def onSetPictureResult(self, resultNode, originalIqRequestEntity):
        # NOTE(review): parses the *set*-picture response with the *get*
        # picture result entity -- confirm no dedicated set-picture entity
        # is intended here.
        self.toUpper(ResultGetPictureIqProtocolEntity.fromProtocolTreeNode(resultNode))

    def onSetPictureError(self, errorNode, originalIqRequestEntity):
        self.toUpper(ErrorIqProtocolEntity.fromProtocolTreeNode(errorNode))

    def onDeletePictureResult(self, resultNode, originalIqRequestEntity):
        self.toUpper(ResultIqProtocolEntity.fromProtocolTreeNode(resultNode))

    def onDeletePictureError(self, errorNode, originalIqRequestEntity):
        self.toUpper(ErrorIqProtocolEntity.fromProtocolTreeNode(errorNode))
| gpl-3.0 |
Jovy23/N900TUVUDNF4_KK_Kernel | tools/perf/scripts/python/netdev-times.py | 11271 | 15048 | # Display a process of packets and processed time.
# It helps us to investigate networking or network device.
#
# options
# tx: show only tx chart
# rx: show only rx chart
# dev=: show only thing related to specified device
# debug: work with debug mode. It shows buffer status.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
# Module-level state shared by the perf callbacks below and consumed by
# trace_end() once all events have been collected.
all_event_list = []; # insert all tracepoint event related with this script
irq_dic = {}; # key is cpu and value is a list which stacks irqs
              # which raise NET_RX softirq
net_rx_dic = {}; # key is cpu and value include time of NET_RX softirq-entry
                 # and a list which stacks receive
receive_hunk_list = []; # a list which include a sequence of receive events
rx_skb_list = []; # received packet list for matching
                  # skb_copy_datagram_iovec
buffer_budget = 65536; # the budget of rx_skb_list, tx_queue_list and
                       # tx_xmit_list
of_count_rx_skb_list = 0; # overflow count
tx_queue_list = []; # list of packets which pass through dev_queue_xmit
of_count_tx_queue_list = 0; # overflow count
tx_xmit_list = []; # list of packets which pass through dev_hard_start_xmit
of_count_tx_xmit_list = 0; # overflow count
tx_free_list = []; # list of packets which is freed

# options (set once by trace_begin() from sys.argv)
show_tx = 0;
show_rx = 0;
dev = 0; # store a name of device specified by option "dev="
debug = 0;

# indices of event_info tuple
EINFO_IDX_NAME=   0
EINFO_IDX_CONTEXT=1
EINFO_IDX_CPU=    2
EINFO_IDX_TIME=   3
EINFO_IDX_PID=    4
EINFO_IDX_COMM=   5
# Calculate a time interval(msec) from src(nsec) to dst(nsec)
def diff_msec(src, dst):
    """Return the interval from *src* to *dst* in msec (inputs are nsec)."""
    delta_nsec = dst - src
    # Keep the exact `/ 1000000.0` division so float results are unchanged.
    return delta_nsec / 1000000.0
# Display a process of transmitting a packet
def print_transmit(hunk):
    """Print one summary line for a transmitted packet.

    Shows device, length, queue timestamp, time spent between
    dev_queue_xmit and dev_hard_start_xmit (qdisc), and time until the
    skb was freed.  (Python 2 print statement - this is a perf script.)
    """
    # Honor the "dev=" command line filter: skip other devices' packets.
    if dev != 0 and hunk['dev'].find(dev) < 0:
        return
    print "%7s %5d %6d.%06dsec %12.3fmsec %12.3fmsec" % \
        (hunk['dev'], hunk['len'],
        nsecs_secs(hunk['queue_t']),
        nsecs_nsecs(hunk['queue_t'])/1000,
        diff_msec(hunk['queue_t'], hunk['xmit_t']),
        diff_msec(hunk['xmit_t'], hunk['free_t']))
# Format for displaying rx packet processing
PF_IRQ_ENTRY= " irq_entry(+%.3fmsec irq=%d:%s)"
PF_SOFT_ENTRY=" softirq_entry(+%.3fmsec)"
PF_NAPI_POLL= " napi_poll_exit(+%.3fmsec %s)"
PF_JOINT= " |"
PF_WJOINT= " | |"
PF_NET_RECV= " |---netif_receive_skb(+%.3fmsec skb=%x len=%d)"
PF_NET_RX= " |---netif_rx(+%.3fmsec skb=%x)"
PF_CPY_DGRAM= " | skb_copy_datagram_iovec(+%.3fmsec %d:%s)"
PF_KFREE_SKB= " | kfree_skb(+%.3fmsec location=%x)"
PF_CONS_SKB= " | consume_skb(+%.3fmsec)"
# Display a process of received packets and interrputs associated with
# a NET_RX softirq
def print_receive(hunk):
    """Render one receive "hunk": the IRQs, NET_RX softirq and per-skb
    events that belong to a single NET_RX softirq execution.

    All timestamps are printed relative to the first IRQ entry (base_t).
    """
    show_hunk = 0
    irq_list = hunk['irq_list']
    cpu = irq_list[0]['cpu']
    base_t = irq_list[0]['irq_ent_t']
    # check if this hunk should be showed (only when one of its IRQs
    # matches the "dev=" filter, or when no filter is active)
    if dev != 0:
        for i in range(len(irq_list)):
            if irq_list[i]['name'].find(dev) >= 0:
                show_hunk = 1
                break
    else:
        show_hunk = 1
    if show_hunk == 0:
        return
    print "%d.%06dsec cpu=%d" % \
        (nsecs_secs(base_t), nsecs_nsecs(base_t)/1000, cpu)
    # One section per hardware IRQ that raised the softirq.
    for i in range(len(irq_list)):
        print PF_IRQ_ENTRY % \
            (diff_msec(base_t, irq_list[i]['irq_ent_t']),
            irq_list[i]['irq'], irq_list[i]['name'])
        print PF_JOINT
        irq_event_list = irq_list[i]['event_list']
        for j in range(len(irq_event_list)):
            irq_event = irq_event_list[j]
            if irq_event['event'] == 'netif_rx':
                print PF_NET_RX % \
                    (diff_msec(base_t, irq_event['time']),
                    irq_event['skbaddr'])
                print PF_JOINT
    print PF_SOFT_ENTRY % \
        diff_msec(base_t, hunk['sirq_ent_t'])
    print PF_JOINT
    # Then the events recorded while the NET_RX softirq ran.
    event_list = hunk['event_list']
    for i in range(len(event_list)):
        event = event_list[i]
        if event['event_name'] == 'napi_poll':
            print PF_NAPI_POLL % \
                (diff_msec(base_t, event['event_t']), event['dev'])
            if i == len(event_list) - 1:
                print ""
            else:
                print PF_JOINT
        else:
            print PF_NET_RECV % \
                (diff_msec(base_t, event['event_t']), event['skbaddr'],
                event['len'])
            # 'comm' is set when the skb was copied to user space,
            # 'handle' when it was freed (kfree_skb/consume_skb) instead.
            if 'comm' in event.keys():
                print PF_WJOINT
                print PF_CPY_DGRAM % \
                    (diff_msec(base_t, event['comm_t']),
                    event['pid'], event['comm'])
            elif 'handle' in event.keys():
                print PF_WJOINT
                if event['handle'] == "kfree_skb":
                    print PF_KFREE_SKB % \
                        (diff_msec(base_t,
                        event['comm_t']),
                        event['location'])
                elif event['handle'] == "consume_skb":
                    print PF_CONS_SKB % \
                        diff_msec(base_t,
                        event['comm_t'])
            print PF_JOINT
def trace_begin():
    """Parse this script's own command line options (passed after the
    script name by perf): 'tx', 'rx', 'dev=<name>', 'debug'.

    When neither 'tx' nor 'rx' is given, both charts are enabled.
    """
    global show_tx
    global show_rx
    global dev
    global debug
    for i in range(len(sys.argv)):
        if i == 0:
            continue
        arg = sys.argv[i]
        if arg == 'tx':
            show_tx = 1
        elif arg =='rx':
            show_rx = 1
        elif arg.find('dev=',0, 4) >= 0:
            # keep only the device name after the 'dev=' prefix
            dev = arg[4:]
        elif arg == 'debug':
            debug = 1
    if show_tx == 0  and show_rx == 0:
        show_tx = 1
        show_rx = 1
def trace_end():
    """Called by perf after all events were delivered: order the collected
    events by time, dispatch them to the handle_* processors, then print
    the requested rx/tx charts (and buffer stats in debug mode).
    """
    # order all events in time (Python 2 cmp-style sort)
    all_event_list.sort(lambda a,b :cmp(a[EINFO_IDX_TIME],
                                        b[EINFO_IDX_TIME]))
    # process all events: dispatch on the tracepoint name recorded in
    # the event_info tuple
    for i in range(len(all_event_list)):
        event_info = all_event_list[i]
        name = event_info[EINFO_IDX_NAME]
        if name == 'irq__softirq_exit':
            handle_irq_softirq_exit(event_info)
        elif name == 'irq__softirq_entry':
            handle_irq_softirq_entry(event_info)
        elif name == 'irq__softirq_raise':
            handle_irq_softirq_raise(event_info)
        elif name == 'irq__irq_handler_entry':
            handle_irq_handler_entry(event_info)
        elif name == 'irq__irq_handler_exit':
            handle_irq_handler_exit(event_info)
        elif name == 'napi__napi_poll':
            handle_napi_poll(event_info)
        elif name == 'net__netif_receive_skb':
            handle_netif_receive_skb(event_info)
        elif name == 'net__netif_rx':
            handle_netif_rx(event_info)
        elif name == 'skb__skb_copy_datagram_iovec':
            handle_skb_copy_datagram_iovec(event_info)
        elif name == 'net__net_dev_queue':
            handle_net_dev_queue(event_info)
        elif name == 'net__net_dev_xmit':
            handle_net_dev_xmit(event_info)
        elif name == 'skb__kfree_skb':
            handle_kfree_skb(event_info)
        elif name == 'skb__consume_skb':
            handle_consume_skb(event_info)
    # display receive hunks
    if show_rx:
        for i in range(len(receive_hunk_list)):
            print_receive(receive_hunk_list[i])
    # display transmit hunks
    if show_tx:
        print "   dev    len      Qdisc        " \
            "       netdevice             free"
        for i in range(len(tx_free_list)):
            print_transmit(tx_free_list[i])
    if debug:
        print "debug buffer status"
        print "----------------------------"
        print "xmit Qdisc:remain:%d overflow:%d" % \
            (len(tx_queue_list), of_count_tx_queue_list)
        print "xmit netdevice:remain:%d overflow:%d" % \
            (len(tx_xmit_list), of_count_tx_xmit_list)
        print "receive:remain:%d overflow:%d" % \
            (len(rx_skb_list), of_count_rx_skb_list)
# called from perf when it finds a corresponding event
# Each of the following functions is invoked by perf when the matching
# tracepoint fires.  They all do the same thing: combine (sec, nsec) into
# one nanosecond timestamp, pack the payload into an event_info tuple
# (see the EINFO_IDX_* indices) and append it to all_event_list for later
# ordering and processing in trace_end().
# NOTE: all three softirq callbacks resolve the vector symbol via the
# 'irq__softirq_entry' event name and keep only NET_RX events.

def irq__softirq_entry(name, context, cpu, sec, nsec, pid, comm, vec):
    if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
        return
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
    all_event_list.append(event_info)

def irq__softirq_exit(name, context, cpu, sec, nsec, pid, comm, vec):
    if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
        return
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
    all_event_list.append(event_info)

def irq__softirq_raise(name, context, cpu, sec, nsec, pid, comm, vec):
    if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
        return
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
    all_event_list.append(event_info)

def irq__irq_handler_entry(name, context, cpu, sec, nsec, pid, comm,
                           irq, irq_name):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  irq, irq_name)
    all_event_list.append(event_info)

def irq__irq_handler_exit(name, context, cpu, sec, nsec, pid, comm, irq, ret):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, irq, ret)
    all_event_list.append(event_info)

def napi__napi_poll(name, context, cpu, sec, nsec, pid, comm, napi, dev_name):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  napi, dev_name)
    all_event_list.append(event_info)

def net__netif_receive_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr,
                           skblen, dev_name):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, skblen, dev_name)
    all_event_list.append(event_info)

def net__netif_rx(name, context, cpu, sec, nsec, pid, comm, skbaddr,
                  skblen, dev_name):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, skblen, dev_name)
    all_event_list.append(event_info)

def net__net_dev_queue(name, context, cpu, sec, nsec, pid, comm,
                       skbaddr, skblen, dev_name):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, skblen, dev_name)
    all_event_list.append(event_info)

def net__net_dev_xmit(name, context, cpu, sec, nsec, pid, comm,
                      skbaddr, skblen, rc, dev_name):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, skblen, rc ,dev_name)
    all_event_list.append(event_info)

def skb__kfree_skb(name, context, cpu, sec, nsec, pid, comm,
                   skbaddr, protocol, location):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, protocol, location)
    all_event_list.append(event_info)

def skb__consume_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr)
    all_event_list.append(event_info)

def skb__skb_copy_datagram_iovec(name, context, cpu, sec, nsec, pid, comm,
                                 skbaddr, skblen):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, skblen)
    all_event_list.append(event_info)
def handle_irq_handler_entry(event_info):
    """Push a new in-flight IRQ record onto this CPU's stack."""
    (name, context, cpu, time, pid, comm, irq, irq_name) = event_info
    if cpu not in irq_dic.keys():
        irq_dic[cpu] = []
    irq_record = {'irq':irq, 'name':irq_name, 'cpu':cpu, 'irq_ent_t':time}
    irq_dic[cpu].append(irq_record)

def handle_irq_handler_exit(event_info):
    """Close the topmost IRQ record; keep it only if it raised NET_RX."""
    (name, context, cpu, time, pid, comm, irq, ret) = event_info
    if cpu not in irq_dic.keys():
        return
    irq_record = irq_dic[cpu].pop()
    if irq != irq_record['irq']:
        return
    irq_record.update({'irq_ext_t':time})
    # if an irq doesn't include NET_RX softirq, drop.
    if 'event_list' in irq_record.keys():
        irq_dic[cpu].append(irq_record)

def handle_irq_softirq_raise(event_info):
    """Record inside the current IRQ that it raised the NET_RX softirq."""
    (name, context, cpu, time, pid, comm, vec) = event_info
    if cpu not in irq_dic.keys() \
       or len(irq_dic[cpu]) == 0:
        return
    irq_record = irq_dic[cpu].pop()
    if 'event_list' in irq_record.keys():
        irq_event_list = irq_record['event_list']
    else:
        irq_event_list = []
    irq_event_list.append({'time':time, 'event':'sirq_raise'})
    irq_record.update({'event_list':irq_event_list})
    irq_dic[cpu].append(irq_record)

def handle_irq_softirq_entry(event_info):
    """Open a fresh NET_RX softirq context for this CPU."""
    (name, context, cpu, time, pid, comm, vec) = event_info
    net_rx_dic[cpu] = {'sirq_ent_t':time, 'event_list':[]}

def handle_irq_softirq_exit(event_info):
    """Close the NET_RX softirq context and merge it with its IRQs into
    one receive hunk (only when both halves were recorded)."""
    (name, context, cpu, time, pid, comm, vec) = event_info
    irq_list = []
    event_list = 0
    if cpu in irq_dic.keys():
        irq_list = irq_dic[cpu]
        del irq_dic[cpu]
    if cpu in net_rx_dic.keys():
        sirq_ent_t = net_rx_dic[cpu]['sirq_ent_t']
        event_list = net_rx_dic[cpu]['event_list']
        del net_rx_dic[cpu]
    if irq_list == [] or event_list == 0:
        return
    rec_data = {'sirq_ent_t':sirq_ent_t, 'sirq_ext_t':time,
                'irq_list':irq_list, 'event_list':event_list}
    # merge information realted to a NET_RX softirq
    receive_hunk_list.append(rec_data)

def handle_napi_poll(event_info):
    """Attach a napi poll completion to the open NET_RX context."""
    (name, context, cpu, time, pid, comm, napi, dev_name) = event_info
    if cpu in net_rx_dic.keys():
        event_list = net_rx_dic[cpu]['event_list']
        rec_data = {'event_name':'napi_poll',
                    'dev':dev_name, 'event_t':time}
        event_list.append(rec_data)
def handle_netif_rx(event_info):
    """Attach a netif_rx event to the IRQ currently in flight on this CPU."""
    (name, context, cpu, time, pid, comm,
        skbaddr, skblen, dev_name) = event_info
    if cpu not in irq_dic.keys() \
       or len(irq_dic[cpu]) == 0:
        return
    irq_record = irq_dic[cpu].pop()
    if 'event_list' in irq_record.keys():
        irq_event_list = irq_record['event_list']
    else:
        irq_event_list = []
    irq_event_list.append({'time':time, 'event':'netif_rx',
        'skbaddr':skbaddr, 'skblen':skblen, 'dev_name':dev_name})
    irq_record.update({'event_list':irq_event_list})
    irq_dic[cpu].append(irq_record)

def handle_netif_receive_skb(event_info):
    """Record a received skb both in the NET_RX context and in the
    bounded rx_skb_list used to match later copy/free events."""
    global of_count_rx_skb_list

    (name, context, cpu, time, pid, comm,
        skbaddr, skblen, dev_name) = event_info
    if cpu in net_rx_dic.keys():
        rec_data = {'event_name':'netif_receive_skb',
                    'event_t':time, 'skbaddr':skbaddr, 'len':skblen}
        event_list = net_rx_dic[cpu]['event_list']
        event_list.append(rec_data)
        rx_skb_list.insert(0, rec_data)
        # evict the oldest entry when exceeding buffer_budget
        if len(rx_skb_list) > buffer_budget:
            rx_skb_list.pop()
            of_count_rx_skb_list += 1

def handle_net_dev_queue(event_info):
    """Start tracking a tx packet at dev_queue_xmit (qdisc enqueue)."""
    global of_count_tx_queue_list
    (name, context, cpu, time, pid, comm,
        skbaddr, skblen, dev_name) = event_info
    skb = {'dev':dev_name, 'skbaddr':skbaddr, 'len':skblen, 'queue_t':time}
    tx_queue_list.insert(0, skb)
    if len(tx_queue_list) > buffer_budget:
        tx_queue_list.pop()
        of_count_tx_queue_list += 1

def handle_net_dev_xmit(event_info):
    """Move a tx packet from the qdisc list to the xmit list on success."""
    global of_count_tx_xmit_list
    (name, context, cpu, time, pid, comm,
        skbaddr, skblen, rc, dev_name) = event_info
    if rc == 0: # NETDEV_TX_OK
        for i in range(len(tx_queue_list)):
            skb = tx_queue_list[i]
            if skb['skbaddr'] == skbaddr:
                skb['xmit_t'] = time
                tx_xmit_list.insert(0, skb)
                del tx_queue_list[i]
                if len(tx_xmit_list) > buffer_budget:
                    tx_xmit_list.pop()
                    of_count_tx_xmit_list += 1
                return

def handle_kfree_skb(event_info):
    """An skb was dropped: either complete a tx record (free time) or
    annotate/remove the matching rx record."""
    (name, context, cpu, time, pid, comm,
        skbaddr, protocol, location) = event_info
    # dropped before hitting the device: forget it
    for i in range(len(tx_queue_list)):
        skb = tx_queue_list[i]
        if skb['skbaddr'] == skbaddr:
            del tx_queue_list[i]
            return
    for i in range(len(tx_xmit_list)):
        skb = tx_xmit_list[i]
        if skb['skbaddr'] == skbaddr:
            skb['free_t'] = time
            tx_free_list.append(skb)
            del tx_xmit_list[i]
            return
    for i in range(len(rx_skb_list)):
        rec_data = rx_skb_list[i]
        if rec_data['skbaddr'] == skbaddr:
            rec_data.update({'handle':"kfree_skb",
                             'comm':comm, 'pid':pid, 'comm_t':time})
            del rx_skb_list[i]
            return

def handle_consume_skb(event_info):
    """A tx skb was consumed normally: record its free time."""
    (name, context, cpu, time, pid, comm, skbaddr) = event_info
    for i in range(len(tx_xmit_list)):
        skb = tx_xmit_list[i]
        if skb['skbaddr'] == skbaddr:
            skb['free_t'] = time
            tx_free_list.append(skb)
            del tx_xmit_list[i]
            return

def handle_skb_copy_datagram_iovec(event_info):
    """An rx skb reached user space: record which process consumed it."""
    (name, context, cpu, time, pid, comm, skbaddr, skblen) = event_info
    for i in range(len(rx_skb_list)):
        rec_data = rx_skb_list[i]
        if skbaddr == rec_data['skbaddr']:
            rec_data.update({'handle':"skb_copy_datagram_iovec",
                             'comm':comm, 'pid':pid, 'comm_t':time})
            del rx_skb_list[i]
            return
| gpl-2.0 |
qma/pants | src/python/pants/backend/jvm/tasks/resources_task.py | 1 | 3335 | # coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from abc import abstractmethod
from pants.backend.core.tasks.task import Task
from pants.option.custom_types import list_option
class ResourcesTask(Task):
  """A base class for tasks that process or create resource files.

  Each resources target (or resources-generating target) is assumed to be
  independent of the others, so targets can be processed in isolation and
  in any order.
  """

  @classmethod
  def product_types(cls):
    return ['runtime_classpath']

  @classmethod
  def register_options(cls, register):
    super(ResourcesTask, cls).register_options(register)
    register('--confs', advanced=True, type=list_option, default=['default'],
             help='Prepare resources for these Ivy confs.')

  @classmethod
  def prepare(cls, options, round_manager):
    round_manager.require_data('compile_classpath')

  @property
  def cache_target_dirs(self):
    return True

  def execute(self):
    # Returned (rather than merely tracked) purely for the benefit of tests.
    # TODO: Rewrite those tests. execute() is not supposed to return anything.
    processed = []

    compile_classpath = self.context.products.get_data('compile_classpath')
    runtime_classpath = self.context.products.get_data('runtime_classpath', compile_classpath.copy)

    relevant_targets = self.find_all_relevant_resources_targets()
    if not relevant_targets:
      return processed

    with self.invalidated(targets=relevant_targets,
                          fingerprint_strategy=self.create_invalidation_strategy(),
                          invalidate_dependents=False,
                          topological_order=False) as invalidation:
      for vt in invalidation.all_vts:
        # Expose the target's chroot under every requested conf.
        for conf in self.get_options().confs:
          runtime_classpath.add_for_target(vt.target, [(conf, vt.results_dir)])
        # Only (re)generate chroot contents for targets that changed.
        if not vt.valid:
          self.prepare_resources(vt.target, vt.results_dir)
          processed.append(vt.target)
          vt.update()
    return processed

  @abstractmethod
  def find_all_relevant_resources_targets(self):
    """Returns an iterable over all the relevant resources targets in the context."""

  def create_invalidation_strategy(self):
    """Creates a custom fingerprint strategy for determining invalid resources targets.

    :returns: A custom fingerprint strategy to use for determining invalid targets, or `None` to
              use the standard target payload.
    :rtype: :class:`pants.base.fingerprint_strategy.FingerprintStrategy`
    """
    return None

  @abstractmethod
  def prepare_resources(self, target, chroot):
    """Prepares the resources associated with `target` in the given `chroot`.

    :param target: The target to prepare resource files for.
    :type target: :class:`pants.build_graph.target.Target`
    :param string chroot: An existing, clean chroot dir to generate `target`'s resources to.
    """
| apache-2.0 |
shingonoide/odoo | addons/l10n_be_intrastat/wizard/xml_decl.py | 32 | 17798 | # -*- encoding: utf-8 -*-
##############################################################################
#
# Odoo, Open Source Business Applications
# Copyright (C) 2014-2015 Odoo S.A. <http://www.odoo.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import base64
import xml.etree.ElementTree as ET
from collections import namedtuple
from datetime import datetime
from openerp import exceptions, SUPERUSER_ID, tools
from openerp.osv import fields, osv
from openerp.tools.translate import _
INTRASTAT_XMLNS = 'http://www.onegate.eu/2010-01-01'
class xml_decl(osv.TransientModel):
"""
Intrastat XML Declaration
"""
_name = "l10n_be_intrastat_xml.xml_decl"
_description = 'Intrastat XML Declaration'
def _get_tax_code(self, cr, uid, context=None):
obj_tax_code = self.pool.get('account.tax.code')
obj_user = self.pool.get('res.users')
company_id = obj_user.browse(cr, uid, uid, context=context).company_id.id
tax_code_ids = obj_tax_code.search(cr, uid, [('company_id', '=', company_id),
('parent_id', '=', False)],
context=context)
return tax_code_ids and tax_code_ids[0] or False
def _get_def_monthyear(self, cr, uid, context=None):
td = datetime.strptime(fields.date.context_today(self, cr, uid, context=context),
tools.DEFAULT_SERVER_DATE_FORMAT).date()
return td.strftime('%Y'), td.strftime('%m')
def _get_def_month(self, cr, uid, context=None):
return self._get_def_monthyear(cr, uid, context=context)[1]
def _get_def_year(self, cr, uid, context=None):
return self._get_def_monthyear(cr, uid, context=context)[0]
_columns = {
'name': fields.char('File Name'),
'month': fields.selection([('01','January'), ('02','February'), ('03','March'),
('04','April'), ('05','May'), ('06','June'), ('07','July'),
('08','August'), ('09','September'), ('10','October'),
('11','November'), ('12','December')], 'Month', required=True),
'year': fields.char('Year', size=4, required=True),
'tax_code_id': fields.many2one('account.tax.code', 'Company Tax Chart',
domain=[('parent_id', '=', False)], required=True),
'arrivals': fields.selection([('be-exempt', 'Exempt'),
('be-standard', 'Standard'),
('be-extended', 'Extended')],
'Arrivals', required=True),
'dispatches': fields.selection([('be-exempt', 'Exempt'),
('be-standard', 'Standard'),
('be-extended', 'Extended')],
'Dispatches', required=True),
'file_save': fields.binary('Intrastat Report File', readonly=True),
'state': fields.selection([('draft', 'Draft'), ('download', 'Download')], string="State"),
}
_defaults = {
'arrivals': 'be-standard',
'dispatches': 'be-standard',
'name': 'intrastat.xml',
'tax_code_id': _get_tax_code,
'month': _get_def_month,
'year': _get_def_year,
'state': 'draft',
}
def _company_warning(self, cr, uid, translated_msg, context=None):
""" Raise a error with custom message, asking user to configure company settings """
xmlid_mod = self.pool['ir.model.data']
action_id = xmlid_mod.xmlid_to_res_id(cr, uid, 'base.action_res_company_form')
raise exceptions.RedirectWarning(
translated_msg, action_id, _('Go to company configuration screen'))
    def create_xml(self, cr, uid, ids, context=None):
        """Creates xml that is to be exported and sent to estate for partner vat intra.

        Validates the company configuration (country, registry number) and
        the selected year, builds the OneGate DeclarationReport document,
        stores it base64-encoded on the wizard and switches it to the
        'download' state.

        :return: Value for next action.
        :rtype: dict
        """
        decl_datas = self.browse(cr, uid, ids[0])
        company = decl_datas.tax_code_id.company_id
        # Company country is required to exclude domestic transactions.
        if not (company.partner_id and company.partner_id.country_id and
                company.partner_id.country_id.id):
            self._company_warning(
                cr, uid,
                _('The country of your company is not set, '
                  'please make sure to configure it first.'),
                context=context)
        # The KBO registry number identifies the declarer towards the NBB.
        kbo = company.company_registry
        if not kbo:
            self._company_warning(
                cr, uid,
                _('The registry number of your company is not set, '
                  'please make sure to configure it first.'),
                context=context)
        if len(decl_datas.year) != 4:
            raise exceptions.Warning(_('Year must be 4 digits number (YYYY)'))

        #Create root declaration
        decl = ET.Element('DeclarationReport')
        decl.set('xmlns', INTRASTAT_XMLNS)

        #Add Administration elements
        admin = ET.SubElement(decl, 'Administration')
        fromtag = ET.SubElement(admin, 'From')
        fromtag.text = kbo
        fromtag.set('declarerType', 'KBO')
        ET.SubElement(admin, 'To').text = "NBB"
        ET.SubElement(admin, 'Domain').text = "SXX"
        # One <Report> per enabled flow; 'exempt' flows are simply omitted.
        # _get_lines runs as SUPERUSER_ID so record rules don't hide moves.
        if decl_datas.arrivals == 'be-standard':
            decl.append(self._get_lines(cr, SUPERUSER_ID, ids, decl_datas, company,
                                        dispatchmode=False, extendedmode=False, context=context))
        elif decl_datas.arrivals == 'be-extended':
            decl.append(self._get_lines(cr, SUPERUSER_ID, ids, decl_datas, company,
                                        dispatchmode=False, extendedmode=True, context=context))
        if decl_datas.dispatches == 'be-standard':
            decl.append(self._get_lines(cr, SUPERUSER_ID, ids, decl_datas, company,
                                        dispatchmode=True, extendedmode=False, context=context))
        elif decl_datas.dispatches == 'be-extended':
            decl.append(self._get_lines(cr, SUPERUSER_ID, ids, decl_datas, company,
                                        dispatchmode=True, extendedmode=True, context=context))

        #Get xml string with declaration
        data_file = ET.tostring(decl, encoding='UTF-8', method='xml')

        #change state of the wizard
        self.write(cr, uid, ids,
                   {'name': 'intrastat_%s%s.xml' % (decl_datas.year, decl_datas.month),
                    'file_save': base64.encodestring(data_file),
                    'state': 'download'},
                   context=context)
        # Re-open the wizard so the user can download the generated file.
        return {
            'name': _('Save'),
            'context': context,
            'view_type': 'form',
            'view_mode': 'form',
            'res_model': 'l10n_be_intrastat_xml.xml_decl',
            'type': 'ir.actions.act_window',
            'target': 'new',
            'res_id': ids[0],
        }
def _get_lines(self, cr, uid, ids, decl_datas, company, dispatchmode=False,
extendedmode=False, context=None):
intrastatcode_mod = self.pool['report.intrastat.code']
invoiceline_mod = self.pool['account.invoice.line']
product_mod = self.pool['product.product']
region_mod = self.pool['l10n_be_intrastat.region']
warehouse_mod = self.pool['stock.warehouse']
if dispatchmode:
mode1 = 'out_invoice'
mode2 = 'in_refund'
declcode = "29"
else:
mode1 = 'in_invoice'
mode2 = 'out_refund'
declcode = "19"
decl = ET.Element('Report')
if not extendedmode:
decl.set('code', 'EX%sS' % declcode)
else:
decl.set('code', 'EX%sE' % declcode)
decl.set('date', '%s-%s' % (decl_datas.year, decl_datas.month))
datas = ET.SubElement(decl, 'Data')
if not extendedmode:
datas.set('form', 'EXF%sS' % declcode)
else:
datas.set('form', 'EXF%sE' % declcode)
datas.set('close', 'true')
intrastatkey = namedtuple("intrastatkey",
['EXTRF', 'EXCNT', 'EXTTA', 'EXREG',
'EXGO', 'EXTPC', 'EXDELTRM'])
entries = {}
sqlreq = """
select
inv_line.id
from
account_invoice_line inv_line
join account_invoice inv on inv_line.invoice_id=inv.id
left join res_country on res_country.id = inv.intrastat_country_id
left join res_partner on res_partner.id = inv.partner_id
left join res_country countrypartner on countrypartner.id = res_partner.country_id
join product_product on inv_line.product_id=product_product.id
join product_template on product_product.product_tmpl_id=product_template.id
left join account_period on account_period.id=inv.period_id
where
inv.state in ('open','paid')
and inv.company_id=%s
and not product_template.type='service'
and (res_country.intrastat=true or (inv.intrastat_country_id is null
and countrypartner.intrastat=true))
and ((res_country.code is not null and not res_country.code=%s)
or (res_country.code is null and countrypartner.code is not null
and not countrypartner.code=%s))
and inv.type in (%s, %s)
and to_char(account_period.date_start, 'YYYY')=%s
and to_char(account_period.date_start, 'MM')=%s
"""
cr.execute(sqlreq, (company.id, company.partner_id.country_id.code,
company.partner_id.country_id.code, mode1, mode2,
decl_datas.year, decl_datas.month))
lines = cr.fetchall()
invoicelines_ids = [rec[0] for rec in lines]
invoicelines = invoiceline_mod.browse(cr, uid, invoicelines_ids, context=context)
for inv_line in invoicelines:
#Check type of transaction
if inv_line.invoice_id.intrastat_transaction_id:
extta = inv_line.invoice_id.intrastat_transaction_id.code
else:
extta = "1"
#Check country
if inv_line.invoice_id.intrastat_country_id:
excnt = inv_line.invoice_id.intrastat_country_id.code
else:
excnt = inv_line.invoice_id.partner_id.country_id.code
#Check region
#If purchase, comes from purchase order, linked to a location,
#which is linked to the warehouse
#if sales, the sale order is linked to the warehouse
#if sales, from a delivery order, linked to a location,
#which is linked to the warehouse
#If none found, get the company one.
exreg = None
if inv_line.invoice_id.type in ('in_invoice', 'in_refund'):
#comes from purchase
POL = self.pool['purchase.order.line']
poline_ids = POL.search(
cr, uid, [('invoice_lines', 'in', inv_line.id)], context=context)
if poline_ids:
purchaseorder = POL.browse(cr, uid, poline_ids[0], context=context).order_id
region_id = warehouse_mod.get_regionid_from_locationid(
cr, uid, purchaseorder.location_id.id, context=context)
if region_id:
exreg = region_mod.browse(cr, uid, region_id).code
elif inv_line.invoice_id.type in ('out_invoice', 'out_refund'):
#comes from sales
soline_ids = self.pool['sale.order.line'].search(
cr, uid, [('invoice_lines', 'in', inv_line.id)], context=context)
if soline_ids:
saleorder = self.pool['sale.order.line'].browse(
cr, uid, soline_ids[0], context=context).order_id
if saleorder and saleorder.warehouse_id and saleorder.warehouse_id.region_id:
exreg = region_mod.browse(
cr, uid, saleorder.warehouse_id.region_id.id, context=context).code
if not exreg:
if company.region_id:
exreg = company.region_id.code
else:
self._company_warning(
cr, uid,
_('The Intrastat Region of the selected company is not set, '
'please make sure to configure it first.'),
context=context)
#Check commodity codes
intrastat_id = product_mod.get_intrastat_recursively(
cr, uid, inv_line.product_id.id, context=context)
if intrastat_id:
exgo = intrastatcode_mod.browse(cr, uid, intrastat_id, context=context).name
else:
raise exceptions.Warning(
_('Product "%s" has no intrastat code, please configure it') %
inv_line.product_id.display_name)
#In extended mode, 2 more fields required
if extendedmode:
#Check means of transport
if inv_line.invoice_id.transport_mode_id:
extpc = inv_line.invoice_id.transport_mode_id.code
elif company.transport_mode_id:
extpc = company.transport_mode_id.code
else:
self._company_warning(
cr, uid,
_('The default Intrastat transport mode of your company '
'is not set, please make sure to configure it first.'),
context=context)
#Check incoterm
if inv_line.invoice_id.incoterm_id:
exdeltrm = inv_line.invoice_id.incoterm_id.code
elif company.incoterm_id:
exdeltrm = company.incoterm_id.code
else:
self._company_warning(
cr, uid,
_('The default Incoterm of your company is not set, '
'please make sure to configure it first.'),
context=context)
else:
extpc = ""
exdeltrm = ""
linekey = intrastatkey(EXTRF=declcode, EXCNT=excnt,
EXTTA=extta, EXREG=exreg, EXGO=exgo,
EXTPC=extpc, EXDELTRM=exdeltrm)
#We have the key
#calculate amounts
if inv_line.price_unit and inv_line.quantity:
amount = inv_line.price_unit * inv_line.quantity
else:
amount = 0
weight = (inv_line.product_id.weight_net or 0.0) * \
self.pool.get('product.uom')._compute_qty(cr, uid, inv_line.uos_id.id, inv_line.quantity, inv_line.product_id.uom_id.id)
if (not inv_line.uos_id.category_id or not inv_line.product_id.uom_id.category_id
or inv_line.uos_id.category_id.id != inv_line.product_id.uom_id.category_id.id):
supply_units = inv_line.quantity
else:
supply_units = inv_line.quantity * inv_line.uos_id.factor
amounts = entries.setdefault(linekey, (0, 0, 0))
amounts = (amounts[0] + amount, amounts[1] + weight, amounts[2] + supply_units)
entries[linekey] = amounts
numlgn = 0
for linekey in entries:
amounts = entries[linekey]
if round(amounts[0], 0) == 0:
continue
numlgn += 1
item = ET.SubElement(datas, 'Item')
self._set_Dim(item, 'EXSEQCODE', unicode(numlgn))
self._set_Dim(item, 'EXTRF', unicode(linekey.EXTRF))
self._set_Dim(item, 'EXCNT', unicode(linekey.EXCNT))
self._set_Dim(item, 'EXTTA', unicode(linekey.EXTTA))
self._set_Dim(item, 'EXREG', unicode(linekey.EXREG))
self._set_Dim(item, 'EXTGO', unicode(linekey.EXGO))
if extendedmode:
self._set_Dim(item, 'EXTPC', unicode(linekey.EXTPC))
self._set_Dim(item, 'EXDELTRM', unicode(linekey.EXDELTRM))
self._set_Dim(item, 'EXTXVAL', unicode(round(amounts[0], 0)).replace(".", ","))
self._set_Dim(item, 'EXWEIGHT', unicode(round(amounts[1], 0)).replace(".", ","))
self._set_Dim(item, 'EXUNITS', unicode(round(amounts[2], 0)).replace(".", ","))
if numlgn == 0:
#no datas
datas.set('action', 'nihil')
return decl
def _set_Dim(self, item, prop, value):
dim = ET.SubElement(item, 'Dim')
dim.set('prop', prop)
dim.text = value
# --- end of file (license: agpl-3.0) ---
# --- ar45/django :: django/contrib/postgres/fields/ranges.py ---
import json
from psycopg2.extras import DateRange, DateTimeTZRange, NumericRange, Range
from django.contrib.postgres import forms, lookups
from django.db import models
from django.utils import six
from .utils import AttributeSetter
__all__ = [
'RangeField', 'IntegerRangeField', 'BigIntegerRangeField',
'FloatRangeField', 'DateTimeRangeField', 'DateRangeField',
]
class RangeField(models.Field):
    """Abstract base class for PostgreSQL range model fields.

    Subclasses must provide ``base_field`` (the scalar field handling the
    range bounds), ``range_type`` (the psycopg2 Range class) and
    ``form_field`` (the default form field class).
    """
    empty_strings_allowed = False
    def get_prep_value(self, value):
        """Coerce *value* to a psycopg2 Range before it is sent to the DB."""
        if value is None:
            return None
        elif isinstance(value, Range):
            return value
        elif isinstance(value, (list, tuple)):
            # A 2-sequence is interpreted as (lower, upper) bounds.
            return self.range_type(value[0], value[1])
        return value
    def to_python(self, value):
        """Convert serialized (JSON string) or sequence input to a Range."""
        if isinstance(value, six.string_types):
            # Assume we're deserializing
            vals = json.loads(value)
            for end in ('lower', 'upper'):
                if end in vals:
                    # Bounds round-trip through the base field's rules.
                    vals[end] = self.base_field.to_python(vals[end])
            value = self.range_type(**vals)
        elif isinstance(value, (list, tuple)):
            value = self.range_type(value[0], value[1])
        return value
    def set_attributes_from_name(self, name):
        # Keep the base field's attname in sync with this field's name so
        # value_to_string() below serializes under the right attribute.
        super(RangeField, self).set_attributes_from_name(name)
        self.base_field.set_attributes_from_name(name)
    def value_to_string(self, obj):
        """Serialize the range as JSON with "lower"/"upper"/"bounds" keys
        (or ``{"empty": true}`` for an empty range)."""
        value = self.value_from_object(obj)
        if value is None:
            return None
        if value.isempty:
            return json.dumps({"empty": True})
        base_field = self.base_field
        result = {"bounds": value._bounds}
        for end in ('lower', 'upper'):
            # AttributeSetter wraps the bound so base_field.value_to_string,
            # which expects an object carrying the attribute, can be reused.
            obj = AttributeSetter(base_field.attname, getattr(value, end))
            result[end] = base_field.value_to_string(obj)
        return json.dumps(result)
    def formfield(self, **kwargs):
        # Default to the range-aware form field unless the caller overrides.
        kwargs.setdefault('form_class', self.form_field)
        return super(RangeField, self).formfield(**kwargs)
class IntegerRangeField(RangeField):
    """Range of integers, stored as PostgreSQL ``int4range``."""
    base_field = models.IntegerField()
    range_type = NumericRange
    form_field = forms.IntegerRangeField
    def db_type(self, connection):
        return 'int4range'
class BigIntegerRangeField(RangeField):
    """Range of big integers, stored as PostgreSQL ``int8range``."""
    base_field = models.BigIntegerField()
    range_type = NumericRange
    form_field = forms.IntegerRangeField
    def db_type(self, connection):
        return 'int8range'
class FloatRangeField(RangeField):
    """Range of floats, stored as PostgreSQL ``numrange``."""
    base_field = models.FloatField()
    range_type = NumericRange
    form_field = forms.FloatRangeField
    def db_type(self, connection):
        return 'numrange'
class DateTimeRangeField(RangeField):
    """Range of timezone-aware datetimes, stored as PostgreSQL ``tstzrange``."""
    base_field = models.DateTimeField()
    range_type = DateTimeTZRange
    form_field = forms.DateTimeRangeField
    def db_type(self, connection):
        return 'tstzrange'
class DateRangeField(RangeField):
    """Range of dates, stored as PostgreSQL ``daterange``."""
    base_field = models.DateField()
    range_type = DateRange
    form_field = forms.DateRangeField
    def db_type(self, connection):
        return 'daterange'
# Range-specific lookups made available on every RangeField subclass.
RangeField.register_lookup(lookups.DataContains)
RangeField.register_lookup(lookups.ContainedBy)
RangeField.register_lookup(lookups.Overlap)
class RangeContainedBy(models.Lookup):
    """``contained_by`` lookup for scalar fields: true when the column's
    value lies inside the given range (PostgreSQL ``<@`` operator).
    """
    lookup_name = 'contained_by'
    # Maps the column's db_type to the range type the RHS is cast to.
    type_mapping = {
        'integer': 'int4range',
        'bigint': 'int8range',
        'double precision': 'numrange',
        'date': 'daterange',
        'timestamp with time zone': 'tstzrange',
    }
    def as_sql(self, qn, connection):
        field = self.lhs.output_field
        if isinstance(field, models.FloatField):
            # numrange contains numerics, so cast the float column explicitly.
            sql = '%s::numeric <@ %s::{}'.format(self.type_mapping[field.db_type(connection)])
        else:
            sql = '%s <@ %s::{}'.format(self.type_mapping[field.db_type(connection)])
        lhs, lhs_params = self.process_lhs(qn, connection)
        rhs, rhs_params = self.process_rhs(qn, connection)
        params = lhs_params + rhs_params
        return sql % (lhs, rhs), params
    def get_prep_lookup(self):
        # Delegate value preparation to RangeField so list/tuple inputs are
        # coerced to Range instances.
        return RangeField().get_prep_lookup(self.lookup_name, self.rhs)
# Expose contained_by on the scalar fields that have a corresponding
# PostgreSQL range type (see RangeContainedBy.type_mapping).
models.DateField.register_lookup(RangeContainedBy)
models.DateTimeField.register_lookup(RangeContainedBy)
models.IntegerField.register_lookup(RangeContainedBy)
models.BigIntegerField.register_lookup(RangeContainedBy)
models.FloatField.register_lookup(RangeContainedBy)
@RangeField.register_lookup
class FullyLessThan(lookups.PostgresSimpleLookup):
    """LHS range lies strictly below the RHS (PostgreSQL ``<<``)."""
    lookup_name = 'fully_lt'
    operator = '<<'
@RangeField.register_lookup
class FullGreaterThan(lookups.PostgresSimpleLookup):
    """LHS range lies strictly above the RHS (PostgreSQL ``>>``)."""
    lookup_name = 'fully_gt'
    operator = '>>'
@RangeField.register_lookup
class NotLessThan(lookups.PostgresSimpleLookup):
    """LHS range does not extend below the RHS (PostgreSQL ``&>``)."""
    lookup_name = 'not_lt'
    operator = '&>'
@RangeField.register_lookup
class NotGreaterThan(lookups.PostgresSimpleLookup):
    """LHS range does not extend above the RHS (PostgreSQL ``&<``)."""
    lookup_name = 'not_gt'
    operator = '&<'
@RangeField.register_lookup
class AdjacentToLookup(lookups.PostgresSimpleLookup):
    """LHS range is adjacent to the RHS (PostgreSQL ``-|-``)."""
    lookup_name = 'adjacent_to'
    operator = '-|-'
@RangeField.register_lookup
class RangeStartsWith(models.Transform):
    """Transform exposing the range's lower bound via SQL ``lower()``."""
    lookup_name = 'startswith'
    function = 'lower'
    @property
    def output_field(self):
        # A single bound has the type of the range's base field.
        return self.lhs.output_field.base_field
@RangeField.register_lookup
class RangeEndsWith(models.Transform):
    """Transform exposing the range's upper bound via SQL ``upper()``."""
    lookup_name = 'endswith'
    function = 'upper'
    @property
    def output_field(self):
        # A single bound has the type of the range's base field.
        return self.lhs.output_field.base_field
@RangeField.register_lookup
class IsEmpty(models.Transform):
    """Boolean transform mapping to SQL ``isempty()``."""
    lookup_name = 'isempty'
    function = 'isempty'
    output_field = models.BooleanField()
# --- end of file (license: bsd-3-clause) ---
# --- myerpengine/odoo :: install/win32/setup.py ---
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import os
import glob
from distutils.core import setup
import py2exe
# Execute openerp/release.py and collect its module-level names
# (version, license, author, ...) into the ``meta`` dict.
# NOTE: execfile is Python 2 only.
meta = {}
execfile(os.path.join(os.path.dirname(__file__), '..', 'openerp', 'release.py'), meta)
def generate_files():
    """Create the auxiliary files shipped with the Windows build.

    Writes ``start.bat``/``stop.bat`` helpers that drive the NT service,
    and a ``meta.py`` module holding selected release metadata.

    Returns
    -------
    list
        The entries for distutils' ``data_files`` (including the bundled
        VC90 CRT directory when building on Windows).
    """
    actions = {
        'start': ['stop', 'start'],
        'stop': ['stop'],
    }
    files = []
    if os.name == 'nt':
        # Raw string keeps the Windows path's backslashes literal instead of
        # relying on invalid escape sequences ('\M', '\*') being passed
        # through — those raise warnings/errors on modern Pythons.
        files.append(("Microsoft.VC90.CRT",
                      glob.glob(r'C:\Microsoft.VC90.CRT\*.*')))
    for action, steps in actions.items():
        fname = action + '.bat'
        files.append(fname)
        with open(fname, 'w') as fp:
            # Backslashes doubled so only the final '\n' is an escape;
            # the text written to the .bat file is unchanged.
            fp.write('@PATH=%WINDIR%\\system32;%WINDIR%;%WINDIR%\\System32\\Wbem;.\n')
            for step in steps:
                fp.write('@net %s %s\n' % (step, meta['nt_service_name']))
    files.append('meta.py')
    with open('meta.py', 'w') as fp:
        for m in 'description serie nt_service_name'.split():
            fp.write("%s = %r\n" % (m, meta[m],))
    return files
# GUI/imaging modules excluded from the py2exe bundle.
excludes = "Tkconstants Tkinter tcl _imagingtk PIL._imagingtk ImageTk PIL.ImageTk FixTk".split()
# Build the Windows service executable; metadata comes from release.py.
setup(service = ["OpenERPServerService"],
      version = meta['version'],
      license = meta['license'],
      url = meta['url'],
      author = meta['author'],
      author_email = meta['author_email'],
      data_files = generate_files(),
      options = {"py2exe": {
          "excludes": excludes,
          "skip_archive": 1,
          "optimize": 2,
      }},
      )
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
# --- end of file (license: agpl-3.0) ---
# --- valentin-krasontovitsch/ansible :: lib/ansible/plugins/connection/persistent.py ---
# 2017 Red Hat Inc.
# (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = """
author: Ansible Core Team
connection: persistent
short_description: Use a persistent unix socket for connection
description:
- This is a helper plugin to allow making other connections persistent.
version_added: "2.3"
options:
persistent_command_timeout:
type: int
description:
- Configures, in seconds, the amount of time to wait for a command to
return from the remote device. If this timer is exceeded before the
command returns, the connection plugin will raise an exception and
close
default: 10
ini:
- section: persistent_connection
key: command_timeout
env:
- name: ANSIBLE_PERSISTENT_COMMAND_TIMEOUT
vars:
- name: ansible_command_timeout
"""
import os
import pty
import json
import subprocess
import sys
import termios
from ansible import constants as C
from ansible.plugins.connection import ConnectionBase
from ansible.module_utils._text import to_text
from ansible.module_utils.connection import Connection as SocketConnection, write_to_file_descriptor
from ansible.errors import AnsibleError
from ansible.utils.display import Display
display = Display()
class Connection(ConnectionBase):
    ''' Local based connections '''
    # Identifier under which this connection plugin is selected.
    transport = 'persistent'
    has_pipelining = False
    def _connect(self):
        """Mark the connection as established; the real work is deferred
        to the background ansible-connection process (see run())."""
        self._connected = True
        return self
    def exec_command(self, cmd, in_data=None, sudoable=True):
        """Forward *cmd* over the persistent local domain socket.

        Returns a (rc, stdout, stderr) tuple; rc is always 0 here because
        failures surface as exceptions from the socket connection.
        """
        display.vvvv('exec_command(), socket_path=%s' % self.socket_path, host=self._play_context.remote_addr)
        connection = SocketConnection(self.socket_path)
        out = connection.exec_command(cmd, in_data=in_data, sudoable=sudoable)
        return 0, out, ''
    def put_file(self, in_path, out_path):
        # Intentionally a no-op for this transport.
        pass
    def fetch_file(self, in_path, out_path):
        # Intentionally a no-op for this transport.
        pass
    def close(self):
        """Mark the connection as closed (the daemon owns the socket)."""
        self._connected = False
    def run(self):
        """Returns the path of the persistent connection socket.

        Attempts to ensure (within playcontext.timeout seconds) that the
        socket path exists. If the path exists (or the timeout has expired),
        returns the socket path.
        """
        display.vvvv('starting connection from persistent connection plugin', host=self._play_context.remote_addr)
        socket_path = self._start_connection()
        display.vvvv('local domain socket path is %s' % socket_path, host=self._play_context.remote_addr)
        setattr(self, '_socket_path', socket_path)
        return socket_path
    def _start_connection(self):
        '''
        Starts the persistent connection
        '''
        # Look for the ansible-connection helper next to the running script
        # first (or at ANSIBLE_CONNECTION_PATH), then fall back to $PATH.
        candidate_paths = [C.ANSIBLE_CONNECTION_PATH or os.path.dirname(sys.argv[0])]
        candidate_paths.extend(os.environ['PATH'].split(os.pathsep))
        for dirname in candidate_paths:
            ansible_connection = os.path.join(dirname, 'ansible-connection')
            if os.path.isfile(ansible_connection):
                break
        else:
            raise AnsibleError("Unable to find location of 'ansible-connection'. "
                               "Please set or check the value of ANSIBLE_CONNECTION_PATH")
        python = sys.executable
        master, slave = pty.openpty()
        # The helper is handed our parent pid as its only argument and talks
        # to us over the pty pair created above.
        p = subprocess.Popen(
            [python, ansible_connection, to_text(os.getppid())],
            stdin=slave, stdout=subprocess.PIPE, stderr=subprocess.PIPE
        )
        os.close(slave)
        # We need to set the pty into noncanonical mode. This ensures that we
        # can receive lines longer than 4095 characters (plus newline) without
        # truncating.
        old = termios.tcgetattr(master)
        new = termios.tcgetattr(master)
        new[3] = new[3] & ~termios.ICANON
        try:
            termios.tcsetattr(master, termios.TCSANOW, new)
            write_to_file_descriptor(master, {'ansible_command_timeout': self.get_option('persistent_command_timeout')})
            write_to_file_descriptor(master, self._play_context.serialize())
            (stdout, stderr) = p.communicate()
        finally:
            # Always restore the original pty attributes, even on failure.
            termios.tcsetattr(master, termios.TCSANOW, old)
        os.close(master)
        # On success the helper reports JSON on stdout; on failure it may
        # report JSON (or arbitrary text) on stderr.
        if p.returncode == 0:
            result = json.loads(to_text(stdout, errors='surrogate_then_replace'))
        else:
            try:
                result = json.loads(to_text(stderr, errors='surrogate_then_replace'))
            except getattr(json.decoder, 'JSONDecodeError', ValueError):
                # JSONDecodeError only available on Python 3.5+
                result = {'error': to_text(stderr, errors='surrogate_then_replace')}
        if 'messages' in result:
            # Replay the helper's buffered display messages at the right
            # verbosity level.
            for level, message in result['messages']:
                if level == 'log':
                    display.display(message, log_only=True)
                elif level in ('debug', 'v', 'vv', 'vvv', 'vvvv', 'vvvvv', 'vvvvvv'):
                    getattr(display, level)(message, host=self._play_context.remote_addr)
                else:
                    if hasattr(display, level):
                        getattr(display, level)(message)
                    else:
                        display.vvvv(message, host=self._play_context.remote_addr)
        if 'error' in result:
            if self._play_context.verbosity > 2:
                if result.get('exception'):
                    msg = "The full traceback is:\n" + result['exception']
                    display.display(msg, color=C.COLOR_ERROR)
            raise AnsibleError(result['error'])
        return result['socket_path']
# --- end of file (license: gpl-3.0) ---
# --- JosmanPS/scikit-learn :: sklearn/datasets/lfw.py ---
"""Loader for the Labeled Faces in the Wild (LFW) dataset
This dataset is a collection of JPEG pictures of famous people collected
over the internet, all details are available on the official website:
http://vis-www.cs.umass.edu/lfw/
Each picture is centered on a single face. The typical task is called
Face Verification: given a pair of two pictures, a binary classifier
must predict whether the two images are from the same person.
An alternative task, Face Recognition or Face Identification is:
given the picture of the face of an unknown person, identify the name
of the person by referring to a gallery of previously seen pictures of
identified persons.
Both Face Verification and Face Recognition are tasks that are typically
performed on the output of a model trained to perform Face Detection. The
most popular model for Face Detection is called Viola-Johns and is
implemented in the OpenCV library. The LFW faces were extracted by this face
detector from various online websites.
"""
# Copyright (c) 2011 Olivier Grisel <olivier.grisel@ensta.org>
# License: BSD 3 clause
from os import listdir, makedirs, remove
from os.path import join, exists, isdir
from sklearn.utils import deprecated
import logging
import numpy as np
try:
import urllib.request as urllib # for backwards compatibility
except ImportError:
import urllib
from .base import get_data_home, Bunch
from ..externals.joblib import Memory
from ..externals.six import b
logger = logging.getLogger(__name__)
BASE_URL = "http://vis-www.cs.umass.edu/lfw/"
ARCHIVE_NAME = "lfw.tgz"
FUNNELED_ARCHIVE_NAME = "lfw-funneled.tgz"
TARGET_FILENAMES = [
'pairsDevTrain.txt',
'pairsDevTest.txt',
'pairs.txt',
]
def scale_face(face):
    """Scale back to 0-1 range in case of normalization for plotting"""
    shifted = face - face.min()
    return shifted / shifted.max()
#
# Common private utilities for data fetching from the original LFW website
# local disk caching, and image decoding.
#
def check_fetch_lfw(data_home=None, funneled=True, download_if_missing=True):
    """Helper function to download any missing LFW data.

    Returns
    -------
    (lfw_home, data_folder_path) : tuple of str
        Path of the LFW cache directory and of the extracted image folder.

    Raises
    ------
    IOError
        If ``download_if_missing`` is False and a required file is absent.
    """
    data_home = get_data_home(data_home=data_home)
    lfw_home = join(data_home, "lfw_home")
    if funneled:
        archive_path = join(lfw_home, FUNNELED_ARCHIVE_NAME)
        data_folder_path = join(lfw_home, "lfw_funneled")
        archive_url = BASE_URL + FUNNELED_ARCHIVE_NAME
    else:
        archive_path = join(lfw_home, ARCHIVE_NAME)
        data_folder_path = join(lfw_home, "lfw")
        archive_url = BASE_URL + ARCHIVE_NAME
    if not exists(lfw_home):
        makedirs(lfw_home)
    # Fetch the pair-list metadata files first.
    for target_filename in TARGET_FILENAMES:
        target_filepath = join(lfw_home, target_filename)
        if not exists(target_filepath):
            if download_if_missing:
                url = BASE_URL + target_filename
                logger.warning("Downloading LFW metadata: %s", url)
                urllib.urlretrieve(url, target_filepath)
            else:
                raise IOError("%s is missing" % target_filepath)
    if not exists(data_folder_path):
        if not exists(archive_path):
            if download_if_missing:
                logger.warning("Downloading LFW data (~200MB): %s", archive_url)
                urllib.urlretrieve(archive_url, archive_path)
            else:
                # Bug fix: report the missing archive, not the last metadata
                # file checked by the loop above (target_filepath leaked in).
                raise IOError("%s is missing" % archive_path)
        import tarfile
        logger.info("Decompressing the data archive to %s", data_folder_path)
        tarfile.open(archive_path, "r:gz").extractall(path=lfw_home)
        # The archive is no longer needed once extracted.
        remove(archive_path)
    return lfw_home, data_folder_path
def _load_imgs(file_paths, slice_, color, resize):
    """Internally used to load images.

    Parameters
    ----------
    file_paths : list of str
        Paths of the jpeg files to decode.
    slice_ : tuple of slice or None
        (height, width) slices extracting the region of interest;
        None selects the full 250 x 250 picture.
    color : bool
        When False, the RGB channels are averaged into one gray level.
    resize : float or None
        Optional ratio used to rescale each extracted face.

    Returns
    -------
    faces : ndarray of shape (n_faces, h, w) or (n_faces, h, w, 3)
    """
    # Try to import imread and imresize from PIL. We do this here to prevent
    # the whole sklearn.datasets module from depending on PIL.
    try:
        try:
            from scipy.misc import imread
        except ImportError:
            from scipy.misc.pilutil import imread
        from scipy.misc import imresize
    except ImportError:
        raise ImportError("The Python Imaging Library (PIL)"
                          " is required to load data from jpeg files")
    # compute the portion of the images to load to respect the slice_ parameter
    # given by the caller
    default_slice = (slice(0, 250), slice(0, 250))
    if slice_ is None:
        slice_ = default_slice
    else:
        slice_ = tuple(s or ds for s, ds in zip(slice_, default_slice))
    h_slice, w_slice = slice_
    h = (h_slice.stop - h_slice.start) // (h_slice.step or 1)
    w = (w_slice.stop - w_slice.start) // (w_slice.step or 1)
    if resize is not None:
        resize = float(resize)
        h = int(resize * h)
        w = int(resize * w)
    # allocate some contiguous memory to host the decoded image slices
    n_faces = len(file_paths)
    if not color:
        faces = np.zeros((n_faces, h, w), dtype=np.float32)
    else:
        faces = np.zeros((n_faces, h, w, 3), dtype=np.float32)
    # iterate over the collected file path to load the jpeg files as numpy
    # arrays
    for i, file_path in enumerate(file_paths):
        if i % 1000 == 0:
            logger.info("Loading face #%05d / %05d", i + 1, n_faces)
        # Checks if jpeg reading worked. Refer to issue #3594 for more
        # details.
        img = imread(file_path)
        # Bug fix: compare the value with `==`, not identity with `is` —
        # `ndim is 0` only worked by accident via CPython's small-int cache.
        if img.ndim == 0:
            raise RuntimeError("Failed to read the image file %s, "
                               "Please make sure that libjpeg is installed"
                               % file_path)
        face = np.asarray(img[slice_], dtype=np.float32)
        face /= 255.0  # scale uint8 coded colors to the [0.0, 1.0] floats
        if resize is not None:
            face = imresize(face, resize)
        if not color:
            # average the color channels to compute a gray levels
            # representation
            face = face.mean(axis=2)
        faces[i, ...] = face
    return faces
#
# Task #1: Face Identification on picture with names
#
def _fetch_lfw_people(data_folder_path, slice_=None, color=False, resize=None,
                      min_faces_per_person=0):
    """Perform the actual data loading for the lfw people dataset

    This operation is meant to be cached by a joblib wrapper.
    """
    # scan the data folder content to retain people with at least
    # `min_faces_per_person` face pictures
    person_names, file_paths = [], []
    for person_name in sorted(listdir(data_folder_path)):
        folder_path = join(data_folder_path, person_name)
        if not isdir(folder_path):
            continue
        paths = [join(folder_path, f) for f in listdir(folder_path)]
        n_pictures = len(paths)
        if n_pictures >= min_faces_per_person:
            # folder names use underscores, e.g. "Tony_Blair" -> "Tony Blair"
            person_name = person_name.replace('_', ' ')
            person_names.extend([person_name] * n_pictures)
            file_paths.extend(paths)
    n_faces = len(file_paths)
    if n_faces == 0:
        raise ValueError("min_faces_per_person=%d is too restrictive" %
                         min_faces_per_person)
    target_names = np.unique(person_names)
    # target[i] is the index of person_names[i] within the sorted target_names
    target = np.searchsorted(target_names, person_names)
    faces = _load_imgs(file_paths, slice_, color, resize)
    # shuffle the faces with a deterministic RNG scheme to avoid having
    # all faces of the same person in a row, as it would break some
    # cross validation and learning algorithms such as SGD and online
    # k-means that make an IID assumption
    indices = np.arange(n_faces)
    np.random.RandomState(42).shuffle(indices)
    faces, target = faces[indices], target[indices]
    return faces, target, target_names
def fetch_lfw_people(data_home=None, funneled=True, resize=0.5,
                     min_faces_per_person=0, color=False,
                     slice_=(slice(70, 195), slice(78, 172)),
                     download_if_missing=True):
    """Loader for the Labeled Faces in the Wild (LFW) people dataset

    This dataset is a collection of JPEG pictures of famous people
    collected on the internet, all details are available on the
    official website:

        http://vis-www.cs.umass.edu/lfw/

    Each picture is centered on a single face. Each pixel of each channel
    (color in RGB) is encoded by a float in range 0.0 - 1.0.

    The task is called Face Recognition (or Identification): given the
    picture of a face, find the name of the person given a training set
    (gallery).

    The original images are 250 x 250 pixels, but the default slice and
    resize arguments reduce them to 62 x 47.

    Parameters
    ----------
    data_home : optional, default: None
        Specify another download and cache folder for the datasets. By default
        all scikit learn data is stored in '~/scikit_learn_data' subfolders.

    funneled : boolean, optional, default: True
        Download and use the funneled variant of the dataset.

    resize : float, optional, default 0.5
        Ratio used to resize the each face picture.

    min_faces_per_person : int, optional, default 0
        The extracted dataset will only retain pictures of people that have at
        least `min_faces_per_person` different pictures.

    color : boolean, optional, default False
        Keep the 3 RGB channels instead of averaging them to a single
        gray level channel. If color is True the shape of the data has
        one more dimension than the shape with color = False.

    slice_ : optional
        Provide a custom 2D slice (height, width) to extract the
        'interesting' part of the jpeg files and avoid use statistical
        correlation from the background

    download_if_missing : optional, True by default
        If False, raise a IOError if the data is not locally available
        instead of trying to download the data from the source site.

    Returns
    -------
    dataset : dict-like object with the following attributes:

    dataset.data : numpy array of shape (13233, 2914)
        Each row corresponds to a ravelled face image of original size 62 x 47
        pixels. Changing the ``slice_`` or resize parameters will change the
        shape of the output.

    dataset.images : numpy array of shape (13233, 62, 47)
        Each row is a face image corresponding to one of the 5749 people in
        the dataset. Changing the ``slice_`` or resize parameters will change
        the shape of the output.

    dataset.target : numpy array of shape (13233,)
        Labels associated to each face image. Those labels range from 0-5748
        and correspond to the person IDs.

    dataset.DESCR : string
        Description of the Labeled Faces in the Wild (LFW) dataset.
    """
    lfw_home, data_folder_path = check_fetch_lfw(
        data_home=data_home, funneled=funneled,
        download_if_missing=download_if_missing)
    logger.info('Loading LFW people faces from %s', lfw_home)
    # wrap the loader in a memoizing function that will return memmaped data
    # arrays for optimal memory usage
    m = Memory(cachedir=lfw_home, compress=6, verbose=0)
    load_func = m.cache(_fetch_lfw_people)
    # load and memoize the faces as np arrays
    faces, target, target_names = load_func(
        data_folder_path, resize=resize,
        min_faces_per_person=min_faces_per_person, color=color, slice_=slice_)
    # pack the results as a Bunch instance
    return Bunch(data=faces.reshape(len(faces), -1), images=faces,
                 target=target, target_names=target_names,
                 DESCR="LFW faces dataset")
#
# Task #2: Face Verification on pairs of face pictures
#
def _fetch_lfw_pairs(index_file_path, data_folder_path, slice_=None,
                     color=False, resize=None):
    """Perform the actual data loading for the LFW pairs dataset

    This operation is meant to be cached by a joblib wrapper.
    """
    # parse the index file to find the number of pairs to be able to allocate
    # the right amount of memory before starting to decode the jpeg files
    with open(index_file_path, 'rb') as index_file:
        split_lines = [ln.strip().split(b('\t')) for ln in index_file]
    pair_specs = [sl for sl in split_lines if len(sl) > 2]
    n_pairs = len(pair_specs)
    # iterating over the metadata lines for each pair to find the filename to
    # decode and load in memory
    target = np.zeros(n_pairs, dtype=np.int)
    file_paths = list()
    for i, components in enumerate(pair_specs):
        if len(components) == 3:
            # same-person pair: (name, img_idx1, img_idx2); indices are
            # 1-based in the metadata file, hence the -1 below
            target[i] = 1
            pair = (
                (components[0], int(components[1]) - 1),
                (components[0], int(components[2]) - 1),
            )
        elif len(components) == 4:
            # different-person pair: (name1, img_idx1, name2, img_idx2)
            target[i] = 0
            pair = (
                (components[0], int(components[1]) - 1),
                (components[2], int(components[3]) - 1),
            )
        else:
            raise ValueError("invalid line %d: %r" % (i + 1, components))
        for j, (name, idx) in enumerate(pair):
            try:
                person_folder = join(data_folder_path, name)
            except TypeError:
                # the name was read as bytes; decode it before joining
                person_folder = join(data_folder_path, str(name, 'UTF-8'))
            filenames = list(sorted(listdir(person_folder)))
            file_path = join(person_folder, filenames[idx])
            file_paths.append(file_path)
    pairs = _load_imgs(file_paths, slice_, color, resize)
    # reshape from (2 * n_pairs, h, w[, 3]) to (n_pairs, 2, h, w[, 3])
    shape = list(pairs.shape)
    n_faces = shape.pop(0)
    shape.insert(0, 2)
    shape.insert(0, n_faces // 2)
    pairs.shape = shape
    return pairs, target, np.array(['Different persons', 'Same person'])
@deprecated("Function 'load_lfw_people' has been deprecated in 0.17 and will be "
            "removed in 0.19. "
            "Use fetch_lfw_people(download_if_missing=False) instead.")
def load_lfw_people(download_if_missing=False, **kwargs):
    """Alias for fetch_lfw_people(download_if_missing=False)

    Check fetch_lfw_people.__doc__ for the documentation and parameter list.
    """
    # Fix: the concatenated deprecation message previously rendered as
    # "...removed in 0.19.Use fetch_lfw_people..." (missing space).
    return fetch_lfw_people(download_if_missing=download_if_missing, **kwargs)
def fetch_lfw_pairs(subset='train', data_home=None, funneled=True, resize=0.5,
                    color=False, slice_=(slice(70, 195), slice(78, 172)),
                    download_if_missing=True):
    """Loader for the Labeled Faces in the Wild (LFW) pairs dataset

    This dataset is a collection of JPEG pictures of famous people
    collected on the internet, all details are available on the
    official website:

        http://vis-www.cs.umass.edu/lfw/

    Each picture is centered on a single face. Each pixel of each channel
    (color in RGB) is encoded by a float in range 0.0 - 1.0.

    The task is called Face Verification: given a pair of two pictures,
    a binary classifier must predict whether the two images are from
    the same person.

    In the official `README.txt`_ this task is described as the
    "Restricted" task.  As I am not sure as to implement the
    "Unrestricted" variant correctly, I left it as unsupported for now.

      .. _`README.txt`: http://vis-www.cs.umass.edu/lfw/README.txt

    The original images are 250 x 250 pixels, but the default slice and
    resize arguments reduce them to 62 x 47.

    Read more in the :ref:`User Guide <labeled_faces_in_the_wild>`.

    Parameters
    ----------
    subset : optional, default: 'train'
        Select the dataset to load: 'train' for the development training
        set, 'test' for the development test set, and '10_folds' for the
        official evaluation set that is meant to be used with a 10-folds
        cross validation.

    data_home : optional, default: None
        Specify another download and cache folder for the datasets. By
        default all scikit learn data is stored in '~/scikit_learn_data'
        subfolders.

    funneled : boolean, optional, default: True
        Download and use the funneled variant of the dataset.

    resize : float, optional, default 0.5
        Ratio used to resize the each face picture.

    color : boolean, optional, default False
        Keep the 3 RGB channels instead of averaging them to a single
        gray level channel. If color is True the shape of the data has
        one more dimension than the shape with color = False.

    slice_ : optional
        Provide a custom 2D slice (height, width) to extract the
        'interesting' part of the jpeg files and avoid use statistical
        correlation from the background

    download_if_missing : optional, True by default
        If False, raise a IOError if the data is not locally available
        instead of trying to download the data from the source site.

    Returns
    -------
    The data is returned as a Bunch object with the following attributes:

    data : numpy array of shape (2200, 5828)
        Each row corresponds to 2 ravel'd face images of original size 62 x 47
        pixels. Changing the ``slice_`` or resize parameters will change the
        shape of the output.

    pairs : numpy array of shape (2200, 2, 62, 47)
        Each row has 2 face images corresponding to same or different person
        from the dataset containing 5749 people. Changing the ``slice_`` or
        resize parameters will change the shape of the output.

    target : numpy array of shape (2200,)
        Labels associated to each pair of images. The two label values being
        different persons or the same person.

    DESCR : string
        Description of the Labeled Faces in the Wild (LFW) dataset.
    """
    lfw_home, data_folder_path = check_fetch_lfw(
        data_home=data_home, funneled=funneled,
        download_if_missing=download_if_missing)
    logger.info('Loading %s LFW pairs from %s', subset, lfw_home)
    # wrap the loader in a memoizing function that will return memmaped data
    # arrays for optimal memory usage
    m = Memory(cachedir=lfw_home, compress=6, verbose=0)
    load_func = m.cache(_fetch_lfw_pairs)
    # select the right metadata file according to the requested subset
    label_filenames = {
        'train': 'pairsDevTrain.txt',
        'test': 'pairsDevTest.txt',
        '10_folds': 'pairs.txt',
    }
    if subset not in label_filenames:
        raise ValueError("subset='%s' is invalid: should be one of %r" % (
            subset, list(sorted(label_filenames.keys()))))
    index_file_path = join(lfw_home, label_filenames[subset])
    # load and memoize the pairs as np arrays
    pairs, target, target_names = load_func(
        index_file_path, data_folder_path, resize=resize, color=color,
        slice_=slice_)
    # pack the results as a Bunch instance
    return Bunch(data=pairs.reshape(len(pairs), -1), pairs=pairs,
                 target=target, target_names=target_names,
                 DESCR="'%s' segment of the LFW pairs dataset" % subset)
@deprecated("Function 'load_lfw_pairs' has been deprecated in 0.17 and will be "
            "removed in 0.19."
            "Use fetch_lfw_pairs(download_if_missing=False) instead.")
def load_lfw_pairs(download_if_missing=False, **kwargs):
    """Alias for fetch_lfw_pairs(download_if_missing=False)

    Deprecated thin wrapper: forwards all arguments unchanged, with
    ``download_if_missing`` defaulting to False (the historical behaviour
    of the ``load_*`` family, which never downloaded data).

    Check fetch_lfw_pairs.__doc__ for the documentation and parameter list.
    """
    return fetch_lfw_pairs(download_if_missing=download_if_missing, **kwargs)
| bsd-3-clause |
janncker/createrepo_c | tests/python/tests/test_crfile.py | 2 | 3467 | import unittest
import shutil
import tempfile
import os.path
import createrepo_c as cr
from fixtures import *
class TestCaseCrFile(unittest.TestCase):
    """Exercise createrepo_c.CrFile: creation, close semantics, error
    handling, and each supported compression backend."""

    def setUp(self):
        self.tmpdir = tempfile.mkdtemp(prefix="createrepo_ctest-")

    def tearDown(self):
        shutil.rmtree(self.tmpdir)

    def _write_foobar(self, filename, compression):
        # Helper: create a CrFile under tmpdir, write "foobar", close it,
        # and return the file path for content verification.
        path = os.path.join(self.tmpdir, filename)
        crf = cr.CrFile(path, cr.MODE_WRITE, compression)
        self.assertTrue(crf)
        self.assertTrue(os.path.isfile(path))
        crf.write("foobar")
        crf.close()
        return path

    def test_crfile_basic_operations(self):
        target = self.tmpdir + "/foo.gz"
        crf = cr.CrFile(target, cr.MODE_WRITE, cr.GZ_COMPRESSION, None)
        self.assertTrue(crf)
        self.assertTrue(os.path.isfile(target))

    def test_crfile_operations_on_closed_file(self):
        # Already closed file: writing must raise, re-closing must not.
        path = os.path.join(self.tmpdir, "primary.xml.gz")
        crf = cr.CrFile(path, cr.MODE_WRITE, cr.GZ_COMPRESSION)
        self.assertTrue(crf)
        self.assertTrue(os.path.isfile(path))
        crf.close()
        self.assertRaises(cr.CreaterepoCError, crf.write, "foobar")
        crf.close()  # No error should be raised
        del(crf)  # No error should be raised

    def test_crfile_error_cases(self):
        path = os.path.join(self.tmpdir, "foofile")
        self.assertFalse(os.path.exists(path))
        # Bad open mode: rejected before any file is created.
        self.assertRaises(ValueError, cr.CrFile, path, 86,
                          cr.GZ_COMPRESSION, None)
        self.assertFalse(os.path.exists(path))
        # Bad compression type.
        self.assertRaises(ValueError, cr.CrFile, path,
                          cr.MODE_READ, 678, None)
        self.assertFalse(os.path.exists(path))
        # Bad contentstat object.
        self.assertRaises(TypeError, cr.XmlFile, path,
                          cr.MODE_READ, cr.GZ_COMPRESSION, "foo")
        self.assertFalse(os.path.exists(path))
        # Non existing path.
        self.assertRaises(IOError, cr.CrFile,
                          "foobar/foo/xxx/cvydmaticxuiowe")

    def test_crfile_no_compression(self):
        path = self._write_foobar("foo", cr.NO_COMPRESSION)
        self.assertEqual(open(path).read(), "foobar")

    def test_crfile_gz_compression(self):
        path = self._write_foobar("foo.gz", cr.GZ_COMPRESSION)
        import gzip
        self.assertEqual(gzip.open(path).read(), "foobar")

    def test_crfile_bz2_compression(self):
        path = self._write_foobar("foo.bz2", cr.BZ2_COMPRESSION)
        import bz2
        self.assertEqual(bz2.decompress(open(path).read()), "foobar")

    def test_crfile_xz_compression(self):
        path = self._write_foobar("foo.xz", cr.XZ_COMPRESSION)
        import subprocess
        proc = subprocess.Popen(["unxz", "--stdout", path],
                                stdout=subprocess.PIPE)
        self.assertEqual(proc.stdout.read(), "foobar")
| gpl-2.0 |
opennode/waldur-mastermind | src/waldur_core/structure/managers.py | 1 | 4844 | from django.db import models
from waldur_core.core.managers import GenericKeyMixin
def get_permission_subquery(permissions, user):
    """Build a Q object selecting rows where *user* holds an active
    permission, following the customer/project paths declared on the
    model's ``Permissions`` class (``'self'`` means the model itself).

    An optional ``build_query(user)`` hook on *permissions* is OR-ed in.
    """
    query = models.Q()
    for entity in ('customer', 'project'):
        entity_path = getattr(permissions, '%s_path' % entity, None)
        if not entity_path:
            continue
        if entity_path == 'self':
            prefix = 'permissions__'
        else:
            prefix = entity_path + '__permissions__'
        query |= models.Q(**{prefix + 'user': user, prefix + 'is_active': True})
    extra_builder = getattr(permissions, 'build_query', None)
    if extra_builder:
        query |= extra_builder(user)
    return query
def filter_queryset_for_user(queryset, user):
    """Restrict *queryset* to rows visible to *user*.

    Staff/support users (and the anonymous ``None``) see everything.
    Models without a ``Permissions`` declaration are not filtered.
    """
    if user is None or user.is_staff or user.is_support:
        return queryset
    try:
        permissions = queryset.model.Permissions
    except AttributeError:
        return queryset
    perm_q = get_permission_subquery(permissions, user)
    if not perm_q:
        return queryset
    return queryset.filter(perm_q).distinct()
class StructureQueryset(models.QuerySet):
    """ Provides additional filtering by customer or project (based on permission definition).

    ``customer``/``project`` keyword arguments are transparently rewritten
    into the relation paths declared on the model's ``Permissions`` class,
    so callers can filter any structure model uniformly.

    Example:
    .. code-block:: python
        Instance.objects.filter(project=12)
        Droplet.objects.filter(
            customer__name__startswith='A',
            state=Droplet.States.ONLINE)
        Droplet.objects.filter(Q(customer__name='Alice') | Q(customer__name='Bob'))
    """
    def exclude(self, *args, **kwargs):
        # Same rewriting as filter(): patch Q objects positionally,
        # remap custom keyword fields before delegating to QuerySet.
        return super(StructureQueryset, self).exclude(
            *[self._patch_query_argument(a) for a in args],
            **self._filter_by_custom_fields(**kwargs)
        )
    def filter(self, *args, **kwargs):
        return super(StructureQueryset, self).filter(
            *[self._patch_query_argument(a) for a in args],
            **self._filter_by_custom_fields(**kwargs)
        )
    def _patch_query_argument(self, arg):
        # patch Q() objects if passed and add support of custom fields
        # NOTE: mutates arg.children in place and recurses into nested Q objects.
        if isinstance(arg, models.Q):
            children = []
            for opt in arg.children:
                if isinstance(opt, models.Q):
                    children.append(self._patch_query_argument(opt))
                else:
                    # opt is a (lookup, value) 2-tuple; rewrite it through the
                    # same machinery as keyword arguments.
                    args = self._filter_by_custom_fields(**dict([opt]))
                    children.append(tuple(args.items())[0])
            arg.children = children
        return arg
    def _filter_by_custom_fields(self, **kwargs):
        # traverse over filter arguments in search of custom fields
        args = {}
        # Real model field names pass through untouched.
        fields = [f.name for f in self.model._meta.get_fields()]
        for field, val in kwargs.items():
            base_field = field.split('__')[0]
            if base_field in fields:
                args.update(**{field: val})
            elif base_field in ('customer', 'project'):
                # Only customer/project are remapped via Permissions paths.
                args.update(self._filter_by_permission_fields(base_field, field, val))
            else:
                args.update(**{field: val})
        return args
    def _filter_by_permission_fields(self, name, field, value):
        # handle fields connected via permissions relations
        # extra = the lookup suffix after 'customer'/'project' (e.g. 'name__startswith')
        extra = '__'.join(field.split('__')[1:]) if '__' in field else None
        try:
            # look for the target field path in Permissions class,
            path = getattr(self.model.Permissions, '%s_path' % name)
        except AttributeError:
            # fallback to FieldError if it's missed
            return {field: value}
        else:
            if path == 'self':
                # The model itself is the customer/project: filter by pk.
                if extra:
                    return {extra: value}
                else:
                    return {
                        'pk': value.pk if isinstance(value, models.Model) else value
                    }
            else:
                if extra:
                    path += '__' + extra
                return {path: value}
StructureManager = models.Manager.from_queryset(StructureQueryset)
class ServiceSettingsManager(GenericKeyMixin, models.Manager):
    """ Allows to filter and get service settings by generic key """

    def get_available_models(self):
        """Model classes acceptable as generic keys: every resource model."""
        # Imported lazily to avoid a circular import with structure.models.
        from waldur_core.structure.models import BaseResource
        return BaseResource.get_all_models()
class SharedServiceSettingsManager(ServiceSettingsManager):
    """Manager restricted to settings shared between organizations."""

    def get_queryset(self):
        base_qs = super(SharedServiceSettingsManager, self).get_queryset()
        return base_qs.filter(shared=True)
class PrivateServiceSettingsManager(ServiceSettingsManager):
    """Manager restricted to non-shared (private) settings."""

    def get_queryset(self):
        base_qs = super(PrivateServiceSettingsManager, self).get_queryset()
        return base_qs.filter(shared=False)
| mit |
pytorch/fairseq | tests/test_online_backtranslation.py | 1 | 7650 | # Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import tempfile
import unittest
from pathlib import Path
from typing import Any, Dict, Sequence
import fairseq.data.indexed_dataset as indexed_dataset
import fairseq.options
import fairseq.tasks.online_backtranslation as obt
import torch
from tests import utils
def mk_sample(tokens: Sequence[int], batch_size: int = 2) -> Dict[str, Any]:
    """Replicate *tokens* ``batch_size`` times and wrap the batch in the
    dict layout fairseq expects for a training sample (the target drops
    the leading token of each row)."""
    row = torch.tensor(tokens, dtype=torch.long)
    batch = torch.stack([row] * batch_size)
    lengths = torch.tensor([len(tokens)] * batch_size, dtype=torch.long)
    return {
        "net_input": {
            "src_tokens": batch,
            "prev_output_tokens": batch,
            "src_lengths": lengths,
        },
        "target": batch[:, 1:],
    }
def mk_dataset(num_samples: int, max_len: int, output: Path):
    """Write a random indexed dataset of *num_samples* sequences (each of
    random length in [3, max_len)) to *output* (.bin) plus its .idx file."""
    output.parent.mkdir(exist_ok=True)
    builder = indexed_dataset.IndexedDatasetBuilder(str(output))
    data = torch.randint(5, 100, (num_samples, max_len))
    lengths = torch.randint(3, max_len, (num_samples,))
    for row, length in zip(data, lengths):
        row[0] = 0  # force a BOS-like token at position 0
        builder.add_item(row[:length])
    builder.finalize(output.with_suffix(".idx"))
    assert output.exists()
    assert output.with_suffix(".idx").exists()
class OnlineBacktranslationTest(unittest.TestCase):
    """Tests for the online back-translation task: language tokens,
    sample back-translation, and train/valid dataset loading."""

    # Shared scratch directory; also hosts the dummy dictionary reused
    # across test methods.
    tmp_dir = Path(tempfile.mkdtemp(suffix="OnlineBacktranslationTest"))
    @classmethod
    def obt_task(
        cls, languages: Sequence[str], data: Path = None, language_mapping: str = None
    ):
        """Build an OnlineBackTranslationTask (plus its model) over a dummy
        100-token dictionary shared by all *languages*."""
        dict_path = cls.tmp_dir / "dict.txt"
        if not dict_path.exists():
            dictionary = utils.dummy_dictionary(100)
            dictionary.save(str(dict_path))
        if data is not None:
            # Copy the shared dictionary into the caller-provided data dir.
            (data / "dict.txt").write_text(dict_path.read_text())
        else:
            data = cls.tmp_dir
        assert len(languages) >= 2
        kwargs = {
            "arch": "transformer",
            # --max-sentences=1 for better predictability of batches
            "max_sentences": 1,
            # Use characteristics dimensions
            "encoder_layers": 3,
            "encoder_embed_dim": 12,
            "encoder_ffn_embed_dim": 14,
            "encoder_attention_heads": 4,
            "decoder_layers": 3,
            "decoder_embed_dim": 12,
            "decoder_output_dim": 12,
            "decoder_ffn_embed_dim": 14,
            "decoder_attention_heads": 4,
            # Disable dropout so we have comparable tests.
            "dropout": 0,
            "attention_dropout": 0,
            "activation_dropout": 0,
            "encoder_layerdrop": 0,
        }
        args = fairseq.options.get_args(
            data,
            task="online_backtranslation",
            mono_langs=",".join(languages),
            valid_lang_pairs=f"{languages[0]}-{languages[1]}",
            tokens_per_sample=256,
            language_mapping=language_mapping,
            **kwargs,
        )
        task = obt.OnlineBackTranslationTask.setup_task(args)
        # we need to build the model to have the correct dictionary
        model = task.build_model(task.args)
        return task, model
    def tmp_path(self, test_case: str) -> Path:
        # Fresh per-test-case subdirectory under the class scratch dir.
        return Path(tempfile.mkdtemp(test_case, dir=self.tmp_dir))
    def test_lang_tokens(self):
        """Language BOS tokens exist in the dictionary and drive translation."""
        task, model = self.obt_task(["en", "ro", "zh"])
        assert obt._lang_token("en") in task.dictionary
        assert obt._lang_token("ro") in task.dictionary
        assert obt._lang_token("zh") in task.dictionary
        en_bos = obt._lang_token_index(task.common_dict, "en")
        assert "en" == task.common_dict[en_bos].strip("_")
        zh_bos = obt._lang_token_index(task.common_dict, "zh")
        assert "zh" == task.common_dict[zh_bos].strip("_")
        zh_sample = mk_sample([zh_bos, 16, 14, 12, 10])
        # we expect to receive the bos token for translation
        assert task.get_bos_token_from_sample(zh_sample) == en_bos
    def test_backtranslate_sample(self):
        """Back-translating zh->en keeps the zh target and prefixes en BOS."""
        task, model = self.obt_task(["en", "ro", "zh"])
        en_bos = obt._lang_token_index(task.common_dict, "en")
        zh_bos = obt._lang_token_index(task.common_dict, "zh")
        sample = mk_sample([zh_bos, 16, 14, 12, 10])
        task.backtranslate_sample(sample, "zh", "en")
        target_zh = list(sample["target"][0])
        assert target_zh == [16, 14, 12, 10]  # original zh sentence
        generated_en = sample["net_input"]["src_tokens"][0]
        assert generated_en[0] == en_bos
    def test_train_dataset(self):
        """Train split yields BT + DENOISE views per language; the shorter
        zh dataset wraps around, and samples are sorted by length."""
        data = self.tmp_path("test_train_dataset")
        mk_dataset(20, 10, data / "en" / "train.bin")
        mk_dataset(10, 10, data / "zh" / "train.bin")
        task, model = self.obt_task(["en", "zh"], data)
        task.load_dataset("train")
        en_bos = obt._lang_token_index(task.common_dict, "en")
        zh_bos = obt._lang_token_index(task.common_dict, "zh")
        train = task.datasets["train"]
        train.ordered_indices()
        train.prefetch([0, 19])
        sample_0 = train[0]
        sample_19 = train[19]
        self.assertEqual(
            set(sample_0.keys()), {"en-BT", "en-DENOISE", "zh-BT", "zh-DENOISE"}
        )
        for sample in (sample_0, sample_19):
            self.assertEqual(sample["en-BT"]["source"][0], en_bos)
            # bt target isn't ready to look at.
            self.assertEqual(sample["en-DENOISE"]["source"][0], en_bos)
            # TODO What could we check on the target side ?
        for i in range(10):
            # Zh dataset is shorter, and is wrapped around En dataset.
            train.prefetch([i, i + 10])
            self.assertEqual(
                list(train[i]["zh-DENOISE"]["source"]),
                list(train[i + 10]["zh-DENOISE"]["source"]),
            )
            self.assertEqual(train[i]["zh-DENOISE"]["source"][0].item(), zh_bos)
        # Sorted by increasing len
        self.assertLess(
            len(sample_0["en-BT"]["source"]), len(sample_19["en-BT"]["source"])
        )
    def test_valid_dataset(self):
        """Valid split keeps ids stable and starts each source with en BOS."""
        data = self.tmp_path("test_valid_dataset")
        mk_dataset(10, 21, data / "valid.en-zh.en.bin")
        mk_dataset(10, 21, data / "valid.en-zh.zh.bin")
        task, model = self.obt_task(["en", "zh"], data)
        valid = task.load_dataset("valid")
        en_bos = obt._lang_token_index(task.common_dict, "en")
        assert valid is not None
        valid.prefetch(range(10))
        sample_0 = valid[0]
        sample_9 = valid[9]
        self.assertEqual(sample_0["id"], 0)
        self.assertEqual(sample_9["id"], 9)
        self.assertEqual(sample_0["source"][0], en_bos)
        self.assertEqual(sample_9["source"][0], en_bos)
        # TODO: could we test the target side ?
    def assertFnMatch(self, fn, values):
        # Assert fn(x) == y for every (x, y) pair in *values*.
        for x, y in values.items():
            fn_x = fn(x)
            self.assertEqual(fn_x, y, f"Fn has wrong value: fn({x}) = {fn_x} != {y}")
    def test_piecewise_linear_fn(self):
        """PiecewiseLinearFn parsing: constant, ramps, and triangle shapes."""
        self.assertFnMatch(
            obt.PiecewiseLinearFn.from_string("1.0"), {0: 1, 100: 1, 500: 1, 1000: 1}
        )
        self.assertFnMatch(
            obt.PiecewiseLinearFn.from_string("0:1,1000:0"),
            {0: 1, 500: 0.5, 1000: 0, 2000: 0},
        )
        self.assertFnMatch(
            obt.PiecewiseLinearFn.from_string("0:0,1000:1"),
            {0: 0, 500: 0.5, 1000: 1, 2000: 1},
        )
        self.assertFnMatch(
            obt.PiecewiseLinearFn.from_string("0:0,1000:1,2000:0"),
            {0: 0, 500: 0.5, 1000: 1, 1500: 0.5, 2000: 0, 3000: 0},
        )
| mit |
jerryjiahaha/rts2 | scripts/rts2saf/unittest/test_db.py | 3 | 4371 | # (C) 2014, Markus Wildi, markus.wildi@bluewin.ch
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# Or visit http://www.gnu.org/licenses/gpl.html.
#
import unittest
import os
import re
import pwd
import psycopg2
import logging
if not os.path.isdir('/tmp/rts2saf_log'):
os.mkdir('/tmp/rts2saf_log')
logging.basicConfig(filename='/tmp/rts2saf_log/unittest.log', level=logging.INFO, format='%(asctime)s %(levelname)s %(message)s')
logger = logging.getLogger()
comment = re.compile('^;.*')
dbname= re.compile('^[ \t]*name[ \t]*=[ \t]*(\w+)')
dbusername= re.compile('^[ \t]*username[ \t]*=[ \t]*(\w+)')
dbpasswd= re.compile('^[ \t]*password[ \t]*=[ \t]*(\w+)')
# sequence matters
def suite():
    """Build the ordered test suite (sequence matters: the rts2.ini read
    must run before the database-access tests)."""
    tests = unittest.TestSuite()
    for case_name in ('test_readRts2Configuration',
                      'test_dbReadAccessTarget',
                      'test_dbReadAccessCCDScript'):
        tests.addTest(TestDatabase(case_name))
    return tests
#@unittest.skip('class not yet implemented')
class TestDatabase(unittest.TestCase):
    """Read-only checks against the RTS2 postgres database; credentials
    are parsed from /etc/rts2/rts2.ini with hard-coded fallbacks."""

    def tearDown(self):
        pass
    def setUp(self):
        # ToDo: ugly
        # Parse db name/user/password from rts2.ini using the module-level
        # regexes; any failure falls through to the defaults below.
        self.dbName = None
        self.dbUser = None
        self.dbPasswd = None
        self.openedRts2Ini=False
        try:
            with open('/etc/rts2/rts2.ini') as ini:
                self.openedRts2Ini=True
                for ln in ini:
                    mc = comment.match(ln)
                    if mc:
                        continue
                    mn = dbname.match(ln)
                    if mn:
                        self.dbName= mn.group(1)
                        continue
                    mu = dbusername.match(ln)
                    if mu:
                        self.dbUser= mu.group(1)
                        continue
                    mp = dbpasswd.match(ln)
                    if mp:
                        self.dbPasswd= mp.group(1)
                        continue
        except Exception, e:
            print 'error: {}'.format(e)
        # last resort
        if not self.dbName:
            self.dbName ='stars'
        if not self.dbUser:
            # default to the current unix user name
            self.dbUser = pwd.getpwuid(os.getuid()).pw_name
        if not self.dbPasswd:
            self.dbPasswd =''
    def test_readRts2Configuration(self):
        logger.info('== {} =='.format(self._testMethodName))
        self.assertTrue(self.openedRts2Ini,'return value: {}, coud not read rts2.ini'.format(self.openedRts2Ini))
    def test_dbReadAccessTarget(self):
        logger.info('== {} =='.format(self._testMethodName))
        # Expected row for the built-in "OnTargetFocus" target (tar_id=5).
        # NOTE(review): column layout assumed to match the deployed schema.
        entry=(5, 'o', 'OnTargetFocus', None, None, 'this target does not change the RA/DEC values', True, 1, None, None, None, None, True, None, None, None)
        conn = psycopg2.connect('dbname={} user={} password={}'.format(self.dbName, self.dbUser, self.dbPasswd))
        crsr = conn.cursor()
        crsr.execute('SELECT * FROM targets WHERE tar_id=5 ;')
        result=crsr.fetchone()
        crsr.close()
        conn.close()
        self.assertEqual(entry, result, 'return value:{}'.format(result))
    def test_dbReadAccessCCDScript(self):
        logger.info('== {} =='.format(self._testMethodName))
        # Third column of the scripts row holds the executable script line.
        entry=('exe /usr/local/bin/rts2saf_focus.py')
        conn = psycopg2.connect('dbname={} user={} password={}'.format(self.dbName, self.dbUser, self.dbPasswd))
        crsr = conn.cursor()
        crsr.execute('SELECT * FROM scripts WHERE tar_id=5 ;')
        result=crsr.fetchone()
        crsr.close()
        conn.close()
        self.assertEqual(entry, result[2], 'return value:>>{}<<'.format(result[2]))
if __name__ == '__main__':
    # verbosity=0 suppresses per-test output; run the ordered suite above.
    unittest.TextTestRunner(verbosity=0).run(suite())
| lgpl-3.0 |
rothnic/bokeh | scripts/interactive_tester.py | 43 | 9271 | from __future__ import print_function
import argparse
import importlib
import os
from shutil import rmtree
from six.moves import input
import sys
import textwrap
import time
import json
# TODO:
# catch and log exceptions in examples files that fail to open
DIRECTORIES = {
'file' : '../../examples/plotting/file',
'notebook': '../../examples/plotting/notebook',
'server' : '../../examples/plotting/server',
'ggplot' : '../../examples/compat/ggplot',
'glyphs' : '../../examples/glyphs',
'mpl' : '../../examples/compat/mpl',
'pandas' : '../../examples/compat/pandas',
'seaborn' : '../../examples/compat/seaborn',
'charts' : '../../examples/charts',
}
DEFAULT_TEST_FILES = [
'../../examples/plotting/file/stocks.py',
'../../examples/plotting/file/glucose.py',
'../../examples/compat/ggplot/density.py',
'../../examples/plotting/server/stocks.py',
'../../examples/plotting/server/glucose.py',
'../../examples/plotting/notebook/candlestick.ipynb',
'../../examples/plotting/notebook/glucose.ipynb',
'../../examples/compat/seaborn/violin.py',
'../../examples/charts/boxplot.py',
]
SESSION_FILE = os.path.abspath("INTERACTIVE_TESTER_SESSION.json")
def get_parser():
"""Create the parser that will be used to add arguments to the script.
"""
parser = argparse.ArgumentParser(description=textwrap.dedent("""
Tests a selection of .py or .ipynb bokeh example files.
The --location option allows you to select a specific examples subdirectory to test all files in,
ignoring __init__.py
Location arguments can be any valid path to a folder with the examples, like:
-l /path/to/my/examplesyou can choose:
or any of the pre-built keywords that point to the related examples:
- file
- notebook
- server
- ggplot
- glyphs
- mpl
- pandas
- seaborn
"""), formatter_class=argparse.RawTextHelpFormatter)
parser.add_argument('--no-log', action='store_true', dest='nolog', default=False,
help="don't save a log of any errors discovered")
parser.add_argument('-l', '--location', action='store', default=False,
help="example directory in which you wish to test")
parser.add_argument('--reuse-session', action='store_true', dest='reuseSession', default=False,
help="do not clean last session log and start from where you left")
parser.add_argument('--notebook-options', action='store', dest='notebookOptions', default="",
help="options to be forwarded to ipython notebook to customize it's behaviour")
return parser
def depend_check(dependency):
    """Return True if *dependency* is importable; otherwise print an
    install hint and return False."""
    try:
        importlib.import_module(dependency)
    except ImportError as e:
        print("%s\nPlease use conda or pip to install the necessary dependency." % (e))
        return False
    return True
def save_session(session):
    """Serialize *session* (a mapping of example file -> result string)
    to SESSION_FILE as JSON, replacing any previous contents."""
    with open(SESSION_FILE, 'w') as res_file:
        res_file.write(json.dumps(session))
def get_session():
    """Load and return the previously saved session dict, or {} when no
    session file exists (or it cannot be opened)."""
    try:
        with open(SESSION_FILE, 'r') as res_file:
            raw = res_file.read()
    except IOError:
        return {}
    return json.loads(raw)
def clean_session():
    """
    Removes previous session file
    """
    # Check first: os.remove on a missing path would raise OSError.
    if os.path.exists(SESSION_FILE):
        os.remove(SESSION_FILE)
def main(testing_ground=None, notebook_options=""):
    """
    Collect and run .py or .ipynb examples from a set list or given examples directory, ignoring __init__.py
    User input is collected to determine a properly or improperly displayed page

    NOTE(review): relies on the module-level ``results`` namespace created
    in the __main__ block (log_name, nolog) — confirm before reusing this
    function outside the script entry point.
    """
    # Create a testing directory if one does not exist, then cd into it
    testing_directory = 'tmp_test'
    if not os.path.exists(testing_directory):
        os.mkdir(testing_directory)
    os.chdir(testing_directory)
    if testing_ground:
        log_name = results.location
        TestFiles = [
            fileName for fileName in os.listdir('%s/.' % testing_ground)
            if fileName.endswith(('.py', '.ipynb')) and fileName != '__init__.py'
        ]
    else:
        log_name = "fast"
        TestFiles = DEFAULT_TEST_FILES
    Log = []
    # Resume support: files already recorded in the last session are skipped.
    lastSession = get_session()
    for index, fileName in enumerate(TestFiles):
        if testing_ground:
            fileName = "%s/%s" % (testing_ground, fileName)
        try:
            if not fileName in lastSession:
                # Mark as in-progress before opening, so a crash is visible
                # as "TESTING..." on the next --reuse-session run.
                lastSession[fileName] = "TESTING..."
                save_session(lastSession)
                command = get_cmd(fileName, notebook_options)
                opener(fileName, command)
                if results.nolog:
                    # Don't display 'next file' message after opening final file in a dir
                    if index != len(TestFiles)-1:
                        input("\nPress enter to open next file ")
                else:
                    ErrorReport = test_status()
                    if ErrorReport:
                        Log.append("\n\n%s: \n %s" % (fileName, ErrorReport))
                    lastSession[fileName] = ErrorReport
                    save_session(lastSession)
            else:
                prevRes = lastSession[fileName]
                if prevRes == "TESTING...":
                    # Previous run died before recording a verdict; re-ask.
                    print("RESULT OF %s LAST RUN NOT REGISTERED!!" % fileName)
                    ErrorReport = test_status()
                    lastSession[fileName] = ErrorReport
                    save_session(lastSession)
                else:
                    print("%s detected in last session: SKIPPING" % fileName)
        except (KeyboardInterrupt, EOFError):
            break
    # exit the testing directory and delete it
    os.chdir('../')
    rmtree(testing_directory)
    if Log:
        logger(Log, log_name)
def get_cmd(some_file, notebook_options=""):
    """Return the shell command used to open *some_file*.

    Args:
        some_file: path ending in .py or .ipynb.
        notebook_options: extra flags appended to the notebook command.

    Returns:
        "python" for .py files, "ipython notebook <options>" for .ipynb.

    Raises:
        ValueError: for any other extension. (Previously this fell
        through with `command` unbound and raised UnboundLocalError.)
    """
    if some_file.endswith('.py'):
        return "python"
    elif some_file.endswith('.ipynb'):
        return "ipython notebook %s" % notebook_options
    raise ValueError("Unsupported example file type: %s" % some_file)
def opener(some_file, command):
    """Announce which example is being opened, then launch it with
    *command* via os.system (blocks until the command exits)."""
    # str.strip('../') strips the characters '.', '/' from both ends,
    # not the literal prefix — good enough for display purposes here.
    display_name = some_file.strip('../')
    print("\nOpening %s\n" % display_name)
    os.system("%s %s" % (command, some_file))
def test_status():
    """Ask the user whether the plot displayed correctly.

    Returns the user's problem description for a 'n' answer, otherwise
    None (implicitly) for a 'y' answer.
    """
    answer = input("Did the plot(s) display correctly? (y/n) ")
    while not answer.startswith(('y', 'n')):
        print("")
        answer = input("Unexpected answer. Please type y or n. ")
    if answer.startswith('n'):
        return input("Please describe the problem: ")
def logger(error_array, name):
    """Write every entry of *error_array* (newline-terminated) to
    '<name>_examples_testlog.txt', replacing any previous log file."""
    logfile = "%s_examples_testlog.txt" % name
    # Remove a stale log so append mode starts from an empty file.
    if os.path.exists(logfile):
        os.remove(logfile)
    with open(logfile, 'a') as log:
        print("")
        print("\nWriting error log to %s" % logfile)
        log.writelines("%s\n" % error for error in error_array)
if __name__ == '__main__':
    # bokeh itself is required for every test run.
    if not depend_check('bokeh'):
        sys.exit(1)
    parser = get_parser()
    results = parser.parse_args()
    if results.location:
        if results.location and results.location in DIRECTORIES:
            target = results.location
            # These keywords need their matching compat package installed.
            if target in ['ggplot', 'pandas', 'seaborn', 'charts']:
                if not depend_check(target):
                    sys.exit(1)
            test_dir = DIRECTORIES[target]
        elif os.path.exists(results.location):
            # in case target is not one of the recognized keys and is a
            # valid path we can run the examples in that folder
            test_dir = results.location
            print("Running examples in custom location:", test_dir)
        else:
            print("Test location '%s' not recognized.\nPlease type 'python interactive_tester.py -h' for a list of valid test directories."
                  % results.location)
            sys.exit(1)
    else:
        test_dir = None
    # NOTE(review): this also prints the bokeh-server warning for the
    # default (test_dir is None) run, not only for 'server' — confirm
    # whether that is intentional.
    if results.location == 'server' or test_dir is None:
        print("Server examples require bokeh-server. Make sure you've typed 'bokeh-server' in another terminal tab.")
        time.sleep(5)
    if not results.reuseSession:
        print("cleaning previous session file...",)
        clean_session()
        print("OK")
    main(test_dir, notebook_options=results.notebookOptions)
| bsd-3-clause |
claritylab/sirius | lucida/commandcenter/controllers/ConfigChecker.py | 3 | 1570 | from Config import *
# Check Config.py.
if MAX_DOC_NUM_PER_USER <= 0:
print 'MAX_DOC_NUM_PER_USER must be non-negative'
exit()
if not (TRAIN_OR_LOAD == 'train' or TRAIN_OR_LOAD == 'load'):
print 'TRAIN_OR_LOAD must be either train or load'
exit()
for service_name, service_obj in SERVICES.iteritems():
if not service_name == service_obj.name:
print service_name, 'must be the same as', service_obj.name
exit()
for input_type in CLASSIFIER_DESCRIPTIONS:
print '@@@@@ When query type is ' + input_type + ', there are ' + \
str(len(CLASSIFIER_DESCRIPTIONS[input_type])) + ' possible classes:'
i = 0
for query_class_name, graph in \
CLASSIFIER_DESCRIPTIONS[input_type].iteritems():
print str(i) + '. ' + query_class_name + ' -- needs to invoke ' \
+ graph.to_string()
for node in graph.node_list:
if not node.service_name in SERVICES:
print 'CLASSIFIER_DESCRIPTIONS misconfigured'
print 'Unrecognized service:', node.sercice_name
exit()
if input_type == 'text':
if SERVICES[node.service_name].input_type != 'text':
print 'CLASSIFIER_DESCRIPTIONS misconfigured'
print node.service_name, 'does not receive text'
exit()
elif input_type == 'image':
if SERVICES[node.service_name].input_type != 'image':
print 'CLASSIFIER_DESCRIPTIONS misconfigured'
print node.service_name, 'does not receive image'
exit()
elif input_type == 'text_image':
pass
else:
print 'CLASSIFIER_DESCRIPTIONS misconfigured'
print 'input type must be either text, image, or text_image'
exit()
i += 1
| bsd-3-clause |
mkocka/galaxytea | modeling/domcek/plots.py | 1 | 4294 | import matplotlib.pyplot as plt
from numpy import *
###List of variables
# r_in [10**10 cm] innder radius
# r_out [10**10 cm] outer radius
# step [10**10 cm] step of plot
# alfa [] parameter of accretion
# M_16 [10**16 g.s**(-1)] accretion flow
# m_1 [solar mass] mass of compact object
# R_hv [10**10 cm] radius of compact object
# R_10 [10**10 cm] distance from compact object
# f numerical factor
###List of computed parameters
# Surface density [g.cm**(-2)] (sigma)
# Height [cm] (H)
# Density [g.cm**(-3)] (rho)
# Central disc temeprature [K] (T_c)
# Opacity [] (tau)
# viscosity [cm**2.s**(-1)] (nu)
# radial velocity towards center [cm.s**(-1)] (v_r)
###function solutions parameters
# parameter 1 r_in
# parameter 2 r_out
# parameter 3 step
# parameter 4 alfa
# parameter 5 M_16
# parameter 6 m_1
# parameter 7 R_hv
def solutions(r_in,r_out,step,alfa,M_16,m_1,R_hv):
#defining lists
list_function = arange(r_in,r_out,step)
R_10_l,surface_density_l,height_l,density_l,Fx = ([] for i in range(5))
temperature_l,opacity_l,viscosity_l,radial_velocity_l = ([] for i in range(4))
#computation and appending to lists
for R_10 in list_function:
f=(1-((R_hv)/(R_10))**(1.0/2))**(1.0/4)
surface_density = 5.2*alfa**(-4.0/5)*M_16**(7.0/10)*m_1**(1.0/4)*R_10**(-3.0/4)*f**(14.0/5)
height = 1.7*10**8*alfa**(-1.0/10)*M_16**(3.0/20)*m_1**(-3.0/8)*R_10**(9.0/8)*f**(3.0/5)
density = 3.1*10**(-8)*alfa**(-7.0/10)*M_16**(11.0/20)*m_1**(5.0/8)*R_10**(-15.0/8)*f**(11.0/5)
temperature = 1.4*10**4*alfa**(-1.0/5)*M_16**(3.0/10)*m_1**(1.0/4)*R_10**(-3.0/4)*f**(6.0/5)
opacity = 190*alfa**(-4.0/5)*M_16**(1.0/5)*f**(4.0/5)
viscosity = 1.8*10**14*alfa**(4.0/5)*M_16**(3.0/10)*m_1**(-1.0/4)*R_10**(3.0/4)*f**(6.0/5)
radial_velocity = 2.7*10**4*alfa**(4.0/5)*M_16**(3.0/10)*m_1**(-1.0/4)*R_10**(-1.0/4)*f**(-14.0/5)
R_10_l.append(R_10)
surface_density_l.append(surface_density)
height_l.append(height)
density_l.append(density)
temperature_l.append(temperature)
opacity_l.append(opacity)
viscosity_l.append(viscosity)
radial_velocity_l.append(radial_velocity)
Fx.append(f)
#transformation R_10 to kolimeters
R_km = [ x / 10**(-4) for x in R_10_l]
return R_km, surface_density_l, height_l, density_l,temperature_l,opacity_l,viscosity_l,radial_velocity_l,Fx
#for definitions of parameters look up
r_in =1.0001*10**(-4)
r_out =10**(-2)
step = 10**(-6)
alfa = 0.5
M_16 = 63
m_1 = 1.5
R_hv = 1.0*10**(-4)
lists=solutions(r_in,r_out,step,alfa,M_16,m_1,R_hv)
print 30*"-"
print "Used parameter values"
print 30*"-"
print "innder radius:", 10*".",r_in, 10*".", "[10$^{10}$ cm]"
print "outer radius:", 10*".", r_out, 10*".", "[10$^{10}$ cm]"
print "step of plot:", 10*".", step, 10*".", "[10$^{10}$ cm]"
print "parameter of accretion alfa:", 10*".", alfa
print "accretion flow:", 10*".", M_16, 10*".", "[10$^6$ g.s${-1)}$]"
print "mass of compact object:", 10*".", m_1, 10*".", "[solar mass]"
print "radius of compact object:", 10*".", R_hv, 10*".", "[10$^{10}$ cm]"
plt.plot(lists[0], lists[1])
plt.title('surface density')
plt.xlabel('radius [km]')
plt.ylabel('surface density [g.cm$^{-2}$] ')
plt.grid()
plt.savefig("surface density")
plt.gcf().clear()
plt.plot(lists[0], lists[2])
plt.title('height')
plt.xlabel('radius [km]')
plt.ylabel('height [cm] ')
plt.grid()
plt.savefig("height")
plt.gcf().clear()
plt.plot(lists[0], lists[3])
plt.title('density')
plt.xlabel('radius [km]')
plt.ylabel('density [g.cm$^{-3}$] ')
plt.grid()
plt.savefig("density")
plt.gcf().clear()
plt.plot(lists[0], lists[4])
plt.title('temperature')
plt.xlabel('radius [km]')
plt.ylabel('temperature [K] ')
plt.grid()
plt.savefig("temperature")
plt.gcf().clear()
plt.plot(lists[0], lists[5])
plt.title('opacity')
plt.xlabel('radius [km]')
plt.ylabel('opacity ')
plt.grid()
plt.savefig("opacity")
plt.gcf().clear()
plt.plot(lists[0], lists[6])
plt.title('viscosity')
plt.xlabel('radius [km]')
plt.ylabel('viscosity [cm$^{2}$.s$^{-1}$] ')
plt.grid()
plt.savefig("viscosity")
plt.gcf().clear()
plt.plot(lists[0], lists[7])
plt.title('radial velocity')
plt.xlabel('radius [km]')
plt.ylabel('radial velocity [cm.s$^{-1}$] ')
plt.grid()
plt.savefig("radial velocity")
plt.gcf().clear()
| mit |
DCSaunders/tensorflow | tensorflow/contrib/learn/python/learn/estimators/stability_test.py | 20 | 4977 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Estimator regression tests."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import random
import tensorflow as tf
from tensorflow.contrib.learn.python.learn.learn_io import data_feeder
def _get_input_fn(x, y, batch_size=None):
  """Return an (input_builder, feed_fn) pair for in-memory arrays.

  Wraps x/y in a data feeder so estimators can consume them; n_classes=None
  marks this as a regression-style (continuous target) feed.
  """
  df = data_feeder.setup_train_data_feeder(
      x, y, n_classes=None, batch_size=batch_size)
  return df.input_builder, df.get_feed_dict_fn()
# We use a null optimizer since we can't get deterministic results out of
# supervisor's multiple threads.
class _NullOptimizer(tf.train.Optimizer):
  """Optimizer that discards all gradients, applying no variable updates."""
  def __init__(self):
    super(_NullOptimizer, self).__init__(use_locking=False, name='Null')
  def _apply_dense(self, grad, var):
    # No-op: leave the variable untouched regardless of the gradient.
    return tf.no_op()
  def _apply_sparse(self, grad, var):
    # No-op for sparse gradients as well.
    return tf.no_op()
  def _prepare(self):
    pass
# Shared singleton; the optimizer holds no state, so reuse is safe.
_NULL_OPTIMIZER = _NullOptimizer()
class StabilityTest(tf.test.TestCase):
  """Tests that estimators are reproducible across identically-seeded runs."""

  def testRandomStability(self):
    """The same graph-level seed must yield identical random draws."""
    my_seed = 42
    minval = -0.3333
    maxval = 0.3333
    with tf.Graph().as_default() as g:
      with self.test_session(graph=g) as session:
        g.seed = my_seed
        x = tf.random_uniform([10, 10], minval=minval, maxval=maxval)
        val1 = session.run(x)
    with tf.Graph().as_default() as g:
      with self.test_session(graph=g) as session:
        g.seed = my_seed
        x = tf.random_uniform([10, 10], minval=minval, maxval=maxval)
        val2 = session.run(x)
    self.assertAllClose(val1, val2)

  def testLinearRegression(self):
    """Two identically-seeded LinearRegressor fits must agree."""
    my_seed = 42
    config = tf.contrib.learn.RunConfig(tf_random_seed=my_seed)
    boston = tf.contrib.learn.datasets.load_boston()
    columns = [tf.contrib.layers.real_valued_column('', dimension=13)]
    # Train the same model twice in separate, identically-seeded graphs.
    with tf.Graph().as_default() as g1:
      random.seed(my_seed)
      g1.seed = my_seed
      tf.contrib.framework.create_global_step()
      regressor1 = tf.contrib.learn.LinearRegressor(optimizer=_NULL_OPTIMIZER,
                                                    feature_columns=columns,
                                                    config=config)
      regressor1.fit(x=boston.data, y=boston.target, steps=1)
    with tf.Graph().as_default() as g2:
      random.seed(my_seed)
      g2.seed = my_seed
      tf.contrib.framework.create_global_step()
      regressor2 = tf.contrib.learn.LinearRegressor(optimizer=_NULL_OPTIMIZER,
                                                    feature_columns=columns,
                                                    config=config)
      regressor2.fit(x=boston.data, y=boston.target, steps=1)
    self.assertAllClose(regressor1.weights_, regressor2.weights_)
    self.assertAllClose(regressor1.bias_, regressor2.bias_)
    self.assertAllClose(
        list(regressor1.predict(boston.data, as_iterable=True)),
        list(regressor2.predict(boston.data, as_iterable=True)), atol=1e-05)

  def testDNNRegression(self):
    """Two identically-seeded DNNRegressor fits must agree."""
    my_seed = 42
    config = tf.contrib.learn.RunConfig(tf_random_seed=my_seed)
    boston = tf.contrib.learn.datasets.load_boston()
    columns = [tf.contrib.layers.real_valued_column('', dimension=13)]
    with tf.Graph().as_default() as g1:
      random.seed(my_seed)
      g1.seed = my_seed
      tf.contrib.framework.create_global_step()
      regressor1 = tf.contrib.learn.DNNRegressor(
          hidden_units=[10], feature_columns=columns,
          optimizer=_NULL_OPTIMIZER, config=config)
      regressor1.fit(x=boston.data, y=boston.target, steps=1)
    with tf.Graph().as_default() as g2:
      random.seed(my_seed)
      g2.seed = my_seed
      tf.contrib.framework.create_global_step()
      regressor2 = tf.contrib.learn.DNNRegressor(
          hidden_units=[10], feature_columns=columns,
          optimizer=_NULL_OPTIMIZER, config=config)
      regressor2.fit(x=boston.data, y=boston.target, steps=1)
    for w1, w2 in zip(regressor1.weights_, regressor2.weights_):
      self.assertAllClose(w1, w2)
    # BUG FIX: the original zipped regressor2.bias_ with itself, so the bias
    # comparison passed trivially; compare the two trained models instead.
    for b1, b2 in zip(regressor1.bias_, regressor2.bias_):
      self.assertAllClose(b1, b2)
    self.assertAllClose(
        list(regressor1.predict(boston.data, as_iterable=True)),
        list(regressor2.predict(boston.data, as_iterable=True)), atol=1e-05)
# Allow running this test module directly: `python stability_test.py`.
if __name__ == '__main__':
  tf.test.main()
| apache-2.0 |
creasyw/IMTAphy | documentation/toolchain/Pygments-0.11.1-py2.5.egg/pygments/cmdline.py | 19 | 12149 | # -*- coding: utf-8 -*-
"""
pygments.cmdline
~~~~~~~~~~~~~~~~
Command line interface.
:copyright: 2006-2008 by Georg Brandl.
:license: BSD, see LICENSE for more details.
"""
import sys
import getopt
from textwrap import dedent
from pygments import __version__, __author__, highlight
from pygments.util import ClassNotFound, OptionError, docstring_headline
from pygments.lexers import get_all_lexers, get_lexer_by_name, get_lexer_for_filename, \
find_lexer_class, guess_lexer, TextLexer
from pygments.formatters import get_all_formatters, get_formatter_by_name, \
get_formatter_for_filename, find_formatter_class, \
TerminalFormatter # pylint:disable-msg=E0611
from pygments.filters import get_all_filters, find_filter_class
from pygments.styles import get_all_styles, get_style_by_name
USAGE = """\
Usage: %s [-l <lexer> | -g] [-F <filter>[:<options>]] [-f <formatter>]
[-O <options>] [-P <option=value>] [-o <outfile>] [<infile>]
%s -S <style> -f <formatter> [-a <arg>] [-O <options>] [-P <option=value>]
%s -L [<which> ...]
%s -H <type> <name>
%s -h | -V
Highlight the input file and write the result to <outfile>.
If no input file is given, use stdin, if -o is not given, use stdout.
<lexer> is a lexer name (query all lexer names with -L). If -l is not
given, the lexer is guessed from the extension of the input file name
(this obviously doesn't work if the input is stdin). If -g is passed,
attempt to guess the lexer from the file contents, or pass through as
plain text if this fails (this can work for stdin).
Likewise, <formatter> is a formatter name, and will be guessed from
the extension of the output file name. If no output file is given,
the terminal formatter will be used by default.
With the -O option, you can give the lexer and formatter a comma-
separated list of options, e.g. ``-O bg=light,python=cool``.
The -P option adds lexer and formatter options like the -O option, but
you can only give one option per -P. That way, the option value may
contain commas and equals signs, which it can't with -O, e.g.
``-P "heading=Pygments, the Python highlighter".
With the -F option, you can add filters to the token stream, you can
give options in the same way as for -O after a colon (note: there must
not be spaces around the colon).
The -O, -P and -F options can be given multiple times.
With the -S option, print out style definitions for style <style>
for formatter <formatter>. The argument given by -a is formatter
dependent.
The -L option lists lexers, formatters, styles or filters -- set
`which` to the thing you want to list (e.g. "styles"), or omit it to
list everything.
The -H option prints detailed help for the object <name> of type <type>,
where <type> is one of "lexer", "formatter" or "filter".
The -h option prints this help.
The -V option prints the package version.
"""
def _parse_options(o_strs):
opts = {}
if not o_strs:
return opts
for o_str in o_strs:
if not o_str:
continue
o_args = o_str.split(',')
for o_arg in o_args:
o_arg = o_arg.strip()
try:
o_key, o_val = o_arg.split('=')
o_key = o_key.strip()
o_val = o_val.strip()
except ValueError:
opts[o_arg] = True
else:
opts[o_key] = o_val
return opts
def _parse_filters(f_strs):
filters = []
if not f_strs:
return filters
for f_str in f_strs:
if ':' in f_str:
fname, fopts = f_str.split(':', 1)
filters.append((fname, _parse_options([fopts])))
else:
filters.append((f_str, {}))
return filters
def _print_help(what, name):
    """Print docstring-based help for one lexer, formatter or filter.

    ``what`` selects the category ('lexer', 'formatter' or 'filter');
    ``name`` is the alias to look up. A failed lookup surfaces as an
    AttributeError (None has no .name/__doc__) and is reported on stderr.
    """
    try:
        if what == 'lexer':
            cls = find_lexer_class(name)
            print "Help on the %s lexer:" % cls.name
            print dedent(cls.__doc__)
        elif what == 'formatter':
            cls = find_formatter_class(name)
            print "Help on the %s formatter:" % cls.name
            print dedent(cls.__doc__)
        elif what == 'filter':
            cls = find_filter_class(name)
            print "Help on the %s filter:" % name
            print dedent(cls.__doc__)
    except AttributeError:
        print >>sys.stderr, "%s not found!" % what
def _print_list(what):
    """Print a sorted listing of all known lexers, formatters, filters or
    styles, depending on ``what`` (singular form: 'lexer', 'formatter',
    'filter' or 'style')."""
    if what == 'lexer':
        print
        print "Lexers:"
        print "~~~~~~~"
        info = []
        for fullname, names, exts, _ in get_all_lexers():
            tup = (', '.join(names)+':', fullname,
                   exts and '(filenames ' + ', '.join(exts) + ')' or '')
            info.append(tup)
        info.sort()
        for i in info:
            print ('* %s\n %s %s') % i
    elif what == 'formatter':
        print
        print "Formatters:"
        print "~~~~~~~~~~~"
        info = []
        for cls in get_all_formatters():
            doc = docstring_headline(cls)
            tup = (', '.join(cls.aliases) + ':', doc, cls.filenames and
                   '(filenames ' + ', '.join(cls.filenames) + ')' or '')
            info.append(tup)
        info.sort()
        for i in info:
            print ('* %s\n %s %s') % i
    elif what == 'filter':
        print
        print "Filters:"
        print "~~~~~~~~"
        for name in get_all_filters():
            cls = find_filter_class(name)
            print "* " + name + ':'
            print " %s" % docstring_headline(cls)
    elif what == 'style':
        print
        print "Styles:"
        print "~~~~~~~"
        for name in get_all_styles():
            cls = get_style_by_name(name)
            print "* " + name + ':'
            print " %s" % docstring_headline(cls)
def main(args=sys.argv):
    """
    Main command line entry point.

    Returns a process exit code: 0 on success, 1 on runtime errors (bad
    lexer/formatter name, I/O or highlighting failure), 2 on usage errors.
    """
    # pylint: disable-msg=R0911,R0912,R0915
    usage = USAGE % ((args[0],) * 5)
    try:
        popts, args = getopt.getopt(args[1:], "l:f:F:o:O:P:LS:a:hVHg")
    except getopt.GetoptError, err:
        print >>sys.stderr, usage
        return 2
    opts = {}
    O_opts = []
    P_opts = []
    F_opts = []
    # -O/-P/-F are repeatable, so collect them in lists; for every other
    # option the last occurrence wins via the opts dict.
    for opt, arg in popts:
        if opt == '-O':
            O_opts.append(arg)
        elif opt == '-P':
            P_opts.append(arg)
        elif opt == '-F':
            F_opts.append(arg)
        opts[opt] = arg
    if not opts and not args:
        print usage
        return 0
    if opts.pop('-h', None) is not None:
        print usage
        return 0
    if opts.pop('-V', None) is not None:
        print 'Pygments version %s, (c) 2006-2008 by %s.' % (__version__, __author__)
        return 0
    # handle ``pygmentize -L``
    L_opt = opts.pop('-L', None)
    if L_opt is not None:
        if opts:
            print >>sys.stderr, usage
            return 2
        # print version
        main(['', '-V'])
        if not args:
            args = ['lexer', 'formatter', 'filter', 'style']
        for arg in args:
            # accept plural forms ("lexers") by stripping the trailing 's'
            _print_list(arg.rstrip('s'))
        return 0
    # handle ``pygmentize -H``
    H_opt = opts.pop('-H', None)
    if H_opt is not None:
        if opts or len(args) != 2:
            print >>sys.stderr, usage
            return 2
        what, name = args
        if what not in ('lexer', 'formatter', 'filter'):
            print >>sys.stderr, usage
            return 2
        _print_help(what, name)
        return 0
    # parse -O options
    parsed_opts = _parse_options(O_opts)
    opts.pop('-O', None)
    # parse -P options (one key=value per flag; value may contain , and =)
    for p_opt in P_opts:
        try:
            name, value = p_opt.split('=', 1)
        except ValueError:
            parsed_opts[p_opt] = True
        else:
            parsed_opts[name] = value
    opts.pop('-P', None)
    # handle ``pygmentize -S``
    S_opt = opts.pop('-S', None)
    a_opt = opts.pop('-a', None)
    if S_opt is not None:
        f_opt = opts.pop('-f', None)
        if not f_opt:
            print >>sys.stderr, usage
            return 2
        if opts or args:
            print >>sys.stderr, usage
            return 2
        try:
            parsed_opts['style'] = S_opt
            fmter = get_formatter_by_name(f_opt, **parsed_opts)
        except ClassNotFound, err:
            print >>sys.stderr, err
            return 1
        arg = a_opt or ''
        print fmter.get_style_defs(arg)
        return 0
    # if no -S is given, -a is not allowed
    if a_opt is not None:
        print >>sys.stderr, usage
        return 2
    # parse -F options
    F_opts = _parse_filters(F_opts)
    opts.pop('-F', None)
    # select formatter
    outfn = opts.pop('-o', None)
    fmter = opts.pop('-f', None)
    if fmter:
        try:
            fmter = get_formatter_by_name(fmter, **parsed_opts)
        except (OptionError, ClassNotFound), err:
            print >>sys.stderr, 'Error:', err
            return 1
    if outfn:
        # no explicit formatter: guess one from the output filename
        if not fmter:
            try:
                fmter = get_formatter_for_filename(outfn, **parsed_opts)
            except (OptionError, ClassNotFound), err:
                print >>sys.stderr, 'Error:', err
                return 1
        try:
            outfile = file(outfn, 'wb')
        except Exception, err:
            print >>sys.stderr, 'Error: cannot open outfile:', err
            return 1
    else:
        if not fmter:
            fmter = TerminalFormatter(**parsed_opts)
        outfile = sys.stdout
    # select lexer
    lexer = opts.pop('-l', None)
    if lexer:
        try:
            lexer = get_lexer_by_name(lexer, **parsed_opts)
        except (OptionError, ClassNotFound), err:
            print >>sys.stderr, 'Error:', err
            return 1
    if args:
        if len(args) > 1:
            print >>sys.stderr, usage
            return 2
        infn = args[0]
        try:
            code = file(infn).read()
        except Exception, err:
            print >>sys.stderr, 'Error: cannot read infile:', err
            return 1
        if not lexer:
            # guess from filename first; fall back to content guessing
            # (or plain text) only when -g was given
            try:
                lexer = get_lexer_for_filename(infn, **parsed_opts)
            except ClassNotFound, err:
                if '-g' in opts:
                    try:
                        lexer = guess_lexer(code)
                    except ClassNotFound:
                        lexer = TextLexer()
                else:
                    print >>sys.stderr, 'Error:', err
                    return 1
            except OptionError, err:
                print >>sys.stderr, 'Error:', err
                return 1
    else:
        # reading from stdin: a lexer must be named or guessed with -g
        if '-g' in opts:
            code = sys.stdin.read()
            try:
                lexer = guess_lexer(code)
            except ClassNotFound:
                lexer = TextLexer()
        elif not lexer:
            print >>sys.stderr, 'Error: no lexer name given and reading ' + \
                                'from stdin (try using -g or -l <lexer>)'
            return 2
        else:
            code = sys.stdin.read()
    # No encoding given? Use latin1 if output file given,
    # stdin/stdout encoding otherwise.
    # (This is a compromise, I'm not too happy with it...)
    if 'encoding' not in parsed_opts and 'outencoding' not in parsed_opts:
        if outfn:
            # encoding pass-through
            fmter.encoding = 'latin1'
        else:
            # use terminal encoding
            lexer.encoding = getattr(sys.stdin, 'encoding', None) or 'ascii'
            fmter.encoding = getattr(sys.stdout, 'encoding', None) or 'ascii'
    # ... and do it!
    try:
        # process filters
        for fname, fopts in F_opts:
            lexer.add_filter(fname, **fopts)
        highlight(code, lexer, fmter, outfile)
    except Exception, err:
        import traceback
        info = traceback.format_exception(*sys.exc_info())
        msg = info[-1].strip()
        if len(info) >= 3:
            # extract relevant file and position info
            msg += '\n (f%s)' % info[-2].split('\n')[0].strip()[1:]
        print >>sys.stderr
        print >>sys.stderr, '*** Error while highlighting:'
        print >>sys.stderr, msg
        return 1
    return 0
| gpl-2.0 |
jimi-c/ansible | lib/ansible/module_utils/network/enos/enos.py | 68 | 5474 | # This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by
# Ansible still belong to the author of the module, and may assign their own
# license to the complete work.
#
# Copyright (C) 2017 Lenovo.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Contains utility methods
# Lenovo Networking
from ansible.module_utils._text import to_text
from ansible.module_utils.basic import env_fallback, return_values
from ansible.module_utils.network.common.utils import to_list, EntityCollection
from ansible.module_utils.connection import Connection, exec_command
from ansible.module_utils.connection import ConnectionError
_DEVICE_CONFIGS = {}
_CONNECTION = None
enos_provider_spec = {
'host': dict(),
'port': dict(type='int'),
'username': dict(fallback=(env_fallback, ['ANSIBLE_NET_USERNAME'])),
'password': dict(fallback=(env_fallback, ['ANSIBLE_NET_PASSWORD']), no_log=True),
'ssh_keyfile': dict(fallback=(env_fallback, ['ANSIBLE_NET_SSH_KEYFILE']), type='path'),
'authorize': dict(fallback=(env_fallback, ['ANSIBLE_NET_AUTHORIZE']), type='bool'),
'auth_pass': dict(fallback=(env_fallback, ['ANSIBLE_NET_AUTH_PASS']), no_log=True),
'timeout': dict(type='int'),
'context': dict(),
'passwords': dict()
}
enos_argument_spec = {
'provider': dict(type='dict', options=enos_provider_spec),
}
command_spec = {
'command': dict(key=True),
'prompt': dict(),
'answer': dict()
}
def get_provider_argspec():
    """Return the argument spec dict describing the enos provider options."""
    return enos_provider_spec
def check_args(module, warnings):
    """Validate module arguments; intentionally a no-op for this platform."""
    pass
def get_connection(module):
    """Return the module-global persistent Connection, creating it on first use.

    On creation, if the module has a ``context`` parameter set, the device is
    switched to it ('changeto system' or 'changeto context <name>') before
    the connection is returned.
    """
    global _CONNECTION
    if _CONNECTION:
        return _CONNECTION
    _CONNECTION = Connection(module._socket_path)
    context = None
    try:
        context = module.params['context']
    except KeyError:
        context = None
    if context:
        if context == 'system':
            command = 'changeto system'
        else:
            command = 'changeto context %s' % context
        _CONNECTION.get(command)
    return _CONNECTION
def get_config(module, flags=None):
    """Fetch and memoize the device running configuration as text.

    When the ``passwords`` module parameter is truthy, the config is read
    via 'more system:running-config'; otherwise 'show running-config' plus
    any extra ``flags`` is used. Results are cached per command string in
    the module-global _DEVICE_CONFIGS.
    """
    flags = [] if flags is None else flags
    passwords = None
    try:
        passwords = module.params['passwords']
    except KeyError:
        passwords = None
    if passwords:
        cmd = 'more system:running-config'
    else:
        cmd = 'show running-config '
        cmd += ' '.join(flags)
        cmd = cmd.strip()
    try:
        # cache hit: return the previously fetched config for this command
        return _DEVICE_CONFIGS[cmd]
    except KeyError:
        conn = get_connection(module)
        out = conn.get(cmd)
        cfg = to_text(out, errors='surrogate_then_replace').strip()
        _DEVICE_CONFIGS[cmd] = cfg
        return cfg
def to_commands(module, commands):
    """Normalize a list of commands through the command_spec collection.

    In check mode, warns about (and thus skips executing) any command that
    is not a 'show' command.
    """
    if not isinstance(commands, list):
        raise AssertionError('argument must be of type <list>')
    transform = EntityCollection(module, command_spec)
    commands = transform(commands)
    for index, item in enumerate(commands):
        if module.check_mode and not item['command'].startswith('show'):
            module.warn('only show commands are supported when using check '
                        'mode, not executing `%s`' % item['command'])
    return commands
def run_commands(module, commands, check_rc=True):
    """Run each command on the device and return the list of text responses.

    NOTE(review): ``check_rc`` is accepted for API symmetry with other
    network modules but is not consulted in this implementation.
    """
    connection = get_connection(module)
    commands = to_commands(module, to_list(commands))
    responses = list()
    for cmd in commands:
        out = connection.get(**cmd)
        responses.append(to_text(out, errors='surrogate_then_replace'))
    return responses
def load_config(module, config):
    """Enter enable mode and push ``config`` to the device.

    Connection failures abort the module run via fail_json.
    """
    try:
        conn = get_connection(module)
        conn.get('enable')
        conn.edit_config(config)
    except ConnectionError as exc:
        module.fail_json(msg=to_text(exc))
def get_defaults_flag(module):
    """Probe 'show running-config ?' and pick the defaults keyword.

    Returns 'all' if the device's help output offers it, else 'full'.
    """
    rc, out, err = exec_command(module, 'show running-config ?')
    out = to_text(out, errors='surrogate_then_replace')
    commands = set()
    for line in out.splitlines():
        if line:
            # first whitespace-separated token of each help line is the keyword
            commands.add(line.strip().split()[0])
    if 'all' in commands:
        return 'all'
    else:
        return 'full'
| gpl-3.0 |
silenceli/nova | nova/scheduler/weights/metrics.py | 38 | 4448 | # Copyright (c) 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Metrics Weigher. Weigh hosts by their metrics.
This weigher can compute the weight based on the compute node host's various
metrics. The to-be weighed metrics and their weighing ratio are specified
in the configuration file as the followings:
[metrics]
weight_setting = name1=1.0, name2=-1.0
The final weight would be name1.value * 1.0 + name2.value * -1.0.
"""
from oslo.config import cfg
from nova import exception
from nova.scheduler import utils
from nova.scheduler import weights
metrics_weight_opts = [
cfg.FloatOpt('weight_multiplier',
default=1.0,
help='Multiplier used for weighing metrics.'),
cfg.ListOpt('weight_setting',
default=[],
help='How the metrics are going to be weighed. This '
'should be in the form of "<name1>=<ratio1>, '
'<name2>=<ratio2>, ...", where <nameX> is one '
'of the metrics to be weighed, and <ratioX> is '
'the corresponding ratio. So for "name1=1.0, '
'name2=-1.0" The final weight would be '
'name1.value * 1.0 + name2.value * -1.0.'),
cfg.BoolOpt('required',
default=True,
help='How to treat the unavailable metrics. When a '
'metric is NOT available for a host, if it is set '
'to be True, it would raise an exception, so it '
'is recommended to use the scheduler filter '
'MetricFilter to filter out those hosts. If it is '
'set to be False, the unavailable metric would be '
'treated as a negative factor in weighing '
'process, the returned value would be set by '
'the option weight_of_unavailable.'),
cfg.FloatOpt('weight_of_unavailable',
default=float(-10000.0),
help='The final weight value to be returned if '
'required is set to False and any one of the '
'metrics set by weight_setting is unavailable.'),
]
CONF = cfg.CONF
CONF.register_opts(metrics_weight_opts, group='metrics')
class MetricsWeigher(weights.BaseHostWeigher):
    """Weigh hosts by a configured linear combination of their metrics."""
    def __init__(self):
        self._parse_setting()
    def _parse_setting(self):
        # Parse "name=ratio" pairs from CONF.metrics.weight_setting into
        # (name, float ratio) tuples.
        self.setting = utils.parse_options(CONF.metrics.weight_setting,
                                           sep='=',
                                           converter=float,
                                           name="metrics.weight_setting")
    def weight_multiplier(self):
        """Override the weight multiplier."""
        return CONF.metrics.weight_multiplier
    def _weigh_object(self, host_state, weight_properties):
        """Return sum(metric.value * ratio) over the configured metrics.

        A missing metric either raises ComputeHostMetricNotFound (when
        CONF.metrics.required is True) or short-circuits to the configured
        weight_of_unavailable penalty.
        """
        value = 0.0
        for (name, ratio) in self.setting:
            try:
                value += host_state.metrics[name].value * ratio
            except KeyError:
                if CONF.metrics.required:
                    raise exception.ComputeHostMetricNotFound(
                        host=host_state.host,
                        node=host_state.nodename,
                        name=name)
                else:
                    # We treat the unavailable metric as the most negative
                    # factor, i.e. set the value to make this obj would be
                    # at the end of the ordered weighed obj list
                    # Do nothing if ratio or weight_multiplier is 0.
                    if ratio * self.weight_multiplier() != 0:
                        return CONF.metrics.weight_of_unavailable
        return value
| apache-2.0 |
yungyuc/solvcon | solvcon/parcel/vewave/tests/test_package.py | 2 | 2317 | # -*- coding: UTF-8 -*-
#
# Copyright (c) 2013, Yung-Yu Chen <yyc@solvcon.net>
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# - Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# - Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# - Neither the name of the SOLVCON nor the names of its contributors may be
# used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import unittest
import importlib
class TestAllImport(unittest.TestCase):
    """Smoke test: every vewave submodule imports and its __all__ resolves."""
    def test_everything(self):
        # Import each module of the vewave parcel, then verify every name
        # listed in its __all__ actually exists on the module object.
        allmods = [
            'solvcon.parcel.vewave',
            'solvcon.parcel.vewave.case',
            'solvcon.parcel.vewave.inout',
            'solvcon.parcel.vewave.material',
            'solvcon.parcel.vewave.planewave',
            'solvcon.parcel.vewave.solver',
        ]
        allmods = [importlib.import_module(name) for name in allmods]
        for mod in allmods:
            try:
                for name in mod.__all__:
                    getattr(mod, name) # must not raise error.
            except Exception as e:
                # prepend the offending module name so failures are traceable
                e.args = ['modname = %s'%mod.__name__] + list(e.args)
                raise
| bsd-3-clause |
tvtsoft/odoo8 | addons/l10n_es/__openerp__.py | 3 | 1878 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
# Copyright (c) 2008-2010 Zikzakmedia S.L. (http://zikzakmedia.com) All Rights Reserved.
# Jordi Esteve <jesteve@zikzakmedia.com>
# Copyright (c) 2012-2013, Grupo OPENTIA (<http://opentia.com>) Registered EU Trademark.
# Dpto. Consultoría <consultoria@opentia.es>
# Copyright (c) 2013 Serv. Tecnol. Avanzados (http://www.serviciosbaeza.com)
# Pedro Manuel Baeza <pedro.baeza@serviciosbaeza.com>
{
"name" : "Spanish Charts of Accounts (PGCE 2008)",
"version" : "4.0",
"author" : "Spanish Localization Team",
'website' : 'https://launchpad.net/openerp-spain',
"category" : "Localization/Account Charts",
"description": """
Spanish charts of accounts (PGCE 2008).
========================================
* Defines the following chart of account templates:
* Spanish general chart of accounts 2008
* Spanish general chart of accounts 2008 for small and medium companies
* Spanish general chart of accounts 2008 for associations
* Defines templates for sale and purchase VAT
* Defines tax code templates
* Defines fiscal positions for spanish fiscal legislation
""",
"depends" : ["account", "base_vat", "base_iban"],
"data" : [
"account_type.xml",
"account_chart_template.xml",
"account_account_common.xml",
"account_account_full.xml",
"account_account_pymes.xml",
"account_account_assoc.xml",
"taxes_common.xml",
"fiscal_templates_common.xml",
"account_chart_template_post.xml",
"l10n_es_wizard.xml",
],
"demo" : [],
'auto_install': False,
"installable": False,
'images': ['images/config_chart_l10n_es.png', 'images/l10n_es_chart.png'],
}
| agpl-3.0 |
wronk/mne-python | examples/forward/plot_left_cerebellum_volume_source.py | 27 | 3245 | """
==============================================
Generate a left cerebellum volume source space
==============================================
Generate a volume source space of the left cerebellum and plot its vertices
relative to the left cortical surface source space and the freesurfer
segmentation file.
"""
# Author: Alan Leggitt <alan.leggitt@ucsf.edu>
#
# License: BSD (3-clause)
import numpy as np
from scipy.spatial import ConvexHull
from mayavi import mlab
from mne import setup_source_space, setup_volume_source_space
from mne.datasets import sample
print(__doc__)
data_path = sample.data_path()
subjects_dir = data_path + '/subjects'
subj = 'sample'
aseg_fname = subjects_dir + '/sample/mri/aseg.mgz'
###############################################################################
# Setup the source spaces
# setup a cortical surface source space and extract left hemisphere
surf = setup_source_space(subj, subjects_dir=subjects_dir,
add_dist=False, overwrite=True)
lh_surf = surf[0]
# setup a volume source space of the left cerebellum cortex
volume_label = 'Left-Cerebellum-Cortex'
sphere = (0, 0, 0, 120)
lh_cereb = setup_volume_source_space(subj, mri=aseg_fname, sphere=sphere,
volume_label=volume_label,
subjects_dir=subjects_dir)
###############################################################################
# Plot the positions of each source space
# extract left cortical surface vertices, triangle faces, and surface normals
x1, y1, z1 = lh_surf['rr'].T
faces = lh_surf['use_tris']
normals = lh_surf['nn']
# normalize for mayavi
normals /= np.sum(normals * normals, axis=1)[:, np.newaxis]
# extract left cerebellum cortex source positions
x2, y2, z2 = lh_cereb[0]['rr'][lh_cereb[0]['inuse'].astype(bool)].T
# open a 3d figure in mayavi
mlab.figure(1, bgcolor=(0, 0, 0))
# plot the left cortical surface
mesh = mlab.pipeline.triangular_mesh_source(x1, y1, z1, faces)
mesh.data.point_data.normals = normals
mlab.pipeline.surface(mesh, color=3 * (0.7,))
# plot the convex hull bounding the left cerebellum
hull = ConvexHull(np.c_[x2, y2, z2])
mlab.triangular_mesh(x2, y2, z2, hull.simplices, color=3 * (0.5,), opacity=0.3)
# plot the left cerebellum sources
mlab.points3d(x2, y2, z2, color=(1, 1, 0), scale_factor=0.001)
# adjust view parameters
mlab.view(173.78, 101.75, 0.30, np.array([-0.03, -0.01, 0.03]))
mlab.roll(85)
##############################################################################
# Compare volume source locations to segmentation file in freeview
# Export source positions to nift file
nii_fname = data_path + '/MEG/sample/mne_sample_lh-cerebellum-cortex.nii'
# Combine the source spaces
src = surf + lh_cereb
src.export_volume(nii_fname, mri_resolution=True)
# Uncomment the following lines to display source positions in freeview.
'''
# display image in freeview
from mne.utils import run_subprocess
mri_fname = subjects_dir + '/sample/mri/brain.mgz'
run_subprocess(['freeview', '-v', mri_fname, '-v',
'%s:colormap=lut:opacity=0.5' % aseg_fname, '-v',
'%s:colormap=jet:colorscale=0,2' % nii_fname, '-slice',
'157 75 105'])
'''
| bsd-3-clause |
allenlavoie/tensorflow | tensorflow/contrib/data/python/ops/sliding.py | 6 | 3761 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Sliding dataset transformations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.data.util import nest
from tensorflow.python.data.util import sparse
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import gen_dataset_ops
class _SlideDataset(dataset_ops.Dataset):
  """A `Dataset` that passes a sliding window over its input."""
  def __init__(self, input_dataset, window_size, stride=1):
    """See `sliding_window_batch` for details."""
    super(_SlideDataset, self).__init__()
    self._input_dataset = input_dataset
    # The kernel expects int64 scalars for both window size and stride.
    self._window_size = ops.convert_to_tensor(
        window_size, dtype=dtypes.int64, name="window_size")
    self._stride = ops.convert_to_tensor(
        stride, dtype=dtypes.int64, name="stride")
  def _as_variant_tensor(self):
    # Sparse components are flattened to dense types/shapes for the op.
    return gen_dataset_ops.slide_dataset(
        self._input_dataset._as_variant_tensor(),  # pylint: disable=protected-access
        window_size=self._window_size,
        stride=self._stride,
        output_shapes=nest.flatten(
            sparse.as_dense_shapes(self.output_shapes, self.output_classes)),
        output_types=nest.flatten(
            sparse.as_dense_types(self.output_types, self.output_classes)))
  @property
  def output_classes(self):
    return self._input_dataset.output_classes
  @property
  def output_shapes(self):
    # Each component gains a leading unknown (window) dimension.
    input_shapes = self._input_dataset.output_shapes
    return nest.pack_sequence_as(input_shapes, [
        tensor_shape.vector(None).concatenate(s)
        for s in nest.flatten(self._input_dataset.output_shapes)
    ])
  @property
  def output_types(self):
    return self._input_dataset.output_types
def sliding_window_batch(window_size, stride=1):
  """A sliding window with size of `window_size` and step of `stride`.
  This transformation passes a sliding window over this dataset. The
  window size is `window_size` and step size is `stride`. If the left
  elements cannot fill up the sliding window, this transformation will
  drop the final smaller element. For example:
  ```python
  # NOTE: The following examples use `{ ... }` to represent the
  # contents of a dataset.
  a = { [1], [2], [3], [4], [5], [6] }
  a.apply(tf.contrib.data.sliding_window_batch(window_size=3, stride=2)) ==
  {
      [[1], [2], [3]],
      [[3], [4], [5]],
  }
  ```
  Args:
    window_size: A `tf.int64` scalar `tf.Tensor`, representing the number of
      elements in the sliding window.
    stride: (Optional.) A `tf.int64` scalar `tf.Tensor`, representing the
      steps moving the sliding window forward for one iteration. The default
      is `1`. It must be in `[1, window_size)`.
  Returns:
    A `Dataset` transformation function, which can be passed to
    @{tf.data.Dataset.apply}.
  """
  def _apply_fn(dataset):
    # Deferred construction so the transformation composes with .apply().
    return _SlideDataset(dataset, window_size, stride)
  return _apply_fn
| apache-2.0 |
MobSF/Mobile-Security-Framework-MobSF | mobsf/StaticAnalyzer/views/ios/view_source.py | 1 | 5000 | # -*- coding: utf_8 -*-
"""iOS View Source."""
import io
import json
import logging
import ntpath
import os
from pathlib import Path
import biplist
from django.conf import settings
from django.http import HttpResponseRedirect
from django.shortcuts import render
from django.utils.html import escape
from mobsf.MobSF.forms import FormUtil
from mobsf.MobSF.utils import (
is_file_exists,
is_safe_path,
print_n_send_error_response,
read_sqlite,
)
from mobsf.StaticAnalyzer.forms import (
ViewSourceIOSApiForm,
ViewSourceIOSForm,
)
logger = logging.getLogger(__name__)
def set_ext_api(file_path):
    """Map a file path to the viewer type used to render it."""
    # The viewer type is chosen purely from the final extension segment.
    # Several SQLite extensions collapse to 'db'; anything unrecognised
    # (including extensionless paths) is treated as plain text.
    extension = file_path.rsplit('.', 1)[-1]
    if extension in ('sqlitedb', 'db', 'sqlite'):
        return 'db'
    if extension in ('plist', 'xml', 'm'):
        return extension
    return 'txt'
def run(request, api=False):
    """View an iOS source file from an analyzed app.

    Renders (or, in API mode, returns as a dict) the contents of a file
    inside the uploaded app: Objective-C sources, plists, XML, SQLite
    dumps, class-dump output, or plain text.

    Args:
        request: Django request; file/hash/type arrive via POST (API mode)
            or GET (web UI).
        api: True when invoked through the REST API.

    Returns:
        A rendered template response, an API context dict, or an error
        response/dict on failure.
    """
    try:
        logger.info('View iOS Source File')
        exp = 'Error Description'
        file_format = None
        # API requests POST the parameters; the web UI passes them via GET.
        if api:
            fil = request.POST['file']
            md5_hash = request.POST['hash']
            mode = request.POST['type']
            viewsource_form = ViewSourceIOSApiForm(request.POST)
        else:
            fil = request.GET['file']
            md5_hash = request.GET['md5']
            mode = request.GET['type']
            viewsource_form = ViewSourceIOSForm(request.GET)
        typ = set_ext_api(fil)
        if not viewsource_form.is_valid():
            err = FormUtil.errors_message(viewsource_form)
            if api:
                return err
            return print_n_send_error_response(request, err, False, exp)
        base = Path(settings.UPLD_DIR) / md5_hash
        if mode == 'ipa':
            # IPA extraction may produce either casing of the Payload dir.
            src1 = base / 'payload'
            src2 = base / 'Payload'
            if src1.exists():
                src = src1
            elif src2.exists():
                src = src2
            else:
                raise Exception('MobSF cannot find Payload directory')
        elif mode == 'ios':
            src = base
        else:
            # BUG FIX: an unknown mode previously left `src` unbound and
            # surfaced as a confusing UnboundLocalError via the broad
            # except below; fail with an explicit message instead.
            raise Exception('Invalid Mode')
        sfile = src / fil
        sfile = sfile.as_posix()
        # Reject any path that escapes the app's upload directory.
        if not is_safe_path(src, sfile):
            msg = 'Path Traversal Detected!'
            if api:
                return {'error': 'Path Traversal Detected!'}
            return print_n_send_error_response(request, msg, False, exp)
        dat = ''
        sql_dump = {}
        if typ == 'm':
            file_format = 'cpp'
            with io.open(sfile,
                         mode='r',
                         encoding='utf8',
                         errors='ignore') as flip:
                dat = flip.read()
        elif typ == 'xml':
            file_format = 'xml'
            with io.open(sfile,
                         mode='r',
                         encoding='utf8',
                         errors='ignore') as flip:
                dat = flip.read()
        elif typ == 'plist':
            file_format = 'json'
            dat = biplist.readPlist(sfile)
            try:
                # Binary plists decode to Python objects; show them as JSON.
                dat = json.dumps(dat, indent=4, sort_keys=True)
            except Exception:
                # Non-JSON-serializable plist content: show the raw object.
                pass
        elif typ == 'db':
            file_format = 'asciidoc'
            sql_dump = read_sqlite(sfile)
        elif typ == 'txt' and fil == 'classdump.txt':
            # class-dump output lives at the app dir root, not under src.
            file_format = 'cpp'
            app_dir = os.path.join(settings.UPLD_DIR, md5_hash + '/')
            cls_dump_file = os.path.join(app_dir, 'classdump.txt')
            if is_file_exists(cls_dump_file):
                with io.open(cls_dump_file,  # lgtm [py/path-injection]
                             mode='r',
                             encoding='utf8',
                             errors='ignore') as flip:
                    dat = flip.read()
            else:
                dat = 'Class Dump result not Found'
        elif typ == 'txt':
            file_format = 'text'
            with io.open(sfile,
                         mode='r',
                         encoding='utf8',
                         errors='ignore') as flip:
                dat = flip.read()
        else:
            if api:
                return {'error': 'Invalid Parameters'}
            return HttpResponseRedirect('/error/')
        context = {
            'title': escape(ntpath.basename(fil)),
            'file': escape(ntpath.basename(fil)),
            'type': file_format,
            'data': dat,
            'sqlite': sql_dump,
            'version': settings.MOBSF_VER,
        }
        template = 'general/view.html'
        if api:
            return context
        return render(request, template, context)
    except Exception as exp:
        logger.exception('Error Viewing Source')
        msg = str(exp)
        exp = exp.__doc__
        if api:
            return print_n_send_error_response(request, msg, True, exp)
        return print_n_send_error_response(request, msg, False, exp)
| gpl-3.0 |
alexandrevicenzi/pycompat | tests/test.py | 1 | 2225 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# These tests run only under Linux and Python 2.x +
# This is the Travis CI environment.
#
from pycompat import python as py
from pycompat import system
import sys
import unittest
class TestPyCompat(unittest.TestCase):
    """Sanity checks for pycompat's interpreter/platform flags.

    These tests assume the Travis CI environment documented at the top of
    the file: Linux host, Python 2.x or newer.
    """

    def test_python_is_64bits(self):
        # Exactly one of is_64bits / is_32bits must hold.
        self.assertEqual(py.is_64bits, not py.is_32bits)

    def test_is_cpython(self):
        # NOTE(review): assumes CPython and PyPy are the only candidates,
        # which holds for the CI matrix this suite targets.
        self.assertEqual(py.is_cpython, not py.is_pypy)

    def test_immutability(self):
        # The module's flags are read-only attributes.
        with self.assertRaises(AttributeError):
            py.is2xx = 1

    def test_python_is1xx(self):
        self.assertFalse(py.is1xx)

    def test_python_is2xx(self):
        self.assertEqual(py.is2xx, sys.version_info[0] == 2)

    def test_python_is3xx(self):
        self.assertEqual(py.is3xx, sys.version_info[0] == 3)

    # is_eq / is_gt / is_lt are checked at every arity (major; major.minor;
    # major.minor.micro) against the running interpreter's version.
    def test_python_is_eqx(self):
        self.assertTrue(py.is_eq(sys.version_info[0]))

    def test_python_is_eqxx(self):
        self.assertTrue(py.is_eq(sys.version_info[0], sys.version_info[1]))

    def test_python_is_eqxxx(self):
        self.assertTrue(py.is_eq(sys.version_info[0], sys.version_info[1], sys.version_info[2]))

    def test_python_is_gtx(self):
        self.assertTrue(py.is_gt(sys.version_info[0] - 1))

    def test_python_is_gtxx(self):
        self.assertTrue(py.is_gt(sys.version_info[0], sys.version_info[1] - 1))

    def test_python_is_gtxxx(self):
        self.assertTrue(py.is_gt(sys.version_info[0], sys.version_info[1], sys.version_info[2] - 1))

    def test_python_is_ltx(self):
        self.assertTrue(py.is_lt(sys.version_info[0] + 1))

    def test_python_is_ltxx(self):
        self.assertTrue(py.is_lt(sys.version_info[0], sys.version_info[1] + 1))

    def test_python_is_ltxxx(self):
        self.assertTrue(py.is_lt(sys.version_info[0], sys.version_info[1], sys.version_info[2] + 1))

    # Platform flags: the CI environment is Linux, so every other OS flag
    # must be False.
    def test_system_is_windows(self):
        self.assertFalse(system.is_windows)

    def test_system_is_cygwin(self):
        self.assertFalse(system.is_cygwin)

    def test_system_is_mac_os(self):
        self.assertFalse(system.is_mac_os)

    def test_system_is_linux(self):
        self.assertTrue(system.is_linux)
# Allow running this module directly as a test script.
if __name__ == '__main__':
    unittest.main()
| mit |
djvoleur/test | arch/ia64/scripts/unwcheck.py | 13143 | 1714 | #!/usr/bin/python
#
# Usage: unwcheck.py FILE
#
# This script checks the unwind info of each function in file FILE
# and verifies that the sum of the region-lengths matches the total
# length of the function.
#
# Based on a shell/awk script originally written by Harish Patil,
# which was converted to Perl by Matthew Chapman, which was converted
# to Python by David Mosberger.
#
import os
import re
import sys
# Require exactly one argument: the object file whose unwind info to check.
if len(sys.argv) != 2:
    print "Usage: %s FILE" % sys.argv[0]
    sys.exit(2)

# READELF env var lets callers point at a cross-toolchain readelf binary.
readelf = os.getenv("READELF", "readelf")

# Matches lines like "<func_name>: [0x4000-0x4040]" from `readelf -u`.
start_pattern = re.compile("<([^>]*)>: \[0x([0-9a-f]+)-0x([0-9a-f]+)\]")
# Extracts the region length from "... rlen=NN" lines.
rlen_pattern = re.compile(".*rlen=([0-9]+)")
def check_func (func, slots, rlen_sum):
    # An IA-64 function's total instruction slots (3 per 16-byte bundle)
    # must equal the sum of its unwind region lengths; report mismatches.
    # Reads module globals `start`/`end` for unnamed functions and bumps
    # the global error counter.
    if slots != rlen_sum:
        global num_errors
        num_errors += 1
        if not func: func = "[%#x-%#x]" % (start, end)
        print "ERROR: %s: %lu slots, total region length = %lu" % (func, slots, rlen_sum)
    return
num_funcs = 0
num_errors = 0
# `func` is False until the first function header has been seen.
func = False
slots = 0
rlen_sum = 0
# Stream `readelf -u` output; each "<name>: [start-end]" header closes the
# previous function (checked via check_func) and opens a new one, while
# intermediate "rlen=" lines accumulate region lengths.
for line in os.popen("%s -u %s" % (readelf, sys.argv[1])):
    m = start_pattern.match(line)
    if m:
        check_func(func, slots, rlen_sum)
        func = m.group(1)
        start = long(m.group(2), 16)
        end = long(m.group(3), 16)
        # Three instruction slots per 16-byte IA-64 bundle.
        slots = 3 * (end - start) / 16
        rlen_sum = 0L
        num_funcs += 1
    else:
        m = rlen_pattern.match(line)
        if m:
            rlen_sum += long(m.group(1))
# Flush the final function (no trailing header closes it).
check_func(func, slots, rlen_sum)

if num_errors == 0:
    print "No errors detected in %u functions." % num_funcs
else:
    if num_errors > 1:
        err="errors"
    else:
        err="error"
    print "%u %s detected in %u functions." % (num_errors, err, num_funcs)
    sys.exit(1)
indashnet/InDashNet.Open.UN2000 | android/external/chromium_org/testing/test_env.py | 96 | 3084 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Sets environment variables needed to run a chromium unit test."""
import os
import stat
import subprocess
import sys
# This is hardcoded to be src/ relative to this script.
ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
CHROME_SANDBOX_ENV = 'CHROME_DEVEL_SANDBOX'
CHROME_SANDBOX_PATH = '/opt/chromium/chrome_sandbox'
def should_enable_sandbox(sandbox_path):
  """Decide whether the Chromium SUID sandbox can and should be used.

  Returns True only on a Linux host where the sandbox binary exists, is
  owned by root, and carries the setuid bit plus user read/execute bits.
  """
  if not sys.platform.startswith('linux'):
    return False
  if not os.path.exists(sandbox_path):
    return False
  st = os.stat(sandbox_path)
  required_bits = stat.S_ISUID | stat.S_IRUSR | stat.S_IXUSR
  return (st.st_mode & required_bits) == required_bits and st.st_uid == 0
def enable_sandbox_if_required(env, verbose=False):
  """Checks enables the sandbox if it is required, otherwise it disables it.

  Mutates `env` in place: sets CHROME_DEVEL_SANDBOX when usable, removes
  it otherwise (an absent/empty variable silently disables the sandbox).
  """
  chrome_sandbox_path = env.get(CHROME_SANDBOX_ENV, CHROME_SANDBOX_PATH)

  if should_enable_sandbox(chrome_sandbox_path):
    if verbose:
      print 'Enabling sandbox. Setting environment variable:'
      print ' %s="%s"' % (CHROME_SANDBOX_ENV, chrome_sandbox_path)
    env[CHROME_SANDBOX_ENV] = chrome_sandbox_path
  else:
    if verbose:
      print 'Sandbox not properly installed. Unsetting:'
      print ' %s' % CHROME_SANDBOX_ENV
    # The variable should be removed from the environment, making
    # the variable empty silently disables the sandbox.
    if env.get(CHROME_SANDBOX_ENV):
      env.pop(CHROME_SANDBOX_ENV)
def fix_python_path(cmd):
  """Return a copy of cmd that invokes the current Python interpreter.

  A leading 'python' is replaced with sys.executable; a command that
  starts with a *.py script gets sys.executable prepended.  Any other
  command is returned unchanged.  The input list is never mutated.
  """
  fixed = list(cmd)
  if fixed[0] == 'python':
    fixed[0] = sys.executable
  elif fixed[0].endswith('.py'):
    fixed = [sys.executable] + fixed
  return fixed
def run_executable(cmd, env):
  """Runs an executable with:
  - environment variable CR_SOURCE_ROOT set to the root directory.
  - environment variable LANGUAGE to en_US.UTF-8.
  - environment variable CHROME_DEVEL_SANDBOX set if need
  - Reuses sys.executable automatically.

  Returns the child process's exit code; raises OSError if it cannot
  be started.  NOTE(review): mutates both `env` and `cmd[0]` in place.
  """
  # Many tests assume a English interface...
  env['LANG'] = 'en_US.UTF-8'
  # Used by base/base_paths_linux.cc as an override. Just make sure the default
  # logic is used.
  env.pop('CR_SOURCE_ROOT', None)
  enable_sandbox_if_required(env)
  # Ensure paths are correctly separated on windows.
  cmd[0] = cmd[0].replace('/', os.path.sep)
  cmd = fix_python_path(cmd)
  try:
    return subprocess.call(cmd, env=env)
  except OSError:
    print >> sys.stderr, 'Failed to start %s' % cmd
    raise
def main():
  # Forward all CLI arguments as the command; copy the parent environment
  # so run_executable's mutations do not leak into this process.
  return run_executable(sys.argv[1:], os.environ.copy())


if __name__ == '__main__':
  sys.exit(main())
| apache-2.0 |
viggates/nova | nova/servicegroup/drivers/mc.py | 2 | 4126 | # Service heartbeat driver using Memcached
# Copyright (c) 2013 Akira Yoshiyama <akirayoshiyama at gmail dot com>
#
# This is derived from nova/servicegroup/drivers/db.py.
# Copyright 2012 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo.config import cfg
from nova import conductor
from nova import context
from nova.i18n import _
from nova.openstack.common import log as logging
from nova.openstack.common import memorycache
from nova.openstack.common import timeutils
from nova.servicegroup import api
CONF = cfg.CONF
CONF.import_opt('service_down_time', 'nova.service')
CONF.import_opt('memcached_servers', 'nova.openstack.common.memorycache')
LOG = logging.getLogger(__name__)
class MemcachedDriver(api.ServiceGroupDriver):
    """ServiceGroup driver that tracks service liveness via memcached.

    Each live service periodically writes a "<topic>:<host>" key with a
    TTL of CONF.service_down_time; a service is considered up while its
    key has not expired.
    """

    def __init__(self, *args, **kwargs):
        # 'test' lets unit tests construct the driver without configured
        # memcached servers.
        test = kwargs.get('test')
        if not CONF.memcached_servers and not test:
            raise RuntimeError(_('memcached_servers not defined'))
        self.mc = memorycache.get_client()
        # Use a local DB connection when permitted, otherwise go through
        # the conductor service.
        self.db_allowed = kwargs.get('db_allowed', True)
        self.conductor_api = conductor.API(use_local=self.db_allowed)

    def join(self, member_id, group_id, service=None):
        """Join the given service with its group."""
        msg = _('Memcached_Driver: join new ServiceGroup member '
                '%(member_id)s to the %(group_id)s group, '
                'service = %(service)s')
        LOG.debug(msg, {'member_id': member_id, 'group_id': group_id,
                        'service': service})
        if service is None:
            raise RuntimeError(_('service is a mandatory argument for '
                                 'Memcached based ServiceGroup driver'))
        report_interval = service.report_interval
        if report_interval:
            # Refresh this service's heartbeat key on a timer.
            service.tg.add_timer(report_interval, self._report_state,
                                 api.INITIAL_REPORTING_DELAY, service)

    def is_up(self, service_ref):
        """Moved from nova.utils
        Check whether a service is up based on last heartbeat.
        """
        # Heartbeat keys expire automatically, so mere presence of the key
        # means the service reported within CONF.service_down_time.
        key = "%(topic)s:%(host)s" % service_ref
        return self.mc.get(str(key)) is not None

    def get_all(self, group_id):
        """Returns ALL members of the given group
        """
        LOG.debug('Memcached_Driver: get_all members of the %s group',
                  group_id)
        rs = []
        ctxt = context.get_admin_context()
        services = self.conductor_api.service_get_all_by_topic(ctxt, group_id)
        for service in services:
            if self.is_up(service):
                rs.append(service['host'])
        return rs

    def _report_state(self, service):
        """Update the state of this service in the datastore."""
        try:
            key = "%(topic)s:%(host)s" % service.service_ref
            # memcached has data expiration time capability.
            # set(..., time=CONF.service_down_time) uses it and
            # reduces key-deleting code.
            self.mc.set(str(key),
                        timeutils.utcnow(),
                        time=CONF.service_down_time)

            # TODO(termie): make this pattern be more elegant.
            if getattr(service, 'model_disconnected', False):
                service.model_disconnected = False
                LOG.error(_('Recovered model server connection!'))

        # TODO(vish): this should probably only catch connection errors
        except Exception:  # pylint: disable=W0702
            # Deliberate best-effort: a heartbeat failure must not kill the
            # timer; log once per disconnect and retry next tick.
            if not getattr(service, 'model_disconnected', False):
                service.model_disconnected = True
                LOG.exception(_('model server went away'))
| apache-2.0 |
foxish/test-infra | gubernator/github/main_test.py | 3 | 5228 | #!/usr/bin/env python
# Copyright 2016 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=no-self-use
"""
To run these tests:
$ pip install webtest nosegae
$ nosetests --with-gae --gae-lib-root ~/google_appengine/
"""
import json
import unittest
import webtest
from google.appengine.ext import deferred
from google.appengine.ext import testbed
import handlers
import main
import models
import secrets
# Shared WSGI test client wrapping the real application under test.
app = webtest.TestApp(main.app)
class TestBase(unittest.TestCase):
    """Shared App Engine testbed scaffolding.

    NOTE(review): `self.testbed` is expected to be activated by the
    nosegae plugin before init_stubs() runs — it is not set in this module.
    """

    def init_stubs(self):
        # Activate every GAE service stub the handlers touch.
        self.testbed.init_memcache_stub()
        self.testbed.init_app_identity_stub()
        self.testbed.init_urlfetch_stub()
        self.testbed.init_blobstore_stub()
        self.testbed.init_datastore_v3_stub()
        self.testbed.init_taskqueue_stub()
class AppTest(TestBase):
    """End-to-end tests for the GitHub webhook endpoint."""

    def setUp(self):
        self.init_stubs()
        self.taskqueue = self.testbed.get_stub(testbed.TASKQUEUE_SERVICE_NAME)
        # The handler verifies request signatures against this secret.
        secrets.put('github_webhook_secret', 'some_secret', per_host=False)

    def get_response(self, event, body):
        """POST a correctly-signed webhook and drain the deferred queue."""
        if isinstance(body, dict):
            body = json.dumps(body)
        signature = handlers.make_signature(body)
        resp = app.post('/webhook', body,
                        {'X-Github-Event': event,
                         'X-Hub-Signature': signature})
        # Webhook processing is deferred; run queued tasks synchronously.
        for task in self.taskqueue.get_filtered_tasks():
            deferred.run(task.payload)
        return resp

    def test_webhook(self):
        # A valid delivery is stored raw, with a timestamp.
        self.get_response('test', {'action': 'blah'})
        hooks = list(models.GithubWebhookRaw.query())
        self.assertEqual(len(hooks), 1)
        self.assertIsNotNone(hooks[0].timestamp)

    def test_webhook_bad_sig(self):
        # A signature computed over different bytes must be rejected.
        body = json.dumps({'action': 'blah'})
        signature = handlers.make_signature(body + 'foo')
        app.post('/webhook', body,
                 {'X-Github-Event': 'test',
                  'X-Hub-Signature': signature}, status=400)

    def test_webhook_missing_sig(self):
        app.post('/webhook', '{}',
                 {'X-Github-Event': 'test'}, status=400)

    def test_webhook_unicode(self):
        self.get_response('test', {'action': u'blah\u03BA'})

    def test_webhook_status(self):
        # A status event is stored as a GHStatus keyed by repo and sha.
        args = {
            'name': 'owner/repo',
            'sha': '1234',
            'context': 'ci',
            'state': 'success',
            'target_url': 'http://example.com',
            'description': 'passed the tests!',
            'created_at': '2016-07-07T01:58:09Z',
            'updated_at': '2016-07-07T02:03:12Z',
        }
        self.get_response('status', args)
        statuses = list(models.GHStatus.query_for_sha('owner/repo', '1234'))
        self.assertEqual(len(statuses), 1)
        status = statuses[0]
        # The 'name' field is stored as 'repo' on the model.
        args['repo'] = args.pop('name')
        for key, value in args.iteritems():
            status_val = getattr(status, key)
            try:
                # Datetime fields are compared via their ISO rendering.
                status_val = status_val.strftime('%Y-%m-%dT%H:%M:%SZ')
            except AttributeError:
                pass
            assert status_val == value, '%r != %r' % (getattr(status, key), value)

    # Canonical pull_request payload reused by the PR tests below.
    PR_EVENT_BODY = {
        'repository': {'full_name': 'test/test'},
        'pull_request': {
            'number': 123,
            'head': {'sha': 'cafe'},
            'updated_at': '2016-07-07T02:03:12Z',
            'state': 'open',
            'user': {'login': 'rmmh'},
            'assignees': [{'login': 'spxtr'}],
            'title': 'test pr',
        },
        'action': 'opened',
    }

    def test_webhook_pr_open(self):
        body = json.dumps(self.PR_EVENT_BODY)
        self.get_response('pull_request', body)
        digest = models.GHIssueDigest.get('test/test', 123)
        self.assertTrue(digest.is_pr)
        self.assertTrue(digest.is_open)
        self.assertEqual(digest.involved, ['rmmh', 'spxtr'])
        self.assertEqual(digest.payload['title'], 'test pr')
        self.assertEqual(digest.payload['needs_rebase'], False)

    def test_webhook_pr_open_and_status(self):
        # A status arriving after the PR open must be merged into the digest.
        self.get_response('pull_request', self.PR_EVENT_BODY)
        self.get_response('status', {
            'repository': self.PR_EVENT_BODY['repository'],
            'name': self.PR_EVENT_BODY['repository']['full_name'],
            'sha': self.PR_EVENT_BODY['pull_request']['head']['sha'],
            'context': 'test-ci',
            'state': 'success',
            'target_url': 'example.com',
            'description': 'woop!',
            'created_at': '2016-07-07T01:58:09Z',
            'updated_at': '2016-07-07T02:03:15Z',
        })
        digest = models.GHIssueDigest.get('test/test', 123)
        self.assertEqual(digest.payload['status'],
                         {'test-ci': ['success', 'example.com', 'woop!']})
| apache-2.0 |
victoredwardocallaghan/pygments-main | tests/test_perllexer.py | 30 | 5337 | # -*- coding: utf-8 -*-
"""
Pygments regex lexer tests
~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import time
import unittest
from pygments.token import String
from pygments.lexers.agile import PerlLexer
class RunawayRegexTest(unittest.TestCase):
    # A previous version of the Perl lexer would spend a great deal of
    # time backtracking when given particular strings. These tests show that
    # the runaway backtracking doesn't happen any more (at least for the given
    # cases).

    lexer = PerlLexer()

    ### Test helpers.

    def assert_single_token(self, s, token):
        """Show that a given string generates only one token."""
        tokens = list(self.lexer.get_tokens_unprocessed(s))
        self.assertEqual(len(tokens), 1, tokens)
        self.assertEqual(s, tokens[0][2])
        self.assertEqual(token, tokens[0][1])

    def assert_tokens(self, strings, expected_tokens):
        """Show that a given string generates the expected tokens."""
        tokens = list(self.lexer.get_tokens_unprocessed(''.join(strings)))
        self.assertEqual(len(tokens), len(expected_tokens), tokens)
        for index, s in enumerate(strings):
            self.assertEqual(s, tokens[index][2])
            self.assertEqual(expected_tokens[index], tokens[index][1])

    def assert_fast_tokenization(self, s):
        """Show that a given string is tokenized quickly."""
        start = time.time()
        tokens = list(self.lexer.get_tokens_unprocessed(s))
        end = time.time()
        # Isn't 10 seconds kind of a long time? Yes, but we don't want false
        # positives when the tests are starved for CPU time.
        if end-start > 10:
            self.fail('tokenization took too long')
        return tokens

    ### Strings.

    def test_single_quote_strings(self):
        self.assert_single_token(r"'foo\tbar\\\'baz'", String)
        self.assert_fast_tokenization("'" + '\\'*999)

    def test_double_quote_strings(self):
        self.assert_single_token(r'"foo\tbar\\\"baz"', String)
        self.assert_fast_tokenization('"' + '\\'*999)

    def test_backtick_strings(self):
        self.assert_single_token(r'`foo\tbar\\\`baz`', String.Backtick)
        self.assert_fast_tokenization('`' + '\\'*999)

    ### Regex matches with various delimiters.

    def test_match(self):
        self.assert_single_token(r'/aa\tbb/', String.Regex)
        self.assert_fast_tokenization('/' + '\\'*999)

    def test_match_with_slash(self):
        self.assert_tokens(['m', '/\n\\t\\\\/'], [String.Regex, String.Regex])
        self.assert_fast_tokenization('m/xxx\n' + '\\'*999)

    def test_match_with_bang(self):
        self.assert_tokens(['m', r'!aa\t\!bb!'], [String.Regex, String.Regex])
        self.assert_fast_tokenization('m!' + '\\'*999)

    def test_match_with_brace(self):
        self.assert_tokens(['m', r'{aa\t\}bb}'], [String.Regex, String.Regex])
        self.assert_fast_tokenization('m{' + '\\'*999)

    def test_match_with_angle_brackets(self):
        self.assert_tokens(['m', r'<aa\t\>bb>'], [String.Regex, String.Regex])
        self.assert_fast_tokenization('m<' + '\\'*999)

    def test_match_with_parenthesis(self):
        self.assert_tokens(['m', r'(aa\t\)bb)'], [String.Regex, String.Regex])
        self.assert_fast_tokenization('m(' + '\\'*999)

    def test_match_with_at_sign(self):
        self.assert_tokens(['m', r'@aa\t\@bb@'], [String.Regex, String.Regex])
        self.assert_fast_tokenization('m@' + '\\'*999)

    def test_match_with_percent_sign(self):
        self.assert_tokens(['m', r'%aa\t\%bb%'], [String.Regex, String.Regex])
        self.assert_fast_tokenization('m%' + '\\'*999)

    def test_match_with_dollar_sign(self):
        self.assert_tokens(['m', r'$aa\t\$bb$'], [String.Regex, String.Regex])
        self.assert_fast_tokenization('m$' + '\\'*999)

    ### Regex substitutions with various delimeters.

    def test_substitution_with_slash(self):
        self.assert_single_token('s/aaa/bbb/g', String.Regex)
        self.assert_fast_tokenization('s/foo/' + '\\'*999)

    def test_substitution_with_at_sign(self):
        self.assert_single_token(r's@aaa@bbb@g', String.Regex)
        self.assert_fast_tokenization('s@foo@' + '\\'*999)

    def test_substitution_with_percent_sign(self):
        self.assert_single_token(r's%aaa%bbb%g', String.Regex)
        self.assert_fast_tokenization('s%foo%' + '\\'*999)

    def test_substitution_with_brace(self):
        self.assert_single_token(r's{aaa}', String.Regex)
        self.assert_fast_tokenization('s{' + '\\'*999)

    # BUG FIX: this test was previously defined twice with identical
    # bodies; the first definition was silently shadowed. One copy removed.
    def test_substitution_with_angle_bracket(self):
        self.assert_single_token(r's<aaa>', String.Regex)
        self.assert_fast_tokenization('s<' + '\\'*999)

    def test_substitution_with_square_bracket(self):
        self.assert_single_token(r's[aaa]', String.Regex)
        self.assert_fast_tokenization('s[' + '\\'*999)

    def test_substitution_with_parenthesis(self):
        self.assert_single_token(r's(aaa)', String.Regex)
        self.assert_fast_tokenization('s(' + '\\'*999)
| bsd-2-clause |
JonGal/appengine-ml-demo | app/lib/rsa/util.py | 96 | 3059 | # -*- coding: utf-8 -*-
#
# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility functions."""
from __future__ import with_statement, print_function
import sys
from optparse import OptionParser
import rsa.key
def private_to_public():
    """Reads a private key and outputs the corresponding public key.

    CLI tool: reads PKCS#1 v1.5 PEM/DER from a file or stdin, writes the
    derived public key to a file or stdout, and logs progress to stderr.
    """

    # Parse the CLI options
    parser = OptionParser(usage='usage: %prog [options]',
                          description='Reads a private key and outputs the '
                          'corresponding public key. Both private and public keys use '
                          'the format described in PKCS#1 v1.5')
    parser.add_option('-i', '--input', dest='infilename', type='string',
                      help='Input filename. Reads from stdin if not specified')
    parser.add_option('-o', '--output', dest='outfilename', type='string',
                      help='Output filename. Writes to stdout of not specified')
    parser.add_option('--inform', dest='inform',
                      help='key format of input - default PEM',
                      choices=('PEM', 'DER'), default='PEM')
    parser.add_option('--outform', dest='outform',
                      help='key format of output - default PEM',
                      choices=('PEM', 'DER'), default='PEM')
    # NOTE(review): sys.argv (including argv[0]) is passed explicitly;
    # the positional remainder cli_args is unused.
    (cli, cli_args) = parser.parse_args(sys.argv)

    # Read the input data
    if cli.infilename:
        print('Reading private key from %s in %s format' %
              (cli.infilename, cli.inform), file=sys.stderr)
        with open(cli.infilename, 'rb') as infile:
            in_data = infile.read()
    else:
        print('Reading private key from stdin in %s format' % cli.inform,
              file=sys.stderr)
        in_data = sys.stdin.read().encode('ascii')

    assert type(in_data) == bytes, type(in_data)

    # Take the public fields and create a public key
    priv_key = rsa.key.PrivateKey.load_pkcs1(in_data, cli.inform)
    pub_key = rsa.key.PublicKey(priv_key.n, priv_key.e)

    # Save to the output file
    out_data = pub_key.save_pkcs1(cli.outform)

    if cli.outfilename:
        print('Writing public key to %s in %s format' %
              (cli.outfilename, cli.outform), file=sys.stderr)
        with open(cli.outfilename, 'wb') as outfile:
            outfile.write(out_data)
    else:
        print('Writing public key to stdout in %s format' % cli.outform,
              file=sys.stderr)
        sys.stdout.write(out_data.decode('ascii'))
| apache-2.0 |
himleyb85/django | django/db/backends/sqlite3/base.py | 17 | 18191 | """
SQLite3 backend for django.
Works with either the pysqlite2 module or the sqlite3 module in the
standard library.
"""
from __future__ import unicode_literals
import datetime
import decimal
import re
import warnings
from django.conf import settings
from django.db import utils
from django.db.backends import utils as backend_utils
from django.db.backends.base.base import BaseDatabaseWrapper
from django.db.backends.base.validation import BaseDatabaseValidation
from django.utils import six, timezone
from django.utils.dateparse import (
parse_date, parse_datetime, parse_duration, parse_time,
)
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.encoding import force_text
from django.utils.safestring import SafeBytes
try:
import pytz
except ImportError:
pytz = None
try:
try:
from pysqlite2 import dbapi2 as Database
except ImportError:
from sqlite3 import dbapi2 as Database
except ImportError as exc:
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured("Error loading either pysqlite2 or sqlite3 modules (tried in that order): %s" % exc)
# Some of these import sqlite3, so import them after checking if it's installed.
from .client import DatabaseClient # isort:skip
from .creation import DatabaseCreation # isort:skip
from .features import DatabaseFeatures # isort:skip
from .introspection import DatabaseIntrospection # isort:skip
from .operations import DatabaseOperations # isort:skip
from .schema import DatabaseSchemaEditor # isort:skip
# Re-export the driver's exception classes under the names Django's
# database machinery expects to find on the backend module.
DatabaseError = Database.DatabaseError
IntegrityError = Database.IntegrityError
def adapt_datetime_warn_on_aware_datetime(value):
    """Adapt a datetime for SQLite storage, warning on aware values.

    Aware datetimes are converted to naive UTC (with a deprecation
    warning); the result is rendered as an ISO-8601 string.
    """
    # Remove this function and rely on the default adapter in Django 2.0.
    if settings.USE_TZ and timezone.is_aware(value):
        # BUG FIX: the "(%s)" placeholder was never interpolated, so the
        # warning showed a literal "%s" instead of the offending datetime.
        warnings.warn(
            "The SQLite database adapter received an aware datetime (%s), "
            "probably from cursor.execute(). Update your code to pass a "
            "naive datetime in the database connection's time zone (UTC by "
            "default)." % value, RemovedInDjango20Warning)
        # This doesn't account for the database connection's timezone,
        # which isn't known. (That's why this adapter is deprecated.)
        value = value.astimezone(timezone.utc).replace(tzinfo=None)
    return value.isoformat(str(" "))
def decoder(conv_func):
    """Wrap conv_func so it accepts the raw bytes sqlite3 passes back.

    The Python sqlite3 converter API always hands converters byte
    strings; decode them as UTF-8 before delegating to the real
    conversion function.
    """
    def _decode_then_convert(raw):
        return conv_func(raw.decode('utf-8'))
    return _decode_then_convert
# Teach the sqlite3 driver how to turn stored column values back into
# Python objects (converters) and Python objects into storable values
# (adapters). Converter names match the declared column types.
Database.register_converter(str("bool"), decoder(lambda s: s == '1'))
Database.register_converter(str("time"), decoder(parse_time))
Database.register_converter(str("date"), decoder(parse_date))
Database.register_converter(str("datetime"), decoder(parse_datetime))
Database.register_converter(str("timestamp"), decoder(parse_datetime))
Database.register_converter(str("TIMESTAMP"), decoder(parse_datetime))
Database.register_converter(str("decimal"), decoder(backend_utils.typecast_decimal))

Database.register_adapter(datetime.datetime, adapt_datetime_warn_on_aware_datetime)
Database.register_adapter(decimal.Decimal, backend_utils.rev_typecast_decimal)
# Under Python 2, byte strings must be decoded to text before storage.
if six.PY2:
    Database.register_adapter(str, lambda s: s.decode('utf-8'))
    Database.register_adapter(SafeBytes, lambda s: s.decode('utf-8'))
class DatabaseWrapper(BaseDatabaseWrapper):
    """Django database backend for SQLite (pysqlite2 or stdlib sqlite3)."""

    vendor = 'sqlite'
    # SQLite doesn't actually support most of these types, but it "does the right
    # thing" given more verbose field definitions, so leave them as is so that
    # schema inspection is more useful.
    data_types = {
        'AutoField': 'integer',
        'BigAutoField': 'integer',
        'BinaryField': 'BLOB',
        'BooleanField': 'bool',
        'CharField': 'varchar(%(max_length)s)',
        'CommaSeparatedIntegerField': 'varchar(%(max_length)s)',
        'DateField': 'date',
        'DateTimeField': 'datetime',
        'DecimalField': 'decimal',
        'DurationField': 'bigint',
        'FileField': 'varchar(%(max_length)s)',
        'FilePathField': 'varchar(%(max_length)s)',
        'FloatField': 'real',
        'IntegerField': 'integer',
        'BigIntegerField': 'bigint',
        'IPAddressField': 'char(15)',
        'GenericIPAddressField': 'char(39)',
        'NullBooleanField': 'bool',
        'OneToOneField': 'integer',
        'PositiveIntegerField': 'integer unsigned',
        'PositiveSmallIntegerField': 'smallint unsigned',
        'SlugField': 'varchar(%(max_length)s)',
        'SmallIntegerField': 'smallint',
        'TextField': 'text',
        'TimeField': 'time',
        'UUIDField': 'char(32)',
    }
    data_types_suffix = {
        'AutoField': 'AUTOINCREMENT',
        'BigAutoField': 'AUTOINCREMENT',
    }
    # SQLite requires LIKE statements to include an ESCAPE clause if the value
    # being escaped has a percent or underscore in it.
    # See http://www.sqlite.org/lang_expr.html for an explanation.
    operators = {
        'exact': '= %s',
        'iexact': "LIKE %s ESCAPE '\\'",
        'contains': "LIKE %s ESCAPE '\\'",
        'icontains': "LIKE %s ESCAPE '\\'",
        'regex': 'REGEXP %s',
        'iregex': "REGEXP '(?i)' || %s",
        'gt': '> %s',
        'gte': '>= %s',
        'lt': '< %s',
        'lte': '<= %s',
        'startswith': "LIKE %s ESCAPE '\\'",
        'endswith': "LIKE %s ESCAPE '\\'",
        'istartswith': "LIKE %s ESCAPE '\\'",
        'iendswith': "LIKE %s ESCAPE '\\'",
    }

    # The patterns below are used to generate SQL pattern lookup clauses when
    # the right-hand side of the lookup isn't a raw string (it might be an expression
    # or the result of a bilateral transformation).
    # In those cases, special characters for LIKE operators (e.g. \, *, _) should be
    # escaped on database side.
    #
    # Note: we use str.format() here for readability as '%' is used as a wildcard for
    # the LIKE operator.
    pattern_esc = r"REPLACE(REPLACE(REPLACE({}, '\', '\\'), '%%', '\%%'), '_', '\_')"
    pattern_ops = {
        'contains': r"LIKE '%%' || {} || '%%' ESCAPE '\'",
        'icontains': r"LIKE '%%' || UPPER({}) || '%%' ESCAPE '\'",
        'startswith': r"LIKE {} || '%%' ESCAPE '\'",
        'istartswith': r"LIKE UPPER({}) || '%%' ESCAPE '\'",
        'endswith': r"LIKE '%%' || {} ESCAPE '\'",
        'iendswith': r"LIKE '%%' || UPPER({}) ESCAPE '\'",
    }

    Database = Database
    SchemaEditorClass = DatabaseSchemaEditor
    def __init__(self, *args, **kwargs):
        # Wire up the backend's feature/operation/introspection helpers on
        # top of the base wrapper's connection management.
        super(DatabaseWrapper, self).__init__(*args, **kwargs)

        self.features = DatabaseFeatures(self)
        self.ops = DatabaseOperations(self)
        self.client = DatabaseClient(self)
        self.creation = DatabaseCreation(self)
        self.introspection = DatabaseIntrospection(self)
        self.validation = BaseDatabaseValidation(self)
def get_connection_params(self):
    """
    Build the kwargs passed to ``Database.connect()`` from settings.

    Raises ImproperlyConfigured when no database NAME is supplied, merges
    user OPTIONS on top of the defaults, and forces thread sharing to be
    allowed at the sqlite3 level.
    """
    settings_dict = self.settings_dict
    if not settings_dict['NAME']:
        from django.core.exceptions import ImproperlyConfigured
        raise ImproperlyConfigured(
            "settings.DATABASES is improperly configured. "
            "Please supply the NAME value.")
    kwargs = {
        'database': settings_dict['NAME'],
        # Let sqlite3 convert declared column types / column names back
        # into Python objects (dates, decimals, ...).
        'detect_types': Database.PARSE_DECLTYPES | Database.PARSE_COLNAMES,
    }
    kwargs.update(settings_dict['OPTIONS'])
    # Always allow the underlying SQLite connection to be shareable
    # between multiple threads. The safe-guarding will be handled at a
    # higher level by the `BaseDatabaseWrapper.allow_thread_sharing`
    # property. This is necessary as the shareability is disabled by
    # default in pysqlite and it cannot be changed once a connection is
    # opened.
    if 'check_same_thread' in kwargs and kwargs['check_same_thread']:
        warnings.warn(
            'The `check_same_thread` option was provided and set to '
            'True. It will be overridden with False. Use the '
            '`DatabaseWrapper.allow_thread_sharing` property instead '
            'for controlling thread shareability.',
            RuntimeWarning
        )
    kwargs.update({'check_same_thread': False})
    # URI filenames are needed to share an in-memory database between
    # connections (e.g. "file:memdb1?mode=memory&cache=shared").
    if self.features.can_share_in_memory_db:
        kwargs.update({'uri': True})
    return kwargs
def get_new_connection(self, conn_params):
    """Open a sqlite3 connection and register the Python callables Django's
    ORM emits in generated SQL (date/time helpers, REGEXP, POWER, ...)."""
    conn = Database.connect(**conn_params)
    # Each create_function(name, num_args, callable) makes the Python
    # function available as a SQL function on this connection.
    conn.create_function("django_date_extract", 2, _sqlite_date_extract)
    conn.create_function("django_date_trunc", 2, _sqlite_date_trunc)
    conn.create_function("django_datetime_cast_date", 2, _sqlite_datetime_cast_date)
    conn.create_function("django_datetime_extract", 3, _sqlite_datetime_extract)
    conn.create_function("django_datetime_trunc", 3, _sqlite_datetime_trunc)
    conn.create_function("django_time_extract", 2, _sqlite_time_extract)
    conn.create_function("regexp", 2, _sqlite_regexp)
    conn.create_function("django_format_dtdelta", 3, _sqlite_format_dtdelta)
    conn.create_function("django_power", 2, _sqlite_power)
    return conn
def init_connection_state(self):
    # SQLite needs no per-connection state setup (no encoding, timezone,
    # or isolation settings to send after connecting).
    pass
def create_cursor(self):
    # Use the wrapper cursor so "format" (%s) placeholders are rewritten
    # to the "qmark" (?) style sqlite3 understands.
    return self.connection.cursor(factory=SQLiteCursorWrapper)
def close(self):
    """Close the connection, unless it points at an in-memory database."""
    self.validate_thread_sharing()
    # If database is in memory, closing the connection destroys the
    # database. To prevent accidental data loss, ignore close requests on
    # an in-memory db.
    if not self.is_in_memory_db(self.settings_dict['NAME']):
        BaseDatabaseWrapper.close(self)
def _savepoint_allowed(self):
# Two conditions are required here:
# - A sufficiently recent version of SQLite to support savepoints,
# - Being in a transaction, which can only happen inside 'atomic'.
# When 'isolation_level' is not None, sqlite3 commits before each
# savepoint; it's a bug. When it is None, savepoints don't make sense
# because autocommit is enabled. The only exception is inside 'atomic'
# blocks. To work around that bug, on SQLite, 'atomic' starts a
# transaction explicitly rather than simply disable autocommit.
return self.features.uses_savepoints and self.in_atomic_block
def _set_autocommit(self, autocommit):
    """Toggle autocommit via sqlite3's ``isolation_level`` attribute:
    None enables autocommit, any string value disables it."""
    if autocommit:
        level = None
    else:
        # sqlite3's internal default is ''. It's different from None.
        # See Modules/_sqlite/connection.c.
        level = ''
    # 'isolation_level' is a misleading API.
    # SQLite always runs at the SERIALIZABLE isolation level.
    with self.wrap_database_errors:
        self.connection.isolation_level = level
def check_constraints(self, table_names=None):
    """
    Checks each table name in `table_names` for rows with invalid foreign
    key references. This method is intended to be used in conjunction with
    `disable_constraint_checking()` and `enable_constraint_checking()`, to
    determine if rows with invalid references were entered while constraint
    checks were off.
    Raises an IntegrityError on the first invalid foreign key reference
    encountered (if any) and provides detailed information about the
    invalid reference in the error message.
    Backends can override this method if they can more directly apply
    constraint checking (e.g. via "SET CONSTRAINTS ALL IMMEDIATE")
    """
    cursor = self.cursor()
    if table_names is None:
        # No explicit list given: check every table in the database.
        table_names = self.introspection.table_names(cursor)
    for table_name in table_names:
        primary_key_column_name = self.introspection.get_primary_key_column(cursor, table_name)
        if not primary_key_column_name:
            continue
        key_columns = self.introspection.get_key_columns(cursor, table_name)
        for column_name, referenced_table_name, referenced_column_name in key_columns:
            # LEFT JOIN the referenced table; rows whose FK is set but
            # matched no referenced row (REFERRED column IS NULL) are
            # dangling references.
            cursor.execute("""
                SELECT REFERRING.`%s`, REFERRING.`%s` FROM `%s` as REFERRING
                LEFT JOIN `%s` as REFERRED
                ON (REFERRING.`%s` = REFERRED.`%s`)
                WHERE REFERRING.`%s` IS NOT NULL AND REFERRED.`%s` IS NULL"""
                % (primary_key_column_name, column_name, table_name, referenced_table_name,
                   column_name, referenced_column_name, column_name, referenced_column_name))
            for bad_row in cursor.fetchall():
                raise utils.IntegrityError("The row in table '%s' with primary key '%s' has an invalid "
                    "foreign key: %s.%s contains a value '%s' that does not have a corresponding value in %s.%s."
                    % (table_name, bad_row[0], table_name, column_name, bad_row[1],
                       referenced_table_name, referenced_column_name))
def is_usable(self):
    # An SQLite connection is a file/memory handle with no server that
    # could disappear, so it never goes stale.
    return True
def _start_transaction_under_autocommit(self):
    """
    Start a transaction explicitly in autocommit mode.
    Staying in autocommit mode works around a bug of sqlite3 that breaks
    savepoints when autocommit is disabled.
    """
    # Issue BEGIN directly instead of toggling isolation_level.
    self.cursor().execute("BEGIN")
def is_in_memory_db(self, name):
    """Return whether `name` refers to an in-memory SQLite database,
    either the classic ":memory:" name or a URI with mode=memory."""
    if name == ":memory:":
        return True
    return "mode=memory" in force_text(name)
# Matches a "format"-style %s placeholder that is not escaped as %%s.
FORMAT_QMARK_REGEX = re.compile(r'(?<!%)%s')
class SQLiteCursorWrapper(Database.Cursor):
    """
    Django uses "format" style placeholders, but pysqlite2 uses "qmark" style.
    This fixes it -- but note that if you want to use a literal "%s" in a query,
    you'll need to use "%%s".
    """
    def execute(self, query, params=None):
        # Without params, the query may legitimately contain "%", so skip
        # placeholder conversion entirely.
        if params is None:
            return Database.Cursor.execute(self, query)
        query = self.convert_query(query)
        return Database.Cursor.execute(self, query, params)
    def executemany(self, query, param_list):
        query = self.convert_query(query)
        return Database.Cursor.executemany(self, query, param_list)
    def convert_query(self, query):
        # Rewrite unescaped %s -> ? and collapse escaped %% -> %.
        return FORMAT_QMARK_REGEX.sub('?', query).replace('%%', '%')
def _sqlite_date_extract(lookup_type, dt):
if dt is None:
return None
try:
dt = backend_utils.typecast_timestamp(dt)
except (ValueError, TypeError):
return None
if lookup_type == 'week_day':
return (dt.isoweekday() % 7) + 1
else:
return getattr(dt, lookup_type)
def _sqlite_date_trunc(lookup_type, dt):
    """SQL function: truncate a date string to year/month/day precision.

    Returns the truncated ISO-style date string, or None when the input
    cannot be parsed or the lookup_type is not recognised.
    """
    try:
        parsed = backend_utils.typecast_timestamp(dt)
    except (ValueError, TypeError):
        return None
    truncations = {
        'year': "%i-01-01" % (parsed.year,),
        'month': "%i-%02i-01" % (parsed.year, parsed.month),
        'day': "%i-%02i-%02i" % (parsed.year, parsed.month, parsed.day),
    }
    # Unknown lookup types yield None, matching the original fall-through.
    return truncations.get(lookup_type)
def _sqlite_datetime_parse(dt, tzname):
if dt is None:
return None
try:
dt = backend_utils.typecast_timestamp(dt)
except (ValueError, TypeError):
return None
if tzname is not None:
dt = timezone.localtime(dt, pytz.timezone(tzname))
return dt
def _sqlite_datetime_cast_date(dt, tzname):
    """SQL function: cast a datetime string to an ISO date string,
    honouring the connection's timezone. Returns None on bad input."""
    parsed = _sqlite_datetime_parse(dt, tzname)
    return None if parsed is None else parsed.date().isoformat()
def _sqlite_datetime_extract(lookup_type, dt, tzname):
    """SQL function: extract a component from a timezone-converted
    datetime string. Returns None for NULL/unparseable input."""
    parsed = _sqlite_datetime_parse(dt, tzname)
    if parsed is None:
        return None
    if lookup_type != 'week_day':
        return getattr(parsed, lookup_type)
    # SQL week_day convention: Sunday=1 .. Saturday=7.
    return (parsed.isoweekday() % 7) + 1
def _sqlite_datetime_trunc(lookup_type, dt, tzname):
    """SQL function: truncate a timezone-converted datetime string to the
    requested precision (year/month/day/hour/minute/second).

    Returns a "%Y-%m-%d %H:%M:%S"-style string, or None for bad input or
    an unrecognised lookup_type.
    """
    parsed = _sqlite_datetime_parse(dt, tzname)
    if parsed is None:
        return None
    components = (parsed.year, parsed.month, parsed.day,
                  parsed.hour, parsed.minute, parsed.second)
    # (format template, number of leading components it consumes)
    templates = {
        'year': ("%i-01-01 00:00:00", 1),
        'month': ("%i-%02i-01 00:00:00", 2),
        'day': ("%i-%02i-%02i 00:00:00", 3),
        'hour': ("%i-%02i-%02i %02i:00:00", 4),
        'minute': ("%i-%02i-%02i %02i:%02i:00", 5),
        'second': ("%i-%02i-%02i %02i:%02i:%02i", 6),
    }
    if lookup_type not in templates:
        # Matches the original implicit fall-through.
        return None
    template, count = templates[lookup_type]
    return template % components[:count]
def _sqlite_time_extract(lookup_type, dt):
if dt is None:
return None
try:
dt = backend_utils.typecast_time(dt)
except (ValueError, TypeError):
return None
return getattr(dt, lookup_type)
def _sqlite_format_dtdelta(conn, lhs, rhs):
    """
    LHS and RHS can be either:
        - An integer number of microseconds
        - A string representing a timedelta object
        - A string representing a datetime

    Note: `conn` carries the operator ('+' or '-'), not a connection —
    this callable is registered as the 3-argument SQL function
    django_format_dtdelta(op, lhs, rhs).
    """
    try:
        # Integers are microseconds; convert to a fractional-seconds
        # string so parse_duration() can interpret them.
        if isinstance(lhs, six.integer_types):
            lhs = str(decimal.Decimal(lhs) / decimal.Decimal(1000000))
        real_lhs = parse_duration(lhs)
        if real_lhs is None:
            # Not a duration string: treat it as a date/datetime string.
            real_lhs = backend_utils.typecast_timestamp(lhs)
        if isinstance(rhs, six.integer_types):
            rhs = str(decimal.Decimal(rhs) / decimal.Decimal(1000000))
        real_rhs = parse_duration(rhs)
        if real_rhs is None:
            real_rhs = backend_utils.typecast_timestamp(rhs)
        if conn.strip() == '+':
            out = real_lhs + real_rhs
        else:
            out = real_lhs - real_rhs
    except (ValueError, TypeError):
        return None
    # typecast_timestamp returns a date or a datetime without timezone.
    # It will be formatted as "%Y-%m-%d" or "%Y-%m-%d %H:%M:%S[.%f]"
    return str(out)
def _sqlite_regexp(re_pattern, re_string):
return bool(re.search(re_pattern, force_text(re_string))) if re_string is not None else False
def _sqlite_power(x, y):
return x ** y
| bsd-3-clause |
srimai/odoo | addons/hr_timesheet_invoice/wizard/hr_timesheet_invoice_create.py | 337 | 3655 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
class hr_timesheet_invoice_create(osv.osv_memory):
    """Transient wizard model: create an invoice from selected
    analytic (timesheet) lines passed in via context['active_ids']."""
    _name = 'hr.timesheet.invoice.create'
    _description = 'Create invoice from timesheet'
    # Flags controlling which details of each work line appear on the
    # invoice, plus an optional product override.
    _columns = {
        'date': fields.boolean('Date', help='The real date of each work will be displayed on the invoice'),
        'time': fields.boolean('Time spent', help='The time of each work done will be displayed on the invoice'),
        'name': fields.boolean('Description', help='The detail of each work done will be displayed on the invoice'),
        'price': fields.boolean('Cost', help='The cost of each work done will be displayed on the invoice. You probably don\'t want to check this'),
        'product': fields.many2one('product.product', 'Force Product', help='Fill this field only if you want to force to use a specific product. Keep empty to use the real product that comes from the cost.'),
    }
    _defaults = {
        'date': 1,
        'name': 1,
    }

    def view_init(self, cr, uid, fields, context=None):
        """
        This function checks for precondition before wizard executes
        @param self: The object pointer
        @param cr: the current row, from the database cursor,
        @param uid: the current user’s ID for security checks,
        @param fields: List of fields for default value
        @param context: A standard dictionary for contextual values
        """
        analytic_obj = self.pool.get('account.analytic.line')
        data = context and context.get('active_ids', [])
        # Refuse to run if any selected line was already invoiced.
        for analytic in analytic_obj.browse(cr, uid, data, context=context):
            if analytic.invoice_id:
                raise osv.except_osv(_('Warning!'), _("Invoice is already linked to some of the analytic line(s)!"))

    def do_create(self, cr, uid, ids, context=None):
        """Create invoice(s) from the selected timesheet lines and return
        an action window showing the resulting customer invoices."""
        data = self.read(cr, uid, ids, context=context)[0]
        # Create an invoice based on selected timesheet lines
        invs = self.pool.get('account.analytic.line').invoice_cost_create(cr, uid, context['active_ids'], data, context=context)
        mod_obj = self.pool.get('ir.model.data')
        act_obj = self.pool.get('ir.actions.act_window')
        # Reuse the standard "Customer Invoices" action, narrowed to the
        # invoices that were just created.
        mod_ids = mod_obj.search(cr, uid, [('name', '=', 'action_invoice_tree1')], context=context)
        res_id = mod_obj.read(cr, uid, mod_ids, ['res_id'], context=context)[0]['res_id']
        act_win = act_obj.read(cr, uid, [res_id], context=context)[0]
        act_win['domain'] = [('id','in',invs),('type','=','out_invoice')]
        act_win['name'] = _('Invoices')
        return act_win
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
privateip/ansible-modules-core | source_control/subversion.py | 2 | 8872 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: subversion
short_description: Deploys a subversion repository.
description:
- Deploy given repository URL / revision to dest. If dest exists, update to the specified revision, otherwise perform a checkout.
version_added: "0.7"
author: "Dane Summers (@dsummersl) <njharman@gmail.com>"
notes:
- Requires I(svn) to be installed on the client.
requirements: []
options:
repo:
description:
- The subversion URL to the repository.
required: true
aliases: [ name, repository ]
default: null
dest:
description:
- Absolute path where the repository should be deployed.
required: true
default: null
revision:
description:
- Specific revision to checkout.
required: false
default: HEAD
aliases: [ version ]
force:
description:
- If C(yes), modified files will be discarded. If C(no), module will fail if it encounters modified files.
Prior to 1.9 the default was `yes`.
required: false
default: "no"
choices: [ "yes", "no" ]
username:
description:
- --username parameter passed to svn.
required: false
default: null
password:
description:
- --password parameter passed to svn.
required: false
default: null
executable:
required: false
default: null
version_added: "1.4"
description:
- Path to svn executable to use. If not supplied,
the normal mechanism for resolving binary paths will be used.
export:
required: false
default: "no"
choices: [ "yes", "no" ]
version_added: "1.6"
description:
- If C(yes), do export instead of checkout/update.
switch:
required: false
default: "yes"
choices: [ "yes", "no" ]
version_added: "2.0"
description:
- If C(no), do not call svn switch before update.
'''
EXAMPLES = '''
# Checkout subversion repository to specified folder.
- subversion: repo=svn+ssh://an.example.org/path/to/repo dest=/src/checkout
# Export subversion directory to folder
- subversion: repo=svn+ssh://an.example.org/path/to/repo dest=/src/export export=True
'''
import re
import tempfile
class Subversion(object):
    """Thin wrapper around the `svn` command line client, driven through
    the Ansible module's run_command() helper."""
    def __init__(
            self, module, dest, repo, revision, username, password, svn_path):
        self.module = module
        self.dest = dest
        self.repo = repo
        self.revision = revision
        self.username = username
        self.password = password
        self.svn_path = svn_path

    def _exec(self, args, check_rc=True):
        '''Execute a subversion command, and return output. If check_rc is False, returns the return code instead of the output.'''
        # Common non-interactive flags plus optional credentials.
        bits = [
            self.svn_path,
            '--non-interactive',
            '--trust-server-cert',
            '--no-auth-cache',
        ]
        if self.username:
            bits.extend(["--username", self.username])
        if self.password:
            bits.extend(["--password", self.password])
        bits.extend(args)
        rc, out, err = self.module.run_command(bits, check_rc)
        if check_rc:
            return out.splitlines()
        else:
            return rc

    def is_svn_repo(self):
        '''Checks if path is a SVN Repo.'''
        # `svn info` exits non-zero when dest is not a working copy.
        rc = self._exec(["info", self.dest], check_rc=False)
        return rc == 0

    def checkout(self):
        '''Creates new svn working directory if it does not already exist.'''
        self._exec(["checkout", "-r", self.revision, self.repo, self.dest])

    def export(self, force=False):
        '''Export svn repo to directory'''
        cmd = ["export"]
        if force:
            cmd.append("--force")
        cmd.extend(["-r", self.revision, self.repo, self.dest])
        self._exec(cmd)

    def switch(self):
        '''Change working directory's repo.'''
        # switch to ensure we are pointing at correct repo.
        self._exec(["switch", self.repo, self.dest])

    def update(self):
        '''Update existing svn working directory.'''
        self._exec(["update", "-r", self.revision, self.dest])

    def revert(self):
        '''Revert svn working directory.'''
        self._exec(["revert", "-R", self.dest])

    def get_revision(self):
        '''Revision and URL of subversion working directory.'''
        # Screen-scrape `svn info` output for the Revision/URL lines.
        text = '\n'.join(self._exec(["info", self.dest]))
        rev = re.search(r'^Revision:.*$', text, re.MULTILINE).group(0)
        url = re.search(r'^URL:.*$', text, re.MULTILINE).group(0)
        return rev, url

    def has_local_mods(self):
        '''True if revisioned files have been added or modified. Unrevisioned files are ignored.'''
        lines = self._exec(["status", "--quiet", "--ignore-externals", self.dest])
        # The --quiet option will return only modified files.
        # Match only revisioned files, i.e. ignore status '?'.
        regex = re.compile(r'^[^?X]')
        # Has local mods if more than 0 modifed revisioned files.
        # NOTE: relies on Python 2 where filter() returns a list.
        return len(filter(regex.match, lines)) > 0

    def needs_update(self):
        # Compare the working copy revision against the repository HEAD
        # revision; returns (needs_update, local_rev_line, head_rev_line).
        curr, url = self.get_revision()
        out2 = '\n'.join(self._exec(["info", "-r", "HEAD", self.dest]))
        head = re.search(r'^Revision:.*$', out2, re.MULTILINE).group(0)
        rev1 = int(curr.split(':')[1].strip())
        rev2 = int(head.split(':')[1].strip())
        change = False
        if rev1 < rev2:
            change = True
        return change, curr, head
# ===========================================
def main():
    """Ansible module entry point: checkout, update, switch, or export a
    subversion repository at `dest`, reporting changed/before/after."""
    module = AnsibleModule(
        argument_spec=dict(
            dest=dict(required=True, type='path'),
            repo=dict(required=True, aliases=['name', 'repository']),
            revision=dict(default='HEAD', aliases=['rev', 'version']),
            force=dict(default='no', type='bool'),
            username=dict(required=False),
            password=dict(required=False),
            executable=dict(default=None, type='path'),
            export=dict(default=False, required=False, type='bool'),
            switch=dict(default=True, required=False, type='bool'),
        ),
        supports_check_mode=True
    )

    dest = module.params['dest']
    repo = module.params['repo']
    revision = module.params['revision']
    force = module.params['force']
    username = module.params['username']
    password = module.params['password']
    svn_path = module.params['executable'] or module.get_bin_path('svn', True)
    export = module.params['export']
    switch = module.params['switch']

    # We screenscrape a huge amount of svn commands so use C locale anytime we
    # call run_command()
    module.run_command_environ_update = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C', LC_CTYPE='C')

    svn = Subversion(module, dest, repo, revision, username, password, svn_path)

    if export or not os.path.exists(dest):
        # Fresh checkout or export into a non-existing destination.
        before = None
        local_mods = False
        if module.check_mode:
            module.exit_json(changed=True)
        if not export:
            svn.checkout()
        else:
            svn.export(force=force)
    elif svn.is_svn_repo():
        # Order matters. Need to get local mods before switch to avoid false
        # positives. Need to switch before revert to ensure we are reverting to
        # correct repo.
        if module.check_mode:
            check, before, after = svn.needs_update()
            module.exit_json(changed=check, before=before, after=after)
        before = svn.get_revision()
        local_mods = svn.has_local_mods()
        if switch:
            svn.switch()
        if local_mods:
            if force:
                svn.revert()
            else:
                module.fail_json(msg="ERROR: modified files exist in the repository.")
        svn.update()
    else:
        module.fail_json(msg="ERROR: %s folder already exists, but its not a subversion repository." % (dest, ))

    if export:
        module.exit_json(changed=True)
    else:
        after = svn.get_revision()
        changed = before != after or local_mods
        module.exit_json(changed=changed, before=before, after=after)
# import module snippets
from ansible.module_utils.basic import *
main()
| gpl-3.0 |
mitsuhiko/zine | zine/utils/exceptions.py | 1 | 1632 | # -*- coding: utf-8 -*-
"""
zine.utils.exceptions
~~~~~~~~~~~~~~~~~~~~~
Exception utility module.
:copyright: (c) 2010 by the Zine Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from zine.i18n import _
class ZineException(Exception):
    """Base class of every exception raised by Zine."""

    # Default message; replaced per-instance when one is passed to
    # the constructor. May also be overridden on subclasses.
    message = None

    def __init__(self, message=None):
        Exception.__init__(self)
        if message is None:
            return
        self.message = message

    def __str__(self):
        # A missing (or otherwise falsy) message renders as ''.
        message = self.message
        return message if message else ''

    def __unicode__(self):
        # Python 2 unicode protocol: decode the byte representation,
        # dropping anything that isn't valid UTF-8.
        return str(self).decode('utf-8', 'ignore')
class UserException(ZineException):
    """Zine exception whose message is unicode (Python 2 semantics):
    __unicode__ is the primary representation, __str__ encodes it."""

    def __str__(self):
        return unicode(self).encode('utf-8')

    def __unicode__(self):
        message = self.message
        if message is None:
            return u''
        return unicode(message)
def summarize_exception(exc_info):
    """
    Summarize a ``sys.exc_info()`` triple.

    Returns ``(summary, (filename, lineno))`` where *summary* is a unicode
    string of the form ``u'ExceptionType: message'`` and the location points
    at the outermost frame of the traceback.
    """
    def _to_unicode(x):
        # Best-effort unicode conversion; on failure fall back to a UTF-8
        # byte string with unconvertible characters replaced (Python 2).
        try:
            return unicode(x)
        except UnicodeError:
            return str(x).encode('utf-8', 'replace')

    exc_type, exc_value, tb = exc_info
    # Old-style Python 2 string exceptions have no __name__.
    if isinstance(exc_type, basestring):
        prefix = _to_unicode(exc_type)
    else:
        prefix = _to_unicode(exc_type.__name__)
    message = _to_unicode(exc_value)

    filename = tb.tb_frame.f_globals.get('__file__')
    if filename is None:
        filename = _(u'unkown file')
    else:
        filename = _to_unicode(filename)
    # Point at the source file rather than the compiled bytecode.
    if filename.endswith('.pyc'):
        filename = filename[:-1]
    return u'%s: %s' % (prefix, message), (filename, tb.tb_lineno)
| bsd-3-clause |
kasperschmidt/TDOSE | tdose_extract_spectra.py | 1 | 43824 | # = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
import numpy as np
import sys
import astropy.io.fits as afits
import collections
import tdose_utilities as tu
import tdose_extract_spectra as tes
import tdose_build_mock_cube as tbmc
import pdb
import scipy.ndimage.filters as snf
import matplotlib as mpl
mpl.use('Agg') # prevent pyplot from opening window; enables closing ssh session with detached screen running TDOSE
import matplotlib.pyplot as plt
# = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
def extract_spectra(model_cube_file,source_association_dictionary=None,nameext='tdose_spectrum',outputdir='./',clobber=False,
                    variance_cube_file=None,variance_cube_ext='ERROR',source_model_cube_file=None,source_cube_ext='DATA',
                    model_cube_ext='DATA',layer_scale_ext='WAVESCL',data_cube_file=None,verbose=True):
    """
    Assemble the spectra determined by the wavelength layer scaling of the normalized models
    when generating the source model cube

    --- INPUT ---
    model_cube_file                  Model cube to base extraction on (using header info and layer scales)
    source_association_dictionary    Source association dictionary defining what sources should be combined into
                                     objects (individual spectra). Defaults to one source per object.
    nameext                          The name extension to use for saved spectra
    outputdir                        Directory to save spectra to
    clobber                          Overwrite spectra if they already exist
    variance_cube_file               File containing variance cube of data to be used to estimate noise on 1D spectrum
    variance_cube_ext                Extension of variance cube to use
    source_model_cube_file           The source model cube defining the individual sources
    source_cube_ext                  Extension of source model cube file that contains source models
    model_cube_ext                   Extension of model cube file that contains model
    layer_scale_ext                  Extension of model cube file that contains the layer scales
    data_cube_file                   File containing original data cube used for extraction of aperture spectra
    verbose                          Toggle verbosity

    --- OUTPUT ---
    List of the fits file names of the spectra written to `outputdir`.
    """
    if verbose: print(' - Loading data needed for spectral assembly')
    model_cube = afits.open(model_cube_file)[model_cube_ext].data
    model_cube_hdr = afits.open(model_cube_file)[model_cube_ext].header
    layer_scale_arr = afits.open(model_cube_file)[layer_scale_ext].data
    if variance_cube_file is not None:
        stddev_cube = np.sqrt(afits.open(variance_cube_file)[variance_cube_ext].data) # turn variance into standard deviation
        source_model_cube = afits.open(source_model_cube_file)[source_cube_ext].data
    else:
        stddev_cube = None
        source_model_cube = None
    Nsources = layer_scale_arr.shape[0]

    if data_cube_file is not None:
        if verbose: print(' - Loading data cube ')
        data_cube = afits.open(data_cube_file)[model_cube_ext].data
    else:
        data_cube = None

    if source_association_dictionary is None:
        if verbose: print(' - Building default source association dictionary ' \
                          '(determining what sources are combined into objects), i.e., one source per object ')
        sourcIDs_dic = collections.OrderedDict()
        for oo in np.arange(int(Nsources)):
            sourcIDs_dic[str(oo)] = [oo]
    else:
        sourcIDs_dic = source_association_dictionary
    Nobj = len(list(sourcIDs_dic.keys()))
    if verbose: print(' - Found '+str(Nobj)+' objects to generate spectra for in source_association_dictionary ')

    if verbose: print(' - Assembling wavelength vector for spectra ')
    # Linear WCS along the spectral axis: wave = CRVAL3 + index * CD3_3.
    wavelengths = np.arange(model_cube_hdr['NAXIS3'])*model_cube_hdr['CD3_3']+model_cube_hdr['CRVAL3']

    specfiles = []
    for oo, key in enumerate(sourcIDs_dic.keys()):
        obj_cube_hdr = model_cube_hdr.copy()
        try:
            # Zero-pad numeric IDs to 10 digits; keep non-numeric keys as-is.
            specid = str("%.10d" % int(key))
        except:
            specid = str(key)
        specname = outputdir+nameext+'_'+specid+'.fits'
        specfiles.append(specname)
        sourceIDs = sourcIDs_dic[key]
        obj_cube_hdr.append(('OBJID ',specid ,'ID of object'),end=True)
        obj_cube_hdr.append(('SRCIDS ',str(sourceIDs) ,'IDs of sources combined in object'),end=True)
        if verbose:
            infostr = ' - Extracting spectrum '+str("%6.f" % (oo+1))+' / '+str("%6.f" % Nobj)
            sys.stdout.write("%s\r" % infostr)
            sys.stdout.flush()
        # extract_spectrum() writes the spectrum fits file as a side effect.
        sourceoutput = tes.extract_spectrum(sourceIDs,layer_scale_arr,wavelengths,noise_cube=stddev_cube,
                                            source_model_cube=source_model_cube, data_cube=data_cube,
                                            specname=specname,obj_cube_hdr=obj_cube_hdr,clobber=clobber,verbose=True)

    if verbose: print('\n - Done extracting spectra. Returning list of fits files generated')
    return specfiles
# = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
def extract_spectrum(sourceIDs,layer_scale_arr,wavelengths,noise_cube=None,source_model_cube=None,
                     specname='tdose_extract_spectra_extractedspec.fits',obj_cube_hdr=None,data_cube=None,
                     clobber=False,verbose=True):
    """
    Extract a spectrum based on the layer scale image from the model cube
    for a list of sources to combine into one object.

    Noise is propagated from the noise cube (sqrt(variance) of the data).
    If all layer scales are 1, the source model cube is assumed to contain
    an extraction mask and `data_cube` is required.

    --- INPUT ---
    sourceIDs          The source IDs to combine into spectrum
    layer_scale_arr    Layer scale array [Nsources, Nwave] from the model cube
    wavelengths        Wavelength vector for the extracted 1D spectrum
    noise_cube         Cube with uncertainties (sqrt(variance)) of the data;
                       requires source_model_cube to estimate 1D noise/S/N
    source_model_cube  Source model cube with each source modeled separately
    specname           Name of fits file to save the spectrum to
    obj_cube_hdr       Template header; when given, the combined object cube
                       is stored as an extra extension of the output file
    data_cube          Data cube used when all layer scales are 1 (mask mode)
    clobber            Overwrite an existing output file
    verbose            Toggle verbosity

    --- OUTPUT ---
    wavelengths, spec_1D, noise_1D, object_cube
    (object_cube is None when neither the mask-extraction branch nor the
    noise-propagation branch assembled one)
    """
    if verbose: print(' - Checking shape of wavelengths and layer_scale_arr')
    if wavelengths.shape[0] != layer_scale_arr.shape[1]:
        # Bug fix: shape[1] is an int (no .shape attribute) and cannot be
        # concatenated to a str; wrap it in str().
        sys.exit(' ---> Shape of wavelength vector ('+str(wavelengths.shape)+
                 ') and wavelength dimension of layer scale array ('+
                 str(layer_scale_arr.shape[1])+') do not match.')
    else:
        if verbose: print('   dimensions match; proceeding...')

    if verbose: print(' - Checking all sources have spectra in layer_scale_arr')
    maxsource = np.max(sourceIDs)
    if maxsource >= layer_scale_arr.shape[0]:
        sys.exit(' ---> Sources in list '+str(str(sourceIDs))+
                 ' not available among '+str(layer_scale_arr.shape[0])+' sources in layer_scale_arr.')
    else:
        if verbose: print('   All sources exist in layer_scale_arr; proceeding...')

    if verbose: print(' - Assembling object spectrum from source scaling')
    source_ent = np.asarray(sourceIDs).astype(int)
    # Bug fix: ensure object_cube is always defined; previously the final
    # `return` raised NameError when layer scales differ from 1 and no
    # noise_cube was provided (neither branch assigned it).
    object_cube = None

    if (layer_scale_arr == 1).all():
        if verbose: print(' - All layer scales are 1; assuming source model cube contain mask for spectral extraction')
        object_cube = np.sum(np.abs(source_model_cube[source_ent,:,:]),axis=0)

        if data_cube is None:
            sys.exit(' ---> Did not find a data cube to extract spectra from as expected')

        object_mask = (object_cube == 0) # masking all zeros in object mask
        invalid_mask = np.ma.masked_invalid(data_cube).mask
        comb_mask = (invalid_mask | object_mask)
        # Sum the unmasked data pixels in each layer to get the 1D flux.
        spec_1D_masked = np.sum(np.sum( np.ma.array(data_cube,mask=comb_mask) ,axis=1),axis=1)
        spec_1D = spec_1D_masked.filled(fill_value=0.0)

        if noise_cube is not None:
            if verbose: print('   Calculating noise as d_spec_k = sqrt( SUMij d_pix_ij**2 ), i.e., as the sqrt of variances summed')
            invalid_mask_noise = np.ma.masked_invalid(noise_cube).mask
            comb_mask = (comb_mask | invalid_mask_noise)
            variance_1D_masked = np.ma.array(noise_cube,mask=comb_mask)**2
            noise_1D_masked = np.sqrt( np.sum( np.sum( variance_1D_masked, axis=1), axis=1) )
            noise_1D = noise_1D_masked.filled(fill_value=np.nan)

            if verbose: print('   Generating S/N vector')
            SN_1D = spec_1D / noise_1D
        else:
            if verbose: print(' - No "noise_cube" provided. Setting all errors and S/N values to NaN')
            SN_1D = np.zeros(spec_1D.shape)*np.nan
            noise_1D = np.zeros(spec_1D.shape)*np.nan
    else:
        if verbose: print(' - Some layer scales are different from 1; hence assembling spectra using layer scales')
        # NOTE(review): for an empty source list this keeps a 2D (0, Nwave)
        # array rather than a 1D spectrum; presumably callers always pass
        # at least one source ID — confirm before relying on this branch.
        if len(source_ent) < 1:
            spec_1D = layer_scale_arr[source_ent,:]
        else:
            spec_1D = np.sum( layer_scale_arr[source_ent,:],axis=0)

        if noise_cube is not None:
            if verbose: print(' - Estimate S/N at each wavelength for 1D spectrum (see Eq. 16 of Kamann+2013)')
            if verbose: print('   Estimating fraction of flux in each pixel wrt. total flux in each layer')
            object_cube = np.sum((source_model_cube[source_ent,:,:,:]),axis=0) # summing source models for all source IDs

            fluxfrac_cube_sents = np.zeros(source_model_cube.shape[1:])
            for sent in source_ent:
                object_cube_sent = np.sum((source_model_cube[[sent],:,:,:]),axis=0) # getting source model for model 'sent'
                fluxscale1D_sent = layer_scale_arr[sent,:]
                fluxfrac_cube_sent = object_cube_sent / fluxscale1D_sent[:,None,None]
                fluxfrac_cube_sents = fluxfrac_cube_sents + fluxfrac_cube_sent
            fluxfrac_cube = fluxfrac_cube_sents / len(source_ent) # renormalizing flux-fraction cube

            if verbose: print('   Defining pixel mask (ignoring NaN pixels) ') #+\
            # 'and pixels with <'+str(fluxfrac_min)+' of total pixel flux in model cube) '
            # pix_mask = (fluxfrac_cube < fluxfrac_min)
            invalid_mask1 = np.ma.masked_invalid(fluxfrac_cube).mask
            invalid_mask2 = np.ma.masked_invalid(noise_cube).mask

            # combining mask making sure all individual mask pixels have True for it to be true in combined mask
            comb_mask = (invalid_mask1 | invalid_mask2) # | pix_mask

            if verbose: print('   Calculating noise propogated as d_spec_k = 1/sqrt( SUMij (fluxfrac_ij**2 / d_pix_ij**2) )')
            squared_ratio = np.ma.array(fluxfrac_cube,mask=comb_mask)**2 / np.ma.array(noise_cube,mask=comb_mask)**2
            inv_noise_masked = np.sqrt( np.sum( np.sum( squared_ratio, axis=1), axis=1) )
            noise_1D = (1.0/inv_noise_masked).filled(fill_value=0.0)
            if verbose: print('   Generating S/N vector')
            SN_1D = spec_1D / noise_1D
        else:
            if verbose: print(' - No "noise_cube" provided. Setting all errors and S/N values to NaN')
            SN_1D = np.zeros(spec_1D.shape)*np.nan
            noise_1D = np.zeros(spec_1D.shape)*np.nan

    if verbose: print(' - Saving extracted 1D spectrum and source cube to \n   '+specname)
    mainHDU = afits.PrimaryHDU()       # primary HDU

    c1 = afits.Column(name='wave', format='D', unit='ANGSTROMS', array=wavelengths)
    c2 = afits.Column(name='flux', format='D', unit='', array=spec_1D)
    c3 = afits.Column(name='fluxerror', format='D', unit='', array=noise_1D)
    c4 = afits.Column(name='s2n', format='D', unit='', array=SN_1D)

    coldefs = afits.ColDefs([c1,c2,c3,c4])
    th = afits.BinTableHDU.from_columns(coldefs) # creating default header

    # writing hdrkeys:'---KEY--',  '----------------MAX LENGTH COMMENT-------------'
    th.header.append(('EXTNAME ','SPEC1D'            ,'cube containing source'),end=True)
    head = th.header
    tbHDU = afits.BinTableHDU.from_columns(coldefs, header=head)

    if obj_cube_hdr is not None:
        # Store the combined object cube as an extra extension, copying
        # over all header keys not already present.
        objHDU = afits.ImageHDU(object_cube)
        for hdrkey in list(obj_cube_hdr.keys()):
            if not hdrkey in list(objHDU.header.keys()):
                objHDU.header.append((hdrkey,obj_cube_hdr[hdrkey],obj_cube_hdr.comments[hdrkey]),end=True)

        try:
            # Preserve the original EXTNAME under EXTNAMEC before renaming.
            objHDU.header.append(('EXTNAMEC',objHDU.header['EXTNAME'] ,'EXTNAME of original source cube'),end=True)
            del objHDU.header['EXTNAME']
        except:
            pass

        objHDU.header.append(('EXTNAME ','SOURCECUBE'        ,'cube containing source'),end=True)
        hdulist = afits.HDUList([mainHDU,tbHDU,objHDU])
    else:
        hdulist = afits.HDUList([mainHDU,tbHDU])

    hdulist.writeto(specname, overwrite=clobber)

    return wavelengths, spec_1D, noise_1D, object_cube
# = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
def extract_spectra_viasourcemodelcube(datacube,sourcemodelcube,wavelengths,speclist,specids='None',outputdir='./',
                                       noisecube=False,sourcemodel_hdr='None',verbose=True):
    """
    Wrapper for tes.extract_spectrum_viasourcemodelcube() to extract multiple spectra

    --- INPUT ----
    datacube          Datacube to extract spectra from
    sourcemodelcube   Cube containing the source models for each object used as "extraction cube"
                      Dimensions should be [Nsources,datacube.shape]
    wavelengths       Wavelength vector to use for extracted 1D spectrum.
    speclist          List of spectra to extract. Indexes corresponding to the source models in the
                      sourcemodelcube
    specids           List of IDs to use in naming of output for source models referred to in "speclist"
    outputdir         Directory to store spectra to
    noisecube         Cube with uncertainties (sqrt(variance)) of data cube to be used in extraction
    sourcemodel_hdr   If not 'None' provide a basic fits header for the source model cubes extracted
                      and they will be appended to the individual output fits file containing the extracted
                      spectra.
    verbose           Toggle verbosity

    --- OUTPUT ---
    specnames         List of names of the fits files the individual spectra were saved to

    --- EXAMPLE OF USE ---
    """
    if verbose: print(' - Check that source models indicated are present in source model cube ')
    specnames = []
    Nmodels   = sourcemodelcube.shape[0]
    maxobj    = np.max(speclist)
    if maxobj >= Nmodels:
        sys.exit(' ---> Object model "'+str(maxobj)+'" is not included in source model cube (models start at 0)')
    else:
        if verbose: print('   All object models appear to be included in the '+str(Nmodels)+' source models found in cube')

    if datacube.shape != sourcemodelcube[0].shape:
        # NB: the shape tuples must be wrapped in str(); concatenating a tuple
        #     directly to a string raises a TypeError before sys.exit is reached.
        sys.exit(' ---> Shape of datacube ('+str(datacube.shape)+') and shape of source models ('+
                 str(sourcemodelcube[0].shape)+') do not match.')

    sourcemodel_sum = np.sum(sourcemodelcube,axis=0)
    for ss, spec in enumerate(speclist):
        if specids == 'None':
            specid = spec
        else:
            specid = specids[ss]

        specname = outputdir+'tdose_spectrum_'+str("%.12d" % specid)+'.fits'
        specnames.append(specname)

        sourcemodel   = sourcemodelcube[spec,:,:,:]
        sourceweights = sourcemodel/sourcemodel_sum  # fractional flux of model for given source in each pixel
        # Only annotate the header when one was actually provided; the default is
        # the string 'None', which has no .append() and would crash here.
        if sourcemodel_hdr != 'None':
            sourcemodel_hdr.append(('OBJMODEL',spec  ,'Source model number in parent source model cube'),end=True)
            sourcemodel_hdr.append(('OBJID   ',specid,'ID of source'),end=True)

        if verbose:
            infostr = ' - Extracting spectrum '+str("%6.f" % (spec+1))+' / '+str("%6.f" % len(speclist))
            sys.stdout.write("%s\r" % infostr)
            sys.stdout.flush()

        sourceoutput = tes.extract_spectrum_viasourcemodelcube(datacube,sourceweights,wavelengths,specname=specname,
                                                               noisecube=noisecube,spec1Dmethod='sum',
                                                               sourcecube_hdr=sourcemodel_hdr,verbose=verbose)

    if verbose: print('\n - Done extracting spectra. Returning list of fits files containing spectra')
    return specnames
# = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
def extract_spectrum_viasourcemodelcube(datacube,sourceweights,wavelengths,
                                        specname='tdose_extract_spectra_extractedspec.fits',
                                        noisecube=None,spec1Dmethod='sum',sourcecube_hdr='None',verbose=True):
    """
    Extracting a spectrum from a data cube given a source model (cube) to be used as 'extraction cube'

    --- INPUT ---
    datacube          Datacube to extract spectra from
    sourceweights     Weights from source model to use as "extraction cube". The weights should contain the
                      fractional flux belonging to the source in each pixel
    wavelengths       Wavelength vector to use for extracted 1D spectrum.
    specname          Name of spectrum to generate
    noisecube         Cube with uncertainties (sqrt(variance)) of data cube to be used in extraction
    spec1Dmethod      Method used to extract 1D spectrum from source cube ('sum' or 'sum_SNweight')
    sourcecube_hdr    If not 'None' provide a fits header for the source cube and it will be appended to the
                      output fits file.
    verbose           Toggle verbosity

    --- OUTPUT ---
    sourcecube        Weighted (source) cube the spectrum was extracted from
    sourcecube_err    Propagated uncertainty cube corresponding to sourcecube
    spec_wave         Wavelength vector of the extracted 1D spectrum
    spec_flux         Flux vector of the extracted 1D spectrum
    spec_err          Flux uncertainty vector of the extracted 1D spectrum

    --- EXAMPLE OF USE ---
    """
    if verbose: print(' - Checking shape of data and source model cubes')

    if datacube.shape != sourceweights.shape:
        # NB: the shape tuple must be wrapped in str(); concatenating a tuple
        #     directly to a string raises a TypeError before sys.exit is reached.
        sys.exit(' ---> Shape of datacube ('+str(datacube.shape)+') and source weights ('+
                 str(sourceweights.shape)+') do not match.')
    else:
        if verbose: print('   dimensions match; proceeding with extraction ')
    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    if verbose: print(' - Applying weights to "datacube" to obtain source cube ')
    sourcecube = datacube*sourceweights

    if noisecube is not None:
        if verbose: print(' - Using "noisecube" for error propagation ')
        datanoise = noisecube
    else:
        if verbose: print(' - No "noisecube" provided. Setting all errors to 1')
        datanoise = np.ones(datacube.shape)

    if verbose: print(' - Assuming uncertainty on source weights equals the datanoise when propgating errors')
    sourceweights_err = datanoise
    # Error propagation for a product: relative uncertainties add in quadrature
    sourcecube_err = sourcecube * np.sqrt( (datanoise/datacube)**2 + (sourceweights_err/sourceweights)**2 )
    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    if verbose: print(' - Generating 1D spectrum from source cube via:')
    spec_wave = wavelengths
    # Mask pixels where the flux or its uncertainty is NaN/inf so they are
    # ignored by the (masked) summations below.
    maskinvalid = np.ma.masked_invalid(sourcecube * sourcecube_err).mask
    if spec1Dmethod == 'sum':
        if verbose: print('   Simple summation of fluxes in sourcecube.')
        spec_flux = np.sum(np.sum(np.ma.array(sourcecube,mask=maskinvalid),axis=1),axis=1).filled()
        if verbose: print('   Errors are propagated as sum of squares.')
        spec_err  = np.sqrt( np.sum( np.sum(np.ma.array(sourcecube_err,mask=maskinvalid)**2,axis=1),axis=1) ).filled()
    elif spec1Dmethod == 'sum_SNweight':
        pdb.set_trace()  # S/N-weighted extraction not implemented yet
    else:
        sys.exit(' ---> The chosen spec1Dmethod ('+str(spec1Dmethod)+') is invalid')
    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    if verbose: print(' - Saving extracted 1D spectrum and source cube to \n   '+specname)
    mainHDU = afits.PrimaryHDU()       # primary HDU
    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    c1 = afits.Column(name='wave',      format='D', unit='ANGSTROMS', array=spec_wave)
    c2 = afits.Column(name='flux',      format='D', unit='', array=spec_flux)
    c3 = afits.Column(name='fluxerror', format='D', unit='', array=spec_err)

    coldefs = afits.ColDefs([c1,c2,c3])
    th = afits.BinTableHDU.from_columns(coldefs) # creating default header

    # writing hdrkeys:'---KEY--',                      '----------------MAX LENGTH COMMENT-------------'
    th.header.append(('EXTNAME ','SPEC1D'             ,'cube containing source'),end=True)
    th.header.append(('SPECMETH' , spec1Dmethod       ,'Method used for spectral extraction'),end=True)
    head  = th.header
    tbHDU = afits.BinTableHDU.from_columns(coldefs, header=head)
    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    if sourcecube_hdr != 'None':
        sourceHDU = afits.ImageHDU(sourcecube)       # default HDU with default minimal header
        # Copy all keywords from the provided header that the default header lacks
        for hdrkey in list(sourcecube_hdr.keys()):
            if not hdrkey in list(sourceHDU.header.keys()):
                sourceHDU.header.append((hdrkey,sourcecube_hdr[hdrkey],sourcecube_hdr.comments[hdrkey]),end=True)
        sourceHDU.header.append(('EXTNAME ','SOURCECUBE'         ,'cube containing source'),end=True)
        hdulist = afits.HDUList([mainHDU,tbHDU,sourceHDU])
    else:
        hdulist = afits.HDUList([mainHDU,tbHDU])
    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    hdulist.writeto(specname, overwrite=True)
    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    return sourcecube, sourcecube_err, spec_wave, spec_flux, spec_err
# = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
def plot_1Dspecs(filelist,plotname='./tdose_1Dspectra.pdf',colors=None,labels=None,plotSNcurve=False,
                 tdose_wavecol='wave',tdose_fluxcol='flux',tdose_errcol='fluxerror',
                 simsources=None,simsourcefile='/Users/kschmidt/work/TDOSE/mock_cube_sourcecat161213_all.fits',
                 sim_cube_dim=None,comparisonspecs=None,comp_colors=['blue'],comp_labels=None,
                 comp_wavecol='WAVE_AIR',comp_fluxcol='FLUX',comp_errcol='FLUXERR',
                 xrange=None,yrange=None,showspecs=False,shownoise=True,
                 skyspecs=None,sky_colors=['red'],sky_labels=['sky'],
                 sky_wavecol='lambda',sky_fluxcol='data',sky_errcol='stat',
                 showlinelists=None,linelistcolors=['gray'],smooth=0,ylog=False,
                 plotratio=False,
                 verbose=True,pubversion=False):
    """
    Plots of multiple 1D spectra
    --- INPUT ---
    filelist         List of spectra filenames to plot
    plotname         Name of plot to generate
    colors           Colors of the spectra in filelist to use
    labels           Labels of the spectra in filelist to use
    plotSNcurve      Show signal-to-noise curve instead of flux spectra
    tdose_wavecol    Wavelength column of the spectra in filelist
    tdose_fluxcol    Flux column of the spectra in filelist
    tdose_errcol     Flux error column of the spectra in filelist
    simsources       To plot simulated sources provide ids here
    simsourcefile    Source file with simulated sources to plot
    sim_cube_dim     Dimensions of simulated cubes
    comparisonspecs  To plot comparison spectra provide the filenames of those here
    comp_colors      Colors of the spectra in comparisonspecs list to use
    comp_labels      Labels of the spectra in comparisonspecs list to use
    comp_wavecol     Wavelength column of the spectra in comparisonspecs list
    comp_fluxcol     Flux column of the spectra in comparisonspecs list
    comp_errcol      Flux error column of the spectra in comparisonspecs list
    xrange           Xrange of plot
    yrange           Yrange of plot
    showspecs        To show plot instead of storing it to disk set showspecs=True
    shownoise        To add noise envelope around spectrum set shownoise=True
    skyspecs         To plot sky spectra provide the filenames of those here
    sky_colors       Colors of the spectra in skyspecs list to use
    sky_labels       Labels of the spectra in skyspecs list to use
    sky_wavecol      Wavelength column of the spectra in skyspecs list
    sky_fluxcol      Flux column of the spectra in skyspecs list
    sky_errcol       Flux error column of the spectra in skyspecs list
    showlinelists    To show line lists provide a list of arrays of dimension (Nlines,2) where each row in the
                     arrays contains [waveobs, name], where 'waveobs' is the observed wavelengths and 'name' is
                     a string with the name of each of the Nlines postions to mark on the spectrum.
    linelistcolors   List of colors for line lists provided in showlinelists
    smooth           To smooth the spectra, provide sigma of the 1D gaussian smoothing kernel to apply.
                     For smooth = 0, no smoothing is performed.
    ylog             To plot y-axis in log scale set to true
    plotratio        To plot the ratio between the main spectrum and the comparison spectra instead of the actual
                     spectra, set this keyword to true.
    verbose          Toggle verbosity
    pubversion       Generate more publication friendly version of figure
    """
    if len(filelist) == 1:
        if verbose: print(' - Plotting data from '+filelist[0])
    else:
        if verbose: print(' - Plotting data from filelist ')
    # Publication version uses a narrower canvas and a legend above the axes
    if pubversion:
        fig = plt.figure(figsize=(6, 3))
        fig.subplots_adjust(wspace=0.1, hspace=0.1,left=0.15, right=0.95, bottom=0.18, top=0.83)
        Fsize = 12
    else:
        fig = plt.figure(figsize=(10, 3))
        fig.subplots_adjust(wspace=0.1, hspace=0.1,left=0.06, right=0.81, bottom=0.15, top=0.95)
        Fsize = 10
    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    # Looking for flux units in spectra
    # Scan input spectra for a BUNIT keyword in the SOURCECUBE extension;
    # the first one found is used for the y-axis label.
    bunit = 'BUNIT FLUX' # Default BUNIT
    for unitspec in filelist:
        if bunit == 'BUNIT FLUX':
            try:
                sourcecubehdr = afits.open(unitspec)['SOURCECUBE'].header
                bunit = sourcecubehdr['BUNIT']
            except:
                try: # Backwards compatibility to TDOSE v2.0 extractions
                    sourcecubehdr = afits.open(unitspec)[2].header
                    bunit = sourcecubehdr['BUNIT']
                except:
                    pass
    if bunit == 'BUNIT FLUX':
        if verbose: print(' - Did not find BUNIT in SOURCECUBE header for any spectra in filelist - are they not from TDOSE?')
    if bunit == '10**(-20)*erg/s/cm**2/Angstrom': # Making bunit LaTeXy for MUSE-Wide BUNIT format
        bunit = '1e-20 erg/s/cm$^2$/\AA'
    else:
        bunit = '$'+bunit+'$' # minimizing pronlems with LaTeXing plot axes
    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    lthick = 1
    plt.rc('text', usetex=True)
    plt.rc('font', family='serif',size=Fsize)
    plt.rc('xtick', labelsize=Fsize)
    plt.rc('ytick', labelsize=Fsize)
    plt.clf()
    plt.ioff()
    #plt.title(plotname.split('TDOSE 1D spectra'),fontsize=Fsize)
    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    # Main loop: plot each input spectrum (as flux or S/N), then overlay
    # simulated, comparison, and sky spectra requested by the keyword inputs.
    for ff, specfile in enumerate(filelist):
        specdat = afits.open(specfile)[1].data
        if colors is None:
            spec_color = None
        else:
            spec_color = colors[ff]
        if labels is None:
            spec_label = specfile
        else:
            spec_label = labels[ff]
        if xrange is not None:
            goodent = np.where((specdat[tdose_wavecol] > xrange[0]) & (specdat[tdose_wavecol] < xrange[1]))[0]
            # NOTE(review): 'goodent == []' compares a numpy array to a list; this only
            # falls through to True for an empty array. 'goodent.size == 0' would be the
            # robust test - confirm before relying on this behavior.
            if goodent == []:
                if verbose: print(' - The chosen xrange is not covered by the input spectrum. Plotting full spectrum')
                goodent = np.arange(len(specdat[tdose_wavecol]))
        else:
            goodent = np.arange(len(specdat[tdose_wavecol]))
        if plotSNcurve:
            # Prefer the stored s2n column; fall back to flux/error if absent
            try:
                s2ndat = specdat['s2n'][goodent]
            except:
                s2ndat = specdat[tdose_fluxcol][goodent]/specdat[tdose_errcol][goodent]
            if smooth > 0:
                s2ndat = snf.gaussian_filter(s2ndat, smooth)
            if not plotratio:
                plt.plot(specdat[tdose_wavecol][goodent],s2ndat,color=spec_color,lw=lthick, label=spec_label)
                ylabel = 'S/N'
            else:
                plt.plot(specdat[tdose_wavecol][goodent],s2ndat/s2ndat,color=spec_color,lw=lthick, label=None)
                ylabel = 'S/N ratio'
            #plotname = plotname.replace('.pdf','_S2N.pdf')
        else:
            fillalpha = 0.30
            fluxdat = specdat[tdose_fluxcol][goodent]
            errlow = specdat[tdose_fluxcol][goodent]-specdat[tdose_errcol][goodent]
            errhigh = specdat[tdose_fluxcol][goodent]+specdat[tdose_errcol][goodent]
            if smooth > 0:
                fluxdat = snf.gaussian_filter(fluxdat, smooth)
                errlow = snf.gaussian_filter(errlow, smooth)
                errhigh = snf.gaussian_filter(errhigh, smooth)
            # NOTE(review): fluxdat was already smoothed in the block above, so this
            # second gaussian_filter smooths it twice - confirm whether intentional.
            if smooth > 0:
                fluxdat = snf.gaussian_filter(fluxdat, smooth)
            if not plotratio:
                if shownoise:
                    plt.fill_between(specdat[tdose_wavecol][goodent],errlow,errhigh,
                                     alpha=fillalpha,color=spec_color)
                plt.plot(specdat[tdose_wavecol][goodent],fluxdat,
                         color=spec_color,lw=lthick, label=spec_label)
                ylabel = tdose_fluxcol
            else:
                plt.plot(specdat[tdose_wavecol][goodent],fluxdat/fluxdat,
                         color=spec_color,lw=lthick, label=None)
                ylabel = tdose_fluxcol+' ratio '
        # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        # Overlay spectra of simulated sources (individual dashed curves plus their sum)
        if simsources is not None:
            sim_total = np.zeros(len(specdat[tdose_wavecol]))
            for sourcenumber in simsources:
                sourcedat = afits.open(simsourcefile)[1].data
                xpos = sourcedat['xpos'][sourcenumber]
                ypos = sourcedat['ypos'][sourcenumber]
                fluxscale = sourcedat['fluxscale'][sourcenumber]
                sourcetype = sourcedat['sourcetype'][sourcenumber]
                spectype = sourcedat['spectype'][sourcenumber]
                sourcecube = tbmc.gen_source_cube([ypos,xpos],fluxscale,sourcetype,spectype,cube_dim=sim_cube_dim,
                                                  verbose=verbose,showsourceimgs=False)
                simspec = np.sum( np.sum(sourcecube, axis=1), axis=1)
                sim_total = sim_total + simspec
                if smooth > 0:
                    simspec = snf.gaussian_filter(simspec, smooth)
                plt.plot(specdat[tdose_wavecol],simspec,'--',color='black',lw=lthick)
            plt.plot(specdat[tdose_wavecol],sim_total,'--',color='black',lw=lthick,
                     label='Sim. spectrum: \nsimsource='+str(simsources))
        # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        # Overlay comparison spectra (or, with plotratio=True, main/comparison ratios)
        if comparisonspecs is not None:
            for cc, comparisonspec in enumerate(comparisonspecs):
                compdat = afits.open(comparisonspec)[1].data
                if xrange is not None:
                    goodent = np.where((compdat[comp_wavecol] > xrange[0]) & (compdat[comp_wavecol] < xrange[1]))[0]
                    # NOTE(review): same array-vs-list comparison as above - see earlier note.
                    if goodent == []:
                        if verbose: print(' - The chosen xrange is not covered by the comparison spectrum. Plotting full spectrum')
                        goodent = np.arange(len(compdat[comp_wavecol]))
                else:
                    goodent = np.arange(len(compdat[comp_wavecol]))
                if comp_colors is None:
                    comp_color = None
                else:
                    comp_color = comp_colors[cc]
                if comp_labels is None:
                    comp_label = comparisonspec
                else:
                    comp_label = comp_labels[cc]
                if plotSNcurve:
                    s2ncompdat = compdat[comp_fluxcol][goodent]/compdat[comp_errcol][goodent]
                    if smooth > 0:
                        s2ncompdat = snf.gaussian_filter(s2ncompdat, smooth)
                    if not plotratio:
                        plt.plot(compdat[comp_wavecol][goodent],s2ncompdat,
                                 color=comp_color,lw=lthick, label=comp_label)
                    else:
                        # NOTE(review): s2ndat here is the (possibly differently-sized) array
                        # from the main spectrum loop - confirm both spectra share a grid.
                        plt.plot(compdat[comp_wavecol][goodent],s2ndat/s2ncompdat,
                                 color=comp_color,lw=lthick, label=comp_label)
                else:
                    fillalpha = 0.30
                    fluxcompdat = compdat[comp_fluxcol][goodent]
                    errlow = compdat[comp_fluxcol][goodent]-compdat[comp_errcol][goodent]
                    errhigh = compdat[comp_fluxcol][goodent]+compdat[comp_errcol][goodent]
                    if smooth > 0:
                        fluxcompdat = snf.gaussian_filter(fluxcompdat, smooth)
                        errlow = snf.gaussian_filter(errlow, smooth)
                        errhigh = snf.gaussian_filter(errhigh, smooth)
                    if not plotratio:
                        if shownoise:
                            plt.fill_between(compdat[comp_wavecol][goodent],errlow,errhigh,
                                             alpha=fillalpha,color=comp_color)
                        plt.plot(compdat[comp_wavecol][goodent],fluxcompdat,
                                 color=comp_color,lw=lthick, label=comp_label)
                    else:
                        plt.plot(compdat[comp_wavecol][goodent],fluxdat/fluxcompdat,
                                 color=comp_color,lw=lthick, label=comp_label)
        # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        # Overlay sky spectra
        if skyspecs is not None:
            for ss, skyspec in enumerate(skyspecs):
                skydat = afits.open(skyspec)[1].data
                if xrange is not None:
                    goodent = np.where((skydat[sky_wavecol] > xrange[0]) & (skydat[sky_wavecol] < xrange[1]))[0]
                    # NOTE(review): same array-vs-list comparison as above - see earlier note.
                    if goodent == []:
                        if verbose: print(' - The chosen xrange is not covered by the sky spectrum. Plotting full spectrum')
                        goodent = np.arange(len(skydat[sky_wavecol]))
                else:
                    goodent = np.arange(len(skydat[sky_wavecol]))
                if sky_colors is None:
                    sky_color = None
                else:
                    sky_color = sky_colors[ss]
                if sky_labels is None:
                    sky_label = skyspec
                else:
                    sky_label = sky_labels[ss]
                if plotSNcurve:
                    s2nsky = skydat[sky_fluxcol][goodent]/skydat[sky_errcol][goodent]
                    if smooth > 0:
                        s2nsky = snf.gaussian_filter(s2nsky, smooth)
                    plt.plot(skydat[sky_wavecol][goodent],s2nsky,
                             color=sky_color,lw=lthick, label=sky_label)
                else:
                    fillalpha = 0.30
                    fluxsky = skydat[sky_fluxcol][goodent]
                    errlow = skydat[sky_fluxcol][goodent]-skydat[sky_errcol][goodent]
                    errhigh = skydat[sky_fluxcol][goodent]+skydat[sky_errcol][goodent]
                    if smooth > 0:
                        fluxsky = snf.gaussian_filter(fluxsky, smooth)
                        errlow = snf.gaussian_filter(errlow, smooth)
                        errhigh = snf.gaussian_filter(errhigh, smooth)
                    if shownoise:
                        plt.fill_between(skydat[sky_wavecol][goodent],errlow,errhigh,
                                         alpha=fillalpha,color=sky_color)
                    plt.plot(skydat[sky_wavecol][goodent],fluxsky,
                             color=sky_color,lw=lthick, label=sky_label)
    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    # Zero-flux reference line; default x-span matches the MUSE wavelength range
    if xrange is None:
        xvals = [4800,9300]
    else:
        xvals = xrange
    plt.plot(xvals,[0,0],'--k',lw=lthick)
    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    plt.xlabel('Wavelength [\AA]', fontsize=Fsize)
    if pubversion:
        if plotSNcurve:
            ylabel = 'Signal-to-Noise'
        else:
            ylabel = 'Flux ['+str(bunit)+']'
        if plotratio:
            ylabel = ylabel+' ratio'
    plt.ylabel(ylabel, fontsize=Fsize)
    if ylog:
        plt.yscale('log')
    if yrange is not None:
        plt.ylim(yrange)
    if xrange is not None:
        plt.xlim(xrange)
    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    # Mark and label the requested line-list positions within the plotted x-range
    if showlinelists is not None:
        for sl, showlinelist in enumerate(showlinelists):
            ymin, ymax = plt.ylim()
            xmin, xmax = plt.xlim()
            for ww, wave in enumerate(showlinelist[:,0]):
                wave = float(wave)
                if (wave < xmax) & (wave > xmin):
                    plt.plot([wave,wave],[ymin,ymax],linestyle='--',color=linelistcolors[sl],lw=lthick)
                    plt.text(wave,ymin+1.03*np.abs([ymax-ymin]),showlinelist[:,1][ww],color=linelistcolors[sl], fontsize=Fsize-2., ha='center')
    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    if pubversion:
        leg = plt.legend(fancybox=True, loc='upper center',prop={'size':Fsize-2},ncol=4,numpoints=1,
                         bbox_to_anchor=(0.44, 1.27))  # add the legend
    else:
        leg = plt.legend(fancybox=True, loc='upper right',prop={'size':Fsize},ncol=1,numpoints=1,
                         bbox_to_anchor=(1.25, 1.03))  # add the legend
    leg.get_frame().set_alpha(0.7)
    if showspecs:
        if verbose: print('   Showing plot (not saving to file)')
        plt.show()
    else:
        if verbose: print('   Saving plot to',plotname)
        plt.savefig(plotname)
    plt.clf()
    plt.close('all')
# = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
def plot_histograms(datavectors,plotname='./tdose_cubehist.pdf',colors=None,labels=None,bins=None,
                    xrange=None,yrange=None,verbose=True,norm=True,ylog=True):
    """
    Plot histograms of a set of data vectors.
    --- INPUT ---
    datavectors      Set of data vectors to plot histograms of
    plotname         Name of plot to generate
    colors           Colors to use for histograms
    labels           Labels for the data vectors
    bins             Bins to use for histograms. Can be generated with np.arange(minval,maxval+binwidth,binwidth)
    xrange           Xrange of plot
    yrange           Yrange of plot
    verbose          Toggle verbosity
    norm             Noramlize the histograms
    ylog             Use a logarithmic y-axes when plotting
    """
    Ndat = len(datavectors)
    if verbose: print(' - Plotting histograms of N = '+str(Ndat)+' data vectors')

    # Fall back to defaults for any styling inputs not provided
    if colors is None:
        colors = ['blue']*Ndat
    if labels is None:
        labels = ['data vector no. '+str(ii+1) for ii in np.arange(Ndat)]
    if bins is None:
        bins = np.arange(-100,102,2)

    fig = plt.figure(figsize=(10, 3))
    fig.subplots_adjust(wspace=0.1, hspace=0.1,left=0.08, right=0.81, bottom=0.1, top=0.95)
    Fsize = 10
    lthick = 1
    plt.rc('text', usetex=True)
    plt.rc('font', family='serif',size=Fsize)
    plt.rc('xtick', labelsize=Fsize)
    plt.rc('ytick', labelsize=Fsize)
    plt.clf()
    plt.ioff()
    #plt.title(plotname.split('TDOSE 1D spectra'),fontsize=Fsize)
    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    for dd, datavec in enumerate(datavectors):
        # NaNs are dropped before histogramming. 'density' replaces the former
        # 'normed' keyword, which was removed in matplotlib 3.x and would raise
        # a TypeError/AttributeError on current matplotlib versions.
        hist = plt.hist(datavec[~np.isnan(datavec)],color=colors[dd],bins=bins,histtype="step",lw=lthick,
                        label=labels[dd],density=norm)
    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    # Vertical reference line at x = 0
    if yrange is None:
        yvals = [1e-5,1e8]
    else:
        yvals = yrange
    plt.plot([0,0],yvals,'--k',lw=lthick)
    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    plt.xlabel('', fontsize=Fsize)
    plt.ylabel('\#', fontsize=Fsize)
    if yrange is not None:
        plt.ylim(yrange)
    if xrange is not None:
        plt.xlim(xrange)
    if ylog:
        plt.yscale('log')
    leg = plt.legend(fancybox=True, loc='upper right',prop={'size':Fsize},ncol=1,numpoints=1,
                     bbox_to_anchor=(1.25, 1.03))  # add the legend
    leg.get_frame().set_alpha(0.7)
    if verbose: print('   Saving plot to',plotname)
    plt.savefig(plotname)
    plt.clf()
    plt.close('all')
# = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = | mit |
imtapps/django-imt-fork | tests/regressiontests/model_formsets_regress/tests.py | 44 | 16775 | from __future__ import absolute_import, unicode_literals
from django import forms
from django.forms.formsets import BaseFormSet, DELETION_FIELD_NAME
from django.forms.util import ErrorDict, ErrorList
from django.forms.models import modelform_factory, inlineformset_factory, modelformset_factory, BaseModelFormSet
from django.test import TestCase
from django.utils import six
from .models import User, UserSite, Restaurant, Manager, Network, Host
class InlineFormsetTests(TestCase):
    """
    Regression tests for formsets built with inlineformset_factory: saving
    over a to_field ForeignKey, over inherited models, instance=None,
    save_as_new with new inlines, and initial-data handling.
    """
    def test_formset_over_to_field(self):
        "A formset over a ForeignKey with a to_field can be saved. Regression for #10243"
        Form = modelform_factory(User)
        FormSet = inlineformset_factory(User, UserSite)
        # Instantiate the Form and FormSet to prove
        # you can create a form with no data
        form = Form()
        form_set = FormSet(instance=User())
        # Now create a new User and UserSite instance
        data = {
            'serial': '1',
            'username': 'apollo13',
            'usersite_set-TOTAL_FORMS': '1',
            'usersite_set-INITIAL_FORMS': '0',
            'usersite_set-MAX_NUM_FORMS': '0',
            'usersite_set-0-data': '10',
            'usersite_set-0-user': 'apollo13'
        }
        user = User()
        form = Form(data)
        if form.is_valid():
            user = form.save()
        else:
            self.fail('Errors found on form:%s' % form_set)
        form_set = FormSet(data, instance=user)
        if form_set.is_valid():
            form_set.save()
            usersite = UserSite.objects.all().values()
            self.assertEqual(usersite[0]['data'], 10)
            self.assertEqual(usersite[0]['user_id'], 'apollo13')
        else:
            self.fail('Errors found on formset:%s' % form_set.errors)
        # Now update the UserSite instance
        data = {
            'usersite_set-TOTAL_FORMS': '1',
            'usersite_set-INITIAL_FORMS': '1',
            'usersite_set-MAX_NUM_FORMS': '0',
            'usersite_set-0-id': six.text_type(usersite[0]['id']),
            'usersite_set-0-data': '11',
            'usersite_set-0-user': 'apollo13'
        }
        form_set = FormSet(data, instance=user)
        if form_set.is_valid():
            form_set.save()
            usersite = UserSite.objects.all().values()
            self.assertEqual(usersite[0]['data'], 11)
            self.assertEqual(usersite[0]['user_id'], 'apollo13')
        else:
            self.fail('Errors found on formset:%s' % form_set.errors)
        # Now add a new UserSite instance
        data = {
            'usersite_set-TOTAL_FORMS': '2',
            'usersite_set-INITIAL_FORMS': '1',
            'usersite_set-MAX_NUM_FORMS': '0',
            'usersite_set-0-id': six.text_type(usersite[0]['id']),
            'usersite_set-0-data': '11',
            'usersite_set-0-user': 'apollo13',
            'usersite_set-1-data': '42',
            'usersite_set-1-user': 'apollo13'
        }
        form_set = FormSet(data, instance=user)
        if form_set.is_valid():
            form_set.save()
            usersite = UserSite.objects.all().values().order_by('data')
            self.assertEqual(usersite[0]['data'], 11)
            self.assertEqual(usersite[0]['user_id'], 'apollo13')
            self.assertEqual(usersite[1]['data'], 42)
            self.assertEqual(usersite[1]['user_id'], 'apollo13')
        else:
            self.fail('Errors found on formset:%s' % form_set.errors)
    def test_formset_over_inherited_model(self):
        "A formset over a ForeignKey with a to_field can be saved. Regression for #11120"
        Form = modelform_factory(Restaurant)
        FormSet = inlineformset_factory(Restaurant, Manager)
        # Instantiate the Form and FormSet to prove
        # you can create a form with no data
        form = Form()
        form_set = FormSet(instance=Restaurant())
        # Now create a new Restaurant and Manager instance
        data = {
            'name': "Guido's House of Pasta",
            'manager_set-TOTAL_FORMS': '1',
            'manager_set-INITIAL_FORMS': '0',
            'manager_set-MAX_NUM_FORMS': '0',
            'manager_set-0-name': 'Guido Van Rossum'
        }
        restaurant = User()
        form = Form(data)
        if form.is_valid():
            restaurant = form.save()
        else:
            self.fail('Errors found on form:%s' % form_set)
        form_set = FormSet(data, instance=restaurant)
        if form_set.is_valid():
            form_set.save()
            manager = Manager.objects.all().values()
            self.assertEqual(manager[0]['name'], 'Guido Van Rossum')
        else:
            self.fail('Errors found on formset:%s' % form_set.errors)
        # Now update the Manager instance
        data = {
            'manager_set-TOTAL_FORMS': '1',
            'manager_set-INITIAL_FORMS': '1',
            'manager_set-MAX_NUM_FORMS': '0',
            'manager_set-0-id': six.text_type(manager[0]['id']),
            'manager_set-0-name': 'Terry Gilliam'
        }
        form_set = FormSet(data, instance=restaurant)
        if form_set.is_valid():
            form_set.save()
            manager = Manager.objects.all().values()
            self.assertEqual(manager[0]['name'], 'Terry Gilliam')
        else:
            self.fail('Errors found on formset:%s' % form_set.errors)
        # Now add a new Manager instance
        data = {
            'manager_set-TOTAL_FORMS': '2',
            'manager_set-INITIAL_FORMS': '1',
            'manager_set-MAX_NUM_FORMS': '0',
            'manager_set-0-id': six.text_type(manager[0]['id']),
            'manager_set-0-name': 'Terry Gilliam',
            'manager_set-1-name': 'John Cleese'
        }
        form_set = FormSet(data, instance=restaurant)
        if form_set.is_valid():
            form_set.save()
            manager = Manager.objects.all().values().order_by('name')
            self.assertEqual(manager[0]['name'], 'John Cleese')
            self.assertEqual(manager[1]['name'], 'Terry Gilliam')
        else:
            self.fail('Errors found on formset:%s' % form_set.errors)
    def test_formset_with_none_instance(self):
        "A formset with instance=None can be created. Regression for #11872"
        Form = modelform_factory(User)
        FormSet = inlineformset_factory(User, UserSite)
        # Instantiate the Form and FormSet to prove
        # you can create a formset with an instance of None
        form = Form(instance=None)
        formset = FormSet(instance=None)
    def test_empty_fields_on_modelformset(self):
        "No fields passed to modelformset_factory should result in no fields on returned forms except for the id. See #14119."
        UserFormSet = modelformset_factory(User, fields=())
        formset = UserFormSet()
        for form in formset.forms:
            self.assertTrue('id' in form.fields)
            self.assertEqual(len(form.fields), 1)
    def test_save_as_new_with_new_inlines(self):
        """
        Existing and new inlines are saved with save_as_new.
        Regression for #14938.
        """
        efnet = Network.objects.create(name="EFNet")
        host1 = Host.objects.create(hostname="irc.he.net", network=efnet)
        HostFormSet = inlineformset_factory(Network, Host)
        # Add a new host, modify previous host, and save-as-new
        data = {
            'host_set-TOTAL_FORMS': '2',
            'host_set-INITIAL_FORMS': '1',
            'host_set-MAX_NUM_FORMS': '0',
            'host_set-0-id': six.text_type(host1.id),
            'host_set-0-hostname': 'tranquility.hub.dal.net',
            'host_set-1-hostname': 'matrix.de.eu.dal.net'
        }
        # To save a formset as new, it needs a new hub instance
        dalnet = Network.objects.create(name="DALnet")
        formset = HostFormSet(data, instance=dalnet, save_as_new=True)
        self.assertTrue(formset.is_valid())
        formset.save()
        self.assertQuerysetEqual(
            dalnet.host_set.order_by("hostname"),
            ["<Host: matrix.de.eu.dal.net>", "<Host: tranquility.hub.dal.net>"]
        )
    def test_initial_data(self):
        # initial= only applies to the extra forms; bound forms keep DB values
        user = User.objects.create(username="bibi", serial=1)
        UserSite.objects.create(user=user, data=7)
        FormSet = inlineformset_factory(User, UserSite, extra=2)
        formset = FormSet(instance=user, initial=[{'data': 41}, {'data': 42}])
        self.assertEqual(formset.forms[0].initial['data'], 7)
        self.assertEqual(formset.extra_forms[0].initial['data'], 41)
        self.assertTrue('value="42"' in formset.extra_forms[1].as_p())
class FormsetTests(TestCase):
    """
    Regression tests for plain model formsets: error-attribute classes,
    initial-data handling, and query-count behavior on save.
    """
    def test_error_class(self):
        '''
        Test the type of Formset and Form error attributes
        '''
        Formset = modelformset_factory(User)
        data = {
            'form-TOTAL_FORMS': '2',
            'form-INITIAL_FORMS': '0',
            'form-MAX_NUM_FORMS': '0',
            'form-0-id': '',
            'form-0-username': 'apollo13',
            'form-0-serial': '1',
            'form-1-id': '',
            'form-1-username': 'apollo13',
            'form-1-serial': '2',
        }
        formset = Formset(data)
        # check if the returned error classes are correct
        # note: formset.errors returns a list as documented
        self.assertTrue(isinstance(formset.errors, list))
        self.assertTrue(isinstance(formset.non_form_errors(), ErrorList))
        for form in formset.forms:
            self.assertTrue(isinstance(form.errors, ErrorDict))
            self.assertTrue(isinstance(form.non_field_errors(), ErrorList))
    def test_initial_data(self):
        # initial= only applies to the extra forms; bound forms keep DB values
        User.objects.create(username="bibi", serial=1)
        Formset = modelformset_factory(User, extra=2)
        formset = Formset(initial=[{'username': 'apollo11'}, {'username': 'apollo12'}])
        self.assertEqual(formset.forms[0].initial['username'], "bibi")
        self.assertEqual(formset.extra_forms[0].initial['username'], "apollo11")
        self.assertTrue('value="apollo12"' in formset.extra_forms[1].as_p())
    def test_extraneous_query_is_not_run(self):
        # Saving one new object should issue exactly one query (the INSERT)
        Formset = modelformset_factory(Network)
        data = {'test-TOTAL_FORMS': '1',
                'test-INITIAL_FORMS': '0',
                'test-MAX_NUM_FORMS': '',
                'test-0-name': 'Random Place', }
        with self.assertNumQueries(1):
            formset = Formset(data, prefix="test")
            formset.save()
class CustomWidget(forms.CharField):
    # Marker subclass used below to verify that Meta.widgets overrides are honored.
    # NOTE(review): despite the name, this subclasses forms.CharField (a Field,
    # not a Widget) - confirm whether forms.TextInput was intended.
    pass
class UserSiteForm(forms.ModelForm):
    """ModelForm for UserSite declaring a widget override for 'data' via Meta.widgets."""
    class Meta:
        model = UserSite
        widgets = {'data': CustomWidget}
class Callback(object):
    """
    Recording formfield callback: remembers every (db_field, kwargs) pair it
    is invoked with, then delegates to db_field.formfield(**kwargs).
    """
    def __init__(self):
        # Chronological record of all calls, inspected by the tests below.
        self.log = []
    def __call__(self, db_field, **kwargs):
        entry = (db_field, kwargs)
        self.log.append(entry)
        return db_field.formfield(**kwargs)
class FormfieldCallbackTests(TestCase):
    """
    Regression for #13095: Using base forms with widgets
    defined in Meta should not raise errors.
    """
    def test_inlineformset_factory_default(self):
        Formset = inlineformset_factory(User, UserSite, form=UserSiteForm)
        form = Formset().forms[0]
        self.assertTrue(isinstance(form['data'].field.widget, CustomWidget))
    def test_modelformset_factory_default(self):
        Formset = modelformset_factory(UserSite, form=UserSiteForm)
        form = Formset().forms[0]
        self.assertTrue(isinstance(form['data'].field.widget, CustomWidget))
    def assertCallbackCalled(self, callback):
        # The factory must invoke the callback once per model field, passing
        # the Meta-declared widget override only for the 'data' field.
        id_field, user_field, data_field = UserSite._meta.fields
        expected_log = [
            (id_field, {}),
            (user_field, {}),
            (data_field, {'widget': CustomWidget}),
        ]
        self.assertEqual(callback.log, expected_log)
    def test_inlineformset_custom_callback(self):
        callback = Callback()
        inlineformset_factory(User, UserSite, form=UserSiteForm,
                              formfield_callback=callback)
        self.assertCallbackCalled(callback)
    def test_modelformset_custom_callback(self):
        callback = Callback()
        modelformset_factory(UserSite, form=UserSiteForm,
                             formfield_callback=callback)
        self.assertCallbackCalled(callback)
class BaseCustomDeleteFormSet(BaseFormSet):
    """
    A formset mix-in that lets a form decide if it's to be deleted.
    Works for BaseFormSets. Also works for ModelFormSets with #14099 fixed.
    form.should_delete() is called. The formset delete field is also suppressed.
    """
    def add_fields(self, form, index):
        # Keep can_delete behavior but strip the visible DELETE checkbox;
        # deletion is decided by form.should_delete() instead.
        super(BaseCustomDeleteFormSet, self).add_fields(form, index)
        self.can_delete = True
        if DELETION_FIELD_NAME in form.fields:
            del form.fields[DELETION_FIELD_NAME]
    def _should_delete_form(self, form):
        # Forms without a should_delete() method are never deleted.
        return hasattr(form, 'should_delete') and form.should_delete()
class FormfieldShouldDeleteFormTests(TestCase):
    """
    Regression for #14099: BaseModelFormSet should use ModelFormSet method _should_delete_form
    """
    class BaseCustomDeleteModelFormSet(BaseModelFormSet, BaseCustomDeleteFormSet):
        """ Model FormSet with CustomDelete MixIn """
    class CustomDeleteUserForm(forms.ModelForm):
        """ A model form with a 'should_delete' method """
        class Meta:
            model = User
        def should_delete(self):
            """ delete form if odd PK """
            return self.instance.pk % 2 != 0
    # Plain formset (honors DELETE field) vs. formset using the mix-in above.
    NormalFormset = modelformset_factory(User, form=CustomDeleteUserForm, can_delete=True)
    DeleteFormset = modelformset_factory(User, form=CustomDeleteUserForm, formset=BaseCustomDeleteModelFormSet)
    # POST payload creating four users; serials chosen so PKs mix odd/even.
    data = {
        'form-TOTAL_FORMS': '4',
        'form-INITIAL_FORMS': '0',
        'form-MAX_NUM_FORMS': '4',
        'form-0-username': 'John',
        'form-0-serial': '1',
        'form-1-username': 'Paul',
        'form-1-serial': '2',
        'form-2-username': 'George',
        'form-2-serial': '3',
        'form-3-username': 'Ringo',
        'form-3-serial': '5',
    }
    # Extra POST keys marking every form for deletion.
    delete_all_ids = {
        'form-0-DELETE': '1',
        'form-1-DELETE': '1',
        'form-2-DELETE': '1',
        'form-3-DELETE': '1',
    }
    def test_init_database(self):
        """ Add test data to database via formset """
        formset = self.NormalFormset(self.data)
        self.assertTrue(formset.is_valid())
        self.assertEqual(len(formset.save()), 4)
    def test_no_delete(self):
        """ Verify base formset doesn't modify database """
        # reload database
        self.test_init_database()
        # pass standard data dict & see none updated
        data = dict(self.data)
        data['form-INITIAL_FORMS'] = 4
        data.update(dict(
            ('form-%d-id' % i, user.pk)
            for i,user in enumerate(User.objects.all())
        ))
        formset = self.NormalFormset(data, queryset=User.objects.all())
        self.assertTrue(formset.is_valid())
        self.assertEqual(len(formset.save()), 0)
        self.assertEqual(len(User.objects.all()), 4)
    def test_all_delete(self):
        """ Verify base formset honors DELETE field """
        # reload database
        self.test_init_database()
        # create data dict with all fields marked for deletion
        data = dict(self.data)
        data['form-INITIAL_FORMS'] = 4
        data.update(dict(
            ('form-%d-id' % i, user.pk)
            for i,user in enumerate(User.objects.all())
        ))
        data.update(self.delete_all_ids)
        formset = self.NormalFormset(data, queryset=User.objects.all())
        self.assertTrue(formset.is_valid())
        self.assertEqual(len(formset.save()), 0)
        self.assertEqual(len(User.objects.all()), 0)
    def test_custom_delete(self):
        """ Verify DeleteFormset ignores DELETE field and uses form method """
        # reload database
        self.test_init_database()
        # Create formset with custom Delete function
        # create data dict with all fields marked for deletion
        data = dict(self.data)
        data['form-INITIAL_FORMS'] = 4
        data.update(dict(
            ('form-%d-id' % i, user.pk)
            for i,user in enumerate(User.objects.all())
        ))
        data.update(self.delete_all_ids)
        formset = self.DeleteFormset(data, queryset=User.objects.all())
        # verify two were deleted
        self.assertTrue(formset.is_valid())
        self.assertEqual(len(formset.save()), 0)
        self.assertEqual(len(User.objects.all()), 2)
        # verify no "odd" PKs left
        odd_ids = [user.pk for user in User.objects.all() if user.pk % 2]
        self.assertEqual(len(odd_ids), 0)
| bsd-3-clause |
geekboxzone/mmallow_external_skia | tools/find_bad_images_in_skps.py | 172 | 7405 | #!/usr/bin/env python
# Copyright 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
This script will take as an argument either a list of skp files or a
set of directories that contains skp files. It will then test each
skp file with the `render_pictures` program. If that program either
spits out any unexpected output or doesn't return 0, I will flag that
skp file as problematic. We then extract all of the embedded images
inside the skp and test each one of them against the
SkImageDecoder::DecodeFile function. Again, we consider any
extraneous output or a bad return value an error. In the event of an
error, we retain the image and print out information about the error.
The output (on stdout) is formatted as a csv document.
A copy of each bad image is left in a directory created by
tempfile.mkdtemp().
"""
import glob
import os
import re
import shutil
import subprocess
import sys
import tempfile
import threading
import test_rendering # skia/trunk/tools. reuse FindPathToProgram()
USAGE = """
Usage:
{command} SKP_FILE [SKP_FILES]
{command} SKP_DIR [SKP_DIRS]\n
Environment variables:
To run multiple worker threads, set NUM_THREADS.
To use a different temporary storage location, set TMPDIR.
"""
def execute_program(args, ignores=None):
    """
    Execute a process and wait for it to complete.  Returns all
    output (stderr and stdout) after (optional) filtering.
    @param args is passed into subprocess.Popen().
    @param ignores (optional) is a list of regular expression strings;
        output lines matching any of them are dropped.
    @returns a tuple (returncode, output)

    NOTE: stderr is merged into stdout (STDOUT redirect below), so the
    returned output interleaves both streams.  This file targets
    Python 2 (see `xrange`/print statements elsewhere), where iterating
    proc.stdout yields str lines.
    """
    if ignores is None:
        ignores = []
    else:
        # Pre-compile patterns once; they are matched per output line.
        ignores = [re.compile(ignore) for ignore in ignores]
    proc = subprocess.Popen(
        args,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT)
    # Keep only lines that no ignore-pattern matches at the start.
    output = ''.join(
        line for line in proc.stdout
        if not any(bool(ignore.match(line)) for ignore in ignores))
    returncode = proc.wait()
    return (returncode, output)
def list_files(paths):
    """
    Accepts a list of directories or filenames on the command line.
    We do not choose to recurse into directories beyond one level.
    """
    class NotAFileException(Exception):
        pass
    for pattern in paths:
        for match in glob.iglob(pattern):  # useful on win32
            if os.path.isfile(match):
                yield match
            elif os.path.isdir(match):
                # One level only: yield plain files directly inside the dir.
                for entry in os.listdir(match):
                    candidate = os.path.join(match, entry)
                    if os.path.isfile(candidate):
                        yield candidate
            else:
                raise NotAFileException('{} is not a file'.format(match))
class BadImageFinder(object):
    """Runs render_pictures over .skp files; for any file that produces
    unexpected output, extracts its embedded images and tests each with
    test_image_decoder, saving failures and emitting one CSV row each."""
    def __init__(self, directory=None):
        # Locate the two Skia binaries via the shared helper.
        self.render_pictures = test_rendering.FindPathToProgram(
            'render_pictures')
        self.test_image_decoder = test_rendering.FindPathToProgram(
            'test_image_decoder')
        assert os.path.isfile(self.render_pictures)
        assert os.path.isfile(self.test_image_decoder)
        if directory is None:
            self.saved_image_dir = tempfile.mkdtemp(prefix='skia_skp_test_')
        else:
            assert os.path.isdir(directory)
            self.saved_image_dir = directory
        # Running total of images that failed to decode cleanly.
        self.bad_image_count = 0
    def process_files(self, skp_files):
        for path in skp_files:
            self.process_file(path)
    def process_file(self, skp_file):
        """Test one .skp; on any anomaly, extract and test its images."""
        assert self.saved_image_dir is not None
        assert os.path.isfile(skp_file)
        args = [self.render_pictures, '--readPath', skp_file]
        # Expected chatter from render_pictures; anything else is suspect.
        ignores = ['^process_in', '^deserializ', '^drawing...', '^Non-defaul']
        returncode, output = execute_program(args, ignores)
        if (returncode == 0) and not output:
            return
        # Something went wrong: dump the embedded images and test each one.
        temp_image_dir = tempfile.mkdtemp(prefix='skia_skp_test___')
        args = [ self.render_pictures, '--readPath', skp_file,
                 '--writePath', temp_image_dir, '--writeEncodedImages']
        subprocess.call(args, stderr=open(os.devnull,'w'),
                        stdout=open(os.devnull,'w'))
        for image_name in os.listdir(temp_image_dir):
            image_path = os.path.join(temp_image_dir, image_name)
            assert(os.path.isfile(image_path))
            args = [self.test_image_decoder, image_path]
            returncode, output = execute_program(args, [])
            if (returncode == 0) and not output:
                # Image decodes cleanly; discard it.
                os.remove(image_path)
                continue
            try:
                shutil.move(image_path, self.saved_image_dir)
            except (shutil.Error,):
                # If this happens, don't stop the entire process,
                # just warn the user.
                os.remove(image_path)
                sys.stderr.write('{0} is a repeat.\n'.format(image_name))
            self.bad_image_count += 1
            # Translate known return codes into human-readable CSV labels.
            if returncode == 2:
                returncode = 'SkImageDecoder::DecodeFile returns false'
            elif returncode == 0:
                returncode = 'extra verbosity'
                assert output
            elif returncode == -11:
                returncode = 'segmentation violation'
            else:
                returncode = 'returncode: {}'.format(returncode)
            # Flatten output to a single CSV-safe field.
            output = output.strip().replace('\n',' ').replace('"','\'')
            suffix = image_name[-3:]
            output_line = '"{0}","{1}","{2}","{3}","{4}"\n'.format(
                returncode, suffix, skp_file, image_name, output)
            sys.stdout.write(output_line)
            sys.stdout.flush()
        os.rmdir(temp_image_dir)
        return
def main(main_argv):
    """Fan the given .skp files/dirs out over NUM_THREADS workers and report
    bad embedded images as CSV on stdout.  Returns a process exit code."""
    if not main_argv or main_argv[0] in ['-h', '-?', '-help', '--help']:
        sys.stderr.write(USAGE.format(command=__file__))
        return 1
    if 'NUM_THREADS' in os.environ:
        number_of_threads = int(os.environ['NUM_THREADS'])
        if number_of_threads < 1:
            number_of_threads = 1
    else:
        number_of_threads = 1
    # Silence known decoder warnings so they don't look like failures.
    os.environ['skia_images_png_suppressDecoderWarnings'] = 'true'
    os.environ['skia_images_jpeg_suppressDecoderWarnings'] = 'true'
    # All workers share one output directory for the saved bad images.
    temp_dir = tempfile.mkdtemp(prefix='skia_skp_test_')
    sys.stderr.write('Directory for bad images: {}\n'.format(temp_dir))
    sys.stdout.write('"Error","Filetype","SKP File","Image File","Output"\n')
    sys.stdout.flush()
    finders = [
        BadImageFinder(temp_dir) for index in xrange(number_of_threads)]
    arguments = [[] for index in xrange(number_of_threads)]
    for index, item in enumerate(list_files(main_argv)):
        ## split up the given targets among the worker threads
        arguments[index % number_of_threads].append(item)
    threads = [
        threading.Thread(
            target=BadImageFinder.process_files, args=(finder,argument))
        for finder, argument in zip(finders, arguments)]
    for thread in threads:
        thread.start()
    for thread in threads:
        thread.join()
    number = sum(finder.bad_image_count for finder in finders)
    sys.stderr.write('Number of bad images found: {}\n'.format(number))
    return 0
# Script entry point: exit status is main()'s return value.
if __name__ == '__main__':
    exit(main(sys.argv[1:]))
# LocalWords:  skp stdout csv
| bsd-3-clause |
wilvk/ansible | test/units/modules/network/eos/eos_module.py | 73 | 3143 | # (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
import os
from units.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase
# Directory holding the JSON/text fixture files used by these tests.
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
# Cache of already-loaded fixtures, keyed by absolute file path.
fixture_data = {}
def load_fixture(name):
    """Return the contents of fixture file *name*.

    The file is parsed as JSON when possible; otherwise the raw text is
    returned (e.g. captured CLI output).  Results are memoized in
    ``fixture_data`` so each file is read at most once.
    """
    path = os.path.join(fixture_path, name)
    if path in fixture_data:
        return fixture_data[path]
    with open(path) as f:
        data = f.read()
    try:
        data = json.loads(data)
    except ValueError:
        # Not JSON -- keep the raw string.  (A bare `except:` here would
        # also swallow SystemExit/KeyboardInterrupt.)
        pass
    fixture_data[path] = data
    return data
class TestEosModule(ModuleTestCase):
    """Shared base for eos module tests: runs the module and checks the
    commands it would send over cli or eapi transport."""
    def execute_module(self, failed=False, changed=False, commands=None, inputs=None, sort=True, defaults=False, transport='cli'):
        """Run self.module.main() and assert on failure/changed state and,
        optionally, on the exact (or sorted) set of generated commands."""
        self.load_fixtures(commands, transport=transport)
        if failed:
            result = self.failed()
            self.assertTrue(result['failed'], result)
        else:
            result = self.changed(changed)
            self.assertEqual(result['changed'], changed, result)
        if commands is not None:
            if transport == 'eapi':
                # eapi results are dicts: split out 'cmd' and optional 'input'.
                cmd = []
                value = []
                for item in result['commands']:
                    cmd.append(item['cmd'])
                    if 'input' in item:
                        value.append(item['input'])
                if sort:
                    self.assertEqual(sorted(commands), sorted(cmd), cmd)
                else:
                    self.assertEqual(commands, cmd, cmd)
                if inputs:
                    self.assertEqual(inputs, value, value)
            else:
                if sort:
                    self.assertEqual(sorted(commands), sorted(result['commands']), result['commands'])
                else:
                    self.assertEqual(commands, result['commands'], result['commands'])
        return result
    def failed(self):
        # Module exit via fail_json is surfaced as AnsibleFailJson.
        with self.assertRaises(AnsibleFailJson) as exc:
            self.module.main()
        result = exc.exception.args[0]
        self.assertTrue(result['failed'], result)
        return result
    def changed(self, changed=False):
        # Module exit via exit_json is surfaced as AnsibleExitJson.
        with self.assertRaises(AnsibleExitJson) as exc:
            self.module.main()
        result = exc.exception.args[0]
        self.assertEqual(result['changed'], changed, result)
        return result
    def load_fixtures(self, commands=None, transport='cli'):
        # Hook for subclasses to stub connection calls with fixture data.
        pass
| gpl-3.0 |
rgerkin/inspyred | examples/advanced/tsp_ec_example.py | 4 | 1716 | from random import Random
from time import time
import math
import inspyred
def main(prng=None, display=False):
    """Solve a 10-city TSP with a plain evolutionary computation.

    prng: optional random.Random instance (seeded from time() if omitted).
    display: when True, print the best tour found and its length.
    Returns the EvolutionaryComputation object after evolution.
    """
    if prng is None:
        prng = Random()
        prng.seed(time())
    # City coordinates; pairwise Euclidean distances form the weight matrix.
    points = [(110.0, 225.0), (161.0, 280.0), (325.0, 554.0), (490.0, 285.0),
              (157.0, 443.0), (283.0, 379.0), (397.0, 566.0), (306.0, 360.0),
              (343.0, 110.0), (552.0, 199.0)]
    weights = [[0 for _ in range(len(points))] for _ in range(len(points))]
    for i, p in enumerate(points):
        for j, q in enumerate(points):
            weights[i][j] = math.sqrt((p[0] - q[0])**2 + (p[1] - q[1])**2)
    problem = inspyred.benchmarks.TSP(weights)
    # Permutation-preserving operators: PMX crossover + inversion mutation.
    ea = inspyred.ec.EvolutionaryComputation(prng)
    ea.selector = inspyred.ec.selectors.tournament_selection
    ea.variator = [inspyred.ec.variators.partially_matched_crossover,
                   inspyred.ec.variators.inversion_mutation]
    ea.replacer = inspyred.ec.replacers.generational_replacement
    ea.terminator = inspyred.ec.terminators.generation_termination
    final_pop = ea.evolve(generator=problem.generator,
                          evaluator=problem.evaluator,
                          bounder=problem.bounder,
                          maximize=problem.maximize,
                          pop_size=100,
                          max_generations=50,
                          tournament_size=5,
                          num_selected=100,
                          num_elites=1)
    if display:
        best = max(ea.population)
        # TSP benchmark fitness is reciprocal tour length, hence 1/fitness.
        print('Best Solution: {0}: {1}'.format(str(best.candidate), 1/best.fitness))
    return ea
# Run the example and show the result when executed as a script.
if __name__ == '__main__':
    main(display=True)
| gpl-3.0 |
waseem18/oh-mainline | vendor/packages/python-social-auth/social/tests/backends/test_yahoo.py | 76 | 2535 | import json
import requests
from httpretty import HTTPretty
from social.p3 import urlencode
from social.tests.backends.oauth import OAuth1Test
class YahooOAuth1Test(OAuth1Test):
    """OAuth1 login flow tests for the Yahoo backend, with HTTP responses
    stubbed via HTTPretty (fixture bodies defined as class attributes)."""
    backend_path = 'social.backends.yahoo.YahooOAuth'
    user_data_url = 'https://social.yahooapis.com/v1/user/a-guid/profile?' \
                    'format=json'
    expected_username = 'foobar'
    access_token_body = json.dumps({
        'access_token': 'foobar',
        'token_type': 'bearer'
    })
    request_token_body = urlencode({
        'oauth_token_secret': 'foobar-secret',
        'oauth_token': 'foobar',
        'oauth_callback_confirmed': 'true'
    })
    # Canned response mapping the session to Yahoo GUID 'a-guid'.
    guid_body = json.dumps({
        'guid': {
            'uri': 'https://social.yahooapis.com/v1/me/guid',
            'value': 'a-guid'
        }
    })
    # Canned profile payload; the first (primary) email is the expected one.
    user_data_body = json.dumps({
        'profile': {
            'bdRestricted': True,
            'memberSince': '2007-12-11T14:40:30Z',
            'image': {
                'width': 192,
                'imageUrl': 'http://l.yimg.com/dh/ap/social/profile/'
                            'profile_b192.png',
                'size': '192x192',
                'height': 192
            },
            'created': '2013-03-18T04:15:08Z',
            'uri': 'https://social.yahooapis.com/v1/user/a-guid/profile',
            'isConnected': False,
            'profileUrl': 'http://profile.yahoo.com/a-guid',
            'guid': 'a-guid',
            'nickname': 'foobar',
            'emails': [{
                'handle': 'foobar@yahoo.com',
                'id': 1,
                'primary': True,
                'type': 'HOME',
            }, {
                'handle': 'foobar@email.com',
                'id': 2,
                'type': 'HOME',
            }],
        }
    })
    def test_login(self):
        # The login flow additionally fetches the GUID endpoint; stub it.
        HTTPretty.register_uri(
            HTTPretty.GET,
            'https://social.yahooapis.com/v1/me/guid?format=json',
            status=200,
            body=self.guid_body
        )
        self.do_login()
    def test_partial_pipeline(self):
        self.do_partial_pipeline()
    def test_get_user_details(self):
        # get_user_details must pick the primary email from the profile.
        HTTPretty.register_uri(
            HTTPretty.GET,
            self.user_data_url,
            status=200,
            body=self.user_data_body
        )
        response = requests.get(self.user_data_url)
        user_details = self.backend.get_user_details(
            response.json()['profile']
        )
        self.assertEqual(user_details['email'], 'foobar@yahoo.com')
| agpl-3.0 |
bmcfee/gordon | scripts/mbrainz_import.py | 1 | 6643 | #! /usr/bin/env python
# Copyright (C) 2010 Douglas Eck
#
# This file is part of Gordon.
#
# Gordon is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Gordon is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Gordon. If not, see <http://www.gnu.org/licenses/>.
"""Set up a local copy of the MusicBrainz database"""
import os, pg, sys, socket
import gordon.db.config as config
from gordon import make_subdirs
def logged_on_dbhost():
    """Return True when this process is running on the configured DB host.

    A DEF_DBHOST of 'localhost' always counts as local; otherwise the
    fully-qualified names of this machine and the configured host must match.
    """
    return (config.DEF_DBHOST.lower() == 'localhost' or
            socket.getfqdn() == socket.getfqdn(config.DEF_DBHOST))
def do_perms() :
    """Grant SELECT on all public musicbrainz_db tables to DEF_DBUSER.

    Must run as DEF_DBSUPERUSER on DEF_DBHOST (Python 2 script: note the
    `<>` operator and print statements)."""
    #this allows us to import using the more powerful brams account but then give the dbs over to gordon
    if not logged_on_dbhost() or os.environ['USER']<>config.DEF_DBSUPERUSER:
        raise ValueError('This must be run as user %s from host %s' % (config.DEF_DBSUPERUSER,config.DEF_DBHOST))
    dbmb = pg.connect('musicbrainz_db', user=config.DEF_DBSUPERUSER)
    result = dbmb.query("select tablename from pg_tables where schemaname = 'public'").getresult()
    for r in result :
        table = r[0]
        # PendingData is owned by the replication machinery; leave it alone.
        if table=='PendingData' :
            continue
        #q = 'alter table %s owner to "%s";' % (table, config.DEF_DBUSER)
        q = 'grant select on %s to "%s";' % (table, config.DEF_DBUSER)
        print q
        try :
            dbmb.query(q)
        except :
            # Best-effort: report and continue with the remaining tables.
            print 'Failed'
def do_import() :
    """import tables. Does this need to have existing tables dropped first? I think so...

    Drops both musicbrainz databases, then runs the MusicBrainz InitDb.pl
    importer against the latest downloaded dump.  Restores the original cwd.
    """
    currdir=os.getcwd()
    #first drop tables if they exist
    cmd="dropdb 'musicbrainz_db'"
    print 'Executing',cmd
    os.system(cmd)
    cmd="dropdb 'musicbrainz_db_raw'"
    print 'Executing',cmd
    os.system(cmd)
    svnpath='%s/mbrainz/svn' % config.DEF_GORDON_DIR
    reppath='%s/%s' % (svnpath,'mb_server')
    # InitDb.pl must be invoked from inside the mb_server checkout.
    os.chdir(reppath)
    cmd='./admin/InitDb.pl --createdb --import ../../mbdump/latest/mb*bz2 --echo'
    print 'Executing',cmd
    os.system(cmd)
    os.chdir(currdir)
def do_svn() :
    """Gets fresh svn repo to DEF_GORDON_DIR/mbrainz/svn/mb_server from the appropriate branch (NOT TRUNK!)

    If a checkout exists, runs `svn update`; otherwise prints manual
    setup instructions and exits (the checkout command is commented out).
    """
    print 'TODO'
    print 'If you do this you will need to edit cgi-bin/DBDefs.pl in the following way:'
    print 'Replace musicbrainz_user with %s everywhere' % config.DEF_DBUSER
    print 'Replace postgres with your superuser account if you have replaced postgres. E.g I use eckdoug'
    print 'This should be automated here!'
    print 'E.g replace username => "postgres", with username => "eckdoug",'
    print 'Also set the password for user %s to be the password' % config.DEF_DBUSER
    print ''
    print 'Also if you are running with local=utf, you need to edit the appropriate part of InitDb.pl to skip'
    print 'The test for locale.'
    print 'E.g. replace in file admin/InitDb.pl: '
    print ' unless ($locale eq "C")'
    print 'with'
    print ' unless (1)'
    currdir=os.getcwd()
    svnpath='%s/mbrainz/svn' % config.DEF_GORDON_DIR
    reppath='%s/%s' % (svnpath,'mb_server')
    if os.path.exists(reppath):
        os.chdir(reppath)
        print 'Doing svn update'
        os.system('svn update')
    else :
        os.chdir(svnpath)
        #os.system('svn co http://svn.musicbrainz.org/mb_server/trunk mb_server')
        print 'We just rebuilt the musicbrainz svn. you will need to modify some config files. See:'
        print 'Most should be in lib/DBDefs.pm.default lib/DBDefs.pm'
        sys.exit(0)
    os.chdir(currdir)
def do_ftp(site='ftp://ftp.musicbrainz.org/pub/musicbrainz/data/fullexport',force=False) :
    """Imports fresh database files to DEF_GORDON_DIR/mbrainz/mbdump/latest

    NOTE(review): the `site` parameter is never used -- the FTP host and
    path are hard-coded below.  Skips the download when the latest dump
    directory already exists, unless force=True.
    """
    import ftplib
    ftp=ftplib.FTP('ftp.musicbrainz.org')
    ftp.login('anonymous','')
    ftp.cwd('pub/musicbrainz/data/fullexport')
    for f in ftp.nlst() :
        if f.startswith('latest-is') :
            # 'latest-is-YYYYMMDD...' -> dump directory name after prefix.
            dr=f[10:]
            print 'Latest is',dr
            testdir=os.path.join(config.DEF_GORDON_DIR,'mbrainz','mbdump',dr)
            if os.path.exists(testdir) :
                print 'We already have this dump... skipping. Set force=True to force download'
                if not force :
                    ftp.close()
                    return
    #handle for our writing
    def _ftp_handle(block) :
        fout.write(block)
        print ".",
    #we should be in the right directory now
    ftp.cwd(dr)
    for f in ftp.nlst() :
        f=f.strip()
        # Open the file for writing in binary mode
        fnout=os.path.join(config.DEF_GORDON_DIR,'mbrainz','mbdump',dr,f)
        make_subdirs(fnout)
        print 'Opening local file ' + fnout
        fout= open(fnout, 'wb')
        print 'downloading',f
        ftp.retrbinary('RETR ' + f, _ftp_handle)
        print 'Done downloading',f
        fout.close()
    ftp.close()
    currdir=os.getcwd()
    #create symlink from latest to our new dump
    os.chdir('%s/mbrainz/mbdump' % config.DEF_GORDON_DIR)
    try :
        os.system('rm latest')
    except :
        pass
    os.system('ln -s %s latest' % dr)
    os.chdir(currdir)
def mbrainz_import():
    """Full rebuild: download dump, refresh svn, import, fix permissions.

    Refuses to run unless executed as DEF_DBSUPERUSER on DEF_DBHOST.
    """
    if not logged_on_dbhost() or os.environ['USER']<>config.DEF_DBSUPERUSER:
        raise ValueError('This must be run as user %s from host %s' % (config.DEF_DBSUPERUSER,config.DEF_DBHOST))
    do_ftp()
    do_svn()
    #raw_input('commented out do_ftp and do_svn temporarily!')
    do_import()
    do_perms()
def die_with_usage() :
    """Print usage/requirements for this script and exit(0)."""
    print 'This program rebuilds the musicbrainz databases.'
    print 'It loads the dumps to %s/mbrainz/mbdump/latest' % config.DEF_GORDON_DIR
    print 'It then updates the svn code in %s/mbrainz/svn' % config.DEF_GORDON_DIR
    print 'It then does necessary data imports from the dumps'
    print ('It then sets appropriate permissions to allow user %s to read the databases'
           % config.DEF_DBUSER)
    print ''
    print 'To run code:'
    print 'Be sure you are logged into %s' % config.DEF_DBHOST
    print 'Be sure you are user %s' % config.DEF_DBSUPERUSER
    print 'mbrainz_import.py go'
    sys.exit(0)
# Entry point: any extra argv token (e.g. 'go') triggers the rebuild.
if __name__=='__main__' :
    if len(sys.argv)<2 :
        die_with_usage()
    mbrainz_import()
| gpl-3.0 |
mcardillo55/django | tests/user_commands/tests.py | 205 | 7165 | import os
from django.apps import apps
from django.core import management
from django.core.management import BaseCommand, CommandError, find_commands
from django.core.management.utils import find_command, popen_wrapper
from django.db import connection
from django.test import SimpleTestCase, ignore_warnings, override_settings
from django.test.utils import captured_stderr, captured_stdout, extend_sys_path
from django.utils import translation
from django.utils._os import upath
from django.utils.deprecation import RemovedInDjango110Warning
from django.utils.six import StringIO
# A minimal set of apps to avoid system checks running on all apps.
@override_settings(
    INSTALLED_APPS=[
        'django.contrib.auth',
        'django.contrib.contenttypes',
        'user_commands',
    ],
)
class CommandTests(SimpleTestCase):
    """Tests for call_command()/execute_from_command_line behavior using the
    toy management commands ('dance', 'hal', ...) in this test app."""
    def test_command(self):
        out = StringIO()
        management.call_command('dance', stdout=out)
        self.assertIn("I don't feel like dancing Rock'n'Roll.\n", out.getvalue())
    def test_command_style(self):
        out = StringIO()
        management.call_command('dance', style='Jive', stdout=out)
        self.assertIn("I don't feel like dancing Jive.\n", out.getvalue())
        # Passing options as arguments also works (thanks argparse)
        management.call_command('dance', '--style', 'Jive', stdout=out)
        self.assertIn("I don't feel like dancing Jive.\n", out.getvalue())
    def test_language_preserved(self):
        # call_command must not clobber the active translation.
        out = StringIO()
        with translation.override('fr'):
            management.call_command('dance', stdout=out)
            self.assertEqual(translation.get_language(), 'fr')
    def test_explode(self):
        """ Test that an unknown command raises CommandError """
        self.assertRaises(CommandError, management.call_command, ('explode',))
    def test_system_exit(self):
        """ Exception raised in a command should raise CommandError with
        call_command, but SystemExit when run from command line
        """
        with self.assertRaises(CommandError):
            management.call_command('dance', example="raise")
        with captured_stderr() as stderr, self.assertRaises(SystemExit):
            management.ManagementUtility(['manage.py', 'dance', '--example=raise']).execute()
        self.assertIn("CommandError", stderr.getvalue())
    def test_deactivate_locale_set(self):
        # Deactivate translation when set to true
        out = StringIO()
        with translation.override('pl'):
            management.call_command('leave_locale_alone_false', stdout=out)
            self.assertEqual(out.getvalue(), "")
    def test_configured_locale_preserved(self):
        # Leaves locale from settings when set to false
        out = StringIO()
        with translation.override('pl'):
            management.call_command('leave_locale_alone_true', stdout=out)
            self.assertEqual(out.getvalue(), "pl\n")
    def test_find_command_without_PATH(self):
        """
        find_command should still work when the PATH environment variable
        doesn't exist (#22256).
        """
        current_path = os.environ.pop('PATH', None)
        try:
            self.assertIsNone(find_command('_missing_'))
        finally:
            if current_path is not None:
                os.environ['PATH'] = current_path
    def test_discover_commands_in_eggs(self):
        """
        Test that management commands can also be loaded from Python eggs.
        """
        egg_dir = '%s/eggs' % os.path.dirname(upath(__file__))
        egg_name = '%s/basic.egg' % egg_dir
        with extend_sys_path(egg_name):
            with self.settings(INSTALLED_APPS=['commandegg']):
                cmds = find_commands(os.path.join(apps.get_app_config('commandegg').path, 'management'))
        self.assertEqual(cmds, ['eggcommand'])
    def test_call_command_option_parsing(self):
        """
        When passing the long option name to call_command, the available option
        key is the option dest name (#22985).
        """
        out = StringIO()
        management.call_command('dance', stdout=out, opt_3=True)
        self.assertIn("option3", out.getvalue())
        self.assertNotIn("opt_3", out.getvalue())
        self.assertNotIn("opt-3", out.getvalue())
    @ignore_warnings(category=RemovedInDjango110Warning)
    def test_optparse_compatibility(self):
        """
        optparse should be supported during Django 1.8/1.9 releases.
        """
        out = StringIO()
        management.call_command('optparse_cmd', stdout=out)
        self.assertEqual(out.getvalue(), "All right, let's dance Rock'n'Roll.\n")
        # Simulate command line execution
        with captured_stdout() as stdout, captured_stderr():
            management.execute_from_command_line(['django-admin', 'optparse_cmd'])
        self.assertEqual(stdout.getvalue(), "All right, let's dance Rock'n'Roll.\n")
    def test_calling_a_command_with_only_empty_parameter_should_ends_gracefully(self):
        out = StringIO()
        management.call_command('hal', "--empty", stdout=out)
        self.assertIn("Dave, I can't do that.\n", out.getvalue())
    def test_calling_command_with_app_labels_and_parameters_should_be_ok(self):
        out = StringIO()
        management.call_command('hal', 'myapp', "--verbosity", "3", stdout=out)
        self.assertIn("Dave, my mind is going. I can feel it. I can feel it.\n", out.getvalue())
    def test_calling_command_with_parameters_and_app_labels_at_the_end_should_be_ok(self):
        out = StringIO()
        management.call_command('hal', "--verbosity", "3", "myapp", stdout=out)
        self.assertIn("Dave, my mind is going. I can feel it. I can feel it.\n", out.getvalue())
    def test_calling_a_command_with_no_app_labels_and_parameters_should_raise_a_command_error(self):
        out = StringIO()
        with self.assertRaises(CommandError):
            management.call_command('hal', stdout=out)
    def test_output_transaction(self):
        # output_transaction commands wrap their SQL in BEGIN/COMMIT.
        out = StringIO()
        management.call_command('transaction', stdout=out, no_color=True)
        output = out.getvalue().strip()
        self.assertTrue(output.startswith(connection.ops.start_transaction_sql()))
        self.assertTrue(output.endswith(connection.ops.end_transaction_sql()))
    def test_call_command_no_checks(self):
        """
        By default, call_command should not trigger the check framework, unless
        specifically asked.
        """
        self.counter = 0
        def patched_check(self_, **kwargs):
            self.counter = self.counter + 1
        saved_check = BaseCommand.check
        BaseCommand.check = patched_check
        try:
            management.call_command("dance", verbosity=0)
            self.assertEqual(self.counter, 0)
            management.call_command("dance", verbosity=0, skip_checks=False)
            self.assertEqual(self.counter, 1)
        finally:
            # Always restore the real check() even if an assertion fails.
            BaseCommand.check = saved_check
class UtilsTests(SimpleTestCase):
    def test_no_existent_external_program(self):
        # popen_wrapper must surface a missing executable as CommandError.
        self.assertRaises(CommandError, popen_wrapper, ['a_42_command_that_doesnt_exist_42'])
| bsd-3-clause |
totallybradical/temp_servo2 | tests/wpt/web-platform-tests/tools/pytest/doc/en/genapi.py | 203 | 1131 | import textwrap
import inspect
class Writer:
    """Context manager that writes reST method documentation for a class
    into '<clsname>.api' (Python 2 file: print statements below)."""
    def __init__(self, clsname):
        self.clsname = clsname
    def __enter__(self):
        self.file = open("%s.api" % self.clsname, "w")
        return self
    def __exit__(self, *args):
        # Close unconditionally; exceptions (if any) propagate.
        self.file.close()
        print "wrote", self.file.name
    def line(self, line):
        self.file.write(line+"\n")
    def docmethod(self, method):
        """Emit a '.. py:method::' directive plus the wrapped docstring."""
        # Collapse the docstring to a single whitespace-normalized line.
        doc = " ".join(method.__doc__.split())
        indent = " "
        w = textwrap.TextWrapper(initial_indent=indent,
            subsequent_indent=indent)
        spec = inspect.getargspec(method)
        # Drop 'self' from the rendered signature.
        del spec.args[0]
        self.line(".. py:method:: " + method.__name__ +
                  inspect.formatargspec(*spec))
        self.line("")
        self.line(w.fill(doc))
        self.line("")
# Funcarg factory (old pytest fixture spelling): generating the 'a' argument
# has the side effect of writing request-method docs to 'request.api'.
def pytest_funcarg__a(request):
    with Writer("request") as writer:
        writer.docmethod(request.getfuncargvalue)
        writer.docmethod(request.cached_setup)
        writer.docmethod(request.addfinalizer)
        writer.docmethod(request.applymarker)
# Dummy test whose only purpose is to request the 'a' funcarg above,
# triggering the doc generation.
def test_hello(a):
    pass
| mpl-2.0 |
tboyce021/home-assistant | tests/components/config/test_automation.py | 1 | 5030 | """Test Automation config panel."""
import json
from homeassistant.bootstrap import async_setup_component
from homeassistant.components import config
from tests.async_mock import patch
from tests.components.blueprint.conftest import stub_blueprint_populate # noqa
async def test_get_device_config(hass, hass_client):
    """Test getting device config."""
    with patch.object(config, "SECTIONS", ["automation"]):
        await async_setup_component(hass, "config", {})
    client = await hass_client()
    def mock_read(path):
        """Mock reading data."""
        return [{"id": "sun"}, {"id": "moon"}]
    # GET by id must return only the matching automation entry.
    with patch("homeassistant.components.config._read", mock_read):
        resp = await client.get("/api/config/automation/config/moon")
    assert resp.status == 200
    result = await resp.json()
    assert result == {"id": "moon"}
async def test_update_device_config(hass, hass_client):
    """Test updating device config."""
    with patch.object(config, "SECTIONS", ["automation"]):
        await async_setup_component(hass, "config", {})
    client = await hass_client()
    orig_data = [{"id": "sun"}, {"id": "moon"}]
    def mock_read(path):
        """Mock reading data."""
        return orig_data
    written = []
    def mock_write(path, data):
        """Mock writing data."""
        written.append(data)
    with patch("homeassistant.components.config._read", mock_read), patch(
        "homeassistant.components.config._write", mock_write
    ), patch("homeassistant.config.async_hass_config_yaml", return_value={}):
        resp = await client.post(
            "/api/config/automation/config/moon",
            data=json.dumps({"trigger": [], "action": [], "condition": []}),
        )
    assert resp.status == 200
    result = await resp.json()
    assert result == {"result": "ok"}
    # The updated entry keeps a canonical key order: id first.
    assert list(orig_data[1]) == ["id", "trigger", "condition", "action"]
    assert orig_data[1] == {"id": "moon", "trigger": [], "condition": [], "action": []}
    # The whole (mutated) config list is written back once.
    assert written[0] == orig_data
async def test_bad_formatted_automations(hass, hass_client):
    """Test that we handle automations without ID."""
    with patch.object(config, "SECTIONS", ["automation"]):
        await async_setup_component(hass, "config", {})
    client = await hass_client()
    orig_data = [
        {
            # No ID
            "action": {"event": "hello"}
        },
        {"id": "moon"},
    ]
    def mock_read(path):
        """Mock reading data."""
        return orig_data
    written = []
    def mock_write(path, data):
        """Mock writing data."""
        written.append(data)
    with patch("homeassistant.components.config._read", mock_read), patch(
        "homeassistant.components.config._write", mock_write
    ), patch("homeassistant.config.async_hass_config_yaml", return_value={}):
        resp = await client.post(
            "/api/config/automation/config/moon",
            data=json.dumps({"trigger": [], "action": [], "condition": []}),
        )
        await hass.async_block_till_done()
    assert resp.status == 200
    result = await resp.json()
    assert result == {"result": "ok"}
    # Verify ID added to orig_data
    assert "id" in orig_data[0]
    assert orig_data[1] == {"id": "moon", "trigger": [], "condition": [], "action": []}
async def test_delete_automation(hass, hass_client):
    """Test deleting an automation."""
    ent_reg = await hass.helpers.entity_registry.async_get_registry()

    # Two automations -> two entities in the registry.
    assert await async_setup_component(
        hass,
        "automation",
        {
            "automation": [
                {
                    "id": "sun",
                    "trigger": {"platform": "event", "event_type": "test_event"},
                    "action": {"service": "test.automation"},
                },
                {
                    "id": "moon",
                    "trigger": {"platform": "event", "event_type": "test_event"},
                    "action": {"service": "test.automation"},
                },
            ]
        },
    )
    assert len(ent_reg.entities) == 2

    with patch.object(config, "SECTIONS", ["automation"]):
        assert await async_setup_component(hass, "config", {})

    client = await hass_client()
    stored = [{"id": "sun"}, {"id": "moon"}]
    saved = []

    def fake_read(path):
        """Stand-in for the config file reader."""
        return stored

    def fake_write(path, data):
        """Stand-in for the config file writer."""
        saved.append(data)

    with patch("homeassistant.components.config._read", fake_read), patch(
        "homeassistant.components.config._write", fake_write
    ), patch("homeassistant.config.async_hass_config_yaml", return_value={}):
        resp = await client.delete("/api/config/automation/config/sun")
        await hass.async_block_till_done()

    assert resp.status == 200
    body = await resp.json()
    assert body == {"result": "ok"}

    # Exactly one write happened, and only "moon" survived ...
    assert len(saved) == 1
    assert saved[0][0]["id"] == "moon"
    # ... and its entity is the only one left in the registry.
    assert len(ent_reg.entities) == 1
| apache-2.0 |
mbeyeler/pulse2percept | examples/implants/plot_custom_electrode_array.py | 1 | 6434 | """
============================================================================
Creating your own electrode array
============================================================================
This example shows how to create a new
:py:class:`~pulse2percept.implants.ElectrodeArray` object.
As the base class for all electrode arrays in pulse2percept, the
:py:class:`~pulse2percept.implants.ElectrodeArray` class provides a blue print
for the functionality that every electrode array should have.
First and foremost, an :py:class:`~pulse2percept.implants.ElectrodeArray`
contains a collection of :py:class:`~pulse2percept.implants.Electrode` objects,
and new electrodes can be added via the
:py:func:`~pulse2percept.implants.ElectrodeArray.add_electrodes` method.
In addition, individual electrodes in the array can be accessed by indexing
using either their pre-assigned names (a string) or their place in the array
(integer).
Arranging electrodes in a circle
--------------------------------
In this example, we want to build a new type of electrode array that arranges
all of its electrodes in a circle.
To do this, we need to create a new class ``CircleElectrodeArray`` that is
a child of :py:class:`~pulse2percept.implants.ElectrodeArray`:
"""
##############################################################################
# .. code-block:: python
#
# class CircleElectrodeArray(ElectrodeArray):
# """Electrodes arranged in a circle"""
# ...
#
# This way, the ``CircleElectrodeArray`` class can access all public methods
# of :py:class:`~pulse2percept.implants.ElectrodeArray`.
#
# The constructor then has the job of creating all electrodes in the array
# and placing them at the appropriate location; for example, by using the
# :py:func:`~pulse2percept.implants.ElectrodeArray.add_electrodes` method.
#
# The constructor of the class should accept a number of arguments:
#
# - ``n_electrodes``: how many electrodes to arrange in a circle
# - ``radius``: the radius of the circle
# - ``x_center``: the x-coordinate of the center of the circle
# - ``y_center``: the y-coordinate of the center of the circle
#
# For simplicity, we will use :py:class:`~pulse2percept.implants.DiskElectrode`
# objects of a given radius (100um), although it would be relatively straightforward
# to allow the user to choose the electrode type.
from pulse2percept.implants import ElectrodeArray, DiskElectrode
import collections as coll
import numpy as np
class CircleElectrodeArray(ElectrodeArray):
    """Disk electrodes arranged evenly on the circumference of a circle."""

    def __init__(self, n_electrodes, radius, x_center, y_center):
        """Electrodes arranged in a circle

        Electrodes will be named 'A0', 'A1', ...

        Parameters
        ----------
        n_electrodes : int
            how many electrodes to arrange in a circle
        radius : float
            the radius of the circle (microns)
        x_center, y_center : float
            the x,y coordinates of the center of the circle (microns),
            where (0,0) is the center of the fovea
        """
        # The constructor's job is to create the electrodes; start with an
        # empty (ordered) collection:
        self._electrodes = coll.OrderedDict()
        # Equal angular spacing between neighboring electrodes:
        step = 2.0 * np.pi / n_electrodes
        for idx in range(n_electrodes):
            theta = step * idx
            x = x_center + np.cos(theta) * radius
            y = y_center + np.sin(theta) * radius
            # Each electrode is a 100-um disk at z=0, named 'A<idx>':
            self.add_electrode('A' + str(idx), DiskElectrode(x, y, 0, 100))
##############################################################################
# Using the CircleElectrodeArray class
# ------------------------------------
#
# To use the new class, we need to specify all input arguments and pass them
# to the constructor:
# Geometry of the example array: ten electrodes on a 1-mm-radius circle
# centered on the fovea (0, 0).
n_electrodes = 10
radius = 1000  # radius in microns
x_center = 0  # x-coordinate of circle center (microns)
y_center = 0  # y-coordinate of circle center (microns)
# Create a new instance of type CircleElectrodeArray:
earray = CircleElectrodeArray(n_electrodes, radius, x_center, y_center)
print(earray)
##############################################################################
# Individual electrodes can be accessed by their name or integer index:
earray[0]
earray['A0']
# Both access styles return the same electrode object:
earray[0] == earray['A0']
##############################################################################
# Visualizing the electrode array
# -------------------------------
#
# Electrode arrays come with their own plotting method:
earray.plot()
##############################################################################
# By default, the method will use the current Axes object or create a new one
# if none exists. Alternatively, you can specify ``ax=`` yourself.
#
# Extending the CircleElectrodeArray class
# ----------------------------------------
#
# Similar to extending :py:class:`~pulse2percept.implants.ElectrodeArray` for
# our purposes, we can extend ``CircleElectrodeArray``.
#
# To add new functionality, we could simply edit the above constructor.
# However, nobody stops us from creating our own hierarchy of classes.
#
# For example, we could build a ``FlexibleCircleElectrodeArray`` that allows us
# to remove individual electrodes from the array:
class FlexibleCircleElectrodeArray(CircleElectrodeArray):
    """A CircleElectrodeArray that also supports removing electrodes."""

    def remove(self, name):
        """Delete an electrode from the array

        Parameters
        ----------
        name : int, string
            the name of the electrode to be removed
        """
        # NOTE(review): electrodes were stored on `self._electrodes` in the
        # parent constructor; presumably `self.electrodes` is a property
        # exposing that dict — verify against ElectrodeArray.
        del self.electrodes[name]
##############################################################################
# Note how we didn't even specify a constructor.
# By default, the class inherits all (public) functionality from its parent,
# including its constructor. So the following line will create the same
# electrode array as above:
# Same geometry parameters as before, but with the removable-electrode class:
flex_earray = FlexibleCircleElectrodeArray(
    n_electrodes, radius, x_center, y_center)
print(flex_earray)
##############################################################################
# A single electrode can be removed by passing its name to the ``remove``
# method:
# Remove electrode 'A1' (electrodes were named 'A0', 'A1', ... on creation):
flex_earray.remove('A1')
# Replot the implant (now missing electrode 'A1'):
flex_earray.plot()
| bsd-3-clause |
mbr0wn/gnuradio | gr-digital/examples/narrowband/benchmark_add_channel.py | 6 | 3590 | #!/usr/bin/env python
#
# Copyright 2010,2011 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
#
from gnuradio import channels, gr
from gnuradio import blocks
from gnuradio import eng_notation
from gnuradio.eng_option import eng_option
from optparse import OptionParser
import random, math, sys
class my_top_block(gr.top_block):
    """Flowgraph: file source -> simulated channel -> phase rotation -> file sink.

    Reads complex samples from ``ifile``, impairs them with AWGN plus
    frequency/timing offsets, applies a static phase rotation, and writes
    the result to ``ofile``.
    """

    def __init__(self, ifile, ofile, options):
        gr.top_block.__init__(self)

        snr_linear = 10.0**(options.snr / 10.0)
        freq_offset = options.frequency_offset
        timing_offset = options.time_offset
        phase_rad = options.phase_offset*(math.pi / 180.0)

        # Noise voltage that yields the requested SNR for the given
        # transmit amplitude:
        signal_power = abs(options.tx_amplitude)**2
        noise_voltage = math.sqrt(signal_power / snr_linear)

        self.src = blocks.file_source(gr.sizeof_gr_complex, ifile)
        self.channel = channels.channel_model(
            noise_voltage, freq_offset, timing_offset,
            noise_seed=-random.randint(0, 100000))
        # Static phase offset as multiplication by e^{j*phase}:
        self.phase = blocks.multiply_const_cc(
            complex(math.cos(phase_rad), math.sin(phase_rad)))
        self.snk = blocks.file_sink(gr.sizeof_gr_complex, ofile)

        self.connect(self.src, self.channel, self.phase, self.snk)
# /////////////////////////////////////////////////////////////////////////////
# main
# /////////////////////////////////////////////////////////////////////////////
def main():
    """Parse command-line options and run the channel-impairment flowgraph.

    Expects exactly two positional arguments: the input sample file and the
    output sample file. Exits with status 1 (after printing help) otherwise.
    """
    # Create Options Parser:
    # (fixed typo: script is benchmark_add_channel.py, not "benchmack")
    usage = "benchmark_add_channel.py [options] <input file> <output file>"
    parser = OptionParser(usage=usage, option_class=eng_option, conflict_handler="resolve")
    parser.add_option("-n", "--snr", type="eng_float", default=30,
                      help="set the SNR of the channel in dB [default=%default]")
    # NOTE(review): --seed and --use-multipath are accepted but not read by
    # my_top_block; kept for CLI backward compatibility.
    parser.add_option("", "--seed", action="store_true", default=False,
                      help="use a random seed for AWGN noise [default=%default]")
    parser.add_option("-f", "--frequency-offset", type="eng_float", default=0,
                      help="set frequency offset introduced by channel [default=%default]")
    parser.add_option("-t", "--time-offset", type="eng_float", default=1.0,
                      help="set timing offset between Tx and Rx [default=%default]")
    parser.add_option("-p", "--phase-offset", type="eng_float", default=0,
                      help="set phase offset (in degrees) between Tx and Rx [default=%default]")
    parser.add_option("-m", "--use-multipath", action="store_true", default=False,
                      help="Use a multipath channel [default=%default]")
    parser.add_option("", "--tx-amplitude", type="eng_float",
                      default=1.0,
                      help="tell the simulator the signal amplitude [default=%default]")

    (options, args) = parser.parse_args()

    if len(args) != 2:
        parser.print_help(sys.stderr)
        sys.exit(1)

    ifile = args[0]
    ofile = args[1]

    # build the graph
    tb = my_top_block(ifile, ofile, options)

    # Realtime scheduling keeps the flowgraph from being preempted; failure
    # is non-fatal, so just warn.
    r = gr.enable_realtime_scheduling()
    if r != gr.RT_OK:
        print("Warning: Failed to enable realtime scheduling.")

    tb.start()  # start flow graph
    tb.wait()   # wait for it to finish
if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        # Exit quietly on Ctrl-C instead of printing a traceback.
        pass
| gpl-3.0 |
cloudera/hue | desktop/core/ext-py/protobuf-3.13.0/google/protobuf/test_messages_proto2_pb2.py | 3 | 135641 | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/test_messages_proto2.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/protobuf/test_messages_proto2.proto',
package='protobuf_test_messages.proto2',
syntax='proto2',
serialized_options=b'\n(com.google.protobuf_test_messages.proto2H\001\370\001\001',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n*google/protobuf/test_messages_proto2.proto\x12\x1dprotobuf_test_messages.proto2\"\xfe\x39\n\x12TestAllTypesProto2\x12\x16\n\x0eoptional_int32\x18\x01 \x01(\x05\x12\x16\n\x0eoptional_int64\x18\x02 \x01(\x03\x12\x17\n\x0foptional_uint32\x18\x03 \x01(\r\x12\x17\n\x0foptional_uint64\x18\x04 \x01(\x04\x12\x17\n\x0foptional_sint32\x18\x05 \x01(\x11\x12\x17\n\x0foptional_sint64\x18\x06 \x01(\x12\x12\x18\n\x10optional_fixed32\x18\x07 \x01(\x07\x12\x18\n\x10optional_fixed64\x18\x08 \x01(\x06\x12\x19\n\x11optional_sfixed32\x18\t \x01(\x0f\x12\x19\n\x11optional_sfixed64\x18\n \x01(\x10\x12\x16\n\x0eoptional_float\x18\x0b \x01(\x02\x12\x17\n\x0foptional_double\x18\x0c \x01(\x01\x12\x15\n\roptional_bool\x18\r \x01(\x08\x12\x17\n\x0foptional_string\x18\x0e \x01(\t\x12\x16\n\x0eoptional_bytes\x18\x0f \x01(\x0c\x12`\n\x17optional_nested_message\x18\x12 \x01(\x0b\x32?.protobuf_test_messages.proto2.TestAllTypesProto2.NestedMessage\x12U\n\x18optional_foreign_message\x18\x13 \x01(\x0b\x32\x33.protobuf_test_messages.proto2.ForeignMessageProto2\x12Z\n\x14optional_nested_enum\x18\x15 \x01(\x0e\x32<.protobuf_test_messages.proto2.TestAllTypesProto2.NestedEnum\x12O\n\x15optional_foreign_enum\x18\x16 \x01(\x0e\x32\x30.protobuf_test_messages.proto2.ForeignEnumProto2\x12!\n\x15optional_string_piece\x18\x18 \x01(\tB\x02\x08\x02\x12\x19\n\roptional_cord\x18\x19 \x01(\tB\x02\x08\x01\x12L\n\x11recursive_message\x18\x1b \x01(\x0b\x32\x31.protobuf_test_messages.proto2.TestAllTypesProto2\x12\x16\n\x0erepeated_int32\x18\x1f \x03(\x05\x12\x16\n\x0erepeated_int64\x18 \x03(\x03\x12\x17\n\x0frepeated_uint32\x18! 
\x03(\r\x12\x17\n\x0frepeated_uint64\x18\" \x03(\x04\x12\x17\n\x0frepeated_sint32\x18# \x03(\x11\x12\x17\n\x0frepeated_sint64\x18$ \x03(\x12\x12\x18\n\x10repeated_fixed32\x18% \x03(\x07\x12\x18\n\x10repeated_fixed64\x18& \x03(\x06\x12\x19\n\x11repeated_sfixed32\x18\' \x03(\x0f\x12\x19\n\x11repeated_sfixed64\x18( \x03(\x10\x12\x16\n\x0erepeated_float\x18) \x03(\x02\x12\x17\n\x0frepeated_double\x18* \x03(\x01\x12\x15\n\rrepeated_bool\x18+ \x03(\x08\x12\x17\n\x0frepeated_string\x18, \x03(\t\x12\x16\n\x0erepeated_bytes\x18- \x03(\x0c\x12`\n\x17repeated_nested_message\x18\x30 \x03(\x0b\x32?.protobuf_test_messages.proto2.TestAllTypesProto2.NestedMessage\x12U\n\x18repeated_foreign_message\x18\x31 \x03(\x0b\x32\x33.protobuf_test_messages.proto2.ForeignMessageProto2\x12Z\n\x14repeated_nested_enum\x18\x33 \x03(\x0e\x32<.protobuf_test_messages.proto2.TestAllTypesProto2.NestedEnum\x12O\n\x15repeated_foreign_enum\x18\x34 \x03(\x0e\x32\x30.protobuf_test_messages.proto2.ForeignEnumProto2\x12!\n\x15repeated_string_piece\x18\x36 \x03(\tB\x02\x08\x02\x12\x19\n\rrepeated_cord\x18\x37 \x03(\tB\x02\x08\x01\x12\x18\n\x0cpacked_int32\x18K \x03(\x05\x42\x02\x10\x01\x12\x18\n\x0cpacked_int64\x18L \x03(\x03\x42\x02\x10\x01\x12\x19\n\rpacked_uint32\x18M \x03(\rB\x02\x10\x01\x12\x19\n\rpacked_uint64\x18N \x03(\x04\x42\x02\x10\x01\x12\x19\n\rpacked_sint32\x18O \x03(\x11\x42\x02\x10\x01\x12\x19\n\rpacked_sint64\x18P \x03(\x12\x42\x02\x10\x01\x12\x1a\n\x0epacked_fixed32\x18Q \x03(\x07\x42\x02\x10\x01\x12\x1a\n\x0epacked_fixed64\x18R \x03(\x06\x42\x02\x10\x01\x12\x1b\n\x0fpacked_sfixed32\x18S \x03(\x0f\x42\x02\x10\x01\x12\x1b\n\x0fpacked_sfixed64\x18T \x03(\x10\x42\x02\x10\x01\x12\x18\n\x0cpacked_float\x18U \x03(\x02\x42\x02\x10\x01\x12\x19\n\rpacked_double\x18V \x03(\x01\x42\x02\x10\x01\x12\x17\n\x0bpacked_bool\x18W \x03(\x08\x42\x02\x10\x01\x12\\\n\x12packed_nested_enum\x18X \x03(\x0e\x32<.protobuf_test_messages.proto2.TestAllTypesProto2.NestedEnumB\x02\x10\x01\x12\x1a\n\x0eunpacked_int32\x18Y 
\x03(\x05\x42\x02\x10\x00\x12\x1a\n\x0eunpacked_int64\x18Z \x03(\x03\x42\x02\x10\x00\x12\x1b\n\x0funpacked_uint32\x18[ \x03(\rB\x02\x10\x00\x12\x1b\n\x0funpacked_uint64\x18\\ \x03(\x04\x42\x02\x10\x00\x12\x1b\n\x0funpacked_sint32\x18] \x03(\x11\x42\x02\x10\x00\x12\x1b\n\x0funpacked_sint64\x18^ \x03(\x12\x42\x02\x10\x00\x12\x1c\n\x10unpacked_fixed32\x18_ \x03(\x07\x42\x02\x10\x00\x12\x1c\n\x10unpacked_fixed64\x18` \x03(\x06\x42\x02\x10\x00\x12\x1d\n\x11unpacked_sfixed32\x18\x61 \x03(\x0f\x42\x02\x10\x00\x12\x1d\n\x11unpacked_sfixed64\x18\x62 \x03(\x10\x42\x02\x10\x00\x12\x1a\n\x0eunpacked_float\x18\x63 \x03(\x02\x42\x02\x10\x00\x12\x1b\n\x0funpacked_double\x18\x64 \x03(\x01\x42\x02\x10\x00\x12\x19\n\runpacked_bool\x18\x65 \x03(\x08\x42\x02\x10\x00\x12^\n\x14unpacked_nested_enum\x18\x66 \x03(\x0e\x32<.protobuf_test_messages.proto2.TestAllTypesProto2.NestedEnumB\x02\x10\x00\x12]\n\x0fmap_int32_int32\x18\x38 \x03(\x0b\x32\x44.protobuf_test_messages.proto2.TestAllTypesProto2.MapInt32Int32Entry\x12]\n\x0fmap_int64_int64\x18\x39 \x03(\x0b\x32\x44.protobuf_test_messages.proto2.TestAllTypesProto2.MapInt64Int64Entry\x12\x61\n\x11map_uint32_uint32\x18: \x03(\x0b\x32\x46.protobuf_test_messages.proto2.TestAllTypesProto2.MapUint32Uint32Entry\x12\x61\n\x11map_uint64_uint64\x18; \x03(\x0b\x32\x46.protobuf_test_messages.proto2.TestAllTypesProto2.MapUint64Uint64Entry\x12\x61\n\x11map_sint32_sint32\x18< \x03(\x0b\x32\x46.protobuf_test_messages.proto2.TestAllTypesProto2.MapSint32Sint32Entry\x12\x61\n\x11map_sint64_sint64\x18= \x03(\x0b\x32\x46.protobuf_test_messages.proto2.TestAllTypesProto2.MapSint64Sint64Entry\x12\x65\n\x13map_fixed32_fixed32\x18> \x03(\x0b\x32H.protobuf_test_messages.proto2.TestAllTypesProto2.MapFixed32Fixed32Entry\x12\x65\n\x13map_fixed64_fixed64\x18? 
\x03(\x0b\x32H.protobuf_test_messages.proto2.TestAllTypesProto2.MapFixed64Fixed64Entry\x12i\n\x15map_sfixed32_sfixed32\x18@ \x03(\x0b\x32J.protobuf_test_messages.proto2.TestAllTypesProto2.MapSfixed32Sfixed32Entry\x12i\n\x15map_sfixed64_sfixed64\x18\x41 \x03(\x0b\x32J.protobuf_test_messages.proto2.TestAllTypesProto2.MapSfixed64Sfixed64Entry\x12]\n\x0fmap_int32_float\x18\x42 \x03(\x0b\x32\x44.protobuf_test_messages.proto2.TestAllTypesProto2.MapInt32FloatEntry\x12_\n\x10map_int32_double\x18\x43 \x03(\x0b\x32\x45.protobuf_test_messages.proto2.TestAllTypesProto2.MapInt32DoubleEntry\x12Y\n\rmap_bool_bool\x18\x44 \x03(\x0b\x32\x42.protobuf_test_messages.proto2.TestAllTypesProto2.MapBoolBoolEntry\x12\x61\n\x11map_string_string\x18\x45 \x03(\x0b\x32\x46.protobuf_test_messages.proto2.TestAllTypesProto2.MapStringStringEntry\x12_\n\x10map_string_bytes\x18\x46 \x03(\x0b\x32\x45.protobuf_test_messages.proto2.TestAllTypesProto2.MapStringBytesEntry\x12p\n\x19map_string_nested_message\x18G \x03(\x0b\x32M.protobuf_test_messages.proto2.TestAllTypesProto2.MapStringNestedMessageEntry\x12r\n\x1amap_string_foreign_message\x18H \x03(\x0b\x32N.protobuf_test_messages.proto2.TestAllTypesProto2.MapStringForeignMessageEntry\x12j\n\x16map_string_nested_enum\x18I \x03(\x0b\x32J.protobuf_test_messages.proto2.TestAllTypesProto2.MapStringNestedEnumEntry\x12l\n\x17map_string_foreign_enum\x18J \x03(\x0b\x32K.protobuf_test_messages.proto2.TestAllTypesProto2.MapStringForeignEnumEntry\x12\x16\n\x0coneof_uint32\x18o \x01(\rH\x00\x12_\n\x14oneof_nested_message\x18p \x01(\x0b\x32?.protobuf_test_messages.proto2.TestAllTypesProto2.NestedMessageH\x00\x12\x16\n\x0coneof_string\x18q \x01(\tH\x00\x12\x15\n\x0boneof_bytes\x18r \x01(\x0cH\x00\x12\x14\n\noneof_bool\x18s \x01(\x08H\x00\x12\x16\n\x0coneof_uint64\x18t \x01(\x04H\x00\x12\x15\n\x0boneof_float\x18u \x01(\x02H\x00\x12\x16\n\x0coneof_double\x18v \x01(\x01H\x00\x12R\n\noneof_enum\x18w 
\x01(\x0e\x32<.protobuf_test_messages.proto2.TestAllTypesProto2.NestedEnumH\x00\x12\x45\n\x04\x64\x61ta\x18\xc9\x01 \x01(\n26.protobuf_test_messages.proto2.TestAllTypesProto2.Data\x12\x13\n\nfieldname1\x18\x91\x03 \x01(\x05\x12\x14\n\x0b\x66ield_name2\x18\x92\x03 \x01(\x05\x12\x15\n\x0c_field_name3\x18\x93\x03 \x01(\x05\x12\x16\n\rfield__name4_\x18\x94\x03 \x01(\x05\x12\x14\n\x0b\x66ield0name5\x18\x95\x03 \x01(\x05\x12\x16\n\rfield_0_name6\x18\x96\x03 \x01(\x05\x12\x13\n\nfieldName7\x18\x97\x03 \x01(\x05\x12\x13\n\nFieldName8\x18\x98\x03 \x01(\x05\x12\x14\n\x0b\x66ield_Name9\x18\x99\x03 \x01(\x05\x12\x15\n\x0c\x46ield_Name10\x18\x9a\x03 \x01(\x05\x12\x15\n\x0c\x46IELD_NAME11\x18\x9b\x03 \x01(\x05\x12\x15\n\x0c\x46IELD_name12\x18\x9c\x03 \x01(\x05\x12\x17\n\x0e__field_name13\x18\x9d\x03 \x01(\x05\x12\x17\n\x0e__Field_name14\x18\x9e\x03 \x01(\x05\x12\x16\n\rfield__name15\x18\x9f\x03 \x01(\x05\x12\x16\n\rfield__Name16\x18\xa0\x03 \x01(\x05\x12\x17\n\x0e\x66ield_name17__\x18\xa1\x03 \x01(\x05\x12\x17\n\x0e\x46ield_name18__\x18\xa2\x03 \x01(\x05\x1a\x62\n\rNestedMessage\x12\t\n\x01\x61\x18\x01 \x01(\x05\x12\x46\n\x0b\x63orecursive\x18\x02 \x01(\x0b\x32\x31.protobuf_test_messages.proto2.TestAllTypesProto2\x1a\x34\n\x12MapInt32Int32Entry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x34\n\x12MapInt64Int64Entry\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01\x1a\x36\n\x14MapUint32Uint32Entry\x12\x0b\n\x03key\x18\x01 \x01(\r\x12\r\n\x05value\x18\x02 \x01(\r:\x02\x38\x01\x1a\x36\n\x14MapUint64Uint64Entry\x12\x0b\n\x03key\x18\x01 \x01(\x04\x12\r\n\x05value\x18\x02 \x01(\x04:\x02\x38\x01\x1a\x36\n\x14MapSint32Sint32Entry\x12\x0b\n\x03key\x18\x01 \x01(\x11\x12\r\n\x05value\x18\x02 \x01(\x11:\x02\x38\x01\x1a\x36\n\x14MapSint64Sint64Entry\x12\x0b\n\x03key\x18\x01 \x01(\x12\x12\r\n\x05value\x18\x02 \x01(\x12:\x02\x38\x01\x1a\x38\n\x16MapFixed32Fixed32Entry\x12\x0b\n\x03key\x18\x01 
\x01(\x07\x12\r\n\x05value\x18\x02 \x01(\x07:\x02\x38\x01\x1a\x38\n\x16MapFixed64Fixed64Entry\x12\x0b\n\x03key\x18\x01 \x01(\x06\x12\r\n\x05value\x18\x02 \x01(\x06:\x02\x38\x01\x1a:\n\x18MapSfixed32Sfixed32Entry\x12\x0b\n\x03key\x18\x01 \x01(\x0f\x12\r\n\x05value\x18\x02 \x01(\x0f:\x02\x38\x01\x1a:\n\x18MapSfixed64Sfixed64Entry\x12\x0b\n\x03key\x18\x01 \x01(\x10\x12\r\n\x05value\x18\x02 \x01(\x10:\x02\x38\x01\x1a\x34\n\x12MapInt32FloatEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x02:\x02\x38\x01\x1a\x35\n\x13MapInt32DoubleEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x01:\x02\x38\x01\x1a\x32\n\x10MapBoolBoolEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x08:\x02\x38\x01\x1a\x36\n\x14MapStringStringEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x35\n\x13MapStringBytesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c:\x02\x38\x01\x1a~\n\x1bMapStringNestedMessageEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12N\n\x05value\x18\x02 \x01(\x0b\x32?.protobuf_test_messages.proto2.TestAllTypesProto2.NestedMessage:\x02\x38\x01\x1as\n\x1cMapStringForeignMessageEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x42\n\x05value\x18\x02 \x01(\x0b\x32\x33.protobuf_test_messages.proto2.ForeignMessageProto2:\x02\x38\x01\x1ax\n\x18MapStringNestedEnumEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12K\n\x05value\x18\x02 \x01(\x0e\x32<.protobuf_test_messages.proto2.TestAllTypesProto2.NestedEnum:\x02\x38\x01\x1am\n\x19MapStringForeignEnumEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12?\n\x05value\x18\x02 \x01(\x0e\x32\x30.protobuf_test_messages.proto2.ForeignEnumProto2:\x02\x38\x01\x1a\x33\n\x04\x44\x61ta\x12\x14\n\x0bgroup_int32\x18\xca\x01 \x01(\x05\x12\x15\n\x0cgroup_uint32\x18\xcb\x01 \x01(\r\x1a!\n\x11MessageSetCorrect*\x08\x08\x04\x10\xff\xff\xff\xff\x07:\x02\x08\x01\x1a\xe0\x01\n\x1bMessageSetCorrectExtension1\x12\x0b\n\x03str\x18\x19 
\x01(\t2\xb3\x01\n\x15message_set_extension\x12\x43.protobuf_test_messages.proto2.TestAllTypesProto2.MessageSetCorrect\x18\xf9\xbb^ \x01(\x0b\x32M.protobuf_test_messages.proto2.TestAllTypesProto2.MessageSetCorrectExtension1\x1a\xdf\x01\n\x1bMessageSetCorrectExtension2\x12\t\n\x01i\x18\t \x01(\x05\x32\xb4\x01\n\x15message_set_extension\x12\x43.protobuf_test_messages.proto2.TestAllTypesProto2.MessageSetCorrect\x18\x90\xb3\xfc\x01 \x01(\x0b\x32M.protobuf_test_messages.proto2.TestAllTypesProto2.MessageSetCorrectExtension2\"9\n\nNestedEnum\x12\x07\n\x03\x46OO\x10\x00\x12\x07\n\x03\x42\x41R\x10\x01\x12\x07\n\x03\x42\x41Z\x10\x02\x12\x10\n\x03NEG\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\x01*\x05\x08x\x10\xc9\x01\x42\r\n\x0boneof_fieldJ\x06\x08\xe8\x07\x10\x90N\"!\n\x14\x46oreignMessageProto2\x12\t\n\x01\x63\x18\x01 \x01(\x05\"\xc1\x02\n\x15UnknownToTestAllTypes\x12\x17\n\x0eoptional_int32\x18\xe9\x07 \x01(\x05\x12\x18\n\x0foptional_string\x18\xea\x07 \x01(\t\x12L\n\x0enested_message\x18\xeb\x07 \x01(\x0b\x32\x33.protobuf_test_messages.proto2.ForeignMessageProto2\x12Z\n\roptionalgroup\x18\xec\x07 \x01(\n2B.protobuf_test_messages.proto2.UnknownToTestAllTypes.OptionalGroup\x12\x16\n\roptional_bool\x18\xee\x07 \x01(\x08\x12\x17\n\x0erepeated_int32\x18\xf3\x07 \x03(\x05\x1a\x1a\n\rOptionalGroup\x12\t\n\x01\x61\x18\x01 \x01(\x05*F\n\x11\x46oreignEnumProto2\x12\x0f\n\x0b\x46OREIGN_FOO\x10\x00\x12\x0f\n\x0b\x46OREIGN_BAR\x10\x01\x12\x0f\n\x0b\x46OREIGN_BAZ\x10\x02:J\n\x0f\x65xtension_int32\x12\x31.protobuf_test_messages.proto2.TestAllTypesProto2\x18x \x01(\x05\x42/\n(com.google.protobuf_test_messages.proto2H\x01\xf8\x01\x01'
)
_FOREIGNENUMPROTO2 = _descriptor.EnumDescriptor(
name='ForeignEnumProto2',
full_name='protobuf_test_messages.proto2.ForeignEnumProto2',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='FOREIGN_FOO', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='FOREIGN_BAR', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='FOREIGN_BAZ', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=7861,
serialized_end=7931,
)
_sym_db.RegisterEnumDescriptor(_FOREIGNENUMPROTO2)
ForeignEnumProto2 = enum_type_wrapper.EnumTypeWrapper(_FOREIGNENUMPROTO2)
FOREIGN_FOO = 0
FOREIGN_BAR = 1
FOREIGN_BAZ = 2
EXTENSION_INT32_FIELD_NUMBER = 120
extension_int32 = _descriptor.FieldDescriptor(
name='extension_int32', full_name='protobuf_test_messages.proto2.extension_int32', index=0,
number=120, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=True, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key)
_TESTALLTYPESPROTO2_NESTEDENUM = _descriptor.EnumDescriptor(
name='NestedEnum',
full_name='protobuf_test_messages.proto2.TestAllTypesProto2.NestedEnum',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='FOO', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='BAR', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='BAZ', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='NEG', index=3, number=-1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=7413,
serialized_end=7470,
)
_sym_db.RegisterEnumDescriptor(_TESTALLTYPESPROTO2_NESTEDENUM)
_TESTALLTYPESPROTO2_NESTEDMESSAGE = _descriptor.Descriptor(
name='NestedMessage',
full_name='protobuf_test_messages.proto2.TestAllTypesProto2.NestedMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='a', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.NestedMessage.a', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='corecursive', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.NestedMessage.corecursive', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=5454,
serialized_end=5552,
)
_TESTALLTYPESPROTO2_MAPINT32INT32ENTRY = _descriptor.Descriptor(
name='MapInt32Int32Entry',
full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapInt32Int32Entry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapInt32Int32Entry.key', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapInt32Int32Entry.value', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=5554,
serialized_end=5606,
)
_TESTALLTYPESPROTO2_MAPINT64INT64ENTRY = _descriptor.Descriptor(
name='MapInt64Int64Entry',
full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapInt64Int64Entry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapInt64Int64Entry.key', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapInt64Int64Entry.value', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=5608,
serialized_end=5660,
)
# NOTE(review): machine-generated by the protocol buffer compiler (protoc) --
# do not hand-edit; regenerate from test_messages_proto2.proto instead.
# Each Descriptor below is the synthetic <key, value> entry message that backs
# one `map<K, V>` field of TestAllTypesProto2.  serialized_options=b'8\001' is
# the encoded MessageOptions blob setting map_entry=true (field 7, varint 1);
# serialized_start/serialized_end are byte offsets of this message's definition
# inside the file's serialized FileDescriptorProto, so none of these literals
# may be edited independently of the serialized descriptor.
# map<uint32, uint32> entry: key and value are both uint32 (type=13).
_TESTALLTYPESPROTO2_MAPUINT32UINT32ENTRY = _descriptor.Descriptor(
  name='MapUint32Uint32Entry',
  full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapUint32Uint32Entry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapUint32Uint32Entry.key', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='value', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapUint32Uint32Entry.value', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=b'8\001',
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5662,
  serialized_end=5716,
  )
# map<uint64, uint64> entry: key and value are both uint64 (type=4).
_TESTALLTYPESPROTO2_MAPUINT64UINT64ENTRY = _descriptor.Descriptor(
  name='MapUint64Uint64Entry',
  full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapUint64Uint64Entry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapUint64Uint64Entry.key', index=0,
      number=1, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='value', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapUint64Uint64Entry.value', index=1,
      number=2, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=b'8\001',
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5718,
  serialized_end=5772,
  )
# map<sint32, sint32> entry: key and value are both sint32 (type=17, zig-zag).
_TESTALLTYPESPROTO2_MAPSINT32SINT32ENTRY = _descriptor.Descriptor(
  name='MapSint32Sint32Entry',
  full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapSint32Sint32Entry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapSint32Sint32Entry.key', index=0,
      number=1, type=17, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='value', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapSint32Sint32Entry.value', index=1,
      number=2, type=17, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=b'8\001',
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5774,
  serialized_end=5828,
  )
# map<sint64, sint64> entry: key and value are both sint64 (type=18, zig-zag).
_TESTALLTYPESPROTO2_MAPSINT64SINT64ENTRY = _descriptor.Descriptor(
  name='MapSint64Sint64Entry',
  full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapSint64Sint64Entry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapSint64Sint64Entry.key', index=0,
      number=1, type=18, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='value', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapSint64Sint64Entry.value', index=1,
      number=2, type=18, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=b'8\001',
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5830,
  serialized_end=5884,
  )
# map<fixed32, fixed32> entry: key and value are both fixed32 (type=7).
_TESTALLTYPESPROTO2_MAPFIXED32FIXED32ENTRY = _descriptor.Descriptor(
  name='MapFixed32Fixed32Entry',
  full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapFixed32Fixed32Entry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapFixed32Fixed32Entry.key', index=0,
      number=1, type=7, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='value', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapFixed32Fixed32Entry.value', index=1,
      number=2, type=7, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=b'8\001',
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5886,
  serialized_end=5942,
  )
# map<fixed64, fixed64> entry: key and value are both fixed64 (type=6).
_TESTALLTYPESPROTO2_MAPFIXED64FIXED64ENTRY = _descriptor.Descriptor(
  name='MapFixed64Fixed64Entry',
  full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapFixed64Fixed64Entry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapFixed64Fixed64Entry.key', index=0,
      number=1, type=6, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='value', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapFixed64Fixed64Entry.value', index=1,
      number=2, type=6, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=b'8\001',
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5944,
  serialized_end=6000,
  )
# map<sfixed32, sfixed32> entry: key and value are both sfixed32 (type=15).
_TESTALLTYPESPROTO2_MAPSFIXED32SFIXED32ENTRY = _descriptor.Descriptor(
  name='MapSfixed32Sfixed32Entry',
  full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapSfixed32Sfixed32Entry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapSfixed32Sfixed32Entry.key', index=0,
      number=1, type=15, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='value', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapSfixed32Sfixed32Entry.value', index=1,
      number=2, type=15, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=b'8\001',
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6002,
  serialized_end=6060,
  )
# map<sfixed64, sfixed64> entry: key and value are both sfixed64 (type=16).
_TESTALLTYPESPROTO2_MAPSFIXED64SFIXED64ENTRY = _descriptor.Descriptor(
  name='MapSfixed64Sfixed64Entry',
  full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapSfixed64Sfixed64Entry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapSfixed64Sfixed64Entry.key', index=0,
      number=1, type=16, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='value', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapSfixed64Sfixed64Entry.value', index=1,
      number=2, type=16, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=b'8\001',
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6062,
  serialized_end=6120,
  )
# map<int32, float> entry: key is int32 (type=5), value is float (type=2).
_TESTALLTYPESPROTO2_MAPINT32FLOATENTRY = _descriptor.Descriptor(
  name='MapInt32FloatEntry',
  full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapInt32FloatEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapInt32FloatEntry.key', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='value', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapInt32FloatEntry.value', index=1,
      number=2, type=2, cpp_type=6, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=b'8\001',
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6122,
  serialized_end=6174,
  )
# map<int32, double> entry: key is int32 (type=5), value is double (type=1).
_TESTALLTYPESPROTO2_MAPINT32DOUBLEENTRY = _descriptor.Descriptor(
  name='MapInt32DoubleEntry',
  full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapInt32DoubleEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapInt32DoubleEntry.key', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='value', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapInt32DoubleEntry.value', index=1,
      number=2, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=b'8\001',
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6176,
  serialized_end=6229,
  )
# map<bool, bool> entry: key and value are both bool (type=8).
_TESTALLTYPESPROTO2_MAPBOOLBOOLENTRY = _descriptor.Descriptor(
  name='MapBoolBoolEntry',
  full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapBoolBoolEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapBoolBoolEntry.key', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='value', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapBoolBoolEntry.value', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=b'8\001',
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6231,
  serialized_end=6281,
  )
# map<string, string> entry: key and value are both string (type=9).
_TESTALLTYPESPROTO2_MAPSTRINGSTRINGENTRY = _descriptor.Descriptor(
  name='MapStringStringEntry',
  full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapStringStringEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapStringStringEntry.key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='value', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapStringStringEntry.value', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=b'8\001',
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6283,
  serialized_end=6337,
  )
# map<string, bytes> entry: key is string (type=9), value is bytes (type=12).
_TESTALLTYPESPROTO2_MAPSTRINGBYTESENTRY = _descriptor.Descriptor(
  name='MapStringBytesEntry',
  full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapStringBytesEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapStringBytesEntry.key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='value', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapStringBytesEntry.value', index=1,
      number=2, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=b"",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=b'8\001',
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6339,
  serialized_end=6392,
  )
# map<string, NestedMessage> entry: value is a message (type=11); its
# message_type is None here and is linked up later in this generated module.
_TESTALLTYPESPROTO2_MAPSTRINGNESTEDMESSAGEENTRY = _descriptor.Descriptor(
  name='MapStringNestedMessageEntry',
  full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapStringNestedMessageEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapStringNestedMessageEntry.key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='value', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapStringNestedMessageEntry.value', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=b'8\001',
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6394,
  serialized_end=6520,
  )
# map<string, ForeignMessage> entry: value is a message (type=11).
_TESTALLTYPESPROTO2_MAPSTRINGFOREIGNMESSAGEENTRY = _descriptor.Descriptor(
  name='MapStringForeignMessageEntry',
  full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapStringForeignMessageEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapStringForeignMessageEntry.key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='value', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapStringForeignMessageEntry.value', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=b'8\001',
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6522,
  serialized_end=6637,
  )
# map<string, NestedEnum> entry: value is an enum (type=14).
_TESTALLTYPESPROTO2_MAPSTRINGNESTEDENUMENTRY = _descriptor.Descriptor(
  name='MapStringNestedEnumEntry',
  full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapStringNestedEnumEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapStringNestedEnumEntry.key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='value', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapStringNestedEnumEntry.value', index=1,
      number=2, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=b'8\001',
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6639,
  serialized_end=6759,
  )
# map<string, ForeignEnum> entry: value is an enum (type=14).
_TESTALLTYPESPROTO2_MAPSTRINGFOREIGNENUMENTRY = _descriptor.Descriptor(
  name='MapStringForeignEnumEntry',
  full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapStringForeignEnumEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapStringForeignEnumEntry.key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='value', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapStringForeignEnumEntry.value', index=1,
      number=2, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=b'8\001',
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6761,
  serialized_end=6870,
  )
# NOTE(review): machine-generated by the protocol buffer compiler (protoc) --
# do not hand-edit.  The four descriptors below are further nested message
# types of TestAllTypesProto2; serialized_start/serialized_end are byte
# offsets into the file's serialized FileDescriptorProto.
# TestAllTypesProto2.Data: two scalar fields with high field numbers
# (202: int32, 203: uint32) -- presumably the payload of a proto2 `group`
# field, given the `group_` naming; confirm against the .proto source.
_TESTALLTYPESPROTO2_DATA = _descriptor.Descriptor(
  name='Data',
  full_name='protobuf_test_messages.proto2.TestAllTypesProto2.Data',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='group_int32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.Data.group_int32', index=0,
      number=202, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='group_uint32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.Data.group_uint32', index=1,
      number=203, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6872,
  serialized_end=6923,
  )
# TestAllTypesProto2.MessageSetCorrect: an empty, extendable message
# (extension range 4..max).  serialized_options=b'\010\001' is the encoded
# MessageOptions blob (field 1, varint 1 -- the message_set_wire_format
# option in the MessageOptions schema).
_TESTALLTYPESPROTO2_MESSAGESETCORRECT = _descriptor.Descriptor(
  name='MessageSetCorrect',
  full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MessageSetCorrect',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=b'\010\001',
  is_extendable=True,
  syntax='proto2',
  extension_ranges=[(4, 2147483647), ],
  oneofs=[
  ],
  serialized_start=6925,
  serialized_end=6958,
  )
# TestAllTypesProto2.MessageSetCorrectExtension1: carries one string field
# ('str', number 25) and declares an extension 'message_set_extension'
# (is_extension=True, field number 1547769, message-typed) extending another
# message -- the extended type is resolved later in this generated module.
_TESTALLTYPESPROTO2_MESSAGESETCORRECTEXTENSION1 = _descriptor.Descriptor(
  name='MessageSetCorrectExtension1',
  full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MessageSetCorrectExtension1',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='str', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MessageSetCorrectExtension1.str', index=0,
      number=25, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
    _descriptor.FieldDescriptor(
      name='message_set_extension', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MessageSetCorrectExtension1.message_set_extension', index=0,
      number=1547769, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=True, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6961,
  serialized_end=7185,
  )
# TestAllTypesProto2.MessageSetCorrectExtension2: carries one int32 field
# ('i', number 9) and declares a second 'message_set_extension' extension
# (field number 4135312, message-typed).
_TESTALLTYPESPROTO2_MESSAGESETCORRECTEXTENSION2 = _descriptor.Descriptor(
  name='MessageSetCorrectExtension2',
  full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MessageSetCorrectExtension2',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='i', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MessageSetCorrectExtension2.i', index=0,
      number=9, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
    _descriptor.FieldDescriptor(
      name='message_set_extension', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MessageSetCorrectExtension2.message_set_extension', index=0,
      number=4135312, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=True, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=7188,
  serialized_end=7411,
  )
_TESTALLTYPESPROTO2 = _descriptor.Descriptor(
name='TestAllTypesProto2',
full_name='protobuf_test_messages.proto2.TestAllTypesProto2',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='optional_int32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.optional_int32', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_int64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.optional_int64', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_uint32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.optional_uint32', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_uint64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.optional_uint64', index=3,
number=4, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_sint32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.optional_sint32', index=4,
number=5, type=17, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_sint64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.optional_sint64', index=5,
number=6, type=18, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_fixed32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.optional_fixed32', index=6,
number=7, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_fixed64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.optional_fixed64', index=7,
number=8, type=6, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_sfixed32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.optional_sfixed32', index=8,
number=9, type=15, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_sfixed64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.optional_sfixed64', index=9,
number=10, type=16, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_float', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.optional_float', index=10,
number=11, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_double', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.optional_double', index=11,
number=12, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_bool', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.optional_bool', index=12,
number=13, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_string', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.optional_string', index=13,
number=14, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_bytes', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.optional_bytes', index=14,
number=15, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_nested_message', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.optional_nested_message', index=15,
number=18, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_foreign_message', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.optional_foreign_message', index=16,
number=19, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_nested_enum', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.optional_nested_enum', index=17,
number=21, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_foreign_enum', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.optional_foreign_enum', index=18,
number=22, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_string_piece', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.optional_string_piece', index=19,
number=24, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\010\002', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_cord', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.optional_cord', index=20,
number=25, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\010\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='recursive_message', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.recursive_message', index=21,
number=27, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_int32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.repeated_int32', index=22,
number=31, type=5, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_int64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.repeated_int64', index=23,
number=32, type=3, cpp_type=2, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_uint32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.repeated_uint32', index=24,
number=33, type=13, cpp_type=3, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_uint64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.repeated_uint64', index=25,
number=34, type=4, cpp_type=4, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_sint32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.repeated_sint32', index=26,
number=35, type=17, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_sint64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.repeated_sint64', index=27,
number=36, type=18, cpp_type=2, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_fixed32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.repeated_fixed32', index=28,
number=37, type=7, cpp_type=3, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_fixed64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.repeated_fixed64', index=29,
number=38, type=6, cpp_type=4, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_sfixed32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.repeated_sfixed32', index=30,
number=39, type=15, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_sfixed64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.repeated_sfixed64', index=31,
number=40, type=16, cpp_type=2, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_float', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.repeated_float', index=32,
number=41, type=2, cpp_type=6, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_double', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.repeated_double', index=33,
number=42, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_bool', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.repeated_bool', index=34,
number=43, type=8, cpp_type=7, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_string', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.repeated_string', index=35,
number=44, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_bytes', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.repeated_bytes', index=36,
number=45, type=12, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_nested_message', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.repeated_nested_message', index=37,
number=48, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_foreign_message', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.repeated_foreign_message', index=38,
number=49, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_nested_enum', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.repeated_nested_enum', index=39,
number=51, type=14, cpp_type=8, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_foreign_enum', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.repeated_foreign_enum', index=40,
number=52, type=14, cpp_type=8, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_string_piece', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.repeated_string_piece', index=41,
number=54, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\010\002', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_cord', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.repeated_cord', index=42,
number=55, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\010\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='packed_int32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.packed_int32', index=43,
number=75, type=5, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='packed_int64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.packed_int64', index=44,
number=76, type=3, cpp_type=2, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='packed_uint32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.packed_uint32', index=45,
number=77, type=13, cpp_type=3, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='packed_uint64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.packed_uint64', index=46,
number=78, type=4, cpp_type=4, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='packed_sint32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.packed_sint32', index=47,
number=79, type=17, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='packed_sint64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.packed_sint64', index=48,
number=80, type=18, cpp_type=2, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='packed_fixed32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.packed_fixed32', index=49,
number=81, type=7, cpp_type=3, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='packed_fixed64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.packed_fixed64', index=50,
number=82, type=6, cpp_type=4, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='packed_sfixed32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.packed_sfixed32', index=51,
number=83, type=15, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='packed_sfixed64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.packed_sfixed64', index=52,
number=84, type=16, cpp_type=2, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='packed_float', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.packed_float', index=53,
number=85, type=2, cpp_type=6, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='packed_double', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.packed_double', index=54,
number=86, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='packed_bool', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.packed_bool', index=55,
number=87, type=8, cpp_type=7, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='packed_nested_enum', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.packed_nested_enum', index=56,
number=88, type=14, cpp_type=8, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='unpacked_int32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.unpacked_int32', index=57,
number=89, type=5, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='unpacked_int64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.unpacked_int64', index=58,
number=90, type=3, cpp_type=2, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='unpacked_uint32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.unpacked_uint32', index=59,
number=91, type=13, cpp_type=3, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='unpacked_uint64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.unpacked_uint64', index=60,
number=92, type=4, cpp_type=4, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='unpacked_sint32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.unpacked_sint32', index=61,
number=93, type=17, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='unpacked_sint64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.unpacked_sint64', index=62,
number=94, type=18, cpp_type=2, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='unpacked_fixed32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.unpacked_fixed32', index=63,
number=95, type=7, cpp_type=3, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='unpacked_fixed64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.unpacked_fixed64', index=64,
number=96, type=6, cpp_type=4, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='unpacked_sfixed32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.unpacked_sfixed32', index=65,
number=97, type=15, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='unpacked_sfixed64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.unpacked_sfixed64', index=66,
number=98, type=16, cpp_type=2, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='unpacked_float', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.unpacked_float', index=67,
number=99, type=2, cpp_type=6, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='unpacked_double', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.unpacked_double', index=68,
number=100, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='unpacked_bool', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.unpacked_bool', index=69,
number=101, type=8, cpp_type=7, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='unpacked_nested_enum', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.unpacked_nested_enum', index=70,
number=102, type=14, cpp_type=8, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='map_int32_int32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.map_int32_int32', index=71,
number=56, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='map_int64_int64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.map_int64_int64', index=72,
number=57, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='map_uint32_uint32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.map_uint32_uint32', index=73,
number=58, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='map_uint64_uint64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.map_uint64_uint64', index=74,
number=59, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='map_sint32_sint32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.map_sint32_sint32', index=75,
number=60, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='map_sint64_sint64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.map_sint64_sint64', index=76,
number=61, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='map_fixed32_fixed32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.map_fixed32_fixed32', index=77,
number=62, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='map_fixed64_fixed64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.map_fixed64_fixed64', index=78,
number=63, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='map_sfixed32_sfixed32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.map_sfixed32_sfixed32', index=79,
number=64, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='map_sfixed64_sfixed64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.map_sfixed64_sfixed64', index=80,
number=65, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='map_int32_float', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.map_int32_float', index=81,
number=66, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='map_int32_double', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.map_int32_double', index=82,
number=67, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='map_bool_bool', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.map_bool_bool', index=83,
number=68, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='map_string_string', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.map_string_string', index=84,
number=69, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='map_string_bytes', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.map_string_bytes', index=85,
number=70, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='map_string_nested_message', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.map_string_nested_message', index=86,
number=71, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='map_string_foreign_message', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.map_string_foreign_message', index=87,
number=72, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='map_string_nested_enum', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.map_string_nested_enum', index=88,
number=73, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='map_string_foreign_enum', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.map_string_foreign_enum', index=89,
number=74, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='oneof_uint32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.oneof_uint32', index=90,
number=111, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='oneof_nested_message', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.oneof_nested_message', index=91,
number=112, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='oneof_string', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.oneof_string', index=92,
number=113, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='oneof_bytes', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.oneof_bytes', index=93,
number=114, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='oneof_bool', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.oneof_bool', index=94,
number=115, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='oneof_uint64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.oneof_uint64', index=95,
number=116, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='oneof_float', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.oneof_float', index=96,
number=117, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='oneof_double', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.oneof_double', index=97,
number=118, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='oneof_enum', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.oneof_enum', index=98,
number=119, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='data', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.data', index=99,
number=201, type=10, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='fieldname1', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.fieldname1', index=100,
number=401, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='field_name2', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.field_name2', index=101,
number=402, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='_field_name3', full_name='protobuf_test_messages.proto2.TestAllTypesProto2._field_name3', index=102,
number=403, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='field__name4_', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.field__name4_', index=103,
number=404, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='field0name5', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.field0name5', index=104,
number=405, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='field_0_name6', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.field_0_name6', index=105,
number=406, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='fieldName7', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.fieldName7', index=106,
number=407, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='FieldName8', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.FieldName8', index=107,
number=408, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='field_Name9', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.field_Name9', index=108,
number=409, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='Field_Name10', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.Field_Name10', index=109,
number=410, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='FIELD_NAME11', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.FIELD_NAME11', index=110,
number=411, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='FIELD_name12', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.FIELD_name12', index=111,
number=412, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='__field_name13', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.__field_name13', index=112,
number=413, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='__Field_name14', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.__Field_name14', index=113,
number=414, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='field__name15', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.field__name15', index=114,
number=415, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='field__Name16', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.field__Name16', index=115,
number=416, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='field_name17__', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.field_name17__', index=116,
number=417, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='Field_name18__', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.Field_name18__', index=117,
number=418, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_TESTALLTYPESPROTO2_NESTEDMESSAGE, _TESTALLTYPESPROTO2_MAPINT32INT32ENTRY, _TESTALLTYPESPROTO2_MAPINT64INT64ENTRY, _TESTALLTYPESPROTO2_MAPUINT32UINT32ENTRY, _TESTALLTYPESPROTO2_MAPUINT64UINT64ENTRY, _TESTALLTYPESPROTO2_MAPSINT32SINT32ENTRY, _TESTALLTYPESPROTO2_MAPSINT64SINT64ENTRY, _TESTALLTYPESPROTO2_MAPFIXED32FIXED32ENTRY, _TESTALLTYPESPROTO2_MAPFIXED64FIXED64ENTRY, _TESTALLTYPESPROTO2_MAPSFIXED32SFIXED32ENTRY, _TESTALLTYPESPROTO2_MAPSFIXED64SFIXED64ENTRY, _TESTALLTYPESPROTO2_MAPINT32FLOATENTRY, _TESTALLTYPESPROTO2_MAPINT32DOUBLEENTRY, _TESTALLTYPESPROTO2_MAPBOOLBOOLENTRY, _TESTALLTYPESPROTO2_MAPSTRINGSTRINGENTRY, _TESTALLTYPESPROTO2_MAPSTRINGBYTESENTRY, _TESTALLTYPESPROTO2_MAPSTRINGNESTEDMESSAGEENTRY, _TESTALLTYPESPROTO2_MAPSTRINGFOREIGNMESSAGEENTRY, _TESTALLTYPESPROTO2_MAPSTRINGNESTEDENUMENTRY, _TESTALLTYPESPROTO2_MAPSTRINGFOREIGNENUMENTRY, _TESTALLTYPESPROTO2_DATA, _TESTALLTYPESPROTO2_MESSAGESETCORRECT, _TESTALLTYPESPROTO2_MESSAGESETCORRECTEXTENSION1, _TESTALLTYPESPROTO2_MESSAGESETCORRECTEXTENSION2, ],
enum_types=[
_TESTALLTYPESPROTO2_NESTEDENUM,
],
serialized_options=None,
is_extendable=True,
syntax='proto2',
extension_ranges=[(120, 201), ],
oneofs=[
_descriptor.OneofDescriptor(
name='oneof_field', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.oneof_field',
index=0, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
],
serialized_start=78,
serialized_end=7500,
)
# Descriptor for protobuf_test_messages.proto2.ForeignMessageProto2 (a single
# optional int32 field ``c``).  NOTE: this module is protoc-generated output
# (see the @@protoc_insertion_point markers below); the numeric values here
# (field numbers, indexes, serialized_start/end byte offsets) must match the
# serialized file descriptor exactly -- do not edit by hand.
_FOREIGNMESSAGEPROTO2 = _descriptor.Descriptor(
  name='ForeignMessageProto2',
  full_name='protobuf_test_messages.proto2.ForeignMessageProto2',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      # ``optional int32 c = 1`` (type=5 / cpp_type=1 is int32; label=1 is optional)
      name='c', full_name='protobuf_test_messages.proto2.ForeignMessageProto2.c', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message definition within the serialized FileDescriptorProto.
  serialized_start=7502,
  serialized_end=7535,
)
# Descriptor for the nested group message
# protobuf_test_messages.proto2.UnknownToTestAllTypes.OptionalGroup.
# Protoc-generated descriptor data; numeric values must match the serialized
# file descriptor -- do not edit by hand.  Its containing_type back-link is
# patched in after _UNKNOWNTOTESTALLTYPES is constructed below.
_UNKNOWNTOTESTALLTYPES_OPTIONALGROUP = _descriptor.Descriptor(
  name='OptionalGroup',
  full_name='protobuf_test_messages.proto2.UnknownToTestAllTypes.OptionalGroup',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      # ``optional int32 a = 1``
      name='a', full_name='protobuf_test_messages.proto2.UnknownToTestAllTypes.OptionalGroup.a', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets within the serialized FileDescriptorProto.
  serialized_start=7833,
  serialized_end=7859,
)
# Descriptor for protobuf_test_messages.proto2.UnknownToTestAllTypes.
# Its field numbers (1001-1011) deliberately fall outside TestAllTypesProto2's
# known field range so that serialized instances decode as unknown fields
# there.  Protoc-generated descriptor data; do not edit by hand.  message_type
# references for 'nested_message' and 'optionalgroup' are patched in below,
# after the referenced descriptors exist.
_UNKNOWNTOTESTALLTYPES = _descriptor.Descriptor(
  name='UnknownToTestAllTypes',
  full_name='protobuf_test_messages.proto2.UnknownToTestAllTypes',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      # ``optional int32 optional_int32 = 1001``
      name='optional_int32', full_name='protobuf_test_messages.proto2.UnknownToTestAllTypes.optional_int32', index=0,
      number=1001, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      # ``optional string optional_string = 1002``
      name='optional_string', full_name='protobuf_test_messages.proto2.UnknownToTestAllTypes.optional_string', index=1,
      number=1002, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      # ``optional ForeignMessageProto2 nested_message = 1003`` (message_type patched below)
      name='nested_message', full_name='protobuf_test_messages.proto2.UnknownToTestAllTypes.nested_message', index=2,
      number=1003, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      # ``optional group OptionalGroup = 1004`` (type=10 is TYPE_GROUP; message_type patched below)
      name='optionalgroup', full_name='protobuf_test_messages.proto2.UnknownToTestAllTypes.optionalgroup', index=3,
      number=1004, type=10, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      # ``optional bool optional_bool = 1006``
      name='optional_bool', full_name='protobuf_test_messages.proto2.UnknownToTestAllTypes.optional_bool', index=4,
      number=1006, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      # ``repeated int32 repeated_int32 = 1011`` (label=3 is repeated)
      name='repeated_int32', full_name='protobuf_test_messages.proto2.UnknownToTestAllTypes.repeated_int32', index=5,
      number=1011, type=5, cpp_type=1, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[_UNKNOWNTOTESTALLTYPES_OPTIONALGROUP, ],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets within the serialized FileDescriptorProto.
  serialized_start=7538,
  serialized_end=7859,
)
# ---------------------------------------------------------------------------
# Post-construction wiring (protoc-generated; do not edit by hand).
# Descriptor objects are built above with message_type/enum_type/containing_
# type left as None, because the referenced descriptors may not exist yet
# (e.g. NestedMessage.corecursive refers back to TestAllTypesProto2 itself).
# The assignments below resolve those cross-references, then register the
# finished descriptors with the file DESCRIPTOR and the symbol database.
# ---------------------------------------------------------------------------

# Resolve the recursive message reference and attach nested/map-entry types
# to their containing message.
_TESTALLTYPESPROTO2_NESTEDMESSAGE.fields_by_name['corecursive'].message_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_NESTEDMESSAGE.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MAPINT32INT32ENTRY.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MAPINT64INT64ENTRY.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MAPUINT32UINT32ENTRY.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MAPUINT64UINT64ENTRY.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MAPSINT32SINT32ENTRY.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MAPSINT64SINT64ENTRY.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MAPFIXED32FIXED32ENTRY.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MAPFIXED64FIXED64ENTRY.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MAPSFIXED32SFIXED32ENTRY.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MAPSFIXED64SFIXED64ENTRY.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MAPINT32FLOATENTRY.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MAPINT32DOUBLEENTRY.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MAPBOOLBOOLENTRY.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MAPSTRINGSTRINGENTRY.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MAPSTRINGBYTESENTRY.containing_type = _TESTALLTYPESPROTO2
# Map entries whose 'value' field is itself a message or enum type.
_TESTALLTYPESPROTO2_MAPSTRINGNESTEDMESSAGEENTRY.fields_by_name['value'].message_type = _TESTALLTYPESPROTO2_NESTEDMESSAGE
_TESTALLTYPESPROTO2_MAPSTRINGNESTEDMESSAGEENTRY.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MAPSTRINGFOREIGNMESSAGEENTRY.fields_by_name['value'].message_type = _FOREIGNMESSAGEPROTO2
_TESTALLTYPESPROTO2_MAPSTRINGFOREIGNMESSAGEENTRY.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MAPSTRINGNESTEDENUMENTRY.fields_by_name['value'].enum_type = _TESTALLTYPESPROTO2_NESTEDENUM
_TESTALLTYPESPROTO2_MAPSTRINGNESTEDENUMENTRY.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MAPSTRINGFOREIGNENUMENTRY.fields_by_name['value'].enum_type = _FOREIGNENUMPROTO2
_TESTALLTYPESPROTO2_MAPSTRINGFOREIGNENUMENTRY.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_DATA.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MESSAGESETCORRECT.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MESSAGESETCORRECTEXTENSION1.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MESSAGESETCORRECTEXTENSION2.containing_type = _TESTALLTYPESPROTO2
# Resolve message/enum types of TestAllTypesProto2's own fields.
_TESTALLTYPESPROTO2.fields_by_name['optional_nested_message'].message_type = _TESTALLTYPESPROTO2_NESTEDMESSAGE
_TESTALLTYPESPROTO2.fields_by_name['optional_foreign_message'].message_type = _FOREIGNMESSAGEPROTO2
_TESTALLTYPESPROTO2.fields_by_name['optional_nested_enum'].enum_type = _TESTALLTYPESPROTO2_NESTEDENUM
_TESTALLTYPESPROTO2.fields_by_name['optional_foreign_enum'].enum_type = _FOREIGNENUMPROTO2
_TESTALLTYPESPROTO2.fields_by_name['recursive_message'].message_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2.fields_by_name['repeated_nested_message'].message_type = _TESTALLTYPESPROTO2_NESTEDMESSAGE
_TESTALLTYPESPROTO2.fields_by_name['repeated_foreign_message'].message_type = _FOREIGNMESSAGEPROTO2
_TESTALLTYPESPROTO2.fields_by_name['repeated_nested_enum'].enum_type = _TESTALLTYPESPROTO2_NESTEDENUM
_TESTALLTYPESPROTO2.fields_by_name['repeated_foreign_enum'].enum_type = _FOREIGNENUMPROTO2
_TESTALLTYPESPROTO2.fields_by_name['packed_nested_enum'].enum_type = _TESTALLTYPESPROTO2_NESTEDENUM
_TESTALLTYPESPROTO2.fields_by_name['unpacked_nested_enum'].enum_type = _TESTALLTYPESPROTO2_NESTEDENUM
# Map fields point at their synthetic map-entry message descriptors.
_TESTALLTYPESPROTO2.fields_by_name['map_int32_int32'].message_type = _TESTALLTYPESPROTO2_MAPINT32INT32ENTRY
_TESTALLTYPESPROTO2.fields_by_name['map_int64_int64'].message_type = _TESTALLTYPESPROTO2_MAPINT64INT64ENTRY
_TESTALLTYPESPROTO2.fields_by_name['map_uint32_uint32'].message_type = _TESTALLTYPESPROTO2_MAPUINT32UINT32ENTRY
_TESTALLTYPESPROTO2.fields_by_name['map_uint64_uint64'].message_type = _TESTALLTYPESPROTO2_MAPUINT64UINT64ENTRY
_TESTALLTYPESPROTO2.fields_by_name['map_sint32_sint32'].message_type = _TESTALLTYPESPROTO2_MAPSINT32SINT32ENTRY
_TESTALLTYPESPROTO2.fields_by_name['map_sint64_sint64'].message_type = _TESTALLTYPESPROTO2_MAPSINT64SINT64ENTRY
_TESTALLTYPESPROTO2.fields_by_name['map_fixed32_fixed32'].message_type = _TESTALLTYPESPROTO2_MAPFIXED32FIXED32ENTRY
_TESTALLTYPESPROTO2.fields_by_name['map_fixed64_fixed64'].message_type = _TESTALLTYPESPROTO2_MAPFIXED64FIXED64ENTRY
_TESTALLTYPESPROTO2.fields_by_name['map_sfixed32_sfixed32'].message_type = _TESTALLTYPESPROTO2_MAPSFIXED32SFIXED32ENTRY
_TESTALLTYPESPROTO2.fields_by_name['map_sfixed64_sfixed64'].message_type = _TESTALLTYPESPROTO2_MAPSFIXED64SFIXED64ENTRY
_TESTALLTYPESPROTO2.fields_by_name['map_int32_float'].message_type = _TESTALLTYPESPROTO2_MAPINT32FLOATENTRY
_TESTALLTYPESPROTO2.fields_by_name['map_int32_double'].message_type = _TESTALLTYPESPROTO2_MAPINT32DOUBLEENTRY
_TESTALLTYPESPROTO2.fields_by_name['map_bool_bool'].message_type = _TESTALLTYPESPROTO2_MAPBOOLBOOLENTRY
_TESTALLTYPESPROTO2.fields_by_name['map_string_string'].message_type = _TESTALLTYPESPROTO2_MAPSTRINGSTRINGENTRY
_TESTALLTYPESPROTO2.fields_by_name['map_string_bytes'].message_type = _TESTALLTYPESPROTO2_MAPSTRINGBYTESENTRY
_TESTALLTYPESPROTO2.fields_by_name['map_string_nested_message'].message_type = _TESTALLTYPESPROTO2_MAPSTRINGNESTEDMESSAGEENTRY
_TESTALLTYPESPROTO2.fields_by_name['map_string_foreign_message'].message_type = _TESTALLTYPESPROTO2_MAPSTRINGFOREIGNMESSAGEENTRY
_TESTALLTYPESPROTO2.fields_by_name['map_string_nested_enum'].message_type = _TESTALLTYPESPROTO2_MAPSTRINGNESTEDENUMENTRY
_TESTALLTYPESPROTO2.fields_by_name['map_string_foreign_enum'].message_type = _TESTALLTYPESPROTO2_MAPSTRINGFOREIGNENUMENTRY
_TESTALLTYPESPROTO2.fields_by_name['oneof_nested_message'].message_type = _TESTALLTYPESPROTO2_NESTEDMESSAGE
_TESTALLTYPESPROTO2.fields_by_name['oneof_enum'].enum_type = _TESTALLTYPESPROTO2_NESTEDENUM
_TESTALLTYPESPROTO2.fields_by_name['data'].message_type = _TESTALLTYPESPROTO2_DATA
_TESTALLTYPESPROTO2_NESTEDENUM.containing_type = _TESTALLTYPESPROTO2
# Wire each member field into the 'oneof_field' oneof: append it to the
# oneof's field list and set its containing_oneof back-reference.  The
# append order here defines the oneof's field order.
_TESTALLTYPESPROTO2.oneofs_by_name['oneof_field'].fields.append(
  _TESTALLTYPESPROTO2.fields_by_name['oneof_uint32'])
_TESTALLTYPESPROTO2.fields_by_name['oneof_uint32'].containing_oneof = _TESTALLTYPESPROTO2.oneofs_by_name['oneof_field']
_TESTALLTYPESPROTO2.oneofs_by_name['oneof_field'].fields.append(
  _TESTALLTYPESPROTO2.fields_by_name['oneof_nested_message'])
_TESTALLTYPESPROTO2.fields_by_name['oneof_nested_message'].containing_oneof = _TESTALLTYPESPROTO2.oneofs_by_name['oneof_field']
_TESTALLTYPESPROTO2.oneofs_by_name['oneof_field'].fields.append(
  _TESTALLTYPESPROTO2.fields_by_name['oneof_string'])
_TESTALLTYPESPROTO2.fields_by_name['oneof_string'].containing_oneof = _TESTALLTYPESPROTO2.oneofs_by_name['oneof_field']
_TESTALLTYPESPROTO2.oneofs_by_name['oneof_field'].fields.append(
  _TESTALLTYPESPROTO2.fields_by_name['oneof_bytes'])
_TESTALLTYPESPROTO2.fields_by_name['oneof_bytes'].containing_oneof = _TESTALLTYPESPROTO2.oneofs_by_name['oneof_field']
_TESTALLTYPESPROTO2.oneofs_by_name['oneof_field'].fields.append(
  _TESTALLTYPESPROTO2.fields_by_name['oneof_bool'])
_TESTALLTYPESPROTO2.fields_by_name['oneof_bool'].containing_oneof = _TESTALLTYPESPROTO2.oneofs_by_name['oneof_field']
_TESTALLTYPESPROTO2.oneofs_by_name['oneof_field'].fields.append(
  _TESTALLTYPESPROTO2.fields_by_name['oneof_uint64'])
_TESTALLTYPESPROTO2.fields_by_name['oneof_uint64'].containing_oneof = _TESTALLTYPESPROTO2.oneofs_by_name['oneof_field']
_TESTALLTYPESPROTO2.oneofs_by_name['oneof_field'].fields.append(
  _TESTALLTYPESPROTO2.fields_by_name['oneof_float'])
_TESTALLTYPESPROTO2.fields_by_name['oneof_float'].containing_oneof = _TESTALLTYPESPROTO2.oneofs_by_name['oneof_field']
_TESTALLTYPESPROTO2.oneofs_by_name['oneof_field'].fields.append(
  _TESTALLTYPESPROTO2.fields_by_name['oneof_double'])
_TESTALLTYPESPROTO2.fields_by_name['oneof_double'].containing_oneof = _TESTALLTYPESPROTO2.oneofs_by_name['oneof_field']
_TESTALLTYPESPROTO2.oneofs_by_name['oneof_field'].fields.append(
  _TESTALLTYPESPROTO2.fields_by_name['oneof_enum'])
_TESTALLTYPESPROTO2.fields_by_name['oneof_enum'].containing_oneof = _TESTALLTYPESPROTO2.oneofs_by_name['oneof_field']
# UnknownToTestAllTypes wiring: group back-link and message-typed fields.
_UNKNOWNTOTESTALLTYPES_OPTIONALGROUP.containing_type = _UNKNOWNTOTESTALLTYPES
_UNKNOWNTOTESTALLTYPES.fields_by_name['nested_message'].message_type = _FOREIGNMESSAGEPROTO2
_UNKNOWNTOTESTALLTYPES.fields_by_name['optionalgroup'].message_type = _UNKNOWNTOTESTALLTYPES_OPTIONALGROUP
# Register the finished top-level descriptors with the file descriptor, then
# register the file with the default symbol database.
DESCRIPTOR.message_types_by_name['TestAllTypesProto2'] = _TESTALLTYPESPROTO2
DESCRIPTOR.message_types_by_name['ForeignMessageProto2'] = _FOREIGNMESSAGEPROTO2
DESCRIPTOR.message_types_by_name['UnknownToTestAllTypes'] = _UNKNOWNTOTESTALLTYPES
DESCRIPTOR.enum_types_by_name['ForeignEnumProto2'] = _FOREIGNENUMPROTO2
DESCRIPTOR.extensions_by_name['extension_int32'] = extension_int32
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
TestAllTypesProto2 = _reflection.GeneratedProtocolMessageType('TestAllTypesProto2', (_message.Message,), {
'NestedMessage' : _reflection.GeneratedProtocolMessageType('NestedMessage', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_NESTEDMESSAGE,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.NestedMessage)
})
,
'MapInt32Int32Entry' : _reflection.GeneratedProtocolMessageType('MapInt32Int32Entry', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MAPINT32INT32ENTRY,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MapInt32Int32Entry)
})
,
'MapInt64Int64Entry' : _reflection.GeneratedProtocolMessageType('MapInt64Int64Entry', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MAPINT64INT64ENTRY,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MapInt64Int64Entry)
})
,
'MapUint32Uint32Entry' : _reflection.GeneratedProtocolMessageType('MapUint32Uint32Entry', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MAPUINT32UINT32ENTRY,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MapUint32Uint32Entry)
})
,
'MapUint64Uint64Entry' : _reflection.GeneratedProtocolMessageType('MapUint64Uint64Entry', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MAPUINT64UINT64ENTRY,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MapUint64Uint64Entry)
})
,
'MapSint32Sint32Entry' : _reflection.GeneratedProtocolMessageType('MapSint32Sint32Entry', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MAPSINT32SINT32ENTRY,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MapSint32Sint32Entry)
})
,
'MapSint64Sint64Entry' : _reflection.GeneratedProtocolMessageType('MapSint64Sint64Entry', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MAPSINT64SINT64ENTRY,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MapSint64Sint64Entry)
})
,
'MapFixed32Fixed32Entry' : _reflection.GeneratedProtocolMessageType('MapFixed32Fixed32Entry', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MAPFIXED32FIXED32ENTRY,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MapFixed32Fixed32Entry)
})
,
'MapFixed64Fixed64Entry' : _reflection.GeneratedProtocolMessageType('MapFixed64Fixed64Entry', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MAPFIXED64FIXED64ENTRY,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MapFixed64Fixed64Entry)
})
,
'MapSfixed32Sfixed32Entry' : _reflection.GeneratedProtocolMessageType('MapSfixed32Sfixed32Entry', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MAPSFIXED32SFIXED32ENTRY,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MapSfixed32Sfixed32Entry)
})
,
'MapSfixed64Sfixed64Entry' : _reflection.GeneratedProtocolMessageType('MapSfixed64Sfixed64Entry', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MAPSFIXED64SFIXED64ENTRY,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MapSfixed64Sfixed64Entry)
})
,
'MapInt32FloatEntry' : _reflection.GeneratedProtocolMessageType('MapInt32FloatEntry', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MAPINT32FLOATENTRY,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MapInt32FloatEntry)
})
,
'MapInt32DoubleEntry' : _reflection.GeneratedProtocolMessageType('MapInt32DoubleEntry', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MAPINT32DOUBLEENTRY,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MapInt32DoubleEntry)
})
,
'MapBoolBoolEntry' : _reflection.GeneratedProtocolMessageType('MapBoolBoolEntry', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MAPBOOLBOOLENTRY,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MapBoolBoolEntry)
})
,
'MapStringStringEntry' : _reflection.GeneratedProtocolMessageType('MapStringStringEntry', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MAPSTRINGSTRINGENTRY,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MapStringStringEntry)
})
,
'MapStringBytesEntry' : _reflection.GeneratedProtocolMessageType('MapStringBytesEntry', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MAPSTRINGBYTESENTRY,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MapStringBytesEntry)
})
,
'MapStringNestedMessageEntry' : _reflection.GeneratedProtocolMessageType('MapStringNestedMessageEntry', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MAPSTRINGNESTEDMESSAGEENTRY,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MapStringNestedMessageEntry)
})
,
'MapStringForeignMessageEntry' : _reflection.GeneratedProtocolMessageType('MapStringForeignMessageEntry', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MAPSTRINGFOREIGNMESSAGEENTRY,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MapStringForeignMessageEntry)
})
,
'MapStringNestedEnumEntry' : _reflection.GeneratedProtocolMessageType('MapStringNestedEnumEntry', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MAPSTRINGNESTEDENUMENTRY,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MapStringNestedEnumEntry)
})
,
'MapStringForeignEnumEntry' : _reflection.GeneratedProtocolMessageType('MapStringForeignEnumEntry', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MAPSTRINGFOREIGNENUMENTRY,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MapStringForeignEnumEntry)
})
,
'Data' : _reflection.GeneratedProtocolMessageType('Data', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_DATA,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.Data)
})
,
'MessageSetCorrect' : _reflection.GeneratedProtocolMessageType('MessageSetCorrect', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MESSAGESETCORRECT,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MessageSetCorrect)
})
,
'MessageSetCorrectExtension1' : _reflection.GeneratedProtocolMessageType('MessageSetCorrectExtension1', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MESSAGESETCORRECTEXTENSION1,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MessageSetCorrectExtension1)
})
,
'MessageSetCorrectExtension2' : _reflection.GeneratedProtocolMessageType('MessageSetCorrectExtension2', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MESSAGESETCORRECTEXTENSION2,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MessageSetCorrectExtension2)
})
,
'DESCRIPTOR' : _TESTALLTYPESPROTO2,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2)
})
# Register every generated message class (including nested map-entry and
# extension helper types) with the symbol database so they can be resolved
# by full name at runtime.
_sym_db.RegisterMessage(TestAllTypesProto2)
_sym_db.RegisterMessage(TestAllTypesProto2.NestedMessage)
_sym_db.RegisterMessage(TestAllTypesProto2.MapInt32Int32Entry)
_sym_db.RegisterMessage(TestAllTypesProto2.MapInt64Int64Entry)
_sym_db.RegisterMessage(TestAllTypesProto2.MapUint32Uint32Entry)
_sym_db.RegisterMessage(TestAllTypesProto2.MapUint64Uint64Entry)
_sym_db.RegisterMessage(TestAllTypesProto2.MapSint32Sint32Entry)
_sym_db.RegisterMessage(TestAllTypesProto2.MapSint64Sint64Entry)
_sym_db.RegisterMessage(TestAllTypesProto2.MapFixed32Fixed32Entry)
_sym_db.RegisterMessage(TestAllTypesProto2.MapFixed64Fixed64Entry)
_sym_db.RegisterMessage(TestAllTypesProto2.MapSfixed32Sfixed32Entry)
_sym_db.RegisterMessage(TestAllTypesProto2.MapSfixed64Sfixed64Entry)
_sym_db.RegisterMessage(TestAllTypesProto2.MapInt32FloatEntry)
_sym_db.RegisterMessage(TestAllTypesProto2.MapInt32DoubleEntry)
_sym_db.RegisterMessage(TestAllTypesProto2.MapBoolBoolEntry)
_sym_db.RegisterMessage(TestAllTypesProto2.MapStringStringEntry)
_sym_db.RegisterMessage(TestAllTypesProto2.MapStringBytesEntry)
_sym_db.RegisterMessage(TestAllTypesProto2.MapStringNestedMessageEntry)
_sym_db.RegisterMessage(TestAllTypesProto2.MapStringForeignMessageEntry)
_sym_db.RegisterMessage(TestAllTypesProto2.MapStringNestedEnumEntry)
_sym_db.RegisterMessage(TestAllTypesProto2.MapStringForeignEnumEntry)
_sym_db.RegisterMessage(TestAllTypesProto2.Data)
_sym_db.RegisterMessage(TestAllTypesProto2.MessageSetCorrect)
_sym_db.RegisterMessage(TestAllTypesProto2.MessageSetCorrectExtension1)
_sym_db.RegisterMessage(TestAllTypesProto2.MessageSetCorrectExtension2)
ForeignMessageProto2 = _reflection.GeneratedProtocolMessageType('ForeignMessageProto2', (_message.Message,), {
  'DESCRIPTOR' : _FOREIGNMESSAGEPROTO2,
  '__module__' : 'google.protobuf.test_messages_proto2_pb2'
  # @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.ForeignMessageProto2)
  })
_sym_db.RegisterMessage(ForeignMessageProto2)
UnknownToTestAllTypes = _reflection.GeneratedProtocolMessageType('UnknownToTestAllTypes', (_message.Message,), {
  'OptionalGroup' : _reflection.GeneratedProtocolMessageType('OptionalGroup', (_message.Message,), {
    'DESCRIPTOR' : _UNKNOWNTOTESTALLTYPES_OPTIONALGROUP,
    '__module__' : 'google.protobuf.test_messages_proto2_pb2'
    # @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.UnknownToTestAllTypes.OptionalGroup)
    })
  ,
  'DESCRIPTOR' : _UNKNOWNTOTESTALLTYPES,
  '__module__' : 'google.protobuf.test_messages_proto2_pb2'
  # @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.UnknownToTestAllTypes)
  })
_sym_db.RegisterMessage(UnknownToTestAllTypes)
_sym_db.RegisterMessage(UnknownToTestAllTypes.OptionalGroup)
# Wire up the MessageSet-style extensions declared in the .proto file.
TestAllTypesProto2.RegisterExtension(extension_int32)
_TESTALLTYPESPROTO2_MESSAGESETCORRECTEXTENSION1.extensions_by_name['message_set_extension'].message_type = _TESTALLTYPESPROTO2_MESSAGESETCORRECTEXTENSION1
TestAllTypesProto2.MessageSetCorrect.RegisterExtension(_TESTALLTYPESPROTO2_MESSAGESETCORRECTEXTENSION1.extensions_by_name['message_set_extension'])
_TESTALLTYPESPROTO2_MESSAGESETCORRECTEXTENSION2.extensions_by_name['message_set_extension'].message_type = _TESTALLTYPESPROTO2_MESSAGESETCORRECTEXTENSION2
TestAllTypesProto2.MessageSetCorrect.RegisterExtension(_TESTALLTYPESPROTO2_MESSAGESETCORRECTEXTENSION2.extensions_by_name['message_set_extension'])
# Reset serialized options; the C++-backed implementation re-derives them
# from the descriptor pool.
DESCRIPTOR._options = None
_TESTALLTYPESPROTO2_MAPINT32INT32ENTRY._options = None
_TESTALLTYPESPROTO2_MAPINT64INT64ENTRY._options = None
_TESTALLTYPESPROTO2_MAPUINT32UINT32ENTRY._options = None
_TESTALLTYPESPROTO2_MAPUINT64UINT64ENTRY._options = None
_TESTALLTYPESPROTO2_MAPSINT32SINT32ENTRY._options = None
_TESTALLTYPESPROTO2_MAPSINT64SINT64ENTRY._options = None
_TESTALLTYPESPROTO2_MAPFIXED32FIXED32ENTRY._options = None
_TESTALLTYPESPROTO2_MAPFIXED64FIXED64ENTRY._options = None
_TESTALLTYPESPROTO2_MAPSFIXED32SFIXED32ENTRY._options = None
_TESTALLTYPESPROTO2_MAPSFIXED64SFIXED64ENTRY._options = None
_TESTALLTYPESPROTO2_MAPINT32FLOATENTRY._options = None
_TESTALLTYPESPROTO2_MAPINT32DOUBLEENTRY._options = None
_TESTALLTYPESPROTO2_MAPBOOLBOOLENTRY._options = None
_TESTALLTYPESPROTO2_MAPSTRINGSTRINGENTRY._options = None
_TESTALLTYPESPROTO2_MAPSTRINGBYTESENTRY._options = None
_TESTALLTYPESPROTO2_MAPSTRINGNESTEDMESSAGEENTRY._options = None
_TESTALLTYPESPROTO2_MAPSTRINGFOREIGNMESSAGEENTRY._options = None
_TESTALLTYPESPROTO2_MAPSTRINGNESTEDENUMENTRY._options = None
_TESTALLTYPESPROTO2_MAPSTRINGFOREIGNENUMENTRY._options = None
_TESTALLTYPESPROTO2_MESSAGESETCORRECT._options = None
_TESTALLTYPESPROTO2.fields_by_name['optional_string_piece']._options = None
_TESTALLTYPESPROTO2.fields_by_name['optional_cord']._options = None
_TESTALLTYPESPROTO2.fields_by_name['repeated_string_piece']._options = None
_TESTALLTYPESPROTO2.fields_by_name['repeated_cord']._options = None
_TESTALLTYPESPROTO2.fields_by_name['packed_int32']._options = None
_TESTALLTYPESPROTO2.fields_by_name['packed_int64']._options = None
_TESTALLTYPESPROTO2.fields_by_name['packed_uint32']._options = None
_TESTALLTYPESPROTO2.fields_by_name['packed_uint64']._options = None
_TESTALLTYPESPROTO2.fields_by_name['packed_sint32']._options = None
_TESTALLTYPESPROTO2.fields_by_name['packed_sint64']._options = None
_TESTALLTYPESPROTO2.fields_by_name['packed_fixed32']._options = None
_TESTALLTYPESPROTO2.fields_by_name['packed_fixed64']._options = None
_TESTALLTYPESPROTO2.fields_by_name['packed_sfixed32']._options = None
_TESTALLTYPESPROTO2.fields_by_name['packed_sfixed64']._options = None
_TESTALLTYPESPROTO2.fields_by_name['packed_float']._options = None
_TESTALLTYPESPROTO2.fields_by_name['packed_double']._options = None
_TESTALLTYPESPROTO2.fields_by_name['packed_bool']._options = None
_TESTALLTYPESPROTO2.fields_by_name['packed_nested_enum']._options = None
_TESTALLTYPESPROTO2.fields_by_name['unpacked_int32']._options = None
_TESTALLTYPESPROTO2.fields_by_name['unpacked_int64']._options = None
_TESTALLTYPESPROTO2.fields_by_name['unpacked_uint32']._options = None
_TESTALLTYPESPROTO2.fields_by_name['unpacked_uint64']._options = None
_TESTALLTYPESPROTO2.fields_by_name['unpacked_sint32']._options = None
_TESTALLTYPESPROTO2.fields_by_name['unpacked_sint64']._options = None
_TESTALLTYPESPROTO2.fields_by_name['unpacked_fixed32']._options = None
_TESTALLTYPESPROTO2.fields_by_name['unpacked_fixed64']._options = None
_TESTALLTYPESPROTO2.fields_by_name['unpacked_sfixed32']._options = None
_TESTALLTYPESPROTO2.fields_by_name['unpacked_sfixed64']._options = None
_TESTALLTYPESPROTO2.fields_by_name['unpacked_float']._options = None
_TESTALLTYPESPROTO2.fields_by_name['unpacked_double']._options = None
_TESTALLTYPESPROTO2.fields_by_name['unpacked_bool']._options = None
_TESTALLTYPESPROTO2.fields_by_name['unpacked_nested_enum']._options = None
# @@protoc_insertion_point(module_scope)
| apache-2.0 |
MaxVanDeursen/tribler | Tribler/Test/Core/Modules/RestApi/test_torrents_endpoint.py | 2 | 7299 | import json
import time
from twisted.internet.defer import inlineCallbacks
from Tribler.Core.TorrentChecker.torrent_checker import TorrentChecker
from Tribler.Core.Utilities.network_utils import get_random_port
from Tribler.Core.simpledefs import NTFY_CHANNELCAST, NTFY_TORRENTS
from Tribler.Test.Core.Modules.RestApi.base_api_test import AbstractApiTest
from Tribler.Test.Core.base_test import MockObject
from Tribler.Test.twisted_thread import deferred
from Tribler.Test.util.Tracker.HTTPTracker import HTTPTracker
from Tribler.Test.util.Tracker.UDPTracker import UDPTracker
from Tribler.dispersy.util import blocking_call_on_reactor_thread
class TestTorrentsEndpoint(AbstractApiTest):
    """REST API tests for the ``torrents/random`` endpoint."""
    @deferred(timeout=10)
    def test_get_random_torrents(self):
        """
        Testing whether random torrents are returned if random torrents are fetched
        """
        def verify_torrents(results):
            # Response body is JSON; only two torrents are expected back.
            json_results = json.loads(results)
            self.assertEqual(len(json_results['torrents']), 2)
        # Fake a local channel so torrents can be inserted into the database.
        channel_db_handler = self.session.open_dbhandler(NTFY_CHANNELCAST)
        channel_db_handler._get_my_dispersy_cid = lambda: "myfakedispersyid"
        channel_id = channel_db_handler.on_channel_from_dispersy('rand', 42, 'Fancy channel', 'Fancy description')
        # NOTE(review): three of the five entries are named "badterm" and are
        # expected to be excluded from the response -- presumably the endpoint
        # filters such names; confirm against the endpoint implementation.
        torrent_list = [
            [channel_id, 1, 1, ('a' * 40).decode('hex'), 1460000000, "ubuntu-torrent.iso", [['file1.txt', 42]], []],
            [channel_id, 2, 2, ('b' * 40).decode('hex'), 1470000000, "ubuntu2-torrent.iso", [['file2.txt', 42]], []],
            [channel_id, 3, 3, ('c' * 40).decode('hex'), 1480000000, "badterm", [['file1.txt', 42]], []],
            [channel_id, 4, 4, ('d' * 40).decode('hex'), 1490000000, "badterm", [['file2.txt', 42]], []],
            [channel_id, 5, 5, ('e' * 40).decode('hex'), 1500000000, "badterm", [['file3.txt', 42]], []],
        ]
        channel_db_handler.on_torrents_from_dispersy(torrent_list)
        self.should_check_equality = False
        return self.do_request('torrents/random?limit=5', expected_code=200).addCallback(verify_torrents)
    @deferred(timeout=10)
    def test_random_torrents_negative(self):
        """
        Testing whether error 400 is returned when a negative limit is passed to the request to fetch random torrents
        """
        expected_json = {"error": "the limit parameter must be a positive number"}
        return self.do_request('torrents/random?limit=-5', expected_code=400, expected_json=expected_json)
class TestTorrentTrackersEndpoint(AbstractApiTest):
    """REST API tests for the ``torrents/<infohash>/trackers`` endpoint."""
    @deferred(timeout=10)
    def test_get_torrent_trackers_404(self):
        """
        Testing whether we get an error 404 if we are fetching the trackers of a non-existent torrent
        """
        self.should_check_equality = False
        # 'a' * 40 is a valid-looking hex infohash that is not in the database.
        return self.do_request('torrents/%s/trackers' % ('a' * 40), expected_code=404)
    @deferred(timeout=10)
    def test_get_torrent_trackers(self):
        """
        Testing whether fetching the trackers of a torrent present in the database is successful
        """
        # Insert a torrent with two known trackers directly into the database.
        torrent_db = self.session.open_dbhandler(NTFY_TORRENTS)
        torrent_db.addExternalTorrentNoDef('a' * 20, 'ubuntu-torrent.iso', [['file1.txt', 42]],
                                           ('udp://trackerurl.com:1234/announce',
                                            'http://trackerurl.com:4567/announce'), time.time())
        def verify_trackers(trackers):
            self.assertIn('DHT', trackers)
            # NOTE(review): the UDP tracker is asserted without its /announce
            # suffix while the HTTP tracker keeps it -- presumably a URL
            # normalisation done elsewhere; confirm if intentional.
            self.assertIn('udp://trackerurl.com:1234', trackers)
            self.assertIn('http://trackerurl.com:4567/announce', trackers)
        self.should_check_equality = False
        return self.do_request('torrents/%s/trackers' % ('a' * 20).encode('hex'), expected_code=200)\
            .addCallback(verify_trackers)
class TestTorrentHealthEndpoint(AbstractApiTest):
    """REST API tests for the ``torrents/<infohash>/health`` endpoint."""
    @blocking_call_on_reactor_thread
    @inlineCallbacks
    def setUp(self, autoload_discovery=True):
        yield super(TestTorrentHealthEndpoint, self).setUp(autoload_discovery=autoload_discovery)
        # Create (but do not yet start) a fake UDP and HTTP tracker on random
        # free ports so the torrent checker has something to query.
        min_base_port, max_base_port = self.get_bucket_range_port()
        self.udp_port = get_random_port(min_port=min_base_port, max_port=max_base_port)
        self.udp_tracker = UDPTracker(self.udp_port)
        self.http_port = get_random_port(min_port=min_base_port, max_port=max_base_port)
        self.http_tracker = HTTPTracker(self.http_port)
    @blocking_call_on_reactor_thread
    @inlineCallbacks
    def tearDown(self, annotate=True):
        # Drop the mocked libtorrent manager and stop both fake trackers.
        self.session.lm.ltmgr = None
        yield self.udp_tracker.stop()
        yield self.http_tracker.stop()
        yield super(TestTorrentHealthEndpoint, self).tearDown(annotate=annotate)
    @deferred(timeout=20)
    @inlineCallbacks
    def test_check_torrent_health(self):
        """
        Test the endpoint to fetch the health of a torrent
        """
        torrent_db = self.session.open_dbhandler(NTFY_TORRENTS)
        torrent_db.addExternalTorrentNoDef('a' * 20, 'ubuntu-torrent.iso', [['file1.txt', 42]],
                                           ('udp://localhost:%s/announce' % self.udp_port,
                                            'http://localhost:%s/announce' % self.http_port), time.time())
        url = 'torrents/%s/health?timeout=10&refresh=1' % ('a' * 20).encode('hex')
        self.should_check_equality = False
        yield self.do_request(url, expected_code=400, request_type='GET')  # No torrent checker
        def call_cb(infohash, callback, **_):
            # Fake DHT metainfo lookup result: 1 seeder, 2 leechers.
            callback({"seeders": 1, "leechers": 2})
        # Initialize the torrent checker
        self.session.lm.torrent_checker = TorrentChecker(self.session)
        self.session.lm.torrent_checker.initialize()
        self.session.lm.ltmgr = MockObject()
        self.session.lm.ltmgr.get_metainfo = call_cb
        # Unknown infohash should yield a 404.
        yield self.do_request('torrents/%s/health' % ('f' * 40), expected_code=404, request_type='GET')
        def verify_response_no_trackers(response):
            json_response = json.loads(response)
            self.assertTrue('DHT' in json_response['health'])
        def verify_response_with_trackers(response):
            json_response = json.loads(response)
            expected_dict = {u"health":
                             {u"DHT":
                              {u"leechers": 2, u"seeders": 1, u"infohash": (u'a' * 20).encode('hex')},
                              u"udp://localhost:%s" % self.udp_port:
                              {u"leechers": 20, u"seeders": 10, u"infohash": (u'a' * 20).encode('hex')},
                              u"http://localhost:%s/announce" % self.http_port:
                              {u"leechers": 30, u"seeders": 20, u"infohash": (u'a' * 20).encode('hex')}}}
            self.assertDictEqual(json_response, expected_dict)
        # The fake trackers are not running yet, so only DHT data is expected.
        yield self.do_request(url, expected_code=200, request_type='GET').addCallback(verify_response_no_trackers)
        self.udp_tracker.start()
        self.udp_tracker.tracker_info.add_info_about_infohash('a' * 20, 10, 20)
        self.http_tracker.start()
        self.http_tracker.tracker_info.add_info_about_infohash('a' * 20, 20, 30)
        yield self.do_request(url, expected_code=200, request_type='GET').addCallback(verify_response_with_trackers)
| lgpl-3.0 |
romain-dartigues/ansible | lib/ansible/modules/packaging/os/swupd.py | 5 | 8754 | #!/usr/bin/python
# (c) 2017, Alberto Murillo <alberto.murillo.silva@intel.com>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: swupd
short_description: Manages updates and bundles in ClearLinux systems.
description:
- Manages updates and bundles with the swupd bundle manager, which is used by the
Clear Linux Project for Intel Architecture.
version_added: "2.3"
author: Alberto Murillo (@albertomurillo)
options:
contenturl:
description:
- URL pointing to the contents of available bundles.
If not specified, the contents are retrieved from clearlinux.org.
format:
description:
- The format suffix for version file downloads. For example [1,2,3,staging,etc].
If not specified, the default format is used.
manifest:
description:
      - The manifest contains information about the bundles at a certain version of the OS.
Specify a Manifest version to verify against that version or leave unspecified to
verify against the current version.
aliases: [release, version]
name:
description:
- Name of the (I)bundle to install or remove.
aliases: [bundle]
state:
description:
- Indicates the desired (I)bundle state. C(present) ensures the bundle
is installed while C(absent) ensures the (I)bundle is not installed.
default: present
choices: [present, absent]
update:
description:
- Updates the OS to the latest version.
type: bool
url:
description:
- Overrides both I(contenturl) and I(versionurl).
verify:
description:
- Verify content for OS version.
type: bool
versionurl:
description:
- URL for version string download.
'''
EXAMPLES = '''
- name: Update the OS to the latest version
swupd:
update: yes
- name: Installs the "foo" bundle
swupd:
name: foo
state: present
- name: Removes the "foo" bundle
swupd:
name: foo
state: absent
- name: Check integrity of filesystem
swupd:
verify: yes
- name: Downgrade OS to release 12920
swupd:
verify: yes
manifest: 12920
'''
RETURN = '''
stdout:
description: stdout of swupd
returned: always
type: string
stderr:
description: stderr of swupd
returned: always
type: string
'''
import os
from ansible.module_utils.basic import AnsibleModule
class Swupd(object):
    """Wrapper around the ``swupd`` command line tool of Clear Linux.

    Builds swupd command lines from the module parameters, runs them, and
    records the outcome in ``changed``/``failed``/``msg``/``rc``/``stdout``/
    ``stderr`` so that ``main()`` can report back to Ansible.
    """

    # Fragments of swupd output used to detect what `swupd verify` found/did.
    FILES_NOT_MATCH = "files did not match"
    FILES_REPLACED = "missing files were replaced"
    FILES_FIXED = "files were fixed"
    FILES_DELETED = "files were deleted"

    def __init__(self, module):
        # Fail early if the swupd binary is not available on the target.
        self.module = module
        self.swupd_cmd = module.get_bin_path("swupd", False)

        if not self.swupd_cmd:
            module.fail_json(msg="Could not find swupd.")

        # Expose every module parameter as an instance attribute (contenturl,
        # format, manifest, name, state, update, url, verify, versionurl).
        for key in module.params.keys():
            setattr(self, key, module.params[key])

        # Initialize return values
        self.changed = False
        self.failed = False
        self.msg = None
        self.rc = None
        self.stderr = ""
        self.stdout = ""

    def _run_cmd(self, cmd):
        """Run *cmd* and store rc/stdout/stderr on the instance."""
        self.rc, self.stdout, self.stderr = self.module.run_command(cmd, check_rc=False)

    def _get_cmd(self, command):
        """Return the full swupd command line for *command*, including the
        global options derived from the module parameters."""
        cmd = "%s %s" % (self.swupd_cmd, command)

        if self.format:
            cmd += " --format=%s" % self.format
        if self.manifest:
            cmd += " --manifest=%s" % self.manifest
        if self.url:
            # --url overrides both --contenturl and --versionurl.
            cmd += " --url=%s" % self.url
        else:
            # `swupd check-update` does not accept --contenturl.
            if self.contenturl and command != "check-update":
                cmd += " --contenturl=%s" % self.contenturl
            if self.versionurl:
                cmd += " --versionurl=%s" % self.versionurl

        return cmd

    def _is_bundle_installed(self, bundle):
        """Return True if *bundle* is installed (its tracking file exists)."""
        try:
            os.stat("/usr/share/clear/bundles/%s" % bundle)
        except OSError:
            return False
        return True

    def _needs_update(self):
        """Return True if `swupd check-update` reports an available update.

        rc 0 means an update is available, rc 1 means the system is up to
        date; any other rc marks the task as failed (and returns None).
        """
        cmd = self._get_cmd("check-update")
        self._run_cmd(cmd)

        if self.rc == 0:
            return True
        if self.rc == 1:
            return False

        self.failed = True
        self.msg = "Failed to check for updates"

    def _needs_verify(self):
        """Return True if `swupd verify` finds filesystem inconsistencies."""
        cmd = self._get_cmd("verify")
        self._run_cmd(cmd)

        if self.rc != 0:
            self.failed = True
            self.msg = "Failed to check for filesystem inconsistencies."

        if self.FILES_NOT_MATCH in self.stdout:
            return True
        return False

    def install_bundle(self, bundle):
        """Installs a bundle with `swupd bundle-add bundle`"""
        if self.module.check_mode:
            self.module.exit_json(changed=not self._is_bundle_installed(bundle))

        if self._is_bundle_installed(bundle):
            self.msg = "Bundle %s is already installed" % bundle
            return

        cmd = self._get_cmd("bundle-add %s" % bundle)
        self._run_cmd(cmd)

        if self.rc == 0:
            self.changed = True
            self.msg = "Bundle %s installed" % bundle
            return

        self.failed = True
        self.msg = "Failed to install bundle %s" % bundle

    def remove_bundle(self, bundle):
        """Removes a bundle with `swupd bundle-remove bundle`"""
        if self.module.check_mode:
            self.module.exit_json(changed=self._is_bundle_installed(bundle))

        if not self._is_bundle_installed(bundle):
            # BUGFIX: the original lacked the `% bundle` interpolation and
            # literally reported "Bundle %s not installed".
            self.msg = "Bundle %s not installed" % bundle
            return

        cmd = self._get_cmd("bundle-remove %s" % bundle)
        self._run_cmd(cmd)

        if self.rc == 0:
            self.changed = True
            self.msg = "Bundle %s removed" % bundle
            return

        self.failed = True
        self.msg = "Failed to remove bundle %s" % bundle

    def update_os(self):
        """Updates the os with `swupd update`"""
        if self.module.check_mode:
            self.module.exit_json(changed=self._needs_update())

        if not self._needs_update():
            self.msg = "There are no updates available"
            return

        cmd = self._get_cmd("update")
        self._run_cmd(cmd)

        if self.rc == 0:
            self.changed = True
            self.msg = "Update successful"
            return

        self.failed = True
        # BUGFIX: previously reported "Failed to check for updates" (a
        # copy/paste from _needs_update) although the update itself failed.
        self.msg = "Failed to update the OS"

    def verify_os(self):
        """Verifies filesystem against specified or current version"""
        if self.module.check_mode:
            self.module.exit_json(changed=self._needs_verify())

        if not self._needs_verify():
            # BUGFIX: grammar -- previously "No files where changed".
            self.msg = "No files were changed"
            return

        cmd = self._get_cmd("verify --fix")
        self._run_cmd(cmd)

        if self.rc == 0 and (self.FILES_REPLACED in self.stdout or self.FILES_FIXED in self.stdout or self.FILES_DELETED in self.stdout):
            self.changed = True
            self.msg = "Fix successful"
            return

        self.failed = True
        self.msg = "Failed to verify the OS"
def main():
    """Module entry point: parse the arguments and dispatch to the matching
    Swupd action (update, verify, or bundle install/removal)."""
    module = AnsibleModule(
        argument_spec=dict(
            contenturl=dict(type="str"),
            format=dict(type="str"),
            manifest=dict(aliases=["release", "version"], type="int"),
            name=dict(aliases=["bundle"], type="str"),
            state=dict(default="present", choices=["present", "absent"], type="str"),
            update=dict(default=False, type="bool"),
            url=dict(type="str"),
            verify=dict(default=False, type="bool"),
            versionurl=dict(type="str"),
        ),
        required_one_of=[["name", "update", "verify"]],
        mutually_exclusive=[["name", "update", "verify"]],
        supports_check_mode=True
    )

    swupd = Swupd(module)
    params = module.params

    # Exactly one of update/verify/name is set (enforced by the spec above).
    if params["update"]:
        swupd.update_os()
    elif params["verify"]:
        swupd.verify_os()
    elif params["state"] == "present":
        swupd.install_bundle(params["name"])
    elif params["state"] == "absent":
        swupd.remove_bundle(params["name"])
    else:
        swupd.failed = True

    result = dict(msg=swupd.msg, stdout=swupd.stdout, stderr=swupd.stderr)
    if swupd.failed:
        module.fail_json(**result)
    else:
        module.exit_json(changed=swupd.changed, **result)
| gpl-3.0 |
DONIKAN/django | django/core/serializers/python.py | 140 | 7685 | """
A Python "serializer". Doesn't do much serializing per se -- just converts to
and from basic Python data types (lists, dicts, strings, etc.). Useful as a basis for
other serializers.
"""
from __future__ import unicode_literals
from collections import OrderedDict
from django.apps import apps
from django.conf import settings
from django.core.serializers import base
from django.db import DEFAULT_DB_ALIAS, models
from django.utils import six
from django.utils.encoding import force_text, is_protected_type
class Serializer(base.Serializer):
    """
    Serializes a QuerySet to basic Python objects (lists, dicts, strings).

    Accumulates one OrderedDict per object in ``self.objects``; ``getvalue()``
    returns that list.
    """
    internal_use_only = True

    def start_serialization(self):
        # Reset per-run state; _current holds the fields of the object
        # currently being serialized.
        self._current = None
        self.objects = []

    def end_serialization(self):
        pass

    def start_object(self, obj):
        self._current = OrderedDict()

    def end_object(self, obj):
        self.objects.append(self.get_dump_object(obj))
        self._current = None

    def get_dump_object(self, obj):
        """Return the dict representation of *obj*: model label, pk (unless a
        natural primary key is used), and the collected fields."""
        model = obj._meta.proxy_for_model if obj._deferred else obj.__class__
        data = OrderedDict([('model', force_text(model._meta))])
        if not self.use_natural_primary_keys or not hasattr(obj, 'natural_key'):
            data["pk"] = force_text(obj._get_pk_val(), strings_only=True)
        data['fields'] = self._current
        return data

    def handle_field(self, obj, field):
        value = field.value_from_object(obj)
        # Protected types (i.e., primitives like None, numbers, dates,
        # and Decimals) are passed through as is. All other values are
        # converted to string first.
        if is_protected_type(value):
            self._current[field.name] = value
        else:
            self._current[field.name] = field.value_to_string(obj)

    def handle_fk_field(self, obj, field):
        # Prefer the related object's natural key when requested and available;
        # otherwise use the raw foreign key attribute value.
        if self.use_natural_foreign_keys and hasattr(field.remote_field.model, 'natural_key'):
            related = getattr(obj, field.name)
            if related:
                value = related.natural_key()
            else:
                value = None
        else:
            value = getattr(obj, field.get_attname())
            if not is_protected_type(value):
                value = field.value_to_string(obj)
        self._current[field.name] = value

    def handle_m2m_field(self, obj, field):
        # Only serialize m2m relations that use an auto-created through table;
        # explicit through models are serialized as models in their own right.
        if field.remote_field.through._meta.auto_created:
            # PEP 8 (E731): use def instead of assigning a lambda.
            if self.use_natural_foreign_keys and hasattr(field.remote_field.model, 'natural_key'):
                def m2m_value(value):
                    return value.natural_key()
            else:
                def m2m_value(value):
                    return force_text(value._get_pk_val(), strings_only=True)
            self._current[field.name] = [m2m_value(related)
                                         for related in getattr(obj, field.name).iterator()]

    def getvalue(self):
        return self.objects
def Deserializer(object_list, **options):
    """
    Deserialize simple Python objects back into Django ORM instances.
    It's expected that you pass the Python objects themselves (instead of a
    stream or a string) to the constructor
    """
    db = options.pop('using', DEFAULT_DB_ALIAS)
    # When True, unknown models and removed fields are silently skipped.
    ignore = options.pop('ignorenonexistent', False)
    for d in object_list:
        # Look up the model and starting build a dict of data for it.
        try:
            Model = _get_model(d["model"])
        except base.DeserializationError:
            if ignore:
                continue
            else:
                raise
        data = {}
        if 'pk' in d:
            # Coerce the serialized pk through the pk field's to_python().
            try:
                data[Model._meta.pk.attname] = Model._meta.pk.to_python(d.get('pk'))
            except Exception as e:
                raise base.DeserializationError.WithData(e, d['model'], d.get('pk'), None)
        # M2M values are collected separately; they can only be assigned
        # after the instance has been saved.
        m2m_data = {}
        field_names = {f.name for f in Model._meta.get_fields()}
        # Handle each field
        for (field_name, field_value) in six.iteritems(d["fields"]):
            if ignore and field_name not in field_names:
                # skip fields no longer on model
                continue
            if isinstance(field_value, str):
                # Python 2: decode byte strings using the configured charset.
                field_value = force_text(
                    field_value, options.get("encoding", settings.DEFAULT_CHARSET), strings_only=True
                )
            field = Model._meta.get_field(field_name)
            # Handle M2M relations
            if field.remote_field and isinstance(field.remote_field, models.ManyToManyRel):
                if hasattr(field.remote_field.model, 'natural_key') or hasattr(field.remote_field.model._default_manager, 'get_by_natural_key'):
                    def m2m_convert(value):
                        # A list/tuple value is a natural key; anything else
                        # is treated as a primary key.
                        if hasattr(value, '__iter__') and not isinstance(value, six.text_type):
                            return field.remote_field.model._default_manager.db_manager(db).get_by_natural_key(*value).pk
                        else:
                            return force_text(field.remote_field.model._meta.pk.to_python(value), strings_only=True)
                else:
                    m2m_convert = lambda v: force_text(field.remote_field.model._meta.pk.to_python(v), strings_only=True)
                try:
                    m2m_data[field.name] = []
                    for pk in field_value:
                        m2m_data[field.name].append(m2m_convert(pk))
                except Exception as e:
                    # NOTE(review): if iterating field_value itself raises,
                    # `pk` is unbound here and this raises NameError -- verify.
                    raise base.DeserializationError.WithData(e, d['model'], d.get('pk'), pk)
            # Handle FK fields
            elif field.remote_field and isinstance(field.remote_field, models.ManyToOneRel):
                if field_value is not None:
                    try:
                        if hasattr(field.remote_field.model._default_manager, 'get_by_natural_key'):
                            if hasattr(field_value, '__iter__') and not isinstance(field_value, six.text_type):
                                obj = field.remote_field.model._default_manager.db_manager(db).get_by_natural_key(*field_value)
                                value = getattr(obj, field.remote_field.field_name)
                                # If this is a natural foreign key to an object that
                                # has a FK/O2O as the foreign key, use the FK value
                                if field.remote_field.model._meta.pk.remote_field:
                                    value = value.pk
                            else:
                                value = field.remote_field.model._meta.get_field(field.remote_field.field_name).to_python(field_value)
                            data[field.attname] = value
                        else:
                            data[field.attname] = field.remote_field.model._meta.get_field(field.remote_field.field_name).to_python(field_value)
                    except Exception as e:
                        raise base.DeserializationError.WithData(e, d['model'], d.get('pk'), field_value)
                else:
                    data[field.attname] = None
            # Handle all other fields
            else:
                try:
                    data[field.name] = field.to_python(field_value)
                except Exception as e:
                    raise base.DeserializationError.WithData(e, d['model'], d.get('pk'), field_value)
        obj = base.build_instance(Model, data, db)
        yield base.DeserializedObject(obj, m2m_data)
def _get_model(model_identifier):
    """Resolve an "app_label.model_name" string to a model class."""
    try:
        model = apps.get_model(model_identifier)
    except (LookupError, TypeError):
        raise base.DeserializationError("Invalid model identifier: '%s'" % model_identifier)
    return model
| bsd-3-clause |
valexandersaulys/prudential_insurance_kaggle | venv/lib/python2.7/site-packages/sklearn/manifold/spectral_embedding_.py | 10 | 19912 | """Spectral Embedding"""
# Author: Gael Varoquaux <gael.varoquaux@normalesup.org>
# Wei LI <kuantkid@gmail.com>
# License: BSD 3 clause
import warnings
import numpy as np
from scipy import sparse
from scipy.linalg import eigh
from scipy.sparse.linalg import lobpcg
from ..base import BaseEstimator
from ..externals import six
from ..utils import check_random_state, check_array, check_symmetric
from ..utils.extmath import _deterministic_vector_sign_flip
from ..utils.graph import graph_laplacian
from ..utils.sparsetools import connected_components
from ..utils.arpack import eigsh
from ..metrics.pairwise import rbf_kernel
from ..neighbors import kneighbors_graph
def _graph_connected_component(graph, node_id):
"""Find the largest graph connected components that contains one
given node
Parameters
----------
graph : array-like, shape: (n_samples, n_samples)
adjacency matrix of the graph, non-zero weight means an edge
between the nodes
node_id : int
The index of the query node of the graph
Returns
-------
connected_components_matrix : array-like, shape: (n_samples,)
An array of bool value indicating the indexes of the nodes
belonging to the largest connected components of the given query
node
"""
connected_components_matrix = np.zeros(
shape=(graph.shape[0]), dtype=np.bool)
connected_components_matrix[node_id] = True
n_node = graph.shape[0]
for i in range(n_node):
last_num_component = connected_components_matrix.sum()
_, node_to_add = np.where(graph[connected_components_matrix] != 0)
connected_components_matrix[node_to_add] = True
if last_num_component >= connected_components_matrix.sum():
break
return connected_components_matrix
def _graph_is_connected(graph):
    """ Return whether the graph is connected (True) or Not (False)

    Parameters
    ----------
    graph : array-like or sparse matrix, shape: (n_samples, n_samples)
        adjacency matrix of the graph, non-zero weight means an edge
        between the nodes

    Returns
    -------
    is_connected : bool
        True means the graph is fully connected and False means not
    """
    if not sparse.isspmatrix(graph):
        # dense graph: flood-fill from node 0 and check whether the
        # resulting component covers every node
        return _graph_connected_component(graph, 0).sum() == graph.shape[0]
    # sparse graph: count the connected components directly
    n_connected_components, _ = connected_components(graph)
    return n_connected_components == 1
def _set_diag(laplacian, value):
"""Set the diagonal of the laplacian matrix and convert it to a
sparse format well suited for eigenvalue decomposition
Parameters
----------
laplacian : array or sparse matrix
The graph laplacian
value : float
The value of the diagonal
Returns
-------
laplacian : array or sparse matrix
An array of matrix in a form that is well suited to fast
eigenvalue decomposition, depending on the band width of the
matrix.
"""
n_nodes = laplacian.shape[0]
# We need all entries in the diagonal to values
if not sparse.isspmatrix(laplacian):
laplacian.flat[::n_nodes + 1] = value
else:
laplacian = laplacian.tocoo()
diag_idx = (laplacian.row == laplacian.col)
laplacian.data[diag_idx] = value
# If the matrix has a small number of diagonals (as in the
# case of structured matrices coming from images), the
# dia format might be best suited for matvec products:
n_diags = np.unique(laplacian.row - laplacian.col).size
if n_diags <= 7:
# 3 or less outer diagonals on each side
laplacian = laplacian.todia()
else:
# csr has the fastest matvec and is thus best suited to
# arpack
laplacian = laplacian.tocsr()
return laplacian
def spectral_embedding(adjacency, n_components=8, eigen_solver=None,
                       random_state=None, eigen_tol=0.0,
                       norm_laplacian=True, drop_first=True):
    """Project the sample on the first eigenvectors of the graph Laplacian.

    The adjacency matrix is used to compute a normalized graph Laplacian
    whose spectrum (especially the eigenvectors associated to the
    smallest eigenvalues) has an interpretation in terms of minimal
    number of cuts necessary to split the graph into comparably sized
    components.

    This embedding can also 'work' even if the ``adjacency`` variable is
    not strictly the adjacency matrix of a graph but more generally
    an affinity or similarity matrix between samples (for instance the
    heat kernel of a euclidean distance matrix or a k-NN matrix).

    However care must taken to always make the affinity matrix symmetric
    so that the eigenvector decomposition works as expected.

    Read more in the :ref:`User Guide <spectral_embedding>`.

    Parameters
    ----------
    adjacency : array-like or sparse matrix, shape: (n_samples, n_samples)
        The adjacency matrix of the graph to embed.

    n_components : integer, optional, default 8
        The dimension of the projection subspace.

    eigen_solver : {None, 'arpack', 'lobpcg', or 'amg'}, default None
        The eigenvalue decomposition strategy to use. AMG requires pyamg
        to be installed. It can be faster on very large, sparse problems,
        but may also lead to instabilities.

    random_state : int seed, RandomState instance, or None (default)
        A pseudo random number generator used for the initialization of the
        lobpcg eigenvectors decomposition when eigen_solver == 'amg'.
        By default, arpack is used.

    eigen_tol : float, optional, default=0.0
        Stopping criterion for eigendecomposition of the Laplacian matrix
        when using arpack eigen_solver.

    norm_laplacian : bool, optional, default=True
        If True, then compute normalized Laplacian.

    drop_first : bool, optional, default=True
        Whether to drop the first eigenvector. For spectral embedding, this
        should be True as the first eigenvector should be constant vector for
        connected graph, but for spectral clustering, this should be kept as
        False to retain the first eigenvector.

    Returns
    -------
    embedding : array, shape=(n_samples, n_components)
        The reduced samples.

    Raises
    ------
    ValueError
        If ``eigen_solver`` is unknown, or set to 'amg' while pyamg is not
        installed, or if the lobpcg/amg decomposition degenerates to a
        single row.

    Notes
    -----
    Spectral embedding is most useful when the graph has one connected
    component. If there graph has many components, the first few eigenvectors
    will simply uncover the connected components of the graph.

    References
    ----------
    * http://en.wikipedia.org/wiki/LOBPCG

    * Toward the Optimal Preconditioned Eigensolver: Locally Optimal
      Block Preconditioned Conjugate Gradient Method
      Andrew V. Knyazev
      http://dx.doi.org/10.1137%2FS1064827500366124
    """
    adjacency = check_symmetric(adjacency)

    try:
        from pyamg import smoothed_aggregation_solver
    except ImportError:
        # pyamg is only required by the 'amg' solver; fail early if it was
        # explicitly requested but is unavailable.
        if eigen_solver == "amg":
            raise ValueError("The eigen_solver was set to 'amg', but pyamg is "
                             "not available.")

    if eigen_solver is None:
        eigen_solver = 'arpack'
    elif eigen_solver not in ('arpack', 'lobpcg', 'amg'):
        # BUGFIX: the two literals used to concatenate without a separating
        # space, producing "...'%s'.Should be 'amg'...".
        raise ValueError("Unknown value for eigen_solver: '%s'. "
                         "Should be 'amg', 'arpack', or 'lobpcg'"
                         % eigen_solver)

    random_state = check_random_state(random_state)

    n_nodes = adjacency.shape[0]
    # Whether to drop the first eigenvector: ask for one extra component so
    # the requested dimensionality survives the drop at the end.
    if drop_first:
        n_components = n_components + 1

    if not _graph_is_connected(adjacency):
        warnings.warn("Graph is not fully connected, spectral embedding"
                      " may not work as expected.")

    # dd is the diagonal returned by the (normalized) Laplacian computation,
    # used below to rescale the eigenvectors.
    laplacian, dd = graph_laplacian(adjacency,
                                    normed=norm_laplacian, return_diag=True)
    if (eigen_solver == 'arpack'
        or eigen_solver != 'lobpcg' and
            (not sparse.isspmatrix(laplacian)
             or n_nodes < 5 * n_components)):
        # lobpcg used with eigen_solver='amg' has bugs for low number of nodes
        # for details see the source code in scipy:
        # https://github.com/scipy/scipy/blob/v0.11.0/scipy/sparse/linalg/eigen
        # /lobpcg/lobpcg.py#L237
        # or matlab:
        # http://www.mathworks.com/matlabcentral/fileexchange/48-lobpcg-m
        laplacian = _set_diag(laplacian, 1)

        # Here we'll use shift-invert mode for fast eigenvalues
        # (see http://docs.scipy.org/doc/scipy/reference/tutorial/arpack.html
        #  for a short explanation of what this means)
        # Because the normalized Laplacian has eigenvalues between 0 and 2,
        # I - L has eigenvalues between -1 and 1.  ARPACK is most efficient
        # when finding eigenvalues of largest magnitude (keyword which='LM')
        # and when these eigenvalues are very large compared to the rest.
        # For very large, very sparse graphs, I - L can have many, many
        # eigenvalues very near 1.0.  This leads to slow convergence.  So
        # instead, we'll use ARPACK's shift-invert mode, asking for the
        # eigenvalues near 1.0.  This effectively spreads-out the spectrum
        # near 1.0 and leads to much faster convergence: potentially an
        # orders-of-magnitude speedup over simply using keyword which='LA'
        # in standard mode.
        try:
            # We are computing the opposite of the laplacian inplace so as
            # to spare a memory allocation of a possibly very large array
            laplacian *= -1
            lambdas, diffusion_map = eigsh(laplacian, k=n_components,
                                           sigma=1.0, which='LM',
                                           tol=eigen_tol)
            # Reverse the eigenvector order and undo the normalization by
            # multiplying back with dd.
            embedding = diffusion_map.T[n_components::-1] * dd
        except RuntimeError:
            # When submatrices are exactly singular, an LU decomposition
            # in arpack fails. We fallback to lobpcg
            eigen_solver = "lobpcg"
            # Revert the laplacian to its opposite to have lobpcg work
            laplacian *= -1

    if eigen_solver == 'amg':
        # Use AMG to get a preconditioner and speed up the eigenvalue
        # problem.
        if not sparse.issparse(laplacian):
            warnings.warn("AMG works better for sparse matrices")
        # lobpcg needs double precision floats
        laplacian = check_array(laplacian, dtype=np.float64,
                                accept_sparse=True)
        laplacian = _set_diag(laplacian, 1)
        ml = smoothed_aggregation_solver(check_array(laplacian, 'csr'))
        M = ml.aspreconditioner()
        # Random initial approximation, with the degree vector as the first
        # column (close to the constant eigenvector of the Laplacian).
        X = random_state.rand(laplacian.shape[0], n_components + 1)
        X[:, 0] = dd.ravel()
        lambdas, diffusion_map = lobpcg(laplacian, X, M=M, tol=1.e-12,
                                        largest=False)
        embedding = diffusion_map.T * dd
        if embedding.shape[0] == 1:
            raise ValueError

    elif eigen_solver == "lobpcg":
        # lobpcg needs double precision floats
        laplacian = check_array(laplacian, dtype=np.float64,
                                accept_sparse=True)
        if n_nodes < 5 * n_components + 1:
            # see note above under arpack why lobpcg has problems with small
            # number of nodes
            # lobpcg will fallback to eigh, so we short circuit it
            if sparse.isspmatrix(laplacian):
                laplacian = laplacian.toarray()
            lambdas, diffusion_map = eigh(laplacian)
            embedding = diffusion_map.T[:n_components] * dd
        else:
            laplacian = _set_diag(laplacian, 1)
            # We increase the number of eigenvectors requested, as lobpcg
            # doesn't behave well in low dimension
            X = random_state.rand(laplacian.shape[0], n_components + 1)
            X[:, 0] = dd.ravel()
            lambdas, diffusion_map = lobpcg(laplacian, X, tol=1e-15,
                                            largest=False, maxiter=2000)
            embedding = diffusion_map.T[:n_components] * dd
            if embedding.shape[0] == 1:
                raise ValueError

    # Fix an arbitrary sign convention so repeated runs give the same result.
    embedding = _deterministic_vector_sign_flip(embedding)
    if drop_first:
        return embedding[1:n_components].T
    else:
        return embedding[:n_components].T
class SpectralEmbedding(BaseEstimator):
    """Spectral embedding for non-linear dimensionality reduction.

    Forms an affinity matrix given by the specified function and
    applies spectral decomposition to the corresponding graph laplacian.
    The resulting transformation is given by the value of the
    eigenvectors for each data point.

    Read more in the :ref:`User Guide <spectral_embedding>`.

    Parameters
    -----------
    n_components : integer, default: 2
        The dimension of the projected subspace.

    eigen_solver : {None, 'arpack', 'lobpcg', or 'amg'}
        The eigenvalue decomposition strategy to use. AMG requires pyamg
        to be installed. It can be faster on very large, sparse problems,
        but may also lead to instabilities.

    random_state : int seed, RandomState instance, or None, default : None
        A pseudo random number generator used for the initialization of the
        lobpcg eigenvectors decomposition when eigen_solver == 'amg'.

    affinity : string or callable, default : "nearest_neighbors"
        How to construct the affinity matrix.
         - 'nearest_neighbors' : construct affinity matrix by knn graph
         - 'rbf' : construct affinity matrix by rbf kernel
         - 'precomputed' : interpret X as precomputed affinity matrix
         - callable : use passed in function as affinity
           the function takes in data matrix (n_samples, n_features)
           and return affinity matrix (n_samples, n_samples).

    gamma : float, optional, default : 1/n_features
        Kernel coefficient for rbf kernel.

    n_neighbors : int, default : max(n_samples/10 , 1)
        Number of nearest neighbors for nearest_neighbors graph building.

    Attributes
    ----------
    embedding_ : array, shape = (n_samples, n_components)
        Spectral embedding of the training matrix.

    affinity_matrix_ : array, shape = (n_samples, n_samples)
        Affinity_matrix constructed from samples or precomputed.

    References
    ----------

    - A Tutorial on Spectral Clustering, 2007
      Ulrike von Luxburg
      http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.165.9323

    - On Spectral Clustering: Analysis and an algorithm, 2011
      Andrew Y. Ng, Michael I. Jordan, Yair Weiss
      http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.19.8100

    - Normalized cuts and image segmentation, 2000
      Jianbo Shi, Jitendra Malik
      http://citeseer.ist.psu.edu/viewdoc/summary?doi=10.1.1.160.2324
    """

    def __init__(self, n_components=2, affinity="nearest_neighbors",
                 gamma=None, random_state=None, eigen_solver=None,
                 n_neighbors=None):
        self.n_components = n_components
        self.affinity = affinity
        self.gamma = gamma
        self.random_state = random_state
        self.eigen_solver = eigen_solver
        self.n_neighbors = n_neighbors

    @property
    def _pairwise(self):
        # With a precomputed affinity, X is a square (n_samples, n_samples)
        # matrix rather than a feature matrix.
        return self.affinity == "precomputed"

    def _get_affinity_matrix(self, X, Y=None):
        """Calculate the affinity matrix from data.

        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)
            Training vector, where n_samples is the number of samples
            and n_features is the number of features.

            If affinity is "precomputed"
            X : array-like, shape (n_samples, n_samples),
            Interpret X as precomputed adjacency graph computed from
            samples.

        Returns
        -------
        affinity_matrix, shape (n_samples, n_samples)
        """
        if self.affinity == 'precomputed':
            self.affinity_matrix_ = X
            return self.affinity_matrix_
        if self.affinity == 'nearest_neighbors':
            if sparse.issparse(X):
                # NOTE: this deliberately rewrites self.affinity so later
                # calls take the rbf path directly.
                warnings.warn("Nearest neighbors affinity currently does "
                              "not support sparse input, falling back to "
                              "rbf affinity")
                self.affinity = "rbf"
            else:
                self.n_neighbors_ = (self.n_neighbors
                                     if self.n_neighbors is not None
                                     else max(int(X.shape[0] / 10), 1))
                self.affinity_matrix_ = kneighbors_graph(X, self.n_neighbors_,
                                                         include_self=True)
                # currently only symmetric affinity_matrix supported
                self.affinity_matrix_ = 0.5 * (self.affinity_matrix_ +
                                               self.affinity_matrix_.T)
                return self.affinity_matrix_
        if self.affinity == 'rbf':
            self.gamma_ = (self.gamma
                           if self.gamma is not None else 1.0 / X.shape[1])
            self.affinity_matrix_ = rbf_kernel(X, gamma=self.gamma_)
            return self.affinity_matrix_
        # Callable affinity: delegate the whole construction to the user.
        self.affinity_matrix_ = self.affinity(X)
        return self.affinity_matrix_

    def fit(self, X, y=None):
        """Fit the model from data in X.

        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)
            Training vector, where n_samples is the number of samples
            and n_features is the number of features.

            If affinity is "precomputed"
            X : array-like, shape (n_samples, n_samples),
            Interpret X as precomputed adjacency graph computed from
            samples.

        Returns
        -------
        self : object
            Returns the instance itself.
        """
        X = check_array(X, ensure_min_samples=2, estimator=self)

        random_state = check_random_state(self.random_state)
        if isinstance(self.affinity, six.string_types):
            if self.affinity not in set(("nearest_neighbors", "rbf",
                                         "precomputed")):
                raise ValueError(("%s is not a valid affinity. Expected "
                                  "'precomputed', 'rbf', 'nearest_neighbors' "
                                  "or a callable.") % self.affinity)
        elif not callable(self.affinity):
            # BUGFIX: error message used to read "to be an an affinity".
            raise ValueError(("'affinity' is expected to be an affinity "
                              "name or a callable. Got: %s") % self.affinity)
        affinity_matrix = self._get_affinity_matrix(X)
        self.embedding_ = spectral_embedding(affinity_matrix,
                                             n_components=self.n_components,
                                             eigen_solver=self.eigen_solver,
                                             random_state=random_state)
        return self

    def fit_transform(self, X, y=None):
        """Fit the model from data in X and transform X.

        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)
            Training vector, where n_samples is the number of samples
            and n_features is the number of features.

            If affinity is "precomputed"
            X : array-like, shape (n_samples, n_samples),
            Interpret X as precomputed adjacency graph computed from
            samples.

        Returns
        -------
        X_new : array-like, shape (n_samples, n_components)
        """
        self.fit(X)
        return self.embedding_
| gpl-2.0 |
tectronics/mythbox | resources/lib/zope.interface/zope/interface/declarations.py | 11 | 40956 | ##############################################################################
# Copyright (c) 2003 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
##############################################################################
"""Implementation of interface declarations
There are three flavors of declarations:
- Declarations are used to simply name declared interfaces.
- ImplementsDeclarations are used to express the interfaces that a
class implements (that instances of the class provides).
Implements specifications support inheriting interfaces.
- ProvidesDeclarations are used to express interfaces directly
provided by objects.
$Id: declarations.py 70112 2006-09-12 04:51:16Z baijum $
"""
__docformat__ = 'restructuredtext'
import sys
import types
import weakref
from zope.interface.interface import InterfaceClass, Specification
from ro import mergeOrderings, ro
import exceptions
from types import ClassType, ModuleType
from zope.interface.advice import addClassAdvisor
# Registry of class-implementation specifications.
# Builtin/extension types reject attribute assignment, so their spec cannot
# be cached on the class itself; implementedBy stores it here instead.
BuiltinImplementationSpecifications = {}
class Declaration(Specification):
    """Interface declarations: an ordered collection of interfaces."""

    def __init__(self, *interfaces):
        # Arguments may be interfaces or nested specifications; flatten
        # them into a plain interface sequence first.
        Specification.__init__(self, _normalizeargs(interfaces))

    def changed(self, originally_changed):
        """Propagate a change notification and drop cached state."""
        Specification.changed(self, originally_changed)
        try:
            # Drop the cached attribute data, if any has been computed.
            # NOTE(review): '_v_attrs' looks like a volatile cache attribute
            # maintained elsewhere — confirm against Specification.
            del self._v_attrs
        except AttributeError:
            pass

    def __contains__(self, interface):
        """Test whether an interface is in the specification
        for example:
        >>> from zope.interface import Interface
        >>> class I1(Interface): pass
        ...
        >>> class I2(I1): pass
        ...
        >>> class I3(Interface): pass
        ...
        >>> class I4(I3): pass
        ...
        >>> spec = Declaration(I2, I3)
        >>> spec = Declaration(I4, spec)
        >>> int(I1 in spec)
        0
        >>> int(I2 in spec)
        1
        >>> int(I3 in spec)
        1
        >>> int(I4 in spec)
        1
        """
        # Membership means the interface is listed directly, not merely
        # extended (I1 above is extended by I2 but not contained).
        return self.extends(interface) and interface in self.interfaces()

    def __iter__(self):
        """Return an iterator for the interfaces in the specification
        for example:
        >>> from zope.interface import Interface
        >>> class I1(Interface): pass
        ...
        >>> class I2(I1): pass
        ...
        >>> class I3(Interface): pass
        ...
        >>> class I4(I3): pass
        ...
        >>> spec = Declaration(I2, I3)
        >>> spec = Declaration(I4, spec)
        >>> i = iter(spec)
        >>> i.next().getName()
        'I4'
        >>> i.next().getName()
        'I2'
        >>> i.next().getName()
        'I3'
        >>> list(i)
        []
        """
        return self.interfaces()

    def flattened(self):
        """Return an iterator of all included and extended interfaces
        for example:
        >>> from zope.interface import Interface
        >>> class I1(Interface): pass
        ...
        >>> class I2(I1): pass
        ...
        >>> class I3(Interface): pass
        ...
        >>> class I4(I3): pass
        ...
        >>> spec = Declaration(I2, I3)
        >>> spec = Declaration(I4, spec)
        >>> i = spec.flattened()
        >>> i.next().getName()
        'I4'
        >>> i.next().getName()
        'I2'
        >>> i.next().getName()
        'I1'
        >>> i.next().getName()
        'I3'
        >>> i.next().getName()
        'Interface'
        >>> list(i)
        []
        """
        # __iro__ is the precomputed interface resolution order.
        return iter(self.__iro__)

    def __sub__(self, other):
        """Remove interfaces from a specification
        Examples:
        >>> from zope.interface import Interface
        >>> class I1(Interface): pass
        ...
        >>> class I2(I1): pass
        ...
        >>> class I3(Interface): pass
        ...
        >>> class I4(I3): pass
        ...
        >>> spec = Declaration()
        >>> [iface.getName() for iface in spec]
        []
        >>> spec -= I1
        >>> [iface.getName() for iface in spec]
        []
        >>> spec -= Declaration(I1, I2)
        >>> [iface.getName() for iface in spec]
        []
        >>> spec = Declaration(I2, I4)
        >>> [iface.getName() for iface in spec]
        ['I2', 'I4']
        >>> [iface.getName() for iface in spec - I4]
        ['I2']
        >>> [iface.getName() for iface in spec - I1]
        ['I4']
        >>> [iface.getName() for iface
        ...  in spec - Declaration(I3, I4)]
        ['I2']
        """
        # Keep only interfaces that do not extend (or equal) any interface
        # in 'other'; extends(j, 0) includes the interface itself.
        return Declaration(
            *[i for i in self.interfaces()
              if not [j for j in other.interfaces()
                      if i.extends(j, 0)]
              ]
            )

    def __add__(self, other):
        """Add two specifications or a specification and an interface
        Examples:
        >>> from zope.interface import Interface
        >>> class I1(Interface): pass
        ...
        >>> class I2(I1): pass
        ...
        >>> class I3(Interface): pass
        ...
        >>> class I4(I3): pass
        ...
        >>> spec = Declaration()
        >>> [iface.getName() for iface in spec]
        []
        >>> [iface.getName() for iface in spec+I1]
        ['I1']
        >>> [iface.getName() for iface in I1+spec]
        ['I1']
        >>> spec2 = spec
        >>> spec += I1
        >>> [iface.getName() for iface in spec]
        ['I1']
        >>> [iface.getName() for iface in spec2]
        []
        >>> spec2 += Declaration(I3, I4)
        >>> [iface.getName() for iface in spec2]
        ['I3', 'I4']
        >>> [iface.getName() for iface in spec+spec2]
        ['I1', 'I3', 'I4']
        >>> [iface.getName() for iface in spec2+spec]
        ['I3', 'I4', 'I1']
        """
        # Concatenate, preserving first-seen order and dropping duplicates.
        seen = {}
        result = []
        for i in self.interfaces():
            if i not in seen:
                seen[i] = 1
                result.append(i)
        for i in other.interfaces():
            if i not in seen:
                seen[i] = 1
                result.append(i)
        return Declaration(*result)

    # Addition is commutative in form (the left operand's interfaces still
    # come first relative to 'self'), so reuse __add__ for reflected adds.
    __radd__ = __add__
##############################################################################
#
# Implementation specifications
#
# These specify interfaces implemented by instances of classes
class Implements(Declaration):
    """Declaration of the interfaces implemented by a class' instances."""

    # Class whose own specification should be merged in as an extra base.
    inherit = None

    # Interfaces that were explicitly declared for the class.
    declared = ()

    __name__ = '?'

    def __repr__(self):
        return '<implementedBy %s>' % self.__name__

    def __reduce__(self):
        # Pickle by reference: recompute the spec from the class itself.
        return implementedBy, (self.inherit, )
def implementedByFallback(cls):
    """Return the interfaces implemented for a class' instances
    The value returned is an IDeclaration.
    for example:
    >>> from zope.interface import Interface
    >>> class I1(Interface): pass
    ...
    >>> class I2(I1): pass
    ...
    >>> class I3(Interface): pass
    ...
    >>> class I4(I3): pass
    ...
    >>> class C1(object):
    ...   implements(I2)
    >>> class C2(C1):
    ...   implements(I3)
    >>> [i.getName() for i in implementedBy(C2)]
    ['I3', 'I2']
    Really, any object should be able to receive a successful answer, even
    an instance:
    >>> class Callable(object):
    ...     def __call__(self):
    ...         return self
    >>> implementedBy(Callable())
    <implementedBy zope.interface.declarations.?>
    Note that the name of the spec ends with a '?', because the `Callable`
    instance does not have a `__name__` attribute.
    """
    # This also manages storage of implementation specifications
    try:
        spec = cls.__dict__.get('__implemented__')
    except AttributeError:
        # we can't get the class dict. This is probably due to a
        # security proxy. If this is the case, then probably no
        # descriptor was installed for the class.
        # We don't want to depend directly on zope.security in
        # zope.interface, but we'll try to make reasonable
        # accommodations in an indirect way.
        # We'll check to see if there's an implements:
        spec = getattr(cls, '__implemented__', None)
        if spec is None:
            # There's no spec stored in the class. Maybe it's a builtin:
            spec = BuiltinImplementationSpecifications.get(cls)
            if spec is not None:
                return spec
            return _empty
        if spec.__class__ == Implements:
            # we defaulted to _empty or there was a spec. Good enough.
            # Return it.
            return spec
        # TODO: need old style __implements__ compatibility?
        # Hm, there's an __implemented__, but it's not a spec. Must be
        # an old-style declaration. Just compute a spec for it
        return Declaration(*_normalizeargs((spec, )))
    if isinstance(spec, Implements):
        # Cached modern spec found in the class dict itself; done.
        return spec
    if spec is None:
        spec = BuiltinImplementationSpecifications.get(cls)
        if spec is not None:
            return spec
    # TODO: need old style __implements__ compatibility?
    if spec is not None:
        # old-style __implemented__ = foo declaration
        spec = (spec, ) # tuplefy, as it might be just an int
        spec = Implements(*_normalizeargs(spec))
        spec.inherit = None # old-style implies no inherit
        del cls.__implemented__ # get rid of the old-style declaration
    else:
        try:
            bases = cls.__bases__
        except AttributeError:
            if not callable(cls):
                raise TypeError("ImplementedBy called for non-factory", cls)
            bases = ()
        # No declaration at all: derive the spec from the base classes.
        spec = Implements(*[implementedBy(c) for c in bases])
        spec.inherit = cls
    # Give the spec a dotted name for its repr, '?' where unknown.
    spec.__name__ = (getattr(cls, '__module__', '?') or '?') + \
                    '.' + (getattr(cls, '__name__', '?') or '?')
    try:
        # Cache the spec on the class and make sure lookups go through the
        # descriptor machinery.
        cls.__implemented__ = spec
        if not hasattr(cls, '__providedBy__'):
            cls.__providedBy__ = objectSpecificationDescriptor
        if (isinstance(cls, DescriptorAwareMetaClasses)
            and
            '__provides__' not in cls.__dict__):
            # Make sure we get a __provides__ descriptor
            cls.__provides__ = ClassProvides(
                cls,
                getattr(cls, '__class__', type(cls)),
                )
    except TypeError:
        # Builtin types reject attribute assignment; fall back to the
        # module-level registry instead.
        if not isinstance(cls, type):
            raise TypeError("ImplementedBy called for non-type", cls)
        BuiltinImplementationSpecifications[cls] = spec
    return spec
implementedBy = implementedByFallback
def classImplementsOnly(cls, *interfaces):
    """Declare the *only* interfaces implemented by instances of ``cls``.

    The arguments after the class are one or more interfaces or interface
    specifications (``IDeclaration`` objects).  They replace any previous
    declarations, including those inherited from base classes::

        >>> from zope.interface import Interface
        >>> class I1(Interface): pass
        ...
        >>> class I2(Interface): pass
        ...
        >>> class I3(Interface): pass
        ...
        >>> class I4(Interface): pass
        ...
        >>> class A(object):
        ...     implements(I3)
        >>> class B(object):
        ...     implements(I4)
        >>> class C(A, B):
        ...     pass
        >>> classImplementsOnly(C, I1, I2)
        >>> [i.getName() for i in implementedBy(C)]
        ['I1', 'I2']

    Instances of ``C`` provide only ``I1`` and ``I2``, regardless of the
    interfaces implemented by instances of ``A`` and ``B``.
    """
    spec = implementedBy(cls)
    # Wipe both the inherited bases and the explicit declarations before
    # re-declaring, so nothing from previous declarations survives.
    spec.inherit = None
    spec.declared = ()
    classImplements(cls, *interfaces)
def classImplements(cls, *interfaces):
    """Declare additional interfaces implemented by instances of ``cls``.

    The arguments after the class are one or more interfaces or interface
    specifications (``IDeclaration`` objects).  The interfaces given
    (including those inside the specifications) are added to any
    interfaces previously declared::

        >>> from zope.interface import Interface
        >>> class I1(Interface): pass
        ...
        >>> class I2(Interface): pass
        ...
        >>> class I3(Interface): pass
        ...
        >>> class I4(Interface): pass
        ...
        >>> class I5(Interface): pass
        ...
        >>> class A(object):
        ...     implements(I3)
        >>> class B(object):
        ...     implements(I4)
        >>> class C(A, B):
        ...     pass
        >>> classImplements(C, I1, I2)
        >>> [i.getName() for i in implementedBy(C)]
        ['I1', 'I2', 'I3', 'I4']
        >>> classImplements(C, I5)
        >>> [i.getName() for i in implementedBy(C)]
        ['I1', 'I2', 'I5', 'I3', 'I4']

    Instances of ``C`` provide ``I1``, ``I2``, ``I5`` and whatever
    interfaces instances of ``A`` and ``B`` provide.
    """
    spec = implementedBy(cls)
    spec.declared += tuple(_normalizeargs(interfaces))

    # Rebuild the specification's bases: every explicitly declared
    # interface first (in declaration order, duplicates dropped), followed
    # by the specifications of the inherited base classes, if any.
    seen = {}
    ordered = []
    for iface in spec.declared:
        if iface not in seen:
            seen[iface] = 1
            ordered.append(iface)
    if spec.inherit is not None:
        for base in spec.inherit.__bases__:
            base_spec = implementedBy(base)
            if base_spec not in seen:
                seen[base_spec] = 1
                ordered.append(base_spec)
    spec.__bases__ = tuple(ordered)
def _implements_advice(cls):
interfaces, classImplements = cls.__dict__['__implements_advice_data__']
del cls.__implements_advice_data__
classImplements(cls, *interfaces)
return cls
class implementer:
    """Decorator-style declaration of interfaces for non-class objects.

    Calling an ``implementer`` instance on an object attaches an
    ``Implements`` specification built from the given interfaces.  It
    refuses to work on classes; the class-declaration functions exist
    for that purpose.
    """

    def __init__(self, *interfaces):
        self.interfaces = interfaces

    def __call__(self, ob):
        if isinstance(ob, DescriptorAwareMetaClasses):
            raise TypeError(
                "Can't use implementer with classes. Use one of "
                "the class-declaration functions instead.")
        spec = Implements(*self.interfaces)
        try:
            ob.__implemented__ = spec
        except AttributeError:
            # The object rejects new attributes (e.g. builtins, __slots__).
            raise TypeError("Can't declare implements", ob)
        return ob
def _implements(name, interfaces, classImplements):
    # Shared implementation of implements()/implementsOnly(): stash the
    # declaration data in the locals of the class body being executed
    # (two frames up), and register a class advisor that applies it via
    # 'classImplements' once the class object exists.
    frame = sys._getframe(2)
    locals = frame.f_locals
    # Try to make sure we were called from a class def. In 2.2.0 we can't
    # check for __module__ since it doesn't seem to be added to the locals
    # until later on.
    if (locals is frame.f_globals) or (
        ('__module__' not in locals) and sys.version_info[:3] > (2, 2, 0)):
        raise TypeError(name+" can be used only from a class definition.")
    if '__implements_advice_data__' in locals:
        raise TypeError(name+" can be used only once in a class definition.")
    locals['__implements_advice_data__'] = interfaces, classImplements
    # depth=3 so the advisor fires for the class whose body called us.
    addClassAdvisor(_implements_advice, depth=3)
def implements(*interfaces):
    """Declare interfaces implemented by instances of a class.

    May only be called from within a class definition.  The arguments are
    one or more interfaces or interface specifications (``IDeclaration``
    objects); they are *added* to any interfaces previously declared,
    including declarations for base classes unless ``implementsOnly`` was
    used.

    It is a convenience equivalent to calling
    ``classImplements(C, *interfaces)`` after class ``C`` has been
    created::

        >>> from zope.interface import Interface
        >>> class IA1(Interface): pass
        ...
        >>> class IA2(Interface): pass
        ...
        >>> class IB(Interface): pass
        ...
        >>> class IC(Interface): pass
        ...
        >>> class A(object): implements(IA1, IA2)
        ...
        >>> class B(object): implements(IB)
        ...
        >>> class C(A, B):
        ...     implements(IC)
        >>> ob = C()
        >>> int(IA1 in providedBy(ob))
        1
        >>> int(IA2 in providedBy(ob))
        1
        >>> int(IB in providedBy(ob))
        1
        >>> int(IC in providedBy(ob))
        1

    Instances of ``C`` implement ``IC`` plus whatever interfaces
    instances of ``A`` and ``B`` implement.
    """
    # The advice installed here runs classImplements when the class body
    # finishes executing.
    _implements("implements", interfaces, classImplements)
def implementsOnly(*interfaces):
    """Declare the *only* interfaces implemented by instances of a class.

    May only be called from within a class definition.  The arguments are
    one or more interfaces or interface specifications (``IDeclaration``
    objects); previous declarations, including those for base classes,
    are overridden.

    It is a convenience equivalent to calling
    ``classImplementsOnly(C, *interfaces)`` after class ``C`` has been
    created::

        >>> from zope.interface import Interface
        >>> class IA1(Interface): pass
        ...
        >>> class IA2(Interface): pass
        ...
        >>> class IB(Interface): pass
        ...
        >>> class IC(Interface): pass
        ...
        >>> class A(object): implements(IA1, IA2)
        ...
        >>> class B(object): implements(IB)
        ...
        >>> class C(A, B):
        ...     implementsOnly(IC)
        >>> ob = C()
        >>> int(IA1 in providedBy(ob))
        0
        >>> int(IA2 in providedBy(ob))
        0
        >>> int(IB in providedBy(ob))
        0
        >>> int(IC in providedBy(ob))
        1

    Instances of ``C`` implement ``IC`` only, regardless of what
    instances of ``A`` and ``B`` implement.
    """
    # The advice installed here runs classImplementsOnly when the class
    # body finishes executing.
    _implements("implementsOnly", interfaces, classImplementsOnly)
##############################################################################
#
# Instance declarations
class Provides(Declaration):  # Really named ProvidesClass
    """Implement ``__provides__``, the instance-specific specification.

    When an object is pickled, we pickle the interfaces that it implements.
    """

    def __init__(self, cls, *interfaces):
        self.__args = (cls, ) + interfaces
        self._cls = cls
        # The full specification: the directly provided interfaces plus
        # whatever the class declares for its instances.
        Declaration.__init__(self, *(interfaces + (implementedBy(cls), )))

    def __reduce__(self):
        # Pickle via the module-level ``Provides`` factory, which caches.
        return Provides, self.__args

    __module__ = 'zope.interface'

    def __get__(self, inst, cls):
        """Make sure that a class ``__provides__`` doesn't leak to instances.

        Accessed through the class we were defined for, return the
        specification itself; for any other access raise
        ``AttributeError`` so lookup falls through::

            >>> from zope.interface import Interface
            >>> class IFooFactory(Interface): pass
            ...
            >>> class C(object):
            ...     pass
            >>> C.__provides__ = ProvidesClass(C, IFooFactory)
            >>> [i.getName() for i in C.__provides__]
            ['IFooFactory']
            >>> getattr(C(), '__provides__', 0)
            0
        """
        if inst is not None or cls is not self._cls:
            raise AttributeError('__provides__')
        return self
ProvidesClass = Provides
# Registry of instance declarations
# This is a memory optimization to allow objects to share specifications.
# Weak values: a cached spec disappears as soon as no object uses it.
InstanceDeclarations = weakref.WeakValueDictionary()
def Provides(*interfaces):
    """Cache instance declarations
    Instance declarations are shared among instances that have the same
    declaration. The declarations are cached in a weak value dictionary.
    (Note that, in the examples below, we are going to make assertions about
    the size of the weakvalue dictionary. For the assertions to be
    meaningful, we need to force garbage collection to make sure garbage
    objects are, indeed, removed from the system. Depending on how Python
    is run, we may need to make multiple calls to be sure. We provide a
    collect function to help with this:
    >>> import gc
    >>> def collect():
    ...     for i in range(4):
    ...         gc.collect()
    )
    >>> collect()
    >>> before = len(InstanceDeclarations)
    >>> class C(object):
    ...    pass
    >>> from zope.interface import Interface
    >>> class I(Interface):
    ...    pass
    >>> c1 = C()
    >>> c2 = C()
    >>> len(InstanceDeclarations) == before
    1
    >>> directlyProvides(c1, I)
    >>> len(InstanceDeclarations) == before + 1
    1
    >>> directlyProvides(c2, I)
    >>> len(InstanceDeclarations) == before + 1
    1
    >>> del c1
    >>> collect()
    >>> len(InstanceDeclarations) == before + 1
    1
    >>> del c2
    >>> collect()
    >>> len(InstanceDeclarations) == before
    1
    """
    # Look up by the exact argument tuple; build and cache on a miss.
    spec = InstanceDeclarations.get(interfaces)
    if spec is None:
        spec = ProvidesClass(*interfaces)
        InstanceDeclarations[interfaces] = spec
    return spec
Provides.__safe_for_unpickling__ = True
# Metaclasses whose instances (i.e. classes) support descriptor attributes.
DescriptorAwareMetaClasses = ClassType, type
def directlyProvides(object, *interfaces):
    """Declare interfaces declared directly for an object
    The arguments after the object are one or more interfaces or interface
    specifications (``IDeclaration`` objects).
    The interfaces given (including the interfaces in the specifications)
    replace interfaces previously declared for the object.
    Consider the following example:
    >>> from zope.interface import Interface
    >>> class I1(Interface): pass
    ...
    >>> class I2(Interface): pass
    ...
    >>> class IA1(Interface): pass
    ...
    >>> class IA2(Interface): pass
    ...
    >>> class IB(Interface): pass
    ...
    >>> class IC(Interface): pass
    ...
    >>> class A(object): implements(IA1, IA2)
    ...
    >>> class B(object): implements(IB)
    ...
    >>> class C(A, B):
    ...    implements(IC)
    >>> ob = C()
    >>> directlyProvides(ob, I1, I2)
    >>> int(I1 in providedBy(ob))
    1
    >>> int(I2 in providedBy(ob))
    1
    >>> int(IA1 in providedBy(ob))
    1
    >>> int(IA2 in providedBy(ob))
    1
    >>> int(IB in providedBy(ob))
    1
    >>> int(IC in providedBy(ob))
    1
    The object, ``ob`` provides ``I1``, ``I2``, and whatever interfaces
    instances have been declared for instances of ``C``.
    To remove directly provided interfaces, use ``directlyProvidedBy`` and
    subtract the unwanted interfaces. For example:
    >>> directlyProvides(ob, directlyProvidedBy(ob)-I2)
    >>> int(I1 in providedBy(ob))
    1
    >>> int(I2 in providedBy(ob))
    0
    removes I2 from the interfaces directly provided by ``ob``. The object,
    ``ob`` no longer directly provides ``I2``, although it might still
    provide ``I2`` if it's class implements ``I2``.
    To add directly provided interfaces, use ``directlyProvidedBy`` and
    include additional interfaces. For example:
    >>> int(I2 in providedBy(ob))
    0
    >>> directlyProvides(ob, directlyProvidedBy(ob), I2)
    adds ``I2`` to the interfaces directly provided by ob::
    >>> int(I2 in providedBy(ob))
    1
    """
    # We need to avoid setting this attribute on meta classes that
    # don't support descriptors.
    # We can do away with this check when we get rid of the old EC
    cls = getattr(object, '__class__', None)
    if cls is not None and getattr(cls, '__class__', None) is cls:
        # It's a meta class (well, at least it could be an extension class)
        if not isinstance(object, DescriptorAwareMetaClasses):
            raise TypeError("Attempt to make an interface declaration on a "
                            "non-descriptor-aware class")
    interfaces = _normalizeargs(interfaces)
    if cls is None:
        cls = type(object)
    # Determine whether 'object' is itself a class/type.
    issub = False
    for damc in DescriptorAwareMetaClasses:
        if issubclass(cls, damc):
            issub = True
            break
    if issub:
        # we have a class or type. We'll use a special descriptor
        # that provides some extra caching
        object.__provides__ = ClassProvides(object, cls, *interfaces)
    else:
        # Plain instance: the shared, cached Provides spec is enough.
        object.__provides__ = Provides(cls, *interfaces)
def alsoProvides(object, *interfaces):
    """Extend the set of interfaces declared directly for an object.

    The arguments after ``object`` are one or more interfaces or interface
    specifications (``IDeclaration`` objects).  They are *added* to the
    interfaces previously declared directly for the object, unlike
    ``directlyProvides`` which replaces the direct declaration outright.

    For example:

    >>> from zope.interface import Interface
    >>> class I1(Interface): pass
    ...
    >>> class I2(Interface): pass
    ...
    >>> class C(object): pass
    ...
    >>> ob = C()
    >>> directlyProvides(ob, I1)
    >>> int(I1 in providedBy(ob))
    1
    >>> int(I2 in providedBy(ob))
    0
    >>> alsoProvides(ob, I2)
    >>> int(I1 in providedBy(ob))
    1
    >>> int(I2 in providedBy(ob))
    1

    After the call, ``ob`` provides ``I1``, ``I2``, and whatever interfaces
    have been declared for instances of its class.
    """
    # Merge the existing direct declaration with the new interfaces and
    # re-declare the combined set.
    current = directlyProvidedBy(object)
    directlyProvides(object, current, *interfaces)
def noLongerProvides(object, interface):
    """Remove a directly provided interface from an object.

    Consider the following two interfaces:

    >>> from zope.interface import Interface
    >>> class I1(Interface): pass
    ...
    >>> class I2(Interface): pass
    ...

    ``I1`` is provided through the class, ``I2`` is directly provided
    by the object:

    >>> class C(object):
    ...     implements(I1)
    >>> c = C()
    >>> alsoProvides(c, I2)
    >>> I2.providedBy(c)
    True

    Remove I2 from c again:

    >>> noLongerProvides(c, I2)
    >>> I2.providedBy(c)
    False

    Removing an interface that is provided through the class is not possible:

    >>> noLongerProvides(c, I1)
    Traceback (most recent call last):
    ...
    ValueError: Can only remove directly provided interfaces.
    """
    remaining = directlyProvidedBy(object) - interface
    directlyProvides(object, remaining)
    # If the interface is still provided afterwards it must come from the
    # object's class, which this function cannot (and must not) change.
    if interface.providedBy(object):
        raise ValueError("Can only remove directly provided interfaces.")
class ClassProvidesBasePy(object):
    """Descriptor base for class ``__provides__`` attributes (pure Python).

    Instances are expected to carry ``_cls`` (the class the descriptor was
    created for) and ``_implements`` (the spec returned for instance access).
    """

    def __get__(self, inst, cls):
        # Only answer for the exact class this descriptor was created for;
        # subclasses must compute their own provides specification.
        if cls is not self._cls:
            raise AttributeError('__provides__')
        if inst is None:
            # Accessed through the class itself: the descriptor *is* the
            # class' provides specification.
            return self
        # Accessed through an instance: fall back to what the class
        # implements.
        return self._implements
# Default to the pure-Python descriptor base; the C implementation is
# swapped in below when the optimization extension is available.
ClassProvidesBase = ClassProvidesBasePy

# Try to get C base:
try:
    import _zope_interface_coptimizations
except ImportError:
    # Extension not built/installed; keep the Python fallback.
    pass
else:
    from _zope_interface_coptimizations import ClassProvidesBase
class ClassProvides(Declaration, ClassProvidesBase):
    """Special descriptor for class __provides__

    The descriptor caches the implementedBy info, so that
    we can get declarations for objects without instance-specific
    interfaces a bit quicker.

    For example:

    >>> from zope.interface import Interface
    >>> class IFooFactory(Interface):
    ...     pass
    >>> class IFoo(Interface):
    ...     pass
    >>> class C(object):
    ...     implements(IFoo)
    ...     classProvides(IFooFactory)
    >>> [i.getName() for i in C.__provides__]
    ['IFooFactory']
    >>> [i.getName() for i in C().__provides__]
    ['IFoo']
    """

    def __init__(self, cls, metacls, *interfaces):
        # _cls/_implements are what ClassProvidesBase.__get__ returns for
        # class access and instance access respectively.
        self._cls = cls
        self._implements = implementedBy(cls)
        # Original constructor arguments, kept (name-mangled) solely so
        # __reduce__ can recreate the object when pickling.
        self.__args = (cls, metacls, ) + interfaces
        # The declaration itself: the direct interfaces plus whatever the
        # metaclass implements.
        Declaration.__init__(self, *(interfaces + (implementedBy(metacls), )))

    def __reduce__(self):
        # Pickle support: re-run __init__ with the original arguments.
        return self.__class__, self.__args

    # Copy base-class method for speed
    __get__ = ClassProvidesBase.__get__
def directlyProvidedBy(object):
    """Return the interfaces directly provided by the given object.

    The value returned is an ``IDeclaration``.
    """
    provides = getattr(object, "__provides__", None)
    if provides is None:
        # No direct declaration at all.
        return _empty
    if isinstance(provides, Implements):
        # We got the class' implements spec (an optimization shortcut);
        # it carries no per-object declarations.
        return _empty
    # The last base of the spec is the class-supplied part; everything
    # before it was declared directly on the object.
    return Declaration(provides.__bases__[:-1])
def classProvides(*interfaces):
    """Declare interfaces provided directly by a class

    This function is called in a class definition.

    The arguments are one or more interfaces or interface specifications
    (``IDeclaration`` objects).

    The given interfaces (including the interfaces in the specifications)
    are used to create the class's direct-object interface specification.
    An error will be raised if the class already has a direct interface
    specification. In other words, it is an error to call this function more
    than once in a class definition.

    Note that the given interfaces have nothing to do with the interfaces
    implemented by instances of the class.

    This function is provided for convenience. It provides a more convenient
    way to call directlyProvides for a class. For example::

      classProvides(I1)

    is equivalent to calling::

      directlyProvides(theclass, I1)

    after the class has been created.

    For example:

    >>> from zope.interface import Interface
    >>> class IFoo(Interface): pass
    ...
    >>> class IFooFactory(Interface): pass
    ...
    >>> class C(object):
    ...     implements(IFoo)
    ...     classProvides(IFooFactory)
    >>> [i.getName() for i in C.__providedBy__]
    ['IFooFactory']
    >>> [i.getName() for i in C().__providedBy__]
    ['IFoo']

    is equivalent to:

    >>> from zope.interface import Interface
    >>> class IFoo(Interface): pass
    ...
    >>> class IFooFactory(Interface): pass
    ...
    >>> class C(object):
    ...     implements(IFoo)
    >>> directlyProvides(C, IFooFactory)
    >>> [i.getName() for i in C.__providedBy__]
    ['IFooFactory']
    >>> [i.getName() for i in C().__providedBy__]
    ['IFoo']

    If classProvides is called outside of a class definition, it fails.

    >>> classProvides(IFooFactory)
    Traceback (most recent call last):
    ...
    TypeError: classProvides can be used only from a class definition.
    """
    frame = sys._getframe(1)
    locals = frame.f_locals

    # Try to make sure we were called from a class def: in a class body the
    # local namespace is distinct from the module globals and carries
    # '__module__'.
    if (locals is frame.f_globals) or ('__module__' not in locals):
        raise TypeError("classProvides can be used only from a class definition.")

    if '__provides__' in locals:
        raise TypeError(
            "classProvides can only be used once in a class definition.")

    # Stash the normalized interfaces in the class namespace for the class
    # advisor to pick up once the class object actually exists.
    locals["__provides__"] = _normalizeargs(interfaces)

    addClassAdvisor(_classProvides_advice, depth=2)
def _classProvides_advice(cls):
    """Class advisor installed by classProvides().

    Pulls the interface list stashed in the class namespace, removes the
    temporary attribute, and turns it into a real direct declaration on the
    freshly built class.
    """
    declared = cls.__dict__['__provides__']
    del cls.__provides__
    directlyProvides(cls, *declared)
    return cls
def moduleProvides(*interfaces):
    """Declare interfaces provided by a module.

    Call this inside a module body.  The arguments are one or more
    interfaces or interface specifications (``IDeclaration`` objects),
    which become the module's direct-object interface specification.

    It is an error to call this function more than once per module, or to
    call it anywhere other than a module body.  It is a convenience
    equivalent; writing::

      moduleProvides(I1)

    has the same effect as::

      directlyProvides(sys.modules[__name__], I1)
    """
    frame = sys._getframe(1)
    caller_locals = frame.f_locals

    # At module level f_locals *is* f_globals and '__name__' is present;
    # anything else means we were called from the wrong place.
    if (caller_locals is not frame.f_globals) or ('__name__' not in caller_locals):
        raise TypeError(
            "moduleProvides can only be used from a module definition.")

    if '__provides__' in caller_locals:
        raise TypeError(
            "moduleProvides can only be used once in a module definition.")

    caller_locals["__provides__"] = Provides(ModuleType,
                                             *_normalizeargs(interfaces))
##############################################################################
#
# Declaration querying support
def ObjectSpecification(direct, cls):
    """Provide object specifications

    These combine information for the object and for its classes.

    For example:

    >>> from zope.interface import Interface
    >>> class I1(Interface): pass
    ...
    >>> class I2(Interface): pass
    ...
    >>> class I3(Interface): pass
    ...
    >>> class I31(I3): pass
    ...
    >>> class I4(Interface): pass
    ...
    >>> class I5(Interface): pass
    ...
    >>> class A(object): implements(I1)
    ...
    >>> class B(object): __implemented__ = I2
    ...
    >>> class C(A, B): implements(I31)
    ...
    >>> c = C()
    >>> directlyProvides(c, I4)
    >>> [i.getName() for i in providedBy(c)]
    ['I4', 'I31', 'I1', 'I2']
    >>> [i.getName() for i in providedBy(c).flattened()]
    ['I4', 'I31', 'I3', 'I1', 'I2', 'Interface']
    >>> int(I1 in providedBy(c))
    1
    >>> int(I3 in providedBy(c))
    0
    >>> int(providedBy(c).extends(I3))
    1
    >>> int(providedBy(c).extends(I31))
    1
    >>> int(providedBy(c).extends(I5))
    0
    >>> class COnly(A, B): implementsOnly(I31)
    ...
    >>> class D(COnly): implements(I5)
    ...
    >>> c = D()
    >>> directlyProvides(c, I4)
    >>> [i.getName() for i in providedBy(c)]
    ['I4', 'I5', 'I31']
    >>> [i.getName() for i in providedBy(c).flattened()]
    ['I4', 'I5', 'I31', 'I3', 'Interface']
    >>> int(I1 in providedBy(c))
    0
    >>> int(I3 in providedBy(c))
    0
    >>> int(providedBy(c).extends(I3))
    1
    >>> int(providedBy(c).extends(I1))
    0
    >>> int(providedBy(c).extends(I31))
    1
    >>> int(providedBy(c).extends(I5))
    1
    """
    # Directly-declared interfaces take precedence over (come before) the
    # class-implemented ones in the combined specification.
    return Provides(cls, direct)
def getObjectSpecification(ob):
    """Return the interface specification for ``ob``.

    An explicit ``__provides__`` declaration wins outright; otherwise the
    declarations of the object's class are used, and objects without a
    ``__class__`` provide nothing.
    """
    provides = getattr(ob, '__provides__', None)
    if provides is not None:
        return provides
    cls = getattr(ob, '__class__', None)
    if cls is None:
        # We can't get the class, so just consider provides.
        return _empty
    return implementedBy(cls)
def providedBy(ob):
    """Return the interfaces provided by ``ob``.

    Tries progressively older/lower-level protocols: ``__providedBy__``
    (the descriptor), then ``__provides__``, finally falling back to what
    the object's class implements.  The exact order of attribute accesses
    below is load-bearing for ExtensionClass-style objects.
    """
    # Here we have either a special object, an old-style declaration
    # or a descriptor

    # Try to get __providedBy__
    try:
        r = ob.__providedBy__
    except AttributeError:
        # Not set yet. Fall back to lower-level thing that computes it
        return getObjectSpecification(ob)

    try:
        # We might have gotten a descriptor from an instance of a
        # class (like an ExtensionClass) that doesn't support
        # descriptors.  We'll make sure we got one by trying to get
        # the only attribute, which all specs have.
        r.extends
    except AttributeError:
        # The object's class doesn't understand descriptors.
        # Sigh. We need to get an object descriptor, but we have to be
        # careful.  We want to use the instance's __provides__, if
        # there is one, but only if it didn't come from the class.
        try:
            r = ob.__provides__
        except AttributeError:
            # No __provides__, so just fall back to implementedBy
            return implementedBy(ob.__class__)
        # We need to make sure we got the __provides__ from the
        # instance. We'll do this by making sure we don't get the same
        # thing from the class:
        try:
            cp = ob.__class__.__provides__
        except AttributeError:
            # The ob doesn't have a class or the class has no
            # provides, assume we're done:
            return r
        if r is cp:
            # Oops, we got the provides from the class. This means
            # the object doesn't have its own. We should use implementedBy
            return implementedBy(ob.__class__)
    return r
class ObjectSpecificationDescriptorPy(object):
    """Implement the ``__providedBy__`` attribute.

    The ``__providedBy__`` attribute computes the interfaces provided by
    an object.
    """

    def __get__(self, inst, cls):
        """Get an object specification for an object.

        For example:

        >>> from zope.interface import Interface
        >>> class IFoo(Interface): pass
        ...
        >>> class IFooFactory(Interface): pass
        ...
        >>> class C(object):
        ...     implements(IFoo)
        ...     classProvides(IFooFactory)
        >>> [i.getName() for i in C.__providedBy__]
        ['IFooFactory']
        >>> [i.getName() for i in C().__providedBy__]
        ['IFoo']
        """
        if inst is None:
            # Class access: delegate to the generic spec computation for
            # the class object itself.
            return getObjectSpecification(cls)
        # Instance access: a direct declaration on the instance wins,
        # otherwise fall back to what the class implements.
        direct = getattr(inst, '__provides__', None)
        if direct is None:
            return implementedBy(cls)
        return direct
# Use the pure-Python descriptor; a C-optimized variant (if available) is
# substituted by the import block at the end of the module.
ObjectSpecificationDescriptor = ObjectSpecificationDescriptorPy
##############################################################################
def _normalizeargs(sequence, output=None):
    """Normalize declaration arguments.

    Declaration arguments may contain Declarations, tuples, or single
    interfaces.  Anything but individual interfaces or implements specs is
    expanded recursively, producing a flat list.
    """
    if output is None:
        output = []
    kind = sequence.__class__
    # Interfaces and implements specs are leaves; anything else is assumed
    # iterable and flattened.
    if InterfaceClass in kind.__mro__ or Implements in kind.__mro__:
        output.append(sequence)
    else:
        for item in sequence:
            _normalizeargs(item, output)
    return output
# Shared empty-declaration singleton returned by the fallback paths above.
_empty = Declaration()

# Replace the hot-path helpers with their C implementations when the
# optimization extension is available.
try:
    import _zope_interface_coptimizations
except ImportError:
    pass
else:
    from _zope_interface_coptimizations import implementedBy, providedBy
    from _zope_interface_coptimizations import getObjectSpecification
    from _zope_interface_coptimizations import ObjectSpecificationDescriptor

objectSpecificationDescriptor = ObjectSpecificationDescriptor()
| gpl-2.0 |
darkleons/BE | addons/website_customer/controllers/main.py | 64 | 4267 | # -*- coding: utf-8 -*-
import openerp
from openerp import SUPERUSER_ID
from openerp.addons.web import http
from openerp.addons.website.models.website import unslug
from openerp.tools.translate import _
from openerp.addons.web.http import request
import werkzeug.urls
class WebsiteCustomer(http.Controller):
    """Public website controller listing published customer references."""

    # Number of customer references displayed per pager page.
    _references_per_page = 20

    @http.route([
        '/customers',
        '/customers/page/<int:page>',
        '/customers/country/<int:country_id>',
        '/customers/country/<country_name>-<int:country_id>',
        '/customers/country/<int:country_id>/page/<int:page>',
        '/customers/country/<country_name>-<int:country_id>/page/<int:page>',
    ], type='http', auth="public", website=True)
    def customers(self, country_id=0, page=0, country_name='', **post):
        """Render the paged customer index, optionally filtered by country
        and by a free-text search over partner name and description."""
        cr, uid, context = request.cr, request.uid, request.context
        country_obj = request.registry['res.country']
        partner_obj = request.registry['res.partner']
        partner_name = post.get('search', '')

        # Only published partners that are assigned to a reseller.
        domain = [('website_published', '=', True), ('assigned_partner_id', '!=', False)]
        if partner_name:
            domain += [
                '|',
                ('name', 'ilike', post.get("search")),
                ('website_description', 'ilike', post.get("search"))
            ]

        # group by country, based on customers found with the search(domain)
        countries = partner_obj.read_group(
            cr, openerp.SUPERUSER_ID, domain, ["id", "country_id"],
            groupby="country_id", orderby="country_id", context=request.context)
        country_count = partner_obj.search(
            cr, openerp.SUPERUSER_ID, domain, count=True, context=request.context)

        if country_id:
            domain += [('country_id', '=', country_id)]
            # Make sure the selected country still appears in the sidebar
            # even when the current search matches no partner there.
            if not any(x['country_id'][0] == country_id for x in countries):
                country = country_obj.read(cr, uid, country_id, ['name'], context)
                if country:
                    countries.append({
                        'country_id_count': 0,
                        'country_id': (country_id, country['name'])
                    })
                    countries.sort(key=lambda d: d['country_id'][1])

        countries.insert(0, {
            'country_id_count': country_count,
            'country_id': (0, _("All Countries"))
        })

        # search customers to display
        partner_count = partner_obj.search_count(cr, openerp.SUPERUSER_ID, domain, context=request.context)

        # pager
        url = '/customers'
        if country_id:
            url += '/country/%s' % country_id
        pager = request.website.pager(
            url=url, total=partner_count, page=page, step=self._references_per_page,
            scope=7, url_args=post
        )

        partner_ids = partner_obj.search(request.cr, openerp.SUPERUSER_ID, domain,
                                         offset=pager['offset'], limit=self._references_per_page,
                                         context=request.context)
        google_map_partner_ids = ','.join(map(str, partner_ids))
        partners = partner_obj.browse(request.cr, openerp.SUPERUSER_ID, partner_ids, request.context)

        values = {
            'countries': countries,
            'current_country_id': country_id or 0,
            'partners': partners,
            'google_map_partner_ids': google_map_partner_ids,
            'pager': pager,
            'post': post,
            # Fix: use werkzeug.urls.url_encode, matching the module-level
            # "import werkzeug.urls"; the bare werkzeug.url_encode alias was
            # a lazy re-export removed in Werkzeug 1.0.
            'search_path': "?%s" % werkzeug.urls.url_encode(post),
        }
        return request.website.render("website_customer.index", values)

    # Do not use semantic controller due to SUPERUSER_ID
    @http.route(['/customers/<partner_id>'], type='http', auth="public", website=True)
    def partners_detail(self, partner_id, **post):
        """Render the detail page for one published partner; fall back to
        the customer index when the slug does not resolve."""
        # NOTE: this rebinding shadows the translation helper ``_`` locally;
        # harmless here since ``_`` is not used afterwards in this method.
        _, partner_id = unslug(partner_id)
        if partner_id:
            partner = request.registry['res.partner'].browse(request.cr, SUPERUSER_ID, partner_id, context=request.context)
            if partner.exists() and partner.website_published:
                values = {}
                values['main_object'] = values['partner'] = partner
                return request.website.render("website_customer.details", values)
        return self.customers(**post)
| agpl-3.0 |
phantomnat/crazy-bike-client | lib/cfclient/utils/guiconfig.py | 24 | 2174 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# || ____ _ __
# +------+ / __ )(_) /_______________ _____ ___
# | 0xBC | / __ / / __/ ___/ ___/ __ `/_ / / _ \
# +------+ / /_/ / / /_/ /__/ / / /_/ / / /_/ __/
# || || /_____/_/\__/\___/_/ \__,_/ /___/\___/
#
# Copyright (C) 2013 Bitcraze AB
#
# Crazyflie Nano Quadcopter Client
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""
Gives access for reading and writing application configuration parameters
"""
__author__ = 'Bitcraze AB'
__all__ = ['GuiConfig']
import logging
from cfclient.utils.config import Config
from PyQt4.QtCore import QString
logger = logging.getLogger(__name__)
class GuiConfig(Config):
    """Singleton class for accessing application configuration.

    Thin wrapper around ``Config`` that coerces Qt string types on write
    and unicode values on read, so callers always deal with plain ``str``.
    """

    def set(self, key, value):
        """Set the value of a config parameter, converting QString to str."""
        strval = value
        if isinstance(value, QString):
            strval = str(value)
        Config.set(self, key, strval)

    def get(self, key):
        """Get the value of a config parameter.

        Looks in the writable data first, then the read-only defaults.

        Raises:
            KeyError: if the key exists in neither store.
        """
        value = None
        if key in self._data:
            value = self._data[key]
        elif key in self._readonly:
            value = self._readonly[key]
        else:
            # Fixed typo in the original message ("paramter").
            raise KeyError("Could not get the parameter [%s]" % key)

        if isinstance(value, unicode):
            value = str(value)
        return value

    def dump(self):
        """Print the writable configuration data (debug helper)."""
        # print(...) with a single argument behaves identically on
        # Python 2 and 3; the original "print self._data" statement form
        # is a syntax error on Python 3.
        print(self._data)
| gpl-2.0 |
tpodowd/boto | tests/integration/route53/test_alias_resourcerecordsets.py | 113 | 4876 | # Copyright (c) 2014 Netflix, Inc. Stefan Praszalowicz
# Copyright (c) 2014 42Lines, Inc. Jim Browne
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import time
from tests.compat import unittest
from boto.route53.connection import Route53Connection
from boto.route53.record import ResourceRecordSets
from boto.route53.exception import DNSServerError
class TestRoute53AliasResourceRecordSets(unittest.TestCase):
    """Integration tests for Route53 alias record sets (hits live AWS)."""

    route53 = True

    def setUp(self):
        super(TestRoute53AliasResourceRecordSets, self).setUp()
        self.conn = Route53Connection()
        # Unique throwaway zone per run to avoid cross-run collisions.
        self.base_domain = 'boto-test-%s.com' % str(int(time.time()))
        self.zone = self.conn.create_zone(self.base_domain)
        # a standard record to use as the target for our alias
        self.zone.add_a('target.%s' % self.base_domain, '102.11.23.1')

    def tearDown(self):
        self.zone.delete_a('target.%s' % self.base_domain)
        self.zone.delete()
        super(TestRoute53AliasResourceRecordSets, self).tearDown()

    def test_incomplete_add_alias_failure(self):
        """An alias change missing alias_evaluate_target_health must fail."""
        base_record = dict(name="alias.%s." % self.base_domain,
                           type="A",
                           alias_dns_name="target.%s" % self.base_domain,
                           alias_hosted_zone_id=self.zone.id,
                           identifier="boto:TestRoute53AliasResourceRecordSets")

        rrs = ResourceRecordSets(self.conn, self.zone.id)
        rrs.add_change(action="UPSERT", **base_record)
        try:
            self.assertRaises(DNSServerError, rrs.commit)
        except Exception:
            # Fix: was a bare ``except:`` which also swallowed
            # KeyboardInterrupt/SystemExit. If the call somehow goes
            # through, delete our unexpected new record before failing
            # the test.
            rrs = ResourceRecordSets(self.conn, self.zone.id)
            rrs.add_change(action="DELETE", **base_record)
            rrs.commit()
            raise

    def test_add_alias(self):
        """UPSERT then DELETE a fully-specified alias record."""
        base_record = dict(name="alias.%s." % self.base_domain,
                           type="A",
                           alias_evaluate_target_health=False,
                           alias_dns_name="target.%s" % self.base_domain,
                           alias_hosted_zone_id=self.zone.id,
                           identifier="boto:TestRoute53AliasResourceRecordSets")

        rrs = ResourceRecordSets(self.conn, self.zone.id)
        rrs.add_change(action="UPSERT", **base_record)
        rrs.commit()

        rrs = ResourceRecordSets(self.conn, self.zone.id)
        rrs.add_change(action="DELETE", **base_record)
        rrs.commit()

    def test_set_alias(self):
        """Build the alias via set_alias() with explicit health-check flag."""
        base_record = dict(name="alias.%s." % self.base_domain,
                           type="A",
                           identifier="boto:TestRoute53AliasResourceRecordSets")

        rrs = ResourceRecordSets(self.conn, self.zone.id)
        new = rrs.add_change(action="UPSERT", **base_record)
        new.set_alias(self.zone.id, "target.%s" % self.base_domain, False)
        rrs.commit()

        rrs = ResourceRecordSets(self.conn, self.zone.id)
        delete = rrs.add_change(action="DELETE", **base_record)
        delete.set_alias(self.zone.id, "target.%s" % self.base_domain, False)
        rrs.commit()

    def test_set_alias_backwards_compatability(self):
        """set_alias() must still work without the health-check argument."""
        base_record = dict(name="alias.%s." % self.base_domain,
                           type="A",
                           identifier="boto:TestRoute53AliasResourceRecordSets")

        rrs = ResourceRecordSets(self.conn, self.zone.id)
        new = rrs.add_change(action="UPSERT", **base_record)
        new.set_alias(self.zone.id, "target.%s" % self.base_domain)
        rrs.commit()

        rrs = ResourceRecordSets(self.conn, self.zone.id)
        delete = rrs.add_change(action="DELETE", **base_record)
        delete.set_alias(self.zone.id, "target.%s" % self.base_domain)
        rrs.commit()
# Allow running this integration-test module directly.
if __name__ == '__main__':
    unittest.main()
| mit |
asterix135/whoshouldivotefor | explorer/migrations/0008_auto_20170627_0253.py | 1 | 1741 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-06-27 06:53
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated schema migration: adds the IssueCategory model,
    # attaches related_names to the Answer.question and Poll.election FKs,
    # and recreates Question.poll (presumably to attach its related_name
    # and default — verify against the model definitions) before adding the
    # new Question.category FK.

    dependencies = [
        ('explorer', '0007_auto_20170626_0543'),
    ]

    operations = [
        # New lookup table of issue categories; 'category' text is unique.
        migrations.CreateModel(
            name='IssueCategory',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('category', models.CharField(max_length=100, unique=True)),
            ],
        ),
        # Re-declare Answer.question with related_name='answers'.
        migrations.AlterField(
            model_name='answer',
            name='question',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='answers', to='explorer.Question'),
        ),
        # Re-declare Poll.election with related_name='polls'.
        migrations.AlterField(
            model_name='poll',
            name='election',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='polls', to='explorer.Election'),
        ),
        # Drop and re-add Question.poll so it gains related_name='questions';
        # default=1 backfills existing rows (Poll pk=1 must exist).
        migrations.RemoveField(
            model_name='question',
            name='poll',
        ),
        migrations.AddField(
            model_name='question',
            name='poll',
            field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, related_name='questions', to='explorer.Poll'),
            preserve_default=False,
        ),
        # New FK to IssueCategory; PROTECT prevents deleting a category that
        # still has questions. default=1 backfills existing rows.
        migrations.AddField(
            model_name='question',
            name='category',
            field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.PROTECT, to='explorer.IssueCategory'),
            preserve_default=False,
        ),
    ]
| mit |
suiyuan2009/tensorflow | tensorflow/contrib/tpu/python/tpu/tpu_feed.py | 15 | 25641 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===================================================================
"""Helper library for handling infeed between hosts and TPUs.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.contrib.tpu.python.ops import tpu_ops
from tensorflow.contrib.tpu.python.tpu import tpu_sharding
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
class InfeedQueue(object):
"""A helper object to build a device infeed queue.
The InfeedQueue builds the host-side and device-side Ops to enqueue and
dequeue elements, respectively, and ensures that their types and
shapes match.
"""
def __init__(self,
             number_of_tuple_elements=None,
             tuple_types=None,
             tuple_shapes=None,
             shard_dimensions=None,
             name=None):
    """Creates a new InfeedQueue with the given configuration.

    The configuration need not be fully specified at creation since it
    can be modified subsequently by methods that set the values
    explicitly or infer them from the shapes of inputs.

    Args:
      number_of_tuple_elements: the number of Tensors fed atomically through the
        queue, must be present unless it can be inferred from other arguments.
      tuple_types: if not None, a list of types of the elements of the queue.
      tuple_shapes: if not None, a list of shapes of the elements of the queue.
      shard_dimensions: if not None, a list of dimensions on which the
        elements of the queue should be sharded during automatic
        parallelization.
      name: the name of the queue.

    Raises:
      ValueError: if number_of_tuple_elements <= 0; or
        number_of_tuple_arguments, tuple_types, tuple_shapes, and
        shard_dimensions are all None; or the length of tuple_types,
        tuple_shapes, or shard_dimensions is not equal to
        number_of_tuple_elements; or any element of shard_dimensions
        can't be converted to a Dimension.
      TypeError: if any element of tuple_types or tuple_shapes can't
        be converted to a dtype or TensorShape, respectively.
    """
    # Once frozen, types/shapes may only be "re-set" to identical values.
    self._frozen = False
    self._generated_enqueue_ops = False
    self._generated_dequeue_op = False
    self._name = "InfeedQueue" if name is None else name
    if number_of_tuple_elements is None:
        # Infer the element count from whichever configuration list was
        # supplied, trying types first, then shapes, then shard dimensions.
        if tuple_types is not None:
            number_of_tuple_elements = len(tuple_types)
        elif tuple_shapes is not None:
            number_of_tuple_elements = len(tuple_shapes)
        elif shard_dimensions is not None:
            number_of_tuple_elements = len(shard_dimensions)
        else:
            raise ValueError(
                "number of tuple elements cannot be inferred from InfeedQueue "
                "constructor"
            )
    if number_of_tuple_elements <= 0:
        raise ValueError("number_of_tuple_elements %d must be > 0" %
                         number_of_tuple_elements)
    # Make an empty sharding policy for each tuple element.
    self._sharding_policies = [
        tpu_sharding.ShardingPolicy()
        for _ in xrange(number_of_tuple_elements)
    ]
    # The set_* methods below validate lengths against the (now known)
    # number_of_tuple_elements, so they must run after the policies exist.
    if tuple_types is not None:
        self.set_tuple_types(tuple_types)
    else:
        self._tuple_types = None
    if tuple_shapes is not None:
        self.set_tuple_shapes(tuple_shapes)
    else:
        self._tuple_shapes = None
    if shard_dimensions is not None:
        self.set_shard_dimensions(shard_dimensions)
    self._validate()
def _validate(self):
    """Checks that the configuration is self-consistent.

    Raises:
      ValueError: if the shapes and sharding policies don't match.
    """
    if self._tuple_shapes is None:
        # Nothing to cross-check until shapes have been configured.
        return
    for policy, shape in zip(self._sharding_policies, self._tuple_shapes):
        # get_sharded_shape raises if the policy cannot shard this shape.
        policy.get_sharded_shape(shape)
@property
def number_of_tuple_elements(self):
    """Returns the number of InfeedQueue tuple elements."""
    # One sharding policy is kept per tuple element, so its length is the
    # element count.
    return len(self._sharding_policies)
@property
def tuple_types(self):
    """Returns the types of the InfeedQueue tuple elements."""
    # None until set_tuple_types() has been called.
    return self._tuple_types
def set_tuple_types(self, tuple_types):
    """Sets the type of each element of the queue.

    tuple_types must be a list of length
    self.number_of_tuple_elements, and each element must be
    convertible to a dtype.

    Args:
      tuple_types: the types of each queue element.

    Raises:
      ValueError: if tuple_types is not of length
        self.number_of_tuple_elements.
      TypeError: if an element of tuple_types cannot be converted to a
        dtype.
    """
    if len(tuple_types) != self.number_of_tuple_elements:
        raise ValueError("tuple_types is %s, but must be a list of length %d" %
                         (str(tuple_types), self.number_of_tuple_elements))
    if not self._frozen:
        try:
            self._tuple_types = [dtypes.as_dtype(t) for t in tuple_types]
        except TypeError as e:
            raise TypeError(
                "tuple_types is %s, but must be a list of elements each "
                "convertible to dtype: got error %s" % (str(tuple_types), str(e)))
        return
    # Frozen configuration: the new types must match the existing ones.
    for frozen_type, updated_type in zip(self._tuple_types, tuple_types):
        if frozen_type != updated_type:
            raise ValueError(
                "Trying to update InfeedQueue with frozen configuration with an "
                "incompatible type. Frozen types are %s, updated types are %s" % (
                    str(self._tuple_types), str(tuple_types)))
@property
def tuple_shapes(self):
    """Returns the shapes of the InfeedQueue tuple elements."""
    # None until set_tuple_shapes() has been called.
    return self._tuple_shapes
def set_tuple_shapes(self, tuple_shapes):
    """Sets the shape of each element of the queue.

    tuple_shapes must be a list of length
    self.number_of_tuple_elements, and each element must be
    convertible to a TensorShape.

    Args:
      tuple_shapes: the shapes of each queue element.

    Raises:
      ValueError: if tuple_shapes is not of length
        self.number_of_tuple_elements.
      TypeError: if an element of tuple_shapes cannot be converted to
        a TensorShape.
    """
    if len(tuple_shapes) != self.number_of_tuple_elements:
        raise ValueError("tuple_shapes is %s, but must be a list of length %d" %
                         (str(tuple_shapes), self.number_of_tuple_elements))
    try:
        converted = [tensor_shape.as_shape(shape) for shape in tuple_shapes]
    except (ValueError, TypeError) as e:
        raise TypeError(
            "tuple_shapes is %s, but must be a list of elements each "
            "convertible to TensorShape: got error %s" % (str(tuple_shapes),
                                                          str(e)))
    if self._frozen:
        # Frozen configuration: the new shapes must match the existing ones.
        for frozen_shape, updated_shape in zip(self._tuple_shapes, converted):
            if frozen_shape != updated_shape:
                raise ValueError(
                    "Trying to update InfeedQueue with frozen configuration with an "
                    "incompatible shape. Frozen shapes are %s, updated shapes are %s"
                    % (str(self._tuple_shapes), str(tuple_shapes)))
    else:
        self._tuple_shapes = converted
        self._validate()
@property
def sharding_policies(self):
    """Returns the sharding policies of the InfeedQueue tuple elements."""
    # One policy per tuple element, in element order.
    return self._sharding_policies
@property
def shard_dimensions(self):
    """Gets the shard dimension of each tuple element.

    Returns:
      A list of length number_of_tuple_elements, where each list entry
      is the shard dimension of that tuple element or None if the
      shard dimension has not been set.
    """
    # Each per-element policy tracks its own shard dimension; collect them
    # in element order.
    return [policy.shard_dimension for policy in self._sharding_policies]
def set_shard_dimensions(self, shard_dimensions):
    """Sets the shard_dimension of each element of the queue.

    shard_dimensions must be a list of length
    self.number_of_tuple_elements, and each element must be
    convertible to a Dimension compatible with self.tuple_shapes.

    Args:
      shard_dimensions: the dimensions of each queue element.

    Raises:
      ValueError: if shard_dimensions is not of length
        self.number_of_tuple_elements; or an element of
        shard_dimensions cannot be converted to a Dimension; or an
        element of shard_dimensions is a Dimension that is out of
        range for the corresponding tuple element shape.
    """
    if len(shard_dimensions) != self.number_of_tuple_elements:
        raise ValueError("shard_dimensions is %s, but must be a list of length %d"
                         % (str(shard_dimensions),
                            self.number_of_tuple_elements))
    # Each policy validates (and may reject) its own dimension.
    for i, dimension in enumerate(shard_dimensions):
        self._sharding_policies[i].set_shard_dimension(dimension)
    self._validate()
@property
def number_of_shards(self):
  """Gets the number of shards to use for the InfeedQueue.

  Returns:
    Number of shards or None if the number of shards has not been set.
  """
  # The number of shards is always the same for all the policies, so
  # querying the first policy is sufficient.
  return self._sharding_policies[0].number_of_shards
def set_number_of_shards(self, number_of_shards):
  """Sets how many ways the InfeedQueue is sharded.

  The shard count is applied uniformly: every sharding policy held by
  this queue receives the same value.

  Args:
    number_of_shards: number of ways to shard the InfeedQueue.

  Raises:
    ValueError: if number_of_shards is not > 0; or the policies have
      been frozen and number_of_shards was already set to something
      else.
  """
  for sharding_policy in self._sharding_policies:
    sharding_policy.set_number_of_shards(number_of_shards)
  # Validate the combined configuration after updating all policies.
  self._validate()
def set_configuration_from_input_tensors(self, input_tensors):
  """Sets the shapes and types of the queue tuple elements.

  input_tensors is a list of Tensors whose types and shapes are used
  to set the queue configuration.

  Args:
    input_tensors: list of Tensors of the same types and shapes as
      the desired queue Tuple.

  Raises:
    ValueError: if input_tensors is not a list of length
      self.number_of_tuple_elements
  """
  if len(input_tensors) != self.number_of_tuple_elements:
    # BUG FIX: the format arguments were previously passed as a second
    # argument to ValueError (comma instead of %), so the message was never
    # interpolated. Use %-interpolation to build the message.
    raise ValueError(
        "input_tensors is %s, but should be a list of %d Tensors" % (
            str(input_tensors), self.number_of_tuple_elements))
  self.set_tuple_shapes([t.shape for t in input_tensors])
  self.set_tuple_types([t.dtype for t in input_tensors])
def set_configuration_from_sharded_input_tensors(self, input_tensors):
  """Sets the shapes and types of the queue tuple elements.

  input_tensors is a list of lists of Tensors whose types and shapes are used
  to set the queue configuration. The length of the outer list is the number
  of shards required, and each inner list is the tuple of Tensors to use to
  determine the types and shapes of the corresponding shard. This method
  depends on the shard dimension, and calling it freezes the shard policy.

  Args:
    input_tensors: list of lists of Tensors. The outer list length corresponds
      to the desired number of shards, and each inner list is the size
      and shape of the desired configuration of the corresponding shard.

  Raises:
    ValueError: if any inner list is not a list of length
      self.number_of_tuple_elements; or the inner lists do not combine to
      form a consistent unsharded shape.
    TypeError: if the types of the Tensors in the inner lists do not match.
  """
  if not self._frozen:
    # Unset the tuple shapes in case the configuration becomes
    # transiently inconsistent.
    self._tuple_shapes = None
  number_of_shards = len(input_tensors)
  self.set_number_of_shards(number_of_shards)
  for t in input_tensors:
    if len(t) != self.number_of_tuple_elements:
      raise ValueError(
          "input_tensors is %s but must be a list of lists, where each inner"
          " list has length number_of_tuple_elements=%d" % (
              str(input_tensors), self.number_of_tuple_elements))
  # Transpose the inputs to make a list of shard shapes for each tuple
  # element.
  sharded_shapes = [[t[i].shape for t in input_tensors]
                    for i in xrange(self.number_of_tuple_elements)]
  # For each tuple, get the unsharded shape using that tuple's policy.
  unsharded_shapes = [
      policy.get_unsharded_shape(s)
      for (policy, s) in zip(self._sharding_policies, sharded_shapes)
  ]
  self.set_tuple_shapes(unsharded_shapes)
  # All shards must agree on dtypes; compare every shard against shard 0.
  for i in xrange(1, self.number_of_shards):
    for (t1, t2) in zip(input_tensors[0], input_tensors[i]):
      if t1.dtype != t2.dtype:
        raise TypeError(
            "types of the tuple elements of input_tensors %s are not "
            "consistent" % str(input_tensors))
  self.set_tuple_types([t.dtype for t in input_tensors[0]])
def freeze(self):
  """Freezes the InfeedQueue so it can no longer be modified.

  The configuration is implicitly frozen before any host-side or
  device-side Ops are generated. The configuration cannot be frozen
  until the types and shapes of the tuple elements have been set.

  Raises:
    ValueError: if the types or shapes of the tuple elements have not been
      set.
  """
  self._frozen = True
  if self._tuple_types is None:
    raise ValueError(
        "Can't freeze an InfeedQueue without setting all tuple types.")
  # A missing shape list and a shape with unknown dims are both reported
  # with the same error message, so the two checks are folded together.
  if self._tuple_shapes is None or any(
      shape.dims is None for shape in self._tuple_shapes):
    raise ValueError(
        "Can't freeze an InfeedQueue without setting all tuple shapes.")
  for policy in self._sharding_policies:
    policy.freeze()
  self._validate()
def generate_dequeue_op(self):
  """Generates the device-side Op to dequeue a tuple from the queue.

  Implicitly freezes the queue configuration if it is not already
  frozen, which will raise errors if the shapes and types have not
  been fully specified.

  Returns:
    A list of Outputs corresponding to a shard of infeed dequeued
    into XLA, suitable for use within a replicated block.

  Raises:
    ValueError: if the types or shapes of the tuple elements have not been
      set; or if a dequeue op has already been generated.
  """
  self.freeze()
  if self._generated_dequeue_op:
    raise ValueError("Can't generate two dequeue Ops from the same queue")
  self._generated_dequeue_op = True
  dequeue_name = "%s/dequeue" % self._name
  # Each tuple element is dequeued in its per-shard (sharded) shape.
  sharded_shapes = []
  for shape, policy in zip(self._tuple_shapes, self._sharding_policies):
    sharded_shapes.append(policy.get_sharded_shape(shape))
  return tpu_ops.infeed_dequeue_tuple(
      dtypes=self._tuple_types, shapes=sharded_shapes, name=dequeue_name)
def _generate_enqueue_op(self,
                         inputs,
                         name_prefix,
                         index,
                         device=None,
                         tpu_ordinal=-1):
  """Generate a host-side Op to enqueue a tuple to the queue.

  If device is None the inputs are all required to have the same
  device specification, and the enqueue Op is colocated with
  inputs[0]. Otherwise the enqueue Op is placed on 'device'.

  Args:
    inputs: a list of Tensors with the types and shapes of the tuple elements.
    name_prefix: the base name for the Op.
    index: the shard index, used to uniquify the Op name.
    device: device to place the Op on, or None if it should be
      colocated with the inputs.
    tpu_ordinal: ordinal of the TPU device on the host to use for
      infeed if device is a CPU device. Should be set to -1 if device
      is a TPU device.

  Returns:
    An Op corresponding to a shard of infeed enqueued at the host,
    suitable for use within a replicated block.

  Raises:
    ValueError: if device is None and inputs do not all have the
      same device specification.
  """
  full_name = "%s/%d" % (name_prefix, index)
  shapes = [t.shape for t in inputs]
  if device is None:
    devices = [t.device for t in inputs]
    for i in xrange(1, self.number_of_tuple_elements):
      if devices[0] != devices[i]:
        # BUG FIX: the format arguments were previously passed as extra
        # ValueError arguments (comma instead of %), so the message was
        # never interpolated.
        raise ValueError(
            "input devices for shard %d are %s, but should all be the same"
            % (index, str(devices)))
    with ops.colocate_with(inputs[0]):
      return tpu_ops.infeed_enqueue_tuple(
          inputs=inputs,
          shapes=shapes,
          name=full_name,
          device_ordinal=tpu_ordinal)
  else:
    with ops.device(device):
      return tpu_ops.infeed_enqueue_tuple(
          inputs=inputs,
          shapes=shapes,
          name=full_name,
          device_ordinal=tpu_ordinal)
def generate_enqueue_ops(self, sharded_inputs, tpu_ordinal_function=None):
  """Generates the host-side Ops to enqueue the shards of a tuple.

  sharded_inputs is a list, one for each shard, of lists of
  Tensors. sharded_inputs[0] is the tuple of Tensors to use to feed
  shard 0 if the queue. Returns the host-side Ops that must be run to
  enqueue the sharded tuple. The Op for shard i is colocated with the inputs
  for shard i.

  Implicitly freezes the queue configuration if it is not already
  frozen. If the configuration has already been frozen, and is not
  compatible with the types and shapes of sharded_inputs, an error
  will be raised.

  Args:
    sharded_inputs: a list of lists of Tensors. The length of the outer list
      determines the number of shards. Each inner list indicates the types
      and shapes of the tuples in the corresponding shard.
    tpu_ordinal_function: if not None, a function that takes the
      shard index as input and returns the ordinal of the TPU device
      the shard's infeed should be placed on. tpu_ordinal_function must be
      set if the inputs are placed on CPU devices.

  Returns:
    A list of host-side Ops, one for each shard, that when executed together
    will enqueue a full-size element of infeed.

  Raises:
    ValueError: if the queue configuration has previously been frozen and the
      shapes of the elements of sharded_inputs are not compatible with the
      frozen configuration; or if the shapes of the elements of sharded_inputs
      don't form a consistent unsharded tuple; or if the elements of a tuple
      have different device constraints.
    TypeError: if the queue configuration has previously been frozen and the
      types of the elements of sharded_inputs are not compatible with the
      frozen configuration; or if the types of the elements of sharded_inputs
      don't form a consistent unsharded tuple.
  """
  # Deriving the configuration from the inputs also validates them against
  # any previously-frozen configuration.
  self.set_configuration_from_sharded_input_tensors(sharded_inputs)
  self.freeze()
  if self._generated_enqueue_ops:
    raise ValueError("Can't generate two enqueue Ops from the same queue")
  self._generated_enqueue_ops = True
  if tpu_ordinal_function is None:
    # Default: -1, i.e. the inputs are already on TPU devices.
    tpu_ordinal_function = lambda index: -1
  name_prefix = "%s/enqueue" % self._name
  enqueue_ops = []
  for (shard, index) in zip(sharded_inputs, xrange(self.number_of_shards)):
    enqueue_ops.append(
        self._generate_enqueue_op(shard, name_prefix, index,
                                  tpu_ordinal=tpu_ordinal_function(index)))
  return enqueue_ops
# TODO(misard) Generalize this to the case of systems that don't
# have 8 devices per host, and figure out what to do with
# model-parallelism.
def _default_placement_function(self, index):
  """Returns the host CPU device for shard `index`, assuming 8 TPUs per host.

  Uses floor division so the task id stays an integer under Python 3,
  where `/` would produce a float.
  """
  return "/task:%d/device:CPU:0" % (index // 8)
def _default_ordinal_function(self, index):
  # Default TPU ordinal for shard `index`, assuming 8 devices per host
  # (see the TODO above about generalizing this).
  return index % 8
# TODO(b/36470756) remove this from tutorials once we have a better story
# for automatic placement of input pipelines.
def split_inputs_and_generate_enqueue_ops(self,
                                          inputs,
                                          global_tpu_id=None,
                                          placement_function=None,
                                          tpu_ordinal_function=None):
  """POORLY-PERFORMING ON MULTI-HOST SYSTEMS.

  Generates the host-side Ops to enqueue a tuple.

  This method performs poorly because it takes an entire input on a single
  host, splits it, and distributes it to all of the cores. It is present only
  to simplify tutorial examples.

  inputs is a list of Tensors to use to feed the queue. Each input is split
  into self.number_of_shards shards. Returns an Op for each shard to enqueue
  the shard. The Op for shard i is placed on device placement_function(i).

  Implicitly freezes the queue configuration if it is not already
  frozen. If the configuration has already been frozen, and is not
  compatible with the types and shapes of inputs, an error
  will be raised.

  Args:
    inputs: a list of Tensors which indicates the types and shapes of the
      queue tuple.
    global_tpu_id: if not None, a Numpy 2D array indicating the global
      id of each TPU device in the system. The outer dimension of the
      array is host task id, and the inner dimension is device ordinal,
      so e.g., global_tpu_id[x][y] indicates the global id of device
      /task:x/device:TPU_NODE:y. If global_tpu_id is not None, but
      placement_function and ordinal_function are None, then global_tpu_id
      will be used to place infeed on the TPUs with the first k global ids,
      where k is the number of shards in the queue.
    placement_function: if not None, a function that takes the shard
      index as input and returns a device string indicating which
      device the shard's infeed should be placed on. If placement_function
      and tpu_ordinal_function are None, inputs are sharded round-robin
      across the devices in the system.
    tpu_ordinal_function: if not None, a function that takes the
      shard index as input and returns the ordinal of the TPU device
      the shard's infeed should be placed on. If placement_function
      and tpu_ordinal_function are None, inputs are sharded round-robin
      across the devices in the system.

  Returns:
    A list of host-side Ops, one for each shard, that when executed together
    will enqueue a full-size element of infeed.

  Raises:
    ValueError: if the queue configuration has previously been frozen and the
      shapes of the elements of inputs are not compatible with the frozen
      configuration.
    TypeError: if the queue configuration has previously been frozen and the
      types of the elements of inputs are not compatible with the frozen
      configuration.
  """
  # Choose placement/ordinal functions: explicit arguments win, then
  # global_tpu_id-derived maps, then the 8-devices-per-host defaults.
  if global_tpu_id is None:
    if placement_function is None:
      placement_function = self._default_placement_function
    if tpu_ordinal_function is None:
      tpu_ordinal_function = self._default_ordinal_function
  else:
    # Invert the (host, ordinal) -> global id table so shard index i maps
    # to the device with global id i.
    global_id_map = {}
    for host, devices in enumerate(global_tpu_id):
      for ordinal, global_id in enumerate(devices):
        global_id_map[global_id] = (host, ordinal)

    def _placement_function_from_map(index):
      return "/task:%d/device:CPU:0" % global_id_map[index][0]

    def _ordinal_function_from_map(index):
      return global_id_map[index][1]

    if placement_function is None:
      placement_function = _placement_function_from_map
    if tpu_ordinal_function is None:
      tpu_ordinal_function = _ordinal_function_from_map
  self.set_configuration_from_input_tensors(inputs)
  self.freeze()
  if self._generated_enqueue_ops:
    raise ValueError("Can't generate two enqueue Ops from the same queue")
  self._generated_enqueue_ops = True
  split_name_prefix = "%s/split" % self._name
  if self.number_of_shards == 1:
    # No split needed: each input is the sole shard of its tuple element.
    transposed_sharded_inputs = [[inp] for inp in inputs]
  else:
    # Split each tuple element along its policy's shard dimension.
    transposed_sharded_inputs = [
        array_ops.split(
            inp,
            self.number_of_shards,
            axis=policy.shard_dimension,
            name="%s/%d" % (split_name_prefix, index))
        for (inp, policy, index) in zip(inputs, self._sharding_policies,
                                        xrange(self.number_of_tuple_elements))
    ]
  # Transpose from [tuple_element][shard] to [shard][tuple_element].
  sharded_inputs = [[shard[i] for shard in transposed_sharded_inputs]
                    for i in xrange(self.number_of_shards)]
  name_prefix = "%s/enqueue" % self._name
  return [
      self._generate_enqueue_op(
          shard,
          name_prefix,
          index,
          device=placement_function(index),
          tpu_ordinal=tpu_ordinal_function(index))
      for (shard, index) in zip(sharded_inputs, xrange(self.number_of_shards))
  ]
| apache-2.0 |
stverhae/incubator-airflow | airflow/operators/check_operator.py | 42 | 9423 | # -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from builtins import zip
from builtins import str
import logging
from airflow.exceptions import AirflowException
from airflow.hooks.base_hook import BaseHook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class CheckOperator(BaseOperator):
    """
    Performs checks against a db. The ``CheckOperator`` expects
    a sql query that will return a single row. Every value on that
    first row is evaluated with Python ``bool`` casting; if any value
    casts to ``False`` the check fails and the task errors out.

    Remember that Python treats the following as ``False``:

    * ``False``
    * ``0``
    * Empty string (``""``)
    * Empty list (``[]``)
    * Empty dictionary or set (``{}``)

    So a query like ``SELECT COUNT(*) FROM foo`` fails only when the
    count is ``0``. More elaborate queries can compare row counts with
    an upstream table, compare today's partition against yesterday's,
    or verify that a set of metrics stays within 3 standard deviations
    of the 7 day average. Placed on the critical path of a DAG this acts
    as a data quality gate; placed off to the side it can alert without
    blocking progress.

    Note that this is an abstract class and get_db_hook
    needs to be defined. Whereas a get_db_hook is hook that gets a
    single record from an external source.

    :param sql: the sql to be executed
    :type sql: string
    """

    template_fields = ('sql',)
    template_ext = ('.hql', '.sql',)
    ui_color = '#fff7e6'

    @apply_defaults
    def __init__(
            self, sql,
            conn_id=None,
            *args, **kwargs):
        super(CheckOperator, self).__init__(*args, **kwargs)
        self.conn_id = conn_id
        self.sql = sql

    def execute(self, context=None):
        logging.info('Executing SQL check: ' + self.sql)
        records = self.get_db_hook().get_first(self.sql)
        logging.info("Record: " + str(records))
        if not records:
            raise AirflowException("The query returned None")
        # Every value in the first row must be truthy for the check to pass.
        if not all(bool(value) for value in records):
            exceptstr = "Test failed.\nQuery:\n{q}\nResults:\n{r!s}"
            raise AirflowException(exceptstr.format(q=self.sql, r=records))
        logging.info("Success.")

    def get_db_hook(self):
        return BaseHook.get_hook(conn_id=self.conn_id)
def _convert_to_float_if_possible(s):
    """Attempt to coerce a value to ``float``.

    :param s: the value to be converted
    :return: ``float(s)`` when the conversion succeeds, otherwise ``s``
        unchanged (e.g. non-numeric strings or ``None``).
    """
    try:
        return float(s)
    except (ValueError, TypeError):
        return s
class ValueCheckOperator(BaseOperator):
    """
    Performs a simple value check using sql code.

    The first row returned by ``sql`` is compared against ``pass_value``.
    When both sides are numeric the comparison is done as floats,
    optionally within a relative ``tolerance``; otherwise a string
    equality check is used.

    Note that this is an abstract class and get_db_hook
    needs to be defined. Whereas a get_db_hook is hook that gets a
    single record from an external source.

    :param sql: the sql to be executed
    :type sql: string
    :param pass_value: the value every result must match
    :param tolerance: optional relative tolerance for numeric comparisons
    """

    __mapper_args__ = {
        'polymorphic_identity': 'ValueCheckOperator'
    }
    template_fields = ('sql',)
    template_ext = ('.hql', '.sql',)
    ui_color = '#fff7e6'

    @apply_defaults
    def __init__(
            self, sql, pass_value, tolerance=None,
            conn_id=None,
            *args, **kwargs):
        super(ValueCheckOperator, self).__init__(*args, **kwargs)
        self.sql = sql
        self.conn_id = conn_id
        # Coerce to float up front so numeric comparison paths are chosen
        # consistently at execute() time.
        self.pass_value = _convert_to_float_if_possible(pass_value)
        tol = _convert_to_float_if_possible(tolerance)
        self.tol = tol if isinstance(tol, float) else None
        self.is_numeric_value_check = isinstance(self.pass_value, float)
        self.has_tolerance = self.tol is not None

    def execute(self, context=None):
        logging.info('Executing SQL check: ' + self.sql)
        records = self.get_db_hook().get_first(self.sql)
        if not records:
            raise AirflowException("The query returned None")
        # NOTE: formatted with **locals(), so it relies on `self` and
        # `records` being in scope at the raise sites below.
        except_temp = ("Test failed.\nPass value:{self.pass_value}\n"
                       "Query:\n{self.sql}\nResults:\n{records!s}")
        if not self.is_numeric_value_check:
            # String comparison: every result must equal pass_value exactly.
            tests = [str(r) == self.pass_value for r in records]
        elif self.is_numeric_value_check:
            try:
                num_rec = [float(r) for r in records]
            except (ValueError, TypeError):
                cvestr = "Converting a result to float failed.\n"
                raise AirflowException(cvestr + except_temp.format(**locals()))
            if self.has_tolerance:
                # Accept r when pass_value lies within r scaled by the
                # relative tolerance band.
                tests = [
                    r / (1 + self.tol) <= self.pass_value <= r / (1 - self.tol)
                    for r in num_rec]
            else:
                tests = [r == self.pass_value for r in num_rec]
        if not all(tests):
            raise AirflowException(except_temp.format(**locals()))

    def get_db_hook(self):
        return BaseHook.get_hook(conn_id=self.conn_id)
class IntervalCheckOperator(BaseOperator):
    """
    Checks that the values of metrics given as SQL expressions are within
    a certain tolerance of the ones from days_back before.

    Note that this is an abstract class and get_db_hook
    needs to be defined. Whereas a get_db_hook is hook that gets a
    single record from an external source.

    :param table: the table name
    :type table: str
    :param days_back: number of days between ds and the ds we want to check
        against. Defaults to 7 days
    :type days_back: int
    :param metrics_threshold: a dictionary of ratios indexed by metrics
    :type metrics_threshold: dict
    """

    __mapper_args__ = {
        'polymorphic_identity': 'IntervalCheckOperator'
    }
    template_fields = ('sql1', 'sql2')
    template_ext = ('.hql', '.sql',)
    ui_color = '#fff7e6'

    @apply_defaults
    def __init__(
            self, table, metrics_thresholds,
            date_filter_column='ds', days_back=-7,
            conn_id=None,
            *args, **kwargs):
        super(IntervalCheckOperator, self).__init__(*args, **kwargs)
        self.table = table
        self.metrics_thresholds = metrics_thresholds
        self.metrics_sorted = sorted(metrics_thresholds.keys())
        self.date_filter_column = date_filter_column
        # Always look backwards in time, whatever sign the caller passed.
        self.days_back = -abs(days_back)
        self.conn_id = conn_id
        sqlexp = ', '.join(self.metrics_sorted)
        sqlt = ("SELECT {sqlexp} FROM {table}"
                " WHERE {date_filter_column}=").format(**locals())
        # sql1 targets the current ds; sql2 targets ds shifted days_back.
        self.sql1 = sqlt + "'{{ ds }}'"
        self.sql2 = sqlt + "'{{ macros.ds_add(ds, " + str(self.days_back) + ") }}'"

    def execute(self, context=None):
        hook = self.get_db_hook()
        logging.info('Executing SQL check: ' + self.sql2)
        row2 = hook.get_first(self.sql2)
        logging.info('Executing SQL check: ' + self.sql1)
        row1 = hook.get_first(self.sql1)
        if not row2:
            # BUG FIX: previously `.format(...)` was called on the exception
            # instance (``raise AirflowException("...").format(...)``), which
            # raised AttributeError instead of the intended AirflowException.
            raise AirflowException(
                "The query {q} returned None".format(q=self.sql2))
        if not row1:
            raise AirflowException(
                "The query {q} returned None".format(q=self.sql1))
        current = dict(zip(self.metrics_sorted, row1))
        reference = dict(zip(self.metrics_sorted, row2))
        ratios = {}
        test_results = {}
        rlog = "Ratio for {0}: {1} \n Ratio threshold : {2}"
        fstr = "'{k}' check failed. {r} is above {tr}"
        estr = "The following tests have failed:\n {0}"
        countstr = "The following {j} tests out of {n} failed:"
        for m in self.metrics_sorted:
            if current[m] == 0 or reference[m] == 0:
                # NOTE(review): a zero metric yields ratio None, which fails
                # the `<` comparison below on Python 3 — confirm intended
                # handling for zero-valued metrics.
                ratio = None
            else:
                # Ratio is always >= 1: larger value over smaller value.
                ratio = float(max(current[m], reference[m])) / \
                    min(current[m], reference[m])
            logging.info(rlog.format(m, ratio, self.metrics_thresholds[m]))
            ratios[m] = ratio
            test_results[m] = ratio < self.metrics_thresholds[m]
        if not all(test_results.values()):
            failed_tests = [it[0] for it in test_results.items() if not it[1]]
            j = len(failed_tests)
            n = len(self.metrics_sorted)
            logging.warning(countstr.format(**locals()))
            for k in failed_tests:
                logging.warning(fstr.format(k=k, r=ratios[k],
                                            tr=self.metrics_thresholds[k]))
            raise AirflowException(estr.format(", ".join(failed_tests)))
        logging.info("All tests have passed")

    def get_db_hook(self):
        return BaseHook.get_hook(conn_id=self.conn_id)
| apache-2.0 |
JerryWong0119/CocosBuilder-3.0-alpha5 | CocosBuilder/libs/nodejs/lib/node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py | 107 | 43862 | # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Notes:
#
# This generates makefiles suitable for inclusion into the Android build system
# via an Android.mk file. It is based on make.py, the standard makefile
# generator.
#
# The code below generates a separate .mk file for each target, but
# all are sourced by the top-level GypAndroid.mk. This means that all
# variables in .mk-files clobber one another, and furthermore that any
# variables set potentially clash with other Android build system variables.
# Try to avoid setting global variables where possible.
import gyp
import gyp.common
import gyp.generator.make as make # Reuse global functions from make backend.
import os
import re
# Values substituted for gyp's abstract placeholders; these map gyp concepts
# onto Android build-system make variables and locations.
generator_default_variables = {
  'OS': 'android',
  'EXECUTABLE_PREFIX': '',
  'EXECUTABLE_SUFFIX': '',
  'STATIC_LIB_PREFIX': 'lib',
  'SHARED_LIB_PREFIX': 'lib',
  'STATIC_LIB_SUFFIX': '.a',
  'SHARED_LIB_SUFFIX': '.so',
  'INTERMEDIATE_DIR': '$(gyp_intermediate_dir)',
  'SHARED_INTERMEDIATE_DIR': '$(gyp_shared_intermediate_dir)',
  'PRODUCT_DIR': '$(gyp_shared_intermediate_dir)',
  'SHARED_LIB_DIR': '$(builddir)/lib.$(TOOLSET)',
  'LIB_DIR': '$(obj).$(TOOLSET)',
  'RULE_INPUT_ROOT': '%(INPUT_ROOT)s',  # This gets expanded by Python.
  'RULE_INPUT_DIRNAME': '%(INPUT_DIRNAME)s',  # This gets expanded by Python.
  'RULE_INPUT_PATH': '$(RULE_SOURCES)',
  'RULE_INPUT_EXT': '$(suffix $<)',
  'RULE_INPUT_NAME': '$(notdir $<)',
  'CONFIGURATION_NAME': 'NOT_USED_ON_ANDROID',
}

# Make supports multiple toolsets
generator_supports_multiple_toolsets = True

# Generator-specific gyp specs.
generator_additional_non_configuration_keys = [
  # Boolean to declare that this target does not want its name mangled.
  'android_unmangled_name',
]

generator_additional_path_sections = []
generator_extra_sources_for_rules = []

# Emitted once at the end of the top-level GypAndroid.mk (runtime makefile
# text; keep byte-exact).
SHARED_FOOTER = """\
# "gyp_all_modules" is a concatenation of the "gyp_all_modules" targets from
# all the included sub-makefiles. This is just here to clarify.
gyp_all_modules:
"""

# Written at the top of every generated .mk file (see Write()).
header = """\
# This file is generated by gyp; do not edit.
"""

# Include paths the Android build system supplies on its own.
# NOTE(review): presumably used elsewhere to avoid re-emitting include_dirs
# the toolchain already provides — confirm at the use site.
android_standard_include_paths = set([
    # JNI_H_INCLUDE in build/core/binary.mk
    'dalvik/libnativehelper/include/nativehelper',
    # from SRC_HEADERS in build/core/config.mk
    'system/core/include',
    'hardware/libhardware/include',
    'hardware/libhardware_legacy/include',
    'hardware/ril/include',
    'dalvik/libnativehelper/include',
    'frameworks/native/include',
    'frameworks/native/opengl/include',
    'frameworks/base/include',
    'frameworks/base/opengl/include',
    'frameworks/base/native/include',
    'external/skia/include',
    # TARGET_C_INCLUDES in build/core/combo/TARGET_linux-arm.mk
    'bionic/libc/arch-arm/include',
    'bionic/libc/include',
    'bionic/libstdc++/include',
    'bionic/libc/kernel/common',
    'bionic/libc/kernel/arch-arm',
    'bionic/libm/include',
    'bionic/libm/include/arm',
    'bionic/libthread_db/include',
    ])

# Map gyp target types to Android module classes.
MODULE_CLASSES = {
    'static_library': 'STATIC_LIBRARIES',
    'shared_library': 'SHARED_LIBRARIES',
    'executable': 'EXECUTABLES',
}
def IsCPPExtension(ext):
  # True iff `ext` is registered as a C++ source extension ('cxx') in the
  # extension table shared with the make generator.
  return make.COMPILABLE_EXTENSIONS.get(ext) == 'cxx'
def Sourceify(path):
  """Convert a path to its source directory form. The Android backend does not
  support options.generator_output, so this function is a noop."""
  # Kept for interface parity with the make generator's Sourceify.
  return path
# Map from qualified target to path to output.
# For Android, the target of these maps is a tuple ('static', 'modulename'),
# ('dynamic', 'modulename'), or ('path', 'some/path') instead of a string,
# since we link by module.
# NOTE: module-level mutable state, populated by AndroidMkWriter.Write().
target_outputs = {}
# Map from qualified target to any linkable output. A subset
# of target_outputs. E.g. when mybinary depends on liba, we want to
# include liba in the linker line; when otherbinary depends on
# mybinary, we just want to build mybinary first.
target_link_deps = {}
class AndroidMkWriter(object):
"""AndroidMkWriter packages up the writing of one target-specific Android.mk.
Its only real entry point is Write(), and is mostly used for namespacing.
"""
def __init__(self, android_top_dir):
  # Path to the root of the Android source tree. NOTE(review): only stored
  # here; presumably consumed by path-localization helpers — confirm.
  self.android_top_dir = android_top_dir
def Write(self, qualified_target, base_path, output_filename, spec, configs,
          part_of_all):
  """The main entry point: writes a .mk file for a single target.

  Arguments:
    qualified_target: target we're generating
    base_path: path relative to source root we're building in, used to resolve
               target-relative paths
    output_filename: output .mk file name to write
    spec, configs: gyp info
    part_of_all: flag indicating this target is part of 'all'

  Returns:
    The Android module name chosen for this target.

  Side effects: opens/writes/closes `output_filename`, stores per-target
  state on `self`, and records this target in the module-level
  `target_outputs` / `target_link_deps` maps.
  """
  make.ensure_directory_exists(output_filename)
  self.fp = open(output_filename, 'w')
  self.fp.write(header)
  self.qualified_target = qualified_target
  self.path = base_path
  self.target = spec['target_name']
  self.type = spec['type']
  self.toolset = spec['toolset']

  deps, link_deps = self.ComputeDeps(spec)

  # Some of the generation below can add extra output, sources, or
  # link dependencies. All of the out params of the functions that
  # follow use names like extra_foo.
  extra_outputs = []
  extra_sources = []

  self.android_class = MODULE_CLASSES.get(self.type, 'GYP')
  self.android_module = self.ComputeAndroidModule(spec)
  (self.android_stem, self.android_suffix) = self.ComputeOutputParts(spec)
  self.output = self.output_binary = self.ComputeOutput(spec)

  # Standard header.
  self.WriteLn('include $(CLEAR_VARS)\n')

  # Module class and name.
  self.WriteLn('LOCAL_MODULE_CLASS := ' + self.android_class)
  self.WriteLn('LOCAL_MODULE := ' + self.android_module)
  # Only emit LOCAL_MODULE_STEM if it's different to LOCAL_MODULE.
  # The library module classes fail if the stem is set. ComputeOutputParts
  # makes sure that stem == modulename in these cases.
  if self.android_stem != self.android_module:
    self.WriteLn('LOCAL_MODULE_STEM := ' + self.android_stem)
  self.WriteLn('LOCAL_MODULE_SUFFIX := ' + self.android_suffix)
  self.WriteLn('LOCAL_MODULE_TAGS := optional')
  if self.toolset == 'host':
    self.WriteLn('LOCAL_IS_HOST_MODULE := true')

  # Grab output directories; needed for Actions and Rules.
  self.WriteLn('gyp_intermediate_dir := $(call local-intermediates-dir)')
  self.WriteLn('gyp_shared_intermediate_dir := '
               '$(call intermediates-dir-for,GYP,shared)')
  self.WriteLn()

  # List files this target depends on so that actions/rules/copies/sources
  # can depend on the list.
  # TODO: doesn't pull in things through transitive link deps; needed?
  target_dependencies = [x[1] for x in deps if x[0] == 'path']
  self.WriteLn('# Make sure our deps are built first.')
  self.WriteList(target_dependencies, 'GYP_TARGET_DEPENDENCIES',
                 local_pathify=True)

  # Actions must come first, since they can generate more OBJs for use below.
  if 'actions' in spec:
    self.WriteActions(spec['actions'], extra_sources, extra_outputs)

  # Rules must be early like actions.
  if 'rules' in spec:
    self.WriteRules(spec['rules'], extra_sources, extra_outputs)

  if 'copies' in spec:
    self.WriteCopies(spec['copies'], extra_outputs)

  # GYP generated outputs.
  self.WriteList(extra_outputs, 'GYP_GENERATED_OUTPUTS', local_pathify=True)

  # Set LOCAL_ADDITIONAL_DEPENDENCIES so that Android's build rules depend
  # on both our dependency targets and our generated files.
  self.WriteLn('# Make sure our deps and generated files are built first.')
  self.WriteLn('LOCAL_ADDITIONAL_DEPENDENCIES := $(GYP_TARGET_DEPENDENCIES) '
               '$(GYP_GENERATED_OUTPUTS)')
  self.WriteLn()

  # Sources.
  if spec.get('sources', []) or extra_sources:
    self.WriteSources(spec, configs, extra_sources)

  self.WriteTarget(spec, configs, deps, link_deps, part_of_all)

  # Update global list of target outputs, used in dependency tracking.
  target_outputs[qualified_target] = ('path', self.output_binary)

  # Update global list of link dependencies.
  if self.type == 'static_library':
    target_link_deps[qualified_target] = ('static', self.android_module)
  elif self.type == 'shared_library':
    target_link_deps[qualified_target] = ('shared', self.android_module)

  self.fp.close()
  return self.android_module
def WriteActions(self, actions, extra_sources, extra_outputs):
  """Write Makefile code for any 'actions' from the gyp input.

  For each action, emits one make rule whose primary output carries the
  command; every secondary output is made to depend on the primary output.

  extra_sources: a list that will be filled in with newly generated source
                 files, if any
  extra_outputs: a list that will be filled in with any outputs of these
                 actions (used to make other pieces dependent on these
                 actions)
  """
  for action in actions:
    name = make.StringToMakefileVariable('%s_%s' % (self.qualified_target,
                                                    action['action_name']))
    self.WriteLn('### Rules for action "%s":' % action['action_name'])
    inputs = action['inputs']
    outputs = action['outputs']

    # Build up a list of outputs.
    # Collect the output dirs we'll need.
    dirs = set()
    for out in outputs:
      if not out.startswith('$'):
        print ('WARNING: Action for target "%s" writes output to local path '
               '"%s".' % (self.target, out))
      dir = os.path.split(out)[0]
      if dir:
        dirs.add(dir)
    if int(action.get('process_outputs_as_sources', False)):
      extra_sources += outputs

    # Prepare the actual command.
    command = gyp.common.EncodePOSIXShellList(action['action'])
    if 'message' in action:
      quiet_cmd = 'Gyp action: %s ($@)' % action['message']
    else:
      quiet_cmd = 'Gyp action: %s ($@)' % name
    # Create output directories before the command runs, then cd into the
    # action's own directory since gyp actions are dir-relative.
    if len(dirs) > 0:
      command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command

    cd_action = 'cd $(gyp_local_path)/%s; ' % self.path
    command = cd_action + command

    # The makefile rules are all relative to the top dir, but the gyp actions
    # are defined relative to their containing dir. This replaces the gyp_*
    # variables for the action rule with an absolute version so that the
    # output goes in the right place.
    # Only write the gyp_* rules for the "primary" output (:1);
    # it's superfluous for the "extra outputs", and this avoids accidentally
    # writing duplicate dummy rules for those outputs.
    main_output = make.QuoteSpaces(self.LocalPathify(outputs[0]))
    self.WriteLn('%s: gyp_local_path := $(LOCAL_PATH)' % main_output)
    self.WriteLn('%s: gyp_intermediate_dir := '
                 '$(GYP_ABS_ANDROID_TOP_DIR)/$(gyp_intermediate_dir)' %
                 main_output)
    self.WriteLn('%s: gyp_shared_intermediate_dir := '
                 '$(GYP_ABS_ANDROID_TOP_DIR)/$(gyp_shared_intermediate_dir)' %
                 main_output)

    # Spaces would break the emitted make rules, so reject them up front.
    for input in inputs:
      assert ' ' not in input, (
          "Spaces in action input filenames not supported (%s)" % input)
    for output in outputs:
      assert ' ' not in output, (
          "Spaces in action output filenames not supported (%s)" % output)

    self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES)' %
                 (main_output, ' '.join(map(self.LocalPathify, inputs))))
    self.WriteLn('\t@echo "%s"' % quiet_cmd)
    self.WriteLn('\t$(hide)%s\n' % command)
    for output in outputs[1:]:
      # Make each output depend on the main output, with an empty command
      # to force make to notice that the mtime has changed.
      self.WriteLn('%s: %s ;' % (self.LocalPathify(output), main_output))

    extra_outputs += outputs
    self.WriteLn()

  self.WriteLn()
def WriteRules(self, rules, extra_sources, extra_outputs):
  """Write Makefile code for any 'rules' from the gyp input.

  A gyp 'rule' runs an action once per matching source file.  For every
  entry in rule['rule_sources'] a make rule is emitted whose first output
  depends on the source (plus any explicit inputs); the remaining outputs
  are made to depend on the first one.

  extra_sources: a list that will be filled in with newly generated source
    files, if any (only when 'process_outputs_as_sources' is set)
  extra_outputs: a list that will be filled in with any outputs of these
    rules (used to make other pieces dependent on these rules)
  """
  if len(rules) == 0:
    return
  # Phony trigger target: appended to extra_sources below so that every
  # per-source rule of this module is forced to run.
  rule_trigger = '%s_rule_trigger' % self.android_module

  did_write_rule = False
  for rule in rules:
    if len(rule.get('rule_sources', [])) == 0:
      continue
    did_write_rule = True
    # Unique, makefile-safe name for this (target, rule) pair.
    name = make.StringToMakefileVariable('%s_%s' % (self.qualified_target,
                                                    rule['rule_name']))
    self.WriteLn('\n### Generated for rule "%s":' % name)
    self.WriteLn('# "%s":' % rule)

    inputs = rule.get('inputs')
    for rule_source in rule.get('rule_sources', []):
      (rule_source_dirname, rule_source_basename) = os.path.split(rule_source)
      (rule_source_root, rule_source_ext) = \
          os.path.splitext(rule_source_basename)

      # Expand %(INPUT_ROOT)s / %(INPUT_DIRNAME)s placeholders in outputs.
      outputs = [self.ExpandInputRoot(out, rule_source_root,
                                      rule_source_dirname)
                 for out in rule['outputs']]

      dirs = set()
      for out in outputs:
        if not out.startswith('$'):
          # Outputs should land in an intermediate dir, not the source tree.
          print ('WARNING: Rule for target %s writes output to local path %s'
                 % (self.target, out))
        dir = os.path.dirname(out)
        if dir:
          dirs.add(dir)
      extra_outputs += outputs
      if int(rule.get('process_outputs_as_sources', False)):
        extra_sources.extend(outputs)

      # Build the shell command: expand input-root placeholders and
      # substitute the current source file for $(RULE_SOURCES).
      components = []
      for component in rule['action']:
        component = self.ExpandInputRoot(component, rule_source_root,
                                         rule_source_dirname)
        if '$(RULE_SOURCES)' in component:
          component = component.replace('$(RULE_SOURCES)',
                                        rule_source)
        components.append(component)

      command = gyp.common.EncodePOSIXShellList(components)
      cd_action = 'cd $(gyp_local_path)/%s; ' % self.path
      command = cd_action + command
      if dirs:
        command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command

      # We set up a rule to build the first output, and then set up
      # a rule for each additional output to depend on the first.
      outputs = map(self.LocalPathify, outputs)
      main_output = outputs[0]
      # Pin the gyp_* directory variables on the main output so the recipe
      # runs with absolute intermediate dirs.
      self.WriteLn('%s: gyp_local_path := $(LOCAL_PATH)' % main_output)
      self.WriteLn('%s: gyp_intermediate_dir := '
                   '$(GYP_ABS_ANDROID_TOP_DIR)/$(gyp_intermediate_dir)'
                   % main_output)
      self.WriteLn('%s: gyp_shared_intermediate_dir := '
                   '$(GYP_ABS_ANDROID_TOP_DIR)/$(gyp_shared_intermediate_dir)'
                   % main_output)

      main_output_deps = self.LocalPathify(rule_source)
      if inputs:
        main_output_deps += ' '
        main_output_deps += ' '.join([self.LocalPathify(f) for f in inputs])

      self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES)' %
                   (main_output, main_output_deps))
      self.WriteLn('\t%s\n' % command)
      for output in outputs[1:]:
        # Secondary outputs only need to depend on the main output.
        self.WriteLn('%s: %s' % (output, main_output))
      self.WriteLn('.PHONY: %s' % (rule_trigger))
      self.WriteLn('%s: %s' % (rule_trigger, main_output))
      self.WriteLn('')
  if did_write_rule:
    extra_sources.append(rule_trigger)  # Force all rules to run.
    self.WriteLn('### Finished generating for all rules')
    self.WriteLn('')
def WriteCopies(self, copies, extra_outputs):
  """Write Makefile code for any 'copies' from the gyp input.

  Emits one $(ACP)-based copy rule per file and collects all copy outputs
  into a single make variable named after the target.

  extra_outputs: a list that will be filled in with any outputs of this action
    (used to make other pieces dependent on this action)
  """
  self.WriteLn('### Generated for copy rule.')

  variable = make.StringToMakefileVariable(self.qualified_target + '_copies')
  outputs = []
  for copy in copies:
    for path in copy['files']:
      # The Android build system does not allow generation of files into the
      # source tree. The destination should start with a variable, which will
      # typically be $(gyp_intermediate_dir) or
      # $(gyp_shared_intermediate_dir). Note that we can't use an assertion
      # because some of the gyp tests depend on this.
      if not copy['destination'].startswith('$'):
        print ('WARNING: Copy rule for target %s writes output to '
               'local path %s' % (self.target, copy['destination']))

      # LocalPathify() calls normpath, stripping trailing slashes.
      path = Sourceify(self.LocalPathify(path))
      filename = os.path.split(path)[1]
      output = Sourceify(self.LocalPathify(os.path.join(copy['destination'],
                                                        filename)))

      # Recipe: make sure the destination dir exists, then acp the file.
      self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES) | $(ACP)' %
                   (output, path))
      self.WriteLn('\t@echo Copying: $@')
      self.WriteLn('\t$(hide) mkdir -p $(dir $@)')
      self.WriteLn('\t$(hide) $(ACP) -r $< $@')
      self.WriteLn()
      outputs.append(output)
  # Publish the full list of copied files so other rules can depend on it.
  self.WriteLn('%s = %s' % (variable,
                            ' '.join(map(make.QuoteSpaces, outputs))))
  extra_outputs.append('$(%s)' % variable)
  self.WriteLn()
def WriteSourceFlags(self, spec, configs):
  """Write out the flags and include paths used to compile source files for
  the current target.

  Only the default configuration's flags are used; "-I..." options embedded
  in cflags/cflags_c are extracted and merged into LOCAL_C_INCLUDES.

  Args:
    spec, configs: input from gyp.
  """
  config = configs[spec['default_configuration']]
  extracted_includes = []

  self.WriteLn('\n# Flags passed to both C and C++ files.')
  cflags, includes_from_cflags = self.ExtractIncludesFromCFlags(
      config.get('cflags'))
  extracted_includes.extend(includes_from_cflags)
  self.WriteList(cflags, 'MY_CFLAGS')

  cflags_c, includes_from_cflags_c = self.ExtractIncludesFromCFlags(
      config.get('cflags_c'))
  extracted_includes.extend(includes_from_cflags_c)
  self.WriteList(cflags_c, 'MY_CFLAGS_C')

  self.WriteList(config.get('defines'), 'MY_DEFS', prefix='-D',
                 quoter=make.EscapeCppDefine)
  self.WriteLn('LOCAL_CFLAGS := $(MY_CFLAGS_C) $(MY_CFLAGS) $(MY_DEFS)')

  # Undefine ANDROID for host modules
  # TODO: the source code should not use macro ANDROID to tell if it's host or
  # target module.
  if self.toolset == 'host':
    self.WriteLn('# Undefine ANDROID for host modules')
    self.WriteLn('LOCAL_CFLAGS += -UANDROID')

  self.WriteLn('\n# Include paths placed before CFLAGS/CPPFLAGS')
  includes = list(config.get('include_dirs', []))
  includes.extend(extracted_includes)
  includes = map(Sourceify, map(self.LocalPathify, includes))
  includes = self.NormalizeIncludePaths(includes)
  self.WriteList(includes, 'LOCAL_C_INCLUDES')
  # Prepend the origin dirs of copied generated sources (populated by
  # WriteSources via GYP_COPIED_SOURCE_ORIGIN_DIRS) so neighbouring headers
  # are still found.
  self.WriteLn('LOCAL_C_INCLUDES := $(GYP_COPIED_SOURCE_ORIGIN_DIRS) '
               '$(LOCAL_C_INCLUDES)')

  self.WriteLn('\n# Flags passed to only C++ (and not C) files.')
  self.WriteList(config.get('cflags_cc'), 'LOCAL_CPPFLAGS')
def WriteSources(self, spec, configs, extra_sources):
  """Write Makefile code for any 'sources' from the gyp input.
  These are source files necessary to build the current target.
  We need to handle shared_intermediate directory source files as
  a special case by copying them to the intermediate directory and
  treating them as generated sources. Otherwise the Android build
  rules won't pick them up.

  Args:
    spec, configs: input from gyp.
    extra_sources: Sources generated from Actions or Rules.
  """
  sources = filter(make.Compilable, spec.get('sources', []))
  # Keep non-compilable generated files (e.g. headers) aside; they are
  # re-added to LOCAL_GENERATED_SOURCES at the end for dependency purposes.
  generated_not_sources = [x for x in extra_sources if not make.Compilable(x)]
  extra_sources = filter(make.Compilable, extra_sources)

  # Determine and output the C++ extension used by these sources.
  # We simply find the first C++ file and use that extension.
  all_sources = sources + extra_sources
  local_cpp_extension = '.cpp'
  for source in all_sources:
    (root, ext) = os.path.splitext(source)
    if IsCPPExtension(ext):
      local_cpp_extension = ext
      break
  if local_cpp_extension != '.cpp':
    self.WriteLn('LOCAL_CPP_EXTENSION := %s' % local_cpp_extension)

  # We need to move any non-generated sources that are coming from the
  # shared intermediate directory out of LOCAL_SRC_FILES and put them
  # into LOCAL_GENERATED_SOURCES. We also need to move over any C++ files
  # that don't match our local_cpp_extension, since Android will only
  # generate Makefile rules for a single LOCAL_CPP_EXTENSION.
  local_files = []
  for source in sources:
    (root, ext) = os.path.splitext(source)
    if '$(gyp_shared_intermediate_dir)' in source:
      extra_sources.append(source)
    elif '$(gyp_intermediate_dir)' in source:
      extra_sources.append(source)
    elif IsCPPExtension(ext) and ext != local_cpp_extension:
      extra_sources.append(source)
    else:
      local_files.append(os.path.normpath(os.path.join(self.path, source)))

  # For any generated source, if it is coming from the shared intermediate
  # directory then we add a Make rule to copy them to the local intermediate
  # directory first. This is because the Android LOCAL_GENERATED_SOURCES
  # must be in the local module intermediate directory for the compile rules
  # to work properly. If the file has the wrong C++ extension, then we add
  # a rule to copy that to intermediates and use the new version.
  final_generated_sources = []
  # If a source file gets copied, we still need to add the original source
  # directory as header search path, for GCC searches headers in the
  # directory that contains the source file by default.
  origin_src_dirs = []
  for source in extra_sources:
    local_file = source
    if not '$(gyp_intermediate_dir)/' in local_file:
      basename = os.path.basename(local_file)
      local_file = '$(gyp_intermediate_dir)/' + basename
    (root, ext) = os.path.splitext(local_file)
    if IsCPPExtension(ext) and ext != local_cpp_extension:
      local_file = root + local_cpp_extension
    if local_file != source:
      # Copy rule into the module's intermediate dir (also renames the
      # extension when needed).
      self.WriteLn('%s: %s' % (local_file, self.LocalPathify(source)))
      self.WriteLn('\tmkdir -p $(@D); cp $< $@')
      origin_src_dirs.append(os.path.dirname(source))
    final_generated_sources.append(local_file)

  # We add back in all of the non-compilable stuff to make sure that the
  # make rules have dependencies on them.
  final_generated_sources.extend(generated_not_sources)
  self.WriteList(final_generated_sources, 'LOCAL_GENERATED_SOURCES')

  origin_src_dirs = gyp.common.uniquer(origin_src_dirs)
  origin_src_dirs = map(Sourceify, map(self.LocalPathify, origin_src_dirs))
  self.WriteList(origin_src_dirs, 'GYP_COPIED_SOURCE_ORIGIN_DIRS')

  self.WriteList(local_files, 'LOCAL_SRC_FILES')

  # Write out the flags used to compile the source; this must be done last
  # so that GYP_COPIED_SOURCE_ORIGIN_DIRS can be used as an include path.
  self.WriteSourceFlags(spec, configs)
def ComputeAndroidModule(self, spec):
  """Return the Android module name used for a gyp spec.

  We use the complete qualified target name to avoid collisions between
  duplicate targets in different directories. We also add a suffix to
  distinguish gyp-generated module names.
  """
  if int(spec.get('android_unmangled_name', 0)):
    # Caller explicitly asked for the raw target name; shared libraries
    # must then already carry the conventional 'lib' prefix.
    assert self.type != 'shared_library' or self.target.startswith('lib')
    return self.target

  # For reasons of convention, the Android build system requires that all
  # shared library modules are named 'libfoo' when generating -l flags.
  prefix = 'lib_' if self.type == 'shared_library' else ''
  suffix = '_host_gyp' if spec['toolset'] == 'host' else '_gyp'
  if self.path:
    raw_name = '%s%s_%s%s' % (prefix, self.path, self.target, suffix)
  else:
    raw_name = '%s%s%s' % (prefix, self.target, suffix)
  return make.StringToMakefileVariable(raw_name)
def ComputeOutputParts(self, spec):
  """Return the 'output basename' of a gyp spec, split into filename + ext.

  Android libraries must be named the same thing as their module name,
  otherwise the linker can't find them, so product_name and so on must be
  ignored if we are building a library, and the "lib" prepending is
  not done for Android.
  """
  assert self.type != 'loadable_module'  # TODO: not supported?

  stem = spec['target_name']
  prefix = ''
  ext = ''
  is_library = self.type in ('static_library', 'shared_library')
  if is_library:
    # Libraries must be named after their Android module.
    stem = self.ComputeAndroidModule(spec)
    ext = '.a' if self.type == 'static_library' else '.so'
  elif self.type == 'none':
    ext = '.stamp'
  elif self.type != 'executable':
    print ("ERROR: What output file should be generated?",
           "type", self.type, "target", stem)

  if not is_library:
    # Non-library targets honour the product_* overrides from the spec.
    prefix = spec.get('product_prefix', prefix)
    stem = spec.get('product_name', stem)
    product_ext = spec.get('product_extension')
    if product_ext:
      ext = '.' + product_ext

  return (prefix + stem, ext)
def ComputeOutputBasename(self, spec):
  """Return the 'output basename' of a gyp spec.

  E.g., the loadable module 'foobar' in directory 'baz' will produce
  'libfoobar.so'
  """
  stem, ext = self.ComputeOutputParts(spec)
  return stem + ext
def ComputeOutput(self, spec):
  """Return the 'output' (full output path) of a gyp spec.

  E.g., the loadable module 'foobar' in directory 'baz' will produce
  '$(obj)/baz/libfoobar.so'
  """
  if self.type == 'executable' and self.toolset == 'host':
    # We install host executables into shared_intermediate_dir so they can be
    # run by gyp rules that refer to PRODUCT_DIR.
    out_dir = '$(gyp_shared_intermediate_dir)'
  elif self.type == 'shared_library':
    out_dir = ('$(HOST_OUT_INTERMEDIATE_LIBRARIES)' if self.toolset == 'host'
               else '$(TARGET_OUT_INTERMEDIATE_LIBRARIES)')
  else:
    # Other targets just get built into their intermediate dir.
    if self.toolset == 'host':
      out_dir = ('$(call intermediates-dir-for,%s,%s,true)'
                 % (self.android_class, self.android_module))
    else:
      out_dir = ('$(call intermediates-dir-for,%s,%s)'
                 % (self.android_class, self.android_module))

  assert spec.get('product_dir') is None  # TODO: not supported?
  return os.path.join(out_dir, self.ComputeOutputBasename(spec))
def NormalizeLdFlags(self, ld_flags):
  """ Clean up ldflags from gyp file.
  Remove any ldflags that contain android_top_dir.
  Args:
    ld_flags: ldflags from gyp files.
  Returns:
    clean ldflags
  """
  # Flags mentioning the Android top directory are supplied by the Android
  # build system itself and must not be duplicated here.
  return [flag for flag in ld_flags if self.android_top_dir not in flag]
def NormalizeIncludePaths(self, include_paths):
  """ Normalize include_paths.
  Convert absolute paths to relative to the Android top directory;
  filter out include paths that are already brought in by the Android build
  system.
  Args:
    include_paths: A list of unprocessed include paths.
  Returns:
    A list of normalized include paths.
  """
  result = []
  for include_path in include_paths:
    if include_path[0] == '/':
      # Re-root absolute paths at the Android top directory.
      include_path = gyp.common.RelativePath(include_path,
                                             self.android_top_dir)
    if include_path in android_standard_include_paths:
      # Already on the Android standard search path; drop it.
      continue
    result.append(include_path)
  return result
def ExtractIncludesFromCFlags(self, cflags):
  """Extract includes "-I..." out from cflags

  Args:
    cflags: A list of compiler flags, which may be mixed with "-I.."
  Returns:
    A tuple of lists: (clean_cflags, include_paths). "-I.." is trimmed.
  """
  remaining_flags = []
  include_paths = []
  for flag in (cflags or []):
    if flag.startswith('-I'):
      include_paths.append(flag[2:])
    else:
      remaining_flags.append(flag)
  return (remaining_flags, include_paths)
def ComputeAndroidLibraryModuleNames(self, libraries):
  """Compute the Android module names from libraries, ie spec.get('libraries')

  Args:
    libraries: the value of spec.get('libraries')
  Returns:
    A tuple (static_lib_modules, dynamic_lib_modules)
  """
  static_lib_modules = []
  dynamic_lib_modules = []
  for entry in libraries:
    # Libs can have multiple words.
    for lib in entry.split():
      # Filter the system libraries, which are added by default by the
      # Android build system.
      if lib in ('-lc', '-lstdc++', '-lm') or lib.endswith('libgcc.a'):
        continue
      static_match = re.search(r'([^/]+)\.a$', lib)
      if static_match:
        static_lib_modules.append(static_match.group(1))
        continue
      shared_match = re.search(r'([^/]+)\.so$', lib)
      if shared_match:
        dynamic_lib_modules.append(shared_match.group(1))
        continue
      # "-lstlport" -> libstlport
      if lib.startswith('-l'):
        if lib.endswith('_static'):
          static_lib_modules.append('lib' + lib[2:])
        else:
          dynamic_lib_modules.append('lib' + lib[2:])
  return (static_lib_modules, dynamic_lib_modules)
def ComputeDeps(self, spec):
  """Compute the dependencies of a gyp spec.

  Returns a tuple (deps, link_deps), where each is a list of
  filenames that will need to be put in front of make for either
  building (deps) or linking (link_deps).
  """
  deps = []
  link_deps = []
  dependencies = spec.get('dependencies', [])
  # Build-order deps: every dependency that produced an output file.
  deps.extend([target_outputs[dep] for dep in dependencies
               if target_outputs[dep]])
  # Link deps: the subset of dependencies that produced something linkable.
  link_deps.extend([target_link_deps[dep] for dep in dependencies
                    if dep in target_link_deps])
  deps.extend(link_deps)
  return (gyp.common.uniquer(deps), gyp.common.uniquer(link_deps))
def WriteTargetFlags(self, spec, configs, link_deps):
  """Write Makefile code to specify the link flags and library dependencies.

  spec, configs: input from gyp.
  link_deps: link dependency list; see ComputeDeps()
  """
  config = configs[spec['default_configuration']]

  # LDFLAGS
  ldflags = list(config.get('ldflags', []))
  # Library-like entries (-lfoo, *.a, *.so) hidden inside ldflags are split
  # out here so they can be declared as proper LOCAL_*_LIBRARIES below.
  static_flags, dynamic_flags = self.ComputeAndroidLibraryModuleNames(
      ldflags)
  self.WriteLn('')
  self.WriteList(self.NormalizeLdFlags(ldflags), 'LOCAL_LDFLAGS')

  # Libraries (i.e. -lfoo)
  libraries = gyp.common.uniquer(spec.get('libraries', []))
  static_libs, dynamic_libs = self.ComputeAndroidLibraryModuleNames(
      libraries)

  # Link dependencies (i.e. libfoo.a, libfoo.so)
  static_link_deps = [x[1] for x in link_deps if x[0] == 'static']
  shared_link_deps = [x[1] for x in link_deps if x[0] == 'shared']

  self.WriteLn('')
  self.WriteList(static_flags + static_libs + static_link_deps,
                 'LOCAL_STATIC_LIBRARIES')
  self.WriteLn('# Enable grouping to fix circular references')
  self.WriteLn('LOCAL_GROUP_STATIC_LIBRARIES := true')
  self.WriteLn('')
  self.WriteList(dynamic_flags + dynamic_libs + shared_link_deps,
                 'LOCAL_SHARED_LIBRARIES')
def WriteTarget(self, spec, configs, deps, link_deps, part_of_all):
  """Write Makefile code to produce the final target of the gyp spec.

  spec, configs: input from gyp.
  deps, link_deps: dependency lists; see ComputeDeps()
  part_of_all: flag indicating this target is part of 'all'
  """
  self.WriteLn('### Rules for final target.')

  if self.type != 'none':
    self.WriteTargetFlags(spec, configs, link_deps)

  # Add to the set of targets which represent the gyp 'all' target. We use the
  # name 'gyp_all_modules' as the Android build system doesn't allow the use
  # of the Make target 'all' and because 'all_modules' is the equivalent of
  # the Make target 'all' on Android.
  if part_of_all:
    self.WriteLn('# Add target alias to "gyp_all_modules" target.')
    self.WriteLn('.PHONY: gyp_all_modules')
    self.WriteLn('gyp_all_modules: %s' % self.android_module)
    self.WriteLn('')

  # Add an alias from the gyp target name to the Android module name. This
  # simplifies manual builds of the target, and is required by the test
  # framework.
  if self.target != self.android_module:
    self.WriteLn('# Alias gyp target name.')
    self.WriteLn('.PHONY: %s' % self.target)
    self.WriteLn('%s: %s' % (self.target, self.android_module))
    self.WriteLn('')

  # Add the command to trigger build of the target type depending
  # on the toolset. Ex: BUILD_STATIC_LIBRARY vs. BUILD_HOST_STATIC_LIBRARY
  # NOTE: This has to come last!
  modifier = ''
  if self.toolset == 'host':
    modifier = 'HOST_'
  if self.type == 'static_library':
    self.WriteLn('include $(BUILD_%sSTATIC_LIBRARY)' % modifier)
  elif self.type == 'shared_library':
    self.WriteLn('LOCAL_PRELINK_MODULE := false')
    self.WriteLn('include $(BUILD_%sSHARED_LIBRARY)' % modifier)
  elif self.type == 'executable':
    if self.toolset == 'host':
      self.WriteLn('LOCAL_MODULE_PATH := $(gyp_shared_intermediate_dir)')
    else:
      # Don't install target executables for now, as it results in them being
      # included in ROM. This can be revisited if there's a reason to install
      # them later.
      self.WriteLn('LOCAL_UNINSTALLABLE_MODULE := true')
    self.WriteLn('include $(BUILD_%sEXECUTABLE)' % modifier)
  else:
    # 'none' (and any other) targets: emit a touch'ed stamp module driven by
    # base_rules.mk so other modules can still depend on this target.
    self.WriteLn('LOCAL_MODULE_PATH := $(PRODUCT_OUT)/gyp_stamp')
    self.WriteLn('LOCAL_UNINSTALLABLE_MODULE := true')
    self.WriteLn()
    self.WriteLn('include $(BUILD_SYSTEM)/base_rules.mk')
    self.WriteLn()
    self.WriteLn('$(LOCAL_BUILT_MODULE): $(LOCAL_ADDITIONAL_DEPENDENCIES)')
    self.WriteLn('\t$(hide) echo "Gyp timestamp: $@"')
    self.WriteLn('\t$(hide) mkdir -p $(dir $@)')
    self.WriteLn('\t$(hide) touch $@')
def WriteList(self, value_list, variable=None, prefix='',
              quoter=make.QuoteIfNecessary, local_pathify=False):
  """Write a variable definition that is a list of values.

  E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out
       foo = blaha blahb
  but in a pretty-printed style.
  """
  values = ''
  if value_list:
    quoted = [quoter(prefix + item) for item in value_list]
    if local_pathify:
      quoted = [self.LocalPathify(item) for item in quoted]
    # One value per backslash-continued, tab-indented line.
    values = ' \\\n\t' + ' \\\n\t'.join(quoted)
  self.fp.write('%s :=%s\n\n' % (variable, values))
def WriteLn(self, text=''):
  """Write *text* to the output makefile, followed by a newline."""
  self.fp.write('%s\n' % text)
def LocalPathify(self, path):
  """Convert a subdirectory-relative path into a normalized path which starts
  with the make variable $(LOCAL_PATH) (i.e. the top of the project tree).
  Absolute paths, or paths that contain variables, are just normalized."""
  if '$(' in path or os.path.isabs(path):
    # path is not a file in the project tree in this case, but calling
    # normpath is still important for trimming trailing slashes.
    return os.path.normpath(path)
  result = os.path.normpath(os.path.join('$(LOCAL_PATH)', self.path, path))
  # Check that normalizing the path didn't ../ itself out of $(LOCAL_PATH)
  # - i.e. that the resulting path is still inside the project tree. The
  # path may legitimately have ended up containing just $(LOCAL_PATH), though,
  # so we don't look for a slash.
  assert result.startswith('$(LOCAL_PATH)'), (
      'Path %s attempts to escape from gyp path %s !)' % (path, self.path))
  return result
def ExpandInputRoot(self, template, expansion, dirname):
  """Substitute %(INPUT_ROOT)s / %(INPUT_DIRNAME)s placeholders in template.

  Templates containing neither placeholder are returned untouched.
  """
  has_placeholder = ('%(INPUT_ROOT)s' in template or
                     '%(INPUT_DIRNAME)s' in template)
  if not has_placeholder:
    return template
  return template % {'INPUT_ROOT': expansion, 'INPUT_DIRNAME': dirname}
def WriteAutoRegenerationRule(params, root_makefile, makefile_name,
                              build_files):
  """Write the target to regenerate the Makefile."""
  options = params['options']
  # Sort to avoid non-functional changes to makefile.
  gyp_files = sorted(os.path.join('$(LOCAL_PATH)', f) for f in build_files)
  # Arguments to re-invoke gyp with, rooted at $(PRIVATE_LOCAL_PATH).
  regen_args = [
      os.path.join('$(PRIVATE_LOCAL_PATH)',
                   gyp.common.RelativePath(name, options.toplevel_dir))
      for name in params['build_files_arg']]
  gyp_binary = gyp.common.FixIfRelativePath(params['gyp_binary'],
                                            options.toplevel_dir)
  makefile_path = os.path.join('$(LOCAL_PATH)', makefile_name)
  if not gyp_binary.startswith(os.sep):
    # Make sure a relative gyp binary is invoked as ./gyp.
    gyp_binary = os.path.join('.', gyp_binary)
  root_makefile.write('GYP_FILES := \\\n %s\n\n' %
                      '\\\n '.join(map(Sourceify, gyp_files)))
  root_makefile.write('%s: PRIVATE_LOCAL_PATH := $(LOCAL_PATH)\n' %
                      makefile_path)
  root_makefile.write('%s: $(GYP_FILES)\n' % makefile_path)
  root_makefile.write('\techo ACTION Regenerating $@\n\t%s\n\n' %
                      gyp.common.EncodePOSIXShellList(
                          [gyp_binary, '-fandroid'] +
                          gyp.RegenerateFlags(options) +
                          regen_args))
def GenerateOutput(target_list, target_dicts, data, params):
  """Generator entry point: write the root GypAndroid.mk plus one .mk file
  per gyp target, each included from the root makefile.

  target_list: qualified target names.
  target_dicts: mapping from qualified target name to its gyp spec.
  data: per-build-file gyp data (used here for 'included_files').
  params: generator parameters from the gyp driver.
  """
  options = params['options']
  generator_flags = params.get('generator_flags', {})
  builddir_name = generator_flags.get('output_dir', 'out')  # NOTE(review): unused here
  limit_to_target_all = generator_flags.get('limit_to_target_all', False)
  android_top_dir = os.environ.get('ANDROID_BUILD_TOP')
  assert android_top_dir, '$ANDROID_BUILD_TOP not set; you need to run lunch.'

  def CalculateMakefilePath(build_file, base_name):
    """Determine where to write a Makefile for a given gyp file."""
    # Paths in gyp files are relative to the .gyp file, but we want
    # paths relative to the source root for the master makefile. Grab
    # the path of the .gyp file as the base to relativize against.
    # E.g. "foo/bar" when we're constructing targets for "foo/bar/baz.gyp".
    base_path = gyp.common.RelativePath(os.path.dirname(build_file),
                                        options.depth)
    # We write the file in the base_path directory.
    output_file = os.path.join(options.depth, base_path, base_name)
    assert not options.generator_output, (
        'The Android backend does not support options.generator_output.')
    base_path = gyp.common.RelativePath(os.path.dirname(build_file),
                                        options.toplevel_dir)
    return base_path, output_file

  # TODO: search for the first non-'Default' target. This can go
  # away when we add verification that all targets have the
  # necessary configurations.
  default_configuration = None
  toolsets = set([target_dicts[target]['toolset'] for target in target_list])
  for target in target_list:
    spec = target_dicts[target]
    if spec['default_configuration'] != 'Default':
      default_configuration = spec['default_configuration']
      break
  if not default_configuration:
    default_configuration = 'Default'

  srcdir = '.'  # NOTE(review): unused here
  makefile_name = 'GypAndroid.mk' + options.suffix
  makefile_path = os.path.join(options.toplevel_dir, makefile_name)
  assert not options.generator_output, (
      'The Android backend does not support options.generator_output.')
  make.ensure_directory_exists(makefile_path)
  root_makefile = open(makefile_path, 'w')

  root_makefile.write(header)

  # We set LOCAL_PATH just once, here, to the top of the project tree. This
  # allows all the other paths we use to be relative to the Android.mk file,
  # as the Android build system expects.
  root_makefile.write('\nLOCAL_PATH := $(call my-dir)\n')

  # Find the list of targets that derive from the gyp file(s) being built.
  needed_targets = set()
  for build_file in params['build_files']:
    for target in gyp.common.AllTargets(target_list, target_dicts, build_file):
      needed_targets.add(target)

  build_files = set()
  include_list = set()
  android_modules = {}
  for qualified_target in target_list:
    build_file, target, toolset = gyp.common.ParseQualifiedTarget(
        qualified_target)
    build_files.add(gyp.common.RelativePath(build_file, options.toplevel_dir))
    included_files = data[build_file]['included_files']
    for included_file in included_files:
      # The included_files entries are relative to the dir of the build file
      # that included them, so we have to undo that and then make them relative
      # to the root dir.
      relative_include_file = gyp.common.RelativePath(
          gyp.common.UnrelativePath(included_file, build_file),
          options.toplevel_dir)
      abs_include_file = os.path.abspath(relative_include_file)
      # If the include file is from the ~/.gyp dir, we should use absolute path
      # so that relocating the src dir doesn't break the path.
      if (params['home_dot_gyp'] and
          abs_include_file.startswith(params['home_dot_gyp'])):
        build_files.add(abs_include_file)
      else:
        build_files.add(relative_include_file)

    base_path, output_file = CalculateMakefilePath(build_file,
        target + '.' + toolset + options.suffix + '.mk')

    spec = target_dicts[qualified_target]
    configs = spec['configurations']

    part_of_all = (qualified_target in needed_targets and
                   not int(spec.get('suppress_wildcard', False)))
    if limit_to_target_all and not part_of_all:
      continue

    writer = AndroidMkWriter(android_top_dir)
    android_module = writer.Write(qualified_target, base_path, output_file,
                                  spec, configs, part_of_all=part_of_all)
    if android_module in android_modules:
      # Duplicate module names would clash in the Android build; abort.
      print ('ERROR: Android module names must be unique. The following '
             'targets both generate Android module name %s.\n %s\n %s' %
             (android_module, android_modules[android_module],
              qualified_target))
      return
    android_modules[android_module] = qualified_target

    # Our root_makefile lives at the source root. Compute the relative path
    # from there to the output_file for including.
    mkfile_rel_path = gyp.common.RelativePath(output_file,
                                              os.path.dirname(makefile_path))
    include_list.add(mkfile_rel_path)

  # Some tools need to know the absolute path of the top directory.
  root_makefile.write('GYP_ABS_ANDROID_TOP_DIR := $(shell pwd)\n')

  # Write out the sorted list of includes.
  root_makefile.write('\n')
  for include_file in sorted(include_list):
    root_makefile.write('include $(LOCAL_PATH)/' + include_file + '\n')
  root_makefile.write('\n')

  if generator_flags.get('auto_regeneration', True):
    WriteAutoRegenerationRule(params, root_makefile, makefile_name, build_files)

  root_makefile.write(SHARED_FOOTER)

  root_makefile.close()
| mit |
diagramsoftware/odoo | addons/l10n_ch/__init__.py | 424 | 1212 | # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Nicolas Bessi. Copyright Camptocamp SA
# Financial contributors: Hasa SA, Open Net SA,
# Prisme Solutions Informatique SA, Quod SA
#
# Translation contributors: brain-tec AG, Agile Business Group
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import account_wizard
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
ryfeus/lambda-packs | pytorch/source/numpy/polynomial/__init__.py | 18 | 1134 | """
A sub-package for efficiently dealing with polynomials.
Within the documentation for this sub-package, a "finite power series,"
i.e., a polynomial (also referred to simply as a "series") is represented
by a 1-D numpy array of the polynomial's coefficients, ordered from lowest
order term to highest. For example, array([1,2,3]) represents
``P_0 + 2*P_1 + 3*P_2``, where P_n is the n-th order basis polynomial
applicable to the specific module in question, e.g., `polynomial` (which
"wraps" the "standard" basis) or `chebyshev`. For optimal performance,
all operations on polynomials, including evaluation at an argument, are
implemented as operations on the coefficients. Additional (module-specific)
information can be found in the docstring for the module of interest.
"""
from __future__ import division, absolute_import, print_function
from .polynomial import Polynomial
from .chebyshev import Chebyshev
from .legendre import Legendre
from .hermite import Hermite
from .hermite_e import HermiteE
from .laguerre import Laguerre
from numpy._pytesttester import PytestTester
test = PytestTester(__name__)
del PytestTester
| mit |
Geoportail-Luxembourg/geoportailv3 | geoportal/geoportailv3_geoportal/scripts/db2es.py | 1 | 3233 | # -*- coding: utf-8 -*-
from pyramid.paster import bootstrap
import psycopg2
from psycopg2.extras import DictCursor
import sys
import getopt
import json
from elasticsearch import helpers
from elasticsearch.helpers import BulkIndexError
from elasticsearch.exceptions import ConnectionTimeout
from geoportailv3_geoportal.lib.search import get_elasticsearch, get_index, \
ensure_index
"""
Utility functions for importing data into Elasticsearch from database
"""
def get_cursor():
    """Connect to the source PostgreSQL search database and return a cursor
    positioned on all rows of public."searchLayer".

    The geometry column is additionally selected reprojected to EPSG:4326
    as GeoJSON (aliased geom_4326) so it can be indexed directly.
    """
    source_conf = {
        'database': 'search',
        'user': 'postgres',
        'password': '',  # NOTE(review): empty password — presumably trust/peer auth; confirm
        'host': 'luigi11',
        'port': '5432'
    }
    conn = psycopg2.connect(**source_conf)
    # DictCursor lets update_document() access columns by name.
    cursor = conn.cursor(cursor_factory=DictCursor)
    query = "Select *, ST_AsGeoJSON(ST_Transform(\"searchLayer\".geom,4326)) as geom_4326 \
from public.\"searchLayer\" ;"
    cursor.execute(query)
    return cursor
def update_document(index, type, obj_id, obj=None):
    """Build an Elasticsearch bulk-index action for one database row.

    Args:
        index: name of the target Elasticsearch index.
        type: document type; the only caller passes 'poi'.
        obj_id: primary key of the row, used as the ES document id.
        obj: a DictCursor row with at least 'geom_4326' (GeoJSON string),
            'fk', 'type' (layer name) and 'label'. Despite the default,
            a row is required; passing None raises TypeError.

    Returns:
        A dict in the action format expected by elasticsearch.helpers.bulk().
    """
    doc = {
        "_index": index,
        # Use the caller-supplied type instead of hard-coding 'poi'
        # (behaviour unchanged for the existing caller, which passes 'poi').
        "_type": type,
        "_id": obj_id,
    }
    doc['_source'] = {
        # Geometry already reprojected to EPSG:4326 by the SQL query.
        'ts': json.loads(obj['geom_4326']),
        'object_id': obj_id,
        'fk': obj['fk'],
        'object_type': type,
        'layer_name': obj['type'],
        'label': obj['label'],
        # All imported documents are public and owned by role 1.
        'role_id': 1,
        'public': True,
    }
    return doc
def statuslog(text):
    """Write *text* to stdout without a newline and flush immediately, so
    '\\r'-prefixed progress messages update in place."""
    out = sys.stdout
    out.write(text)
    out.flush()
def main():
    """Entry point: ensure the Elasticsearch index exists and optionally
    bulk-index every row of the search database.

    Command line options:
      -r / --reset  recreate the index (passed to ensure_index)
      -i / --index  stream the database rows into Elasticsearch
    """
    env = bootstrap('development.ini')
    request = env['request']
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'ri', ['reset', 'index'])
    except getopt.GetoptError as err:
        print(str(err))
        sys.exit(2)
    index = False
    reset = False
    for o, a in opts:
        if o in ('-r', '--reset'):
            statuslog('\rResetting Index')
            reset = True
        if o in ('-i', '--index'):
            statuslog('\rChecking Index')
            index = True
    import time
    # Date-suffixed name so a fresh index can be built per day.
    # NOTE(review): documents below are indexed into get_index(request), not
    # this date-suffixed index_name — confirm an alias makes these match.
    index_name = get_index(request) + '_' + time.strftime("%Y%m%d")
    ensure_index(get_elasticsearch(request), index_name, reset)
    if index is True:
        statuslog("\rCreating Database Query ")
        c = get_cursor()
        counter = 1
        while True:
            # Stream rows in batches of 250 to bound memory usage.
            multiple = 250
            results = c.fetchmany(multiple)
            doc_list = []
            for result in results:
                doc = update_document(get_index(request),
                                      'poi',
                                      result['id'],
                                      result)
                doc_list.append(doc)
                statuslog("\rIndexed Elements: %i" % int(counter))
                counter = counter + 1
            try:
                helpers.bulk(client=get_elasticsearch(request),
                             actions=doc_list,
                             chunk_size=multiple,
                             raise_on_error=True)
            except (BulkIndexError, ConnectionTimeout) as e:
                # Best-effort: report the failed batch but keep going.
                print("\n {}".format(e))
            if not results:
                statuslog("\n")
                break


if __name__ == '__main__':
    main()
| mit |
Jgarcia-IAS/localizacion | openerp/addons-extra/odoo-pruebas/odoo-server/addons/web/doc/conf.py | 494 | 8552 | # -*- coding: utf-8 -*-
#
# OpenERP Technical Documentation configuration file, created by
# sphinx-quickstart on Fri Feb 17 16:14:06 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# Make the bundled Sphinx theme and the openerp-web sources importable.
sys.path.append(os.path.abspath('_themes'))
sys.path.insert(0, os.path.abspath('../addons'))
sys.path.insert(0, os.path.abspath('..'))

# -- General configuration -----------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
# NOTE: 'patchqueue' is a third-party Sphinx extension and must be installed
# for the build to succeed.
extensions = [
    'sphinx.ext.autodoc', 'sphinx.ext.intersphinx',
    'sphinx.ext.todo', 'sphinx.ext.viewcode',
    'patchqueue'
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The encoding of source files.
#source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'OpenERP Web Developers Documentation'
copyright = u'2012, OpenERP s.a.'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '7.0'
# The full version, including alpha/beta/rc tags.
release = '7.0'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']

# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------

# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
# 'flask' is a custom theme shipped in the local _themes directory (made
# available via html_theme_path below).
html_theme = 'flask'

# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['_themes']

# The name for this set of Sphinx documents.  If None, it defaults to
# "<project> v<release> documentation".
#html_title = None

# A shorter title for the navigation bar.  Default is the same as html_title.
#html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
# The landing page gets an intro sidebar; every other page gets the
# logo/toc/relations layout.
html_sidebars = {
    'index': ['sidebarintro.html', 'sourcelink.html', 'searchbox.html'],
    '**': ['sidebarlogo.html', 'localtoc.html', 'relations.html',
           'sourcelink.html', 'searchbox.html']
}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}

# If false, no module index is generated.
#html_domain_indices = True

# If false, no index is generated.
#html_use_index = True

# If true, the index is split into individual pages for each letter.
#html_split_index = False

# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it.  The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None

# Output file base name for HTML help builder.
htmlhelp_basename = 'openerp-web-doc'
# -- Options for LaTeX output --------------------------------------------------

latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',

# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',

# Additional stuff for the LaTeX preamble.
#'preamble': '',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
    ('index', 'openerp-web-doc.tex', u'OpenERP Web Developers Documentation',
     u'OpenERP s.a.', 'manual'),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False

# If true, show page references after internal links.
#latex_show_pagerefs = False

# If true, show URL addresses after external links.
#latex_show_urls = False

# Documents to append as an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'openerp-web-doc', u'OpenERP Web Developers Documentation',
     [u'OpenERP s.a.'], 1)
]

# If true, show URL addresses after external links.
#man_show_urls = False

# -- Options for Texinfo output ------------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    ('index', 'OpenERPWebDocumentation', u'OpenERP Web Developers Documentation',
     u'OpenERP s.a.', 'OpenERPWebDocumentation', 'Developers documentation for the openerp-web project.',
     'Miscellaneous'),
]

# Documents to append as an appendix to all manuals.
#texinfo_appendices = []

# If false, no module index is generated.
#texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'

# Render .. todo:: directives into the built documentation.
todo_include_todos = True

# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
    'python': ('http://docs.python.org/', None),
    'openerpserver': ('http://doc.openerp.com/trunk/developers/server', None),
}
| agpl-3.0 |
hradec/gaffer | python/GafferImageTest/ImageTestCase.py | 2 | 6622 | ##########################################################################
#
# Copyright (c) 2015, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import six
import imath
import IECore
import IECoreImage
import Gaffer
import GafferTest
import GafferImage
import GafferImageTest
class ImageTestCase( GafferTest.TestCase ) :
    """Base class for GafferImage test cases.

    Adds assertions for comparing images plug-for-plug (hashes and values,
    for both flat and deep images), plus helpers that build degenerate
    images for exercising edge cases in image nodes.
    """

    def setUp( self ) :

        GafferTest.TestCase.setUp( self )

        # Keep a ContextSanitiser active for the whole test; the cleanup
        # callback runs its __exit__ at tear-down time.
        sanitiser = GafferImageTest.ContextSanitiser()
        sanitiser.__enter__()
        self.addCleanup( sanitiser.__exit__, None, None, None )

    def assertImageHashesEqual( self, imageA, imageB ) :
        """Assert that all plug hashes of the two images match, including
        the channel data hash of every tile inside the data window."""

        self.assertEqual( imageA["format"].hash(), imageB["format"].hash() )
        self.assertEqual( imageA["dataWindow"].hash(), imageB["dataWindow"].hash() )
        self.assertEqual( imageA["metadata"].hash(), imageB["metadata"].hash() )
        self.assertEqual( imageA["channelNames"].hash(), imageB["channelNames"].hash() )

        dataWindow = imageA["dataWindow"].getValue()
        self.assertEqual( dataWindow, imageB["dataWindow"].getValue() )

        channelNames = imageA["channelNames"].getValue()
        self.assertEqual( channelNames, imageB["channelNames"].getValue() )

        # Walk the data window tile by tile, comparing per-channel hashes.
        tileOrigin = GafferImage.ImagePlug.tileOrigin( dataWindow.min() )
        while tileOrigin.y < dataWindow.max().y :
            tileOrigin.x = GafferImage.ImagePlug.tileOrigin( dataWindow.min() ).x
            while tileOrigin.x < dataWindow.max().x :
                for channelName in channelNames :
                    self.assertEqual(
                        imageA.channelDataHash( channelName, tileOrigin ),
                        imageB.channelDataHash( channelName, tileOrigin )
                    )
                tileOrigin.x += GafferImage.ImagePlug.tileSize()
            tileOrigin.y += GafferImage.ImagePlug.tileSize()

    def assertImagesEqual( self, imageA, imageB, maxDifference = 0.0, ignoreMetadata = False, ignoreDataWindow = False ) :
        """Assert that two images contain equal values.

        maxDifference : permitted absolute per-channel difference (used for
            flat images only; deep images are compared exactly).
        ignoreMetadata / ignoreDataWindow : skip comparing those plugs.
        """

        self.longMessage = True

        self.assertEqual( imageA["format"].getValue(), imageB["format"].getValue() )
        if not ignoreDataWindow :
            self.assertEqual( imageA["dataWindow"].getValue(), imageB["dataWindow"].getValue() )
        if not ignoreMetadata :
            self.assertEqual( imageA["metadata"].getValue(), imageB["metadata"].getValue() )
        self.assertEqual( imageA["channelNames"].getValue(), imageB["channelNames"].getValue() )

        deep = imageA["deep"].getValue()
        self.assertEqual( deep, imageB["deep"].getValue() )
        if not deep:
            # Flat images : compare via a Difference merge, checking the
            # maximum remaining value per channel against maxDifference.
            difference = GafferImage.Merge()
            difference["in"][0].setInput( imageA )
            difference["in"][1].setInput( imageB )
            difference["operation"].setValue( GafferImage.Merge.Operation.Difference )

            stats = GafferImage.ImageStats()
            stats["in"].setInput( difference["out"] )
            stats["area"].setValue( imageA["format"].getValue().getDisplayWindow() )

            for channelName in imageA["channelNames"].getValue() :
                stats["channels"].setValue( IECore.StringVectorData( [ channelName ] * 4 ) )
                self.assertLessEqual( stats["max"]["r"].getValue(), maxDifference, "Channel {0}".format( channelName ) )

            # Access the tiles, because this will throw an error if the sample offsets are bogus
            GafferImage.ImageAlgo.tiles( imageA )
            GafferImage.ImageAlgo.tiles( imageB )
        else:
            # Deep images : compare tile data exactly, drilling down to the
            # first differing tile/sample to make failures easy to diagnose.
            pixelDataA = GafferImage.ImageAlgo.tiles( imageA )
            pixelDataB = GafferImage.ImageAlgo.tiles( imageB )
            if pixelDataA != pixelDataB:
                self.assertEqual( pixelDataA.keys(), pixelDataB.keys() )
                self.assertEqual( pixelDataA["tileOrigins"], pixelDataB["tileOrigins"] )
                for k in pixelDataA.keys():
                    if k == "tileOrigins":
                        continue
                    for i in range( len( pixelDataA[k] ) ):
                        if pixelDataA[k][i] != pixelDataB[k][i]:
                            tileStr = str( pixelDataA["tileOrigins"][i] )
                            self.assertEqual( len( pixelDataA[k][i] ), len( pixelDataB[k][i] ), " while checking pixel data %s : %s" % ( k, tileStr ) )
                            for j in range( len( pixelDataA[k][i] ) ):
                                self.assertEqual( pixelDataA[k][i][j], pixelDataB[k][i][j] , " while checking pixel data %s : %s at index %i" % ( k, tileStr, j ) )

    ## Returns an image node with an empty data window. This is useful in
    # verifying that nodes deal correctly with such inputs.
    def emptyImage( self ) :

        emptyCrop = GafferImage.Crop( "Crop" )
        emptyCrop["Constant"] = GafferImage.Constant()
        emptyCrop["Constant"]["format"].setValue( GafferImage.Format( 100, 100, 1.000 ) )
        emptyCrop["in"].setInput( emptyCrop["Constant"]["out"] )
        emptyCrop["area"].setValue( imath.Box2i() )
        emptyCrop["affectDisplayWindow"].setValue( False )

        self.assertEqual( emptyCrop["out"]["dataWindow"].getValue(), imath.Box2i() )

        return emptyCrop

    def deepImage( self ):
        # NOTE(review): DeepImage is not defined in this excerpt — presumably
        # provided elsewhere on the class or by GafferImageTest; confirm.
        return self.DeepImage()

    def assertRaisesDeepNotSupported( self, node ) :
        """Assert that `node` processes a flat input (producing a different
        image) but raises for a deep input."""

        flat = GafferImage.Constant()
        node["in"].setInput( flat["out"] )
        self.assertNotEqual( GafferImage.ImageAlgo.imageHash( flat["out"] ), GafferImage.ImageAlgo.imageHash( node["out"] ) )

        deep = GafferImage.Empty()
        node["in"].setInput( deep["out"] )
        six.assertRaisesRegex( self, RuntimeError, 'Deep data not supported in input "in*', GafferImage.ImageAlgo.image, node["out"] )
| bsd-3-clause |
boa19861105/Butterfly-Kernel | tools/perf/python/twatch.py | 7370 | 1334 | #! /usr/bin/python
# -*- python -*-
# -*- coding: utf-8 -*-
# twatch - Experimental use of the perf python interface
# Copyright (C) 2011 Arnaldo Carvalho de Melo <acme@redhat.com>
#
# This application is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 2.
#
# This application is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
import perf
def main():
    """Open a perf event on all CPUs/threads and print task/comm events
    forever. (Python 2 code — uses print statements.)"""
    cpus = perf.cpu_map()
    threads = perf.thread_map()
    # NOTE(review): SAMPLE_TID appears twice in sample_type below; the
    # second occurrence is redundant under bitwise OR — possibly
    # SAMPLE_TIME was intended. Confirm before changing.
    evsel = perf.evsel(task = 1, comm = 1, mmap = 0,
                       wakeup_events = 1, watermark = 1,
                       sample_id_all = 1,
                       sample_type = perf.SAMPLE_PERIOD | perf.SAMPLE_TID | perf.SAMPLE_CPU | perf.SAMPLE_TID)
    evsel.open(cpus = cpus, threads = threads);
    evlist = perf.evlist(cpus, threads)
    evlist.add(evsel)
    evlist.mmap()
    # Block on poll(), then drain and print any events from each CPU ring.
    while True:
        evlist.poll(timeout = -1)
        for cpu in cpus:
            event = evlist.read_on_cpu(cpu)
            if not event:
                continue
            # Trailing comma suppresses the newline (Python 2 print).
            print "cpu: %2d, pid: %4d, tid: %4d" % (event.sample_cpu,
                                                    event.sample_pid,
                                                    event.sample_tid),
            print event

if __name__ == '__main__':
    main()
| gpl-2.0 |
dlsun/symbulate | symbulate/index_sets.py | 1 | 1353 | import numbers
class IndexSet(object):
    """Abstract base class for the index (time) sets of a random process.

    The base set contains nothing; subclasses override __contains__ to
    define membership. Indexing with a contained time returns that time,
    otherwise a KeyError is raised.
    """

    def __init__(self):
        pass

    def __getitem__(self, t):
        if t not in self:
            raise KeyError("Time %.2f not in index set." % t)
        return t

    def __contains__(self, value):
        # The abstract index set is empty.
        return False

    def __eq__(self, other):
        # Two index sets are equal exactly when they are the same kind.
        return type(self) == type(other)
class Reals(IndexSet):
    """The real line: contains every finite real number.

    NaN fails both comparisons below and is therefore excluded, as are
    infinities (strict inequalities).
    """

    def __init__(self):
        return

    def __contains__(self, value):
        try:
            return -float("inf") < value < float("inf")
        except TypeError:
            # Fix: the original bare `except:` swallowed *every* exception
            # (including KeyboardInterrupt/SystemExit). Only unorderable
            # operands (strings, complex, ...) should map to "not a real".
            return False
class Naturals(IndexSet):
    """The natural numbers {0, 1, 2, ...}.

    Membership accepts both true integers (numbers.Integral) and
    integer-valued floats (via .is_integer()).
    """

    def __init__(self):
        return

    def __contains__(self, value):
        try:
            return (
                value >= 0 and
                (isinstance(value, numbers.Integral) or
                 value.is_integer())
            )
        except (TypeError, AttributeError):
            # Fix: the original bare `except:` swallowed *every* exception
            # (including KeyboardInterrupt/SystemExit). TypeError covers
            # unorderable values; AttributeError covers numeric types
            # without an .is_integer() method.
            return False
class DiscreteTimeSequence(IndexSet):
    """Evenly spaced time points n / fs for integer n, where fs is the
    sampling rate."""

    def __init__(self, fs):
        # Sampling rate: the n-th index corresponds to time n / fs.
        self.fs = fs

    def __getitem__(self, n):
        return n / self.fs

    def __contains__(self, value):
        # A time belongs to the sequence when value * fs lands on an integer.
        return float(value * self.fs).is_integer()

    def __eq__(self, index):
        if not isinstance(index, DiscreteTimeSequence):
            return False
        return self.fs == index.fs
class Integers(DiscreteTimeSequence):
    """The integers: a discrete-time sequence with sampling rate 1."""

    def __init__(self):
        # Equivalent to setting self.fs = 1 directly.
        DiscreteTimeSequence.__init__(self, 1)
kurikaesu/arsenalsuite | cpp/lib/PyQt4/pyuic/uic/exceptions.py | 27 | 2169 | #############################################################################
##
## Copyright (C) 2011 Riverbank Computing Limited.
## Copyright (C) 2006 Thorsten Marek.
## All right reserved.
##
## This file is part of PyQt.
##
## You may use this file under the terms of the GPL v2 or the revised BSD
## license as follows:
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are
## met:
## * Redistributions of source code must retain the above copyright
## notice, this list of conditions and the following disclaimer.
## * Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in
## the documentation and/or other materials provided with the
## distribution.
## * Neither the name of the Riverbank Computing Limited nor the names
## of its contributors may be used to endorse or promote products
## derived from this software without specific prior written
## permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
## A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
## OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
## SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
## LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
## DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
## THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
## (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
## OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
#############################################################################
class NoSuchWidgetError(Exception):
    """Raised when a .ui file refers to a Qt widget class that is unknown.

    The first constructor argument is the widget class name, which is
    reported in the error message.
    """

    def __str__(self):
        widget_name = self.args[0]
        return "Unknown Qt widget: %s" % (widget_name,)
class UnsupportedPropertyError(Exception):
    """Raised for a widget property that the .ui handling cannot process."""
    pass
class WidgetPluginError(Exception):
    """Raised for errors related to custom widget plugins."""
    pass
| gpl-2.0 |
regisf/Strawberry | strawberrylib/blueprints/__init__.py | 1 | 1171 | # -*- coding: utf-8 -*-
# Strawberry Blog Engine
#
# Copyright (c) 2014 Regis FLORET
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# Package author metadata for the Strawberry blueprints package.
__author__ = 'Regis FLORET'
| mit |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.