repo_name stringlengths 6 100 | path stringlengths 4 294 | copies stringlengths 1 5 | size stringlengths 4 6 | content stringlengths 606 896k | license stringclasses 15
values |
|---|---|---|---|---|---|
rosmo/ansible | test/units/modules/network/fortimanager/fortimanager_module.py | 111 | 2107 | # (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from units.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase
class TestFortimanagerModule(ModuleTestCase):
    """Shared harness for FortiManager module unit tests.

    Subclasses set ``self.module`` and override :meth:`load_fixtures`;
    tests then call :meth:`execute_module` to run the module once and
    validate the JSON result it exits with.
    """

    def execute_module(self, failed=False, changed=False, commands=None, sort=True, defaults=False):
        """Run the module and check its reported failed/changed state.

        When ``commands`` is given, also verify the command list the
        module reports (order-insensitively unless ``sort`` is False).
        Returns the result dict for further assertions.
        """
        self.load_fixtures(commands)

        if failed:
            outcome = self.failed()
            self.assertTrue(outcome['failed'], outcome)
        else:
            outcome = self.changed(changed)
            self.assertEqual(outcome['changed'], changed, outcome)

        if commands is not None:
            expected = sorted(commands) if sort else commands
            actual = sorted(outcome['commands']) if sort else outcome['commands']
            self.assertEqual(expected, actual, outcome['commands'])

        return outcome

    def failed(self):
        """Invoke main() expecting a fail_json exit; return its payload."""
        with self.assertRaises(AnsibleFailJson) as ctx:
            self.module.main()

        outcome = ctx.exception.args[0]
        self.assertTrue(outcome['failed'], outcome)
        return outcome

    def changed(self, changed=False):
        """Invoke main() expecting an exit_json exit; return its payload."""
        with self.assertRaises(AnsibleExitJson) as ctx:
            self.module.main()

        outcome = ctx.exception.args[0]
        self.assertEqual(outcome['changed'], changed, outcome)
        return outcome

    def load_fixtures(self, commands=None):
        """Hook for subclasses to stub transport calls; no-op by default."""
        pass
| gpl-3.0 |
geminateCoder/Character-Archive-Website | Lib/site-packages/sqlalchemy/testing/config.py | 38 | 2469 | # testing/config.py
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import collections
requirements = None
db = None
db_url = None
db_opts = None
file_config = None
test_schema = None
test_schema_2 = None
_current = None
_skip_test_exception = None
class Config(object):
    """A database configuration in play for the test run.

    Bundles an engine, engine-creation options, command-line options and
    file configuration.  The class additionally tracks the module-global
    "current" configuration via :meth:`set_as_current`, :meth:`push` and
    :meth:`reset`.
    """

    def __init__(self, db, db_opts, options, file_config):
        # Engine under test.
        self.db = db
        # Options used when the engine was created.
        self.db_opts = db_opts
        # Parsed command-line options for the run.
        self.options = options
        # Parsed configuration-file contents.
        self.file_config = file_config
        # Schema names used by tests exercising cross-schema behavior.
        self.test_schema = "test_schema"
        self.test_schema_2 = "test_schema_2"

    # Stack of previously-current configs, supporting push/reset.
    _stack = collections.deque()
    # Registry of all configs; each Config is stored under several
    # aliases (name, (name, dialect), engine object) for flexible lookup.
    _configs = {}

    @classmethod
    def register(cls, db, db_opts, options, file_config):
        """add a config as one of the global configs.

        If there are no configs set up yet, this config also
        gets set as the "_current".
        """
        cfg = Config(db, db_opts, options, file_config)
        cls._configs[cfg.db.name] = cfg
        cls._configs[(cfg.db.name, cfg.db.dialect)] = cfg
        cls._configs[cfg.db] = cfg
        return cfg

    @classmethod
    def set_as_current(cls, config, namespace):
        """Install ``config`` as the module-global current configuration,
        mirroring its engine onto the caller-supplied ``namespace``."""
        global db, _current, db_url, test_schema, test_schema_2, db_opts
        _current = config
        db_url = config.db.url
        db_opts = config.db_opts
        test_schema = config.test_schema
        test_schema_2 = config.test_schema_2
        namespace.db = db = config.db

    @classmethod
    def push_engine(cls, db, namespace):
        """Push a new current config built from ``db`` plus the options of
        the existing current config."""
        assert _current, "Can't push without a default Config set up"
        cls.push(
            Config(
                db, _current.db_opts, _current.options, _current.file_config),
            namespace
        )

    @classmethod
    def push(cls, config, namespace):
        """Make ``config`` current, remembering the previous one."""
        cls._stack.append(_current)
        cls.set_as_current(config, namespace)

    @classmethod
    def reset(cls, namespace):
        """Restore the original (first-pushed) config and clear the stack."""
        if cls._stack:
            cls.set_as_current(cls._stack[0], namespace)
            cls._stack.clear()

    @classmethod
    def all_configs(cls):
        # _configs holds several aliases per Config; dedupe via set().
        for cfg in set(cls._configs.values()):
            yield cfg

    @classmethod
    def all_dbs(cls):
        for cfg in cls.all_configs():
            yield cfg.db

    def skip_test(self, msg):
        # Delegates to the module-level skip_test() helper.
        skip_test(msg)
def skip_test(msg):
    """Abort the current test with ``msg`` by raising the runner-provided
    skip exception (installed into ``_skip_test_exception`` at setup)."""
    exc_cls = _skip_test_exception
    raise exc_cls(msg)
| cc0-1.0 |
TheTypoMaster/my-vim-set-mac | .vim/bundle/YouCompleteMe/third_party/ycmd/third_party/jedi/test/static_analysis/star_arguments.py | 20 | 2237 | # -----------------
# NOTE: this file is a jedi static-analysis test fixture, not ordinary
# code.  A comment of the form "#! <column> <error-name>" asserts that
# the NEXT line produces exactly that analysis error at that column.
# Do not reflow, re-align or reorder these lines: positions are part of
# the expected test output.
# -----------------
# *args
# -----------------
def simple(a):
    return a


def nested(*args):
    return simple(*args)


nested(1)
#! 6 type-error-too-few-arguments
nested()


def nested_no_call_to_function(*args):
    return simple(1, *args)


def simple2(a, b, c):
    return b


def nested(*args):
    return simple2(1, *args)


def nested_twice(*args1):
    return nested(*args1)


nested_twice(2, 3)
#! 13 type-error-too-few-arguments
nested_twice(2)
#! 19 type-error-too-many-arguments
nested_twice(2, 3, 4)


# A named argument can be located before *args.
def star_args_with_named(*args):
    return simple2(c='', *args)


star_args_with_named(1, 2)

# -----------------
# **kwargs
# -----------------


def kwargs_test(**kwargs):
    return simple2(1, **kwargs)


kwargs_test(c=3, b=2)
#! 12 type-error-too-few-arguments
kwargs_test(c=3)
#! 12 type-error-too-few-arguments
kwargs_test(b=2)
#! 22 type-error-keyword-argument
kwargs_test(b=2, c=3, d=4)
#! 12 type-error-multiple-values
kwargs_test(b=2, c=3, a=4)


def kwargs_nested(**kwargs):
    return kwargs_test(b=2, **kwargs)


kwargs_nested(c=3)
#! 13 type-error-too-few-arguments
kwargs_nested()
#! 19 type-error-keyword-argument
kwargs_nested(c=2, d=4)
#! 14 type-error-multiple-values
kwargs_nested(c=2, a=4)
# TODO reenable
##! 14 type-error-multiple-values
#kwargs_nested(b=3, c=2)

# -----------------
# mixed *args/**kwargs
# -----------------


def simple_mixed(a, b, c):
    return b


def mixed(*args, **kwargs):
    return simple_mixed(1, *args, **kwargs)


mixed(1, 2)
mixed(1, c=2)
mixed(b=2, c=3)
mixed(c=4, b='')


# need separate functions, otherwise these might swallow the errors
def mixed2(*args, **kwargs):
    return simple_mixed(1, *args, **kwargs)


#! 7 type-error-too-few-arguments
mixed2(c=2)
#! 7 type-error-too-few-arguments
mixed2(3)
#! 13 type-error-too-many-arguments
mixed2(3, 4, 5)
# TODO reenable
##! 13 type-error-too-many-arguments
#mixed2(3, 4, c=5)
#! 7 type-error-multiple-values
mixed2(3, b=5)

# -----------------
# plain wrong arguments
# -----------------

#! 12 type-error-star-star
simple(1, **[])
#! 12 type-error-star-star
simple(1, **1)


class A(): pass


#! 12 type-error-star-star
simple(1, **A())
#! 11 type-error-star
simple(1, *1)
| gpl-2.0 |
meidli/yabgp | yabgp/tests/unit/message/attribute/nlri/labeled_unicast/test_ipv4_labeled_unicast.py | 2 | 2240 | # Copyright 2015-2016 Cisco Systems, Inc.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import unittest
from yabgp.message.attribute.nlri.labeled_unicast.ipv4 import IPv4LabeledUnicast
class TestIPv4LabeledUnicast(unittest.TestCase):
    """Round-trip tests for the IPv4 labeled-unicast NLRI codec.

    The hex fixtures are wire-format BGP labeled-unicast NLRI entries
    (length octet, label stack entries, then the IPv4 prefix bytes);
    they must stay byte-exact.
    """

    def test_construct(self):
        # Single-label prefixes encode to the expected byte string.
        nlri_list = [
            {'prefix': '34.1.41.0/24', 'label': [321]},
            {'prefix': '34.1.42.0/24', 'label': [322]}
        ]
        nlri_hex = b'\x30\x00\x14\x11\x22\x01\x29\x30\x00\x14\x21\x22\x01\x2a'
        self.assertEqual(nlri_hex, IPv4LabeledUnicast.construct(nlri_list))

    def test_construct_with_multi_label(self):
        # A label stack of two labels per prefix.
        nlri_list = [
            {'prefix': '34.1.41.0/24', 'label': [321, 322]},
            {'prefix': '34.1.42.0/24', 'label': [321, 322]}
        ]
        nlri_hex = b'\x48\x00\x14\x10\x00\x14\x21\x22\x01\x29\x48\x00\x14\x10\x00\x14\x21\x22\x01\x2a'
        self.assertEqual(nlri_hex, IPv4LabeledUnicast.construct(nlri_list))

    def test_parse(self):
        # parse() is the inverse of construct() for single labels.
        nlri_list = [
            {'prefix': '34.1.41.0/24', 'label': [321]},
            {'prefix': '34.1.42.0/24', 'label': [322]}
        ]
        nlri_hex = b'\x30\x00\x14\x11\x22\x01\x29\x30\x00\x14\x21\x22\x01\x2a'
        self.assertEqual(nlri_list, IPv4LabeledUnicast.parse(nlri_hex))

    def test_parse_with_multi_label(self):
        # parse() is the inverse of construct() for multi-label stacks.
        nlri_list = [
            {'prefix': '34.1.41.0/24', 'label': [321, 322]},
            {'prefix': '34.1.42.0/24', 'label': [321, 322]}
        ]
        nlri_hex = b'\x48\x00\x14\x10\x00\x14\x21\x22\x01\x29\x48\x00\x14\x10\x00\x14\x21\x22\x01\x2a'
        self.assertEqual(nlri_list, IPv4LabeledUnicast.parse(nlri_hex))


if __name__ == '__main__':
    unittest.main()
| apache-2.0 |
cyberark-bizdev/ansible | test/integration/targets/vault/faux-editor.py | 43 | 1211 | #!/usr/bin/env python
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# ansible-vault is a script that encrypts/decrypts YAML files. See
# http://docs.ansible.com/playbooks_vault.html for more details.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import sys
import time
import os
def main(args):
    """Append a timestamped marker line to the file named in ``args[1]``.

    Acts as a minimal non-interactive "editor" for ansible-vault
    integration tests: it proves the editor was invoked by appending
    one line of the form ``faux editor added at <epoch>`` to the file.

    :param args: argv-style list; ``args[1]`` is the path to edit.
    :returns: 0 on success (used as the process exit code).
    """
    path = os.path.abspath(args[1])

    # Use a context manager so the handle is closed even if the read or
    # write raises (the original leaked the handle on error).
    with open(path, 'r+') as fo:
        content = fo.readlines()
        content.append('faux editor added at %s\n' % time.time())
        # Rewrite from the start; the new content is strictly longer than
        # the old, so no truncation is needed.
        fo.seek(0)
        fo.write(''.join(content))

    return 0


if __name__ == '__main__':
    sys.exit(main(sys.argv[:]))
| gpl-3.0 |
18098924759/Wox | PythonHome/Lib/site-packages/requests/packages/urllib3/response.py | 328 | 10347 | # urllib3/response.py
# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import logging
import zlib
import io
from ._collections import HTTPHeaderDict
from .exceptions import DecodeError
from .packages.six import string_types as basestring, binary_type
from .util import is_fp_closed
log = logging.getLogger(__name__)
class DeflateDecoder(object):
    """Decompressor for 'deflate'-encoded response bodies.

    Some servers send raw deflate streams without the zlib header.  On
    the first chunk this decoder tries the standard zlib format and, if
    that fails, switches to a raw-deflate decompressor and replays all
    bytes seen so far through it.
    """

    def __init__(self):
        self._first_try = True          # still deciding zlib vs raw deflate
        self._data = binary_type()      # bytes buffered for potential replay
        self._obj = zlib.decompressobj()

    def __getattr__(self, name):
        # Delegate everything else (flush, unused_data, ...) to the
        # underlying zlib decompressor object.
        return getattr(self._obj, name)

    def decompress(self, data):
        if not self._first_try:
            return self._obj.decompress(data)

        # First chunk(s): buffer the input so it can be replayed if the
        # zlib-wrapped attempt fails.
        self._data += data
        try:
            return self._obj.decompress(data)
        except zlib.error:
            # Header parse failed: assume a raw deflate stream
            # (negative wbits disables the zlib header/checksum) and
            # replay everything received so far.
            self._first_try = False
            self._obj = zlib.decompressobj(-zlib.MAX_WBITS)
            try:
                return self.decompress(self._data)
            finally:
                # The replay buffer is no longer needed.
                self._data = None
def _get_decoder(mode):
if mode == 'gzip':
return zlib.decompressobj(16 + zlib.MAX_WBITS)
return DeflateDecoder()
class HTTPResponse(io.IOBase):
    """
    HTTP Response container.

    Backwards-compatible to httplib's HTTPResponse but the response ``body`` is
    loaded and decoded on-demand when the ``data`` property is accessed.

    Extra parameters for behaviour not present in httplib.HTTPResponse:

    :param preload_content:
        If True, the response's body will be preloaded during construction.

    :param decode_content:
        If True, attempts to decode specific content-encoding's based on headers
        (like 'gzip' and 'deflate') will be skipped and raw data will be used
        instead.

    :param original_response:
        When this HTTPResponse wrapper is generated from an httplib.HTTPResponse
        object, it's convenient to include the original for debug purposes. It's
        otherwise unused.
    """

    # Content-encodings we know how to decode transparently.
    CONTENT_DECODERS = ['gzip', 'deflate']
    # Status codes for which get_redirect_location() reports a redirect.
    REDIRECT_STATUSES = [301, 302, 303, 307, 308]

    def __init__(self, body='', headers=None, status=0, version=0, reason=None,
                 strict=0, preload_content=True, decode_content=True,
                 original_response=None, pool=None, connection=None):

        self.headers = HTTPHeaderDict()
        if headers:
            self.headers.update(headers)
        self.status = status
        self.version = version
        self.reason = reason
        self.strict = strict
        self.decode_content = decode_content

        self._decoder = None            # created lazily in read()
        # Only cache the body here when it was passed in as a string/bytes;
        # file-like bodies go through self._fp instead.
        self._body = body if body and isinstance(body, basestring) else None
        self._fp = None
        self._original_response = original_response
        self._fp_bytes_read = 0         # raw (wire) bytes consumed so far

        # Connection-pool bookkeeping for release_conn().
        self._pool = pool
        self._connection = connection

        if hasattr(body, 'read'):
            self._fp = body

        if preload_content and not self._body:
            self._body = self.read(decode_content=decode_content)

    def get_redirect_location(self):
        """
        Should we redirect and where to?

        :returns: Truthy redirect location string if we got a redirect status
            code and valid location. ``None`` if redirect status and no
            location. ``False`` if not a redirect status code.
        """
        if self.status in self.REDIRECT_STATUSES:
            return self.headers.get('location')

        return False

    def release_conn(self):
        # Return the underlying connection to its pool, if we have both.
        if not self._pool or not self._connection:
            return

        self._pool._put_conn(self._connection)
        self._connection = None

    @property
    def data(self):
        # For backwards-compat with urllib3 0.4 and earlier.
        if self._body:
            return self._body

        if self._fp:
            return self.read(cache_content=True)

    def tell(self):
        """
        Obtain the number of bytes pulled over the wire so far. May differ from
        the amount of content returned by :meth:``HTTPResponse.read`` if bytes
        are encoded on the wire (e.g, compressed).
        """
        return self._fp_bytes_read

    def read(self, amt=None, decode_content=None, cache_content=False):
        """
        Similar to :meth:`httplib.HTTPResponse.read`, but with two additional
        parameters: ``decode_content`` and ``cache_content``.

        :param amt:
            How much of the content to read. If specified, caching is skipped
            because it doesn't make sense to cache partial content as the full
            response.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.

        :param cache_content:
            If True, will save the returned data such that the same result is
            returned despite of the state of the underlying file object. This
            is useful if you want the ``.data`` property to continue working
            after having ``.read()`` the file object. (Overridden if ``amt`` is
            set.)
        """
        # Note: content-encoding value should be case-insensitive, per RFC 2616
        # Section 3.5
        content_encoding = self.headers.get('content-encoding', '').lower()
        if self._decoder is None:
            if content_encoding in self.CONTENT_DECODERS:
                self._decoder = _get_decoder(content_encoding)
        if decode_content is None:
            decode_content = self.decode_content

        if self._fp is None:
            return

        flush_decoder = False

        try:
            if amt is None:
                # cStringIO doesn't like amt=None
                data = self._fp.read()
                flush_decoder = True
            else:
                cache_content = False
                data = self._fp.read(amt)
                if amt != 0 and not data:  # Platform-specific: Buggy versions of Python.
                    # Close the connection when no data is returned
                    #
                    # This is redundant to what httplib/http.client _should_
                    # already do. However, versions of python released before
                    # December 15, 2012 (http://bugs.python.org/issue16298) do not
                    # properly close the connection in all cases. There is no harm
                    # in redundantly calling close.
                    self._fp.close()
                    flush_decoder = True

            self._fp_bytes_read += len(data)

            try:
                if decode_content and self._decoder:
                    data = self._decoder.decompress(data)
            except (IOError, zlib.error) as e:
                raise DecodeError(
                    "Received response with content-encoding: %s, but "
                    "failed to decode it." % content_encoding,
                    e)

            if flush_decoder and decode_content and self._decoder:
                # Drain anything the decoder buffered for the final chunk.
                buf = self._decoder.decompress(binary_type())
                data += buf + self._decoder.flush()

            if cache_content:
                self._body = data

            return data

        finally:
            # Once the original response is exhausted, the connection can
            # go back to the pool.
            if self._original_response and self._original_response.isclosed():
                self.release_conn()

    def stream(self, amt=2**16, decode_content=None):
        """
        A generator wrapper for the read() method. A call will block until
        ``amt`` bytes have been read from the connection or until the
        connection is closed.

        :param amt:
            How much of the content to read. The generator will return up to
            much data per iteration, but may return less. This is particularly
            likely when using compressed data. However, the empty string will
            never be returned.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.
        """
        while not is_fp_closed(self._fp):
            data = self.read(amt=amt, decode_content=decode_content)

            if data:
                yield data

    @classmethod
    def from_httplib(ResponseCls, r, **response_kw):
        """
        Given an :class:`httplib.HTTPResponse` instance ``r``, return a
        corresponding :class:`urllib3.response.HTTPResponse` object.

        Remaining parameters are passed to the HTTPResponse constructor, along
        with ``original_response=r``.
        """
        headers = HTTPHeaderDict()
        for k, v in r.getheaders():
            headers.add(k, v)

        # HTTPResponse objects in Python 3 don't have a .strict attribute
        strict = getattr(r, 'strict', 0)
        return ResponseCls(body=r,
                           headers=headers,
                           status=r.status,
                           version=r.version,
                           reason=r.reason,
                           strict=strict,
                           original_response=r,
                           **response_kw)

    # Backwards-compatibility methods for httplib.HTTPResponse
    def getheaders(self):
        return self.headers

    def getheader(self, name, default=None):
        return self.headers.get(name, default)

    # Overrides from io.IOBase
    def close(self):
        if not self.closed:
            self._fp.close()

    @property
    def closed(self):
        if self._fp is None:
            return True
        elif hasattr(self._fp, 'closed'):
            return self._fp.closed
        elif hasattr(self._fp, 'isclosed'):  # Python 2
            return self._fp.isclosed()
        else:
            return True

    def fileno(self):
        if self._fp is None:
            raise IOError("HTTPResponse has no file to get a fileno from")
        elif hasattr(self._fp, "fileno"):
            return self._fp.fileno()
        else:
            raise IOError("The file-like object this HTTPResponse is wrapped "
                          "around has no file descriptor")

    def flush(self):
        if self._fp is not None and hasattr(self._fp, 'flush'):
            return self._fp.flush()

    def readable(self):
        return True
| mit |
edwatt/REU2014 | usrp_info_and_test.py | 1 | 2853 | #!/usr/bin/env python
"""
Retrieve operating parameters of connected USRP and loop through the operating spectrum trasmitting a constant wave signal
"""
from gnuradio import gr
from gnuradio import analog
from gnuradio import uhd
from time import sleep
MAX_RATE = 1000e6
class build_block(gr.top_block):
    """GNU Radio flowgraph: constant-wave source connected to a USRP sink.

    On construction, probes for a UHD device, prints its frequency and
    gain ranges, and wires a constant signal source into the transmit
    chain.  If no device is found, ``self.u_tx`` is left as None so the
    caller can detect the condition.
    """

    def __init__(self):
        gr.top_block.__init__(self)

        args = ""  # only supporting USB USRPs for now

        # find uhd devices
        d = uhd.find_devices(uhd.device_addr(args))

        if d:
            uhd_type = d[0].get('type')
            print "\nFound '%s'" % uhd_type
        else:
            print "\nNo device found"
            self.u_tx = None
            return

        # check version of USRP and set num_channels
        if uhd_type == "usrp":
            tx_nchan = 2
            rx_nchan = 2
        else:
            tx_nchan = 1
            rx_nchan = 1

        # setup transmit chain (usrp sink, signal source)

        # usrp sink
        stream_args = uhd.stream_args('fc32', channels = range(tx_nchan))
        self.u_tx = uhd.usrp_sink(device_addr=args, stream_args=stream_args)
        self.u_tx.set_samp_rate(MAX_RATE)

        # analog signal source - sig_source_c(sampling_freq, waveform, wave_freq, ampl, offset=0)
        self.tx_src0 = analog.sig_source_c(self.u_tx.get_samp_rate(), analog.GR_CONST_WAVE, 0, 1.0, 0)

        # check and output freq range, gain range, num_channels

        # gain range and max
        tx_gain_range = self.u_tx.get_gain_range()
        tx_gain_min = tx_gain_range.start()
        tx_gain_max = tx_gain_range.stop()

        # freq range
        tx_freq_range = self.u_tx.get_freq_range()
        tx_freq_low = tx_freq_range.start()
        tx_freq_high = tx_freq_range.stop()
        tx_freq_mid = (tx_freq_low + tx_freq_high) / 2.0

        # output info
        print "\nDevice Info"
        print "\n\tType: %s" % uhd_type
        print "\n\tMin Freq: %d MHz" % (tx_freq_low/1e6)
        print "\tMax Freq: %d MHz" % (tx_freq_high/1e6)
        print "\tMid Freq: %d MHz" % (tx_freq_mid/1e6)
        print "\n\tMin Gain: %d dB" % tx_gain_min
        print "\tMax Gain: %d dB" % tx_gain_max

        # set initial parameters: center each channel 1 MHz apart at
        # mid-band, at maximum gain
        for i in xrange(tx_nchan):
            self.u_tx.set_center_freq(tx_freq_mid + i*1e6, i)
            self.u_tx.set_gain(tx_gain_max, i)

        # connect blocks
        self.connect(self.tx_src0, self.u_tx)
def main():
    """Build the flowgraph and sweep the CW carrier across the band.

    Hops the transmit center frequency from the low end of the device's
    range to the high end in 10 MHz steps, dwelling 0.3 s per hop.
    Ctrl-C exits cleanly at any point.
    """
    try:
        tb = build_block()
        tb.start()

        if tb.u_tx is not None:
            print "Transmission test will cycle once through the operating frequencies hopping 10 MHz at a time"
            raw_input("Press Enter to begin transmission test & Ctrl-C to exit\n")

            start = tb.u_tx.get_freq_range().start()
            stop = tb.u_tx.get_freq_range().stop()
            # +1 so the top of the range is included in the sweep.
            freq_hops = int((stop - start) / 10e6) + 1

            print "\nTransmit Frequencies:"
            channel = 0  # default to first channel
            for i in xrange(freq_hops):
                trans_freq = start + i * 10e6
                tb.u_tx.set_center_freq(trans_freq,channel)
                print "\n%d MHz" % (trans_freq/1e6)
                sleep(.3)

            print "\nTest Over"
            # NOTE(review): stop() is only reached when a device was
            # found; with no device the flowgraph is never stopped —
            # confirm this matches the intended behavior.
            tb.stop()
    except KeyboardInterrupt:
        pass
| gpl-3.0 |
maxfierke/android_kernel_samsung_aries | tools/perf/scripts/python/futex-contention.py | 11261 | 1486 | # futex contention
# (c) 2010, Arnaldo Carvalho de Melo <acme@redhat.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Translation of:
#
# http://sourceware.org/systemtap/wiki/WSFutexContention
#
# to perf python scripting.
#
# Measures futex contention
import os, sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + '/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from Util import *
process_names = {}
thread_thislock = {}
thread_blocktime = {}
lock_waits = {} # long-lived stats on (tid,lock) blockage elapsed time
process_names = {} # long-lived pid-to-execname mapping
def syscalls__sys_enter_futex(event, ctxt, cpu, s, ns, tid, comm,
                              nr, uaddr, op, val, utime, uaddr2, val3):
    """perf hook: a thread entered the futex syscall.

    Records the lock address and entry timestamp for FUTEX_WAIT calls
    so the matching sys_exit handler can compute the blocked time.
    FUTEX_CMD_MASK / FUTEX_WAIT come from perf's Util helper module.
    """
    cmd = op & FUTEX_CMD_MASK
    if cmd != FUTEX_WAIT:
        return  # we don't care about originators of WAKE events

    process_names[tid] = comm
    thread_thislock[tid] = uaddr
    thread_blocktime[tid] = nsecs(s, ns)
def syscalls__sys_exit_futex(event, ctxt, cpu, s, ns, tid, comm,
                             nr, ret):
    """perf hook: a thread returned from the futex syscall.

    If the thread was previously recorded blocking in FUTEX_WAIT,
    accumulate the elapsed blockage into the per-(tid, lock) stats and
    drop the per-thread bookkeeping.
    """
    # dict.has_key() was removed in Python 3; the 'in' operator is
    # equivalent and works under both Python 2 and 3.
    if tid in thread_blocktime:
        elapsed = nsecs(s, ns) - thread_blocktime[tid]
        add_stats(lock_waits, (tid, thread_thislock[tid]), elapsed)
        del thread_blocktime[tid]
        del thread_thislock[tid]
def trace_begin():
    """perf hook: called once when the trace session starts."""
    print "Press control+C to stop and show the summary"
def trace_end():
    """perf hook: called at session end; print per-(tid, lock) stats."""
    for (tid, lock) in lock_waits:
        # add_stats() accumulates (min, max, avg, count) tuples.
        min, max, avg, count = lock_waits[tid, lock]
        print "%s[%d] lock %x contended %d times, %d avg ns" % \
              (process_names[tid], tid, lock, count, avg)
| gpl-2.0 |
hogarthj/ansible | lib/ansible/modules/source_control/github_deploy_key.py | 50 | 10752 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: github_deploy_key
version_added: "2.4"
author: "Ali (@bincyber)"
short_description: Manages deploy keys for GitHub repositories.
description:
- "Adds or removes deploy keys for GitHub repositories. Supports authentication using username and password,
username and password and 2-factor authentication code (OTP), OAuth2 token, or personal access token."
options:
owner:
description:
- The name of the individual account or organization that owns the GitHub repository.
required: true
aliases: [ 'account', 'organization' ]
repo:
description:
- The name of the GitHub repository.
required: true
aliases: [ 'repository' ]
name:
description:
- The name for the deploy key.
required: true
aliases: [ 'title', 'label' ]
key:
description:
- The SSH public key to add to the repository as a deploy key.
required: true
read_only:
description:
- If C(true), the deploy key will only be able to read repository contents. Otherwise, the deploy key will be able to read and write.
type: bool
default: 'yes'
state:
description:
- The state of the deploy key.
default: "present"
choices: [ "present", "absent" ]
force:
description:
- If C(true), forcefully adds the deploy key by deleting any existing deploy key with the same public key or title.
type: bool
default: 'no'
username:
description:
- The username to authenticate with.
password:
description:
- The password to authenticate with. A personal access token can be used here in place of a password.
token:
description:
- The OAuth2 token or personal access token to authenticate with. Mutually exclusive with I(password).
otp:
description:
- The 6 digit One Time Password for 2-Factor Authentication. Required together with I(username) and I(password).
aliases: ['2fa_token']
requirements:
- python-requests
notes:
- "Refer to GitHub's API documentation here: https://developer.github.com/v3/repos/keys/."
'''
EXAMPLES = '''
# add a new read-only deploy key to a GitHub repository using basic authentication
- github_deploy_key:
owner: "johndoe"
repo: "example"
name: "new-deploy-key"
key: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDAwXxn7kIMNWzcDfou..."
read_only: yes
username: "johndoe"
password: "supersecretpassword"
# remove an existing deploy key from a GitHub repository
- github_deploy_key:
owner: "johndoe"
repository: "example"
name: "new-deploy-key"
key: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDAwXxn7kIMNWzcDfou..."
force: yes
username: "johndoe"
password: "supersecretpassword"
state: absent
# add a new deploy key to a GitHub repository, replace an existing key, use an OAuth2 token to authenticate
- github_deploy_key:
owner: "johndoe"
repository: "example"
name: "new-deploy-key"
key: "{{ lookup('file', '~/.ssh/github.pub') }}"
force: yes
token: "ABAQDAwXxn7kIMNWzcDfo..."
# re-add a deploy key to a GitHub repository but with a different name
- github_deploy_key:
owner: "johndoe"
repository: "example"
name: "replace-deploy-key"
key: "{{ lookup('file', '~/.ssh/github.pub') }}"
username: "johndoe"
password: "supersecretpassword"
# add a new deploy key to a GitHub repository using 2FA
- github_deploy_key:
owner: "johndoe"
repo: "example"
name: "new-deploy-key-2"
key: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDAwXxn7kIMNWzcDfou..."
username: "johndoe"
password: "supersecretpassword"
otp: 123456
'''
RETURN = '''
msg:
description: the status message describing what occurred
returned: always
type: string
sample: "Deploy key added successfully"
http_status_code:
description: the HTTP status code returned by the GitHub API
returned: failed
type: int
sample: 400
error:
description: the error message returned by the GitHub API
returned: failed
type: string
sample: "key is already in use"
id:
description: the key identifier assigned by GitHub for the deploy key
returned: changed
type: int
sample: 24381901
'''
import json
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.urls import fetch_url
class GithubDeployKey(object):
    """Thin client for GitHub's repository deploy-keys REST API.

    Wraps the list/create/delete endpoints and translates HTTP status
    codes directly into Ansible module exits (exit_json / fail_json),
    so most methods do not return on failure paths.
    """

    def __init__(self, module=None, url=None, state=None, username=None, password=None, token=None, otp=None):
        self.module = module
        self.url = url          # https://api.github.com/repos/<owner>/<repo>/keys
        self.state = state
        self.username = username
        self.password = password
        self.token = token
        self.otp = otp
        self.timeout = 5
        self.auth = None
        self.headers = None

        if username is not None and password is not None:
            # Basic auth: fetch_url() picks credentials up from module params.
            self.module.params['url_username'] = module.params['username']
            self.module.params['url_password'] = module.params['password']
            self.module.params['force_basic_auth'] = True

            if self.otp is not None:
                # 2-factor one-time password rides along as a header.
                self.headers = {"X-GitHub-OTP": self.otp}
        else:
            # OAuth2 / personal-access-token auth.
            self.headers = {"Authorization": "token {}".format(self.token)}

    def get_existing_key(self, key, title, force):
        """Return the id of a deploy key matching ``key`` (or ``title``
        when ``force``), or None; exits the module on HTTP errors."""
        resp, info = fetch_url(self.module, self.url, headers=self.headers, method="GET")

        status_code = info["status"]

        if status_code == 200:
            response_body = json.loads(resp.read())

            if response_body:
                for i in response_body:
                    existing_key_id = str(i["id"])
                    if i["key"].split() == key.split()[:2]:
                        # Same public key material (type + base64 blob);
                        # any trailing comment field is ignored.
                        return existing_key_id
                    elif i['title'] == title and force:
                        return existing_key_id
                # NOTE(review): when keys exist but none match, control
                # falls through here and returns None implicitly — for
                # state=absent the caller will then proceed to add the
                # key; confirm this is the intended behavior.
            else:
                if self.state == 'absent':
                    self.module.exit_json(changed=False, msg="Deploy key does not exist")
                else:
                    return None
        elif status_code == 401:
            self.module.fail_json(msg="Failed to connect to github.com due to invalid credentials", http_status_code=status_code)
        elif status_code == 404:
            self.module.fail_json(msg="GitHub repository does not exist", http_status_code=status_code)
        else:
            self.module.fail_json(msg="Failed to retrieve existing deploy keys", http_status_code=status_code)

    def add_new_key(self, request_body):
        """POST a new deploy key; always exits the module with the outcome."""
        resp, info = fetch_url(self.module, self.url, data=json.dumps(request_body), headers=self.headers, method="POST")

        status_code = info["status"]

        if status_code == 201:
            response_body = json.loads(resp.read())
            key_id = response_body["id"]
            self.module.exit_json(changed=True, msg="Deploy key successfully added", id=key_id)
        elif status_code == 401:
            self.module.fail_json(msg="Failed to connect to github.com due to invalid credentials", http_status_code=status_code)
        elif status_code == 404:
            self.module.fail_json(msg="GitHub repository does not exist", http_status_code=status_code)
        elif status_code == 422:
            # GitHub rejects duplicate keys with 422; treated as "no change".
            self.module.exit_json(changed=False, msg="Deploy key already exists")
        else:
            err = info["body"]
            self.module.fail_json(msg="Failed to add deploy key", http_status_code=status_code, error=err)

    def remove_existing_key(self, key_id):
        """DELETE deploy key ``key_id``; exits the module when state=absent
        (the force-replace path falls through so a new key can be added)."""
        resp, info = fetch_url(self.module, self.url + "/{}".format(key_id), headers=self.headers, method="DELETE")

        status_code = info["status"]

        if status_code == 204:
            if self.state == 'absent':
                self.module.exit_json(changed=True, msg="Deploy key successfully deleted", id=key_id)
        else:
            self.module.fail_json(msg="Failed to delete existing deploy key", id=key_id, http_status_code=status_code)
def main():
    """Ansible module entry point: add or remove a GitHub deploy key."""
    module = AnsibleModule(
        argument_spec=dict(
            owner=dict(required=True, type='str', aliases=['account', 'organization']),
            repo=dict(required=True, type='str', aliases=['repository']),
            name=dict(required=True, type='str', aliases=['title', 'label']),
            key=dict(required=True, type='str'),
            read_only=dict(required=False, type='bool', default=True),
            state=dict(default='present', choices=['present', 'absent']),
            force=dict(required=False, type='bool', default=False),
            username=dict(required=False, type='str'),
            password=dict(required=False, type='str', no_log=True),
            otp=dict(required=False, type='int', aliases=['2fa_token'], no_log=True),
            token=dict(required=False, type='str', no_log=True)
        ),
        mutually_exclusive=[
            ['password', 'token']
        ],
        required_together=[
            ['username', 'password'],
            ['otp', 'username', 'password']
        ],
        required_one_of=[
            ['username', 'token']
        ],
        supports_check_mode=True,
    )

    owner = module.params['owner']
    repo = module.params['repo']
    name = module.params['name']
    key = module.params['key']
    state = module.params['state']
    read_only = module.params.get('read_only', True)
    force = module.params.get('force', False)
    username = module.params.get('username', None)
    password = module.params.get('password', None)
    token = module.params.get('token', None)
    otp = module.params.get('otp', None)

    GITHUB_API_URL = "https://api.github.com/repos/{}/{}/keys".format(owner, repo)

    deploy_key = GithubDeployKey(module, GITHUB_API_URL, state, username, password, token, otp)

    if module.check_mode:
        # Report what would change without touching the repository.
        key_id = deploy_key.get_existing_key(key, name, force)
        if state == "present" and key_id is None:
            module.exit_json(changed=True)
        elif state == "present" and key_id is not None:
            module.exit_json(changed=False)

    # to forcefully modify an existing key, the existing key must be deleted first
    if state == 'absent' or force:
        key_id = deploy_key.get_existing_key(key, name, force)

        if key_id is not None:
            deploy_key.remove_existing_key(key_id)

    deploy_key.add_new_key({"title": name, "key": key, "read_only": read_only})


if __name__ == '__main__':
    main()
| gpl-3.0 |
Dave667/service | plugin.video.serialu.net/resources/lib/XMLTreeBuilder.py | 12 | 3857 | #
# ElementTree
# $Id: XMLTreeBuilder.py 3225 2007-08-27 21:32:08Z fredrik $
#
# an XML tree builder
#
# history:
# 2001-10-20 fl created
# 2002-05-01 fl added namespace support for xmllib
# 2002-07-27 fl require expat (1.5.2 code can use SimpleXMLTreeBuilder)
# 2002-08-17 fl use tag/attribute name memo cache
# 2002-12-04 fl moved XMLTreeBuilder to the ElementTree module
#
# Copyright (c) 1999-2004 by Fredrik Lundh. All rights reserved.
#
# fredrik@pythonware.com
# http://www.pythonware.com
#
# --------------------------------------------------------------------
# The ElementTree toolkit is
#
# Copyright (c) 1999-2007 by Fredrik Lundh
#
# By obtaining, using, and/or copying this software and/or its
# associated documentation, you agree that you have read, understood,
# and will comply with the following terms and conditions:
#
# Permission to use, copy, modify, and distribute this software and
# its associated documentation for any purpose and without fee is
# hereby granted, provided that the above copyright notice appears in
# all copies, and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Secret Labs AB or the author not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
# --------------------------------------------------------------------
##
# Tools to build element trees from XML files.
##
import ElementTree
##
# (obsolete) ElementTree builder for XML source data, based on the
# <b>expat</b> parser.
# <p>
# This class is an alias for ElementTree.XMLTreeBuilder. New code
# should use that version instead.
#
# @see elementtree.ElementTree
class TreeBuilder(ElementTree.XMLTreeBuilder):
    """Obsolete alias for ElementTree.XMLTreeBuilder.

    Kept only for backward compatibility; new code should use
    ElementTree.XMLTreeBuilder directly.
    """
    pass
##
# (experimental) An alternate builder that supports manipulation of
# new elements.
class FancyTreeBuilder(TreeBuilder):
    """(experimental) Tree builder with per-element hooks.

    Extends TreeBuilder with overridable start()/end() callbacks that
    fire as elements are opened and closed, and maintains a
    ``namespaces`` list of currently active (prefix, uri) declarations,
    most recent first.
    """

    def __init__(self, html=0):
        TreeBuilder.__init__(self, html)
        # Track namespace declarations reported by the expat parser so
        # the hooks can inspect which prefixes are currently in scope.
        self._parser.StartNamespaceDeclHandler = self._start_ns
        self._parser.EndNamespaceDeclHandler = self._end_ns
        self.namespaces = []

    def _start(self, tag, attrib_in):
        element = TreeBuilder._start(self, tag, attrib_in)
        self.start(element)

    def _start_list(self, tag, attrib_in):
        element = TreeBuilder._start_list(self, tag, attrib_in)
        self.start(element)

    def _end(self, tag):
        element = TreeBuilder._end(self, tag)
        self.end(element)

    def _start_ns(self, prefix, value):
        # Newest declaration first, so lookups see the innermost scope.
        self.namespaces.insert(0, (prefix, value))

    def _end_ns(self, prefix):
        # Declarations must close in LIFO order; anything else means the
        # bookkeeping above is out of sync with the parser.
        assert self.namespaces.pop(0)[0] == prefix, "implementation confused"

    def start(self, element):
        """Hook called when a new element has been opened.

        The element's tag and attributes are set, but it has no children
        yet and its text/tail attributes are still empty.  May inspect
        the ``namespaces`` attribute.
        """
        pass

    def end(self, element):
        """Hook called when an element has been closed.

        May inspect the ``namespaces`` attribute.
        """
        pass
| gpl-2.0 |
paxos1977/QuickFixLogFixup | QuickFixLogViewer.py | 1 | 52083 | import sublime, sublime_plugin
import re
def multiple_replace(dict, text):
    """Rewrite a FIX log so tag numbers become human-readable names.

    Splits *text* on the SOH delimiter (\\x01) so each FIX field lands on
    its own line, then replaces any leading tag number found in *dict*
    with its field name, keeping the '=' separator.

    Args:
        dict: mapping of FIX tag number (str) to field name (str).
            (Name shadows the builtin; kept for caller compatibility.)
        text: raw FIX log text with SOH-delimited fields.

    Returns:
        The transformed text, one field per line.
    """
    fields = text.split("\x01")
    # Guard: an empty mapping would compile to "^()=", which matches an
    # empty group on fields starting with '=' and then raises KeyError.
    if not dict:
        return "\n".join(fields)
    # Anchor at the start of the field and require a trailing '=' so
    # overlapping tags ("1" vs "10") cannot be confused: the regex only
    # accepts an alternative when '=' immediately follows it.
    pattern = re.compile("^(%s)=" % "|".join(map(re.escape, dict.keys())))
    # group(1) is the matched tag number; swap in its name, keep the '='.
    renamed = [pattern.sub(lambda m: dict[m.group(1)] + "=", field)
               for field in fields]
    return "\n".join(renamed)
class QuickFixLogFixupCommand(sublime_plugin.TextCommand):
""" When run on a QuickFix log, it will split on SOH fields,
then replace field numbers with names.
This makes things much more readable for a human."""
def run(self, edit):
""" Run the plugin"""
#self.view.insert(edit, 0, "Hello, World!")
documentRange = sublime.Region(0, self.view.size())
text = self.view.substr(documentRange)
text = multiple_replace(self.field_map, text)
self.view.replace(edit, documentRange, text)
""" The map of FIX field IDs to string descriptions."""
field_map = {
"1" : "Account"
, "10" : "CheckSum"
, "100" : "ExDestination"
, "1000": "UnderlyingTimeUnit"
, "1001": "LegTimeUnit"
, "1002": "AllocMethod"
, "1003": "TradeID"
, "1005": "SideTradeReportID"
, "1006": "SideFillStationCd"
, "1007": "SideReasonCd"
, "1008": "SideTrdSubTyp"
, "1009": "SideLastQty"
, "1009": "SideQty"
, "1011": "MessageEventSource"
, "1012": "SideTrdRegTimestamp"
, "1013": "SideTrdRegTimestampType"
, "1014": "SideTrdRegTimestampSrc"
, "1015": "AsOfIndicator"
, "1016": "NoSideTrdRegTS"
, "1017": "LegOptionRatio"
, "1018": "NoInstrumentParties"
, "1019": "InstrumentPartyID"
, "102" : "CxlRejReason"
, "1020": "TradeVolume"
, "1021": "MDBookType"
, "1022": "MDFeedType"
, "1023": "MDPriceLevel"
, "1024": "MDOriginType"
, "1025": "FirstPx"
, "1026": "MDEntrySpotRate"
, "1027": "MDEntryForwardPoints"
, "1028": "ManualOrderIndicator"
, "1029": "CustDirectedOrder"
, "103" : "OrdRejReason"
, "1030": "ReceivedDeptID"
, "1031": "CustOrderHandlingInst"
, "1032": "OrderHandlingInstSource"
, "1033": "DeskType"
, "1034": "DeskTypeSource"
, "1035": "DeskOrderHandlingInst"
, "1036": "ExecAckStatus"
, "1037": "UnderlyingDeliveryAmount"
, "1038": "UnderlyingCapValue"
, "1039": "UnderlyingSettlMethod"
, "104" : "IOIQualifier"
, "1040": "SecondaryTradeID"
, "1041": "FirmTradeID"
, "1042": "SecondaryFirmTradeID"
, "1043": "CollApplType"
, "1044": "UnderlyingAdjustedQuantity"
, "1045": "UnderlyingFXRate"
, "1046": "UnderlyingFXRateCalc"
, "1047": "AllocPositionEffect"
, "1048": "DealingCapacity"
, "1049": "InstrmtAssignmentMethod"
, "105" : "WaveNo"
, "1050": "InstrumentPartyIDSource"
, "1051": "InstrumentPartyRole"
, "1052": "NoInstrumentPartySubIDs"
, "1053": "InstrumentPartySubID"
, "1054": "InstrumentPartySubIDType"
, "1055": "PositionCurrency"
, "1056": "CalculatedCcyLastQty"
, "1057": "AggressorIndicator"
, "1058": "NoUndlyInstrumentParties"
, "1059": "UnderlyingInstrumentPartyID"
, "1059": "UndlyInstrumentPartyID"
, "106" : "Issuer"
, "1060": "UnderlyingInstrumentPartyIDSource"
, "1060": "UndlyInstrumentPartyIDSource"
, "1061": "UnderlyingInstrumentPartyRole"
, "1061": "UndlyInstrumentPartyRole"
, "1062": "NoUndlyInstrumentPartySubIDs"
, "1063": "UnderlyingInstrumentPartySubID"
, "1063": "UndlyInstrumentPartySubID"
, "1064": "UnderlyingInstrumentPartySubIDType"
, "1064": "UndlyInstrumentPartySubIDType"
, "1065": "BidSwapPoints"
, "1066": "OfferSwapPoints"
, "1067": "LegBidForwardPoints"
, "1068": "LegOfferForwardPoints"
, "1069": "SwapPoints"
, "107" : "SecurityDesc"
, "1070": "MDQuoteType"
, "1071": "LastSwapPoints"
, "1072": "SideGrossTradeAmt"
, "1073": "LegLastForwardPoints"
, "1074": "LegCalculatedCcyLastQty"
, "1075": "LegGrossTradeAmt"
, "1079": "MaturityTime"
, "108" : "HeartBtInt"
, "1080": "RefOrderID"
, "1081": "RefOrderIDSource"
, "1082": "SecondaryDisplayQty"
, "1083": "DisplayWhen"
, "1084": "DisplayMethod"
, "1085": "DisplayLowQty"
, "1086": "DisplayHighQty"
, "1087": "DisplayMinIncr"
, "1088": "RefreshQty"
, "1089": "MatchIncrement"
, "109" : "ClientID"
, "1090": "MaxPriceLevels"
, "1091": "PreTradeAnonymity"
, "1092": "PriceProtectionScope"
, "1093": "LotType"
, "1094": "PegPriceType"
, "1095": "PeggedRefPrice"
, "1096": "PegSecurityIDSource"
, "1097": "PegSecurityID"
, "1098": "PegSymbol"
, "1099": "PegSecurityDesc"
, "11" : "ClOrdID"
, "110" : "MinQty"
, "1100": "TriggerType"
, "1101": "TriggerAction"
, "1102": "TriggerPrice"
, "1103": "TriggerSymbol"
, "1104": "TriggerSecurityID"
, "1105": "TriggerSecurityIDSource"
, "1106": "TriggerSecurityDesc"
, "1107": "TriggerPriceType"
, "1108": "TriggerPriceTypeScope"
, "1109": "TriggerPriceDirection"
, "111" : "MaxFloor"
, "1110": "TriggerNewPrice"
, "1111": "TriggerOrderType"
, "1112": "TriggerNewQty"
, "1113": "TriggerTradingSessionID"
, "1114": "TriggerTradingSessionSubID"
, "1115": "OrderCategory"
, "1116": "NoRootPartyIDs"
, "1117": "RootPartyID"
, "1118": "RootPartyIDSource"
, "1119": "RootPartyRole"
, "112" : "TestReqID"
, "1120": "NoRootPartySubIDs"
, "1121": "RootPartySubID"
, "1122": "RootPartySubIDType"
, "1123": "TradeHandlingInstr"
, "1124": "OrigTradeHandlingInstr"
, "1125": "OrigTradeDate"
, "1126": "OrigTradeID"
, "1127": "OrigSecondaryTradeID"
, "1128": "ApplVerID"
, "1129": "CstmApplVerID"
, "113" : "ReportToExch"
, "1130": "RefApplVerID"
, "1131": "RefCstmApplVerID"
, "1132": "TZTransactTime"
, "1133": "ExDestinationIDSource"
, "1134": "ReportedPxDiff"
, "1135": "RptSys"
, "1136": "AllocClearingFeeIndicator"
, "1137": "DefaultApplVerID"
, "1138": "DisplayQty"
, "1139": "ExchangeSpecialInstructions"
, "114" : "LocateReqd"
, "1140": "MaxTradeVol"
, "1141": "NoMDFeedTypes"
, "1142": "MatchAlgorithm"
, "1143": "MaxPriceVariation"
, "1144": "ImpliedMarketIndicator"
, "1145": "EventTime"
, "1146": "MinPriceIncrementAmount"
, "1147": "UnitOfMeasureQty"
, "1148": "LowLimitPrice"
, "1149": "HighLimitPrice"
, "115" : "OnBehalfOfCompID"
, "1150": "TradingReferencePrice"
, "1151": "SecurityGroup"
, "1152": "LegNumber"
, "1153": "SettlementCycleNo"
, "1154": "SideCurrency"
, "1155": "SideSettlCurrency"
, "1156": "ApplExtID"
, "1157": "CcyAmt"
, "1158": "NoSettlDetails"
, "1159": "SettlObligMode"
, "116" : "OnBehalfOfSubID"
, "1160": "SettlObligMsgID"
, "1161": "SettlObligID"
, "1162": "SettlObligTransType"
, "1163": "SettlObligRefID"
, "1164": "SettlObligSource"
, "1165": "NoSettlOblig"
, "1166": "QuoteMsgID"
, "1167": "QuoteEntryStatus"
, "1168": "TotNoCxldQuotes"
, "1169": "TotNoAccQuotes"
, "117" : "QuoteID"
, "1170": "TotNoRejQuotes"
, "1171": "PrivateQuote"
, "1172": "RespondentType"
, "1173": "MDSubBookType"
, "1174": "SecurityTradingEvent"
, "1175": "NoStatsIndicators"
, "1176": "StatsType"
, "1177": "NoOfSecSizes"
, "1178": "MDSecSizeType"
, "1179": "MDSecSize"
, "118" : "NetMoney"
, "1180": "ApplID"
, "1181": "ApplSeqNum"
, "1182": "ApplBegSeqNum"
, "1183": "ApplEndSeqNum"
, "1184": "SecurityXMLLen"
, "1185": "SecurityXML"
, "1186": "SecurityXMLSchema"
, "1187": "RefreshIndicator"
, "1188": "Volatility"
, "1189": "TimeToExpiration"
, "119" : "SettlCurrAmt"
, "1190": "RiskFreeRate"
, "1191": "PriceUnitOfMeasure"
, "1192": "PriceUnitOfMeasureQty"
, "1193": "SettlMethod"
, "1194": "ExerciseStyle"
, "1195": "OptPayAmount"
, "1195": "OptPayoutAmount"
, "1196": "PriceQuoteMethod"
, "1197": "FuturesValuationMethod"
, "1197": "ValuationMethod"
, "1198": "ListMethod"
, "1199": "CapPrice"
, "12" : "Commission"
, "120" : "SettlCurrency"
, "1200": "FloorPrice"
, "1201": "NoStrikeRules"
, "1202": "StartStrikePxRange"
, "1203": "EndStrikePxRange"
, "1204": "StrikeIncrement"
, "1205": "NoTickRules"
, "1206": "StartTickPriceRange"
, "1207": "EndTickPriceRange"
, "1208": "TickIncrement"
, "1209": "TickRuleType"
, "121" : "ForexReq"
, "1210": "NestedInstrAttribType"
, "1211": "NestedInstrAttribValue"
, "1212": "LegMaturityTime"
, "1213": "UnderlyingMaturityTime"
, "1214": "DerivativeSymbol"
, "1215": "DerivativeSymbolSfx"
, "1216": "DerivativeSecurityID"
, "1217": "DerivativeSecurityIDSource"
, "1218": "NoDerivativeSecurityAltID"
, "1219": "DerivativeSecurityAltID"
, "122" : "OrigSendingTime"
, "1220": "DerivativeSecurityAltIDSource"
, "1221": "SecondaryLowLimitPrice"
, "1222": "MaturityRuleID"
, "1223": "StrikeRuleID"
, "1224": "LegUnitOfMeasureQty"
, "1225": "DerivativeOptPayAmount"
, "1226": "EndMaturityMonthYear"
, "1227": "ProductComplex"
, "1228": "DerivativeProductComplex"
, "1229": "MaturityMonthYearIncrement"
, "123" : "GapFillFlag"
, "1230": "SecondaryHighLimitPrice"
, "1231": "MinLotSize"
, "1232": "NoExecInstRules"
, "1234": "NoLotTypeRules"
, "1235": "NoMatchRules"
, "1236": "NoMaturityRules"
, "1237": "NoOrdTypeRules"
, "1239": "NoTimeInForceRules"
, "124" : "NoExecs"
, "1240": "SecondaryTradingReferencePrice"
, "1241": "StartMaturityMonthYear"
, "1242": "FlexProductEligibilityIndicator"
, "1243": "DerivFlexProductEligibilityIndicator"
, "1244": "FlexibleIndicator"
, "1245": "TradingCurrency"
, "1246": "DerivativeProduct"
, "1247": "DerivativeSecurityGroup"
, "1248": "DerivativeCFICode"
, "1249": "DerivativeSecurityType"
, "125" : "CxlType"
, "1250": "DerivativeSecuritySubType"
, "1251": "DerivativeMaturityMonthYear"
, "1252": "DerivativeMaturityDate"
, "1253": "DerivativeMaturityTime"
, "1254": "DerivativeSettleOnOpenFlag"
, "1255": "DerivativeInstrmtAssignmentMethod"
, "1256": "DerivativeSecurityStatus"
, "1257": "DerivativeInstrRegistry"
, "1258": "DerivativeCountryOfIssue"
, "1259": "DerivativeStateOrProvinceOfIssue"
, "126" : "ExpireTime"
, "1260": "DerivativeLocaleOfIssue"
, "1261": "DerivativeStrikePrice"
, "1262": "DerivativeStrikeCurrency"
, "1263": "DerivativeStrikeMultiplier"
, "1264": "DerivativeStrikeValue"
, "1265": "DerivativeOptAttribute"
, "1266": "DerivativeContractMultiplier"
, "1267": "DerivativeMinPriceIncrement"
, "1268": "DerivativeMinPriceIncrementAmount"
, "1269": "DerivativeUnitOfMeasure"
, "127" : "DKReason"
, "1270": "DerivativeUnitOfMeasureQty"
, "1271": "DerivativeTimeUnit"
, "1272": "DerivativeSecurityExchange"
, "1273": "DerivativePositionLimit"
, "1274": "DerivativeNTPositionLimit"
, "1275": "DerivativeIssuer"
, "1276": "DerivativeIssueDate"
, "1277": "DerivativeEncodedIssuerLen"
, "1278": "DerivativeEncodedIssuer"
, "1279": "DerivativeSecurityDesc"
, "128" : "DeliverToCompID"
, "1280": "DerivativeEncodedSecurityDescLen"
, "1281": "DerivativeEncodedSecurityDesc"
, "1282": "DerivativeSecurityXMLLen"
, "1283": "DerivativeSecurityXML"
, "1284": "DerivativeSecurityXMLSchema"
, "1285": "DerivativeContractSettlMonth"
, "1286": "NoDerivativeEvents"
, "1287": "DerivativeEventType"
, "1288": "DerivativeEventDate"
, "1289": "DerivativeEventTime"
, "129" : "DeliverToSubID"
, "1290": "DerivativeEventPx"
, "1291": "DerivativeEventText"
, "1292": "NoDerivativeInstrumentParties"
, "1293": "DerivativeInstrumentPartyID"
, "1294": "DerivativeInstrumentPartyIDSource"
, "1295": "DerivativeInstrumentPartyRole"
, "1296": "NoDerivativeInstrumentPartySubIDs"
, "1297": "DerivativeInstrumentPartySubID"
, "1298": "DerivativeInstrumentPartySubIDType"
, "1299": "DerivativeExerciseStyle"
, "13" : "CommType"
, "130" : "IOINaturalFlag"
, "1300": "MarketSegmentID"
, "1301": "MarketID"
, "1302": "MaturityMonthYearIncrementUnits"
, "1303": "MaturityMonthYearFormat"
, "1304": "StrikeExerciseStyle"
, "1305": "SecondaryPriceLimitType"
, "1306": "PriceLimitType"
, "1307": "DerivativeSecurityListRequestType"
, "1308": "ExecInstValue"
, "1309": "NoTradingSessionRules"
, "131" : "QuoteReqID"
, "1310": "NoMarketSegments"
, "1311": "NoDerivativeInstrAttrib"
, "1312": "NoNestedInstrAttrib"
, "1313": "DerivativeInstrAttribType"
, "1314": "DerivativeInstrAttribValue"
, "1315": "DerivativePriceUnitOfMeasure"
, "1316": "DerivativePriceUnitOfMeasureQty"
, "1317": "DerivativeSettlMethod"
, "1318": "DerivativePriceQuoteMethod"
, "1319": "DerivativeFuturesValuationMethod"
, "1319": "DerivativeValuationMethod"
, "132" : "BidPx"
, "1320": "DerivativeListMethod"
, "1321": "DerivativeCapPrice"
, "1322": "DerivativeFloorPrice"
, "1323": "DerivativePutOrCall"
, "1324": "ListUpdateAction"
, "1325": "ParentMktSegmID"
, "1326": "TradingSessionDesc"
, "1327": "TradSesUpdateAction"
, "1328": "RejectText"
, "1329": "FeeMultiplier"
, "133" : "OfferPx"
, "1330": "UnderlyingLegSymbol"
, "1331": "UnderlyingLegSymbolSfx"
, "1332": "UnderlyingLegSecurityID"
, "1333": "UnderlyingLegSecurityIDSource"
, "1334": "NoUnderlyingLegSecurityAltID"
, "1335": "UnderlyingLegSecurityAltID"
, "1336": "UnderlyingLegSecurityAltIDSource"
, "1337": "UnderlyingLegSecurityType"
, "1338": "UnderlyingLegSecuritySubType"
, "1339": "UnderlyingLegMaturityMonthYear"
, "134" : "BidSize"
, "1340": "UnderlyingLegStrikePrice"
, "1341": "UnderlyingLegSecurityExchange"
, "1342": "NoOfLegUnderlyings"
, "1343": "UnderlyingLegPutOrCall"
, "1344": "UnderlyingLegCFICode"
, "1345": "UnderlyingLegMaturityDate"
, "1346": "ApplReqID"
, "1347": "ApplReqType"
, "1348": "ApplResponseType"
, "1349": "ApplTotalMessageCount"
, "135" : "OfferSize"
, "1350": "ApplLastSeqNum"
, "1351": "NoApplIDs"
, "1352": "ApplResendFlag"
, "1353": "ApplResponseID"
, "1354": "ApplResponseError"
, "1355": "RefApplID"
, "1356": "ApplReportID"
, "1357": "RefApplLastSeqNum"
, "1358": "LegPutOrCall"
, "1359": "EncodedSymbolLen"
, "136" : "NoMiscFees"
, "1360": "EncodedSymbol"
, "1361": "TotNoFills"
, "1362": "NoFills"
, "1363": "FillExecID"
, "1364": "FillPx"
, "1365": "FillQty"
, "1366": "LegAllocID"
, "1367": "LegAllocSettlCurrency"
, "1368": "TradSesEvent"
, "1369": "MassActionReportID"
, "137" : "MiscFeeAmt"
, "1370": "NoNotAffectedOrders"
, "1371": "NotAffectedOrderID"
, "1372": "NotAffOrigClOrdID"
, "1373": "MassActionType"
, "1374": "MassActionScope"
, "1375": "MassActionResponse"
, "1376": "MassActionRejectReason"
, "1377": "MultilegModel"
, "1378": "MultilegPriceMethod"
, "1379": "LegVolatility"
, "138" : "MiscFeeCurr"
, "1380": "DividendYield"
, "1381": "LegDividendYield"
, "1382": "CurrencyRatio"
, "1383": "LegCurrencyRatio"
, "1384": "LegExecInst"
, "1385": "ContingencyType"
, "1386": "ListRejectReason"
, "1387": "NoTrdRepIndicators"
, "1388": "TrdRepPartyRole"
, "1389": "TrdRepIndicator"
, "139" : "MiscFeeType"
, "1390": "TradePublishIndicator"
, "1391": "UnderlyingLegOptAttribute"
, "1392": "UnderlyingLegSecurityDesc"
, "1393": "MarketReqID"
, "1394": "MarketReportID"
, "1395": "MarketUpdateAction"
, "1396": "MarketSegmentDesc"
, "1397": "EncodedMktSegmDescLen"
, "1398": "EncodedMktSegmDesc"
, "1399": "ApplNewSeqNum"
, "14" : "CumQty"
, "140" : "PrevClosePx"
, "1400": "EncryptedPasswordMethod"
, "1401": "EncryptedPasswordLen"
, "1402": "EncryptedPassword"
, "1403": "EncryptedNewPasswordLen"
, "1404": "EncryptedNewPassword"
, "1405": "UnderlyingLegMaturityTime"
, "1406": "RefApplExtID"
, "1407": "DefaultApplExtID"
, "1408": "DefaultCstmApplVerID"
, "1409": "SessionStatus"
, "141" : "ResetSeqNumFlag"
, "1410": "DefaultVerIndicator"
, "1411": "Nested4PartySubIDType"
, "1412": "Nested4PartySubID"
, "1413": "NoNested4PartySubIDs"
, "1414": "NoNested4PartyIDs"
, "1415": "Nested4PartyID"
, "1416": "Nested4PartyIDSource"
, "1417": "Nested4PartyRole"
, "1418": "LegLastQty"
, "1419": "UnderlyingExerciseStyle"
, "142" : "SenderLocationID"
, "1420": "LegExerciseStyle"
, "1421": "LegPriceUnitOfMeasure"
, "1422": "LegPriceUnitOfMeasureQty"
, "1423": "UnderlyingUnitOfMeasureQty"
, "1424": "UnderlyingPriceUnitOfMeasure"
, "1425": "UnderlyingPriceUnitOfMeasureQty"
, "1426": "ApplReportType"
, "1427": "SideExecID"
, "1428": "OrderDelay"
, "1429": "OrderDelayUnit"
, "143" : "TargetLocationID"
, "1430": "VenueType"
, "1431": "RefOrdIDReason"
, "1432": "OrigCustOrderCapacity"
, "1433": "RefApplReqID"
, "1434": "ModelType"
, "1435": "ContractMultiplierUnit"
, "1436": "LegContractMultiplierUnit"
, "1437": "UnderlyingContractMultiplierUnit"
, "1438": "DerivativeContractMultiplierUnit"
, "1439": "FlowScheduleType"
, "144" : "OnBehalfOfLocationID"
, "1440": "LegFlowScheduleType"
, "1441": "UnderlyingFlowScheduleType"
, "1442": "DerivativeFlowScheduleType"
, "1443": "FillLiquidityInd"
, "1444": "SideLiquidityInd"
, "1445": "NoRateSources"
, "1446": "RateSource"
, "1447": "RateSourceType"
, "1448": "ReferencePage"
, "1449": "RestructuringType"
, "145" : "DeliverToLocationID"
, "1450": "Seniority"
, "1451": "NotionalPercentageOutstanding"
, "1452": "OriginalNotionalPercentageOutstanding"
, "1453": "UnderlyingRestructuringType"
, "1454": "UnderlyingSeniority"
, "1455": "UnderlyingNotionalPercentageOutstanding"
, "1456": "UnderlyingOriginalNotionalPercentageOutstanding"
, "1457": "AttachmentPoint"
, "1458": "DetachmentPoint"
, "1459": "UnderlyingAttachmentPoint"
, "146" : "NoRelatedSym"
, "1460": "UnderlyingDetachmentPoint"
, "1461": "NoTargetPartyIDs"
, "1462": "TargetPartyID"
, "1463": "TargetPartyIDSource"
, "1464": "TargetPartyRole"
, "1465": "SecurityListID"
, "1466": "SecurityListRefID"
, "1467": "SecurityListDesc"
, "1468": "EncodedSecurityListDescLen"
, "1469": "EncodedSecurityListDesc"
, "147" : "Subject"
, "1470": "SecurityListType"
, "1471": "SecurityListTypeSource"
, "1472": "NewsID"
, "1473": "NewsCategory"
, "1474": "LanguageCode"
, "1475": "NoNewsRefIDs"
, "1476": "NewsRefID"
, "1477": "NewsRefType"
, "1478": "StrikePriceDeterminationMethod"
, "1479": "StrikePriceBoundaryMethod"
, "148" : "Headline"
, "1480": "StrikePriceBoundaryPrecision"
, "1481": "UnderlyingPriceDeterminationMethod"
, "1482": "OptPayoutType"
, "1483": "NoComplexEvents"
, "1484": "ComplexEventType"
, "1485": "ComplexOptPayoutAmount"
, "1486": "ComplexEventPrice"
, "1487": "ComplexEventPriceBoundaryMethod"
, "1488": "ComplexEventPriceBoundaryPrecision"
, "1489": "ComplexEventPriceTimeType"
, "149" : "URLLink"
, "1490": "ComplexEventCondition"
, "1491": "NoComplexEventDates"
, "1492": "ComplexEventStartDate"
, "1493": "ComplexEventEndDate"
, "1494": "NoComplexEventTimes"
, "1495": "ComplexEventStartTime"
, "1496": "ComplexEventEndTime"
, "1497": "StreamAsgnReqID"
, "1498": "StreamAsgnReqType"
, "1499": "NoAsgnReqs"
, "15" : "Currency"
, "150" : "ExecType"
, "1500": "MDStreamID"
, "1501": "StreamAsgnRptID"
, "1502": "StreamAsgnRejReason"
, "1503": "StreamAsgnAckType"
, "1504": "RelSymTransactTime"
, "1505": "PartyDetailsListRequestID"
, "1506": "NoPartyListResponseTypes"
, "1507": "PartyListResponseType"
, "1508": "NoRequestedPartyRoles"
, "1509": "RequestedPartyRole"
, "151" : "LeavesQty"
, "1510": "PartyDetailsListReportID"
, "1511": "PartyDetailsRequestResult"
, "1512": "TotNoPartyList"
, "1513": "NoPartyList"
, "1514": "NoPartyRelationships"
, "1515": "PartyRelationship"
, "1516": "NoPartyAltIDs"
, "1517": "PartyAltID"
, "1518": "PartyAltIDSource"
, "1519": "NoPartyAltSubIDs"
, "152" : "CashOrderQty"
, "1520": "PartyAltSubID"
, "1521": "PartyAltSubIDType"
, "1522": "NoContextPartyIDs"
, "1523": "ContextPartyID"
, "1524": "ContextPartyIDSource"
, "1525": "ContextPartyRole"
, "1526": "NoContextPartySubIDs"
, "1527": "ContextPartySubID"
, "1528": "ContextPartySubIDType"
, "1529": "NoRiskLimits"
, "153" : "AllocAvgPx"
, "1530": "RiskLimitType"
, "1531": "RiskLimitAmount"
, "1532": "RiskLimitCurrency"
, "1533": "RiskLimitPlatform"
, "1534": "NoRiskInstruments"
, "1535": "RiskInstrumentOperator"
, "1536": "RiskSymbol"
, "1537": "RiskSymbolSfx"
, "1538": "RiskSecurityID"
, "1539": "RiskSecurityIDSource"
, "154" : "AllocNetMoney"
, "1540": "NoRiskSecurityAltID"
, "1541": "RiskSecurityAltID"
, "1542": "RiskSecurityAltIDSource"
, "1543": "RiskProduct"
, "1544": "RiskProductComplex"
, "1545": "RiskSecurityGroup"
, "1546": "RiskCFICode"
, "1547": "RiskSecurityType"
, "1548": "RiskSecuritySubType"
, "1549": "RiskMaturityMonthYear"
, "155" : "SettlCurrFxRate"
, "1550": "RiskMaturityTime"
, "1551": "RiskRestructuringType"
, "1552": "RiskSeniority"
, "1553": "RiskPutOrCall"
, "1554": "RiskFlexibleIndicator"
, "1555": "RiskCouponRate"
, "1556": "RiskSecurityDesc"
, "1557": "RiskInstrumentSettlType"
, "1558": "RiskInstrumentMultiplier"
, "1559": "NoRiskWarningLevels"
, "156" : "SettlCurrFxRateCalc"
, "1560": "RiskWarningLevelPercent"
, "1561": "RiskWarningLevelName"
, "1562": "NoRelatedPartyIDs"
, "1563": "RelatedPartyID"
, "1564": "RelatedPartyIDSource"
, "1565": "RelatedPartyRole"
, "1566": "NoRelatedPartySubIDs"
, "1567": "RelatedPartySubID"
, "1568": "RelatedPartySubIDType"
, "1569": "NoRelatedPartyAltIDs"
, "157" : "NumDaysInterest"
, "1570": "RelatedPartyAltID"
, "1571": "RelatedPartyAltIDSource"
, "1572": "NoRelatedPartyAltSubIDs"
, "1573": "RelatedPartyAltSubID"
, "1574": "RelatedPartyAltSubIDType"
, "1575": "NoRelatedContextPartyIDs"
, "1576": "RelatedContextPartyID"
, "1577": "RelatedContextPartyIDSource"
, "1578": "RelatedContextPartyRole"
, "1579": "NoRelatedContextPartySubIDs"
, "158" : "AccruedInterestRate"
, "1580": "RelatedContextPartySubID"
, "1581": "RelatedContextPartySubIDType"
, "1582": "NoRelationshipRiskLimits"
, "1583": "RelationshipRiskLimitType"
, "1584": "RelationshipRiskLimitAmount"
, "1585": "RelationshipRiskLimitCurrency"
, "1586": "RelationshipRiskLimitPlatform"
, "1587": "NoRelationshipRiskInstruments"
, "1588": "RelationshipRiskInstrumentOperator"
, "1589": "RelationshipRiskSymbol"
, "159" : "AccruedInterestAmt"
, "1590": "RelationshipRiskSymbolSfx"
, "1591": "RelationshipRiskSecurityID"
, "1592": "RelationshipRiskSecurityIDSource"
, "1593": "NoRelationshipRiskSecurityAltID"
, "1594": "RelationshipRiskSecurityAltID"
, "1595": "RelationshipRiskSecurityAltIDSource"
, "1596": "RelationshipRiskProduct"
, "1597": "RelationshipRiskProductComplex"
, "1598": "RelationshipRiskSecurityGroup"
, "1599": "RelationshipRiskCFICode"
, "16" : "EndSeqNo"
, "160" : "SettlInstMode"
, "1600": "RelationshipRiskSecurityType"
, "1601": "RelationshipRiskSecuritySubType"
, "1602": "RelationshipRiskMaturityMonthYear"
, "1603": "RelationshipRiskMaturityTime"
, "1604": "RelationshipRiskRestructuringType"
, "1605": "RelationshipRiskSeniority"
, "1606": "RelationshipRiskPutOrCall"
, "1607": "RelationshipRiskFlexibleIndicator"
, "1608": "RelationshipRiskCouponRate"
, "1609": "RelationshipRiskSecurityExchange"
, "161" : "AllocText"
, "1610": "RelationshipRiskSecurityDesc"
, "1611": "RelationshipRiskInstrumentSettlType"
, "1612": "RelationshipRiskInstrumentMultiplier"
, "1613": "NoRelationshipRiskWarningLevels"
, "1614": "RelationshipRiskWarningLevelPercent"
, "1615": "RelationshipRiskWarningLevelName"
, "1616": "RiskSecurityExchange"
, "1617": "StreamAsgnType"
, "1618": "RelationshipRiskEncodedSecurityDescLen"
, "1619": "RelationshipRiskEncodedSecurityDesc"
, "162" : "SettlInstID"
, "1620": "RiskEncodedSecurityDescLen"
, "1621": "RiskEncodedSecurityDesc"
, "163" : "SettlInstTransType"
, "164" : "EmailThreadID"
, "165" : "SettlInstSource"
, "166" : "SettlLocation"
, "167" : "SecurityType"
, "168" : "EffectiveTime"
, "169" : "StandInstDbType"
, "17" : "ExecID"
, "170" : "StandInstDbName"
, "171" : "StandInstDbID"
, "172" : "SettlDeliveryType"
, "173" : "SettlDepositoryCode"
, "174" : "SettlBrkrCode"
, "175" : "SettlInstCode"
, "176" : "SecuritySettlAgentName"
, "177" : "SecuritySettlAgentCode"
, "178" : "SecuritySettlAgentAcctNum"
, "179" : "SecuritySettlAgentAcctName"
, "18" : "ExecInst"
, "180" : "SecuritySettlAgentContactName"
, "181" : "SecuritySettlAgentContactPhone"
, "182" : "CashSettlAgentName"
, "183" : "CashSettlAgentCode"
, "184" : "CashSettlAgentAcctNum"
, "185" : "CashSettlAgentAcctName"
, "186" : "CashSettlAgentContactName"
, "187" : "CashSettlAgentContactPhone"
, "188" : "BidSpotRate"
, "189" : "BidForwardPoints"
, "19" : "ExecRefID"
, "190" : "OfferSpotRate"
, "191" : "OfferForwardPoints"
, "192" : "OrderQty2"
, "193" : "FutSettDate2"
, "193" : "SettlDate2"
, "194" : "LastSpotRate"
, "195" : "LastForwardPoints"
, "196" : "AllocLinkID"
, "197" : "AllocLinkType"
, "198" : "SecondaryOrderID"
, "199" : "NoIOIQualifiers"
, "2" : "AdvId"
, "20" : "ExecTransType"
, "200" : "MaturityMonthYear"
, "201" : "PutOrCall"
, "202" : "StrikePrice"
, "203" : "CoveredOrUncovered"
, "204" : "CustomerOrFirm"
, "205" : "MaturityDay"
, "206" : "OptAttribute"
, "207" : "SecurityExchange"
, "208" : "NotifyBrokerOfCredit"
, "209" : "AllocHandlInst"
, "21" : "HandlInst"
, "210" : "MaxShow"
, "211" : "PegDifference"
, "211" : "PegOffsetValue"
, "212" : "XmlDataLen"
, "213" : "XmlData"
, "214" : "SettlInstRefID"
, "215" : "NoRoutingIDs"
, "216" : "RoutingType"
, "217" : "RoutingID"
, "218" : "Spread"
, "218" : "SpreadToBenchmark"
, "219" : "Benchmark"
, "22" : "IDSource"
, "22" : "SecurityIDSource"
, "220" : "BenchmarkCurveCurrency"
, "221" : "BenchmarkCurveName"
, "222" : "BenchmarkCurvePoint"
, "223" : "CouponRate"
, "224" : "CouponPaymentDate"
, "225" : "IssueDate"
, "226" : "RepurchaseTerm"
, "227" : "RepurchaseRate"
, "228" : "Factor"
, "229" : "TradeOriginationDate"
, "23" : "IOIid"
, "23" : "IOIID"
, "230" : "ExDate"
, "231" : "ContractMultiplier"
, "232" : "NoStipulations"
, "233" : "StipulationType"
, "234" : "StipulationValue"
, "235" : "YieldType"
, "236" : "Yield"
, "237" : "TotalTakedown"
, "238" : "Concession"
, "239" : "RepoCollateralSecurityType"
, "24" : "IOIOthSvc"
, "240" : "RedemptionDate"
, "241" : "UnderlyingCouponPaymentDate"
, "242" : "UnderlyingIssueDate"
, "243" : "UnderlyingRepoCollateralSecurityType"
, "244" : "UnderlyingRepurchaseTerm"
, "245" : "UnderlyingRepurchaseRate"
, "246" : "UnderlyingFactor"
, "247" : "UnderlyingRedemptionDate"
, "248" : "LegCouponPaymentDate"
, "249" : "LegIssueDate"
, "25" : "IOIQltyInd"
, "250" : "LegRepoCollateralSecurityType"
, "251" : "LegRepurchaseTerm"
, "252" : "LegRepurchaseRate"
, "253" : "LegFactor"
, "254" : "LegRedemptionDate"
, "255" : "CreditRating"
, "256" : "UnderlyingCreditRating"
, "257" : "LegCreditRating"
, "258" : "TradedFlatSwitch"
, "259" : "BasisFeatureDate"
, "26" : "IOIRefID"
, "260" : "BasisFeaturePrice"
, "262" : "MDReqID"
, "263" : "SubscriptionRequestType"
, "264" : "MarketDepth"
, "265" : "MDUpdateType"
, "266" : "AggregatedBook"
, "267" : "NoMDEntryTypes"
, "268" : "NoMDEntries"
, "269" : "MDEntryType"
, "27" : "IOIQty"
, "27" : "IOIShares"
, "270" : "MDEntryPx"
, "271" : "MDEntrySize"
, "272" : "MDEntryDate"
, "273" : "MDEntryTime"
, "274" : "TickDirection"
, "275" : "MDMkt"
, "276" : "QuoteCondition"
, "277" : "TradeCondition"
, "278" : "MDEntryID"
, "279" : "MDUpdateAction"
, "28" : "IOITransType"
, "280" : "MDEntryRefID"
, "281" : "MDReqRejReason"
, "282" : "MDEntryOriginator"
, "283" : "LocationID"
, "284" : "DeskID"
, "285" : "DeleteReason"
, "286" : "OpenCloseSettleFlag"
, "286" : "OpenCloseSettlFlag"
, "287" : "SellerDays"
, "288" : "MDEntryBuyer"
, "289" : "MDEntrySeller"
, "29" : "LastCapacity"
, "290" : "MDEntryPositionNo"
, "291" : "FinancialStatus"
, "292" : "CorporateAction"
, "293" : "DefBidSize"
, "294" : "DefOfferSize"
, "295" : "NoQuoteEntries"
, "296" : "NoQuoteSets"
, "297" : "QuoteAckStatus"
, "297" : "QuoteStatus"
, "298" : "QuoteCancelType"
, "299" : "QuoteEntryID"
, "3" : "AdvRefID"
, "30" : "LastMkt"
, "300" : "QuoteRejectReason"
, "301" : "QuoteResponseLevel"
, "302" : "QuoteSetID"
, "303" : "QuoteRequestType"
, "304" : "TotNoQuoteEntries"
, "304" : "TotQuoteEntries"
, "305" : "UnderlyingIDSource"
, "305" : "UnderlyingSecurityIDSource"
, "306" : "UnderlyingIssuer"
, "307" : "UnderlyingSecurityDesc"
, "308" : "UnderlyingSecurityExchange"
, "309" : "UnderlyingSecurityID"
, "31" : "LastPx"
, "310" : "UnderlyingSecurityType"
, "311" : "UnderlyingSymbol"
, "312" : "UnderlyingSymbolSfx"
, "313" : "UnderlyingMaturityMonthYear"
, "314" : "UnderlyingMaturityDay"
, "315" : "UnderlyingPutOrCall"
, "316" : "UnderlyingStrikePrice"
, "317" : "UnderlyingOptAttribute"
, "318" : "UnderlyingCurrency"
, "319" : "RatioQty"
, "32" : "LastQty"
, "32" : "LastShares"
, "320" : "SecurityReqID"
, "321" : "SecurityRequestType"
, "322" : "SecurityResponseID"
, "323" : "SecurityResponseType"
, "324" : "SecurityStatusReqID"
, "325" : "UnsolicitedIndicator"
, "326" : "SecurityTradingStatus"
, "327" : "HaltReasonChar"
, "327" : "HaltReasonInt"
, "328" : "InViewOfCommon"
, "329" : "DueToRelated"
, "33" : "LinesOfText"
, "33" : "NoLinesOfText"
, "330" : "BuyVolume"
, "331" : "SellVolume"
, "332" : "HighPx"
, "333" : "LowPx"
, "334" : "Adjustment"
, "335" : "TradSesReqID"
, "336" : "TradingSessionID"
, "337" : "ContraTrader"
, "338" : "TradSesMethod"
, "339" : "TradSesMode"
, "34" : "MsgSeqNum"
, "340" : "TradSesStatus"
, "341" : "TradSesStartTime"
, "342" : "TradSesOpenTime"
, "343" : "TradSesPreCloseTime"
, "344" : "TradSesCloseTime"
, "345" : "TradSesEndTime"
, "346" : "NumberOfOrders"
, "347" : "MessageEncoding"
, "348" : "EncodedIssuerLen"
, "349" : "EncodedIssuer"
, "35" : "MsgType"
, "350" : "EncodedSecurityDescLen"
, "351" : "EncodedSecurityDesc"
, "352" : "EncodedListExecInstLen"
, "353" : "EncodedListExecInst"
, "354" : "EncodedTextLen"
, "355" : "EncodedText"
, "356" : "EncodedSubjectLen"
, "357" : "EncodedSubject"
, "358" : "EncodedHeadlineLen"
, "359" : "EncodedHeadline"
, "36" : "NewSeqNo"
, "360" : "EncodedAllocTextLen"
, "361" : "EncodedAllocText"
, "362" : "EncodedUnderlyingIssuerLen"
, "363" : "EncodedUnderlyingIssuer"
, "364" : "EncodedUnderlyingSecurityDescLen"
, "365" : "EncodedUnderlyingSecurityDesc"
, "366" : "AllocPrice"
, "367" : "QuoteSetValidUntilTime"
, "368" : "QuoteEntryRejectReason"
, "369" : "LastMsgSeqNumProcessed"
, "37" : "OrderID"
, "370" : "OnBehalfOfSendingTime"
, "371" : "RefTagID"
, "372" : "RefMsgType"
, "373" : "SessionRejectReason"
, "374" : "BidRequestTransType"
, "375" : "ContraBroker"
, "376" : "ComplianceID"
, "377" : "SolicitedFlag"
, "378" : "ExecRestatementReason"
, "379" : "BusinessRejectRefID"
, "38" : "OrderQty"
, "380" : "BusinessRejectReason"
, "381" : "GrossTradeAmt"
, "382" : "NoContraBrokers"
, "383" : "MaxMessageSize"
, "384" : "NoMsgTypes"
, "385" : "MsgDirection"
, "386" : "NoTradingSessions"
, "387" : "TotalVolumeTraded"
, "388" : "DiscretionInst"
, "389" : "DiscretionOffset"
, "389" : "DiscretionOffsetValue"
, "39" : "OrdStatus"
, "390" : "BidID"
, "391" : "ClientBidID"
, "392" : "ListName"
, "393" : "TotalNumSecurities"
, "393" : "TotNoRelatedSym"
, "394" : "BidType"
, "395" : "NumTickets"
, "396" : "SideValue1"
, "397" : "SideValue2"
, "398" : "NoBidDescriptors"
, "399" : "BidDescriptorType"
, "4" : "AdvSide"
, "40" : "OrdType"
, "400" : "BidDescriptor"
, "401" : "SideValueInd"
, "402" : "LiquidityPctLow"
, "403" : "LiquidityPctHigh"
, "404" : "LiquidityValue"
, "405" : "EFPTrackingError"
, "406" : "FairValue"
, "407" : "OutsideIndexPct"
, "408" : "ValueOfFutures"
, "409" : "LiquidityIndType"
, "41" : "OrigClOrdID"
, "410" : "WtAverageLiquidity"
, "411" : "ExchangeForPhysical"
, "412" : "OutMainCntryUIndex"
, "413" : "CrossPercent"
, "414" : "ProgRptReqs"
, "415" : "ProgPeriodInterval"
, "416" : "IncTaxInd"
, "417" : "NumBidders"
, "418" : "BidTradeType"
, "418" : "TradeType"
, "419" : "BasisPxType"
, "42" : "OrigTime"
, "420" : "NoBidComponents"
, "421" : "Country"
, "422" : "TotNoStrikes"
, "423" : "PriceType"
, "424" : "DayOrderQty"
, "425" : "DayCumQty"
, "426" : "DayAvgPx"
, "427" : "GTBookingInst"
, "428" : "NoStrikes"
, "429" : "ListStatusType"
, "43" : "PossDupFlag"
, "430" : "NetGrossInd"
, "431" : "ListOrderStatus"
, "432" : "ExpireDate"
, "433" : "ListExecInstType"
, "434" : "CxlRejResponseTo"
, "435" : "UnderlyingCouponRate"
, "436" : "UnderlyingContractMultiplier"
, "437" : "ContraTradeQty"
, "438" : "ContraTradeTime"
, "439" : "ClearingFirm"
, "44" : "Price"
, "440" : "ClearingAccount"
, "441" : "LiquidityNumSecurities"
, "442" : "MultiLegReportingType"
, "443" : "StrikeTime"
, "444" : "ListStatusText"
, "445" : "EncodedListStatusTextLen"
, "446" : "EncodedListStatusText"
, "447" : "PartyIDSource"
, "448" : "PartyID"
, "449" : "TotalVolumeTradedDate"
, "45" : "RefSeqNum"
, "450" : "TotalVolumeTradedTime"
, "451" : "NetChgPrevDay"
, "452" : "PartyRole"
, "453" : "NoPartyIDs"
, "454" : "NoSecurityAltID"
, "455" : "SecurityAltID"
, "456" : "SecurityAltIDSource"
, "457" : "NoUnderlyingSecurityAltID"
, "458" : "UnderlyingSecurityAltID"
, "459" : "UnderlyingSecurityAltIDSource"
, "46" : "RelatdSym"
, "460" : "Product"
, "461" : "CFICode"
, "462" : "UnderlyingProduct"
, "463" : "UnderlyingCFICode"
, "464" : "TestMessageIndicator"
, "465" : "QuantityType"
, "466" : "BookingRefID"
, "467" : "IndividualAllocID"
, "468" : "RoundingDirection"
, "469" : "RoundingModulus"
, "47" : "Rule80A"
, "470" : "CountryOfIssue"
, "471" : "StateOrProvinceOfIssue"
, "472" : "LocaleOfIssue"
, "473" : "NoRegistDtls"
, "474" : "MailingDtls"
, "475" : "InvestorCountryOfResidence"
, "476" : "PaymentRef"
, "477" : "DistribPaymentMethod"
, "478" : "CashDistribCurr"
, "479" : "CommCurrency"
, "48" : "SecurityID"
, "480" : "CancellationRights"
, "481" : "MoneyLaunderingStatus"
, "482" : "MailingInst"
, "483" : "TransBkdTime"
, "484" : "ExecPriceType"
, "485" : "ExecPriceAdjustment"
, "486" : "DateOfBirth"
, "487" : "TradeReportTransType"
, "488" : "CardHolderName"
, "489" : "CardNumber"
, "49" : "SenderCompID"
, "490" : "CardExpDate"
, "491" : "CardIssNo"
, "491" : "CardIssNum"
, "492" : "PaymentMethod"
, "493" : "RegistAcctType"
, "494" : "Designation"
, "495" : "TaxAdvantageType"
, "496" : "RegistRejReasonText"
, "497" : "FundRenewWaiv"
, "498" : "CashDistribAgentName"
, "499" : "CashDistribAgentCode"
, "5" : "AdvTransType"
, "50" : "SenderSubID"
, "500" : "CashDistribAgentAcctNumber"
, "501" : "CashDistribPayRef"
, "502" : "CashDistribAgentAcctName"
, "503" : "CardStartDate"
, "504" : "PaymentDate"
, "505" : "PaymentRemitterID"
, "506" : "RegistStatus"
, "507" : "RegistRejReasonCode"
, "508" : "RegistRefID"
, "509" : "RegistDetls"
, "509" : "RegistDtls"
, "51" : "SendingDate"
, "510" : "NoDistribInsts"
, "511" : "RegistEmail"
, "512" : "DistribPercentage"
, "513" : "RegistID"
, "514" : "RegistTransType"
, "515" : "ExecValuationPoint"
, "516" : "OrderPercent"
, "517" : "OwnershipType"
, "518" : "NoContAmts"
, "519" : "ContAmtType"
, "52" : "SendingTime"
, "520" : "ContAmtValue"
, "521" : "ContAmtCurr"
, "522" : "OwnerType"
, "523" : "PartySubID"
, "524" : "NestedPartyID"
, "525" : "NestedPartyIDSource"
, "526" : "SecondaryClOrdID"
, "527" : "SecondaryExecID"
, "528" : "OrderCapacity"
, "529" : "OrderRestrictions"
, "53" : "Quantity"
, "53" : "Shares"
, "530" : "MassCancelRequestType"
, "531" : "MassCancelResponse"
, "532" : "MassCancelRejectReason"
, "533" : "TotalAffectedOrders"
, "534" : "NoAffectedOrders"
, "535" : "AffectedOrderID"
, "536" : "AffectedSecondaryOrderID"
, "537" : "QuoteType"
, "538" : "NestedPartyRole"
, "539" : "NoNestedPartyIDs"
, "54" : "Side"
, "540" : "TotalAccruedInterestAmt"
, "541" : "MaturityDate"
, "542" : "UnderlyingMaturityDate"
, "543" : "InstrRegistry"
, "544" : "CashMargin"
, "545" : "NestedPartySubID"
, "546" : "Scope"
, "547" : "MDImplicitDelete"
, "548" : "CrossID"
, "549" : "CrossType"
, "55" : "Symbol"
, "550" : "CrossPrioritization"
, "551" : "OrigCrossID"
, "552" : "NoSides"
, "553" : "Username"
, "554" : "Password"
, "555" : "NoLegs"
, "556" : "LegCurrency"
, "557" : "TotalNumSecurityTypes"
, "557" : "TotNoSecurityTypes"
, "558" : "NoSecurityTypes"
, "559" : "SecurityListRequestType"
, "56" : "TargetCompID"
, "560" : "SecurityRequestResult"
, "561" : "RoundLot"
, "562" : "MinTradeVol"
, "563" : "MultiLegRptTypeReq"
, "564" : "LegPositionEffect"
, "565" : "LegCoveredOrUncovered"
, "566" : "LegPrice"
, "567" : "TradSesStatusRejReason"
, "568" : "TradeRequestID"
, "569" : "TradeRequestType"
, "57" : "TargetSubID"
, "570" : "PreviouslyReported"
, "571" : "TradeReportID"
, "572" : "TradeReportRefID"
, "573" : "MatchStatus"
, "574" : "MatchType"
, "575" : "OddLot"
, "576" : "NoClearingInstructions"
, "577" : "ClearingInstruction"
, "578" : "TradeInputSource"
, "579" : "TradeInputDevice"
, "58" : "Text"
, "580" : "NoDates"
, "581" : "AccountType"
, "582" : "CustOrderCapacity"
, "583" : "ClOrdLinkID"
, "584" : "MassStatusReqID"
, "585" : "MassStatusReqType"
, "586" : "OrigOrdModTime"
, "587" : "LegSettlmntTyp"
, "587" : "LegSettlType"
, "588" : "LegFutSettDate"
, "588" : "LegSettlDate"
, "589" : "DayBookingInst"
, "59" : "TimeInForce"
, "590" : "BookingUnit"
, "591" : "PreallocMethod"
, "592" : "UnderlyingCountryOfIssue"
, "593" : "UnderlyingStateOrProvinceOfIssue"
, "594" : "UnderlyingLocaleOfIssue"
, "595" : "UnderlyingInstrRegistry"
, "596" : "LegCountryOfIssue"
, "597" : "LegStateOrProvinceOfIssue"
, "598" : "LegLocaleOfIssue"
, "599" : "LegInstrRegistry"
, "6" : "AvgPx"
, "60" : "TransactTime"
, "600" : "LegSymbol"
, "601" : "LegSymbolSfx"
, "602" : "LegSecurityID"
, "603" : "LegSecurityIDSource"
, "604" : "NoLegSecurityAltID"
, "605" : "LegSecurityAltID"
, "606" : "LegSecurityAltIDSource"
, "607" : "LegProduct"
, "608" : "LegCFICode"
, "609" : "LegSecurityType"
, "61" : "Urgency"
, "610" : "LegMaturityMonthYear"
, "611" : "LegMaturityDate"
, "612" : "LegStrikePrice"
, "613" : "LegOptAttribute"
, "614" : "LegContractMultiplier"
, "615" : "LegCouponRate"
, "616" : "LegSecurityExchange"
, "617" : "LegIssuer"
, "618" : "EncodedLegIssuerLen"
, "619" : "EncodedLegIssuer"
, "62" : "ValidUntilTime"
, "620" : "LegSecurityDesc"
, "621" : "EncodedLegSecurityDescLen"
, "622" : "EncodedLegSecurityDesc"
, "623" : "LegRatioQty"
, "624" : "LegSide"
, "625" : "TradingSessionSubID"
, "626" : "AllocType"
, "627" : "NoHops"
, "628" : "HopCompID"
, "629" : "HopSendingTime"
, "63" : "SettlmntTyp"
, "63" : "SettlType"
, "630" : "HopRefID"
, "631" : "MidPx"
, "632" : "BidYield"
, "633" : "MidYield"
, "634" : "OfferYield"
, "635" : "ClearingFeeIndicator"
, "636" : "WorkingIndicator"
, "637" : "LegLastPx"
, "638" : "PriorityIndicator"
, "639" : "PriceImprovement"
, "64" : "FutSettDate"
, "64" : "SettlDate"
, "640" : "Price2"
, "641" : "LastForwardPoints2"
, "642" : "BidForwardPoints2"
, "643" : "OfferForwardPoints2"
, "644" : "RFQReqID"
, "645" : "MktBidPx"
, "646" : "MktOfferPx"
, "647" : "MinBidSize"
, "648" : "MinOfferSize"
, "649" : "QuoteStatusReqID"
, "65" : "SymbolSfx"
, "650" : "LegalConfirm"
, "651" : "UnderlyingLastPx"
, "652" : "UnderlyingLastQty"
, "653" : "SecDefStatus"
, "654" : "LegRefID"
, "655" : "ContraLegRefID"
, "656" : "SettlCurrBidFxRate"
, "657" : "SettlCurrOfferFxRate"
, "658" : "QuoteRequestRejectReason"
, "659" : "SideComplianceID"
, "66" : "ListID"
, "660" : "AcctIDSource"
, "661" : "AllocAcctIDSource"
, "662" : "BenchmarkPrice"
, "663" : "BenchmarkPriceType"
, "664" : "ConfirmID"
, "665" : "ConfirmStatus"
, "666" : "ConfirmTransType"
, "667" : "ContractSettlMonth"
, "668" : "DeliveryForm"
, "669" : "LastParPx"
, "67" : "ListSeqNo"
, "670" : "NoLegAllocs"
, "671" : "LegAllocAccount"
, "672" : "LegIndividualAllocID"
, "673" : "LegAllocQty"
, "674" : "LegAllocAcctIDSource"
, "675" : "LegSettlCurrency"
, "676" : "LegBenchmarkCurveCurrency"
, "677" : "LegBenchmarkCurveName"
, "678" : "LegBenchmarkCurvePoint"
, "679" : "LegBenchmarkPrice"
, "68" : "ListNoOrds"
, "68" : "TotNoOrders"
, "680" : "LegBenchmarkPriceType"
, "681" : "LegBidPx"
, "682" : "LegIOIQty"
, "683" : "NoLegStipulations"
, "684" : "LegOfferPx"
, "685" : "LegOrderQty"
, "686" : "LegPriceType"
, "687" : "LegQty"
, "688" : "LegStipulationType"
, "689" : "LegStipulationValue"
, "69" : "ListExecInst"
, "690" : "LegSwapType"
, "691" : "Pool"
, "692" : "QuotePriceType"
, "693" : "QuoteRespID"
, "694" : "QuoteRespType"
, "695" : "QuoteQualifier"
, "696" : "YieldRedemptionDate"
, "697" : "YieldRedemptionPrice"
, "698" : "YieldRedemptionPriceType"
, "699" : "BenchmarkSecurityID"
, "7" : "BeginSeqNo"
, "70" : "AllocID"
, "700" : "ReversalIndicator"
, "701" : "YieldCalcDate"
, "702" : "NoPositions"
, "703" : "PosType"
, "704" : "LongQty"
, "705" : "ShortQty"
, "706" : "PosQtyStatus"
, "707" : "PosAmtType"
, "708" : "PosAmt"
, "709" : "PosTransType"
, "71" : "AllocTransType"
, "710" : "PosReqID"
, "711" : "NoUnderlyings"
, "712" : "PosMaintAction"
, "713" : "OrigPosReqRefID"
, "714" : "PosMaintRptRefID"
, "715" : "ClearingBusinessDate"
, "716" : "SettlSessID"
, "717" : "SettlSessSubID"
, "718" : "AdjustmentType"
, "719" : "ContraryInstructionIndicator"
, "72" : "RefAllocID"
, "720" : "PriorSpreadIndicator"
, "721" : "PosMaintRptID"
, "722" : "PosMaintStatus"
, "723" : "PosMaintResult"
, "724" : "PosReqType"
, "725" : "ResponseTransportType"
, "726" : "ResponseDestination"
, "727" : "TotalNumPosReports"
, "728" : "PosReqResult"
, "729" : "PosReqStatus"
, "73" : "NoOrders"
, "730" : "SettlPrice"
, "731" : "SettlPriceType"
, "732" : "UnderlyingSettlPrice"
, "733" : "UnderlyingSettlPriceType"
, "734" : "PriorSettlPrice"
, "735" : "NoQuoteQualifiers"
, "736" : "AllocSettlCurrency"
, "737" : "AllocSettlCurrAmt"
, "738" : "InterestAtMaturity"
, "739" : "LegDatedDate"
, "74" : "AvgPrxPrecision"
, "74" : "AvgPxPrecision"
, "740" : "LegPool"
, "741" : "AllocInterestAtMaturity"
, "742" : "AllocAccruedInterestAmt"
, "743" : "DeliveryDate"
, "744" : "AssignmentMethod"
, "745" : "AssignmentUnit"
, "746" : "OpenInterest"
, "747" : "ExerciseMethod"
, "748" : "TotNumTradeReports"
, "749" : "TradeRequestResult"
, "75" : "TradeDate"
, "750" : "TradeRequestStatus"
, "751" : "TradeReportRejectReason"
, "752" : "SideMultiLegReportingType"
, "753" : "NoPosAmt"
, "754" : "AutoAcceptIndicator"
, "755" : "AllocReportID"
, "756" : "NoNested2PartyIDs"
, "757" : "Nested2PartyID"
, "758" : "Nested2PartyIDSource"
, "759" : "Nested2PartyRole"
, "76" : "ExecBroker"
, "760" : "Nested2PartySubID"
, "761" : "BenchmarkSecurityIDSource"
, "762" : "SecuritySubType"
, "763" : "UnderlyingSecuritySubType"
, "764" : "LegSecuritySubType"
, "765" : "AllowableOneSidednessPct"
, "766" : "AllowableOneSidednessValue"
, "767" : "AllowableOneSidednessCurr"
, "768" : "NoTrdRegTimestamps"
, "769" : "TrdRegTimestamp"
, "77" : "OpenClose"
, "77" : "PositionEffect"
, "770" : "TrdRegTimestampType"
, "771" : "TrdRegTimestampOrigin"
, "772" : "ConfirmRefID"
, "773" : "ConfirmType"
, "774" : "ConfirmRejReason"
, "775" : "BookingType"
, "776" : "IndividualAllocRejCode"
, "777" : "SettlInstMsgID"
, "778" : "NoSettlInst"
, "779" : "LastUpdateTime"
, "78" : "NoAllocs"
, "780" : "AllocSettlInstType"
, "781" : "NoSettlPartyIDs"
, "782" : "SettlPartyID"
, "783" : "SettlPartyIDSource"
, "784" : "SettlPartyRole"
, "785" : "SettlPartySubID"
, "786" : "SettlPartySubIDType"
, "787" : "DlvyInstType"
, "788" : "TerminationType"
, "789" : "NextExpectedMsgSeqNum"
, "79" : "AllocAccount"
, "790" : "OrdStatusReqID"
, "791" : "SettlInstReqID"
, "792" : "SettlInstReqRejCode"
, "793" : "SecondaryAllocID"
, "794" : "AllocReportType"
, "795" : "AllocReportRefID"
, "796" : "AllocCancReplaceReason"
, "797" : "CopyMsgIndicator"
, "798" : "AllocAccountType"
, "799" : "OrderAvgPx"
, "8" : "BeginString"
, "80" : "AllocQty"
, "80" : "AllocShares"
, "800" : "OrderBookingQty"
, "801" : "NoSettlPartySubIDs"
, "802" : "NoPartySubIDs"
, "803" : "PartySubIDType"
, "804" : "NoNestedPartySubIDs"
, "805" : "NestedPartySubIDType"
, "806" : "NoNested2PartySubIDs"
, "807" : "Nested2PartySubIDType"
, "808" : "AllocIntermedReqType"
, "81" : "ProcessCode"
, "810" : "UnderlyingPx"
, "811" : "PriceDelta"
, "812" : "ApplQueueMax"
, "813" : "ApplQueueDepth"
, "814" : "ApplQueueResolution"
, "815" : "ApplQueueAction"
, "816" : "NoAltMDSource"
, "817" : "AltMDSourceID"
, "818" : "SecondaryTradeReportID"
, "819" : "AvgPxIndicator"
, "82" : "NoRpts"
, "820" : "TradeLinkID"
, "821" : "OrderInputDevice"
, "822" : "UnderlyingTradingSessionID"
, "823" : "UnderlyingTradingSessionSubID"
, "824" : "TradeLegRefID"
, "825" : "ExchangeRule"
, "826" : "TradeAllocIndicator"
, "827" : "ExpirationCycle"
, "828" : "TrdType"
, "829" : "TrdSubType"
, "83" : "RptSeq"
, "830" : "TransferReason"
, "831" : "AsgnReqID"
, "832" : "TotNumAssignmentReports"
, "833" : "AsgnRptID"
, "834" : "ThresholdAmount"
, "835" : "PegMoveType"
, "836" : "PegOffsetType"
, "837" : "PegLimitType"
, "838" : "PegRoundDirection"
, "839" : "PeggedPrice"
, "84" : "CxlQty"
, "840" : "PegScope"
, "841" : "DiscretionMoveType"
, "842" : "DiscretionOffsetType"
, "843" : "DiscretionLimitType"
, "844" : "DiscretionRoundDirection"
, "845" : "DiscretionPrice"
, "846" : "DiscretionScope"
, "847" : "TargetStrategy"
, "848" : "TargetStrategyParameters"
, "849" : "ParticipationRate"
, "85" : "NoDlvyInst"
, "850" : "TargetStrategyPerformance"
, "851" : "LastLiquidityInd"
, "852" : "PublishTrdIndicator"
, "853" : "ShortSaleReason"
, "854" : "QtyType"
, "855" : "SecondaryTrdType"
, "856" : "TradeReportType"
, "857" : "AllocNoOrdersType"
, "858" : "SharedCommission"
, "859" : "ConfirmReqID"
, "86" : "DlvyInst"
, "860" : "AvgParPx"
, "861" : "ReportedPx"
, "862" : "NoCapacities"
, "863" : "OrderCapacityQty"
, "864" : "NoEvents"
, "865" : "EventType"
, "866" : "EventDate"
, "867" : "EventPx"
, "868" : "EventText"
, "869" : "PctAtRisk"
, "87" : "AllocStatus"
, "870" : "NoInstrAttrib"
, "871" : "InstrAttribType"
, "872" : "InstrAttribValue"
, "873" : "DatedDate"
, "874" : "InterestAccrualDate"
, "875" : "CPProgram"
, "876" : "CPRegType"
, "877" : "UnderlyingCPProgram"
, "878" : "UnderlyingCPRegType"
, "879" : "UnderlyingQty"
, "88" : "AllocRejCode"
, "880" : "TrdMatchID"
, "881" : "SecondaryTradeReportRefID"
, "882" : "UnderlyingDirtyPrice"
, "883" : "UnderlyingEndPrice"
, "884" : "UnderlyingStartValue"
, "885" : "UnderlyingCurrentValue"
, "886" : "UnderlyingEndValue"
, "887" : "NoUnderlyingStips"
, "888" : "UnderlyingStipType"
, "889" : "UnderlyingStipValue"
, "89" : "Signature"
, "890" : "MaturityNetMoney"
, "891" : "MiscFeeBasis"
, "892" : "TotNoAllocs"
, "893" : "LastFragment"
, "894" : "CollReqID"
, "895" : "CollAsgnReason"
, "896" : "CollInquiryQualifier"
, "897" : "NoTrades"
, "898" : "MarginRatio"
, "899" : "MarginExcess"
, "9" : "BodyLength"
, "90" : "SecureDataLen"
, "900" : "TotalNetValue"
, "901" : "CashOutstanding"
, "902" : "CollAsgnID"
, "903" : "CollAsgnTransType"
, "904" : "CollRespID"
, "905" : "CollAsgnRespType"
, "906" : "CollAsgnRejectReason"
, "907" : "CollAsgnRefID"
, "908" : "CollRptID"
, "909" : "CollInquiryID"
, "91" : "SecureData"
, "910" : "CollStatus"
, "911" : "TotNumReports"
, "912" : "LastRptRequested"
, "913" : "AgreementDesc"
, "914" : "AgreementID"
, "915" : "AgreementDate"
, "916" : "StartDate"
, "917" : "EndDate"
, "918" : "AgreementCurrency"
, "919" : "DeliveryType"
, "92" : "BrokerOfCredit"
, "920" : "EndAccruedInterestAmt"
, "921" : "StartCash"
, "922" : "EndCash"
, "923" : "UserRequestID"
, "924" : "UserRequestType"
, "925" : "NewPassword"
, "926" : "UserStatus"
, "927" : "UserStatusText"
, "928" : "StatusValue"
, "929" : "StatusText"
, "93" : "SignatureLength"
, "930" : "RefCompID"
, "931" : "RefSubID"
, "932" : "NetworkResponseID"
, "933" : "NetworkRequestID"
, "934" : "LastNetworkResponseID"
, "935" : "NetworkRequestType"
, "936" : "NoCompIDs"
, "937" : "NetworkStatusResponseType"
, "938" : "NoCollInquiryQualifier"
, "939" : "TrdRptStatus"
, "94" : "EmailType"
, "940" : "AffirmStatus"
, "941" : "UnderlyingStrikeCurrency"
, "942" : "LegStrikeCurrency"
, "943" : "TimeBracket"
, "944" : "CollAction"
, "945" : "CollInquiryStatus"
, "946" : "CollInquiryResult"
, "947" : "StrikeCurrency"
, "948" : "NoNested3PartyIDs"
, "949" : "Nested3PartyID"
, "95" : "RawDataLength"
, "950" : "Nested3PartyIDSource"
, "951" : "Nested3PartyRole"
, "952" : "NoNested3PartySubIDs"
, "953" : "Nested3PartySubID"
, "954" : "Nested3PartySubIDType"
, "955" : "LegContractSettlMonth"
, "956" : "LegInterestAccrualDate"
, "957" : "NoStrategyParameters"
, "958" : "StrategyParameterName"
, "959" : "StrategyParameterType"
, "96" : "RawData"
, "960" : "StrategyParameterValue"
, "961" : "HostCrossID"
, "962" : "SideTimeInForce"
, "963" : "MDReportID"
, "964" : "SecurityReportID"
, "965" : "SecurityStatus"
, "966" : "SettleOnOpenFlag"
, "967" : "StrikeMultiplier"
, "968" : "StrikeValue"
, "969" : "MinPriceIncrement"
, "97" : "PossResend"
, "970" : "PositionLimit"
, "971" : "NTPositionLimit"
, "972" : "UnderlyingAllocationPercent"
, "973" : "UnderlyingCashAmount"
, "974" : "UnderlyingCashType"
, "975" : "UnderlyingSettlementType"
, "976" : "QuantityDate"
, "977" : "ContIntRptID"
, "978" : "LateIndicator"
, "979" : "InputSource"
, "98" : "EncryptMethod"
, "980" : "SecurityUpdateAction"
, "981" : "NoExpiration"
, "982" : "ExpirationQtyType"
, "982" : "ExpType"
, "983" : "ExpQty"
, "984" : "NoUnderlyingAmounts"
, "985" : "UnderlyingPayAmount"
, "986" : "UnderlyingCollectAmount"
, "987" : "UnderlyingSettlementDate"
, "988" : "UnderlyingSettlementStatus"
, "989" : "SecondaryIndividualAllocID"
, "99" : "StopPx"
, "990" : "LegReportID"
, "991" : "RndPx"
, "992" : "IndividualAllocType"
, "993" : "AllocCustomerCapacity"
, "994" : "TierCode"
, "996" : "UnitOfMeasure"
, "997" : "TimeUnit"
, "998" : "UnderlyingUnitOfMeasure"
, "999" : "LegUnitOfMeasure"
}
| bsd-3-clause |
unsiloai/syntaxnet-ops-hack | tensorflow/python/util/deprecation.py | 30 | 15749 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tensor utility functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import functools
import re
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import decorator_utils
from tensorflow.python.util import tf_contextlib
from tensorflow.python.util import tf_decorator
from tensorflow.python.util import tf_inspect
# Allow deprecation warnings to be silenced temporarily with a context manager.
# This flag is checked by every wrapper produced in this module before it logs
# a warning; the `silence()` context manager below toggles it.
_PRINT_DEPRECATION_WARNINGS = True
def _add_deprecated_function_notice_to_docstring(doc, date, instructions):
  """Builds a docstring carrying a deprecation notice for a whole function."""
  when = 'in a future version' if date is None else ('after %s' % date)
  notice_lines = [
      'THIS FUNCTION IS DEPRECATED. It will be removed %s.' % when,
      'Instructions for updating:',
  ]
  return decorator_utils.add_notice_to_docstring(
      doc, instructions, 'DEPRECATED FUNCTION', '(deprecated)', notice_lines)
def _add_deprecated_arg_notice_to_docstring(doc, date, instructions):
  """Builds a docstring carrying a deprecation notice for specific arguments."""
  when = 'in a future version' if date is None else ('after %s' % date)
  notice_lines = [
      'SOME ARGUMENTS ARE DEPRECATED. They will be removed %s.' % when,
      'Instructions for updating:',
  ]
  return decorator_utils.add_notice_to_docstring(
      doc, instructions, 'DEPRECATED FUNCTION ARGUMENTS',
      '(deprecated arguments)', notice_lines)
def _validate_deprecation_args(date, instructions):
if date is not None and not re.match(r'20\d\d-[01]\d-[0123]\d', date):
raise ValueError('Date must be YYYY-MM-DD.')
if not instructions:
raise ValueError('Don\'t deprecate things without conversion instructions!')
def _call_location():
  """Returns the 'file:line' of the caller two frames above this call."""
  frame = tf_inspect.currentframe()
  if frame:
    # CPython exposes frame objects directly; walking f_back is the fast path.
    # Skip two frames so we report the caller of the deprecated function
    # rather than the deprecation wrapper itself.
    caller = frame.f_back
    if caller.f_back:
      caller = caller.f_back
    return '%s:%d' % (caller.f_code.co_filename, caller.f_lineno)
  # Interpreters without frame introspection: use the slower stack API.
  # The 0 argument avoids building unused source context for each entry.
  entry = tf_inspect.stack(0)[2]
  return '%s:%d' % (entry[1], entry[2])
def deprecated(date, instructions):
  """Decorator for marking functions or methods deprecated.

  Each call to the decorated callable emits a warning of the form:

    <function> (from <module>) is deprecated and will be removed after <date>.
    Instructions for updating:
    <instructions>

  When `date` is None, the phrase 'in a future version' is used instead of
  'after <date>'. For methods, <function> includes the class name. The
  decorated callable's docstring is rewritten as well: ' (deprecated)' is
  appended to its first line and a deprecation notice is inserted before the
  remainder.

  Args:
    date: String or None. The date the function is scheduled to be removed.
      Must be ISO 8601 (YYYY-MM-DD), or None.
    instructions: String. Instructions on how to update code using the
      deprecated function.

  Returns:
    Decorated function or method.

  Raises:
    ValueError: If date is not None or in ISO 8601 format, or instructions are
      empty.
  """
  _validate_deprecation_args(date, instructions)

  def decorator(func):
    """Wraps `func` with the deprecation warning and docstring edit."""
    decorator_utils.validate_callable(func, 'deprecated')

    @functools.wraps(func)
    def wrapper(*args, **kwargs):  # pylint: disable=missing-docstring
      if _PRINT_DEPRECATION_WARNINGS:
        removal = 'in a future version' if date is None else 'after %s' % date
        logging.warning(
            'From %s: %s (from %s) is deprecated and will be removed %s.\n'
            'Instructions for updating:\n%s',
            _call_location(), decorator_utils.get_qualified_name(func),
            func.__module__, removal, instructions)
      return func(*args, **kwargs)

    new_doc = _add_deprecated_function_notice_to_docstring(
        func.__doc__, date, instructions)
    return tf_decorator.make_decorator(func, wrapper, 'deprecated', new_doc)

  return decorator
# Describes one deprecated argument:
#   position: zero-based index of the argument in the function signature, or
#     -1 before the position has been resolved against a concrete signature.
#   has_ok_value: True when a sentinel "ok" value was supplied for the arg.
#   ok_value: the value for which the deprecation warning is suppressed
#     (only meaningful when has_ok_value is True).
DeprecatedArgSpec = collections.namedtuple(
    'DeprecatedArgSpec', ['position', 'has_ok_value', 'ok_value'])
def deprecated_args(date, instructions, *deprecated_arg_names_or_tuples):
  """Decorator for marking specific function arguments as deprecated.

  This decorator logs a deprecation warning whenever the decorated function is
  called with the deprecated argument. It has the following format:

    Calling <function> (from <module>) with <arg> is deprecated and will be
    removed after <date>. Instructions for updating:
      <instructions>

  If `date` is None, 'after <date>' is replaced with 'in a future version'.
  <function> includes the class name if it is a method.

  It also edits the docstring of the function: ' (deprecated arguments)' is
  appended to the first line of the docstring and a deprecation notice is
  prepended to the rest of the docstring.

  Args:
    date: String or None. The date the function is scheduled to be removed.
      Must be ISO 8601 (YYYY-MM-DD), or None.
    instructions: String. Instructions on how to update code using the
      deprecated function.
    *deprecated_arg_names_or_tuples: String. or 2-Tuple(String,
      [ok_vals]).  The string is the deprecated argument name.
      Optionally, an ok-value may be provided.  If the user provided
      argument equals this value, the warning is suppressed.

  Returns:
    Decorated function or method.

  Raises:
    ValueError: If date is not None or in ISO 8601 format, instructions are
      empty, the deprecated arguments are not present in the function
      signature, or the second element of a deprecated_tuple is not a
      list.
  """
  _validate_deprecation_args(date, instructions)
  if not deprecated_arg_names_or_tuples:
    raise ValueError('Specify which argument is deprecated.')

  def _get_arg_names_to_ok_vals():
    """Returns a dict mapping arg_name to DeprecatedArgSpec w/o position."""
    d = {}
    for name_or_tuple in deprecated_arg_names_or_tuples:
      if isinstance(name_or_tuple, tuple):
        # (name, ok_value) pair: the warning is suppressed when the caller
        # passes exactly ok_value for this argument.
        d[name_or_tuple[0]] = DeprecatedArgSpec(-1, True, name_or_tuple[1])
      else:
        # Bare name: every use of the argument triggers a warning.
        d[name_or_tuple] = DeprecatedArgSpec(-1, False, None)
    return d

  def _get_deprecated_positional_arguments(names_to_ok_vals, arg_spec):
    """Builds a dictionary from deprecated arguments to their spec.

    Returned dict is keyed by argument name.
    Each value is a DeprecatedArgSpec with the following fields:
       position: The zero-based argument position of the argument
         within the signature.  None if the argument isn't found in
         the signature.
       ok_values:  Values of this argument for which warning will be
         suppressed.

    Args:
      names_to_ok_vals: dict from string arg_name to a list of values,
        possibly empty, which should not elicit a warning.
      arg_spec: Output from tf_inspect.getargspec on the called function.

    Returns:
      Dictionary from arg_name to DeprecatedArgSpec.
    """
    arg_name_to_pos = dict(
        (name, pos) for (pos, name) in enumerate(arg_spec.args))
    deprecated_positional_args = {}
    for arg_name, spec in iter(names_to_ok_vals.items()):
      if arg_name in arg_name_to_pos:
        # Resolve the placeholder -1 position to the real signature index.
        pos = arg_name_to_pos[arg_name]
        deprecated_positional_args[arg_name] = DeprecatedArgSpec(
            pos, spec.has_ok_value, spec.ok_value)
    return deprecated_positional_args

  def deprecated_wrapper(func):
    """Deprecation decorator."""
    decorator_utils.validate_callable(func, 'deprecated_args')
    deprecated_arg_names = _get_arg_names_to_ok_vals()
    arg_spec = tf_inspect.getargspec(func)
    deprecated_positions = _get_deprecated_positional_arguments(
        deprecated_arg_names, arg_spec)
    # A deprecated name may also refer to the *args or **kwargs parameter
    # itself rather than to a named argument.
    is_varargs_deprecated = arg_spec.varargs in deprecated_arg_names
    is_kwargs_deprecated = arg_spec.keywords in deprecated_arg_names
    # Every requested deprecated name must resolve to a named argument,
    # *args, or **kwargs; otherwise the decorator was misconfigured.
    if (len(deprecated_positions) + is_varargs_deprecated + is_kwargs_deprecated
        != len(deprecated_arg_names_or_tuples)):
      known_args = arg_spec.args + [arg_spec.varargs, arg_spec.keywords]
      missing_args = [arg_name for arg_name in deprecated_arg_names
                      if arg_name not in known_args]
      raise ValueError('The following deprecated arguments are not present '
                       'in the function signature: %s. '
                       'Found next arguments: %s.' % (missing_args, known_args))

    def _same_value(a, b):
      """A comparison operation that works for multiple object types.

      Returns True for two empty lists, two numeric values with the
      same value, etc.

      Returns False for (pd.DataFrame, None), and other pairs which
      should not be considered equivalent.

      Args:
        a: value one of the comparison.
        b: value two of the comparison.

      Returns:
        A boolean indicating whether the two inputs are the same value
        for the purposes of deprecation.
      """
      if a is b:
        return True
      try:
        equality = a == b
        if isinstance(equality, bool):
          return equality
      except TypeError:
        # Objects such as numpy arrays may raise (or return non-bool) on ==;
        # treat those as "not the same value" so the warning still fires.
        return False
      return False

    @functools.wraps(func)
    def new_func(*args, **kwargs):
      """Deprecation wrapper."""
      if _PRINT_DEPRECATION_WARNINGS:
        invalid_args = []
        named_args = tf_inspect.getcallargs(func, *args, **kwargs)
        # Deprecated arguments that were passed positionally.
        for arg_name, spec in iter(deprecated_positions.items()):
          if (spec.position < len(args) and
              not (spec.has_ok_value and
                   _same_value(named_args[arg_name], spec.ok_value))):
            invalid_args.append(arg_name)
        # Anything captured by a deprecated *args or **kwargs parameter.
        if is_varargs_deprecated and len(args) > len(arg_spec.args):
          invalid_args.append(arg_spec.varargs)
        if is_kwargs_deprecated and kwargs:
          invalid_args.append(arg_spec.keywords)
        # Deprecated arguments that were passed by keyword.
        for arg_name in deprecated_arg_names:
          if (arg_name in kwargs and
              not (deprecated_positions[arg_name].has_ok_value and
                   _same_value(named_args[arg_name],
                               deprecated_positions[arg_name].ok_value))):
            invalid_args.append(arg_name)
        for arg_name in invalid_args:
          logging.warning(
              'From %s: calling %s (from %s) with %s is deprecated and will '
              'be removed %s.\nInstructions for updating:\n%s',
              _call_location(), decorator_utils.get_qualified_name(func),
              func.__module__, arg_name,
              'in a future version' if date is None else ('after %s' % date),
              instructions)
      return func(*args, **kwargs)
    return tf_decorator.make_decorator(func, new_func, 'deprecated',
                                       _add_deprecated_arg_notice_to_docstring(
                                           func.__doc__, date, instructions))
  return deprecated_wrapper
def deprecated_arg_values(date, instructions, **deprecated_kwargs):
  """Decorator for marking specific function argument values as deprecated.

  A warning is logged every time the decorated callable receives one of the
  deprecated argument values, in the form:

    Calling <function> (from <module>) with <arg>=<value> is deprecated and
    will be removed after <date>. Instructions for updating:
    <instructions>

  When `date` is None, the phrase 'in a future version' replaces
  'after <date>'. For methods, <function> includes the class name. The
  wrapped callable's docstring is edited as well: ' (deprecated arguments)'
  is appended to its first line and a deprecation notice is inserted before
  the remainder.

  Args:
    date: String or None. The date the function is scheduled to be removed.
      Must be ISO 8601 (YYYY-MM-DD), or None.
    instructions: String. Instructions on how to update code using the
      deprecated function.
    **deprecated_kwargs: Mapping from argument name to its deprecated value.

  Returns:
    Decorated function or method.

  Raises:
    ValueError: If date is not None or in ISO 8601 format, or instructions are
      empty.
  """
  _validate_deprecation_args(date, instructions)
  if not deprecated_kwargs:
    raise ValueError('Specify which argument values are deprecated.')

  def decorator(func):
    """Wraps `func` with the per-value deprecation warnings."""
    decorator_utils.validate_callable(func, 'deprecated_arg_values')

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
      """Deprecation wrapper."""
      if _PRINT_DEPRECATION_WARNINGS:
        removal = 'in a future version' if date is None else 'after %s' % date
        # Resolve every argument to its name so positional uses of a
        # deprecated value are detected too.
        named_args = tf_inspect.getcallargs(func, *args, **kwargs)
        for arg_name, arg_value in deprecated_kwargs.items():
          if arg_name in named_args and named_args[arg_name] == arg_value:
            logging.warning(
                'From %s: calling %s (from %s) with %s=%s is deprecated and '
                'will be removed %s.\nInstructions for updating:\n%s',
                _call_location(), decorator_utils.get_qualified_name(func),
                func.__module__, arg_name, arg_value, removal, instructions)
      return func(*args, **kwargs)

    new_doc = _add_deprecated_arg_notice_to_docstring(
        func.__doc__, date, instructions)
    return tf_decorator.make_decorator(func, wrapper, 'deprecated', new_doc)

  return decorator
def deprecated_argument_lookup(new_name, new_value, old_name, old_value):
  """Picks between a new-style and a deprecated old-style argument.

  At most one of the two values may be supplied; passing both is an error.

  Args:
    new_name: new name of argument
    new_value: value of new argument (or None if not used)
    old_name: old name of argument
    old_value: value of old argument (or None if not used)

  Returns:
    The effective argument that should be used.

  Raises:
    ValueError: if new_value and old_value are both non-null
  """
  if old_value is None:
    return new_value
  # Old-style argument present: the new-style one must not also be set.
  if new_value is not None:
    raise ValueError("Cannot specify both '%s' and '%s'" %
                     (old_name, new_name))
  return old_value
def rewrite_argument_docstring(old_doc, old_argument, new_argument):
  """Returns `old_doc` with mentions of `old_argument` renamed.

  Both backquoted mentions (`old_argument`) and Args-style 'old_argument:'
  labels are rewritten to use `new_argument`.
  """
  renamed = old_doc.replace('`%s`' % old_argument, '`%s`' % new_argument)
  return renamed.replace('%s:' % old_argument, '%s:' % new_argument)
@tf_contextlib.contextmanager
def silence():
  """Temporarily silence deprecation warnings.

  Yields:
    Nothing; deprecation warnings are suppressed for the duration of the
    `with` block.
  """
  global _PRINT_DEPRECATION_WARNINGS
  print_deprecation_warnings = _PRINT_DEPRECATION_WARNINGS
  _PRINT_DEPRECATION_WARNINGS = False
  try:
    yield
  finally:
    # Restore the previous setting even if the managed block raises; without
    # the try/finally an exception inside `with silence():` would leave
    # deprecation warnings disabled for the rest of the process.
    _PRINT_DEPRECATION_WARNINGS = print_deprecation_warnings
| apache-2.0 |
4talesa/rethinkdb | external/v8_3.30.33.16/tools/testrunner/local/perfdata.py | 100 | 4212 | # Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import shelve
import threading
class PerfDataEntry(object):
  """Running approximation of the average of recent test durations."""

  def __init__(self):
    self.avg = 0.0
    self.count = 0

  def AddResult(self, result):
    """Folds |result| into an approximate average of the last ~100 results."""
    kLearnRateLimiter = 99  # Greater value means slower learning.
    # Weight the existing average by the number of prior samples, capped at
    # kLearnRateLimiter so that older data decays away.
    weight = min(self.count, kLearnRateLimiter)
    weighted_total = self.avg * weight + result
    self.count = weight + 1
    self.avg = weighted_total / self.count
class PerfDataStore(object):
  """Shelve-backed persistence for per-testcase duration averages."""

  def __init__(self, datadir, arch, mode):
    filename = os.path.join(datadir, "%s.%s.perfdata" % (arch, mode))
    self.database = shelve.open(filename, protocol=2)
    self.closed = False
    self.lock = threading.Lock()

  def __del__(self):
    self.close()

  def close(self):
    # Idempotent: repeated calls (e.g. explicit close followed by __del__)
    # only close the underlying shelf once.
    if not self.closed:
      self.database.close()
      self.closed = True

  def GetKey(self, test):
    """Computes the key used to access data for the given testcase."""
    return str("%s.%s.%s" %
               (test.suitename(), test.path, "".join(test.flags)))

  def FetchPerfData(self, test):
    """Returns the observed duration for |test| as read from the store."""
    key = self.GetKey(test)
    if key not in self.database:
      return None
    return self.database[key].avg

  def UpdatePerfData(self, test):
    """Updates the persisted value in the store with test.duration."""
    self.RawUpdatePerfData(self.GetKey(test), test.duration)

  def RawUpdatePerfData(self, testkey, duration):
    """Folds |duration| into the stored entry for |testkey| (thread-safe)."""
    with self.lock:
      if testkey in self.database:
        entry = self.database[testkey]
      else:
        entry = PerfDataEntry()
      entry.AddResult(duration)
      self.database[testkey] = entry
class PerfDataManager(object):
  """Hands out one PerfDataStore per (arch, mode) pair under a data dir."""

  def __init__(self, datadir):
    self.datadir = os.path.abspath(datadir)
    if not os.path.exists(self.datadir):
      os.makedirs(self.datadir)
    self.stores = {}  # Keyed by arch, then mode.
    self.closed = False
    self.lock = threading.Lock()

  def __del__(self):
    self.close()

  def close(self):
    # Idempotent; closes every store that was ever handed out.
    if self.closed:
      return
    for modes in self.stores.values():
      for store in modes.values():
        store.close()
    self.closed = True

  def GetStore(self, arch, mode):
    """Returns (creating on first use) the store for |arch|/|mode|."""
    with self.lock:
      modes = self.stores.setdefault(arch, {})
      if mode not in modes:
        modes[mode] = PerfDataStore(self.datadir, arch, mode)
      return modes[mode]
| agpl-3.0 |
onceuponatimeforever/oh-mainline | vendor/packages/scrapy/scrapy/webservice.py | 18 | 3191 | """
Scrapy web services extension
See docs/topics/webservice.rst
"""
from twisted.web import server, error
from scrapy.xlib.pydispatch import dispatcher
from scrapy.exceptions import NotConfigured
from scrapy import log, signals
from scrapy.utils.jsonrpc import jsonrpc_server_call
from scrapy.utils.serialize import ScrapyJSONEncoder, ScrapyJSONDecoder
from scrapy.utils.misc import load_object
from scrapy.utils.txweb import JsonResource as JsonResource_
from scrapy.utils.reactor import listen_tcp
from scrapy.utils.conf import build_component_list
class JsonResource(JsonResource_):
    """Base JSON resource bound to a crawler.

    Extends the generic JsonResource from scrapy.utils.txweb with a
    crawler-aware encoder so responses can serialize Scrapy objects.
    """

    def __init__(self, crawler, target=None):
        # `target` is accepted for signature compatibility with subclasses
        # (JsonRpcResource); it is unused here.
        JsonResource_.__init__(self)
        self.crawler = crawler
        self.json_encoder = ScrapyJSONEncoder(crawler=crawler)
class JsonRpcResource(JsonResource):
    """Exposes a target object over JSON-RPC.

    GET returns the target for serialization; POST dispatches a JSON-RPC
    call to it; child lookup descends into the target's attributes.
    """

    def __init__(self, crawler, target=None):
        JsonResource.__init__(self, crawler, target)
        self.json_decoder = ScrapyJSONDecoder(crawler=crawler)
        self.crawler = crawler
        self._target = target

    def render_GET(self, txrequest):
        return self.get_target()

    def render_POST(self, txrequest):
        request_body = txrequest.content.getvalue()
        return jsonrpc_server_call(self.get_target(), request_body,
                                   self.json_decoder)

    def getChild(self, name, txrequest):
        # Descend into the target's attribute of the same name, wrapping it
        # in a fresh JSON-RPC resource.
        try:
            child_target = getattr(self.get_target(), name)
        except AttributeError:
            return error.NoResource("No such child resource.")
        return JsonRpcResource(self.crawler, child_target)

    def get_target(self):
        return self._target
class RootResource(JsonResource):
    """Web-service root: lists child resource names; '' routes to itself."""

    def render_GET(self, txrequest):
        return {'resources': self.children.keys()}

    def getChild(self, name, txrequest):
        # The empty path segment (trailing slash) maps back to the root.
        if name != '':
            return JsonResource.getChild(self, name, txrequest)
        return self
class WebService(server.Site):
    """Twisted web site exposing the configured JSON-RPC resources.

    Reads its configuration from the crawler settings and listens on the
    first available port of WEBSERVICE_PORT while the engine is running.
    """

    def __init__(self, crawler):
        # Disabled services must not be instantiated at all.
        if not crawler.settings.getbool('WEBSERVICE_ENABLED'):
            raise NotConfigured
        self.crawler = crawler
        logfile = crawler.settings['WEBSERVICE_LOGFILE']
        self.portrange = map(int, crawler.settings.getlist('WEBSERVICE_PORT'))
        self.host = crawler.settings['WEBSERVICE_HOST']
        root = RootResource(crawler)
        # Merge base and user-provided resource maps into one ordered list.
        reslist = build_component_list(crawler.settings['WEBSERVICE_RESOURCES_BASE'], \
            crawler.settings['WEBSERVICE_RESOURCES'])
        for res_cls in map(load_object, reslist):
            res = res_cls(crawler)
            root.putChild(res.ws_name, res)
        server.Site.__init__(self, root, logPath=logfile)
        self.noisy = False
        # Bind the service lifetime to the engine's lifetime.
        dispatcher.connect(self.start_listening, signals.engine_started)
        dispatcher.connect(self.stop_listening, signals.engine_stopped)

    @classmethod
    def from_crawler(cls, crawler):
        # Standard Scrapy extension entry point.
        return cls(crawler)

    def start_listening(self):
        # listen_tcp tries each port in the range and returns the bound port.
        self.port = listen_tcp(self.portrange, self.host, self)
        h = self.port.getHost()
        log.msg("Web service listening on %s:%d" % (h.host, h.port), log.DEBUG)

    def stop_listening(self):
        self.port.stopListening()
| agpl-3.0 |
dxd214/TeamTalk | win-client/3rdParty/src/json/devtools/fixeol.py | 247 | 1941 | import os.path
def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ):
"""Makes sure that all sources have the specified eol sequence (default: unix)."""
if not os.path.isfile( path ):
raise ValueError( 'Path "%s" is not a file' % path )
try:
f = open(path, 'rb')
except IOError, msg:
print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg))
return False
try:
raw_lines = f.readlines()
finally:
f.close()
fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines]
if raw_lines != fixed_lines:
print '%s =>' % path,
if not is_dry_run:
f = open(path, "wb")
try:
f.writelines(fixed_lines)
finally:
f.close()
if verbose:
print is_dry_run and ' NEED FIX' or ' FIXED'
return True
##
##
##
##def _do_fix( is_dry_run = True ):
## from waftools import antglob
## python_sources = antglob.glob( '.',
## includes = '**/*.py **/wscript **/wscript_build',
## excludes = antglob.default_excludes + './waf.py',
## prune_dirs = antglob.prune_dirs + 'waf-* ./build' )
## for path in python_sources:
## _fix_python_source( path, is_dry_run )
##
## cpp_sources = antglob.glob( '.',
## includes = '**/*.cpp **/*.h **/*.inl',
## prune_dirs = antglob.prune_dirs + 'waf-* ./build' )
## for path in cpp_sources:
## _fix_source_eol( path, is_dry_run )
##
##
##def dry_fix(context):
## _do_fix( is_dry_run = True )
##
##def fix(context):
## _do_fix( is_dry_run = False )
##
##def shutdown():
## pass
##
##def check(context):
## # Unit tests are run when "check" target is used
## ut = UnitTest.unit_test()
## ut.change_to_testfile_dir = True
## ut.want_to_see_test_output = True
## ut.want_to_see_test_error = True
## ut.run()
## ut.print_results()
| apache-2.0 |
lesserwhirls/scipy-cwt | scipy/sparse/sparsetools/csr.py | 5 | 32360 | # This file was automatically generated by SWIG (http://www.swig.org).
# Version 2.0.1+capsulehack
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
# This file is compatible with both classic and new-style classes.
from sys import version_info
if version_info >= (2,6,0):
    # On Python >= 2.6, locate the compiled _csr extension next to this
    # module explicitly so it is found even under unusual import setups.
    def swig_import_helper():
        from os.path import dirname
        import imp
        fp = None
        try:
            fp, pathname, description = imp.find_module('_csr', [dirname(__file__)])
        except ImportError:
            # Fall back to a regular import from sys.path.
            import _csr
            return _csr
        if fp is not None:
            try:
                _mod = imp.load_module('_csr', fp, pathname, description)
            finally:
                # find_module opened the file; always close it.
                fp.close()
            return _mod
    _csr = swig_import_helper()
    del swig_import_helper
else:
    import _csr
del version_info
# SWIG-generated compatibility shims for attribute access on proxy classes.
try:
    _swig_property = property
except NameError:
    pass # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self,class_type,name,value,static=1):
    # Route attribute writes through SWIG's setter table; with static=1,
    # refuse to create attributes that the wrapped C type does not declare.
    if (name == "thisown"): return self.this.own(value)
    if (name == "this"):
        if type(value).__name__ == 'SwigPyObject':
            self.__dict__[name] = value
            return
    method = class_type.__swig_setmethods__.get(name,None)
    if method: return method(self,value)
    if (not static) or hasattr(self,name):
        self.__dict__[name] = value
    else:
        raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self,class_type,name,value):
    # Dynamic variant: always allows adding new Python-side attributes.
    return _swig_setattr_nondynamic(self,class_type,name,value,0)
def _swig_getattr(self,class_type,name):
    # Route attribute reads through SWIG's getter table.
    if (name == "thisown"): return self.this.own()
    method = class_type.__swig_getmethods__.get(name,None)
    if method: return method(self)
    raise AttributeError(name)
def _swig_repr(self):
    # Best-effort repr; falls back to an empty description when the
    # underlying C pointer is unavailable.
    try: strthis = "proxy of " + self.this.__repr__()
    except: strthis = ""
    return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
try:
    _object = object
    _newclass = 1
except AttributeError:
    # Pre-new-style-class Python: fall back to a classic base class.
    class _object : pass
    _newclass = 0
def expandptr(*args):
    """expandptr(int n_row, int Ap, int Bi)"""
    # Generated SWIG dispatch stub; the implementation lives in the
    # compiled _csr extension module.
    return _csr.expandptr(*args)
def csr_matmat_pass1(*args):
    """
    csr_matmat_pass1(int n_row, int n_col, int Ap, int Aj, int Bp, int Bj,
                     int Cp)
    """
    # Generated SWIG dispatch stub; the implementation lives in the
    # compiled _csr extension module.
    return _csr.csr_matmat_pass1(*args)
def csr_count_blocks(*args):
    """csr_count_blocks(int n_row, int n_col, int R, int C, int Ap, int Aj) -> int"""
    # Generated SWIG dispatch stub; the implementation lives in the
    # compiled _csr extension module.
    return _csr.csr_count_blocks(*args)
def csr_has_sorted_indices(*args):
    """csr_has_sorted_indices(int n_row, int Ap, int Aj) -> bool"""
    # Generated SWIG dispatch stub; the implementation lives in the
    # compiled _csr extension module.
    return _csr.csr_has_sorted_indices(*args)
def csr_diagonal(*args):
    """
    csr_diagonal(int n_row, int n_col, int Ap, int Aj, T Ax, T Yx)

    Overloaded for value type T in: signed/unsigned char, short,
    unsigned short, int, unsigned int, long long, unsigned long long,
    float, double, long double, npy_cfloat_wrapper, npy_cdouble_wrapper,
    npy_clongdouble_wrapper.
    """
    # Generated SWIG dispatch stub; the implementation lives in the
    # compiled _csr extension module.
    return _csr.csr_diagonal(*args)
def csr_scale_rows(*args):
    """
    csr_scale_rows(int n_row, int n_col, int Ap, int Aj, T Ax, T Xx)

    Overloaded for value type T in: signed/unsigned char, short,
    unsigned short, int, unsigned int, long long, unsigned long long,
    float, double, long double, npy_cfloat_wrapper, npy_cdouble_wrapper,
    npy_clongdouble_wrapper.
    """
    # Generated SWIG dispatch stub; the implementation lives in the
    # compiled _csr extension module.
    return _csr.csr_scale_rows(*args)
def csr_scale_columns(*args):
    """
    csr_scale_columns(int n_row, int n_col, int Ap, int Aj, T Ax, T Xx)

    Overloaded for value type T in: signed/unsigned char, short,
    unsigned short, int, unsigned int, long long, unsigned long long,
    float, double, long double, npy_cfloat_wrapper, npy_cdouble_wrapper,
    npy_clongdouble_wrapper.
    """
    # Generated SWIG dispatch stub; the implementation lives in the
    # compiled _csr extension module.
    return _csr.csr_scale_columns(*args)
def csr_tocsc(*args):
    """
    csr_tocsc(int n_row, int n_col, int Ap, int Aj, T Ax,
              int Bp, int Bi, T Bx)

    Overloaded for value type T in: signed/unsigned char, short,
    unsigned short, int, unsigned int, long long, unsigned long long,
    float, double, long double, npy_cfloat_wrapper, npy_cdouble_wrapper,
    npy_clongdouble_wrapper.
    """
    # Generated SWIG dispatch stub; the implementation lives in the
    # compiled _csr extension module.
    return _csr.csr_tocsc(*args)
def csr_tobsr(*args):
    """
    csr_tobsr(int n_row, int n_col, int R, int C, int Ap, int Aj,
              T Ax, int Bp, int Bj, T Bx)

    Overloaded for value type T in: signed/unsigned char, short,
    unsigned short, int, unsigned int, long long, unsigned long long,
    float, double, long double, npy_cfloat_wrapper, npy_cdouble_wrapper,
    npy_clongdouble_wrapper.
    """
    # Generated SWIG dispatch stub; the implementation lives in the
    # compiled _csr extension module.
    return _csr.csr_tobsr(*args)
def csr_matmat_pass2(*args):
    """
    csr_matmat_pass2(int n_row, int n_col, int Ap, int Aj, T Ax,
                     int Bp, int Bj, T Bx, int Cp, int Cj, T Cx)

    Overloaded for value type T in: signed/unsigned char, short,
    unsigned short, int, unsigned int, long long, unsigned long long,
    float, double, long double, npy_cfloat_wrapper, npy_cdouble_wrapper,
    npy_clongdouble_wrapper.
    """
    # Generated SWIG dispatch stub; the implementation lives in the
    # compiled _csr extension module.
    return _csr.csr_matmat_pass2(*args)
def csr_matvec(*args):
    """
    csr_matvec(int n_row, int n_col, int Ap, int Aj, T Ax, T Xx, T Yx)

    Overloaded for value type T in: signed/unsigned char, short,
    unsigned short, int, unsigned int, long long, unsigned long long,
    float, double, long double, npy_cfloat_wrapper, npy_cdouble_wrapper,
    npy_clongdouble_wrapper.
    """
    # Generated SWIG dispatch stub; the implementation lives in the
    # compiled _csr extension module.
    return _csr.csr_matvec(*args)
def csr_matvecs(*args):
    """
    csr_matvecs(int n_row, int n_col, int n_vecs, int Ap, int Aj, T Ax,
                T Xx, T Yx)

    Overloaded for value type T in: signed/unsigned char, short,
    unsigned short, int, unsigned int, long long, unsigned long long,
    float, double, long double, npy_cfloat_wrapper, npy_cdouble_wrapper,
    npy_clongdouble_wrapper.
    """
    # Generated SWIG dispatch stub; the implementation lives in the
    # compiled _csr extension module.
    return _csr.csr_matvecs(*args)
def csr_elmul_csr(*args):
    """
    csr_elmul_csr(int n_row, int n_col, int Ap, int Aj, T Ax,
                  int Bp, int Bj, T Bx, int Cp, int Cj, T Cx)

    Overloaded for value type T in: signed/unsigned char, short,
    unsigned short, int, unsigned int, long long, unsigned long long,
    float, double, long double, npy_cfloat_wrapper, npy_cdouble_wrapper,
    npy_clongdouble_wrapper.
    """
    # Generated SWIG dispatch stub; the implementation lives in the
    # compiled _csr extension module.
    return _csr.csr_elmul_csr(*args)
def csr_eldiv_csr(*args):
    """
    csr_eldiv_csr(int n_row, int n_col, int Ap, int Aj, T Ax,
                  int Bp, int Bj, T Bx, int Cp, int Cj, T Cx)

    Overloaded for value type T in: signed/unsigned char, short,
    unsigned short, int, unsigned int, long long, unsigned long long,
    float, double, long double, npy_cfloat_wrapper, npy_cdouble_wrapper,
    npy_clongdouble_wrapper.
    """
    # Generated SWIG dispatch stub; the implementation lives in the
    # compiled _csr extension module.
    return _csr.csr_eldiv_csr(*args)
def csr_plus_csr(*args):
    """
    csr_plus_csr(int n_row, int n_col, int Ap, int Aj, T Ax,
                 int Bp, int Bj, T Bx, int Cp, int Cj, T Cx)

    Overloaded for value type T in: signed/unsigned char, short,
    unsigned short, int, unsigned int, long long, unsigned long long,
    float, double, long double, npy_cfloat_wrapper, npy_cdouble_wrapper,
    npy_clongdouble_wrapper.
    """
    # Generated SWIG dispatch stub; the implementation lives in the
    # compiled _csr extension module.
    return _csr.csr_plus_csr(*args)
def csr_minus_csr(*args):
    """
    csr_minus_csr(int n_row, int n_col, int Ap, int Aj, T Ax,
                  int Bp, int Bj, T Bx, int Cp, int Cj, T Cx)

    Overloaded for value type T in: signed/unsigned char, short,
    unsigned short, int, unsigned int, long long, unsigned long long,
    float, double, long double, npy_cfloat_wrapper, npy_cdouble_wrapper,
    npy_clongdouble_wrapper.
    """
    # Generated SWIG dispatch stub; the implementation lives in the
    # compiled _csr extension module.
    return _csr.csr_minus_csr(*args)
def csr_sort_indices(*args):
    """
    csr_sort_indices(int n_row, int Ap, int Aj, T Ax)

    Overloaded for value type T in: signed/unsigned char, short,
    unsigned short, int, unsigned int, long long, unsigned long long,
    float, double, long double, npy_cfloat_wrapper, npy_cdouble_wrapper,
    npy_clongdouble_wrapper.
    """
    # Generated SWIG dispatch stub; the implementation lives in the
    # compiled _csr extension module.
    return _csr.csr_sort_indices(*args)
def csr_eliminate_zeros(*args):
    """
    csr_eliminate_zeros(int n_row, int n_col, int Ap, int Aj, T Ax)

    Dispatch to the typed C++ implementation in the ``_csr`` extension,
    selected from the runtime argument types.  Overloads exist for every
    value type T: signed/unsigned char, short, int, long long, float,
    double, long double, and the npy_cfloat/cdouble/clongdouble wrappers.
    """
    return _csr.csr_eliminate_zeros(*args)
def csr_sum_duplicates(*args):
    """
    csr_sum_duplicates(int n_row, int n_col, int Ap, int Aj, T Ax)

    Dispatch to the typed C++ implementation in the ``_csr`` extension,
    selected from the runtime argument types.  Overloads exist for every
    value type T: signed/unsigned char, short, int, long long, float,
    double, long double, and the npy_cfloat/cdouble/clongdouble wrappers.
    """
    return _csr.csr_sum_duplicates(*args)
def get_csr_submatrix(*args):
    """
    get_csr_submatrix(int n_row, int n_col, int Ap, int Aj, T Ax,
                      int ir0, int ir1, int ic0, int ic1)

    Dispatch to the typed C++ implementation in the ``_csr`` extension,
    selected from the runtime argument types.  Overloads exist for every
    value type T: signed/unsigned char, short, int, long long, float,
    double, long double, and the npy_cfloat/cdouble/clongdouble wrappers.
    """
    return _csr.get_csr_submatrix(*args)
def csr_sample_values(*args):
    """
    csr_sample_values(int n_row, int n_col, int Ap, int Aj, T Ax,
                      int n_samples, int Bi, int Bj, T Bx)

    Dispatch to the typed C++ implementation in the ``_csr`` extension,
    selected from the runtime argument types.  Overloads exist for every
    value type T: signed/unsigned char, short, int, long long, float,
    double, long double, and the npy_cfloat/cdouble/clongdouble wrappers.
    """
    return _csr.csr_sample_values(*args)
| bsd-3-clause |
Pretio/boto | boto/codedeploy/__init__.py | 113 | 1663 | # Copyright (c) 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
from boto.regioninfo import RegionInfo, get_regions
def regions():
    """Get all available regions for the AWS CodeDeploy service.

    :rtype: list
    :return: A list of :class:`boto.regioninfo.RegionInfo`
    """
    # Imported lazily to avoid a circular import at module load time.
    from boto.codedeploy.layer1 import CodeDeployConnection
    connection_cls = CodeDeployConnection
    return get_regions('codedeploy', connection_cls=connection_cls)
def connect_to_region(region_name, **kw_params):
    """Connect to the CodeDeploy endpoint in *region_name*.

    Returns ``None`` when the region name is not known.
    """
    match = next((r for r in regions() if r.name == region_name), None)
    if match is None:
        return None
    return match.connect(**kw_params)
| mit |
nebril/fuel-web | fuel_agent/fuel_agent/tests/test_md_utils.py | 2 | 10607 | # Copyright 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from oslotest import base as test_base
import six
from fuel_agent import errors
from fuel_agent.utils import hardware as hu
from fuel_agent.utils import md as mu
from fuel_agent.utils import utils
# Fully qualified name of the builtin open() for mock.patch: the builtin
# lives in __builtin__ on Python 2 and in builtins on Python 3.
if six.PY2:
    OPEN_FUNCTION_NAME = '__builtin__.open'
else:
    OPEN_FUNCTION_NAME = 'builtins.open'
class TestMdUtils(test_base.BaseTestCase):
    """Unit tests for the mdadm (software RAID) helpers in fuel_agent.utils.md."""

    @mock.patch('fuel_agent.utils.md.utils.execute')
    def test_mddisplay_nostate_detail(self, mock_exec):
        # An IMSM container array has no 'State' line in `mdadm --detail`
        # output; mddisplay() must still parse it and collect the members.
        mock_exec.return_value = (
            """/dev/md127:
Version : imsm
Raid Level : container
Total Devices : 2
Working Devices : 2
UUID : 46a4fc60:21554de1:1edfad0f:c137ddac
Member Arrays :
Number Major Minor RaidDevice
0 8 0 - /dev/sda
1 8 16 - /dev/sdb""",
            ''
        )
        expected = [{
            'Raid Level': 'container',
            'UUID': '46a4fc60:21554de1:1edfad0f:c137ddac',
            'Version': 'imsm',
            'devices': ['/dev/sda', '/dev/sdb'],
            'name': '/dev/md127',
        }]
        mds = mu.mddisplay(['/dev/md127'])
        mock_exec.assert_called_once_with(
            'mdadm', '--detail', '/dev/md127', check_exit_code=[0])
        # NOTE(review): assertItemsEqual is a Python 2 / testtools name;
        # confirm it exists under Python 3 (where unittest calls it
        # assertCountEqual).
        self.assertItemsEqual(expected, mds)

    @mock.patch.object(utils, 'execute')
    def test_mddisplay(self, mock_exec):
        # should read file /proc/mdstat
        # should get detailed description for all md devices
        # should return list of dicts representing md devices
        mock_open_data = """Personalities : [raid1]
md0 : active raid1 loop5[1] loop4[0]
102272 blocks super 1.2 [2/2] [UU]
unused devices: <none>
"""
        mock_open = mock.mock_open(read_data=mock_open_data)
        patcher = mock.patch(OPEN_FUNCTION_NAME, new=mock_open)
        patcher.start()
        mock_exec.return_value = (
            """/dev/md0:
Version : 1.2
Creation Time : Wed Jun 18 18:44:57 2014
Raid Level : raid1
Array Size : 102272 (99.89 MiB 104.73 MB)
Used Dev Size : 102272 (99.89 MiB 104.73 MB)
Raid Devices : 2
Total Devices : 2
Persistence : Superblock is persistent
Update Time : Wed Jun 18 18:45:01 2014
State : clean
Active Devices : 2
Working Devices : 2
Failed Devices : 0
Spare Devices : 0
Name : localhost.localdomain:0 (local to host
localhost.localdomain)
UUID : 12dd4cfc:6b2ac9db:94564538:a6ffee82
Events : 17
Number Major Minor RaidDevice State
0 7 4 0 active sync /dev/loop4
1 7 5 1 active sync /dev/loop5""",
            ''
        )
        expected = [{
            'name': '/dev/md0',
            'Version': '1.2',
            'Raid Level': 'raid1',
            'Raid Devices': '2',
            'Active Devices': '2',
            'Spare Devices': '0',
            'Failed Devices': '0',
            'State': 'clean',
            'UUID': '12dd4cfc:6b2ac9db:94564538:a6ffee82',
            'devices': ['/dev/loop4', '/dev/loop5']
        }]
        mds = mu.mddisplay()
        mock_exec.assert_called_once_with(
            'mdadm', '--detail', '/dev/md0', check_exit_code=[0])
        key = lambda x: x['name']
        self.assertEqual(sorted(expected, key=key), sorted(mds, key=key))
        patcher.stop()

    @mock.patch.object(mu, 'mdclean')
    @mock.patch.object(hu, 'list_block_devices')
    @mock.patch.object(mu, 'mddisplay')
    @mock.patch.object(utils, 'execute')
    def test_mdcreate_ok(self, mock_exec, mock_mddisplay,
                         mock_bdevs, mock_mdclean):
        # should check if md already exists
        # should check if md level is valid
        # should check if all necessary devices exist
        # should check if all devices are not parts of some md
        # should clean md metadata which possibly are on all devices
        # should run mdadm command to create new md
        mock_mddisplay.return_value = \
            [{'name': '/dev/md10', 'devices': ['/dev/fake10']},
             # should also accept devices with missing 'devices' entry
             {'name': '/dev/md11'}]
        mock_bdevs.return_value = [{'device': '/dev/fake1'},
                                   {'device': '/dev/fake2'}]
        mu.mdcreate('/dev/md0', 'mirror', '/dev/fake1', '/dev/fake2')
        mock_mdclean_expected_calls = [mock.call('/dev/fake1'),
                                       mock.call('/dev/fake2')]
        self.assertEqual(mock_mdclean_expected_calls,
                         mock_mdclean.call_args_list)
        mock_exec.assert_called_once_with(
            'mdadm', '--create', '--force', '/dev/md0', '-e0.90',
            '--level=mirror',
            '--raid-devices=2', '/dev/fake1', '/dev/fake2',
            check_exit_code=[0])

    @mock.patch.object(mu, 'mddisplay')
    def test_mdcreate_duplicate(self, mock_mddisplay):
        # should check if md already exists
        # should raise error if it exists
        mock_mddisplay.return_value = [{'name': '/dev/md0'}]
        self.assertRaises(
            errors.MDAlreadyExistsError, mu.mdcreate,
            '/dev/md0', 'mirror', '/dev/fake')

    @mock.patch.object(mu, 'mddisplay')
    def test_mdcreate_unsupported_level(self, mock_mddisplay):
        # should check if md level is valid
        # should raise error if it is not
        mock_mddisplay.return_value = [{'name': '/dev/md10'}]
        self.assertRaises(
            errors.MDWrongSpecError, mu.mdcreate,
            '/dev/md0', 'badlevel', '/dev/fake')

    @mock.patch.object(hu, 'list_block_devices')
    @mock.patch.object(mu, 'mddisplay')
    def test_mdcreate_device_not_found(self, mock_mddisplay, mock_bdevs):
        # should check if all devices exist
        # should raise error if at least one device does not
        mock_mddisplay.return_value = [{'name': '/dev/md10'}]
        mock_bdevs.return_value = [{'device': '/dev/fake1'},
                                   {'device': '/dev/fake10'}]
        self.assertRaises(
            errors.MDNotFoundError, mu.mdcreate,
            '/dev/md0', 'mirror', '/dev/fake1', '/dev/fake2')

    @mock.patch.object(hu, 'list_block_devices')
    @mock.patch.object(mu, 'mddisplay')
    def test_mdcreate_device_attached(self, mock_mddisplay, mock_bdevs):
        # should check if all necessary devices are not attached to some md
        # should raise error if at least one device is attached
        mock_mddisplay.return_value = [{'name': '/dev/md10',
                                        'devices': ['/dev/fake2']}]
        mock_bdevs.return_value = [{'device': '/dev/fake1'},
                                   {'device': '/dev/fake2'}]
        self.assertRaises(
            errors.MDDeviceDuplicationError, mu.mdcreate,
            '/dev/md0', 'mirror', '/dev/fake1', '/dev/fake2')

    @mock.patch.object(utils, 'execute')
    @mock.patch.object(mu, 'mdclean')
    @mock.patch.object(hu, 'list_block_devices')
    @mock.patch.object(mu, 'mddisplay')
    def test_mdcreate_device_clean(self, mock_mddisplay,
                                   mock_bdevs, mock_mdclean, mock_exec):
        # should clean md metadata on all devices before building new md
        mock_mddisplay.return_value = []
        mock_bdevs.return_value = [{'device': '/dev/fake1'},
                                   {'device': '/dev/fake2'}]
        mu.mdcreate('/dev/md0', 'mirror', '/dev/fake1', '/dev/fake2')
        expected_calls = [mock.call('/dev/fake1'), mock.call('/dev/fake2')]
        self.assertEqual(mock_mdclean.call_args_list, expected_calls)

    @mock.patch.object(mu, 'mdclean')
    @mock.patch.object(mu, 'mdremove')
    @mock.patch.object(mu, 'mddisplay')
    def test_mdclean_all(self, mock_mddisplay, mock_mdremove, mock_mdclean):
        # mddisplay() is consulted repeatedly until no arrays remain, so
        # /dev/md11 is removed twice before the empty result ends the loop.
        mock_mddisplay.side_effect = [
            [{'name': '/dev/md10', 'devices': ['/dev/fake10']},
             {'name': '/dev/md11'}],
            [{'name': '/dev/md11'}],
            []
        ]
        mu.mdclean_all()
        mock_mdremove_expected_calls = [
            mock.call('/dev/md10'), mock.call('/dev/md11'),
            mock.call('/dev/md11')]
        mock_mdclean.assert_called_once_with('/dev/fake10')
        self.assertEqual(mock_mdremove.call_args_list,
                         mock_mdremove_expected_calls)

    @mock.patch.object(mu, 'mdclean')
    @mock.patch.object(mu, 'mdremove')
    @mock.patch.object(mu, 'mddisplay')
    def test_mdclean_all_fail(self, mock_mddisplay, mock_mdremove,
                              mock_mdclean):
        # An array that survives every removal attempt must raise.
        mock_mddisplay.return_value = [{'name': '/dev/md11'}]
        self.assertRaises(errors.MDRemovingError, mu.mdclean_all)

    @mock.patch.object(utils, 'execute')
    @mock.patch.object(mu, 'get_mdnames')
    def test_mdremove_ok(self, mock_get_mdn, mock_exec):
        # should check if md exists
        # should run mdadm command to remove md device
        mock_get_mdn.return_value = ['/dev/md0']
        expected_calls = [
            mock.call('udevadm', 'settle', '--quiet', check_exit_code=[0]),
            mock.call('mdadm', '--stop', '/dev/md0', check_exit_code=[0]),
            mock.call('mdadm', '--remove', '/dev/md0', check_exit_code=[0, 1])
        ]
        mu.mdremove('/dev/md0')
        self.assertEqual(mock_exec.call_args_list, expected_calls)

    @mock.patch.object(mu, 'get_mdnames')
    def test_mdremove_notfound(self, mock_get_mdn):
        # should check if md exists
        # should raise error if it does not
        mock_get_mdn.return_value = ['/dev/md0']
        self.assertRaises(
            errors.MDNotFoundError, mu.mdremove, '/dev/md1')

    @mock.patch.object(utils, 'execute')
    def test_mdclean(self, mock_exec):
        # Zeroing the superblock wipes md metadata from the device.
        mu.mdclean('/dev/md0')
        mock_exec.assert_called_once_with('mdadm', '--zero-superblock',
                                          '--force', '/dev/md0',
                                          check_exit_code=[0])
| apache-2.0 |
Dekken/tick | tick/prox/prox_l2sq.py | 2 | 2481 | # License: BSD 3 clause
# -*- coding: utf8 -*-
import numpy as np
from .base import Prox
from .build.prox import ProxL2SqDouble as _ProxL2sqDouble
from .build.prox import ProxL2SqFloat as _ProxL2sqFloat
__author__ = 'Stephane Gaiffas'

# Maps a numpy dtype to the matching typed C++ prox implementation class.
dtype_map = {
    np.dtype("float64"): _ProxL2sqDouble,
    np.dtype("float32"): _ProxL2sqFloat
}
class ProxL2Sq(Prox):
    """Proximal operator of the squared L2 norm (ridge penalization)

    Parameters
    ----------
    strength : `float`, default=0.
        Level of L2 penalization

    range : `tuple` of two `int`, default=`None`
        Range on which the prox is applied. If `None` then the prox is
        applied on the whole vector

    positive : `bool`, default=`False`
        If True, apply L2 penalization together with a projection
        onto the set of vectors with non-negative entries

    Attributes
    ----------
    dtype : `{'float64', 'float32'}`
        Type of the arrays used.
    """

    # ``cpp_setter`` keeps the underlying C++ prox object in sync whenever
    # the Python attribute is rebound after construction.
    _attrinfos = {
        "strength": {
            "writable": True,
            "cpp_setter": "set_strength"
        },
        "positive": {
            "writable": True,
            "cpp_setter": "set_positive"
        }
    }

    def __init__(self, strength: float, range: tuple = None,
                 positive: bool = False):
        """Create the prox and build its C++ backend (float64 by default)."""
        Prox.__init__(self, range)
        self.positive = positive
        self.strength = strength
        self._prox = self._build_cpp_prox("float64")

    def _call(self, coeffs: np.ndarray, step: object, out: np.ndarray):
        # Delegates to the C++ implementation; the result is written into
        # ``out`` in place.
        self._prox.call(coeffs, step, out)

    def value(self, coeffs: np.ndarray):
        """
        Returns the value of the penalization at ``coeffs``

        Parameters
        ----------
        coeffs : `numpy.ndarray`, shape=(n_coeffs,)
            The value of the penalization is computed at this point

        Returns
        -------
        output : `float`
            Value of the penalization at ``coeffs``
        """
        return self._prox.value(coeffs)

    def _build_cpp_prox(self, dtype_or_object_with_dtype):
        # Resolve the working dtype, then instantiate the matching typed
        # C++ class, with or without an application range.
        self.dtype = self._extract_dtype(dtype_or_object_with_dtype)
        prox_class = self._get_typed_class(dtype_or_object_with_dtype,
                                           dtype_map)
        if self.range is None:
            return prox_class(self.strength, self.positive)
        else:
            return prox_class(self.strength, self.range[0], self.range[1],
                              self.positive)
dimkal/mne-python | mne/io/bti/read.py | 10 | 3534 | # Authors: Denis A. Engemann <denis.engemann@gmail.com>
# simplified BSD-3 license
import struct
import numpy as np
from ...externals.six import b
def _unpack_matrix(fid, fmt, rows, cols, dtype):
""" Aux Function """
out = np.zeros((rows, cols), dtype=dtype)
bsize = struct.calcsize(fmt)
string = fid.read(bsize)
data = struct.unpack(fmt, string)
iter_mat = [(r, c) for r in range(rows) for c in range(cols)]
for idx, (row, col) in enumerate(iter_mat):
out[row, col] = data[idx]
return out
def _unpack_simple(fid, fmt, count):
""" Aux Function """
bsize = struct.calcsize(fmt)
string = fid.read(bsize)
data = list(struct.unpack(fmt, string))
out = data if count < 2 else list(data)
if len(out) > 0:
out = out[0]
return out
def read_str(fid, count=1):
    """Read a fixed-width ASCII string, truncated at the first NUL byte."""
    fmt = '>' + ('c' * count)
    chars = list(struct.unpack(fmt, fid.read(struct.calcsize(fmt))))
    terminator = b('\x00')
    end = chars.index(terminator) if terminator in chars else count
    bytestr = b('').join(chars[:end])
    return str(bytestr.decode('ascii'))  # native str type on Py2 and Py3
def read_char(fid, count=1):
    """ Read character from bti file """
    return _unpack_simple(fid, '>' + ('c' * count), count)
def read_bool(fid, count=1):
    """Read a boolean value from a bti file."""
    fmt = '>' + '?' * count
    return _unpack_simple(fid, fmt, count)
def read_uint8(fid, count=1):
    """Read an unsigned 8-bit integer from a bti file."""
    fmt = '>' + 'B' * count
    return _unpack_simple(fid, fmt, count)
def read_int8(fid, count=1):
    """Read a signed 8-bit integer from a bti file."""
    fmt = '>' + 'b' * count
    return _unpack_simple(fid, fmt, count)
def read_uint16(fid, count=1):
    """Read an unsigned 16-bit big-endian integer from a bti file."""
    fmt = '>' + 'H' * count
    return _unpack_simple(fid, fmt, count)
def read_int16(fid, count=1):
    """Read a signed 16-bit big-endian integer from a bti file.

    Bug fix: this previously used the struct format 'H' (unsigned 16-bit),
    which silently misread negative values — e.g. -1 decoded as 65535.
    'h' is the signed 16-bit format matching the function's contract.
    """
    return _unpack_simple(fid, '>' + ('h' * count), count)
def read_uint32(fid, count=1):
    """Read an unsigned 32-bit big-endian integer from a bti file."""
    fmt = '>' + 'I' * count
    return _unpack_simple(fid, fmt, count)
def read_int32(fid, count=1):
    """Read a signed 32-bit big-endian integer from a bti file."""
    fmt = '>' + 'i' * count
    return _unpack_simple(fid, fmt, count)
def read_uint64(fid, count=1):
    """Read an unsigned 64-bit big-endian integer from a bti file."""
    fmt = '>' + 'Q' * count
    return _unpack_simple(fid, fmt, count)
def read_int64(fid, count=1):
    """Read a signed 64-bit big-endian integer from a bti file."""
    fmt = '>' + 'q' * count
    return _unpack_simple(fid, fmt, count)
def read_float(fid, count=1):
    """Read a 32-bit big-endian float from a bti file."""
    fmt = '>' + 'f' * count
    return _unpack_simple(fid, fmt, count)
def read_double(fid, count=1):
    """Read a 64-bit big-endian float from a bti file."""
    fmt = '>' + 'd' * count
    return _unpack_simple(fid, fmt, count)
def read_int16_matrix(fid, rows, cols):
    """Read a (rows, cols) matrix of signed 16-bit integers from a bti file."""
    fmt = '>' + 'h' * (rows * cols)
    return _unpack_matrix(fid, fmt, rows, cols, np.int16)
def read_float_matrix(fid, rows, cols):
    """Read a (rows, cols) matrix of 32-bit floats from a bti file."""
    fmt = '>' + 'f' * (rows * cols)
    return _unpack_matrix(fid, fmt, rows, cols, 'f4')
def read_double_matrix(fid, rows, cols):
    """Read a (rows, cols) matrix of 64-bit floats from a bti file."""
    fmt = '>' + 'd' * (rows * cols)
    return _unpack_matrix(fid, fmt, rows, cols, 'f8')
def read_transform(fid):
    """Read a 4x4 transform of 64-bit floats from a bti file."""
    fmt = '>' + 'd' * 16
    return _unpack_matrix(fid, fmt, 4, 4, 'f8')
| bsd-3-clause |
kfwang/Glance-OVA-OVF | glance/registry/api/v1/__init__.py | 20 | 3478 | # Copyright 2010-2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from glance.common import wsgi
from glance.registry.api.v1 import images
from glance.registry.api.v1 import members
def init(mapper):
    """Register every v1 registry route (images and members) on *mapper*."""
    images_resource = images.create_resource()

    # Image collection routes.
    mapper.connect("/",
                   controller=images_resource,
                   action="index")
    mapper.connect("/images",
                   controller=images_resource,
                   action="index",
                   conditions={'method': ['GET']})
    mapper.connect("/images",
                   controller=images_resource,
                   action="create",
                   conditions={'method': ['POST']})
    mapper.connect("/images/detail",
                   controller=images_resource,
                   action="detail",
                   conditions={'method': ['GET']})

    # Individual image routes (show/update/delete by id).
    mapper.connect("/images/{id}",
                   controller=images_resource,
                   action="show",
                   conditions=dict(method=["GET"]))
    mapper.connect("/images/{id}",
                   controller=images_resource,
                   action="update",
                   conditions=dict(method=["PUT"]))
    mapper.connect("/images/{id}",
                   controller=images_resource,
                   action="delete",
                   conditions=dict(method=["DELETE"]))

    # Image membership routes.
    members_resource = members.create_resource()

    mapper.connect("/images/{image_id}/members",
                   controller=members_resource,
                   action="index",
                   conditions={'method': ['GET']})
    mapper.connect("/images/{image_id}/members",
                   controller=members_resource,
                   action="create",
                   conditions={'method': ['POST']})
    mapper.connect("/images/{image_id}/members",
                   controller=members_resource,
                   action="update_all",
                   conditions=dict(method=["PUT"]))
    mapper.connect("/images/{image_id}/members/{id}",
                   controller=members_resource,
                   action="show",
                   conditions={'method': ['GET']})
    mapper.connect("/images/{image_id}/members/{id}",
                   controller=members_resource,
                   action="update",
                   conditions={'method': ['PUT']})
    mapper.connect("/images/{image_id}/members/{id}",
                   controller=members_resource,
                   action="delete",
                   conditions={'method': ['DELETE']})

    # Images shared with a given member.
    mapper.connect("/shared-images/{id}",
                   controller=members_resource,
                   action="index_shared_images")
class API(wsgi.Router):
    """WSGI entry point for all Registry requests."""

    def __init__(self, mapper):
        # Fall back to a fresh APIMapper when no mapper is supplied.
        if not mapper:
            mapper = wsgi.APIMapper()
        init(mapper)
        super(API, self).__init__(mapper)
| apache-2.0 |
overtherain/scriptfile | software/googleAppEngine/lib/django_1_2/tests/modeltests/get_or_create/tests.py | 39 | 1929 | from datetime import date
from django.db import IntegrityError
from django.test import TransactionTestCase
from models import Person, ManualPrimaryKeyTest
class GetOrCreateTests(TransactionTestCase):
    """Exercise QuerySet.get_or_create() for hits, misses and conflicts."""

    def test_get_or_create(self):
        # Pre-existing row: get_or_create() must fetch it, not duplicate it.
        p = Person.objects.create(
            first_name='John', last_name='Lennon', birthday=date(1940, 10, 9)
        )
        p, created = Person.objects.get_or_create(
            first_name="John", last_name="Lennon", defaults={
                "birthday": date(1940, 10, 9)
            }
        )
        self.assertFalse(created)
        self.assertEqual(Person.objects.count(), 1)

        # Missing row: a new Person is created from kwargs plus defaults.
        p, created = Person.objects.get_or_create(
            first_name='George', last_name='Harrison', defaults={
                'birthday': date(1943, 2, 25)
            }
        )
        self.assertTrue(created)
        self.assertEqual(Person.objects.count(), 2)

        # If we execute the exact same statement, it won't create a Person.
        p, created = Person.objects.get_or_create(
            first_name='George', last_name='Harrison', defaults={
                'birthday': date(1943, 2, 25)
            }
        )
        self.assertFalse(created)
        self.assertEqual(Person.objects.count(), 2)

        # If you don't specify a value or default value for all required
        # fields, you will get an error.
        self.assertRaises(IntegrityError,
            Person.objects.get_or_create, first_name="Tom", last_name="Smith"
        )

        # If you specify an existing primary key, but different other fields,
        # then you will get an error and data will not be updated.
        m = ManualPrimaryKeyTest.objects.create(id=1, data="Original")
        self.assertRaises(IntegrityError,
            ManualPrimaryKeyTest.objects.get_or_create, id=1, data="Different"
        )
        self.assertEqual(ManualPrimaryKeyTest.objects.get(id=1).data, "Original")
| mit |
ryanahall/django | django/views/generic/base.py | 281 | 7690 | from __future__ import unicode_literals
import logging
from functools import update_wrapper
from django import http
from django.core.exceptions import ImproperlyConfigured
from django.core.urlresolvers import NoReverseMatch, reverse
from django.template.response import TemplateResponse
from django.utils import six
from django.utils.decorators import classonlymethod
# Shared logger for request-handling warnings raised by the generic views.
logger = logging.getLogger('django.request')
class ContextMixin(object):
    """
    Default context mixin: exposes the keyword arguments received by
    get_context_data as the template context, plus the view itself.
    """

    def get_context_data(self, **kwargs):
        # Only inject the view when the caller has not supplied one.
        kwargs.setdefault('view', self)
        return kwargs
class View(object):
    """
    Intentionally simple parent class for all views. Only implements
    dispatch-by-method and simple sanity checking.
    """

    # HTTP verbs a subclass may implement; dispatch() rejects anything else.
    http_method_names = ['get', 'post', 'put', 'patch', 'delete', 'head', 'options', 'trace']

    def __init__(self, **kwargs):
        """
        Constructor. Called in the URLconf; can contain helpful extra
        keyword arguments, and other things.
        """
        # Go through keyword arguments, and either save their values to our
        # instance, or raise an error.
        for key, value in six.iteritems(kwargs):
            setattr(self, key, value)

    @classonlymethod
    def as_view(cls, **initkwargs):
        """
        Main entry point for a request-response process.
        """
        # Reject initkwargs that would shadow an HTTP handler, or that are
        # not already attributes of the class.
        for key in initkwargs:
            if key in cls.http_method_names:
                raise TypeError("You tried to pass in the %s method name as a "
                                "keyword argument to %s(). Don't do that."
                                % (key, cls.__name__))
            if not hasattr(cls, key):
                raise TypeError("%s() received an invalid keyword %r. as_view "
                                "only accepts arguments that are already "
                                "attributes of the class." % (cls.__name__, key))

        def view(request, *args, **kwargs):
            # A fresh instance per request keeps per-request state isolated.
            self = cls(**initkwargs)
            if hasattr(self, 'get') and not hasattr(self, 'head'):
                self.head = self.get
            self.request = request
            self.args = args
            self.kwargs = kwargs
            return self.dispatch(request, *args, **kwargs)
        view.view_class = cls
        view.view_initkwargs = initkwargs

        # take name and docstring from class
        update_wrapper(view, cls, updated=())

        # and possible attributes set by decorators
        # like csrf_exempt from dispatch
        update_wrapper(view, cls.dispatch, assigned=())
        return view

    def dispatch(self, request, *args, **kwargs):
        # Try to dispatch to the right method; if a method doesn't exist,
        # defer to the error handler. Also defer to the error handler if the
        # request method isn't on the approved list.
        if request.method.lower() in self.http_method_names:
            handler = getattr(self, request.method.lower(), self.http_method_not_allowed)
        else:
            handler = self.http_method_not_allowed
        return handler(request, *args, **kwargs)

    def http_method_not_allowed(self, request, *args, **kwargs):
        """Log and return 405 listing the verbs this view implements."""
        logger.warning('Method Not Allowed (%s): %s', request.method, request.path,
            extra={
                'status_code': 405,
                'request': request
            }
        )
        return http.HttpResponseNotAllowed(self._allowed_methods())

    def options(self, request, *args, **kwargs):
        """
        Handles responding to requests for the OPTIONS HTTP verb.
        """
        response = http.HttpResponse()
        response['Allow'] = ', '.join(self._allowed_methods())
        response['Content-Length'] = '0'
        return response

    def _allowed_methods(self):
        # Upper-cased names of the verbs actually implemented on this view.
        return [m.upper() for m in self.http_method_names if hasattr(self, m)]
class TemplateResponseMixin(object):
    """
    A mixin that can be used to render a template.
    """
    template_name = None
    template_engine = None
    response_class = TemplateResponse
    content_type = None

    def render_to_response(self, context, **response_kwargs):
        """
        Return a ``response_class`` instance rendered from the view's
        template and *context*.

        Any extra keyword arguments are forwarded to the response class
        constructor; ``content_type`` defaults to the class attribute.
        """
        response_kwargs.setdefault('content_type', self.content_type)
        return self.response_class(
            request=self.request,
            template=self.get_template_names(),
            context=context,
            using=self.template_engine,
            **response_kwargs
        )

    def get_template_names(self):
        """
        Return the list of template names to use for this request.

        May not be called if render_to_response is overridden.
        """
        if self.template_name is None:
            raise ImproperlyConfigured(
                "TemplateResponseMixin requires either a definition of "
                "'template_name' or an implementation of 'get_template_names()'")
        return [self.template_name]
class TemplateView(TemplateResponseMixin, ContextMixin, View):
    """
    Render a template on GET, passing any keyword arguments captured from
    the URLconf into the template context.
    """

    def get(self, request, *args, **kwargs):
        return self.render_to_response(self.get_context_data(**kwargs))
class RedirectView(View):
    """
    Redirect any request to the target URL computed by get_redirect_url().
    """
    permanent = False
    url = None
    pattern_name = None
    query_string = False

    def get_redirect_url(self, *args, **kwargs):
        """
        Return the URL to redirect to, or None when no target can be built.

        Keyword arguments captured from the URL pattern that generated the
        redirect request are provided as kwargs.
        """
        if self.url:
            target = self.url % kwargs
        elif self.pattern_name:
            try:
                target = reverse(self.pattern_name, args=args, kwargs=kwargs)
            except NoReverseMatch:
                return None
        else:
            return None

        # Optionally forward the original query string (separate local so
        # the *args parameter is not shadowed).
        query = self.request.META.get('QUERY_STRING', '')
        if query and self.query_string:
            target = "%s?%s" % (target, query)
        return target

    def get(self, request, *args, **kwargs):
        target = self.get_redirect_url(*args, **kwargs)
        if not target:
            logger.warning('Gone: %s', request.path,
                extra={
                    'status_code': 410,
                    'request': request
                })
            return http.HttpResponseGone()
        if self.permanent:
            return http.HttpResponsePermanentRedirect(target)
        return http.HttpResponseRedirect(target)

    # Every other HTTP verb behaves exactly like GET.
    def head(self, request, *args, **kwargs):
        return self.get(request, *args, **kwargs)

    def post(self, request, *args, **kwargs):
        return self.get(request, *args, **kwargs)

    def options(self, request, *args, **kwargs):
        return self.get(request, *args, **kwargs)

    def delete(self, request, *args, **kwargs):
        return self.get(request, *args, **kwargs)

    def put(self, request, *args, **kwargs):
        return self.get(request, *args, **kwargs)

    def patch(self, request, *args, **kwargs):
        return self.get(request, *args, **kwargs)
bmanojlovic/ansible | lib/ansible/modules/network/ios/ios_command.py | 3 | 7571 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# Metadata consumed by Ansible tooling: module maturity and support level.
ANSIBLE_METADATA = {
    'status': ['preview'],
    'supported_by': 'core',
    'version': '1.0'
}
DOCUMENTATION = """
---
module: ios_command
version_added: "2.1"
author: "Peter Sprygada (@privateip)"
short_description: Run commands on remote devices running Cisco IOS
description:
- Sends arbitrary commands to an ios node and returns the results
read from the device. This module includes an
argument that will cause the module to wait for a specific condition
before returning or timing out if the condition is not met.
- This module does not support running commands in configuration mode.
Please use M(ios_config) to configure IOS devices.
notes:
- Provider arguments are no longer supported. Network tasks should now
specify connection plugin network_cli instead.
options:
commands:
description:
- List of commands to send to the remote ios device over the
configured provider. The resulting output from the command
is returned. If the I(wait_for) argument is provided, the
module is not returned until the condition is satisfied or
the number of retries has expired.
required: true
wait_for:
description:
- List of conditions to evaluate against the output of the
command. The task will wait for each condition to be true
before moving forward. If the conditional is not true
within the configured number of retries, the task fails.
See examples.
required: false
default: null
aliases: ['waitfor']
version_added: "2.2"
match:
description:
- The I(match) argument is used in conjunction with the
I(wait_for) argument to specify the match policy. Valid
values are C(all) or C(any). If the value is set to C(all)
then all conditionals in the wait_for must be satisfied. If
the value is set to C(any) then only one of the values must be
satisfied.
required: false
default: all
choices: ['any', 'all']
version_added: "2.2"
retries:
description:
    - Specifies the number of retries a command should be tried
before it is considered failed. The command is run on the
target device every retry and evaluated against the
I(wait_for) conditions.
required: false
default: 10
interval:
description:
- Configures the interval in seconds to wait between retries
of the command. If the command does not pass the specified
conditions, the interval indicates how long to wait before
trying the command again.
required: false
default: 1
"""
EXAMPLES = """
tasks:
- name: run show version on remote devices
ios_command:
commands: show version
- name: run show version and check to see if output contains IOS
ios_command:
commands: show version
wait_for: result[0] contains IOS
- name: run multiple commands on remote nodes
ios_command:
commands:
- show version
- show interfaces
- name: run multiple commands and evaluate the output
ios_command:
commands:
- show version
- show interfaces
wait_for:
- result[0] contains IOS
- result[1] contains Loopback0
"""
RETURN = """
stdout:
description: The set of responses from the commands
returned: always
type: list
sample: ['...', '...']
stdout_lines:
description: The value of stdout split into a list
returned: always
type: list
sample: [['...', '...'], ['...'], ['...']]
failed_conditions:
description: The list of conditionals that have failed
returned: failed
type: list
sample: ['...', '...']
start:
description: The time the job started
returned: always
type: str
sample: "2016-11-16 10:38:15.126146"
end:
description: The time the job ended
returned: always
type: str
sample: "2016-11-16 10:38:25.595612"
delta:
description: The time elapsed to perform all operations
returned: always
type: str
sample: "0:00:10.469466"
"""
import time
from ansible.module_utils.local import LocalAnsibleModule
from ansible.module_utils.ios import run_commands
from ansible.module_utils.network_common import ComplexList
from ansible.module_utils.netcli import Conditional
from ansible.module_utils.six import string_types
VALID_KEYS = ['command', 'output']
def to_lines(stdout):
    """Yield each command response; string responses are split into a
    list of lines, anything else is passed through untouched."""
    for response in stdout:
        if isinstance(response, string_types):
            yield str(response).split('\n')
        else:
            yield response
def parse_commands(module, warnings):
    """Normalize ``module.params['commands']`` into jsonified command dicts.

    In check mode, non-``show`` commands are skipped with a warning; any
    ``conf`` (config-mode) command fails the module outright, since this
    module never enters configuration mode.
    """
    transform = ComplexList(dict(
        command=dict(key=True),
        prompt=dict(),
        response=dict()
    ))
    commands = transform(module.params['commands'])
    for index, item in enumerate(commands):
        cmd = item['command']
        if module.check_mode and not cmd.startswith('show'):
            warnings.append(
                'only show commands are supported when using check mode, not '
                'executing `%s`' % cmd
            )
        elif cmd.startswith('conf'):
            module.fail_json(
                msg='ios_command does not support running config mode '
                    'commands. Please use ios_config instead'
            )
        # Serialize each normalized command for transmission to the device.
        commands[index] = module.jsonify(item)
    return commands
def main():
    """Entry point: run the requested commands against the device, polling
    until all (or any, per ``match``) ``wait_for`` conditionals pass or the
    retries are exhausted."""
    spec = dict(
        # { command: <str>, prompt: <str>, response: <str> }
        commands=dict(type='list', required=True),

        wait_for=dict(type='list', aliases=['waitfor']),
        match=dict(default='all', choices=['all', 'any']),

        retries=dict(default=10, type='int'),
        interval=dict(default=1, type='int')
    )

    module = LocalAnsibleModule(argument_spec=spec,
                                supports_check_mode=True)

    warnings = list()
    commands = parse_commands(module, warnings)

    wait_for = module.params['wait_for'] or list()
    conditionals = [Conditional(c) for c in wait_for]

    retries = module.params['retries']
    interval = module.params['interval']
    match = module.params['match']

    # BUG FIX: the old `while retries > 0` loop never ran when retries <= 0,
    # leaving `responses` unassigned and crashing with NameError below.
    # Always execute the commands at least once.
    responses = list()
    for _attempt in range(max(retries, 1)):
        responses = run_commands(module, commands)

        for item in list(conditionals):
            if item(responses):
                if match == 'any':
                    # Any single satisfied conditional is enough.
                    conditionals = list()
                    break
                conditionals.remove(item)

        if not conditionals:
            break

        time.sleep(interval)

    if conditionals:
        failed_conditions = [item.raw for item in conditionals]
        # (message grammar fixed: "have not been satisfied")
        msg = 'One or more conditional statements have not been satisfied'
        module.fail_json(msg=msg, failed_conditions=failed_conditions)

    result = {
        'changed': False,
        'stdout': responses,
        'warnings': warnings,
        'stdout_lines': list(to_lines(responses))
    }

    module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 |
mythos234/SimplKernel-5.1.1-G92x | tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/EventClass.py | 4653 | 3596 | # EventClass.py
#
# This is a library defining some events types classes, which could
# be used by other scripts to analyzing the perf samples.
#
# Currently there are just a few classes defined for examples,
# PerfEvent is the base class for all perf event sample, PebsEvent
# is a HW base Intel x86 PEBS event, and user could add more SW/HW
# event classes based on requirements.
import struct
# Event types, user could add more here
EVTYPE_GENERIC = 0
EVTYPE_PEBS = 1     # Basic PEBS event
EVTYPE_PEBS_LL = 2  # PEBS event with load latency info
EVTYPE_IBS = 3      # reserved; no event class below emits this type yet
#
# Currently we don't have good way to tell the event type, but by
# the size of raw buffer, raw PEBS event with load latency data's
# size is 176 bytes, while the pure PEBS event's size is 144 bytes.
#
def create_event(name, comm, dso, symbol, raw_buf):
    # Dispatch on the raw-buffer length: 144 bytes is a plain PEBS record,
    # 176 bytes is PEBS plus the Nehalem load-latency words, and anything
    # else falls back to a generic perf event.
    size = len(raw_buf)
    if size == 144:
        return PebsEvent(name, comm, dso, symbol, raw_buf)
    if size == 176:
        return PebsNHM(name, comm, dso, symbol, raw_buf)
    return PerfEvent(name, comm, dso, symbol, raw_buf)
class PerfEvent(object):
    # Base class for all perf samples; every instantiation is counted in
    # the class attribute `event_num`.
    event_num = 0

    def __init__(self, name, comm, dso, symbol, raw_buf, ev_type=EVTYPE_GENERIC):
        self.name = name        # event name
        self.comm = comm        # task command name
        self.dso = dso          # shared object the sample hit
        self.symbol = symbol    # resolved symbol
        self.raw_buf = raw_buf  # raw sample payload
        self.ev_type = ev_type  # one of the EVTYPE_* constants
        PerfEvent.event_num += 1

    def show(self):
        # NOTE: Python 2 print statement -- this script targets the
        # Python 2 interpreter embedded in perf-script.
        print "PMU event: name=%12s, symbol=%24s, comm=%8s, dso=%12s" % (self.name, self.symbol, self.comm, self.dso)
#
# Basic Intel PEBS (Precise Event-based Sampling) event, whose raw buffer
# contains the context info when that event happened: the EFLAGS and
# linear IP info, as well as all the registers.
#
class PebsEvent(PerfEvent):
    """Basic Intel PEBS sample: the first 80 bytes of the raw buffer hold
    EFLAGS, the linear IP and the general-purpose registers, as ten
    little-endian u64 values."""
    pebs_num = 0

    def __init__(self, name, comm, dso, symbol, raw_buf, ev_type=EVTYPE_PEBS):
        # Unpack the ten 64-bit register words directly into attributes.
        (self.flags, self.ip, self.ax, self.bx, self.cx, self.dx,
         self.si, self.di, self.bp, self.sp) = struct.unpack(
            'QQQQQQQQQQ', raw_buf[0:80])
        PerfEvent.__init__(self, name, comm, dso, symbol, raw_buf, ev_type)
        PebsEvent.pebs_num += 1
#
# Intel Nehalem and Westmere support PEBS plus Load Latency info which lie
# in the four 64 bit words write after the PEBS data:
# Status: records the IA32_PERF_GLOBAL_STATUS register value
# DLA: Data Linear Address (EIP)
# DSE: Data Source Encoding, where the latency happens, hit or miss
# in L1/L2/L3 or IO operations
# LAT: the actual latency in cycles
#
class PebsNHM(PebsEvent):
    """Nehalem/Westmere PEBS sample carrying the four trailing 64-bit
    load-latency words: global status, data linear address, data source
    encoding and the latency in cycles."""
    pebs_nhm_num = 0

    def __init__(self, name, comm, dso, symbol, raw_buf, ev_type=EVTYPE_PEBS_LL):
        # The load-latency words live at bytes 144..176 of the raw buffer.
        (self.status, self.dla, self.dse, self.lat) = struct.unpack(
            'QQQQ', raw_buf[144:176])
        PebsEvent.__init__(self, name, comm, dso, symbol, raw_buf, ev_type)
        PebsNHM.pebs_nhm_num += 1
| gpl-2.0 |
a20012251/dd-agent | tests/checks/mock/test_system_core.py | 46 | 2489 | import mock
import psutil
from tests.checks.common import AgentCheckTest
from utils.platform import Platform
# Platform-specific fixtures: the per-core rate metrics the check should
# emit and canned psutil.cpu_times(percpu=True) output for four cores.
if Platform.is_mac():
    CHECK_RATES = [
        'system.core.idle',
        'system.core.nice',
        'system.core.system',
        'system.core.user',
    ]

    MOCK_PSUTIL_CPU_TIMES = [
        psutil._psosx.scputimes(user=7877.29, nice=0.0, system=7469.72, idle=38164.81),
        psutil._psosx.scputimes(user=3826.74, nice=0.0, system=2701.6, idle=46981.39),
        psutil._psosx.scputimes(user=7486.51, nice=0.0, system=5991.36, idle=40031.88),
        psutil._psosx.scputimes(user=3964.85, nice=0.0, system=2862.37, idle=46682.5)
    ]
elif Platform.is_unix():
    CHECK_RATES = [
        'system.core.idle',
        'system.core.nice',
        'system.core.system',
        'system.core.user',
        'system.core.iowait',
        'system.core.irq',
        'system.core.softirq',
        'system.core.steal',
        'system.core.guest',
        'system.core.guest_nice',
    ]

    MOCK_PSUTIL_CPU_TIMES = [
        psutil._pslinux.scputimes(user=1805.64, nice=0.01, system=298.66, idle=14177.28,
                                  iowait=3.23, irq=0.05, softirq=33.28, steal=0.0,
                                  guest=0.0, guest_nice=0.0),
        psutil._pslinux.scputimes(user=1724.18, nice=0.04, system=235.61, idle=14381.94,
                                  iowait=3.55, irq=0.0, softirq=6.94, steal=0.0,
                                  guest=0.0, guest_nice=0.0),
        psutil._pslinux.scputimes(user=1737.58, nice=0.03, system=230.61, idle=14382.33,
                                  iowait=2.69, irq=0.0, softirq=6.12, steal=0.0,
                                  guest=0.0, guest_nice=0.0),
        psutil._pslinux.scputimes(user=1696.18, nice=0.0, system=218.36, idle=14610.06,
                                  iowait=2.43, irq=0.0, softirq=3.8, steal=0.0,
                                  guest=0.0, guest_nice=0.0)
    ]
else:
    # BUG FIX: CHECK_RATES was previously left undefined on unsupported
    # platforms, so the test crashed with NameError instead of simply
    # asserting zero per-core rate metrics.
    CHECK_RATES = []
    MOCK_PSUTIL_CPU_TIMES = []
class SystemCoreTestCase(AgentCheckTest):
    """Exercise the system_core check against mocked psutil CPU times."""
    CHECK_NAME = 'system_core'

    @mock.patch('psutil.cpu_times', return_value=MOCK_PSUTIL_CPU_TIMES)
    def test_system_core(self, cpu_times_mock):
        # Two runs are needed so rate metrics have a delta to report.
        self.run_check_twice({"instances": [{}]})

        self.assertMetric('system.core.count', value=4, count=1)
        for core_idx in range(4):
            core_tags = ['core:{0}'.format(core_idx)]
            for rate_metric in CHECK_RATES:
                self.assertMetric(rate_metric, count=1, tags=core_tags)

        self.coverage_report()
| bsd-3-clause |
ekasitk/sahara | sahara/tests/unit/plugins/cdh/v5_3_0/test_edp_engine.py | 5 | 4231 | # Copyright (c) 2015 Intel Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from sahara.plugins.cdh.v5_3_0 import edp_engine
from sahara.tests.unit import base as sahara_base
from sahara.utils import edp
class CDH53ConfigHintsTest(sahara_base.SaharaTestCase):
    # Each test patches the confighints helper (or the base Oozie engine)
    # and checks that get_possible_job_config routes every EDP job type to
    # the CDH 5.3.0 resource file backing its configuration hints.

    @mock.patch(
        'sahara.plugins.cdh.confighints_helper.get_possible_hive_config_from',
        return_value={})
    def test_get_possible_job_config_hive(self,
                                          get_possible_hive_config_from):
        # Hive hints are read from the bundled hive-site.xml.
        expected_config = {'job_config': {}}
        actual_config = edp_engine.EdpOozieEngine.get_possible_job_config(
            edp.JOB_TYPE_HIVE)
        get_possible_hive_config_from.assert_called_once_with(
            'plugins/cdh/v5_3_0/resources/hive-site.xml')
        self.assertEqual(expected_config, actual_config)

    @mock.patch('sahara.plugins.cdh.v5_3_0.edp_engine.EdpOozieEngine')
    def test_get_possible_job_config_java(self, BaseCDHEdpOozieEngine):
        # Java jobs delegate to the base CDH Oozie engine.
        expected_config = {'job_config': {}}
        BaseCDHEdpOozieEngine.get_possible_job_config.return_value = (
            expected_config)
        actual_config = edp_engine.EdpOozieEngine.get_possible_job_config(
            edp.JOB_TYPE_JAVA)
        BaseCDHEdpOozieEngine.get_possible_job_config.assert_called_once_with(
            edp.JOB_TYPE_JAVA)
        self.assertEqual(expected_config, actual_config)

    @mock.patch(
        'sahara.plugins.cdh.confighints_helper.'
        'get_possible_mapreduce_config_from',
        return_value={})
    def test_get_possible_job_config_mapreduce(
            self, get_possible_mapreduce_config_from):
        # MapReduce hints are read from the bundled mapred-site.xml.
        expected_config = {'job_config': {}}
        actual_config = edp_engine.EdpOozieEngine.get_possible_job_config(
            edp.JOB_TYPE_MAPREDUCE)
        get_possible_mapreduce_config_from.assert_called_once_with(
            'plugins/cdh/v5_3_0/resources/mapred-site.xml')
        self.assertEqual(expected_config, actual_config)

    @mock.patch(
        'sahara.plugins.cdh.confighints_helper.'
        'get_possible_mapreduce_config_from',
        return_value={})
    def test_get_possible_job_config_mapreduce_streaming(
            self, get_possible_mapreduce_config_from):
        # Streaming MapReduce uses the same mapred-site.xml hints.
        expected_config = {'job_config': {}}
        actual_config = edp_engine.EdpOozieEngine.get_possible_job_config(
            edp.JOB_TYPE_MAPREDUCE_STREAMING)
        get_possible_mapreduce_config_from.assert_called_once_with(
            'plugins/cdh/v5_3_0/resources/mapred-site.xml')
        self.assertEqual(expected_config, actual_config)

    @mock.patch(
        'sahara.plugins.cdh.confighints_helper.get_possible_pig_config_from',
        return_value={})
    def test_get_possible_job_config_pig(self,
                                         get_possible_pig_config_from):
        # Pig hints are sourced from mapred-site.xml as well.
        expected_config = {'job_config': {}}
        actual_config = edp_engine.EdpOozieEngine.get_possible_job_config(
            edp.JOB_TYPE_PIG)
        get_possible_pig_config_from.assert_called_once_with(
            'plugins/cdh/v5_3_0/resources/mapred-site.xml')
        self.assertEqual(expected_config, actual_config)

    @mock.patch('sahara.plugins.cdh.v5_3_0.edp_engine.EdpOozieEngine')
    def test_get_possible_job_config_shell(self, BaseCDHEdpOozieEngine):
        # Shell jobs also delegate to the base CDH Oozie engine.
        expected_config = {'job_config': {}}
        BaseCDHEdpOozieEngine.get_possible_job_config.return_value = (
            expected_config)
        actual_config = edp_engine.EdpOozieEngine.get_possible_job_config(
            edp.JOB_TYPE_SHELL)
        BaseCDHEdpOozieEngine.get_possible_job_config.assert_called_once_with(
            edp.JOB_TYPE_SHELL)
        self.assertEqual(expected_config, actual_config)
| apache-2.0 |
vijayendrabvs/ssl-neutron | neutron/tests/unit/test_iptables_manager.py | 4 | 28075 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Locaweb.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @author: Juliano Martinez, Locaweb.
import inspect
import os
import mock
from neutron.agent.linux import iptables_manager
from neutron.tests import base
from neutron.tests import tools
IPTABLES_ARG = {'bn': iptables_manager.binary_name}

# Canonical iptables-restore payload for a freshly initialized manager:
# the *nat table with neutron's wrapper chains and the jumps splicing them
# into the built-in PREROUTING/OUTPUT/POSTROUTING chains.
NAT_DUMP = ('# Generated by iptables_manager\n'
            '*nat\n'
            ':neutron-postrouting-bottom - [0:0]\n'
            ':%(bn)s-OUTPUT - [0:0]\n'
            ':%(bn)s-snat - [0:0]\n'
            ':%(bn)s-PREROUTING - [0:0]\n'
            ':%(bn)s-float-snat - [0:0]\n'
            ':%(bn)s-POSTROUTING - [0:0]\n'
            '[0:0] -A PREROUTING -j %(bn)s-PREROUTING\n'
            '[0:0] -A OUTPUT -j %(bn)s-OUTPUT\n'
            '[0:0] -A POSTROUTING -j %(bn)s-POSTROUTING\n'
            '[0:0] -A POSTROUTING -j neutron-postrouting-bottom\n'
            '[0:0] -A neutron-postrouting-bottom -j %(bn)s-snat\n'
            '[0:0] -A %(bn)s-snat -j '
            '%(bn)s-float-snat\n'
            'COMMIT\n'
            '# Completed by iptables_manager\n' % IPTABLES_ARG)

# Matching baseline for the *filter table, including neutron-filter-top
# and the per-binary INPUT/OUTPUT/FORWARD wrapper chains.
FILTER_DUMP = ('# Generated by iptables_manager\n'
               '*filter\n'
               ':neutron-filter-top - [0:0]\n'
               ':%(bn)s-FORWARD - [0:0]\n'
               ':%(bn)s-INPUT - [0:0]\n'
               ':%(bn)s-local - [0:0]\n'
               ':%(bn)s-OUTPUT - [0:0]\n'
               '[0:0] -A FORWARD -j neutron-filter-top\n'
               '[0:0] -A OUTPUT -j neutron-filter-top\n'
               '[0:0] -A neutron-filter-top -j %(bn)s-local\n'
               '[0:0] -A INPUT -j %(bn)s-INPUT\n'
               '[0:0] -A OUTPUT -j %(bn)s-OUTPUT\n'
               '[0:0] -A FORWARD -j %(bn)s-FORWARD\n'
               'COMMIT\n'
               '# Completed by iptables_manager\n' % IPTABLES_ARG)
class IptablesManagerStateFulTestCase(base.BaseTestCase):
    def setUp(self):
        """Build an IptablesManager with its executor mocked out so each
        test can script and verify the exact iptables invocations."""
        super(IptablesManagerStateFulTestCase, self).setUp()
        self.root_helper = 'sudo'
        self.iptables = (iptables_manager.
                         IptablesManager(root_helper=self.root_helper))
        self.execute = mock.patch.object(self.iptables, "execute").start()
        # Undo every mock.patch started above once the test finishes.
        self.addCleanup(mock.patch.stopall)
    def test_binary_name(self):
        # binary_name is derived from the entry script's basename,
        # truncated to 16 characters.
        self.assertEqual(iptables_manager.binary_name,
                         os.path.basename(inspect.stack()[-1][1])[:16])
    def test_get_chain_name(self):
        # Chain names must be truncated to fit iptables' limits, with a
        # shorter budget when the binary-name prefix is prepended.
        name = '0123456789' * 5
        # 28 chars is the maximum length of iptables chain name.
        self.assertEqual(iptables_manager.get_chain_name(name, wrap=False),
                         name[:28])
        # 11 chars is the maximum length of chain name of iptable_manager
        # if binary_name is prepended.
        self.assertEqual(iptables_manager.get_chain_name(name, wrap=True),
                         name[:11])
def test_add_and_remove_chain_custom_binary_name(self):
bn = ("abcdef" * 5)
self.iptables = (iptables_manager.
IptablesManager(root_helper=self.root_helper,
binary_name=bn))
self.execute = mock.patch.object(self.iptables, "execute").start()
iptables_args = {'bn': bn[:16]}
filter_dump = ('# Generated by iptables_manager\n'
'*filter\n'
':neutron-filter-top - [0:0]\n'
':%(bn)s-FORWARD - [0:0]\n'
':%(bn)s-INPUT - [0:0]\n'
':%(bn)s-local - [0:0]\n'
':%(bn)s-filter - [0:0]\n'
':%(bn)s-OUTPUT - [0:0]\n'
'[0:0] -A FORWARD -j neutron-filter-top\n'
'[0:0] -A OUTPUT -j neutron-filter-top\n'
'[0:0] -A neutron-filter-top -j %(bn)s-local\n'
'[0:0] -A INPUT -j %(bn)s-INPUT\n'
'[0:0] -A OUTPUT -j %(bn)s-OUTPUT\n'
'[0:0] -A FORWARD -j %(bn)s-FORWARD\n'
'COMMIT\n'
'# Completed by iptables_manager\n' % iptables_args)
filter_dump_mod = ('# Generated by iptables_manager\n'
'*filter\n'
':neutron-filter-top - [0:0]\n'
':%(bn)s-FORWARD - [0:0]\n'
':%(bn)s-INPUT - [0:0]\n'
':%(bn)s-local - [0:0]\n'
':%(bn)s-filter - [0:0]\n'
':%(bn)s-OUTPUT - [0:0]\n'
'[0:0] -A FORWARD -j neutron-filter-top\n'
'[0:0] -A OUTPUT -j neutron-filter-top\n'
'[0:0] -A neutron-filter-top -j %(bn)s-local\n'
'[0:0] -A INPUT -j %(bn)s-INPUT\n'
'[0:0] -A OUTPUT -j %(bn)s-OUTPUT\n'
'[0:0] -A FORWARD -j %(bn)s-FORWARD\n'
'COMMIT\n'
'# Completed by iptables_manager\n'
% iptables_args)
nat_dump = ('# Generated by iptables_manager\n'
'*nat\n'
':neutron-postrouting-bottom - [0:0]\n'
':%(bn)s-OUTPUT - [0:0]\n'
':%(bn)s-snat - [0:0]\n'
':%(bn)s-PREROUTING - [0:0]\n'
':%(bn)s-float-snat - [0:0]\n'
':%(bn)s-POSTROUTING - [0:0]\n'
'[0:0] -A PREROUTING -j %(bn)s-PREROUTING\n'
'[0:0] -A OUTPUT -j %(bn)s-OUTPUT\n'
'[0:0] -A POSTROUTING -j %(bn)s-POSTROUTING\n'
'[0:0] -A POSTROUTING -j neutron-postrouting-bottom\n'
'[0:0] -A neutron-postrouting-bottom -j %(bn)s-snat\n'
'[0:0] -A %(bn)s-snat -j '
'%(bn)s-float-snat\n'
'COMMIT\n'
'# Completed by iptables_manager\n' % iptables_args)
expected_calls_and_values = [
(mock.call(['iptables-save', '-c'],
root_helper=self.root_helper),
''),
(mock.call(['iptables-restore', '-c'],
process_input=nat_dump + filter_dump_mod,
root_helper=self.root_helper),
None),
(mock.call(['iptables-save', '-c'],
root_helper=self.root_helper),
''),
(mock.call(['iptables-restore', '-c'],
process_input=nat_dump + filter_dump,
root_helper=self.root_helper),
None),
]
tools.setup_mock_calls(self.execute, expected_calls_and_values)
self.iptables.ipv4['filter'].add_chain('filter')
self.iptables.apply()
self.iptables.ipv4['filter'].empty_chain('filter')
self.iptables.apply()
tools.verify_mock_calls(self.execute, expected_calls_and_values)
def test_empty_chain_custom_binary_name(self):
bn = ("abcdef" * 5)[:16]
self.iptables = (iptables_manager.
IptablesManager(root_helper=self.root_helper,
binary_name=bn))
self.execute = mock.patch.object(self.iptables, "execute").start()
iptables_args = {'bn': bn}
filter_dump = ('# Generated by iptables_manager\n'
'*filter\n'
':neutron-filter-top - [0:0]\n'
':%(bn)s-FORWARD - [0:0]\n'
':%(bn)s-INPUT - [0:0]\n'
':%(bn)s-local - [0:0]\n'
':%(bn)s-OUTPUT - [0:0]\n'
'[0:0] -A FORWARD -j neutron-filter-top\n'
'[0:0] -A OUTPUT -j neutron-filter-top\n'
'[0:0] -A neutron-filter-top -j %(bn)s-local\n'
'[0:0] -A INPUT -j %(bn)s-INPUT\n'
'[0:0] -A OUTPUT -j %(bn)s-OUTPUT\n'
'[0:0] -A FORWARD -j %(bn)s-FORWARD\n'
'COMMIT\n'
'# Completed by iptables_manager\n' % iptables_args)
filter_dump_mod = ('# Generated by iptables_manager\n'
'*filter\n'
':neutron-filter-top - [0:0]\n'
':%(bn)s-FORWARD - [0:0]\n'
':%(bn)s-INPUT - [0:0]\n'
':%(bn)s-local - [0:0]\n'
':%(bn)s-filter - [0:0]\n'
':%(bn)s-OUTPUT - [0:0]\n'
'[0:0] -A FORWARD -j neutron-filter-top\n'
'[0:0] -A OUTPUT -j neutron-filter-top\n'
'[0:0] -A neutron-filter-top -j %(bn)s-local\n'
'[0:0] -A INPUT -j %(bn)s-INPUT\n'
'[0:0] -A OUTPUT -j %(bn)s-OUTPUT\n'
'[0:0] -A FORWARD -j %(bn)s-FORWARD\n'
'[0:0] -A %(bn)s-filter -s 0/0 -d 192.168.0.2\n'
'COMMIT\n'
'# Completed by iptables_manager\n'
% iptables_args)
nat_dump = ('# Generated by iptables_manager\n'
'*nat\n'
':neutron-postrouting-bottom - [0:0]\n'
':%(bn)s-OUTPUT - [0:0]\n'
':%(bn)s-snat - [0:0]\n'
':%(bn)s-PREROUTING - [0:0]\n'
':%(bn)s-float-snat - [0:0]\n'
':%(bn)s-POSTROUTING - [0:0]\n'
'[0:0] -A PREROUTING -j %(bn)s-PREROUTING\n'
'[0:0] -A OUTPUT -j %(bn)s-OUTPUT\n'
'[0:0] -A POSTROUTING -j %(bn)s-POSTROUTING\n'
'[0:0] -A POSTROUTING -j neutron-postrouting-bottom\n'
'[0:0] -A neutron-postrouting-bottom -j %(bn)s-snat\n'
'[0:0] -A %(bn)s-snat -j '
'%(bn)s-float-snat\n'
'COMMIT\n'
'# Completed by iptables_manager\n' % iptables_args)
expected_calls_and_values = [
(mock.call(['iptables-save', '-c'],
root_helper=self.root_helper),
''),
(mock.call(['iptables-restore', '-c'],
process_input=nat_dump + filter_dump_mod,
root_helper=self.root_helper),
None),
(mock.call(['iptables-save', '-c'],
root_helper=self.root_helper),
''),
(mock.call(['iptables-restore', '-c'],
process_input=nat_dump + filter_dump,
root_helper=self.root_helper),
None),
]
tools.setup_mock_calls(self.execute, expected_calls_and_values)
self.iptables.ipv4['filter'].add_chain('filter')
self.iptables.ipv4['filter'].add_rule('filter',
'-s 0/0 -d 192.168.0.2')
self.iptables.apply()
self.iptables.ipv4['filter'].remove_chain('filter')
self.iptables.apply()
tools.verify_mock_calls(self.execute, expected_calls_and_values)
def test_add_and_remove_chain(self):
filter_dump_mod = ('# Generated by iptables_manager\n'
'*filter\n'
':neutron-filter-top - [0:0]\n'
':%(bn)s-FORWARD - [0:0]\n'
':%(bn)s-INPUT - [0:0]\n'
':%(bn)s-local - [0:0]\n'
':%(bn)s-filter - [0:0]\n'
':%(bn)s-OUTPUT - [0:0]\n'
'[0:0] -A FORWARD -j neutron-filter-top\n'
'[0:0] -A OUTPUT -j neutron-filter-top\n'
'[0:0] -A neutron-filter-top -j %(bn)s-local\n'
'[0:0] -A INPUT -j %(bn)s-INPUT\n'
'[0:0] -A OUTPUT -j %(bn)s-OUTPUT\n'
'[0:0] -A FORWARD -j %(bn)s-FORWARD\n'
'COMMIT\n'
'# Completed by iptables_manager\n'
% IPTABLES_ARG)
expected_calls_and_values = [
(mock.call(['iptables-save', '-c'],
root_helper=self.root_helper),
''),
(mock.call(['iptables-restore', '-c'],
process_input=NAT_DUMP + filter_dump_mod,
root_helper=self.root_helper),
None),
(mock.call(['iptables-save', '-c'],
root_helper=self.root_helper),
''),
(mock.call(['iptables-restore', '-c'],
process_input=NAT_DUMP + FILTER_DUMP,
root_helper=self.root_helper),
None),
]
tools.setup_mock_calls(self.execute, expected_calls_and_values)
self.iptables.ipv4['filter'].add_chain('filter')
self.iptables.apply()
self.iptables.ipv4['filter'].remove_chain('filter')
self.iptables.apply()
tools.verify_mock_calls(self.execute, expected_calls_and_values)
def test_add_filter_rule(self):
filter_dump_mod = ('# Generated by iptables_manager\n'
'*filter\n'
':neutron-filter-top - [0:0]\n'
':%(bn)s-FORWARD - [0:0]\n'
':%(bn)s-INPUT - [0:0]\n'
':%(bn)s-local - [0:0]\n'
':%(bn)s-filter - [0:0]\n'
':%(bn)s-OUTPUT - [0:0]\n'
'[0:0] -A FORWARD -j neutron-filter-top\n'
'[0:0] -A OUTPUT -j neutron-filter-top\n'
'[0:0] -A neutron-filter-top -j %(bn)s-local\n'
'[0:0] -A INPUT -j %(bn)s-INPUT\n'
'[0:0] -A OUTPUT -j %(bn)s-OUTPUT\n'
'[0:0] -A FORWARD -j %(bn)s-FORWARD\n'
'[0:0] -A %(bn)s-filter -j DROP\n'
'[0:0] -A %(bn)s-INPUT -s 0/0 -d 192.168.0.2 -j '
'%(bn)s-filter\n'
'COMMIT\n'
'# Completed by iptables_manager\n'
% IPTABLES_ARG)
expected_calls_and_values = [
(mock.call(['iptables-save', '-c'],
root_helper=self.root_helper),
''),
(mock.call(['iptables-restore', '-c'],
process_input=NAT_DUMP + filter_dump_mod,
root_helper=self.root_helper),
None),
(mock.call(['iptables-save', '-c'],
root_helper=self.root_helper),
''),
(mock.call(['iptables-restore', '-c'],
process_input=NAT_DUMP + FILTER_DUMP,
root_helper=self.root_helper
),
None),
]
tools.setup_mock_calls(self.execute, expected_calls_and_values)
self.iptables.ipv4['filter'].add_chain('filter')
self.iptables.ipv4['filter'].add_rule('filter', '-j DROP')
self.iptables.ipv4['filter'].add_rule('INPUT',
'-s 0/0 -d 192.168.0.2 -j'
' %(bn)s-filter' % IPTABLES_ARG)
self.iptables.apply()
self.iptables.ipv4['filter'].remove_rule('filter', '-j DROP')
self.iptables.ipv4['filter'].remove_rule('INPUT',
'-s 0/0 -d 192.168.0.2 -j'
' %(bn)s-filter'
% IPTABLES_ARG)
self.iptables.ipv4['filter'].remove_chain('filter')
self.iptables.apply()
tools.verify_mock_calls(self.execute, expected_calls_and_values)
def test_rule_with_wrap_target(self):
name = '0123456789' * 5
wrap = "%s-%s" % (iptables_manager.binary_name,
iptables_manager.get_chain_name(name))
iptables_args = {'bn': iptables_manager.binary_name,
'wrap': wrap}
filter_dump_mod = ('# Generated by iptables_manager\n'
'*filter\n'
':neutron-filter-top - [0:0]\n'
':%(bn)s-FORWARD - [0:0]\n'
':%(bn)s-INPUT - [0:0]\n'
':%(bn)s-local - [0:0]\n'
':%(wrap)s - [0:0]\n'
':%(bn)s-OUTPUT - [0:0]\n'
'[0:0] -A FORWARD -j neutron-filter-top\n'
'[0:0] -A OUTPUT -j neutron-filter-top\n'
'[0:0] -A neutron-filter-top -j %(bn)s-local\n'
'[0:0] -A INPUT -j %(bn)s-INPUT\n'
'[0:0] -A OUTPUT -j %(bn)s-OUTPUT\n'
'[0:0] -A FORWARD -j %(bn)s-FORWARD\n'
'[0:0] -A %(bn)s-INPUT -s 0/0 -d 192.168.0.2 -j '
'%(wrap)s\n'
'COMMIT\n'
'# Completed by iptables_manager\n'
% iptables_args)
expected_calls_and_values = [
(mock.call(['iptables-save', '-c'],
root_helper=self.root_helper),
''),
(mock.call(['iptables-restore', '-c'],
process_input=NAT_DUMP + filter_dump_mod,
root_helper=self.root_helper),
None),
(mock.call(['iptables-save', '-c'],
root_helper=self.root_helper),
''),
(mock.call(['iptables-restore', '-c'],
process_input=NAT_DUMP + FILTER_DUMP,
root_helper=self.root_helper),
None),
]
tools.setup_mock_calls(self.execute, expected_calls_and_values)
self.iptables.ipv4['filter'].add_chain(name)
self.iptables.ipv4['filter'].add_rule('INPUT',
'-s 0/0 -d 192.168.0.2 -j'
' $%s' % name)
self.iptables.apply()
self.iptables.ipv4['filter'].remove_rule('INPUT',
'-s 0/0 -d 192.168.0.2 -j'
' $%s' % name)
self.iptables.ipv4['filter'].remove_chain(name)
self.iptables.apply()
tools.verify_mock_calls(self.execute, expected_calls_and_values)
def test_add_nat_rule(self):
nat_dump = ('# Generated by iptables_manager\n'
'*nat\n'
':neutron-postrouting-bottom - [0:0]\n'
':%(bn)s-float-snat - [0:0]\n'
':%(bn)s-POSTROUTING - [0:0]\n'
':%(bn)s-PREROUTING - [0:0]\n'
':%(bn)s-OUTPUT - [0:0]\n'
':%(bn)s-snat - [0:0]\n'
'[0:0] -A PREROUTING -j %(bn)s-PREROUTING\n'
'[0:0] -A OUTPUT -j %(bn)s-OUTPUT\n'
'[0:0] -A POSTROUTING -j %(bn)s-POSTROUTING\n'
'[0:0] -A POSTROUTING -j neutron-postrouting-bottom\n'
'[0:0] -A neutron-postrouting-bottom -j %(bn)s-snat\n'
'[0:0] -A %(bn)s-snat -j %(bn)s-float-snat\n'
'COMMIT\n'
'# Completed by iptables_manager\n'
% IPTABLES_ARG)
nat_dump_mod = ('# Generated by iptables_manager\n'
'*nat\n'
':neutron-postrouting-bottom - [0:0]\n'
':%(bn)s-float-snat - [0:0]\n'
':%(bn)s-POSTROUTING - [0:0]\n'
':%(bn)s-PREROUTING - [0:0]\n'
':%(bn)s-nat - [0:0]\n'
':%(bn)s-OUTPUT - [0:0]\n'
':%(bn)s-snat - [0:0]\n'
'[0:0] -A PREROUTING -j %(bn)s-PREROUTING\n'
'[0:0] -A OUTPUT -j %(bn)s-OUTPUT\n'
'[0:0] -A POSTROUTING -j %(bn)s-POSTROUTING\n'
'[0:0] -A POSTROUTING -j neutron-postrouting-bottom\n'
'[0:0] -A neutron-postrouting-bottom -j %(bn)s-snat\n'
'[0:0] -A %(bn)s-snat -j %(bn)s-float-snat\n'
'[0:0] -A %(bn)s-PREROUTING -d 192.168.0.3 -j '
'%(bn)s-nat\n'
'[0:0] -A %(bn)s-nat -p tcp --dport 8080 -j '
'REDIRECT --to-port 80\n'
'COMMIT\n'
'# Completed by iptables_manager\n'
% IPTABLES_ARG)
expected_calls_and_values = [
(mock.call(['iptables-save', '-c'],
root_helper=self.root_helper),
''),
(mock.call(['iptables-restore', '-c'],
process_input=nat_dump_mod + FILTER_DUMP,
root_helper=self.root_helper),
None),
(mock.call(['iptables-save', '-c'],
root_helper=self.root_helper),
''),
(mock.call(['iptables-restore', '-c'],
process_input=nat_dump + FILTER_DUMP,
root_helper=self.root_helper),
None),
]
tools.setup_mock_calls(self.execute, expected_calls_and_values)
self.iptables.ipv4['nat'].add_chain('nat')
self.iptables.ipv4['nat'].add_rule('PREROUTING',
'-d 192.168.0.3 -j '
'%(bn)s-nat' % IPTABLES_ARG)
self.iptables.ipv4['nat'].add_rule('nat',
'-p tcp --dport 8080' +
' -j REDIRECT --to-port 80')
self.iptables.apply()
self.iptables.ipv4['nat'].remove_rule('nat',
'-p tcp --dport 8080 -j'
' REDIRECT --to-port 80')
self.iptables.ipv4['nat'].remove_rule('PREROUTING',
'-d 192.168.0.3 -j '
'%(bn)s-nat' % IPTABLES_ARG)
self.iptables.ipv4['nat'].remove_chain('nat')
self.iptables.apply()
tools.verify_mock_calls(self.execute, expected_calls_and_values)
    def test_add_rule_to_a_nonexistent_chain(self):
        # Adding a rule to an unknown chain must raise rather than
        # silently create the chain.
        self.assertRaises(LookupError, self.iptables.ipv4['filter'].add_rule,
                          'nonexistent', '-j DROP')
    def test_remove_nonexistent_chain(self):
        # Removing an unknown chain is tolerated: it only logs a warning.
        with mock.patch.object(iptables_manager, "LOG") as log:
            self.iptables.ipv4['filter'].remove_chain('nonexistent')
            log.warn.assert_called_once_with(
                'Attempted to remove chain %s which does not exist',
                'nonexistent')
    def test_remove_nonexistent_rule(self):
        # Removing a rule that was never added is tolerated as well and
        # logs the full rule context for debugging.
        with mock.patch.object(iptables_manager, "LOG") as log:
            self.iptables.ipv4['filter'].remove_rule('nonexistent', '-j DROP')
            log.warn.assert_called_once_with(
                'Tried to remove rule that was not there: '
                '%(chain)r %(rule)r %(wrap)r %(top)r',
                {'wrap': True, 'top': False, 'rule': '-j DROP',
                 'chain': 'nonexistent'})
    def test_get_traffic_counters_chain_notexists(self):
        # Counters for an unknown chain: returns None, warns, and never
        # shells out to iptables at all.
        with mock.patch.object(iptables_manager, "LOG") as log:
            acc = self.iptables.get_traffic_counters('chain1')
            self.assertIsNone(acc)
            self.assertEqual(0, self.execute.call_count)
            log.warn.assert_called_once_with(
                'Attempted to get traffic counters of chain %s which '
                'does not exist', 'chain1')
    def test_get_traffic_counters(self):
        # The same dump is returned for the IPv4 filter table and the IPv6
        # filter table; the nat table returns nothing.  The counters from the
        # two matching dumps must be summed (2 * 400 pkts, 2 * 65901 bytes
        # per chain row that wraps OUTPUT).
        iptables_dump = (
            'Chain OUTPUT (policy ACCEPT 400 packets, 65901 bytes)\n'
            ' pkts bytes target prot opt in out source'
            ' destination \n'
            ' 400 65901 chain1 all -- * * 0.0.0.0/0'
            ' 0.0.0.0/0 \n'
            ' 400 65901 chain2 all -- * * 0.0.0.0/0'
            ' 0.0.0.0/0 \n')
        # Expected (call, return-value) pairs, in the exact order the manager
        # issues them: filter/IPv4, nat/IPv4, filter/IPv6.
        expected_calls_and_values = [
            (mock.call(['iptables', '-t', 'filter', '-L', 'OUTPUT',
                        '-n', '-v', '-x'],
                       root_helper=self.root_helper),
             iptables_dump),
            (mock.call(['iptables', '-t', 'nat', '-L', 'OUTPUT', '-n',
                        '-v', '-x'],
                       root_helper=self.root_helper),
             ''),
            (mock.call(['ip6tables', '-t', 'filter', '-L', 'OUTPUT',
                        '-n', '-v', '-x'],
                       root_helper=self.root_helper),
             iptables_dump),
        ]
        tools.setup_mock_calls(self.execute, expected_calls_and_values)
        acc = self.iptables.get_traffic_counters('OUTPUT')
        self.assertEqual(acc['pkts'], 1600)
        self.assertEqual(acc['bytes'], 263604)
        tools.verify_mock_calls(self.execute, expected_calls_and_values)
    def test_get_traffic_counters_with_zero(self):
        # Same scenario as test_get_traffic_counters, but zero=True must add
        # the '-Z' flag (reset counters after reading) to every command.
        iptables_dump = (
            'Chain OUTPUT (policy ACCEPT 400 packets, 65901 bytes)\n'
            ' pkts bytes target prot opt in out source'
            ' destination \n'
            ' 400 65901 chain1 all -- * * 0.0.0.0/0'
            ' 0.0.0.0/0 \n'
            ' 400 65901 chain2 all -- * * 0.0.0.0/0'
            ' 0.0.0.0/0 \n')
        expected_calls_and_values = [
            (mock.call(['iptables', '-t', 'filter', '-L', 'OUTPUT',
                        '-n', '-v', '-x', '-Z'],
                       root_helper=self.root_helper),
             iptables_dump),
            (mock.call(['iptables', '-t', 'nat', '-L', 'OUTPUT', '-n',
                        '-v', '-x', '-Z'],
                       root_helper=self.root_helper),
             ''),
            (mock.call(['ip6tables', '-t', 'filter', '-L', 'OUTPUT',
                        '-n', '-v', '-x', '-Z'],
                       root_helper=self.root_helper),
             iptables_dump),
        ]
        tools.setup_mock_calls(self.execute, expected_calls_and_values)
        acc = self.iptables.get_traffic_counters('OUTPUT', zero=True)
        self.assertEqual(acc['pkts'], 1600)
        self.assertEqual(acc['bytes'], 263604)
        tools.verify_mock_calls(self.execute, expected_calls_and_values)
class IptablesManagerStateLessTestCase(base.BaseTestCase):
    """Behavior of IptablesManager when created in stateless mode."""

    def setUp(self):
        super(IptablesManagerStateLessTestCase, self).setUp()
        self.iptables = iptables_manager.IptablesManager(state_less=True)

    def test_nat_not_found(self):
        """A stateless manager must not expose an IPv4 NAT table."""
        self.assertNotIn('nat', self.iptables.ipv4)
| apache-2.0 |
santisiri/popego | envs/ALPHA-POPEGO/lib/python2.5/site-packages/SQLAlchemy-0.4.0-py2.5.egg/sqlalchemy/databases/access.py | 1 | 15034 | # access.py
# Copyright (C) 2007 Paul Johnston, paj@pajhome.org.uk
# Portions derived from jet2sql.py by Matt Keranen, mksql@yahoo.com
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import random
from sqlalchemy import sql, schema, types, exceptions, pool
from sqlalchemy.sql import compiler, expression
from sqlalchemy.engine import default, base
class AcNumeric(types.Numeric):
    """NUMERIC column type for Access."""

    def result_processor(self, dialect):
        # Values coming back from the driver need no post-processing.
        return None

    def bind_processor(self, dialect):
        def process(value):
            # Access accepts numerics as strings; None passes through.
            return value if value is None else str(value)
        return process

    def get_col_spec(self):
        return "NUMERIC"
class AcFloat(types.Float):
    """FLOAT column type for Access."""

    def get_col_spec(self):
        return "FLOAT"

    def bind_processor(self, dialect):
        """By converting to string, we can use Decimal types round-trip."""
        def process(value):
            if value is None:
                return None
            return str(value)
        return process
class AcInteger(types.Integer):
    """INTEGER column type for Access."""

    def get_col_spec(self):
        return "INTEGER"
class AcTinyInteger(types.Integer):
    """TINYINT column type for Access."""

    def get_col_spec(self):
        return "TINYINT"
class AcSmallInteger(types.Smallinteger):
    """SMALLINT column type for Access."""

    def get_col_spec(self):
        return "SMALLINT"
class AcDateTime(types.DateTime):
    """DATETIME column type for Access."""

    def __init__(self, *a, **kw):
        # Passes False positionally to the parent constructor (presumably the
        # ``timezone`` flag in SQLAlchemy 0.4 -- verify) and drops all other
        # arguments.
        super(AcDateTime, self).__init__(False)

    def get_col_spec(self):
        return "DATETIME"
class AcDate(types.Date):
    """Date column type; rendered as DATETIME since Access has no DATE."""

    def __init__(self, *a, **kw):
        # Same positional-False construction as AcDateTime; extra args dropped.
        super(AcDate, self).__init__(False)

    def get_col_spec(self):
        return "DATETIME"
class AcText(types.TEXT):
    """Unbounded text; Access calls this MEMO."""

    def get_col_spec(self):
        return "MEMO"
class AcString(types.String):
    """Variable-length string; renders as TEXT or TEXT(n) when sized."""

    def get_col_spec(self):
        # Conditional expression instead of the fragile ``x and a or b``
        # idiom; behavior is identical (length 0/None yields bare TEXT).
        return "TEXT" + ("(%d)" % self.length if self.length else "")
class AcUnicode(types.Unicode):
    """Unicode string; rendered as TEXT/TEXT(n), no codec round-trip."""

    def get_col_spec(self):
        # Conditional expression instead of the fragile ``x and a or b``
        # idiom; behavior is identical (length 0/None yields bare TEXT).
        return "TEXT" + ("(%d)" % self.length if self.length else "")

    def bind_processor(self, dialect):
        # No encode step: the driver accepts unicode directly.
        return None

    def result_processor(self, dialect):
        # No decode step needed either.
        return None
class AcChar(types.CHAR):
    """Fixed-length string; Access has no CHAR, so TEXT/TEXT(n) is used."""

    def get_col_spec(self):
        # Conditional expression instead of the fragile ``x and a or b``
        # idiom; behavior is identical (length 0/None yields bare TEXT).
        return "TEXT" + ("(%d)" % self.length if self.length else "")
class AcBinary(types.Binary):
    """BINARY column type for Access."""

    def get_col_spec(self):
        return "BINARY"
class AcBoolean(types.Boolean):
    """Boolean column; Access calls it YESNO and stores 1/0."""

    def get_col_spec(self):
        return "YESNO"

    def result_processor(self, dialect):
        def process(value):
            # NULL stays None; anything else is coerced to a real bool.
            if value is None:
                return None
            return bool(value)
        return process

    def bind_processor(self, dialect):
        def process(value):
            # None -> NULL; the bool singletons map to the integers Access
            # expects; any other value is coerced by truthiness.
            if value is None:
                return None
            if value is True:
                return 1
            if value is False:
                return 0
            return bool(value)
        return process
class AcTimeStamp(types.TIMESTAMP):
    """TIMESTAMP column type for Access."""

    def get_col_spec(self):
        return "TIMESTAMP"
def descriptor():
    """Describe this dialect for SQLAlchemy's engine-discovery machinery."""
    return {
        'name': 'access',
        'description': 'Microsoft Access',
        'arguments': [
            ('user', "Database user name", None),
            ('password', "Database password", None),
            ('db', "Path to database file", None),
        ],
    }
class AccessExecutionContext(default.DefaultExecutionContext):
    """Execution context that recovers Access COUNTER (identity) values."""

    def _has_implicit_sequence(self, column):
        # True when the column will render as a COUNTER: integer primary key,
        # autoincrement, no foreign key, and no explicit non-optional default.
        if column.primary_key and column.autoincrement:
            if isinstance(column.type, types.Integer) and not column.foreign_key:
                if column.default is None or (isinstance(column.default, schema.Sequence) and \
                        column.default.optional):
                    return True
        return False

    def post_exec(self):
        """If we inserted into a row with a COUNTER column, fetch the ID"""
        if self.compiled.isinsert:
            tbl = self.compiled.statement.table
            if not hasattr(tbl, 'has_sequence'):
                # Cache the counter column (or None) on the table object so
                # the scan below runs only once per table.
                tbl.has_sequence = None
                for column in tbl.c:
                    if getattr(column, 'sequence', False) or self._has_implicit_sequence(column):
                        tbl.has_sequence = column
                        break
            if bool(tbl.has_sequence):
                # TBD: for some reason _last_inserted_ids doesn't exist here
                # (but it does at corresponding point in mssql???)
                #if not len(self._last_inserted_ids) or self._last_inserted_ids[0] is None:
                self.cursor.execute("SELECT @@identity AS lastrowid")
                row = self.cursor.fetchone()
                self._last_inserted_ids = [int(row[0])]  #+ self._last_inserted_ids[1:]
                # print "LAST ROW ID", self._last_inserted_ids
        super(AccessExecutionContext, self).post_exec()
# Module-level handles to the win32com constants and the DAO engine;
# populated lazily by AccessDialect.dbapi() on first use.
const, daoEngine = None, None
class AccessDialect(default.DefaultDialect):
    """SQLAlchemy 0.4 dialect for Microsoft Access over ODBC + DAO."""

    # Map generic SQLAlchemy types onto the Access-specific implementations.
    colspecs = {
        types.Unicode : AcUnicode,
        types.Integer : AcInteger,
        types.Smallinteger: AcSmallInteger,
        types.Numeric : AcNumeric,
        types.Float : AcFloat,
        types.DateTime : AcDateTime,
        types.Date : AcDate,
        types.String : AcString,
        types.Binary : AcBinary,
        types.Boolean : AcBoolean,
        types.TEXT : AcText,
        types.CHAR: AcChar,
        types.TIMESTAMP: AcTimeStamp,
    }
    # The Access ODBC driver does not report reliable rowcounts.
    supports_sane_rowcount = False
    supports_sane_multi_rowcount = False

    def type_descriptor(self, typeobj):
        newobj = types.adapt_type(typeobj, self.colspecs)
        return newobj

    def __init__(self, **params):
        super(AccessDialect, self).__init__(**params)
        self.text_as_varchar = False
        self._dtbs = None

    def dbapi(cls):
        # Imported lazily: win32com is only available on Windows.  The DAO
        # engine and its constants are stashed in module globals for later
        # use by reflecttable()/table_names().
        import win32com.client
        win32com.client.gencache.EnsureModule('{00025E01-0000-0000-C000-000000000046}', 0, 5, 0)
        global const, daoEngine
        if const is None:
            const = win32com.client.constants
            daoEngine = win32com.client.Dispatch('DAO.DBEngine.36')
        import pyodbc as module
        return module
    dbapi = classmethod(dbapi)

    def create_connect_args(self, url):
        # Build an ODBC connection string from the SQLAlchemy URL.
        opts = url.translate_connect_args()
        connectors = ["Driver={Microsoft Access Driver (*.mdb)}"]
        connectors.append("Dbq=%s" % opts["database"])
        user = opts.get("username", None)
        if user:
            connectors.append("UID=%s" % user)
            connectors.append("PWD=%s" % opts.get("password", ""))
        return [[";".join(connectors)], {}]

    def create_execution_context(self, *args, **kwargs):
        return AccessExecutionContext(self, *args, **kwargs)

    def last_inserted_ids(self):
        return self.context.last_inserted_ids

    def do_execute(self, cursor, statement, params, **kwargs):
        # pyodbc rejects an empty dict of parameters; pass an empty tuple.
        if params == {}:
            params = ()
        super(AccessDialect, self).do_execute(cursor, statement, params, **kwargs)

    def _execute(self, c, statement, parameters):
        try:
            if parameters == {}:
                parameters = ()
            c.execute(statement, parameters)
            self.context.rowcount = c.rowcount
        except Exception, e:
            raise exceptions.DBAPIError.instance(statement, parameters, e)

    def has_table(self, connection, tablename, schema=None):
        # This approach seems to be more reliable that using DAO
        try:
            connection.execute('select top 1 * from [%s]' % tablename)
            return True
        except Exception, e:
            return False

    def reflecttable(self, connection, table, include_columns):
        # This is defined in the function, as it relies on win32com constants,
        # that aren't imported until dbapi method is called
        if not hasattr(self, 'ischema_names'):
            self.ischema_names = {
                const.dbByte: AcBinary,
                const.dbInteger: AcInteger,
                const.dbLong: AcInteger,
                const.dbSingle: AcFloat,
                const.dbDouble: AcFloat,
                const.dbDate: AcDateTime,
                const.dbLongBinary: AcBinary,
                const.dbMemo: AcText,
                const.dbBoolean: AcBoolean,
                const.dbText: AcUnicode,  # All Access strings are unicode
            }
        # A fresh DAO connection is opened for each reflection
        # This is necessary, so we get the latest updates
        dtbs = daoEngine.OpenDatabase(connection.engine.url.database)
        try:
            # Case-insensitive lookup of the table definition.
            for tbl in dtbs.TableDefs:
                if tbl.Name.lower() == table.name.lower():
                    break
            else:
                raise exceptions.NoSuchTableError(table.name)
            for col in tbl.Fields:
                coltype = self.ischema_names[col.Type]
                if col.Type == const.dbText:
                    coltype = coltype(col.Size)
                colargs = \
                    {
                        'nullable': not(col.Required or col.Attributes & const.dbAutoIncrField),
                    }
                default = col.DefaultValue
                if col.Attributes & const.dbAutoIncrField:
                    colargs['default'] = schema.Sequence(col.Name + '_seq')
                elif default:
                    if col.Type == const.dbBoolean:
                        default = default == 'Yes' and '1' or '0'
                    colargs['default'] = schema.PassiveDefault(sql.text(default))
                table.append_column(schema.Column(col.Name, coltype, **colargs))
                # TBD: check constraints
            # Find primary key columns first
            for idx in tbl.Indexes:
                if idx.Primary:
                    for col in idx.Fields:
                        thecol = table.c[col.Name]
                        table.primary_key.add(thecol)
                        if isinstance(thecol.type, AcInteger) and \
                                not (thecol.default and isinstance(thecol.default.arg, schema.Sequence)):
                            thecol.autoincrement = False
            # Then add other indexes
            for idx in tbl.Indexes:
                if not idx.Primary:
                    if len(idx.Fields) == 1:
                        col = table.c[idx.Fields[0].Name]
                        if not col.primary_key:
                            col.index = True
                            col.unique = idx.Unique
                    else:
                        pass  # TBD: multi-column indexes
            # Foreign keys come from the database-wide Relations collection.
            for fk in dtbs.Relations:
                if fk.ForeignTable != table.name:
                    continue
                scols = [c.ForeignName for c in fk.Fields]
                rcols = ['%s.%s' % (fk.Table, c.Name) for c in fk.Fields]
                table.append_constraint(schema.ForeignKeyConstraint(scols, rcols))
        finally:
            dtbs.Close()

    def table_names(self, connection, schema):
        # A fresh DAO connection is opened for each reflection
        # This is necessary, so we get the latest updates
        dtbs = daoEngine.OpenDatabase(connection.engine.url.database)
        # Skip Access system tables (MSys*) and temporary tables (~TMP*).
        names = [t.Name for t in dtbs.TableDefs if t.Name[:4] != "MSys" and t.Name[:4] <> "~TMP"]
        dtbs.Close()
        return names
class AccessCompiler(compiler.DefaultCompiler):
    """Statement compiler handling Access SQL quirks (TOP, mod, functions)."""

    def visit_select_precolumns(self, select):
        """Access puts TOP, it's version of LIMIT here """
        s = select.distinct and "DISTINCT " or ""
        if select.limit:
            s += "TOP %s " % (select.limit)
        if select.offset:
            raise exceptions.InvalidRequestError('Access does not support LIMIT with an offset')
        return s

    def limit_clause(self, select):
        """Limit in access is after the select keyword"""
        return ""

    def binary_operator_string(self, binary):
        """Access uses "mod" instead of "%" """
        return binary.operator == '%' and 'mod' or binary.operator

    def label_select_column(self, select, column):
        # Bare function expressions get a random suffix label so the result
        # column has a usable name.
        if isinstance(column, expression._Function):
            return column.label(column.name + "_" + hex(random.randint(0, 65535))[2:])
        else:
            return super(AccessCompiler, self).label_select_column(select, column)

    # ANSI function names that must be rewritten to their Access equivalents.
    function_rewrites = {'current_date': 'now',
                         'current_timestamp': 'now',
                         'length': 'len',
                         }

    def visit_function(self, func):
        """Access function names differ from the ANSI SQL names; rewrite common ones"""
        # NOTE(review): the superclass return value is discarded here;
        # presumably the 0.4 compiler accumulates output as a side effect --
        # verify against compiler.DefaultCompiler.
        func.name = self.function_rewrites.get(func.name, func.name)
        super(AccessCompiler, self).visit_function(func)

    def for_update_clause(self, select):
        """FOR UPDATE is not supported by Access; silently ignore"""
        return ''
class AccessSchemaGenerator(compiler.SchemaGenerator):
    """DDL generator that renders identity columns as Access ``counter``."""

    def get_column_specification(self, column, **kwargs):
        colspec = self.preparer.format_column(column) + " " + column.type.dialect_impl(self.dialect).get_col_spec()

        # install a sequence if we have an implicit IDENTITY column
        # (only one counter per table: skip if the table already has one).
        if (not getattr(column.table, 'has_sequence', False)) and column.primary_key and \
                column.autoincrement and isinstance(column.type, types.Integer) and not column.foreign_key:
            if column.default is None or (isinstance(column.default, schema.Sequence) and column.default.optional):
                column.sequence = schema.Sequence(column.name + '_seq')

        if not column.nullable:
            colspec += " NOT NULL"

        if hasattr(column, 'sequence'):
            # Re-render the whole column as a counter; this overrides the
            # type spec (and the NOT NULL suffix) computed above.
            column.table.has_sequence = column
            colspec = self.preparer.format_column(column) + " counter"
        else:
            default = self.get_column_default_string(column)
            if default is not None:
                colspec += " DEFAULT " + default

        return colspec
class AccessSchemaDropper(compiler.SchemaDropper):
    """Schema dropper emitting the Access table-qualified DROP INDEX form."""

    def visit_index(self, index):
        statement = "\nDROP INDEX [%s].[%s]" % (index.table.name, index.name)
        self.append(statement)
        self.execute()
class AccessDefaultRunner(base.DefaultRunner):
    """Column-default runner; the generic behavior suffices for Access."""
    pass
class AccessIdentifierPreparer(compiler.IdentifierPreparer):
    """Quotes identifiers with [brackets] and extends the reserved words."""

    reserved_words = compiler.RESERVED_WORDS.copy()
    reserved_words.update(['value', 'text'])

    def __init__(self, dialect):
        super(AccessIdentifierPreparer, self).__init__(dialect, initial_quote='[', final_quote=']')
# Wire the Access-specific components onto the dialect class so the engine
# machinery picks them up.
dialect = AccessDialect
dialect.poolclass = pool.SingletonThreadPool
dialect.statement_compiler = AccessCompiler
dialect.schemagenerator = AccessSchemaGenerator
dialect.schemadropper = AccessSchemaDropper
dialect.preparer = AccessIdentifierPreparer
dialect.defaultrunner = AccessDefaultRunner
| bsd-3-clause |
sasukeh/cinder | cinder/volume/drivers/netapp/utils.py | 3 | 16796 | # Copyright (c) 2012 NetApp, Inc. All rights reserved.
# Copyright (c) 2014 Navneet Singh. All rights reserved.
# Copyright (c) 2014 Clinton Knight. All rights reserved.
# Copyright (c) 2015 Tom Barron. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Utilities for NetApp drivers.
This module contains common utilities to be used by one or more
NetApp drivers to achieve the desired functionality.
"""
import decimal
import platform
import socket
from oslo_concurrency import processutils as putils
from oslo_log import log as logging
from oslo_utils import importutils
import six
from cinder import context
from cinder import exception
from cinder.i18n import _, _LE, _LW, _LI
from cinder import utils
from cinder import version
from cinder.volume import qos_specs
from cinder.volume import volume_types
LOG = logging.getLogger(__name__)
OPENSTACK_PREFIX = 'openstack-'
OBSOLETE_SSC_SPECS = {'netapp:raid_type': 'netapp_raid_type',
'netapp:disk_type': 'netapp_disk_type'}
DEPRECATED_SSC_SPECS = {'netapp_unmirrored': 'netapp_mirrored',
'netapp_nodedup': 'netapp_dedup',
'netapp_nocompression': 'netapp_compression',
'netapp_thick_provisioned': 'netapp_thin_provisioned'}
QOS_KEYS = frozenset(
['maxIOPS', 'total_iops_sec', 'maxBPS', 'total_bytes_sec'])
BACKEND_QOS_CONSUMERS = frozenset(['back-end', 'both'])
def validate_instantiation(**kwargs):
    """Checks if a driver is instantiated other than by the unified driver.

    Helps check direct instantiation of netapp drivers.
    Call this function in every netapp block driver constructor.
    """
    # 'proxy' mode means we were created by NetAppDriver; anything else is
    # a direct instantiation and earns a warning.
    if kwargs.get('netapp_mode') == 'proxy':
        return
    LOG.warning(_LW("It is not the recommended way to use drivers by NetApp. "
                    "Please use NetAppDriver to achieve the functionality."))
def check_flags(required_flags, configuration):
    """Ensure that the flags we care about are set."""
    for flag in required_flags:
        # Reject on the first unset (or falsy) flag, naming it in the error.
        if getattr(configuration, flag, None):
            continue
        raise exception.InvalidInput(
            reason=_('Configuration value %s is not set.') % flag)
def check_netapp_lib():
    # Fail fast with an actionable message when the external NetApp API
    # library (netapp-lib) is not importable.
    if not importutils.try_import('netapp_lib'):
        msg = ('You have not installed the NetApp API Library for OpenStack. '
               'Please install it using "sudo pip install netapp-lib" and '
               'restart this service!')
        raise exception.NetAppDriverException(msg)
def to_bool(val):
    """Converts true, yes, y, 1 to True, False otherwise.

    The comparison is case-insensitive; 'enabled' is also accepted.
    Falsy inputs (None, '', 0, ...) return False.
    """
    if not val:
        return False
    # Membership test replaces the original chain of equality checks.
    return six.text_type(val).lower() in ('true', 'y', 'yes', 'enabled', '1')
@utils.synchronized("safe_set_attr")
def set_safe_attr(instance, attr, val):
    """Sets the attribute in a thread safe manner.

    Returns if new val was set on attribute.
    If attr already had the value then False.
    """
    # Serialized via utils.synchronized so concurrent callers cannot race
    # between the read of the old value and the write of the new one.
    if not instance or not attr:
        return False
    old_val = getattr(instance, attr, None)
    if val is None and old_val is None:
        return False
    elif val == old_val:
        return False
    else:
        setattr(instance, attr, val)
        return True
def get_volume_extra_specs(volume):
    """Provides extra specs associated with volume."""
    ctxt = context.get_admin_context()
    type_id = volume.get('volume_type_id')
    if type_id is None:
        return {}
    vol_type = volume_types.get_volume_type(ctxt, type_id)
    if vol_type is None:
        return {}
    specs = vol_type.get('extra_specs', {})
    # Surface deprecation warnings for any legacy spec keys present.
    log_extra_spec_warnings(specs)
    return specs
def resolve_hostname(hostname):
    """Resolves host name to IP address."""
    # Take the first resolution result; sockaddr is (host, port[, ...]).
    family, socktype, proto, canonname, sockaddr = socket.getaddrinfo(
        hostname, None)[0]
    return sockaddr[0]
def round_down(value, precision):
    """Truncate *value* toward zero at the granularity of *precision*."""
    quantum = decimal.Decimal(precision)
    exact = decimal.Decimal(six.text_type(value))
    return float(exact.quantize(quantum, rounding=decimal.ROUND_DOWN))
def log_extra_spec_warnings(extra_specs):
    """Log a warning for every obsolete/deprecated NetApp extra spec."""
    present = set(extra_specs.keys() if extra_specs else [])
    for spec in present & set(OBSOLETE_SSC_SPECS.keys()):
        LOG.warning(_LW('Extra spec %(old)s is obsolete. Use %(new)s '
                        'instead.'), {'old': spec,
                                      'new': OBSOLETE_SSC_SPECS[spec]})
    for spec in present & set(DEPRECATED_SSC_SPECS.keys()):
        LOG.warning(_LW('Extra spec %(old)s is deprecated. Use %(new)s '
                        'instead.'), {'old': spec,
                                      'new': DEPRECATED_SSC_SPECS[spec]})
def get_iscsi_connection_properties(lun_id, volume, iqn,
                                    address, port):
    """Assemble the connection-info dict for an iSCSI attachment."""
    properties = {
        'target_discovered': False,
        'target_portal': '%s:%s' % (address, port),
        'target_iqn': iqn,
        'target_lun': int(lun_id),
        'volume_id': volume['id'],
    }
    auth = volume['provider_auth']
    if auth:
        # provider_auth is a space-separated 'method username secret' triple.
        auth_method, auth_username, auth_secret = auth.split()
        properties['auth_method'] = auth_method
        properties['auth_username'] = auth_username
        properties['auth_password'] = auth_secret
    return {
        'driver_volume_type': 'iscsi',
        'data': properties,
    }
def validate_qos_spec(qos_spec):
    """Check validity of Cinder qos spec for our backend."""
    if qos_spec is None:
        return
    recognized = [key.lower() for key in QOS_KEYS]
    seen = []
    for key in qos_spec:
        lowered = key.lower()
        if lowered not in recognized:
            raise exception.Invalid(
                _('Unrecognized QOS keyword: "%s"') % key)
        seen.append(lowered)
    # Modify the following check when we allow multiple settings in one spec.
    if len(seen) > 1:
        raise exception.Invalid(_('Only one limit can be set in a QoS spec.'))
def get_volume_type_from_volume(volume):
    """Provides volume type associated with volume."""
    type_id = volume.get('volume_type_id')
    if type_id is None:
        # Untyped volume: report an empty type rather than None.
        return {}
    admin_ctxt = context.get_admin_context()
    return volume_types.get_volume_type(admin_ctxt, type_id)
def map_qos_spec(qos_spec, volume):
    """Map Cinder QOS spec to limit/throughput-value as used in client API.

    Accepts either the NetApp-style keys (maxIOPS/maxBPS) or the standard
    Cinder keys (total_iops_sec/total_bytes_sec), lower-cased internally.
    """
    if qos_spec is None:
        return None
    qos_spec = map_dict_to_lower(qos_spec)
    spec = dict(policy_name=get_qos_policy_group_name(volume),
                max_throughput=None)
    # IOPS and BPS specifications are exclusive of one another.
    # BUG FIX: the original guard accepted 'total_iops_sec'/'total_bytes_sec'
    # but then always read qos_spec['maxiops']/['maxbps'], raising KeyError
    # when only the standard Cinder keys were supplied.
    if 'maxiops' in qos_spec:
        spec['max_throughput'] = '%siops' % qos_spec['maxiops']
    elif 'total_iops_sec' in qos_spec:
        spec['max_throughput'] = '%siops' % qos_spec['total_iops_sec']
    elif 'maxbps' in qos_spec:
        spec['max_throughput'] = '%sB/s' % qos_spec['maxbps']
    elif 'total_bytes_sec' in qos_spec:
        spec['max_throughput'] = '%sB/s' % qos_spec['total_bytes_sec']
    return spec
def map_dict_to_lower(input_dict):
    """Return an equivalent to the input dictionary with lower-case keys."""
    return {key.lower(): value for key, value in input_dict.items()}
def get_qos_policy_group_name(volume):
    """Return the name of backend QOS policy group based on its volume id."""
    if 'id' not in volume:
        return None
    return OPENSTACK_PREFIX + volume['id']
def get_qos_policy_group_name_from_info(qos_policy_group_info):
    """Return the name of a QOS policy group given qos policy group info."""
    if qos_policy_group_info is None:
        return None
    # Legacy info wins over a spec-based policy, matching the lookup order.
    for key in ('legacy', 'spec'):
        entry = qos_policy_group_info.get(key)
        if entry is not None:
            return entry['policy_name']
    return None
def get_valid_qos_policy_group_info(volume, extra_specs=None):
    """Given a volume, return information for QOS provisioning."""
    info = {'legacy': None, 'spec': None}
    try:
        volume_type = get_volume_type_from_volume(volume)
    except KeyError:
        LOG.exception(_LE('Cannot get QoS spec for volume %s.'), volume['id'])
        return info
    if volume_type is None:
        return info
    if extra_specs is None:
        extra_specs = volume_type.get('extra_specs', {})
    info['legacy'] = get_legacy_qos_policy(extra_specs)
    info['spec'] = get_valid_backend_qos_spec_from_volume_type(volume,
                                                              volume_type)
    LOG.debug('QoS policy group info for volume %(vol)s: %(info)s',
              {'vol': volume['name'], 'info': info})
    # Legacy extra-spec policy and a real QoS spec may not coexist.
    check_for_invalid_qos_spec_combination(info, volume_type)
    return info
def get_valid_backend_qos_spec_from_volume_type(volume, volume_type):
    """Given a volume type, return the associated Cinder QoS spec."""
    key_values = get_backend_qos_spec_from_volume_type(volume_type)
    if key_values is None:
        return None
    # Reject unknown keywords / multiple limits before mapping.
    validate_qos_spec(key_values)
    return map_qos_spec(key_values, volume)
def get_backend_qos_spec_from_volume_type(volume_type):
    """Return the backend QoS spec dict for *volume_type*, or None."""
    qos_specs_id = volume_type.get('qos_specs_id')
    if qos_specs_id is None:
        return None
    qos_spec = qos_specs.get_qos_specs(context.get_admin_context(),
                                       qos_specs_id)
    if qos_spec is None:
        return None
    # Front end QoS specs are handled by libvirt and we ignore them here.
    if qos_spec['consumer'] not in BACKEND_QOS_CONSUMERS:
        return None
    return qos_spec['specs']
def check_for_invalid_qos_spec_combination(info, volume_type):
    """Invalidate QOS spec if both legacy and non-legacy info is present."""
    if not (info['legacy'] and info['spec']):
        return
    msg = _('Conflicting QoS specifications in volume type '
            '%s: when QoS spec is associated to volume '
            'type, legacy "netapp:qos_policy_group" is not allowed in '
            'the volume type extra specs.') % volume_type['id']
    raise exception.Invalid(msg)
def get_legacy_qos_policy(extra_specs):
    """Return legacy qos policy information if present in extra specs."""
    policy_name = extra_specs.get('netapp:qos_policy_group')
    if policy_name is None:
        return None
    return {'policy_name': policy_name}
class hashabledict(dict):
    """A hashable dictionary that is comparable (i.e. in unit tests, etc.)"""

    def __hash__(self):
        # Sorting the items makes logically-equal dicts hash identically
        # regardless of insertion order.
        ordered_items = sorted(self.items())
        return hash(tuple(ordered_items))
class OpenStackInfo(object):
    """OS/distribution, release, and version.

    NetApp uses these fields as content for EMS log entry.
    """

    PACKAGE_NAME = 'python-cinder'

    def __init__(self):
        # Fallback values used when none of the probes below succeed.
        self._version = 'unknown version'
        self._release = 'unknown release'
        self._vendor = 'unknown vendor'
        self._platform = 'unknown platform'

    def _update_version_from_version_string(self):
        try:
            self._version = version.version_info.version_string()
        except Exception:
            pass

    def _update_release_from_release_string(self):
        try:
            self._release = version.version_info.release_string()
        except Exception:
            pass

    def _update_platform(self):
        try:
            self._platform = platform.platform()
        except Exception:
            pass

    @staticmethod
    def _get_version_info_version():
        return version.version_info.version

    @staticmethod
    def _get_version_info_release():
        return version.version_info.release

    def _update_info_from_version_info(self):
        # Overrides the *_string() values with the attribute forms when
        # those are available and non-empty.
        try:
            ver = self._get_version_info_version()
            if ver:
                self._version = ver
        except Exception:
            pass
        try:
            rel = self._get_version_info_release()
            if rel:
                self._release = rel
        except Exception:
            pass

    # RDO, RHEL-OSP, Mirantis on Redhat, SUSE
    def _update_info_from_rpm(self):
        """Query rpm for package info; returns True when data was found."""
        LOG.debug('Trying rpm command.')
        try:
            out, err = putils.execute("rpm", "-q", "--queryformat",
                                      "'%{version}\t%{release}\t%{vendor}'",
                                      self.PACKAGE_NAME)
            if not out:
                LOG.info(_LI('No rpm info found for %(pkg)s package.'), {
                    'pkg': self.PACKAGE_NAME})
                return False
            # Output order matches the queryformat: version, release, vendor
            # (vendor may contain spaces, hence the join of the tail).
            parts = out.split()
            self._version = parts[0]
            self._release = parts[1]
            self._vendor = ' '.join(parts[2::])
            return True
        except Exception as e:
            LOG.info(_LI('Could not run rpm command: %(msg)s.'), {'msg': e})
            return False

    # ubuntu, mirantis on ubuntu
    def _update_info_from_dpkg(self):
        """Query dpkg for package info; returns True when data was found."""
        LOG.debug('Trying dpkg-query command.')
        try:
            _vendor = None
            out, err = putils.execute("dpkg-query", "-W", "-f='${Version}'",
                                      self.PACKAGE_NAME)
            if not out:
                LOG.info(_LI('No dpkg-query info found for %(pkg)s package.'),
                         {'pkg': self.PACKAGE_NAME})
                return False
            # debian format: [epoch:]upstream_version[-debian_revision]
            deb_version = out
            # in case epoch or revision is missing, copy entire string
            _release = deb_version
            if ':' in deb_version:
                deb_epoch, upstream_version = deb_version.split(':')
                _release = upstream_version
            if '-' in deb_version:
                deb_revision = deb_version.split('-')[1]
                _vendor = deb_revision
            self._release = _release
            if _vendor:
                self._vendor = _vendor
            return True
        except Exception as e:
            LOG.info(_LI('Could not run dpkg-query command: %(msg)s.'), {
                'msg': e})
            return False

    def _update_openstack_info(self):
        # Probe from least to most specific sources; later probes override.
        self._update_version_from_version_string()
        self._update_release_from_release_string()
        self._update_platform()
        # some distributions override with more meaningful information
        self._update_info_from_version_info()
        # see if we have still more targeted info from rpm or apt
        found_package = self._update_info_from_rpm()
        if not found_package:
            self._update_info_from_dpkg()

    def info(self):
        """Return 'version|release|vendor|platform' for EMS logging."""
        self._update_openstack_info()
        return '%(version)s|%(release)s|%(vendor)s|%(platform)s' % {
            'version': self._version, 'release': self._release,
            'vendor': self._vendor, 'platform': self._platform}
class Features(object):
    """Registry of named backend features and their enablement state."""

    def __init__(self):
        # Names of all features registered via add_feature().
        self.defined_features = set()

    def add_feature(self, name, supported=True, min_version=None):
        """Register *name* as an attribute holding a FeatureState."""
        if not isinstance(supported, bool):
            raise TypeError("Feature value must be a bool type.")
        self.defined_features.add(name)
        setattr(self, name, FeatureState(supported, min_version))

    def __getattr__(self, name):
        # NOTE(cknight): Needed to keep pylint happy.
        raise AttributeError
class FeatureState(object):

    def __init__(self, supported=True, minimum_version=None):
        """Represents the current state of enablement for a Feature

        :param supported: True if supported, false otherwise
        :param minimum_version: The minimum version that this feature is
        suported at
        """
        self.supported = supported
        self.minimum_version = minimum_version

    def __nonzero__(self):
        """Allow a FeatureState object to be tested for truth value

        :return True if the feature is supported, otherwise False
        """
        return self.supported

    # BUG FIX: __nonzero__ is Python-2-only; this module targets py2/py3
    # (it uses six throughout), and without __bool__ every FeatureState is
    # truthy on Python 3 regardless of ``supported``.
    __bool__ = __nonzero__
| apache-2.0 |
jarped/QGIS | python/plugins/processing/gui/AutofillDialog.py | 17 | 2264 | # -*- coding: utf-8 -*-
"""
***************************************************************************
AutofillDialog.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from PyQt4 import uic
from PyQt4.QtGui import QDialog
# Root directory of the processing plugin (one level above this package).
pluginPath = os.path.split(os.path.dirname(__file__))[0]
# Load the Qt Designer form; WIDGET is the generated UI class and BASE the
# matching Qt base class, both used as mixins by AutofillDialog below.
WIDGET, BASE = uic.loadUiType(
    os.path.join(pluginPath, 'ui', 'DlgAutofill.ui'))
class AutofillDialog(BASE, WIDGET):
    """Dialog asking how a batch-processing table column should be filled."""

    # Fill-mode indices, matching the entry order of cmbFillType.
    DO_NOT_AUTOFILL = 0
    FILL_WITH_NUMBERS = 1
    FILL_WITH_PARAMETER = 2

    def __init__(self, alg):
        super(AutofillDialog, self).__init__(None)
        self.setupUi(self)
        self.cmbFillType.currentIndexChanged.connect(self.toggleParameters)
        # Offer every parameter of the algorithm as a fill source.
        for param in alg.parameters:
            self.cmbParameters.addItem(param.description)

    def toggleParameters(self, index):
        # The parameter picker is only meaningful in FILL_WITH_PARAMETER mode.
        enabled = index == self.FILL_WITH_PARAMETER
        self.lblParameters.setEnabled(enabled)
        self.cmbParameters.setEnabled(enabled)

    def accept(self):
        # Record the user's choices for the caller before closing.
        self.mode = self.cmbFillType.currentIndex()
        self.param = self.cmbParameters.currentIndex()
        QDialog.accept(self)

    def reject(self):
        # Cancelled: signal "no selection" with None values.
        self.mode = None
        self.param = None
        QDialog.reject(self)
| gpl-2.0 |
Blitzen/oauthlib | tests/oauth1/rfc5849/endpoints/test_base.py | 27 | 16073 | from __future__ import unicode_literals, absolute_import
from mock import MagicMock
from re import sub
from ....unittest import TestCase
from oauthlib.common import safe_string_equals
from oauthlib.oauth1 import Client, RequestValidator
from oauthlib.oauth1.rfc5849 import errors, SIGNATURE_RSA, SIGNATURE_HMAC
from oauthlib.oauth1.rfc5849 import SIGNATURE_PLAINTEXT
from oauthlib.oauth1.rfc5849.endpoints import RequestTokenEndpoint, BaseEndpoint
URLENCODED = {"Content-Type": "application/x-www-form-urlencoded"}
class BaseEndpointTest(TestCase):
    def setUp(self):
        # A permissive validator mock: every check passes unless a test
        # overrides a specific return value.
        self.validator = MagicMock(spec=RequestValidator)
        self.validator.allowed_signature_methods = ['HMAC-SHA1']
        self.validator.timestamp_lifetime = 600
        self.endpoint = RequestTokenEndpoint(self.validator)
        self.client = Client('foo', callback_uri='https://c.b/cb')
        # A correctly-signed HTTPS request, reused by most tests below.
        self.uri, self.headers, self.body = self.client.sign(
            'https://i.b/request_token')
def test_ssl_enforcement(self):
uri, headers, _ = self.client.sign('http://i.b/request_token')
h, b, s = self.endpoint.create_request_token_response(
uri, headers=headers)
self.assertEqual(s, 400)
self.assertIn('insecure_transport_protocol', b)
def test_missing_parameters(self):
h, b, s = self.endpoint.create_request_token_response(self.uri)
self.assertEqual(s, 400)
self.assertIn('invalid_request', b)
def test_signature_methods(self):
headers = {}
headers['Authorization'] = self.headers['Authorization'].replace(
'HMAC', 'RSA')
h, b, s = self.endpoint.create_request_token_response(
self.uri, headers=headers)
self.assertEqual(s, 400)
self.assertIn('invalid_signature_method', b)
def test_invalid_version(self):
headers = {}
headers['Authorization'] = self.headers['Authorization'].replace(
'1.0', '2.0')
h, b, s = self.endpoint.create_request_token_response(
self.uri, headers=headers)
self.assertEqual(s, 400)
self.assertIn('invalid_request', b)
def test_expired_timestamp(self):
headers = {}
for pattern in ('12345678901', '4567890123', '123456789K'):
headers['Authorization'] = sub('timestamp="\d*k?"',
'timestamp="%s"' % pattern,
self.headers['Authorization'])
h, b, s = self.endpoint.create_request_token_response(
self.uri, headers=headers)
self.assertEqual(s, 400)
self.assertIn('invalid_request', b)
def test_client_key_check(self):
self.validator.check_client_key.return_value = False
h, b, s = self.endpoint.create_request_token_response(
self.uri, headers=self.headers)
self.assertEqual(s, 400)
self.assertIn('invalid_request', b)
def test_noncecheck(self):
self.validator.check_nonce.return_value = False
h, b, s = self.endpoint.create_request_token_response(
self.uri, headers=self.headers)
self.assertEqual(s, 400)
self.assertIn('invalid_request', b)
def test_enforce_ssl(self):
"""Ensure SSL is enforced by default."""
v = RequestValidator()
e = BaseEndpoint(v)
c = Client('foo')
u, h, b = c.sign('http://example.com')
r = e._create_request(u, 'GET', b, h)
self.assertRaises(errors.InsecureTransportError,
e._check_transport_security, r)
def test_multiple_source_params(self):
"""Check for duplicate params"""
v = RequestValidator()
e = BaseEndpoint(v)
self.assertRaises(errors.InvalidRequestError, e._create_request,
'https://a.b/?oauth_signature_method=HMAC-SHA1',
'GET', 'oauth_version=foo', URLENCODED)
headers = {'Authorization': 'OAuth oauth_signature="foo"'}
headers.update(URLENCODED)
self.assertRaises(errors.InvalidRequestError, e._create_request,
'https://a.b/?oauth_signature_method=HMAC-SHA1',
'GET',
'oauth_version=foo',
headers)
headers = {'Authorization': 'OAuth oauth_signature_method="foo"'}
headers.update(URLENCODED)
self.assertRaises(errors.InvalidRequestError, e._create_request,
'https://a.b/',
'GET',
'oauth_signature=foo',
headers)
def test_duplicate_params(self):
"""Ensure params are only supplied once"""
v = RequestValidator()
e = BaseEndpoint(v)
self.assertRaises(errors.InvalidRequestError, e._create_request,
'https://a.b/?oauth_version=a&oauth_version=b',
'GET', None, URLENCODED)
self.assertRaises(errors.InvalidRequestError, e._create_request,
'https://a.b/', 'GET', 'oauth_version=a&oauth_version=b',
URLENCODED)
def test_mandated_params(self):
"""Ensure all mandatory params are present."""
v = RequestValidator()
e = BaseEndpoint(v)
r = e._create_request('https://a.b/', 'GET',
'oauth_signature=a&oauth_consumer_key=b&oauth_nonce',
URLENCODED)
self.assertRaises(errors.InvalidRequestError,
e._check_mandatory_parameters, r)
def test_oauth_version(self):
"""OAuth version must be 1.0 if present."""
v = RequestValidator()
e = BaseEndpoint(v)
r = e._create_request('https://a.b/', 'GET',
('oauth_signature=a&oauth_consumer_key=b&oauth_nonce=c&'
'oauth_timestamp=a&oauth_signature_method=RSA-SHA1&'
'oauth_version=2.0'),
URLENCODED)
self.assertRaises(errors.InvalidRequestError,
e._check_mandatory_parameters, r)
def test_oauth_timestamp(self):
"""Check for a valid UNIX timestamp."""
v = RequestValidator()
e = BaseEndpoint(v)
# Invalid timestamp length, must be 10
r = e._create_request('https://a.b/', 'GET',
('oauth_signature=a&oauth_consumer_key=b&oauth_nonce=c&'
'oauth_version=1.0&oauth_signature_method=RSA-SHA1&'
'oauth_timestamp=123456789'),
URLENCODED)
self.assertRaises(errors.InvalidRequestError,
e._check_mandatory_parameters, r)
# Invalid timestamp age, must be younger than 10 minutes
r = e._create_request('https://a.b/', 'GET',
('oauth_signature=a&oauth_consumer_key=b&oauth_nonce=c&'
'oauth_version=1.0&oauth_signature_method=RSA-SHA1&'
'oauth_timestamp=1234567890'),
URLENCODED)
self.assertRaises(errors.InvalidRequestError,
e._check_mandatory_parameters, r)
# Timestamp must be an integer
r = e._create_request('https://a.b/', 'GET',
('oauth_signature=a&oauth_consumer_key=b&oauth_nonce=c&'
'oauth_version=1.0&oauth_signature_method=RSA-SHA1&'
'oauth_timestamp=123456789a'),
URLENCODED)
self.assertRaises(errors.InvalidRequestError,
e._check_mandatory_parameters, r)
def test_signature_method_validation(self):
"""Ensure valid signature method is used."""
body = ('oauth_signature=a&oauth_consumer_key=b&oauth_nonce=c&'
'oauth_version=1.0&oauth_signature_method=%s&'
'oauth_timestamp=1234567890')
uri = 'https://example.com/'
class HMACValidator(RequestValidator):
@property
def allowed_signature_methods(self):
return (SIGNATURE_HMAC,)
v = HMACValidator()
e = BaseEndpoint(v)
r = e._create_request(uri, 'GET', body % 'RSA-SHA1', URLENCODED)
self.assertRaises(errors.InvalidSignatureMethodError,
e._check_mandatory_parameters, r)
r = e._create_request(uri, 'GET', body % 'PLAINTEXT', URLENCODED)
self.assertRaises(errors.InvalidSignatureMethodError,
e._check_mandatory_parameters, r)
r = e._create_request(uri, 'GET', body % 'shibboleth', URLENCODED)
self.assertRaises(errors.InvalidSignatureMethodError,
e._check_mandatory_parameters, r)
class RSAValidator(RequestValidator):
@property
def allowed_signature_methods(self):
return (SIGNATURE_RSA,)
v = RSAValidator()
e = BaseEndpoint(v)
r = e._create_request(uri, 'GET', body % 'HMAC-SHA1', URLENCODED)
self.assertRaises(errors.InvalidSignatureMethodError,
e._check_mandatory_parameters, r)
r = e._create_request(uri, 'GET', body % 'PLAINTEXT', URLENCODED)
self.assertRaises(errors.InvalidSignatureMethodError,
e._check_mandatory_parameters, r)
r = e._create_request(uri, 'GET', body % 'shibboleth', URLENCODED)
self.assertRaises(errors.InvalidSignatureMethodError,
e._check_mandatory_parameters, r)
class PlainValidator(RequestValidator):
@property
def allowed_signature_methods(self):
return (SIGNATURE_PLAINTEXT,)
v = PlainValidator()
e = BaseEndpoint(v)
r = e._create_request(uri, 'GET', body % 'HMAC-SHA1', URLENCODED)
self.assertRaises(errors.InvalidSignatureMethodError,
e._check_mandatory_parameters, r)
r = e._create_request(uri, 'GET', body % 'RSA-SHA1', URLENCODED)
self.assertRaises(errors.InvalidSignatureMethodError,
e._check_mandatory_parameters, r)
r = e._create_request(uri, 'GET', body % 'shibboleth', URLENCODED)
self.assertRaises(errors.InvalidSignatureMethodError,
e._check_mandatory_parameters, r)
class ClientValidator(RequestValidator):
    """Concrete ``RequestValidator`` backed by hard-coded fixture data.

    Supplies one known client (``'foo'``), one already-used nonce tuple,
    token ownership, realm assignments and fixed secrets, so that the
    signature-verification tests below can run against deterministic,
    pre-computed signatures.
    """
    # Registered client keys.
    clients = ['foo']
    # (client_key, nonce, timestamp, resource_owner_key) tuples already seen.
    nonces = [('foo', 'once', '1234567891', 'fez')]
    # client_key -> tokens owned by that client.
    owners = {'foo': ['abcdefghijklmnopqrstuvxyz', 'fez']}
    # (client_key, token) -> realm granted to that pair.
    assigned_realms = {('foo', 'abcdefghijklmnopqrstuvxyz'): 'photos'}
    # (client_key, request_token) -> verifier string.
    verifiers = {('foo', 'fez'): 'shibboleth'}
    # --- length/configuration properties consumed by the endpoint checks ---
    @property
    def client_key_length(self):
        return 1, 30
    @property
    def request_token_length(self):
        return 1, 30
    @property
    def access_token_length(self):
        return 1, 30
    @property
    def nonce_length(self):
        return 2, 30
    @property
    def verifier_length(self):
        return 2, 30
    @property
    def realms(self):
        return ['photos']
    @property
    def timestamp_lifetime(self):
        # Disabled check to allow hardcoded verification signatures
        return 1000000000
    @property
    def dummy_client(self):
        return 'dummy'
    @property
    def dummy_request_token(self):
        return 'dumbo'
    @property
    def dummy_access_token(self):
        return 'dumbo'
    # --- validation callbacks ---
    def validate_timestamp_and_nonce(self, client_key, timestamp, nonce,
            request, request_token=None, access_token=None):
        # A nonce is valid only if the exact combination has not been seen.
        resource_owner_key = request_token if request_token else access_token
        return not (client_key, nonce, timestamp, resource_owner_key) in self.nonces
    def validate_client_key(self, client_key):
        return client_key in self.clients
    def validate_access_token(self, client_key, access_token, request):
        return (self.owners.get(client_key) and
                access_token in self.owners.get(client_key))
    def validate_request_token(self, client_key, request_token, request):
        return (self.owners.get(client_key) and
                request_token in self.owners.get(client_key))
    def validate_requested_realm(self, client_key, realm, request):
        return True
    def validate_realm(self, client_key, access_token, request, uri=None,
            required_realm=None):
        return (client_key, access_token) in self.assigned_realms
    def validate_verifier(self, client_key, request_token, verifier,
            request):
        # Constant-time comparison to avoid timing side channels.
        return ((client_key, request_token) in self.verifiers and
                 safe_string_equals(verifier, self.verifiers.get(
                    (client_key, request_token))))
    def validate_redirect_uri(self, client_key, redirect_uri, request):
        return redirect_uri.startswith('http://client.example.com/')
    # --- secret/key lookups used when recomputing signatures ---
    def get_client_secret(self, client_key, request):
        return 'super secret'
    def get_access_token_secret(self, client_key, access_token, request):
        return 'even more secret'
    def get_request_token_secret(self, client_key, request_token, request):
        return 'even more secret'
    def get_rsa_key(self, client_key, request):
        # Public half of the key pair used to produce the hardcoded RSA
        # signature in SignatureVerificationTest.test_rsa_signature.
        return ("-----BEGIN PUBLIC KEY-----\nMIGfMA0GCSqGSIb3DQEBAQUAA4GNA"
                "DCBiQKBgQDVLQCATX8iK+aZuGVdkGb6uiar\nLi/jqFwL1dYj0JLIsdQc"
                "KaMWtPC06K0+vI+RRZcjKc6sNB9/7kJcKN9Ekc9BUxyT\n/D09Cz47cmC"
                "YsUoiW7G8NSqbE4wPiVpGkJRzFAxaCWwOSSQ+lpC9vwxnvVQfOoZ1\nnp"
                "mWbCdA0iTxsMahwQIDAQAB\n-----END PUBLIC KEY-----")
class SignatureVerificationTest(TestCase):
    """Exercise ``BaseEndpoint._check_signature`` against pre-computed HMAC,
    RSA and PLAINTEXT signatures using the fixture-backed ClientValidator."""
    def setUp(self):
        v = ClientValidator()
        self.e = BaseEndpoint(v)
        self.uri = 'https://example.com/'
        # Body template; each test fills in (signature, signature_method).
        self.sig = ('oauth_signature=%s&'
                    'oauth_timestamp=1234567890&'
                    'oauth_nonce=abcdefghijklmnopqrstuvwxyz&'
                    'oauth_version=1.0&'
                    'oauth_signature_method=%s&'
                    'oauth_token=abcdefghijklmnopqrstuvxyz&'
                    'oauth_consumer_key=foo')
    def test_signature_too_short(self):
        # A truncated (or otherwise wrong-length) signature must not verify,
        # even when the remaining parameters are valid.
        short_sig = ('oauth_signature=fmrXnTF4lO4o%2BD0%2FlZaJHP%2FXqEY&'
                     'oauth_timestamp=1234567890&'
                     'oauth_nonce=abcdefghijklmnopqrstuvwxyz&'
                     'oauth_version=1.0&oauth_signature_method=HMAC-SHA1&'
                     'oauth_token=abcdefghijklmnopqrstuvxyz&'
                     'oauth_consumer_key=foo')
        r = self.e._create_request(self.uri, 'GET', short_sig, URLENCODED)
        self.assertFalse(self.e._check_signature(r))
        # Correct length but wrong content must also fail.
        plain = ('oauth_signature=correctlengthbutthewrongcontent1111&'
                 'oauth_timestamp=1234567890&'
                 'oauth_nonce=abcdefghijklmnopqrstuvwxyz&'
                 'oauth_version=1.0&oauth_signature_method=PLAINTEXT&'
                 'oauth_token=abcdefghijklmnopqrstuvxyz&'
                 'oauth_consumer_key=foo')
        r = self.e._create_request(self.uri, 'GET', plain, URLENCODED)
        self.assertFalse(self.e._check_signature(r))
    def test_hmac_signature(self):
        # Pre-computed with client secret 'super secret' / token secret
        # 'even more secret' (see ClientValidator).
        hmac_sig = "fmrXnTF4lO4o%2BD0%2FlZaJHP%2FXqEY%3D"
        sig = self.sig % (hmac_sig, "HMAC-SHA1")
        r = self.e._create_request(self.uri, 'GET', sig, URLENCODED)
        self.assertTrue(self.e._check_signature(r))
    def test_rsa_signature(self):
        # Pre-computed with the private half of ClientValidator.get_rsa_key.
        rsa_sig = ("fxFvCx33oKlR9wDquJ%2FPsndFzJphyBa3RFPPIKi3flqK%2BJ7yIrMVbH"
                   "YTM%2FLHPc7NChWz4F4%2FzRA%2BDN1k08xgYGSBoWJUOW6VvOQ6fbYhMA"
                   "FkOGYbuGDbje487XMzsAcv6ZjqZHCROSCk5vofgLk2SN7RZ3OrgrFzf4in"
                   "xetClqA%3D")
        sig = self.sig % (rsa_sig, "RSA-SHA1")
        r = self.e._create_request(self.uri, 'GET', sig, URLENCODED)
        self.assertTrue(self.e._check_signature(r))
    def test_plaintext_signature(self):
        # PLAINTEXT signature is "client_secret&token_secret", percent-encoded.
        plain_sig = "super%252520secret%26even%252520more%252520secret"
        sig = self.sig % (plain_sig, "PLAINTEXT")
        r = self.e._create_request(self.uri, 'GET', sig, URLENCODED)
        self.assertTrue(self.e._check_signature(r))
| bsd-3-clause |
wscullin/spack | var/spack/repos/builtin/packages/tmuxinator/package.py | 3 | 1740 | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Tmuxinator(Package):
    """A session configuration creator and manager for tmux"""
    homepage = "https://github.com/tmuxinator/tmuxinator"
    url = "https://github.com/tmuxinator/tmuxinator"
    # Fetched from git at a release tag rather than a tarball URL.
    version('0.6.11',
            git='https://github.com/tmuxinator/tmuxinator',
            tag='v0.6.11')
    # tmuxinator is a Ruby gem, so it extends the ruby installation.
    extends('ruby')
    def install(self, spec, prefix):
        # Build the gem from its gemspec, then install the resulting
        # versioned .gem file into the Ruby extension prefix.
        gem('build', 'tmuxinator.gemspec')
        gem('install', 'tmuxinator-{0}.gem'.format(self.version))
| lgpl-2.1 |
a-rank/cassandra-tools | cassandra_tools/ui.py | 1 | 3032 | # Copyright 2016 Allan Rank
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import click
def prompt(choices, text, close=False):
    """Display a numbered menu of *choices* and return the selected index.

    When *close* is true an extra "c <close>" entry is shown. Any
    non-numeric input (including 'c') returns len(choices), which callers
    treat as the out-of-range / close sentinel.
    """
    items = ["{} {}".format(idx, c) for idx, c in enumerate(choices)]
    if close:
        items.append("c <close>")
    click.echo()
    click.secho("\n".join(items), bold=True)
    try:
        return int(click.prompt(text))
    except ValueError:
        # Non-numeric input maps to the first invalid index.
        return len(choices)
def format_columns(columns):
    """Return the column names joined by ", ", or a placeholder when empty."""
    if not columns:
        return "no columns"
    return ", ".join(col.name for col in columns)
def print_header(text, bold=False):
    """Print *text* framed above and below by a horizontal rule.

    The rule is two characters wider than the text to cover the leading
    space used when echoing the text itself.
    """
    click.echo()
    # String repetition replaces the old join-over-xrange construction,
    # which was Python-2-only (xrange) and needlessly quadratic in intent.
    line = "-" * (len(text) + 2)
    click.secho(line, bold=bold)
    click.secho(" {}".format(text), bold=bold)
    click.secho(line, bold=bold)
def print_dict(map, name):
    """Print a "<name>: <count>" header followed by the dict's keys,
    one per line. Prints nothing for an empty/None mapping.

    NOTE(review): the parameter name ``map`` shadows the builtin; renaming
    would change the keyword interface, so it is left as-is.
    """
    if map:
        print_header("{}: {}".format(name, len(map)))
        items = "\n".join(map.keys())
        click.echo(items)
def print_host(host):
    """Print one tab-separated summary line for a cluster host:
    address, release version, rack and datacenter."""
    click.echo("{}\tv{}\t{}\t{}".format(
        host.broadcast_address,
        host.release_version,
        host.rack,
        host.datacenter))
def print_keyspace(keyspace_meta):
    """Print a full summary of a keyspace: replication settings, durable
    writes flag, and the names of every contained schema object."""
    print_header("{} {}".format("keyspace:", keyspace_meta.name), True)
    replication_strategy = keyspace_meta.replication_strategy
    if replication_strategy:
        # export_for_schema() renders the strategy as CQL-style text.
        click.echo("replication:\t{}".format(replication_strategy.export_for_schema()))
    click.echo("durable writes:\t{}".format(keyspace_meta.durable_writes))
    print_dict(keyspace_meta.tables, "tables")
    print_dict(keyspace_meta.views, "views")
    print_dict(keyspace_meta.indexes, "indexes")
    print_dict(keyspace_meta.user_types, "user types")
    print_dict(keyspace_meta.functions, "functions")
    print_dict(keyspace_meta.aggregates, "aggregates")
def print_table(table_meta):
    """Print a full summary of a table: primary key, columns with their CQL
    types (tab-aligned to the longest column name), views and indexes."""
    def max_column(column):
        # Sort key used to find the longest column name for tab alignment.
        return len(column.name)
    print_header("table: {}.{}".format(table_meta.keyspace_name, table_meta.name), True)
    click.echo("primary key:\t(({}), {})".format(format_columns(table_meta.partition_key),
                                                 format_columns(table_meta.clustering_key)))
    columns = table_meta.columns.values()
    columns_text = "\n".join(["{}\t{}".format(c.name, c.cql_type) for c in columns])
    max_len_column = max(columns, key=max_column)
    print_header("{}: {}".format("columns", len(columns)))
    # Expand tabs so the type column lines up just past the widest name.
    click.echo(columns_text.expandtabs(len(max_len_column.name) + 2))
    print_dict(table_meta.views, "views")
    print_dict(table_meta.indexes, "indexes")
| apache-2.0 |
Michaelhobo/ee149-final-project | Tools/autotest/common.py | 16 | 9395 | import util, pexpect, time, math
from pymavlink import mavwp
# a list of pexpect objects to read while waiting for
# messages. This keeps the output to stdout flowing
expect_list = []

def expect_list_clear():
    '''clear the expect list in place.

    Uses slice deletion, which empties the shared module-level list in one
    O(n) operation instead of the previous remove-one-at-a-time loop
    (O(n^2)), while keeping the same list object so other references to
    expect_list stay valid.'''
    del expect_list[:]
def expect_list_extend(list):
    '''extend the expect list with the given pexpect objects.

    NOTE(review): the parameter name ``list`` shadows the builtin; renaming
    would change the call interface, so it is left as-is.'''
    global expect_list
    expect_list.extend(list)
def idle_hook(mav):
    '''called when waiting for a mavlink message; drains pending output
    from every registered pexpect child so their stdout keeps flowing'''
    global expect_list
    for p in expect_list:
        util.pexpect_drain(p)
def message_hook(mav, msg):
    '''called as each mavlink msg is received; msg itself is unused, the
    hook only drains the pexpect children via idle_hook'''
    idle_hook(mav)
def expect_callback(e):
    '''called when waiting for a expect pattern; drains every registered
    pexpect child except the one currently being waited on'''
    global expect_list
    for p in expect_list:
        if p == e:
            continue
        util.pexpect_drain(p)
def get_distance(loc1, loc2):
    '''get ground distance in metres between two locations.

    Treats the lat/lng deltas as a flat plane and scales degrees to
    metres with the equatorial factor 1.113195e5 m/degree.'''
    delta_lat = loc2.lat - loc1.lat
    delta_lng = loc2.lng - loc1.lng
    return math.sqrt(delta_lat * delta_lat + delta_lng * delta_lng) * 1.113195e5
def get_bearing(loc1, loc2):
    '''get bearing from loc1 to loc2, in degrees in [0, 360).

    Computed on a flat plane: east offset on x, north offset on y, then
    converted from the maths convention to a compass bearing.'''
    east = loc2.lng - loc1.lng
    north = loc2.lat - loc1.lat
    bearing = 90.00 + math.atan2(-north, east) * 57.2957795
    return bearing + 360.00 if bearing < 0 else bearing
def wait_seconds(seconds_to_wait):
    '''block for the given number of seconds.

    Uses time.sleep() instead of the previous busy-wait loop, which
    pegged a CPU core at 100% for the whole delay while producing the
    same observable behaviour (the call returns after the interval).'''
    time.sleep(seconds_to_wait)
def wait_altitude(mav, alt_min, alt_max, timeout=30):
    # Poll VFR_HUD until altitude enters [alt_min, alt_max] or timeout
    # (seconds) expires. Returns True on success, False on timeout.
    climb_rate = 0
    previous_alt = 0
    # NOTE(review): this string literal is a misplaced docstring — it sits
    # after two statements so Python treats it as a no-op expression.
    '''wait for a given altitude range'''
    tstart = time.time()
    print("Waiting for altitude between %u and %u" % (alt_min, alt_max))
    while time.time() < tstart + timeout:
        m = mav.recv_match(type='VFR_HUD', blocking=True)
        # Climb rate here is simply the altitude delta between messages.
        climb_rate = m.alt - previous_alt
        previous_alt = m.alt
        print("Wait Altitude: Cur:%u, min_alt:%u, climb_rate: %u" % (m.alt, alt_min , climb_rate))
        if m.alt >= alt_min and m.alt <= alt_max:
            print("Altitude OK")
            return True
    print("Failed to attain altitude range")
    return False
def wait_groundspeed(mav, gs_min, gs_max, timeout=30):
    '''wait for a given ground speed range.

    Polls VFR_HUD until groundspeed enters [gs_min, gs_max] or *timeout*
    seconds elapse. Returns True on success, False on timeout.'''
    tstart = time.time()
    print("Waiting for groundspeed between %.1f and %.1f" % (gs_min, gs_max))
    while time.time() < tstart + timeout:
        m = mav.recv_match(type='VFR_HUD', blocking=True)
        print("Wait groundspeed %.1f, target:%.1f" % (m.groundspeed, gs_min))
        if m.groundspeed >= gs_min and m.groundspeed <= gs_max:
            return True
    print("Failed to attain groundspeed range")
    return False
def wait_roll(mav, roll, accuracy, timeout=30):
    '''wait for a given roll in degrees.

    Polls ATTITUDE until roll is within *accuracy* degrees of *roll* or
    *timeout* seconds elapse. Returns True on success, False on timeout.'''
    tstart = time.time()
    print("Waiting for roll of %d at %s" % (roll, time.ctime()))
    while time.time() < tstart + timeout:
        m = mav.recv_match(type='ATTITUDE', blocking=True)
        # ATTITUDE angles arrive in radians; convert for comparison/printing.
        p = math.degrees(m.pitch)
        r = math.degrees(m.roll)
        print("Roll %d Pitch %d" % (r, p))
        if math.fabs(r - roll) <= accuracy:
            print("Attained roll %d" % roll)
            return True
    print("Failed to attain roll %d" % roll)
    return False
def wait_pitch(mav, pitch, accuracy, timeout=30):
    '''wait for a given pitch in degrees.

    Polls ATTITUDE until pitch is within *accuracy* degrees of *pitch* or
    *timeout* seconds elapse. Returns True on success, False on timeout.'''
    tstart = time.time()
    print("Waiting for pitch of %u at %s" % (pitch, time.ctime()))
    while time.time() < tstart + timeout:
        m = mav.recv_match(type='ATTITUDE', blocking=True)
        # ATTITUDE angles arrive in radians; convert for comparison/printing.
        p = math.degrees(m.pitch)
        r = math.degrees(m.roll)
        print("Pitch %d Roll %d" % (p, r))
        if math.fabs(p - pitch) <= accuracy:
            print("Attained pitch %d" % pitch)
            return True
    print("Failed to attain pitch %d" % pitch)
    return False
def wait_heading(mav, heading, accuracy=5, timeout=30):
    '''wait for a given heading.

    Polls VFR_HUD until heading is within *accuracy* degrees of *heading*
    or *timeout* seconds elapse. Returns True on success, False on timeout.
    NOTE(review): the comparison does not wrap around 0/360, so headings
    near north may need a generous accuracy.'''
    tstart = time.time()
    print("Waiting for heading %u with accuracy %u" % (heading, accuracy))
    while time.time() < tstart + timeout:
        m = mav.recv_match(type='VFR_HUD', blocking=True)
        print("Heading %u" % m.heading)
        if math.fabs(m.heading - heading) <= accuracy:
            print("Attained heading %u" % heading)
            return True
    print("Failed to attain heading %u" % heading)
    return False
def wait_distance(mav, distance, accuracy=5, timeout=30):
    '''wait for flight of a given distance.

    Measures ground distance from the position at call time. Returns True
    once within *accuracy* metres of *distance*; returns False early when
    the vehicle overshoots past distance + accuracy, or on timeout.'''
    tstart = time.time()
    start = mav.location()
    while time.time() < tstart + timeout:
        pos = mav.location()
        delta = get_distance(start, pos)
        print("Distance %.2f meters" % delta)
        if math.fabs(delta - distance) <= accuracy:
            print("Attained distance %.2f meters OK" % delta)
            return True
        if delta > (distance + accuracy):
            # Overshoot is a hard failure, not something to wait out.
            print("Failed distance - overshoot delta=%f distance=%f" % (delta, distance))
            return False
    print("Failed to attain distance %u" % distance)
    return False
def wait_location(mav, loc, accuracy=5, timeout=30, target_altitude=None, height_accuracy=-1):
    '''wait for arrival at a location.

    Succeeds when within *accuracy* metres of *loc* and, if
    *height_accuracy* is not -1, also within that many metres of
    *target_altitude* (defaults to loc.alt). Returns True on arrival,
    False on timeout.'''
    tstart = time.time()
    if target_altitude is None:
        target_altitude = loc.alt
    print("Waiting for location %.4f,%.4f at altitude %.1f height_accuracy=%.1f" % (
        loc.lat, loc.lng, target_altitude, height_accuracy))
    while time.time() < tstart + timeout:
        pos = mav.location()
        delta = get_distance(loc, pos)
        print("Distance %.2f meters alt %.1f" % (delta, pos.alt))
        if delta <= accuracy:
            # Horizontally there; optionally also require the altitude band.
            if height_accuracy != -1 and math.fabs(pos.alt - target_altitude) > height_accuracy:
                continue
            print("Reached location (%.2f meters)" % delta)
            return True
    print("Failed to attain location")
    return False
def wait_waypoint(mav, wpnum_start, wpnum_end, allow_skip=True, max_dist=2, timeout=400, mode=None):
    '''wait for waypoint ranges.

    Tracks mission progress from the current waypoint up to *wpnum_end*.
    Succeeds when the final waypoint is reached within *max_dist* metres,
    when the sequence number saturates (>= 255), or — if *mode* is given —
    as soon as the vehicle leaves that flight mode. Fails on an
    unexpected waypoint skip (unless *allow_skip*) or on timeout; the
    timeout is restarted at each new waypoint.'''
    tstart = time.time()
    # this message arrives after we set the current WP
    start_wp = mav.waypoint_current()
    current_wp = start_wp
    print("\ntest: wait for waypoint ranges start=%u end=%u\n\n" % (wpnum_start, wpnum_end))
    # if start_wp != wpnum_start:
    # print("test: Expected start waypoint %u but got %u" % (wpnum_start, start_wp))
    # return False
    while time.time() < tstart + timeout:
        seq = mav.waypoint_current()
        m = mav.recv_match(type='NAV_CONTROLLER_OUTPUT', blocking=True)
        wp_dist = m.wp_dist
        m = mav.recv_match(type='VFR_HUD', blocking=True)
        # if we exited the required mode, finish
        if mode is not None and mav.flightmode != mode:
            print('Exited %s mode' % mode)
            return True
        print("test: WP %u (wp_dist=%u Alt=%d), current_wp: %u, wpnum_end: %u" % (seq, wp_dist, m.alt, current_wp, wpnum_end))
        if seq == current_wp+1 or (seq > current_wp+1 and allow_skip):
            print("test: Starting new waypoint %u" % seq)
            tstart = time.time()
            current_wp = seq
            # the wp_dist check is a hack until we can sort out the right seqnum
            # for end of mission
            #if current_wp == wpnum_end or (current_wp == wpnum_end-1 and wp_dist < 2):
        if (current_wp == wpnum_end and wp_dist < max_dist):
            print("Reached final waypoint %u" % seq)
            return True
        if (seq >= 255):
            print("Reached final waypoint %u" % seq)
            return True
        if seq > current_wp+1:
            # Only reachable when allow_skip is False.
            print("Failed: Skipped waypoint! Got wp %u expected %u" % (seq, current_wp+1))
            return False
    print("Failed: Timed out waiting for waypoint %u of %u" % (wpnum_end, wpnum_end))
    return False
def save_wp(mavproxy, mav):
    # Trigger the "save waypoint" RC function by pulsing channel 7
    # low -> high -> low, waiting for each value to be echoed back
    # in RC_CHANNELS_RAW before moving on.
    mavproxy.send('rc 7 1000\n')
    mav.recv_match(condition='RC_CHANNELS_RAW.chan7_raw==1000', blocking=True)
    wait_seconds(1)
    mavproxy.send('rc 7 2000\n')
    mav.recv_match(condition='RC_CHANNELS_RAW.chan7_raw==2000', blocking=True)
    wait_seconds(1)
    mavproxy.send('rc 7 1000\n')
    mav.recv_match(condition='RC_CHANNELS_RAW.chan7_raw==1000', blocking=True)
    wait_seconds(1)
def wait_mode(mav, mode, timeout=None):
    # Block until the vehicle reports the given flight mode
    # (case-insensitive); returns the mode actually reported.
    print("Waiting for mode %s" % mode)
    mav.recv_match(condition='MAV.flightmode.upper()=="%s".upper()' % mode, timeout=timeout, blocking=True)
    print("Got mode %s" % mode)
    return mav.flightmode
def mission_count(filename):
    '''load a mission from a file and return number of waypoints'''
    wploader = mavwp.MAVWPLoader()
    wploader.load(filename)
    num_wp = wploader.count()
    return num_wp
def sim_location(mav):
    '''return current simulator location.

    Reads SIMSTATE (lat/lng in 1e7-scaled degrees, yaw in radians) and
    converts it into a mavutil.location with altitude fixed at 0.'''
    from pymavlink import mavutil
    m = mav.recv_match(type='SIMSTATE', blocking=True)
    return mavutil.location(m.lat*1.0e-7, m.lng*1.0e-7, 0, math.degrees(m.yaw))
def log_download(mavproxy, mav, filename, timeout=360):
    '''download latest log to *filename*, then erase logs on the vehicle.

    The heartbeat waits give the autopilot time to settle between the
    list/download/erase commands. Always returns True; a failed download
    surfaces as a pexpect timeout exception instead.'''
    mavproxy.send("log list\n")
    mavproxy.expect("numLogs")
    mav.wait_heartbeat()
    mav.wait_heartbeat()
    mavproxy.send("log download latest %s\n" % filename)
    mavproxy.expect("Finished downloading", timeout=timeout)
    mavproxy.send("log erase\n")
    mav.wait_heartbeat()
    mav.wait_heartbeat()
    return True
| gpl-3.0 |
Grumpy-Mike/Mikes-Pi-Bakery | CurveBall/curvedBall.py | 1 | 3415 | # Curved Ball - a game for the Pi Glow board
# By Mike Cook - March 2015
import time, random, sys
from smbus import SMBus
import wiringpi2 as io
# command register addresses for the SN3218 IC used in PiGlow
CMD_ENABLE_OUTPUT = 0x00
CMD_ENABLE_LEDS = 0x13
CMD_SET_PWM_VALUES = 0x01
CMD_UPDATE = 0x16
SN3218 = 0x54 # i2c address of SN3218 IC
bus = None  # SMBus handle, opened later by busInit()
# GPIO setup requires root; fail with a helpful hint otherwise.
# (Python 2 script: note the print statement below.)
try :
    io.wiringPiSetupGpio()
except :
    print"start IDLE with 'gksudo idle' from command line"
    sys.exit()
pinList= [7,8,25] # GPIO pins for switches
lights = [0x00 for i in range(0,18)] # the LED brightness list
# LED channel numbers for each colour, one entry per PiGlow arm.
red = [0,6,17] # red LEDs
orange = [1,7,16] # orange LEDs
yellow = [2,8,15] # yellow LEDs
green = [3,5,13] # green LEDs
blue = [14,4,11] # blue LEDs
white = [12,9,10] # white LEDs
# Colour order travelling inwards (throw) and outwards (hit back).
triangleIn = [red,orange,yellow,green,blue,white]
triangleOut = [white,blue,green,yellow,orange,red]
speed = 0.03 # delay is twice this
returnSpeed = 0.1 # for hit back
score = 0  # updated by pitch() when the player reacts in time
def main():
    # Initialise hardware once, then loop forever: clear the display,
    # wait for all buttons to be released, then throw the next ball.
    initGPIO()
    busInit()
    while True: # repeat forever
        wipe()
        updateLEDs(lights)
        while scanSwitches() != -1: #make sure fingers off
            pass
        pitch()
def pitch(): # throw the ball
    # Animate a ball travelling inwards along a randomly chosen arm.
    # The first button press locks in the player's guess ("bat"); a hit
    # scores higher the earlier it was made, and the ball animates back out.
    global score
    time.sleep(1.0) # delay before the throw - try making this random
    arm = random.randint(0,2) # direction of curved ball
    bat = False
    push = -1
    for triangle in range(0,5):
        wipe() # clear all LEDs in the list
        if bat:
            lights[white[push]] = 0x20 # turn on bat LED
        lights[triangleIn[triangle][arm]] = 0x80
        updateLEDs(lights)
        time.sleep(speed)
        if not bat: # no switch pressed so far so look for one
            push = scanSwitches() # switched pressed?
            if push != -1:
                bat = True # no more looking at switches
                score = 6 - triangle # sooner you see it the higher the score
        else:
            lights[white[push]] = 0x20
        updateLEDs(lights)
        time.sleep(speed)
    if arm == push:
        # Player batted on the correct arm. (Python 2 print statement.)
        print "hit - score ",score
        for triangle in range(0,6): # hit it back
            wipe()
            lights[triangleOut[triangle][arm]] = 0x80
            updateLEDs(lights)
            time.sleep(returnSpeed)
    time.sleep(0.7)
def initGPIO(): # set up the GPIO pins
    # Configure each switch pin as an input with the internal pull-up on,
    # so a pressed button reads 0 and a released one reads 1.
    for pin in range (0,3):
        io.pinMode(pinList[pin],0) # make pin into an input
        io.pullUpDnControl(pinList[pin],2) # enable pull up
def scanSwitches(): # look at each pin in turn
    # Return the index (0-2) of a pressed switch, or -1 if none is pressed.
    # When several are pressed the highest-numbered one wins.
    down = -1 # default return value means no switch pressed
    for pin in range (0,3):
        if io.digitalRead(pinList[pin]) == 0:
            down = pin
    return down
def busInit(): # start up the I2C bus and enable the outputs on the SN3218
    global bus
    bus = SMBus(1)
    # Enable output, then enable all three banks of six LEDs.
    bus.write_byte_data(SN3218,CMD_ENABLE_OUTPUT, 0x01)
    bus.write_i2c_block_data(SN3218, CMD_ENABLE_LEDS, [0xFF, 0xFF, 0xFF])
def updateLEDs(lights): # update the LEDs to reflect the lights list
    # Write all 18 PWM values, then latch them with an update command.
    bus.write_i2c_block_data(SN3218, CMD_SET_PWM_VALUES, lights)
    bus.write_byte_data(SN3218,CMD_UPDATE, 0xFF)
def wipe(): # clear the lights list
    # Zero every brightness entry; the LEDs themselves only change
    # once updateLEDs() is called with the list.
    global lights
    for i in range(0,18):
        lights[i] = 0
# Main program logic: run the game loop, turning the display off cleanly
# on Ctrl+C instead of leaving LEDs lit.
if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        # set all the LEDs to "off" when Ctrl+C is pressed before exiting
        wipe()
        updateLEDs(lights)
brchiu/tensorflow | tensorflow/python/training/device_setter_test.py | 97 | 7482 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for device function for replicated training."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.training import device_setter
from tensorflow.python.training import server_lib
class DeviceSetterTest(test.TestCase):
_cluster_spec = server_lib.ClusterSpec({
"ps": ["ps0:2222", "ps1:2222"],
"worker": ["worker0:2222", "worker1:2222", "worker2:2222"]
})
def testCPUOverride(self):
with ops.device(
device_setter.replica_device_setter(cluster=self._cluster_spec)):
with ops.device("/cpu:0"):
v = variables.Variable([1, 2])
w = variables.Variable([2, 1])
with ops.device("/cpu:0"):
a = v + w
self.assertDeviceEqual("/job:ps/task:0/cpu:0", v.device)
self.assertDeviceEqual("/job:ps/task:0/cpu:0", v.initializer.device)
self.assertDeviceEqual("/job:ps/task:1", w.device)
self.assertDeviceEqual("/job:ps/task:1", w.initializer.device)
self.assertDeviceEqual("/job:worker/cpu:0", a.device)
def testResource(self):
with ops.device(
device_setter.replica_device_setter(cluster=self._cluster_spec)):
v = resource_variable_ops.ResourceVariable([1, 2])
self.assertDeviceEqual("/job:ps/task:0", v.device)
def testPS2TasksWithClusterSpecClass(self):
with ops.device(
device_setter.replica_device_setter(cluster=self._cluster_spec)):
v = variables.Variable([1, 2])
w = variables.Variable([2, 1])
a = v + w
self.assertDeviceEqual("/job:ps/task:0", v.device)
self.assertDeviceEqual("/job:ps/task:0", v.initializer.device)
self.assertDeviceEqual("/job:ps/task:1", w.device)
self.assertDeviceEqual("/job:ps/task:1", w.initializer.device)
self.assertDeviceEqual("/job:worker", a.device)
def testPS2TasksPinVariableToJob(self):
with ops.device(
device_setter.replica_device_setter(cluster=self._cluster_spec)):
v = variables.Variable([1, 2])
with ops.device("/job:moon"):
w = variables.Variable([2, 1])
with ops.device("/job:ps"): # Explicit PS job will get task set.
x = variables.Variable([0, 1])
a = v + w + x
self.assertDeviceEqual("/job:ps/task:0", v.device)
self.assertDeviceEqual("/job:ps/task:0", v.initializer.device)
self.assertDeviceEqual("/job:moon", w.device)
self.assertDeviceEqual("/job:moon", w.initializer.device)
self.assertDeviceEqual("/job:ps/task:1", x.device)
self.assertDeviceEqual("/job:ps/task:1", x.initializer.device)
self.assertDeviceEqual("/job:worker", a.device)
def testPS2TasksUseCpuForPS(self):
with ops.device(
device_setter.replica_device_setter(ps_tasks=1, ps_device="/cpu:0")):
v = variables.Variable([1, 2])
with ops.device("/job:moon"):
w = variables.Variable([2, 1])
a = v + w
self.assertDeviceEqual("/cpu:0", v.device)
self.assertDeviceEqual("/cpu:0", v.initializer.device)
self.assertDeviceEqual("/job:moon/cpu:0", w.device)
self.assertDeviceEqual("/job:moon/cpu:0", w.initializer.device)
self.assertDeviceEqual("/job:worker", a.device)
def testPS2TasksNoMerging(self):
with ops.device(
device_setter.replica_device_setter(
cluster=self._cluster_spec, merge_devices=False)):
v = variables.Variable([1, 2])
with ops.device("/job:ps"): # Won't assign task when merge_devices=False.
w = variables.Variable([2, 1])
a = v + w
self.assertDeviceEqual("/job:ps/task:0", v.device)
self.assertDeviceEqual("/job:ps/task:0", v.initializer.device)
self.assertDeviceEqual("/job:ps", w.device)
self.assertDeviceEqual("/job:ps", w.initializer.device)
self.assertDeviceEqual("/job:worker", a.device)
def testPS2TasksWithClusterSpecDict(self):
with ops.device(
device_setter.replica_device_setter(cluster=self._cluster_spec.as_dict(
))):
v = variables.Variable([1, 2])
w = variables.Variable([2, 1])
a = v + w
self.assertDeviceEqual("/job:ps/task:0", v.device)
self.assertDeviceEqual("/job:ps/task:0", v.initializer.device)
self.assertDeviceEqual("/job:ps/task:1", w.device)
self.assertDeviceEqual("/job:ps/task:1", w.initializer.device)
self.assertDeviceEqual("/job:worker", a.device)
def testPS2TasksWithClusterDef(self):
with ops.device(
device_setter.replica_device_setter(
cluster=self._cluster_spec.as_cluster_def())):
v = variables.Variable([1, 2])
w = variables.Variable([2, 1])
a = v + w
self.assertDeviceEqual("/job:ps/task:0", v.device)
self.assertDeviceEqual("/job:ps/task:0", v.initializer.device)
self.assertDeviceEqual("/job:ps/task:1", w.device)
self.assertDeviceEqual("/job:ps/task:1", w.initializer.device)
self.assertDeviceEqual("/job:worker", a.device)
def testPS2TasksWithDevice(self):
cluster_spec = server_lib.ClusterSpec({
"sun": ["sun0:2222", "sun1:2222", "sun2:2222"],
"moon": ["moon0:2222", "moon1:2222"]
})
with ops.device(
device_setter.replica_device_setter(
ps_device="/job:moon",
worker_device="/job:sun",
cluster=cluster_spec.as_cluster_def())):
v = variables.Variable([1, 2])
w = variables.Variable([2, 1])
a = v + w
self.assertDeviceEqual("/job:moon/task:0", v.device)
self.assertDeviceEqual("/job:moon/task:0", v.initializer.device)
self.assertDeviceEqual("/job:moon/task:1", w.device)
self.assertDeviceEqual("/job:moon/task:1", w.initializer.device)
self.assertDeviceEqual("/job:sun", a.device)
def testPS2TasksWithCPUConstraint(self):
    """A device constraint (cpu:0) on the ps device is kept in placements."""
    spec = server_lib.ClusterSpec({
        "sun": ["sun0:2222", "sun1:2222", "sun2:2222"],
        "moon": ["moon0:2222", "moon1:2222"]
    })
    setter = device_setter.replica_device_setter(
        ps_device="/job:moon/cpu:0",
        worker_device="/job:sun",
        cluster=spec.as_cluster_def())
    with ops.device(setter):
        first = variables.Variable([1, 2])
        second = variables.Variable([2, 1])
        total = first + second
        self.assertDeviceEqual("/job:moon/task:0/cpu:0", first.device)
        self.assertDeviceEqual("/job:moon/task:0/cpu:0", first.initializer.device)
        self.assertDeviceEqual("/job:moon/task:1/cpu:0", second.device)
        self.assertDeviceEqual("/job:moon/task:1/cpu:0", second.initializer.device)
        self.assertDeviceEqual("/job:sun", total.device)
# Standard test-runner entry point.
if __name__ == "__main__":
test.main()
| apache-2.0 |
samba-team/samba | source4/torture/drs/python/getnc_unpriv.py | 2 | 14757 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Tests replication scenarios with different user privileges.
# We want to test every replication scenario we can think of against:
# - users with only GET_CHANGES privileges
# - users with only GET_ALL_CHANGES privileges
# - users with both GET_CHANGES and GET_ALL_CHANGES privileges
# - users with no privileges
#
# Copyright (C) Kamen Mazdrashki <kamenim@samba.org> 2011
# Copyright (C) Andrew Bartlett <abartlet@samba.org> 2017
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
# Usage:
# export DC1=dc1_dns_name
# export DC2=dc2_dns_name
# export SUBUNITRUN=$samba4srcdir/scripting/bin/subunitrun
# PYTHONPATH="$PYTHONPATH:$samba4srcdir/torture/drs/python" $SUBUNITRUN getnc_unpriv -U"$DOMAIN/$DC_USERNAME"%"$DC_PASSWORD"
#
import drs_base
import samba.tests
from samba import werror, WERRORError
from samba import sd_utils
import ldb
from ldb import SCOPE_BASE
import random
from samba.dcerpc import drsuapi, security
from samba.credentials import DONT_USE_KERBEROS
class DrsReplicaSyncUnprivTestCase(drs_base.DrsBaseTestCase):
    """Confirm the behaviour of DsGetNCChanges for unprivileged users"""

    def setUp(self):
        # Create a fresh, randomly-named test OU containing a new test user,
        # plus privileged and unprivileged DRS bindings against DC1.
        super(DrsReplicaSyncUnprivTestCase, self).setUp()
        self.get_changes_user = "get-changes-user"
        self.base_dn = self.ldb_dc1.get_default_basedn()
        self.user_pass = samba.generate_random_password(12, 16)

        # add some randomness to the test OU. (Deletion of the last test's
        # objects can be slow to replicate out. So the OU created by a previous
        # testenv may still exist at this point).
        rand = random.randint(1, 10000000)
        test_ou = "OU=test_getnc_unpriv%d" % rand
        self.ou = "%s,%s" % (test_ou, self.base_dn)
        self.ldb_dc1.add({
            "dn": self.ou,
            "objectclass": "organizationalUnit"})
        self.ldb_dc1.newuser(self.get_changes_user, self.user_pass,
                             userou=test_ou)
        (self.drs, self.drs_handle) = self._ds_bind(self.dnsname_dc1)

        self.sd_utils = sd_utils.SDUtils(self.ldb_dc1)
        self.user_dn = "cn=%s,%s" % (self.get_changes_user, self.ou)
        user_sid = self.sd_utils.get_object_sid(self.user_dn)
        # ACE strings granting the control-access rights under test to the user
        self.acl_mod_get_changes = "(OA;;CR;%s;;%s)" % (security.GUID_DRS_GET_CHANGES,
                                                        str(user_sid))
        self.acl_mod_get_all_changes = "(OA;;CR;%s;;%s)" % (security.GUID_DRS_GET_ALL_CHANGES,
                                                            str(user_sid))
        # remember the original SD so tearDown can restore it
        self.desc_sddl = self.sd_utils.get_sd_as_sddl(self.base_dn)

        # We set DONT_USE_KERBEROS to avoid a race with getting the
        # user replicated to our selected KDC
        self.user_creds = self.insta_creds(template=self.get_credentials(),
                                           username=self.get_changes_user,
                                           userpass=self.user_pass,
                                           kerberos_state=DONT_USE_KERBEROS)
        (self.user_drs, self.user_drs_handle) = self._ds_bind(self.dnsname_dc1,
                                                              self.user_creds)

    def tearDown(self):
        # Restore the original security descriptor and remove the test OU.
        self.sd_utils.modify_sd_on_dn(self.base_dn, self.desc_sddl)
        try:
            self.ldb_dc1.delete(self.ou, ["tree_delete:1"])
        except ldb.LdbError as e1:
            (enum, string) = e1.args
            # Only a missing OU is benign here; previously any LdbError was
            # silently swallowed, hiding real cleanup failures.
            if enum != ldb.ERR_NO_SUCH_OBJECT:
                raise
        super(DrsReplicaSyncUnprivTestCase, self).tearDown()

    def _test_repl_exop(self, exop, repl_obj, expected_error, dest_dsa=None,
                        partial_attribute_set=None):
        """
        Common function to send a replication request and check the result
        matches what's expected.
        """
        req8 = self._exop_req8(dest_dsa=dest_dsa,
                               invocation_id=self.ldb_dc1.get_invocation_id(),
                               nc_dn_str=repl_obj,
                               exop=exop,
                               replica_flags=drsuapi.DRSUAPI_DRS_WRIT_REP,
                               partial_attribute_set=partial_attribute_set)

        if expected_error is None:
            # user is OK, request should be accepted without throwing an error
            (level, ctr) = self.user_drs.DsGetNCChanges(self.user_drs_handle,
                                                        8, req8)
        else:
            # check the request is rejected (with the error we're expecting)
            try:
                (level, ctr) = self.user_drs.DsGetNCChanges(self.user_drs_handle,
                                                            8, req8)
                self.fail("Should have failed with user denied access")
            except WERRORError as e:
                (enum, estr) = e.args
                self.assertTrue(enum in expected_error,
                                "Got unexpected error: %s" % estr)

    def _test_repl_single_obj(self, repl_obj, expected_error,
                              partial_attribute_set=None):
        """
        Checks that replication on a single object either succeeds or fails as
        expected (based on the user's access rights)
        """
        self._test_repl_exop(exop=drsuapi.DRSUAPI_EXOP_REPL_OBJ,
                             repl_obj=repl_obj,
                             expected_error=expected_error,
                             partial_attribute_set=partial_attribute_set)

    def _test_repl_secret(self, repl_obj, expected_error, dest_dsa=None):
        """
        Checks that REPL_SECRET on an object either succeeds or fails as
        expected (based on the user's access rights)
        """
        self._test_repl_exop(exop=drsuapi.DRSUAPI_EXOP_REPL_SECRET,
                             repl_obj=repl_obj,
                             expected_error=expected_error,
                             dest_dsa=dest_dsa)

    def _test_repl_full(self, expected_error, partial_attribute_set=None):
        """
        Checks that a full replication either succeeds or fails as expected
        (based on the user's access rights)
        """
        self._test_repl_exop(exop=drsuapi.DRSUAPI_EXOP_NONE,
                             repl_obj=self.ldb_dc1.get_default_basedn(),
                             expected_error=expected_error,
                             partial_attribute_set=partial_attribute_set)

    def _test_repl_full_on_ou(self, repl_obj, expected_error):
        """
        Full replication on a specific OU should always fail (it should be done
        against a base NC). The error may vary based on the user's access rights
        """
        # Just try against the OU created in the test setup
        self._test_repl_exop(exop=drsuapi.DRSUAPI_EXOP_NONE,
                             repl_obj=repl_obj,
                             expected_error=expected_error)

    def test_repl_getchanges_userpriv(self):
        """
        Tests various replication requests made by a user with only GET_CHANGES
        rights. Some requests will be accepted, but most will be rejected.
        """

        # Assign the user GET_CHANGES rights
        self.sd_utils.dacl_add_ace(self.base_dn, self.acl_mod_get_changes)

        self._test_repl_single_obj(repl_obj=self.ou,
                                   expected_error=[werror.WERR_DS_DRA_ACCESS_DENIED])
        bad_ou = "OU=bad_obj,%s" % self.ou
        self._test_repl_single_obj(repl_obj=bad_ou,
                                   expected_error=[werror.WERR_DS_DRA_BAD_DN,
                                                   werror.WERR_DS_DRA_ACCESS_DENIED])

        self._test_repl_secret(repl_obj=self.ou,
                               expected_error=[werror.WERR_DS_DRA_ACCESS_DENIED])
        self._test_repl_secret(repl_obj=self.user_dn,
                               expected_error=[werror.WERR_DS_DRA_ACCESS_DENIED])
        self._test_repl_secret(repl_obj=self.user_dn,
                               dest_dsa=self.ldb_dc1.get_ntds_GUID(),
                               expected_error=[werror.WERR_DS_DRA_ACCESS_DENIED])
        self._test_repl_secret(repl_obj=bad_ou,
                               expected_error=[werror.WERR_DS_DRA_BAD_DN])

        self._test_repl_full(expected_error=[werror.WERR_DS_DRA_ACCESS_DENIED])
        self._test_repl_full_on_ou(repl_obj=self.ou,
                                   expected_error=[werror.WERR_DS_CANT_FIND_EXPECTED_NC,
                                                   werror.WERR_DS_DRA_ACCESS_DENIED])
        self._test_repl_full_on_ou(repl_obj=bad_ou,
                                   expected_error=[werror.WERR_DS_DRA_BAD_NC,
                                                   werror.WERR_DS_DRA_ACCESS_DENIED])

        # Partial Attribute Sets don't require GET_ALL_CHANGES rights, so we
        # expect the following to succeed
        self._test_repl_single_obj(repl_obj=self.ou,
                                   expected_error=None,
                                   partial_attribute_set=self.get_partial_attribute_set())
        self._test_repl_full(expected_error=None,
                             partial_attribute_set=self.get_partial_attribute_set())

    def test_repl_getallchanges_userpriv(self):
        """
        Tests various replication requests made by a user with only
        GET_ALL_CHANGES rights. Note that assigning these rights is possible,
        but doesn't make a lot of sense. We test it anyway for consistency.
        """

        # Assign the user GET_ALL_CHANGES rights
        self.sd_utils.dacl_add_ace(self.base_dn, self.acl_mod_get_all_changes)

        # We can expect to get the same responses as an unprivileged user,
        # i.e. we have permission to see the results, but don't have permission
        # to ask
        self.test_repl_no_userpriv()

    def test_repl_both_userpriv(self):
        """
        Tests various replication requests made by a privileged user (i.e. has
        both GET_CHANGES and GET_ALL_CHANGES). We expect any valid requests
        to be accepted.
        """

        # Assign the user both GET_CHANGES and GET_ALL_CHANGES rights
        both_rights = self.acl_mod_get_changes + self.acl_mod_get_all_changes
        self.sd_utils.dacl_add_ace(self.base_dn, both_rights)

        self._test_repl_single_obj(repl_obj=self.ou,
                                   expected_error=None)
        bad_ou = "OU=bad_obj,%s" % self.ou
        self._test_repl_single_obj(repl_obj=bad_ou,
                                   expected_error=[werror.WERR_DS_DRA_BAD_DN])

        # Microsoft returns DB_ERROR, Samba returns ACCESS_DENIED
        self._test_repl_secret(repl_obj=self.ou,
                               expected_error=[werror.WERR_DS_DRA_DB_ERROR,
                                               werror.WERR_DS_DRA_ACCESS_DENIED])
        self._test_repl_secret(repl_obj=self.user_dn,
                               expected_error=[werror.WERR_DS_DRA_DB_ERROR,
                                               werror.WERR_DS_DRA_ACCESS_DENIED])

        # Note that Windows accepts this but Samba rejects it
        self._test_repl_secret(repl_obj=self.user_dn,
                               dest_dsa=self.ldb_dc1.get_ntds_GUID(),
                               expected_error=[werror.WERR_DS_DRA_ACCESS_DENIED])

        self._test_repl_secret(repl_obj=bad_ou,
                               expected_error=[werror.WERR_DS_DRA_BAD_DN])

        self._test_repl_full(expected_error=None)
        self._test_repl_full_on_ou(repl_obj=self.ou,
                                   expected_error=[werror.WERR_DS_CANT_FIND_EXPECTED_NC])
        self._test_repl_full_on_ou(repl_obj=bad_ou,
                                   expected_error=[werror.WERR_DS_DRA_BAD_NC,
                                                   werror.WERR_DS_DRA_BAD_DN])

        self._test_repl_single_obj(repl_obj=self.ou,
                                   expected_error=None,
                                   partial_attribute_set=self.get_partial_attribute_set())
        self._test_repl_full(expected_error=None,
                             partial_attribute_set=self.get_partial_attribute_set())

    def test_repl_no_userpriv(self):
        """
        Tests various replication requests made by a unprivileged user.
        We expect all these requests to be rejected.
        """

        # Microsoft usually returns BAD_DN, Samba returns ACCESS_DENIED
        usual_error = [werror.WERR_DS_DRA_BAD_DN, werror.WERR_DS_DRA_ACCESS_DENIED]

        self._test_repl_single_obj(repl_obj=self.ou,
                                   expected_error=usual_error)
        bad_ou = "OU=bad_obj,%s" % self.ou
        self._test_repl_single_obj(repl_obj=bad_ou,
                                   expected_error=usual_error)

        self._test_repl_secret(repl_obj=self.ou,
                               expected_error=usual_error)
        self._test_repl_secret(repl_obj=self.user_dn,
                               expected_error=usual_error)
        self._test_repl_secret(repl_obj=self.user_dn,
                               dest_dsa=self.ldb_dc1.get_ntds_GUID(),
                               expected_error=usual_error)
        self._test_repl_secret(repl_obj=bad_ou,
                               expected_error=usual_error)

        self._test_repl_full(expected_error=[werror.WERR_DS_DRA_ACCESS_DENIED])
        self._test_repl_full_on_ou(repl_obj=self.ou,
                                   expected_error=usual_error)
        self._test_repl_full_on_ou(repl_obj=bad_ou,
                                   expected_error=[werror.WERR_DS_DRA_BAD_NC,
                                                   werror.WERR_DS_DRA_ACCESS_DENIED])

        self._test_repl_single_obj(repl_obj=self.ou,
                                   expected_error=usual_error,
                                   partial_attribute_set=self.get_partial_attribute_set())
        self._test_repl_full(expected_error=[werror.WERR_DS_DRA_ACCESS_DENIED],
                             partial_attribute_set=self.get_partial_attribute_set())
| gpl-3.0 |
jjenki11/blaze-chem-rendering | avogadro/avogadro-1.1.1/libavogadro/src/python/unittest/molecule.py | 9 | 10475 | import Avogadro
import unittest
from numpy import *
class TestMolecule(unittest.TestCase):
    """Unit tests for the Avogadro Molecule python bindings."""

    def setUp(self):
        # each test gets a fresh, empty molecule
        self.molecule = Avogadro.molecules.addMolecule()

    def test_fileName(self):
        self.molecule.fileName = "somefile.ext"
        self.assertEqual(self.molecule.fileName, "somefile.ext")

    def test_addAtom(self):
        # add 5 atoms
        for i in range(5):
            # test addAtom()
            atom = self.molecule.addAtom()
            self.assertEqual(atom.index, i)
            self.assertEqual(atom.id, i)
            self.assertEqual(atom.type, Avogadro.PrimitiveType.AtomType)
        # take atom with index 2
        atom = self.molecule.atom(2)
        # store the id
        id = atom.id
        # delete it
        self.molecule.removeAtom(id)
        # test addAtom(id)
        atom = self.molecule.addAtom(id)
        self.assertEqual(atom.id, id)

    def test_removeAtom(self):
        # add 2 atoms
        atom1 = self.molecule.addAtom()
        atom2 = self.molecule.addAtom()
        self.assertEqual(self.molecule.numAtoms, 2)
        # test removeAtom(Atom)
        self.molecule.removeAtom(atom1)
        self.assertEqual(self.molecule.numAtoms, 1)
        # test removeAtom(id)
        self.molecule.removeAtom(atom2.id)
        self.assertEqual(self.molecule.numAtoms, 0)

    def test_addBond(self):
        # add 5 bonds
        for i in range(5):
            # test addBond()
            bond = self.molecule.addBond()
            self.assertEqual(bond.index, i)
            self.assertEqual(bond.id, i)
            self.assertEqual(bond.type, Avogadro.PrimitiveType.BondType)
        # take bond with index 2
        bond = self.molecule.bond(2)
        # store the id
        id = bond.id
        # delete it
        self.molecule.removeBond(id)
        # test addBond(id)
        bond = self.molecule.addBond(id)
        self.assertEqual(bond.id, id)

    def test_removeBond(self):
        # add 2 bonds
        bond1 = self.molecule.addBond()
        bond2 = self.molecule.addBond()
        self.assertEqual(self.molecule.numBonds, 2)
        # test removeBond(Bond)
        self.molecule.removeBond(bond1)
        self.assertEqual(self.molecule.numBonds, 1)
        # test removeBond(id)
        self.molecule.removeBond(bond2.id)
        self.assertEqual(self.molecule.numBonds, 0)

    def test_addHydrogens(self):
        atom = self.molecule.addAtom()
        atom.atomicNumber = 6
        self.molecule.addHydrogens(atom)
        self.assertEqual(self.molecule.numAtoms, 5)

    def test_removeHydrogens(self):
        atom = self.molecule.addAtom()
        atom.atomicNumber = 6
        self.molecule.addHydrogens(atom)
        self.assertEqual(self.molecule.numAtoms, 5)
        self.molecule.removeHydrogens(atom)
        self.assertEqual(self.molecule.numAtoms, 1)

    def test_calculatePartialCharges(self):
        print("FIXME: implement test_calculate_partialCharges")

    def test_addCube(self):
        for i in range(5):
            cube = self.molecule.addCube()
            self.assertEqual(cube.index, i)
            self.assertEqual(cube.id, i)
            self.assertEqual(cube.type, Avogadro.PrimitiveType.CubeType)

    def test_removeCube(self):
        # add 2 cubes
        cube1 = self.molecule.addCube()
        cube2 = self.molecule.addCube()
        self.assertEqual(self.molecule.numCubes, 2)
        # test removeCube(Cube)
        self.molecule.removeCube(cube1)
        self.assertEqual(self.molecule.numCubes, 1)
        # test removeCube(id)
        self.molecule.removeCube(cube2.id)
        self.assertEqual(self.molecule.numCubes, 0)

    def test_addResidue(self):
        for i in range(5):
            residue = self.molecule.addResidue()
            self.assertEqual(residue.index, i)
            self.assertEqual(residue.id, i)
            self.assertEqual(residue.type, Avogadro.PrimitiveType.ResidueType)

    def test_removeResidue(self):
        # add 2 residues
        residue1 = self.molecule.addResidue()
        residue2 = self.molecule.addResidue()
        self.assertEqual(self.molecule.numResidues, 2)
        # test removeResidue(Residue)
        self.molecule.removeResidue(residue1)
        self.assertEqual(self.molecule.numResidues, 1)
        # test removeResidue(id)
        self.molecule.removeResidue(residue2.id)
        self.assertEqual(self.molecule.numResidues, 0)

    def test_addRing(self):
        for i in range(5):
            ring = self.molecule.addRing()
            self.assertEqual(ring.index, i)
            self.assertEqual(ring.id, i)
            self.assertEqual(ring.type, Avogadro.PrimitiveType.FragmentType)

    def test_removeRing(self):
        # add 2 rings
        ring1 = self.molecule.addRing()
        ring2 = self.molecule.addRing()
        self.assertEqual(self.molecule.numRings, 2)
        # test removeRing(Ring)
        self.molecule.removeRing(ring1)
        self.assertEqual(self.molecule.numRings, 1)
        # test removeRing(id)
        self.molecule.removeRing(ring2.id)
        self.assertEqual(self.molecule.numRings, 0)

    def test_numXXX(self):
        for i in range(5):
            self.molecule.addAtom()
            self.molecule.addBond()
            self.molecule.addResidue()
            self.molecule.addCube()
            self.molecule.addRing()
        self.assertEqual(self.molecule.numAtoms, 5)
        self.assertEqual(self.molecule.numBonds, 5)
        self.assertEqual(self.molecule.numResidues, 5)
        self.assertEqual(self.molecule.numCubes, 5)
        self.assertEqual(self.molecule.numRings, 5)

    def test_atom(self):
        # add 3 atoms
        for i in range(3):
            self.molecule.addAtom()
        # delete the 2nd
        self.molecule.removeAtom(1)
        # atom with index 1 should now have id 2
        self.assertEqual(self.molecule.atom(1).id, 2)

    def test_atomById(self):
        # add 3 atoms
        for i in range(3):
            self.molecule.addAtom()
        # delete the 2nd
        self.molecule.removeAtom(1)
        # atom with id 2 should now have index 1
        self.assertEqual(self.molecule.atomById(2).index, 1)

    def test_bond(self):
        # add 3 bonds
        for i in range(3):
            self.molecule.addBond()
        # delete the 2nd
        self.molecule.removeBond(1)
        # bond with index 1 should now have id 2
        self.assertEqual(self.molecule.bond(1).id, 2)

    def test_bond_id(self):
        # add 10 atoms & bonds
        for i in range(10):
            self.molecule.addAtom()
            self.molecule.addBond()
        # add a bonds
        bond = self.molecule.addBond()
        # set the atoms to id 4 & 5, order 1
        bond.setAtoms(4, 5, 1)
        # test molecule.bond(id, id)
        self.assertEqual(self.molecule.bond(4, 5).id, bond.id)

    def test_bond_Bond(self):
        # add 10 atoms & bonds
        for i in range(10):
            self.molecule.addAtom()
            self.molecule.addBond()
        # get atom with id 4 & 5
        atom4 = self.molecule.atomById(4)
        atom5 = self.molecule.atomById(5)
        # add a bonds
        bond = self.molecule.addBond()
        # set the atoms to id 4 & 5, order 1
        bond.setAtoms(atom4.id, atom5.id, 1)
        # test molecule.bond(Atom, Atom)
        self.assertEqual(self.molecule.bond(atom4, atom5).id, bond.id)

    def test_bondById(self):
        # add 3 bonds
        for i in range(3):
            self.molecule.addBond()
        # delete the 2nd
        self.molecule.removeBond(1)
        # bond with id 2 should now have index 1
        self.assertEqual(self.molecule.bondById(2).index, 1)

    def test_residue(self):
        # add 3 residues
        for i in range(3):
            self.molecule.addResidue()
        # delete the 2nd
        self.molecule.removeResidue(1)
        # residue with index 1 should now have id 2
        self.assertEqual(self.molecule.residue(1).id, 2)

    def test_residueById(self):
        # add 3 residues
        for i in range(3):
            self.molecule.addResidue()
        # delete the 2nd
        self.molecule.removeResidue(1)
        # residue with id 2 should now have index 1
        self.assertEqual(self.molecule.residueById(2).index, 1)

    def test_atoms(self):
        # add 10 atoms
        for i in range(10):
            self.molecule.addAtom()
        i = 0
        for atom in self.molecule.atoms:
            self.assertEqual(atom.type, Avogadro.PrimitiveType.AtomType)
            self.assertEqual(atom.id, i)
            i += 1

    def test_bonds(self):
        # add 10 bonds
        for i in range(10):
            self.molecule.addBond()
        i = 0
        for bond in self.molecule.bonds:
            self.assertEqual(bond.type, Avogadro.PrimitiveType.BondType)
            self.assertEqual(bond.id, i)
            i += 1

    def test_cubes(self):
        # add 10 cubes
        for i in range(10):
            self.molecule.addCube()
        i = 0
        for cube in self.molecule.cubes:
            self.assertEqual(cube.type, Avogadro.PrimitiveType.CubeType)
            self.assertEqual(cube.id, i)
            i += 1

    def test_residues(self):
        # add 10 residues
        for i in range(10):
            self.molecule.addResidue()
        i = 0
        for residue in self.molecule.residues:
            self.assertEqual(residue.type, Avogadro.PrimitiveType.ResidueType)
            self.assertEqual(residue.id, i)
            i += 1

    def test_rings(self):
        # add 10 rings
        for i in range(10):
            # bug fix: this previously called addAtom(), leaving
            # molecule.rings empty so the loop below never executed and
            # the test passed vacuously.
            self.molecule.addRing()
        i = 0
        for ring in self.molecule.rings:
            self.assertEqual(ring.type, Avogadro.PrimitiveType.FragmentType)
            self.assertEqual(ring.id, i)
            i += 1

    def test_clear(self):
        for i in range(5):
            self.molecule.addAtom()
            self.molecule.addBond()
            self.molecule.addResidue()
            self.molecule.addCube()
            self.molecule.addRing()
        self.molecule.clear()
        self.assertEqual(self.molecule.numAtoms, 0)
        self.assertEqual(self.molecule.numBonds, 0)
        self.assertEqual(self.molecule.numResidues, 0)
        self.assertEqual(self.molecule.numCubes, 0)
        self.assertEqual(self.molecule.numRings, 0)

    def test_center(self):
        atom1 = self.molecule.addAtom()
        atom2 = self.molecule.addAtom()
        atom1.pos = array([1.0, 2.0, 3.0])
        atom2.pos = array([4.0, 5.0, 6.0])
        # compute the center
        center = (atom1.pos + atom2.pos) / 2.0
        self.assertEqual(self.molecule.center[0], center[0])
        self.assertEqual(self.molecule.center[1], center[1])
        self.assertEqual(self.molecule.center[2], center[2])

    def test_normalVector(self):
        atom1 = self.molecule.addAtom()
        atom2 = self.molecule.addAtom()
        atom1.pos = array([1.0, 2.0, 3.0])
        atom2.pos = array([4.0, 5.0, 6.0])
        n = self.molecule.normalVector
        # just check we got an array with size 3
        self.assertEqual(len(n), 3)

    def test_radius(self):
        # just check the method is there
        self.molecule.radius

    def test_farthestAtom(self):
        # just check the method is there
        self.molecule.farthestAtom

    def test_translate(self):
        print("FIXME: Molecule::translate(Eigen::Vector3d isn't implemented)")
        # just check the method is there and accepts the array
        atom = self.molecule.addAtom()
        vec = array([1., 2., 3.])
        self.molecule.translate(vec)
# Standard unittest entry point.
if __name__ == "__main__":
unittest.main()
| mit |
gamajr/EZNCoder | engine/generator.py | 1 | 3020 | # -*- coding: utf-8 -*-
import string
from infoparser import MInfo
class MEGenerator():
    """Generates MEncoder command lines for the supported conversions."""

    def __init__(self):
        self._cut_cmd = string.Template("")
        self.info = MInfo()
        # operation names accepted by gen_convert_line()
        self._supported_ops = ['sub', 'wmv2avi', 'avixvid']

    def gen_convert_line(self, media_file, operation):
        """Return the MEncoder command line for *operation* applied to
        *media_file*, or None when the operation is not supported."""
        handlers = {
            'sub': self._subtitle,
            'wmv2avi': self._wmv2avi,
            'avixvid': self._avixvid,
        }
        handler = handlers.get(operation)
        return handler(media_file) if handler is not None else None

    def gen_cut_line(self, media_file, cut_point=None):
        """Generate a list of command lines to cut a video with MEncoder.
        If both arguments are None, the video is split in two.
        (Not implemented yet.)"""
        pass

    def _audio_opts(self):
        """Return the MEncoder audio options for the file last parsed by
        self.info: pass MP3 audio through, re-encode anything else."""
        if self.info.get_adata('Codec ID/Hint') == 'MP3':
            return 'copy'
        return 'mp3lame -lameopts cbr:mode=2:br=192'

    def _subtitle(self, media_file):
        """Command line to hard-burn <base>.srt subtitles into <base>.avi."""
        cmd = string.Template("""mencoder -oac $audio_opts -ovc xvid -xvidencopts
        bitrate=$br -sub $srt_file -subpos 90 -subfont-text-scale 3
        -subfont-outline 2 -subcp ISO-8859-1 -sub-bg-alpha 200 -o $conv_file $orig_file""")
        base_name = media_file[:-4]
        self.info.parse_data(base_name + '.avi')
        # round the source bitrate up to the next multiple of 50 kbps
        kbps = int(self.info.get_vdata('Bit rate').split()[0])
        if kbps % 50 != 0:
            br = str(kbps + (50 - kbps % 50))
        else:
            br = str(kbps)
        return ' '.join(cmd.substitute({'audio_opts': self._audio_opts(), 'br': br,
                                        'srt_file': base_name + '.srt',
                                        'conv_file': base_name + '_sub.avi',
                                        'orig_file': base_name + '.avi'}).split())

    def _wmv2avi(self, media_file):
        """Command line to convert <base>.wmv into an AVI container."""
        cmd = string.Template("""mencoder -oac mp3lame -lameopts cbr:mode=2:br=64
        -ovc lavc -ofps 23.976 -o $conv_file $orig_file""")
        base_name = media_file[:-4]
        return ' '.join(cmd.substitute({'conv_file': base_name + '_conv.avi',
                                        'orig_file': base_name + '.wmv'}).split())

    def _avixvid(self, media_file):
        """Command line to re-encode <base>.avi with XviD at 850 kbps."""
        cmd = string.Template("""mencoder -oac $audio_opts -ovc xvid -xvidencopts
        bitrate=850 -o $conv_file $orig_file""")
        base_name = media_file[:-4]
        self.info.parse_data(base_name + '.avi')
        return ' '.join(cmd.substitute({'audio_opts': self._audio_opts(),
                                        'conv_file': base_name + '_conv.avi',
                                        'orig_file': base_name + '.avi'}).split())

    def get_supported_operations(self):
        """Return the list of operation names gen_convert_line() accepts."""
        return self._supported_ops
#TODO: Implement gen_cut_line!!!!
#example: mencoder infile.wmv -ofps 23.976 -ovc lavc -oac copy -o outfile.avi
osynge/ceph-deploy | ceph_deploy/hosts/rhel/install.py | 8 | 1931 | from ceph_deploy.util import templates
def install(distro, version_kind, version, adjust_repos, **kw):
    """Clean the package cache and install the requested ceph components."""
    components = kw.get('components', [])
    packager = distro.packager
    packager.clean()
    packager.install(components)
def mirror_install(distro, repo_url,
                   gpg_url, adjust_repos, extra_installs=True, **kw):
    """Configure a mirrored ceph repo (optionally) and install components."""
    components = kw.get('components', [])
    # normalise the URL; note str.strip('/') removes slashes at BOTH ends
    normalized_url = repo_url.strip('/')

    distro.packager.clean()

    if adjust_repos:
        distro.packager.add_repo_gpg_key(gpg_url)
        repo_file = templates.ceph_repo.format(
            repo_url=normalized_url,
            gpg_url=gpg_url,
        )
        distro.conn.remote_module.write_yum_repo(repo_file)

    if extra_installs and components:
        distro.packager.install(components)
def repo_install(distro, reponame, baseurl, gpgkey, **kw):
    """Write a custom yum repo file and optionally install ceph components."""
    # do we have specific components to install?
    # removed them from `kw` so that we don't mess with other defaults
    components = kw.pop('components', [])

    # pull out the known options, leaving any extras in `kw` for the template
    repo_name = kw.pop('name', '%s repo' % reponame)
    is_enabled = kw.pop('enabled', 1)
    check_gpg = kw.pop('gpgcheck', 1)
    install_ceph = kw.pop('install_ceph', False)
    proxy = kw.pop('proxy', '')  # will get ignored if empty
    metadata_type = 'repo-md'  # NOTE(review): yum normally uses 'rpm-md'; confirm intended

    # normalise the URL; str.strip('/') removes slashes at both ends
    baseurl = baseurl.strip('/')

    distro.packager.clean()

    if gpgkey:
        distro.packager.add_repo_gpg_key(gpgkey)

    repo_content = templates.custom_repo(
        reponame=reponame,
        name=repo_name,
        baseurl=baseurl,
        enabled=is_enabled,
        gpgcheck=check_gpg,
        _type=metadata_type,
        gpgkey=gpgkey,
        proxy=proxy,
        **kw
    )

    distro.conn.remote_module.write_yum_repo(
        repo_content,
        "%s.repo" % reponame
    )

    # Some custom repos do not need to install ceph
    if install_ceph and components:
        distro.packager.install(components)
| mit |
jperon/musite | static/js/brython/Lib/time.py | 518 | 12346 | from browser import window
import javascript
# use Javascript Date constructor
# JS Date constructor exposed as a Python callable; all time handling in
# this module is delegated to the browser's Date object.
date = javascript.JSConstructor(window.Date)
#daylight = 0 # fix me.. returns Non zero if DST timezone is defined
##############################################
# Added to pass some tests
# Are there timezone always in the browser?
# I'm assuming we don't have always this info
# Number of fields in a struct_time (mirrors CPython's _STRUCT_TM_ITEMS).
_STRUCT_TM_ITEMS = 9
##############################################
##############################################
## Helper functions
def _get_day_of_year(arg):
"""
Get the day position in the year starting from 1
Parameters
----------
arg : tuple
Returns
-------
int with the correct day of the year starting from 1
"""
ml = [31,28,31,30,31,30,31,31,30,31,30,31]
if arg[0]%4==0:
ml[1] += 1
i=1
yday=0
while i<arg[1]:
yday += ml[i-1]
i += 1
yday += arg[2]
return yday
def _get_week_of_year(arg):
"""
Get the week position in the year starting from 0. All days in a new
year preceding the first Monday are considered to be in week 0.
Parameters
----------
arg : tuple
struct_time-style tuple; uses arg[0..2] (date) and arg[7] (day of year)
Returns
-------
int with the correct iso week (weeks starting on Monday) of the year.
"""
# NOTE: JS Date months are 0-based, hence arg[1]-1
d1 = date(arg[0], arg[1]-1, arg[2])
d0 = date(arg[0], 0, 1)
# JS getDay(): Sunday == 0; remap Sunday to 7 so Monday == 1
firstday = d0.getDay()
if firstday == 0 : firstday = 7
# number of days of week 0 (before the first Monday)
firstweek = 8 - firstday
doy = arg[7]
if firstday != 1:
doy = doy - firstweek
# round the remaining days up to whole weeks
if doy % 7 == 0:
week_number = doy // 7
else:
week_number = doy // 7 + 1
return week_number
def _check_struct_time(t):
mm = t[1]
if mm == 0: mm = 1
if -1 > mm > 13: raise ValueError("month out of range")
dd = t[2]
if dd == 0: dd = 1
if -1 > dd > 32: raise ValueError("day of month out of range")
hh = t[3]
if -1 > hh > 24: raise ValueError("hour out of range")
minu = t[4]
if -1 > minu > 60: raise ValueError("minute out of range")
ss = t[5]
if -1 > ss > 62: raise ValueError("seconds out of range")
wd = t[6] % 7
if wd < -2: raise ValueError("day of week out of range")
dy = t[7]
if dy == 0: dy = 1
if -1 > dy > 367: raise ValueError("day of year out of range")
return t[0], mm, dd, hh, minu, ss, wd, dy, t[-1]
def _is_dst(secs = None):
"Check if data has daylight saving time"
# Uses the browser's Date: compare the offset of the given moment against
# the max offset of January/July; a smaller offset means DST is in effect.
d = date()
if secs is not None:
d = date(secs*1000)
# calculate if we are in daylight savings time or not.
# borrowed from http://stackoverflow.com/questions/11887934/check-if-daylight-saving-time-is-in-effect-and-if-it-is-for-how-many-hours
jan = date(d.getFullYear(), 0, 1)
jul = date(d.getFullYear(), 6, 1)
dst = int(d.getTimezoneOffset() < max(abs(jan.getTimezoneOffset()), abs(jul.getTimezoneOffset())))
return dst
def _get_tzname():
    """Return a (zone_name, 'NotAvailable') tuple parsed from the browser's
    Date.toTimeString(), or ('', '') when no zone name is present."""
    d = date()
    d = d.toTimeString()
    try:
        # e.g. "00:00:00 GMT+0100 (Central European Standard Time)"
        d = d.split('(')[1].split(')')[0]
        return (d, 'NotAvailable')
    except IndexError:
        # no parenthesised zone name; the original bare except also hid
        # unrelated errors, so only the expected failure is caught now
        return ('', '')
def _set_altzone():
# Compute the DST (alternate) timezone offset in seconds west of UTC,
# from the module-level `timezone` and the Jan/Jul offset difference.
d = date()
jan = date(d.getFullYear(), 0, 1)
jul = date(d.getFullYear(), 6, 1)
# getTimezoneOffset() is in minutes, hence the * 60
result = timezone - (jan.getTimezoneOffset() - jul.getTimezoneOffset()) * 60
return result
def _check_input(t):
    """Coerce *t* into a 9-item time tuple.

    Accepts a struct_time, a 9-tuple, or a falsy value (meaning "now",
    via localtime()); raises TypeError for anything else.
    """
    if t and isinstance(t, struct_time) and len(t.args) == 9:
        t = t.args
    elif t and isinstance(t, tuple) and len(t) == 9:
        t = t
    elif t and isinstance(t, struct_time) and len(t.args) != 9:
        raise TypeError("function takes exactly 9 arguments ({} given)".format(len(t.args)))
    elif t and isinstance(t, tuple) and len(t) != 9:
        # bug fix: plain tuples have no .args attribute, so this error path
        # previously raised AttributeError instead of the intended TypeError
        raise TypeError("function takes exactly 9 arguments ({} given)".format(len(t)))
    elif t and not isinstance(t, (tuple, struct_time)):
        raise TypeError("Tuple or struct_time argument required")
    else:
        t = localtime().args
    return t
## end of helper functions
##############################################
##############################################
## Values depending the timezone of the browser.
# 1 if DST currently applies in the browser's zone, else 0
daylight = _is_dst()
# seconds west of UTC (JS getTimezoneOffset() is in minutes)
timezone = date().getTimezoneOffset() * 60
tzname = _get_tzname()
altzone = _set_altzone() if daylight else timezone
##############################################
def asctime(t = None):
# Return a string such as 'Mon Jan  6 14:35:02 2020' for *t*, which may
# be a struct_time, a 9-tuple, or None (meaning localtime()).
# day-of-week mapping uses Monday == 0, matching struct_time.tm_wday
weekdays = {0: "Mon", 1: "Tue", 2: "Wed", 3: "Thu",
4: "Fri", 5: "Sat", 6: "Sun"}
months = {1:'Jan',2:'Feb',3:'Mar',4:'Apr',5:'May',6:'Jun',
7:'Jul',8:'Aug',9:'Sep',10:'Oct',11:'Nov',12:'Dec'}
t = _check_input(t)
t = _check_struct_time(t)
result = "%s %s %2d %02d:%02d:%02d %d" % (
weekdays[t[6]], months[t[1]], t[2], t[3], t[4], t[5], t[0])
return result
def ctime(timestamp=None):
# Return an asctime-style string for *timestamp* (seconds since the
# epoch), defaulting to "now". All arithmetic is done via the JS Date.
if timestamp is None:
timestamp = date().getTime() / 1000.
d = date(0)
d.setUTCSeconds(timestamp)
# determine whether DST applies at that moment (Jan/Jul offset trick)
jan = date(d.getFullYear(), 0, 1)
jul = date(d.getFullYear(), 6, 1)
dst = int(d.getTimezoneOffset() < max(jan.getTimezoneOffset(), jul.getTimezoneOffset()))
# NOTE(review): shifting by (1 + dst) hours looks zone-specific (UTC+1
# base); confirm this is correct for arbitrary browser timezones.
d = date(0)
d.setUTCSeconds(timestamp + (1 + dst) * 3600)
# JS getUTCDay(): Sunday == 0, hence this mapping differs from asctime's
weekdays = {1: "Mon", 2: "Tue", 3: "Wed", 4: "Thu",
5: "Fri", 6: "Sat", 0: "Sun"}
# JS getUTCMonth(): January == 0
months = {0:'Jan',1:'Feb',2:'Mar',3:'Apr',4:'May',5:'Jun',
6:'Jul',7:'Aug',8:'Sep',9:'Oct',10:'Nov',11:'Dec'}
result = "%s %s %2d %02d:%02d:%02d %d" % (weekdays[d.getUTCDay()],
months[d.getUTCMonth()], d.getUTCDate(),
d.getUTCHours(), d.getUTCMinutes(), d.getUTCSeconds(),
d.getUTCFullYear())
return result
def gmtime(secs = None):
# Return a struct_time in UTC for *secs* (seconds since the epoch),
# defaulting to "now".
d = date()
if secs is not None:
d = date(secs*1000)
# JS getUTCDay(): Sunday == 0; struct_time wants Monday == 0
wday = d.getUTCDay() - 1 if d.getUTCDay() - 1 >= 0 else 6
# JS months are 0-based, hence getUTCMonth() + 1; dst is 0 for UTC
tmp = struct_time([d.getUTCFullYear(),
d.getUTCMonth()+1, d.getUTCDate(),
d.getUTCHours(), d.getUTCMinutes(), d.getUTCSeconds(),
wday, 0, 0])
# fill in tm_yday after the fact
tmp.args[7] = _get_day_of_year(tmp.args)
return tmp
def localtime(secs = None):
# Return a struct_time in the browser's local zone for *secs* (seconds
# since the epoch), defaulting to "now".
d = date()
if secs is not None:
d = date(secs * 1000)
dst = _is_dst(secs)
# JS getDay(): Sunday == 0; struct_time wants Monday == 0
wday = d.getDay() - 1 if d.getDay() - 1 >= 0 else 6
# JS months are 0-based, hence getMonth() + 1
tmp = struct_time([d.getFullYear(),
d.getMonth()+1, d.getDate(),
d.getHours(), d.getMinutes(), d.getSeconds(),
wday, 0, dst])
# fill in tm_yday after the fact
tmp.args[7] = _get_day_of_year(tmp.args)
return tmp
def mktime(t):
# Inverse of localtime(): convert a local struct_time or 9-tuple into
# seconds since the epoch (float).
if isinstance(t, struct_time):
# JS Date months are 0-based, hence tm_mon - 1
d1 = date(t.tm_year, t.tm_mon - 1, t.tm_mday,
t.tm_hour, t.tm_min, t.tm_sec, 0).getTime()
elif isinstance(t, tuple):
d1 = date(t[0], t[1] - 1, t[2], t[3], t[4], t[5], 0).getTime()
else:
raise ValueError("Tuple or struct_time argument required")
# date(0) is "now"; subtracting yields milliseconds relative to the epoch
d2 = date(0).getTime()
return (d1 - d2) / 1000.
def monotonic():
# browser's high-resolution monotonic clock, converted ms -> seconds
return javascript.JSObject(window.performance.now)()/1000.
def perf_counter():
# wall-clock based counter (JS Date has only millisecond resolution)
return float(date().getTime()/1000.0)
def time():
# seconds since the epoch, from the browser's Date (ms resolution)
return float(date().getTime()/1000)
def sleep(secs):
# Busy-wait: the browser has no blocking sleep, so this spins the CPU
# and freezes the page for *secs* seconds. Avoid in real code.
start = date().getTime()
while date().getTime() - start < secs * 1000.:
pass
def strftime(_format, t=None):
    """Format *t* (struct_time, 9-tuple, or None meaning localtime())
    according to *_format*, supporting a subset of CPython's directives."""
    def ns(t, nb):
        # left-pad the decimal representation with zeroes to *nb* chars
        res = str(t)
        while len(res) < nb:
            res = '0' + res
        return res

    t = _check_input(t)
    t = _check_struct_time(t)
    YY = ns(t[0], 4)
    yy = ns(t[0], 4)[2:]
    mm = ns(t[1], 2)
    dd = ns(t[2], 2)
    HH = t[3]
    HH24 = ns(HH, 2)
    # 12-hour clock: both 0 and 12 must render as "12". The original
    # compared the *string* HH12 with the int 0 (never true), so midnight
    # and noon came out as "00".
    h12 = HH % 12
    if h12 == 0:
        h12 = 12
    HH12 = ns(h12, 2)
    AMPM = 'AM' if 0 <= HH < 12 else 'PM'
    MM = ns(t[4], 2)
    SS = ns(t[5], 2)
    DoY = ns(t[7], 3)
    # %w wants Sunday == 0 while tm_wday has Monday == 0
    w = t[6] + 1 if t[6] < 6 else 0
    W = ns(_get_week_of_year(t), 2)
    abb_weekdays = ['Sun','Mon','Tue','Wed','Thu','Fri','Sat']
    full_weekdays = ['Sunday','Monday','Tuesday','Wednesday',
                     'Thursday','Friday','Saturday']
    abb_months = ['Jan','Feb','Mar','Apr','May','Jun',
                  'Jul','Aug','Sep','Oct','Nov','Dec']
    full_months = ['January','February','March','April','May','June',
                   'July','August','September','October','November','December']
    res = _format
    res = res.replace("%H", HH24)
    res = res.replace("%I", HH12)
    res = res.replace("%p", AMPM)
    res = res.replace("%M", MM)
    res = res.replace("%S", SS)
    res = res.replace("%Y", YY)
    res = res.replace("%y", yy)
    res = res.replace("%m", mm)
    res = res.replace("%d", dd)
    res = res.replace("%a", abb_weekdays[w])
    res = res.replace("%A", full_weekdays[w])
    res = res.replace("%b", abb_months[int(mm) - 1])
    res = res.replace("%B", full_months[int(mm) - 1])
    res = res.replace("%j", DoY)
    # bug fix: str.replace requires a string replacement; passing the int
    # `w` raised TypeError on every call
    res = res.replace("%w", str(w))
    res = res.replace("%W", W)
    res = res.replace("%x", mm + '/' + dd + '/' + yy)
    res = res.replace("%X", HH24 + ':' + MM + ':' + SS)
    res = res.replace("%c", abb_weekdays[w] + ' ' + abb_months[int(mm) - 1] +
                      ' ' + dd + ' ' + HH24 + ':' + MM + ':' + SS + ' ' + YY)
    res = res.replace("%%", '%')
    return res
class struct_time:
    """Minimal stand-in for CPython's time.struct_time: a 9-item sequence
    exposing the standard tm_* attribute aliases.

    Field order: (tm_year, tm_mon, tm_mday, tm_hour, tm_min, tm_sec,
    tm_wday, tm_yday, tm_isdst).
    """

    def __init__(self, args):
        if len(args) != 9:
            raise TypeError("time.struct_time() takes a 9-sequence (%s-sequence given)" % len(args))
        # Kept as the raw (mutable) sequence so gmtime()/localtime() can
        # patch tm_yday in after construction.
        self.args = args

    @property
    def tm_year(self):
        return self.args[0]

    @property
    def tm_mon(self):
        return self.args[1]

    @property
    def tm_mday(self):
        return self.args[2]

    @property
    def tm_hour(self):
        return self.args[3]

    @property
    def tm_min(self):
        return self.args[4]

    @property
    def tm_sec(self):
        return self.args[5]

    @property
    def tm_wday(self):
        return self.args[6]

    @property
    def tm_yday(self):
        return self.args[7]

    @property
    def tm_isdst(self):
        return self.args[8]

    def __getitem__(self, i):
        return self.args[i]

    def __iter__(self):
        return iter(self.args)

    def __repr__(self):
        # Bug fix: the original printed "time.structime(... tm_day=...)";
        # match CPython's names ("struct_time", "tm_mday").
        return ("time.struct_time(tm_year={}, tm_mon={}, tm_mday={}, "
                "tm_hour={}, tm_min={}, tm_sec={}, tm_wday={}, "
                "tm_yday={}, tm_isdst={})").format(*self.args)

    def __str__(self):
        return self.__repr__()
def to_struct_time(ptuple):
    """Convert the packed tuple produced by the _strptime module into a
    struct_time.

    *ptuple* carries the parsed fields in its "arg" attribute.  The tuple
    received from module _strptime has 7 elements; this appends tm_yday
    (rank of the day in the year, range [1, 366]) and tm_isdst (-1 =
    unknown).
    """
    arg = ptuple.arg
    year = arg[0]
    month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
    # Bug fix: apply the full Gregorian leap-year rule; the original used
    # "year % 4 == 0" only, which wrongly treats e.g. 1900 as a leap year.
    if year % 4 == 0 and (year % 100 != 0 or year % 400 == 0):
        month_lengths[1] += 1
    # Days in the fully elapsed months, plus the day-of-month.
    yday = sum(month_lengths[:arg[1] - 1]) + arg[2]
    arg.append(yday)
    arg.append(-1)
    return struct_time(arg)
def strptime(string, _format):
    """Parse *string* according to *_format* by delegating to the generic
    _strptime module, passing to_struct_time as the result builder.

    NOTE(review): wrapping the result in struct_time([...]) produces a
    1-element sequence, which would trip struct_time's 9-item length check —
    presumably _strptime_datetime already returns the finished value; confirm
    against the Brython _strptime implementation.
    """
    import _strptime
    return struct_time([_strptime._strptime_datetime(to_struct_time, string, _format)])
# All the clock_xx machinery shouldn't work in the browser so some
# NotImplementedErrors or messages are shown
_clock_msg = """Browser cannot access CPU. See '%s'"""

# Shared prefix of the CPython docs anchors referenced in the messages.
_TIME_DOC = "https://docs.python.org/3/library/time.html#time."

def _clock_xx(url):
    raise NotImplementedError(_clock_msg % url)

def clock():
    """Unsupported in the browser; always raises NotImplementedError."""
    _clock_xx(_TIME_DOC + "clock")

def clock_getres():
    """Unsupported in the browser; always raises NotImplementedError."""
    _clock_xx(_TIME_DOC + "clock_getres")

def clock_gettime():
    """Unsupported in the browser; always raises NotImplementedError."""
    _clock_xx(_TIME_DOC + "clock_gettime")

def clock_settime():
    """Unsupported in the browser; always raises NotImplementedError."""
    _clock_xx(_TIME_DOC + "clock_settime")

# The CLOCK_* constants cannot exist here either; expose explanatory
# strings with the same documentation pointer instead.
CLOCK_HIGHRES = _clock_msg % (_TIME_DOC + "CLOCK_HIGHRES")
CLOCK_MONOTONIC = _clock_msg % (_TIME_DOC + "CLOCK_MONOTONIC")
CLOCK_MONOTONIC_RAW = _clock_msg % (_TIME_DOC + "CLOCK_MONOTONIC_RAW")
CLOCK_PROCESS_CPUTIME_ID = _clock_msg % (_TIME_DOC + "CLOCK_PROCESS_CPUTIME_ID")
CLOCK_REALTIME = _clock_msg % (_TIME_DOC + "CLOCK_REALTIME")
CLOCK_THREAD_CPUTIME_ID = _clock_msg % (_TIME_DOC + "CLOCK_THREAD_CPUTIME_ID")

def get_clock_info():
    """Unsupported in the browser; always raises NotImplementedError."""
    _clock_xx(_TIME_DOC + "get_clock_info")

def process_time():
    """Unsupported in the browser; always raises NotImplementedError."""
    _clock_xx(_TIME_DOC + "process_time")
def tzset():
    """Not available in the browser: the JS Date API offers no way to change
    the process time zone."""
    raise NotImplementedError()
| mit |
codekaki/odoo | addons/document_page/wizard/document_page_show_diff.py | 59 | 2212 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
import base64
class showdiff(osv.osv_memory):
    """Display the difference between selected document page revisions."""
    _name = 'wizard.document.page.history.show_diff'

    def get_diff(self, cr, uid, context=None):
        """Return the textual diff for the history revisions in
        context['active_ids'].

        Two selected ids: diff between them (older first).  One selected id:
        diff against the latest revision of the same page.  Anything else
        raises a user-facing warning.
        """
        if context is None:
            context = {}
        history = self.pool.get('document.page.history')
        ids = context.get('active_ids', [])
        diff = ""
        if len(ids) == 2:
            # Always diff from the older revision to the newer one.
            if ids[0] > ids[1]:
                diff = history.getDiff(cr, uid, ids[1], ids[0])
            else:
                diff = history.getDiff(cr, uid, ids[0], ids[1])
        elif len(ids) == 1:
            # Single selection: compare against the page's latest revision.
            old = history.browse(cr, uid, ids[0])
            nids = history.search(cr, uid, [('page_id', '=', old.page_id.id)])
            nids.sort()
            diff = history.getDiff(cr, uid, ids[0], nids[-1])
        else:
            raise osv.except_osv(_('Warning!'), _('You need to select minimum one or maximum two history revisions!'))
        return diff

    _columns = {
        'diff': fields.text('Diff', readonly=True),
    }
    _defaults = {
        # Pre-fill the read-only text field with the computed diff.
        'diff': get_diff
    }
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
City-of-Bloomington/green-rental | south/modelsinspector.py | 33 | 17469 | """
Like the old south.modelsparser, but using introspection where possible
rather than direct inspection of models.py.
"""
from __future__ import print_function
import datetime
import re
import decimal
from south.utils import get_attribute, auto_through
from south.utils.py3 import text_type
from django.db import models
from django.db.models.base import ModelBase, Model
from django.db.models.fields import NOT_PROVIDED
from django.conf import settings
from django.utils.functional import Promise
from django.contrib.contenttypes import generic
from django.utils.datastructures import SortedDict
from django.utils import datetime_safe
NOISY = False
try:
from django.utils import timezone
except ImportError:
timezone = False
# Define any converter functions first to prevent NameErrors
def convert_on_delete_handler(value):
    """Serialise a ForeignKey on_delete handler into a "models.X" source
    string for the frozen migration.

    Raises ValueError for handlers that cannot be represented, and on Django
    versions without on_delete support (detected via models.PROTECT).
    """
    django_db_models_module = 'models'  # relative to standard import 'django.db'
    if hasattr(models, "PROTECT"):
        if value in (models.CASCADE, models.PROTECT, models.DO_NOTHING, models.SET_DEFAULT):
            # straightforward functions
            return '%s.%s' % (django_db_models_module, value.__name__)
        else:
            # This is totally dependent on the implementation of django.db.models.deletion.SET
            func_name = getattr(value, '__name__', None)
            if func_name == 'set_on_delete':
                # we must inspect the function closure to see what parameters were passed in
                closure_contents = value.__closure__[0].cell_contents
                if closure_contents is None:
                    return "%s.SET_NULL" % (django_db_models_module)
                # simple function we can perhaps cope with:
                elif hasattr(closure_contents, '__call__'):
                    raise ValueError("South does not support on_delete with SET(function) as values.")
                else:
                    # Attempt to serialise the value
                    return "%s.SET(%s)" % (django_db_models_module, value_clean(closure_contents))
        # Unrecognised handler (or SET with an unsupported payload).
        raise ValueError("%s was not recognized as a valid model deletion handler. Possible values: %s." % (value, ', '.join(f.__name__ for f in (models.CASCADE, models.PROTECT, models.SET, models.SET_NULL, models.SET_DEFAULT, models.DO_NOTHING))))
    else:
        raise ValueError("on_delete argument encountered in Django version that does not support it")
# Gives information about how to introspect certain fields.
# This is a list of triples; the first item is a list of fields it applies to,
# (note that isinstance is used, so superclasses are perfectly valid here)
# the second is a list of positional argument descriptors, and the third
# is a list of keyword argument descriptors.
# Descriptors are of the form:
# [attrname, options]
# Where attrname is the attribute on the field to get the value from, and options
# is an optional dict.
#
# The introspector uses the combination of all matching entries, in order.
introspection_details = [
    # Options common to every Field subclass.
    (
        (models.Field, ),
        [],
        {
            "null": ["null", {"default": False}],
            "blank": ["blank", {"default": False, "ignore_if": "primary_key"}],
            "primary_key": ["primary_key", {"default": False}],
            "max_length": ["max_length", {"default": None}],
            "unique": ["_unique", {"default": False}],
            "db_index": ["db_index", {"default": False}],
            "default": ["default", {"default": NOT_PROVIDED, "ignore_dynamics": True}],
            "db_column": ["db_column", {"default": None}],
            "db_tablespace": ["db_tablespace", {"default": settings.DEFAULT_INDEX_TABLESPACE}],
        },
    ),
    # Relational fields: the target model, reverse name, and deletion handler.
    (
        (models.ForeignKey, models.OneToOneField),
        [],
        dict([
            ("to", ["rel.to", {}]),
            ("to_field", ["rel.field_name", {"default_attr": "rel.to._meta.pk.name"}]),
            ("related_name", ["rel.related_name", {"default": None}]),
            ("db_index", ["db_index", {"default": True}]),
            ("on_delete", ["rel.on_delete", {"default": getattr(models, "CASCADE", None), "is_django_function": True, "converter": convert_on_delete_handler, "ignore_missing": True}])
        ])
    ),
    (
        (models.ManyToManyField,),
        [],
        {
            "to": ["rel.to", {}],
            "symmetrical": ["rel.symmetrical", {"default": True}],
            "related_name": ["rel.related_name", {"default": None}],
            "db_table": ["db_table", {"default": None}],
            # TODO: Kind of ugly to add this one-time-only option
            "through": ["rel.through", {"ignore_if_auto_through": True}],
        },
    ),
    (
        (models.DateField, models.TimeField),
        [],
        {
            "auto_now": ["auto_now", {"default": False}],
            "auto_now_add": ["auto_now_add", {"default": False}],
        },
    ),
    (
        (models.DecimalField, ),
        [],
        {
            "max_digits": ["max_digits", {"default": None}],
            "decimal_places": ["decimal_places", {"default": None}],
        },
    ),
    (
        (models.SlugField, ),
        [],
        {
            "db_index": ["db_index", {"default": True}],
        },
    ),
    (
        (models.BooleanField, ),
        [],
        {
            "default": ["default", {"default": NOT_PROVIDED, "converter": bool}],
            "blank": ["blank", {"default": True, "ignore_if": "primary_key"}],
        },
    ),
    (
        (models.FilePathField, ),
        [],
        {
            "path": ["path", {"default": ''}],
            "match": ["match", {"default": None}],
            "recursive": ["recursive", {"default": False}],
        },
    ),
    (
        (generic.GenericRelation, ),
        [],
        {
            "to": ["rel.to", {}],
            "symmetrical": ["rel.symmetrical", {"default": True}],
            "object_id_field": ["object_id_field_name", {"default": "object_id"}],
            "content_type_field": ["content_type_field_name", {"default": "content_type"}],
            "blank": ["blank", {"default": True}],
        },
    ),
]
# Regexes of allowed field full paths
# (raw strings: "\." in a non-raw string is an invalid escape sequence and
# emits a SyntaxWarning on modern Python; the values are unchanged)
allowed_fields = [
    r"^django\.db",
    r"^django\.contrib\.contenttypes\.generic",
    r"^django\.contrib\.localflavor",
]

# Regexes of ignored fields (custom fields which look like fields, but have no column behind them)
ignored_fields = [
    r"^django\.contrib\.contenttypes\.generic\.GenericRelation",
    r"^django\.contrib\.contenttypes\.generic\.GenericForeignKey",
]
# Similar, but for Meta, so just the inner level (kwds).
# Each entry maps a frozen-definition keyword to [attribute-on-_meta, options];
# options use the same keys as introspection_details descriptors.
meta_details = {
    "db_table": ["db_table", {"default_attr_concat": ["%s_%s", "app_label", "module_name"]}],
    "db_tablespace": ["db_tablespace", {"default": settings.DEFAULT_TABLESPACE}],
    "unique_together": ["unique_together", {"default": []}],
    # ignore_missing: index_together only exists on newer Django versions.
    "index_together": ["index_together", {"default": [], "ignore_missing": True}],
    "ordering": ["ordering", {"default": []}],
    "proxy": ["proxy", {"default": False, "ignore_missing": True}],
}
def add_introspection_rules(rules=(), patterns=()):
    """Allows you to add some introspection rules at runtime, e.g. for 3rd party apps.

    *rules* are (classes, args, kwargs) triples appended to
    introspection_details; *patterns* are regexes appended to allowed_fields.
    Defaults are immutable tuples (bug fix: the mutable [] defaults were a
    shared-state hazard).
    """
    assert isinstance(rules, (list, tuple))
    assert isinstance(patterns, (list, tuple))
    allowed_fields.extend(patterns)
    introspection_details.extend(rules)
def add_ignored_fields(patterns):
    """Allows you to add some ignore field patterns.

    *patterns* is a list/tuple of regexes matched against a field class's
    dotted path; matching fields are skipped entirely by the introspector.
    """
    assert isinstance(patterns, (list, tuple))
    ignored_fields.extend(patterns)
def can_ignore(field):
    """
    Returns True if we know for certain that we can ignore this field, False
    otherwise.
    """
    full_name = "%s.%s" % (field.__class__.__module__, field.__class__.__name__)
    return any(re.match(pattern, full_name) for pattern in ignored_fields)
def can_introspect(field):
    """
    Returns True if we are allowed to introspect this field, False otherwise.
    ('allowed' means 'in core'. Custom fields can declare they are introspectable
    by the default South rules by adding the attribute _south_introspects = True.)
    """
    # A field can opt in explicitly via the _south_introspects attribute.
    if getattr(field, "_south_introspects", False):
        return True
    # Otherwise its dotted class path must match one of the allowed patterns.
    full_name = "%s.%s" % (field.__class__.__module__, field.__class__.__name__)
    return any(re.match(pattern, full_name) for pattern in allowed_fields)
def matching_details(field):
    """
    Returns the union of all matching entries in introspection_details for the field.
    """
    merged_args = []
    merged_kwargs = {}
    for classes, args, kwargs in introspection_details:
        if isinstance(field, tuple(classes)):
            merged_args.extend(args)
            merged_kwargs.update(kwargs)
    return merged_args, merged_kwargs
class IsDefault(Exception):
    """
    Exception for when a field contains its default value.

    Used as control flow by get_value(): it means "omit this keyword from
    the frozen definition", not an error.
    """
def get_value(field, descriptor):
    """
    Gets an attribute value from a Field instance and formats it.

    *descriptor* is an [attrname, options] pair as found in
    introspection_details / meta_details.  Raises IsDefault when the value
    should be omitted from the frozen definition.
    """
    attrname, options = descriptor
    # If the options say it's not a attribute name but a real value, use that.
    if options.get('is_value', False):
        value = attrname
    else:
        try:
            value = get_attribute(field, attrname)
        except AttributeError:
            if options.get("ignore_missing", False):
                raise IsDefault
            else:
                raise
    # Lazy-eval functions get eval'd.
    if isinstance(value, Promise):
        value = text_type(value)
    # If the value is the same as the default, omit it for clarity
    if "default" in options and value == options['default']:
        raise IsDefault
    # If there's an ignore_if, use it
    if "ignore_if" in options:
        if get_attribute(field, options['ignore_if']):
            raise IsDefault
    # If there's an ignore_if_auto_through which is True, use it
    if options.get("ignore_if_auto_through", False):
        if auto_through(field):
            raise IsDefault
    # Some default values need to be gotten from an attribute too.
    if "default_attr" in options:
        default_value = get_attribute(field, options['default_attr'])
        if value == default_value:
            raise IsDefault
    # Some are made from a formatting string and several attrs (e.g. db_table)
    if "default_attr_concat" in options:
        format, attrs = options['default_attr_concat'][0], options['default_attr_concat'][1:]
        default_value = format % tuple(map(lambda x: get_attribute(field, x), attrs))
        if value == default_value:
            raise IsDefault
    # Clean and return the value
    return value_clean(value, options)
def value_clean(value, options=None):
    """Takes a value and cleans it up (so e.g. it has timezone working right).

    Returns the repr()-style source string to write into a migration, or the
    raw value when options['is_django_function'] is set.  (Bug fix: the
    mutable default "options={}" is replaced with the None sentinel idiom.)
    """
    if options is None:
        options = {}
    # Lazy-eval functions get eval'd.
    if isinstance(value, Promise):
        value = text_type(value)
    # Callables get called.
    if not options.get('is_django_function', False) and callable(value) and not isinstance(value, ModelBase):
        # Datetime.datetime.now is special, as we can access it from the eval
        # context (and because it changes all the time; people will file bugs otherwise).
        if value == datetime.datetime.now:
            return "datetime.datetime.now"
        elif value == datetime.datetime.utcnow:
            return "datetime.datetime.utcnow"
        elif value == datetime.date.today:
            return "datetime.date.today"
        # In case we use Django's own now function, revert to datetime's
        # original one since we'll deal with timezones on our own.
        elif timezone and value == timezone.now:
            return "datetime.datetime.now"
        # All other callables get called.
        value = value()
    # Models get their own special repr()
    if isinstance(value, ModelBase):
        # If it's a proxy model, follow it back to its non-proxy parent
        if getattr(value._meta, "proxy", False):
            value = value._meta.proxy_for_model
        return "orm['%s.%s']" % (value._meta.app_label, value._meta.object_name)
    # As do model instances
    if isinstance(value, Model):
        if options.get("ignore_dynamics", False):
            raise IsDefault
        return "orm['%s.%s'].objects.get(pk=%r)" % (value.__class__._meta.app_label, value.__class__._meta.object_name, value.pk)
    # Make sure Decimal is converted down into a string
    if isinstance(value, decimal.Decimal):
        value = str(value)
    # in case the value is timezone aware
    datetime_types = (
        datetime.datetime,
        datetime.time,
        datetime_safe.datetime,
    )
    if (timezone and isinstance(value, datetime_types) and
            getattr(settings, 'USE_TZ', False) and
            value is not None and timezone.is_aware(value)):
        default_timezone = timezone.get_default_timezone()
        value = timezone.make_naive(value, default_timezone)
    # datetime_safe has an improper repr value
    if isinstance(value, datetime_safe.datetime):
        value = datetime.datetime(*value.utctimetuple()[:7])
    # converting a date value to a datetime to be able to handle
    # timezones later gracefully
    elif isinstance(value, (datetime.date, datetime_safe.date)):
        value = datetime.datetime(*value.timetuple()[:3])
    # Now, apply the converter func if there is one
    if "converter" in options:
        value = options['converter'](value)
    # Return the final value
    if options.get('is_django_function', False):
        return value
    else:
        return repr(value)
def introspector(field):
    """
    Given a field, introspects its definition triple.

    Returns (args, kwargs) suitable for the frozen field definition; entries
    whose value equals the default are silently omitted (IsDefault).
    """
    arg_descriptors, kwarg_descriptors = matching_details(field)
    frozen_args = []
    frozen_kwargs = {}
    for descriptor in arg_descriptors:
        try:
            frozen_args.append(get_value(field, descriptor))
        except IsDefault:
            pass
    for keyword, descriptor in kwarg_descriptors.items():
        try:
            frozen_kwargs[keyword] = get_value(field, descriptor)
        except IsDefault:
            pass
    return frozen_args, frozen_kwargs
def get_model_fields(model, m2m=False):
    """
    Given a model class, returns a dict of {field_name: field_triple} defs.

    With m2m=True the model's local many-to-many fields are included too.
    A value of None marks a field that could not be introspected.
    """
    field_defs = SortedDict()
    # NOTE(review): inherited_fields is collected but never used below —
    # looks like dead code; confirm before removing.
    inherited_fields = {}
    # Go through all bases (that are themselves models, but not Model)
    for base in model.__bases__:
        if hasattr(base, '_meta') and issubclass(base, models.Model):
            if not base._meta.abstract:
                # Looks like we need their fields, Ma.
                inherited_fields.update(get_model_fields(base))
    # Now, go through all the fields and try to get their definition
    source = model._meta.local_fields[:]
    if m2m:
        source += model._meta.local_many_to_many
    for field in source:
        # Can we ignore it completely?
        if can_ignore(field):
            continue
        # Does it define a south_field_triple method?
        if hasattr(field, "south_field_triple"):
            if NOISY:
                print(" ( Nativing field: %s" % field.name)
            field_defs[field.name] = field.south_field_triple()
        # Can we introspect it?
        elif can_introspect(field):
            # Get the full field class path.
            field_class = field.__class__.__module__ + "." + field.__class__.__name__
            # Run this field through the introspector
            args, kwargs = introspector(field)
            # Workaround for Django bug #13987
            if model._meta.pk.column == field.column and 'primary_key' not in kwargs:
                kwargs['primary_key'] = True
            # That's our definition!
            field_defs[field.name] = (field_class, args, kwargs)
        # Shucks, no definition!
        else:
            if NOISY:
                print(" ( Nodefing field: %s" % field.name)
            field_defs[field.name] = None
    # If they've used the horrific hack that is order_with_respect_to, deal with
    # it.
    if model._meta.order_with_respect_to:
        field_defs['_order'] = ("django.db.models.fields.IntegerField", [], {"default": "0"})
    return field_defs
def get_model_meta(model):
    """
    Given a model class, will return the dict representing the Meta class.
    """
    meta_def = {}
    # Introspect the declarative Meta options first.
    for keyword, descriptor in meta_details.items():
        try:
            meta_def[keyword] = get_value(model._meta, descriptor)
        except IsDefault:
            pass
    # Also, add on any non-abstract model base classes.
    # This is called _ormbases as the _bases variable was previously used
    # for a list of full class paths to bases, so we can't conflict.
    for parent in model.__bases__:
        if hasattr(parent, '_meta') and issubclass(parent, models.Model):
            if not parent._meta.abstract:
                # OK, that matches our terms.
                meta_def.setdefault('_ormbases', []).append("%s.%s" % (
                    parent._meta.app_label,
                    parent._meta.object_name,
                ))
    return meta_def
# Now, load the built-in South introspection plugins
import south.introspection_plugins
| agpl-3.0 |
daviddoria/itkHoughTransform | Wrapping/WrapITK/Languages/Python/Tests/GradientMagnitudeRecursiveGaussianImageFilter.py | 13 | 1478 | #==========================================================================
#
# Copyright Insight Software Consortium
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0.txt
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#==========================================================================*/
import itk
from sys import argv

# Report pipeline progress on the console while filters run.
itk.auto_progress(2)

# Pipeline: read a 2-D float image, compute its gradient magnitude with a
# recursive Gaussian at the given sigma, rescale to 8-bit, and write it out.
# Usage: <script> input_image output_image sigma
InputPixelType = itk.F
OutputPixelType = itk.F
InputImageType = itk.Image[InputPixelType, 2]
OutputImageType = itk.Image[OutputPixelType, 2]

reader = itk.ImageFileReader[InputImageType].New( FileName=argv[1] )
filter = itk.GradientMagnitudeRecursiveGaussianImageFilter[InputImageType, OutputImageType].New( reader, Sigma=float(argv[3]) )
filter.Update();

# Rescale the float result into [0, 255] for an unsigned-char output file.
WritePixelType = itk.UC
WriteImageType = itk.Image[WritePixelType, 2]
rescaler = itk.RescaleIntensityImageFilter[OutputImageType, WriteImageType].New( filter, OutputMinimum=0, OutputMaximum=255 )
writer = itk.ImageFileWriter[WriteImageType].New( rescaler, FileName=argv[2] )
writer.Update();
| apache-2.0 |
andela-ooladayo/django | tests/middleware/test_security.py | 291 | 7781 | from django.http import HttpResponse
from django.test import RequestFactory, SimpleTestCase
from django.test.utils import override_settings
class SecurityMiddlewareTest(SimpleTestCase):
    """Tests for django.middleware.security.SecurityMiddleware.

    Each test drives the middleware directly through process_request()/
    process_response() under different SECURE_* settings.  (Bug fixes: four
    @override_settings decorators used wrong setting names — HSTS_SECONDS,
    SECURE_CONTENT_TYPE_NO_SNIFF, BROWSER_XSS_FILTER — which the middleware
    never reads, so those tests passed vacuously against the defaults.)
    """

    @property
    def middleware(self):
        from django.middleware.security import SecurityMiddleware
        return SecurityMiddleware()

    @property
    def secure_request_kwargs(self):
        # Extra WSGI environ entries that make a RequestFactory request
        # look like it arrived over HTTPS.
        return {"wsgi.url_scheme": "https"}

    def response(self, *args, **kwargs):
        headers = kwargs.pop("headers", {})
        response = HttpResponse(*args, **kwargs)
        for k, v in headers.items():
            response[k] = v
        return response

    def process_response(self, *args, **kwargs):
        request_kwargs = {}
        if kwargs.pop("secure", False):
            request_kwargs.update(self.secure_request_kwargs)
        request = (kwargs.pop("request", None) or
                   self.request.get("/some/url", **request_kwargs))
        ret = self.middleware.process_request(request)
        if ret:
            return ret
        return self.middleware.process_response(
            request, self.response(*args, **kwargs))

    request = RequestFactory()

    def process_request(self, method, *args, **kwargs):
        if kwargs.pop("secure", False):
            kwargs.update(self.secure_request_kwargs)
        req = getattr(self.request, method.lower())(*args, **kwargs)
        return self.middleware.process_request(req)

    @override_settings(SECURE_HSTS_SECONDS=3600)
    def test_sts_on(self):
        """
        With HSTS_SECONDS=3600, the middleware adds
        "strict-transport-security: max-age=3600" to the response.
        """
        self.assertEqual(
            self.process_response(secure=True)["strict-transport-security"],
            "max-age=3600")

    @override_settings(SECURE_HSTS_SECONDS=3600)
    def test_sts_already_present(self):
        """
        The middleware will not override a "strict-transport-security" header
        already present in the response.
        """
        response = self.process_response(
            secure=True,
            headers={"strict-transport-security": "max-age=7200"})
        self.assertEqual(response["strict-transport-security"], "max-age=7200")

    # Bug fix: was HSTS_SECONDS=3600, a setting the middleware never reads.
    @override_settings(SECURE_HSTS_SECONDS=3600)
    def test_sts_only_if_secure(self):
        """
        The "strict-transport-security" header is not added to responses going
        over an insecure connection.
        """
        self.assertNotIn("strict-transport-security", self.process_response(secure=False))

    # Bug fix: was HSTS_SECONDS=0, a setting the middleware never reads.
    @override_settings(SECURE_HSTS_SECONDS=0)
    def test_sts_off(self):
        """
        With HSTS_SECONDS of 0, the middleware does not add a
        "strict-transport-security" header to the response.
        """
        self.assertNotIn("strict-transport-security", self.process_response(secure=True))

    @override_settings(
        SECURE_HSTS_SECONDS=600, SECURE_HSTS_INCLUDE_SUBDOMAINS=True)
    def test_sts_include_subdomains(self):
        """
        With HSTS_SECONDS non-zero and HSTS_INCLUDE_SUBDOMAINS
        True, the middleware adds a "strict-transport-security" header with the
        "includeSubDomains" tag to the response.
        """
        response = self.process_response(secure=True)
        self.assertEqual(
            response["strict-transport-security"],
            "max-age=600; includeSubDomains",
        )

    @override_settings(
        SECURE_HSTS_SECONDS=600, SECURE_HSTS_INCLUDE_SUBDOMAINS=False)
    def test_sts_no_include_subdomains(self):
        """
        With HSTS_SECONDS non-zero and HSTS_INCLUDE_SUBDOMAINS
        False, the middleware adds a "strict-transport-security" header without
        the "includeSubDomains" tag to the response.
        """
        response = self.process_response(secure=True)
        self.assertEqual(response["strict-transport-security"], "max-age=600")

    @override_settings(SECURE_CONTENT_TYPE_NOSNIFF=True)
    def test_content_type_on(self):
        """
        With CONTENT_TYPE_NOSNIFF set to True, the middleware adds
        "x-content-type-options: nosniff" header to the response.
        """
        self.assertEqual(self.process_response()["x-content-type-options"], "nosniff")

    # Bug fix: was SECURE_CONTENT_TYPE_NO_SNIFF (misspelled setting name).
    @override_settings(SECURE_CONTENT_TYPE_NOSNIFF=True)
    def test_content_type_already_present(self):
        """
        The middleware will not override an "x-content-type-options" header
        already present in the response.
        """
        response = self.process_response(secure=True, headers={"x-content-type-options": "foo"})
        self.assertEqual(response["x-content-type-options"], "foo")

    @override_settings(SECURE_CONTENT_TYPE_NOSNIFF=False)
    def test_content_type_off(self):
        """
        With CONTENT_TYPE_NOSNIFF False, the middleware does not add an
        "x-content-type-options" header to the response.
        """
        self.assertNotIn("x-content-type-options", self.process_response())

    @override_settings(SECURE_BROWSER_XSS_FILTER=True)
    def test_xss_filter_on(self):
        """
        With BROWSER_XSS_FILTER set to True, the middleware adds
        "x-xss-protection: 1; mode=block" header to the response.
        """
        self.assertEqual(
            self.process_response()["x-xss-protection"],
            "1; mode=block")

    @override_settings(SECURE_BROWSER_XSS_FILTER=True)
    def test_xss_filter_already_present(self):
        """
        The middleware will not override an "x-xss-protection" header
        already present in the response.
        """
        response = self.process_response(secure=True, headers={"x-xss-protection": "foo"})
        self.assertEqual(response["x-xss-protection"], "foo")

    # Bug fix: was BROWSER_XSS_FILTER=False, a setting the middleware never reads.
    @override_settings(SECURE_BROWSER_XSS_FILTER=False)
    def test_xss_filter_off(self):
        """
        With BROWSER_XSS_FILTER set to False, the middleware does not add an
        "x-xss-protection" header to the response.
        """
        self.assertNotIn("x-xss-protection", self.process_response())

    @override_settings(SECURE_SSL_REDIRECT=True)
    def test_ssl_redirect_on(self):
        """
        With SSL_REDIRECT True, the middleware redirects any non-secure
        requests to the https:// version of the same URL.
        """
        ret = self.process_request("get", "/some/url?query=string")
        self.assertEqual(ret.status_code, 301)
        self.assertEqual(
            ret["Location"], "https://testserver/some/url?query=string")

    @override_settings(SECURE_SSL_REDIRECT=True)
    def test_no_redirect_ssl(self):
        """
        The middleware does not redirect secure requests.
        """
        ret = self.process_request("get", "/some/url", secure=True)
        self.assertEqual(ret, None)

    @override_settings(
        SECURE_SSL_REDIRECT=True, SECURE_REDIRECT_EXEMPT=["^insecure/"])
    def test_redirect_exempt(self):
        """
        The middleware does not redirect requests with URL path matching an
        exempt pattern.
        """
        ret = self.process_request("get", "/insecure/page")
        self.assertEqual(ret, None)

    @override_settings(
        SECURE_SSL_REDIRECT=True, SECURE_SSL_HOST="secure.example.com")
    def test_redirect_ssl_host(self):
        """
        The middleware redirects to SSL_HOST if given.
        """
        ret = self.process_request("get", "/some/url")
        self.assertEqual(ret.status_code, 301)
        self.assertEqual(ret["Location"], "https://secure.example.com/some/url")

    @override_settings(SECURE_SSL_REDIRECT=False)
    def test_ssl_redirect_off(self):
        """
        With SSL_REDIRECT False, the middleware does no redirect.
        """
        ret = self.process_request("get", "/some/url")
        self.assertEqual(ret, None)
| bsd-3-clause |
DelazJ/QGIS | tests/src/python/test_qgseditwidgets.py | 16 | 6402 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for edit widgets.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Matthias Kuhn'
__date__ = '20/05/2015'
__copyright__ = 'Copyright 2015, The QGIS Project'
import qgis # NOQA
from qgis.core import (QgsProject, QgsFeature, QgsGeometry, QgsPointXY, QgsVectorLayer, NULL, QgsField)
from qgis.gui import QgsGui
from qgis.testing import start_app, unittest
from qgis.PyQt.QtCore import Qt, QVariant
from qgis.PyQt.QtWidgets import QTextEdit, QTableWidgetItem
start_app()
class TestQgsTextEditWidget(unittest.TestCase):
@classmethod
def setUpClass(cls):
QgsGui.editorWidgetRegistry().initEditors()
def createLayerWithOnePoint(self):
self.layer = QgsVectorLayer("Point?field=fldtxt:string&field=fldint:integer",
"addfeat", "memory")
pr = self.layer.dataProvider()
f = QgsFeature()
f.setAttributes(["test", 123])
f.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(100, 200)))
self.assertTrue(pr.addFeatures([f]))
self.assertEqual(self.layer.featureCount(), 1)
return self.layer
def doAttributeTest(self, idx, expected):
reg = QgsGui.editorWidgetRegistry()
configWdg = reg.createConfigWidget('TextEdit', self.layer, idx, None)
config = configWdg.config()
editwidget = reg.create('TextEdit', self.layer, idx, config, None, None)
editwidget.setValue('value')
self.assertEqual(editwidget.value(), expected[0])
editwidget.setValue(123)
self.assertEqual(editwidget.value(), expected[1])
editwidget.setValue(None)
self.assertEqual(editwidget.value(), expected[2])
editwidget.setValue(NULL)
self.assertEqual(editwidget.value(), expected[3])
editwidget.setValue(float('nan'))
self.assertEqual(editwidget.value(), expected[4])
def test_SetValue(self):
self.createLayerWithOnePoint()
self.doAttributeTest(0, ['value', '123', NULL, NULL, NULL])
self.doAttributeTest(1, [NULL, 123, NULL, NULL, NULL])
def testStringWithMaxLen(self):
""" tests that text edit wrappers correctly handle string fields with a maximum length """
layer = QgsVectorLayer("none?field=fldint:integer", "layer", "memory")
self.assertTrue(layer.isValid())
layer.dataProvider().addAttributes([QgsField('max', QVariant.String, 'string', 10),
QgsField('nomax', QVariant.String, 'string', 0)])
layer.updateFields()
QgsProject.instance().addMapLayer(layer)
reg = QgsGui.editorWidgetRegistry()
config = {'IsMultiline': 'True'}
# first test for field without character limit
editor = QTextEdit()
editor.setPlainText('this_is_a_long_string')
w = reg.create('TextEdit', layer, 2, config, editor, None)
self.assertEqual(w.value(), 'this_is_a_long_string')
# next test for field with character limit
editor = QTextEdit()
editor.setPlainText('this_is_a_long_string')
w = reg.create('TextEdit', layer, 1, config, editor, None)
self.assertEqual(w.value(), 'this_is_a_')
QgsProject.instance().removeAllMapLayers()
def test_indeterminate_state(self):
    """
    Test the indeterminate state for the wrapper
    """
    layer = QgsVectorLayer("none?field=fld:string", "layer", "memory")
    registry = QgsGui.editorWidgetRegistry()
    field_config = registry.createConfigWidget('TextEdit', layer, 0, None).config()
    wrapper = registry.create('TextEdit', layer, 0, field_config, None, None)
    # Start from a well-defined value ...
    wrapper.setValue('value')
    self.assertEqual(wrapper.value(), 'value')
    # ... then switching to the indeterminate state must clear both the
    # reported value and the underlying text widget.
    wrapper.showIndeterminateState()
    self.assertFalse(wrapper.value())
    self.assertFalse(wrapper.widget().toPlainText())
class TestQgsValueRelationWidget(unittest.TestCase):
    """Enable/disable behaviour of the ValueRelation edit widget wrapper."""

    def test_enableDisable(self):
        registry = QgsGui.editorWidgetRegistry()
        layer = QgsVectorLayer("none?field=number:integer", "layer", "memory")
        wrapper = registry.create('ValueRelation', layer, 0, {}, None, None)
        combo = wrapper.widget()
        # toggling the wrapper toggles the widget itself
        self.assertTrue(combo.isEnabled())
        wrapper.setEnabled(False)
        self.assertFalse(combo.isEnabled())
        wrapper.setEnabled(True)
        self.assertTrue(combo.isEnabled())

    def test_enableDisableOnTableWidget(self):
        registry = QgsGui.editorWidgetRegistry()
        layer = QgsVectorLayer("none?field=number:integer", "layer", "memory")
        wrapper = registry.create('ValueRelation', layer, 0, {'AllowMulti': 'True'}, None, None)
        table = wrapper.widget()
        table.setItem(0, 0, QTableWidgetItem('first item'))
        # Disabling does not change the state of the whole table widget, only
        # of the individual items it contains.
        wrapper.setEnabled(False)
        self.assertTrue(table.isEnabled())
        self.assertNotEqual(table.item(0, 0).flags(), table.item(0, 0).flags() | Qt.ItemIsEnabled)
        wrapper.setEnabled(True)
        self.assertTrue(table.isEnabled())
        self.assertEqual(table.item(0, 0).flags(), table.item(0, 0).flags() | Qt.ItemIsEnabled)
class TestQgsValueMapEditWidget(unittest.TestCase):
    """Config round-trip for the ValueMap editor widget."""

    # Sentinel string the ValueMap widget uses to represent NULL.
    VALUEMAP_NULL_TEXT = "{2839923C-8B7D-419E-B84B-CA2FE9B80EC7}"

    def test_ValueMap_set_get(self):
        layer = QgsVectorLayer("none?field=number:integer", "layer", "memory")
        self.assertTrue(layer.isValid())
        QgsProject.instance().addMapLayer(layer)
        config_widget = QgsGui.editorWidgetRegistry().createConfigWidget('ValueMap', layer, 0, None)
        value_map = {'map': [{'two': '2'}, {'twoandhalf': '2.5'}, {'NULL text': 'NULL'}, {'nothing': self.VALUEMAP_NULL_TEXT}]}
        # Set a configuration containing values and NULL and check if it
        # is returned intact.
        config_widget.setConfig(value_map)
        self.assertEqual(config_widget.config(), value_map)
        QgsProject.instance().removeAllMapLayers()
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    unittest.main()
| gpl-2.0 |
BrnoPCmaniak/Django_Megaparmeni | djangobb_forum/migrations/0001_initial.py | 13 | 26862 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Initial South schema migration for the djangobb_forum app.

    Creates every forum table (Category, Forum, Topic, Post, Reputation,
    Profile, PostTracking, Report, Ban, Attachment) plus the M2M join
    tables, and carries the frozen ORM state South needs to rebuild the
    models at migration time.
    """

    def forwards(self, orm):
        """Create all djangobb_forum tables, M2M tables and constraints."""
        # Adding model 'Category'
        db.create_table('djangobb_forum_category', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=80)),
            ('position', self.gf('django.db.models.fields.IntegerField')(default=0, blank=True)),
        ))
        db.send_create_signal('djangobb_forum', ['Category'])
        # Adding M2M table for field groups on 'Category'
        db.create_table('djangobb_forum_category_groups', (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('category', models.ForeignKey(orm['djangobb_forum.category'], null=False)),
            ('group', models.ForeignKey(orm['auth.group'], null=False))
        ))
        db.create_unique('djangobb_forum_category_groups', ['category_id', 'group_id'])
        # Adding model 'Forum'
        db.create_table('djangobb_forum_forum', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('category', self.gf('django.db.models.fields.related.ForeignKey')(related_name='forums', to=orm['djangobb_forum.Category'])),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=80)),
            ('position', self.gf('django.db.models.fields.IntegerField')(default=0, blank=True)),
            ('description', self.gf('django.db.models.fields.TextField')(default='', blank=True)),
            ('updated', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
            ('post_count', self.gf('django.db.models.fields.IntegerField')(default=0, blank=True)),
            ('topic_count', self.gf('django.db.models.fields.IntegerField')(default=0, blank=True)),
            ('last_post', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='last_forum_post', null=True, to=orm['djangobb_forum.Post'])),
        ))
        db.send_create_signal('djangobb_forum', ['Forum'])
        # Adding M2M table for field moderators on 'Forum'
        db.create_table('djangobb_forum_forum_moderators', (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('forum', models.ForeignKey(orm['djangobb_forum.forum'], null=False)),
            ('user', models.ForeignKey(orm['auth.user'], null=False))
        ))
        db.create_unique('djangobb_forum_forum_moderators', ['forum_id', 'user_id'])
        # Adding model 'Topic'
        db.create_table('djangobb_forum_topic', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('forum', self.gf('django.db.models.fields.related.ForeignKey')(related_name='topics', to=orm['djangobb_forum.Forum'])),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=255)),
            ('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
            ('updated', self.gf('django.db.models.fields.DateTimeField')(null=True)),
            ('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
            ('views', self.gf('django.db.models.fields.IntegerField')(default=0, blank=True)),
            ('sticky', self.gf('django.db.models.fields.BooleanField')(default=False)),
            ('closed', self.gf('django.db.models.fields.BooleanField')(default=False)),
            ('post_count', self.gf('django.db.models.fields.IntegerField')(default=0, blank=True)),
            ('last_post', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='last_topic_post', null=True, to=orm['djangobb_forum.Post'])),
        ))
        db.send_create_signal('djangobb_forum', ['Topic'])
        # Adding M2M table for field subscribers on 'Topic'
        db.create_table('djangobb_forum_topic_subscribers', (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('topic', models.ForeignKey(orm['djangobb_forum.topic'], null=False)),
            ('user', models.ForeignKey(orm['auth.user'], null=False))
        ))
        db.create_unique('djangobb_forum_topic_subscribers', ['topic_id', 'user_id'])
        # Adding model 'Post'
        db.create_table('djangobb_forum_post', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('topic', self.gf('django.db.models.fields.related.ForeignKey')(related_name='posts', to=orm['djangobb_forum.Topic'])),
            ('user', self.gf('django.db.models.fields.related.ForeignKey')(related_name='posts', to=orm['auth.User'])),
            ('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
            ('updated', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
            ('updated_by', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True, blank=True)),
            ('markup', self.gf('django.db.models.fields.CharField')(default='bbcode', max_length=15)),
            ('body', self.gf('django.db.models.fields.TextField')()),
            ('body_html', self.gf('django.db.models.fields.TextField')()),
            ('user_ip', self.gf('django.db.models.fields.IPAddressField')(max_length=15, null=True, blank=True)),
        ))
        db.send_create_signal('djangobb_forum', ['Post'])
        # Adding model 'Reputation'
        db.create_table('djangobb_forum_reputation', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('from_user', self.gf('django.db.models.fields.related.ForeignKey')(related_name='reputations_from', to=orm['auth.User'])),
            ('to_user', self.gf('django.db.models.fields.related.ForeignKey')(related_name='reputations_to', to=orm['auth.User'])),
            ('post', self.gf('django.db.models.fields.related.ForeignKey')(related_name='post', to=orm['djangobb_forum.Post'])),
            ('time', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
            ('sign', self.gf('django.db.models.fields.IntegerField')(default=0)),
            ('reason', self.gf('django.db.models.fields.TextField')(max_length=1000)),
        ))
        db.send_create_signal('djangobb_forum', ['Reputation'])
        # Adding unique constraint on 'Reputation', fields ['from_user', 'post']
        # (a user may rate a given post only once)
        db.create_unique('djangobb_forum_reputation', ['from_user_id', 'post_id'])
        # Adding model 'Profile'
        db.create_table('djangobb_forum_profile', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('user', self.gf('djangobb_forum.fields.AutoOneToOneField')(related_name='forum_profile', unique=True, to=orm['auth.User'])),
            ('status', self.gf('django.db.models.fields.CharField')(max_length=30, blank=True)),
            ('site', self.gf('django.db.models.fields.URLField')(max_length=200, blank=True)),
            ('jabber', self.gf('django.db.models.fields.CharField')(max_length=80, blank=True)),
            ('icq', self.gf('django.db.models.fields.CharField')(max_length=12, blank=True)),
            ('msn', self.gf('django.db.models.fields.CharField')(max_length=80, blank=True)),
            ('aim', self.gf('django.db.models.fields.CharField')(max_length=80, blank=True)),
            ('yahoo', self.gf('django.db.models.fields.CharField')(max_length=80, blank=True)),
            ('location', self.gf('django.db.models.fields.CharField')(max_length=30, blank=True)),
            ('signature', self.gf('django.db.models.fields.TextField')(default='', max_length=1024, blank=True)),
            ('time_zone', self.gf('django.db.models.fields.FloatField')(default=3.0)),
            ('language', self.gf('django.db.models.fields.CharField')(default='', max_length=5)),
            ('avatar', self.gf('djangobb_forum.fields.ExtendedImageField')(default='', max_length=100, blank=True)),
            ('theme', self.gf('django.db.models.fields.CharField')(default='default', max_length=80)),
            ('show_avatar', self.gf('django.db.models.fields.BooleanField')(default=True)),
            ('show_signatures', self.gf('django.db.models.fields.BooleanField')(default=True)),
            ('privacy_permission', self.gf('django.db.models.fields.IntegerField')(default=1)),
            ('markup', self.gf('django.db.models.fields.CharField')(default='bbcode', max_length=15)),
            ('post_count', self.gf('django.db.models.fields.IntegerField')(default=0, blank=True)),
        ))
        db.send_create_signal('djangobb_forum', ['Profile'])
        # Adding model 'PostTracking'
        db.create_table('djangobb_forum_posttracking', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('user', self.gf('djangobb_forum.fields.AutoOneToOneField')(to=orm['auth.User'], unique=True)),
            ('topics', self.gf('djangobb_forum.fields.JSONField')(null=True)),
            ('last_read', self.gf('django.db.models.fields.DateTimeField')(null=True)),
        ))
        db.send_create_signal('djangobb_forum', ['PostTracking'])
        # Adding model 'Report'
        db.create_table('djangobb_forum_report', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('reported_by', self.gf('django.db.models.fields.related.ForeignKey')(related_name='reported_by', to=orm['auth.User'])),
            ('post', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['djangobb_forum.Post'])),
            ('zapped', self.gf('django.db.models.fields.BooleanField')(default=False)),
            ('zapped_by', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='zapped_by', null=True, to=orm['auth.User'])),
            ('created', self.gf('django.db.models.fields.DateTimeField')(blank=True)),
            # NOTE(review): max_length here is the *string* '1000' (also in the
            # frozen model below); preserved as-is since this is a historical
            # migration and South recorded it this way.
            ('reason', self.gf('django.db.models.fields.TextField')(default='', max_length='1000', blank=True)),
        ))
        db.send_create_signal('djangobb_forum', ['Report'])
        # Adding model 'Ban'
        db.create_table('djangobb_forum_ban', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('user', self.gf('django.db.models.fields.related.OneToOneField')(related_name='ban_users', unique=True, to=orm['auth.User'])),
            ('ban_start', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)),
            ('ban_end', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
            ('reason', self.gf('django.db.models.fields.TextField')()),
        ))
        db.send_create_signal('djangobb_forum', ['Ban'])
        # Adding model 'Attachment'
        db.create_table('djangobb_forum_attachment', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('post', self.gf('django.db.models.fields.related.ForeignKey')(related_name='attachments', to=orm['djangobb_forum.Post'])),
            ('size', self.gf('django.db.models.fields.IntegerField')()),
            ('content_type', self.gf('django.db.models.fields.CharField')(max_length=255)),
            ('path', self.gf('django.db.models.fields.CharField')(max_length=255)),
            ('name', self.gf('django.db.models.fields.TextField')()),
            ('hash', self.gf('django.db.models.fields.CharField')(default='', max_length=40, db_index=True, blank=True)),
        ))
        db.send_create_signal('djangobb_forum', ['Attachment'])

    def backwards(self, orm):
        """Drop everything created by forwards(), constraints first."""
        # Removing unique constraint on 'Reputation', fields ['from_user', 'post']
        db.delete_unique('djangobb_forum_reputation', ['from_user_id', 'post_id'])
        # Deleting model 'Category'
        db.delete_table('djangobb_forum_category')
        # Removing M2M table for field groups on 'Category'
        db.delete_table('djangobb_forum_category_groups')
        # Deleting model 'Forum'
        db.delete_table('djangobb_forum_forum')
        # Removing M2M table for field moderators on 'Forum'
        db.delete_table('djangobb_forum_forum_moderators')
        # Deleting model 'Topic'
        db.delete_table('djangobb_forum_topic')
        # Removing M2M table for field subscribers on 'Topic'
        db.delete_table('djangobb_forum_topic_subscribers')
        # Deleting model 'Post'
        db.delete_table('djangobb_forum_post')
        # Deleting model 'Reputation'
        db.delete_table('djangobb_forum_reputation')
        # Deleting model 'Profile'
        db.delete_table('djangobb_forum_profile')
        # Deleting model 'PostTracking'
        db.delete_table('djangobb_forum_posttracking')
        # Deleting model 'Report'
        db.delete_table('djangobb_forum_report')
        # Deleting model 'Ban'
        db.delete_table('djangobb_forum_ban')
        # Deleting model 'Attachment'
        db.delete_table('djangobb_forum_attachment')

    # Frozen ORM state: the model definitions as they existed when this
    # migration was generated. South uses this snapshot (not the live
    # models) when executing the migration.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'djangobb_forum.attachment': {
            'Meta': {'object_name': 'Attachment'},
            'content_type': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'hash': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '40', 'db_index': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.TextField', [], {}),
            'path': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'post': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attachments'", 'to': "orm['djangobb_forum.Post']"}),
            'size': ('django.db.models.fields.IntegerField', [], {})
        },
        'djangobb_forum.ban': {
            'Meta': {'object_name': 'Ban'},
            'ban_end': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'ban_start': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'reason': ('django.db.models.fields.TextField', [], {}),
            'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'ban_users'", 'unique': 'True', 'to': "orm['auth.User']"})
        },
        'djangobb_forum.category': {
            'Meta': {'ordering': "['position']", 'object_name': 'Category'},
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['auth.Group']", 'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
            'position': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'})
        },
        'djangobb_forum.forum': {
            'Meta': {'ordering': "['position']", 'object_name': 'Forum'},
            'category': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'forums'", 'to': "orm['djangobb_forum.Category']"}),
            'description': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_post': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_forum_post'", 'null': 'True', 'to': "orm['djangobb_forum.Post']"}),
            'moderators': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
            'position': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
            'post_count': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
            'topic_count': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        'djangobb_forum.post': {
            'Meta': {'ordering': "['created']", 'object_name': 'Post'},
            'body': ('django.db.models.fields.TextField', [], {}),
            'body_html': ('django.db.models.fields.TextField', [], {}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'markup': ('django.db.models.fields.CharField', [], {'default': "'bbcode'", 'max_length': '15'}),
            'topic': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'posts'", 'to': "orm['djangobb_forum.Topic']"}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'updated_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'posts'", 'to': "orm['auth.User']"}),
            'user_ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'})
        },
        'djangobb_forum.posttracking': {
            'Meta': {'object_name': 'PostTracking'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_read': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
            'topics': ('djangobb_forum.fields.JSONField', [], {'null': 'True'}),
            'user': ('djangobb_forum.fields.AutoOneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
        },
        'djangobb_forum.profile': {
            'Meta': {'object_name': 'Profile'},
            'aim': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
            'avatar': ('djangobb_forum.fields.ExtendedImageField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
            'icq': ('django.db.models.fields.CharField', [], {'max_length': '12', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'jabber': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
            'language': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '5'}),
            'location': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'markup': ('django.db.models.fields.CharField', [], {'default': "'bbcode'", 'max_length': '15'}),
            'msn': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
            'post_count': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
            'privacy_permission': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
            'show_avatar': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'show_signatures': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'signature': ('django.db.models.fields.TextField', [], {'default': "''", 'max_length': '1024', 'blank': 'True'}),
            'site': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
            'status': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'theme': ('django.db.models.fields.CharField', [], {'default': "'default'", 'max_length': '80'}),
            'time_zone': ('django.db.models.fields.FloatField', [], {'default': '3.0'}),
            'user': ('djangobb_forum.fields.AutoOneToOneField', [], {'related_name': "'forum_profile'", 'unique': 'True', 'to': "orm['auth.User']"}),
            'yahoo': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'})
        },
        'djangobb_forum.report': {
            'Meta': {'object_name': 'Report'},
            'created': ('django.db.models.fields.DateTimeField', [], {'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'post': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['djangobb_forum.Post']"}),
            'reason': ('django.db.models.fields.TextField', [], {'default': "''", 'max_length': "'1000'", 'blank': 'True'}),
            'reported_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reported_by'", 'to': "orm['auth.User']"}),
            'zapped': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'zapped_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'zapped_by'", 'null': 'True', 'to': "orm['auth.User']"})
        },
        'djangobb_forum.reputation': {
            'Meta': {'unique_together': "(('from_user', 'post'),)", 'object_name': 'Reputation'},
            'from_user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reputations_from'", 'to': "orm['auth.User']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'post': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'post'", 'to': "orm['djangobb_forum.Post']"}),
            'reason': ('django.db.models.fields.TextField', [], {'max_length': '1000'}),
            'sign': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'to_user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reputations_to'", 'to': "orm['auth.User']"})
        },
        'djangobb_forum.topic': {
            'Meta': {'ordering': "['-updated']", 'object_name': 'Topic'},
            'closed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'forum': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'topics'", 'to': "orm['djangobb_forum.Forum']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_post': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_topic_post'", 'null': 'True', 'to': "orm['djangobb_forum.Post']"}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'post_count': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
            'sticky': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'subscribers': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'subscriptions'", 'blank': 'True', 'to': "orm['auth.User']"}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
            'views': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'})
        }
    }

    complete_apps = ['djangobb_forum']
| bsd-3-clause |
gohin/django | django/db/models/lookups.py | 194 | 16328 | import inspect
from copy import copy
from django.utils.functional import cached_property
from django.utils.six.moves import range
from .query_utils import QueryWrapper
class RegisterLookupMixin(object):
    """Mixin giving a class (fields, transforms) a per-class registry of
    lookups and transforms, resolvable through the inheritance chain."""

    def _get_lookup(self, lookup_name):
        """Return whatever is registered under *lookup_name*, or None."""
        try:
            return self.class_lookups[lookup_name]
        except AttributeError:
            # No class in the hierarchy defines class_lookups at all.
            return None
        except KeyError:
            pass
        # Not found via normal attribute resolution; walk the full MRO so
        # registrations living on any ancestor class are honoured too.
        for klass in inspect.getmro(self.__class__):
            registry = klass.__dict__.get('class_lookups')
            if registry is not None and lookup_name in registry:
                return registry[lookup_name]
        return None

    def get_lookup(self, lookup_name):
        """Resolve *lookup_name* to a Lookup subclass, or None."""
        candidate = self._get_lookup(lookup_name)
        if candidate is None:
            if hasattr(self, 'output_field'):
                return self.output_field.get_lookup(lookup_name)
            return None
        return candidate if issubclass(candidate, Lookup) else None

    def get_transform(self, lookup_name):
        """Resolve *lookup_name* to a Transform subclass, or None."""
        candidate = self._get_lookup(lookup_name)
        if candidate is None:
            if hasattr(self, 'output_field'):
                return self.output_field.get_transform(lookup_name)
            return None
        return candidate if issubclass(candidate, Transform) else None

    @classmethod
    def register_lookup(cls, lookup):
        """Register *lookup* under its lookup_name on this exact class."""
        if 'class_lookups' not in cls.__dict__:
            # Give this class its own dict instead of mutating a parent's.
            cls.class_lookups = {}
        cls.class_lookups[lookup.lookup_name] = lookup
        return lookup

    @classmethod
    def _unregister_lookup(cls, lookup):
        """
        Removes given lookup from cls lookups. Meant to be used in
        tests only.
        """
        del cls.class_lookups[lookup.lookup_name]
class Transform(RegisterLookupMixin):
    """Base class for transforms applied to a query expression (the lhs)."""

    # True when the transform must also be applied to rhs values in lookups.
    bilateral = False

    def __init__(self, lhs, lookups):
        self.lhs = lhs
        # Keep a private copy so later mutation of the caller's list is harmless.
        self.init_lookups = lookups[:]

    def as_sql(self, compiler, connection):
        # Subclasses are responsible for the SQL rendering.
        raise NotImplementedError

    @cached_property
    def output_field(self):
        # By default a transform preserves the type of its source expression.
        return self.lhs.output_field

    def copy(self):
        return copy(self)

    def relabeled_clone(self, relabels):
        clone = self.copy()
        clone.lhs = self.lhs.relabeled_clone(relabels)
        return clone

    def get_group_by_cols(self):
        return self.lhs.get_group_by_cols()

    def get_bilateral_transforms(self):
        """Collect bilateral transforms from the lhs chain, plus this one."""
        getter = getattr(self.lhs, 'get_bilateral_transforms', None)
        chain = getter() if getter is not None else []
        if self.bilateral:
            chain.append((self.__class__, self.init_lookups))
        return chain

    @cached_property
    def contains_aggregate(self):
        return self.lhs.contains_aggregate
class Lookup(RegisterLookupMixin):
    """Base class for query lookups: an lhs (expression/column), an rhs
    (value or subquery) and a lookup_name joining them in SQL."""

    # Subclasses set this to the name used in filter(field__<lookup_name>=...).
    lookup_name = None

    def __init__(self, lhs, rhs):
        self.lhs, self.rhs = lhs, rhs
        # Prepare the rhs immediately via the lhs output field.
        self.rhs = self.get_prep_lookup()
        if hasattr(self.lhs, 'get_bilateral_transforms'):
            bilateral_transforms = self.lhs.get_bilateral_transforms()
        else:
            bilateral_transforms = []
        if bilateral_transforms:
            # We should warn the user as soon as possible if he is trying to apply
            # a bilateral transformation on a nested QuerySet: that won't work.
            # We need to import QuerySet here so as to avoid circular
            from django.db.models.query import QuerySet
            if isinstance(rhs, QuerySet):
                raise NotImplementedError("Bilateral transformations on nested querysets are not supported.")
        self.bilateral_transforms = bilateral_transforms

    def apply_bilateral_transforms(self, value):
        """Wrap *value* in every stored bilateral transform, innermost first."""
        for transform, lookups in self.bilateral_transforms:
            value = transform(value, lookups)
        return value

    def batch_process_rhs(self, compiler, connection, rhs=None):
        """Prepare an iterable rhs, returning parallel (sqls, params) lists."""
        if rhs is None:
            rhs = self.rhs
        if self.bilateral_transforms:
            # Each value must be individually wrapped in the transforms and
            # compiled, since the transform changes the SQL per value.
            sqls, sqls_params = [], []
            for p in rhs:
                value = QueryWrapper('%s',
                    [self.lhs.output_field.get_db_prep_value(p, connection)])
                value = self.apply_bilateral_transforms(value)
                sql, sql_params = compiler.compile(value)
                sqls.append(sql)
                sqls_params.extend(sql_params)
        else:
            # No transforms: one '%s' placeholder per prepared parameter.
            params = self.lhs.output_field.get_db_prep_lookup(
                self.lookup_name, rhs, connection, prepared=True)
            sqls, sqls_params = ['%s'] * len(params), params
        return sqls, sqls_params

    def get_prep_lookup(self):
        """Python-level preparation of the rhs (no connection involved)."""
        return self.lhs.output_field.get_prep_lookup(self.lookup_name, self.rhs)

    def get_db_prep_lookup(self, value, connection):
        """Database-level preparation: placeholder plus prepared params."""
        return (
            '%s', self.lhs.output_field.get_db_prep_lookup(
                self.lookup_name, value, connection, prepared=True))

    def process_lhs(self, compiler, connection, lhs=None):
        """Compile the lhs expression to (sql, params)."""
        lhs = lhs or self.lhs
        return compiler.compile(lhs)

    def process_rhs(self, compiler, connection):
        """Compile the rhs to (sql, params), handling direct values,
        compilable expressions, Query objects and legacy _as_sql objects."""
        value = self.rhs
        if self.bilateral_transforms:
            if self.rhs_is_direct_value():
                # Do not call get_db_prep_lookup here as the value will be
                # transformed before being used for lookup
                value = QueryWrapper("%s",
                    [self.lhs.output_field.get_db_prep_value(value, connection)])
            value = self.apply_bilateral_transforms(value)
        # Due to historical reasons there are a couple of different
        # ways to produce sql here. get_compiler is likely a Query
        # instance, _as_sql QuerySet and as_sql just something with
        # as_sql. Finally the value can of course be just plain
        # Python value.
        if hasattr(value, 'get_compiler'):
            value = value.get_compiler(connection=connection)
        if hasattr(value, 'as_sql'):
            sql, params = compiler.compile(value)
            return '(' + sql + ')', params
        if hasattr(value, '_as_sql'):
            sql, params = value._as_sql(connection=connection)
            return '(' + sql + ')', params
        else:
            return self.get_db_prep_lookup(value, connection)

    def rhs_is_direct_value(self):
        """True when rhs is a plain Python value, not a compilable object."""
        return not(
            hasattr(self.rhs, 'as_sql') or
            hasattr(self.rhs, '_as_sql') or
            hasattr(self.rhs, 'get_compiler'))

    def relabeled_clone(self, relabels):
        """Return a copy with table aliases remapped on both sides."""
        new = copy(self)
        new.lhs = new.lhs.relabeled_clone(relabels)
        if hasattr(new.rhs, 'relabeled_clone'):
            new.rhs = new.rhs.relabeled_clone(relabels)
        return new

    def get_group_by_cols(self):
        """Columns contributed to GROUP BY by both sides of the lookup."""
        cols = self.lhs.get_group_by_cols()
        if hasattr(self.rhs, 'get_group_by_cols'):
            cols.extend(self.rhs.get_group_by_cols())
        return cols

    def as_sql(self, compiler, connection):
        # Subclasses must render the final SQL.
        raise NotImplementedError

    @cached_property
    def contains_aggregate(self):
        return self.lhs.contains_aggregate or getattr(self.rhs, 'contains_aggregate', False)
class BuiltinLookup(Lookup):
    """Lookup rendered as '<lhs> <operator rhs>' using the backend's
    connection.operators table keyed by lookup_name."""

    def process_lhs(self, compiler, connection, lhs=None):
        sql, params = super(BuiltinLookup, self).process_lhs(
            compiler, connection, lhs)
        field_type = self.lhs.output_field.get_internal_type()
        db_type = self.lhs.output_field.db_type(connection=connection)
        # Let the backend cast the column for its db type first, then wrap
        # it with any lookup-specific cast.
        sql = connection.ops.field_cast_sql(db_type, field_type) % sql
        sql = connection.ops.lookup_cast(self.lookup_name, field_type) % sql
        return sql, params

    def as_sql(self, compiler, connection):
        lhs_sql, params = self.process_lhs(compiler, connection)
        rhs_sql, rhs_params = self.process_rhs(compiler, connection)
        params.extend(rhs_params)
        return '%s %s' % (lhs_sql, self.get_rhs_op(connection, rhs_sql)), params

    def get_rhs_op(self, connection, rhs):
        # e.g. operators['gte'] == '>= %s' -> '>= <rhs_sql>'
        return connection.operators[self.lookup_name] % rhs
# Registry mapping lookup_name -> Lookup subclass; each class below
# registers itself here immediately after its definition.
default_lookups = {}
class Exact(BuiltinLookup):
    lookup_name = 'exact'
default_lookups['exact'] = Exact
class IExact(BuiltinLookup):
    """Case-insensitive exact match."""
    lookup_name = 'iexact'
    def process_rhs(self, qn, connection):
        rhs, params = super(IExact, self).process_rhs(qn, connection)
        if params:
            # Backend-specific case-folding of the compared value; only
            # applies to direct values (params empty for subqueries).
            params[0] = connection.ops.prep_for_iexact_query(params[0])
        return rhs, params
default_lookups['iexact'] = IExact
# Simple comparison lookups: all behavior lives in BuiltinLookup, the
# operator text is selected from connection.operators by lookup_name.
class GreaterThan(BuiltinLookup):
    lookup_name = 'gt'
default_lookups['gt'] = GreaterThan
class GreaterThanOrEqual(BuiltinLookup):
    lookup_name = 'gte'
default_lookups['gte'] = GreaterThanOrEqual
class LessThan(BuiltinLookup):
    lookup_name = 'lt'
default_lookups['lt'] = LessThan
class LessThanOrEqual(BuiltinLookup):
    lookup_name = 'lte'
default_lookups['lte'] = LessThanOrEqual
class In(BuiltinLookup):
    """SQL ``IN`` lookup. Oversized value lists are split into OR-ed
    chunks for backends (Oracle) that cap the IN-list size."""
    lookup_name = 'in'
    def process_rhs(self, compiler, connection):
        if self.rhs_is_direct_value():
            # rhs should be an iterable, we use batch_process_rhs
            # to prepare/transform those values
            rhs = list(self.rhs)
            if not rhs:
                # "col IN ()" can never match anything; short-circuit
                # the whole query instead of emitting invalid SQL.
                from django.db.models.sql.datastructures import EmptyResultSet
                raise EmptyResultSet
            sqls, sqls_params = self.batch_process_rhs(compiler, connection, rhs)
            placeholder = '(' + ', '.join(sqls) + ')'
            return (placeholder, sqls_params)
        else:
            return super(In, self).process_rhs(compiler, connection)
    def get_rhs_op(self, connection, rhs):
        return 'IN %s' % rhs
    def as_sql(self, compiler, connection):
        max_in_list_size = connection.ops.max_in_list_size()
        if self.rhs_is_direct_value() and (max_in_list_size and
                len(self.rhs) > max_in_list_size):
            # This is a special case for Oracle which limits the number of elements
            # which can appear in an 'IN' clause.
            lhs, lhs_params = self.process_lhs(compiler, connection)
            rhs, rhs_params = self.batch_process_rhs(compiler, connection)
            # Emit (lhs IN (...) OR lhs IN (...) OR ...), one group per chunk.
            in_clause_elements = ['(']
            params = []
            for offset in range(0, len(rhs_params), max_in_list_size):
                if offset > 0:
                    in_clause_elements.append(' OR ')
                in_clause_elements.append('%s IN (' % lhs)
                params.extend(lhs_params)
                sqls = rhs[offset: offset + max_in_list_size]
                sqls_params = rhs_params[offset: offset + max_in_list_size]
                param_group = ', '.join(sqls)
                in_clause_elements.append(param_group)
                in_clause_elements.append(')')
                params.extend(sqls_params)
            in_clause_elements.append(')')
            return ''.join(in_clause_elements), params
        else:
            return super(In, self).as_sql(compiler, connection)
default_lookups['in'] = In
class PatternLookup(BuiltinLookup):
    """Base class for LIKE-style lookups (contains/startswith/endswith)."""
    def get_rhs_op(self, connection, rhs):
        # Assume we are in startswith. We need to produce SQL like:
        #     col LIKE %s, ['thevalue%']
        # For python values we can (and should) do that directly in Python,
        # but if the value is for example reference to other column, then
        # we need to add the % pattern match to the lookup by something like
        #     col LIKE othercol || '%%'
        # So, for Python values we don't need any special pattern, but for
        # SQL reference values or SQL transformations we need the correct
        # pattern added.
        if (hasattr(self.rhs, 'get_compiler') or hasattr(self.rhs, 'as_sql')
                or hasattr(self.rhs, '_as_sql') or self.bilateral_transforms):
            pattern = connection.pattern_ops[self.lookup_name].format(connection.pattern_esc)
            return pattern.format(rhs)
        else:
            return super(PatternLookup, self).get_rhs_op(connection, rhs)
class Contains(PatternLookup):
    """Substring match: LIKE '%value%'."""
    lookup_name = 'contains'
    def process_rhs(self, qn, connection):
        rhs, params = super(Contains, self).process_rhs(qn, connection)
        if params and not self.bilateral_transforms:
            # Escape LIKE wildcards inside the value, then wrap it in %...%.
            params[0] = "%%%s%%" % connection.ops.prep_for_like_query(params[0])
        return rhs, params
default_lookups['contains'] = Contains
class IContains(Contains):
    # Case-insensitive variant; the backend applies the case-folding via
    # lookup_cast/operators selected by lookup_name.
    lookup_name = 'icontains'
default_lookups['icontains'] = IContains
# Prefix/suffix LIKE lookups. Each one escapes the value's LIKE wildcards
# and attaches '%' on the appropriate side; the i-variants get their
# case-insensitivity from the backend via lookup_name.
class StartsWith(PatternLookup):
    lookup_name = 'startswith'
    def process_rhs(self, qn, connection):
        rhs, params = super(StartsWith, self).process_rhs(qn, connection)
        if params and not self.bilateral_transforms:
            params[0] = "%s%%" % connection.ops.prep_for_like_query(params[0])
        return rhs, params
default_lookups['startswith'] = StartsWith
class IStartsWith(PatternLookup):
    lookup_name = 'istartswith'
    def process_rhs(self, qn, connection):
        rhs, params = super(IStartsWith, self).process_rhs(qn, connection)
        if params and not self.bilateral_transforms:
            params[0] = "%s%%" % connection.ops.prep_for_like_query(params[0])
        return rhs, params
default_lookups['istartswith'] = IStartsWith
class EndsWith(PatternLookup):
    lookup_name = 'endswith'
    def process_rhs(self, qn, connection):
        rhs, params = super(EndsWith, self).process_rhs(qn, connection)
        if params and not self.bilateral_transforms:
            params[0] = "%%%s" % connection.ops.prep_for_like_query(params[0])
        return rhs, params
default_lookups['endswith'] = EndsWith
class IEndsWith(PatternLookup):
    lookup_name = 'iendswith'
    def process_rhs(self, qn, connection):
        rhs, params = super(IEndsWith, self).process_rhs(qn, connection)
        if params and not self.bilateral_transforms:
            params[0] = "%%%s" % connection.ops.prep_for_like_query(params[0])
        return rhs, params
default_lookups['iendswith'] = IEndsWith
class Between(BuiltinLookup):
    """BETWEEN lookup over a two-element rhs.

    NOTE(review): this class has no ``lookup_name`` and is never registered
    in ``default_lookups`` (dead code as shipped). The previous body did
    ``"BETWEEN %s AND %s" % (rhs, rhs)`` — interpolating the *same* single
    placeholder string for both bounds, so only one parameter was ever
    bound. Mirror ``Range``: batch-process the rhs into a pair and use one
    placeholder per bound.
    """
    def get_rhs_op(self, connection, rhs):
        # rhs is a pair of placeholder strings from batch_process_rhs().
        return "BETWEEN %s AND %s" % (rhs[0], rhs[1])

    def process_rhs(self, compiler, connection):
        if self.rhs_is_direct_value():
            # rhs should be an iterable of exactly two values.
            return self.batch_process_rhs(compiler, connection)
        else:
            return super(Between, self).process_rhs(compiler, connection)
class Range(BuiltinLookup):
    """BETWEEN lookup: rhs is a two-element (low, high) iterable."""
    lookup_name = 'range'
    def get_rhs_op(self, connection, rhs):
        # rhs here is a pair of placeholder strings, one per bound.
        return "BETWEEN %s AND %s" % (rhs[0], rhs[1])
    def process_rhs(self, compiler, connection):
        if self.rhs_is_direct_value():
            # rhs should be an iterable of 2 values, we use batch_process_rhs
            # to prepare/transform those values
            return self.batch_process_rhs(compiler, connection)
        else:
            return super(Range, self).process_rhs(compiler, connection)
default_lookups['range'] = Range
class IsNull(BuiltinLookup):
    """Implements ``__isnull``; the rhs boolean selects IS NULL (truthy)
    versus IS NOT NULL (falsy)."""
    lookup_name = 'isnull'

    def as_sql(self, compiler, connection):
        lhs_sql, params = compiler.compile(self.lhs)
        template = "%s IS NULL" if self.rhs else "%s IS NOT NULL"
        return template % lhs_sql, params
default_lookups['isnull'] = IsNull
class Search(BuiltinLookup):
    """Backend-specific full-text search; the whole SQL fragment comes
    from ``connection.ops.fulltext_search_sql``."""
    lookup_name = 'search'
    def as_sql(self, compiler, connection):
        lhs, lhs_params = self.process_lhs(compiler, connection)
        rhs, rhs_params = self.process_rhs(compiler, connection)
        sql_template = connection.ops.fulltext_search_sql(field_name=lhs)
        return sql_template, lhs_params + rhs_params
default_lookups['search'] = Search
class Regex(BuiltinLookup):
    """Regular-expression match. Uses the backend's native operator when
    one is registered in connection.operators, otherwise falls back to
    the ops.regex_lookup() SQL template."""
    lookup_name = 'regex'
    def as_sql(self, compiler, connection):
        if self.lookup_name in connection.operators:
            return super(Regex, self).as_sql(compiler, connection)
        else:
            lhs, lhs_params = self.process_lhs(compiler, connection)
            rhs, rhs_params = self.process_rhs(compiler, connection)
            sql_template = connection.ops.regex_lookup(self.lookup_name)
            return sql_template % (lhs, rhs), lhs_params + rhs_params
default_lookups['regex'] = Regex
class IRegex(Regex):
    # Case-insensitive variant, distinguished only by lookup_name.
    lookup_name = 'iregex'
default_lookups['iregex'] = IRegex
| bsd-3-clause |
Kangmo/bitcoin | qa/rpc-tests/mempool_reorg.py | 5 | 4397 | #!/usr/bin/env python2
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test re-org scenarios with a mempool that contains transactions
# that spend (directly or indirectly) coinbase transactions.
#
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
# create_tx (imported from test_framework.util above) builds a
# one-input, one-output, no-fee transaction used throughout this test.
class MempoolCoinbaseTest(BitcoinTestFramework):
    """Exercise mempool handling of coinbase spends (direct and indirect)
    and of a height-locked transaction across invalidateblock re-orgs."""
    alert_filename = None  # Set by setup_network
    def setup_network(self):
        # -checkmempool makes bitcoind self-verify mempool consistency.
        args = ["-checkmempool", "-debug=mempool"]
        self.nodes = []
        self.nodes.append(start_node(0, self.options.tmpdir, args))
        self.nodes.append(start_node(1, self.options.tmpdir, args))
        connect_nodes(self.nodes[1], 0)
        self.is_network_split = False
        self.sync_all()
    def run_test(self):
        # NOTE(review): start_count is unused below.
        start_count = self.nodes[0].getblockcount()
        # Mine four blocks. After this, the coinbases of blocks
        # 101, 102, 103 and 104 are spend-able (100-deep maturity).
        # (The original comment said "three blocks ... 101, 102, and 103",
        # which did not match generate(4) / range(101, 105) below.)
        new_blocks = self.nodes[1].generate(4)
        self.sync_all()
        node0_address = self.nodes[0].getnewaddress()
        node1_address = self.nodes[1].getnewaddress()
        # Three scenarios for re-orging coinbase spends in the memory pool:
        # 1. Direct coinbase spend : spend_101
        # 2. Indirect (coinbase spend in chain, child in mempool) : spend_102 and spend_102_1
        # 3. Indirect (coinbase and child both in chain) : spend_103 and spend_103_1
        # Use invalidatblock to make all of the above coinbase spends invalid (immature coinbase),
        # and make sure the mempool code behaves correctly.
        b = [ self.nodes[0].getblockhash(n) for n in range(101, 105) ]
        coinbase_txids = [ self.nodes[0].getblock(h)['tx'][0] for h in b ]
        spend_101_raw = create_tx(self.nodes[0], coinbase_txids[1], node1_address, 49.99)
        spend_102_raw = create_tx(self.nodes[0], coinbase_txids[2], node0_address, 49.99)
        spend_103_raw = create_tx(self.nodes[0], coinbase_txids[3], node0_address, 49.99)
        # Create a block-height-locked transaction which will be invalid after reorg
        timelock_tx = self.nodes[0].createrawtransaction([{"txid": coinbase_txids[0], "vout": 0}], {node0_address: 49.99})
        # Set the time lock: make the input non-final so nLockTime applies,
        # then splice the target height into the nLockTime field.
        timelock_tx = timelock_tx.replace("ffffffff", "11111111", 1)
        timelock_tx = timelock_tx[:-8] + hex(self.nodes[0].getblockcount() + 2)[2:] + "000000"
        timelock_tx = self.nodes[0].signrawtransaction(timelock_tx)["hex"]
        # Too early to broadcast: the lock height is not yet reached.
        assert_raises(JSONRPCException, self.nodes[0].sendrawtransaction, timelock_tx)
        # Broadcast and mine spend_102 and 103:
        spend_102_id = self.nodes[0].sendrawtransaction(spend_102_raw)
        spend_103_id = self.nodes[0].sendrawtransaction(spend_103_raw)
        self.nodes[0].generate(1)
        # Still one block short of the lock height.
        assert_raises(JSONRPCException, self.nodes[0].sendrawtransaction, timelock_tx)
        # Create 102_1 and 103_1:
        spend_102_1_raw = create_tx(self.nodes[0], spend_102_id, node1_address, 49.98)
        spend_103_1_raw = create_tx(self.nodes[0], spend_103_id, node1_address, 49.98)
        # Broadcast and mine 103_1:
        spend_103_1_id = self.nodes[0].sendrawtransaction(spend_103_1_raw)
        last_block = self.nodes[0].generate(1)
        timelock_tx_id = self.nodes[0].sendrawtransaction(timelock_tx)
        # ... now put spend_101 and spend_102_1 in memory pools:
        spend_101_id = self.nodes[0].sendrawtransaction(spend_101_raw)
        spend_102_1_id = self.nodes[0].sendrawtransaction(spend_102_1_raw)
        self.sync_all()
        assert_equal(set(self.nodes[0].getrawmempool()), {spend_101_id, spend_102_1_id, timelock_tx_id})
        for node in self.nodes:
            node.invalidateblock(last_block[0])
        # After a one-block reorg: 103_1 returns to the mempool, the
        # timelock tx becomes premature again and is dropped.
        assert_equal(set(self.nodes[0].getrawmempool()), {spend_101_id, spend_102_1_id, spend_103_1_id})
        # Use invalidateblock to re-org back and make all those coinbase spends
        # immature/invalid:
        for node in self.nodes:
            node.invalidateblock(new_blocks[0])
        self.sync_all()
        # mempool should be empty.
        assert_equal(set(self.nodes[0].getrawmempool()), set())
if __name__ == '__main__':
    # Standard test-runner entry point.
    MempoolCoinbaseTest().main()
| mit |
chiefspace/udemy-rest-api | udemy_rest_api_section4/env/lib/python3.4/site-packages/jinja2/constants.py | 220 | 1626 | # -*- coding: utf-8 -*-
"""
jinja.constants
~~~~~~~~~~~~~~~
Various constants.
:copyright: (c) 2017 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
#: list of lorem ipsum words used by the lipsum() helper function
LOREM_IPSUM_WORDS = u'''\
a ac accumsan ad adipiscing aenean aliquam aliquet amet ante aptent arcu at
auctor augue bibendum blandit class commodo condimentum congue consectetuer
consequat conubia convallis cras cubilia cum curabitur curae cursus dapibus
diam dictum dictumst dignissim dis dolor donec dui duis egestas eget eleifend
elementum elit enim erat eros est et etiam eu euismod facilisi facilisis fames
faucibus felis fermentum feugiat fringilla fusce gravida habitant habitasse hac
hendrerit hymenaeos iaculis id imperdiet in inceptos integer interdum ipsum
justo lacinia lacus laoreet lectus leo libero ligula litora lobortis lorem
luctus maecenas magna magnis malesuada massa mattis mauris metus mi molestie
mollis montes morbi mus nam nascetur natoque nec neque netus nibh nisi nisl non
nonummy nostra nulla nullam nunc odio orci ornare parturient pede pellentesque
penatibus per pharetra phasellus placerat platea porta porttitor posuere
potenti praesent pretium primis proin pulvinar purus quam quis quisque rhoncus
ridiculus risus rutrum sagittis sapien scelerisque sed sem semper senectus sit
sociis sociosqu sodales sollicitudin suscipit suspendisse taciti tellus tempor
tempus tincidunt torquent tortor tristique turpis ullamcorper ultrices
ultricies urna ut varius vehicula vel velit venenatis vestibulum vitae vivamus
viverra volutpat vulputate'''
| gpl-2.0 |
glatard/nipype | nipype/interfaces/freesurfer/tests/test_auto_MRISPreproc.py | 9 | 2314 | # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from nipype.testing import assert_equal
from nipype.interfaces.freesurfer.model import MRISPreproc
def test_MRISPreproc_inputs():
input_map = dict(args=dict(argstr='%s',
),
environ=dict(nohash=True,
usedefault=True,
),
fsgd_file=dict(argstr='--fsgd %s',
xor=('subjects', 'fsgd_file', 'subject_file'),
),
fwhm=dict(argstr='--fwhm %f',
xor=['num_iters'],
),
fwhm_source=dict(argstr='--fwhm-src %f',
xor=['num_iters_source'],
),
hemi=dict(argstr='--hemi %s',
mandatory=True,
),
ignore_exception=dict(nohash=True,
usedefault=True,
),
num_iters=dict(argstr='--niters %d',
xor=['fwhm'],
),
num_iters_source=dict(argstr='--niterssrc %d',
xor=['fwhm_source'],
),
out_file=dict(argstr='--out %s',
genfile=True,
),
proj_frac=dict(argstr='--projfrac %s',
),
smooth_cortex_only=dict(argstr='--smooth-cortex-only',
),
source_format=dict(argstr='--srcfmt %s',
),
subject_file=dict(argstr='--f %s',
xor=('subjects', 'fsgd_file', 'subject_file'),
),
subjects=dict(argstr='--s %s...',
xor=('subjects', 'fsgd_file', 'subject_file'),
),
subjects_dir=dict(),
surf_area=dict(argstr='--area %s',
xor=('surf_measure', 'surf_measure_file', 'surf_area'),
),
surf_dir=dict(argstr='--surfdir %s',
),
surf_measure=dict(argstr='--meas %s',
xor=('surf_measure', 'surf_measure_file', 'surf_area'),
),
surf_measure_file=dict(argstr='--is %s...',
xor=('surf_measure', 'surf_measure_file', 'surf_area'),
),
target=dict(argstr='--target %s',
mandatory=True,
),
terminal_output=dict(nohash=True,
),
vol_measure_file=dict(argstr='--iv %s %s...',
),
)
inputs = MRISPreproc.input_spec()
for key, metadata in input_map.items():
for metakey, value in metadata.items():
yield assert_equal, getattr(inputs.traits()[key], metakey), value
def test_MRISPreproc_outputs():
output_map = dict(out_file=dict(),
)
outputs = MRISPreproc.output_spec()
for key, metadata in output_map.items():
for metakey, value in metadata.items():
yield assert_equal, getattr(outputs.traits()[key], metakey), value
| bsd-3-clause |
BackupGGCode/python-for-android | python-build/python-libs/xmpppy/xmpp/session.py | 199 | 16899 | ##
## XMPP server
##
## Copyright (C) 2004 Alexey "Snake" Nezhdanov
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2, or (at your option)
## any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
__version__="$Id"
"""
When your handler is called it is getting the session instance as the first argument.
This is the difference from xmpppy 0.1 where you got the "Client" instance.
With Session class you can have "multi-session" client instead of having
one client for each connection. Is is specifically important when you are
writing the server.
"""
from protocol import *
# Transport-level flags
SOCKET_UNCONNECTED =0
SOCKET_ALIVE =1
SOCKET_DEAD =2
# XML-level flags
STREAM__NOT_OPENED =1
STREAM__OPENED =2
STREAM__CLOSING =3
STREAM__CLOSED =4
# XMPP-session flags
SESSION_NOT_AUTHED =1
SESSION_AUTHED =2
SESSION_BOUND =3
SESSION_OPENED =4
SESSION_CLOSED =5
class Session:
"""
The Session class instance is used for storing all session-related info like
credentials, socket/xml stream/session state flags, roster items (in case of
client type connection) etc.
Session object have no means of discovering is any info is ready to be read.
Instead you should use poll() (recomended) or select() methods for this purpose.
Session can be one of two types: 'server' and 'client'. 'server' session handles
inbound connection and 'client' one used to create an outbound one.
Session instance have multitude of internal attributes. The most imporant is the 'peer' one.
It is set once the peer is authenticated (client).
"""
def __init__(self,socket,owner,xmlns=None,peer=None):
""" When the session is created it's type (client/server) is determined from the beginning.
socket argument is the pre-created socket-like object.
It must have the following methods: send, recv, fileno, close.
owner is the 'master' instance that have Dispatcher plugged into it and generally
will take care about all session events.
xmlns is the stream namespace that will be used. Client must set this argument
If server sets this argument than stream will be dropped if opened with some another namespace.
peer is the name of peer instance. This is the flag that differentiates client session from
server session. Client must set it to the name of the server that will be connected, server must
leave this argument alone.
"""
self.xmlns=xmlns
if peer:
self.TYP='client'
self.peer=peer
self._socket_state=SOCKET_UNCONNECTED
else:
self.TYP='server'
self.peer=None
self._socket_state=SOCKET_ALIVE
self._sock=socket
self._send=socket.send
self._recv=socket.recv
self.fileno=socket.fileno
self._registered=0
self.Dispatcher=owner.Dispatcher
self.DBG_LINE='session'
self.DEBUG=owner.Dispatcher.DEBUG
self._expected={}
self._owner=owner
if self.TYP=='server': self.ID=`random.random()`[2:]
else: self.ID=None
self.sendbuffer=''
self._stream_pos_queued=None
self._stream_pos_sent=0
self.deliver_key_queue=[]
self.deliver_queue_map={}
self.stanza_queue=[]
self._session_state=SESSION_NOT_AUTHED
self.waiting_features=[]
for feature in [NS_TLS,NS_SASL,NS_BIND,NS_SESSION]:
if feature in owner.features: self.waiting_features.append(feature)
self.features=[]
self.feature_in_process=None
self.slave_session=None
self.StartStream()
def StartStream(self):
""" This method is used to initialise the internal xml expat parser
and to send initial stream header (in case of client connection).
Should be used after initial connection and after every stream restart."""
self._stream_state=STREAM__NOT_OPENED
self.Stream=simplexml.NodeBuilder()
self.Stream._dispatch_depth=2
self.Stream.dispatch=self._dispatch
self.Parse=self.Stream.Parse
self.Stream.stream_footer_received=self._stream_close
if self.TYP=='client':
self.Stream.stream_header_received=self._catch_stream_id
self._stream_open()
else:
self.Stream.stream_header_received=self._stream_open
def receive(self):
""" Reads all pending incoming data.
Raises IOError on disconnection.
Blocks until at least one byte is read."""
try: received = self._recv(10240)
except: received = ''
if len(received): # length of 0 means disconnect
self.DEBUG(`self.fileno()`+' '+received,'got')
else:
self.DEBUG('Socket error while receiving data','error')
self.set_socket_state(SOCKET_DEAD)
raise IOError("Peer disconnected")
return received
def sendnow(self,chunk):
""" Put chunk into "immidiatedly send" queue.
Should only be used for auth/TLS stuff and like.
If you just want to shedule regular stanza for delivery use enqueue method.
"""
if isinstance(chunk,Node): chunk = chunk.__str__().encode('utf-8')
elif type(chunk)==type(u''): chunk = chunk.encode('utf-8')
self.enqueue(chunk)
def enqueue(self,stanza):
""" Takes Protocol instance as argument.
Puts stanza into "send" fifo queue. Items into the send queue are hold until
stream authenticated. After that this method is effectively the same as "sendnow" method."""
if isinstance(stanza,Protocol):
self.stanza_queue.append(stanza)
else: self.sendbuffer+=stanza
if self._socket_state>=SOCKET_ALIVE: self.push_queue()
def push_queue(self,failreason=ERR_RECIPIENT_UNAVAILABLE):
""" If stream is authenticated than move items from "send" queue to "immidiatedly send" queue.
Else if the stream is failed then return all queued stanzas with error passed as argument.
Otherwise do nothing."""
# If the stream authed - convert stanza_queue into sendbuffer and set the checkpoints
if self._stream_state>=STREAM__CLOSED or self._socket_state>=SOCKET_DEAD: # the stream failed. Return all stanzas that are still waiting for delivery.
self._owner.deactivatesession(self)
for key in self.deliver_key_queue: # Not sure. May be I
self._dispatch(Error(self.deliver_queue_map[key],failreason),trusted=1) # should simply re-dispatch it?
for stanza in self.stanza_queue: # But such action can invoke
self._dispatch(Error(stanza,failreason),trusted=1) # Infinite loops in case of S2S connection...
self.deliver_queue_map,self.deliver_key_queue,self.stanza_queue={},[],[]
return
elif self._session_state>=SESSION_AUTHED: # FIXME! äÏÌÖÅÎ ÂÙÔØ ËÁËÏÊ-ÔÏ ÄÒÕÇÏÊ ÆÌÁÇ.
#### LOCK_QUEUE
for stanza in self.stanza_queue:
txt=stanza.__str__().encode('utf-8')
self.sendbuffer+=txt
self._stream_pos_queued+=len(txt) # should be re-evaluated for SSL connection.
self.deliver_queue_map[self._stream_pos_queued]=stanza # position of the stream when stanza will be successfully and fully sent
self.deliver_key_queue.append(self._stream_pos_queued)
self.stanza_queue=[]
#### UNLOCK_QUEUE
def flush_queue(self):
""" Put the "immidiatedly send" queue content on the wire. Blocks until at least one byte sent."""
if self.sendbuffer:
try:
# LOCK_QUEUE
sent=self._send(self.sendbuffer) # âÌÏËÉÒÕÀÝÁÑ ÛÔÕÞËÁ!
except:
# UNLOCK_QUEUE
self.set_socket_state(SOCKET_DEAD)
self.DEBUG("Socket error while sending data",'error')
return self.terminate_stream()
self.DEBUG(`self.fileno()`+' '+self.sendbuffer[:sent],'sent')
self._stream_pos_sent+=sent
self.sendbuffer=self.sendbuffer[sent:]
self._stream_pos_delivered=self._stream_pos_sent # Should be acquired from socket somehow. Take SSL into account.
while self.deliver_key_queue and self._stream_pos_delivered>self.deliver_key_queue[0]:
del self.deliver_queue_map[self.deliver_key_queue[0]]
self.deliver_key_queue.remove(self.deliver_key_queue[0])
# UNLOCK_QUEUE
def _dispatch(self,stanza,trusted=0):
""" This is callback that is used to pass the received stanza forth to owner's dispatcher
_if_ the stream is authorised. Otherwise the stanza is just dropped.
The 'trusted' argument is used to emulate stanza receive.
This method is used internally.
"""
self._owner.packets+=1
if self._stream_state==STREAM__OPENED or trusted: # if the server really should reject all stanzas after he is closed stream (himeself)?
self.DEBUG(stanza.__str__(),'dispatch')
stanza.trusted=trusted
return self.Dispatcher.dispatch(stanza,self)
def _catch_stream_id(self,ns=None,tag='stream',attrs={}):
""" This callback is used to detect the stream namespace of incoming stream. Used internally. """
if not attrs.has_key('id') or not attrs['id']:
return self.terminate_stream(STREAM_INVALID_XML)
self.ID=attrs['id']
if not attrs.has_key('version'): self._owner.Dialback(self)
def _stream_open(self,ns=None,tag='stream',attrs={}):
""" This callback is used to handle opening stream tag of the incoming stream.
In the case of client session it just make some validation.
Server session also sends server headers and if the stream valid the features node.
Used internally. """
text='<?xml version="1.0" encoding="utf-8"?>\n<stream:stream'
if self.TYP=='client':
text+=' to="%s"'%self.peer
else:
text+=' id="%s"'%self.ID
if not attrs.has_key('to'): text+=' from="%s"'%self._owner.servernames[0]
else: text+=' from="%s"'%attrs['to']
if attrs.has_key('xml:lang'): text+=' xml:lang="%s"'%attrs['xml:lang']
if self.xmlns: xmlns=self.xmlns
else: xmlns=NS_SERVER
text+=' xmlns:db="%s" xmlns:stream="%s" xmlns="%s"'%(NS_DIALBACK,NS_STREAMS,xmlns)
if attrs.has_key('version') or self.TYP=='client': text+=' version="1.0"'
self.sendnow(text+'>')
self.set_stream_state(STREAM__OPENED)
if self.TYP=='client': return
if tag<>'stream': return self.terminate_stream(STREAM_INVALID_XML)
if ns<>NS_STREAMS: return self.terminate_stream(STREAM_INVALID_NAMESPACE)
if self.Stream.xmlns<>self.xmlns: return self.terminate_stream(STREAM_BAD_NAMESPACE_PREFIX)
if not attrs.has_key('to'): return self.terminate_stream(STREAM_IMPROPER_ADDRESSING)
if attrs['to'] not in self._owner.servernames: return self.terminate_stream(STREAM_HOST_UNKNOWN)
self.ourname=attrs['to'].lower()
if self.TYP=='server' and attrs.has_key('version'):
# send features
features=Node('stream:features')
if NS_TLS in self.waiting_features:
features.NT.starttls.setNamespace(NS_TLS)
features.T.starttls.NT.required
if NS_SASL in self.waiting_features:
features.NT.mechanisms.setNamespace(NS_SASL)
for mec in self._owner.SASL.mechanisms:
features.T.mechanisms.NT.mechanism=mec
else:
if NS_BIND in self.waiting_features: features.NT.bind.setNamespace(NS_BIND)
if NS_SESSION in self.waiting_features: features.NT.session.setNamespace(NS_SESSION)
self.sendnow(features)
    def feature(self,feature):
        """ Declare some stream feature as activated one. """
        # Record the feature once, then drop it from the waiting list.
        if feature not in self.features: self.features.append(feature)
        self.unfeature(feature)
    def unfeature(self,feature):
        """ Declare some feature as illegal. Illegal features can not be used.
            Example: BIND feature becomes illegal after Non-SASL auth. """
        if feature in self.waiting_features: self.waiting_features.remove(feature)
def _stream_close(self,unregister=1):
""" Write the closing stream tag and destroy the underlaying socket. Used internally. """
if self._stream_state>=STREAM__CLOSED: return
self.set_stream_state(STREAM__CLOSING)
self.sendnow('</stream:stream>')
self.set_stream_state(STREAM__CLOSED)
self.push_queue() # decompose queue really since STREAM__CLOSED
self._owner.flush_queues()
if unregister: self._owner.unregistersession(self)
self._destroy_socket()
def terminate_stream(self,error=None,unregister=1):
""" Notify the peer about stream closure.
Ensure that xmlstream is not brokes - i.e. if the stream isn't opened yet -
open it before closure.
If the error condition is specified than create a stream error and send it along with
closing stream tag.
Emulate receiving 'unavailable' type presence just before stream closure.
"""
if self._stream_state>=STREAM__CLOSING: return
if self._stream_state<STREAM__OPENED:
self.set_stream_state(STREAM__CLOSING)
self._stream_open()
else:
self.set_stream_state(STREAM__CLOSING)
p=Presence(typ='unavailable')
p.setNamespace(NS_CLIENT)
self._dispatch(p,trusted=1)
if error:
if isinstance(error,Node): self.sendnow(error)
else: self.sendnow(ErrorNode(error))
self._stream_close(unregister=unregister)
if self.slave_session:
self.slave_session.terminate_stream(STREAM_REMOTE_CONNECTION_FAILED)
def _destroy_socket(self):
""" Break cyclic dependancies to let python's GC free memory right now."""
self.Stream.dispatch=None
self.Stream.stream_footer_received=None
self.Stream.stream_header_received=None
self.Stream.destroy()
self._sock.close()
self.set_socket_state(SOCKET_DEAD)
def start_feature(self,f):
""" Declare some feature as "negotiating now" to prevent other features from start negotiating. """
if self.feature_in_process: raise "Starting feature %s over %s !"%(f,self.feature_in_process)
self.feature_in_process=f
def stop_feature(self,f):
""" Declare some feature as "negotiated" to allow other features start negotiating. """
if self.feature_in_process<>f: raise "Stopping feature %s instead of %s !"%(f,self.feature_in_process)
self.feature_in_process=None
def set_socket_state(self,newstate):
""" Change the underlaying socket state.
Socket starts with SOCKET_UNCONNECTED state
and then proceeds (possibly) to SOCKET_ALIVE
and then to SOCKET_DEAD """
if self._socket_state<newstate: self._socket_state=newstate
    def set_session_state(self,newstate):
        """ Change the session state.
            Session starts with SESSION_NOT_AUTHED state
            and then comes through
            SESSION_AUTHED, SESSION_BOUND, SESSION_OPENED and SESSION_CLOSED states.
        """
        if self._session_state<newstate:
            # On the NOT_AUTHED -> AUTHED transition, checkpoint the stream
            # position so stanzas queued afterwards can be tracked for
            # delivery confirmation (see push_queue/flush_queue).
            if self._session_state<SESSION_AUTHED and \
               newstate>=SESSION_AUTHED: self._stream_pos_queued=self._stream_pos_sent
            self._session_state=newstate
def set_stream_state(self,newstate):
""" Change the underlaying XML stream state
Stream starts with STREAM__NOT_OPENED and then proceeds with
STREAM__OPENED, STREAM__CLOSING and STREAM__CLOSED states.
Note that some features (like TLS and SASL)
requires stream re-start so this state can have non-linear changes. """
if self._stream_state<newstate: self._stream_state=newstate
| apache-2.0 |
FlorianLudwig/odoo | addons/report/controllers/main.py | 210 | 6943 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2014-Today OpenERP SA (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.addons.web.http import Controller, route, request
from openerp.addons.web.controllers.main import _serialize_exception
from openerp.osv import osv
from openerp.tools import html_escape
import simplejson
from werkzeug import exceptions, url_decode
from werkzeug.test import Client
from werkzeug.wrappers import BaseResponse
from werkzeug.datastructures import Headers
from reportlab.graphics.barcode import createBarcodeDrawing
class ReportController(Controller):
#------------------------------------------------------
# Report controllers
#------------------------------------------------------
    @route([
        '/report/<path:converter>/<reportname>',
        '/report/<path:converter>/<reportname>/<docids>',
    ], type='http', auth='user', website=True)
    def report_routes(self, reportname, docids=None, converter=None, **data):
        """Render *reportname* for the given records as HTML or PDF.

        :param reportname: technical name of the report
        :param docids: comma-separated record ids from the URL (optional)
        :param converter: 'html' or 'pdf' (path segment)
        :param data: may carry JSON-encoded 'options' and 'context' args
        """
        report_obj = request.registry['report']
        cr, uid, context = request.cr, request.uid, request.context
        if docids:
            docids = [int(i) for i in docids.split(',')]
        options_data = None
        if data.get('options'):
            options_data = simplejson.loads(data['options'])
        if data.get('context'):
            # Ignore 'lang' here, because the context in data is the one from the webclient *but* if
            # the user explicitely wants to change the lang, this mechanism overwrites it.
            data_context = simplejson.loads(data['context'])
            if data_context.get('lang'):
                del data_context['lang']
            context.update(data_context)
        if converter == 'html':
            html = report_obj.get_html(cr, uid, docids, reportname, data=options_data, context=context)
            return request.make_response(html)
        elif converter == 'pdf':
            pdf = report_obj.get_pdf(cr, uid, docids, reportname, data=options_data, context=context)
            pdfhttpheaders = [('Content-Type', 'application/pdf'), ('Content-Length', len(pdf))]
            return request.make_response(pdf, headers=pdfhttpheaders)
        else:
            raise exceptions.HTTPException(description='Converter %s not implemented.' % converter)
#------------------------------------------------------
# Misc. route utils
#------------------------------------------------------
@route(['/report/barcode', '/report/barcode/<type>/<path:value>'], type='http', auth="user")
def report_barcode(self, type, value, width=600, height=100, humanreadable=0):
"""Contoller able to render barcode images thanks to reportlab.
Samples:
<img t-att-src="'/report/barcode/QR/%s' % o.name"/>
<img t-att-src="'/report/barcode/?type=%s&value=%s&width=%s&height=%s' %
('QR', o.name, 200, 200)"/>
:param type: Accepted types: 'Codabar', 'Code11', 'Code128', 'EAN13', 'EAN8', 'Extended39',
'Extended93', 'FIM', 'I2of5', 'MSI', 'POSTNET', 'QR', 'Standard39', 'Standard93',
'UPCA', 'USPS_4State'
:param humanreadable: Accepted values: 0 (default) or 1. 1 will insert the readable value
at the bottom of the output image
"""
try:
width, height, humanreadable = int(width), int(height), bool(humanreadable)
barcode = createBarcodeDrawing(
type, value=value, format='png', width=width, height=height,
humanReadable = humanreadable
)
barcode = barcode.asString('png')
except (ValueError, AttributeError):
raise exceptions.HTTPException(description='Cannot convert into barcode.')
return request.make_response(barcode, headers=[('Content-Type', 'image/png')])
@route(['/report/download'], type='http', auth="user")
def report_download(self, data, token):
"""This function is used by 'qwebactionmanager.js' in order to trigger the download of
a pdf/controller report.
:param data: a javascript array JSON.stringified containg report internal url ([0]) and
type [1]
:returns: Response with a filetoken cookie and an attachment header
"""
requestcontent = simplejson.loads(data)
url, type = requestcontent[0], requestcontent[1]
try:
if type == 'qweb-pdf':
reportname = url.split('/report/pdf/')[1].split('?')[0]
docids = None
if '/' in reportname:
reportname, docids = reportname.split('/')
if docids:
# Generic report:
response = self.report_routes(reportname, docids=docids, converter='pdf')
else:
# Particular report:
data = url_decode(url.split('?')[1]).items() # decoding the args represented in JSON
response = self.report_routes(reportname, converter='pdf', **dict(data))
response.headers.add('Content-Disposition', 'attachment; filename=%s.pdf;' % reportname)
response.set_cookie('fileToken', token)
return response
elif type =='controller':
reqheaders = Headers(request.httprequest.headers)
response = Client(request.httprequest.app, BaseResponse).get(url, headers=reqheaders, follow_redirects=True)
response.set_cookie('fileToken', token)
return response
else:
return
except Exception, e:
se = _serialize_exception(e)
error = {
'code': 200,
'message': "Odoo Server Error",
'data': se
}
return request.make_response(html_escape(simplejson.dumps(error)))
@route(['/report/check_wkhtmltopdf'], type='json', auth="user")
def check_wkhtmltopdf(self):
return request.registry['report']._check_wkhtmltopdf()
| agpl-3.0 |
350dotorg/Django | django/contrib/flatpages/tests/csrf.py | 47 | 3425 | import os
from django.conf import settings
from django.test import TestCase, Client
class FlatpageCSRFTests(TestCase):
    """Flatpage serving with CSRF checks enforced (enforce_csrf_checks=True).

    Uses the non-deprecated ``assertEqual`` (``assertEquals`` is a
    deprecated alias in unittest).
    """
    fixtures = ['sample_flatpages']
    urls = 'django.contrib.flatpages.tests.urls'

    def setUp(self):
        self.client = Client(enforce_csrf_checks=True)
        # Patch settings in place; everything saved here is restored in
        # tearDown so other tests are unaffected.
        self.old_MIDDLEWARE_CLASSES = settings.MIDDLEWARE_CLASSES
        flatpage_middleware_class = 'django.contrib.flatpages.middleware.FlatpageFallbackMiddleware'
        csrf_middleware_class = 'django.middleware.csrf.CsrfViewMiddleware'
        if csrf_middleware_class not in settings.MIDDLEWARE_CLASSES:
            settings.MIDDLEWARE_CLASSES += (csrf_middleware_class,)
        if flatpage_middleware_class not in settings.MIDDLEWARE_CLASSES:
            settings.MIDDLEWARE_CLASSES += (flatpage_middleware_class,)
        self.old_TEMPLATE_DIRS = settings.TEMPLATE_DIRS
        settings.TEMPLATE_DIRS = (
            os.path.join(
                os.path.dirname(__file__),
                'templates'
            ),
        )
        self.old_LOGIN_URL = settings.LOGIN_URL
        settings.LOGIN_URL = '/accounts/login/'

    def tearDown(self):
        settings.MIDDLEWARE_CLASSES = self.old_MIDDLEWARE_CLASSES
        settings.TEMPLATE_DIRS = self.old_TEMPLATE_DIRS
        settings.LOGIN_URL = self.old_LOGIN_URL

    def test_view_flatpage(self):
        "A flatpage can be served through a view, even when the middleware is in use"
        response = self.client.get('/flatpage_root/flatpage/')
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, "<p>Isn't it flat!</p>")

    def test_view_non_existent_flatpage(self):
        "A non-existent flatpage raises 404 when served through a view, even when the middleware is in use"
        response = self.client.get('/flatpage_root/no_such_flatpage/')
        self.assertEqual(response.status_code, 404)

    def test_view_authenticated_flatpage(self):
        "A flatpage served through a view can require authentication"
        response = self.client.get('/flatpage_root/sekrit/')
        self.assertRedirects(response, '/accounts/login/?next=/flatpage_root/sekrit/')

    def test_fallback_flatpage(self):
        "A flatpage can be served by the fallback middlware"
        response = self.client.get('/flatpage/')
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, "<p>Isn't it flat!</p>")

    def test_fallback_non_existent_flatpage(self):
        "A non-existent flatpage raises a 404 when served by the fallback middlware"
        response = self.client.get('/no_such_flatpage/')
        self.assertEqual(response.status_code, 404)

    def test_post_view_flatpage(self):
        "POSTing to a flatpage served through a view will raise a CSRF error if no token is provided (Refs #14156)"
        response = self.client.post('/flatpage_root/flatpage/')
        self.assertEqual(response.status_code, 403)

    def test_post_fallback_flatpage(self):
        "POSTing to a flatpage served by the middleware will raise a CSRF error if no token is provided (Refs #14156)"
        response = self.client.post('/flatpage/')
        self.assertEqual(response.status_code, 403)

    def test_post_unknown_page(self):
        "POSTing to an unknown page isn't caught as a 403 CSRF error"
        response = self.client.post('/no_such_page/')
        self.assertEqual(response.status_code, 404)
| bsd-3-clause |
mobarski/sandbox | rsm/v4.py | 2 | 5658 | from common2 import *
# NAME IDEA -> pooling/random/sparse/distributed hebbian/horde/crowd/fragment/sample memory
# FEATURES:
# + boost -- neurons with empty mem slots learn faster
# + noise --
# + dropout -- temporal disabling of neurons
# + decay -- remove from mem
# + negatives -- learning to avoid detecting some patterns
# + fatigue -- winner has lower score for some time
# - sklearn -- compatible api
# - prune -- if input < mem shrink mem ? (problem with m > input len)
# IDEA:
# - popularity -- most popular neuron is cloned / killed
# NEXT VERSION:
# - layers -- rsm stacking
# NEXT VERSION:
# - attention
# - https://towardsdatascience.com/the-fall-of-rnn-lstm-2d1594c74ce0
# - https://towardsdatascience.com/memory-attention-sequences-37456d271992
# NEXT VERSION:
# - numpy -- faster version
# - cython -- faster version
# - gpu -- faster version
# - distributed
class rsm:
    def __init__(self, n, m):
        """Random Sample Memory

        n -- number of neurons
        m -- max connections per neuron (memory)
        """
        self.N = n
        self.M = m
        self.mem = {j: set() for j in range(n)}    # neuron -> remembered input ids
        self.win = {j: 0 for j in range(n)}        # neuron -> win count (popularity)
        self.tow = {j: -42000 for j in range(n)}   # neuron -> time of last win (fatigue)
        self.t = 0                                 # learning step counter

    # ---[ core ]---------------------------------------------------------------

    # TODO -- input length vs mem length
    def scores(self, input, boost=False, noise=False, fatigue=0, dropout=0.0):
        """Score every neuron against the input; returns dict[neuron] -> score.

        input -- sparse binary features (set of active feature ids)
        boost -- improve scores based on number of unconnected synapses
        noise -- randomize scores to prevent snowballing
        fatigue -- scale down scores of neurons that won recently
        dropout -- temporal disabling of a fraction of the neurons
        """
        mem = self.mem
        tow = self.tow
        N = self.N
        M = self.M
        t = self.t
        scores = {}
        for j in mem:
            scores[j] = len(input & mem[j])
        if noise:
            for j in mem:
                scores[j] += 0.9 * random()
        if boost:
            # Neurons with free memory slots get a head start so they learn
            # faster than already-full neurons.
            for j in mem:
                scores[j] += 1 + 2 * (M - len(mem[j])) if len(mem[j]) < M else 0
        if fatigue:
            for j in mem:
                dt = 1.0 * min(fatigue, t - tow[j])
                factor = dt / fatigue
                scores[j] *= factor
        if dropout:
            k = int(round(float(dropout) * N))
            for j in combinations(N, k):
                scores[j] = -1
        return scores

    def learn(self, input, k, decay=0.0, dropout=0.0, fatigue=0,
              negative=False, boost=True, noise=True):
        """Update the memory of the k winning neurons with the input.

        input -- sparse binary features (set of active feature ids)
        k -- number of winning neurons
        negative -- unlearn instead of learn (avoid detecting this pattern)
        decay -- probability of dropping a remembered feature absent from input
        """
        mem = self.mem
        win = self.win
        tow = self.tow
        M = self.M
        t = self.t
        known_inputs = set()
        for j in mem:
            known_inputs.update(mem[j])
        scores = self.scores(input, boost=boost, noise=noise, dropout=dropout, fatigue=fatigue)
        winners = top(k, scores)
        for j in winners:
            # negative learning
            if negative:
                mem[j].difference_update(input)
                continue
            # positive learning: absorb features no neuron knows yet
            unknown_inputs = input - known_inputs
            mem[j].update(pick(unknown_inputs, M - len(mem[j])))
            known_inputs.update(mem[j])
            # handle decay
            if decay:
                decay_candidates = mem[j] - input
                if decay_candidates:
                    for d in decay_candidates:
                        if random() < decay:
                            mem[j].remove(d)
            # handle popularity
            win[j] += 1
            # handle fatigue
            tow[j] = t
        self.t += 1

    # ---[ auxiliary ]----------------------------------------------------------

    def fit(self, X, Y, k=1):
        """Online training; samples with a falsy label are learned negatively.

        BUGFIX: the original called learn() without its mandatory `k`
        argument, raising TypeError on every call; `k` is now forwarded
        (default 1, backward-compatible signature).
        """
        for x, y in zip(X, Y):
            negative = not y
            self.learn(x, k, negative=negative)

    def score_many(self, X, k=1, method=1):
        """Score each input in X; returns a list of scalar scores."""
        return [self.score_one(x, k, method) for x in X]

    def transform(self, X, k=1, method=1, cutoff=0.5):
        """Binarize the scores of X against the cutoff threshold."""
        return [1 if s >= cutoff else 0 for s in self.score_many(X, k, method)]

    def confusion(self, X, Y, k=1, method=1, cutoff=0.5):
        """Confusion-matrix counts of predictions against labels Y."""
        PY = self.transform(X, k, method, cutoff)
        p = n = tp = tn = fp = fn = 0
        for y, py in zip(Y, PY):
            if y:
                p += 1
                if py: tp += 1
                else: fn += 1
            else:
                n += 1
                if py: fp += 1
                else: tn += 1
        return dict(p=p, n=n, tp=tp, tn=tn, fp=fp, fn=fn)

    def score(self, X, Y, k=1, method=1, cutoff=0.5, kind='acc'):
        """Classification quality metric: 'acc', 'f1', 'prec', 'sens' or 'spec'."""
        c = self.confusion(X, Y, k, method, cutoff)
        p = float(c['p'])
        n = float(c['n'])
        tp = float(c['tp'])
        tn = float(c['tn'])
        fp = float(c['fp'])
        fn = float(c['fn'])
        if kind == 'f1':
            return (2 * tp) / (2 * tp + fp + fn)
        elif kind == 'acc':
            return (tp + tn) / (p + n)
        elif kind == 'prec':
            return tp / (tp + fp)
        elif kind == 'sens':
            return tp / (tp + fn)
        elif kind == 'spec':
            return tn / (tn + fp)

    def score_one(self, input, k=1, method=1):
        "aggregate scores to scalar"
        scores = self.scores(input)
        # top() is evaluated once; only one branch executed in the original
        # anyway, so behavior is unchanged. `if` normalized to `elif`.
        top_k = top(k, scores, values=True)
        if method == 0:
            return top_k
        elif method == 1:
            return 1.0 * sum(top_k) / (k * (self.M + 1))
        elif method == 2:
            return min(1.0, 1.0 * sum(top_k) / (k * self.M))
        elif method == 3:
            return 1.0 * min(top_k) / (self.M + 1)
        elif method == 4:
            return min(1.0, 1.0 * min(top_k) / self.M)
        elif method == 5:
            return 1.0 * max(top_k) / (self.M + 1)
        elif method == 6:
            return min(1.0, 1.0 * max(top_k) / self.M)

    def stats(self, prefix=''):
        """Aggregate memory statistics, optionally filtered by name prefix.

        BUGFIX: the original read `self.vol`, an attribute that is never
        initialized anywhere in the class (instant AttributeError), and the
        resulting value was unused; the access has been removed.
        """
        mem_v = self.mem.values()
        out = {}
        out['m_empty'] = sum([1.0 if len(x) == 0 else 0.0 for x in mem_v]) / self.N
        out['m_not_empty'] = sum([1.0 if len(x) > 0 else 0.0 for x in mem_v]) / self.N
        out['m_full'] = sum([1.0 if len(x) == self.M else 0.0 for x in mem_v]) / self.N
        out['m_avg'] = sum([1.0 * len(x) for x in mem_v]) / (self.N * self.M)
        return {k: v for k, v in out.items() if k.startswith(prefix)}
| mit |
blzr/enigma2 | lib/python/Screens/Opkg.py | 3 | 3749 | from Components.ActionMap import ActionMap
from Components.Opkg import OpkgComponent
from Components.Label import Label
from Components.Slider import Slider
from Screens.Screen import Screen
from Screens.MessageBox import MessageBox
from enigma import eTimer
class Opkg(Screen):
def __init__(self, session, cmdList=[]):
Screen.__init__(self, session)
self.cmdList = cmdList
self.sliderPackages = {}
self.slider = Slider(0, len(cmdList))
self["slider"] = self.slider
self.activityslider = Slider(0, 100)
self["activityslider"] = self.activityslider
self.status = Label(_("Preparing... Please wait"))
self["status"] = self.status
self.package = Label()
self["package"] = self.package
self.packages = 0
self.error = 0
self.processed_packages = []
self.activity = 0
self.activityTimer = eTimer()
self.activityTimer.callback.append(self.doActivityTimer)
#self.activityTimer.start(100, False)
self.opkg = OpkgComponent()
self.opkg.addCallback(self.opkgCallback)
self.runningCmd = None
self.runNextCmd()
self["actions"] = ActionMap(["WizardActions"],
{
"ok": self.exit,
"back": self.exit
}, -1)
def runNextCmd(self):
if self.runningCmd is None:
self.runningCmd = 0
else:
self.runningCmd += 1
print len(self.cmdList), self.runningCmd
if len(self.cmdList) - 1 < self.runningCmd:
self.activityslider.setValue(0)
self.slider.setValue(len(self.cmdList))
self.package.setText("")
self.status.setText(ngettext("Done - Installed, updated or removed %d package (%s)", "Done - Installed, updated or removed %d packages (%s)", self.packages) % (self.packages, ngettext("with %d error", "with %d errors", self.error) % self.error))
return False
else:
cmd = self.cmdList[self.runningCmd]
self.slider.setValue(self.runningCmd)
self.opkg.startCmd(cmd[0], args=cmd[1])
self.startActivityTimer()
def doActivityTimer(self):
if not self.opkg.isRunning():
self.stopActivityTimer()
else:
self.activity += 1
if self.activity == 100:
self.activity = 0
self.activityslider.setValue(self.activity)
def startActivityTimer(self):
self.activityTimer.start(100, False)
def stopActivityTimer(self):
self.activityTimer.stop()
def opkgCallback(self, event, param):
if event == OpkgComponent.EVENT_DOWNLOAD:
self.status.setText(_("Downloading"))
elif event == OpkgComponent.EVENT_UPGRADE:
if param in self.sliderPackages:
self.slider.setValue(self.sliderPackages[param])
self.package.setText(param)
self.status.setText(_("Updating"))
if not param in self.processed_packages:
self.processed_packages.append(param)
self.packages += 1
elif event == OpkgComponent.EVENT_INSTALL:
self.package.setText(param)
self.status.setText(_("Installing"))
if not param in self.processed_packages:
self.processed_packages.append(param)
self.packages += 1
elif event == OpkgComponent.EVENT_REMOVE:
self.package.setText(param)
self.status.setText(_("Removing"))
if not param in self.processed_packages:
self.processed_packages.append(param)
self.packages += 1
elif event == OpkgComponent.EVENT_CONFIGURING:
self.package.setText(param)
self.status.setText(_("Configuring"))
elif event == OpkgComponent.EVENT_ERROR:
self.error += 1
elif event == OpkgComponent.EVENT_DONE:
self.runNextCmd()
elif event == OpkgComponent.EVENT_MODIFIED:
self.session.openWithCallback(
self.modificationCallback,
MessageBox,
_("A configuration file (%s) was modified since Installation.\nDo you want to keep your version?") % (param)
)
def modificationCallback(self, res):
self.opkg.write(res and "N" or "Y")
def exit(self):
if not self.opkg.isRunning():
self.close()
| gpl-2.0 |
theflofly/tensorflow | tensorflow/python/autograph/converters/side_effect_guards.py | 6 | 7527 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Adds guards against function calls with side effects.
Only standalone calls are guarded.
WARNING: This mechanism is incomplete. Particularly, it only guards the
arguments passed to functions, and does not account for indirectly modified
state.
Example:
y = tf.layers.dense(x) # Creates TF variable 'foo'
loss = loss(y)
opt.minimize(loss) # indirectly affects 'foo'
z = tf.get_variable('foo') # Indirectly affects `loss` and 'foo'
# Here, `loss` can be guarded. But `z` cannot.
# TODO(mdan): We should probably define a safe mode where we guard everything.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import gast
from tensorflow.python.autograph.core import converter
from tensorflow.python.autograph.pyct import anno
from tensorflow.python.autograph.pyct import ast_util
from tensorflow.python.autograph.pyct import qual_names
from tensorflow.python.autograph.pyct import templates
from tensorflow.python.autograph.pyct.static_analysis.annos import NodeAnno
class SymbolNamer(object):
  """Interface expected of the namer used by SideEffectGuardTransformer."""

  def new_symbol(self, name_root, reserved_locals):
    """Create a fresh, unique symbol name.

    Args:
      name_root: String, stem from which the new name is derived.
      reserved_locals: Set(string), extra local symbols that must be avoided.

    Returns:
      String.
    """
    raise NotImplementedError()
class SideEffectGuardTransformer(converter.Base):
  """Adds control dependencies to functions with side effects."""

  def _visit_and_reindent(self, nodes):
    # Visits each statement in `nodes`. When a visited statement carries the
    # INDENT_BLOCK_REMAINDER annotation (set by visit_Expr below), every
    # subsequent statement is redirected into that statement's body so it is
    # gated by the inserted control dependency; `alias_map` renames the
    # guarded symbols in those statements.
    new_nodes = []
    current_dest = new_nodes
    alias_map = {}
    reindent_requested = False
    for n in nodes:
      n = self.visit(n)
      # NOTE: the order in which these statements execute is important; in
      # particular, watch out for ending up with cycles in the AST.
      if alias_map:
        n = ast_util.rename_symbols(n, alias_map)
      if isinstance(n, (list, tuple)):
        current_dest.extend(n)
      else:
        current_dest.append(n)
      if anno.hasanno(n, anno.Basic.INDENT_BLOCK_REMAINDER):
        reindent_requested = True
        new_dest, new_alias_map = anno.getanno(
            n, anno.Basic.INDENT_BLOCK_REMAINDER)
        anno.delanno(n, anno.Basic.INDENT_BLOCK_REMAINDER)
        # Later guards extend (and take precedence over) earlier renamings.
        new_alias_map.update(alias_map)
        alias_map = new_alias_map
        current_dest = new_dest
    if reindent_requested:
      # A guard is pointless when nothing follows it; treat an empty
      # remainder or a lone trivial `return` as having nothing to gate.
      no_controls_to_gate = False
      if not current_dest:
        no_controls_to_gate = True
      if len(current_dest) == 1:
        if ast_util.matches(current_dest[0], 'return'):
          no_controls_to_gate = True
        if ast_util.matches(current_dest[0], 'return ()'):
          no_controls_to_gate = True
        if ast_util.matches(current_dest[0], 'return []'):
          no_controls_to_gate = True
        if ast_util.matches(current_dest[0], 'return {}'):
          no_controls_to_gate = True
      if no_controls_to_gate:
        # TODO(mdan): There may still be something that could be done.
        raise ValueError(
            'Unable to insert statement into the computation flow: it is not'
            ' followed by any computation which the statement could gate.')
    return new_nodes

  def visit_FunctionDef(self, node):
    # Guard statements inside the function body.
    node.body = self._visit_and_reindent(node.body)
    return node

  def visit_With(self, node):
    node.body = self._visit_and_reindent(node.body)
    return node

  def visit_If(self, node):
    # Both branches are processed independently.
    node.body = self._visit_and_reindent(node.body)
    node.orelse = self._visit_and_reindent(node.orelse)
    return node

  def visit_While(self, node):
    node.body = self._visit_and_reindent(node.body)
    node.orelse = self._visit_and_reindent(node.orelse)
    return node

  # TODO(b/123995141) Remove once ExceptionHandlers are in the CFG
  def visit_ExceptHandler(self, node):
    return node

  def visit_Expr(self, node):
    self.generic_visit(node)
    if isinstance(node.value, gast.Call):
      # Patterns of single function calls, like:
      #   opt.minimize(loss)
      # or:
      #   tf.py_func(...)

      # First, attempt to gate future evaluation of args. If that's not
      # possible, gate all remaining statements (and that may fail too, see
      # _visit_and_reindent).
      args_scope = anno.getanno(node.value, NodeAnno.ARGS_SCOPE)
      live_out = anno.getanno(node, anno.Static.LIVE_VARS_OUT)
      # NOTE: We can't guard object attributes because they may not be writable.
      # In addition, avoid renaming well-known names.
      # TODO(mdan): Move these names into config.
      unguarded_names = (qual_names.QN('self'), qual_names.QN('ag__'))
      guarded_args = tuple(s for s in live_out
                           if not s.is_composite() and s not in unguarded_names)
      # TODO(mdan): Include all arguments which depended on guarded_args too.
      # For example, the following will still cause a race:
      #   tf.assign(a, a + 1)
      #   b = a + 1
      #   tf.assign(a, a + 1)  # Control deps here should include `b`
      #   c = b + 1
      # Or maybe we should just raise an "unsafe assign" error?
      if guarded_args:
        # The aliases may need new names to avoid incorrectly making them local.
        # TODO(mdan): This is brutal. It will even rename modules - any fix?
        need_alias = tuple(
            s for s in guarded_args if s not in args_scope.parent.modified)
        aliased_new_names = tuple(
            qual_names.QN(
                self.ctx.namer.new_symbol(
                    s.ssf(), args_scope.parent.referenced)) for s in need_alias)
        alias_map = dict(zip(need_alias, aliased_new_names))
        if len(guarded_args) == 1:
          s, = guarded_args
          aliased_guarded_args = alias_map.get(s, s)
        else:
          aliased_guarded_args = gast.Tuple(
              [alias_map.get(s, s).ast() for s in guarded_args], None)
        # Rebind the guarded symbols under the control dependency so later
        # reads see the gated tensors.
        template = """
          with ag__.utils.control_dependency_on_returns(call):
            aliased_guarded_args = ag__.utils.alias_tensors(guarded_args)
        """
        control_deps_guard = templates.replace(
            template,
            call=node.value,
            aliased_guarded_args=aliased_guarded_args,
            guarded_args=guarded_args)[-1]
      else:
        alias_map = {}
        # Nothing to alias: emit an empty `with` block whose body will be
        # filled with the remaining statements by _visit_and_reindent.
        template = """
          with ag__.utils.control_dependency_on_returns(call):
            pass
        """
        control_deps_guard = templates.replace(template, call=node.value)[-1]
        control_deps_guard.body = []
      node = control_deps_guard
      # Request that the following statements be moved into this guard.
      anno.setanno(node, anno.Basic.INDENT_BLOCK_REMAINDER,
                   (node.body, alias_map))
    return node
def transform(node, ctx):
  """Applies the side effect guard conversion to an AST."""
  guard_transformer = SideEffectGuardTransformer(ctx)
  return guard_transformer.visit(node)
| apache-2.0 |
sinbazhou/odoo | addons/google_calendar/res_config.py | 256 | 1534 | from openerp.osv import fields, osv
class calendar_config_settings(osv.TransientModel):
    """Settings panel fields for the Google Calendar synchronization."""
    _inherit = 'base.config.settings'

    _columns = {
        'google_cal_sync': fields.boolean("Show tutorial to know how to get my 'Client ID' and my 'Client Secret'"),
        'cal_client_id': fields.char("Client_id"),
        'cal_client_secret': fields.char("Client_key"),
        'server_uri': fields.char('URI for tuto')
    }

    def set_calset(self, cr, uid, ids, context=None):
        """Persist the Google OAuth credentials as system parameters."""
        icp = self.pool['ir.config_parameter']
        record = self.browse(cr, uid, ids[0], context=context)
        client_id = (record.cal_client_id or '').strip()
        client_secret = (record.cal_client_secret or '').strip()
        icp.set_param(cr, uid, 'google_calendar_client_id', client_id, groups=['base.group_system'], context=None)
        icp.set_param(cr, uid, 'google_calendar_client_secret', client_secret, groups=['base.group_system'], context=None)

    def get_default_all(self, cr, uid, ids, context=None):
        """Read the stored credentials and compute the OAuth callback URI."""
        icp = self.pool.get('ir.config_parameter')
        client_id = icp.get_param(cr, uid, 'google_calendar_client_id', default='', context=context)
        client_secret = icp.get_param(cr, uid, 'google_calendar_client_secret', default='', context=context)
        base_url = icp.get_param(cr, uid, 'web.base.url', default="http://yourcompany.odoo.com", context=context)
        uri = "%s/google_account/authentication" % base_url
        return dict(cal_client_id=client_id, cal_client_secret=client_secret, server_uri=uri)
| agpl-3.0 |
KhalidGit/flask | Work/TriviaMVA/TriviaMVA/env/Lib/site-packages/werkzeug/contrib/lint.py | 295 | 12282 | # -*- coding: utf-8 -*-
"""
werkzeug.contrib.lint
~~~~~~~~~~~~~~~~~~~~~
.. versionadded:: 0.5
This module provides a middleware that performs sanity checks of the WSGI
application. It checks that :pep:`333` is properly implemented and warns
on some common HTTP errors such as non-empty responses for 304 status
codes.
This module provides a middleware, the :class:`LintMiddleware`. Wrap your
application with it and it will warn about common problems with WSGI and
HTTP while your application is running.
It's strongly recommended to use it during development.
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from urlparse import urlparse
from warnings import warn
from werkzeug.datastructures import Headers
from werkzeug.http import is_entity_header
from werkzeug.wsgi import FileWrapper
from werkzeug._compat import string_types
class WSGIWarning(Warning):
    """Warning class for WSGI warnings.

    Emitted via :func:`warnings.warn` when the wrapped application or
    server violates the WSGI protocol (:pep:`333`).
    """
class HTTPWarning(Warning):
    """Warning class for HTTP warnings.

    Emitted for HTTP-level problems such as non-empty 304 responses or a
    Content-Length that does not match the bytes actually sent.
    """
def check_string(context, obj, stacklevel=3):
    """Warn (WSGIWarning) unless *obj* is a native string."""
    if type(obj) is str:
        return
    warn(WSGIWarning('%s requires bytestrings, got %s' %
                     (context, obj.__class__.__name__)))
class InputStream(object):
    """Lint wrapper for ``wsgi.input`` that warns about unsafe usage while
    delegating all reads to the wrapped stream."""

    def __init__(self, stream):
        self._stream = stream

    def read(self, *args):
        argc = len(args)
        if argc == 0:
            warn(WSGIWarning('wsgi does not guarantee an EOF marker on the '
                             'input stream, thus making calls to '
                             'wsgi.input.read() unsafe. Conforming servers '
                             'may never return from this call.'),
                 stacklevel=2)
        elif argc != 1:
            warn(WSGIWarning('too many parameters passed to wsgi.input.read()'),
                 stacklevel=2)
        return self._stream.read(*args)

    def readline(self, *args):
        argc = len(args)
        if argc == 0:
            warn(WSGIWarning('Calls to wsgi.input.readline() without arguments'
                             ' are unsafe. Use wsgi.input.read() instead.'),
                 stacklevel=2)
        elif argc == 1:
            warn(WSGIWarning('wsgi.input.readline() was called with a size hint. '
                             'WSGI does not support this, although it\'s available '
                             'on all major servers.'),
                 stacklevel=2)
        else:
            raise TypeError('too many arguments passed to wsgi.input.readline()')
        return self._stream.readline(*args)

    def __iter__(self):
        try:
            return iter(self._stream)
        except TypeError:
            warn(WSGIWarning('wsgi.input is not iterable.'), stacklevel=2)
            return iter(())

    def close(self):
        # Per WSGI, the input stream belongs to the server, not the app.
        warn(WSGIWarning('application closed the input stream!'),
             stacklevel=2)
        self._stream.close()
class ErrorStream(object):
    """Lint wrapper for ``wsgi.errors`` that type-checks every write and
    warns if the application closes the stream."""

    def __init__(self, stream):
        self._stream = stream

    def write(self, s):
        check_string('wsgi.error.write()', s)
        self._stream.write(s)

    def flush(self):
        self._stream.flush()

    def writelines(self, seq):
        # BUGFIX: the original wrote `seq` (the entire sequence) once per
        # iteration instead of each individual line. Delegate to write() so
        # every item is also type-checked.
        for line in seq:
            self.write(line)

    def close(self):
        # Per WSGI, the error stream belongs to the server, not the app.
        warn(WSGIWarning('application closed the error stream!'),
             stacklevel=2)
        self._stream.close()
class GuardedWrite(object):
    """Wraps the ``write`` callable returned by ``start_response``,
    type-checking each chunk and recording its length in `chunks`."""

    def __init__(self, write, chunks):
        self._write = write
        self._chunks = chunks

    def __call__(self, s):
        check_string('write()', s)
        # BUGFIX: `self._write` is the callable returned by start_response()
        # (PEP 333), not a stream object; the original `self._write.write(s)`
        # raised AttributeError. Call it directly.
        self._write(s)
        self._chunks.append(len(s))
class GuardedIterator(object):
    """Wraps the application iterator, checking that the body is consistent
    with the response headers and that close() is eventually called."""

    def __init__(self, iterator, headers_set, chunks):
        self._iterator = iterator
        # Python 2 iterator protocol (this module predates Python 3 support).
        self._next = iter(iterator).next
        self.closed = False
        self.headers_set = headers_set
        self.chunks = chunks

    def __iter__(self):
        return self

    def next(self):
        if self.closed:
            warn(WSGIWarning('iterated over closed app_iter'),
                 stacklevel=2)
        rv = self._next()
        if not self.headers_set:
            warn(WSGIWarning('Application returned before it '
                             'started the response'), stacklevel=2)
        check_string('application iterator items', rv)
        self.chunks.append(len(rv))
        return rv

    def close(self):
        self.closed = True
        if hasattr(self._iterator, 'close'):
            self._iterator.close()

        if self.headers_set:
            status_code, headers = self.headers_set
            bytes_sent = sum(self.chunks)
            content_length = headers.get('content-length', type=int)

            if status_code == 304:
                # 304 responses may carry only a restricted header set and
                # no body.
                for key, value in headers:
                    key = key.lower()
                    if key not in ('expires', 'content-location') and \
                       is_entity_header(key):
                        warn(HTTPWarning('entity header %r found in 304 '
                                         'response' % key))
                if bytes_sent:
                    warn(HTTPWarning('304 responses must not have a body'))
            elif 100 <= status_code < 200 or status_code == 204:
                if content_length != 0:
                    # BUGFIX: the original applied `% status_code` to the
                    # HTTPWarning *instance* rather than the message string,
                    # raising TypeError instead of emitting the warning.
                    warn(HTTPWarning('%r responses must have an empty '
                                     'content length' % status_code))
                if bytes_sent:
                    warn(HTTPWarning('%r responses must not have a body' %
                                     status_code))
            elif content_length is not None and content_length != bytes_sent:
                warn(WSGIWarning('Content-Length and the number of bytes '
                                 'sent to the client do not match.'))

    def __del__(self):
        if not self.closed:
            try:
                warn(WSGIWarning('Iterator was garbage collected before '
                                 'it was closed.'))
            except Exception:
                pass
class LintMiddleware(object):
    """This middleware wraps an application and warns on common errors.
    Among other thing it currently checks for the following problems:

    - invalid status codes
    - non-bytestrings sent to the WSGI server
    - strings returned from the WSGI application
    - non-empty conditional responses
    - unquoted etags
    - relative URLs in the Location header
    - unsafe calls to wsgi.input
    - unclosed iterators

    Detected errors are emitted using the standard Python :mod:`warnings`
    system and usually end up on :data:`stderr`.

    ::

        from werkzeug.contrib.lint import LintMiddleware
        app = LintMiddleware(app)

    :param app: the application to wrap
    """

    def __init__(self, app):
        self.app = app

    def check_environ(self, environ):
        # The environ must be a plain dict (PEP 333) carrying the mandatory
        # CGI and wsgi.* keys, and SCRIPT_NAME/PATH_INFO must be
        # root-relative.
        if type(environ) is not dict:
            warn(WSGIWarning('WSGI environment is not a standard python dict.'),
                 stacklevel=4)
        for key in ('REQUEST_METHOD', 'SERVER_NAME', 'SERVER_PORT',
                    'wsgi.version', 'wsgi.input', 'wsgi.errors',
                    'wsgi.multithread', 'wsgi.multiprocess',
                    'wsgi.run_once'):
            if key not in environ:
                warn(WSGIWarning('required environment key %r not found'
                                 % key), stacklevel=3)
        if environ['wsgi.version'] != (1, 0):
            warn(WSGIWarning('environ is not a WSGI 1.0 environ'),
                 stacklevel=3)

        script_name = environ.get('SCRIPT_NAME', '')
        if script_name and script_name[:1] != '/':
            warn(WSGIWarning('SCRIPT_NAME does not start with a slash: %r'
                             % script_name), stacklevel=3)
        path_info = environ.get('PATH_INFO', '')
        if path_info[:1] != '/':
            warn(WSGIWarning('PATH_INFO does not start with a slash: %r'
                             % path_info), stacklevel=3)

    def check_start_response(self, status, headers, exc_info):
        # Validates the (status, headers, exc_info) triple passed to
        # start_response and returns the parsed status code together with a
        # Headers view for the response checks in GuardedIterator.close().
        check_string('status', status)
        status_code = status.split(None, 1)[0]
        if len(status_code) != 3 or not status_code.isdigit():
            warn(WSGIWarning('Status code must be three digits'), stacklevel=3)
        if len(status) < 4 or status[3] != ' ':
            warn(WSGIWarning('Invalid value for status %r. Valid '
                             'status strings are three digits, a space '
                             'and a status explanation'), stacklevel=3)
        status_code = int(status_code)
        if status_code < 100:
            warn(WSGIWarning('status code < 100 detected'), stacklevel=3)

        if type(headers) is not list:
            warn(WSGIWarning('header list is not a list'), stacklevel=3)
        for item in headers:
            if type(item) is not tuple or len(item) != 2:
                warn(WSGIWarning('Headers must tuple 2-item tuples'),
                     stacklevel=3)
            name, value = item
            if type(name) is not str or type(value) is not str:
                warn(WSGIWarning('header items must be strings'),
                     stacklevel=3)
            if name.lower() == 'status':
                # The 'Status' header is CGI-only; WSGI carries the status
                # separately via start_response.
                warn(WSGIWarning('The status header is not supported due to '
                                 'conflicts with the CGI spec.'),
                     stacklevel=3)

        if exc_info is not None and not isinstance(exc_info, tuple):
            warn(WSGIWarning('invalid value for exc_info'), stacklevel=3)

        headers = Headers(headers)
        self.check_headers(headers)

        return status_code, headers

    def check_headers(self, headers):
        # ETags must be quoted and Location URLs must be absolute (RFC 2616).
        etag = headers.get('etag')
        if etag is not None:
            if etag.startswith('w/'):
                etag = etag[2:]
            if not (etag[:1] == etag[-1:] == '"'):
                warn(HTTPWarning('unquoted etag emitted.'), stacklevel=4)
        location = headers.get('location')
        if location is not None:
            if not urlparse(location).netloc:
                warn(HTTPWarning('absolute URLs required for location header'),
                     stacklevel=4)

    def check_iterator(self, app_iter):
        # Returning a bare string makes the server send it char by char.
        if isinstance(app_iter, string_types):
            warn(WSGIWarning('application returned string. Response will '
                             'send character for character to the client '
                             'which will kill the performance. Return a '
                             'list or iterable instead.'), stacklevel=3)

    def __call__(self, *args, **kwargs):
        if len(args) != 2:
            warn(WSGIWarning('Two arguments to WSGI app required'), stacklevel=2)
        if kwargs:
            warn(WSGIWarning('No keyword arguments to WSGI app allowed'),
                 stacklevel=2)
        environ, start_response = args

        self.check_environ(environ)
        # Wrap the streams so unsafe usage by the application is reported.
        environ['wsgi.input'] = InputStream(environ['wsgi.input'])
        environ['wsgi.errors'] = ErrorStream(environ['wsgi.errors'])

        # hook our own file wrapper in so that applications will always
        # iterate to the end and we can check the content length
        environ['wsgi.file_wrapper'] = FileWrapper

        headers_set = []
        chunks = []

        def checking_start_response(*args, **kwargs):
            # Proxy for start_response that validates its arguments, records
            # the parsed (status, headers) in headers_set and guards the
            # returned write callable.
            if len(args) not in (2, 3):
                warn(WSGIWarning('Invalid number of arguments: %s, expected '
                                 '2 or 3' % len(args), stacklevel=2))
            if kwargs:
                warn(WSGIWarning('no keyword arguments allowed.'))

            status, headers = args[:2]
            if len(args) == 3:
                exc_info = args[2]
            else:
                exc_info = None

            headers_set[:] = self.check_start_response(status, headers,
                                                       exc_info)
            return GuardedWrite(start_response(status, headers, exc_info),
                                chunks)

        app_iter = self.app(environ, checking_start_response)
        self.check_iterator(app_iter)
        return GuardedIterator(app_iter, headers_set, chunks)
| apache-2.0 |
SUSE/azure-sdk-for-python | azure-mgmt-containerregistry/azure/mgmt/containerregistry/v2017_03_01/operations/registries_operations.py | 2 | 30708 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from msrestazure.azure_operation import AzureOperationPoller
import uuid
from .. import models
class RegistriesOperations(object):
    """RegistriesOperations operations.

    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    :ivar api_version: The client API version. Constant value: "2017-03-01".
    """

    # NOTE: this class is generated by AutoRest (see the file header);
    # manual edits will be lost when the client is regenerated.

    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        # Constant for this API surface; not configurable per call.
        self.api_version = "2017-03-01"
        self.config = config

    def check_name_availability(
            self, name, custom_headers=None, raw=False, **operation_config):
        """Checks whether the container registry name is available for use.

        The name must contain only alphanumeric characters, be globally
        unique, and between 5 and 60 characters in length.

        :param name: The name of the container registry.
        :type name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
            deserialized response
        :param operation_config: :ref:`Operation configuration
            overrides<msrest:optionsforoperations>`.
        :rtype: :class:`RegistryNameStatus
            <azure.mgmt.containerregistry.v2017_03_01.models.RegistryNameStatus>`
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
            if raw=true
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        registry_name_check_request = models.RegistryNameCheckRequest(name=name)
        # Construct URL
        url = '/subscriptions/{subscriptionId}/providers/Microsoft.ContainerRegistry/checkNameAvailability'
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        # Construct body
        body_content = self._serialize.body(registry_name_check_request, 'RegistryNameCheckRequest')
        # Construct and send request
        request = self._client.post(url, query_parameters)
        response = self._client.send(
            request, header_parameters, body_content, **operation_config)
        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('RegistryNameStatus', response)
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
        return deserialized

    def get(
            self, resource_group_name, registry_name, custom_headers=None, raw=False, **operation_config):
        """Gets the properties of the specified container registry.

        :param resource_group_name: The name of the resource group to which
            the container registry belongs.
        :type resource_group_name: str
        :param registry_name: The name of the container registry.
        :type registry_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
            deserialized response
        :param operation_config: :ref:`Operation configuration
            overrides<msrest:optionsforoperations>`.
        :rtype: :class:`Registry
            <azure.mgmt.containerregistry.v2017_03_01.models.Registry>`
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
            if raw=true
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        # Construct URL
        url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{registryName}'
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'registryName': self._serialize.url("registry_name", registry_name, 'str', max_length=50, min_length=5, pattern='^[a-zA-Z0-9]*$')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        # Construct and send request
        request = self._client.get(url, query_parameters)
        response = self._client.send(request, header_parameters, **operation_config)
        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('Registry', response)
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
        return deserialized

    def create(
            self, resource_group_name, registry_name, registry_create_parameters, custom_headers=None, raw=False, **operation_config):
        """Creates a container registry with the specified parameters.

        This is a long-running operation; unless ``raw=True``, an
        :class:`AzureOperationPoller` is returned which polls the service
        until the registry is provisioned.

        :param resource_group_name: The name of the resource group to which
            the container registry belongs.
        :type resource_group_name: str
        :param registry_name: The name of the container registry.
        :type registry_name: str
        :param registry_create_parameters: The parameters for creating a
            container registry.
        :type registry_create_parameters: :class:`RegistryCreateParameters
            <azure.mgmt.containerregistry.v2017_03_01.models.RegistryCreateParameters>`
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
            deserialized response
        :rtype:
            :class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
            instance that returns :class:`Registry
            <azure.mgmt.containerregistry.v2017_03_01.models.Registry>`
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
            if raw=true
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        # Construct URL
        url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{registryName}'
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'registryName': self._serialize.url("registry_name", registry_name, 'str', max_length=50, min_length=5, pattern='^[a-zA-Z0-9]*$')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        # Construct body
        body_content = self._serialize.body(registry_create_parameters, 'RegistryCreateParameters')

        # Construct and send request
        def long_running_send():
            # Initial PUT that kicks off provisioning.
            request = self._client.put(url, query_parameters)
            return self._client.send(
                request, header_parameters, body_content, **operation_config)

        def get_long_running_status(status_link, headers=None):
            # Polls the Azure-provided status link until completion.
            request = self._client.get(status_link)
            if headers:
                request.headers.update(headers)
            return self._client.send(
                request, header_parameters, **operation_config)

        def get_long_running_output(response):
            # 200 carries the registry body; 202 means still provisioning.
            if response.status_code not in [200, 202]:
                exp = CloudError(response)
                exp.request_id = response.headers.get('x-ms-request-id')
                raise exp
            deserialized = None
            if response.status_code == 200:
                deserialized = self._deserialize('Registry', response)
            if raw:
                client_raw_response = ClientRawResponse(deserialized, response)
                return client_raw_response
            return deserialized

        if raw:
            # raw=True short-circuits polling: send once and return the
            # immediate (possibly still-provisioning) response.
            response = long_running_send()
            return get_long_running_output(response)

        long_running_operation_timeout = operation_config.get(
            'long_running_operation_timeout',
            self.config.long_running_operation_timeout)
        return AzureOperationPoller(
            long_running_send, get_long_running_output,
            get_long_running_status, long_running_operation_timeout)

    def delete(
            self, resource_group_name, registry_name, custom_headers=None, raw=False, **operation_config):
        """Deletes a container registry.

        :param resource_group_name: The name of the resource group to which
            the container registry belongs.
        :type resource_group_name: str
        :param registry_name: The name of the container registry.
        :type registry_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
            deserialized response
        :param operation_config: :ref:`Operation configuration
            overrides<msrest:optionsforoperations>`.
        :rtype: None
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
            if raw=true
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        # Construct URL
        url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{registryName}'
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'registryName': self._serialize.url("registry_name", registry_name, 'str', max_length=50, min_length=5, pattern='^[a-zA-Z0-9]*$')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        # Construct and send request
        request = self._client.delete(url, query_parameters)
        response = self._client.send(request, header_parameters, **operation_config)
        # 204 indicates the registry was already absent.
        if response.status_code not in [200, 204]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp
        if raw:
            client_raw_response = ClientRawResponse(None, response)
            return client_raw_response

    def update(
            self, resource_group_name, registry_name, registry_update_parameters, custom_headers=None, raw=False, **operation_config):
        """Updates a container registry with the specified parameters.

        :param resource_group_name: The name of the resource group to which
            the container registry belongs.
        :type resource_group_name: str
        :param registry_name: The name of the container registry.
        :type registry_name: str
        :param registry_update_parameters: The parameters for updating a
            container registry.
        :type registry_update_parameters: :class:`RegistryUpdateParameters
            <azure.mgmt.containerregistry.v2017_03_01.models.RegistryUpdateParameters>`
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
            deserialized response
        :param operation_config: :ref:`Operation configuration
            overrides<msrest:optionsforoperations>`.
        :rtype: :class:`Registry
            <azure.mgmt.containerregistry.v2017_03_01.models.Registry>`
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
            if raw=true
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        # Construct URL
        url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{registryName}'
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'registryName': self._serialize.url("registry_name", registry_name, 'str', max_length=50, min_length=5, pattern='^[a-zA-Z0-9]*$')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        # Construct body
        body_content = self._serialize.body(registry_update_parameters, 'RegistryUpdateParameters')
        # Construct and send request
        request = self._client.patch(url, query_parameters)
        response = self._client.send(
            request, header_parameters, body_content, **operation_config)
        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('Registry', response)
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
        return deserialized

    def list_by_resource_group(
            self, resource_group_name, custom_headers=None, raw=False, **operation_config):
        """Lists all the container registries under the specified resource group.

        :param resource_group_name: The name of the resource group to which
            the container registry belongs.
        :type resource_group_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
            deserialized response
        :param operation_config: :ref:`Operation configuration
            overrides<msrest:optionsforoperations>`.
        :rtype: :class:`RegistryPaged
            <azure.mgmt.containerregistry.v2017_03_01.models.RegistryPaged>`
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        def internal_paging(next_link=None, raw=False):
            # First page is built from the template URL; subsequent pages
            # follow the service-supplied nextLink verbatim.
            if not next_link:
                # Construct URL
                url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerRegistry/registries'
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str')
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}
                query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
            else:
                url = next_link
                query_parameters = {}
            # Construct headers
            header_parameters = {}
            header_parameters['Content-Type'] = 'application/json; charset=utf-8'
            if self.config.generate_client_request_id:
                header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
            if custom_headers:
                header_parameters.update(custom_headers)
            if self.config.accept_language is not None:
                header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
            # Construct and send request
            request = self._client.get(url, query_parameters)
            response = self._client.send(
                request, header_parameters, **operation_config)
            if response.status_code not in [200]:
                exp = CloudError(response)
                exp.request_id = response.headers.get('x-ms-request-id')
                raise exp
            return response

        # Deserialize response
        deserialized = models.RegistryPaged(internal_paging, self._deserialize.dependencies)
        if raw:
            header_dict = {}
            client_raw_response = models.RegistryPaged(internal_paging, self._deserialize.dependencies, header_dict)
            return client_raw_response
        return deserialized

    def list(
            self, custom_headers=None, raw=False, **operation_config):
        """Lists all the container registries under the specified subscription.

        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
            deserialized response
        :param operation_config: :ref:`Operation configuration
            overrides<msrest:optionsforoperations>`.
        :rtype: :class:`RegistryPaged
            <azure.mgmt.containerregistry.v2017_03_01.models.RegistryPaged>`
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        def internal_paging(next_link=None, raw=False):
            # Same paging scheme as list_by_resource_group, scoped to the
            # whole subscription.
            if not next_link:
                # Construct URL
                url = '/subscriptions/{subscriptionId}/providers/Microsoft.ContainerRegistry/registries'
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}
                query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
            else:
                url = next_link
                query_parameters = {}
            # Construct headers
            header_parameters = {}
            header_parameters['Content-Type'] = 'application/json; charset=utf-8'
            if self.config.generate_client_request_id:
                header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
            if custom_headers:
                header_parameters.update(custom_headers)
            if self.config.accept_language is not None:
                header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
            # Construct and send request
            request = self._client.get(url, query_parameters)
            response = self._client.send(
                request, header_parameters, **operation_config)
            if response.status_code not in [200]:
                exp = CloudError(response)
                exp.request_id = response.headers.get('x-ms-request-id')
                raise exp
            return response

        # Deserialize response
        deserialized = models.RegistryPaged(internal_paging, self._deserialize.dependencies)
        if raw:
            header_dict = {}
            client_raw_response = models.RegistryPaged(internal_paging, self._deserialize.dependencies, header_dict)
            return client_raw_response
        return deserialized

    def list_credentials(
            self, resource_group_name, registry_name, custom_headers=None, raw=False, **operation_config):
        """Lists the login credentials for the specified container registry.

        :param resource_group_name: The name of the resource group to which
            the container registry belongs.
        :type resource_group_name: str
        :param registry_name: The name of the container registry.
        :type registry_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
            deserialized response
        :param operation_config: :ref:`Operation configuration
            overrides<msrest:optionsforoperations>`.
        :rtype: :class:`RegistryListCredentialsResult
            <azure.mgmt.containerregistry.v2017_03_01.models.RegistryListCredentialsResult>`
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
            if raw=true
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        # Construct URL
        url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{registryName}/listCredentials'
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'registryName': self._serialize.url("registry_name", registry_name, 'str', max_length=50, min_length=5, pattern='^[a-zA-Z0-9]*$')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        # Construct and send request
        request = self._client.post(url, query_parameters)
        response = self._client.send(request, header_parameters, **operation_config)
        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('RegistryListCredentialsResult', response)
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
        return deserialized

    def regenerate_credential(
            self, resource_group_name, registry_name, name, custom_headers=None, raw=False, **operation_config):
        """Regenerates one of the login credentials for the specified container
        registry.

        :param resource_group_name: The name of the resource group to which
            the container registry belongs.
        :type resource_group_name: str
        :param registry_name: The name of the container registry.
        :type registry_name: str
        :param name: Specifies name of the password which should be
            regenerated -- password or password2. Possible values include:
            'password', 'password2'
        :type name: str or :class:`PasswordName
            <azure.mgmt.containerregistry.v2017_03_01.models.PasswordName>`
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
            deserialized response
        :param operation_config: :ref:`Operation configuration
            overrides<msrest:optionsforoperations>`.
        :rtype: :class:`RegistryListCredentialsResult
            <azure.mgmt.containerregistry.v2017_03_01.models.RegistryListCredentialsResult>`
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
            if raw=true
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        regenerate_credential_parameters = models.RegenerateCredentialParameters(name=name)
        # Construct URL
        url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{registryName}/regenerateCredential'
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'registryName': self._serialize.url("registry_name", registry_name, 'str', max_length=50, min_length=5, pattern='^[a-zA-Z0-9]*$')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        # Construct body
        body_content = self._serialize.body(regenerate_credential_parameters, 'RegenerateCredentialParameters')
        # Construct and send request
        request = self._client.post(url, query_parameters)
        response = self._client.send(
            request, header_parameters, body_content, **operation_config)
        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('RegistryListCredentialsResult', response)
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
        return deserialized
| mit |
ConsenSys/testrpc | tests/filters/test_filter_log_matching.py | 3 | 1927 | import pytest
from testrpc.client.utils import (
encode_number,
encode_data,
coerce_args_to_bytes,
)
from testrpc.client.filters import (
check_if_log_matches,
)
@coerce_args_to_bytes
def make_log_filter(from_block="latest", to_block="latest", addresses=None, topics=None):
    """Build a log-filter dict for tests, defaulting optional lists to []."""
    return {
        'from_block': from_block,
        'to_block': to_block,
        'addresses': [] if addresses is None else addresses,
        'filter_topics': [] if topics is None else topics,
    }
@coerce_args_to_bytes
def make_log_entry(block_number=1,
                   type=b"mined",
                   address=b"0xd3cda913deb6f67967b99d67acdfa1712c293601",
                   topics=None,
                   data=b""):
    """Build a log-entry fixture dict with fixed tx/block hashes."""
    return {
        "type": type,
        "logIndex": "0x0",
        "transactionIndex": "0x0",
        "transactionHash": "0xebb0f76aa6a6bb8d178bc2b54ae8fd7ca778d703bf47d135c188ca2b6d25f2e4",
        "blockHash": "0xd2f44ad2d3702136acccacb5098829585e63b5e1e264b0e54c4d5af2edb87368",
        "blockNumber": encode_number(block_number),
        "address": address,
        "data": encode_data(data),
        "topics": [] if topics is None else topics,
    }
@pytest.mark.parametrize(
    'log_filter,log_entry,expected',
    (
        # Default ("latest"/"latest") filter matches these default entries,
        # with or without topics.
        (make_log_filter(), make_log_entry(), True),
        (make_log_filter(), make_log_entry(topics=['a']), True),
        # Block-range filtering: per these cases, both endpoints 1 and 2
        # are inclusive, and block 3 falls outside the [1, 2] range.
        (make_log_filter(from_block=1, to_block=2), make_log_entry(block_number=1), True),
        (make_log_filter(from_block=1, to_block=2), make_log_entry(block_number=2), True),
        (make_log_filter(from_block=1, to_block=2), make_log_entry(block_number=3), False),
    ),
)
def test_check_if_filter_matches_log(log_filter, log_entry, expected):
    """check_if_log_matches takes the entry plus the filter fields as kwargs."""
    actual = check_if_log_matches(log_entry, **log_filter)
    assert actual is expected
| mit |
Tatsh-ansible/ansible | lib/ansible/modules/net_tools/ldap/ldap_entry.py | 9 | 9945 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2016, Peter Sagerson <psagers@ignorare.net>
# (c) 2016, Jiri Tyr <jiri.tyr@gmail.com>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.0',
                    'status': ['preview'],
                    'supported_by': 'community'}

# NOTE(review): the dump stripped indentation inside these YAML doc strings;
# the nesting below is reconstructed to standard Ansible 2-space YAML.
DOCUMENTATION = """
---
module: ldap_entry
short_description: Add or remove LDAP entries.
description:
  - Add or remove LDAP entries. This module only asserts the existence or
    non-existence of an LDAP entry, not its attributes. To assert the
    attribute values of an entry, see M(ldap_attr).
notes:
  - The default authentication settings will attempt to use a SASL EXTERNAL
    bind over a UNIX domain socket. This works well with the default Ubuntu
    install for example, which includes a cn=peercred,cn=external,cn=auth ACL
    rule allowing root to modify the server configuration. If you need to use
    a simple bind to access your server, pass the credentials in I(bind_dn)
    and I(bind_pw).
version_added: '2.3'
author:
  - Jiri Tyr (@jtyr)
requirements:
  - python-ldap
options:
  bind_dn:
    required: false
    default: null
    description:
      - A DN to bind with. If this is omitted, we'll try a SASL bind with
        the EXTERNAL mechanism. If this is blank, we'll use an anonymous
        bind.
  bind_pw:
    required: false
    default: null
    description:
      - The password to use with I(bind_dn).
  dn:
    required: true
    description:
      - The DN of the entry to add or remove.
  attributes:
    required: false
    default: null
    description:
      - If I(state=present), attributes necessary to create an entry. Existing
        entries are never modified. To assert specific attribute values on an
        existing entry, use M(ldap_attr) module instead.
  objectClass:
    required: false
    default: null
    description:
      - If I(state=present), value or list of values to use when creating
        the entry. It can either be a string or an actual list of
        strings.
  params:
    required: false
    default: null
    description:
      - List of options which allows to overwrite any of the task or the
        I(attributes) options. To remove an option, set the value of the option
        to C(null).
  server_uri:
    required: false
    default: ldapi:///
    description:
      - A URI to the LDAP server. The default value lets the underlying
        LDAP client library look for a UNIX domain socket in its default
        location.
  start_tls:
    required: false
    choices: ['yes', 'no']
    default: 'no'
    description:
      - If true, we'll use the START_TLS LDAP extension.
  state:
    required: false
    choices: [present, absent]
    default: present
    description:
      - The target state of the entry.
  validate_certs:
    required: false
    choices: ['yes', 'no']
    default: 'yes'
    description:
      - If C(no), SSL certificates will not be validated. This should only be
        used on sites using self-signed certificates.
    version_added: "2.4"
"""

EXAMPLES = """
- name: Make sure we have a parent entry for users
  ldap_entry:
    dn: ou=users,dc=example,dc=com
    objectClass: organizationalUnit

- name: Make sure we have an admin user
  ldap_entry:
    dn: cn=admin,dc=example,dc=com
    objectClass:
      - simpleSecurityObject
      - organizationalRole
    attributes:
      description: An LDAP administrator
      userPassword: "{SSHA}tabyipcHzhwESzRaGA7oQ/SDoBZQOGND"

- name: Get rid of an old entry
  ldap_entry:
    dn: ou=stuff,dc=example,dc=com
    state: absent
    server_uri: ldap://localhost/
    bind_dn: cn=admin,dc=example,dc=com
    bind_pw: password

#
# The same as in the previous example but with the authentication details
# stored in the ldap_auth variable:
#
# ldap_auth:
#   server_uri: ldap://localhost/
#   bind_dn: cn=admin,dc=example,dc=com
#   bind_pw: password

- name: Get rid of an old entry
  ldap_entry:
    dn: ou=stuff,dc=example,dc=com
    state: absent
    params: "{{ ldap_auth }}"
"""

RETURN = """
# Default return values
"""
import traceback
try:
import ldap
import ldap.modlist
import ldap.sasl
HAS_LDAP = True
except ImportError:
HAS_LDAP = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import string_types
from ansible.module_utils._text import to_native
class LdapEntry(object):
    """Asserts the presence or absence of a single LDAP entry.

    Reads its configuration from the AnsibleModule parameters and opens
    the LDAP connection immediately on construction.
    """

    def __init__(self, module):
        # Shortcuts
        self.module = module
        self.bind_dn = self.module.params['bind_dn']
        self.bind_pw = self.module.params['bind_pw']
        self.dn = self.module.params['dn']
        self.server_uri = self.module.params['server_uri']
        self.start_tls = self.module.params['start_tls']
        self.state = self.module.params['state']
        self.verify_cert = self.module.params['validate_certs']
        # Add the objectClass into the list of attributes
        self.module.params['attributes']['objectClass'] = (
            self.module.params['objectClass'])
        # Load attributes (only needed when the entry must exist; for
        # state=absent self.attrs is never set).
        if self.state == 'present':
            self.attrs = self._load_attrs()
        # Establish connection
        self.connection = self._connect_to_ldap()

    def _load_attrs(self):
        """ Turn attribute's value to array. """
        attrs = {}
        for name, value in self.module.params['attributes'].items():
            if name not in attrs:
                attrs[name] = []
            # Lists are taken as-is; scalars are stringified and wrapped.
            if isinstance(value, list):
                attrs[name] = value
            else:
                attrs[name].append(str(value))
        return attrs

    def add(self):
        """ If self.dn does not exist, returns a callable that will add it. """
        def _add():
            self.connection.add_s(self.dn, modlist)
        if not self._is_entry_present():
            # modlist is captured by the closure above; it is only built
            # when an add is actually required.
            modlist = ldap.modlist.addModlist(self.attrs)
            action = _add
        else:
            action = None
        return action

    def delete(self):
        """ If self.dn exists, returns a callable that will delete it. """
        def _delete():
            self.connection.delete_s(self.dn)
        if self._is_entry_present():
            action = _delete
        else:
            action = None
        return action

    def _is_entry_present(self):
        # A base-scope search raises NO_SUCH_OBJECT iff the DN is absent.
        try:
            self.connection.search_s(self.dn, ldap.SCOPE_BASE)
        except ldap.NO_SUCH_OBJECT:
            is_present = False
        else:
            is_present = True
        return is_present

    def _connect_to_ldap(self):
        if not self.verify_cert:
            # Disable certificate checks globally for this process.
            ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_NEVER)
        connection = ldap.initialize(self.server_uri)
        if self.start_tls:
            try:
                connection.start_tls_s()
            except ldap.LDAPError as e:
                self.module.fail_json(msg="Cannot start TLS.", details=to_native(e),
                                      exception=traceback.format_exc())
        try:
            if self.bind_dn is not None:
                connection.simple_bind_s(self.bind_dn, self.bind_pw)
            else:
                # No bind DN given: fall back to SASL EXTERNAL (e.g. UNIX
                # peer credentials over ldapi://).
                connection.sasl_interactive_bind_s('', ldap.sasl.external())
        except ldap.LDAPError as e:
            self.module.fail_json(
                msg="Cannot bind to the server.", details=to_native(e),
                exception=traceback.format_exc())
        return connection
def main():
    """Module entry point: validate parameters and create or remove the entry.

    Reads the module arguments, merges the deprecated ``params`` dict into
    them, then adds (state=present) or deletes (state=absent) the LDAP
    entry, honoring check mode.
    """
    module = AnsibleModule(
        argument_spec={
            'attributes': dict(default={}, type='dict'),
            'bind_dn': dict(),
            'bind_pw': dict(default='', no_log=True),
            'dn': dict(required=True),
            'objectClass': dict(type='raw'),
            'params': dict(type='dict'),
            'server_uri': dict(default='ldapi:///'),
            'start_tls': dict(default=False, type='bool'),
            'state': dict(default='present', choices=['present', 'absent']),
            'validate_certs': dict(default=True, type='bool'),
        },
        supports_check_mode=True,
    )
    if not HAS_LDAP:
        module.fail_json(
            msg="Missing required 'ldap' module (pip install python-ldap).")
    state = module.params['state']
    # Check if objectClass is present when needed
    if state == 'present' and module.params['objectClass'] is None:
        module.fail_json(msg="At least one objectClass must be provided.")
    # Check if objectClass is of the correct type
    if (
            module.params['objectClass'] is not None and not (
                isinstance(module.params['objectClass'], string_types) or
                isinstance(module.params['objectClass'], list))):
        module.fail_json(msg="objectClass must be either a string or a list.")
    # Update module parameters with user's parameters if defined
    if 'params' in module.params and isinstance(module.params['params'], dict):
        for key, val in module.params['params'].items():
            if key in module.argument_spec:
                # Known module options override the top-level values.
                module.params[key] = val
            else:
                # Anything else is treated as an entry attribute.
                module.params['attributes'][key] = val
    # Remove the params
    module.params.pop('params', None)
    # Instantiate the LdapEntry object. Named ldap_entry (not "ldap") so the
    # imported ldap module is not shadowed by the instance.
    ldap_entry = LdapEntry(module)
    # Get the action function (None means the entry is already in the
    # desired state and nothing has to change).
    action = None
    if state == 'present':
        action = ldap_entry.add()
    elif state == 'absent':
        action = ldap_entry.delete()
    # Perform the action
    if action is not None and not module.check_mode:
        try:
            action()
        except Exception as e:
            module.fail_json(msg="Entry action failed.", details=to_native(e), exception=traceback.format_exc())
    module.exit_json(changed=(action is not None))
if __name__ == '__main__':
    main()
| gpl-3.0 |
mou4e/zirconium | tools/findit/svn_repository_parser.py | 74 | 9178 | # Copyright (c) 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import xml.dom.minidom as minidom
from xml.parsers.expat import ExpatError
import crash_utils
from repository_parser_interface import ParserInterface
# This number is 6 because each linediff page in src.chromium.org should
# contain the following tables: table with revision number, table with actual
# diff, table with dropdown menu, table with legend, a border table and a table
# containing page information.
NUM_TABLES_IN_LINEDIFF_PAGE = 6
# Each of the linediff info should contain 3 tds, one for changed line number,
# and two for line contents before/after.
NUM_TDS_IN_LINEDIFF_PAGE = 3
class SVNParser(ParserInterface):
  """Parser for SVN repository using chromium.org, for components in config.
  Attributes:
    component_to_urls_map: A map from component to the urls, where urls are
        for changelog, revision, line diff and annotation.
  """
  def __init__(self, url_map):
    self.component_to_urls_map = url_map
  def ParseChangelog(self, component, range_start, range_end):
    """Parses the SVN changelog between two revisions of a component.
    Args:
      component: Name of the component, used to look up its url templates.
      range_start: First revision of the range.
      range_end: Last revision of the range.
    Returns:
      A tuple (revision_map, file_to_revision_map). revision_map maps a
      revision number to a dict with 'author', 'message' and 'url' keys;
      file_to_revision_map maps a file path to a list of
      (revision_number, file_change_type) tuples. Both maps are empty when
      the component is unknown, the fetch fails or the XML cannot be parsed.
    """
    file_to_revision_map = {}
    revision_map = {}
    # Check if the current component is supported by reading the components
    # parsed from config file. If it is not, fail.
    url_map = self.component_to_urls_map.get(component)
    if not url_map:
      return (revision_map, file_to_revision_map)
    # Retrieve data from the url, return empty map if fails.
    revision_range_str = '%s:%s' % (range_start, range_end)
    url = url_map['changelog_url'] % revision_range_str
    response = crash_utils.GetDataFromURL(url)
    if not response:
      return (revision_map, file_to_revision_map)
    # Parse xml out of the returned string. If it fails, return empty map.
    try:
      xml_revisions = minidom.parseString(response)
    except ExpatError:
      return (revision_map, file_to_revision_map)
    # Iterate through the returned XML object.
    revisions = xml_revisions.getElementsByTagName('logentry')
    for revision in revisions:
      # Create new revision object for each of the revision.
      revision_object = {}
      # Set author of the CL.
      revision_object['author'] = revision.getElementsByTagName(
          'author')[0].firstChild.nodeValue
      # Get the revision number from xml.
      revision_number = int(revision.getAttribute('revision'))
      # Iterate through the changed paths in the CL.
      paths = revision.getElementsByTagName('paths')
      if paths:
        for changed_path in paths[0].getElementsByTagName('path'):
          # Get path and file change type from the xml.
          file_path = changed_path.firstChild.nodeValue
          file_change_type = changed_path.getAttribute('action')
          # Normalize away the '/trunk/' prefix so paths match local layout.
          if file_path.startswith('/trunk/'):
            file_path = file_path[len('/trunk/'):]
          # Add file to the map.
          if file_path not in file_to_revision_map:
            file_to_revision_map[file_path] = []
          file_to_revision_map[file_path].append(
              (revision_number, file_change_type))
      # Set commit message of the CL.
      revision_object['message'] = revision.getElementsByTagName('msg')[
          0].firstChild.nodeValue
      # Set url of this CL.
      revision_url = url_map['revision_url'] % revision_number
      revision_object['url'] = revision_url
      # Add this CL to the revision map.
      revision_map[revision_number] = revision_object
    return (revision_map, file_to_revision_map)
  def ParseLineDiff(self, path, component, file_change_type, revision_number):
    """Parses the line diff of a file in one revision from the viewvc page.
    Args:
      path: Path of the file within the repository.
      component: Name of the component, used to look up its url templates.
      file_change_type: 'A' (added), or another SVN change action letter.
      revision_number: The revision to diff against its predecessor.
    Returns:
      A tuple (url, changed_line_numbers, changed_line_contents). On any
      parse/fetch problem the revision url is returned with empty lists.
    """
    changed_line_numbers = []
    changed_line_contents = []
    url_map = self.component_to_urls_map.get(component)
    if not url_map:
      return (None, None, None)
    # If the file is added (not modified), treat it as if it is not changed.
    backup_url = url_map['revision_url'] % revision_number
    if file_change_type == 'A':
      return (backup_url, changed_line_numbers, changed_line_contents)
    # Retrieve data from the url. If no data is retrieved, return empty lists.
    url = url_map['diff_url'] % (path, revision_number - 1,
                                 revision_number, revision_number)
    data = crash_utils.GetDataFromURL(url)
    if not data:
      return (backup_url, changed_line_numbers, changed_line_contents)
    line_diff_html = minidom.parseString(data)
    tables = line_diff_html.getElementsByTagName('table')
    # If there are not NUM_TABLES tables in the html page, there should be an
    # error in the html page.
    if len(tables) != NUM_TABLES_IN_LINEDIFF_PAGE:
      return (backup_url, changed_line_numbers, changed_line_contents)
    # Diff content is in the second table. Each line of the diff content
    # is in <tr>.
    trs = tables[1].getElementsByTagName('tr')
    # viewvc uses css classes prefixed with 'vc_diff_'; strip that prefix.
    prefix_len = len('vc_diff_')
    # Filter trs so that it only contains diff chunk with contents.
    filtered_trs = []
    for tr in trs:
      tr_class = tr.getAttribute('class')
      # Check for the classes of the <tr>s.
      if tr_class:
        tr_class = tr_class[prefix_len:]
        # Do not have to add header.
        if tr_class == 'header' or tr_class == 'chunk_header':
          continue
        # If the class of tr is empty, this page does not have any change.
        if tr_class == 'empty':
          return (backup_url, changed_line_numbers, changed_line_contents)
      filtered_trs.append(tr)
    # Iterate through filtered trs, and grab line diff information.
    for tr in filtered_trs:
      tds = tr.getElementsByTagName('td')
      # If there aren't 3 tds, this line does not contain line diff info.
      if len(tds) != NUM_TDS_IN_LINEDIFF_PAGE:
        continue
      # If line number information is not in hyperlink, ignore this line.
      try:
        line_num = tds[0].getElementsByTagName('a')[0].firstChild.nodeValue
        left_diff_type = tds[1].getAttribute('class')[prefix_len:]
        right_diff_type = tds[2].getAttribute('class')[prefix_len:]
      except IndexError:
        continue
      # Treat the line as modified only if both left and right diff has type
      # changed or both have different change type, and if the change is not
      # deletion.
      if (left_diff_type != right_diff_type) or (
          left_diff_type == 'change' and right_diff_type == 'change'):
        # Check if the line content is not empty.
        try:
          new_line = tds[2].firstChild.nodeValue
        except AttributeError:
          new_line = ''
        if not (left_diff_type == 'remove' and right_diff_type == 'empty'):
          changed_line_numbers.append(int(line_num))
          changed_line_contents.append(new_line.strip())
    return (url, changed_line_numbers, changed_line_contents)
  def ParseBlameInfo(self, component, file_path, line, revision):
    """Parses blame (annotate) information for one line of a file.
    Args:
      component: Name of the component, used to look up its url templates.
      file_path: Path of the file within the repository.
      line: 1-based line number to annotate.
      revision: Revision of the file to annotate.
    Returns:
      A tuple (line_content, revision, author, revision_url, message), or
      None when the page cannot be fetched or does not look as expected.
    """
    url_map = self.component_to_urls_map.get(component)
    if not url_map:
      return None
    # Retrieve blame data from url, return None if fails.
    url = url_map['blame_url'] % (file_path, revision, revision)
    data = crash_utils.GetDataFromURL(url)
    if not data:
      return None
    blame_html = minidom.parseString(data)
    title = blame_html.getElementsByTagName('title')
    # If the returned html page is an exception page, return None.
    if title[0].firstChild.nodeValue == 'ViewVC Exception':
      return None
    # Each of the blame result is in <tr>.
    blame_results = blame_html.getElementsByTagName('tr')
    try:
      blame_result = blame_results[line]
    except IndexError:
      return None
    # There must be 4 <td> for each <tr>. If not, this page is wrong.
    tds = blame_result.getElementsByTagName('td')
    if len(tds) != 4:
      return None
    # The third <td> has the line content, separated by <span>s. Combine
    # those to get a string of changed line. If it has nothing, the line
    # is empty.
    line_content = ''
    if tds[3].hasChildNodes():
      contents = tds[3].childNodes
      for content in contents:
        # Nodetype 3 means it is text node.
        if content.nodeType == minidom.Node.TEXT_NODE:
          line_content += content.nodeValue
        else:
          line_content += content.firstChild.nodeValue
      line_content = line_content.strip()
    # If the current line has the same author/revision as the previous lines,
    # the result is not shown. Propagate up until we find the line with info.
    while not tds[1].firstChild:
      line -= 1
      blame_result = blame_results[line]
      tds = blame_result.getElementsByTagName('td')
    author = tds[1].firstChild.nodeValue
    # Revision can either be in hyperlink or plain text.
    try:
      revision = tds[2].getElementsByTagName('a')[0].firstChild.nodeValue
    except IndexError:
      revision = tds[2].firstChild.nodeValue
    (revision_info, _) = self.ParseChangelog(component, revision, revision)
    message = revision_info[int(revision)]['message']
    # Return the parsed information.
    revision_url = url_map['revision_url'] % int(revision)
    return (line_content, revision, author, revision_url, message)
| bsd-3-clause |
andrejb/cloudant_bigcouch | couchjs/scons/scons-local-2.0.1/SCons/Options/ListOption.py | 61 | 2003 | #
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Options/ListOption.py 5134 2010/08/16 23:02:40 bdeegan"
__doc__ = """Place-holder for the old SCons.Options module hierarchy
This is for backwards compatibility. The new equivalent is the Variables/
class hierarchy. These will have deprecation warnings added (some day),
and will then be removed entirely (some day).
"""
import SCons.Variables
import SCons.Warnings
# Whether the one-time deprecation warning has already been emitted.
warned = False
def ListOption(*args, **kw):
    """Deprecated alias for SCons.Variables.ListVariable().

    Emits a DeprecatedOptionsWarning on the first call only, then forwards
    all arguments unchanged to the replacement function.
    """
    global warned
    if not warned:
        SCons.Warnings.warn(
            SCons.Warnings.DeprecatedOptionsWarning,
            "The ListOption() function is deprecated; "
            "use the ListVariable() function instead.")
        warned = True
    return SCons.Variables.ListVariable(*args, **kw)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| apache-2.0 |
googlearchive/appengine-flask-skeleton | lib/flask/logging.py | 838 | 1398 | # -*- coding: utf-8 -*-
"""
flask.logging
~~~~~~~~~~~~~
Implements the logging support for Flask.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from logging import getLogger, StreamHandler, Formatter, getLoggerClass, DEBUG
def create_logger(app):
    """Create and configure the logger for the given application.

    The returned logger behaves like a regular Python logger except that,
    while ``app.debug`` is true, an unset level is treated as DEBUG and
    the attached handler actually emits records.  Any handlers previously
    attached to the logger of the same name are removed first.
    """
    base_cls = getLoggerClass()

    class DebugLogger(base_cls):
        def getEffectiveLevel(x):
            # Level 0 normally means "inherit from parent"; in debug mode
            # treat it as DEBUG instead.
            if x.level == 0 and app.debug:
                return DEBUG
            return base_cls.getEffectiveLevel(x)

    class DebugHandler(StreamHandler):
        def emit(x, record):
            # Only emit while the application is in debug mode.
            if app.debug:
                StreamHandler.emit(x, record)

    handler = DebugHandler()
    handler.setLevel(DEBUG)
    handler.setFormatter(Formatter(app.debug_log_format))
    logger = getLogger(app.logger_name)
    # Drop any handlers left over on a pre-existing logger of this name.
    del logger.handlers[:]
    logger.__class__ = DebugLogger
    logger.addHandler(handler)
    return logger
| apache-2.0 |
deepsrijit1105/edx-platform | common/lib/xmodule/xmodule/tests/test_annotator_token.py | 222 | 1060 | """
This test will run for annotator_token.py
"""
import unittest
from xmodule.annotator_token import retrieve_token
class TokenRetriever(unittest.TestCase):
    """
    Tests that, given a username and secret token, retrieve_token encodes
    them correctly into a JWT-style token (header.payload.signature).
    """
    def test_token(self):
        """
        Test for the token generator. Given a username and secret token,
        it should create the properly encoded string of text.
        """
        # Reference token produced with the same username/secret at a fixed
        # time; the payload embeds the issue timestamp, so only the header
        # and the signature's inequality are checked deterministically.
        expected = "eyJhbGciOiAiSFMyNTYiLCAidHlwIjogIkpXVCJ9.eyJpc3N1ZWRBdCI6ICIyMDE0LTAyLTI3VDE3OjAwOjQyLjQwNjQ0MSswOjAwIiwgImNvbnN1bWVyS2V5IjogImZha2Vfc2VjcmV0IiwgInVzZXJJZCI6ICJ1c2VybmFtZSIsICJ0dGwiOiA4NjQwMH0.Dx1PoF-7mqBOOSGDMZ9R_s3oaaLRPnn6CJgGGF2A5CQ"
        response = retrieve_token("username", "fake_secret")
        # because the middle hashes are dependent on time, only the header and footer are checked for secret key
        self.assertEqual(expected.split('.')[0], response.split('.')[0])
        self.assertNotEqual(expected.split('.')[2], response.split('.')[2])
| agpl-3.0 |
indico/indico | indico/modules/events/sessions/operations.py | 1 | 6547 | # This file is part of Indico.
# Copyright (C) 2002 - 2021 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from flask import session
from indico.core import signals
from indico.core.db import db
from indico.modules.events.logs.models.entries import EventLogKind, EventLogRealm
from indico.modules.events.logs.util import make_diff_log
from indico.modules.events.models.events import EventType
from indico.modules.events.sessions import COORDINATOR_PRIV_SETTINGS, COORDINATOR_PRIV_TITLES, logger, session_settings
from indico.modules.events.sessions.models.blocks import SessionBlock
from indico.modules.events.sessions.models.sessions import Session
from indico.util.i18n import orig_string
def create_session(event, data):
    """
    Create a new session with the information passed in the `data` argument.

    :param event: The event the session belongs to.
    :param data: Dict of attributes used to populate the new session.
    :return: The newly created `Session`.
    """
    event_session = Session(event=event)
    event_session.populate_from_dict(data)
    # Flush so the session gets an id before it is referenced in the log.
    db.session.flush()
    event.log(EventLogRealm.management, EventLogKind.positive, 'Sessions',
              f'Session "{event_session.title}" has been created', session.user,
              meta={'session_id': event_session.id})
    logger.info('Session %s created by %s', event_session, session.user)
    return event_session
def create_session_block(session_, data):
    """Create a new block inside the given session and log the creation.

    :param session_: The `Session` the new block belongs to.
    :param data: Dict of attributes used to populate the block.
    :return: The newly created `SessionBlock`.
    """
    block = SessionBlock(session=session_)
    block.populate_from_dict(data)
    # Flush so the block gets an id before it is referenced in the log.
    db.session.flush()
    session_.event.log(EventLogRealm.management, EventLogKind.positive, 'Sessions',
                       'Session block "{}" for session "{}" has been created'
                       .format(block.title, session_.title), session.user,
                       meta={'session_block_id': block.id})
    logger.info('Session block %s created by %s', block, session.user)
    return block
def update_session(event_session, data):
    """Update a session based on the information in the `data`.

    Fires the ``session_updated`` signal and writes an event log entry.

    :param event_session: The `Session` to update.
    :param data: Dict of attributes to apply to the session.
    """
    event_session.populate_from_dict(data)
    db.session.flush()
    signals.event.session_updated.send(event_session)
    event_session.event.log(EventLogRealm.management, EventLogKind.change, 'Sessions',
                            f'Session "{event_session.title}" has been updated', session.user,
                            meta={'session_id': event_session.id})
    logger.info('Session %s modified by %s', event_session, session.user)
def _delete_session_timetable_entries(event_session):
    """Delete every timetable entry associated with a session.

    For each block: drop the timetable entries of its scheduled
    contributions, then the entries nested inside the block entry, and
    finally the block entry itself.
    """
    for session_block in event_session.blocks:
        for contrib in session_block.contributions:
            if contrib.timetable_entry:
                db.session.delete(contrib.timetable_entry)
        block_entry = session_block.timetable_entry
        if block_entry:
            for nested_entry in block_entry.children:
                db.session.delete(nested_entry)
            db.session.delete(block_entry)
def delete_session(event_session):
    """Delete session from the event.

    The session is soft-deleted (``is_deleted`` flag); its contributions
    are detached from it and all related timetable entries are removed.
    Fires the ``session_deleted`` signal and writes an event log entry.
    """
    event_session.is_deleted = True
    # Iterate over a copy since detaching mutates the relationship.
    for contribution in event_session.contributions[:]:
        contribution.session = None
    _delete_session_timetable_entries(event_session)
    signals.event.session_deleted.send(event_session)
    event_session.event.log(EventLogRealm.management, EventLogKind.negative, 'Sessions',
                            f'Session "{event_session.title}" has been deleted', session.user,
                            meta={'session_id': event_session.id})
    logger.info('Session %s deleted by %s', event_session, session.user)
def update_session_block(session_block, data):
    """Update a session block with data passed in the `data` argument.

    A ``start_dt`` key, if present, is applied to the block's timetable
    entry (moving nested entries along) instead of the block itself.
    Fires the ``session_block_updated`` signal and logs the change.
    """
    from indico.modules.events.timetable.operations import update_timetable_entry
    # start_dt is handled separately since it lives on the timetable entry.
    start_dt = data.pop('start_dt', None)
    if start_dt is not None:
        session_block.timetable_entry.move(start_dt)
        update_timetable_entry(session_block.timetable_entry, {'start_dt': start_dt})
    session_block.populate_from_dict(data)
    db.session.flush()
    signals.event.session_block_updated.send(session_block)
    session_block.event.log(EventLogRealm.management, EventLogKind.change, 'Sessions',
                            f'Session block "{session_block.title}" has been updated', session.user,
                            meta={'session_block_id': session_block.id})
    logger.info('Session block %s modified by %s', session_block, session.user)
def delete_session_block(session_block):
    """Delete a session block and clean up everything scheduled inside it.

    In conference events the block's contributions are only unscheduled;
    in other event types they are deleted outright.  If the parent session
    ends up with no blocks (and the event is not a conference), the session
    itself is deleted as well.  Fires ``session_block_deleted`` and logs.
    """
    from indico.modules.events.contributions.operations import delete_contribution
    from indico.modules.events.timetable.operations import delete_timetable_entry
    session_ = session_block.session
    event = session_.event
    # Conferences keep contributions around (just unscheduled); other
    # event types delete them together with the block.
    unschedule_contribs = session_.event.type_ == EventType.conference
    for contribution in session_block.contributions[:]:
        contribution.session_block = None
        if unschedule_contribs:
            delete_timetable_entry(contribution.timetable_entry, log=False)
        else:
            delete_contribution(contribution)
    # Remove nested timetable entries before the block entry itself.
    for entry in session_block.timetable_entry.children[:]:
        delete_timetable_entry(entry, log=False)
    delete_timetable_entry(session_block.timetable_entry, log=False)
    signals.event.session_block_deleted.send(session_block)
    if session_block in session_.blocks:
        session_.blocks.remove(session_block)
    # A block-less session makes no sense outside conferences.
    if not session_.blocks and session_.event.type != 'conference':
        delete_session(session_)
    db.session.flush()
    event.log(EventLogRealm.management, EventLogKind.negative, 'Sessions',
              f'Session block "{session_block.title}" has been deleted', session.user,
              meta={'session_block_id': session_block.id})
    logger.info('Session block %s deleted by %s', session_block, session.user)
def update_session_coordinator_privs(event, data):
    """Update the session coordinator privileges of an event.

    :param event: The event whose coordinator privileges are updated.
    :param data: Mapping of privilege name to its new enabled state.
    """
    changes = {}
    for priv, enabled in data.items():
        setting = COORDINATOR_PRIV_SETTINGS[priv]
        # Skip no-op updates so the change log only records real changes.
        if session_settings.get(event, setting) == enabled:
            continue
        session_settings.set(event, setting, enabled)
        changes[priv] = (not enabled, enabled)
    db.session.flush()
    logger.info('Session coordinator privs of event %r updated with %r by %r', event, data, session.user)
    if changes:
        log_fields = {priv: orig_string(title) for priv, title in COORDINATOR_PRIV_TITLES.items()}
        event.log(EventLogRealm.management, EventLogKind.change, 'Sessions', 'Coordinator privileges updated',
                  session.user, data={'Changes': make_diff_log(changes, log_fields)})
| mit |
oinopion/django | django/template/context.py | 15 | 9233 | import warnings
from contextlib import contextmanager
from copy import copy
from django.utils.deprecation import RemovedInDjango20Warning
# Hard-coded processor for easier use of CSRF protection.
_builtin_context_processors = ('django.template.context_processors.csrf',)
_current_app_undefined = object()
class ContextPopException(Exception):
    """pop() has been called more times than push()"""
class ContextDict(dict):
    """A dict that registers itself on a context's stack of dicts and,
    when used as a context manager, pops the context again on exit."""
    def __init__(self, context, *args, **kwargs):
        dict.__init__(self, *args, **kwargs)
        self.context = context
        context.dicts.append(self)
    def __enter__(self):
        # Enables ``with context.push(...) as d:``.
        return self
    def __exit__(self, *exc_info, **kwargs):
        self.context.pop()
class BaseContext(object):
    """A stack of dicts with dict-like lookup semantics.

    Later (higher) dicts shadow earlier ones; the bottom dict always
    exposes the template builtins True/False/None.
    """
    def __init__(self, dict_=None):
        self._reset_dicts(dict_)
    def _reset_dicts(self, value=None):
        # The bottom of the stack always provides the template builtins.
        self.dicts = [{'True': True, 'False': False, 'None': None}]
        if value is not None:
            self.dicts.append(value)
    def __copy__(self):
        duplicate = copy(super(BaseContext, self))
        # Share the dicts' contents but not the stack list itself.
        duplicate.dicts = self.dicts[:]
        return duplicate
    def __repr__(self):
        return repr(self.dicts)
    def __iter__(self):
        # Most recently pushed dict first.
        return iter(reversed(self.dicts))
    def push(self, *args, **kwargs):
        """Push a new dict onto the stack; usable as a context manager."""
        return ContextDict(self, *args, **kwargs)
    def pop(self):
        """Pop and return the top dict; never removes the builtins dict."""
        if len(self.dicts) == 1:
            raise ContextPopException
        return self.dicts.pop()
    def __setitem__(self, key, value):
        """Set a variable in the current (topmost) context dict."""
        self.dicts[-1][key] = value
    def __getitem__(self, key):
        """Return the value of *key*, searching from the newest dict down."""
        for layer in reversed(self.dicts):
            if key in layer:
                return layer[key]
        raise KeyError(key)
    def __delitem__(self, key):
        """Delete a variable from the current (topmost) context dict."""
        del self.dicts[-1][key]
    def has_key(self, key):
        # Python 2 style membership helper; __contains__ delegates here.
        return any(key in layer for layer in self.dicts)
    def __contains__(self, key):
        return self.has_key(key)
    def get(self, key, otherwise=None):
        """Like __getitem__, but return *otherwise* instead of raising."""
        for layer in reversed(self.dicts):
            if key in layer:
                return layer[key]
        return otherwise
    def setdefault(self, key, default=None):
        try:
            return self[key]
        except KeyError:
            self[key] = default
        return default
    def new(self, values=None):
        """
        Returns a new context with the same properties, but with only the
        values given in 'values' stored.
        """
        fresh = copy(self)
        fresh._reset_dicts(values)
        return fresh
    def flatten(self):
        """Collapse the whole stack into a single dict (later dicts win)."""
        merged = {}
        for layer in self.dicts:
            merged.update(layer)
        return merged
    def __eq__(self, other):
        """Two contexts are equal when their flattened contents match."""
        if not isinstance(other, BaseContext):
            # Not comparable to non-contexts.
            return False
        # Flatten first: equal content may be split differently across
        # the two stacks.
        return self.flatten() == other.flatten()
class Context(BaseContext):
    """A stack container for variable context."""
    def __init__(self, dict_=None, autoescape=True,
                 current_app=_current_app_undefined,
                 use_l10n=None, use_tz=None):
        if current_app is not _current_app_undefined:
            warnings.warn(
                "The current_app argument of Context is deprecated. Use "
                "RequestContext and set the current_app attribute of its "
                "request instead.", RemovedInDjango20Warning, stacklevel=2)
        self.autoescape = autoescape
        self._current_app = current_app
        self.use_l10n = use_l10n
        self.use_tz = use_tz
        self.template_name = "unknown"
        self.render_context = RenderContext()
        # Bound to the original template (not extended or included ones)
        # while rendering; see bind_template().
        self.template = None
        super(Context, self).__init__(dict_)
    @property
    def current_app(self):
        """The deprecated current_app value, or None if it was never set."""
        if self._current_app is _current_app_undefined:
            return None
        return self._current_app
    @contextmanager
    def bind_template(self, template):
        """Temporarily attach *template* to this context while rendering."""
        if self.template is not None:
            raise RuntimeError("Context is already bound to a template")
        self.template = template
        try:
            yield
        finally:
            self.template = None
    def __copy__(self):
        clone = super(Context, self).__copy__()
        clone.render_context = copy(self.render_context)
        return clone
    def update(self, other_dict):
        """Push other_dict onto the stack of dictionaries in the Context."""
        if not hasattr(other_dict, '__getitem__'):
            raise TypeError('other_dict must be a mapping (dictionary-like) object.')
        return ContextDict(self, other_dict)
class RenderContext(BaseContext):
    """
    A stack container for storing Template state.

    RenderContext gives template Nodes a safe place to keep state between
    invocations of their `render` method.  Unlike a regular Context, every
    lookup only consults the *top* dict of the stack: a fresh scope is
    pushed before each template is rendered, so values stored here are
    local to that template and never leak into the rendering of others.
    """
    def __iter__(self):
        return iter(self.dicts[-1])
    def has_key(self, key):
        return key in self.dicts[-1]
    def get(self, key, otherwise=None):
        return self.dicts[-1].get(key, otherwise)
    def __getitem__(self, key):
        return self.dicts[-1][key]
class RequestContext(Context):
    """
    This subclass of template.Context automatically populates itself using
    the processors defined in the engine's configuration.
    Additional processors can be specified as a list of callables
    using the "processors" keyword argument.
    """
    def __init__(self, request, dict_=None, processors=None,
                 current_app=_current_app_undefined,
                 use_l10n=None, use_tz=None):
        # current_app isn't passed here to avoid triggering the deprecation
        # warning in Context.__init__.
        super(RequestContext, self).__init__(
            dict_, use_l10n=use_l10n, use_tz=use_tz)
        if current_app is not _current_app_undefined:
            warnings.warn(
                "The current_app argument of RequestContext is deprecated. "
                "Set the current_app attribute of its request instead.",
                RemovedInDjango20Warning, stacklevel=2)
        self._current_app = current_app
        self.request = request
        self._processors = () if processors is None else tuple(processors)
        # Remember the stack index reserved for context-processor output.
        # It is captured *before* the two update() calls below, so
        # bind_template() can swap processor results in and out at this
        # fixed position without disturbing later modifications.
        self._processors_index = len(self.dicts)
        # placeholder for context processors output
        self.update({})
        # empty dict for any new modifications
        # (so that context processors don't overwrite them)
        self.update({})
    @contextmanager
    def bind_template(self, template):
        # Fill the reserved dict with processor output for the duration of
        # the render, then empty it again on exit.
        if self.template is not None:
            raise RuntimeError("Context is already bound to a template")
        self.template = template
        # Set context processors according to the template engine's settings.
        processors = (template.engine.template_context_processors +
                      self._processors)
        updates = {}
        for processor in processors:
            updates.update(processor(self.request))
        self.dicts[self._processors_index] = updates
        try:
            yield
        finally:
            self.template = None
            # Unset context processors.
            self.dicts[self._processors_index] = {}
    def new(self, values=None):
        new_context = super(RequestContext, self).new(values)
        # This is for backwards-compatibility: RequestContexts created via
        # Context.new don't include values from context processors.
        if hasattr(new_context, '_processors_index'):
            del new_context._processors_index
        return new_context
return new_context
def make_context(context, request=None):
    """
    Create a suitable Context from a plain dict and optionally an HttpRequest.
    """
    if request is None:
        return Context(context)
    # Push the plain dict on top of the RequestContext so its values
    # override those contributed by context processors.
    result = RequestContext(request)
    if context:
        result.push(context)
    return result
| bsd-3-clause |
piqoni/onadata | onadata/apps/logger/migrations/0029_auto__chg_field_attachment_mimetype__add_field_xform_encrypted__add_fi.py | 13 | 10462 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    # Auto-generated South schema migration: widens Attachment.mimetype from
    # 20 to 50 characters and adds the XForm.surveys_with_geopoints flag.
    # The ``models`` dict below is a frozen snapshot of the ORM at generation
    # time — applied migrations must not be edited by hand.

    def forwards(self, orm):
        # Changing field 'Attachment.mimetype'
        db.alter_column(u'odk_logger_attachment', 'mimetype', self.gf('django.db.models.fields.CharField')(max_length=50))
        # Adding field 'XForm.surveys_with_geopoints'
        db.add_column(u'odk_logger_xform', 'surveys_with_geopoints',
                      self.gf('django.db.models.fields.BooleanField')(default=False),
                      keep_default=False)

    def backwards(self, orm):
        # Changing field 'Attachment.mimetype'
        db.alter_column(u'odk_logger_attachment', 'mimetype', self.gf('django.db.models.fields.CharField')(max_length=20))
        # Deleting field 'XForm.surveys_with_geopoints'
        db.delete_column(u'odk_logger_xform', 'surveys_with_geopoints')

    # Frozen ORM definitions used by forwards()/backwards() via ``orm``.
    models = {
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'odk_logger.attachment': {
            'Meta': {'object_name': 'Attachment'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'instance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attachments'", 'to': "orm['odk_logger.Instance']"}),
            'media_file': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
            'mimetype': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '50', 'blank': 'True'})
        },
        'odk_logger.instance': {
            'Meta': {'object_name': 'Instance'},
            'date': ('django.db.models.fields.DateField', [], {'null': 'True'}),
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'date_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'deleted_at': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'start_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
            'status': ('django.db.models.fields.CharField', [], {'default': "u'submitted_via_web'", 'max_length': '20'}),
            'survey_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['odk_logger.SurveyType']"}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'surveys'", 'null': 'True', 'to': u"orm['auth.User']"}),
            'uuid': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '249'}),
            'xform': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'surveys'", 'null': 'True', 'to': "orm['odk_logger.XForm']"}),
            'xml': ('django.db.models.fields.TextField', [], {})
        },
        'odk_logger.instancehistory': {
            'Meta': {'object_name': 'InstanceHistory'},
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'date_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'uuid': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '249'}),
            'xform_instance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'submission_history'", 'to': "orm['odk_logger.Instance']"}),
            'xml': ('django.db.models.fields.TextField', [], {})
        },
        'odk_logger.surveytype': {
            'Meta': {'object_name': 'SurveyType'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'slug': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'})
        },
        'odk_logger.xform': {
            'Meta': {'ordering': "('id_string',)", 'unique_together': "(('user', 'id_string'), ('user', 'sms_id_string'))", 'object_name': 'XForm'},
            'allows_sms': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'bamboo_dataset': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '60'}),
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'date_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'default': "u''", 'null': 'True'}),
            'downloadable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'encrypted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'has_start_time': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'id_string': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
            'is_crowd_form': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'json': ('django.db.models.fields.TextField', [], {'default': "u''"}),
            'shared': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'shared_data': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'sms_id_string': ('django.db.models.fields.SlugField', [], {'default': "''", 'max_length': '50'}),
            'surveys_with_geopoints': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'xforms'", 'null': 'True', 'to': u"orm['auth.User']"}),
            'uuid': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '32'}),
            'xls': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True'}),
            'xml': ('django.db.models.fields.TextField', [], {})
        },
        u'taggit.tag': {
            'Meta': {'object_name': 'Tag'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
        },
        u'taggit.taggeditem': {
            'Meta': {'object_name': 'TaggedItem'},
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_tagged_items'", 'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
            'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_items'", 'to': u"orm['taggit.Tag']"})
        }
    }

    complete_apps = ['logger']
| bsd-2-clause |
town-hall-pinball/project-omega | tests/machine/test_playfield.py | 1 | 2243 | # Copyright (c) 2014 - 2016 townhallpinball.org
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
import unittest
from pin.lib import p
from tests import fixtures
import logging
log = logging.getLogger("pin")
class TestPlayfield(unittest.TestCase):
    """Exercises the playfield mode's live/dead ball tracking against the
    simulator mode."""

    def setUp(self):
        fixtures.reset()
        self.playfield = p.modes["playfield"]
        self.playfield.enable(children=True)
        p.modes["simulator"].enable()

    def _launch_ball(self):
        # Shared launch sequence, previously duplicated verbatim in
        # test_live and test_dead: eject a ball from the trough, press the
        # launch button, and advance simulated time so the ball reaches
        # the playfield.
        p.modes["trough"].eject()
        p.now = 1
        fixtures.loop()
        p.switches["ball_launch_button"].activate()
        for tick in (2, 3, 4):
            p.now = tick
            fixtures.loop()

    def test_live(self):
        # A launched ball should register as live on the playfield.
        self._launch_ball()
        self.assertTrue(self.playfield.live)

    def test_dead(self):
        # Draining the ball back into the trough should mark it dead.
        self._launch_ball()
        self.assertTrue(self.playfield.live)
        p.switches["trough_4"].activate()
        fixtures.loop()
        p.now = 5
        fixtures.loop()
        self.assertFalse(self.playfield.live)
| mit |
umlfri/umlfri2 | umlfri2/qtgui/base/contextmenu.py | 1 | 1680 | from functools import partial
from PyQt5.QtGui import QIcon, QKeySequence
from PyQt5.QtWidgets import QMenu, QAction
from umlfri2.application import Application
from umlfri2.qtgui.base import image_loader
class ContextMenu(QMenu):
    """Base class for context menus, with helper factories for the common
    kinds of menu entries.  Each helper adds the entry to *sub_menu* when
    given, otherwise to this menu, and disables it when no action is
    supplied."""

    def _add_menu_item(self, icon, label, shortcut, action=None, sub_menu=None):
        owner = sub_menu or self
        item = QAction(label, owner)
        if shortcut is not None:
            item.setShortcut(QKeySequence(shortcut))
        # Accept either a theme-icon name or a ready QIcon instance.
        if isinstance(icon, str):
            item.setIcon(QIcon.fromTheme(icon))
        elif isinstance(icon, QIcon):
            item.setIcon(icon)
        if action is None:
            item.setEnabled(False)
        else:
            item.triggered.connect(action)
        owner.addAction(item)
        return item

    def _add_type_menu_item(self, type, action=None, sub_menu=None, format="{0}"):
        owner = sub_menu or self
        # Label is the metamodel translation of the type, run through *format*.
        translation = type.metamodel.get_translation(Application().language.current_language)
        item = QAction(format.format(translation.translate(type)), owner)
        item.setIcon(image_loader.load_icon(type.icon))
        if action is None:
            item.setEnabled(False)
        else:
            # Bind the type as the first argument of the handler.
            item.triggered.connect(partial(action, type))
        owner.addAction(item)
        return item

    def _add_sub_menu_item(self, label, enabled=True, sub_menu=None):
        owner = sub_menu or self
        item = QAction(label, owner)
        menu = QMenu()
        item.setMenu(menu)
        item.setEnabled(enabled)
        owner.addAction(item)
        # Return the nested menu so callers can populate it.
        return menu
| gpl-3.0 |
gmarkall/numba | numba/pycc/__init__.py | 7 | 3612 | # -*- coding: utf-8 -*-
import os
import logging
import subprocess
import tempfile
import sys
# Public API
from .cc import CC
from .decorators import export, exportmany
def get_ending(args):
    """Return the output-file extension implied by the parsed arguments.

    Bug fix: ``find_pyext_ending``/``find_shared_ending`` were previously
    imported only inside ``main()``'s local scope, so calling this function
    raised NameError.  Import them here, lazily, in the branches that need
    them.
    """
    if args.llvm:
        return ".bc"
    elif args.olibs:
        return ".o"
    elif args.python:
        from .platform import find_pyext_ending
        return find_pyext_ending()
    else:
        from .platform import find_shared_ending
        return find_shared_ending()
def main(args=None):
    """Command-line entry point for the deprecated ``pycc`` script.

    Parses *args* (or ``sys.argv`` when None) and compiles the input files
    to LLVM bitcode (``--llvm``), object files (``-c``), or a shared
    library / Python extension module, depending on the flags given.
    """
    import argparse
    from .compiler import ModuleCompiler
    from .platform import Toolchain, find_shared_ending, find_pyext_ending
    from numba.pycc import decorators

    parser = argparse.ArgumentParser(
        description="DEPRECATED - Compile Python modules to a single shared library")
    parser.add_argument("inputs", nargs='+', help="Input file(s)")
    parser.add_argument("-o", nargs=1, dest="output",
                        help="Output file (default is name of first input -- with new ending)")
    group = parser.add_mutually_exclusive_group()
    group.add_argument("-c", action="store_true", dest="olibs",
                       help="Create object file from each input instead of shared-library")
    group.add_argument("--llvm", action="store_true",
                       help="Emit llvm instead of native code")
    parser.add_argument('--header', action="store_true",
                        help="Emit C header file with function signatures")
    parser.add_argument('--python', action='store_true',
                        help='Emit additionally generated Python wrapper and '
                             'extension module code in output')
    parser.add_argument('-d', '--debug', action='store_true',
                        help='Print extra debug information')

    args = parser.parse_args(args)

    logger = logging.getLogger(__name__)
    if args.debug:
        logger.setLevel(logging.DEBUG)
    # logging.Logger.warn() is deprecated; warning() is the documented name.
    logger.warning("The 'pycc' script is DEPRECATED; "
                   "please use the numba.pycc.CC API instead")

    # Derive the module name and output path from -o, or from the first
    # input file plus the extension implied by the flags.
    if args.output:
        args.output = args.output[0]
        output_base = os.path.split(args.output)[1]
        module_name = os.path.splitext(output_base)[0]
    else:
        input_base = os.path.splitext(args.inputs[0])[0]
        module_name = os.path.split(input_base)[1]
        args.output = input_base + get_ending(args)
    logger.debug('args.output --> %s', args.output)

    if args.header:
        print('ERROR: pycc --header has been disabled in this release due to a known issue')
        sys.exit(1)

    logger.debug('inputs --> %s', args.inputs)
    decorators.process_input_files(args.inputs)
    compiler = ModuleCompiler(decorators.export_registry, module_name=module_name)

    if args.llvm:
        logger.debug('emit llvm')
        compiler.write_llvm_bitcode(args.output, wrap=args.python)
    elif args.olibs:
        logger.debug('emit object file')
        compiler.write_native_object(args.output, wrap=args.python)
    else:
        logger.debug('emit shared library')
        logger.debug('write to temporary object file %s', tempfile.gettempdir())

        toolchain = Toolchain()
        toolchain.debug = args.debug
        temp_obj = (tempfile.gettempdir() + os.sep +
                    os.path.basename(args.output) + '.o')
        compiler.write_native_object(temp_obj, wrap=args.python)
        # Compute the Python link libraries once and reuse them (the
        # original assigned `libraries` and then recomputed the same list
        # in the link_shared() call, ignoring the variable).
        libraries = toolchain.get_python_libraries()
        toolchain.link_shared(args.output, [temp_obj],
                              libraries,
                              toolchain.get_python_library_dirs(),
                              export_symbols=compiler.dll_exports)
        os.remove(temp_obj)
| bsd-2-clause |
nhicher/ansible | lib/ansible/modules/cloud/google/gcp_compute_ssl_certificate_facts.py | 8 | 6086 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2017 Google
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# ----------------------------------------------------------------------------
#
# *** AUTO GENERATED CODE *** AUTO GENERATED CODE ***
#
# ----------------------------------------------------------------------------
#
# This file is automatically generated by Magic Modules and manual
# changes will be clobbered when the file is regenerated.
#
# Please read more about how to change this file at
# https://www.github.com/GoogleCloudPlatform/magic-modules
#
# ----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function
__metaclass__ = type
################################################################################
# Documentation
################################################################################
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ["preview"],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: gcp_compute_ssl_certificate_facts
description:
- Gather facts for GCP SslCertificate
short_description: Gather facts for GCP SslCertificate
version_added: 2.7
author: Google Inc. (@googlecloudplatform)
requirements:
- python >= 2.6
- requests >= 2.18.4
- google-auth >= 1.3.0
options:
filters:
description:
A list of filter value pairs. Available filters are listed here
U(https://cloud.google.com/sdk/gcloud/reference/topic/filters).
Each additional filter in the list will act be added as an AND condition
(filter1 and filter2)
extends_documentation_fragment: gcp
'''
EXAMPLES = '''
- name: a ssl certificate facts
gcp_compute_ssl_certificate_facts:
filters:
- name = test_object
project: test_project
auth_kind: serviceaccount
service_account_file: "/tmp/auth.pem"
'''
RETURN = '''
items:
description: List of items
returned: always
type: complex
contains:
certificate:
description:
- The certificate in PEM format.
- The certificate chain must be no greater than 5 certs long.
- The chain must include at least one intermediate cert.
returned: success
type: str
creationTimestamp:
description:
- Creation timestamp in RFC3339 text format.
returned: success
type: str
description:
description:
- An optional description of this resource.
returned: success
type: str
id:
description:
- The unique identifier for the resource.
returned: success
type: int
name:
description:
- Name of the resource. Provided by the client when the resource is created. The name
must be 1-63 characters long, and comply with RFC1035. Specifically, the name must
be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?`
which means the first character must be a lowercase letter, and all following characters
must be a dash, lowercase letter, or digit, except the last character, which cannot
be a dash.
returned: success
type: str
privateKey:
description:
- The write-only private key in PEM format.
returned: success
type: str
'''
################################################################################
# Imports
################################################################################
from ansible.module_utils.gcp_utils import navigate_hash, GcpSession, GcpModule, GcpRequest
import json
################################################################################
# Main
################################################################################
def main():
    """Ansible module entry point: build the module object, fetch the
    matching SSL certificates, and exit with the result list."""
    module = GcpModule(
        argument_spec=dict(
            filters=dict(type='list', elements='str')
        )
    )

    if 'scopes' not in module.params:
        # Default to the compute scope when the user supplied none.
        module.params['scopes'] = ['https://www.googleapis.com/auth/compute']

    items = fetch_list(module, collection(module), query_options(module.params['filters']))
    # The API wraps the certificate list under an 'items' key; normalize a
    # missing/empty response to an empty list.
    if items.get('items'):
        items = items.get('items')
    else:
        items = []
    return_value = {
        'items': items
    }
    module.exit_json(**return_value)
def collection(module):
    """Return the sslCertificates collection URL for the module's project."""
    base_url = "https://www.googleapis.com/compute/v1/projects/{project}/global/sslCertificates"
    return base_url.format(**module.params)
def fetch_list(module, link, query):
    """Issue an authenticated GET against ``link``, filtering server-side
    with the computed ``query`` filter string, and return the decoded
    response dict (or fail the module on API errors)."""
    auth = GcpSession(module, 'compute')
    response = auth.get(link, params={'filter': query})
    return return_if_object(module, response)
def query_options(filters):
    """Turn a list of filter expressions into a single GCP filter string.

    An empty/absent list yields ''.  A single filter is passed through
    unchanged.  Multiple filters are each wrapped in parentheses (unless
    already fully parenthesized) and joined with spaces, which the GCP
    filter syntax treats as a logical AND.
    """
    if not filters:
        return ''

    if len(filters) == 1:
        return filters[0]

    queries = []
    for f in filters:
        # For multiple queries, every query must be parenthesized.  The
        # previous test (f[0] != '(' and f[-1] != ')') skipped wrapping
        # half-parenthesized inputs like "(a"; require both delimiters.
        # (Also dropped the no-op ''.join(f) on an already-string f.)
        if f.startswith('(') and f.endswith(')'):
            queries.append(f)
        else:
            queries.append("(%s)" % f)
    return ' '.join(queries)
def return_if_object(module, response):
    """Decode ``response`` JSON into a dict, failing the module on API or
    decoding errors; return None for 404/204 responses."""
    # If not found, return nothing.
    if response.status_code == 404:
        return None

    # If no content, return nothing.
    if response.status_code == 204:
        return None

    try:
        module.raise_for_status(response)
        result = response.json()
    except getattr(json.decoder, 'JSONDecodeError', ValueError) as inst:
        # json.decoder.JSONDecodeError only exists on Python 3; fall back
        # to ValueError (its base class) on Python 2.
        module.fail_json(msg="Invalid JSON response with error: %s" % inst)

    # Surface API-level errors embedded in an otherwise-successful body.
    if navigate_hash(result, ['error', 'errors']):
        module.fail_json(msg=navigate_hash(result, ['error', 'errors']))

    return result
# Standard Ansible module boilerplate: run the entry point when executed.
if __name__ == "__main__":
    main()
| gpl-3.0 |
piotrmaslanka/satella | tests/test_coding/test_monitor.py | 1 | 5062 | import unittest
from queue import Queue
from threading import Thread
from time import sleep
from satella.coding import Monitor
class MonitorTest(unittest.TestCase):
    """Thread-based tests for satella.coding.Monitor.

    Each test defines its own small Monitor subclass and worker thread so
    the scenarios stay isolated; the sleep() calls give the worker threads
    time to (not) make progress while locks are held or released.
    """

    def test_synchronize_on(self):
        # A method decorated with Monitor.synchronize_on(obj) must block
        # while obj's monitor is held by another thread.
        class TestedMasterClass(Monitor):
            def __init__(self):
                self.value = 0
                super().__init__()

            def get_locking_class(self):
                class LockingClass:
                    @Monitor.synchronize_on(self)
                    def get_value(self2):
                        self.value += 1
                return LockingClass()

        msc = TestedMasterClass()
        lc = msc.get_locking_class()

        class TesterThread(Thread):
            def run(self):
                lc.get_value()

        with Monitor.acquire(msc):
            TesterThread().start()
            sleep(0.1)
            # Worker is blocked on msc's monitor: no increment yet.
            self.assertEqual(msc.value, 0)
            with Monitor.release(msc):
                sleep(0.1)
                # Monitor released: worker ran and incremented.
                self.assertEqual(msc.value, 1)

    def test_release_contextmanager(self):
        # Monitor.release inside Monitor.acquire lets a worker enter the
        # synchronized method while we still "own" the outer acquire block.
        class TestedClass(Monitor):
            def __init__(self, cqueue):
                self.cqueue = cqueue
                Monitor.__init__(self)

            @Monitor.synchronized
            def execute(self):
                self.cqueue.put(1)
                sleep(1)
                self.cqueue.get()

        class TesterThread(Thread):
            def __init__(self, tc):
                self.tc = tc
                Thread.__init__(self)

            def run(self):
                self.tc.execute()

        cq = Queue()
        cq.put(1)
        tc = TestedClass(cq)
        tt = TesterThread(tc)

        with Monitor.acquire(tc):
            with Monitor.release(tc):
                tt.start()
                sleep(0.4)
                # Worker got in and put() a second item.
                self.assertEqual(cq.qsize(), 2)

    def test_release_contextmanager_syntax(self):
        # Same scenario as above, but entering the monitor via the
        # ``with tc:`` syntax instead of Monitor.acquire(tc).
        class TestedClass(Monitor):
            def __init__(self, cqueue):
                self.cqueue = cqueue
                Monitor.__init__(self)

            @Monitor.synchronized
            def execute(self):
                self.cqueue.put(1)
                sleep(1)
                self.cqueue.get()

        class TesterThread(Thread):
            def __init__(self, tc):
                self.tc = tc
                Thread.__init__(self)

            def run(self):
                self.tc.execute()

        cq = Queue()
        cq.put(1)
        tc = TestedClass(cq)
        tt = TesterThread(tc)

        with tc:
            with Monitor.release(tc):
                tt.start()
                sleep(0.4)
                self.assertEqual(cq.qsize(), 2)

    def test_acquire_contextmanager(self):
        # While Monitor.acquire is held, the worker's synchronized execute()
        # must not run, so the queue keeps its single initial item.
        class TestedClass(Monitor):
            def __init__(self, cqueue):
                self.cqueue = cqueue
                Monitor.__init__(self)

            @Monitor.synchronized
            def execute(self):
                self.cqueue.put(1)
                sleep(1)
                self.cqueue.get()

        class TesterThread(Thread):
            def __init__(self, tc):
                self.tc = tc
                Thread.__init__(self)

            def run(self):
                self.tc.execute()

        cq = Queue()
        cq.put(1)
        tc = TestedClass(cq)
        tt = TesterThread(tc)

        with Monitor.acquire(tc):
            tt.start()
            sleep(0.4)
            self.assertEqual(cq.qsize(), 1)

    def test_monitoring(self):
        # Two workers calling a synchronized method must never overlap, so
        # the queue size never reaches 2 while they run.
        class TestedClass(Monitor):
            def __init__(self, cqueue):
                self.cqueue = cqueue
                Monitor.__init__(self)

            @Monitor.synchronized
            def execute(self):
                self.cqueue.put(1)
                sleep(1)
                self.cqueue.get()

        class TesterThread(Thread):
            def __init__(self, tc):
                self.tc = tc
                Thread.__init__(self)

            def run(self):
                self.tc.execute()

        q = Queue()
        tc = TestedClass(q)
        a, b = TesterThread(tc), TesterThread(tc)
        a.start(), b.start()

        while a.is_alive() or b.is_alive():
            sleep(0.1)
            self.assertNotEqual(q.qsize(), 2)

    def test_monitoring_synchronize_on_attribute(self):
        # Same mutual-exclusion guarantee, but via synchronize_on_attribute
        # pointing at a Monitor stored as an instance attribute instead of
        # subclassing Monitor.
        class TestedClass:
            def __init__(self, cqueue):
                self.cqueue = cqueue
                self.monitor = Monitor()

            @Monitor.synchronize_on_attribute('monitor')
            def execute(self):
                self.cqueue.put(1)
                sleep(1)
                self.cqueue.get()

        class TesterThread(Thread):
            def __init__(self, tc):
                self.tc = tc
                Thread.__init__(self)

            def run(self):
                self.tc.execute()

        q = Queue()
        tc = TestedClass(q)
        a, b = TesterThread(tc), TesterThread(tc)
        a.start(), b.start()

        while a.is_alive() or b.is_alive():
            sleep(0.1)
            self.assertNotEqual(q.qsize(), 2)
JJMinton/conferenceTimer | file_change_handler.py | 1 | 2937 | import path
import asyncio
from datetime import datetime, timedelta
from watchdog.observers import Observer
from watchdog.events import PatternMatchingEventHandler
from read_schedule import read_schedule
import config
from config import logging
class FileChangeHandler(PatternMatchingEventHandler):
    """Watchdog handler that (re)starts the schedule coroutine whenever the
    watched schedule file is created or modified.

    The handler owns at most one asyncio task at a time (``async_task``);
    each file change cancels the previous task and schedules a fresh run of
    ``controller_function`` on ``self.loop``.
    """

    def __init__(self, watch_file, controller_function, args=None, loop=None):
        """
        :param watch_file: path of the schedule file to watch
        :param controller_function: coroutine function invoked as
            ``controller_function(schedule, loop, *args)``
        :param args: extra positional arguments for ``controller_function``
            (fixes the previous mutable default ``args=[]``)
        :param loop: asyncio event loop; a new SelectorEventLoop is created
            when None
        """
        PatternMatchingEventHandler.__init__(self, patterns=[watch_file])
        self.controller_function = controller_function
        self.args = [] if args is None else args
        self.loop = asyncio.SelectorEventLoop() if loop is None else loop
        self.async_task = None  # currently scheduled schedule-runner task
        self.watch_file = watch_file

    def process(self, schedule_file_name=None):
        """Parse ``schedule_file_name`` (default: the watched file) and
        replace any running schedule task with a fresh one."""
        if schedule_file_name is None:
            schedule_file_name = self.watch_file
        # Typo fixed in the log message ("FileChangeHnadler").
        logging.debug('FileChangeHandler.process: Processing {}'.format(schedule_file_name))
        schedule = read_schedule(schedule_file_name)

        # Stop the current run_schedule task, if any.  The original code
        # also built an asyncio.wait_for(...) coroutine here without ever
        # awaiting it, which did nothing except trigger a "never awaited"
        # warning, so it has been dropped; the cancellation is delivered
        # the next time the loop runs.
        if self.async_task is not None:
            logging.debug('Stopping previous async_task')
            self.async_task.cancel()
            self.async_task = None

        # Start a new run_schedule task.
        logging.debug('FileChangeHandler.process: Starting new async_task')
        self.async_task = asyncio.ensure_future(
            self.controller_function(schedule, self.loop, *self.args),
            loop=self.loop)
        logging.debug('FileChangeHandler.process: Return from processing')
        # Return immediately; the task runs on the event loop.

    def on_created(self, event):
        """Watchdog callback: the schedule file was created."""
        logging.info('FileChangeHandler.on_created: File creation detected')
        self.process(event.src_path)

    def on_modified(self, event):
        """Watchdog callback: the schedule file was modified."""
        logging.info('FileChangeHandler.on_modified: File change detected')
        self.process(event.src_path)
if __name__=="__main__":
    # Manual harness: watch the schedule file and keep the event loop
    # ticking until interrupted.
    if config.LIGHT_DEBUG:
        from light_controls import debug
        debug()

    from schedule_handler import Schedule_Runner
    schedule_runner = Schedule_Runner()
    loop = schedule_runner.controller.loop
    file_change_handler = FileChangeHandler(config.SCHEDULE_FILE, schedule_runner.run_schedule, loop=loop)

    obs = Observer();
    # Watch the directory containing the schedule file for matching events.
    obs.schedule(file_change_handler, path.Path(config.SCHEDULE_FILE).abspath().dirname())  # Define what file to watch and how
    obs.start()  # start watching file

    file_change_handler.process()  # start schedule running

    try:
        while True:
            # This does nothing except step through the loops (why is this necessary?)
            file_change_handler.loop.run_until_complete(asyncio.ensure_future(asyncio.sleep(0.1, loop=file_change_handler.loop), loop=file_change_handler.loop))  # arbitrary sleep time here I think. Could it be forever?
    except KeyboardInterrupt:
        obs.stop();
    # finally:
    #     obs.join();
| gpl-3.0 |
FNST-OpenStack/cloudkitty | cloudkitty/tenant_fetcher/__init__.py | 1 | 1212 | # -*- coding: utf-8 -*-
# !/usr/bin/env python
# Copyright 2015 Objectif Libre
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Stéphane Albert
#
import abc
from oslo.config import cfg
import six
# Configuration options controlling which backend driver supplies the list
# of tenants to rate; registered under the [tenant_fetcher] config section.
fetchers_opts = [
    cfg.StrOpt('backend',
               default='keystone',
               help='Driver used to fetch tenant list.')
]

cfg.CONF.register_opts(fetchers_opts, 'tenant_fetcher')
@six.add_metaclass(abc.ABCMeta)
class BaseFetcher(object):
    """CloudKitty tenants fetcher.

    Provides Cloudkitty integration with a backend announcing ratable tenants.
    Concrete drivers (e.g. the keystone backend) implement get_tenants().
    """

    @abc.abstractmethod
    def get_tenants(self):
        """Get a list of tenants to rate."""
| apache-2.0 |
knehez/edx-platform | lms/djangoapps/instructor_task/api.py | 28 | 18655 | """
API for submitting background tasks by an instructor for a course.
Also includes methods for getting information about tasks that have
already been submitted, filtered either by running state or input
arguments.
"""
import hashlib
from celery.states import READY_STATES
from xmodule.modulestore.django import modulestore
from instructor_task.models import InstructorTask
from instructor_task.tasks import (
rescore_problem,
reset_problem_attempts,
delete_problem_state,
send_bulk_course_email,
calculate_grades_csv,
calculate_problem_grade_report,
calculate_students_features_csv,
cohort_students,
enrollment_report_features_csv,
calculate_may_enroll_csv,
exec_summary_report_csv,
generate_certificates,
)
from instructor_task.api_helper import (
check_arguments_for_rescoring,
encode_problem_and_student_input,
encode_entrance_exam_and_student_input,
check_entrance_exam_problems_for_rescoring,
submit_task,
)
from bulk_email.models import CourseEmail
def get_running_instructor_tasks(course_id):
    """
    Returns a query of InstructorTask objects of running tasks for a given course.

    Used to generate a list of tasks to display on the instructor dashboard.
    """
    instructor_tasks = InstructorTask.objects.filter(course_id=course_id)
    # Exclude states that are "ready" (i.e. not "running", e.g. failure,
    # success, revoked) with a single __in lookup instead of chaining one
    # exclude() per state.
    instructor_tasks = instructor_tasks.exclude(task_state__in=READY_STATES)
    return instructor_tasks.order_by('-id')
def get_instructor_task_history(course_id, usage_key=None, student=None, task_type=None):
    """
    Return a queryset of historical InstructorTask entries for ``course_id``,
    newest first, optionally narrowed to a particular problem/student
    combination and/or a particular task type.
    """
    query = InstructorTask.objects.filter(course_id=course_id)
    if not (usage_key is None and student is None):
        # Problem and/or student filters are encoded into the task_key.
        _, task_key = encode_problem_and_student_input(usage_key, student)
        query = query.filter(task_key=task_key)
    if task_type is not None:
        query = query.filter(task_type=task_type)
    return query.order_by('-id')
def get_entrance_exam_instructor_task_history(course_id, usage_key=None, student=None):  # pylint: disable=invalid-name
    """
    Return a queryset of historical InstructorTask entries for ``course_id``,
    newest first, optionally narrowed to a particular entrance exam and/or
    student.
    """
    query = InstructorTask.objects.filter(course_id=course_id)
    if not (usage_key is None and student is None):
        # Exam and/or student filters are encoded into the task_key.
        _, task_key = encode_entrance_exam_and_student_input(usage_key, student)
        query = query.filter(task_key=task_key)
    return query.order_by('-id')
# Disabling invalid-name because this fn name is longer than 30 chars.
def submit_rescore_problem_for_student(request, usage_key, student):  # pylint: disable=invalid-name
    """
    Request that a single problem be rescored for one student, as a
    background task.

    ``usage_key`` locates the problem (i4x-type notation) and ``student``
    is a User object.  Raises ItemNotFoundException if the problem doesn't
    exist, AlreadyRunningError if it is already being rescored for this
    student, and NotImplementedError if the problem doesn't support
    rescoring.

    The InstructorTask entry is committed before this returns, so later
    database operations happen in a separate transaction.
    """
    # Argument validation errors propagate to the caller.
    check_arguments_for_rescoring(usage_key)

    task_input, task_key = encode_problem_and_student_input(usage_key, student)
    return submit_task(
        request, 'rescore_problem', rescore_problem,
        usage_key.course_key, task_input, task_key
    )
def submit_rescore_problem_for_all_students(request, usage_key):  # pylint: disable=invalid-name
    """
    Queue a background task that rescores one problem for every student who
    has accessed it and checked an answer.

    `usage_key` locates the problem (i4x-type notation).  Raises
    ItemNotFoundException if the problem doesn't exist, AlreadyRunningError
    if a rescore is already in flight, and NotImplementedError if the
    problem doesn't support rescoring.

    Submitting commits the InstructorTask entry: under TransactionMiddleware's
    "commit-on-success" handling this also commits any pending transaction,
    so later database work runs in a separate transaction.
    """
    # Validate the problem supports rescoring; exceptions propagate to caller.
    check_arguments_for_rescoring(usage_key)

    # No student argument: the encoded input covers all students.
    task_input, task_key = encode_problem_and_student_input(usage_key)
    return submit_task(
        request, 'rescore_problem', rescore_problem,
        usage_key.course_key, task_input, task_key)
def submit_rescore_entrance_exam_for_student(request, usage_key, student=None):  # pylint: disable=invalid-name
    """
    Queue a background task that rescores entrance-exam problems.

    Problems are rescored for `student`, or — when `student` is None — for
    every student who has accessed the entrance exam.  `usage_key` must be a
    :class:`Location` for the entrance exam section.  Raises
    ItemNotFoundError if no entrance exam exists at `usage_key`,
    AlreadyRunningError if a rescore is already in flight, and
    NotImplementedError if a problem doesn't support rescoring.

    Submitting commits the InstructorTask entry: under TransactionMiddleware's
    "commit-on-success" handling this also commits any pending transaction,
    so later database work runs in a separate transaction.
    """
    # Validate the exam's problems support rescoring; exceptions propagate.
    check_entrance_exam_problems_for_rescoring(usage_key)

    task_input, task_key = encode_entrance_exam_and_student_input(usage_key, student)
    return submit_task(
        request, 'rescore_problem', rescore_problem,
        usage_key.course_key, task_input, task_key)
def submit_reset_problem_attempts_for_all_students(request, usage_key):  # pylint: disable=invalid-name
    """
    Queue a background task that resets attempts on one problem for every
    student who has accessed it.

    `usage_key` must be a :class:`Location`.  Raises ItemNotFoundException if
    the problem doesn't exist and AlreadyRunningError if a reset is already
    in flight.

    Submitting commits the InstructorTask entry: under TransactionMiddleware's
    "commit-on-success" handling this also commits any pending transaction,
    so later database work runs in a separate transaction.
    """
    # The usage_key is typed in by the instructor; looking it up verifies the
    # module descriptor exists.  Any exception propagates to the caller.
    modulestore().get_item(usage_key)

    task_input, task_key = encode_problem_and_student_input(usage_key)
    return submit_task(
        request, 'reset_problem_attempts', reset_problem_attempts,
        usage_key.course_key, task_input, task_key)
def submit_reset_problem_attempts_in_entrance_exam(request, usage_key, student):  # pylint: disable=invalid-name
    """
    Queue a background task that resets attempts on all entrance-exam
    problems.

    Attempts are reset for `student`, or for all students when `student` is
    None.  `usage_key` must be a :class:`Location` for the entrance exam
    section.  Raises ItemNotFoundError if no entrance exam exists at
    `usage_key` and AlreadyRunningError if a reset is already in flight.

    Submitting commits the InstructorTask entry: under TransactionMiddleware's
    "commit-on-success" handling this also commits any pending transaction,
    so later database work runs in a separate transaction.
    """
    # Verify the entrance exam (section) exists; exceptions propagate.
    modulestore().get_item(usage_key)

    task_input, task_key = encode_entrance_exam_and_student_input(usage_key, student)
    return submit_task(
        request, 'reset_problem_attempts', reset_problem_attempts,
        usage_key.course_key, task_input, task_key)
def submit_delete_problem_state_for_all_students(request, usage_key):  # pylint: disable=invalid-name
    """
    Queue a background task that deletes state on one problem for every
    student who has accessed it.

    `usage_key` must be a :class:`Location`.  Raises ItemNotFoundException if
    the problem doesn't exist and AlreadyRunningError if a delete is already
    in flight.

    Submitting commits the InstructorTask entry: under TransactionMiddleware's
    "commit-on-success" handling this also commits any pending transaction,
    so later database work runs in a separate transaction.
    """
    # The usage_key is typed in by the instructor; looking it up verifies the
    # module descriptor exists.  Any exception propagates to the caller.
    modulestore().get_item(usage_key)

    task_input, task_key = encode_problem_and_student_input(usage_key)
    return submit_task(
        request, 'delete_problem_state', delete_problem_state,
        usage_key.course_key, task_input, task_key)
def submit_delete_entrance_exam_state_for_student(request, usage_key, student):  # pylint: disable=invalid-name
    """
    Queue a background task that deletes module state on all entrance-exam
    problems for the given student.

    `usage_key` must be a :class:`Location` for the entrance exam section and
    `student` a User object.  Raises ItemNotFoundError if no entrance exam
    exists at `usage_key` and AlreadyRunningError if a reset is already in
    flight.

    Submitting commits the InstructorTask entry: under TransactionMiddleware's
    "commit-on-success" handling this also commits any pending transaction,
    so later database work runs in a separate transaction.
    """
    # Verify the entrance exam (section) exists; exceptions propagate.
    modulestore().get_item(usage_key)

    task_input, task_key = encode_entrance_exam_and_student_input(usage_key, student)
    return submit_task(
        request, 'delete_problem_state', delete_problem_state,
        usage_key.course_key, task_input, task_key)
def submit_bulk_course_email(request, course_key, email_id):
    """
    Request that bulk email be sent as a background task.

    The CourseEmail identified by `email_id` will be sent to the enrolled
    students of `course_key`.  AlreadyRunningError is raised if the same
    recipients are already being emailed with the same CourseEmail object.

    This method makes sure the InstructorTask entry is committed.  When
    called from any view wrapped by TransactionMiddleware (and thus in a
    "commit-on-success" transaction), an autocommit buried within here will
    cause any pending transaction to be committed by a successful save;
    future database operations take place in a separate transaction.
    """
    # Assume the course is defined and user access already verified, but make
    # sure the email exists.  We also pull out the To argument here, so that
    # it is displayed in the InstructorTask status.
    email_obj = CourseEmail.objects.get(id=email_id)
    to_option = email_obj.to_option

    task_type = 'bulk_course_email'
    task_class = send_bulk_course_email
    # Pass in to_option as a separate argument, even though it is (currently)
    # in the CourseEmail, so it is visible in the progress status.  (At some
    # point the recipient may be taken out of the CourseEmail so the same
    # saved email can be sent to different recipients.)
    task_input = {'email_id': email_id, 'to_option': to_option}
    task_key_stub = "{email_id}_{to_option}".format(email_id=email_id, to_option=to_option)
    # Derive the fixed-length task key via an MD5 hash.  hashlib.md5 requires
    # bytes on Python 3, so encode explicitly; encoding the ASCII stub is a
    # no-op str->str on Python 2, preserving the original key values.
    task_key = hashlib.md5(task_key_stub.encode('utf-8')).hexdigest()
    return submit_task(request, task_type, task_class, course_key, task_input, task_key)
def submit_calculate_grades_csv(request, course_key):
    """
    Queue a background task that builds the course grade report.
    AlreadyRunningError is raised if the course's grades are already
    being updated.
    """
    return submit_task(
        request, 'grade_course', calculate_grades_csv, course_key, {}, '')
def submit_problem_grade_report(request, course_key):
    """
    Queue a background task that generates a CSV grade report containing
    per-problem values for the course.
    """
    return submit_task(
        request, 'grade_problems', calculate_problem_grade_report,
        course_key, {}, '')
def submit_calculate_students_features_csv(request, course_key, features):
    """
    Queue a background task that generates a CSV of student profile info
    for the requested `features`.  Raises AlreadyRunningError if that CSV
    is already being updated.
    """
    return submit_task(
        request, 'profile_info_csv', calculate_students_features_csv,
        course_key, {'features': features}, '')
def submit_detailed_enrollment_features_csv(request, course_key):  # pylint: disable=invalid-name
    """
    Queue a background task that generates a CSV of detailed enrollment
    info.  Raises AlreadyRunningError if that CSV is already being updated.
    """
    return submit_task(
        request, 'detailed_enrollment_report', enrollment_report_features_csv,
        course_key, {}, '')
def submit_calculate_may_enroll_csv(request, course_key, features):
    """
    Queue a background task that generates a CSV of invited students who
    have not yet enrolled in the course.  Raises AlreadyRunningError if
    that file is already being updated.
    """
    return submit_task(
        request, 'may_enroll_info_csv', calculate_may_enroll_csv,
        course_key, {'features': features}, '')
def submit_executive_summary_report(request, course_key):  # pylint: disable=invalid-name
    """
    Queue a background task that generates the executive summary report
    HTML file.  Raises AlreadyRunningError if the file is already being
    updated.
    """
    return submit_task(
        request, 'exec_summary_report', exec_summary_report_csv,
        course_key, {}, '')
def submit_cohort_students(request, course_key, file_name):
    """
    Queue a background task that cohorts students in bulk from the uploaded
    file.  Raises AlreadyRunningError if cohorting is already in progress.
    """
    return submit_task(
        request, 'cohort_students', cohort_students,
        course_key, {'file_name': file_name}, '')
def generate_certificates_for_all_students(request, course_key):   # pylint: disable=invalid-name
    """
    Queue a background task that generates certificates for every student
    enrolled in the course.  Raises AlreadyRunningError if certificate
    generation is already in progress.
    """
    return submit_task(
        request, 'generate_certificates_all_student', generate_certificates,
        course_key, {}, '')
| agpl-3.0 |
mrquim/mrquimrepo | repo/script.module.schism.common/lib/js2py/host/dom/constants.py | 97 | 1139 | from js2py.base import *
def _get_conts(idl):
def is_valid(c):
try:
exec(c)
return 1
except:
pass
return '\n'.join(filter(is_valid, (' '.join(e.strip(' ;').split()[-3:]) for e in idl.splitlines())))
# Default property-descriptor flags applied to every method installed on a
# composed prototype.
default_attrs = {'writable':True, 'enumerable':True, 'configurable':True}

def compose_prototype(Class, attrs=default_attrs):
    """Instantiate Class and expose each of its plain Python methods as a JS
    function property on the instance, returning it for use as a prototype."""
    prototype = Class()
    for i in dir(Class):
        e = getattr(Class, i)
        # Only method-like attributes expose __func__; everything else is skipped.
        if hasattr(e, '__func__'):
            temp = PyJsFunction(e.__func__, FunctionPrototype)
            # Copy the flags so the shared default dict (or the caller's dict)
            # is never mutated.  NOTE(review): iteritems() is Python 2 only.
            attrs = {k:v for k,v in attrs.iteritems()}
            attrs['value'] = temp
            prototype.define_own_property(i, attrs)
    return prototype
# Error codes
# Numeric DOMException codes as defined by the W3C DOM specification.
INDEX_SIZE_ERR = 1
DOMSTRING_SIZE_ERR = 2
HIERARCHY_REQUEST_ERR = 3
WRONG_DOCUMENT_ERR = 4
INVALID_CHARACTER_ERR = 5
NO_DATA_ALLOWED_ERR = 6
NO_MODIFICATION_ALLOWED_ERR = 7
NOT_FOUND_ERR = 8
NOT_SUPPORTED_ERR = 9
INUSE_ATTRIBUTE_ERR = 10
INVALID_STATE_ERR = 11
SYNTAX_ERR = 12
INVALID_MODIFICATION_ERR = 13
NAMESPACE_ERR = 14
INVALID_ACCESS_ERR = 15
VALIDATION_ERR = 16
TYPE_MISMATCH_ERR = 17
| gpl-2.0 |
AntidoteLabs/Antidote-DM | Antidotes DM/youtube_dl/extractor/tubitv.py | 18 | 2650 | # coding: utf-8
from __future__ import unicode_literals
import codecs
import re
from .common import InfoExtractor
from ..compat import compat_urllib_parse
from ..utils import (
ExtractorError,
int_or_none,
sanitized_Request,
)
class TubiTvIE(InfoExtractor):
    # Pages look like http://tubitv.com/video?id=<numeric id>&title=...
    _VALID_URL = r'https?://(?:www\.)?tubitv\.com/video\?id=(?P<id>[0-9]+)'
    _LOGIN_URL = 'http://tubitv.com/login'
    _NETRC_MACHINE = 'tubitv'

    _TEST = {
        'url': 'http://tubitv.com/video?id=54411&title=The_Kitchen_Musical_-_EP01',
        'info_dict': {
            'id': '54411',
            'ext': 'mp4',
            'title': 'The Kitchen Musical - EP01',
            'thumbnail': 're:^https?://.*\.png$',
            'description': 'md5:37532716166069b353e8866e71fefae7',
            'duration': 2407,
        },
        'params': {
            'skip_download': 'HLS download',
        },
    }

    def _login(self):
        # Authenticate with credentials from .netrc/options; no-op when none
        # are configured.
        (username, password) = self._get_login_info()
        if username is None:
            return
        self.report_login()
        form_data = {
            'username': username,
            'password': password,
        }
        payload = compat_urllib_parse.urlencode(form_data).encode('utf-8')
        request = sanitized_Request(self._LOGIN_URL, payload)
        request.add_header('Content-Type', 'application/x-www-form-urlencoded')
        login_page = self._download_webpage(
            request, None, False, 'Wrong login info')
        # The logout element only appears on the page when login succeeded.
        if not re.search(r'id="tubi-logout"', login_page):
            raise ExtractorError(
                'Login failed (invalid username/password)', expected=True)

    def _real_initialize(self):
        self._login()

    def _real_extract(self, url):
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)
        # Some videos are gated behind a login wall.
        if re.search(r"<(?:DIV|div) class='login-required-screen'>", webpage):
            self.raise_login_required('This video requires login')
        title = self._og_search_title(webpage)
        description = self._og_search_description(webpage)
        thumbnail = self._og_search_thumbnail(webpage)
        duration = int_or_none(self._html_search_meta(
            'video:duration', webpage, 'duration'))
        # The page obfuscates the HLS playlist URL: 'apu' holds the m3u8 URL
        # ROT13-encoded and reversed, so undo both transforms.
        apu = self._search_regex(r"apu='([^']+)'", webpage, 'apu')
        m3u8_url = codecs.decode(apu, 'rot_13')[::-1]
        formats = self._extract_m3u8_formats(m3u8_url, video_id, ext='mp4')
        return {
            'id': video_id,
            'title': title,
            'formats': formats,
            'thumbnail': thumbnail,
            'description': description,
            'duration': duration,
        }
| gpl-2.0 |
vickenty/ookoobah | pyglet-c9188efc2e30/experimental/mt_media/drivers/directsound/lib_dsound.py | 78 | 12874 | # ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2008 Alex Holkner
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
# $Id:$
import ctypes
from pyglet import com
# Bind the system DirectSound DLL; ctypes.oledll checks HRESULT return values.
lib = ctypes.oledll.dsound

# Win32 scalar type aliases used throughout the DirectSound declarations.
DWORD = ctypes.c_uint32
LPDWORD = ctypes.POINTER(DWORD)
LONG = ctypes.c_long
LPLONG = ctypes.POINTER(LONG)
WORD = ctypes.c_uint16
HWND = DWORD
LPUNKNOWN = ctypes.c_void_p

# Direct3D float value type, used for the 3D-audio parameters below.
D3DVALUE = ctypes.c_float
PD3DVALUE = ctypes.POINTER(D3DVALUE)
class D3DVECTOR(ctypes.Structure):
    # Three-component float vector: positions, velocities and orientations
    # in the 3D audio interfaces.
    _fields_ = [
        ('x', ctypes.c_float),
        ('y', ctypes.c_float),
        ('z', ctypes.c_float),
    ]
PD3DVECTOR = ctypes.POINTER(D3DVECTOR)

class WAVEFORMATEX(ctypes.Structure):
    # Standard Win32 wave format descriptor (mirrors WAVEFORMATEX in mmreg.h).
    _fields_ = [
        ('wFormatTag', WORD),
        ('nChannels', WORD),
        ('nSamplesPerSec', DWORD),
        ('nAvgBytesPerSec', DWORD),
        ('nBlockAlign', WORD),
        ('wBitsPerSample', WORD),
        ('cbSize', WORD),
    ]
LPWAVEFORMATEX = ctypes.POINTER(WAVEFORMATEX)
WAVE_FORMAT_PCM = 1
class DSCAPS(ctypes.Structure):
    # Device capabilities returned by IDirectSound.GetCaps.  dwSize must be
    # set to sizeof(DSCAPS) before the call, per DirectSound convention.
    _fields_ = [
        ('dwSize', DWORD),
        ('dwFlags', DWORD),
        ('dwMinSecondarySampleRate', DWORD),
        ('dwMaxSecondarySampleRate', DWORD),
        ('dwPrimaryBuffers', DWORD),
        ('dwMaxHwMixingAllBuffers', DWORD),
        ('dwMaxHwMixingStaticBuffers', DWORD),
        ('dwMaxHwMixingStreamingBuffers', DWORD),
        ('dwFreeHwMixingAllBuffers', DWORD),
        ('dwFreeHwMixingStaticBuffers', DWORD),
        ('dwFreeHwMixingStreamingBuffers', DWORD),
        ('dwMaxHw3DAllBuffers', DWORD),
        ('dwMaxHw3DStaticBuffers', DWORD),
        ('dwMaxHw3DStreamingBuffers', DWORD),
        ('dwFreeHw3DAllBuffers', DWORD),
        ('dwFreeHw3DStaticBuffers', DWORD),
        ('dwFreeHw3DStreamingBuffers', DWORD),
        ('dwTotalHwMemBytes', DWORD),
        ('dwFreeHwMemBytes', DWORD),
        ('dwMaxContigFreeHwMemBytes', DWORD),
        ('dwUnlockTransferRateHwBuffers', DWORD),
        ('dwPlayCpuOverheadSwBuffers', DWORD),
        ('dwReserved1', DWORD),
        ('dwReserved2', DWORD)
    ]
LPDSCAPS = ctypes.POINTER(DSCAPS)

class DSBCAPS(ctypes.Structure):
    # Buffer capabilities returned by IDirectSoundBuffer.GetCaps.
    _fields_ = [
        ('dwSize', DWORD),
        ('dwFlags', DWORD),
        ('dwBufferBytes', DWORD),
        ('dwUnlockTransferRate', DWORD),
        ('dwPlayCpuOverhead', DWORD),
    ]
LPDSBCAPS = ctypes.POINTER(DSBCAPS)

class DSBUFFERDESC(ctypes.Structure):
    # Description passed to IDirectSound.CreateSoundBuffer.
    _fields_ = [
        ('dwSize', DWORD),
        ('dwFlags', DWORD),
        ('dwBufferBytes', DWORD),
        ('dwReserved', DWORD),
        ('lpwfxFormat', LPWAVEFORMATEX),
    ]
LPDSBUFFERDESC = ctypes.POINTER(DSBUFFERDESC)
class DS3DBUFFER(ctypes.Structure):
    # Full 3D parameter set for a sound buffer
    # (IDirectSound3DBuffer.Get/SetAllParameters).
    _fields_ = [
        ('dwSize', DWORD),
        ('vPosition', D3DVECTOR),
        ('vVelocity', D3DVECTOR),
        ('dwInsideConeAngle', DWORD),
        ('dwOutsideConeAngle', DWORD),
        ('vConeOrientation', D3DVECTOR),
        ('lConeOutsideVolume', LONG),
        ('flMinDistance', D3DVALUE),
        ('flMaxDistance', D3DVALUE),
        ('dwMode', DWORD),
    ]
LPDS3DBUFFER = ctypes.POINTER(DS3DBUFFER)

class DS3DLISTENER(ctypes.Structure):
    # Full 3D parameter set for the listener
    # (IDirectSound3DListener.Get/SetAllParameters).
    _fields_ = [
        ('dwSize', DWORD),
        ('vPosition', D3DVECTOR),
        ('vVelocity', D3DVECTOR),
        ('vOrientFront', D3DVECTOR),
        ('vOrientTop', D3DVECTOR),
        ('flDistanceFactor', D3DVALUE),
        ('flRolloffFactor', D3DVALUE),
        ('flDopplerFactor', D3DVALUE),
    ]
LPDS3DLISTENER = ctypes.POINTER(DS3DLISTENER)
class IDirectSoundBuffer(com.IUnknown):
    # COM vtable declaration for IDirectSoundBuffer.  Method order must match
    # the C interface exactly; do not reorder entries.
    _methods_ = [
        ('GetCaps',
         com.STDMETHOD(LPDSBCAPS)),
        ('GetCurrentPosition',
         com.STDMETHOD(LPDWORD, LPDWORD)),
        ('GetFormat',
         com.STDMETHOD(LPWAVEFORMATEX, DWORD, LPDWORD)),
        ('GetVolume',
         com.STDMETHOD(LPLONG)),
        ('GetPan',
         com.STDMETHOD(LPLONG)),
        ('GetFrequency',
         com.STDMETHOD(LPDWORD)),
        ('GetStatus',
         com.STDMETHOD(LPDWORD)),
        ('Initialize',
         com.STDMETHOD(ctypes.c_void_p, LPDSBUFFERDESC)),
        ('Lock',
         com.STDMETHOD(DWORD, DWORD,
                       ctypes.POINTER(ctypes.c_void_p), LPDWORD,
                       ctypes.POINTER(ctypes.c_void_p), LPDWORD,
                       DWORD)),
        ('Play',
         com.STDMETHOD(DWORD, DWORD, DWORD)),
        ('SetCurrentPosition',
         com.STDMETHOD(DWORD)),
        ('SetFormat',
         com.STDMETHOD(LPWAVEFORMATEX)),
        ('SetVolume',
         com.STDMETHOD(LONG)),
        ('SetPan',
         com.STDMETHOD(LONG)),
        ('SetFrequency',
         com.STDMETHOD(DWORD)),
        ('Stop',
         com.STDMETHOD()),
        ('Unlock',
         com.STDMETHOD(ctypes.c_void_p, DWORD, ctypes.c_void_p, DWORD)),
        ('Restore',
         com.STDMETHOD()),
    ]
# Interface ID for IDirectSound3DListener.
IID_IDirectSound3DListener = com.GUID(
    0x279AFA84, 0x4981, 0x11CE, 0xA5, 0x21, 0x00, 0x20, 0xAF, 0x0B, 0xE5, 0x60)

class IDirectSound3DListener(com.IUnknown):
    # COM vtable declaration for IDirectSound3DListener.  Method order must
    # match the C interface exactly; do not reorder entries.
    _methods_ = [
        ('GetAllParameters',
         com.STDMETHOD(LPDS3DLISTENER)),
        ('GetDistanceFactor',
         com.STDMETHOD(PD3DVALUE)),
        ('GetDopplerFactor',
         com.STDMETHOD(PD3DVALUE)),
        ('GetOrientation',
         com.STDMETHOD(PD3DVECTOR)),
        ('GetPosition',
         com.STDMETHOD(PD3DVECTOR)),
        ('GetRolloffFactor',
         com.STDMETHOD(PD3DVALUE)),
        ('GetVelocity',
         com.STDMETHOD(PD3DVECTOR)),
        ('SetAllParameters',
         com.STDMETHOD(LPDS3DLISTENER)),
        ('SetDistanceFactor',
         com.STDMETHOD(D3DVALUE, DWORD)),
        ('SetDopplerFactor',
         com.STDMETHOD(D3DVALUE, DWORD)),
        ('SetOrientation',
         com.STDMETHOD(D3DVALUE, D3DVALUE, D3DVALUE,
                       D3DVALUE, D3DVALUE, D3DVALUE, DWORD)),
        ('SetPosition',
         com.STDMETHOD(D3DVALUE, D3DVALUE, D3DVALUE, DWORD)),
        ('SetRolloffFactor',
         com.STDMETHOD(D3DVALUE, DWORD)),
        ('SetVelocity',
         com.STDMETHOD(D3DVALUE, D3DVALUE, D3DVALUE, DWORD)),
        ('CommitDeferredSettings',
         com.STDMETHOD()),
    ]

# Interface ID for IDirectSound3DBuffer.
IID_IDirectSound3DBuffer = com.GUID(
    0x279AFA86, 0x4981, 0x11CE, 0xA5, 0x21, 0x00, 0x20, 0xAF, 0x0B, 0xE5, 0x60)

class IDirectSound3DBuffer(com.IUnknown):
    # COM vtable declaration for IDirectSound3DBuffer.  Method order must
    # match the C interface exactly; do not reorder entries.
    _methods_ = [
        ('GetAllParameters',
         com.STDMETHOD(LPDS3DBUFFER)),
        ('GetConeAngles',
         com.STDMETHOD(LPDWORD, LPDWORD)),
        ('GetConeOrientation',
         com.STDMETHOD(PD3DVECTOR)),
        ('GetConeOutsideVolume',
         com.STDMETHOD(LPLONG)),
        ('GetMaxDistance',
         com.STDMETHOD(PD3DVALUE)),
        ('GetMinDistance',
         com.STDMETHOD(PD3DVALUE)),
        ('GetMode',
         com.STDMETHOD(LPDWORD)),
        ('GetPosition',
         com.STDMETHOD(PD3DVECTOR)),
        ('GetVelocity',
         com.STDMETHOD(PD3DVECTOR)),
        ('SetAllParameters',
         com.STDMETHOD(LPDS3DBUFFER, DWORD)),
        ('SetConeAngles',
         com.STDMETHOD(DWORD, DWORD, DWORD)),
        ('SetConeOrientation',
         com.STDMETHOD(D3DVALUE, D3DVALUE, D3DVALUE, DWORD)),
        ('SetConeOutsideVolume',
         com.STDMETHOD(LONG, DWORD)),
        ('SetMaxDistance',
         com.STDMETHOD(D3DVALUE, DWORD)),
        ('SetMinDistance',
         com.STDMETHOD(D3DVALUE, DWORD)),
        ('SetMode',
         com.STDMETHOD(DWORD, DWORD)),
        ('SetPosition',
         com.STDMETHOD(D3DVALUE, D3DVALUE, D3DVALUE, DWORD)),
        ('SetVelocity',
         com.STDMETHOD(D3DVALUE, D3DVALUE, D3DVALUE, DWORD)),
    ]
class IDirectSound(com.IUnknown):
    # COM vtable declaration for the top-level IDirectSound device interface.
    # Method order must match the C interface exactly; do not reorder entries.
    _methods_ = [
        ('CreateSoundBuffer',
         com.STDMETHOD(LPDSBUFFERDESC,
                       ctypes.POINTER(IDirectSoundBuffer),
                       LPUNKNOWN)),
        ('GetCaps',
         com.STDMETHOD(LPDSCAPS)),
        ('DuplicateSoundBuffer',
         com.STDMETHOD(IDirectSoundBuffer,
                       ctypes.POINTER(IDirectSoundBuffer))),
        ('SetCooperativeLevel',
         com.STDMETHOD(HWND, DWORD)),
        ('Compact',
         com.STDMETHOD()),
        ('GetSpeakerConfig',
         com.STDMETHOD(LPDWORD)),
        ('SetSpeakerConfig',
         com.STDMETHOD(DWORD)),
        ('Initialize',
         com.STDMETHOD(com.LPGUID)),
    ]
    _type_ = com.COMInterface

# Entry point: DirectSoundCreate(lpGuidDevice, ppDS, pUnkOuter).
DirectSoundCreate = lib.DirectSoundCreate
DirectSoundCreate.argtypes = \
    [com.LPGUID, ctypes.POINTER(IDirectSound), ctypes.c_void_p]
# DSCAPS.dwFlags device capability bits.
DSCAPS_PRIMARYMONO = 0x00000001
DSCAPS_PRIMARYSTEREO = 0x00000002
DSCAPS_PRIMARY8BIT = 0x00000004
DSCAPS_PRIMARY16BIT = 0x00000008
DSCAPS_CONTINUOUSRATE = 0x00000010
DSCAPS_EMULDRIVER = 0x00000020
DSCAPS_CERTIFIED = 0x00000040
DSCAPS_SECONDARYMONO = 0x00000100
DSCAPS_SECONDARYSTEREO = 0x00000200
DSCAPS_SECONDARY8BIT = 0x00000400
DSCAPS_SECONDARY16BIT = 0x00000800

# Cooperative levels for IDirectSound.SetCooperativeLevel.
DSSCL_NORMAL = 0x00000001
DSSCL_PRIORITY = 0x00000002
DSSCL_EXCLUSIVE = 0x00000003
DSSCL_WRITEPRIMARY = 0x00000004

# Speaker configurations for Get/SetSpeakerConfig.
DSSPEAKER_DIRECTOUT = 0x00000000
DSSPEAKER_HEADPHONE = 0x00000001
DSSPEAKER_MONO = 0x00000002
DSSPEAKER_QUAD = 0x00000003
DSSPEAKER_STEREO = 0x00000004
DSSPEAKER_SURROUND = 0x00000005
DSSPEAKER_5POINT1 = 0x00000006
DSSPEAKER_7POINT1 = 0x00000007

DSSPEAKER_GEOMETRY_MIN = 0x00000005  # 5 degrees
DSSPEAKER_GEOMETRY_NARROW = 0x0000000A  # 10 degrees
DSSPEAKER_GEOMETRY_WIDE = 0x00000014  # 20 degrees
DSSPEAKER_GEOMETRY_MAX = 0x000000B4  # 180 degrees

# DSBUFFERDESC.dwFlags / DSBCAPS.dwFlags buffer capability bits.
DSBCAPS_PRIMARYBUFFER = 0x00000001
DSBCAPS_STATIC = 0x00000002
DSBCAPS_LOCHARDWARE = 0x00000004
DSBCAPS_LOCSOFTWARE = 0x00000008
DSBCAPS_CTRL3D = 0x00000010
DSBCAPS_CTRLFREQUENCY = 0x00000020
DSBCAPS_CTRLPAN = 0x00000040
DSBCAPS_CTRLVOLUME = 0x00000080
DSBCAPS_CTRLPOSITIONNOTIFY = 0x00000100
DSBCAPS_CTRLFX = 0x00000200
DSBCAPS_STICKYFOCUS = 0x00004000
DSBCAPS_GLOBALFOCUS = 0x00008000
DSBCAPS_GETCURRENTPOSITION2 = 0x00010000
DSBCAPS_MUTE3DATMAXDISTANCE = 0x00020000
DSBCAPS_LOCDEFER = 0x00040000

# Flags for IDirectSoundBuffer.Play.
DSBPLAY_LOOPING = 0x00000001
DSBPLAY_LOCHARDWARE = 0x00000002
DSBPLAY_LOCSOFTWARE = 0x00000004
DSBPLAY_TERMINATEBY_TIME = 0x00000008
DSBPLAY_TERMINATEBY_DISTANCE = 0x000000010
DSBPLAY_TERMINATEBY_PRIORITY = 0x000000020

# Bits returned by IDirectSoundBuffer.GetStatus.
DSBSTATUS_PLAYING = 0x00000001
DSBSTATUS_BUFFERLOST = 0x00000002
DSBSTATUS_LOOPING = 0x00000004
DSBSTATUS_LOCHARDWARE = 0x00000008
DSBSTATUS_LOCSOFTWARE = 0x00000010
DSBSTATUS_TERMINATED = 0x00000020

# Flags for IDirectSoundBuffer.Lock.
DSBLOCK_FROMWRITECURSOR = 0x00000001
DSBLOCK_ENTIREBUFFER = 0x00000002

# Frequency / pan / volume / size limits for buffer parameters.
DSBFREQUENCY_MIN = 100
DSBFREQUENCY_MAX = 100000
DSBFREQUENCY_ORIGINAL = 0

DSBPAN_LEFT = -10000
DSBPAN_CENTER = 0
DSBPAN_RIGHT = 10000

DSBVOLUME_MIN = -10000
DSBVOLUME_MAX = 0

DSBSIZE_MIN = 4
DSBSIZE_MAX = 0x0FFFFFFF
DSBSIZE_FX_MIN = 150  # NOTE: Milliseconds, not bytes

# 3D processing modes and deferral flags.
DS3DMODE_NORMAL = 0x00000000
DS3DMODE_HEADRELATIVE = 0x00000001
DS3DMODE_DISABLE = 0x00000002

DS3D_IMMEDIATE = 0x00000000
DS3D_DEFERRED = 0x00000001

# 3D parameter ranges and defaults.
DS3D_MINDISTANCEFACTOR = -1000000.0  # XXX FLT_MIN
DS3D_MAXDISTANCEFACTOR = 1000000.0  # XXX FLT_MAX
DS3D_DEFAULTDISTANCEFACTOR = 1.0

DS3D_MINROLLOFFFACTOR = 0.0
DS3D_MAXROLLOFFFACTOR = 10.0
DS3D_DEFAULTROLLOFFFACTOR = 1.0

DS3D_MINDOPPLERFACTOR = 0.0
DS3D_MAXDOPPLERFACTOR = 10.0
DS3D_DEFAULTDOPPLERFACTOR = 1.0

DS3D_DEFAULTMINDISTANCE = 1.0
DS3D_DEFAULTMAXDISTANCE = 1000000000.0

DS3D_MINCONEANGLE = 0
DS3D_MAXCONEANGLE = 360
DS3D_DEFAULTCONEANGLE = 360
DS3D_DEFAULTCONEOUTSIDEVOLUME = DSBVOLUME_MAX
| mit |
c11/yatsm | yatsm/classification/__init__.py | 3 | 2042 | """ Module storing classifiers for YATSM
Contains utilities and helper classes for classifying timeseries generated
using YATSM change detection.
"""
import logging
from sklearn.ensemble import RandomForestClassifier
import yaml
from ..errors import AlgorithmNotFoundException
# Package-wide logger; handlers and level are configured by the application.
logger = logging.getLogger('yatsm')

# Registry of classification algorithms usable from configuration files,
# keyed by the name expected in the config's 'algorithm' entry.
_algorithms = {
    'RandomForest': RandomForestClassifier
}
def cfg_to_algorithm(config_file):
    """ Build a scikit-learn classifier from a YAML configuration file

    Args:
        config_file (str): location of configuration file for algorithm

    Returns:
        tuple: instantiated scikit-learn estimator (object) and the parsed
            configuration (dict)

    Raises:
        KeyError: raise if configuration file is malformed
        AlgorithmNotFoundException: raise if the named algorithm is not
            implemented in YATSM
        TypeError: raise if the configuration cannot initialize the
            classifier

    """
    # Parse the YAML configuration; any read/parse failure is logged and
    # re-raised for the caller to handle.
    try:
        with open(config_file, 'r') as f:
            config = yaml.safe_load(f)
    except Exception as e:
        logger.error('Could not read config file {} ({})'
                     .format(config_file, str(e)))
        raise

    # Resolve the named algorithm via the registry (raises KeyError when
    # 'algorithm' is missing, matching the documented contract).
    algo_name = config['algorithm']
    algo = _algorithms.get(algo_name)
    if algo is None:
        raise AlgorithmNotFoundException(
            'Could not process unknown algorithm named "%s"' % algo_name)

    if algo_name not in config:
        logger.warning('%s algorithm parameters not found in config file %s. '
                       'Using default values.' % (algo_name, config_file))
        config[algo_name] = {}

    # Instantiate using the algorithm's 'init' hyperparameters, if any.
    try:
        sklearn_algo = algo(**config[algo_name].get('init', {}))
    except TypeError:
        logger.error('Cannot initialize %s classifier. Config file %s '
                     'contains unknown options' % (algo_name, config_file))
        raise
    return sklearn_algo, config
| mit |
makinacorpus/odoo | addons/auth_crypt/__openerp__.py | 310 | 2298 | # -*- encoding: utf-8 -*-
##############################################################################
#
# Odoo, Open Source Management Solution
# Copyright (C) 2004-2014 OpenERP S.A. (<http://odoo.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# Odoo addon manifest for the auth_crypt password-encryption module.
{
    'name': 'Password Encryption',
    'version': '2.0',
    'author': ['OpenERP SA', 'FS3'],
    'maintainer': 'OpenERP SA',
    'website': 'https://www.odoo.com',
    'category': 'Tools',
    'description': """
Encrypted passwords
===================
Replaces the default password storage with a strong cryptographic
hash.
The key derivation function currently used is RSA Security LLC's
industry-standard ``PKDF2``, in combination with ``SHA512``.
This includes salting and key stretching with several thousands
rounds.
All passwords are encrypted as soon as the module is installed.
This may take a few minutes if there are thousands of users.
Past versions of encrypted passwords will be automatically upgraded
to the current scheme whenever a user authenticates
(``auth_crypt`` was previously using the weaker ``md5crypt`` key
derivation function).
Note: Installing this module permanently prevents user password
recovery and cannot be undone. It is thus recommended to enable
some password reset mechanism for users, such as the one provided
by the ``auth_signup`` module (signup for new users does not
necessarily have to be enabled).
""",
    'depends': ['base'],
    'data': [],
    # Installed automatically once its dependencies are present.
    'auto_install': True,
    'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
yufish/youtube-dl | youtube_dl/extractor/wdr.py | 110 | 10166 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import itertools
import re
from .common import InfoExtractor
from ..compat import (
compat_parse_qs,
compat_urlparse,
)
from ..utils import (
determine_ext,
unified_strdate,
)
class WDRIE(InfoExtractor):
    # URL suffix that identifies a direct player page (as opposed to a
    # playlist/overview page), e.g. "-videoplayer" or "-audioplayer_size-L".
    _PLAYER_REGEX = '-(?:video|audio)player(?:_size-[LMS])?'
    _VALID_URL = r'(?P<url>https?://www\d?\.(?:wdr\d?|funkhauseuropa)\.de/)(?P<id>.+?)(?P<player>%s)?\.html' % _PLAYER_REGEX
    _TESTS = [
        {
            'url': 'http://www1.wdr.de/mediathek/video/sendungen/servicezeit/videoservicezeit560-videoplayer_size-L.html',
            'info_dict': {
                'id': 'mdb-362427',
                'ext': 'flv',
                'title': 'Servicezeit',
                'description': 'md5:c8f43e5e815eeb54d0b96df2fba906cb',
                'upload_date': '20140310',
                'is_live': False
            },
            'params': {
                'skip_download': True,
            },
        },
        {
            'url': 'http://www1.wdr.de/themen/av/videomargaspiegelisttot101-videoplayer.html',
            'info_dict': {
                'id': 'mdb-363194',
                'ext': 'flv',
                'title': 'Marga Spiegel ist tot',
                'description': 'md5:2309992a6716c347891c045be50992e4',
                'upload_date': '20140311',
                'is_live': False
            },
            'params': {
                'skip_download': True,
            },
        },
        {
            'url': 'http://www1.wdr.de/themen/kultur/audioerlebtegeschichtenmargaspiegel100-audioplayer.html',
            'md5': '83e9e8fefad36f357278759870805898',
            'info_dict': {
                'id': 'mdb-194332',
                'ext': 'mp3',
                'title': 'Erlebte Geschichten: Marga Spiegel (29.11.2009)',
                'description': 'md5:2309992a6716c347891c045be50992e4',
                'upload_date': '20091129',
                'is_live': False
            },
        },
        {
            'url': 'http://www.funkhauseuropa.de/av/audioflaviacoelhoamaramar100-audioplayer.html',
            'md5': '99a1443ff29af19f6c52cf6f4dc1f4aa',
            'info_dict': {
                'id': 'mdb-478135',
                'ext': 'mp3',
                'title': 'Flavia Coelho: Amar é Amar',
                'description': 'md5:7b29e97e10dfb6e265238b32fa35b23a',
                'upload_date': '20140717',
                'is_live': False
            },
        },
        {
            'url': 'http://www1.wdr.de/mediathek/video/sendungen/quarks_und_co/filterseite-quarks-und-co100.html',
            'playlist_mincount': 146,
            'info_dict': {
                'id': 'mediathek/video/sendungen/quarks_und_co/filterseite-quarks-und-co100',
            }
        },
        {
            'url': 'http://www1.wdr.de/mediathek/video/livestream/index.html',
            'info_dict': {
                'id': 'mdb-103364',
                'title': 're:^WDR Fernsehen [0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}$',
                'description': 'md5:ae2ff888510623bf8d4b115f95a9b7c9',
                'ext': 'flv',
                'upload_date': '20150212',
                'is_live': True
            },
            'params': {
                'skip_download': True,
            },
        }
    ]

    def _real_extract(self, url):
        """Extract a single WDR media item, or recurse into listing pages."""
        mobj = re.match(self._VALID_URL, url)
        page_url = mobj.group('url')
        page_id = mobj.group('id')

        webpage = self._download_webpage(url, page_id)

        # No player suffix in the URL: this is a listing, not a media page.
        if mobj.group('player') is None:
            entries = [
                self.url_result(page_url + href, 'WDR')
                for href in re.findall(r'<a href="/?(.+?%s\.html)" rel="nofollow"' % self._PLAYER_REGEX, webpage)
            ]
            if entries:  # Playlist page
                return self.playlist_result(entries, page_id)

            # Overview page: walk the paginated "mediathek" listing by
            # following the "nextToLast" link until it disappears.
            entries = []
            for page_num in itertools.count(2):
                hrefs = re.findall(
                    r'<li class="mediathekvideo"\s*>\s*<img[^>]*>\s*<a href="(/mediathek/video/[^"]+)"',
                    webpage)
                entries.extend(
                    self.url_result(page_url + href, 'WDR')
                    for href in hrefs)
                next_url_m = re.search(
                    r'<li class="nextToLast">\s*<a href="([^"]+)"', webpage)
                if not next_url_m:
                    break
                next_url = page_url + next_url_m.group(1)
                webpage = self._download_webpage(
                    next_url, page_id,
                    note='Downloading playlist page %d' % page_num)
            return self.playlist_result(entries, page_id)

        # Player page: all media metadata lives in the Flash player's
        # "flashvars" query string (each value is a single-item list,
        # as returned by compat_parse_qs).
        flashvars = compat_parse_qs(
            self._html_search_regex(r'<param name="flashvars" value="([^"]+)"', webpage, 'flashvars'))

        page_id = flashvars['trackerClipId'][0]
        video_url = flashvars['dslSrc'][0]
        title = flashvars['trackerClipTitle'][0]
        thumbnail = flashvars['startPicture'][0] if 'startPicture' in flashvars else None
        is_live = flashvars.get('isLive', ['0'])[0] == '1'

        if is_live:
            title = self._live_title(title)

        # Prefer the air time from flashvars; fall back to the page's
        # Dublin Core date meta tag.
        if 'trackerClipAirTime' in flashvars:
            upload_date = flashvars['trackerClipAirTime'][0]
        else:
            upload_date = self._html_search_meta('DC.Date', webpage, 'upload date')

        if upload_date:
            upload_date = unified_strdate(upload_date)

        # f4m/smil sources need Adobe HDS query parameters appended;
        # for anything else the extension is derived from the URL.
        if video_url.endswith('.f4m'):
            video_url += '?hdcore=3.2.0&plugin=aasp-3.2.0.77.18'
            ext = 'flv'
        elif video_url.endswith('.smil'):
            fmt = self._extract_smil_formats(video_url, page_id)[0]
            video_url = fmt['url']
            sep = '&' if '?' in video_url else '?'
            video_url += sep
            video_url += 'hdcore=3.3.0&plugin=aasp-3.3.0.99.43'
            ext = fmt['ext']
        else:
            ext = determine_ext(video_url)

        description = self._html_search_meta('Description', webpage, 'description')

        return {
            'id': page_id,
            'url': video_url,
            'ext': ext,
            'title': title,
            'description': description,
            'thumbnail': thumbnail,
            'upload_date': upload_date,
            'is_live': is_live
        }
class WDRMobileIE(InfoExtractor):
    """Extractor for direct WDR mobile on-demand media URLs.

    All metadata is encoded in the URL itself, so no page download is
    needed: the id, title and FSK age rating are parsed straight out of
    the path components.
    """
    _VALID_URL = r'''(?x)
        https?://mobile-ondemand\.wdr\.de/
        .*?/fsk(?P<age_limit>[0-9]+)
        /[0-9]+/[0-9]+/
        (?P<id>[0-9]+)_(?P<title>[0-9]+)'''
    IE_NAME = 'wdr:mobile'
    _TEST = {
        'url': 'http://mobile-ondemand.wdr.de/CMS2010/mdb/ondemand/weltweit/fsk0/42/421735/421735_4283021.mp4',
        'info_dict': {
            'title': '4283021',
            'id': '421735',
            'ext': 'mp4',
            'age_limit': 0,
        },
        'skip': 'Problems with loading data.'
    }

    def _real_extract(self, url):
        match = re.match(self._VALID_URL, url)
        info = {
            'id': match.group('id'),
            'title': match.group('title'),
            'age_limit': int(match.group('age_limit')),
            'url': url,
        }
        # The CDN only serves clients that identify as mobile devices.
        info['http_headers'] = {
            'User-Agent': 'mobile',
        }
        return info
class WDRMausIE(InfoExtractor):
    _VALID_URL = 'http://(?:www\.)?wdrmaus\.de/(?:[^/]+/){,2}(?P<id>[^/?#]+)(?:/index\.php5|(?<!index)\.php5|/(?:$|[?#]))'
    IE_DESC = 'Sendung mit der Maus'
    _TESTS = [{
        'url': 'http://www.wdrmaus.de/aktuelle-sendung/index.php5',
        'info_dict': {
            'id': 'aktuelle-sendung',
            'ext': 'mp4',
            'thumbnail': 're:^http://.+\.jpg',
            'upload_date': 're:^[0-9]{8}$',
            'title': 're:^[0-9.]{10} - Aktuelle Sendung$',
        }
    }, {
        'url': 'http://www.wdrmaus.de/sachgeschichten/sachgeschichten/40_jahre_maus.php5',
        'md5': '3b1227ca3ed28d73ec5737c65743b2a3',
        'info_dict': {
            'id': '40_jahre_maus',
            'ext': 'mp4',
            'thumbnail': 're:^http://.+\.jpg',
            'upload_date': '20131007',
            'title': '12.03.2011 - 40 Jahre Maus',
        }
    }]

    def _real_extract(self, url):
        """Extract a "Sendung mit der Maus" episode from its show page."""
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)
        # The player is launched via a "?startVideo=1&..." link whose query
        # string carries the media parameters (firstVideo, startPicture, ...).
        param_code = self._html_search_regex(
            r'<a href="\?startVideo=1&([^"]+)"', webpage, 'parameters')

        # Title is assembled as "<air date> - <headline>".
        title_date = self._search_regex(
            r'<div class="sendedatum"><p>Sendedatum:\s*([0-9\.]+)</p>',
            webpage, 'air date')
        title_str = self._html_search_regex(
            r'<h1>(.*?)</h1>', webpage, 'title')
        title = '%s - %s' % (title_date, title_str)
        upload_date = unified_strdate(
            self._html_search_meta('dc.date', webpage))

        fields = compat_parse_qs(param_code)
        video_url = fields['firstVideo'][0]
        thumbnail = compat_urlparse.urljoin(url, fields['startPicture'][0])

        formats = [{
            'format_id': 'rtmp',
            'url': video_url,
        }]

        # The site's JS maps RTMP stream prefixes to plain HTTP download
        # URLs; use it to offer an additional 'http' format (best effort --
        # fatal=False, so extraction still succeeds without it).
        jscode = self._download_webpage(
            'http://www.wdrmaus.de/codebase/js/extended-medien.min.js',
            video_id, fatal=False,
            note='Downloading URL translation table',
            errnote='Could not download URL translation table')
        if jscode:
            for m in re.finditer(
                    r"stream:\s*'dslSrc=(?P<stream>[^']+)',\s*download:\s*'(?P<dl>[^']+)'\s*\}",
                    jscode):
                if video_url.startswith(m.group('stream')):
                    http_url = video_url.replace(
                        m.group('stream'), m.group('dl'))
                    formats.append({
                        'format_id': 'http',
                        'url': http_url,
                    })
                    break

        self._sort_formats(formats)

        return {
            'id': video_id,
            'title': title,
            'formats': formats,
            'thumbnail': thumbnail,
            'upload_date': upload_date,
        }
| unlicense |
# NOTE(review): the "repo | path | copies | size |" text fused onto the next
# line is dataset-dump metadata, not Python -- the real module begins at
# "EXPECTED = {".
# Generated test fixture: presumably the expected parse tree of the
# corresponding "module tags explicit" ASN.1 specification (per-type 'tag'
# dicts carry 'kind' EXPLICIT/IMPLICIT, a tag 'number', and an optional
# 'class') -- confirm against the asn1tools parser before editing by hand.
eerimoq/asn1tools | tests/files/module_tags_explicit_pp.py | 1 | 9965 | EXPECTED = {'ModuleTagsExplicit': {'extensibility-implied': False,
    'imports': {},
    'object-classes': {},
    'object-sets': {},
    'types': {
        'A': {'tag': {'kind': 'EXPLICIT', 'number': 3}, 'type': 'INTEGER'},
        'AI': {'tag': {'kind': 'IMPLICIT', 'number': 3}, 'type': 'INTEGER'},
        'BA': {'tag': {'kind': 'EXPLICIT', 'number': 4}, 'type': 'A'},
        'BIA': {'tag': {'kind': 'IMPLICIT', 'number': 4}, 'type': 'A'},
        'BIAI': {'tag': {'kind': 'IMPLICIT', 'number': 4}, 'type': 'AI'},
        'C1': {'members': [{'members': [{'name': 'a',
                                         'tag': {'kind': 'EXPLICIT',
                                                 'number': 0},
                                         'type': 'INTEGER'}],
                            'name': 'a',
                            'tag': {'kind': 'EXPLICIT',
                                    'number': 0},
                            'type': 'CHOICE'}],
               'type': 'CHOICE'},
        'CBA': {'tag': {'kind': 'EXPLICIT', 'number': 5}, 'type': 'BA'},
        'CBIAI': {'tag': {'kind': 'EXPLICIT', 'number': 5}, 'type': 'BIAI'},
        'CIBIA': {'tag': {'kind': 'IMPLICIT', 'number': 5}, 'type': 'BIA'},
        'CIBIAI': {'tag': {'kind': 'IMPLICIT', 'number': 5}, 'type': 'BIAI'},
        'S1': {'members': [{'name': 'a', 'type': 'INTEGER'},
                           {'name': 'b',
                            'optional': True,
                            'type': 'BOOLEAN'}],
               'type': 'SEQUENCE'},
        'S2': {'members': [{'name': 'a', 'type': 'INTEGER'},
                           {'name': 'b',
                            'tag': {'kind': 'EXPLICIT', 'number': 2},
                            'type': 'S1'},
                           {'members': [{'name': 'a', 'type': 'BOOLEAN'}],
                            'name': 'c',
                            'type': 'CHOICE'}],
               'type': 'SEQUENCE'},
        'S3': {'members': [{'name': 'a', 'type': 'INTEGER'},
                           {'name': 'b',
                            'tag': {'kind': 'EXPLICIT', 'number': 2},
                            'type': 'S1'},
                           {'members': [{'name': 'a', 'type': 'BOOLEAN'}],
                            'name': 'c',
                            'tag': {'kind': 'EXPLICIT', 'number': 3},
                            'type': 'CHOICE'}],
               'type': 'SEQUENCE'},
        'S4': {'members': [{'name': 'a', 'type': 'INTEGER'},
                           {'name': 'b',
                            'tag': {'kind': 'EXPLICIT', 'number': 1},
                            'type': 'C1'},
                           {'name': 'c',
                            'tag': {'kind': 'EXPLICIT', 'number': 2},
                            'type': 'S1'},
                           {'members': [{'name': 'a', 'type': 'BOOLEAN'}],
                            'name': 'd',
                            'type': 'CHOICE'}],
               'type': 'SEQUENCE'},
        'S5': {'members': [{'name': 'a', 'type': 'INTEGER'},
                           {'name': 'b', 'type': 'S1'},
                           {'members': [{'name': 'a', 'type': 'BOOLEAN'}],
                            'name': 'c',
                            'type': 'CHOICE'}],
               'type': 'SEQUENCE'},
        'S6': {'members': [{'name': 'a', 'type': 'INTEGER'},
                           None,
                           {'name': 'b', 'type': 'BOOLEAN'}],
               'type': 'SEQUENCE'},
        'S7': {'members': [{'name': 'a',
                            'tag': {'kind': 'EXPLICIT', 'number': 2},
                            'type': 'INTEGER'},
                           None,
                           {'name': 'b', 'type': 'BOOLEAN'}],
               'type': 'SEQUENCE'},
        'S8': {'members': [{'element': {'members': [{'name': 'a', 'type': 'INTEGER'},
                                                    {'name': 'b', 'type': 'BOOLEAN'}],
                                        'type': 'CHOICE'},
                            'name': 'a',
                            'type': 'SEQUENCE OF'}],
               'type': 'SEQUENCE'},
        'S9': {'members': [{'element': {'members': [{'name': 'a', 'type': 'INTEGER'},
                                                    {'name': 'b', 'type': 'BOOLEAN'}],
                                        'type': 'CHOICE'},
                            'name': 'a',
                            'type': 'SET OF'}],
               'type': 'SET'},
        'Type1': {'type': 'VisibleString'},
        'Type2': {'tag': {'class': 'APPLICATION',
                          'kind': 'IMPLICIT',
                          'number': 3},
                  'type': 'Type1'},
        'Type3': {'tag': {'kind': 'EXPLICIT', 'number': 2},
                  'type': 'Type2'},
        'Type4': {'tag': {'class': 'APPLICATION',
                          'kind': 'IMPLICIT',
                          'number': 7},
                  'type': 'Type3'},
        'Type5': {'tag': {'kind': 'IMPLICIT', 'number': 2},
                  'type': 'Type2'}},
    'values': {}}}
| mit |
sgraham/nope | tools/perf/page_sets/top_25_pages.py | 9 | 2200 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page
from telemetry.page import page_set
from page_sets import top_pages
class Top25PageSet(page_set.PageSet):

  """ Page set consists of top 25 pages with only navigation actions. """

  def __init__(self):
    super(Top25PageSet, self).__init__(
        user_agent_type='desktop',
        archive_data_file='data/top_25.json',
        bucket=page_set.PARTNER_BUCKET)

    # Stories with dedicated page classes, added in the original order.
    story_classes = (
        top_pages.GoogleWebSearchPage,
        top_pages.GmailPage,
        top_pages.GoogleCalendarPage,
        top_pages.GoogleImageSearchPage,
        top_pages.GoogleDocPage,
        top_pages.GooglePlusPage,
        top_pages.YoutubePage,
        top_pages.BlogspotPage,
        top_pages.WordpressPage,
        top_pages.FacebookPage,
        top_pages.LinkedinPage,
        top_pages.WikipediaPage,
        top_pages.TwitterPage,
        top_pages.PinterestPage,
        top_pages.ESPNPage,
        top_pages.WeatherPage,
        top_pages.YahooGamesPage,
    )
    for story_class in story_classes:
      self.AddUserStory(story_class(self))

    # Remaining pages are plain URL navigations.
    other_urls = [
        # Why: #1 news worldwide (Alexa global)
        'http://news.yahoo.com',
        # Why: #2 news worldwide
        'http://www.cnn.com',
        # Why: #1 world commerce website by visits; #3 commerce in the US by
        # time spent
        'http://www.amazon.com',
        # Why: #1 commerce website by time spent by users in US
        'http://www.ebay.com',
        # Why: #1 Alexa recreation
        'http://booking.com',
        # Why: #1 Alexa reference
        'http://answers.yahoo.com',
        # Why: #1 Alexa sports
        'http://sports.yahoo.com/',
        # Why: top tech blog
        'http://techcrunch.com'
    ]
    for url in other_urls:
      self.AddUserStory(page.Page(url, self))
| bsd-3-clause |
hmoco/osf.io | api_tests/base/test_middleware.py | 3 | 4470 | # -*- coding: utf-8 -*-
import pytest
from django.http import HttpResponse
from tests.base import ApiTestCase, fake
from urlparse import urlparse
import mock
from nose.tools import * # flake8: noqa
from rest_framework.test import APIRequestFactory
from website.util import api_v2_url
from api.base import settings
from api.base.middleware import CorsMiddleware
from tests.base import ApiTestCase
from osf_tests import factories
class MiddlewareTestCase(ApiTestCase):
    """Base test case that instantiates the middleware under test.

    Subclasses set ``MIDDLEWARE`` to the middleware class; ``setUp`` then
    builds one instance of it, plus a DRF request factory and a mock
    response object for use in the tests.
    """

    # Middleware class under test; must be overridden by subclasses.
    MIDDLEWARE = None

    def setUp(self):
        super(MiddlewareTestCase, self).setUp()
        self.request_factory = APIRequestFactory()
        self.mock_response = mock.Mock()
        self.middleware = self.MIDDLEWARE()
class TestCorsMiddleware(MiddlewareTestCase):
    """Tests for CorsMiddleware's origin whitelisting and credential rules."""
    MIDDLEWARE = CorsMiddleware

    def test_institutions_added_to_cors_whitelist(self):
        """Institution custom domains get CORS headers after a whitelist reload."""
        url = api_v2_url('users/me/')
        domain = urlparse("https://dinosaurs.sexy")
        factories.InstitutionFactory(
            domains=[domain.netloc.lower()],
            name="Institute for Sexy Lizards"
        )
        settings.load_origins_whitelist()
        request = self.request_factory.get(url, HTTP_ORIGIN=domain.geturl())
        response = HttpResponse()
        self.middleware.process_request(request)
        self.middleware.process_response(request, response)
        assert_equal(response['Access-Control-Allow-Origin'], domain.geturl())

    # Bug fix: this method previously reused the name
    # ``test_institutions_added_to_cors_whitelist``, silently shadowing the
    # institution test above so it never ran.
    def test_preprintproviders_added_to_cors_whitelist(self):
        """Preprint provider domains get CORS headers after a whitelist reload."""
        url = api_v2_url('users/me/')
        domain = urlparse("https://dinoprints.sexy")
        factories.PreprintProviderFactory(
            domain=domain.netloc.lower(),
            _id="DinoXiv"
        )
        settings.load_origins_whitelist()
        request = self.request_factory.get(url, HTTP_ORIGIN=domain.geturl())
        response = HttpResponse()
        self.middleware.process_request(request)
        self.middleware.process_response(request, response)
        assert_equal(response['Access-Control-Allow-Origin'], domain.geturl())

    def test_cross_origin_request_with_cookies_does_not_get_cors_headers(self):
        """Cookie-authenticated cross-origin requests must NOT be whitelisted."""
        url = api_v2_url('users/me/')
        domain = urlparse("https://dinosaurs.sexy")
        request = self.request_factory.get(url, HTTP_ORIGIN=domain.geturl())
        response = {}
        with mock.patch.object(request, 'COOKIES', True):
            self.middleware.process_request(request)
            self.middleware.process_response(request, response)
        assert_not_in('Access-Control-Allow-Origin', response)

    def test_cross_origin_request_with_Authorization_gets_cors_headers(self):
        """Bearer-token requests are allowed cross-origin."""
        url = api_v2_url('users/me/')
        domain = urlparse("https://dinosaurs.sexy")
        request = self.request_factory.get(
            url,
            HTTP_ORIGIN=domain.geturl(),
            HTTP_AUTHORIZATION="Bearer aqweqweohuweglbiuwefq"
        )
        response = HttpResponse()
        self.middleware.process_request(request)
        self.middleware.process_response(request, response)
        assert_equal(response['Access-Control-Allow-Origin'], domain.geturl())

    def test_cross_origin_request_with_Authorization_and_cookie_does_not_get_cors_headers(self):
        """Cookies take precedence: token + cookie together are refused."""
        url = api_v2_url('users/me/')
        domain = urlparse("https://dinosaurs.sexy")
        request = self.request_factory.get(
            url,
            HTTP_ORIGIN=domain.geturl(),
            HTTP_AUTHORIZATION="Bearer aqweqweohuweglbiuwefq"
        )
        response = {}
        with mock.patch.object(request, 'COOKIES', True):
            self.middleware.process_request(request)
            self.middleware.process_response(request, response)
        assert_not_in('Access-Control-Allow-Origin', response)

    def test_non_institution_preflight_request_requesting_authorization_header_gets_cors_headers(self):
        """OPTIONS preflights asking for the Authorization header are allowed."""
        url = api_v2_url('users/me/')
        domain = urlparse("https://dinosaurs.sexy")
        request = self.request_factory.options(
            url,
            HTTP_ORIGIN=domain.geturl(),
            HTTP_ACCESS_CONTROL_REQUEST_METHOD='GET',
            HTTP_ACCESS_CONTROL_REQUEST_HEADERS='authorization'
        )
        response = HttpResponse()
        self.middleware.process_request(request)
        self.middleware.process_response(request, response)
        assert_equal(response['Access-Control-Allow-Origin'], domain.geturl())
| apache-2.0 |
xxsergzzxx/python-for-android | python-modules/twisted/twisted/words/im/basesupport.py | 55 | 7900 | # Copyright (c) 2001-2004 Twisted Matrix Laboratories.
# See LICENSE for details.
#
"""Instance Messenger base classes for protocol support.
You will find these useful if you're adding a new protocol to IM.
"""
# Abstract representation of chat "model" classes
from twisted.words.im.locals import ONLINE, OFFLINE, OfflineError
from twisted.words.im import interfaces
from twisted.internet.protocol import Protocol
from twisted.python.reflect import prefixedMethods
from twisted.persisted import styles
from twisted.internet import error
class AbstractGroup:
    """Base class for a chat group (room/channel) on an IM account."""

    def __init__(self, name, account):
        self.name = name
        self.account = account

    def getGroupCommands(self):
        """Find the group commands.

        These are the methods on me whose names start with ``imgroup_``;
        they are invoked with no arguments.
        """
        return prefixedMethods(self, "imgroup_")

    def getTargetCommands(self, target):
        """Find the commands applicable to a member of this group.

        These are the methods on me whose names start with ``imtarget_``;
        they are invoked with a user present in this room as an argument.
        Subclasses may override this to filter for commands appropriate to
        the given user.
        """
        return prefixedMethods(self, "imtarget_")

    def join(self):
        """Join this group, raising OfflineError when not connected."""
        client = self.account.client
        if not client:
            raise OfflineError
        client.joinGroup(self.name)

    def leave(self):
        """Leave this group, raising OfflineError when not connected."""
        client = self.account.client
        if not client:
            raise OfflineError
        client.leaveGroup(self.name)

    def __repr__(self):
        return '<%s %r>' % (self.__class__, self.name)

    def __str__(self):
        return '%s@%s' % (self.name, self.account.accountName)
class AbstractPerson:
    """Base class for a single contact on an IM account."""

    def __init__(self, name, baseAccount):
        self.name = name
        self.account = baseAccount
        self.status = OFFLINE

    def getPersonCommands(self):
        """Find the person commands.

        These are the methods on me whose names start with ``imperson_``;
        they are invoked with no arguments.
        """
        return prefixedMethods(self, "imperson_")

    def getIdleTime(self):
        """Return the contact's idle time as a string.

        The base implementation has no idle information and returns a
        placeholder.
        """
        return '--'

    def __repr__(self):
        return '<%s %r/%s>' % (self.__class__, self.name, self.status)

    def __str__(self):
        return '%s@%s' % (self.name, self.account.accountName)
class AbstractClientMixin:
    """Designed to be mixed in to a Protocol implementing class.

    Inherit from me first.

    @ivar _logonDeferred: Fired when I am done logging in.
    """
    def __init__(self, account, chatui, logonDeferred):
        # Locate the first Protocol subclass among my bases and remember it
        # on the class so the connection callbacks below can delegate to it
        # explicitly (this mixin is meant to precede the protocol in the MRO).
        for base in self.__class__.__bases__:
            if issubclass(base, Protocol):
                self.__class__._protoBase = base
                break
        else:
            # NOTE(review): if no Protocol base is found, _protoBase is left
            # unset and the delegating methods below would raise
            # AttributeError -- presumably subclasses always mix in a
            # Protocol; confirm before relying on this silent fall-through.
            pass
        self.account = account
        self.chat = chatui
        self._logonDeferred = logonDeferred

    def connectionMade(self):
        # Delegate straight to the protocol base class.
        self._protoBase.connectionMade(self)

    def connectionLost(self, reason):
        # Notify the account and chat UI before the protocol's own teardown.
        self.account._clientLost(self, reason)
        self.unregisterAsAccountClient()
        return self._protoBase.connectionLost(self, reason)

    def unregisterAsAccountClient(self):
        """Tell the chat UI that I have `signed off'.
        """
        self.chat.unregisterAccountClient(self)
class AbstractAccount(styles.Versioned):
    """Base class for Accounts.

    I am the start of an implementation of L{IAccount<interfaces.IAccount>}, I
    implement L{isOnline} and most of L{logOn}, though you'll need to implement
    L{_startLogOn} in a subclass.

    @cvar _groupFactory: A Callable that will return a L{IGroup} appropriate
        for this account type.
    @cvar _personFactory: A Callable that will return a L{IPerson} appropriate
        for this account type.

    @type _isConnecting: boolean
    @ivar _isConnecting: Whether I am in the process of establishing a
        connection to the server.
    @type _isOnline: boolean
    @ivar _isOnline: Whether I am currently on-line with the server.

    @ivar accountName:
    @ivar autoLogin:
    @ivar username:
    @ivar password:
    @ivar host:
    @ivar port:
    """

    _isOnline = 0
    _isConnecting = 0
    client = None

    _groupFactory = AbstractGroup
    _personFactory = AbstractPerson

    # NOTE(review): styles.Versioned keys on the attribute name
    # 'persistenceVersion'; this spelling ('persistance...') looks like a
    # typo, which would mean the upgrade hook below never runs -- confirm
    # against twisted.persisted.styles before changing.
    persistanceVersion = 2

    def __init__(self, accountName, autoLogin, username, password, host, port):
        self.accountName = accountName
        self.autoLogin = autoLogin
        self.username = username
        self.password = password
        self.host = host
        self.port = port

        # Caches of group/person objects, keyed by name (see getGroup/getPerson).
        self._groups = {}
        self._persons = {}

    def upgrateToVersion2(self):
        # NOTE(review): presumably meant to be 'upgradeToVersion2' (the hook
        # name styles.Versioned invokes); with this spelling it appears to be
        # dead code -- verify before renaming.
        # Added in CVS revision 1.16.
        for k in ('_groups', '_persons'):
            if not hasattr(self, k):
                setattr(self, k, {})

    def __getstate__(self):
        # Strip transient connection state so it is never pickled.
        state = styles.Versioned.__getstate__(self)
        for k in ('client', '_isOnline', '_isConnecting'):
            try:
                del state[k]
            except KeyError:
                pass
        return state

    def isOnline(self):
        return self._isOnline

    def logOn(self, chatui):
        """Log on to this account.

        Takes care to not start a connection if a connection is
        already in progress.  You will need to implement
        L{_startLogOn} for this to work, and it would be a good idea
        to override L{_loginFailed} too.

        @returntype: Deferred L{interfaces.IClient}
        """
        if (not self._isConnecting) and (not self._isOnline):
            self._isConnecting = 1
            d = self._startLogOn(chatui)
            d.addCallback(self._cb_logOn)
            # if chatui is not None:
            # (I don't particularly like having to pass chatUI to this function,
            # but we haven't factored it out yet.)
            d.addCallback(chatui.registerAccountClient)
            d.addErrback(self._loginFailed)
            return d
        else:
            raise error.ConnectError("Connection in progress")

    def getGroup(self, name):
        """Group factory.

        Returns a cached group when one exists for this name; otherwise a
        new one is created via _groupFactory and cached.

        @param name: Name of the group on this account.
        @type name: string
        """
        group = self._groups.get(name)
        if group is None:
            group = self._groupFactory(name, self)
            self._groups[name] = group
        return group

    def getPerson(self, name):
        """Person factory.

        Returns a cached person when one exists for this name; otherwise a
        new one is created via _personFactory and cached.

        @param name: Name of the person on this account.
        @type name: string
        """
        person = self._persons.get(name)
        if person is None:
            person = self._personFactory(name, self)
            self._persons[name] = person
        return person

    def _startLogOn(self, chatui):
        """Start the sign on process.

        Factored out of L{logOn}.

        @returntype: Deferred L{interfaces.IClient}
        """
        raise NotImplementedError()

    def _cb_logOn(self, client):
        # Successful login: record the client and flip the state flags.
        self._isConnecting = 0
        self._isOnline = 1
        self.client = client
        return client

    def _loginFailed(self, reason):
        """Errorback for L{logOn}.

        @type reason: Failure

        @returns: I{reason}, for further processing in the callback chain.
        @returntype: Failure
        """
        self._isConnecting = 0
        self._isOnline = 0 # just in case
        return reason

    def _clientLost(self, client, reason):
        # Connection gone: forget the client and reset the state flags.
        self.client = None
        self._isConnecting = 0
        self._isOnline = 0
        return reason

    def __repr__(self):
        return "<%s: %s (%s@%s:%s)>" % (self.__class__,
                                        self.accountName,
                                        self.username,
                                        self.host,
                                        self.port)
| apache-2.0 |
OrkoHunter/networkx | networkx/algorithms/triads.py | 2 | 5507 | # triads.py - functions for analyzing triads of a graph
#
# Copyright 2015 NetworkX developers.
# Copyright 2011 Reya Group <http://www.reyagroup.com>
# Copyright 2011 Alex Levenson <alex@isnotinvain.com>
# Copyright 2011 Diederik van Liere <diederik.vanliere@rotman.utoronto.ca>
#
# This file is part of NetworkX.
#
# NetworkX is distributed under a BSD license; see LICENSE.txt for more
# information.
"""Functions for analyzing triads of a graph."""
from __future__ import division
import networkx as nx
__author__ = '\n'.join(['Alex Levenson (alex@isnontinvain.com)',
'Diederik van Liere (diederik.vanliere@rotman.utoronto.ca)'])
__all__ = ['triadic_census']
#: The names of each type of triad.
TRIAD_NAMES = ('003', '012', '102', '021D', '021U', '021C', '111D', '111U',
               '030T', '030C', '201', '120D', '120U', '120C', '210', '300')


#: The integer codes representing each type of triad.
#:
#: Triads that are the same up to symmetry have the same code.
#: Indexed by the 6-bit edge code produced by ``_tricode``; each value is a
#: 1-based index into ``TRIAD_NAMES``.
TRICODES = (1, 2, 2, 3, 2, 4, 6, 8, 2, 6, 5, 7, 3, 8, 7, 11, 2, 6, 4, 8, 5, 9,
            9, 13, 6, 10, 9, 14, 7, 14, 12, 15, 2, 5, 6, 7, 6, 9, 10, 14, 4, 9,
            9, 12, 8, 13, 14, 15, 3, 7, 8, 11, 7, 12, 14, 15, 8, 14, 13, 15,
            11, 15, 15, 16)


#: A dictionary mapping triad code to triad name.
TRICODE_TO_NAME = {i: TRIAD_NAMES[code - 1] for i, code in enumerate(TRICODES)}
def triad_graphs():
    """Returns a dictionary mapping triad name to triad graph.

    Each value is a three-node DiGraph (nodes ``'a'``, ``'b'``, ``'c'``)
    wired as the canonical representative of that triad type; ``'003'`` is
    the empty triad and therefore gets no edges.
    """
    def abc_graph():
        """Returns a directed graph on three nodes, named ``'a'``, ``'b'``, and
        ``'c'``.
        """
        G = nx.DiGraph()
        G.add_nodes_from('abc')
        return G
    tg = {name: abc_graph() for name in TRIAD_NAMES}
    tg['012'].add_edges_from([('a', 'b')])
    # Fix: the '102' edges were previously added twice; the duplicate
    # statement was a harmless no-op in a DiGraph and has been removed.
    tg['102'].add_edges_from([('a', 'b'), ('b', 'a')])
    tg['021D'].add_edges_from([('b', 'a'), ('b', 'c')])
    tg['021U'].add_edges_from([('a', 'b'), ('c', 'b')])
    tg['021C'].add_edges_from([('a', 'b'), ('b', 'c')])
    tg['111D'].add_edges_from([('a', 'c'), ('c', 'a'), ('b', 'c')])
    tg['111U'].add_edges_from([('a', 'c'), ('c', 'a'), ('c', 'b')])
    tg['030T'].add_edges_from([('a', 'b'), ('c', 'b'), ('a', 'c')])
    tg['030C'].add_edges_from([('b', 'a'), ('c', 'b'), ('a', 'c')])
    tg['201'].add_edges_from([('a', 'b'), ('b', 'a'), ('a', 'c'), ('c', 'a')])
    tg['120D'].add_edges_from([('b', 'c'), ('b', 'a'), ('a', 'c'), ('c', 'a')])
    tg['120C'].add_edges_from([('a', 'b'), ('b', 'c'), ('a', 'c'), ('c', 'a')])
    tg['120U'].add_edges_from([('a', 'b'), ('c', 'b'), ('a', 'c'), ('c', 'a')])
    tg['210'].add_edges_from([('a', 'b'), ('b', 'c'), ('c', 'b'), ('a', 'c'),
                              ('c', 'a')])
    tg['300'].add_edges_from([('a', 'b'), ('b', 'a'), ('b', 'c'), ('c', 'b'),
                              ('a', 'c'), ('c', 'a')])
    return tg
def _tricode(G, v, u, w):
"""Returns the integer code of the given triad.
This is some fancy magic that comes from Batagelj and Mrvar's paper. It
treats each edge joining a pair of ``v``, ``u``, and ``w`` as a bit in
the binary representation of an integer.
"""
combos = ((v, u, 1), (u, v, 2), (v, w, 4), (w, v, 8), (u, w, 16),
(w, u, 32))
return sum(x for u, v, x in combos if v in G[u])
def triadic_census(G):
    """Determines the triadic census of a directed graph.

    The triadic census is a count of how many of the 16 possible types of
    triads are present in a directed graph.

    Parameters
    ----------
    G : digraph
       A NetworkX DiGraph

    Returns
    -------
    census : dict
       Dictionary with triad names as keys and number of occurrences as values.

    Notes
    -----
    This algorithm has complexity `O(m)` where `m` is the number of edges in
    the graph.

    References
    ----------
    .. [1] Vladimir Batagelj and Andrej Mrvar, A subquadratic triad census
        algorithm for large sparse networks with small maximum degree,
        University of Ljubljana,
        http://vlado.fmf.uni-lj.si/pub/networks/doc/triads/triads.pdf

    """
    if not G.is_directed():
        raise nx.NetworkXError('Not defined for undirected graphs.')

    # Initialize the count for each triad to be zero.
    census = {name: 0 for name in TRIAD_NAMES}
    n = len(G)
    # m = dict(zip(G, range(n)))
    # Arbitrary but fixed node ordering, used to count each triad once.
    m = {v: i for i, v in enumerate(G)}
    for v in G:
        vnbrs = set(G.pred[v]) | set(G.succ[v])
        for u in vnbrs:
            # Process each connected dyad (v, u) exactly once, with u "above" v.
            if m[u] <= m[v]:
                continue
            neighbors = (vnbrs | set(G.succ[u]) | set(G.pred[u])) - {u, v}
            # Calculate dyadic triads instead of counting them: every node
            # outside the dyad's neighborhood forms a 102 or 012 triad.
            if v in G[u] and u in G[v]:
                census['102'] += n - len(neighbors) - 2
            else:
                census['012'] += n - len(neighbors) - 2
            # Count connected triads.
            for w in neighbors:
                # The condition picks a canonical dyad per triad so each
                # connected triad is classified exactly once.
                if m[u] < m[w] or (m[v] < m[w] < m[u]
                                   and v not in G.pred[w]
                                   and v not in G.succ[w]):
                    code = _tricode(G, v, u, w)
                    census[TRICODE_TO_NAME[code]] += 1
    # null triads = total number of possible triads - all found triads
    #
    # Use integer division here, since we know this formula guarantees an
    # integral value.
    census['003'] = ((n * (n - 1) * (n - 2)) // 6) - sum(census.values())
    return census
| bsd-3-clause |
CentroGeo/geonode | geonode/geoapps/urls.py | 6 | 2478 | # -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2020 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
from django.conf.urls import url, include
from django.views.generic import TemplateView
from geonode.monitoring import register_url_event
from . import views
# Packages whose translations the JavaScript catalog view should serve.
js_info_dict = {
    'packages': ('geonode.geoapps', ),
}

# Browse page, wrapped so the monitoring app records URL hit events.
apps_list = register_url_event()(TemplateView.as_view(template_name='apps/app_list.html'))

urlpatterns = [
    # 'geonode.geoapps.views',
    url(r'^$',
        apps_list,
        {'facet_type': 'geoapps'},
        name='apps_browse'),
    url(r'^new$', views.new_geoapp, name="new_geoapp"),
    url(r'^preview/(?P<geoappid>[^/]*)$', views.geoapp_detail, name="geoapp_detail"),
    url(r'^preview/(?P<geoappid>\d+)/metadata$', views.geoapp_metadata, name='geoapp_metadata'),
    url(r'^preview/(?P<geoappid>[^/]*)/metadata_detail$',
        views.geoapp_metadata_detail, name='geoapp_metadata_detail'),
    url(r'^preview/(?P<geoappid>\d+)/metadata_advanced$',
        views.geoapp_metadata_advanced, name='geoapp_metadata_advanced'),
    url(r'^(?P<geoappid>\d+)/remove$', views.geoapp_remove, name="geoapp_remove"),
    url(r'^(?P<geoappid>[^/]+)/view$', views.geoapp_edit, name='geoapp_view'),
    url(r'^(?P<geoappid>[^/]+)/edit$', views.geoapp_edit, name='geoapp_edit'),
    # The same edit view renders different templates for update/embed/download.
    url(r'^(?P<geoappid>[^/]+)/update$', views.geoapp_edit,
        {'template': 'apps/app_update.html'}, name='geoapp_update'),
    url(r'^(?P<geoappid>[^/]+)/embed$', views.geoapp_edit,
        {'template': 'apps/app_embed.html'}, name='geoapp_embed'),
    url(r'^(?P<geoappid>[^/]+)/download$', views.geoapp_edit,
        {'template': 'apps/app_download.html'}, name='geoapp_download'),
    url(r'^', include('geonode.geoapps.api.urls')),
]
| gpl-3.0 |
jlobaton/namebench | nb_third_party/dns/rdtypes/IN/SRV.py | 248 | 3396 | # Copyright (C) 2003-2007, 2009, 2010 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import struct
import dns.exception
import dns.rdata
import dns.name
class SRV(dns.rdata.Rdata):
    """SRV record

    @ivar priority: the priority
    @type priority: int
    @ivar weight: the weight
    @type weight: int
    @ivar port: the port of the service
    @type port: int
    @ivar target: the target host
    @type target: dns.name.Name object
    @see: RFC 2782"""

    __slots__ = ['priority', 'weight', 'port', 'target']

    def __init__(self, rdclass, rdtype, priority, weight, port, target):
        super(SRV, self).__init__(rdclass, rdtype)
        self.priority = priority
        self.weight = weight
        self.port = port
        self.target = target

    def to_text(self, origin=None, relativize=True, **kw):
        # Presentation form is "priority weight port target" (RFC 2782).
        target = self.target.choose_relativity(origin, relativize)
        return '%d %d %d %s' % (self.priority, self.weight, self.port,
                                target)

    def from_text(cls, rdclass, rdtype, tok, origin = None, relativize = True):
        # Parse three 16-bit integers followed by the target name.
        priority = tok.get_uint16()
        weight = tok.get_uint16()
        port = tok.get_uint16()
        target = tok.get_name(None)
        target = target.choose_relativity(origin, relativize)
        tok.get_eol()
        return cls(rdclass, rdtype, priority, weight, port, target)

    from_text = classmethod(from_text)

    def to_wire(self, file, compress = None, origin = None):
        # Wire form: priority, weight, port as network-order uint16s,
        # followed by the target name.
        three_ints = struct.pack("!HHH", self.priority, self.weight, self.port)
        file.write(three_ints)
        self.target.to_wire(file, compress, origin)

    def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin = None):
        (priority, weight, port) = struct.unpack('!HHH',
                                                 wire[current : current + 6])
        current += 6
        rdlen -= 6
        (target, cused) = dns.name.from_wire(wire[: current + rdlen],
                                             current)
        # The target name must consume exactly the remaining rdata bytes.
        if cused != rdlen:
            raise dns.exception.FormError
        if not origin is None:
            target = target.relativize(origin)
        return cls(rdclass, rdtype, priority, weight, port, target)

    from_wire = classmethod(from_wire)

    def choose_relativity(self, origin = None, relativize = True):
        self.target = self.target.choose_relativity(origin, relativize)

    def _cmp(self, other):
        # Compare the fixed-size fields in wire form first, then the target
        # name.  NOTE: relies on the Python 2 built-in cmp(); this module is
        # Python 2 only as written.
        sp = struct.pack("!HHH", self.priority, self.weight, self.port)
        op = struct.pack("!HHH", other.priority, other.weight, other.port)
        v = cmp(sp, op)
        if v == 0:
            v = cmp(self.target, other.target)
        return v
| apache-2.0 |
shanewholloway/werkzeug | examples/coolmagic/application.py | 50 | 2604 | # -*- coding: utf-8 -*-
"""
coolmagic.application
~~~~~~~~~~~~~~~~~~~~~
This module provides the WSGI application.
The WSGI middlewares are applied in the `make_app` factory function
that automatically wraps the application within the require
middlewares. Per default only the `SharedDataMiddleware` is applied.
:copyright: (c) 2009 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from os import path, listdir
from coolmagic.utils import Request, local_manager, redirect
from werkzeug.routing import Map, Rule, RequestRedirect
from werkzeug.exceptions import HTTPException, NotFound
class CoolMagicApplication(object):
    """
    The application class. It's passed a directory with configuration values.
    """

    def __init__(self, config):
        self.config = config

        # Import every view module so that their module-level code runs
        # and registers endpoints in coolmagic.utils.exported_views.
        for fn in listdir(path.join(path.dirname(__file__), 'views')):
            if fn.endswith('.py') and fn != '__init__.py':
                __import__('coolmagic.views.' + fn[:-3])
        from coolmagic.utils import exported_views
        rules = [
            # url for shared data. this will always be unmatched
            # because either the middleware or the webserver
            # handles that request first.
            Rule('/public/<path:file>',
                 endpoint='shared_data')
        ]
        self.views = {}
        # Views without a rule are still registered (e.g. error views)
        # so they can be dispatched to by name.
        for endpoint, (func, rule, extra) in exported_views.iteritems():
            if rule is not None:
                rules.append(Rule(rule, endpoint=endpoint, **extra))
            self.views[endpoint] = func
        self.url_map = Map(rules)

    def __call__(self, environ, start_response):
        # WSGI entry point: match the request path and call the view.
        urls = self.url_map.bind_to_environ(environ)
        req = Request(environ, urls)
        try:
            endpoint, args = urls.match(req.path)
            resp = self.views[endpoint](**args)
        except NotFound, e:
            # Unknown URL: render the registered 404 view instead.
            resp = self.views['static.not_found']()
        except (HTTPException, RequestRedirect), e:
            # HTTP errors and redirects are valid WSGI responses themselves.
            resp = e
        return resp(environ, start_response)
def make_app(config=None):
    """Create and return a fully wrapped `CoolMagicApplication`.

    Optional WSGI middlewares are applied here: shared static data
    serving under ``/public`` and the werkzeug local manager cleanup
    middleware.
    """
    application = CoolMagicApplication(config or {})
    # Serve the files below ./public under the /public URL prefix.
    from werkzeug.utils import SharedDataMiddleware
    static_root = path.join(path.dirname(__file__), 'public')
    application = SharedDataMiddleware(application, {'/public': static_root})
    # Release werkzeug context locals when each request ends.
    return local_manager.make_middleware(application)
| bsd-3-clause |
Maronato/aosalunos | misago/apps/profiles/warnings/urls.py | 3 | 1060 | from django.conf.urls import patterns, url
def register_profile_urls(first=False):
    """Build the warnings URL patterns for a user profile.

    When *first* is true the warnings tab is the default profile page,
    so its patterns are mounted at the profile root; otherwise they
    live under the ``warnings/`` prefix.
    """
    views_module = 'misago.apps.profiles.warnings.views'
    if first:
        return list(patterns(views_module,
            url(r'^$', 'warnings', name="user"),
            url(r'^$', 'warnings', name="user_warnings"),
            url(r'^(?P<page>[1-9]([0-9]+)?)/$', 'warnings', name="user_warnings"),
            url(r'^(?P<warning>\d+)/cancel/$', 'cancel_warning', name="user_warnings_cancel"),
            url(r'^(?P<warning>\d+)/delete/$', 'delete_warning', name="user_warnings_delete"),
        ))
    return list(patterns(views_module,
        url(r'^warnings/$', 'warnings', name="user_warnings"),
        url(r'^warnings/(?P<page>[1-9]([0-9]+)?)/$', 'warnings', name="user_warnings"),
        url(r'^warnings/(?P<warning>\d+)/cancel/$', 'cancel_warning', name="user_warnings_cancel"),
        url(r'^warnings/(?P<warning>\d+)/delete/$', 'delete_warning', name="user_warnings_delete"),
    ))
edx/lettuce | tests/integration/lib/Django-1.3/django/utils/simplejson/scanner.py | 928 | 2227 | """JSON token scanner
"""
import re
try:
from simplejson._speedups import make_scanner as c_make_scanner
except ImportError:
c_make_scanner = None
__all__ = ['make_scanner']
NUMBER_RE = re.compile(
r'(-?(?:0|[1-9]\d*))(\.\d+)?([eE][-+]?\d+)?',
(re.VERBOSE | re.MULTILINE | re.DOTALL))
def py_make_scanner(context):
    # Pure-Python fallback scanner.  Binds the context's parser callbacks
    # to locals once, then returns a closure that scans a single JSON
    # value.  Used only when the C speedup is unavailable.
    parse_object = context.parse_object
    parse_array = context.parse_array
    parse_string = context.parse_string
    match_number = NUMBER_RE.match
    encoding = context.encoding
    strict = context.strict
    parse_float = context.parse_float
    parse_int = context.parse_int
    parse_constant = context.parse_constant
    object_hook = context.object_hook

    def _scan_once(string, idx):
        # Scan one JSON value starting at idx; return (value, end_index).
        # StopIteration signals "no value here" to the caller.
        try:
            nextchar = string[idx]
        except IndexError:
            raise StopIteration
        if nextchar == '"':
            return parse_string(string, idx + 1, encoding, strict)
        elif nextchar == '{':
            return parse_object((string, idx + 1), encoding, strict, _scan_once, object_hook)
        elif nextchar == '[':
            return parse_array((string, idx + 1), _scan_once)
        elif nextchar == 'n' and string[idx:idx + 4] == 'null':
            return None, idx + 4
        elif nextchar == 't' and string[idx:idx + 4] == 'true':
            return True, idx + 4
        elif nextchar == 'f' and string[idx:idx + 5] == 'false':
            return False, idx + 5
        m = match_number(string, idx)
        if m is not None:
            integer, frac, exp = m.groups()
            # A fraction or exponent part makes it a float; otherwise int.
            if frac or exp:
                res = parse_float(integer + (frac or '') + (exp or ''))
            else:
                res = parse_int(integer)
            return res, m.end()
        elif nextchar == 'N' and string[idx:idx + 3] == 'NaN':
            return parse_constant('NaN'), idx + 3
        elif nextchar == 'I' and string[idx:idx + 8] == 'Infinity':
            return parse_constant('Infinity'), idx + 8
        elif nextchar == '-' and string[idx:idx + 9] == '-Infinity':
            return parse_constant('-Infinity'), idx + 9
        else:
            raise StopIteration
    return _scan_once
# Prefer the C-accelerated scanner when the speedups module is available.
make_scanner = c_make_scanner or py_make_scanner
| gpl-3.0 |
HPI-SWA-Lab/RSqueak | rsqueakvm/test/test_socket_primitives.py | 1 | 6424 | import py
import time
from rsqueakvm import constants
from rsqueakvm.model.compiled_methods import W_PreSpurCompiledMethod
from rsqueakvm.model.variable import W_BytesObject
from rsqueakvm.primitives import prim_table
from rsqueakvm.primitives.constants import EXTERNAL_CALL
from rsqueakvm.error import PrimitiveFailedError
from rsqueakvm.plugins import socket_plugin as socket
from .util import create_space, copy_to_module, cleanup_module
from .test_primitives import mock
def setup_module():
    # Build a bootstrapped object space once for the whole module and
    # publish the local helpers (space, wrap, ...) as module globals via
    # copy_to_module.
    space = create_space(bootstrap = True)
    space.set_system_attribute(constants.SYSTEM_ATTRIBUTE_IMAGE_NAME_INDEX, "IMAGENAME")
    wrap = space.w
    bootstrap_class = space.bootstrap_class
    new_frame = space.make_frame
    copy_to_module(locals(), __name__)
def teardown_module():
    # Drop the module globals installed by setup_module.
    cleanup_module(__name__)
# Dummy image name; this module never loads a real image file.
IMAGENAME = "anImage.image"
def _prim(space, name, module, stack, context = None):
    # Build a mocked frame holding `stack`, plus a compiled method whose
    # primitive is EXTERNAL_CALL targeting `module`>>`name`.  Returns the
    # frame, a copy of its initial stack, and a thunk that performs the call.
    interp, w_frame, argument_count = mock(space, stack, context)
    orig_stack = list(w_frame.as_context_get_shadow(space).stack())
    prim_meth = W_PreSpurCompiledMethod(space, 0, header=17045052)
    prim_meth._primitive = EXTERNAL_CALL
    prim_meth.argsize = argument_count - 1
    # Literal 1 holds the (module name, function name) descriptor that
    # EXTERNAL_CALL uses to locate the plugin primitive.
    descr = space.wrap_list([space.wrap_string(module), space.wrap_string(name)])
    prim_meth.literalatput0(space, 1, descr)
    def call():
        prim_table[EXTERNAL_CALL](interp, w_frame.as_context_get_shadow(space), argument_count-1, prim_meth)
    return w_frame, orig_stack, call
def prim(name, module=None, stack = None, context = None):
    # Run the named plugin primitive and return the popped result.
    # Defaults to the SocketPlugin with nil as the sole receiver.
    if module is None: module = "SocketPlugin"
    if stack is None: stack = [space.w_nil]
    w_frame, orig_stack, call = _prim(space, name, module, stack, context)
    call()
    res = w_frame.as_context_get_shadow(space).pop()
    s_frame = w_frame.as_context_get_shadow(space)
    assert not s_frame.stackdepth() - s_frame.tempsize() # check args are consumed
    return res
def prim_fails(name, module, stack):
    """Run the named plugin primitive and assert that it fails.

    The call must raise PrimitiveFailedError and leave the frame's
    stack exactly as it was (failed primitives must not consume args).
    """
    # BUG FIX: _prim's first parameter is the object space.  This helper
    # previously called _prim(name, module, stack), which shifted every
    # argument by one and raised a TypeError before the primitive was
    # ever exercised.
    w_frame, orig_stack, call = _prim(space, name, module, stack)
    with py.test.raises(PrimitiveFailedError):
        call()
    assert w_frame.as_context_get_shadow(space).stack() == orig_stack
def test_vmdebugging():
    # The VMDebugging plugin must identify this VM as RSqueak.
    assert prim("isRSqueak", "VMDebugging") is space.w_true
def test_resolver_start_lookup():
    # Starting an asynchronous name lookup answers nil.
    # NOTE(review): requires network access to resolve google.com.
    assert prim("primitiveResolverStartNameLookup", "SocketPlugin",
                [space.w_nil, space.wrap_string("google.com")]) == space.w_nil
def test_resolver_lookup_result():
    # After starting a lookup, the result primitive answers the address
    # as a bytes object.  Requires network access.
    assert prim("primitiveResolverStartNameLookup", "SocketPlugin",
                [space.w_nil, space.wrap_string("google.com")]) == space.w_nil
    w_res = prim("primitiveResolverNameLookupResult", "SocketPlugin")
    assert isinstance(w_res, W_BytesObject)
def test_socket_create():
    # Creating sockets of type 2 and type 0 both answer a socket handle.
    assert isinstance(prim("primitiveSocketCreate3Semaphores", "SocketPlugin",
                           [space.w_nil, 2, 0, 8000, 8000, 13, 14, 15]), socket.W_SocketHandle)
    assert isinstance(prim("primitiveSocketCreate3Semaphores", "SocketPlugin",
                           [space.w_nil, 0, 0, 8000, 8000, 13, 14, 15]), socket.W_SocketHandle)
def test_socket_status():
    # A fresh socket reports status 0; an invalid handle reports -1.
    handle = prim("primitiveSocketCreate3Semaphores", "SocketPlugin",
                  [space.w_nil, 2, 0, 8000, 8000, 13, 14, 15])
    assert prim("primitiveSocketConnectionStatus", "SocketPlugin",
                [space.w_nil, handle]).value == 0
    assert prim("primitiveSocketConnectionStatus", "SocketPlugin",
                [space.w_nil, 3200]).value == -1
def test_socket_connect():
    # Connecting to google.com:80 moves the socket to status 2.
    # Requires network access.
    handle = prim("primitiveSocketCreate3Semaphores", "SocketPlugin",
                  [space.w_nil, 2, 0, 8000, 8000, 13, 14, 15])
    prim("primitiveResolverStartNameLookup", "SocketPlugin",
         [space.w_nil, space.wrap_string("google.com")])
    w_host = prim("primitiveResolverNameLookupResult", "SocketPlugin")
    assert prim("primitiveSocketConnectToPort", "SocketPlugin",
                [space.w_nil, handle, w_host, space.wrap_int(80)])
    assert prim("primitiveSocketConnectionStatus", "SocketPlugin",
                [space.w_nil, handle]).value == 2
def test_socket_ready():
    # Without having sent a request there must be nothing to read after
    # connecting.  Requires network access.
    handle = prim("primitiveSocketCreate3Semaphores", "SocketPlugin",
                  [space.w_nil, 2, 0, 8000, 8000, 13, 14, 15])
    prim("primitiveResolverStartNameLookup", "SocketPlugin",
         [space.w_nil, space.wrap_string("google.com")])
    w_host = prim("primitiveResolverNameLookupResult", "SocketPlugin")
    assert prim("primitiveSocketConnectToPort", "SocketPlugin",
                [space.w_nil, handle, w_host, space.wrap_int(80)])
    assert prim("primitiveSocketConnectionStatus", "SocketPlugin",
                [space.w_nil, handle]).value == 2
    # Give the connection a moment to settle before polling.
    time.sleep(0.5)
    assert prim("primitiveSocketReceiveDataAvailable", "SocketPlugin",
                [space.w_nil, handle]) == space.w_false
# Minimal raw HTTP request used by the send/receive round-trip test.
_http_get = """
GET / HTTP/1.1
User-Agent: curl/7.37.1
Host: www.google.de
Accept: */*
"""
def test_socket_send_and_read_into():
    # Full round trip: connect, send an HTTP GET, then read the first
    # bytes of the response into an existing string at offset 2.
    # Requires network access.
    handle = prim("primitiveSocketCreate3Semaphores", "SocketPlugin",
                  [space.w_nil, 2, 0, 8000, 8000, 13, 14, 15])
    prim("primitiveResolverStartNameLookup", "SocketPlugin",
         [space.w_nil, space.wrap_string("google.com")])
    w_host = prim("primitiveResolverNameLookupResult", "SocketPlugin")
    assert prim("primitiveSocketConnectToPort", "SocketPlugin",
                [space.w_nil, handle, w_host, space.wrap_int(80)])
    assert prim("primitiveSocketConnectionStatus", "SocketPlugin",
                [space.w_nil, handle]).value == 2
    assert prim("primitiveSocketSendDataBufCount", "SocketPlugin",
                [space.w_nil, handle, space.wrap_string(_http_get),
                 space.wrap_int(1), space.wrap_int(len(_http_get))]).value == len(_http_get)
    time.sleep(0.5)
    assert prim("primitiveSocketReceiveDataAvailable", "SocketPlugin",
                [space.w_nil, handle]) == space.w_true
    w_str = space.wrap_string("_hello")
    # Read 5 bytes starting at 1-based index 2: "hello" -> "HTTP/".
    assert prim("primitiveSocketReceiveDataBufCount", "SocketPlugin",
                [space.w_nil, handle, w_str, space.wrap_int(2), space.wrap_int(5)]).value == 5
    assert w_str.unwrap_string(None) == "_HTTP/"
def test_socket_destroy():
    # Destroying a socket answers -1 (the "unconnected" status).
    handle = prim("primitiveSocketCreate3Semaphores", "SocketPlugin",
                  [space.w_nil, 2, 0, 8000, 8000, 13, 14, 15])
    assert prim("primitiveSocketDestroy", "SocketPlugin",
                [space.w_nil, handle]).value == -1
| bsd-3-clause |
mlabru/ptracks | view/piloto/dlg_aproximacao_ui.py | 1 | 2854 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file './dlg_aproximacao.ui'
#
# Created: Tue Dec 6 11:23:22 2016
# by: PyQt4 UI code generator 4.11.2
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
# Compatibility shims: QString.fromUtf8 and QApplication.UnicodeUTF8
# do not exist under PyQt4 API v2, where plain str is used instead.
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    def _fromUtf8(s):
        return s
try:
    _encoding = QtGui.QApplication.UnicodeUTF8
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig)
class Ui_CDlgAproximacao(object):
    # Auto-generated by pyuic4 from './dlg_aproximacao.ui'.  Do not edit
    # by hand; regenerate from the .ui file instead.
    def setupUi(self, CDlgAproximacao):
        # Build the dialog: a combo box inside a group box, a command
        # preview label, and an Ok/Cancel button box.
        CDlgAproximacao.setObjectName(_fromUtf8("CDlgAproximacao"))
        CDlgAproximacao.resize(259, 151)
        self.verticalLayout_2 = QtGui.QVBoxLayout(CDlgAproximacao)
        self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2"))
        self.gbx_aproximacao = QtGui.QGroupBox(CDlgAproximacao)
        self.gbx_aproximacao.setObjectName(_fromUtf8("gbx_aproximacao"))
        self.verticalLayout = QtGui.QVBoxLayout(self.gbx_aproximacao)
        self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
        self.cbx_apx = QtGui.QComboBox(self.gbx_aproximacao)
        self.cbx_apx.setObjectName(_fromUtf8("cbx_apx"))
        self.verticalLayout.addWidget(self.cbx_apx)
        self.verticalLayout_2.addWidget(self.gbx_aproximacao)
        self.lbl_comando = QtGui.QLabel(CDlgAproximacao)
        self.lbl_comando.setStyleSheet(_fromUtf8("background-color:rgb(0, 0, 0);\n"
            "color:rgb(0, 190, 0)"))
        self.lbl_comando.setObjectName(_fromUtf8("lbl_comando"))
        self.verticalLayout_2.addWidget(self.lbl_comando)
        self.bbx_aproximacao = QtGui.QDialogButtonBox(CDlgAproximacao)
        self.bbx_aproximacao.setOrientation(QtCore.Qt.Horizontal)
        self.bbx_aproximacao.setStandardButtons(QtGui.QDialogButtonBox.Cancel|QtGui.QDialogButtonBox.Ok)
        self.bbx_aproximacao.setObjectName(_fromUtf8("bbx_aproximacao"))
        self.verticalLayout_2.addWidget(self.bbx_aproximacao)
        self.retranslateUi(CDlgAproximacao)
        # Wire the button box to the dialog's accept/reject slots.
        QtCore.QObject.connect(self.bbx_aproximacao, QtCore.SIGNAL(_fromUtf8("accepted()")), CDlgAproximacao.accept)
        QtCore.QObject.connect(self.bbx_aproximacao, QtCore.SIGNAL(_fromUtf8("rejected()")), CDlgAproximacao.reject)
        QtCore.QMetaObject.connectSlotsByName(CDlgAproximacao)

    def retranslateUi(self, CDlgAproximacao):
        # Install the user-visible (Portuguese) strings.
        CDlgAproximacao.setWindowTitle(_translate("CDlgAproximacao", "Aproximação", None))
        self.gbx_aproximacao.setTitle(_translate("CDlgAproximacao", "Aproximações", None))
        self.lbl_comando.setText(_translate("CDlgAproximacao", "APX 1001", None))
lexus42/2015cd_midterm2 | static/Brython3.1.1-20150328-091302/Lib/sre_parse.py | 630 | 29657 | #
# Secret Labs' Regular Expression Engine
#
# convert re-style regular expression to sre pattern
#
# Copyright (c) 1998-2001 by Secret Labs AB. All rights reserved.
#
# See the sre.py file for information on usage and redistribution.
#
"""Internal support module for sre"""
# XXX: show string offset and offending character for all errors
import sys
from sre_constants import *
from _sre import MAXREPEAT
# Characters that are special outside a character class, and the
# characters that may introduce a repeat operator.
SPECIAL_CHARS = ".\\[{()*+?^$|"
REPEAT_CHARS = "*+?{"

# Character classification sets used by the escape parsers.
DIGITS = set("0123456789")
OCTDIGITS = set("01234567")
HEXDIGITS = set("0123456789abcdefABCDEF")
WHITESPACE = set(" \t\n\r\v\f")

# Escapes that always stand for a single literal character.
ESCAPES = {
    r"\a": (LITERAL, ord("\a")),
    r"\b": (LITERAL, ord("\b")),
    r"\f": (LITERAL, ord("\f")),
    r"\n": (LITERAL, ord("\n")),
    r"\r": (LITERAL, ord("\r")),
    r"\t": (LITERAL, ord("\t")),
    r"\v": (LITERAL, ord("\v")),
    r"\\": (LITERAL, ord("\\"))
}

# Escapes that denote a position (AT) or a character category (IN).
CATEGORIES = {
    r"\A": (AT, AT_BEGINNING_STRING), # start of string
    r"\b": (AT, AT_BOUNDARY),
    r"\B": (AT, AT_NON_BOUNDARY),
    r"\d": (IN, [(CATEGORY, CATEGORY_DIGIT)]),
    r"\D": (IN, [(CATEGORY, CATEGORY_NOT_DIGIT)]),
    r"\s": (IN, [(CATEGORY, CATEGORY_SPACE)]),
    r"\S": (IN, [(CATEGORY, CATEGORY_NOT_SPACE)]),
    r"\w": (IN, [(CATEGORY, CATEGORY_WORD)]),
    r"\W": (IN, [(CATEGORY, CATEGORY_NOT_WORD)]),
    r"\Z": (AT, AT_END_STRING), # end of string
}

# Mapping of inline flag letters (e.g. "(?i)") to SRE flag bits.
FLAGS = {
    # standard flags
    "i": SRE_FLAG_IGNORECASE,
    "L": SRE_FLAG_LOCALE,
    "m": SRE_FLAG_MULTILINE,
    "s": SRE_FLAG_DOTALL,
    "x": SRE_FLAG_VERBOSE,
    # extensions
    "a": SRE_FLAG_ASCII,
    "t": SRE_FLAG_TEMPLATE,
    "u": SRE_FLAG_UNICODE,
}
class Pattern:
    """Master pattern object: tracks flags and capture-group bookkeeping."""

    def __init__(self):
        self.flags = 0
        self.open = []        # ids of groups whose "(" has no ")" yet
        self.groups = 1       # next free group id (0 is the whole match)
        self.groupdict = {}   # group name -> group id

    def opengroup(self, name=None):
        """Allocate a (possibly named) capture group and return its id."""
        gid = self.groups
        self.groups += 1
        if name is not None:
            previous = self.groupdict.get(name, None)
            if previous is not None:
                raise error("redefinition of group name %s as group %d; "
                            "was group %d" % (repr(name), gid, previous))
            self.groupdict[name] = gid
        self.open.append(gid)
        return gid

    def closegroup(self, gid):
        """Record that the group's closing ")" has been seen."""
        self.open.remove(gid)

    def checkgroup(self, gid):
        """True if gid names an existing group that is already closed."""
        return gid < self.groups and gid not in self.open
class SubPattern:
    # a subpattern, in intermediate form: a sequence-like wrapper around
    # a list of (opcode, argument) tuples, sharing the master Pattern.
    def __init__(self, pattern, data=None):
        self.pattern = pattern
        if data is None:
            data = []
        self.data = data
        self.width = None  # cached (min, max) width, filled by getwidth()

    def __iter__(self):
        return iter(self.data)

    def dump(self, level=0):
        # Debug helper: print the opcode tree, indenting by nesting level.
        nl = 1
        seqtypes = (tuple, list)
        for op, av in self.data:
            print(level*" " + op, end=' '); nl = 0
            if op == "in":
                # member sublanguage
                print(); nl = 1
                for op, a in av:
                    print((level+1)*" " + op, a)
            elif op == "branch":
                print(); nl = 1
                i = 0
                for a in av[1]:
                    if i > 0:
                        print(level*" " + "or")
                    a.dump(level+1); nl = 1
                    i = i + 1
            elif isinstance(av, seqtypes):
                for a in av:
                    if isinstance(a, SubPattern):
                        if not nl: print()
                        a.dump(level+1); nl = 1
                    else:
                        print(a, end=' ') ; nl = 0
            else:
                print(av, end=' ') ; nl = 0
            if not nl: print()

    def __repr__(self):
        return repr(self.data)

    def __len__(self):
        return len(self.data)

    def __delitem__(self, index):
        del self.data[index]

    def __getitem__(self, index):
        # Slicing yields a new SubPattern sharing the same master Pattern.
        if isinstance(index, slice):
            return SubPattern(self.pattern, self.data[index])
        return self.data[index]

    def __setitem__(self, index, code):
        self.data[index] = code

    def insert(self, index, code):
        self.data.insert(index, code)

    def append(self, code):
        self.data.append(code)

    def getwidth(self):
        # determine the width (min, max) for this subpattern
        if self.width:
            return self.width
        lo = hi = 0
        UNITCODES = (ANY, RANGE, IN, LITERAL, NOT_LITERAL, CATEGORY)
        REPEATCODES = (MIN_REPEAT, MAX_REPEAT)
        for op, av in self.data:
            if op is BRANCH:
                # width of a branch is min/max over all alternatives
                i = sys.maxsize
                j = 0
                for av in av[1]:
                    l, h = av.getwidth()
                    i = min(i, l)
                    j = max(j, h)
                lo = lo + i
                hi = hi + j
            elif op is CALL:
                i, j = av.getwidth()
                lo = lo + i
                hi = hi + j
            elif op is SUBPATTERN:
                i, j = av[1].getwidth()
                lo = lo + i
                hi = hi + j
            elif op in REPEATCODES:
                # inner width scaled by the min/max repeat counts
                i, j = av[2].getwidth()
                lo = lo + int(i) * av[0]
                hi = hi + int(j) * av[1]
            elif op in UNITCODES:
                lo = lo + 1
                hi = hi + 1
            elif op == SUCCESS:
                break
        # clamp to sys.maxsize so unbounded repeats don't overflow
        self.width = int(min(lo, sys.maxsize)), int(min(hi, sys.maxsize))
        return self.width
class Tokenizer:
    # One-token lookahead tokenizer over a str or bytes pattern.
    # `self.next` always holds the next token ("\\x" escapes count as
    # one two-character token) or None at end of input.
    def __init__(self, string):
        self.istext = isinstance(string, str)
        self.string = string
        self.index = 0
        self.__next()

    def __next(self):
        # Advance to the next token (note: name-mangled, not __next__).
        if self.index >= len(self.string):
            self.next = None
            return
        char = self.string[self.index:self.index+1]
        # Special case for the str8, since indexing returns a integer
        # XXX This is only needed for test_bug_926075 in test_re.py
        if char and not self.istext:
            char = chr(char[0])
        if char == "\\":
            # A backslash and its following character form one token.
            try:
                c = self.string[self.index + 1]
            except IndexError:
                raise error("bogus escape (end of line)")
            if not self.istext:
                c = chr(c)
            char = char + c
        self.index = self.index + len(char)
        self.next = char

    def match(self, char, skip=1):
        # Return 1 if the next token equals `char`, consuming it if `skip`.
        if char == self.next:
            if skip:
                self.__next()
            return 1
        return 0

    def get(self):
        # Consume and return the next token (None at end of input).
        this = self.next
        self.__next()
        return this

    def getwhile(self, n, charset):
        # Consume up to n tokens as long as they belong to `charset`.
        result = ''
        for _ in range(n):
            c = self.next
            if c not in charset:
                break
            result += c
            self.__next()
        return result

    def tell(self):
        # Snapshot of (index, lookahead) for later seek().
        return self.index, self.next

    def seek(self, index):
        # Restore a position previously returned by tell().
        self.index, self.next = index
def isident(char):
    """Return True if *char* may start an identifier (ASCII letter or _)."""
    if char == "_":
        return True
    return "A" <= char <= "Z" or "a" <= char <= "z"
def isdigit(char):
    """Return True if *char* is an ASCII decimal digit."""
    return not (char < "0" or "9" < char)
def isname(name):
    """Return True if *name* is a valid group name.

    The first character must be an identifier start; the rest may be
    identifier characters or digits.
    """
    if not isident(name[0]):
        return False
    return all(isident(char) or isdigit(char) for char in name[1:])
def _class_escape(source, escape):
    # handle escape code inside character class
    code = ESCAPES.get(escape)
    if code:
        return code
    code = CATEGORIES.get(escape)
    # position escapes (\A, \Z, ...) are not valid inside a class
    if code and code[0] == IN:
        return code
    try:
        c = escape[1:2]
        if c == "x":
            # hexadecimal escape (exactly two digits)
            escape += source.getwhile(2, HEXDIGITS)
            if len(escape) != 4:
                raise ValueError
            return LITERAL, int(escape[2:], 16) & 0xff
        elif c == "u" and source.istext:
            # unicode escape (exactly four digits)
            escape += source.getwhile(4, HEXDIGITS)
            if len(escape) != 6:
                raise ValueError
            return LITERAL, int(escape[2:], 16)
        elif c == "U" and source.istext:
            # unicode escape (exactly eight digits)
            escape += source.getwhile(8, HEXDIGITS)
            if len(escape) != 10:
                raise ValueError
            c = int(escape[2:], 16)
            chr(c) # raise ValueError for invalid code
            return LITERAL, c
        elif c in OCTDIGITS:
            # octal escape (up to three digits)
            escape += source.getwhile(2, OCTDIGITS)
            return LITERAL, int(escape[1:], 8) & 0xff
        elif c in DIGITS:
            # no group references inside a character class
            raise ValueError
        if len(escape) == 2:
            return LITERAL, ord(escape[1])
    except ValueError:
        pass
    raise error("bogus escape: %s" % repr(escape))
def _escape(source, escape, state):
    # handle escape code in expression
    code = CATEGORIES.get(escape)
    if code:
        return code
    code = ESCAPES.get(escape)
    if code:
        return code
    try:
        c = escape[1:2]
        if c == "x":
            # hexadecimal escape
            escape += source.getwhile(2, HEXDIGITS)
            if len(escape) != 4:
                raise ValueError
            return LITERAL, int(escape[2:], 16) & 0xff
        elif c == "u" and source.istext:
            # unicode escape (exactly four digits)
            escape += source.getwhile(4, HEXDIGITS)
            if len(escape) != 6:
                raise ValueError
            return LITERAL, int(escape[2:], 16)
        elif c == "U" and source.istext:
            # unicode escape (exactly eight digits)
            escape += source.getwhile(8, HEXDIGITS)
            if len(escape) != 10:
                raise ValueError
            c = int(escape[2:], 16)
            chr(c) # raise ValueError for invalid code
            return LITERAL, c
        elif c == "0":
            # octal escape
            escape += source.getwhile(2, OCTDIGITS)
            return LITERAL, int(escape[1:], 8) & 0xff
        elif c in DIGITS:
            # octal escape *or* decimal group reference (sigh)
            if source.next in DIGITS:
                escape = escape + source.get()
                if (escape[1] in OCTDIGITS and escape[2] in OCTDIGITS and
                    source.next in OCTDIGITS):
                    # got three octal digits; this is an octal escape
                    escape = escape + source.get()
                    return LITERAL, int(escape[1:], 8) & 0xff
            # not an octal escape, so this is a group reference
            group = int(escape[1:])
            if group < state.groups:
                if not state.checkgroup(group):
                    raise error("cannot refer to open group")
                return GROUPREF, group
            raise ValueError
        if len(escape) == 2:
            return LITERAL, ord(escape[1])
    except ValueError:
        pass
    raise error("bogus escape: %s" % repr(escape))
def _parse_sub(source, state, nested=1):
    # parse an alternation: a|b|c
    items = []
    itemsappend = items.append
    sourcematch = source.match
    while 1:
        itemsappend(_parse(source, state))
        if sourcematch("|"):
            continue
        if not nested:
            break
        # a nested alternation must be terminated by ")" (not consumed here)
        if not source.next or sourcematch(")", 0):
            break
        else:
            raise error("pattern not properly closed")
    if len(items) == 1:
        return items[0]
    subpattern = SubPattern(state)
    subpatternappend = subpattern.append
    # check if all items share a common prefix
    while 1:
        prefix = None
        for item in items:
            if not item:
                break
            if prefix is None:
                prefix = item[0]
            elif item[0] != prefix:
                break
        else:
            # all subitems start with a common "prefix".
            # move it out of the branch
            for item in items:
                del item[0]
            subpatternappend(prefix)
            continue # check next one
        break
    # check if the branch can be replaced by a character set
    for item in items:
        if len(item) != 1 or item[0][0] != LITERAL:
            break
    else:
        # we can store this as a character set instead of a
        # branch (the compiler may optimize this even more)
        set = []
        setappend = set.append
        for item in items:
            setappend(item[0])
        subpatternappend((IN, set))
        return subpattern
    subpattern.append((BRANCH, (None, items)))
    return subpattern
def _parse_sub_cond(source, state, condgroup):
    # parse the body of a conditional backreference: (?(group)yes|no)
    item_yes = _parse(source, state)
    if source.match("|"):
        item_no = _parse(source, state)
        if source.match("|"):
            raise error("conditional backref with more than two branches")
    else:
        item_no = None
    if source.next and not source.match(")", 0):
        raise error("pattern not properly closed")
    subpattern = SubPattern(state)
    subpattern.append((GROUPREF_EXISTS, (condgroup, item_yes, item_no)))
    return subpattern
# Precomputed character/opcode sets used by _parse's hot loop.
_PATTERNENDERS = set("|)")
_ASSERTCHARS = set("=!<")
_LOOKBEHINDASSERTCHARS = set("=!")
_REPEATCODES = set([MIN_REPEAT, MAX_REPEAT])
def _parse(source, state):
    # parse a simple pattern (one alternation branch): literals,
    # character classes, repeats, groups, anchors and escapes.
    subpattern = SubPattern(state)
    # precompute constants into local variables
    subpatternappend = subpattern.append
    sourceget = source.get
    sourcematch = source.match
    _len = len
    PATTERNENDERS = _PATTERNENDERS
    ASSERTCHARS = _ASSERTCHARS
    LOOKBEHINDASSERTCHARS = _LOOKBEHINDASSERTCHARS
    REPEATCODES = _REPEATCODES
    while 1:
        if source.next in PATTERNENDERS:
            break # end of subpattern
        this = sourceget()
        if this is None:
            break # end of pattern
        if state.flags & SRE_FLAG_VERBOSE:
            # skip whitespace and comments
            if this in WHITESPACE:
                continue
            if this == "#":
                while 1:
                    this = sourceget()
                    if this in (None, "\n"):
                        break
                continue
        if this and this[0] not in SPECIAL_CHARS:
            subpatternappend((LITERAL, ord(this)))
        elif this == "[":
            # character set
            set = []
            setappend = set.append
##          if sourcematch(":"):
##              pass # handle character classes
            if sourcematch("^"):
                setappend((NEGATE, None))
            # check remaining characters
            start = set[:]
            while 1:
                this = sourceget()
                # note: "]" as the very first member is a literal
                if this == "]" and set != start:
                    break
                elif this and this[0] == "\\":
                    code1 = _class_escape(source, this)
                elif this:
                    code1 = LITERAL, ord(this)
                else:
                    raise error("unexpected end of regular expression")
                if sourcematch("-"):
                    # potential range
                    this = sourceget()
                    if this == "]":
                        # trailing "-" is a literal, not a range
                        if code1[0] is IN:
                            code1 = code1[1][0]
                        setappend(code1)
                        setappend((LITERAL, ord("-")))
                        break
                    elif this:
                        if this[0] == "\\":
                            code2 = _class_escape(source, this)
                        else:
                            code2 = LITERAL, ord(this)
                        if code1[0] != LITERAL or code2[0] != LITERAL:
                            raise error("bad character range")
                        lo = code1[1]
                        hi = code2[1]
                        if hi < lo:
                            raise error("bad character range")
                        setappend((RANGE, (lo, hi)))
                    else:
                        raise error("unexpected end of regular expression")
                else:
                    if code1[0] is IN:
                        code1 = code1[1][0]
                    setappend(code1)
            # XXX: <fl> should move set optimization to compiler!
            if _len(set)==1 and set[0][0] is LITERAL:
                subpatternappend(set[0]) # optimization
            elif _len(set)==2 and set[0][0] is NEGATE and set[1][0] is LITERAL:
                subpatternappend((NOT_LITERAL, set[1][1])) # optimization
            else:
                # XXX: <fl> should add charmap optimization here
                subpatternappend((IN, set))
        elif this and this[0] in REPEAT_CHARS:
            # repeat previous item
            if this == "?":
                min, max = 0, 1
            elif this == "*":
                min, max = 0, MAXREPEAT
            elif this == "+":
                min, max = 1, MAXREPEAT
            elif this == "{":
                if source.next == "}":
                    # "{}" is a literal "{"
                    subpatternappend((LITERAL, ord(this)))
                    continue
                here = source.tell()
                min, max = 0, MAXREPEAT
                lo = hi = ""
                while source.next in DIGITS:
                    lo = lo + source.get()
                if sourcematch(","):
                    while source.next in DIGITS:
                        hi = hi + sourceget()
                else:
                    hi = lo
                if not sourcematch("}"):
                    # not a well-formed {m,n}: treat "{" as a literal
                    subpatternappend((LITERAL, ord(this)))
                    source.seek(here)
                    continue
                if lo:
                    min = int(lo)
                    if min >= MAXREPEAT:
                        raise OverflowError("the repetition number is too large")
                if hi:
                    max = int(hi)
                    if max >= MAXREPEAT:
                        raise OverflowError("the repetition number is too large")
                    if max < min:
                        raise error("bad repeat interval")
            else:
                raise error("not supported")
            # figure out which item to repeat
            if subpattern:
                item = subpattern[-1:]
            else:
                item = None
            if not item or (_len(item) == 1 and item[0][0] == AT):
                raise error("nothing to repeat")
            if item[0][0] in REPEATCODES:
                raise error("multiple repeat")
            if sourcematch("?"):
                # trailing "?" makes the repeat non-greedy
                subpattern[-1] = (MIN_REPEAT, (min, max, item))
            else:
                subpattern[-1] = (MAX_REPEAT, (min, max, item))
        elif this == ".":
            subpatternappend((ANY, None))
        elif this == "(":
            group = 1
            name = None
            condgroup = None
            if sourcematch("?"):
                group = 0
                # options
                if sourcematch("P"):
                    # python extensions
                    if sourcematch("<"):
                        # named group: skip forward to end of name
                        name = ""
                        while 1:
                            char = sourceget()
                            if char is None:
                                raise error("unterminated name")
                            if char == ">":
                                break
                            name = name + char
                        group = 1
                        if not name:
                            raise error("missing group name")
                        if not isname(name):
                            raise error("bad character in group name")
                    elif sourcematch("="):
                        # named backreference
                        name = ""
                        while 1:
                            char = sourceget()
                            if char is None:
                                raise error("unterminated name")
                            if char == ")":
                                break
                            name = name + char
                        if not name:
                            raise error("missing group name")
                        if not isname(name):
                            raise error("bad character in group name")
                        gid = state.groupdict.get(name)
                        if gid is None:
                            raise error("unknown group name")
                        subpatternappend((GROUPREF, gid))
                        continue
                    else:
                        char = sourceget()
                        if char is None:
                            raise error("unexpected end of pattern")
                        raise error("unknown specifier: ?P%s" % char)
                elif sourcematch(":"):
                    # non-capturing group
                    group = 2
                elif sourcematch("#"):
                    # comment
                    while 1:
                        if source.next is None or source.next == ")":
                            break
                        sourceget()
                    if not sourcematch(")"):
                        raise error("unbalanced parenthesis")
                    continue
                elif source.next in ASSERTCHARS:
                    # lookahead assertions
                    char = sourceget()
                    dir = 1
                    if char == "<":
                        if source.next not in LOOKBEHINDASSERTCHARS:
                            raise error("syntax error")
                        dir = -1 # lookbehind
                        char = sourceget()
                    p = _parse_sub(source, state)
                    if not sourcematch(")"):
                        raise error("unbalanced parenthesis")
                    if char == "=":
                        subpatternappend((ASSERT, (dir, p)))
                    else:
                        subpatternappend((ASSERT_NOT, (dir, p)))
                    continue
                elif sourcematch("("):
                    # conditional backreference group
                    condname = ""
                    while 1:
                        char = sourceget()
                        if char is None:
                            raise error("unterminated name")
                        if char == ")":
                            break
                        condname = condname + char
                    group = 2
                    if not condname:
                        raise error("missing group name")
                    if isname(condname):
                        condgroup = state.groupdict.get(condname)
                        if condgroup is None:
                            raise error("unknown group name")
                    else:
                        try:
                            condgroup = int(condname)
                        except ValueError:
                            raise error("bad character in group name")
                else:
                    # flags
                    if not source.next in FLAGS:
                        raise error("unexpected end of pattern")
                    while source.next in FLAGS:
                        state.flags = state.flags | FLAGS[sourceget()]
            if group:
                # parse group contents
                if group == 2:
                    # anonymous group
                    group = None
                else:
                    group = state.opengroup(name)
                if condgroup:
                    p = _parse_sub_cond(source, state, condgroup)
                else:
                    p = _parse_sub(source, state)
                if not sourcematch(")"):
                    raise error("unbalanced parenthesis")
                if group is not None:
                    state.closegroup(group)
                subpatternappend((SUBPATTERN, (group, p)))
            else:
                # inline-flags group "(?iLmsux)": only ")" may follow
                while 1:
                    char = sourceget()
                    if char is None:
                        raise error("unexpected end of pattern")
                    if char == ")":
                        break
                    raise error("unknown extension")
        elif this == "^":
            subpatternappend((AT, AT_BEGINNING))
        elif this == "$":
            subpattern.append((AT, AT_END))
        elif this and this[0] == "\\":
            code = _escape(source, this, state)
            subpatternappend(code)
        else:
            raise error("parser error")
    return subpattern
def fix_flags(src, flags):
    """Check and fix flags according to the type of pattern (str or bytes).

    String patterns default to UNICODE unless ASCII was requested;
    bytes patterns may never carry the UNICODE flag.
    """
    if not isinstance(src, str):
        if flags & SRE_FLAG_UNICODE:
            raise ValueError("can't use UNICODE flag with a bytes pattern")
        return flags
    if flags & SRE_FLAG_ASCII:
        if flags & SRE_FLAG_UNICODE:
            raise ValueError("ASCII and UNICODE flags are incompatible")
        return flags
    return flags | SRE_FLAG_UNICODE
def parse(str, flags=0, pattern=None):
    # parse 're' pattern into list of (opcode, argument) tuples
    source = Tokenizer(str)
    if pattern is None:
        pattern = Pattern()
    pattern.flags = flags
    pattern.str = str
    p = _parse_sub(source, pattern, 0)
    p.pattern.flags = fix_flags(str, p.pattern.flags)
    # anything left over means an unmatched ")" or trailing junk
    tail = source.get()
    if tail == ")":
        raise error("unbalanced parenthesis")
    elif tail:
        raise error("bogus characters at end of regular expression")
    if flags & SRE_FLAG_DEBUG:
        p.dump()
    if not (flags & SRE_FLAG_VERBOSE) and p.pattern.flags & SRE_FLAG_VERBOSE:
        # the VERBOSE flag was switched on inside the pattern. to be
        # on the safe side, we'll parse the whole thing again...
        return parse(str, p.pattern.flags)
    return p
def parse_template(source, pattern):
    # parse 're' replacement string into list of literals and
    # group references
    s = Tokenizer(source)
    sget = s.get
    p = []
    a = p.append
    def literal(literal, p=p, pappend=a):
        # append a literal, merging it with a preceding literal run
        if p and p[-1][0] is LITERAL:
            p[-1] = LITERAL, p[-1][1] + literal
        else:
            pappend((LITERAL, literal))
    sep = source[:0]
    if isinstance(sep, str):
        makechar = chr
    else:
        makechar = chr
    while 1:
        this = sget()
        if this is None:
            break # end of replacement string
        if this and this[0] == "\\":
            # group
            c = this[1:2]
            if c == "g":
                # \g<name> or \g<number> reference
                name = ""
                if s.match("<"):
                    while 1:
                        char = sget()
                        if char is None:
                            raise error("unterminated group name")
                        if char == ">":
                            break
                        name = name + char
                if not name:
                    raise error("missing group name")
                try:
                    index = int(name)
                    if index < 0:
                        raise error("negative group number")
                except ValueError:
                    if not isname(name):
                        raise error("bad character in group name")
                    try:
                        index = pattern.groupindex[name]
                    except KeyError:
                        raise IndexError("unknown group name")
                a((MARK, index))
            elif c == "0":
                # \0 always starts an octal escape (up to two more digits)
                if s.next in OCTDIGITS:
                    this = this + sget()
                    if s.next in OCTDIGITS:
                        this = this + sget()
                literal(makechar(int(this[1:], 8) & 0xff))
            elif c in DIGITS:
                # \N: three octal digits are an octal escape, otherwise
                # a decimal group reference
                isoctal = False
                if s.next in DIGITS:
                    this = this + sget()
                    if (c in OCTDIGITS and this[2] in OCTDIGITS and
                        s.next in OCTDIGITS):
                        this = this + sget()
                        isoctal = True
                        literal(makechar(int(this[1:], 8) & 0xff))
                if not isoctal:
                    a((MARK, int(this[1:])))
            else:
                try:
                    this = makechar(ESCAPES[this][1])
                except KeyError:
                    pass
                literal(this)
        else:
            literal(this)
    # convert template to groups and literals lists
    i = 0
    groups = []
    groupsappend = groups.append
    literals = [None] * len(p)
    if isinstance(source, str):
        encode = lambda x: x
    else:
        # The tokenizer implicitly decodes bytes objects as latin-1, we must
        # therefore re-encode the final representation.
        encode = lambda x: x.encode('latin-1')
    for c, s in p:
        if c is MARK:
            groupsappend((i, s))
            # literal[i] is already None
        else:
            literals[i] = encode(s)
        i = i + 1
    return groups, literals
def expand_template(template, match):
    """Expand a parsed replacement template against *match*.

    Fills each group slot of the template with the corresponding matched
    group text and joins the result with an empty separator of the same
    type (str or bytes) as the matched string.
    """
    groups, literals = template
    filled = literals[:]
    get_group = match.group
    try:
        for slot, number in groups:
            value = get_group(number)
            if value is None:
                raise error("unmatched group")
            filled[slot] = value
    except IndexError:
        raise error("invalid group reference")
    # match.string[:0] is "" for str subjects and b"" for bytes subjects.
    empty = match.string[:0]
    return empty.join(filled)
| agpl-3.0 |
Endika/hr | hr_expense_analytic_default/expense_analytic_default.py | 23 | 2312 | # -*- encoding: utf-8 -*-
##############################################################################
#
# HR Expense Analytic Default module for OpenERP
# Copyright (C) 2014 Akretion (http://www.akretion.com)
# @author Alexis de Lattre <alexis.delattre@akretion.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import orm, fields
class hr_employee(orm.Model):
    """Extension of hr.employee: adds a per-employee default analytic account."""
    _inherit = 'hr.employee'
    _columns = {
        # Analytic account proposed by default on this employee's expenses
        # (copied onto the expense by the hr.expense.expense onchange).
        'default_analytic_account_id': fields.many2one(
            'account.analytic.account', 'Default Analytic Account',
            # 'view' and 'template' analytic accounts cannot carry entries,
            # so they are excluded from the selection.
            domain=[('type', 'not in', ('view', 'template'))],
            help="This field will be copied on the expenses of this employee."
        ),
    }
class hr_expense_expense(orm.Model):
    """Extension of hr.expense.expense: carries the employee's default
    analytic account so it can be applied to the expense lines."""
    _inherit = 'hr.expense.expense'
    _columns = {
        'default_analytic_account_id': fields.many2one(
            'account.analytic.account', 'Default Analytic Account',
            # 'view' and 'template' analytic accounts cannot carry entries.
            domain=[('type', 'not in', ('view', 'template'))]),
    }

    def onchange_employee_id(self, cr, uid, ids, employee_id, context=None):
        """Propagate the employee's default analytic account to the expense.

        Extends the standard onchange triggered when the employee of the
        expense changes; returns the usual onchange dict with
        ``default_analytic_account_id`` added to its ``value`` mapping
        (False when no employee or no default account is set).
        """
        res = super(hr_expense_expense, self).onchange_employee_id(
            cr, uid, ids, employee_id, context=context)
        analytic_account_id = False
        if employee_id:
            employee = self.pool['hr.employee'].browse(
                cr, uid, employee_id, context=context)
            analytic_account_id = \
                employee.default_analytic_account_id.id or False
        # Guard against a parent implementation returning a dict without a
        # 'value' key, which would otherwise raise KeyError below.
        res.setdefault('value', {})
        res['value']['default_analytic_account_id'] = analytic_account_id
        return res
| agpl-3.0 |
matthieu-meaux/DLLM | examples/Wrapper_Multipoint/test_multipoint_analysis_AoA.py | 1 | 2570 | # -*-mode: python; py-indent-offset: 4; tab-width: 8; coding: iso-8859-1 -*-
# DLLM (non-linear Differentiated Lifting Line Model, open source software)
#
# Copyright (C) 2013-2015 Airbus Group SAS
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# https://github.com/matthieu-meaux/DLLM.git
#
# @author : Matthieu MEAUX
#
from DLLM.DLLMEval.DLLMMP import DLLMMP
import os
from glob import glob
# Multipoint case: three flight conditions, each driven by its own AoA
# design variable (AoA1..AoA3).
config_dict = {}
config_dict['Case.nb_conditions'] = 3
config_dict['Case.condition_name'] = 'cond'
config_dict['Case.AoA_id_list'] = ['AoA1', 'AoA2', 'AoA3']

# Per-condition operating point: (name, Mach, AoA, altitude) — presumably
# AoA in degrees and altitude in metres; confirm against the OC handler.
# Each condition also gets its own circulation output file.
operating_points = [
    ('cond1', 0.8, 3.5, 10000.),
    ('cond2', 0.6, 4.5, 5000.),
    ('cond3', 0.4, 6., 1000.),
]
for cond, mach, aoa, alt in operating_points:
    config_dict['Case.%s.OC.Mach' % cond] = mach
    config_dict['Case.%s.OC.AoA' % cond] = aoa
    config_dict['Case.%s.OC.altitude' % cond] = alt
    config_dict['Case.%s.DLLM.gamma_file_name' % cond] = '%s_gamma.dat' % cond

# Wing parameterisation configuration.
config_dict['Case.param.geom_type'] = 'Broken'
config_dict['Case.param.n_sect'] = 20
config_dict['Case.param.BCfilename'] = 'input_parameters_AoA.par'
config_dict['Case.param.airfoil.type'] = 'simple'
config_dict['Case.param.airfoil.AoA0'] = -2.
config_dict['Case.param.airfoil.Cm0'] = -0.1

# Lifting-line solver settings, shared by all conditions.
config_dict['Case.DLLM.type'] = 'Solver'
config_dict['Case.DLLM.method'] = 'inhouse'
config_dict['Case.DLLM.relax_factor'] = 0.99
config_dict['Case.DLLM.stop_residual'] = 1e-9
config_dict['Case.DLLM.max_iterations'] = 100

# Remove stale log files from previous runs before launching the analysis.
for log_file in glob('*.log'):
    os.remove(log_file)

MP = DLLMMP('Case')
MP.configure(config_dict)
MP.analysis()
| gpl-2.0 |
sharad/calibre | src/calibre/gui2/dialogs/confirm_delete_location.py | 1 | 1511 | #!/usr/bin/env python
__license__ = 'GPL v3'
__copyright__ = '2008, Kovid Goyal kovid@kovidgoyal.net' \
'2010, John Schember <john@nachtimwald.com>'
__docformat__ = 'restructuredtext en'
from functools import partial
from calibre.gui2.dialogs.confirm_delete_location_ui import Ui_Dialog
from PyQt5.Qt import QDialog, Qt, QPixmap, QIcon
class Dialog(QDialog, Ui_Dialog):
    """Three-way confirmation dialog: delete from library, device, or both."""

    def __init__(self, msg, name, parent):
        QDialog.__init__(self, parent)
        self.setupUi(self)
        self.loc = None
        self.msg.setText(msg)
        self.name = name
        self.buttonBox.setFocus(Qt.OtherFocusReason)
        # Each destination button records its choice and closes the dialog.
        for button, destination in (
                (self.button_lib, 'lib'),
                (self.button_device, 'dev'),
                (self.button_both, 'both')):
            button.clicked.connect(partial(self.set_loc, destination))

    def set_loc(self, loc):
        # Remember the chosen destination and accept the dialog.
        self.loc = loc
        self.accept()

    def choice(self):
        # Destination picked by the user ('lib', 'dev', 'both') or None.
        return self.loc

    def break_cycles(self):
        # Disconnect the button signals so the dialog can be garbage
        # collected (the partials above hold references back to self).
        for suffix in ('lib', 'device', 'both'):
            button = getattr(self, 'button_' + suffix)
            try:
                button.clicked.disconnect()
            except:
                pass
def confirm_location(msg, name, parent=None, pixmap='dialog_warning.png'):
    """Show the delete-location dialog and return the user's choice.

    Returns 'lib', 'dev' or 'both', or None when the dialog is cancelled.
    """
    dialog = Dialog(msg, name, parent)
    dialog.label.setPixmap(QPixmap(I(pixmap)))
    dialog.setWindowIcon(QIcon(I(pixmap)))
    dialog.resize(dialog.sizeHint())
    result = dialog.exec_()
    # Drop the signal connections before releasing the dialog.
    dialog.break_cycles()
    return dialog.choice() if result == dialog.Accepted else None
| gpl-3.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.