| code (string, lengths 3-1.05M) | repo_name (string, lengths 5-104) | path (string, lengths 4-251) | language (string, 1 class) | license (string, 15 classes) | size (int64, 3-1.05M) |
|---|---|---|---|---|---|
#!/usr/bin/python
#
# (c) 2015 Peter Sprygada, <psprygada@ansible.com>
# Copyright (c) 2017 Dell Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: dellos10_command
version_added: "2.2"
author: "Senthil Kumar Ganesan (@skg-net)"
short_description: Run commands on remote devices running Dell OS10
description:
- Sends arbitrary commands to a Dell EMC OS10 node and returns the results
read from the device. This module includes an
argument that will cause the module to wait for a specific condition
before returning or timing out if the condition is not met.
- This module does not support running commands in configuration mode.
Please use M(dellos10_config) to configure Dell EMC OS10 devices.
extends_documentation_fragment: dellos10
options:
commands:
description:
- List of commands to send to the remote dellos10 device over the
configured provider. The resulting output from the command
is returned. If the I(wait_for) argument is provided, the
        module does not return until the condition is satisfied or
the number of retries has expired.
required: true
wait_for:
description:
- List of conditions to evaluate against the output of the
command. The task will wait for each condition to be true
before moving forward. If the conditional is not true
within the configured number of I(retries), the task fails.
See examples.
version_added: "2.2"
match:
description:
- The I(match) argument is used in conjunction with the
I(wait_for) argument to specify the match policy. Valid
values are C(all) or C(any). If the value is set to C(all)
then all conditionals in the wait_for must be satisfied. If
the value is set to C(any) then only one of the values must be
satisfied.
default: all
choices: ['any', 'all']
version_added: "2.5"
retries:
description:
      - Specifies the number of times a command should be retried
        before it is considered failed. The command is run on the
target device every retry and evaluated against the
I(wait_for) conditions.
default: 10
interval:
description:
- Configures the interval in seconds to wait between retries
of the command. If the command does not pass the specified
conditions, the interval indicates how long to wait before
trying the command again.
default: 1
"""
EXAMPLES = """
tasks:
- name: run show version on remote devices
dellos10_command:
commands: show version
- name: run show version and check to see if output contains OS10
dellos10_command:
commands: show version
wait_for: result[0] contains OS10
- name: run multiple commands on remote nodes
dellos10_command:
commands:
- show version
- show interface
- name: run multiple commands and evaluate the output
dellos10_command:
commands:
- show version
- show interface
wait_for:
- result[0] contains OS10
- result[1] contains Ethernet
"""
RETURN = """
stdout:
description: The set of responses from the commands
  returned: always, apart from low-level errors (such as action plugin errors)
type: list
sample: ['...', '...']
stdout_lines:
description: The value of stdout split into a list
  returned: always, apart from low-level errors (such as action plugin errors)
type: list
sample: [['...', '...'], ['...'], ['...']]
failed_conditions:
description: The list of conditionals that have failed
returned: failed
type: list
sample: ['...', '...']
warnings:
  description: The list of warnings (if any) generated by the module based on its arguments
returned: always
type: list
sample: ['...', '...']
"""
import time
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.dellos10.dellos10 import run_commands
from ansible.module_utils.network.dellos10.dellos10 import dellos10_argument_spec, check_args
from ansible.module_utils.network.common.utils import ComplexList
from ansible.module_utils.network.common.parsing import Conditional
from ansible.module_utils.six import string_types
def to_lines(stdout):
for item in stdout:
if isinstance(item, string_types):
item = str(item).split('\n')
yield item
def parse_commands(module, warnings):
command = ComplexList(dict(
command=dict(key=True),
prompt=dict(),
answer=dict()
), module)
commands = command(module.params['commands'])
for index, item in enumerate(commands):
if module.check_mode and not item['command'].startswith('show'):
warnings.append(
'only show commands are supported when using check mode, not '
'executing `%s`' % item['command']
)
elif item['command'].startswith('conf'):
module.fail_json(
msg='dellos10_command does not support running config mode '
'commands. Please use dellos10_config instead'
)
return commands
def main():
"""main entry point for module execution
"""
argument_spec = dict(
        # { command: <str>, prompt: <str>, answer: <str> }
commands=dict(type='list', required=True),
wait_for=dict(type='list'),
match=dict(default='all', choices=['all', 'any']),
retries=dict(default=10, type='int'),
interval=dict(default=1, type='int')
)
argument_spec.update(dellos10_argument_spec)
module = AnsibleModule(argument_spec=argument_spec,
supports_check_mode=True)
result = {'changed': False}
warnings = list()
check_args(module, warnings)
commands = parse_commands(module, warnings)
result['warnings'] = warnings
wait_for = module.params['wait_for'] or list()
conditionals = [Conditional(c) for c in wait_for]
retries = module.params['retries']
interval = module.params['interval']
match = module.params['match']
while retries > 0:
responses = run_commands(module, commands)
for item in list(conditionals):
if item(responses):
if match == 'any':
conditionals = list()
break
conditionals.remove(item)
if not conditionals:
break
time.sleep(interval)
retries -= 1
if conditionals:
failed_conditions = [item.raw for item in conditionals]
msg = 'One or more conditional statements have not been satisfied'
module.fail_json(msg=msg, failed_conditions=failed_conditions)
result.update({
'changed': False,
'stdout': responses,
'stdout_lines': list(to_lines(responses))
})
module.exit_json(**result)
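# NOTE: a minimal, self-contained sketch (not part of the Ansible API) of the
# wait_for/retry loop in main() above; run() and the lambda conditional in the
# doctest are hypothetical stand-ins for run_commands() and Conditional.
def _wait_for_sketch(run, conditions, match='all', retries=10, interval=1):
    """Return the conditions still unmet after polling, as main() does.

    >>> run = lambda: ['Dell EMC Networking OS10 Enterprise']
    >>> _wait_for_sketch(run, [lambda r: 'OS10' in r[0]], interval=0)
    []
    """
    pending = list(conditions)
    while retries > 0:
        responses = run()
        for cond in list(pending):
            if cond(responses):
                if match == 'any':
                    return []
                pending.remove(cond)
        if not pending:
            return []
        time.sleep(interval)
        retries -= 1
    return pending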
if __name__ == '__main__':
main()
| veger/ansible | lib/ansible/modules/network/dellos10/dellos10_command.py | Python | gpl-3.0 | 7,205 |
"""Support for Xiaomi Aqara sensors."""
from __future__ import annotations
import logging
from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
ATTR_BATTERY_LEVEL,
LIGHT_LUX,
PERCENTAGE,
POWER_WATT,
PRESSURE_HPA,
TEMP_CELSIUS,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from . import XiaomiDevice
from .const import BATTERY_MODELS, DOMAIN, GATEWAYS_KEY, POWER_MODELS
_LOGGER = logging.getLogger(__name__)
SENSOR_TYPES: dict[str, SensorEntityDescription] = {
"temperature": SensorEntityDescription(
key="temperature",
native_unit_of_measurement=TEMP_CELSIUS,
device_class=SensorDeviceClass.TEMPERATURE,
),
"humidity": SensorEntityDescription(
key="humidity",
native_unit_of_measurement=PERCENTAGE,
device_class=SensorDeviceClass.HUMIDITY,
),
"illumination": SensorEntityDescription(
key="illumination",
native_unit_of_measurement="lm",
device_class=SensorDeviceClass.ILLUMINANCE,
),
"lux": SensorEntityDescription(
key="lux",
native_unit_of_measurement=LIGHT_LUX,
device_class=SensorDeviceClass.ILLUMINANCE,
),
"pressure": SensorEntityDescription(
key="pressure",
native_unit_of_measurement=PRESSURE_HPA,
device_class=SensorDeviceClass.PRESSURE,
),
"bed_activity": SensorEntityDescription(
key="bed_activity",
native_unit_of_measurement="μm",
device_class=None,
),
"load_power": SensorEntityDescription(
key="load_power",
native_unit_of_measurement=POWER_WATT,
device_class=SensorDeviceClass.POWER,
),
"final_tilt_angle": SensorEntityDescription(
key="final_tilt_angle",
),
"coordination": SensorEntityDescription(
key="coordination",
),
"Battery": SensorEntityDescription(
key="Battery",
),
}
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Perform the setup for Xiaomi devices."""
entities = []
gateway = hass.data[DOMAIN][GATEWAYS_KEY][config_entry.entry_id]
for device in gateway.devices["sensor"]:
if device["model"] == "sensor_ht":
entities.append(
XiaomiSensor(
device, "Temperature", "temperature", gateway, config_entry
)
)
entities.append(
XiaomiSensor(device, "Humidity", "humidity", gateway, config_entry)
)
elif device["model"] in ("weather", "weather.v1"):
entities.append(
XiaomiSensor(
device, "Temperature", "temperature", gateway, config_entry
)
)
entities.append(
XiaomiSensor(device, "Humidity", "humidity", gateway, config_entry)
)
entities.append(
XiaomiSensor(device, "Pressure", "pressure", gateway, config_entry)
)
elif device["model"] == "sensor_motion.aq2":
entities.append(
XiaomiSensor(device, "Illumination", "lux", gateway, config_entry)
)
elif device["model"] in ("gateway", "gateway.v3", "acpartner.v3"):
entities.append(
XiaomiSensor(
device, "Illumination", "illumination", gateway, config_entry
)
)
elif device["model"] in ("vibration",):
entities.append(
XiaomiSensor(
device, "Bed Activity", "bed_activity", gateway, config_entry
)
)
entities.append(
XiaomiSensor(
device, "Tilt Angle", "final_tilt_angle", gateway, config_entry
)
)
entities.append(
XiaomiSensor(
device, "Coordination", "coordination", gateway, config_entry
)
)
else:
_LOGGER.warning("Unmapped Device Model")
# Set up battery sensors
seen_sids = set() # Set of device sids that are already seen
for devices in gateway.devices.values():
for device in devices:
if device["sid"] in seen_sids:
continue
seen_sids.add(device["sid"])
if device["model"] in BATTERY_MODELS:
entities.append(
XiaomiBatterySensor(device, "Battery", gateway, config_entry)
)
if device["model"] in POWER_MODELS:
entities.append(
XiaomiSensor(
device, "Load Power", "load_power", gateway, config_entry
)
)
async_add_entities(entities)
class XiaomiSensor(XiaomiDevice, SensorEntity):
"""Representation of a XiaomiSensor."""
def __init__(self, device, name, data_key, xiaomi_hub, config_entry):
"""Initialize the XiaomiSensor."""
self._data_key = data_key
self.entity_description = SENSOR_TYPES[data_key]
super().__init__(device, name, xiaomi_hub, config_entry)
def parse_data(self, data, raw_data):
"""Parse data sent by gateway."""
if (value := data.get(self._data_key)) is None:
return False
if self._data_key in ("coordination", "status"):
self._attr_native_value = value
return True
value = float(value)
if self._data_key in ("temperature", "humidity", "pressure"):
value /= 100
elif self._data_key in ("illumination",):
value = max(value - 300, 0)
if self._data_key == "temperature" and (value < -50 or value > 60):
return False
if self._data_key == "humidity" and (value <= 0 or value > 100):
return False
if self._data_key == "pressure" and value == 0:
return False
if self._data_key in ("illumination", "lux"):
self._attr_native_value = round(value)
else:
self._attr_native_value = round(value, 1)
return True
class XiaomiBatterySensor(XiaomiDevice, SensorEntity):
"""Representation of a XiaomiSensor."""
_attr_native_unit_of_measurement = PERCENTAGE
_attr_device_class = SensorDeviceClass.BATTERY
def parse_data(self, data, raw_data):
"""Parse data sent by gateway."""
succeed = super().parse_voltage(data)
if not succeed:
return False
battery_level = int(self._extra_state_attributes.pop(ATTR_BATTERY_LEVEL))
if battery_level <= 0 or battery_level > 100:
return False
self._attr_native_value = battery_level
return True
def parse_voltage(self, data):
"""Parse battery level data sent by gateway."""
return False # Override parse_voltage to do nothing
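# NOTE: a standalone sketch (not part of Home Assistant) of the scaling and
# plausibility rules XiaomiSensor.parse_data applies above; the raw payload
# values in the doctest are hypothetical examples.
def _scale_reading_sketch(data_key, raw):
    """Return the native value parse_data would report, or None if rejected.

    >>> _scale_reading_sketch("temperature", "2150")
    21.5
    >>> _scale_reading_sketch("temperature", "12000") is None
    True
    """
    value = float(raw)
    if data_key in ("temperature", "humidity", "pressure"):
        value /= 100  # the gateway reports these as integers scaled by 100
    if data_key == "temperature" and (value < -50 or value > 60):
        return None  # implausible reading, dropped like parse_data does
    return round(value, 1)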
| rohitranjan1991/home-assistant | homeassistant/components/xiaomi_aqara/sensor.py | Python | mit | 7,176 |
import heppy.framework.config as cfg
from getFiles import getFiles
gun_211_0_20 = cfg.Component(
'gun_211_0_20',
files = getFiles("/Gun_221_0_20_ptflat/743_v1/AODSIM", cache=True),
)
gun_211_0_10 = cfg.Component(
'gun_211_0_10',
files = getFiles("/Gun_211_0_10_ptflat/743_v1/AODSIM"),
)
gun_211_MatEff_0_20 = cfg.Component(
'gun_211_MatEff_0_20',
files = getFiles("/Gun_221_MatEff_0_20_ptflat/743_v1/AODSIM", cache=True),
)
gun_22_0_50 = cfg.Component(
'gun_22_0_50',
files = getFiles("/Gun_22_0_50/743_v2/RECOSIM"),
)
gun_22_0_50_eta3 = cfg.Component(
'gun_22_0_50_eta3',
files = getFiles("/Gun_22_0_50_eta3/743_v2/AODSIM"),
)
gun_130_0_50 = cfg.Component(
'gun_130_0_50',
files = getFiles("/Gun_130_0_50/743_v1/RECOSIM"),
)
gun_130_007_20 = cfg.Component(
'gun_130_007_20',
files = getFiles("/Gun_130_007_20/743_v1/RECOSIM"),
)
gun_11_0_50 = cfg.Component(
'gun_11_0_50',
files = getFiles("/Gun_11_0_50/743_v1/RECOSIM"),
)
gun_13_0_50 = cfg.Component(
'gun_13_0_50',
files = getFiles("/Gun_13_0_50/743_v1/RECOSIM"),
)
gun_12_0_50 = cfg.Component(
'gun_12_0_50',
files = getFiles("/Gun_12_0_50/743_v1/RECOSIM"),
)
samples = [
gun_211_0_20,
gun_22_0_50,
gun_11_0_50,
gun_13_0_50,
gun_12_0_50,
]
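# NOTE: a hypothetical sketch of how a further sample would be declared and
# registered; 'gun_example' reuses an existing dataset path purely for
# illustration and is intentionally left commented out.
# gun_example = cfg.Component(
#     'gun_example',
#     files = getFiles("/Gun_11_0_50/743_v1/RECOSIM"),
# )
# samples.append(gun_example)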
if __name__ == '__main__':
import pprint
for g in samples:
        print(g)
| semkiv/heppy_fcc | samples/gun.py | Python | gpl-3.0 | 1,404 |
from __future__ import absolute_import, print_function, division
import re
import warnings
import six
from netlib import encoding, strutils, basetypes
from netlib.http import headers
if six.PY2: # pragma: no cover
def _native(x):
return x
def _always_bytes(x):
return x
else:
# While headers _should_ be ASCII, it's not uncommon for certain headers to be utf-8 encoded.
def _native(x):
return x.decode("utf-8", "surrogateescape")
def _always_bytes(x):
return strutils.always_bytes(x, "utf-8", "surrogateescape")
class MessageData(basetypes.Serializable):
def __eq__(self, other):
if isinstance(other, MessageData):
return self.__dict__ == other.__dict__
return False
def __ne__(self, other):
return not self.__eq__(other)
def set_state(self, state):
for k, v in state.items():
if k == "headers":
v = headers.Headers.from_state(v)
setattr(self, k, v)
def get_state(self):
state = vars(self).copy()
state["headers"] = state["headers"].get_state()
return state
@classmethod
def from_state(cls, state):
state["headers"] = headers.Headers.from_state(state["headers"])
return cls(**state)
class Message(basetypes.Serializable):
def __eq__(self, other):
if isinstance(other, Message):
return self.data == other.data
return False
def __ne__(self, other):
return not self.__eq__(other)
def get_state(self):
return self.data.get_state()
def set_state(self, state):
self.data.set_state(state)
@classmethod
def from_state(cls, state):
state["headers"] = headers.Headers.from_state(state["headers"])
return cls(**state)
@property
def headers(self):
"""
Message headers object
Returns:
netlib.http.Headers
"""
return self.data.headers
@headers.setter
def headers(self, h):
self.data.headers = h
@property
def raw_content(self):
# type: () -> bytes
"""
The raw (encoded) HTTP message body
        See also: :py:attr:`content`, :py:attr:`text`
"""
return self.data.content
@raw_content.setter
def raw_content(self, content):
self.data.content = content
def get_content(self, strict=True):
# type: (bool) -> bytes
"""
The HTTP message body decoded with the content-encoding header (e.g. gzip)
Raises:
ValueError, when the content-encoding is invalid and strict is True.
        See also: :py:attr:`raw_content`, :py:attr:`text`
"""
if self.raw_content is None:
return None
ce = self.headers.get("content-encoding")
if ce:
try:
return encoding.decode(self.raw_content, ce)
except ValueError:
if strict:
raise
return self.raw_content
else:
return self.raw_content
def set_content(self, value):
if value is None:
self.raw_content = None
return
if not isinstance(value, bytes):
raise TypeError(
"Message content must be bytes, not {}. "
"Please use .text if you want to assign a str."
.format(type(value).__name__)
)
ce = self.headers.get("content-encoding")
try:
self.raw_content = encoding.encode(value, ce or "identity")
except ValueError:
# So we have an invalid content-encoding?
# Let's remove it!
del self.headers["content-encoding"]
self.raw_content = value
self.headers["content-length"] = str(len(self.raw_content))
content = property(get_content, set_content)
@property
def http_version(self):
"""
Version string, e.g. "HTTP/1.1"
"""
return _native(self.data.http_version)
@http_version.setter
def http_version(self, http_version):
self.data.http_version = _always_bytes(http_version)
@property
def timestamp_start(self):
"""
First byte timestamp
"""
return self.data.timestamp_start
@timestamp_start.setter
def timestamp_start(self, timestamp_start):
self.data.timestamp_start = timestamp_start
@property
def timestamp_end(self):
"""
Last byte timestamp
"""
return self.data.timestamp_end
@timestamp_end.setter
def timestamp_end(self, timestamp_end):
self.data.timestamp_end = timestamp_end
def _get_content_type_charset(self):
# type: () -> Optional[str]
ct = headers.parse_content_type(self.headers.get("content-type", ""))
if ct:
return ct[2].get("charset")
def _guess_encoding(self):
# type: () -> str
enc = self._get_content_type_charset()
if enc:
return enc
if "json" in self.headers.get("content-type", ""):
return "utf8"
else:
# We may also want to check for HTML meta tags here at some point.
return "latin-1"
def get_text(self, strict=True):
# type: (bool) -> six.text_type
"""
The HTTP message body decoded with both content-encoding header (e.g. gzip)
and content-type header charset.
Raises:
ValueError, when either content-encoding or charset is invalid and strict is True.
        See also: :py:attr:`content`, :py:attr:`raw_content`
"""
if self.raw_content is None:
return None
enc = self._guess_encoding()
content = self.get_content(strict)
try:
return encoding.decode(content, enc)
except ValueError:
if strict:
raise
return content.decode("utf8", "replace" if six.PY2 else "surrogateescape")
def set_text(self, text):
if text is None:
self.content = None
return
enc = self._guess_encoding()
try:
self.content = encoding.encode(text, enc)
except ValueError:
# Fall back to UTF-8 and update the content-type header.
ct = headers.parse_content_type(self.headers.get("content-type", "")) or ("text", "plain", {})
ct[2]["charset"] = "utf-8"
self.headers["content-type"] = headers.assemble_content_type(*ct)
enc = "utf8"
self.content = text.encode(enc, "replace" if six.PY2 else "surrogateescape")
text = property(get_text, set_text)
def decode(self, strict=True):
"""
Decodes body based on the current Content-Encoding header, then
removes the header. If there is no Content-Encoding header, no
action is taken.
Raises:
ValueError, when the content-encoding is invalid and strict is True.
"""
self.raw_content = self.get_content(strict)
self.headers.pop("content-encoding", None)
def encode(self, e):
"""
Encodes body with the encoding e, where e is "gzip", "deflate", "identity", or "br".
Any existing content-encodings are overwritten,
the content is not decoded beforehand.
Raises:
ValueError, when the specified content-encoding is invalid.
"""
self.headers["content-encoding"] = e
self.content = self.raw_content
if "content-encoding" not in self.headers:
raise ValueError("Invalid content encoding {}".format(repr(e)))
def replace(self, pattern, repl, flags=0):
"""
Replaces a regular expression pattern with repl in both the headers
and the body of the message. Encoded body will be decoded
before replacement, and re-encoded afterwards.
Returns:
The number of replacements made.
"""
if isinstance(pattern, six.text_type):
pattern = strutils.escaped_str_to_bytes(pattern)
if isinstance(repl, six.text_type):
repl = strutils.escaped_str_to_bytes(repl)
replacements = 0
if self.content:
self.content, replacements = re.subn(
pattern, repl, self.content, flags=flags
)
replacements += self.headers.replace(pattern, repl, flags)
return replacements
# Legacy
@property
def body(self): # pragma: no cover
warnings.warn(".body is deprecated, use .content instead.", DeprecationWarning)
return self.content
@body.setter
def body(self, body): # pragma: no cover
warnings.warn(".body is deprecated, use .content instead.", DeprecationWarning)
self.content = body
class decoded(object):
"""
Deprecated: You can now directly use :py:attr:`content`.
:py:attr:`raw_content` has the encoded content.
"""
    def __init__(self, message):  # pragma: no cover
warnings.warn("decoded() is deprecated, you can now directly use .content instead. "
".raw_content has the encoded content.", DeprecationWarning)
    def __enter__(self):  # pragma: no cover
pass
    def __exit__(self, type, value, tb):  # pragma: no cover
pass
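# NOTE: a minimal sketch (not the netlib API) of the strict/lenient contract
# that get_content()/get_text() implement above: strict callers see the
# ValueError raised for an invalid content-encoding, lenient callers fall
# back to the raw bytes. _demo_decode stands in for netlib.encoding.decode,
# and gzip.compress is Python 3 only, so the sketch is left commented out.
#
#   import gzip
#
#   def _demo_decode(raw, ce):
#       if ce == "gzip":
#           return gzip.decompress(raw)
#       raise ValueError("invalid content-encoding: %r" % ce)
#
#   def _demo_get_content(raw, ce, strict=True):
#       try:
#           return _demo_decode(raw, ce)
#       except ValueError:
#           if strict:
#               raise
#           return raw
#
#   assert _demo_get_content(gzip.compress(b"hi"), "gzip") == b"hi"
#   assert _demo_get_content(b"hi", "bogus", strict=False) == b"hi"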
| x2Ident/x2Ident | mitmproxy/netlib/http/message.py | Python | gpl-3.0 | 9,723 |
# -*- coding: utf-8 -*-
# transformations.py
# Copyright (c) 2006-2015, Christoph Gohlke
# Copyright (c) 2006-2015, The Regents of the University of California
# Produced at the Laboratory for Fluorescence Dynamics
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holders nor the names of any
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Homogeneous Transformation Matrices and Quaternions.
A library for calculating 4x4 matrices for translating, rotating, reflecting,
scaling, shearing, projecting, orthogonalizing, and superimposing arrays of
3D homogeneous coordinates as well as for converting between rotation matrices,
Euler angles, and quaternions. Also includes an Arcball control object and
functions to decompose transformation matrices.
:Author:
`Christoph Gohlke <http://www.lfd.uci.edu/~gohlke/>`_
:Organization:
Laboratory for Fluorescence Dynamics, University of California, Irvine
:Version: 2015.07.18
Requirements
------------
* `CPython 2.7 or 3.4 <http://www.python.org>`_
* `Numpy 1.9 <http://www.numpy.org>`_
* `Transformations.c 2015.07.18 <http://www.lfd.uci.edu/~gohlke/>`_
(recommended for speedup of some functions)
Notes
-----
The API is not stable yet and is expected to change between revisions.
This Python code is not optimized for speed. Refer to the transformations.c
module for a faster implementation of some functions.
Documentation in HTML format can be generated with epydoc.
Matrices (M) can be inverted using numpy.linalg.inv(M), be concatenated using
numpy.dot(M0, M1), or transform homogeneous coordinate arrays (v) using
numpy.dot(M, v) for shape (4, \*) column vectors, respectively
numpy.dot(v, M.T) for shape (\*, 4) row vectors ("array of points").
This module follows the "column vectors on the right" and "row major storage"
(C contiguous) conventions. The translation components are in the right column
of the transformation matrix, i.e. M[:3, 3].
The transpose of the transformation matrices may have to be used to interface
with other graphics systems, e.g. with OpenGL's glMultMatrixd(). See also [16].
Calculations are carried out with numpy "f" (float32) precision.
Vector, point, quaternion, and matrix function arguments are expected to be
"array like", i.e. tuple, list, or numpy arrays.
Return types are numpy arrays unless specified otherwise.
Angles are in radians unless specified otherwise.
Quaternions w+ix+jy+kz are represented as [w, x, y, z].
A triple of Euler angles can be applied/interpreted in 24 ways, which can
be specified using a 4 character string or encoded 4-tuple:
*Axes 4-string*: e.g. 'sxyz' or 'ryxy'
- first character : rotations are applied to 's'tatic or 'r'otating frame
- remaining characters : successive rotation axis 'x', 'y', or 'z'
*Axes 4-tuple*: e.g. (0, 0, 0, 0) or (1, 1, 1, 1)
- inner axis: code of axis ('x':0, 'y':1, 'z':2) of rightmost matrix.
- parity : even (0) if inner axis 'x' is followed by 'y', 'y' is followed
by 'z', or 'z' is followed by 'x'. Otherwise odd (1).
- repetition : first and last axis are same (1) or different (0).
- frame : rotations are applied to static (0) or rotating (1) frame.
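For example, 'sxyz' (static frame, axis order x-y-z) is encoded as
(0, 0, 0, 0), and its rotating-frame twin 'rzyx' as (0, 0, 0, 1).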
Other Python packages and modules for 3D transformations and quaternions:
* `Transforms3d <https://pypi.python.org/pypi/transforms3d>`_
includes most code of this module.
* `Blender.mathutils <http://www.blender.org/api/blender_python_api>`_
* `numpy-dtypes <https://github.com/numpy/numpy-dtypes>`_
References
----------
(1) Matrices and transformations. Ronald Goldman.
In "Graphics Gems I", pp 472-475. Morgan Kaufmann, 1990.
(2) More matrices and transformations: shear and pseudo-perspective.
Ronald Goldman. In "Graphics Gems II", pp 320-323. Morgan Kaufmann, 1991.
(3) Decomposing a matrix into simple transformations. Spencer Thomas.
In "Graphics Gems II", pp 320-323. Morgan Kaufmann, 1991.
(4) Recovering the data from the transformation matrix. Ronald Goldman.
In "Graphics Gems II", pp 324-331. Morgan Kaufmann, 1991.
(5) Euler angle conversion. Ken Shoemake.
In "Graphics Gems IV", pp 222-229. Morgan Kaufmann, 1994.
(6) Arcball rotation control. Ken Shoemake.
In "Graphics Gems IV", pp 175-192. Morgan Kaufmann, 1994.
(7) Representing attitude: Euler angles, unit quaternions, and rotation
vectors. James Diebel. 2006.
(8) A discussion of the solution for the best rotation to relate two sets
of vectors. W Kabsch. Acta Cryst. 1978. A34, 827-828.
(9) Closed-form solution of absolute orientation using unit quaternions.
BKP Horn. J Opt Soc Am A. 1987. 4(4):629-642.
(10) Quaternions. Ken Shoemake.
http://www.sfu.ca/~jwa3/cmpt461/files/quatut.pdf
(11) From quaternion to matrix and back. JMP van Waveren. 2005.
http://www.intel.com/cd/ids/developer/asmo-na/eng/293748.htm
(12) Uniform random rotations. Ken Shoemake.
In "Graphics Gems III", pp 124-132. Morgan Kaufmann, 1992.
(13) Quaternion in molecular modeling. CFF Karney.
J Mol Graph Mod, 25(5):595-604
(14) New method for extracting the quaternion from a rotation matrix.
Itzhack Y Bar-Itzhack, J Guid Contr Dynam. 2000. 23(6): 1085-1087.
(15) Multiple View Geometry in Computer Vision. Hartley and Zissermann.
Cambridge University Press; 2nd Ed. 2004. Chapter 4, Algorithm 4.7, p 130.
(16) Column Vectors vs. Row Vectors.
http://steve.hollasch.net/cgindex/math/matrix/column-vec.html
Examples
--------
>>> alpha, beta, gamma = 0.123, -1.234, 2.345
>>> origin, xaxis, yaxis, zaxis = [0, 0, 0], [1, 0, 0], [0, 1, 0], [0, 0, 1]
>>> I = identity_matrix()
>>> Rx = rotation_matrix(alpha, xaxis)
>>> Ry = rotation_matrix(beta, yaxis)
>>> Rz = rotation_matrix(gamma, zaxis)
>>> R = concatenate_matrices(Rx, Ry, Rz)
>>> euler = euler_from_matrix(R, 'rxyz')
>>> numpy.allclose([alpha, beta, gamma], euler)
True
>>> Re = euler_matrix(alpha, beta, gamma, 'rxyz')
>>> is_same_transform(R, Re)
True
>>> al, be, ga = euler_from_matrix(Re, 'rxyz')
>>> is_same_transform(Re, euler_matrix(al, be, ga, 'rxyz'))
True
>>> qx = quaternion_about_axis(alpha, xaxis)
>>> qy = quaternion_about_axis(beta, yaxis)
>>> qz = quaternion_about_axis(gamma, zaxis)
>>> q = quaternion_multiply(qx, qy)
>>> q = quaternion_multiply(q, qz)
>>> Rq = quaternion_matrix(q)
>>> is_same_transform(R, Rq)
True
>>> S = scale_matrix(1.23, origin)
>>> T = translation_matrix([1, 2, 3])
>>> Z = shear_matrix(beta, xaxis, origin, zaxis)
>>> R = random_rotation_matrix(numpy.random.rand(3))
>>> M = concatenate_matrices(T, R, Z, S)
>>> scale, shear, angles, trans, persp = decompose_matrix(M)
>>> numpy.allclose(scale, 1.23)
True
>>> numpy.allclose(trans, [1, 2, 3])
True
>>> numpy.allclose(shear, [0, math.tan(beta), 0])
True
>>> is_same_transform(R, euler_matrix(axes='sxyz', *angles))
True
>>> M1 = compose_matrix(scale, shear, angles, trans, persp)
>>> is_same_transform(M, M1)
True
>>> v0, v1 = random_vector(3), random_vector(3)
>>> M = rotation_matrix(angle_between_vectors(v0, v1), vector_product(v0, v1))
>>> v2 = numpy.dot(v0, M[:3,:3].T)
>>> numpy.allclose(unit_vector(v1), unit_vector(v2))
True
"""
from __future__ import division, print_function
import math
import numpy
__version__ = '2015.07.18'
__docformat__ = 'restructuredtext en'
__all__ = ()
def identity_matrix():
"""Return 4x4 identity/unit matrix.
>>> I = identity_matrix()
>>> numpy.allclose(I, numpy.dot(I, I))
True
>>> numpy.sum(I), numpy.trace(I)
(4.0, 4.0)
>>> numpy.allclose(I, numpy.identity(4))
True
"""
    return numpy.identity(4, dtype="f")
def translation_matrix(direction):
"""Return matrix to translate by direction vector.
>>> v = numpy.random.random(3) - 0.5
>>> numpy.allclose(v, translation_matrix(v)[:3, 3])
True
"""
    M = numpy.identity(4, dtype="f")
M[:3, 3] = direction[:3]
return M
def translation_from_matrix(matrix):
"""Return translation vector from translation matrix.
>>> v0 = numpy.random.random(3) - 0.5
>>> v1 = translation_from_matrix(translation_matrix(v0))
>>> numpy.allclose(v0, v1)
True
"""
    return numpy.array(matrix, copy=False, dtype="f")[:3, 3].copy()
def reflection_matrix(point, normal):
"""Return matrix to mirror at plane defined by point and normal vector.
>>> v0 = numpy.random.random(4) - 0.5
>>> v0[3] = 1.
>>> v1 = numpy.random.random(3) - 0.5
>>> R = reflection_matrix(v0, v1)
>>> numpy.allclose(2, numpy.trace(R))
True
>>> numpy.allclose(v0, numpy.dot(R, v0))
True
>>> v2 = v0.copy()
>>> v2[:3] += v1
>>> v3 = v0.copy()
    >>> v3[:3] -= v1
>>> numpy.allclose(v2, numpy.dot(R, v3))
True
"""
normal = unit_vector(normal[:3])
    M = numpy.identity(4, dtype="f")
M[:3, :3] -= 2.0 * numpy.outer(normal, normal)
M[:3, 3] = (2.0 * numpy.dot(point[:3], normal)) * normal
return M
def reflection_from_matrix(matrix):
"""Return mirror plane point and normal vector from reflection matrix.
>>> v0 = numpy.random.random(3) - 0.5
>>> v1 = numpy.random.random(3) - 0.5
>>> M0 = reflection_matrix(v0, v1)
>>> point, normal = reflection_from_matrix(M0)
>>> M1 = reflection_matrix(point, normal)
>>> is_same_transform(M0, M1)
True
"""
M = numpy.array(matrix, dtype="f", copy=False)
# normal: unit eigenvector corresponding to eigenvalue -1
w, V = numpy.linalg.eig(M[:3, :3])
i = numpy.where(abs(numpy.real(w) + 1.0) < 1e-8)[0]
if not len(i):
raise ValueError("no unit eigenvector corresponding to eigenvalue -1")
normal = numpy.real(V[:, i[0]]).squeeze()
# point: any unit eigenvector corresponding to eigenvalue 1
w, V = numpy.linalg.eig(M)
i = numpy.where(abs(numpy.real(w) - 1.0) < 1e-8)[0]
if not len(i):
raise ValueError("no unit eigenvector corresponding to eigenvalue 1")
point = numpy.real(V[:, i[-1]]).squeeze()
point /= point[3]
return point, normal
def rotation_matrix(angle, direction, point=None):
"""Return matrix to rotate about axis defined by point and direction.
>>> R = rotation_matrix(math.pi/2, [0, 0, 1], [1, 0, 0])
>>> numpy.allclose(numpy.dot(R, [0, 0, 0, 1]), [1, -1, 0, 1])
True
>>> angle = (random.random() - 0.5) * (2*math.pi)
>>> direc = numpy.random.random(3) - 0.5
>>> point = numpy.random.random(3) - 0.5
>>> R0 = rotation_matrix(angle, direc, point)
>>> R1 = rotation_matrix(angle-2*math.pi, direc, point)
>>> is_same_transform(R0, R1)
True
>>> R0 = rotation_matrix(angle, direc, point)
>>> R1 = rotation_matrix(-angle, -direc, point)
>>> is_same_transform(R0, R1)
True
>>> I = numpy.identity(4, "f")
>>> numpy.allclose(I, rotation_matrix(math.pi*2, direc))
True
>>> numpy.allclose(2, numpy.trace(rotation_matrix(math.pi/2,
... direc, point)))
True
"""
sina = math.sin(angle)
cosa = math.cos(angle)
direction = unit_vector(direction[:3])
# rotation matrix around unit vector
R = numpy.diag([cosa, cosa, cosa])
R += numpy.outer(direction, direction) * (1.0 - cosa)
direction *= sina
R += numpy.array([[ 0.0, -direction[2], direction[1]],
[ direction[2], 0.0, -direction[0]],
                      [-direction[1], direction[0], 0.0]], dtype="f")
    M = numpy.identity(4, dtype="f")
M[:3, :3] = R
if point is not None:
# rotation not around origin
point = numpy.array(point[:3], dtype="f", copy=False)
M[:3, 3] = point - numpy.dot(R, point)
return M
def rotation_from_matrix(matrix):
"""Return rotation angle and axis from rotation matrix.
>>> angle = (random.random() - 0.5) * (2*math.pi)
>>> direc = numpy.random.random(3) - 0.5
>>> point = numpy.random.random(3) - 0.5
>>> R0 = rotation_matrix(angle, direc, point)
>>> angle, direc, point = rotation_from_matrix(R0)
>>> R1 = rotation_matrix(angle, direc, point)
>>> is_same_transform(R0, R1)
True
"""
R = numpy.array(matrix, dtype="f", copy=False)
R33 = R[:3, :3]
# direction: unit eigenvector of R33 corresponding to eigenvalue of 1
w, W = numpy.linalg.eig(R33.T)
i = numpy.where(abs(numpy.real(w) - 1.0) < 1e-8)[0]
if not len(i):
raise ValueError("no unit eigenvector corresponding to eigenvalue 1")
direction = numpy.real(W[:, i[-1]]).squeeze()
# point: unit eigenvector of R33 corresponding to eigenvalue of 1
w, Q = numpy.linalg.eig(R)
i = numpy.where(abs(numpy.real(w) - 1.0) < 1e-8)[0]
if not len(i):
raise ValueError("no unit eigenvector corresponding to eigenvalue 1")
point = numpy.real(Q[:, i[-1]]).squeeze()
point /= point[3]
# rotation angle depending on direction
cosa = (numpy.trace(R33) - 1.0) / 2.0
if abs(direction[2]) > 1e-8:
sina = (R[1, 0] + (cosa-1.0)*direction[0]*direction[1]) / direction[2]
elif abs(direction[1]) > 1e-8:
sina = (R[0, 2] + (cosa-1.0)*direction[0]*direction[2]) / direction[1]
else:
sina = (R[2, 1] + (cosa-1.0)*direction[1]*direction[2]) / direction[0]
angle = math.atan2(sina, cosa)
return angle, direction, point
def scale_matrix(factor, origin=None, direction=None):
"""Return matrix to scale by factor around origin in direction.
Use factor -1 for point symmetry.
>>> v = (numpy.random.rand(4, 5) - 0.5) * 20
>>> v[3] = 1
>>> S = scale_matrix(-1.234)
>>> numpy.allclose(numpy.dot(S, v)[:3], -1.234*v[:3])
True
>>> factor = random.random() * 10 - 5
>>> origin = numpy.random.random(3) - 0.5
>>> direct = numpy.random.random(3) - 0.5
>>> S = scale_matrix(factor, origin)
>>> S = scale_matrix(factor, origin, direct)
"""
if direction is None:
# uniform scaling
        M = numpy.array(numpy.diag([factor, factor, factor, 1.0]), dtype="f")
if origin is not None:
M[:3, 3] = origin[:3]
M[:3, 3] *= 1.0 - factor
else:
# nonuniform scaling
direction = unit_vector(direction[:3])
factor = 1.0 - factor
        M = numpy.identity(4, dtype="f")
M[:3, :3] -= factor * numpy.outer(direction, direction)
if origin is not None:
M[:3, 3] = (factor * numpy.dot(origin[:3], direction)) * direction
return M
def scale_from_matrix(matrix):
"""Return scaling factor, origin and direction from scaling matrix.
>>> factor = random.random() * 10 - 5
>>> origin = numpy.random.random(3) - 0.5
>>> direct = numpy.random.random(3) - 0.5
>>> S0 = scale_matrix(factor, origin)
>>> factor, origin, direction = scale_from_matrix(S0)
>>> S1 = scale_matrix(factor, origin, direction)
>>> is_same_transform(S0, S1)
True
>>> S0 = scale_matrix(factor, origin, direct)
>>> factor, origin, direction = scale_from_matrix(S0)
>>> S1 = scale_matrix(factor, origin, direction)
>>> is_same_transform(S0, S1)
True
"""
M = numpy.array(matrix, dtype="f", copy=False)
M33 = M[:3, :3]
factor = numpy.trace(M33) - 2.0
try:
# direction: unit eigenvector corresponding to eigenvalue factor
w, V = numpy.linalg.eig(M33)
i = numpy.where(abs(numpy.real(w) - factor) < 1e-8)[0][0]
direction = numpy.real(V[:, i]).squeeze()
direction /= vector_norm(direction)
except IndexError:
# uniform scaling
factor = (factor + 2.0) / 3.0
direction = None
# origin: any eigenvector corresponding to eigenvalue 1
w, V = numpy.linalg.eig(M)
i = numpy.where(abs(numpy.real(w) - 1.0) < 1e-8)[0]
if not len(i):
raise ValueError("no eigenvector corresponding to eigenvalue 1")
origin = numpy.real(V[:, i[-1]]).squeeze()
origin /= origin[3]
return factor, origin, direction
def projection_matrix(point, normal, direction=None,
perspective=None, pseudo=False):
"""Return matrix to project onto plane defined by point and normal.
    Using either a perspective point, a projection direction, or neither.
If pseudo is True, perspective projections will preserve relative depth
such that Perspective = dot(Orthogonal, PseudoPerspective).
>>> P = projection_matrix([0, 0, 0], [1, 0, 0])
>>> numpy.allclose(P[1:, 1:], numpy.identity(4)[1:, 1:])
True
>>> point = numpy.random.random(3) - 0.5
>>> normal = numpy.random.random(3) - 0.5
>>> direct = numpy.random.random(3) - 0.5
>>> persp = numpy.random.random(3) - 0.5
>>> P0 = projection_matrix(point, normal)
>>> P1 = projection_matrix(point, normal, direction=direct)
>>> P2 = projection_matrix(point, normal, perspective=persp)
>>> P3 = projection_matrix(point, normal, perspective=persp, pseudo=True)
>>> is_same_transform(P2, numpy.dot(P0, P3))
True
>>> P = projection_matrix([3, 0, 0], [1, 1, 0], [1, 0, 0])
>>> v0 = (numpy.random.rand(4, 5) - 0.5) * 20
>>> v0[3] = 1
>>> v1 = numpy.dot(P, v0)
>>> numpy.allclose(v1[1], v0[1])
True
>>> numpy.allclose(v1[0], 3-v1[1])
True
"""
M = numpy.identity(4)
point = numpy.array(point[:3], dtype="f", copy=False)
normal = unit_vector(normal[:3])
if perspective is not None:
# perspective projection
perspective = numpy.array(perspective[:3], dtype="f",
copy=False)
M[0, 0] = M[1, 1] = M[2, 2] = numpy.dot(perspective-point, normal)
M[:3, :3] -= numpy.outer(perspective, normal)
if pseudo:
# preserve relative depth
M[:3, :3] -= numpy.outer(normal, normal)
M[:3, 3] = numpy.dot(point, normal) * (perspective+normal)
else:
M[:3, 3] = numpy.dot(point, normal) * perspective
M[3, :3] = -normal
M[3, 3] = numpy.dot(perspective, normal)
elif direction is not None:
# parallel projection
direction = numpy.array(direction[:3], dtype="f", copy=False)
scale = numpy.dot(direction, normal)
M[:3, :3] -= numpy.outer(direction, normal) / scale
M[:3, 3] = direction * (numpy.dot(point, normal) / scale)
else:
# orthogonal projection
M[:3, :3] -= numpy.outer(normal, normal)
M[:3, 3] = numpy.dot(point, normal) * normal
return M
def projection_from_matrix(matrix, pseudo=False):
"""Return projection plane and perspective point from projection matrix.
Return values are same as arguments for projection_matrix function:
point, normal, direction, perspective, and pseudo.
>>> point = numpy.random.random(3) - 0.5
>>> normal = numpy.random.random(3) - 0.5
>>> direct = numpy.random.random(3) - 0.5
>>> persp = numpy.random.random(3) - 0.5
>>> P0 = projection_matrix(point, normal)
>>> result = projection_from_matrix(P0)
>>> P1 = projection_matrix(*result)
>>> is_same_transform(P0, P1)
True
>>> P0 = projection_matrix(point, normal, direct)
>>> result = projection_from_matrix(P0)
>>> P1 = projection_matrix(*result)
>>> is_same_transform(P0, P1)
True
>>> P0 = projection_matrix(point, normal, perspective=persp, pseudo=False)
>>> result = projection_from_matrix(P0, pseudo=False)
>>> P1 = projection_matrix(*result)
>>> is_same_transform(P0, P1)
True
>>> P0 = projection_matrix(point, normal, perspective=persp, pseudo=True)
>>> result = projection_from_matrix(P0, pseudo=True)
>>> P1 = projection_matrix(*result)
>>> is_same_transform(P0, P1)
True
"""
M = numpy.array(matrix, dtype="f", copy=False)
M33 = M[:3, :3]
w, V = numpy.linalg.eig(M)
i = numpy.where(abs(numpy.real(w) - 1.0) < 1e-8)[0]
if not pseudo and len(i):
# point: any eigenvector corresponding to eigenvalue 1
point = numpy.real(V[:, i[-1]]).squeeze()
point /= point[3]
# direction: unit eigenvector corresponding to eigenvalue 0
w, V = numpy.linalg.eig(M33)
i = numpy.where(abs(numpy.real(w)) < 1e-8)[0]
if not len(i):
raise ValueError("no eigenvector corresponding to eigenvalue 0")
direction = numpy.real(V[:, i[0]]).squeeze()
direction /= vector_norm(direction)
# normal: unit eigenvector of M33.T corresponding to eigenvalue 0
w, V = numpy.linalg.eig(M33.T)
i = numpy.where(abs(numpy.real(w)) < 1e-8)[0]
if len(i):
# parallel projection
normal = numpy.real(V[:, i[0]]).squeeze()
normal /= vector_norm(normal)
return point, normal, direction, None, False
else:
# orthogonal projection, where normal equals direction vector
return point, direction, None, None, False
else:
# perspective projection
i = numpy.where(abs(numpy.real(w)) > 1e-8)[0]
if not len(i):
raise ValueError(
"no eigenvector not corresponding to eigenvalue 0")
point = numpy.real(V[:, i[-1]]).squeeze()
point /= point[3]
normal = - M[3, :3]
perspective = M[:3, 3] / numpy.dot(point[:3], normal)
if pseudo:
perspective -= normal
return point, normal, None, perspective, pseudo
def clip_matrix(left, right, bottom, top, near, far, perspective=False):
"""Return matrix to obtain normalized device coordinates from frustum.
The frustum bounds are axis-aligned along x (left, right),
y (bottom, top) and z (near, far).
Normalized device coordinates are in range [-1, 1] if coordinates are
inside the frustum.
If perspective is True the frustum is a truncated pyramid with the
perspective point at origin and direction along z axis, otherwise an
orthographic canonical view volume (a box).
Homogeneous coordinates transformed by the perspective clip matrix
need to be dehomogenized (divided by w coordinate).
>>> frustum = numpy.random.rand(6)
>>> frustum[1] += frustum[0]
>>> frustum[3] += frustum[2]
>>> frustum[5] += frustum[4]
>>> M = clip_matrix(perspective=False, *frustum)
>>> numpy.dot(M, [frustum[0], frustum[2], frustum[4], 1])
array([-1., -1., -1., 1.])
>>> numpy.dot(M, [frustum[1], frustum[3], frustum[5], 1])
array([ 1., 1., 1., 1.])
>>> M = clip_matrix(perspective=True, *frustum)
>>> v = numpy.dot(M, [frustum[0], frustum[2], frustum[4], 1])
>>> v / v[3]
array([-1., -1., -1., 1.])
>>> v = numpy.dot(M, [frustum[1], frustum[3], frustum[4], 1])
>>> v / v[3]
array([ 1., 1., -1., 1.])
"""
#if left >= right or bottom >= top or near >= far:
# raise ValueError("invalid frustum")
if perspective:
if near <= _EPS:
raise ValueError("invalid frustum: near <= 0")
t = 2.0 * near
M = [[t/(right-left), 0.0, (right+left)/(right-left), 0.0],
[0.0, t/(top-bottom), (top+bottom)/(top-bottom), 0.0],
[0.0, 0.0, -(far+near)/(far-near), -t*far/(far-near)],
[0.0, 0.0, -1.0, 0.0]]
else:
M = [[2.0/(right-left), 0.0, 0.0, (right+left)/(left-right)],
[0.0, 2.0/(top-bottom), 0.0, (top+bottom)/(bottom-top)],
[0.0, 0.0, 2.0/(far-near), (far+near)/(near-far)],
[0.0, 0.0, 0.0, 1.0]]
    return numpy.array(M, dtype="f")
def shear_matrix(angle, direction, point, normal):
"""Return matrix to shear by angle along direction vector on shear plane.
The shear plane is defined by a point and normal vector. The direction
vector must be orthogonal to the plane's normal vector.
A point P is transformed by the shear matrix into P" such that
the vector P-P" is parallel to the direction vector and its extent is
given by the angle of P-P'-P", where P' is the orthogonal projection
of P onto the shear plane.
>>> angle = (random.random() - 0.5) * 4*math.pi
>>> direct = numpy.random.random(3) - 0.5
>>> point = numpy.random.random(3) - 0.5
>>> normal = numpy.cross(direct, numpy.random.random(3))
>>> S = shear_matrix(angle, direct, point, normal)
>>> numpy.allclose(1, numpy.linalg.det(S))
True
"""
normal = unit_vector(normal[:3])
direction = unit_vector(direction[:3])
if abs(numpy.dot(normal, direction)) > 1e-6:
raise ValueError("direction and normal vectors are not orthogonal")
angle = math.tan(angle)
M = numpy.identity(4)
M[:3, :3] += angle * numpy.outer(direction, normal)
M[:3, 3] = -angle * numpy.dot(point[:3], normal) * direction
return M
def shear_from_matrix(matrix):
"""Return shear angle, direction and plane from shear matrix.
>>> angle = (random.random() - 0.5) * 4*math.pi
>>> direct = numpy.random.random(3) - 0.5
>>> point = numpy.random.random(3) - 0.5
>>> normal = numpy.cross(direct, numpy.random.random(3))
>>> S0 = shear_matrix(angle, direct, point, normal)
>>> angle, direct, point, normal = shear_from_matrix(S0)
>>> S1 = shear_matrix(angle, direct, point, normal)
>>> is_same_transform(S0, S1)
True
"""
M = numpy.array(matrix, dtype="f", copy=False)
M33 = M[:3, :3]
# normal: cross independent eigenvectors corresponding to the eigenvalue 1
w, V = numpy.linalg.eig(M33)
i = numpy.where(abs(numpy.real(w) - 1.0) < 1e-4)[0]
if len(i) < 2:
raise ValueError("no two linear independent eigenvectors found %s" % w)
V = numpy.real(V[:, i]).squeeze().T
lenorm = -1.0
for i0, i1 in ((0, 1), (0, 2), (1, 2)):
n = numpy.cross(V[i0], V[i1])
w = vector_norm(n)
if w > lenorm:
lenorm = w
normal = n
normal /= lenorm
# direction and angle
direction = numpy.dot(M33 - numpy.identity(3), normal)
angle = vector_norm(direction)
direction /= angle
angle = math.atan(angle)
# point: eigenvector corresponding to eigenvalue 1
w, V = numpy.linalg.eig(M)
i = numpy.where(abs(numpy.real(w) - 1.0) < 1e-8)[0]
if not len(i):
raise ValueError("no eigenvector corresponding to eigenvalue 1")
point = numpy.real(V[:, i[-1]]).squeeze()
point /= point[3]
return angle, direction, point, normal
def decompose_matrix(matrix):
"""Return sequence of transformations from transformation matrix.
matrix : array_like
Non-degenerative homogeneous transformation matrix
Return tuple of:
scale : vector of 3 scaling factors
shear : list of shear factors for x-y, x-z, y-z axes
angles : list of Euler angles about static x, y, z axes
translate : translation vector along x, y, z axes
perspective : perspective partition of matrix
Raise ValueError if matrix is of wrong type or degenerative.
>>> T0 = translation_matrix([1, 2, 3])
>>> scale, shear, angles, trans, persp = decompose_matrix(T0)
>>> T1 = translation_matrix(trans)
>>> numpy.allclose(T0, T1)
True
>>> S = scale_matrix(0.123)
>>> scale, shear, angles, trans, persp = decompose_matrix(S)
>>> scale[0]
0.123
>>> R0 = euler_matrix(1, 2, 3)
>>> scale, shear, angles, trans, persp = decompose_matrix(R0)
>>> R1 = euler_matrix(*angles)
>>> numpy.allclose(R0, R1)
True
"""
M = numpy.array(matrix, dtype="f", copy=True).T
if abs(M[3, 3]) < _EPS:
raise ValueError("M[3, 3] is zero")
M /= M[3, 3]
P = M.copy()
P[:, 3] = 0.0, 0.0, 0.0, 1.0
if not numpy.linalg.det(P):
raise ValueError("matrix is singular")
scale = numpy.zeros((3, ))
shear = [0.0, 0.0, 0.0]
angles = [0.0, 0.0, 0.0]
if any(abs(M[:3, 3]) > _EPS):
perspective = numpy.dot(M[:, 3], numpy.linalg.inv(P.T))
M[:, 3] = 0.0, 0.0, 0.0, 1.0
else:
perspective = numpy.array([0.0, 0.0, 0.0, 1.0])
translate = M[3, :3].copy()
M[3, :3] = 0.0
row = M[:3, :3].copy()
scale[0] = vector_norm(row[0])
row[0] /= scale[0]
shear[0] = numpy.dot(row[0], row[1])
row[1] -= row[0] * shear[0]
scale[1] = vector_norm(row[1])
row[1] /= scale[1]
shear[0] /= scale[1]
shear[1] = numpy.dot(row[0], row[2])
row[2] -= row[0] * shear[1]
shear[2] = numpy.dot(row[1], row[2])
row[2] -= row[1] * shear[2]
scale[2] = vector_norm(row[2])
row[2] /= scale[2]
shear[1:] /= scale[2]
if numpy.dot(row[0], numpy.cross(row[1], row[2])) < 0:
numpy.negative(scale, scale)
numpy.negative(row, row)
angles[1] = math.asin(-row[0, 2])
if math.cos(angles[1]):
angles[0] = math.atan2(row[1, 2], row[2, 2])
angles[2] = math.atan2(row[0, 1], row[0, 0])
else:
#angles[0] = math.atan2(row[1, 0], row[1, 1])
angles[0] = math.atan2(-row[2, 1], row[1, 1])
angles[2] = 0.0
return scale, shear, angles, translate, perspective
def compose_matrix(scale=None, shear=None, angles=None, translate=None,
perspective=None):
"""Return transformation matrix from sequence of transformations.
This is the inverse of the decompose_matrix function.
Sequence of transformations:
scale : vector of 3 scaling factors
shear : list of shear factors for x-y, x-z, y-z axes
angles : list of Euler angles about static x, y, z axes
translate : translation vector along x, y, z axes
perspective : perspective partition of matrix
>>> scale = numpy.random.random(3) - 0.5
>>> shear = numpy.random.random(3) - 0.5
>>> angles = (numpy.random.random(3) - 0.5) * (2*math.pi)
>>> trans = numpy.random.random(3) - 0.5
>>> persp = numpy.random.random(4) - 0.5
>>> M0 = compose_matrix(scale, shear, angles, trans, persp)
>>> result = decompose_matrix(M0)
>>> M1 = compose_matrix(*result)
>>> is_same_transform(M0, M1)
True
"""
M = numpy.identity(4)
if perspective is not None:
P = numpy.identity(4)
P[3, :] = perspective[:4]
M = numpy.dot(M, P)
if translate is not None:
T = numpy.identity(4)
T[:3, 3] = translate[:3]
M = numpy.dot(M, T)
if angles is not None:
R = euler_matrix(angles[0], angles[1], angles[2], 'sxyz')
M = numpy.dot(M, R)
if shear is not None:
Z = numpy.identity(4)
Z[1, 2] = shear[2]
Z[0, 2] = shear[1]
Z[0, 1] = shear[0]
M = numpy.dot(M, Z)
if scale is not None:
S = numpy.identity(4)
S[0, 0] = scale[0]
S[1, 1] = scale[1]
S[2, 2] = scale[2]
M = numpy.dot(M, S)
M /= M[3, 3]
return M
def orthogonalization_matrix(lengths, angles):
"""Return orthogonalization matrix for crystallographic cell coordinates.
Angles are expected in degrees.
The de-orthogonalization matrix is the inverse.
>>> O = orthogonalization_matrix([10, 10, 10], [90, 90, 90])
>>> numpy.allclose(O[:3, :3], numpy.identity(3, float) * 10)
True
>>> O = orthogonalization_matrix([9.8, 12.0, 15.5], [87.2, 80.7, 69.7])
>>> numpy.allclose(numpy.sum(O), 43.063229)
True
"""
a, b, c = lengths
angles = numpy.radians(angles)
sina, sinb, _ = numpy.sin(angles)
cosa, cosb, cosg = numpy.cos(angles)
co = (cosa * cosb - cosg) / (sina * sinb)
return numpy.array([
[ a*sinb*math.sqrt(1.0-co*co), 0.0, 0.0, 0.0],
[-a*sinb*co, b*sina, 0.0, 0.0],
[ a*cosb, b*cosa, c, 0.0],
[ 0.0, 0.0, 0.0, 1.0]])
def affine_matrix_from_points(v0, v1, shear=True, scale=True, usesvd=True):
"""Return affine transform matrix to register two point sets.
v0 and v1 are shape (ndims, \*) arrays of at least ndims non-homogeneous
coordinates, where ndims is the dimensionality of the coordinate space.
If shear is False, a similarity transformation matrix is returned.
If also scale is False, a rigid/Euclidean transformation matrix
is returned.
By default the algorithm by Hartley and Zissermann [15] is used.
If usesvd is True, similarity and Euclidean transformation matrices
are calculated by minimizing the weighted sum of squared deviations
(RMSD) according to the algorithm by Kabsch [8].
Otherwise, and if ndims is 3, the quaternion based algorithm by Horn [9]
is used, which is slower when using this Python implementation.
The returned matrix performs rotation, translation and uniform scaling
(if specified).
>>> v0 = [[0, 1031, 1031, 0], [0, 0, 1600, 1600]]
>>> v1 = [[675, 826, 826, 677], [55, 52, 281, 277]]
>>> affine_matrix_from_points(v0, v1)
array([[ 0.14549, 0.00062, 675.50008],
[ 0.00048, 0.14094, 53.24971],
[ 0. , 0. , 1. ]])
>>> T = translation_matrix(numpy.random.random(3)-0.5)
>>> R = random_rotation_matrix(numpy.random.random(3))
>>> S = scale_matrix(random.random())
>>> M = concatenate_matrices(T, R, S)
>>> v0 = (numpy.random.rand(4, 100) - 0.5) * 20
>>> v0[3] = 1
>>> v1 = numpy.dot(M, v0)
>>> v0[:3] += numpy.random.normal(0, 1e-8, 300).reshape(3, -1)
>>> M = affine_matrix_from_points(v0[:3], v1[:3])
>>> numpy.allclose(v1, numpy.dot(M, v0))
True
More examples in superimposition_matrix()
"""
v0 = numpy.array(v0, dtype="f", copy=True)
v1 = numpy.array(v1, dtype="f", copy=True)
ndims = v0.shape[0]
if ndims < 2 or v0.shape[1] < ndims or v0.shape != v1.shape:
raise ValueError("input arrays are of wrong shape or type")
# move centroids to origin
t0 = -numpy.mean(v0, axis=1)
M0 = numpy.identity(ndims+1)
M0[:ndims, ndims] = t0
v0 += t0.reshape(ndims, 1)
t1 = -numpy.mean(v1, axis=1)
M1 = numpy.identity(ndims+1)
M1[:ndims, ndims] = t1
v1 += t1.reshape(ndims, 1)
if shear:
# Affine transformation
A = numpy.concatenate((v0, v1), axis=0)
u, s, vh = numpy.linalg.svd(A.T)
vh = vh[:ndims].T
B = vh[:ndims]
C = vh[ndims:2*ndims]
t = numpy.dot(C, numpy.linalg.pinv(B))
t = numpy.concatenate((t, numpy.zeros((ndims, 1))), axis=1)
M = numpy.vstack((t, ((0.0,)*ndims) + (1.0,)))
elif usesvd or ndims != 3:
# Rigid transformation via SVD of covariance matrix
u, s, vh = numpy.linalg.svd(numpy.dot(v1, v0.T))
# rotation matrix from SVD orthonormal bases
R = numpy.dot(u, vh)
if numpy.linalg.det(R) < 0.0:
# R does not constitute right handed system
R -= numpy.outer(u[:, ndims-1], vh[ndims-1, :]*2.0)
s[-1] *= -1.0
# homogeneous transformation matrix
M = numpy.identity(ndims+1)
M[:ndims, :ndims] = R
else:
# Rigid transformation matrix via quaternion
# compute symmetric matrix N
xx, yy, zz = numpy.sum(v0 * v1, axis=1)
xy, yz, zx = numpy.sum(v0 * numpy.roll(v1, -1, axis=0), axis=1)
xz, yx, zy = numpy.sum(v0 * numpy.roll(v1, -2, axis=0), axis=1)
N = [[xx+yy+zz, 0.0, 0.0, 0.0],
[yz-zy, xx-yy-zz, 0.0, 0.0],
[zx-xz, xy+yx, yy-xx-zz, 0.0],
[xy-yx, zx+xz, yz+zy, zz-xx-yy]]
# quaternion: eigenvector corresponding to most positive eigenvalue
w, V = numpy.linalg.eigh(N)
q = V[:, numpy.argmax(w)]
q /= vector_norm(q) # unit quaternion
# homogeneous transformation matrix
M = quaternion_matrix(q)
if scale and not shear:
# Affine transformation; scale is ratio of RMS deviations from centroid
v0 *= v0
v1 *= v1
M[:ndims, :ndims] *= math.sqrt(numpy.sum(v1) / numpy.sum(v0))
# move centroids back
M = numpy.dot(numpy.linalg.inv(M1), numpy.dot(M, M0))
M /= M[ndims, ndims]
return M
def superimposition_matrix(v0, v1, scale=False, usesvd=True):
"""Return matrix to transform given 3D point set into second point set.
v0 and v1 are shape (3, \*) or (4, \*) arrays of at least 3 points.
The parameters scale and usesvd are explained in the more general
affine_matrix_from_points function.
The returned matrix is a similarity or Euclidean transformation matrix.
This function has a fast C implementation in transformations.c.
>>> v0 = numpy.random.rand(3, 10)
>>> M = superimposition_matrix(v0, v0)
>>> numpy.allclose(M, numpy.identity(4))
True
>>> R = random_rotation_matrix(numpy.random.random(3))
>>> v0 = [[1,0,0], [0,1,0], [0,0,1], [1,1,1]]
>>> v1 = numpy.dot(R, v0)
>>> M = superimposition_matrix(v0, v1)
>>> numpy.allclose(v1, numpy.dot(M, v0))
True
>>> v0 = (numpy.random.rand(4, 100) - 0.5) * 20
>>> v0[3] = 1
>>> v1 = numpy.dot(R, v0)
>>> M = superimposition_matrix(v0, v1)
>>> numpy.allclose(v1, numpy.dot(M, v0))
True
>>> S = scale_matrix(random.random())
>>> T = translation_matrix(numpy.random.random(3)-0.5)
>>> M = concatenate_matrices(T, R, S)
>>> v1 = numpy.dot(M, v0)
>>> v0[:3] += numpy.random.normal(0, 1e-9, 300).reshape(3, -1)
>>> M = superimposition_matrix(v0, v1, scale=True)
>>> numpy.allclose(v1, numpy.dot(M, v0))
True
>>> M = superimposition_matrix(v0, v1, scale=True, usesvd=False)
>>> numpy.allclose(v1, numpy.dot(M, v0))
True
>>> v = numpy.empty((4, 100, 3))
>>> v[:, :, 0] = v0
>>> M = superimposition_matrix(v0, v1, scale=True, usesvd=False)
>>> numpy.allclose(v1, numpy.dot(M, v[:, :, 0]))
True
"""
v0 = numpy.array(v0, dtype="f", copy=False)[:3]
v1 = numpy.array(v1, dtype="f", copy=False)[:3]
return affine_matrix_from_points(v0, v1, shear=False,
scale=scale, usesvd=usesvd)
def euler_matrix(ai, aj, ak, axes='sxyz'):
"""Return homogeneous rotation matrix from Euler angles and axis sequence.
ai, aj, ak : Euler's roll, pitch and yaw angles
axes : One of 24 axis sequences as string or encoded tuple
>>> R = euler_matrix(1, 2, 3, 'syxz')
>>> numpy.allclose(numpy.sum(R[0]), -1.34786452)
True
>>> R = euler_matrix(1, 2, 3, (0, 1, 0, 1))
>>> numpy.allclose(numpy.sum(R[0]), -0.383436184)
True
>>> ai, aj, ak = (4*math.pi) * (numpy.random.random(3) - 0.5)
>>> for axes in _AXES2TUPLE.keys():
... R = euler_matrix(ai, aj, ak, axes)
>>> for axes in _TUPLE2AXES.keys():
... R = euler_matrix(ai, aj, ak, axes)
"""
try:
firstaxis, parity, repetition, frame = _AXES2TUPLE[axes]
except (AttributeError, KeyError):
_TUPLE2AXES[axes] # validation
firstaxis, parity, repetition, frame = axes
i = firstaxis
j = _NEXT_AXIS[i+parity]
k = _NEXT_AXIS[i-parity+1]
if frame:
ai, ak = ak, ai
if parity:
ai, aj, ak = -ai, -aj, -ak
si, sj, sk = math.sin(ai), math.sin(aj), math.sin(ak)
ci, cj, ck = math.cos(ai), math.cos(aj), math.cos(ak)
cc, cs = ci*ck, ci*sk
sc, ss = si*ck, si*sk
M = numpy.identity(4)
if repetition:
M[i, i] = cj
M[i, j] = sj*si
M[i, k] = sj*ci
M[j, i] = sj*sk
M[j, j] = -cj*ss+cc
M[j, k] = -cj*cs-sc
M[k, i] = -sj*ck
M[k, j] = cj*sc+cs
M[k, k] = cj*cc-ss
else:
M[i, i] = cj*ck
M[i, j] = sj*sc-cs
M[i, k] = sj*cc+ss
M[j, i] = cj*sk
M[j, j] = sj*ss+cc
M[j, k] = sj*cs-sc
M[k, i] = -sj
M[k, j] = cj*si
M[k, k] = cj*ci
return M
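# Hedged example (illustrative, not from the original file): with the 'sxyz'
# static sequence and only the third angle set, euler_matrix reduces to a
# pure rotation about z.
#
#   Rz = euler_matrix(0, 0, math.pi / 2, 'sxyz')
#   assert numpy.allclose(numpy.dot(Rz[:3, :3], [1, 0, 0]), [0, 1, 0], atol=1e-6)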
def euler_from_matrix(matrix, axes='sxyz'):
"""Return Euler angles from rotation matrix for specified axis sequence.
axes : One of 24 axis sequences as string or encoded tuple
Note that many Euler angle triplets can describe one matrix.
>>> R0 = euler_matrix(1, 2, 3, 'syxz')
>>> al, be, ga = euler_from_matrix(R0, 'syxz')
>>> R1 = euler_matrix(al, be, ga, 'syxz')
>>> numpy.allclose(R0, R1)
True
>>> angles = (4*math.pi) * (numpy.random.random(3) - 0.5)
>>> for axes in _AXES2TUPLE.keys():
... R0 = euler_matrix(axes=axes, *angles)
... R1 = euler_matrix(axes=axes, *euler_from_matrix(R0, axes))
... if not numpy.allclose(R0, R1): print(axes, "failed")
"""
try:
firstaxis, parity, repetition, frame = _AXES2TUPLE[axes.lower()]
except (AttributeError, KeyError):
_TUPLE2AXES[axes] # validation
firstaxis, parity, repetition, frame = axes
i = firstaxis
j = _NEXT_AXIS[i+parity]
k = _NEXT_AXIS[i-parity+1]
M = numpy.array(matrix, dtype="f", copy=False)[:3, :3]
if repetition:
sy = math.sqrt(M[i, j]*M[i, j] + M[i, k]*M[i, k])
if sy > _EPS:
ax = math.atan2( M[i, j], M[i, k])
ay = math.atan2( sy, M[i, i])
az = math.atan2( M[j, i], -M[k, i])
else:
ax = math.atan2(-M[j, k], M[j, j])
ay = math.atan2( sy, M[i, i])
az = 0.0
else:
cy = math.sqrt(M[i, i]*M[i, i] + M[j, i]*M[j, i])
if cy > _EPS:
ax = math.atan2( M[k, j], M[k, k])
ay = math.atan2(-M[k, i], cy)
az = math.atan2( M[j, i], M[i, i])
else:
ax = math.atan2(-M[j, k], M[j, j])
ay = math.atan2(-M[k, i], cy)
az = 0.0
if parity:
ax, ay, az = -ax, -ay, -az
if frame:
ax, az = az, ax
return ax, ay, az
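# Hedged round-trip sketch (illustrative): the recovered triplet is not
# necessarily the one used to build the matrix, but it reproduces the same
# rotation.
#
#   R0 = euler_matrix(0.1, -0.2, 0.3, 'rzyx')
#   a, b, c = euler_from_matrix(R0, 'rzyx')
#   assert numpy.allclose(R0, euler_matrix(a, b, c, 'rzyx'), atol=1e-5)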
def euler_from_quaternion(quaternion, axes='sxyz'):
"""Return Euler angles from quaternion for specified axis sequence.
>>> angles = euler_from_quaternion([0.99810947, 0.06146124, 0, 0])
>>> numpy.allclose(angles, [0.123, 0, 0])
True
"""
return euler_from_matrix(quaternion_matrix(quaternion), axes)
def quaternion_from_euler(ai, aj, ak, axes='sxyz'):
"""Return quaternion from Euler angles and axis sequence.
ai, aj, ak : Euler's roll, pitch and yaw angles
axes : One of 24 axis sequences as string or encoded tuple
>>> q = quaternion_from_euler(1, 2, 3, 'ryxz')
>>> numpy.allclose(q, [0.435953, 0.310622, -0.718287, 0.444435])
True
"""
try:
firstaxis, parity, repetition, frame = _AXES2TUPLE[axes.lower()]
except (AttributeError, KeyError):
_TUPLE2AXES[axes] # validation
firstaxis, parity, repetition, frame = axes
i = firstaxis + 1
j = _NEXT_AXIS[i+parity-1] + 1
k = _NEXT_AXIS[i-parity] + 1
if frame:
ai, ak = ak, ai
if parity:
aj = -aj
ai /= 2.0
aj /= 2.0
ak /= 2.0
ci = math.cos(ai)
si = math.sin(ai)
cj = math.cos(aj)
sj = math.sin(aj)
ck = math.cos(ak)
sk = math.sin(ak)
cc = ci*ck
cs = ci*sk
sc = si*ck
ss = si*sk
q = numpy.empty((4, ))
if repetition:
q[0] = cj*(cc - ss)
q[i] = cj*(cs + sc)
q[j] = sj*(cc + ss)
q[k] = sj*(cs - sc)
else:
q[0] = cj*cc + sj*ss
q[i] = cj*sc - sj*cs
q[j] = cj*ss + sj*cc
q[k] = cj*cs - sj*sc
if parity:
q[j] *= -1.0
return q
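# Hedged consistency check (illustrative): going through a quaternion agrees
# with building the rotation matrix directly from the same angles.
#
#   q = quaternion_from_euler(0.1, 0.2, 0.3, 'sxyz')
#   assert numpy.allclose(quaternion_matrix(q),
#                         euler_matrix(0.1, 0.2, 0.3, 'sxyz'), atol=1e-5)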
def quaternion_about_axis(angle, axis):
"""Return quaternion for rotation about axis.
>>> q = quaternion_about_axis(0.123, [1, 0, 0])
>>> numpy.allclose(q, [0.99810947, 0.06146124, 0, 0])
True
"""
q = numpy.array([0.0, axis[0], axis[1], axis[2]])
qlen = vector_norm(q)
if qlen > _EPS:
q *= math.sin(angle/2.0) / qlen
q[0] = math.cos(angle/2.0)
return q
def quaternion_matrix(quaternion):
"""Return homogeneous rotation matrix from quaternion.
>>> M = quaternion_matrix([0.99810947, 0.06146124, 0, 0])
>>> numpy.allclose(M, rotation_matrix(0.123, [1, 0, 0]))
True
>>> M = quaternion_matrix([1, 0, 0, 0])
>>> numpy.allclose(M, numpy.identity(4))
True
>>> M = quaternion_matrix([0, 1, 0, 0])
>>> numpy.allclose(M, numpy.diag([1, -1, -1, 1]))
True
"""
q = numpy.array(quaternion, dtype="f", copy=True)
n = numpy.dot(q, q)
if n < _EPS:
return numpy.identity(4)
q *= math.sqrt(2.0 / n)
q = numpy.outer(q, q)
return numpy.array([
[1.0-q[2, 2]-q[3, 3], q[1, 2]-q[3, 0], q[1, 3]+q[2, 0], 0.0],
[ q[1, 2]+q[3, 0], 1.0-q[1, 1]-q[3, 3], q[2, 3]-q[1, 0], 0.0],
[ q[1, 3]-q[2, 0], q[2, 3]+q[1, 0], 1.0-q[1, 1]-q[2, 2], 0.0],
[ 0.0, 0.0, 0.0, 1.0]],dtype="f")
def quaternion_from_matrix(matrix, isprecise=False):
"""Return quaternion from rotation matrix.
If isprecise is True, the input matrix is assumed to be a precise rotation
matrix and a faster algorithm is used.
>>> q = quaternion_from_matrix(numpy.identity(4), True)
>>> numpy.allclose(q, [1, 0, 0, 0])
True
>>> q = quaternion_from_matrix(numpy.diag([1, -1, -1, 1]))
>>> numpy.allclose(q, [0, 1, 0, 0]) or numpy.allclose(q, [0, -1, 0, 0])
True
>>> R = rotation_matrix(0.123, (1, 2, 3))
>>> q = quaternion_from_matrix(R, True)
>>> numpy.allclose(q, [0.9981095, 0.0164262, 0.0328524, 0.0492786])
True
>>> R = [[-0.545, 0.797, 0.260, 0], [0.733, 0.603, -0.313, 0],
... [-0.407, 0.021, -0.913, 0], [0, 0, 0, 1]]
>>> q = quaternion_from_matrix(R)
>>> numpy.allclose(q, [0.19069, 0.43736, 0.87485, -0.083611])
True
>>> R = [[0.395, 0.362, 0.843, 0], [-0.626, 0.796, -0.056, 0],
... [-0.677, -0.498, 0.529, 0], [0, 0, 0, 1]]
>>> q = quaternion_from_matrix(R)
>>> numpy.allclose(q, [0.82336615, -0.13610694, 0.46344705, -0.29792603])
True
>>> R = random_rotation_matrix()
>>> q = quaternion_from_matrix(R)
>>> is_same_transform(R, quaternion_matrix(q))
True
>>> R = euler_matrix(0.0, 0.0, numpy.pi/2.0)
>>> numpy.allclose(quaternion_from_matrix(R, isprecise=False),
... quaternion_from_matrix(R, isprecise=True))
True
"""
M = numpy.array(matrix, dtype="f", copy=False)[:4, :4]
if isprecise:
q = numpy.empty((4, ))
t = numpy.trace(M)
if t > M[3, 3]:
q[0] = t
q[3] = M[1, 0] - M[0, 1]
q[2] = M[0, 2] - M[2, 0]
q[1] = M[2, 1] - M[1, 2]
else:
i, j, k = 1, 2, 3
if M[1, 1] > M[0, 0]:
i, j, k = 2, 3, 1
if M[2, 2] > M[i, i]:
i, j, k = 3, 1, 2
t = M[i, i] - (M[j, j] + M[k, k]) + M[3, 3]
q[i] = t
q[j] = M[i, j] + M[j, i]
q[k] = M[k, i] + M[i, k]
q[3] = M[k, j] - M[j, k]
q *= 0.5 / math.sqrt(t * M[3, 3])
else:
m00 = M[0, 0]
m01 = M[0, 1]
m02 = M[0, 2]
m10 = M[1, 0]
m11 = M[1, 1]
m12 = M[1, 2]
m20 = M[2, 0]
m21 = M[2, 1]
m22 = M[2, 2]
# symmetric matrix K
K = numpy.array([[m00-m11-m22, 0.0, 0.0, 0.0],
[m01+m10, m11-m00-m22, 0.0, 0.0],
[m02+m20, m12+m21, m22-m00-m11, 0.0],
[m21-m12, m02-m20, m10-m01, m00+m11+m22]])
K /= 3.0
# quaternion is eigenvector of K that corresponds to largest eigenvalue
w, V = numpy.linalg.eigh(K)
q = V[[3, 0, 1, 2], numpy.argmax(w)]
if q[0] < 0.0:
numpy.negative(q, q)
return q
def quaternion_multiply(quaternion1, quaternion0):
"""Return multiplication of two quaternions.
>>> q = quaternion_multiply([4, 1, -2, 3], [8, -5, 6, 7])
>>> numpy.allclose(q, [28, -44, -14, 48])
True
"""
w0, x0, y0, z0 = quaternion0
w1, x1, y1, z1 = quaternion1
return numpy.array([-x1*x0 - y1*y0 - z1*z0 + w1*w0,
x1*w0 + y1*z0 - z1*y0 + w1*x0,
-x1*z0 + y1*w0 + z1*x0 + w1*y0,
x1*y0 - y1*x0 + z1*w0 + w1*z0], dtype="f")
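# Hedged composition sketch (illustrative): multiplying quaternions composes
# rotations in the same order as multiplying their rotation matrices.
#
#   qz = quaternion_about_axis(0.5, [0, 0, 1])
#   qx = quaternion_about_axis(0.3, [1, 0, 0])
#   Mzx = numpy.dot(quaternion_matrix(qz), quaternion_matrix(qx))
#   assert numpy.allclose(Mzx, quaternion_matrix(quaternion_multiply(qz, qx)),
#                         atol=1e-5)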
def quaternion_conjugate(quaternion):
"""Return conjugate of quaternion.
>>> q0 = random_quaternion()
>>> q1 = quaternion_conjugate(q0)
>>> q1[0] == q0[0] and all(q1[1:] == -q0[1:])
True
"""
q = numpy.array(quaternion, dtype="f", copy=True)
numpy.negative(q[1:], q[1:])
return q
def quaternion_inverse(quaternion):
"""Return inverse of quaternion.
>>> q0 = random_quaternion()
>>> q1 = quaternion_inverse(q0)
>>> numpy.allclose(quaternion_multiply(q0, q1), [1, 0, 0, 0])
True
"""
q = numpy.array(quaternion, dtype="f", copy=True)
numpy.negative(q[1:], q[1:])
return q / numpy.dot(q, q)
def quaternion_real(quaternion):
"""Return real part of quaternion.
>>> quaternion_real([3, 0, 1, 2])
3.0
"""
return float(quaternion[0])
def quaternion_imag(quaternion):
"""Return imaginary part of quaternion.
>>> quaternion_imag([3, 0, 1, 2])
array([ 0., 1., 2.])
"""
return numpy.array(quaternion[1:4], dtype="f", copy=True)
def quaternion_slerp(quat0, quat1, fraction, spin=0, shortestpath=True):
"""Return spherical linear interpolation between two quaternions.
>>> q0 = random_quaternion()
>>> q1 = random_quaternion()
>>> q = quaternion_slerp(q0, q1, 0)
>>> numpy.allclose(q, q0)
True
>>> q = quaternion_slerp(q0, q1, 1, 1)
>>> numpy.allclose(q, q1)
True
>>> q = quaternion_slerp(q0, q1, 0.5)
>>> angle = math.acos(numpy.dot(q0, q))
>>> numpy.allclose(2, math.acos(numpy.dot(q0, q1)) / angle) or \
numpy.allclose(2, math.acos(-numpy.dot(q0, q1)) / angle)
True
"""
q0 = unit_vector(quat0[:4])
q1 = unit_vector(quat1[:4])
if fraction == 0.0:
return q0
elif fraction == 1.0:
return q1
d = numpy.dot(q0, q1)
if abs(abs(d) - 1.0) < _EPS:
return q0
if shortestpath and d < 0.0:
# invert rotation
d = -d
numpy.negative(q1, q1)
angle = math.acos(d) + spin * math.pi
if abs(angle) < _EPS:
return q0
isin = 1.0 / math.sin(angle)
q0 *= math.sin((1.0 - fraction) * angle) * isin
q1 *= math.sin(fraction * angle) * isin
q0 += q1
return q0
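# Hedged interpolation sketch (illustrative): slerp at fraction 0.5 about a
# fixed axis lands exactly half-way along the rotation angle.
#
#   qa = quaternion_about_axis(0.0, [0, 0, 1])
#   qb = quaternion_about_axis(1.0, [0, 0, 1])
#   qm = quaternion_slerp(qa, qb, 0.5)
#   assert numpy.allclose(qm, quaternion_about_axis(0.5, [0, 0, 1]), atol=1e-5)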
def random_quaternion(rand=None):
"""Return uniform random unit quaternion.
rand: array like or None
Three independent random variables that are uniformly distributed
between 0 and 1.
>>> q = random_quaternion()
>>> numpy.allclose(1, vector_norm(q))
True
>>> q = random_quaternion(numpy.random.random(3))
>>> len(q.shape), q.shape[0]==4
(1, True)
"""
if rand is None:
rand = numpy.random.rand(3)
else:
assert len(rand) == 3
r1 = numpy.sqrt(1.0 - rand[0])
r2 = numpy.sqrt(rand[0])
pi2 = math.pi * 2.0
t1 = pi2 * rand[1]
t2 = pi2 * rand[2]
return numpy.array([numpy.cos(t2)*r2, numpy.sin(t1)*r1,
numpy.cos(t1)*r1, numpy.sin(t2)*r2])
def random_rotation_matrix(rand=None):
"""Return uniform random rotation matrix.
rand: array like
Three independent random variables that are uniformly distributed
between 0 and 1 for each returned quaternion.
>>> R = random_rotation_matrix()
>>> numpy.allclose(numpy.dot(R.T, R), numpy.identity(4))
True
"""
return quaternion_matrix(random_quaternion(rand))
class Arcball(object):
"""Virtual Trackball Control.
>>> ball = Arcball()
>>> ball = Arcball(initial=numpy.identity(4))
>>> ball.place([320, 320], 320)
>>> ball.down([500, 250])
>>> ball.drag([475, 275])
>>> R = ball.matrix()
>>> numpy.allclose(numpy.sum(R), 3.90583455)
True
>>> ball = Arcball(initial=[1, 0, 0, 0])
>>> ball.place([320, 320], 320)
>>> ball.setaxes([1, 1, 0], [-1, 1, 0])
>>> ball.constrain = True
>>> ball.down([400, 200])
>>> ball.drag([200, 400])
>>> R = ball.matrix()
>>> numpy.allclose(numpy.sum(R), 0.2055924)
True
>>> ball.next()
"""
def __init__(self, initial=None):
"""Initialize virtual trackball control.
initial : quaternion or rotation matrix
"""
self._axis = None
self._axes = None
self._radius = 1.0
self._center = [0.0, 0.0]
self._vdown = numpy.array([0.0, 0.0, 1.0])
self._constrain = False
if initial is None:
self._qdown = numpy.array([1.0, 0.0, 0.0, 0.0])
else:
initial = numpy.array(initial, dtype="f")
if initial.shape == (4, 4):
self._qdown = quaternion_from_matrix(initial)
elif initial.shape == (4, ):
initial /= vector_norm(initial)
self._qdown = initial
else:
raise ValueError("initial not a quaternion or matrix")
self._qnow = self._qpre = self._qdown
def place(self, center, radius):
"""Place Arcball, e.g. when window size changes.
center : sequence[2]
Window coordinates of trackball center.
radius : float
Radius of trackball in window coordinates.
"""
self._radius = float(radius)
self._center[0] = center[0]
self._center[1] = center[1]
def setaxes(self, *axes):
"""Set axes to constrain rotations."""
if axes is None:
self._axes = None
else:
self._axes = [unit_vector(axis) for axis in axes]
@property
def constrain(self):
"""Return state of constrain to axis mode."""
return self._constrain
@constrain.setter
def constrain(self, value):
"""Set state of constrain to axis mode."""
self._constrain = bool(value)
def down(self, point):
"""Set initial cursor window coordinates and pick constrain-axis."""
self._vdown = arcball_map_to_sphere(point, self._center, self._radius)
self._qdown = self._qpre = self._qnow
if self._constrain and self._axes is not None:
self._axis = arcball_nearest_axis(self._vdown, self._axes)
self._vdown = arcball_constrain_to_axis(self._vdown, self._axis)
else:
self._axis = None
def drag(self, point):
"""Update current cursor window coordinates."""
vnow = arcball_map_to_sphere(point, self._center, self._radius)
if self._axis is not None:
vnow = arcball_constrain_to_axis(vnow, self._axis)
self._qpre = self._qnow
t = numpy.cross(self._vdown, vnow)
if numpy.dot(t, t) < _EPS:
self._qnow = self._qdown
else:
q = [numpy.dot(self._vdown, vnow), t[0], t[1], t[2]]
self._qnow = quaternion_multiply(q, self._qdown)
def next(self, acceleration=0.0):
"""Continue rotation in direction of last drag."""
q = quaternion_slerp(self._qpre, self._qnow, 2.0+acceleration, False)
self._qpre, self._qnow = self._qnow, q
def matrix(self):
"""Return homogeneous rotation matrix."""
return quaternion_matrix(self._qnow)
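# Hedged Arcball wiring sketch (illustrative; the window geometry and cursor
# positions are made-up values):
#
#   ball = Arcball()
#   ball.place([320, 240], 240)   # trackball centered in a 640x480 window
#   ball.down([300, 200])         # on mouse press
#   ball.drag([340, 260])         # on mouse move
#   view = ball.matrix()          # 4x4 rotation to apply to the scene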
def arcball_map_to_sphere(point, center, radius):
"""Return unit sphere coordinates from window coordinates."""
v0 = (point[0] - center[0]) / radius
v1 = (center[1] - point[1]) / radius
n = v0*v0 + v1*v1
if n > 1.0:
# position outside of sphere
n = math.sqrt(n)
return numpy.array([v0/n, v1/n, 0.0])
else:
return numpy.array([v0, v1, math.sqrt(1.0 - n)])
def arcball_constrain_to_axis(point, axis):
"""Return sphere point perpendicular to axis."""
v = numpy.array(point, dtype="f", copy=True)
a = numpy.array(axis, dtype="f", copy=True)
v -= a * numpy.dot(a, v) # on plane
n = vector_norm(v)
if n > _EPS:
if v[2] < 0.0:
numpy.negative(v, v)
v /= n
return v
if a[2] == 1.0:
return numpy.array([1.0, 0.0, 0.0])
return unit_vector([-a[1], a[0], 0.0])
def arcball_nearest_axis(point, axes):
"""Return axis, which arc is nearest to point."""
point = numpy.array(point, dtype="f", copy=False)
nearest = None
mx = -1.0
for axis in axes:
t = numpy.dot(arcball_constrain_to_axis(point, axis), point)
if t > mx:
nearest = axis
mx = t
return nearest
# epsilon for testing whether a number is close to zero
_EPS = numpy.finfo(float).eps * 4.0
# axis sequences for Euler angles
_NEXT_AXIS = [1, 2, 0, 1]
# map axes strings to/from tuples of inner axis, parity, repetition, frame
_AXES2TUPLE = {
'sxyz': (0, 0, 0, 0), 'sxyx': (0, 0, 1, 0), 'sxzy': (0, 1, 0, 0),
'sxzx': (0, 1, 1, 0), 'syzx': (1, 0, 0, 0), 'syzy': (1, 0, 1, 0),
'syxz': (1, 1, 0, 0), 'syxy': (1, 1, 1, 0), 'szxy': (2, 0, 0, 0),
'szxz': (2, 0, 1, 0), 'szyx': (2, 1, 0, 0), 'szyz': (2, 1, 1, 0),
'rzyx': (0, 0, 0, 1), 'rxyx': (0, 0, 1, 1), 'ryzx': (0, 1, 0, 1),
'rxzx': (0, 1, 1, 1), 'rxzy': (1, 0, 0, 1), 'ryzy': (1, 0, 1, 1),
'rzxy': (1, 1, 0, 1), 'ryxy': (1, 1, 1, 1), 'ryxz': (2, 0, 0, 1),
'rzxz': (2, 0, 1, 1), 'rxyz': (2, 1, 0, 1), 'rzyz': (2, 1, 1, 1)}
_TUPLE2AXES = dict((v, k) for k, v in _AXES2TUPLE.items())
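# Hedged decoding example (illustrative): 'sxyz' maps to (firstaxis=0,
# parity=0, repetition=0, frame=0), i.e. static frame, x-y-z order, no
# repeated axis; the reverse table accepts the tuple form.
#
#   assert _TUPLE2AXES[(0, 0, 0, 0)] == 'sxyz'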
def vector_norm(data, axis=None, out=None):
"""Return length, i.e. Euclidean norm, of ndarray along axis.
>>> v = numpy.random.random(3)
>>> n = vector_norm(v)
>>> numpy.allclose(n, numpy.linalg.norm(v))
True
>>> v = numpy.random.rand(6, 5, 3)
>>> n = vector_norm(v, axis=-1)
>>> numpy.allclose(n, numpy.sqrt(numpy.sum(v*v, axis=2)))
True
>>> n = vector_norm(v, axis=1)
>>> numpy.allclose(n, numpy.sqrt(numpy.sum(v*v, axis=1)))
True
>>> v = numpy.random.rand(5, 4, 3)
>>> n = numpy.empty((5, 3))
>>> vector_norm(v, axis=1, out=n)
>>> numpy.allclose(n, numpy.sqrt(numpy.sum(v*v, axis=1)))
True
>>> vector_norm([])
0.0
>>> vector_norm([1])
1.0
"""
data = numpy.array(data, dtype="f", copy=True)
if out is None:
if data.ndim == 1:
return math.sqrt(numpy.dot(data, data))
data *= data
out = numpy.atleast_1d(numpy.sum(data, axis=axis))
numpy.sqrt(out, out)
return out
else:
data *= data
numpy.sum(data, axis=axis, out=out)
numpy.sqrt(out, out)
def unit_vector(data, axis=None, out=None):
"""Return ndarray normalized by length, i.e. Euclidean norm, along axis.
>>> v0 = numpy.random.random(3)
>>> v1 = unit_vector(v0)
>>> numpy.allclose(v1, v0 / numpy.linalg.norm(v0))
True
>>> v0 = numpy.random.rand(5, 4, 3)
>>> v1 = unit_vector(v0, axis=-1)
>>> v2 = v0 / numpy.expand_dims(numpy.sqrt(numpy.sum(v0*v0, axis=2)), 2)
>>> numpy.allclose(v1, v2)
True
>>> v1 = unit_vector(v0, axis=1)
>>> v2 = v0 / numpy.expand_dims(numpy.sqrt(numpy.sum(v0*v0, axis=1)), 1)
>>> numpy.allclose(v1, v2)
True
>>> v1 = numpy.empty((5, 4, 3))
>>> unit_vector(v0, axis=1, out=v1)
>>> numpy.allclose(v1, v2)
True
>>> list(unit_vector([]))
[]
>>> list(unit_vector([1]))
[1.0]
"""
if out is None:
data = numpy.array(data, dtype="f", copy=True)
if data.ndim == 1:
data /= math.sqrt(numpy.dot(data, data))
return data
else:
if out is not data:
out[:] = numpy.array(data, copy=False)
data = out
length = numpy.atleast_1d(numpy.sum(data*data, axis))
numpy.sqrt(length, length)
if axis is not None:
length = numpy.expand_dims(length, axis)
data /= length
if out is None:
return data
def random_vector(size):
"""Return array of random doubles in the half-open interval [0.0, 1.0).
>>> v = random_vector(10000)
>>> numpy.all(v >= 0) and numpy.all(v < 1)
True
>>> v0 = random_vector(10)
>>> v1 = random_vector(10)
>>> numpy.any(v0 == v1)
False
"""
return numpy.random.random(size)
def vector_product(v0, v1, axis=0):
"""Return vector perpendicular to vectors.
>>> v = vector_product([2, 0, 0], [0, 3, 0])
>>> numpy.allclose(v, [0, 0, 6])
True
>>> v0 = [[2, 0, 0, 2], [0, 2, 0, 2], [0, 0, 2, 2]]
>>> v1 = [[3], [0], [0]]
>>> v = vector_product(v0, v1)
>>> numpy.allclose(v, [[0, 0, 0, 0], [0, 0, 6, 6], [0, -6, 0, -6]])
True
>>> v0 = [[2, 0, 0], [2, 0, 0], [0, 2, 0], [2, 0, 0]]
>>> v1 = [[0, 3, 0], [0, 0, 3], [0, 0, 3], [3, 3, 3]]
>>> v = vector_product(v0, v1, axis=1)
>>> numpy.allclose(v, [[0, 0, 6], [0, -6, 0], [6, 0, 0], [0, -6, 6]])
True
"""
return numpy.cross(v0, v1, axis=axis)
def angle_between_vectors(v0, v1, directed=True, axis=0):
"""Return angle between vectors.
If directed is False, the input vectors are interpreted as undirected axes,
i.e. the maximum angle is pi/2.
>>> a = angle_between_vectors([1, -2, 3], [-1, 2, -3])
>>> numpy.allclose(a, math.pi)
True
>>> a = angle_between_vectors([1, -2, 3], [-1, 2, -3], directed=False)
>>> numpy.allclose(a, 0)
True
>>> v0 = [[2, 0, 0, 2], [0, 2, 0, 2], [0, 0, 2, 2]]
>>> v1 = [[3], [0], [0]]
>>> a = angle_between_vectors(v0, v1)
>>> numpy.allclose(a, [0, 1.5708, 1.5708, 0.95532])
True
>>> v0 = [[2, 0, 0], [2, 0, 0], [0, 2, 0], [2, 0, 0]]
>>> v1 = [[0, 3, 0], [0, 0, 3], [0, 0, 3], [3, 3, 3]]
>>> a = angle_between_vectors(v0, v1, axis=1)
>>> numpy.allclose(a, [1.5708, 1.5708, 1.5708, 0.95532])
True
"""
v0 = numpy.array(v0, dtype="f", copy=False)
v1 = numpy.array(v1, dtype="f", copy=False)
dot = numpy.sum(v0 * v1, axis=axis)
dot /= vector_norm(v0, axis=axis) * vector_norm(v1, axis=axis)
return numpy.arccos(dot if directed else numpy.fabs(dot))
def inverse_matrix(matrix):
"""Return inverse of square transformation matrix.
>>> M0 = random_rotation_matrix()
>>> M1 = inverse_matrix(M0.T)
>>> numpy.allclose(M1, numpy.linalg.inv(M0.T))
True
>>> for size in range(1, 7):
... M0 = numpy.random.rand(size, size)
... M1 = inverse_matrix(M0)
... if not numpy.allclose(M1, numpy.linalg.inv(M0)): print(size)
"""
return numpy.linalg.inv(matrix)
def concatenate_matrices(*matrices):
"""Return concatenation of series of transformation matrices.
>>> M = numpy.random.rand(16).reshape((4, 4)) - 0.5
>>> numpy.allclose(M, concatenate_matrices(M))
True
>>> numpy.allclose(numpy.dot(M, M.T), concatenate_matrices(M, M.T))
True
"""
M = numpy.identity(4)
for i in matrices:
M = numpy.dot(M, i)
return M
def is_same_transform(matrix0, matrix1):
"""Return True if two matrices perform same transformation.
>>> is_same_transform(numpy.identity(4), numpy.identity(4))
True
>>> is_same_transform(numpy.identity(4), random_rotation_matrix())
False
"""
matrix0 = numpy.array(matrix0, dtype="f", copy=True)
matrix0 /= matrix0[3, 3]
matrix1 = numpy.array(matrix1, dtype="f", copy=True)
matrix1 /= matrix1[3, 3]
return numpy.allclose(matrix0, matrix1)
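# Hedged scale-invariance note (illustrative): both matrices are divided by
# their M[3, 3] element first, so scalar multiples compare as equal.
#
#   assert is_same_transform(numpy.identity(4), 2.0 * numpy.identity(4))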
def _import_module(name, package=None, warn=True, prefix='_py_', ignore='_'):
"""Try import all public attributes from module into global namespace.
Existing attributes with name clashes are renamed with prefix.
Attributes starting with underscore are ignored by default.
Return True on successful import.
"""
import warnings
from importlib import import_module
try:
if not package:
module = import_module(name)
else:
module = import_module('.' + name, package=package)
except ImportError:
if warn:
warnings.warn("failed to import module %s" % name)
else:
for attr in dir(module):
if ignore and attr.startswith(ignore):
continue
if prefix:
if attr in globals():
globals()[prefix + attr] = globals()[attr]
elif warn:
warnings.warn("no Python implementation of " + attr)
globals()[attr] = getattr(module, attr)
return True
if 0:
_import_module('_transformations')
if __name__ == "__main__":
import doctest
import random # used in doctests
numpy.set_printoptions(suppress=True, precision=5)
doctest.testmod()
|
Gato-X/NotYourData
|
gameowfication/libs/transformations.py
|
Python
|
mit
| 65,838
|
import locale
import os
import re
import subprocess
import sys
import platform
import time
# init libs
PROGRAM_DIR = os.path.dirname(os.path.normpath(os.path.abspath(os.path.join(__file__, os.pardir))))
LIBS_DIR = os.path.join(PROGRAM_DIR, 'libs')
sys.path.insert(0, LIBS_DIR)
# init preliminaries
SYS_ARGV = sys.argv[1:]
APP_FILENAME = sys.argv[0]
APP_NAME = os.path.basename(APP_FILENAME)
LOG_DIR = os.path.join(PROGRAM_DIR, 'logs')
LOG_FILE = os.path.join(LOG_DIR, 'nzbtomedia.log')
PID_FILE = os.path.join(LOG_DIR, 'nzbtomedia.pid')
CONFIG_FILE = os.path.join(PROGRAM_DIR, 'autoProcessMedia.cfg')
CONFIG_SPEC_FILE = os.path.join(PROGRAM_DIR, 'autoProcessMedia.cfg.spec')
CONFIG_MOVIE_FILE = os.path.join(PROGRAM_DIR, 'autoProcessMovie.cfg')
CONFIG_TV_FILE = os.path.join(PROGRAM_DIR, 'autoProcessTv.cfg')
TEST_FILE = os.path.join(os.path.join(PROGRAM_DIR, 'tests'), 'test.mp4')
MYAPP = None
from core.autoProcess.autoProcessComics import autoProcessComics
from core.autoProcess.autoProcessGames import autoProcessGames
from core.autoProcess.autoProcessMovie import autoProcessMovie
from core.autoProcess.autoProcessMusic import autoProcessMusic
from core.autoProcess.autoProcessTV import autoProcessTV
from core import logger, versionCheck, nzbToMediaDB
from core.nzbToMediaConfig import config
from core.nzbToMediaUtil import category_search, sanitizeName, copy_link, parse_args, flatten, getDirs, \
rmReadOnly,rmDir, pause_torrent, resume_torrent, remove_torrent, listMediaFiles, \
extractFiles, cleanDir, update_downloadInfoStatus, get_downloadInfo, WakeUp, makeDir, cleanDir, \
create_torrent_class, listMediaFiles, RunningProcess
from core.transcoder import transcoder
from core.databases import mainDB
# Client Agents
NZB_CLIENTS = ['sabnzbd','nzbget']
TORRENT_CLIENTS = ['transmission', 'deluge', 'utorrent', 'rtorrent', 'other']
# sabnzbd constants
SABNZB_NO_OF_ARGUMENTS = 8
SABNZB_0717_NO_OF_ARGUMENTS = 9
# sickbeard fork/branch constants
FORKS = {}
FORK_DEFAULT = "default"
FORK_FAILED = "failed"
FORK_FAILED_TORRENT = "failed-torrent"
FORK_SICKRAGE = "sickrage"
FORKS[FORK_DEFAULT] = {"dir": None}
FORKS[FORK_FAILED] = {"dirName": None, "failed": None}
FORKS[FORK_FAILED_TORRENT] = {"dir": None, "failed": None, "process_method": None}
FORKS[FORK_SICKRAGE] = {"dir": None, "failed": None, "process_method": None, "force": None}
ALL_FORKS = {"dir": None, "dirName": None, "failed": None, "process_method": None, "force": None}
SICKBEARD_FAILED = [FORK_FAILED, FORK_FAILED_TORRENT, FORK_SICKRAGE]
SICKBEARD_TORRENT = [FORK_FAILED_TORRENT, FORK_SICKRAGE]
# NZBGet Exit Codes
NZBGET_POSTPROCESS_PARCHECK = 92
NZBGET_POSTPROCESS_SUCCESS = 93
NZBGET_POSTPROCESS_ERROR = 94
NZBGET_POSTPROCESS_NONE = 95
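# Hedged usage note (illustrative): post-processing scripts report back to
# NZBGet by exiting with one of these codes, e.g.
#
#   sys.exit(NZBGET_POSTPROCESS_SUCCESS)  # processed OK
#   sys.exit(NZBGET_POSTPROCESS_ERROR)    # processing failed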
CFG = None
LOG_DEBUG = None
LOG_DB = None
LOG_ENV = None
LOG_GIT = None
SYS_ENCODING = None
AUTO_UPDATE = None
NZBTOMEDIA_VERSION = None
NEWEST_VERSION = None
NEWEST_VERSION_STRING = None
VERSION_NOTIFY = None
GIT_PATH = None
GIT_USER = None
GIT_BRANCH = None
GIT_REPO = None
FORCE_CLEAN = None
SAFE_MODE = None
NZB_CLIENTAGENT = None
SABNZBDHOST = None
SABNZBDPORT = None
SABNZBDAPIKEY = None
NZB_DEFAULTDIR = None
TORRENT_CLIENTAGENT = None
TORRENT_CLASS = None
USELINK = None
OUTPUTDIRECTORY = None
NOFLATTEN = []
DELETE_ORIGINAL = None
TORRENT_DEFAULTDIR = None
REMOTEPATHS = []
UTORRENTWEBUI = None
UTORRENTUSR = None
UTORRENTPWD = None
TRANSMISSIONHOST = None
TRANSMISSIONPORT = None
TRANSMISSIONUSR = None
TRANSMISSIONPWD = None
DELUGEHOST = None
DELUGEPORT = None
DELUGEUSR = None
DELUGEPWD = None
PLEXSSL = None
PLEXHOST = None
PLEXPORT = None
PLEXTOKEN = None
PLEXSEC = []
EXTCONTAINER = []
COMPRESSEDCONTAINER = []
MEDIACONTAINER = []
AUDIOCONTAINER = []
METACONTAINER = []
SECTIONS = []
CATEGORIES = []
GETSUBS = False
TRANSCODE = None
CONCAT = None
FFMPEG_PATH = None
DUPLICATE = None
IGNOREEXTENSIONS = []
VEXTENSION = None
OUTPUTVIDEOPATH = None
PROCESSOUTPUT = False
GENERALOPTS = []
ALANGUAGE = None
AINCLUDE = False
SLANGUAGES = []
SINCLUDE = False
SUBSDIR = None
ALLOWSUBS = False
SEXTRACT = False
SEMBED = False
BURN = False
DEFAULTS = None
VCODEC = None
VCODEC_ALLOW = []
VPRESET = None
VFRAMERATE = None
VBITRATE = None
VLEVEL = None
VCRF = None
VRESOLUTION = None
ACODEC = None
ACODEC_ALLOW = []
ACHANNELS = None
ABITRATE = None
ACODEC2 = None
ACODEC2_ALLOW = []
ACHANNELS2 = None
ABITRATE2 = None
ACODEC3 = None
ACODEC3_ALLOW = []
ACHANNELS3 = None
ABITRATE3 = None
SCODEC = None
OUTPUTFASTSTART = None
OUTPUTQUALITYPERCENT = None
FFMPEG = None
SEVENZIP = None
FFPROBE = None
CHECK_MEDIA = None
NICENESS = []
HWACCEL = False
PASSWORDSFILE = None
DOWNLOADINFO = None
GROUPS = None
USER_SCRIPT_MEDIAEXTENSIONS = None
USER_SCRIPT = None
USER_SCRIPT_PARAM = None
USER_SCRIPT_SUCCESSCODES = None
USER_SCRIPT_CLEAN = None
USER_DELAY = None
USER_SCRIPT_RUNONCE = None
__INITIALIZED__ = False
def initialize(section=None):
global NZBGET_POSTPROCESS_ERROR, NZBGET_POSTPROCESS_NONE, NZBGET_POSTPROCESS_PARCHECK, NZBGET_POSTPROCESS_SUCCESS, \
NZBTOMEDIA_TIMEOUT, FORKS, FORK_DEFAULT, FORK_FAILED_TORRENT, FORK_FAILED, SICKBEARD_TORRENT, SICKBEARD_FAILED, \
NZBTOMEDIA_BRANCH, NZBTOMEDIA_VERSION, NEWEST_VERSION, NEWEST_VERSION_STRING, VERSION_NOTIFY, SYS_ARGV, CFG, \
SABNZB_NO_OF_ARGUMENTS, SABNZB_0717_NO_OF_ARGUMENTS, CATEGORIES, TORRENT_CLIENTAGENT, USELINK, OUTPUTDIRECTORY, \
NOFLATTEN, UTORRENTPWD, UTORRENTUSR, UTORRENTWEBUI, DELUGEHOST, DELUGEPORT, DELUGEUSR, DELUGEPWD, VLEVEL, \
TRANSMISSIONHOST, TRANSMISSIONPORT, TRANSMISSIONPWD, TRANSMISSIONUSR, COMPRESSEDCONTAINER, MEDIACONTAINER, \
METACONTAINER, SECTIONS, ALL_FORKS, TEST_FILE, GENERALOPTS, LOG_GIT, GROUPS, SEVENZIP, CONCAT, VCRF, \
__INITIALIZED__, AUTO_UPDATE, APP_FILENAME, USER_DELAY, APP_NAME, TRANSCODE, DEFAULTS, GIT_PATH, GIT_USER, \
GIT_BRANCH, GIT_REPO, SYS_ENCODING, NZB_CLIENTAGENT, SABNZBDHOST, SABNZBDPORT, SABNZBDAPIKEY, \
DUPLICATE, IGNOREEXTENSIONS, VEXTENSION, OUTPUTVIDEOPATH, PROCESSOUTPUT, VCODEC, VCODEC_ALLOW, VPRESET, \
VFRAMERATE, LOG_DB, VBITRATE, VRESOLUTION, ALANGUAGE, AINCLUDE, ACODEC, ACODEC_ALLOW, ABITRATE, \
ACODEC2, ACODEC2_ALLOW, ABITRATE2, ACODEC3, ACODEC3_ALLOW, ABITRATE3, ALLOWSUBS, SEXTRACT, SEMBED, SLANGUAGES, \
SINCLUDE, SUBSDIR, SCODEC, OUTPUTFASTSTART, OUTPUTQUALITYPERCENT, BURN, GETSUBS, HWACCEL, LOG_DIR, LOG_FILE, \
NICENESS, LOG_DEBUG, FORCE_CLEAN, FFMPEG_PATH, FFMPEG, FFPROBE, AUDIOCONTAINER, EXTCONTAINER, TORRENT_CLASS, \
DELETE_ORIGINAL, PASSWORDSFILE, USER_DELAY, USER_SCRIPT, USER_SCRIPT_CLEAN, USER_SCRIPT_MEDIAEXTENSIONS, \
USER_SCRIPT_PARAM, USER_SCRIPT_RUNONCE, USER_SCRIPT_SUCCESSCODES, DOWNLOADINFO, CHECK_MEDIA, SAFE_MODE, \
TORRENT_DEFAULTDIR, NZB_DEFAULTDIR, REMOTEPATHS, LOG_ENV, PID_FILE, MYAPP, ACHANNELS, ACHANNELS2, ACHANNELS3, \
PLEXSSL, PLEXHOST, PLEXPORT, PLEXTOKEN, PLEXSEC
if __INITIALIZED__:
return False
if os.environ.has_key('NTM_LOGFILE'):
LOG_FILE = os.environ['NTM_LOGFILE']
LOG_DIR = os.path.split(LOG_FILE)[0]
if not makeDir(LOG_DIR):
print("No log folder, logging to screen only")
MYAPP = RunningProcess()
while MYAPP.alreadyrunning():
print("Waiting for existing session to end")
time.sleep(30)
try:
locale.setlocale(locale.LC_ALL, "")
SYS_ENCODING = locale.getpreferredencoding()
except (locale.Error, IOError):
pass
# For OSes that are poorly configured I'll just randomly force UTF-8
if not SYS_ENCODING or SYS_ENCODING in ('ANSI_X3.4-1968', 'US-ASCII', 'ASCII'):
SYS_ENCODING = 'UTF-8'
if not hasattr(sys, "setdefaultencoding"):
reload(sys)
try:
# pylint: disable=E1101
# On non-unicode builds this will raise an AttributeError, if encoding type is not valid it throws a LookupError
sys.setdefaultencoding(SYS_ENCODING)
except:
print('Sorry, you MUST add the nzbToMedia folder to the PYTHONPATH environment variable')
print('or find another way to force Python to use ' + SYS_ENCODING + ' for string encoding.')
if os.environ.has_key('NZBOP_SCRIPTDIR'):
sys.exit(NZBGET_POSTPROCESS_ERROR)
else:
sys.exit(1)
# init logging
logger.ntm_log_instance.initLogging()
# run migrate to convert old cfg to new style cfg plus fix any cfg missing values/options.
if not config.migrate():
logger.error("Unable to migrate config file %s, exiting ..." % (CONFIG_FILE))
if os.environ.has_key('NZBOP_SCRIPTDIR'):
pass # We will try and read config from Environment.
else:
sys.exit(-1)
# run migrate to convert NzbGet data from old cfg style to new cfg style
if os.environ.has_key('NZBOP_SCRIPTDIR'):
CFG = config.addnzbget()
else: # load newly migrated config
logger.info("Loading config from [%s]" % (CONFIG_FILE))
CFG = config()
# Enable/Disable DEBUG Logging
LOG_DEBUG = int(CFG['General']['log_debug'])
LOG_DB = int(CFG['General']['log_db'])
LOG_ENV = int(CFG['General']['log_env'])
LOG_GIT = int(CFG['General']['log_git'])
if LOG_ENV:
for item in os.environ:
logger.info("%s: %s" % (item, os.environ[item]), "ENVIRONMENT")
# initialize the main SB database
nzbToMediaDB.upgradeDatabase(nzbToMediaDB.DBConnection(), mainDB.InitialSchema)
# Set Version and GIT variables
NZBTOMEDIA_VERSION = '10.11'
VERSION_NOTIFY = int(CFG['General']['version_notify'])
AUTO_UPDATE = int(CFG['General']['auto_update'])
GIT_REPO = 'nzbToMedia'
GIT_PATH = CFG['General']['git_path']
GIT_USER = CFG['General']['git_user'] or 'clinton-hall'
GIT_BRANCH = CFG['General']['git_branch'] or 'master'
FORCE_CLEAN = int(CFG["General"]["force_clean"])
FFMPEG_PATH = CFG["General"]["ffmpeg_path"]
CHECK_MEDIA = int(CFG["General"]["check_media"])
SAFE_MODE = int(CFG["General"]["safe_mode"])
# Check for updates via GitHUB
if versionCheck.CheckVersion().check_for_new_version():
if AUTO_UPDATE == 1:
logger.info("Auto-Updating nzbToMedia, Please wait ...")
updated = versionCheck.CheckVersion().update()
if updated:
# restart nzbToMedia
try:
del MYAPP
except: pass
restart()
else:
logger.error("Update wasn't successful, not restarting. Check your log for more information.")
# Set Current Version
logger.info(
'nzbToMedia Version:' + NZBTOMEDIA_VERSION + ' Branch:' + GIT_BRANCH + ' (' + platform.system() + ' ' + platform.release() + ')')
if int(CFG["WakeOnLan"]["wake"]) == 1:
WakeUp()
NZB_CLIENTAGENT = CFG["Nzb"]["clientAgent"] # sabnzbd
SABNZBDHOST = CFG["Nzb"]["sabnzbd_host"]
SABNZBDPORT = int(CFG["Nzb"]["sabnzbd_port"])
SABNZBDAPIKEY = CFG["Nzb"]["sabnzbd_apikey"]
NZB_DEFAULTDIR = CFG["Nzb"]["default_downloadDirectory"]
GROUPS = CFG["Custom"]["remove_group"]
if isinstance(GROUPS, str): GROUPS = GROUPS.split(',')
if GROUPS == ['']: GROUPS = None
TORRENT_CLIENTAGENT = CFG["Torrent"]["clientAgent"] # utorrent | deluge | transmission | rtorrent | vuze |other
USELINK = CFG["Torrent"]["useLink"] # no | hard | sym
OUTPUTDIRECTORY = CFG["Torrent"]["outputDirectory"] # /abs/path/to/complete/
TORRENT_DEFAULTDIR = CFG["Torrent"]["default_downloadDirectory"]
CATEGORIES = (CFG["Torrent"]["categories"]) # music,music_videos,pictures,software
NOFLATTEN = (CFG["Torrent"]["noFlatten"])
if isinstance(NOFLATTEN, str): NOFLATTEN = NOFLATTEN.split(',')
if isinstance(CATEGORIES, str): CATEGORIES = CATEGORIES.split(',')
DELETE_ORIGINAL = int(CFG["Torrent"]["deleteOriginal"])
UTORRENTWEBUI = CFG["Torrent"]["uTorrentWEBui"] # http://localhost:8090/gui/
UTORRENTUSR = CFG["Torrent"]["uTorrentUSR"] # mysecretusr
UTORRENTPWD = CFG["Torrent"]["uTorrentPWD"] # mysecretpwr
TRANSMISSIONHOST = CFG["Torrent"]["TransmissionHost"] # localhost
TRANSMISSIONPORT = int(CFG["Torrent"]["TransmissionPort"])
TRANSMISSIONUSR = CFG["Torrent"]["TransmissionUSR"] # mysecretusr
TRANSMISSIONPWD = CFG["Torrent"]["TransmissionPWD"] # mysecretpwr
DELUGEHOST = CFG["Torrent"]["DelugeHost"] # localhost
DELUGEPORT = int(CFG["Torrent"]["DelugePort"]) # 8084
DELUGEUSR = CFG["Torrent"]["DelugeUSR"] # mysecretusr
DELUGEPWD = CFG["Torrent"]["DelugePWD"] # mysecretpwr
REMOTEPATHS = CFG["Network"]["mount_points"] or []
if REMOTEPATHS:
if isinstance(REMOTEPATHS, list): REMOTEPATHS = ','.join(REMOTEPATHS) # fix in case this imported as list.
REMOTEPATHS = [ tuple(item.split(',')) for item in REMOTEPATHS.split('|') ] # /volume1/Public/,E:\|/volume2/share/,\\NAS\
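# Hedged parsing note (illustrative): a value such as
# "/volume1/Public/,E:\|/volume2/share/,\\NAS\" becomes a list of
# (path_a, path_b) tuples: split on '|' between mounts, then on ','
# within each mount.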
PLEXSSL = int(CFG["Plex"]["plex_ssl"])
PLEXHOST = CFG["Plex"]["plex_host"]
PLEXPORT = CFG["Plex"]["plex_port"]
PLEXTOKEN = CFG["Plex"]["plex_token"]
PLEXSEC = CFG["Plex"]["plex_sections"] or []
if PLEXSEC:
if isinstance(PLEXSEC, list): PLEXSEC = ','.join(PLEXSEC) # fix in case this imported as list.
PLEXSEC = [ tuple(item.split(',')) for item in PLEXSEC.split('|') ]
devnull = open(os.devnull, 'w')
try:
subprocess.Popen(["nice"], stdout=devnull, stderr=devnull).communicate()
NICENESS.extend(['nice', '-n%s' % (int(CFG["Posix"]["niceness"]))])
except: pass
try:
subprocess.Popen(["ionice"], stdout=devnull, stderr=devnull).communicate()
try:
NICENESS.extend(['ionice', '-c%s' % (int(CFG["Posix"]["ionice_class"]))])
except: pass
try:
if 'ionice' in NICENESS:
NICENESS.extend(['-n%s' % (int(CFG["Posix"]["ionice_classdata"]))])
else:
NICENESS.extend(['ionice', '-n%s' % (int(CFG["Posix"]["ionice_classdata"]))])
except: pass
except: pass
devnull.close()
COMPRESSEDCONTAINER = [re.compile('.r\d{2}$', re.I),
re.compile('.part\d+.rar$', re.I),
re.compile('.rar$', re.I)]
COMPRESSEDCONTAINER += [re.compile('%s$' % ext, re.I) for ext in CFG["Extensions"]["compressedExtensions"]]
MEDIACONTAINER = CFG["Extensions"]["mediaExtensions"]
AUDIOCONTAINER = CFG["Extensions"]["audioExtensions"]
METACONTAINER = CFG["Extensions"]["metaExtensions"] # .nfo,.sub,.srt
if isinstance(COMPRESSEDCONTAINER, str): COMPRESSEDCONTAINER = COMPRESSEDCONTAINER.split(',')
if isinstance(MEDIACONTAINER, str): MEDIACONTAINER = MEDIACONTAINER.split(',')
if isinstance(AUDIOCONTAINER, str): AUDIOCONTAINER = AUDIOCONTAINER.split(',')
if isinstance(METACONTAINER, str): METACONTAINER = METACONTAINER.split(',')
GETSUBS = int(CFG["Transcoder"]["getSubs"])
TRANSCODE = int(CFG["Transcoder"]["transcode"])
DUPLICATE = int(CFG["Transcoder"]["duplicate"])
CONCAT = int(CFG["Transcoder"]["concat"])
IGNOREEXTENSIONS = (CFG["Transcoder"]["ignoreExtensions"])
if isinstance(IGNOREEXTENSIONS, str): IGNOREEXTENSIONS = IGNOREEXTENSIONS.split(',')
OUTPUTFASTSTART = int(CFG["Transcoder"]["outputFastStart"])
GENERALOPTS = (CFG["Transcoder"]["generalOptions"])
if isinstance(GENERALOPTS, str): GENERALOPTS = GENERALOPTS.split(',')
if GENERALOPTS == ['']: GENERALOPTS = []
if not '-fflags' in GENERALOPTS: GENERALOPTS.append('-fflags')
if not '+genpts' in GENERALOPTS: GENERALOPTS.append('+genpts')
try:
OUTPUTQUALITYPERCENT = int(CFG["Transcoder"]["outputQualityPercent"])
except: pass
OUTPUTVIDEOPATH = CFG["Transcoder"]["outputVideoPath"]
PROCESSOUTPUT = int(CFG["Transcoder"]["processOutput"])
ALANGUAGE = CFG["Transcoder"]["audioLanguage"]
AINCLUDE = int(CFG["Transcoder"]["allAudioLanguages"])
SLANGUAGES = CFG["Transcoder"]["subLanguages"]
if isinstance(SLANGUAGES, str): SLANGUAGES = SLANGUAGES.split(',')
if SLANGUAGES == ['']: SLANGUAGES = []
SINCLUDE = int(CFG["Transcoder"]["allSubLanguages"])
SEXTRACT = int(CFG["Transcoder"]["extractSubs"])
SEMBED = int(CFG["Transcoder"]["embedSubs"])
SUBSDIR = CFG["Transcoder"]["externalSubDir"]
VEXTENSION = CFG["Transcoder"]["outputVideoExtension"].strip()
VCODEC = CFG["Transcoder"]["outputVideoCodec"].strip()
VCODEC_ALLOW = CFG["Transcoder"]["VideoCodecAllow"].strip()
if isinstance(VCODEC_ALLOW, str): VCODEC_ALLOW = VCODEC_ALLOW.split(',')
if VCODEC_ALLOW == ['']: VCODEC_ALLOW = []
VPRESET = CFG["Transcoder"]["outputVideoPreset"].strip()
try:
VFRAMERATE = float(CFG["Transcoder"]["outputVideoFramerate"].strip())
except: pass
try:
VCRF = int(CFG["Transcoder"]["outputVideoCRF"].strip())
except: pass
try:
VLEVEL = CFG["Transcoder"]["outputVideoLevel"].strip()
except: pass
try:
VBITRATE = int((CFG["Transcoder"]["outputVideoBitrate"].strip()).replace('k','000'))
except: pass
VRESOLUTION = CFG["Transcoder"]["outputVideoResolution"]
ACODEC = CFG["Transcoder"]["outputAudioCodec"].strip()
ACODEC_ALLOW = CFG["Transcoder"]["AudioCodecAllow"].strip()
if isinstance(ACODEC_ALLOW, str): ACODEC_ALLOW = ACODEC_ALLOW.split(',')
if ACODEC_ALLOW == ['']: ACODEC_ALLOW = []
try:
ACHANNELS = int(CFG["Transcoder"]["outputAudioChannels"].strip())
except: pass
try:
ABITRATE = int((CFG["Transcoder"]["outputAudioBitrate"].strip()).replace('k','000'))
except: pass
ACODEC2 = CFG["Transcoder"]["outputAudioTrack2Codec"].strip()
ACODEC2_ALLOW = CFG["Transcoder"]["AudioCodec2Allow"].strip()
if isinstance(ACODEC2_ALLOW, str): ACODEC2_ALLOW = ACODEC2_ALLOW.split(',')
if ACODEC2_ALLOW == ['']: ACODEC2_ALLOW = []
try:
ACHANNELS2 = int(CFG["Transcoder"]["outputAudioTrack2Channels"].strip())
except: pass
try:
ABITRATE2 = int((CFG["Transcoder"]["outputAudioTrack2Bitrate"].strip()).replace('k','000'))
except: pass
ACODEC3 = CFG["Transcoder"]["outputAudioOtherCodec"].strip()
ACODEC3_ALLOW = CFG["Transcoder"]["AudioOtherCodecAllow"].strip()
if isinstance(ACODEC3_ALLOW, str): ACODEC3_ALLOW = ACODEC3_ALLOW.split(',')
if ACODEC3_ALLOW == ['']: ACODEC3_ALLOW = []
try:
ACHANNELS3 = int(CFG["Transcoder"]["outputAudioOtherChannels"].strip())
except: pass
try:
ABITRATE3 = int((CFG["Transcoder"]["outputAudioOtherBitrate"].strip()).replace('k','000'))
except: pass
SCODEC = CFG["Transcoder"]["outputSubtitleCodec"].strip()
BURN = int(CFG["Transcoder"]["burnInSubtitle"].strip())
DEFAULTS = CFG["Transcoder"]["outputDefault"].strip()
HWACCEL = int(CFG["Transcoder"]["hwAccel"])
allow_subs = ['.mkv','.mp4', '.m4v', 'asf', 'wma', 'wmv']
codec_alias = {
'libx264':['libx264', 'h264', 'h.264', 'AVC', 'MPEG-4'],
'libmp3lame':['libmp3lame', 'mp3'],
'libfaac':['libfaac', 'aac', 'faac']
}
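# Hedged expansion note (illustrative): if VCODEC_ALLOW is ['libx264'], the
# alias loops further down extend it with 'h264', 'h.264', 'AVC' and
# 'MPEG-4', so probed stream names match any common spelling.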
transcode_defaults = {
'iPad':{
'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None,
'VRESOLUTION':None,'VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
'ACODEC':'aac','ACODEC_ALLOW':['libfaac'],'ABITRATE':None, 'ACHANNELS':2,
'ACODEC2':'ac3','ACODEC2_ALLOW':['ac3'],'ABITRATE2':None, 'ACHANNELS2':6,
'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None,
'SCODEC':'mov_text'
},
'iPad-1080p':{
'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None,
'VRESOLUTION':'1920:1080','VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
'ACODEC':'aac','ACODEC_ALLOW':['libfaac'],'ABITRATE':None, 'ACHANNELS':2,
'ACODEC2':'ac3','ACODEC2_ALLOW':['ac3'],'ABITRATE2':None, 'ACHANNELS2':6,
'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None,
'SCODEC':'mov_text'
},
'iPad-720p':{
'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None,
'VRESOLUTION':'1280:720','VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
'ACODEC':'aac','ACODEC_ALLOW':['libfaac'],'ABITRATE':None, 'ACHANNELS':2,
'ACODEC2':'ac3','ACODEC2_ALLOW':['ac3'],'ABITRATE2':None, 'ACHANNELS2':6,
'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None,
'SCODEC':'mov_text'
},
'Apple-TV':{
'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None,
'VRESOLUTION':'1280:720','VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
'ACODEC':'ac3','ACODEC_ALLOW':['ac3'],'ABITRATE':None, 'ACHANNELS':6,
'ACODEC2':'aac','ACODEC2_ALLOW':['libfaac'],'ABITRATE2':None, 'ACHANNELS2':2,
'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None,
'SCODEC':'mov_text'
},
'iPod':{
'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None,
'VRESOLUTION':'1280:720','VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
'ACODEC':'aac','ACODEC_ALLOW':['libfaac'],'ABITRATE':128000, 'ACHANNELS':2,
'ACODEC2':None,'ACODEC2_ALLOW':[],'ABITRATE2':None, 'ACHANNELS2':None,
'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None,
'SCODEC':'mov_text'
},
'iPhone':{
'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None,
'VRESOLUTION':'460:320','VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
'ACODEC':'aac','ACODEC_ALLOW':['libfaac'],'ABITRATE':128000, 'ACHANNELS':2,
'ACODEC2':None,'ACODEC2_ALLOW':[],'ABITRATE2':None, 'ACHANNELS2':None,
'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None,
'SCODEC':'mov_text'
},
'PS3':{
'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None,
'VRESOLUTION':None,'VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
'ACODEC':'ac3','ACODEC_ALLOW':['ac3'],'ABITRATE':None, 'ACHANNELS':6,
'ACODEC2':'aac','ACODEC2_ALLOW':['libfaac'],'ABITRATE2':None, 'ACHANNELS2':2,
'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None,
'SCODEC':'mov_text'
},
'xbox':{
'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None,
'VRESOLUTION':None,'VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
'ACODEC':'ac3','ACODEC_ALLOW':['ac3'],'ABITRATE':None, 'ACHANNELS':6,
'ACODEC2':None,'ACODEC2_ALLOW':[],'ABITRATE2':None, 'ACHANNELS2':None,
'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None,
'SCODEC':'mov_text'
},
'Roku-480p':{
'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None,
'VRESOLUTION':None,'VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
'ACODEC':'aac','ACODEC_ALLOW':['libfaac'],'ABITRATE':128000, 'ACHANNELS':2,
'ACODEC2':'ac3','ACODEC2_ALLOW':['ac3'],'ABITRATE2':None, 'ACHANNELS2':6,
'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None,
'SCODEC':'mov_text'
},
'Roku-720p':{
'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None,
'VRESOLUTION':None,'VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
'ACODEC':'aac','ACODEC_ALLOW':['libfaac'],'ABITRATE':128000, 'ACHANNELS':2,
'ACODEC2':'ac3','ACODEC2_ALLOW':['ac3'],'ABITRATE2':None, 'ACHANNELS2':6,
'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None,
'SCODEC':'mov_text'
},
'Roku-1080p':{
'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None,
'VRESOLUTION':None,'VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
'ACODEC':'aac','ACODEC_ALLOW':['libfaac'],'ABITRATE':160000, 'ACHANNELS':2,
'ACODEC2':'ac3','ACODEC2_ALLOW':['ac3'],'ABITRATE2':None, 'ACHANNELS2':6,
'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None,
'SCODEC':'mov_text'
},
'mkv':{
'VEXTENSION':'.mkv','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None,
'VRESOLUTION':None,'VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4', 'mpeg2video'],
'ACODEC':'dts','ACODEC_ALLOW':['libfaac', 'dts', 'ac3', 'mp2', 'mp3'],'ABITRATE':None, 'ACHANNELS':8,
'ACODEC2':None,'ACODEC2_ALLOW':[],'ABITRATE2':None, 'ACHANNELS2':None,
'ACODEC3':'ac3','ACODEC3_ALLOW':['libfaac', 'dts', 'ac3', 'mp2', 'mp3'],'ABITRATE3':None, 'ACHANNELS3':8,
'SCODEC':'mov_text'
},
'mp4-scene-release':{
'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':19,'VLEVEL':'3.1',
'VRESOLUTION':None,'VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4', 'mpeg2video'],
'ACODEC':'dts','ACODEC_ALLOW':['libfaac', 'dts', 'ac3', 'mp2', 'mp3'],'ABITRATE':None, 'ACHANNELS':8,
'ACODEC2':None,'ACODEC2_ALLOW':[],'ABITRATE2':None, 'ACHANNELS2':None,
'ACODEC3':'ac3','ACODEC3_ALLOW':['libfaac', 'dts', 'ac3', 'mp2', 'mp3'],'ABITRATE3':None, 'ACHANNELS3':8,
'SCODEC':'mov_text'
}
}
if DEFAULTS and DEFAULTS in transcode_defaults:
VEXTENSION = transcode_defaults[DEFAULTS]['VEXTENSION']
VCODEC = transcode_defaults[DEFAULTS]['VCODEC']
VPRESET = transcode_defaults[DEFAULTS]['VPRESET']
VFRAMERATE = transcode_defaults[DEFAULTS]['VFRAMERATE']
VBITRATE = transcode_defaults[DEFAULTS]['VBITRATE']
VRESOLUTION = transcode_defaults[DEFAULTS]['VRESOLUTION']
VCRF = transcode_defaults[DEFAULTS]['VCRF']
VLEVEL = transcode_defaults[DEFAULTS]['VLEVEL']
VCODEC_ALLOW = transcode_defaults[DEFAULTS]['VCODEC_ALLOW']
ACODEC = transcode_defaults[DEFAULTS]['ACODEC']
ACODEC_ALLOW = transcode_defaults[DEFAULTS]['ACODEC_ALLOW']
ACHANNELS = transcode_defaults[DEFAULTS]['ACHANNELS']
ABITRATE = transcode_defaults[DEFAULTS]['ABITRATE']
ACODEC2 = transcode_defaults[DEFAULTS]['ACODEC2']
ACODEC2_ALLOW = transcode_defaults[DEFAULTS]['ACODEC2_ALLOW']
ACHANNELS2 = transcode_defaults[DEFAULTS]['ACHANNELS2']
ABITRATE2 = transcode_defaults[DEFAULTS]['ABITRATE2']
ACODEC3 = transcode_defaults[DEFAULTS]['ACODEC3']
ACODEC3_ALLOW = transcode_defaults[DEFAULTS]['ACODEC3_ALLOW']
ACHANNELS3 = transcode_defaults[DEFAULTS]['ACHANNELS3']
ABITRATE3 = transcode_defaults[DEFAULTS]['ABITRATE3']
SCODEC = transcode_defaults[DEFAULTS]['SCODEC']
transcode_defaults = {} # clear memory
if DEFAULTS == 'mp4-scene-release' and not OUTPUTQUALITYPERCENT:  # compare the preset name; transcode_defaults was cleared above
OUTPUTQUALITYPERCENT = 100
if VEXTENSION in allow_subs:
ALLOWSUBS = 1
if not VCODEC_ALLOW and VCODEC: VCODEC_ALLOW.extend([VCODEC])
for codec in VCODEC_ALLOW:
if codec in codec_alias:
extra = [ item for item in codec_alias[codec] if item not in VCODEC_ALLOW ]
VCODEC_ALLOW.extend(extra)
if not ACODEC_ALLOW and ACODEC: ACODEC_ALLOW.extend([ACODEC])
for codec in ACODEC_ALLOW:
if codec in codec_alias:
extra = [ item for item in codec_alias[codec] if item not in ACODEC_ALLOW ]
ACODEC_ALLOW.extend(extra)
if not ACODEC2_ALLOW and ACODEC2: ACODEC2_ALLOW.extend([ACODEC2])
for codec in ACODEC2_ALLOW:
if codec in codec_alias:
extra = [ item for item in codec_alias[codec] if item not in ACODEC2_ALLOW ]
ACODEC2_ALLOW.extend(extra)
if not ACODEC3_ALLOW and ACODEC3: ACODEC3_ALLOW.extend([ACODEC3])
for codec in ACODEC3_ALLOW:
if codec in codec_alias:
extra = [ item for item in codec_alias[codec] if item not in ACODEC3_ALLOW ]
ACODEC3_ALLOW.extend(extra)
codec_alias = {} # clear memory
PASSWORDSFILE = CFG["passwords"]["PassWordFile"]
# Setup FFMPEG, FFPROBE and SEVENZIP locations
if platform.system() == 'Windows':
FFMPEG = os.path.join(FFMPEG_PATH, 'ffmpeg.exe')
FFPROBE = os.path.join(FFMPEG_PATH, 'ffprobe.exe')
SEVENZIP = os.path.join(PROGRAM_DIR, 'core', 'extractor', 'bin', platform.machine(), '7z.exe')
if not (os.path.isfile(FFMPEG)): # ffmpeg.exe was not found
FFMPEG = None
logger.warning("Failed to locate ffmpeg.exe. Transcoding disabled!")
logger.warning("Install ffmpeg with x264 support to enable this feature ...")
if not (os.path.isfile(FFPROBE)):
FFPROBE = None
if CHECK_MEDIA:
logger.warning("Failed to locate ffprobe.exe. Video corruption detection disabled!")
logger.warning("Install ffmpeg with x264 support to enable this feature ...")
else:
try:
SEVENZIP = subprocess.Popen(['which', '7z'], stdout=subprocess.PIPE).communicate()[0].strip()
except: pass
if not SEVENZIP:
try:
SEVENZIP = subprocess.Popen(['which', '7zr'], stdout=subprocess.PIPE).communicate()[0].strip()
except: pass
if not SEVENZIP:
try:
SEVENZIP = subprocess.Popen(['which', '7za'], stdout=subprocess.PIPE).communicate()[0].strip()
except: pass
if not SEVENZIP:
SEVENZIP = None
logger.warning("Failed to locate 7zip. Transcosing of disk images and extraction of .7z files will not be possible!")
if os.path.isfile(os.path.join(FFMPEG_PATH, 'ffmpeg')) or os.access(os.path.join(FFMPEG_PATH, 'ffmpeg'), os.X_OK):
FFMPEG = os.path.join(FFMPEG_PATH, 'ffmpeg')
elif os.path.isfile(os.path.join(FFMPEG_PATH, 'avconv')) or os.access(os.path.join(FFMPEG_PATH, 'avconv'), os.X_OK):
FFMPEG = os.path.join(FFMPEG_PATH, 'avconv')
else:
try:
FFMPEG = subprocess.Popen(['which', 'ffmpeg'], stdout=subprocess.PIPE).communicate()[0].strip()
except: pass
if not FFMPEG:
try:
FFMPEG = subprocess.Popen(['which', 'avconv'], stdout=subprocess.PIPE).communicate()[0].strip()
except: pass
if not FFMPEG:
FFMPEG = None
logger.warning("Failed to locate ffmpeg. Transcoding disabled!")
logger.warning("Install ffmpeg with x264 support to enable this feature ...")
if os.path.isfile(os.path.join(FFMPEG_PATH, 'ffprobe')) or os.access(os.path.join(FFMPEG_PATH, 'ffprobe'), os.X_OK):
FFPROBE = os.path.join(FFMPEG_PATH, 'ffprobe')
elif os.path.isfile(os.path.join(FFMPEG_PATH, 'avprobe')) or os.access(os.path.join(FFMPEG_PATH, 'avprobe'), os.X_OK):
FFPROBE = os.path.join(FFMPEG_PATH, 'avprobe')
else:
try:
FFPROBE = subprocess.Popen(['which', 'ffprobe'], stdout=subprocess.PIPE).communicate()[0].strip()
except: pass
if not FFPROBE:
try:
FFPROBE = subprocess.Popen(['which', 'avprobe'], stdout=subprocess.PIPE).communicate()[0].strip()
except: pass
if not FFPROBE:
FFPROBE = None
if CHECK_MEDIA:
logger.warning("Failed to locate ffprobe. Video corruption detection disabled!")
logger.warning("Install ffmpeg with x264 support to enable this feature ...")
# check for a script-defined section; if no section was passed, allow all enabled sections
SECTIONS = CFG[tuple(x for x in CFG if CFG[x].sections and CFG[x].isenabled()) if not section else (section,)]
for section,subsections in SECTIONS.items():
CATEGORIES.extend([subsection for subsection in subsections if CFG[section][subsection].isenabled()])
CATEGORIES = list(set(CATEGORIES))
# create torrent class
TORRENT_CLASS = create_torrent_class(TORRENT_CLIENTAGENT)
# finished initializing
return True
def restart():
install_type = versionCheck.CheckVersion().install_type
status = 0
popen_list = []
if install_type in ('git', 'source'):
popen_list = [sys.executable, APP_FILENAME]
if popen_list:
popen_list += SYS_ARGV
logger.log(u"Restarting nzbToMedia with " + str(popen_list))
logger.close()
p = subprocess.Popen(popen_list, cwd=os.getcwd())
p.wait()
status = p.returncode
os._exit(status)
|
grantsewell/nzbToMedia
|
core/__init__.py
|
Python
|
gpl-3.0
| 33,961
|
from google.appengine.ext import ndb
import json
import re
from models.util import DateTimeProperty
from models.email import EmailReport
import actions
class RuleValidationException(Exception):
"""
RuleValidationException - a generic validation error
"""
def __init__(self, message):
super(RuleValidationException, self).__init__(message)
self.message = message
def __str__(self):
return self.message
class RuleCondition(ndb.Model):
"""
RuleCondition - represents a condition to be matched
"""
attribute = ndb.StringProperty(required=True)
key = ndb.StringProperty()
value = ndb.StringProperty()
matches = ndb.StringProperty(required=True)
def _text_match(self, matches, got, expected):
if matches == 'regex':
# 'expected' holds the configured pattern; 'got' holds the text to search
return re.search(expected, got)
elif matches == 'equals':
return got == expected
else:
return False
def match(self, report):
'''
Returns a boolean indicating if the condition matched the provided
report.
'''
matches = False
# Try to match the headers
if self.attribute == 'header':
for header in report.headers:
if self.key == "" or self._text_match(self.matches,
header.name, self.key):
if self.value == "" or self._text_match(
self.matches, header.value, self.value):
matches = True
# Try to match the text/html
if self.attribute == 'body':
if self.value != "":
if self._text_match(self.matches, report.text, self.value):
matches = True
if self._text_match(self.matches, report.html, self.value):
matches = True
return matches
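# Hedged usage sketch (illustrative; 'report' is assumed to be an
# EmailReport instance and the field values are made up):
#
#   cond = RuleCondition(attribute='header', key='Subject',
#                        value='invoice', matches='equals')
#   cond.match(report)  # True if a 'Subject' header equals 'invoice'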
class RuleAction(ndb.Model):
"""
RuleAction - represents an action to occur when a rule matches an
EmailReport
"""
action = ndb.StringProperty(required=True)
options = ndb.JsonProperty(required=True)
def execute(self, report):
"""
Loads and executes the configured action on the report
"""
action = actions.load(self.action)
if not action:
return None
result = action.execute(report, self.options)
return result
class Rule(ndb.Model):
"""
Rule - represents a rule to apply to incoming EmailReports
"""
name = ndb.StringProperty(required=True)
date_created = DateTimeProperty(indexed=False, auto_now_add=True)
date_updated = DateTimeProperty(indexed=False, auto_now=True)
owner_domain = ndb.StringProperty(required=True)
created_by = ndb.StringProperty(required=True)
active = ndb.BooleanProperty(default=False, required=True)
conditions = ndb.StructuredProperty(RuleCondition, repeated=True)
actions = ndb.StructuredProperty(RuleAction, repeated=True)
@classmethod
def domain_query(cls, domain):
return cls.query(cls.owner_domain == domain)
@classmethod
def get_by_name(cls, base_query, name):
""" Returns the template that matches the given name."""
return base_query.filter(cls.name == name).get()
@classmethod
def validate(cls, data):
"""
Validates a rule
:param data: The dictionary containing the rule attributes
"""
required_props = ['name']
for prop in required_props:
if not data.get(prop):
raise RuleValidationException(
'Missing required field {}'.format(prop))
'''
for condition in data.get('conditions'):
if not condition.validate():
return False
for action in data.get('actions'):
if not action.validate():
return False
'''
return True
def from_dict(self, data):
"""
Sets attributes of a rule according to values given in a dict.
:param data: The dictionary containing the rule attributes
"""
self.name = data.get('name')
self.active = data.get('active')
self.conditions = []
self.actions = []
for condition in data.get('conditions'):
self.conditions.append(
RuleCondition(
attribute=condition.get('attribute'),
key=condition.get('key'),
value=condition.get('body'),
matches=condition.get('matches')))
for action in data.get('actions'):
self.actions.append(
RuleAction(
action=action.get('action'), options=action.get(
'options')))
def evaluate(self, report):
"""
Evaluates the rule against the provided EmailReport. Returns True
if all conditions matched; a match also executes every action.
:param report: The EmailReport to check
"""
for condition in self.conditions:
if not condition.match(report):
return False
# If we made it this far, the rule matched
# Let's perform every action
exceptions = []
for action in self.actions:
try:
action.execute(report)
except Exception as e:
exceptions.append(e)
return True
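# Hedged construction sketch (illustrative payload; the 'quarantine' action
# name is hypothetical). Note that from_dict reads each condition's value
# from the 'body' key of the incoming dict.
#
#   rule = Rule(name='flag-invoices', owner_domain='example.com',
#               created_by='admin@example.com')
#   rule.from_dict({'name': 'flag-invoices', 'active': True,
#                   'conditions': [{'attribute': 'body', 'key': '',
#                                   'body': 'invoice', 'matches': 'regex'}],
#                   'actions': [{'action': 'quarantine', 'options': {}}]})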
|
duo-labs/isthislegit
|
dashboard/models/rule.py
|
Python
|
bsd-3-clause
| 5,412
|
import unittest
import numpy as np
import bayesnet as bn
class TestPower(unittest.TestCase):
def test_power(self):
x = bn.Parameter(2.)
y = 2 ** x
self.assertEqual(y.value, 4)
y.backward()
self.assertEqual(x.grad, 4 * np.log(2))
x = np.random.rand(10, 2)
xp = bn.Parameter(x)
y = xp ** 3
self.assertTrue((y.value == x ** 3).all())
y.backward(np.ones((10, 2)))
self.assertTrue(np.allclose(xp.grad, 3 * x ** 2))
if __name__ == '__main__':
unittest.main()
|
ctgk/BayesianNetwork
|
test/math/test_power.py
|
Python
|
mit
| 555
|
import os
from json import loads
SECRET_KEY = os.environ.get('SECRET_KEY', 'devkey')
REDIS_URL = os.environ.get('REDIS_URL')
|
natfoster82/desertmoon
|
config.py
|
Python
|
mit
| 127
|
#!/usr/bin/env python
#
# Generate pnSeed[] from Pieter's DNS seeder
#
NSEEDS=600
import re
import sys
from subprocess import check_output
def main():
lines = sys.stdin.readlines()
ips = []
pattern = re.compile(r"^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3}):CM_Port")
for line in lines:
m = pattern.match(line)
if m is None:
continue
ip = 0
for i in range(0,4):
ip = ip + (int(m.group(i+1)) << (8*(i)))
if ip == 0:
continue
ips.append(ip)
for row in range(0, min(NSEEDS,len(ips)), 8):
print " " + ", ".join([ "0x%08x"%i for i in ips[row:row+8] ]) + ","
if __name__ == '__main__':
main()
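# Hedged input/output sketch (illustrative): a seeder line such as
# "1.2.3.4:CM_Port" packs little-endian into 0x04030201 and is printed as
# "    0x04030201," ready to paste into pnSeed[].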
|
realspencerdupre/coinmaker
|
sourcecoin/contrib/seeds/makeseeds.py
|
Python
|
gpl-3.0
| 711
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Odoo, Open Source Management Solution
# Copyright (C) 2022 Smile (<https://www.smile.eu>). All Rights Reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name": "Smile API Rest",
"version": "1.0.0",
"sequence": 100,
"category": "Tools",
"author": "Smile",
"license": 'AGPL-3',
"website": 'https://www.smile.eu',
"description": """
    This module provides an API that allows
    you to access models through HTTP requests.
    Documentation generated with Swagger OpenAPI
Specification - Version 2.0 (https://swagger.io/specification/v2/)
Suggestions & Feedback to: Corentin POUHET-BRUNERIE & Julien DRECQ
""",
"depends": [
'base',
],
"data": [
# Security
'security/groups.xml',
'security/ir.model.access.csv',
# Views
'views/api_rest_version_views.xml',
'views/api_rest_path_views.xml',
'views/api_rest_tag_views.xml',
'views/api_rest_log_views.xml',
'views/swagger_templates.xml',
],
'assets': {
'smile_api_rest.assets_swagger': [
'smile_api_rest/static/lib/swagger-ui-3.38.0/swagger-ui.css',
'smile_api_rest/static/lib/swagger-ui-3.38.0/swagger-ui-bundle.js',
'smile_api_rest/static/lib/swagger-ui-3.38.0/swagger-ui-standalone-preset.js',
],
},
"test": [],
'installable': True,
'auto_install': False,
'application': False,
}
|
Smile-SA/odoo_addons
|
smile_api_rest/__manifest__.py
|
Python
|
agpl-3.0
| 2,259
|
"""The mütesync integration."""
from __future__ import annotations
import logging
import async_timeout
import mutesync
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import update_coordinator
from .const import DOMAIN, UPDATE_INTERVAL_IN_MEETING, UPDATE_INTERVAL_NOT_IN_MEETING
PLATFORMS = [Platform.BINARY_SENSOR]
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up mütesync from a config entry."""
client = mutesync.PyMutesync(
entry.data["token"],
entry.data["host"],
hass.helpers.aiohttp_client.async_get_clientsession(),
)
async def update_data():
"""Update the data."""
async with async_timeout.timeout(2.5):
state = await client.get_state()
if state["muted"] is None or state["in_meeting"] is None:
raise update_coordinator.UpdateFailed("Got invalid response")
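        # Adapt the polling rate: refresh faster while a meeting is active.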
if state["in_meeting"]:
coordinator.update_interval = UPDATE_INTERVAL_IN_MEETING
else:
coordinator.update_interval = UPDATE_INTERVAL_NOT_IN_MEETING
return state
coordinator = hass.data.setdefault(DOMAIN, {})[
entry.entry_id
] = update_coordinator.DataUpdateCoordinator(
hass,
logging.getLogger(__name__),
name=DOMAIN,
update_interval=UPDATE_INTERVAL_NOT_IN_MEETING,
update_method=update_data,
)
await coordinator.async_config_entry_first_refresh()
hass.config_entries.async_setup_platforms(entry, PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
if unload_ok:
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
|
home-assistant/home-assistant
|
homeassistant/components/mutesync/__init__.py
|
Python
|
apache-2.0
| 1,978
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class TaskAddResult(Model):
"""Result for a single task added as part of an add task collection operation.
:param status: The status of the add task request. Possible values
include: 'success', 'clientError', 'serverError'
:type status: str or ~azure.batch.models.TaskAddStatus
:param task_id: The ID of the task for which this is the result.
:type task_id: str
:param e_tag: The ETag of the task, if the task was successfully added.
You can use this to detect whether the task has changed between requests.
    In particular, you can pass the ETag with an Update Task request to
    specify that your changes should take effect only if nobody else has
    modified the task in the meantime.
:type e_tag: str
:param last_modified: The last modified time of the task.
:type last_modified: datetime
:param location: The URL of the task, if the task was successfully added.
:type location: str
:param error: The error encountered while attempting to add the task.
:type error: ~azure.batch.models.BatchError
"""
_validation = {
'status': {'required': True},
'task_id': {'required': True},
}
_attribute_map = {
'status': {'key': 'status', 'type': 'TaskAddStatus'},
'task_id': {'key': 'taskId', 'type': 'str'},
'e_tag': {'key': 'eTag', 'type': 'str'},
'last_modified': {'key': 'lastModified', 'type': 'iso-8601'},
'location': {'key': 'location', 'type': 'str'},
'error': {'key': 'error', 'type': 'BatchError'},
}
def __init__(self, status, task_id, e_tag=None, last_modified=None, location=None, error=None):
super(TaskAddResult, self).__init__()
self.status = status
self.task_id = task_id
self.e_tag = e_tag
self.last_modified = last_modified
self.location = location
self.error = error
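# --- Illustrative construction (all values hypothetical) ---
# result = TaskAddResult(status='success', task_id='task-001',
#                        e_tag='0x8D4BCC2E4835770',
#                        location='https://myaccount.region.batch.azure.com/jobs/j1/tasks/task-001')
# `status` and `task_id` are required, matching the _validation map above.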
|
lmazuel/azure-sdk-for-python
|
azure-batch/azure/batch/models/task_add_result.py
|
Python
|
mit
| 2,414
|
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: orderer/configuration.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='orderer/configuration.proto',
package='orderer',
syntax='proto3',
serialized_pb=_b('\n\x1borderer/configuration.proto\x12\x07orderer\"\x1d\n\rConsensusType\x12\x0c\n\x04type\x18\x01 \x01(\t\"Y\n\tBatchSize\x12\x17\n\x0fmaxMessageCount\x18\x01 \x01(\r\x12\x18\n\x10\x61\x62soluteMaxBytes\x18\x02 \x01(\r\x12\x19\n\x11preferredMaxBytes\x18\x03 \x01(\r\"\x1f\n\x0c\x42\x61tchTimeout\x12\x0f\n\x07timeout\x18\x01 \x01(\t\"\x1f\n\x0cKafkaBrokers\x12\x0f\n\x07\x62rokers\x18\x01 \x03(\t\"(\n\x13\x43hannelRestrictions\x12\x11\n\tmax_count\x18\x01 \x01(\x04\x42U\n%org.hyperledger.fabric.protos.ordererZ,github.com/hyperledger/fabric/protos/ordererb\x06proto3')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_CONSENSUSTYPE = _descriptor.Descriptor(
name='ConsensusType',
full_name='orderer.ConsensusType',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='type', full_name='orderer.ConsensusType.type', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=40,
serialized_end=69,
)
_BATCHSIZE = _descriptor.Descriptor(
name='BatchSize',
full_name='orderer.BatchSize',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='maxMessageCount', full_name='orderer.BatchSize.maxMessageCount', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='absoluteMaxBytes', full_name='orderer.BatchSize.absoluteMaxBytes', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='preferredMaxBytes', full_name='orderer.BatchSize.preferredMaxBytes', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=71,
serialized_end=160,
)
_BATCHTIMEOUT = _descriptor.Descriptor(
name='BatchTimeout',
full_name='orderer.BatchTimeout',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='timeout', full_name='orderer.BatchTimeout.timeout', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=162,
serialized_end=193,
)
_KAFKABROKERS = _descriptor.Descriptor(
name='KafkaBrokers',
full_name='orderer.KafkaBrokers',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='brokers', full_name='orderer.KafkaBrokers.brokers', index=0,
number=1, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=195,
serialized_end=226,
)
_CHANNELRESTRICTIONS = _descriptor.Descriptor(
name='ChannelRestrictions',
full_name='orderer.ChannelRestrictions',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='max_count', full_name='orderer.ChannelRestrictions.max_count', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=228,
serialized_end=268,
)
DESCRIPTOR.message_types_by_name['ConsensusType'] = _CONSENSUSTYPE
DESCRIPTOR.message_types_by_name['BatchSize'] = _BATCHSIZE
DESCRIPTOR.message_types_by_name['BatchTimeout'] = _BATCHTIMEOUT
DESCRIPTOR.message_types_by_name['KafkaBrokers'] = _KAFKABROKERS
DESCRIPTOR.message_types_by_name['ChannelRestrictions'] = _CHANNELRESTRICTIONS
ConsensusType = _reflection.GeneratedProtocolMessageType('ConsensusType', (_message.Message,), dict(
DESCRIPTOR = _CONSENSUSTYPE,
__module__ = 'orderer.configuration_pb2'
# @@protoc_insertion_point(class_scope:orderer.ConsensusType)
))
_sym_db.RegisterMessage(ConsensusType)
BatchSize = _reflection.GeneratedProtocolMessageType('BatchSize', (_message.Message,), dict(
DESCRIPTOR = _BATCHSIZE,
__module__ = 'orderer.configuration_pb2'
# @@protoc_insertion_point(class_scope:orderer.BatchSize)
))
_sym_db.RegisterMessage(BatchSize)
BatchTimeout = _reflection.GeneratedProtocolMessageType('BatchTimeout', (_message.Message,), dict(
DESCRIPTOR = _BATCHTIMEOUT,
__module__ = 'orderer.configuration_pb2'
# @@protoc_insertion_point(class_scope:orderer.BatchTimeout)
))
_sym_db.RegisterMessage(BatchTimeout)
KafkaBrokers = _reflection.GeneratedProtocolMessageType('KafkaBrokers', (_message.Message,), dict(
DESCRIPTOR = _KAFKABROKERS,
__module__ = 'orderer.configuration_pb2'
# @@protoc_insertion_point(class_scope:orderer.KafkaBrokers)
))
_sym_db.RegisterMessage(KafkaBrokers)
ChannelRestrictions = _reflection.GeneratedProtocolMessageType('ChannelRestrictions', (_message.Message,), dict(
DESCRIPTOR = _CHANNELRESTRICTIONS,
__module__ = 'orderer.configuration_pb2'
# @@protoc_insertion_point(class_scope:orderer.ChannelRestrictions)
))
_sym_db.RegisterMessage(ChannelRestrictions)
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n%org.hyperledger.fabric.protos.ordererZ,github.com/hyperledger/fabric/protos/orderer'))
try:
# THESE ELEMENTS WILL BE DEPRECATED.
# Please use the generated *_pb2_grpc.py files instead.
import grpc
from grpc.framework.common import cardinality
from grpc.framework.interfaces.face import utilities as face_utilities
from grpc.beta import implementations as beta_implementations
from grpc.beta import interfaces as beta_interfaces
except ImportError:
pass
# @@protoc_insertion_point(module_scope)
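# --- Illustrative usage sketch (hand-written, not generated; assumes the
# --- protobuf runtime is available) ---
# size = BatchSize(maxMessageCount=500, absoluteMaxBytes=10485760,
#                  preferredMaxBytes=2097152)
# payload = size.SerializeToString()
# assert BatchSize.FromString(payload) == size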
|
cophey/fabric
|
bddtests/orderer/configuration_pb2.py
|
Python
|
apache-2.0
| 7,996
|
"""
A class that reflects a decoration to be made on a certain XML node using WX
"""
import types, os
from collections import defaultdict
import glob
import logging
import random
from lxml import etree
#import cStringIO
import wx
sEncoding = "utf-8"
def setEncoding(s):
global sEncoding
sEncoding = s
class DecoSeparator:
"""
    this is not properly a decoration but rather a separator between decorations in the toolbar
"""
def __init__(self, cfg, sSurname, xpCtxt):
"""
cfg is a configuration object
sSurname is the surname of the decoration and the section name in the config file!
xpCtxt is an XPath context
"""
self.sSurname = sSurname
def __str__(self):
return "--------"
def isSeparator(self):
return True
def setXPathContext(self, xpCtxt):
pass
class Deco:
"""A general decoration class"""
def __init__(self, cfg, sSurname, xpCtxt):
"""
cfg is a configuration object
sSurname is the surname of the decoration and the section name in the config file!
xpCtxt is an XPath context
"""
self.sSurname = sSurname
self.xpMain = cfg.get(sSurname, "xpath") # a main XPath that select nodes to be decorated in this way
self.xpCtxt = xpCtxt #this context may include the declaration of some namespace
sEnabled = cfg.get(sSurname, "enabled").lower()
self.bEnabled = sEnabled in ['1', 'yes', 'true']
def isSeparator(cls):
return False
isSeparator = classmethod(isSeparator)
def __str__(self):
return "(Surname=%s xpath==%s)" % (self.sSurname, self.xpMain)
def getDecoClass(cls, sClass):
"""given a decoration type, return the associated class"""
c = globals()[sClass]
if type(c) != types.ClassType: raise Exception("No such decoration type: '%s'"%sClass)
return c
getDecoClass = classmethod(getDecoClass)
def getSurname(self):
return self.sSurname
def getMainXPath(self):
return self.xpMain
def isEnabled(self):
return self.bEnabled
def setEnabled(self, b=True):
self.bEnabled = b
return b
def isActionable(self):
return False
def setXPathContext(self, xpCtxt):
self.xpCtxt = xpCtxt
def xpathError(self, node, xpExpr, eExcpt, sMsg=""):
"""report an xpath error"""
try:
Deco._s_prev_xpath_error
except AttributeError:
Deco._s_prev_xpath_error = ""
Deco._prev_xpath_error_count = 0
iMaxLen = 200 # to truncate the node serialization
s = "-"*60
s += "\n--- XPath ERROR on class %s"%self.__class__
s += "\n--- xpath=%s" % xpExpr
s += "\n--- Python Exception=%s" % str(eExcpt)
if sMsg: s += "\n--- Info: %s" % sMsg
if s == Deco._s_prev_xpath_error:
# let's not overload the console.
return
Deco._s_prev_xpath_error = s
Deco._prev_xpath_error_count += 1
if Deco._prev_xpath_error_count > 10:
return
try:
sNode = etree.tostring(node)
except:
sNode = str(node)
if len(sNode) > iMaxLen: sNode = sNode[:iMaxLen] + "..."
s += "\n--- XML node = %s" % sNode
s += "\n" + "-"*60 + "\n"
logging.warning(s)
def warning(self, sMsg):
"""report an xpath error"""
try:
Deco._s_prev_warning
except AttributeError:
Deco._s_prev_warning = ""
Deco._warning_count = 0
# if sMsg != Deco._s_prev_warning and Deco._warning_count < 1000:
if sMsg != Deco._s_prev_warning:
logging.warning(sMsg)
Deco._warning_count += 1
Deco._s_prev_warning = sMsg
def toInt(cls, s):
try:
return int(s)
except ValueError:
return int(round(float(s)))
toInt = classmethod(toInt)
def xpathToInt(self, node, xpExpr, iDefault=0, bShowError=True):
"""The given XPath expression should return an int on the given node.
The XPath expression should return a scalar or a one-node nodeset
On error, return the default int value
"""
try:
# s = node.xpathEval(xpExpr)
self.xpCtxt.setContextNode(node)
if xpExpr[0] == "|":
                #must be a lambda of the form "|lambda <args>: <expr>|<xpath>|":
                #the xpath part is evaluated first and its result is fed to the lambda
assert xpExpr[:8] == "|lambda ", "Invalid lambda expression %s"%xpExpr
sStartEmpty, sLambdaExpr, xpExprArg, sEndEmpty = xpExpr.split('|')
assert sEndEmpty == "", "Missing last '|'"
sArg = self.xpCtxt.xpathEval(xpExprArg)[0]
sPythonExpr = "(%s)(%s)" % (sLambdaExpr, repr(sArg))
s = eval(sPythonExpr)
else:
s = self.xpCtxt.xpathEval(xpExpr)
if type(s) == types.ListType:
try:
s = s[0].text
except AttributeError:
s = s[0] #should be an attribute value
return Deco.toInt(s)
except Exception, e:
if bShowError: self.xpathError(node, xpExpr, e, "xpathToInt return %d as default value"%iDefault)
return iDefault
def xpathToStr(self, node, xpExpr, sDefault, bShowError=True):
"""The given XPath expression should return a string on the given node
The XPath expression should return a scalar or a one-node nodeset
        On error, return the default string value
"""
try:
# s = node.xpathEval(xpExpr)
self.xpCtxt.setContextNode(node)
s = self.xpCtxt.xpathEval(xpExpr)
if type(s) == types.ListType:
try:
s = s[0].text
except AttributeError:
s = s[0]
return s
except Exception, e:
if bShowError: self.xpathError(node, xpExpr, e, "xpathToStr return %s as default value"%sDefault)
return sDefault
def xpathEval(self, node, xpExpr):
""" evaluate the xpath expression
return None on error
"""
try:
# s = node.xpathEval(xpExpr)
self.xpCtxt.setContextNode(node)
return self.xpCtxt.xpathEval(xpExpr)
except Exception, e:
self.xpathError(node, xpExpr, e, "xpathEval return None")
return None
def beginPage(self, node):
"""called before any sequnce of draw for a given page"""
pass
def endPage(self, node):
"""called before any sequnce of draw for a given page"""
pass
def draw(self, wxh, node):
"""draw the associated decorations, return the list of wx created objects"""
return []
class DecoBBXYWH(Deco):
"""A decoration with a bounding box defined by X,Y for its top-left corner and width/height.
xpX, xpY, xpW, xpH are scalar XPath expressions to get the associated x,y,w,h values from the selected nodes
"""
def __init__(self, cfg, sSurname, xpCtxt):
"""
cfg is a config file
sSurname is the decoration surname and the section name in the config file
This section should contain the following items: x, y, w, h
"""
Deco.__init__(self, cfg, sSurname, xpCtxt)
        #now get the xpath expressions that let us find x,y,w,h from a selected node
self.xpX, self.xpY = cfg.get(sSurname, "xpath_x"), cfg.get(sSurname, "xpath_y")
self.xpW, self.xpH = cfg.get(sSurname, "xpath_w"), cfg.get(sSurname, "xpath_h")
self.xpInc = cfg.get(sSurname, "xpath_incr") #to increase the BB width and height
self._node = None
def __str__(self):
s = Deco.__str__(self)
s += "+(x=%s y=%s w=%s h=%s)" % (self.xpX, self.xpY, self.xpW, self.xpH)
return s
def runXYWHI(self, node):
"""get the X,Y values for a node and put them in cache"""
if self._node != node:
self._x = self.xpathToInt(node, self.xpX, 1)
self._y = self.xpathToInt(node, self.xpY, 1)
self._w = self.xpathToInt(node, self.xpW, 1)
self._h = self.xpathToInt(node, self.xpH, 1)
self._inc = self.xpathToInt(node, self.xpInc, 0)
self._x,self._y = self._x-self._inc, self._y-self._inc
self._w,self._h = self._w+2*self._inc, self._h+2*self._inc
self._node = node
return (self._x, self._y, self._w, self._h, self._inc)
class DecoRectangle(DecoBBXYWH):
"""A rectangle
"""
def __init__(self, cfg, sSurname, xpCtxt):
DecoBBXYWH.__init__(self, cfg, sSurname, xpCtxt)
#now get the xpath expressions that let us find the rectangle line and fill colors
self.xpLineColor = cfg.get(sSurname, "xpath_LineColor")
self.xpLineWidth = cfg.get(sSurname, "xpath_LineWidth")
self.xpFillColor = cfg.get(sSurname, "xpath_FillColor")
self.xpFillStyle = cfg.get(sSurname, "xpath_FillStyle")
def __str__(self):
s = "%s="%self.__class__
s += DecoBBXYWH.__str__(self)
return s
def draw(self, wxh, node):
"""draw itself using the wx handle
return a list of created WX objects"""
lo = DecoBBXYWH.draw(self, wxh, node)
x,y,w,h,inc = self.runXYWHI(node)
sLineColor = self.xpathToStr(node, self.xpLineColor, "#000000")
iLineWidth = self.xpathToInt(node, self.xpLineWidth, 1)
sFillColor = self.xpathToStr(node, self.xpFillColor, "#000000")
sFillStyle = self.xpathToStr(node, self.xpFillStyle, "Solid")
obj = wxh.AddRectangle((x, -y), (w, -h),
LineWidth=iLineWidth,
LineColor=sLineColor,
FillColor=sFillColor,
FillStyle=sFillStyle)
lo.append(obj)
return lo
class DecoTextBox(DecoRectangle):
"""A text within a bounding box (a rectangle)
"""
def __init__(self, cfg, sSurname, xpCtxt):
DecoRectangle.__init__(self, cfg, sSurname, xpCtxt)
self.xpContent = cfg.get(sSurname, "xpath_content")
self.xpFontSize = cfg.get(sSurname, "xpath_font_size")
self.xpFontColor = cfg.get(sSurname, "xpath_font_color")
def __str__(self):
s = "%s="%self.__class__
s += DecoRectangle.__str__(self)
return s
def draw(self, wxh, node):
"""draw itself using the wx handle
return a list of created WX objects"""
lo = DecoRectangle.draw(self, wxh, node)
#add the text itself
txt = self.xpathToStr(node, self.xpContent, "")
iFontSize = self.xpathToInt(node, self.xpFontSize, 8)
sFontColor = self.xpathToStr(node, self.xpFontColor, 'BLACK')
x,y,w,h,inc = self.runXYWHI(node)
obj = wxh.AddScaledTextBox(txt, (x, -y+inc),
Size=iFontSize,
Family=wx.ROMAN, Position='tl',
Color=sFontColor, PadSize=0, LineColor=None)
lo.append(obj)
return lo
class DecoText(DecoBBXYWH):
"""A text
"""
def __init__(self, cfg, sSurname, xpCtxt):
DecoBBXYWH.__init__(self, cfg, sSurname, xpCtxt)
self.xpContent = cfg.get(sSurname, "xpath_content")
self.xpFontSize = cfg.get(sSurname, "xpath_font_size")
self.xpFontColor = cfg.get(sSurname, "xpath_font_color")
def __str__(self):
s = "%s="%self.__class__
s += DecoBBXYWH.__str__(self)
return s
def draw(self, wxh, node):
"""draw itself using the wx handle
return a list of created WX objects"""
lo = DecoBBXYWH.draw(self, wxh, node)
#add the text itself
txt = self.getText(wxh, node)
iFontSize = self.xpathToInt(node, self.xpFontSize, 8)
sFontColor = self.xpathToStr(node, self.xpFontColor, 'BLACK')
x,y,w,h,inc = self.runXYWHI(node)
obj = wxh.AddScaledTextBox(txt, (x, -y-h/2.0),
Size=iFontSize,
Family=wx.ROMAN, Position='cl',
Color=sFontColor, PadSize=0, LineColor=None)
lo.append(obj)
return lo
def getText(self, wxh, node):
return self.xpathToStr(node, self.xpContent, "")
class DecoUnicodeChar(DecoText):
"""A character encoded in Unicode
We assume the unicode index is given in a certain base, e.g. 10 or 16
"""
def __init__(self, cfg, sSurname, xpCtxt):
DecoText.__init__(self, cfg, sSurname, xpCtxt)
self.base = int(cfg.get(sSurname, "code_base"))
def getText(self, wxh, node):
sEncodedText = self.xpathToStr(node, self.xpContent, "")
try:
return eval('u"\\u%04x"' % int(sEncodedText, self.base))
except ValueError:
logging.error("DecoUnicodeChar: ERROR: base=%d code=%s"%(self.base, sEncodedText))
return ""
class DecoImageBox(DecoRectangle):
"""An image with a box around it
"""
def __init__(self, cfg, sSurname, xpCtxt):
DecoRectangle.__init__(self, cfg, sSurname, xpCtxt)
self.xpHRef = cfg.get(sSurname, "xpath_href")
def __str__(self):
s = "%s="%self.__class__
s += DecoRectangle.__str__(self)
return s
def draw(self, wxh, node):
"""draw itself using the wx handle
return a list of created WX objects"""
lo = []
#add the image itself
x,y,w,h,inc = self.runXYWHI(node)
sFilePath = self.xpathToStr(node, self.xpHRef, "")
if sFilePath:
try:
img = wx.Image(sFilePath, wx.BITMAP_TYPE_ANY)
obj = wxh.AddScaledBitmap(img, (x,-y), h)
lo.append(obj)
except Exception, e:
self.warning("DecoImageBox ERROR: File %s: %s"%(sFilePath, str(e)))
lo.append( DecoRectangle.draw(self, wxh, node) )
return lo
class DecoImage(DecoBBXYWH):
"""An image
"""
    # in case the user wants to specify it via the menu
sImageFolder = None
def __init__(self, cfg, sSurname, xpCtxt):
DecoBBXYWH.__init__(self, cfg, sSurname, xpCtxt)
self.xpHRef = cfg.get(sSurname, "xpath_href")
def __str__(self):
s = "%s="%self.__class__
s += DecoBBXYWH.__str__(self)
return s
def draw(self, wxh, node):
"""draw itself using the wx handle
return a list of created WX objects"""
lo = DecoBBXYWH.draw(self, wxh, node)
#add the image itself
x,y,w,h,inc = self.runXYWHI(node)
sFilePath = self.xpathToStr(node, self.xpHRef, "")
if sFilePath:
if self.sImageFolder:
sCandidate = os.path.join(self.sImageFolder, sFilePath)
if os.path.exists(sCandidate):
sFilePath = sCandidate
else:
# maybe the file is in a subfolder ?
# e.g. "S_Aicha_an_der_Donau_004-03_0005.jpg" is in folder "S_Aicha_an_der_Donau_004-03"
try:
sDir = sFilePath[:sFilePath.rindex("_")]
sCandidate = os.path.join(self.sImageFolder, sDir, sFilePath)
if os.path.exists(sCandidate):
sFilePath = sCandidate
except ValueError:
pass
if not os.path.exists(sFilePath):
#maybe the image is in a folder with same name as XML file? (Transkribus style)
sUrl = node.getroottree().docinfo.URL.decode('utf-8') # py2 ...
for sPrefix in ["file://", "file:/"]:
if sUrl[0:len(sPrefix)] == sPrefix:
sLocalDir = os.path.dirname(sUrl[len(sPrefix):])
sDir,_ = os.path.splitext(os.path.basename(sUrl))
sCandidate = os.path.abspath(os.path.join(sLocalDir, sDir, sFilePath))
if os.path.exists(sCandidate):
sFilePath = sCandidate
print(sFilePath)
break
if not os.path.exists(sFilePath):
# maybe we have some pattern??
lCandidate = glob.glob(sFilePath)
bKO = True
for s in lCandidate:
if os.path.exists(s):
sFilePath = s
bKO = False
break
if bKO:
self.warning("WARNING: deco Image: file does not exists: '%s'"%sFilePath)
sFilePath = None
if bool(sFilePath):
img = wx.Image(sFilePath, wx.BITMAP_TYPE_ANY)
try:
if h > 0:
obj = wxh.AddScaledBitmap(img, (x,-y), h)
else:
obj = wxh.AddScaledBitmap(img, (x,-y), img.GetHeight())
lo.append(obj)
except Exception, e:
self.warning("DecoImage ERROR: File %s: %s"%(sFilePath, str(e)))
return lo
class DecoOrder(DecoBBXYWH):
"""Show the order with lines
"""
def __init__(self, cfg, sSurname, xpCtxt):
DecoBBXYWH.__init__(self, cfg, sSurname, xpCtxt)
self.xpLineColor = cfg.get(sSurname, "xpath_LineColor")
self.xpLineWidth = cfg.get(sSurname, "xpath_LineWidth")
def __str__(self):
s = "%s="%self.__class__
s += DecoBBXYWH.__str__(self)
return s
def beginPage(self, node):
"""called before any sequnce of draw for a given page"""
self.bInit = False
def draw(self, wxh, node):
"""draw itself using the wx handle
return a list of created WX objects"""
lo = DecoBBXYWH.draw(self, wxh, node)
x,y,w,h,inc = self.runXYWHI(node)
sLineColor = self.xpathToStr(node, self.xpLineColor, "BLACK")
x, y = int(x + w/2.0), int(y + h/2.0)
if self.bInit:
#draw a line
iLineWidth = self.xpathToInt(node, self.xpLineWidth, 1)
obj = wxh.AddLine( [(self.prevX, -self.prevY), (x, -y)]
, LineWidth=iLineWidth
, LineColor=sLineColor)
lo.append(obj)
else:
self.bInit = True
iEllipseParam = min(w,h) / 2
wxh.AddEllipse((x, -y), (iEllipseParam, -iEllipseParam), LineColor=sLineColor, LineWidth=5, FillStyle="Transparent")
self.prevX, self.prevY = x, y
return lo
class DecoLine(Deco):
"""A line from x1,y1 to x2,y2
"""
def __init__(self, cfg, sSurname, xpCtxt):
Deco.__init__(self, cfg, sSurname, xpCtxt)
self.xpX1, self.xpY1 = cfg.get(sSurname, "xpath_x1"), cfg.get(sSurname, "xpath_y1")
self.xpX2, self.xpY2 = cfg.get(sSurname, "xpath_x2"), cfg.get(sSurname, "xpath_y2")
#now get the xpath expressions that let us find the rectangle line and fill colors
self.xpLineWidth = cfg.get(sSurname, "xpath_LineWidth")
self.xpLineColor = cfg.get(sSurname, "xpath_LineColor")
self._node = None
def __str__(self):
s = "%s="%self.__class__
s += "+(x1=%s y1=%s x2=%s y2=%s)" % (self.xpX1, self.xpY1, self.xpX2, self.xpY2)
return s
def draw(self, wxh, node):
"""draw itself using the wx handle
return a list of created WX objects"""
# print node.serialize()
# print self.xpX
# for n in node.xpathEval(self.xpX): print n.serialize()
iLARGENEG = -9999
lo = Deco.draw(self, wxh, node)
if self._node != node:
self._x1 = self.xpathToInt(node, self.xpX1, iLARGENEG)
self._y1 = self.xpathToInt(node, self.xpY1, iLARGENEG)
self._x2 = self.xpathToInt(node, self.xpX2, iLARGENEG)
self._y2 = self.xpathToInt(node, self.xpY2, iLARGENEG)
self._node = node
if self._x1 != iLARGENEG and self._y1 != iLARGENEG and self._x2 != iLARGENEG and self._y2 != iLARGENEG:
sLineColor = self.xpathToStr(node, self.xpLineColor, "#000000")
iLineWidth = self.xpathToInt(node, self.xpLineWidth, 1)
#draw a line
obj = wxh.AddLine( [(self._x1, -self._y1), (self._x2, -self._y2)]
, LineWidth=iLineWidth
, LineColor=sLineColor)
lo.append(obj)
return lo
class DecoREAD(Deco):
"""
READ PageXml has a special way to encode coordinates.
like:
<Coords points="985,390 1505,390 1505,440 985,440"/>
or
<Baseline points="985,435 1505,435"/>
"""
def __init__(self, cfg, sSurname, xpCtxt):
Deco.__init__(self, cfg, sSurname, xpCtxt)
self.xpCoords = cfg.get(sSurname, "xpath_lxy")
def _getCoordList(self, node):
sCoords = self.xpathToStr(node, self.xpCoords, "")
if not sCoords:
if node.get("id") is None:
self.warning("No coordinates: node = %s" % etree.tostring(node))
else:
self.warning("No coordinates: node id = %s" % node.get("id"))
return [(0,0)]
try:
ltXY = []
for _sPair in sCoords.split(' '):
(sx, sy) = _sPair.split(',')
ltXY.append((Deco.toInt(sx), Deco.toInt(sy)))
except Exception as e:
logging.error("ERROR: polyline coords are bad: '%s' -> '%s'" % (
self.xpCoords, sCoords))
raise e
return ltXY
def _coordList_to_BB(self, ltXY):
"""
return (x1, y1), (x2, y2)
"""
lX = [_x for _x,_y in ltXY]
lY = [_y for _x,_y in ltXY]
return (min(lX), max(lY)), (max(lX), min(lY))
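        # e.g. [(985,390), (1505,390), (1505,440), (985,440)]
        #      -> ((985, 440), (1505, 390)), i.e. (bottom-left, top-right)
        #      in page coordinates where y grows downward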
class DecoREADTextLine(DecoREAD):
"""A TextLine as defined by the PageXml format of the READ project
<TextLine id="line_1551946877389_284" custom="readingOrder {index:0;} Item-name {offset:0; length:11;} Item-price {offset:12; length:2;}">
<Coords points="985,390 1505,390 1505,440 985,440"/>
<Baseline points="985,435 1505,435"/>
<TextEquiv>
<Unicode>Salgadinhos 12</Unicode>
</TextEquiv>
</TextLine>
"""
def __init__(self, cfg, sSurname, xpCtxt):
DecoREAD.__init__(self, cfg, sSurname, xpCtxt)
self.xpContent = cfg.get(sSurname, "xpath_content")
self.xpFontColor = cfg.get(sSurname, "xpath_font_color")
self.xpFit = cfg.get(sSurname, "xpath_fit_text_size").lower()
def __str__(self):
s = "%s="%self.__class__
return s
def _getFontSize(self, node, ltXY, txt, Family=wx.FONTFAMILY_TELETYPE):
"""
compute the font size so as to fit the polygon
and the extent of the 'x' character for this font size
return iFontSize, ExtentX, ExtentY
"""
(x1, y1), (x2, y2) = self._coordList_to_BB(ltXY)
sFit = self.xpathToStr(node, self.xpFit, 'xy', bShowError=False)
try:
iFontSize = int(sFit)
Ex, Ey = None, None
except ValueError:
dc = wx.ScreenDC()
# compute for font size of 24 and do proportional
dc.SetFont(wx.Font(24, Family, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL))
Ex, Ey = dc.GetTextExtent("x")
try:
iFontSizeX = 24 * abs(x2-x1) / Ex / len(txt)
            except (ZeroDivisionError, TypeError):  # empty or missing text
self.warning("absence of text: cannot compute font size along X axis")
iFontSizeX = 8
iFontSizeY = 24 * abs(y2-y1) / Ey
if sFit == "x":
iFontSize = iFontSizeX
elif sFit == "y":
iFontSize = iFontSizeY
else:
iFontSize = min(iFontSizeX, iFontSizeY)
dc.SetFont(wx.Font(iFontSize, Family, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL))
Ex, Ey = dc.GetTextExtent("x")
del dc
return iFontSize, Ex, Ey
def draw(self, wxh, node):
"""draw itself using the wx handle
return a list of created WX objects"""
lo = []
#add the text itself
txt = self.getText(wxh, node)
sFontColor = self.xpathToStr(node, self.xpFontColor, 'BLACK')
# Position and computation of font size
ltXY = self._getCoordList(node)
iFontSize, Ex, Ey = self._getFontSize(node, ltXY, txt, Family=wx.FONTFAMILY_TELETYPE)
# x, y = ltXY[0]
(x, _y1), (_x2, y) = self._coordList_to_BB(ltXY)
obj = wxh.AddScaledText(txt, (x, -y+iFontSize/6), Size=iFontSize
, Family=wx.FONTFAMILY_TELETYPE
, Position='tl'
, Color=sFontColor)
lo.append(obj)
return lo
def getText(self, wxh, node):
return self.xpathToStr(node, self.xpContent, "")
class READ_custom:
"""
Everything related to the PageXML custom attribute
"""
@classmethod
def parseCustomAttr(cls, s, bNoCase=True):
"""
The custom attribute contains data in a CSS style syntax.
We parse this syntax here and return a dictionary of list of dictionary
Example:
parseCustomAttr( "readingOrder {index:4;} structure {type:catch-word;}" )
--> { 'readingOrder': [{ 'index':'4' }], 'structure':[{'type':'catch-word'}] }
"""
dic = defaultdict(list)
s = s.strip()
lChunk = s.split('}')
if lChunk:
for chunk in lChunk: #things like "a {x:1"
chunk = chunk.strip()
if not chunk: continue
try:
sNames, sValues = chunk.split('{') #things like: ("a,b", "x:1 ; y:2")
except Exception:
raise ValueError("Expected a '{' in '%s'"%chunk)
#the dictionary for that name
dicValForName = dict()
lsKeyVal = sValues.split(';') #things like "x:1"
for sKeyVal in lsKeyVal:
if not sKeyVal.strip(): continue #empty
try:
sKey, sVal = sKeyVal.split(':')
except Exception:
raise ValueError("Expected a comma-separated string, got '%s'"%sKeyVal)
sKey = sKey.strip().lower() if bNoCase else sKey.strip()
dicValForName[sKey] = sVal.strip()
lName = sNames.split(',')
for name in lName:
name = name.strip().lower() if bNoCase else name.strip()
dic[name].append(dicValForName)
return dic
class DecoREADTextLine_custom_offset(DecoREADTextLine, READ_custom):
"""
Here we show the annotation by offset found in the custom attribute
"""
def __init__(self, cfg, sSurname, xpCtxt):
DecoREADTextLine.__init__(self, cfg, sSurname, xpCtxt)
self.xpLabel = cfg.get(sSurname, "xpath_label")
self.xpLineColor = cfg.get(sSurname, "xpath_LineColor")
self.xpBackgroundColor = cfg.get(sSurname, "xpath_background_color")
def draw(self, wxh, node):
"""
draw itself using the wx handle
return a list of created WX objects
"""
lo = []
#add the text itself
txt = self.getText(wxh, node)
sFontColor = self.xpathToStr(node, self.xpFontColor, 'BLACK')
sLineColor = self.xpathToStr(node, self.xpLineColor, "#000000")
sBackgroundColor = self.xpathToStr(node, self.xpBackgroundColor, "#000000")
# Position and computation of font size
ltXY = self._getCoordList(node)
iFontSize, Ex, Ey = self._getFontSize(node, ltXY, txt
, Family=wx.FONTFAMILY_TELETYPE)
dCustom = self.parseCustomAttr(node.get("custom"), bNoCase=True)
try:
x0, y0 = ltXY[0]
_ldLabel = dCustom[self.xpathToStr(node, self.xpLabel, "").lower()]
for _dLabel in _ldLabel:
try:
iOffset = int(_dLabel["offset"])
iLength = int(_dLabel["length"])
x = x0 + Ex * iOffset
y = -y0+iFontSize/6
obj = wxh.AddScaledTextBox(txt[iOffset:iOffset+iLength]
, (x, y)
, Size=iFontSize
, Family=wx.FONTFAMILY_TELETYPE
, Position='bl'
, Color=sFontColor
, LineColor=sLineColor
, BackgroundColor=sBackgroundColor)
lo.append(obj)
except KeyError:
pass
except KeyError:
pass
return lo
class DecoPolyLine(DecoREAD):
"""A polyline along
x1,y1,x2,y2, ...,xn,yn
or
x1,y1 x2,y2 .... xn,yn
Example of config:
[TextLine]
type=DecoPolyLine
xpath=.//TextLine/Coords
xpath_lxy=@points
xpath_LineColor="RED"
xpath_FillStyle="Solid"
JL Meunier - March 2016
"""
def __init__(self, cfg, sSurname, xpCtxt):
DecoREAD.__init__(self, cfg, sSurname, xpCtxt)
#now get the xpath expressions that let us find the rectangle line and fill colors
self.xpLineWidth = cfg.get(sSurname, "xpath_LineWidth")
self.xpLineColor = cfg.get(sSurname, "xpath_LineColor")
#cached values
self._node = None
self._lxy = None
def __str__(self):
s = "%s="%self.__class__
s += "+(coords=%s)" % (self.xpCoords)
return s
def draw(self, wxh, node):
"""draw itself using the wx handle
return a list of created WX objects"""
# print node.serialize()
# print self.xpX
# for n in node.xpathEval(self.xpX): print n.serialize()
lo = DecoREAD.draw(self, wxh, node)
if self._node != node:
self._lxy = self._getCoordList(node)
self._node = node
if self._lxy:
sLineColor = self.xpathToStr(node, self.xpLineColor, "#000000")
iLineWidth = self.xpathToInt(node, self.xpLineWidth, 1)
for (x1, y1), (x2, y2) in zip(self._lxy, self._lxy[1:]):
#draw a line
obj = wxh.AddLine( [(x1, -y1), (x2, -y2)]
, LineWidth=iLineWidth
, LineColor=sLineColor)
lo.append(obj)
return lo
class DecoClosedPolyLine(DecoPolyLine):
"""A polyline that closes automatically the shape
JL Meunier - September 2016
"""
def __init__(self, cfg, sSurname, xpCtxt):
DecoPolyLine.__init__(self, cfg, sSurname, xpCtxt)
def _getCoordList(self, node):
lCoord = DecoPolyLine._getCoordList(self, node)
if lCoord: lCoord.append(lCoord[0])
return lCoord
class DecoTextPolyLine(DecoPolyLine, DecoText):
"""A polyline that closes automatically the shape
JL Meunier - September 2016
"""
def __init__(self, cfg, sSurname, xpCtxt):
DecoPolyLine.__init__(self, cfg, sSurname, xpCtxt)
DecoText .__init__(self, cfg, sSurname, xpCtxt)
self.xpX_Inc = cfg.get(sSurname, "xpath_x_incr") #to shift the text
self.xpY_Inc = cfg.get(sSurname, "xpath_y_incr") #to shift the text
def draw(self, wxh, node):
lo = Deco.draw(self, wxh, node)
if self._node != node:
self._lxy = self._getCoordList(node)
self._node = node
#lo = DecoClosedPolyLine.draw(self, wxh, node)
#add the text itself
x, y = self._lxy[0]
x_inc = self.xpathToInt(node, self.xpX_Inc, 0, False)
y_inc = self.xpathToInt(node, self.xpY_Inc, 0, False)
txt = self.xpathToStr(node, self.xpContent, "")
iFontSize = self.xpathToInt(node, self.xpFontSize, 8)
sFontColor = self.xpathToStr(node, self.xpFontColor, 'BLACK')
obj = wxh.AddScaledTextBox(txt, (x+x_inc, -y-y_inc),
Size=iFontSize,
Family=wx.ROMAN, Position='tl',
Color=sFontColor, PadSize=0, LineColor=None)
lo.append(obj)
return lo
class DecoClusterCircle(DecoREAD):
"""
[Cluster]
type=DecoClusterCircle
xpath=.//Cluster
xpath_content=@content
xpath_radius=40
xpath_item_lxy=./pg:Coords/@points
xpath_LineWidth="1"
xpath_FillStyle="Transparent"
LineColors="BLUE SIENNA YELLOW ORANGE RED GREEN"
FillColors="BLUE SIENNA YELLOW ORANGE RED GREEN"
enabled=1
"""
count = 0
def __init__(self, cfg, sSurname, xpCtxt):
DecoREAD.__init__(self, cfg, sSurname, xpCtxt)
self.xpCluster = cfg.get(sSurname, "xpath")
self.xpContent = cfg.get(sSurname, "xpath_content")
self.xpRadius = cfg.get(sSurname, "xpath_radius")
self.xpLineWidth = cfg.get(sSurname, "xpath_LineWidth")
self.xpFillStyle = cfg.get(sSurname, "xpath_FillStyle")
self.lsLineColor = cfg.get(sSurname, "LineColors").split()
self.lsFillColor = cfg.get(sSurname, "FillColors").split()
#cached values
self._node = None
self._laxyr = None
print "DecoClusterCircle lsLineColor = ", self.lsLineColor
print "DecoClusterCircle lsFillColor = ", self.lsFillColor
def __str__(self):
s = "%s="%self.__class__
s += "+(coords=%s)" % (self.xpCoords)
return s
def getArea_and_CenterOfMass(self, lXY):
"""
https://fr.wikipedia.org/wiki/Aire_et_centre_de_masse_d'un_polygone
return A, (Xg, Yg) which are the area and the coordinates (float) of the center of mass of the polygon
"""
if len(lXY) < 2: raise ValueError("Only one point: polygon area is undefined.")
fA = 0.0
xSum, ySum = 0, 0
xprev, yprev = lXY[-1]
for x, y in lXY:
iTerm = xprev*y - yprev*x
fA += iTerm
xSum += iTerm * (xprev+x)
ySum += iTerm * (yprev+y)
xprev, yprev = x, y
if fA == 0.0: raise ValueError("surface == 0.0")
fA = fA / 2
xg, yg = xSum/6/fA, ySum/6/fA
if fA <0:
return -fA, (xg, yg)
else:
return fA, (xg, yg)
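        # Shoelace formula: A = (1/2) * sum(x_{i-1}*y_i - y_{i-1}*x_i), and the
        # centroid is (sum(term*(x_{i-1}+x_i)) / (6A), sum(term*(y_{i-1}+y_i)) / (6A)).
        # Worked check: the unit square [(0,0),(1,0),(1,1),(0,1)] gives
        # A = 1 and a centre of mass of (0.5, 0.5).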
def draw(self, wxh, node):
"""draw itself using the wx handle
return a list of created WX objects"""
DecoClusterCircle.count = DecoClusterCircle.count + 1
lo = DecoREAD.draw(self, wxh, node)
if self._node != node:
self._laxyr = []
#need to go thru each item
ndPage = node.xpath("ancestor::*[local-name()='Page']")[0]
sIds = self.xpathEval(node, self.xpContent)[0]
for sId in sIds.split():
l = self.xpathEval(ndPage, './/*[@id="%s"]'%sId)
ndItem = l[0]
lxy = self._getCoordList(ndItem)
fA, (xg, yg) = self.getArea_and_CenterOfMass(lxy)
r = self.xpathToInt(ndItem, self.xpRadius, 1)
self._laxyr.append( (fA, xg, yg, r) )
self._node = node
if self._laxyr:
iMaxFC = len(self.lsFillColor)
iMaxLC = len(self.lsLineColor)
            if False:  # debug toggle: set to True to pick colours deterministically from the draw counter
Nf = DecoClusterCircle.count
Nl = Nf
else:
Nf = random.randrange(iMaxFC)
Nl = random.randrange(iMaxFC)
iLineWidth = self.xpathToInt(node, self.xpLineWidth, 1)
sFillStyle = self.xpathToStr(node, self.xpFillStyle, "Solid")
for (_a, x, y, r) in self._laxyr:
#draw a circle
sFillColor = self.lsFillColor[Nf % iMaxFC]
if self.lsLineColor:
sLineColor = self.lsLineColor[Nl % iMaxLC]
else:
sLineColor = sFillColor
obj = wxh.AddCircle((x, -y), r,
LineWidth=iLineWidth,
LineColor=sLineColor,
FillColor=sFillColor,
FillStyle=sFillStyle)
# obj = wxh.AddRectangle((x, -y), (20, 20),
# LineWidth=iLineWidth,
# LineColor=sLineColor,
# FillColor=sFillColor,
# FillStyle=sFillStyle)
lo.append(obj)
"""
lo = DecoBBXYWH.draw(self, wxh, node)
x,y,w,h,inc = self.runXYWHI(node)
sLineColor = self.xpathToStr(node, self.xpLineColor, "#000000")
iLineWidth = self.xpathToInt(node, self.xpLineWidth, 1)
sFillColor = self.xpathToStr(node, self.xpFillColor, "#000000")
sFillStyle = self.xpathToStr(node, self.xpFillStyle, "Solid")
obj = wxh.AddRectangle((x, -y), (w, -h),
LineWidth=iLineWidth,
LineColor=sLineColor,
FillColor=sFillColor,
FillStyle=sFillStyle)
"""
return lo
class DecoLink(Deco):
"""A link from x1,y1 to x2,y2
"""
def __init__(self, cfg, sSurname, xpCtxt):
Deco.__init__(self, cfg, sSurname, xpCtxt)
self.xpX1, self.xpY1 = cfg.get(sSurname, "xpath_x1"), cfg.get(sSurname, "xpath_y1")
        #the following xpath is evaluated twice: it first yields another xpath string, which is then evaluated to get the value
self.xpEvalX2, self.xpEvalY2 = cfg.get(sSurname, "eval_xpath_x2"), cfg.get(sSurname, "eval_xpath_y2")
self.xpDfltX2, self.xpDfltY2 = cfg.get(sSurname, "xpath_x2_default"), cfg.get(sSurname, "xpath_y2_default")
#now get the xpath expressions that let us find the rectangle line and fill colors
self.xpLineWidth = cfg.get(sSurname, "xpath_LineWidth")
self.xpLineColor = cfg.get(sSurname, "xpath_LineColor")
self._node = None
def __str__(self):
s = "%s="%self.__class__
s += "+(x1=%s y1=%s x2=%s y2=%s)" % (self.xpX1, self.xpY1, self.xpEvalX2, self.xpEvalY2)
return s
def draw(self, wxh, node):
"""draw itself using the wx handle
return a list of created WX objects"""
# print node.serialize()
# print self.xpX
# for n in node.xpathEval(self.xpX): print n.serialize()
iLARGENEG = -9999
lo = Deco.draw(self, wxh, node)
if self._node != node:
self._x1 = self.xpathToInt(node, self.xpX1, iLARGENEG)
self._y1 = self.xpathToInt(node, self.xpY1, iLARGENEG)
#double evaluation, and a default value if necessary
xpX2 = self.xpathToStr(node, self.xpEvalX2, '""')
self._x2 = self.xpathToInt(node, xpX2, iLARGENEG, False) #do not show any error
if self._x2 == iLARGENEG: self._x2 = self.xpathToInt(node, self.xpDfltX2, iLARGENEG)
xpY2 = self.xpathToStr(node, self.xpEvalY2, '""')
self._y2 = self.xpathToInt(node, xpY2, iLARGENEG, False) #do not show any error
if self._y2 == iLARGENEG: self._y2 = self.xpathToInt(node, self.xpDfltY2, iLARGENEG)
self._node = node
if self._x1 != iLARGENEG and self._y1 != iLARGENEG and self._x2 != iLARGENEG and self._y2 != iLARGENEG:
sLineColor = self.xpathToStr(node, self.xpLineColor, "#000000")
iLineWidth = self.xpathToInt(node, self.xpLineWidth, 1)
#draw a line
obj = wxh.AddLine( [(self._x1, -self._y1), (self._x2, -self._y2)]
, LineWidth=iLineWidth
, LineColor=sLineColor)
lo.append(obj)
return lo
class DecoClickableRectangleSetAttr(DecoBBXYWH):
"""A rectangle
clicking on it add/remove an attribute
the rectangle color is indicative of the presence/absence of the attribute
"""
def __init__(self, cfg, sSurname, xpCtxt):
DecoBBXYWH.__init__(self, cfg, sSurname, xpCtxt)
#now get the xpath expressions that let us find the rectangle line and fill colors
self.xpLineColor = cfg.get(sSurname, "xpath_LineColor")
self.xpLineWidth = cfg.get(sSurname, "xpath_LineWidth")
self.xpFillColor = cfg.get(sSurname, "xpath_FillColor")
self.xpFillStyle = cfg.get(sSurname, "xpath_FillStyle")
self.xpAttrName = cfg.get(sSurname, "xpath_AttrName")
self.xpAttrValue = cfg.get(sSurname, "xpath_AttrValue")
self.dInitialValue = {}
self.xpLineColorSlctd = cfg.get(sSurname, "xpath_LineColor_Selected")
self.xpLineWidthSlctd = cfg.get(sSurname, "xpath_LineWidth_Selected")
self.xpFillColorSlctd = cfg.get(sSurname, "xpath_FillColor_Selected")
self.xpFillStyleSlctd = cfg.get(sSurname, "xpath_FillStyle_Selected")
def __str__(self):
s = "%s="%self.__class__
s += DecoBBXYWH.__str__(self)
return s
def isActionable(self):
return True
def draw(self, wxh, node):
"""draw itself using the wx handle
return a list of created WX objects"""
lo = DecoBBXYWH.draw(self, wxh, node)
x,y,w,h,inc = self.runXYWHI(node)
sAttrName = self.xpathToStr(node, self.xpAttrName , None)
sAttrValue = self.xpathToStr(node, self.xpAttrValue, None)
if sAttrName and sAttrValue != None:
            if node.get(sAttrName) == sAttrValue:  # lxml API: get(), not libxml2's prop()
sLineColor = self.xpathToStr(node, self.xpLineColorSlctd, "#000000")
iLineWidth = self.xpathToInt(node, self.xpLineWidthSlctd, 1)
sFillColor = self.xpathToStr(node, self.xpFillColorSlctd, "#000000")
sFillStyle = self.xpathToStr(node, self.xpFillStyleSlctd, "Solid")
else:
sLineColor = self.xpathToStr(node, self.xpLineColor, "#000000")
iLineWidth = self.xpathToInt(node, self.xpLineWidth, 1)
sFillColor = self.xpathToStr(node, self.xpFillColor, "#000000")
sFillStyle = self.xpathToStr(node, self.xpFillStyle, "Solid")
obj = wxh.AddRectangle((x, -y), (w, -h),
LineWidth=iLineWidth,
LineColor=sLineColor,
FillColor=sFillColor,
FillStyle=sFillStyle)
lo = [obj] + lo
return lo
def act(self, obj, node):
"""
Toggle the attribute value
"""
s = "do nothing"
sAttrName = self.xpathToStr(node, self.xpAttrName , None)
sAttrValue = self.xpathToStr(node, self.xpAttrValue, None)
if sAttrName and sAttrValue != None:
try:
initialValue = self.dInitialValue[node]
except KeyError:
                initialValue = node.get(sAttrName)  # first time: remember the original value
self.dInitialValue[node] = initialValue
if node.get(sAttrName) == sAttrValue:
#back to previous value
                if initialValue is None or initialValue == sAttrValue:
                    #very special case: when an attr was set, then saved, re-clicking on it will remove it.
del node.attrib[sAttrName]
s = "Removal of @%s"%sAttrName
else:
node.set(sAttrName, initialValue)
s = '@%s := "%s"'%(sAttrName,initialValue)
else:
if not sAttrValue:
del node.attrib[sAttrName]
s = "Removal of @%s"%sAttrName
else:
node.set(sAttrName, sAttrValue)
s = '@%s := "%s"'%(sAttrName,sAttrValue)
return s
class DecoClickableRectangleJump(DecoBBXYWH):
"""A rectangle
clicking on it jump to a node
"""
def __init__(self, cfg, sSurname, xpCtxt):
DecoBBXYWH.__init__(self, cfg, sSurname, xpCtxt)
#now get the xpath expressions that let us find the rectangle line and fill colors
self.xpLineColor = cfg.get(sSurname, "xpath_LineColor")
self.xpLineWidth = cfg.get(sSurname, "xpath_LineWidth")
self.xpFillColor = cfg.get(sSurname, "xpath_FillColor")
self.xpFillStyle = cfg.get(sSurname, "xpath_FillStyle")
self.xp_xTo = cfg.get(sSurname, "xpath_xTo")
self.xp_yTo = cfg.get(sSurname, "xpath_yTo")
self.xp_wTo = cfg.get(sSurname, "xpath_wTo")
self.xp_hTo = cfg.get(sSurname, "xpath_hTo")
self.xpAttrToId = cfg.get(sSurname, "xpath_ToId")
self.config = cfg.jl_hack_cfg #HACK
def __str__(self):
s = "%s="%self.__class__
s += DecoBBXYWH.__str__(self)
return s
def isActionable(self):
return True
def draw(self, wxh, node):
"""draw itself using the wx handle
return a list of created WX objects"""
lo = DecoBBXYWH.draw(self, wxh, node)
x,y,w,h,inc = self.runXYWHI(node)
sLineColor = self.xpathToStr(node, self.xpLineColor, "#000000")
iLineWidth = self.xpathToInt(node, self.xpLineWidth, 1)
sFillColor = self.xpathToStr(node, self.xpFillColor, "#000000")
sFillStyle = self.xpathToStr(node, self.xpFillStyle, "Solid")
obj = wxh.AddRectangle((x, -y), (w, -h), LineWidth=iLineWidth,
LineColor=sLineColor,
FillColor=sFillColor,
FillStyle=sFillStyle)
lo = [obj] + lo
return lo
def act(self, obj, node):
"""
return the page number of the destination
or None on error
"""
sPageTag = self.config.getPageTag()
sPageNumberAttr = self.config.getPageNumberAttr()
number = None
x,y,w,h = None, None, None, None
bbHighlight = None
sToId = self.xpathToStr(node, self.xpAttrToId , None)
if sToId:
            ln = self.xpathEval(node.getroottree().getroot(), '//*[@id="%s"]'%sToId.strip())
if ln:
#find the page number
ndTo = nd = ln[0]
#while nd and nd.name != "PAGE": nd = nd.parent
                while nd is not None and nd.tag != sPageTag: nd = nd.getparent()
try:
#number = max(0, int(nd.prop("number")) - 1)
number = max(0, self.xpathToInt(nd, sPageNumberAttr, 1, True) - 1)
#maybe we can also indicate the precise arrival point?
if self.xp_xTo and self.xp_yTo and self.xp_hTo and self.xp_wTo:
x = self.xpathToInt(ndTo, self.xp_xTo, None)
y = self.xpathToInt(ndTo, self.xp_yTo, None)
w = self.xpathToInt(ndTo, self.xp_wTo, None)
h = self.xpathToInt(ndTo, self.xp_hTo, None)
if x==None or y==None or w==None or h==None:
x,y,w,h = None, None, None, None
except:
pass
return number,x,y,w,h
class DecoClickableRectangleJumpToPage(DecoBBXYWH):
"""A rectangle
clicking on it jump to a page
"""
def __init__(self, cfg, sSurname, xpCtxt):
DecoBBXYWH.__init__(self, cfg, sSurname, xpCtxt)
#now get the xpath expressions that let us find the rectangle line and fill colors
self.xpLineColor = cfg.get(sSurname, "xpath_LineColor")
self.xpLineWidth = cfg.get(sSurname, "xpath_LineWidth")
self.xpFillColor = cfg.get(sSurname, "xpath_FillColor")
self.xpFillStyle = cfg.get(sSurname, "xpath_FillStyle")
self.xpAttrToPageNumber = cfg.get(sSurname, "xpath_ToPageNumber")
def __str__(self):
s = "%s="%self.__class__
s += DecoBBXYWH.__str__(self)
return s
def isActionable(self):
return True
def draw(self, wxh, node):
"""draw itself using the wx handle
return a list of created WX objects"""
lo = DecoBBXYWH.draw(self, wxh, node)
x,y,w,h,inc = self.runXYWHI(node)
sLineColor = self.xpathToStr(node, self.xpLineColor, "#000000")
iLineWidth = self.xpathToInt(node, self.xpLineWidth, 1)
sFillColor = self.xpathToStr(node, self.xpFillColor, "#000000")
sFillStyle = self.xpathToStr(node, self.xpFillStyle, "Solid")
obj = wxh.AddRectangle((x, -y), (w, -h), LineWidth=iLineWidth,
LineColor=sLineColor,
FillColor=sFillColor,
FillStyle=sFillStyle)
lo = [obj] + lo
return lo
def act(self, obj, node):
"""
return the page number of the destination
or None on error
"""
index,x,y,w,h = None,None,None,None,None
sToPageNum = self.xpathToStr(node, self.xpAttrToPageNumber , None)
if sToPageNum:
index = int(sToPageNum) - 1
return index,x,y,w,h
|
Transkribus/TranskribusDU
|
TranskribusDU/visu/deco.py
|
Python
|
bsd-3-clause
| 51,210
|
prob_trans = {
'P': {
'P': -10.820039589758984,
'B': -100,
'M': -13.523481487774999,
'S': -100,
'X': -100,
'L': -12.686084698370507,
'F': -12.642282075712114,
'W': -14.693552740425254,
'D': -9.2953900389075006,
'G': -12.901793271197199,
'K': -100,
'I': -11.592459951213437,
'A': -100,
'Z': -100,
'J': -10.838688148837056,
'C': -9.4840665875838326,
},
'B': {
'P': -100,
'B': -100,
'M': -100,
'S': -100,
'X': -100,
'L': -100,
'F': -100,
'W': -100,
'D': -100,
'G': -100,
'K': -100,
'I': -100,
'A': -8.8800857786311713,
'Z': -5.1490602816124396,
'J': -100,
'C': -100,
},
'M': {
'P': -11.927932687501565,
'B': -100,
'M': -12.436430021507965,
'S': -100,
'X': -100,
'L': -12.560043977475143,
'F': -16.890777317761472,
'W': -16.890777317761472,
'D': -10.260093932119101,
'G': -14.94486716870616,
'K': -100,
'I': -14.492882044963103,
'A': -9.9830220387793371,
'Z': -100,
'J': -12.038747053841856,
'C': -10.546896883635142,
},
'S': {
'P': -12.436430021507965,
'B': -100,
'M': -13.755283101832324,
'S': -100,
'X': -100,
'L': -11.543669787044005,
'F': -11.263156204070837,
'W': -100,
'D': -12.246386418620101,
'G': -7.4411051342745562,
'K': -8.5890075546443079,
'I': -10.725359463530053,
'A': -7.7307833202220291,
'Z': -2.9515900151036014,
'J': -11.534191043089461,
'C': -8.9145253740158505,
},
'X': {
'P': -13.364416793145312,
'B': -100,
'M': -14.057563973705257,
'S': -100,
'X': -100,
'L': -12.521329465294452,
'F': -12.379917811244624,
'W': -100,
'D': -12.978754312333328,
'G': -8.893786911923824,
'K': -9.8234574691079981,
'I': -11.828182284734506,
'A': -100,
'Z': -100,
'J': -12.54697189590779,
'C': -9.8234574691079981,
},
'L': {
'P': -11.224350629649042,
'B': -100,
'M': -14.588192224767427,
'S': -100,
'X': -100,
'L': -14.000405559865309,
'F': -13.946338338595034,
'W': -100,
'D': -9.5936863126010561,
'G': -12.671269612585366,
'K': -16.890777317761472,
'I': -12.830334307215054,
'A': -100,
'Z': -100,
'J': -10.714910047655712,
'C': -9.729155314822286,
},
'F': {
'P': -12.42486919910689,
'B': -100,
'M': -15.281339405327373,
'S': -100,
'X': -100,
'L': -14.94486716870616,
'F': -11.963523632604268,
'W': -13.019576306853581,
'D': -9.0024428170228195,
'G': -12.046590231302883,
'K': -15.281339405327373,
'I': -12.368988740712433,
'A': -100,
'Z': -100,
'J': -11.797027116954711,
'C': -10.012450849470149,
},
'W': {
'P': -14.588192224767427,
'B': -100,
'M': -100,
'S': -100,
'X': -100,
'L': -15.504482956641583,
'F': -13.755283101832324,
'W': -100,
'D': -11.880142023665218,
'G': -12.23681696760395,
'K': -100,
'I': -14.492882044963103,
'A': -100,
'Z': -100,
'J': -14.693552740425254,
'C': -12.078592962389056,
},
'D': {
'P': -12.103285574979427,
'B': -5.2869970379472049,
'M': -11.714627585187644,
'S': -100,
'X': -8.4600138543436234,
'L': -12.23681696760395,
'F': -11.985502539323043,
'W': -12.813239873855753,
'D': -8.115382359215964,
'G': -9.9651821206510061,
'K': -13.55857280758627,
'I': -10.725359463530053,
'A': -100,
'Z': -100,
'J': -11.457055314207233,
'C': -8.5039206280732387,
},
'G': {
'P': -9.8234574691079981,
'B': -100,
'M': -10.262735941581941,
'S': -100,
'X': -100,
'L': -9.6358925076841349,
'F': -9.7338209531458375,
'W': -11.949134895152168,
'D': -6.2227060347275511,
'G': -8.3386031574499899,
'K': -11.686770630684679,
'I': -8.8503304364583641,
'A': -100,
'Z': -100,
'J': -8.9598519452780856,
'C': -6.7844713777057351,
},
'K': {
'P': -13.671901492893273,
'B': -6.9912977866228845,
'M': -13.425041414961747,
'S': -100,
'X': -9.7695248645169315,
'L': -15.792165029093363,
'F': -14.325827960299936,
'W': -100,
'D': -9.2720349400910607,
'G': -11.675841560152488,
'K': -14.118188595521692,
'I': -12.731894234401802,
'A': -100,
'Z': -100,
'J': -12.847726049926923,
'C': -9.8328793803496168,
},
'I': {
'P': -11.543669787044005,
'B': -100,
'M': -12.865425627026324,
'S': -100,
'X': -100,
'L': -13.227215671631827,
'F': -13.364416793145312,
'W': -14.492882044963103,
'D': -8.2681436138872382,
'G': -11.292355358763098,
'K': -15.792165029093363,
'I': -11.703391511920719,
'A': -100,
'Z': -100,
'J': -11.692280286495647,
'C': -9.0607492352276342,
},
'A': {
'P': -10.040651151615974,
'B': -100,
'M': -11.24887024682336,
'S': -100,
'X': -100,
'L': -9.6309577073982879,
'F': -9.2833958921216819,
'W': -100,
'D': -10.107452117157512,
'G': -5.9092033653439611,
'K': -7.0253032074235611,
'I': -8.7921344740020562,
'A': -100,
'Z': -100,
'J': -9.5243321694338743,
'C': -6.990043553421966,
},
'Z': {
'P': -100,
'B': -100,
'M': -100,
'S': -100,
'X': -100,
'L': -100,
'F': -100,
'W': -100,
'D': -100,
'G': -100,
'K': -100,
'I': -100,
'A': -5.41998720939403,
'Z': -0.089780144201133033,
'J': -100,
'C': -100,
},
'J': {
'P': -10.608510570865466,
'B': -100,
'M': -13.040629716051415,
'S': -100,
'X': -100,
'L': -12.336900426160932,
'F': -12.656670813164213,
'W': -13.755283101832324,
'D': -8.3789987590467341,
'G': -12.326429126293638,
'K': -15.504482956641583,
'I': -12.448126061271157,
'A': -100,
'Z': -100,
'J': -10.3297466518649,
'C': -9.2888753578863081,
},
'C': {
'P': -10.164543915402726,
'B': -100,
'M': -11.330095686745945,
'S': -100,
'X': -100,
'L': -11.7608786028384,
'F': -11.934950260160212,
'W': -11.920964018185472,
'D': -5.9359438023583255,
'G': -9.3369664657532407,
'K': -12.920485404209352,
'I': -10.14554096827711,
'A': -100,
'Z': -100,
'J': -9.5111451651519197,
'C': -6.7268889655554274,
},
}
|
yuanlisky/linlp
|
linlp/algorithm/viterbiMat/prob_trans_organization.py
|
Python
|
apache-2.0
| 5,301
|
# Under MIT License, see LICENSE.txt
STRATEGY_COMMAND_TYPE = 5002
TACTIC_COMMAND_TYPE = 5003
class UIDebugCommand(object):
def __init__(self, raw_cmd):
# print(raw_cmd)
self.data = raw_cmd['data']
self.cmd_type = raw_cmd['type']
def is_strategy_cmd(self):
return self.cmd_type == STRATEGY_COMMAND_TYPE
def is_tactic_cmd(self):
return self.cmd_type == TACTIC_COMMAND_TYPE
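# A minimal usage sketch (the payload keys 'type' and 'data' follow the
# constructor above; the strategy value itself is a hypothetical example):
#   cmd = UIDebugCommand({'type': STRATEGY_COMMAND_TYPE, 'data': {'strategy': 'offense'}})
#   assert cmd.is_strategy_cmd() and not cmd.is_tactic_cmd()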
|
wonwon0/StrategyIA
|
RULEngine/Debug/ui_debug_command.py
|
Python
|
mit
| 432
|
import sys
sys.path.append('.')
from app import app as application
|
xhacker/neural-alt
|
server/passenger_wsgi.py
|
Python
|
mit
| 67
|
# -*- coding: utf-8 -*-
"""
@author: Eric Vanhove
"""
# Imports
import boto3
# Constants
# Make sure the prefix ends with "/" unless it is empty
prefix = ''
bucket_name = 'aws-logs-079119988851-us-west-2'
path = '231856634451o.jpg'
fileName = '231856634451o.jpg'
# Ensure that your credentials are in place
s3client = boto3.client('s3')
s3resource = boto3.resource('s3')
# Get list of buckets on S3
listOfBuckets = s3client.list_buckets()
for bucket in listOfBuckets['Buckets']:
print('Bucket: {},\n\tCreated {}'.format(bucket['Name'], bucket['CreationDate']))
results = s3client.list_objects(Bucket=bucket['Name'],
Prefix=prefix,
Delimiter='/')
# CommonPrefixes are the "subfolders"
# Yes, I know, S3 has no real subfolders...
    for o in results.get('CommonPrefixes') or []:
        print('sub folder : {}'.format(o.get('Prefix')))
    print('Files:')
    for o in results.get('Contents') or []:
        print('\t{}'.format(o.get('Key')))
#### don't run the below code!
# To create a bucket
s3client.create_bucket(Bucket=bucket_name)
# To upload a file
s3resource.Bucket(name = bucket_name).upload_file(Filename = path,
Key = fileName,
ExtraArgs = None,
Callback = None,
Config = None)
def renameObject(bucketName, oldObjectName, newObjectName, s3client):
"""
This will rename an object in a bucket to the new name.
The old key will be deleted.
bucketName = string
oldObjectName = string
newObjectName = string
s3client = boto3.client('s3')
Returns True on success, False on failure
No error checking for now, no try/catch
"""
successes = [200, 201, 202, 203, 204, 205, 206, 207, 208, 226]
copySource = {
'Bucket' : bucketName,
'Key' : oldObjectName
}
copyResponse = s3client.copy_object(
Bucket = bucketName,
CopySource = copySource,
Key = newObjectName)
if(copyResponse['ResponseMetadata']['HTTPStatusCode'] not in successes):
return(False)
# Need to come up with a way to pass back errors
deleteResponse = s3client.delete_object(
Bucket = bucketName,
Key = oldObjectName)
return(deleteResponse['ResponseMetadata']['HTTPStatusCode'] in successes)
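# Hypothetical usage of renameObject (bucket and key names are placeholders):
# S3 has no native rename, so this is a server-side copy followed by a delete.
#   renameObject('my-example-bucket', 'old/photo.jpg', 'new/photo.jpg', s3client)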
|
evohnave/myAWS
|
Bucket_and_subfolders.py
|
Python
|
mit
| 2,582
|
#!/usr/bin/env python
# Copyright 2021 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import shutil
import sys
from tsc_ignore_errors import run_tsc_ignore_errors
webgpu_cts_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
third_party_dir = os.path.dirname(webgpu_cts_dir)
node_dir = os.path.join(third_party_dir, 'node')
try:
old_sys_path = sys.path
sys.path = [node_dir] + sys.path
from node import RunNode
finally:
sys.path = old_sys_path
def compile_src(out_dir):
# First, clean the output directory so deleted files are pruned from old builds.
shutil.rmtree(out_dir)
run_tsc_ignore_errors([
'--project',
os.path.join(webgpu_cts_dir, 'src', 'tsconfig.json'),
'--outDir',
out_dir,
'--noEmit',
'false',
'--noEmitOnError',
'false',
'--declaration',
'false',
'--sourceMap',
'false',
'--target',
'ES2017',
])
def compile_src_for_node(out_dir):
# First, clean the output directory so deleted files are pruned from old builds.
shutil.rmtree(out_dir)
run_tsc_ignore_errors([
'--project',
os.path.join(webgpu_cts_dir, 'src', 'node.tsconfig.json'),
'--outDir',
out_dir,
'--noEmit',
'false',
'--noEmitOnError',
'false',
'--declaration',
'false',
'--sourceMap',
'false',
'--target',
'ES6',
])
if __name__ == '__main__':
if len(sys.argv) != 2:
print('Usage: compile_src.py GEN_DIR')
sys.exit(1)
gen_dir = sys.argv[1]
# Compile the CTS src.
compile_src(os.path.join(gen_dir, 'src'))
compile_src_for_node(os.path.join(gen_dir, 'src-node'))
# Run gen_listings.js to overwrite the dummy src/webgpu/listings.js created
# from transpiling src/
RunNode([
os.path.join(gen_dir, 'src-node', 'common', 'tools',
'gen_listings.js'),
'--no-validate',
os.path.join(gen_dir, 'src'),
os.path.join(gen_dir, 'src-node', 'webgpu'),
])
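# Example invocation (the output directory is illustrative):
#   python compile_src.py out/gen/third_party/webgpu-cts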
|
nwjs/chromium.src
|
third_party/webgpu-cts/scripts/compile_src.py
|
Python
|
bsd-3-clause
| 2,225
|
from cloudify import ctx
from cloudify.state import ctx_parameters
ctx.instance.runtime_properties['prop1'] = ctx_parameters['target_id']
|
cloudify-cosmo/cloudify-manager
|
tests/integration_tests/resources/dsl/idd/scripts/set_target_attribute.py
|
Python
|
apache-2.0
| 139
|
# Claire Jaja
# 11/1/2014
#
# Project Euler
# Problem 2
# Even Fibonacci numbers
#
# Each new term in the Fibonacci sequence is generated by adding
# the previous two terms.
# By starting with 1 and 2, the first 10 terms will be:
# 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, ...
# By considering the terms in the Fibonacci sequence
# whose values do not exceed four million,
# find the sum of the even-valued terms.
def main():
max_value = 4000000
# set up first three terms
previous_previous_term = 1
previous_term = 1
current_term = 2
my_sum = 0
while current_term < max_value:
if current_term % 2 == 0:
my_sum += current_term
previous_previous_term = previous_term
previous_term = current_term
current_term = previous_term + previous_previous_term
print(my_sum)
if __name__ == "__main__":
main()
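# For max_value = 4000000 this prints 4613732: the even Fibonacci terms
# (2, 8, 34, 144, ...) are every third term of the sequence.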
|
clairejaja/project-euler
|
src/main/python/problem2/even_fibonacci_numbers.py
|
Python
|
mit
| 876
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-05-27 17:04
from __future__ import unicode_literals
import string
from django.db import migrations
def create_short_name(apps, schema_editor):
    # We can't import the Event model directly as it may be a newer
    # version than this migration expects. We use the historical version.
Event = apps.get_model('wedding', 'Event')
for event in Event.objects.all():
short_name = "".join(
i for i in event.name.lower() if i in string.ascii_lowercase
)
short_name = short_name[:20]
event.short_name = short_name
event.save()
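# Example: an Event named "Rehearsal Dinner!" gets short_name "rehearsaldinner";
# every character outside a-z is dropped and the result is capped at 20 chars.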
class Migration(migrations.Migration):
dependencies = [
('wedding', '0009_auto_20170527_1704'),
]
operations = [
migrations.RunPython(create_short_name),
]
|
joshsimmons/animportantdate
|
animportantdate/wedding/migrations/0010_auto_20170527_1704.py
|
Python
|
mit
| 838
|
"""
Artificial Intelligence for Humans
Volume 1: Fundamental Algorithms
Python Version
http://www.aifh.org
http://www.jeffheaton.com
Code repository:
https://github.com/jeffheaton/aifh
Copyright 2013 by Jeff Heaton
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
For more information on Heaton Research copyrights, licenses
and trademarks visit:
http://www.heatonresearch.com/copyright
"""
__author__ = 'jheaton'
|
PeterLauris/aifh
|
vol1/python-examples/test/aifh/__init__.py
|
Python
|
apache-2.0
| 972
|
"""The Fronius integration."""
from __future__ import annotations
import asyncio
from collections.abc import Callable
import logging
from typing import Final, TypeVar
from pyfronius import Fronius, FroniusError
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_MODEL, ATTR_SW_VERSION, CONF_HOST, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.entity import DeviceInfo
from .const import DOMAIN, SOLAR_NET_ID_SYSTEM, FroniusDeviceInfo
from .coordinator import (
FroniusCoordinatorBase,
FroniusInverterUpdateCoordinator,
FroniusLoggerUpdateCoordinator,
FroniusMeterUpdateCoordinator,
FroniusOhmpilotUpdateCoordinator,
FroniusPowerFlowUpdateCoordinator,
FroniusStorageUpdateCoordinator,
)
_LOGGER: Final = logging.getLogger(__name__)
PLATFORMS: Final = [Platform.SENSOR]
FroniusCoordinatorType = TypeVar("FroniusCoordinatorType", bound=FroniusCoordinatorBase)
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up fronius from a config entry."""
host = entry.data[CONF_HOST]
fronius = Fronius(async_get_clientsession(hass), host)
solar_net = FroniusSolarNet(hass, entry, fronius)
await solar_net.init_devices()
hass.data.setdefault(DOMAIN, {})[entry.entry_id] = solar_net
hass.config_entries.async_setup_platforms(entry, PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
if unload_ok:
solar_net = hass.data[DOMAIN].pop(entry.entry_id)
while solar_net.cleanup_callbacks:
solar_net.cleanup_callbacks.pop()()
return unload_ok
class FroniusSolarNet:
"""The FroniusSolarNet class routes received values to sensor entities."""
def __init__(
self, hass: HomeAssistant, entry: ConfigEntry, fronius: Fronius
) -> None:
"""Initialize FroniusSolarNet class."""
self.hass = hass
self.cleanup_callbacks: list[Callable[[], None]] = []
self.config_entry = entry
self.coordinator_lock = asyncio.Lock()
self.fronius = fronius
self.host: str = entry.data[CONF_HOST]
# entry.unique_id is either logger uid or first inverter uid if no logger available
# prepended by "solar_net_" to have individual device for whole system (power_flow)
self.solar_net_device_id = f"solar_net_{entry.unique_id}"
self.system_device_info: DeviceInfo | None = None
self.inverter_coordinators: list[FroniusInverterUpdateCoordinator] = []
self.logger_coordinator: FroniusLoggerUpdateCoordinator | None = None
self.meter_coordinator: FroniusMeterUpdateCoordinator | None = None
self.ohmpilot_coordinator: FroniusOhmpilotUpdateCoordinator | None = None
self.power_flow_coordinator: FroniusPowerFlowUpdateCoordinator | None = None
self.storage_coordinator: FroniusStorageUpdateCoordinator | None = None
async def init_devices(self) -> None:
"""Initialize DataUpdateCoordinators for SolarNet devices."""
if self.config_entry.data["is_logger"]:
self.logger_coordinator = FroniusLoggerUpdateCoordinator(
hass=self.hass,
solar_net=self,
logger=_LOGGER,
name=f"{DOMAIN}_logger_{self.host}",
)
await self.logger_coordinator.async_config_entry_first_refresh()
# _create_solar_net_device uses data from self.logger_coordinator when available
self.system_device_info = await self._create_solar_net_device()
_inverter_infos = await self._get_inverter_infos()
for inverter_info in _inverter_infos:
coordinator = FroniusInverterUpdateCoordinator(
hass=self.hass,
solar_net=self,
logger=_LOGGER,
name=f"{DOMAIN}_inverter_{inverter_info.solar_net_id}_{self.host}",
inverter_info=inverter_info,
)
await coordinator.async_config_entry_first_refresh()
self.inverter_coordinators.append(coordinator)
self.meter_coordinator = await self._init_optional_coordinator(
FroniusMeterUpdateCoordinator(
hass=self.hass,
solar_net=self,
logger=_LOGGER,
name=f"{DOMAIN}_meters_{self.host}",
)
)
self.ohmpilot_coordinator = await self._init_optional_coordinator(
FroniusOhmpilotUpdateCoordinator(
hass=self.hass,
solar_net=self,
logger=_LOGGER,
name=f"{DOMAIN}_ohmpilot_{self.host}",
)
)
self.power_flow_coordinator = await self._init_optional_coordinator(
FroniusPowerFlowUpdateCoordinator(
hass=self.hass,
solar_net=self,
logger=_LOGGER,
name=f"{DOMAIN}_power_flow_{self.host}",
)
)
self.storage_coordinator = await self._init_optional_coordinator(
FroniusStorageUpdateCoordinator(
hass=self.hass,
solar_net=self,
logger=_LOGGER,
name=f"{DOMAIN}_storages_{self.host}",
)
)
async def _create_solar_net_device(self) -> DeviceInfo:
"""Create a device for the Fronius SolarNet system."""
solar_net_device: DeviceInfo = DeviceInfo(
configuration_url=self.fronius.url,
identifiers={(DOMAIN, self.solar_net_device_id)},
manufacturer="Fronius",
name="SolarNet",
)
if self.logger_coordinator:
_logger_info = self.logger_coordinator.data[SOLAR_NET_ID_SYSTEM]
# API v0 doesn't provide product_type
solar_net_device[ATTR_MODEL] = _logger_info.get("product_type", {}).get(
"value", "Datalogger Web"
)
solar_net_device[ATTR_SW_VERSION] = _logger_info["software_version"][
"value"
]
device_registry = await dr.async_get_registry(self.hass)
device_registry.async_get_or_create(
config_entry_id=self.config_entry.entry_id,
**solar_net_device,
)
return solar_net_device
async def _get_inverter_infos(self) -> list[FroniusDeviceInfo]:
"""Get information about the inverters in the SolarNet system."""
try:
_inverter_info = await self.fronius.inverter_info()
except FroniusError as err:
raise ConfigEntryNotReady from err
inverter_infos: list[FroniusDeviceInfo] = []
for inverter in _inverter_info["inverters"]:
solar_net_id = inverter["device_id"]["value"]
unique_id = inverter["unique_id"]["value"]
device_info = DeviceInfo(
identifiers={(DOMAIN, unique_id)},
manufacturer=inverter["device_type"].get("manufacturer", "Fronius"),
model=inverter["device_type"].get(
"model", inverter["device_type"]["value"]
),
name=inverter.get("custom_name", {}).get("value"),
via_device=(DOMAIN, self.solar_net_device_id),
)
inverter_infos.append(
FroniusDeviceInfo(
device_info=device_info,
solar_net_id=solar_net_id,
unique_id=unique_id,
)
)
return inverter_infos
@staticmethod
async def _init_optional_coordinator(
coordinator: FroniusCoordinatorType,
) -> FroniusCoordinatorType | None:
"""Initialize an update coordinator and return it if devices are found."""
try:
await coordinator.async_config_entry_first_refresh()
except ConfigEntryNotReady:
            # ConfigEntryNotReady is raised from FroniusError / KeyError in the
            # DataUpdateCoordinator if the request is not supported by the Fronius device
return None
# if no device for the request is installed an empty dict is returned
if not coordinator.data:
return None
return coordinator
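# Platform modules retrieve this object again through the mapping stored by
# async_setup_entry above (sketch):
#   solar_net: FroniusSolarNet = hass.data[DOMAIN][config_entry.entry_id]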
|
mezz64/home-assistant
|
homeassistant/components/fronius/__init__.py
|
Python
|
apache-2.0
| 8,577
|
from .registry import RegistryProxy
##
## Idea: A stack of registries, the top one being the one consulted.
## This allows a test to push/pop newly initialized registries, and the same
## goes for custom setups. Optionally name levels (e.g. toplevel == global).
## Might even search across levels.
class CoreRegistry(object):
def __init__(self):
self.__dict__['reg'] = {} ## bypass __setattr__ recursion
def set(self, name, registry):
wrapped = RegistryProxy(registry)
self.reg[name] = wrapped
return wrapped
def __getattr__(self, k):
try:
return self.reg[k]
except KeyError:
raise AttributeError(k)
def __setattr__(self, k, v):
self.set(k, v)
core = CoreRegistry()
from .workflow import WorkflowRegistry
core.workflow = WorkflowRegistry()
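# Usage sketch (FooRegistry is a stand-in for any registry class): assignment
# routes through __setattr__ into set(), which wraps the value in a
# RegistryProxy, while attribute access resolves through __getattr__:
#   core.foo = FooRegistry()
#   proxy = core.foo   # the RegistryProxy wrapping the FooRegistry
#   core.missing       # raises AttributeError('missing')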
|
wheelcms/wheelcms_axle
|
wheelcms_axle/registries/core_registry.py
|
Python
|
bsd-2-clause
| 830
|
from kivy.uix.floatlayout import FloatLayout
from kivy.properties import NumericProperty, ObjectProperty
from kivy.graphics import Color, Ellipse, Line
from kivy.graphics.transformation import Matrix
from kivy.core.window import Window
from simulationLine import SimulationLine
from simulationAngle import SimulationAngle
from simulationSled import SimulationSled
from chainLengthToXY import ChainLengthtoXY
from posToChainLength import PosToChainLength
import re
import math
class SimulationCanvas(FloatLayout):
scatterObject = ObjectProperty(None)
motorLift = 220
motorTranslate = 258.8
bedWidth = 2438.4 #8'
bedHeight = 1219.2 #4'
motorY = bedHeight + motorLift
motor2X = bedWidth + motorTranslate
def initialize(self):
self.startChains()
self.drawFrame()
self.setSpindleLocation(self.bedWidth/2,self.bedHeight/2)
self.setInitialZoom()
self.xPosSlider.bind(value=self.xPosSliderValueChange)
self.yPosSlider.bind(value=self.yPosSliderValueChange)
self.setupAngles()
self.setupSled()
self.lengthToXY.initialize(self.chainA, self.chainB, self.bedWidth+2*self.motorTranslate, self.bedHeight+self.motorLift, self.motorTranslate, self.motorLift)
self.posToLength.initialize(self.sled, self.bedWidth+2*self.motorTranslate, self.bedHeight+self.motorLift, self.motorTranslate, self.motorLift)
def setSpindleLocation(self,x,y):
self.chainA.setEnd(x,y)
self.chainB.setEnd(x,y)
def xPosSliderValueChange(self,callback,value):
self.setSpindleLocation(value,self.chainA.toPos[1])
def yPosSliderValueChange(self,callback,value):
self.setSpindleLocation(self.chainA.toPos[0], value)
def drawFrame(self):
self.frameLeft.initialize()
self.frameTop.initialize()
self.frameRight.initialize()
self.frameBottom.initialize()
self.frameLeft.setStart(0,0)
self.frameLeft.setEnd(0,self.bedHeight)
self.frameLeft.color = (1,0,0)
self.frameTop.setStart(0,self.bedHeight)
self.frameTop.setEnd(self.bedWidth,self.bedHeight)
self.frameTop.color = (1,0,0)
self.frameRight.setStart(self.bedWidth,0)
self.frameRight.setEnd(self.bedWidth,self.bedHeight)
self.frameRight.color = (1,0,0)
self.frameBottom.setStart(0,0)
self.frameBottom.setEnd(self.bedWidth,0)
self.frameBottom.color = (1,0,0)
def setupAngles(self):
self.angleA.initialize(self.chainA, self.lineT, 0)
self.angleB.initialize(self.chainB, self.lineT, 0)
self.angleP.initialize(self.chainA, self.chainB, 1)
def setupSled(self):
self.sled.initialize(self.chainA, self.chainB, 1, self.angleP)
def setInitialZoom(self):
mat = Matrix().scale(.4, .4, 1)
self.scatterInstance.apply_transform(mat, (0,0))
mat = Matrix().translate(200, 100, 0)
self.scatterInstance.apply_transform(mat)
def startChains(self):
self.chainA.initialize()
self.chainB.initialize()
self.lineT.initialize()
self.lineT.color = (0,0,1)
self.chainA.setStart(-self.motorTranslate, self.motorY)
self.chainB.setStart(self.motor2X, self.motorY)
self.lineT.setStart(-self.motorTranslate,self.motorY)
self.lineT.setEnd(self.motor2X,self.motorY)
|
shy21grams/GroundControl
|
Simulation/simulationCanvas.py
|
Python
|
gpl-3.0
| 3,902
|
import csv
class FieldCleaner:
""" This class takes a field definition file and cleans it, producing a field definition file that can be read by schemaLoader """
@staticmethod
def cleanFile(fileIn, fileOut):
""" Clean input file line by line and create output file """
done = False
# Open CSV file for reading each record as a dictionary
with open(fileIn, "rU") as csvfile:
reader = csv.DictReader(csvfile)
fieldnames = ["fieldname","required","data_type","field_length","rule_labels"]
writer = csv.DictWriter(open(fileOut,"w"),fieldnames=fieldnames,lineterminator='\n')
writer.writeheader()
for record in reader:
# Pass record into cleanRecord to sanitize
record = FieldCleaner.cleanRecord(record)
# Write new row to output file
writer.writerow(record)
@staticmethod
def cleanRecord(record):
""" Clean up an individual record, and write to output file.
Args:
record: dict of field specifications, keys should be 'fieldname','required','data_type', and 'field_length'
Returns:
Cleaned up version of record dict with same keys
"""
# Clean name, required, type, and length
record['fieldname'] = FieldCleaner.cleanName(record['fieldname'])
record['required'] = FieldCleaner.cleanRequired(record['required'])
record['data_type'] = FieldCleaner.cleanType(record['data_type'])
record['field_length'] = FieldCleaner.cleanLength(record['field_length'])
return record
@staticmethod
def cleanString(data,removeSpaces = True):
""" Change to lowercase, trim whitespace on ends, and replace internal spaces with underscores if desired
Args:
data: String to be cleaned
removeSpaces: True if spaces should be replaced with underscores
Returns:
Cleaned version of string
"""
result = data.lower().strip()
if(removeSpaces):
result = result.replace(" ","_")
return result
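    # For example, cleanString("  Field Length ") returns "field_length",
    # while cleanString("Field Length", False) returns "field length".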
@staticmethod
def cleanName(name):
""" Remove whitespace from name and change to lowercase """
# Convert to lowercase and remove whitespace on ends
originalName = name
name = FieldCleaner.cleanString(name)
        # Remove braces and parentheses
name = name.replace("{","").replace("}","").replace("(","").replace(")","")
# Replace problematic characters with underscores
name = name.replace(" - ","_").replace("-","_")
name = name.replace(",","_")
name = name.replace("/","_")
# Remove duplicate underscores
name = name.replace("__","_")
return name
@staticmethod
def cleanRequired(required):
""" Convert 'required' and '(required)' to True, "optional" and "required if relevant" if False, otherwise raises an exception """
required = FieldCleaner.cleanString(required,False)
if(required == "required" or required == "(required)" or required == "true"):
return "true"
elif(required == "false" or required == "" or required == "optional" or required == "required if relevant" or required == "required if modification" or required == "conditional per validation rule" or required == "conditional per award type" or required == "conditionally required"):
return "false"
else:
raise ValueError("".join(["Unknown value for required: ", required]))
@staticmethod
def cleanType(type):
""" Interprets all inputs as int, str, or bool. For unexpected inputs, raises an exception. """
type = FieldCleaner.cleanString(type,False)
if(type == "integer" or type == "int"):
return "int"
elif(type == "numeric" or type == "float"):
return "float"
elif(type == "alphanumeric" or type == "" or type == "str" or type == "string"):
return "str"
elif(type == "alphanumeric (logically a boolean)"):
# Some of these are intended to be booleans, but others use this value when they have multiple possible values,
# so for now we have to treat them as strings
return "str"
elif(type == "boolean" or type == "bool"):
return "bool"
elif(type == "long"):
return "long"
else:
raise ValueError("".join(["Unknown type: ", type]))
@staticmethod
def cleanLength(length):
""" Checks that input is a positive integer, otherwise raises an exception. """
length = FieldCleaner.cleanString(length,False)
if(length == ""):
# Empty length is fine, this means there is no length requirement
return ""
        try:
            value = int(length)
        except ValueError:
            # length cannot be cast as int
            raise ValueError("Length must be an integer")
        if(value <= 0):
            raise ValueError("Length must be positive")
        return length
if __name__ == '__main__':
#FieldCleaner.cleanFile("programActivityRaw.csv","programActivityFields.csv")
#FieldCleaner.cleanFile("awardFinFields.csv","awardFinancialFields.csv")
FieldCleaner.cleanFile("../config/appropFieldsRaw.csv","../config/appropFields.csv")
FieldCleaner.cleanFile("../config/awardFinancialFieldsRaw.csv","../config/awardFinancialFields.csv")
FieldCleaner.cleanFile("../config/programActivityFieldsRaw.csv","../config/programActivityFields.csv")
FieldCleaner.cleanFile("../config/awardFieldsRaw.csv","../config/awardFields.csv")
|
fedspendingtransparency/data-act-validator
|
dataactvalidator/filestreaming/fieldCleaner.py
|
Python
|
cc0-1.0
| 5,683
|
import numpy as np
from math import *
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
def pure_math(lengths, angles):
l1, l2 = lengths
theta1, theta2, theta3 = angles
x1, y1, z1 = 0, 0, 0
x2 = -l1 * sin(theta1) * cos(theta2)
y2 = l1 * sin(theta2)
z2 = -l1 * cos(theta1) * cos(theta2)
x3 = x2 - l2 * cos(theta1) * sin(theta3) - l2 * cos(theta2) * cos(theta3) * sin(theta1)
y3 = y2 + l2 * cos(theta3) * sin(theta2)
z3 = z2 + l2 * sin(theta1) * sin(theta3) - l2 * cos(theta1) * cos(theta2) * cos(theta3)
return (x1, y1, z1), (x2, y2, z2), (x3, y3, z3)
def inverse(lengths, target, a2=False, a3=False, deg=False):
l1, l2 = lengths
x, y, z = target
dist = np.linalg.norm(target)
if dist > sum(lengths):
return None
# theta3 *= -1
# Returns [0, 180]. +/- expands solution to [-180, 180].
try:
theta3 = (l1**2 + l2**2 - dist**2) / (2 * l1 * l2)
theta3 = acos(theta3) - pi
if a3:
theta3 *= -1
except (ValueError, ZeroDivisionError):
return None
# theta2 = (pi - theta2)
# Returns [-90, 90]. (pi - theta2) expands solution to [-180, 180].
try:
theta2 = y / (l1 + l2 * cos(theta3))
theta2 = asin(theta2)
if a2:
theta2 = pi - theta2
except (ValueError, ZeroDivisionError):
return None
# theta1 -= 2 * pi
# Sometimes (theta1 - 2 * pi). Doesn't matter. Either is cool.
try:
theta1 = atan2(z, -x) + atan2((l1 + l2 * cos(theta3)) * cos(theta2), l2 * sin(theta3))
except (ValueError, ZeroDivisionError):
return None
if deg:
return degrees(theta1), degrees(theta2), degrees(theta3)
else:
return theta1, theta2, theta3
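# Round-trip sanity sketch (x, y, z are any reachable coordinates; inverse()
# returns None for unreachable or degenerate targets, which full_analysis()
# below plots in red):
#   angles = inverse((5.0, 5.0), (x, y, z))
#   if angles is not None:
#       end = pure_math((5.0, 5.0), angles)[2]
#       assert np.linalg.norm(np.array(end) - np.array((x, y, z))) < 1e-4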
def full_analysis(l1, l2, n):
d = l1 + l2
phi = np.random.uniform(0, 2*pi, n)
costheta = np.random.uniform(-1, 1, n)
u = np.random.uniform(0, 1, n)
theta = np.arccos(costheta)
r = d * u**(1/3)
x = r * np.sin(theta) * np.cos(phi)
y = r * np.sin(theta) * np.sin(phi)
z = r * np.cos(theta)
x_good, x_bad = [], []
y_good, y_bad = [], []
z_good, z_bad = [], []
for i in range(len(x)):
try:
angles = inverse((l1, l2), (x[i], y[i], z[i]))
forward = pure_math((l1, l2), angles)[2]
assert(np.linalg.norm(np.array(forward) - np.array((x[i], y[i], z[i]))) < 0.0001)
x_good.append(x[i])
y_good.append(y[i])
z_good.append(z[i])
except:
x_bad.append(x[i])
y_bad.append(y[i])
z_bad.append(z[i])
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.set_aspect('equal')
ax.scatter([-d, d, 0, 0, 0, 0], [0, 0, -d, d, 0, 0], [0, 0, 0, 0, -d, d], alpha=0)
ax.scatter(x_good, y_good, z_good, c='g', alpha=0.1)
ax.scatter(x_bad, y_bad, z_bad, c='r')
ax.set_xlabel('X Axis')
ax.set_ylabel('Y Axis')
ax.set_zlabel('Z Axis')
fig2 = plt.figure(figsize=(25, 7.5), dpi=80)
fig2.suptitle('Dead Zone Analysis (Configuration 1)', fontsize=16, fontweight='bold')
ax1 = fig2.add_subplot(131)
ax1.set_aspect('equal')
ax1.scatter(x_good, y_good, c='g', alpha=0.5)
ax1.scatter(x_bad, y_bad, c='r')
ax1.set_xlabel('X Axis')
ax1.set_ylabel('Y Axis')
ax2 = fig2.add_subplot(132)
ax2.set_aspect('equal')
ax2.scatter(x_good, z_good, c='g', alpha=0.5)
ax2.scatter(x_bad, z_bad, c='r')
ax2.set_xlabel('X Axis')
ax2.set_ylabel('Z Axis')
ax3 = fig2.add_subplot(133)
ax3.set_aspect('equal')
ax3.scatter(y_good, z_good, c='g', alpha=0.5)
ax3.scatter(y_bad, z_bad, c='r')
ax3.set_xlabel('Y Axis')
ax3.set_ylabel('Z Axis')
fig2.savefig('analysis.png', bbox_inches='tight')
plt.show()
full_analysis(7.5, 7.5, 10000)
close = []
def test():
for i in range(-180, 190, 5):
for j in range(-180, 190, 5):
for k in range(-180, 190, 5):
a, b, c = radians(i), radians(j), radians(k)
target = pure_math((5, 5), (a, b, c))
                dist_err = np.linalg.norm(np.array(target[2]) - np.array([1, 5, 1]))
                if dist_err < 3:
print(i, j, k)
continue
angles = inverse((5, 7), (target[2][0], target[2][1], target[2][2]))
ik = pure_math((5, 7), angles)
error = np.linalg.norm(np.array(ik[2]) - np.array(target[2]))
if error > 0.00001:
print('Accuracy Error:', (i, j, k))
# test()
# print(min(close))
def graph():
x = []
y = []
z = []
for i in range(-170, 100, 10):
for j in range(-170, 100, 10):
for k in range(-170, 100, 10):
forward = pure_math((5, 5), (radians(i), radians(j), radians(k)))
x.append(forward[2][0])
y.append(forward[2][1])
z.append(forward[2][2])
fig = plt.figure(figsize=(25, 7.5), dpi=80)
fig.suptitle('Visualization of End Effector Kinematics (Leg 1)', fontsize=16, fontweight='bold')
ax1 = plt.subplot(131)
ax1.set_aspect('equal')
ax1.scatter(x, y, c='r')
ax1.set_xlabel('X Points')
ax1.set_ylabel('Y Points')
ax2 = plt.subplot(132)
ax2.set_aspect('equal')
ax2.scatter(x, z, c='r')
ax2.set_xlabel('X Points')
ax2.set_ylabel('Z Points')
ax3 = plt.subplot(133)
ax3.set_aspect('equal')
ax3.scatter(y, z, c='r')
ax3.set_xlabel('Y Points')
ax3.set_ylabel('Z Points')
fig.savefig('leg1.png', bbox_inches='tight')
plt.show()
# graph()
def analyze():
x = []
y = []
z = []
for i in range(-170, 100, 10):
for j in range(-170, 100, 10):
for k in range(-170, 100, 10):
forward = pure_math((5, 5), (radians(i), radians(j), radians(k)))
x.append(forward[2][0])
y.append(forward[2][1])
z.append(forward[2][2])
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.set_aspect('equal')
ax.scatter(x, y, z, c='g')
plt.show()
# solve a = n * Cos[x] * Sin[z] + n * Cos[z] * Sin[x] * Cos[y] + m * Sin[x] * Cos[y] for x
# solve b = -m * Sin[y] - n * Cos[z] * Sin[y]
# solve c = n * Sin[x] * Sin[z] - m * Cos[x] * Cos[y] - n * Cos[x] * Cos[y] * Cos[z] for x
|
bobbyluig/Eclipse
|
src/finesse/tests/leg1.py
|
Python
|
mit
| 6,403
|
from views.viewEditor import *
from views.boardPrint.board import *
from views.boardPrint.boardEditor import *
import views.resources
import boardOption
from helpOption import *
from pygobstones.commons.i18n import *
from pygobstones.commons.qt_utils import saveFileName
class Editor(QtGui.QWidget):
def __init__(self, parent, generator):
super(Editor, self).__init__()
self.parent = parent
self.ui = Ui_editor()
self.ui.setupUi(self)
icon = QtGui.QIcon(':/logoGobstones.png')
self.setWindowIcon(icon)
self.initcomboBoxLoad()
self.init_combo_box_persist()
self.init_combo_box_options()
self.boardOption = boardOption.BoardOption(self)
self.boardGenerator = generator
self.getInitialBoardFromMainWindow()
self.dictionary_load = {"Load Initial Board":'self.getInitialBoardFromMainWindow()',
"Load from disk":'self.loadBoardFromDisk()',
'Load from ...': 'self.nothing()'}
self.dictionary_persist = {'Persist board': 'self.nothing()',
'Set as initial board': 'self.setInitialBoardToMainWindow()',
'Save board to disk': 'self.saveBoardFromDisk()',
'Save board to image': 'self.saveBoardToImage()'}
self.dictionary_options = {'Options': 'self.nothing()',
'Options Board': 'self.openBoardOptionWindow()',
'User Options': 'self.openUserOptionsWindow()'}
def init_combo_box_options(self):
self.connect(self.ui.combo_box_options, QtCore.SIGNAL('activated(QString)'), self.combo_box_options_chosen)
self.ui.combo_box_options.addItem(i18n('Options'))
self.ui.combo_box_options.addItem(i18n('Options Board'))
self.ui.combo_box_options.addItem(i18n('User Options'))
def initcomboBoxLoad(self):
self.connect(self.ui.comboBoxLoad, QtCore.SIGNAL('activated(QString)'), self.comboBoxLoad_chosen)
self.ui.comboBoxLoad.addItem(i18n('Load from ...'))
self.ui.comboBoxLoad.addItem(i18n("Load Initial Board"))
self.ui.comboBoxLoad.addItem(i18n("Load from disk"))
def init_combo_box_persist(self):
self.connect(self.ui.combo_box_persist, QtCore.SIGNAL('activated(QString)'), self.combo_box_persist_chosen)
self.ui.combo_box_persist.addItem(i18n('Persist board'))
self.ui.combo_box_persist.addItem(i18n('Set as initial board'))
self.ui.combo_box_persist.addItem(i18n('Save board to disk'))
self.ui.combo_box_persist.addItem(i18n('Save board to image'))
def combo_box_persist_chosen(self, string):
exec(self.dictionary_persist[getEnglishTraduction(string)])
def comboBoxLoad_chosen(self, string):
exec(self.dictionary_load[getEnglishTraduction(string)])
def combo_box_options_chosen(self, string):
exec(self.dictionary_options[getEnglishTraduction(string)])
def nothing(self):
pass
def setInitialBoard(self, board):
self.board = board
self.boardGenerator.setInitialBoard(board)
self.ui.boardEditor.setBoard(self.board)
self.ui.boardEditor.populate()
def setInitialBoardToMainWindow(self):
self.board = boardToString(self.ui.boardEditor.getEditedBoard())
self.parent.setInitialBoard(self.board)
self.parent.setAtNothingBoardOptions()
self.reset_combo_persist()
self.reset_combo_options()
def getInitialBoardFromMainWindow(self):
board = self.boardGenerator.getStringBoard()
self.setInitialBoard(board)
self.reset_combo_load()
def openUserOptionsWindow(self):
self.command = CommandHelpWidget()
self.command.show()
self.reset_combo_options()
def openBoardOptionWindow(self):
self.boardOption.openBoardOptionWindow(self.parent.initialBoardGenerator)
self.reset_combo_options()
def update(self):
board = self.boardGenerator.getInitialBoard()
self.setInitialBoard(board)
self.ui.boardEditor.populate()
self.ui.boardEditor.update()
def loadBoardFromDisk(self):
self.boardOption.loadBoard()
self.reset_combo_load()
def saveBoardFromDisk(self):
self.board = boardToString(self.ui.boardEditor.getEditedBoard())
filename = saveFileName(self, '*.gbb')
if not filename == QtCore.QString(''):
(filep, filen) = os.path.split(str(filename))
if not filename[-4:] == '.gbb':
filename = filename + '.gbb'
myFile = open(filename, 'w')
myFile.write(self.board)
myFile.close()
self.reset_combo_persist()
def saveBoardToImage(self):
filename = saveFileName(self, '*.png')
if not filename == QtCore.QString(''):
self.ui.boardEditor.save_to_image(filename)
self.reset_combo_persist()
def reset_combo_load(self):
self.ui.comboBoxLoad.setCurrentIndex(0)
def reset_combo_persist(self):
self.ui.combo_box_persist.setCurrentIndex(0)
def reset_combo_options(self):
self.ui.combo_box_options.setCurrentIndex(0)
def paintEvent(self, event):
        painter = QtGui.QPainter()
        painter.begin(self)
        painter.setRenderHint(QtGui.QPainter.Antialiasing)
rect = QtCore.QRect(0, 0, 1920, 1080)
img = QtGui.QImage(':/backgroundWidget.png')
painter.drawImage(rect, img)
painter.end()
|
gobstones/PyGobstones
|
pygobstones/gui/editor.py
|
Python
|
gpl-3.0
| 5,582
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.paging import Paged
class TriggeredWebJobPaged(Paged):
"""
    A paging container for iterating over a list of :class:`TriggeredWebJob <azure.mgmt.web.models.TriggeredWebJob>` objects
"""
_attribute_map = {
'next_link': {'key': 'nextLink', 'type': 'str'},
'current_page': {'key': 'value', 'type': '[TriggeredWebJob]'}
}
def __init__(self, *args, **kwargs):
super(TriggeredWebJobPaged, self).__init__(*args, **kwargs)
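# Typical consumption (sketch; the operation name is illustrative, following
# the generated WebApps API that returns this container):
#   for job in client.web_apps.list_triggered_web_jobs('my_rg', 'my_site'):
#       print(job.name)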
|
lmazuel/azure-sdk-for-python
|
azure-mgmt-web/azure/mgmt/web/models/triggered_web_job_paged.py
|
Python
|
mit
| 951
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
import frappe
import unittest
# test_records = frappe.get_test_records('Quality Inspection')
class TestQualityInspection(unittest.TestCase):
pass
|
bazz-erp/erpnext
|
erpnext/stock/doctype/quality_inspection/test_quality_inspection.py
|
Python
|
gpl-3.0
| 254
|
import numpy as np
import cv2
import timeit
import matplotlib.pyplot as plt
from skimage.segmentation import felzenszwalb,slic
from skimage.segmentation import mark_boundaries
from skimage.color import label2rgb
import multiprocessing
import random
import Obj_segment.Rect
path = "/media/iglu/Data/DatasetIglu"
u = ['user1', 'user2', 'user3', 'user4', 'user5', 'user6', 'user7', 'user8', 'user9', 'user10'] #
a = ['point_1', 'point_2', 'point_3', 'point_4', 'point_5', 'point_6', 'point_7', 'point_8', 'point_9', 'point_10',
'show_1', 'show_2', 'show_3', 'show_4', 'show_5', 'show_6', 'show_7', 'show_8', 'show_9', 'show_10']
# An earlier calibration matrix, immediately superseded by the assignment below:
# homo = np.array([[1.00567306e+00, -4.85118860e-01, -1.84060385e+01],
#                  [2.23046547e-02, 2.27148983e-03, 1.80858908e+02],
#                  [-1.17505053e-04, -1.38922057e-03, 1.00000000e+00]])
homo= np.array([[ 9.94775973e-01 , -4.09621743e-01 , -4.37893262e+01],
[ -2.06444142e-02 , 2.43247181e-02 , 1.94859521e+02],
[ -1.13045909e-04 , -1.41217334e-03 , 1.00000000e+00]])
centro = (0,0)
angulo=0
def make_graph(grid):
# get unique labels
vertices = np.unique(grid)
# map unique labels to [1,...,num_labels]
reverse_dict = dict(zip(vertices, np.arange(len(vertices))))
grid2 = np.array([reverse_dict[x] for x in grid.flat]).reshape(grid.shape)
# create edges
down = np.c_[grid2[:-1, :].ravel(), grid2[1:, :].ravel()]
right = np.c_[grid2[:, :-1].ravel(), grid2[:, 1:].ravel()]
all_edges = np.vstack([right, down])
all_edges = all_edges[all_edges[:, 0] != all_edges[:, 1], :]
all_edges = np.sort(all_edges, axis=1)
num_vertices = len(vertices)
edge_hash = all_edges[:, 0] + num_vertices * all_edges[:, 1]
# find unique connections
edges = np.unique(edge_hash)
# undo hashing
edges = [[vertices[x % num_vertices],
vertices[x / num_vertices]] for x in edges]
return vertices, edges
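# Tiny worked example: for grid [[0, 0], [0, 1]] make_graph returns
# vertices [0, 1] and edges [[0, 1]], i.e. the two labels touch along at
# least one 4-connected boundary.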
def get_adj(N,segments,edges,img,n):
if N[0] == n:
N=N[1:]
Nei = np.unique([v for v in edges if v[0] in N or v[1] in N])
# print Nei
Imm = np.zeros((img.shape[0], img.shape[1]), np.uint8)
for x in xrange(len(N)):
sp = N[x]
if sp != n:
Imm[segments == sp] = 255
return Imm
def inside(img,seg,p1,p2):
D1 = img.copy()
D2 = img.copy()
D1 = D1[p1[1]:p2[1],p1[0]:p2[0]]
D2[D2 ==seg ] = 0
D2[D2 != 0] = -1
D2 +=1
D1[D1 ==seg ] = 0
D1[D1 != 0] = -1
D1 +=1
Sum1= np.sum(np.sum( np.array(D1)))
Sum2= np.sum(np.sum( np.array(D2)))
Sum3= (p2[1]-p1[1])*(p2[0]-p1[0])
# print seg
# print "% de SP: "+(Sum1*1.0/Sum2).__str__()+" S1: "+Sum1.__str__()
# print Sum3*0.85
if Sum1>int(0.30*Sum2) or Sum3*0.75<=Sum1:
return True
return False
def bbox3(img):
rows = np.any(img, axis=1)
cols = np.any(img, axis=0)
rmin, rmax = np.where(rows)[0][[0, -1]]
cmin, cmax = np.where(cols)[0][[0, -1]]
return cmin, cmax, rmin, rmax
def bbox2(img_sp,sp,p1,p2):
im = img_sp.copy()
# im = im[:,:,0]+im[:,:,1]+im[:,:,2]
for seg in sp:
if seg ==1:
continue
if inside(img_sp,seg,p1,p2):
# print "added"
im[im == seg] = 0
im[im!= 0] = -1
im+=1
im = np.array(im*255)
if np.sum(np.sum(im)) == 0:
return None,None,None,None
return bbox3(im)
def rotateImage(image, angle,center):
rot_mat = cv2.getRotationMatrix2D(center,angle,1.0)
result = cv2.warpAffine(image,rot_mat,image.shape[1::-1],flags=cv2.INTER_LINEAR)
# result = cv2.warpAffine(image, rot_mat, image.shape,flags=cv2.INTER_LINEAR)
return result
def Homo_get(x,y,inverted=False):
p1 = [float(x), float(y), 1.0]
p1 = np.array(p1)
if inverted:
r = np.dot(p1,np.linalg.inv(homo))
else:
r = np.dot(homo, p1)
r = r / r[2]
return r
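# Sketch: Homo_get maps a pixel through the homography with a perspective
# divide, e.g. r = Homo_get(320, 240) gives the corresponding (r[0], r[1])
# in the other camera view; inverted=True is meant to apply the reverse mapping.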
def get_images(img1, Mask1, img2, Mask2, name):
Mask_1 = Mask1.copy()
kernel = np.ones((7, 7), np.uint8)
Mask_1 = cv2.dilate(Mask_1, kernel, 1)
kernel = np.ones((4, 4), np.uint8)
Mask_1 = cv2.erode(Mask_1, kernel, 1)
edged = cv2.Canny(Mask_1,1,240)
total = np.sum(np.sum( np.array(Mask_1[:, :, 0])))
x_ini = 0
y_ini = 0
width = 0
height= 0
(cnts, _) = cv2.findContours(edged.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
cc = 0
ind = 0
indd= 0
for cnt in cnts:
if cc < cv2.contourArea(cnt):
ind = indd
cc=cv2.contourArea(cnt)
indd+=1
if cc <0.6*total:
return None,None,None
x, y, w, h = cv2.boundingRect(cnts[ind])
rect = cv2.minAreaRect(cnts[ind])
angle =rect[2]
Mask_1 = rotateImage(Mask_1, rect[2], (x + w / 2, y + h / 2))
p_c = (x + w / 2, y + h / 2)
img1 = rotateImage(img1, angle, p_c)
edged = cv2.Canny(Mask_1, 30, 200)
total = np.sum(np.sum( np.array(Mask_1[:, :, 0])))
(cnts, _) = cv2.findContours(edged.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
for cnt in cnts:
if cv2.contourArea(cnt) < 0.6 * total:
continue
x, y, w, h = cv2.boundingRect(cnt)
x_ini = x
y_ini = y
width = w
height = h
Mask_1 = Mask_1[y:y + h, x:x + w]
Mask_1[0:5,:,:]=255
img1 = img1[y:y + h, x:x + w].copy()
break
# cv2.imwrite(name + ".jpg", Mask_1)
Mask_1 = cv2.bitwise_not(Mask_1)
img1 = cv2.bitwise_and(img1,img1,mask=Mask_1[:,:,0])
# Mask_1 = rotateImage(Mask_1, -angle, p_c)
# cv2.imwrite(name + ".jpg",img1)
edged = cv2.Canny(Mask_1, 50, 200)
i = 0
img2 = img2[200:480,:,:]
Mask2 = Mask2[200:480,:,0]
kernel = np.ones((7, 7), np.uint8)
Mask2 = cv2.dilate(Mask2, kernel, 1)
kernel = np.ones((4, 4), np.uint8)
Mask2 = cv2.erode(Mask2, kernel, 1)
ret3, Mask2 = cv2.threshold(Mask2, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)
Sup1 = cv2.bitwise_and(img2,img2,mask=Mask2)
Sup = cv2.cvtColor(Sup1,cv2.COLOR_BGR2RGB)
segments_fz = slic(Sup, n_segments=500, compactness=10)
segments_fz[Mask2!=255] = -1
segments_fz += 2
vert, edg = make_graph(segments_fz)
# Img_Slic = label2rgb(segments_fz,Sup,kind='avg')
# Img_Slic = cv2.cvtColor(Img_Slic, cv2.COLOR_RGB2BGR)
Contornos=[]
(cnts, _) = cv2.findContours(edged.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)
for cnt in cnts:
if cv2.contourArea(cnt) < 50:
continue
col = [random.randint(0, 255), random.randint(0, 255), random.randint(0, 255)]
x, y, w, h = cv2.boundingRect(cnt)
if w > 320:
continue
cv2.rectangle(img1, (x,y),(x+w,y+h), col, 2)
r = Homo_get(x_ini +x,y_ini + y-20)
p1 = (min(int(r[0]),639),min(int(r[1])-200,279))
r = Homo_get(x_ini + x+w, y_ini + y +h-10)
p2 = (min(int(r[0]),639),min(int(r[1])-200,279))
if p1[0] < 0 or p1[1] <0 or p2[0] < 0 or p2[1] <0:
continue
sp = np.unique(np.array(segments_fz[p1[1]:p2[1],p1[0]:p2[0]]))
        if len(sp) == 0:
            pass
        elif len(sp) == 1 and sp[0] == 1:
            print "Empty..."
else:
m = (p2[1]-p1[1])/2
mm = (p2[0]-p1[0])/2
sp = np.unique(np.array(segments_fz[p1[1]+m-10:p1[1]+m+10, p1[0]+mm-10:p2[0]+mm+10]))
# print sp
# img2[segments_fz!=sp]=0
Im = np.array(get_adj(sp, segments_fz, edg, Sup,1))
if np.sum(np.sum(Im))==0:
continue
# plt.imshow(Im)
# plt.show()
r1, r2, c1, c2 = bbox3(Im)
# masked_data = cv2.bitwise_and(masked_data, masked_data, mask=Im)
Contornos.append([[ r1, c1+200, r2, c2+200, x_ini +x, y_ini+y, x_ini +x+w, y_ini+y+h] ,i] )
# Output = Img_Slic[p1[1]:p2[1],p1[0]:p2[0]]
# print p
# cv2.rectangle(Img_Slic,(r1,c1),(r2,c2),col,2)
# cv2.imwrite(name+"_"+i.__str__()+".jpg",Img_Slic)
# cv2.rectangle(Mask_1, (x, y), (x + w, y + h), (0, 255, 0))
# img2 = img[y:y + h, x:x + w, :].copy()
# cv2.imwrite(name + "_" + i.__str__() + ".jpg", img2)
# print name + "_" + i.__str__() + ".jpg"
i+=1
# cv2.imwrite(name + ".jpg",img1)
# cv2.imwrite(name + "_s.jpg", Sup1)
# cv2.imwrite(name + "_Total.jpg", Img_Slic)
return Contornos,angle,p_c
def add_cnt(Cnts,cnt):
if len(Cnts)==0:
Cnts.append([cnt,1])
else:
done= False
for i in xrange(len(Cnts)):
P = Cnts[i][0][0]
p1 = cnt[0]
if abs(P[4]-p1[4])<=20 and abs(P[5]-p1[5])<=20 and abs(P[6]-p1[6])<=20 and abs(P[7]-p1[7])<=20:
Cnts[i][1]= Cnts[i][1] + 1
done=True
if not done:
Cnts.append([cnt,1])
def obtain_cand(Initial,Initial2,Nsme,user,action,Total):
TT = Initial[1].copy()
Rg = Initial[0].copy()
Output = []
kernel = np.ones((7, 7), np.uint8)
Mask2 = cv2.dilate(Initial2[1][:,:,0], kernel, 1)
kernel = np.ones((4, 4), np.uint8)
Mask2 = cv2.erode(Mask2, kernel, 1)
Mask2 = cv2.bitwise_not(Mask2)
kernel = np.ones((7, 7), np.uint8)
Mask1 = cv2.dilate(Initial2[0][:,:,0], kernel, 1)
kernel = np.ones((4, 4), np.uint8)
Mask1 = cv2.erode(Mask1, kernel, 1)
Mask1 = cv2.bitwise_not(Mask1)
Rg1 = cv2.bitwise_and(Rg,Rg,mask=Mask1)
Sup1 = cv2.bitwise_and(Initial[1],Initial[1],mask=Mask2)
Sup = cv2.cvtColor(Sup1, cv2.COLOR_BGR2RGB)
segments_fz = slic(Sup, n_segments=250, compactness=20, sigma=5)
segments_fz[Mask2 < 1] = -1
segments_fz += 2
# Img_Slic = label2rgb(segments_fz, Sup, kind='avg')
# Img_Slic_TT = cv2.cvtColor(Img_Slic, cv2.COLOR_RGB2BGR)
# Img_Slic = cv2.cvtColor(Img_Slic, cv2.COLOR_RGB2BGR)
for i in xrange(len(Total)):
col = [random.randint(0, 255), random.randint(0, 255), random.randint(0, 255)]
T= Total[i][0][0]
x,y,x2,y2 = T[0],T[1],T[2],T[3]
cv2.rectangle(Rg1, (T[4], T[5]), (T[6],T[7]), col, 2)
P1 = Obj_segment.Rect.Point(T[4], T[5])
P2 = Obj_segment.Rect.Point(T[6],T[7])
Rec_top = Obj_segment.Rect.Rect(P1,P2)
sp = np.array(segments_fz[y:y2,x:x2])
sp = np.unique(sp)
if len(sp) == 0:
# Output =Img_Slic[y:y2,x:x2]
P1 = Obj_segment.Rect.Point(x,y)
P2 = Obj_segment.Rect.Point(x2,y2)
rec = Obj_segment.Rect.Rect(P1,P2)
        elif len(sp) == 1 and sp[0] == 1:
# Output = Img_Slic[y:y2, x:x2]
P1 = Obj_segment.Rect.Point(x, y)
P2 = Obj_segment.Rect.Point(x2, y2)
rec = Obj_segment.Rect.Rect(P1, P2)
else:
rmin, rmax,cmin, cmax = bbox2(segments_fz, sp,(x,y),(x2,y2))
if rmin is None:
continue
# Output = TT[cmin:cmax,rmin:rmax]
P1 = Obj_segment.Rect.Point(rmin, cmin)
P2 = Obj_segment.Rect.Point(rmax, cmax)
rec = Obj_segment.Rect.Rect(P1, P2)
Ouput_Top = Rg[T[5]:T[7],T[4]:T[6]]
Output.append((rec,Rec_top))
# cv2.imwrite("Morphed/Patches_Front/"+user+"_"+action+"_"+Nsme[:-4]+"_"+i.__str__()+"_Front.jpg",Output)
# cv2.imwrite("Morphed/Patches_Top/" + user + "_" + action + "_" + Nsme[:-4] + "_" + i.__str__() + "_Top.jpg", Ouput_Top)
# cv2.rectangle(Img_Slic_TT,(x,y),(x2,y2),col,3)
# cv2.imwrite("Morphed/Top/" + user + "_" + action + "_" + Nsme[:-4] + "_v2" + "_Top.jpg", Rg1)
# cv2.imwrite("Morphed/Front/"+user+"_"+action+"_"+Nsme[:-4]+"_v2"+ "_Front.jpg",Img_Slic_TT)
return Output
def get_candidate(Images):
def Clean_Output(Output):
def getKey(item):
area = abs(item[0].top-item[0].bottom)*abs(item[0].left-item[0].right)
return area
Out = []
Output = sorted(Output,key=getKey,reverse=True)
for i in xrange(len(Output)):
N=False
for j in xrange(i+1,len(Output)):
p1 = Output[i]
p1 = p1[1]
P = Output[j]
P = P[1]
if abs(P.bottom - p1.bottom) <= 20 and abs(P.top - p1.top) <= 20 and abs(P.left - p1.left) <= 20 and abs(P.right - p1.right) <= 20:
N = True
if not N:
Out.append(Output[i])
return Out
count = 0
Total = []
Total2 = []
Initial = []
Initial2 = []
for f in Images:
RGB1,Mask1,RGB2,Mask2 = f
R, angle, p_c = get_images(RGB1, Mask1, RGB2, Mask2,"")
if R == [] or R is None:
continue
RGB1 = rotateImage(RGB1, angle, p_c)
Mask1 = rotateImage(Mask1, angle, p_c)
Initial.append((RGB1, RGB2))
Initial2.append((Mask1, Mask2))
for K in xrange(len(R)):
add_cnt(Total, R[K])
if count > 5:
break
count += 1
removers = []
for i in xrange(len(Total)):
if Total[i][1] < 4:
removers.append(Total[i])
for i in xrange(len(removers)):
Total.remove(removers[i])
for i in xrange(len(Initial)):
Total2.append(obtain_cand(Initial[i], Initial2[i], "","","", Total))
Total = [item for sublist in Total2 for item in sublist]
Total = Clean_Output(Total)
return Total
def func(arg):
nu, na = arg
user = u[nu]
action = a[na]
print user
print action
count=0
Total = []
f = open(path + "/" + user + "/" + action + "/k2" + "/List.txt", 'r')
f2 = open(path + "/" + user + "/" + action + "/k1" + "/List.txt", 'r')
Initial = []
Initial2 = []
Nsme = []
for line in f:
Time = line
file1 = next(f).rstrip('\n')
file2 = next(f).rstrip('\n')
Label = next(f).rstrip('\n')
RGB1 = cv2.imread(path + "/" + user + "/" + action + "/k2" + "/RGB/" + file1)
Depth1 = np.load(path + "/" + user + "/" + action + "/k2" + "/Depth/" + file2)
Mask1 = cv2.imread(path + "/" + user + "/" + action + "/k2" + "/MTA/" + file1)
Time = next(f2).rstrip('\n')
file3 = next(f2).rstrip('\n')
file4 = next(f2).rstrip('\n')
Label = next(f2).rstrip('\n')
RGB2 = cv2.imread(path + "/" + user + "/" + action + "/k1" + "/RGB/" + file3)
Depth2 = np.load(path + "/" + user + "/" + action + "/k1" + "/Depth/" + file4)
Mask2 = cv2.imread(path + "/" + user + "/" + action + "/k1" + "/MTA/" + file3)
R,angle,p_c = get_images(RGB1, Mask1, RGB2,Mask2,"Morphed/"+user+"_"+action+"_"+file3[:-4])
if R is None:
continue
Nsme.append(file3)
RGB1= rotateImage(RGB1,angle,p_c)
Mask1 = rotateImage(Mask1,angle,p_c)
Initial.append((RGB1,RGB2))
Initial2.append((Mask1,Mask2))
for K in xrange(len(R)):
add_cnt(Total,R[K])
if count > 5:
break
count+=1
removers=[]
for i in xrange(len(Total)):
if Total[i][1] <4:
removers.append(Total[i])
for i in xrange(len(removers)):
Total.remove(removers[i])
for i in xrange(len(Initial)):
obtain_cand(Initial[i],Initial2[i],Nsme[i],user,action,Total)
return Total,user,action
def In_Patch():
start_time1 = timeit.default_timer()
# z = [func((aa, bb)) for aa in range(10) for bb in range(20)]
# print z
z = [(aa, bb) for aa in range(10) for bb in range(20)]
pool = multiprocessing.Pool(6)
R = pool.map(func, z,1)
pool.close()
pool.join()
Total = [item for sublist in R for item in sublist]
import joblib
joblib.dump(Total,"Values_chosen.pkl",compress=9)
elapsed = timeit.default_timer() - start_time1
print "Tiempo: " + elapsed.__str__()
|
pazagra/catkin_ws
|
src/Multimodal_Interaction/Obj_segment/Obj_Cand.py
|
Python
|
gpl-3.0
| 15,760
|
#!/usr/bin/python
import sys,os
import re
import pprint
import time
import base64
import datetime
import time
import simplejson
import jsonpickle
from functools import wraps
from flask import Flask, request, session, g, redirect, url_for, abort, render_template, flash, make_response, jsonify, Markup, Response, send_from_directory, Blueprint
from jinja2 import TemplateNotFound
from contextlib import closing
sys.path.insert(0, 'lib')
import clustertools
import clusterclasses
#TODO: ADD PIPELINE DATA
setupfile = 'uisetup.json'
if not os.path.exists(setupfile):
print "count not find setup file %s" % setupfile
sys.exit(1)
for k,v in jsonpickle.decode(open(setupfile, 'r').read())['server'].items():
#print "SERVER K %s V %s" % (k, v)
globals()[k] = v
for k,v in jsonpickle.decode(open(setupfile, 'r').read())['datas' ].items():
#print "DATAS K %s V %s" % (k, v)
globals()[k] = v
#CONSTANTS
#DEBUG = True
#PER_PAGE = 20
#MAX_QUERY_BUFFER = 100
#SECRET_KEY = 'development key'
#VARIABLES
app = Flask(__name__)
app.config.from_object(__name__)
app.jinja_env.globals['trim_blocks' ] = True
app.jinja_env.add_extension('jinja2.ext.do')
def templated(template=None):
"""
Allows for simple routes to have the template declared as a decorator
and the data just returned as a dict
"""
def decorator(f):
@wraps(f)
def decorated_function(*args, **kwargs):
template_name = template
if template_name is None:
template_name = request.endpoint \
.replace('.', '/') + '.html'
ctx = f(*args, **kwargs)
if ctx is None:
ctx = {}
elif not isinstance(ctx, dict):
return ctx
return render_template(template_name, **ctx)
return decorated_function
return decorator
requester = None
@app.before_request
def before_request():
"""
before each request, add global variables to the global G variable.
If using WSGI (eg. apache), this wont work
"""
if requester is None:
"""
        if the DB is not loaded yet, redirect to the LOADING.HTML page which will request
        the initial page through a LINK tag. As the request is coming FROM the LOADING page,
        we will read the db into memory. The LOADING.HTML will refresh every second. When SETUPDB
        is finally loaded the page will be redirected to the initial page
"""
if request.path != "/loading":
print "redirecting"
return render_template('loading.html')
else:
init_classes()
return redirect( url_for('initial') )
init_classes()
##DATABASE
def init_classes_loader():
"""
reads the data from the disk, parses and loads it to global variables.
has to be changed if using WSGI servers aroung it (eg. apache) once global variables
are not shared.
"""
with app.app_context():
print "initializing db"
global requester
requester = clustertools.requests()
print "db loaded"
def init_classes():
"""
if db has not been initialized yet, do it so. Otherwise, just return the global variables
"""
if requester is None:
init_classes_loader()
@app.route('/', methods=['GET'])
@templated('display_normal.html')
def initial():
"""
shows the start page
"""
return dict()
@app.route('/serverstatus', methods=['GET'])
@templated('serverstatus.html')
def serverstatus():
"""
    shows the server status page
"""
return dict()
@app.route('/job', methods=['GET'])
@templated('getjobinfo.html')
def getjobinfo():
    """
    shows the job info page
    """
    return dict()
@app.route('/job', methods=['POST'])
def postjobinfo():
    """
    accepts a job info submission. Flask endpoints default to the function
    name, so this handler needs a name distinct from getjobinfo above.
    """
    return dict()
if __name__ == '__main__':
app.run(port=PORT)
#APPLICATION CODE :: SETUP
#@app.before_request
#def before_request():
# """
# before each request, add global variables to the global G variable.
# If using WSGI (eg. apache), this wont work
# """
# global enterprise
# g.enterprise = enterprise
#
# if setupDB is None:
# """
# if DB not loaded yet, redirect to the LOADING.HTML page which will request through a LINK tag
# for the initial page. As the request is coming FROM the LOADING page, we will read the db to
# memory. The LOADING.HTML will refresh every second. When SETUPDB is finally loaded the page will
# be redirected to the initial page
# """
# if request.path != "/loading":
# print "redirecting"
# return render_template('loading.html')
# else:
# g.setupDB, g.projects, g.structure, g.projectDescription, g.projectStatuses, g.plugins = getDb()
# return redirect( url_for('initial') )
#
# g.setupDB, g.projects, g.structure, g.projectDescription, g.projectStatuses, g.plugins = getDb()
#
#
#@app.after_request
#def after_request(response):
# return response
#
#
#@app.teardown_request
#def teardown_request(exception):
# pass
#
#
##APPLICATION CODE :: ROUTE
#@app.route("/loading", methods=['GET'])
#@templated("loading.html")
#def loading():
# """
# Show the LOADING message while loading the SETUPDB
# """
# return dict()
#
#@app.route('/', methods=['GET'])
#@templated('display_normal.html')
#def initial():
# """
# shows the start page
# """
# return dict()
#
#
#@app.route('/query', methods=['GET'])
#def query():
# """
# shows the result for the query
# """
# projectName = request.args.get('projectName', '')
# return render_template('response.html', projectName=projectName)
#
#
#@app.route('/configure', methods=['POST', 'GET'])
#def configure():
# """
# allows to see and change the configuration.
# GET : show the setup
# POST : changes the setup (FUTURE)
# OTHER: show front page
# """
# if request.method == "GET":
# resTemp = render_template('setup_form.html')
# return resTemp
#
# elif request.method == "POST":
# resTemp = render_template('setup_form.html')
# return resTemp
#
# else:
# print "INVALID REQUEST", request
#
# return render_template('display_normal.html')
#
#
#@app.route('/download', methods=['GET'])
#def download():
# """
# Dowload data. either JSON or Image.
# checks if all information have been sent and if the information is present returning 404 otherwise.
# """
# projectName = request.args.get('projectName' , None)
# projectStatus = request.args.get('projectStatus', None)
# projectSample = request.args.get('projectSample', None)
# sequenceTech = request.args.get('sequenceTech' , None)
# libraryName = request.args.get('libraryName' , None)
# fileName = request.args.get('fileName' , None)
# pluginName = request.args.get('pluginName' , None)
# pluginKey = request.args.get('pluginKey' , None)
# imageName = request.args.get('imageName' , None)
# srctype = request.args.get('srctype' , None)
#
# for var in [projectName, projectStatus, projectSample, sequenceTech, libraryName, fileName, pluginName, pluginKey]:
# """
# returns 404 if any variable was not defined
# """
# if var is None:
# print "none var"
# abort(404)
#
# try:
# """
# tries to get the result. if not found in the database, return 404
# """
# data = g.structure[projectName][projectStatus][projectSample][sequenceTech][libraryName][fileName]
# except:
# print "no data"
# abort(404)
#
# #print request.headers
#
# plugins = data.getPlugins()
# if pluginName in plugins:
# pluginData = data.getPlugin(pluginName)
# if pluginKey in pluginData:
# pluginImages = pluginData[pluginKey]
# pluginImage = None
# pluginData = None
#
# if isinstance(pluginImages, dict):
# if imageName is not None:
# if imageName in pluginImages:
# if srctype is None or srctype == 'img':
# """
# if image has been requested, returns it using using the correct plugin
# """
# if 'img' in pluginImages[imageName]:
# print "valid image key %s" % imageName
# pluginImage = pluginImages[imageName]['img']
# else:
# print "no such image key %s" % imageName
# abort(404)
#
# elif srctype is not None and srctype == 'json':
# """
# if json has been requested, returns it using using the correct plugin
# """
# if 'data' in pluginImages[imageName]:
# print "valid image key %s" % imageName
# pluginData = pluginImages[imageName]['data']
# else:
# print "no such image key %s" % imageName
# abort(404)
#
# else:
# print "no such data key %s to src type %s" % (imageName, srctype)
# abort(404)
#
# else:
# print "no such image %s" % imageName
# abort(404)
#
# elif srctype is not None and srctype == 'json':
# print 'dict w/ image name'
# pluginData = pluginImages
#
# else:
# print "no such data key %s" % imageName
# abort(404)
#
# else:
# pluginImage = pluginImages
# print "not list"
#
#
# if pluginImage is None and pluginData is None:
# print "plugin image is none"
# abort(404)
#
# """
# creates an etag consisting of the fields describing the file and the creation time of the database.
# with this etag, the browser can cache the images and json files so that the same data is not sent twice.
# user has to use SHIFT+F5 to bypass the cache
# """
# etag = "".join([str(x) for x in [dbMtime, projectName, projectStatus, projectSample, sequenceTech, libraryName, fileName, pluginName, pluginKey, imageName]])
# if pluginImage is not None:
# """
# if image, decode
# returns the decoded PNG image.
# define the content-length, etag, last-modified and permission to cache for a year.
# """
# try:
# pluginImageData = base64.b64decode(pluginImage)
# except:
# print "plugin image error converting"
# abort(404)
#
# #response = make_response()
# #reponse.headers['Cache-Control'] = 'no-cache'
# #reponse.headers['Content-Type'] = 'image/png'
# #reponse.headers['Content-Length'] = len(pluginImageData)
#
# #print "LAST MODIFIED", dbMtime
# #print "ETAG", etag.replace("/", "").replace("\\", "").replace("_", "").replace(".", "").replace("-", "").replace(" ", "").replace(":", "")
# #print pluginImageData
# return Response(pluginImageData,
# mimetype="image/png",
# headers={
# "Cache-Control" : "public, max-age=36000",
# "Content-Length": len(pluginImageData),
# "ETag" : etag,
# "Last-Modified" : dbMtime
# })
#
# elif pluginData is not None:
# """
#        if data was requested, send it to the correct graph mapper using the plugin name as key.
#        returns the modified JSON data.
#        sets the content-length, ETag, last-modified and permission to cache for ten hours.
# """
# qry = (pluginName, pluginKey, imageName)
# if qry in graph_mapper:
# func, func_name, func_nfo = graph_mapper[ qry ]
# print "plugin",func_nfo
# func_res = func(pluginData, func_name, func_nfo)
# return Response(func_res,
# mimetype="application/json",
# headers={
# "Cache-Control" : "public, max-age=36000",
# "Content-Length": len(func_res),
# "ETag" : etag,
# "Last-Modified" : dbMtime
# })
#
# else:
# print "no data for combination"
# abort(404)
#
# abort(404)
#
#
#@app.route('/fullscreen', methods=['GET'])
#@templated('fullscreen.html')
#def fullscreen():
# """
#    show images and graphs in full screen, replacing /fullscreen with /download in the
#    download link so that jQuery can request the correct image
# """
# srcType = request.args['srctype']
#
# dlink = url_for('download') + request.url[len(request.base_url):]
# #print dlink
#
# return { 'downloadLnk':dlink, 'srcType': srcType }
#
#
#@app.route('/favicon.ico')
#def favicon():
# """
# sends favicon directly to user
# """
# return send_from_directory(os.path.join(app.root_path, 'static'), 'favicon.ico', mimetype="image/vnd.microsoft.icon")
#
#
#@app.context_processor
#def utility_processor():
# """
# set of "macros" that can be directly used in the templates
# """
#
# """
#    unit/format converter for each column
# """
# #TODO: move up
# converter = {
# 'info': {
# '_plugin' : 'Info',
# 'size' : ['File Size' , sizeToGb , summing ],
# 'mtime' : ['C. Time' , convTime , None ]
# },
# 'quals': {
# '_plugin' : 'Quality',
# 'seqLenSum' : [ 'Total Size' , sizeTo , summing ],
# 'Q30BP' : [ 'Bp (Q>=30)' , sizeTo , summing ],
# 'COV' : [ 'Coverage' , sizeTo , summing ],
# 'adaptamerSum%' : [ '% w/ Adaptamer' , perc , average ],
# 'seqAvgLen' : [ 'Average Length' , sizeTo , average ],
# 'flx%' : [ '% FLX' , perc , average ],
# 'Ns%' : [ '% Ns' , perc , average ],
# 'xlr%' : [ '% XLR' , perc , average ],
# 'sumRealFileSize' : [ 'Decomp. File Size', sizeToGb , summing ],
# 'Q30COV' : [ 'Cov. (Q>=30)' , roundToTwo , summing ],
# 'Q30' : [ '% Q>=30' , roundToTwo , average ],
# 'numSeqs' : [ '# Seq.' , sizeTo , summing ],
# 'formatType' : [ 'Format' , None , None ]
# },
# 'fastqc': {
# '_plugin' : 'FastQC',
# 'fastqcGraphs' : ['FastQC Graphs' , parseGraph , returnE ],
# },
# 'contamination': {
# '_plugin' : 'Contamination',
# #'contaminationGraph': ['Graph' , parseGraph , None ]
# 'contaminationData' : ['Graph' , parseGraph , None ]
# }
# }
#
# statistics = stat(converter)
# """
#    initializes the statistics class to calculate averages and sums by column
# """
#
# def cleanName(filename):
# """
#        clean raw file names
# """
# #print "FILE NAME",filename
# filename = os.path.basename(filename)
#
# for replaceable in REPLACEABLES:
# filename = filename.replace(replaceable, REPLACEABLES[replaceable])
#
# return filename
#
# def checkPlugin(pluginName):
# """
# plugins to be skipped altogether
# """
# return pluginName not in SKIPPLUGINS
#
# def checkPluginKey(pluginName, pluginKey):
# """
# keys to be skipped
# """
# if pluginName in SKIPKEYS:
# if pluginKey in SKIPKEYS[pluginName]:
# return False
#
# return True
#
# def converterPlugin(pluginName):
# """
#        checks whether the plugin name should be converted or not
# """
# if pluginName in converter:
# if '_plugin' in converter[pluginName]:
# return converter[pluginName]['_plugin']
#
# return pluginName
#
# def converterKey(pluginName, key):
# """
#        checks whether the key should be converted or not
# """
# if pluginName in converter:
# if key in converter[pluginName]:
# conv = converter[pluginName][key][0]
# if conv is not None:
# return conv
#
# return key
#
# def converterValue(projectName, projectStatus, projectSample, sequenceTech, libraryName, fileName, pluginName, pluginKey, pluginValue):
# """
#        checks whether the value should be converted; if so, returns the converted value
# """
# if pluginValue is None:
# return ""
#
# if pluginValue == "":
# return ""
#
# if pluginName in converter:
# if pluginKey in converter[pluginName]:
# conv = converter[pluginName][pluginKey][1]
# if conv is not None:
# res = conv(projectName, projectStatus, projectSample, sequenceTech, libraryName, fileName, pluginName, pluginKey, pluginValue)
# if res is None:
# return ""
# return res
#
# return pluginValue
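#    # hedged illustration (added; not in the original file): with the
#    # converter table above, converterKey('quals', 'Q30') returns '% Q>=30',
#    # while an unmapped key such as converterKey('info', 'owner') falls
#    # through and is returned unchanged.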
#
#
#
#
# def parseSetup(data):
# """
# clean setup and convert it to html
# """
# dataf = {}
#
# for key in data:
# if key in SKIPSETUP: continue
# dataf[key] = data[key]
#
# return printRaw(dataf)
# #return syntaxHighlight(dataf)
#
# def syntaxHighlight(data):
# """
# highlight JSON for pretty viewing in HTML
# """
# def json2html(match):
# """
#            checks the data type and adds a SPAN around the value with a descriptive class whose color is defined by CSS
# """
# #print "MATCH",match
# K = match.group(1)
# K = K.strip()
# #print " K '" + K + "'",
#
# cls = 'jsonnumber'
#
# if K[0] == '"':
# if K[-1] == ":":
# cls = 'jsonkey'
# else:
# cls = 'jsonstring'
#
# elif K in ('true', 'false'):
# cls = 'jsonboolean'
#
# elif K == 'null':
# cls = 'jsonnull'
#
# #print " CLS",cls
# return '<span class="%s">%s</span>' % (cls, K)
#
# json = jsonpickle.encode( data )
#        json = json.replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;')
# rep = r'("(\\u[a-zA-Z0-9]{4}|\\[^u]|[^\\"])*"(\s*:)?|(?:\b|\s)(true|false|null)(?:\b|\s|,)|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?)'
# repc = re.compile(rep)
# jsonRes = repc.sub(json2html, json)
# #print json
# #print jsonRes
# return jsonRes
#
# def printRaw(data):
# """
#        print a data structure as nested tables
# """
# def parseEl(el, level=0, par=""):
# """
#            recursive function that checks whether the element is a DICT, LIST or PRIMITIVE and nests them accordingly.
# darkens the background when nesting and adds classes to identify them
# """
# col = hex( 255-((255/4)*level) )[2:]
#
# if isinstance(el, list):
# res = []
# #TODO: CONVERT TO MULTI SELECT
# #res.append('<td class="config fieldlst">')
#
# for elNum in range(len(el)):
# ele = el[elNum]
# res.extend( parseEl(ele, level=level+1, par=par+"|"+str(elNum) ) )
#
# #res.append('</td>')
#
# return res
#
# elif isinstance(el, dict):
# res = []
#
# if level != 0:
# res.append('<td class="config fielddict">')
#
# res.append("<table>")
# res.append("<tbody>")
#
# for key in el:
# res.append('<tr bgcolor="#%s%s%s">' % ( col, col, col ))
# res.append("<td><b>%s</b></td>" % key)
# res.extend( parseEl( el[key], level=level+1, par=par+"|"+key ) )
# res.append("</tr>")
#
# res.append("</tbody>")
# res.append("</table>")
#
# if level != 0:
# res.append('</td>')
#
# return res
#
# else:
# res = []
#
# if isinstance(el, bool ):
# res.append('<td class="config fieldbool" par="%s">%s</td>' % ( par, str(el) ))
# elif isinstance(el, str ):
# res.append('<td class="config fieldstr" par="%s">%s</td>' % ( par, str(el) ))
# elif isinstance(el, int ):
# res.append('<td class="config fieldint" par="%s">%s</td>' % ( par, str(el) ))
# elif isinstance(el, float):
# res.append('<td class="config fieldfloat" par="%s">%s</td>' % ( par, str(el) ))
# else:
# res.append('<td class="config fieldunk" par="%s">%s</td>' % ( par, str(el) ))
#
# return res
#
# res = parseEl( data )
#
#
#
# #TODO: print only the data concerning the current request
# res.append("<table>")
# res.append("<thead>")
# lcount = 0
# for line in g.projectDescription:
# if lcount == 0 and line[0][0] == "#":
# line[0] = line[0][1:]
#
# res.append("<tr>")
#
# for row in line:
# if lcount == 0:
# res.append("<th>%s</th>" % row.replace('_', " ").capitalize())
#
# else:
# res.append("<td>%s</td>" % row)
#
# res.append("</tr>")
#
# if lcount == 0:
# res.append("</thead>")
# res.append("<tbody>")
#
# lcount += 1
# res.append("</tbody>")
# res.append("</table>")
#
# return "\n".join(res)
#
#
#
#
# def length(var):
# """
# returns the length of a variable
# """
# return len(var)
#
# def stats(**kwargs):
# """
# calculates the statistics for a given set
# """
# s = statistics.add(**kwargs)
# if s is None:
# return ""
#
# return s
#
# return dict(cleanName = cleanName,
# checkPlugin = checkPlugin,
# checkPluginKey = checkPluginKey,
# converterPlugin = converterPlugin,
# converterKey = converterKey,
# converterValue = converterValue,
# parseSetup = parseSetup,
# length = length,
# stats = stats
# )
#
#
#
##DATABASE
#def init_db():
# """
#    reads the data from the disk, parses it and loads it into global variables.
#    has to be changed if running behind a WSGI server (e.g. Apache), since global
#    variables are not shared.
# """
# with app.app_context():
# print "initializing db"
#
# if not os.path.exists(setupDBFile):
# print "NO SETUP DATABASE (DB) FILE %s" % setupDBFile
# sys.exit(1)
#
# if not os.path.exists(structureFile):
# print "NO STRUCTURE FILE %s" % structureFile
# sys.exit(1)
#
# if not os.path.exists(projectDescriptionFile):
# print "NO PROJECT DESCRIPTION FILE %s" % projectDescriptionFile
# sys.exit(1)
#
# global setupDB
# global projects
# global structure
# global dbMtime
# global projectDescription
#
# dbMtime = os.stat(structureFile).st_mtime
# dbMtime = time.ctime(dbMtime)
#
# jsonpickle.set_preferred_backend('simplejson')
# jsonpickle.set_encoder_options('simplejson', sort_keys=True, indent=1)
#
# setupDB = jsonpickle.decode(open(setupDBFile, 'r').read())
# structure = jsonpickle.decode(open(structureFile, 'r').read())
#
# #folders = [
# # #root name type subs libs to be cleaned
# # [os.path.join(base ,"denovo"), "arcanum" , constants.DENOVO , None , denovoLibs , True ],
# # ]
#
# """
# reads project description CSV file
# """
# projectDescription = []
# with open(projectDescriptionFile, 'r') as pdf:
# for line in pdf:
# #if line[0] == '#': continue
# data = line.split("\t")
# projectDescription.append( data )
#
#
#
# """
# reads project
# """
# projects = {}
# for proj in setupDB['folders']:
# projname = proj[1]
# projects[ projname ] = {
# "root" : proj[0],
# "name" : projname,
# "type" : setupDB['constants']['types'][proj[2]],
# "samples": proj[3],
# "libs" : {},
# "toClean": proj[5],
# }
#
# #["illumina", [
# # [ "pairedend_170", 'PE', '170' ],
# for techd in proj[4]:
# tname = techd[0]
# projects[ projname ]["libs"][tname] = {}
#
# for libd in techd[1]:
# lname = libd[0]
# ltype = libd[1]
# lsize = libd[2]
#
# if lsize is None:
# lsize = 0
#
# projects[ projname ]["libs"][tname][lname] = {
# "type": ltype,
# "size": lsize,
# "name": lname
# }
#
#
#
# if setupDB is None:
# print "no setupDB data"
# sys.exit(1)
#
# if structure is None:
# print "no structure data"
# sys.exit(1)
#
#
#
# global projectStatuses
# global plugins
# projectStatuses = []
# plugins = {}
# stop = False
#
# """
# loads plugins and statuses variables
# """
# for projectName in structure:
# projStatuses = structure[projectName]
# projectBase = os.path.join(setup.getProjectRoot(projectName), projectName)
# if stop: break
# for projectStatus in projStatuses:
# if projectStatus not in projectStatuses: projectStatuses.append(projectStatus)
# samples = structure[projectName][projectStatus]
# if stop: break
# for projectSample in samples:
# if projectSample != None:
# technologies = samples[projectSample]
# if stop: break
# for sequenceTech in technologies:
# if sequenceTech != None:
# libs = technologies[sequenceTech]
# if stop: break
# for libraryName in libs:
# if libraryName != None:
# fileNames = libs[libraryName]
# if stop: break
# for fileName in fileNames:
# if fileName != None:
# data = fileNames[fileName]
# pairName = data.pairName
# for pluginName in data.getPlugins():
# if pluginName not in plugins:
# plugins[pluginName] = []
#
# if pluginName not in []:
# pluginData = data.getPlugin(pluginName)
# for pluginKey in pluginData:
#
# if pluginKey == 'parent':
# continue
#
# if pluginKey not in plugins[pluginName]:
# plugins[pluginName].append(pluginKey)
#
# #TODO: do i still need to get the value?
# #('fastqc', 'fastqcGraphs'), ('contamination', 'contaminationGraph')
# if (pluginName, pluginKey) not in [('info', 'ident'), ('info', 'parent'), ('quals', 'data') ]:
# val = data.getPluginResult(pluginName, pluginKey)
#
# #if pluginKey == 'fastqcGraphs':
# # for key in val:
# # plugins[pluginName].append(key)
# stop = True
# break
#
# projectStatuses.sort()
#
# print "db loaded"
#
#
#def getDb():
# """
#    if the db has not been initialized yet, do so; otherwise just return the global variables
# """
# if setupDB is None:
# init_db()
#
# return [setupDB, projects, structure, projectDescription, projectStatuses, plugins]
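#
#def exampleGetDbUsage():
#    """
#    hedged sketch (added for illustration; not in the original file):
#    request handlers consume the parsed globals through getDb()
#    """
#    setupDB, projects, structure, projectDescription, projectStatuses, plugins = getDb()
#    return sorted(projects.keys())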
|
sauloal/pycluster
|
ui.py
|
Python
|
mit
| 31,022
|
# -*- coding: utf-8 -*-
from collections import defaultdict
from concurrent.futures import ThreadPoolExecutor
from datetime import datetime
import io
import itertools
import logging
import operator
import os
import socket
from babelfish import Language, LanguageReverseError
from guessit import guessit
from six.moves.xmlrpc_client import ProtocolError
from rarfile import BadRarFile, NotRarFile, RarCannotExec, RarFile
from zipfile import BadZipfile
from ssl import SSLError
import requests
from .exceptions import ServiceUnavailable
from .extensions import provider_manager, refiner_manager
from .score import compute_score as default_compute_score
from .subtitle import SUBTITLE_EXTENSIONS, get_subtitle_path
from .utils import hash_napiprojekt, hash_opensubtitles, hash_shooter, hash_thesubdb
from .video import VIDEO_EXTENSIONS, Episode, Movie, Video
#: Supported archive extensions
ARCHIVE_EXTENSIONS = ('.rar',)
logger = logging.getLogger(__name__)
class ProviderPool(object):
"""A pool of providers with the same API as a single :class:`~subliminal.providers.Provider`.
It has a few extra features:
* Lazy loads providers when needed and supports the `with` statement to :meth:`terminate`
the providers on exit.
* Automatically discard providers on failure.
:param list providers: name of providers to use, if not all.
:param dict provider_configs: provider configuration as keyword arguments per provider name to pass when
        instantiating the :class:`~subliminal.providers.Provider`.
"""
def __init__(self, providers=None, provider_configs=None):
#: Name of providers to use
self.providers = providers or provider_manager.names()
#: Provider configuration
self.provider_configs = provider_configs or {}
#: Initialized providers
self.initialized_providers = {}
#: Discarded providers
self.discarded_providers = set()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
self.terminate()
def __getitem__(self, name):
if name not in self.providers:
raise KeyError
if name not in self.initialized_providers:
logger.info('Initializing provider %s', name)
provider = provider_manager[name].plugin(**self.provider_configs.get(name, {}))
provider.initialize()
self.initialized_providers[name] = provider
return self.initialized_providers[name]
def __delitem__(self, name):
if name not in self.initialized_providers:
raise KeyError(name)
try:
logger.info('Terminating provider %s', name)
self.initialized_providers[name].terminate()
except (requests.Timeout, socket.timeout):
logger.error('Provider %r timed out, improperly terminated', name)
except (ServiceUnavailable, ProtocolError): # OpenSubtitles raises xmlrpclib.ProtocolError when unavailable
logger.error('Provider %r unavailable, improperly terminated', name)
except requests.exceptions.HTTPError as e:
if e.response.status_code in range(500, 600):
logger.error('Provider %r unavailable, improperly terminated', name)
else:
logger.exception('Provider %r http error %r, improperly terminated', name, e.response.status_code)
except SSLError as e:
if e.args[0] == 'The read operation timed out':
logger.error('Provider %r unavailable, improperly terminated', name)
else:
logger.exception('Provider %r SSL error %r, improperly terminated', name, e.args[0])
except:
logger.exception('Provider %r terminated unexpectedly', name)
del self.initialized_providers[name]
def __iter__(self):
return iter(self.initialized_providers)
def list_subtitles_provider(self, provider, video, languages):
"""List subtitles with a single provider.
The video and languages are checked against the provider.
:param str provider: name of the provider.
:param video: video to list subtitles for.
:type video: :class:`~subliminal.video.Video`
:param languages: languages to search for.
:type languages: set of :class:`~babelfish.language.Language`
:return: found subtitles.
:rtype: list of :class:`~subliminal.subtitle.Subtitle` or None
"""
# check video validity
if not provider_manager[provider].plugin.check(video):
logger.info('Skipping provider %r: not a valid video', provider)
return []
# check supported languages
provider_languages = provider_manager[provider].plugin.languages & languages
if not provider_languages:
logger.info('Skipping provider %r: no language to search for', provider)
return []
# list subtitles
logger.info('Listing subtitles with provider %r and languages %r', provider, provider_languages)
try:
return self[provider].list_subtitles(video, provider_languages)
except (requests.Timeout, socket.timeout):
logger.error('Provider %r timed out', provider)
except (ServiceUnavailable, ProtocolError): # OpenSubtitles raises xmlrpclib.ProtocolError when unavailable
logger.error('Provider %r unavailable', provider)
except requests.exceptions.HTTPError as e:
if e.response.status_code in range(500, 600):
logger.error('Provider %r unavailable', provider)
else:
logger.exception('Provider %r http error %r', provider, e.response.status_code)
except SSLError as e:
if e.args[0] == 'The read operation timed out':
logger.error('Provider %r unavailable', provider)
else:
logger.exception('Provider %r SSL error %r', provider, e.args[0])
except:
logger.exception('Unexpected error in provider %r', provider)
def list_subtitles(self, video, languages):
"""List subtitles.
:param video: video to list subtitles for.
:type video: :class:`~subliminal.video.Video`
:param languages: languages to search for.
:type languages: set of :class:`~babelfish.language.Language`
:return: found subtitles.
:rtype: list of :class:`~subliminal.subtitle.Subtitle`
"""
subtitles = []
for name in self.providers:
# check discarded providers
if name in self.discarded_providers:
logger.debug('Skipping discarded provider %r', name)
continue
# list subtitles
provider_subtitles = self.list_subtitles_provider(name, video, languages)
if provider_subtitles is None:
logger.info('Discarding provider %s', name)
self.discarded_providers.add(name)
continue
# add the subtitles
subtitles.extend(provider_subtitles)
return subtitles
def download_subtitle(self, subtitle):
"""Download `subtitle`'s :attr:`~subliminal.subtitle.Subtitle.content`.
:param subtitle: subtitle to download.
:type subtitle: :class:`~subliminal.subtitle.Subtitle`
:return: `True` if the subtitle has been successfully downloaded, `False` otherwise.
:rtype: bool
"""
# check discarded providers
if subtitle.provider_name in self.discarded_providers:
logger.warning('Provider %r is discarded', subtitle.provider_name)
return False
logger.info('Downloading subtitle %r', subtitle)
try:
self[subtitle.provider_name].download_subtitle(subtitle)
except (requests.Timeout, socket.timeout):
logger.error('Provider %r timed out, discarding it', subtitle.provider_name)
self.discarded_providers.add(subtitle.provider_name)
return False
except (ServiceUnavailable, ProtocolError): # OpenSubtitles raises xmlrpclib.ProtocolError when unavailable
logger.error('Provider %r unavailable, discarding it', subtitle.provider_name)
self.discarded_providers.add(subtitle.provider_name)
return False
except requests.exceptions.HTTPError as e:
if e.response.status_code in range(500, 600):
logger.error('Provider %r unavailable, discarding it', subtitle.provider_name)
else:
logger.exception('Provider %r http error %r, discarding it', subtitle.provider_name,
e.response.status_code)
self.discarded_providers.add(subtitle.provider_name)
return False
except SSLError as e:
if e.args[0] == 'The read operation timed out':
logger.error('Provider %r unavailable, discarding it', subtitle.provider_name)
else:
logger.exception('Provider %r SSL error %r, discarding it', subtitle.provider_name, e.args[0])
self.discarded_providers.add(subtitle.provider_name)
return False
except (BadRarFile, BadZipfile):
logger.error('Bad archive for %r', subtitle)
return False
except:
logger.exception('Unexpected error in provider %r, discarding it', subtitle.provider_name)
self.discarded_providers.add(subtitle.provider_name)
return False
# check subtitle validity
if not subtitle.is_valid():
logger.error('Invalid subtitle')
return False
return True
def download_best_subtitles(self, subtitles, video, languages, min_score=0, hearing_impaired=False, only_one=False,
compute_score=None):
"""Download the best matching subtitles.
:param subtitles: the subtitles to use.
:type subtitles: list of :class:`~subliminal.subtitle.Subtitle`
:param video: video to download subtitles for.
:type video: :class:`~subliminal.video.Video`
:param languages: languages to download.
:type languages: set of :class:`~babelfish.language.Language`
:param int min_score: minimum score for a subtitle to be downloaded.
:param bool hearing_impaired: hearing impaired preference.
:param bool only_one: download only one subtitle, not one per language.
:param compute_score: function that takes `subtitle` and `video` as positional arguments,
`hearing_impaired` as keyword argument and returns the score.
:return: downloaded subtitles.
:rtype: list of :class:`~subliminal.subtitle.Subtitle`
"""
compute_score = compute_score or default_compute_score
# sort subtitles by score
scored_subtitles = sorted([(s, compute_score(s, video, hearing_impaired=hearing_impaired))
for s in subtitles], key=operator.itemgetter(1), reverse=True)
# download best subtitles, falling back on the next on error
downloaded_subtitles = []
for subtitle, score in scored_subtitles:
# check score
if score < min_score:
logger.info('Score %d is below min_score (%d)', score, min_score)
break
# check downloaded languages
if subtitle.language in set(s.language for s in downloaded_subtitles):
logger.debug('Skipping subtitle: %r already downloaded', subtitle.language)
continue
# download
if self.download_subtitle(subtitle):
downloaded_subtitles.append(subtitle)
# stop when all languages are downloaded
if set(s.language for s in downloaded_subtitles) == languages:
logger.debug('All languages downloaded')
break
# stop if only one subtitle is requested
if only_one:
logger.debug('Only one subtitle downloaded')
break
return downloaded_subtitles
def terminate(self):
"""Terminate all the :attr:`initialized_providers`."""
logger.debug('Terminating initialized providers')
for name in list(self.initialized_providers):
del self[name]
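# Hedged usage sketch (added for illustration; not part of this module). The
# class docstring above promises lazy provider initialization and `with`
# support, so a call site typically looks like the following, where `video` is
# a scanned Video and the provider name is only an example:
#
#     with ProviderPool(providers=['opensubtitles']) as pool:
#         found = pool.list_subtitles(video, {Language('eng')})
#         for subtitle in found:
#             pool.download_subtitle(subtitle)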
class AsyncProviderPool(ProviderPool):
"""Subclass of :class:`ProviderPool` with asynchronous support for :meth:`~ProviderPool.list_subtitles`.
:param int max_workers: maximum number of threads to use. If `None`, :attr:`max_workers` will be set
to the number of :attr:`~ProviderPool.providers`.
"""
def __init__(self, max_workers=None, *args, **kwargs):
super(AsyncProviderPool, self).__init__(*args, **kwargs)
#: Maximum number of threads to use
self.max_workers = max_workers or len(self.providers)
def list_subtitles_provider(self, provider, video, languages):
return provider, super(AsyncProviderPool, self).list_subtitles_provider(provider, video, languages)
def list_subtitles(self, video, languages):
subtitles = []
with ThreadPoolExecutor(self.max_workers) as executor:
for provider, provider_subtitles in executor.map(self.list_subtitles_provider, self.providers,
itertools.repeat(video, len(self.providers)),
itertools.repeat(languages, len(self.providers))):
# discard provider that failed
if provider_subtitles is None:
logger.info('Discarding provider %s', provider)
self.discarded_providers.add(provider)
continue
# add subtitles
subtitles.extend(provider_subtitles)
return subtitles
def check_video(video, languages=None, age=None, undefined=False):
"""Perform some checks on the `video`.
    All the checks are optional. Return `False` if any of these checks fails:
* `languages` already exist in `video`'s :attr:`~subliminal.video.Video.subtitle_languages`.
* `video` is older than `age`.
* `video` has an `undefined` language in :attr:`~subliminal.video.Video.subtitle_languages`.
:param video: video to check.
:type video: :class:`~subliminal.video.Video`
:param languages: desired languages.
:type languages: set of :class:`~babelfish.language.Language`
:param datetime.timedelta age: maximum age of the video.
:param bool undefined: fail on existing undefined language.
:return: `True` if the video passes the checks, `False` otherwise.
:rtype: bool
"""
# language test
if languages and not (languages - video.subtitle_languages):
logger.debug('All languages %r exist', languages)
return False
# age test
if age and video.age > age:
logger.debug('Video is older than %r', age)
return False
# undefined test
if undefined and Language('und') in video.subtitle_languages:
logger.debug('Undefined language found')
return False
return True
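# Illustration (added; assumed values): if video.subtitle_languages is
# {Language('eng')}, then check_video(video, languages={Language('eng')})
# returns False because no requested language is missing, while passing
# languages={Language('eng'), Language('fra')} makes the check succeed.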
def search_external_subtitles(path, directory=None):
"""Search for external subtitles from a video `path` and their associated language.
Unless `directory` is provided, search will be made in the same directory as the video file.
:param str path: path to the video.
:param str directory: directory to search for subtitles.
:return: found subtitles with their languages.
:rtype: dict
"""
# split path
dirpath, filename = os.path.split(path)
dirpath = dirpath or '.'
fileroot, fileext = os.path.splitext(filename)
# search for subtitles
subtitles = {}
for p in os.listdir(directory or dirpath):
# keep only valid subtitle filenames
if not p.startswith(fileroot) or not p.endswith(SUBTITLE_EXTENSIONS):
continue
# extract the potential language code
language = Language('und')
language_code = p[len(fileroot):-len(os.path.splitext(p)[1])].replace(fileext, '').replace('_', '-')[1:]
if language_code:
try:
language = Language.fromietf(language_code)
except (ValueError, LanguageReverseError):
logger.error('Cannot parse language code %r', language_code)
subtitles[p] = language
logger.debug('Found subtitles %r', subtitles)
return subtitles
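# Illustration of the naming convention parsed above (added; paths are
# hypothetical): for a video '/data/Movie.mkv', a sibling 'Movie.en.srt'
# yields Language('eng') from the IETF code 'en', while 'Movie.srt' carries
# no code and is recorded as the undefined language Language('und').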
def scan_video(path):
"""Scan a video from a `path`.
:param str path: existing path to the video.
:return: the scanned video.
:rtype: :class:`~subliminal.video.Video`
"""
# check for non-existing path
if not os.path.exists(path):
raise ValueError('Path does not exist')
# check video extension
if not path.endswith(VIDEO_EXTENSIONS):
raise ValueError('%r is not a valid video extension' % os.path.splitext(path)[1])
dirpath, filename = os.path.split(path)
logger.info('Scanning video %r in %r', filename, dirpath)
# guess
video = Video.fromguess(path, guessit(path))
# size and hashes
video.size = os.path.getsize(path)
if video.size > 10485760:
logger.debug('Size is %d', video.size)
video.hashes['opensubtitles'] = hash_opensubtitles(path)
video.hashes['shooter'] = hash_shooter(path)
video.hashes['thesubdb'] = hash_thesubdb(path)
video.hashes['napiprojekt'] = hash_napiprojekt(path)
logger.debug('Computed hashes %r', video.hashes)
else:
logger.warning('Size is lower than 10MB: hashes not computed')
return video
def scan_archive(path):
"""Scan an archive from a `path`.
:param str path: existing path to the archive.
:return: the scanned video.
:rtype: :class:`~subliminal.video.Video`
"""
# check for non-existing path
if not os.path.exists(path):
raise ValueError('Path does not exist')
# check video extension
if not path.endswith(ARCHIVE_EXTENSIONS):
raise ValueError('%r is not a valid archive extension' % os.path.splitext(path)[1])
dirpath, filename = os.path.split(path)
logger.info('Scanning archive %r in %r', filename, dirpath)
# rar extension
if filename.endswith('.rar'):
rar = RarFile(path)
# filter on video extensions
rar_filenames = [f for f in rar.namelist() if f.endswith(VIDEO_EXTENSIONS)]
# no video found
if not rar_filenames:
raise ValueError('No video in archive')
# more than one video found
if len(rar_filenames) > 1:
raise ValueError('More than one video in archive')
# guess
rar_filename = rar_filenames[0]
rar_filepath = os.path.join(dirpath, rar_filename)
video = Video.fromguess(rar_filepath, guessit(rar_filepath))
# size
video.size = rar.getinfo(rar_filename).file_size
else:
raise ValueError('Unsupported extension %r' % os.path.splitext(path)[1])
return video
def scan_videos(path, age=None, archives=True):
"""Scan `path` for videos and their subtitles.
See :func:`refine` to find additional information for the video.
:param str path: existing directory path to scan.
:param datetime.timedelta age: maximum age of the video or archive.
:param bool archives: scan videos in archives.
:return: the scanned videos.
:rtype: list of :class:`~subliminal.video.Video`
"""
# check for non-existing path
if not os.path.exists(path):
raise ValueError('Path does not exist')
# check for non-directory path
if not os.path.isdir(path):
raise ValueError('Path is not a directory')
# walk the path
videos = []
for dirpath, dirnames, filenames in os.walk(path):
logger.debug('Walking directory %r', dirpath)
# remove badly encoded and hidden dirnames
for dirname in list(dirnames):
if dirname.startswith('.'):
logger.debug('Skipping hidden dirname %r in %r', dirname, dirpath)
dirnames.remove(dirname)
# scan for videos
for filename in filenames:
# filter on videos and archives
if not (filename.endswith(VIDEO_EXTENSIONS) or archives and filename.endswith(ARCHIVE_EXTENSIONS)):
continue
# skip hidden files
if filename.startswith('.'):
logger.debug('Skipping hidden filename %r in %r', filename, dirpath)
continue
# reconstruct the file path
filepath = os.path.join(dirpath, filename)
# skip links
if os.path.islink(filepath):
logger.debug('Skipping link %r in %r', filename, dirpath)
continue
# skip old files
try:
file_age = datetime.utcfromtimestamp(os.path.getmtime(filepath))
except ValueError:
logger.warning('Could not get age of file %r in %r', filename, dirpath)
continue
else:
if age and datetime.utcnow() - file_age > age:
logger.debug('Skipping old file %r in %r', filename, dirpath)
continue
# scan
if filename.endswith(VIDEO_EXTENSIONS): # video
try:
video = scan_video(filepath)
except ValueError: # pragma: no cover
logger.exception('Error scanning video')
continue
elif archives and filename.endswith(ARCHIVE_EXTENSIONS): # archive
try:
video = scan_archive(filepath)
except (NotRarFile, RarCannotExec, ValueError): # pragma: no cover
logger.exception('Error scanning archive')
continue
else: # pragma: no cover
raise ValueError('Unsupported file %r' % filename)
videos.append(video)
return videos
def refine(video, episode_refiners=None, movie_refiners=None, **kwargs):
"""Refine a video using :ref:`refiners`.
.. note::
Exceptions raised in refiners are silently passed and logged.
:param video: the video to refine.
:type video: :class:`~subliminal.video.Video`
:param tuple episode_refiners: refiners to use for episodes.
:param tuple movie_refiners: refiners to use for movies.
:param \*\*kwargs: additional parameters for the :func:`~subliminal.refiners.refine` functions.
"""
refiners = ()
if isinstance(video, Episode):
refiners = episode_refiners or ('metadata', 'tvdb', 'omdb')
elif isinstance(video, Movie):
refiners = movie_refiners or ('metadata', 'omdb')
for refiner in refiners:
logger.info('Refining video with %s', refiner)
try:
refiner_manager[refiner].plugin(video, **kwargs)
except:
logger.error('Failed to refine video %r', video.name)
logger.debug('Refiner exception:', exc_info=True)
def list_subtitles(videos, languages, pool_class=ProviderPool, **kwargs):
"""List subtitles.
The `videos` must pass the `languages` check of :func:`check_video`.
:param videos: videos to list subtitles for.
:type videos: set of :class:`~subliminal.video.Video`
:param languages: languages to search for.
:type languages: set of :class:`~babelfish.language.Language`
:param pool_class: class to use as provider pool.
:type pool_class: :class:`ProviderPool`, :class:`AsyncProviderPool` or similar
:param \*\*kwargs: additional parameters for the provided `pool_class` constructor.
:return: found subtitles per video.
:rtype: dict of :class:`~subliminal.video.Video` to list of :class:`~subliminal.subtitle.Subtitle`
"""
listed_subtitles = defaultdict(list)
# check videos
checked_videos = []
for video in videos:
if not check_video(video, languages=languages):
logger.info('Skipping video %r', video)
continue
checked_videos.append(video)
# return immediately if no video passed the checks
if not checked_videos:
return listed_subtitles
# list subtitles
with pool_class(**kwargs) as pool:
for video in checked_videos:
logger.info('Listing subtitles for %r', video)
subtitles = pool.list_subtitles(video, languages - video.subtitle_languages)
listed_subtitles[video].extend(subtitles)
logger.info('Found %d subtitle(s)', len(subtitles))
return listed_subtitles
def download_subtitles(subtitles, pool_class=ProviderPool, **kwargs):
"""Download :attr:`~subliminal.subtitle.Subtitle.content` of `subtitles`.
:param subtitles: subtitles to download.
:type subtitles: list of :class:`~subliminal.subtitle.Subtitle`
:param pool_class: class to use as provider pool.
:type pool_class: :class:`ProviderPool`, :class:`AsyncProviderPool` or similar
:param \*\*kwargs: additional parameters for the provided `pool_class` constructor.
"""
with pool_class(**kwargs) as pool:
for subtitle in subtitles:
logger.info('Downloading subtitle %r', subtitle)
pool.download_subtitle(subtitle)
def download_best_subtitles(videos, languages, min_score=0, hearing_impaired=False, only_one=False, compute_score=None,
pool_class=ProviderPool, **kwargs):
"""List and download the best matching subtitles.
The `videos` must pass the `languages` and `undefined` (`only_one`) checks of :func:`check_video`.
:param videos: videos to download subtitles for.
:type videos: set of :class:`~subliminal.video.Video`
:param languages: languages to download.
:type languages: set of :class:`~babelfish.language.Language`
:param int min_score: minimum score for a subtitle to be downloaded.
:param bool hearing_impaired: hearing impaired preference.
:param bool only_one: download only one subtitle, not one per language.
:param compute_score: function that takes `subtitle` and `video` as positional arguments,
`hearing_impaired` as keyword argument and returns the score.
:param pool_class: class to use as provider pool.
:type pool_class: :class:`ProviderPool`, :class:`AsyncProviderPool` or similar
:param \*\*kwargs: additional parameters for the provided `pool_class` constructor.
:return: downloaded subtitles per video.
:rtype: dict of :class:`~subliminal.video.Video` to list of :class:`~subliminal.subtitle.Subtitle`
"""
downloaded_subtitles = defaultdict(list)
# check videos
checked_videos = []
for video in videos:
if not check_video(video, languages=languages, undefined=only_one):
logger.info('Skipping video %r', video)
continue
checked_videos.append(video)
# return immediately if no video passed the checks
if not checked_videos:
return downloaded_subtitles
# download best subtitles
with pool_class(**kwargs) as pool:
for video in checked_videos:
logger.info('Downloading best subtitles for %r', video)
subtitles = pool.download_best_subtitles(pool.list_subtitles(video, languages - video.subtitle_languages),
video, languages, min_score=min_score,
hearing_impaired=hearing_impaired, only_one=only_one,
compute_score=compute_score)
logger.info('Downloaded %d subtitle(s)', len(subtitles))
downloaded_subtitles[video].extend(subtitles)
return downloaded_subtitles
def save_subtitles(video, subtitles, single=False, directory=None, encoding=None):
"""Save subtitles on filesystem.
Subtitles are saved in the order of the list. If a subtitle with a language has already been saved, other subtitles
with the same language are silently ignored.
    The extension used is `.lang.srt` by default or `.srt` if `single` is `True`, with `lang` being the IETF code for
the :attr:`~subliminal.subtitle.Subtitle.language` of the subtitle.
:param video: video of the subtitles.
:type video: :class:`~subliminal.video.Video`
:param subtitles: subtitles to save.
:type subtitles: list of :class:`~subliminal.subtitle.Subtitle`
:param bool single: save a single subtitle, default is to save one subtitle per language.
:param str directory: path to directory where to save the subtitles, default is next to the video.
:param str encoding: encoding in which to save the subtitles, default is to keep original encoding.
:return: the saved subtitles
:rtype: list of :class:`~subliminal.subtitle.Subtitle`
"""
saved_subtitles = []
for subtitle in subtitles:
# check content
if subtitle.content is None:
logger.error('Skipping subtitle %r: no content', subtitle)
continue
# check language
if subtitle.language in set(s.language for s in saved_subtitles):
logger.debug('Skipping subtitle %r: language already saved', subtitle)
continue
# create subtitle path
subtitle_path = get_subtitle_path(video.name, None if single else subtitle.language)
if directory is not None:
subtitle_path = os.path.join(directory, os.path.split(subtitle_path)[1])
# save content as is or in the specified encoding
logger.info('Saving %r to %r', subtitle, subtitle_path)
if encoding is None:
with io.open(subtitle_path, 'wb') as f:
f.write(subtitle.content)
else:
with io.open(subtitle_path, 'w', encoding=encoding) as f:
f.write(subtitle.text)
saved_subtitles.append(subtitle)
# check single
if single:
break
return saved_subtitles
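if __name__ == '__main__':  # pragma: no cover
    # Hedged end-to-end sketch (added for illustration; not part of the
    # original module): scan a directory, refine each video, then download
    # and save the single best English subtitle per video. The path and
    # language below are assumptions.
    from datetime import timedelta
    videos = scan_videos('/data/videos', age=timedelta(weeks=2))
    for video in videos:
        refine(video)
    best = download_best_subtitles(set(videos), {Language('eng')}, only_one=True)
    for video, subs in best.items():
        save_subtitles(video, subs, single=True)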
|
fernandog/subliminal
|
subliminal/core.py
|
Python
|
mit
| 30,261
|
import pytest
from xmitgcm import file_utils
@pytest.fixture(scope="session")
def directory_with_files(tmpdir_factory):
temppath = tmpdir_factory.mktemp("xmitgcm_test_data")
temppath.join('bar.0000000001.meta').ensure(file=True)
temppath.join('baz.data').ensure(file=True)
return temppath
def test_listdir(directory_with_files):
path = str(directory_with_files)
assert sorted(file_utils.listdir(path)) == sorted(['bar.0000000001.meta', 'baz.data'])
def test_listdir_startswith(directory_with_files):
path = str(directory_with_files)
assert file_utils.listdir_startswith(path, 'bar') == ['bar.0000000001.meta']
def test_listdir_endswith(directory_with_files):
path = str(directory_with_files)
assert file_utils.listdir_endswith(path, '.data') == ['baz.data']
def test_listdir_startsandendswith(directory_with_files):
path = str(directory_with_files)
assert file_utils.listdir_startsandendswith(path, 'bar', '.meta') == ['bar.0000000001.meta']
def test_listdir_fnmatch(directory_with_files):
path = str(directory_with_files)
assert file_utils.listdir_fnmatch(path, '*.??????????.meta') == ['bar.0000000001.meta']
def test_clear_cache():
file_utils.clear_cache()
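# These fixtures and tests run under pytest, e.g.:
#   pytest xmitgcm/test/test_file_utils.py -v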
|
xgcm/xmitgcm
|
xmitgcm/test/test_file_utils.py
|
Python
|
mit
| 1,228
|
# Copyright (c) 2007, Enthought, Inc.
# License: BSD Style.
#--(Interfaces)-----------------------------------------------------------------
"""
Interfaces
==========
In Traits 3.0, the ability to define, implement and use *interfaces* has been
added to the package.
Defining Interfaces
-------------------
Interfaces are defined by subclassing from the **Interface** class, as shown
in the example below::
from traits.api import Interface
class IName ( Interface ):
def get_name ( self ):
" Returns the name of an object. "
This same code is shown in the **IName Interface** tab of the code.
Interface classes are intended mainly as documentation of the methods and
traits that the interface defines, and should not contain any actual
implementation code, although no check is performed to enforce this currently.
Implementing Interfaces
-----------------------
A class declares that it implements one or more interfaces using the
**implements** function, which has the form::
implements( interface [, interface2, ..., interfacen] )
The semantics of this function are that the class declares that it implements
each of the *interfaces* specified as an argument to **implements**.
Also, the call to **implements** must occur at class scope within the class
definition, as shown in the following example::
from traits.api import HasTraits, implements
class Person ( HasTraits ):
implements( IName )
...
Only a single call to **implements** should occur within a class definition.
Refer to the **Person Class** tab in the code for a complete example of using
**implements**.
Note that in the current version, traits does not check to ensure that the
class containing the **implements** function actually implements the interfaces
it says it does.
Using Interfaces
----------------
Being able to define and implement interfaces would be of little use without
the ability to *use* interfaces in your code. In traits, using an interface is
accomplished using the **Instance** trait, as shown in the following example::
from traits.api import HasTraits, Instance
class Apartment ( HasTraits ):
renter = Instance( IName )
Using an interface class in an **Instance** trait definition declares that the
trait only accepts values which are objects that either:
- Implement the specified interface.
- Can be adapted to an object that implements the specified interface.
Additional information on what it means to *adapt* an object to implement an
interface is presented in the next section of the tutorial.
As before, the **Instance** trait can also be used with classes that are not
interfaces, such as::
from traits.api import HasTraits, Instance
class Apartment ( HasTraits ):
renter = Instance( Person )
In this case, the value of the trait must be an object which is an instance of
the specified class or one of its subclasses.
"""
#--<Imports>--------------------------------------------------------------------
from traits.api import *
#--[IName Interface]------------------------------------------------------------
# Define the 'IName' interface:
class IName ( Interface ):
def get_name ( self ):
""" Returns the name of an object. """
#--[Person Class]---------------------------------------------------------------
class Person ( HasTraits ):
implements( IName )
first_name = Str( 'John' )
last_name = Str( 'Doe' )
# Implementation of the 'IName' interface:
def get_name ( self ):
""" Returns the name of an object. """
return ('%s %s' % ( self.first_name, self.last_name ))
#--[Apartment Class]------------------------------------------------------------
# Define a class using an object that implements the 'IName' interface:
class Apartment ( HasTraits ):
renter = Instance( IName )
#--[Example*]--------------------------------------------------------------------
# Create an object implementing the 'IName' interface:
william = Person( first_name = 'William', last_name = 'Adams' )
# Create an apartment, and assign 'renter' an object implementing 'IName':
apt = Apartment( renter = william )
# Verify that the object works correctly:
print 'Renter is:', apt.renter.get_name()
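# Expected output of the example above:
#   Renter is: William Adams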
|
burnpanck/traits
|
examples/tutorials/traits_4.0/interfaces/interfaces.py
|
Python
|
bsd-3-clause
| 4,275
|
# -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2016 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
import json
import time
from django.db.models import Q
from django.conf.urls import url
from django.contrib.auth import get_user_model
from django.core.urlresolvers import reverse
from django.contrib.contenttypes.models import ContentType
from django.conf import settings
from django.db.models import Count
from django.http.response import HttpResponse
from django.template.response import TemplateResponse
from django.utils.translation import get_language
from avatar.templatetags.avatar_tags import avatar_url
from tastypie import http
from tastypie.exceptions import BadRequest
from geonode import qgis_server, geoserver
from geonode.api.paginator import CrossSiteXHRPaginator
from geonode.api.authorization import GeoNodeStyleAuthorization
from geonode.qgis_server.models import QGISServerStyle
from guardian.shortcuts import get_objects_for_user
from tastypie.bundle import Bundle
from geonode.base.models import ResourceBase
from geonode.base.models import TopicCategory
from geonode.base.models import Region
from geonode.base.models import HierarchicalKeyword
from geonode.base.models import ThesaurusKeywordLabel
from geonode.layers.models import Layer, Style
from geonode.maps.models import Map
from geonode.documents.models import Document
from geonode.groups.models import GroupProfile, GroupCategory
from django.core.serializers.json import DjangoJSONEncoder
from tastypie.serializers import Serializer
from tastypie import fields
from tastypie.resources import ModelResource
from tastypie.constants import ALL, ALL_WITH_RELATIONS
from tastypie.utils import trailing_slash
from geonode.utils import check_ogc_backend
from geonode.security.utils import get_visible_resources
FILTER_TYPES = {
'layer': Layer,
'map': Map,
'document': Document
}
class CountJSONSerializer(Serializer):
"""Custom serializer to post process the api and add counts"""
def get_resources_counts(self, options):
if settings.SKIP_PERMS_FILTER:
resources = ResourceBase.objects.all()
else:
resources = get_objects_for_user(
options['user'],
'base.view_resourcebase'
)
resources = get_visible_resources(
resources,
options['user'],
admin_approval_required=settings.ADMIN_MODERATE_UPLOADS,
unpublished_not_visible=settings.RESOURCE_PUBLISHING,
private_groups_not_visibile=settings.GROUP_PRIVATE_RESOURCES)
if resources and resources.count() > 0:
if options['title_filter']:
resources = resources.filter(title__icontains=options['title_filter'])
if options['type_filter']:
resources = resources.instance_of(options['type_filter'])
counts = list(resources.values(options['count_type']).annotate(count=Count(options['count_type'])))
return dict([(c[options['count_type']], c['count']) for c in counts])
def to_json(self, data, options=None):
options = options or {}
data = self.to_simple(data, options)
counts = self.get_resources_counts(options)
if 'objects' in data:
for item in data['objects']:
item['count'] = counts.get(item['id'], 0)
# Add in the current time.
data['requested_time'] = time.time()
return json.dumps(data, cls=DjangoJSONEncoder, sort_keys=True)
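# Payload sketch produced by to_json above (illustrative, made-up values):
# each object gains a 'count' and the envelope a 'requested_time', e.g.
#   {"objects": [{"id": 7, "count": 3, ...}], "requested_time": 1514764800.0}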
class TypeFilteredResource(ModelResource):
""" Common resource used to apply faceting to categories, keywords, and
regions based on the type passed as query parameter in the form
type:layer/map/document"""
count = fields.IntegerField()
def build_filters(self, filters=None, ignore_bad_filters=False):
if filters is None:
filters = {}
self.type_filter = None
self.title_filter = None
orm_filters = super(TypeFilteredResource, self).build_filters(filters)
if 'type' in filters and filters['type'] in FILTER_TYPES.keys():
self.type_filter = FILTER_TYPES[filters['type']]
else:
self.type_filter = None
if 'title__icontains' in filters:
self.title_filter = filters['title__icontains']
return orm_filters
def serialize(self, request, data, format, options=None):
if options is None:
options = {}
options['title_filter'] = getattr(self, 'title_filter', None)
options['type_filter'] = getattr(self, 'type_filter', None)
options['user'] = request.user
return super(TypeFilteredResource, self).serialize(request, data, format, options)
class TagResource(TypeFilteredResource):
"""Tags api"""
def serialize(self, request, data, format, options=None):
if options is None:
options = {}
options['count_type'] = 'keywords'
return super(TagResource, self).serialize(request, data, format, options)
class Meta:
queryset = HierarchicalKeyword.objects.all().order_by('name')
resource_name = 'keywords'
allowed_methods = ['get']
filtering = {
'slug': ALL,
}
serializer = CountJSONSerializer()
class ThesaurusKeywordResource(TypeFilteredResource):
"""ThesaurusKeyword api"""
thesaurus_identifier = fields.CharField(null=False)
label_id = fields.CharField(null=False)
    def build_filters(self, filters=None, ignore_bad_filters=False):
        """adds filtering by current language"""
        if filters is None:
            filters = {}
        id = filters.pop('id', None)
orm_filters = super(ThesaurusKeywordResource, self).build_filters(filters)
if id is not None:
orm_filters['keyword__id'] = id
orm_filters['lang'] = filters['lang'] if 'lang' in filters else get_language()
if 'thesaurus' in filters:
orm_filters['keyword__thesaurus__identifier'] = filters['thesaurus']
return orm_filters
    def serialize(self, request, data, format, options=None):
        if options is None:
            options = {}
        options['count_type'] = 'tkeywords__id'
return super(ThesaurusKeywordResource, self).serialize(request, data, format, options)
def dehydrate_id(self, bundle):
return bundle.obj.keyword.id
def dehydrate_label_id(self, bundle):
return bundle.obj.id
def dehydrate_thesaurus_identifier(self, bundle):
return bundle.obj.keyword.thesaurus.identifier
class Meta:
queryset = ThesaurusKeywordLabel.objects \
.all() \
.order_by('label') \
.select_related('keyword') \
.select_related('keyword__thesaurus')
resource_name = 'thesaurus/keywords'
allowed_methods = ['get']
filtering = {
'id': ALL,
'label': ALL,
'lang': ALL,
'thesaurus': ALL,
}
serializer = CountJSONSerializer()
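# Example query against this resource (illustrative; the URL prefix depends
# on where the API is mounted):
#   GET .../thesaurus/keywords/?thesaurus=some-thesaurus&lang=en
# returns the matching ThesaurusKeywordLabel entries with per-keyword counts.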
class RegionResource(TypeFilteredResource):
"""Regions api"""
def serialize(self, request, data, format, options=None):
if options is None:
options = {}
options['count_type'] = 'regions'
return super(RegionResource, self).serialize(request, data, format, options)
class Meta:
queryset = Region.objects.all().order_by('name')
resource_name = 'regions'
allowed_methods = ['get']
filtering = {
'name': ALL,
'code': ALL,
}
if settings.API_INCLUDE_REGIONS_COUNT:
serializer = CountJSONSerializer()
class TopicCategoryResource(TypeFilteredResource):
"""Category api"""
layers_count = fields.IntegerField(default=0)
def dehydrate_layers_count(self, bundle):
request = bundle.request
obj_with_perms = get_objects_for_user(request.user,
'base.view_resourcebase').instance_of(Layer)
filter_set = bundle.obj.resourcebase_set.filter(id__in=obj_with_perms.values('id'))
if not settings.SKIP_PERMS_FILTER:
filter_set = get_visible_resources(
filter_set,
request.user if request else None,
admin_approval_required=settings.ADMIN_MODERATE_UPLOADS,
unpublished_not_visible=settings.RESOURCE_PUBLISHING,
private_groups_not_visibile=settings.GROUP_PRIVATE_RESOURCES)
return filter_set.distinct().count()
def serialize(self, request, data, format, options=None):
if options is None:
options = {}
options['count_type'] = 'category'
return super(TopicCategoryResource, self).serialize(request, data, format, options)
class Meta:
queryset = TopicCategory.objects.all()
resource_name = 'categories'
allowed_methods = ['get']
filtering = {
'identifier': ALL,
}
serializer = CountJSONSerializer()
class GroupCategoryResource(TypeFilteredResource):
detail_url = fields.CharField()
member_count = fields.IntegerField()
class Meta:
queryset = GroupCategory.objects.all()
allowed_methods = ['get']
include_resource_uri = False
fields = ['name', 'slug']
filtering = {'slug': ALL,
'name': ALL}
def dehydrate_detail_url(self, bundle):
return bundle.obj.get_absolute_url()
def dehydrate_member_count(self, bundle):
return bundle.obj.groups.all().count()
class GroupResource(TypeFilteredResource):
"""Groups api"""
detail_url = fields.CharField()
member_count = fields.IntegerField()
manager_count = fields.IntegerField()
categories = fields.ToManyField(GroupCategoryResource, 'categories', full=True)
def build_filters(self, filters=None, ignore_bad_filters=False):
"""adds filtering by group functionality"""
if filters is None:
filters = {}
orm_filters = super(GroupResource, self).build_filters(filters)
if 'group' in filters:
orm_filters['group'] = filters['group']
if 'name__icontains' in filters:
orm_filters['title__icontains'] = filters['name__icontains']
orm_filters['title_en__icontains'] = filters['name__icontains']
return orm_filters
def apply_filters(self, request, applicable_filters):
"""filter by group if applicable by group functionality"""
group = applicable_filters.pop('group', None)
name = applicable_filters.pop('name__icontains', None)
semi_filtered = super(
GroupResource,
self).apply_filters(
request,
applicable_filters)
if group is not None:
semi_filtered = semi_filtered.filter(
groupmember__group__slug=group)
if name is not None:
semi_filtered = semi_filtered.filter(
Q(title__icontains=name) | Q(title_en__icontains=name))
return semi_filtered
def dehydrate_member_count(self, bundle):
return bundle.obj.member_queryset().count()
def dehydrate_manager_count(self, bundle):
return bundle.obj.get_managers().count()
def dehydrate_detail_url(self, bundle):
return reverse('group_detail', args=[bundle.obj.slug])
class Meta:
queryset = GroupProfile.objects.all()
resource_name = 'groups'
allowed_methods = ['get']
filtering = {
'title': ALL,
'categories': ALL_WITH_RELATIONS,
}
ordering = ['title', 'last_modified']
class ProfileResource(TypeFilteredResource):
"""Profile api"""
avatar_100 = fields.CharField(null=True)
profile_detail_url = fields.CharField()
email = fields.CharField(default='')
layers_count = fields.IntegerField(default=0)
maps_count = fields.IntegerField(default=0)
documents_count = fields.IntegerField(default=0)
current_user = fields.BooleanField(default=False)
activity_stream_url = fields.CharField(null=True)
def build_filters(self, filters=None, ignore_bad_filters=False):
"""adds filtering by group functionality"""
if filters is None:
filters = {}
orm_filters = super(ProfileResource, self).build_filters(filters)
if 'group' in filters:
orm_filters['group'] = filters['group']
if 'name__icontains' in filters:
orm_filters['username__icontains'] = filters['name__icontains']
return orm_filters
def apply_filters(self, request, applicable_filters):
"""filter by group if applicable by group functionality"""
group = applicable_filters.pop('group', None)
name = applicable_filters.pop('name__icontains', None)
semi_filtered = super(
ProfileResource,
self).apply_filters(
request,
applicable_filters)
if group is not None:
semi_filtered = semi_filtered.filter(
groupmember__group__slug=group)
if name is not None:
semi_filtered = semi_filtered.filter(
profile__first_name__icontains=name)
return semi_filtered
def dehydrate_email(self, bundle):
email = ''
if bundle.request.user.is_authenticated():
email = bundle.obj.email
return email
def dehydrate_layers_count(self, bundle):
obj_with_perms = get_objects_for_user(bundle.request.user,
'base.view_resourcebase').instance_of(Layer)
return bundle.obj.resourcebase_set.filter(id__in=obj_with_perms.values('id')).distinct().count()
def dehydrate_maps_count(self, bundle):
obj_with_perms = get_objects_for_user(bundle.request.user,
'base.view_resourcebase').instance_of(Map)
return bundle.obj.resourcebase_set.filter(id__in=obj_with_perms.values('id')).distinct().count()
def dehydrate_documents_count(self, bundle):
obj_with_perms = get_objects_for_user(bundle.request.user,
'base.view_resourcebase').instance_of(Document)
return bundle.obj.resourcebase_set.filter(id__in=obj_with_perms.values('id')).distinct().count()
def dehydrate_avatar_100(self, bundle):
return avatar_url(bundle.obj, 240)
def dehydrate_profile_detail_url(self, bundle):
return bundle.obj.get_absolute_url()
def dehydrate_current_user(self, bundle):
return bundle.request.user.username == bundle.obj.username
def dehydrate_activity_stream_url(self, bundle):
return reverse(
'actstream_actor',
kwargs={
'content_type_id': ContentType.objects.get_for_model(
bundle.obj).pk,
'object_id': bundle.obj.pk})
def prepend_urls(self):
if settings.HAYSTACK_SEARCH:
return [
url(r"^(?P<resource_name>%s)/search%s$" % (
self._meta.resource_name, trailing_slash()
),
self.wrap_view('get_search'), name="api_get_search"),
]
else:
return []
def serialize(self, request, data, format, options=None):
if options is None:
options = {}
options['count_type'] = 'owner'
return super(ProfileResource, self).serialize(request, data, format, options)
class Meta:
queryset = get_user_model().objects.exclude(Q(username='AnonymousUser') | Q(is_active=False))
resource_name = 'profiles'
allowed_methods = ['get']
ordering = ['username', 'date_joined']
excludes = ['is_staff', 'password', 'is_superuser',
'is_active', 'last_login']
filtering = {
'username': ALL,
}
serializer = CountJSONSerializer()
class OwnersResource(TypeFilteredResource):
"""Owners api, lighter and faster version of the profiles api"""
full_name = fields.CharField(null=True)
def dehydrate_full_name(self, bundle):
return bundle.obj.get_full_name() or bundle.obj.username
def serialize(self, request, data, format, options=None):
if options is None:
options = {}
options['count_type'] = 'owner'
return super(OwnersResource, self).serialize(request, data, format, options)
class Meta:
queryset = get_user_model().objects.exclude(username='AnonymousUser')
resource_name = 'owners'
allowed_methods = ['get']
ordering = ['username', 'date_joined']
excludes = ['is_staff', 'password', 'is_superuser',
'is_active', 'last_login']
filtering = {
'username': ALL,
}
serializer = CountJSONSerializer()
class QGISStyleResource(ModelResource):
"""Styles API for QGIS Server backend."""
body = fields.CharField(attribute='body', use_in='detail')
name = fields.CharField(attribute='name')
title = fields.CharField(attribute='title')
layer = fields.ForeignKey(
'geonode.api.resourcebase_api.LayerResource',
attribute='layer',
null=True)
style_url = fields.CharField(attribute='style_url')
type = fields.CharField(attribute='type')
class Meta:
paginator_class = CrossSiteXHRPaginator
queryset = QGISServerStyle.objects.all()
resource_name = 'styles'
detail_uri_name = 'id'
allowed_methods = ['get', 'post', 'delete']
authorization = GeoNodeStyleAuthorization()
filtering = {
'id': ALL,
'title': ALL,
'name': ALL,
'layer': ALL_WITH_RELATIONS
}
def populate_object(self, style):
"""Populate results with necessary fields
:param style: Style objects
:type style: QGISServerStyle
:return:
"""
try:
qgis_layer = style.layer_styles.first()
""":type: geonode.qgis_server.QGISServerLayer"""
style.layer = qgis_layer.layer
style.type = 'qml'
        except AttributeError:  # style has no associated layer (layer_styles is empty)
pass
return style
def build_filters(self, filters=None, **kwargs):
"""Apply custom filters for layer."""
filters = super(QGISStyleResource, self).build_filters(
filters, **kwargs)
# Convert layer__ filters into layer_styles__layer__
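        # e.g. 'layer__name__icontains' becomes 'layer_styles__layer__name__icontains'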
updated_filters = {}
for key, value in filters.iteritems():
key = key.replace('layer__', 'layer_styles__layer__')
updated_filters[key] = value
return updated_filters
def build_bundle(self, obj=None, data=None, request=None, **kwargs):
"""Override build_bundle method to add additional info."""
if obj is None and self._meta.object_class:
obj = self._meta.object_class()
elif obj:
obj = self.populate_object(obj)
return Bundle(
obj=obj,
data=data,
request=request,
**kwargs)
def post_list(self, request, **kwargs):
"""Attempt to redirect to QGIS Server Style management.
A post method should have the following field:
name: Slug name of style
title: Title of style
style: the style file uploaded
Also, should have kwargs:
layername or layer__name: The layer name associated with the style
or
layer__id: The layer id associated with the style
"""
from geonode.qgis_server.views import qml_style
# Extract layer name information
POST = request.POST
FILES = request.FILES
layername = POST.get('layername') or POST.get('layer__name')
if not layername:
layer_id = POST.get('layer__id')
layer = Layer.objects.get(id=layer_id)
layername = layer.name
# move style file
FILES['qml'] = FILES['style']
response = qml_style(request, layername)
if isinstance(response, TemplateResponse):
if response.status_code == 201:
obj = QGISServerStyle.objects.get(
layer_styles__layer__name=layername,
name=POST['name'])
updated_bundle = self.build_bundle(obj=obj, request=request)
location = self.get_resource_uri(updated_bundle)
if not self._meta.always_return_data:
return http.HttpCreated(location=location)
else:
updated_bundle = self.full_dehydrate(updated_bundle)
updated_bundle = self.alter_detail_data_to_serialize(
request, updated_bundle)
return self.create_response(
request, updated_bundle,
response_class=http.HttpCreated,
location=location)
else:
context = response.context_data
# Check form valid
style_upload_form = context['style_upload_form']
if not style_upload_form.is_valid():
raise BadRequest(style_upload_form.errors.as_text())
alert_message = context['alert_message']
raise BadRequest(alert_message)
elif isinstance(response, HttpResponse):
response_class = None
if response.status_code == 403:
response_class = http.HttpForbidden
return self.error_response(
request, response.content,
response_class=response_class)
def delete_detail(self, request, **kwargs):
"""Attempt to redirect to QGIS Server Style management."""
from geonode.qgis_server.views import qml_style
style_id = kwargs.get('id')
qgis_style = QGISServerStyle.objects.get(id=style_id)
layername = qgis_style.layer_styles.first().layer.name
response = qml_style(request, layername, style_name=qgis_style.name)
if isinstance(response, TemplateResponse):
if response.status_code == 200:
# style deleted
return http.HttpNoContent()
else:
context = response.context_data
# Check form valid
style_upload_form = context['style_upload_form']
if not style_upload_form.is_valid():
raise BadRequest(style_upload_form.errors.as_text())
alert_message = context['alert_message']
raise BadRequest(alert_message)
elif isinstance(response, HttpResponse):
response_class = None
if response.status_code == 403:
response_class = http.HttpForbidden
return self.error_response(
request, response.content,
response_class=response_class)
def delete_list(self, request, **kwargs):
"""Do not allow delete list"""
return http.HttpForbidden()
class GeoserverStyleResource(ModelResource):
"""Styles API for Geoserver backend."""
body = fields.CharField(
attribute='sld_body',
use_in='detail')
name = fields.CharField(attribute='name')
title = fields.CharField(attribute='sld_title')
    # layer_default_style is polymorphic, so it is exposed as a
    # many-to-many relation
layer = fields.ManyToManyField(
'geonode.api.resourcebase_api.LayerResource',
attribute='layer_default_style',
null=True)
version = fields.CharField(
attribute='sld_version',
null=True,
blank=True)
style_url = fields.CharField(attribute='sld_url')
workspace = fields.CharField(attribute='workspace', null=True)
type = fields.CharField(attribute='type')
class Meta:
paginator_class = CrossSiteXHRPaginator
queryset = Style.objects.all()
resource_name = 'styles'
detail_uri_name = 'id'
authorization = GeoNodeStyleAuthorization()
allowed_methods = ['get']
filtering = {
'id': ALL,
'title': ALL,
'name': ALL,
'layer': ALL_WITH_RELATIONS
}
def build_filters(self, filters=None, **kwargs):
"""Apply custom filters for layer."""
filters = super(GeoserverStyleResource, self).build_filters(
filters, **kwargs)
        # Convert layer__ filters into layer_default_style__
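        # e.g. 'layer__name' becomes 'layer_default_style__name'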
updated_filters = {}
for key, value in filters.iteritems():
key = key.replace('layer__', 'layer_default_style__')
updated_filters[key] = value
return updated_filters
def populate_object(self, style):
"""Populate results with necessary fields
:param style: Style objects
:type style: Style
:return:
"""
style.type = 'sld'
return style
def build_bundle(self, obj=None, data=None, request=None, **kwargs):
"""Override build_bundle method to add additional info."""
if obj is None and self._meta.object_class:
obj = self._meta.object_class()
elif obj:
obj = self.populate_object(obj)
return Bundle(
obj=obj,
data=data,
request=request,
**kwargs)
if check_ogc_backend(qgis_server.BACKEND_PACKAGE):
class StyleResource(QGISStyleResource):
"""Wrapper for Generic Style Resource"""
pass
elif check_ogc_backend(geoserver.BACKEND_PACKAGE):
class StyleResource(GeoserverStyleResource):
"""Wrapper for Generic Style Resource"""
pass
|
ppasq/geonode
|
geonode/api/api.py
|
Python
|
gpl-3.0
| 26,619
|
# -*- python -*-
# ex: set syntax=python:
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from buildbot.process import factory
from buildbot.steps.source import Mercurial
from buildbot.steps.shell import *
from custom.buildbot_ext.steps.shellAddons import *
from buildbot.steps.trigger import Trigger
from commonsteps import *
class sandbox:
BRANCHES = ["sandbox"]
####### SCHEDULERS
from buildbot.scheduler import *
# custom.buildbot_ext.scheduler import MUST happen after importing buildbot.scheduler
from custom.buildbot_ext.scheduler import *
#### SANDBOX
compile = Scheduler(name="compile-sandbox", branch=BRANCHES, treeStableTimer=30, properties={'silent':'true'},
builderNames=["windows-compile-sandbox", "windows64-compile-sandbox",
"mac-intel-10.5-compile-sandbox", "mac64-intel-compile-sandbox",
"linux-compile-sandbox",
"linux64-compile-sandbox",
"android-compile-sandbox",
"linux-arm-compile-sandbox",
"linux-mips-compile-sandbox",
])
smoke = BuilderDependent(name="smoke-sandbox",upstream=compile, callbackInterval=60, properties={'silent':'true'},
builderNames=["windows-smoke-sandbox", "windows64-smoke-sandbox",
"mac-intel-10.5-smoke-sandbox", "mac64-intel-smoke-sandbox",
"linux-smoke-sandbox",
"linux64-smoke-sandbox",
"android-smoke-sandbox",
"linux-arm-smoke-sandbox",
"linux-mips-smoke-sandbox",
],
builderDependencies=[
["windows-smoke-sandbox", "windows-compile-sandbox"],
["windows64-smoke-sandbox", "windows64-compile-sandbox"],
["mac-intel-10.5-smoke-sandbox", "mac-intel-10.5-compile-sandbox"],
["mac64-intel-smoke-sandbox", "mac64-intel-compile-sandbox"],
["linux-smoke-sandbox", "linux-compile-sandbox"],
["linux64-smoke-sandbox", "linux64-compile-sandbox"],
["android-smoke-sandbox","android-compile-sandbox"],
["linux-arm-smoke-sandbox","linux-arm-compile-sandbox"],
["linux-mips-smoke-sandbox","linux-mips-compile-sandbox"],
])
test = BuilderDependent(name="test-sandbox",upstream=smoke, callbackInterval=60, properties={'silent':'true'},
builderNames=["windows-test-sandbox", "windows64-test-sandbox",
"mac-intel-10.5-test-sandbox", "mac64-intel-test-sandbox",
"linux-test-sandbox",
"linux64-test-sandbox",
"android-test-sandbox",
"linux-arm-test-sandbox",
"linux-mips-test-sandbox",
],
builderDependencies=[
["windows-test-sandbox", "windows-smoke-sandbox"],
["windows64-test-sandbox", "windows64-smoke-sandbox"],
["mac-intel-10.5-test-sandbox", "mac-intel-10.5-smoke-sandbox"],
["mac64-intel-test-sandbox", "mac64-intel-smoke-sandbox"],
["linux-test-sandbox", "linux-smoke-sandbox"],
["linux64-test-sandbox", "linux64-smoke-sandbox"],
["android-test-sandbox", "android-smoke-sandbox"],
["linux-arm-test-sandbox", "linux-arm-smoke-sandbox"],
["linux-mips-test-sandbox", "linux-mips-smoke-sandbox"],
])
schedulers = [compile, smoke, test]
################################################################################
################################################################################
#### ####
#### SANDBOX COMPILE BUILDERS ####
#### ####
################################################################################
################################################################################
#############################################
#### builder for windows-compile-sandbox ####
#############################################
sb_windows_compile_factory = factory.BuildFactory()
sb_windows_compile_factory.addStep(sync_clean)
sb_windows_compile_factory.addStep(sync_clone_sandbox)
sb_windows_compile_factory.addStep(sync_update)
sb_windows_compile_factory.addStep(bb_slaveupdate(slave="windows"))
sb_windows_compile_factory.addStep(verify_builtinabc)
sb_windows_compile_factory.addStep(compile_generic(name="Release", shellname="avmshell", args=" ", upload="false"))
sb_windows_compile_factory.addStep(compile_generic(name="Debug", shellname="avmshell_d", args="--enable-debug", upload="false"))
sb_windows_compile_factory.addStep(compile_generic(name="ReleaseDebugger", shellname="avmshell_s", args="--enable-debugger", upload="false"))
sb_windows_compile_factory.addStep(compile_generic(name="DebugDebugger", shellname="avmshell_sd", args="--enable-debug --enable-debugger", upload="false"))
sb_windows_compile_factory.addStep(compile_buildcheck)
sb_windows_compile_factory.addStep(verify_tracers)
sb_windows_compile_factory.addStep(util_upload_asteam)
sb_windows_compile_factory.addStep(BuildShellCommand(
command=['../all/file-check.py', '../../../../../repo'],
env={'branch': WithProperties('%s','branch'), 'silent':WithProperties('%s','silent')},
description='running file-check against source...',
descriptionDone='finished file-check.',
name="FileCheck",
workdir="../repo/build/buildbot/slaves/scripts")
)
sb_windows_compile_builder = {
'name': "windows-compile-sandbox",
'slavename': "windows",
'factory': sb_windows_compile_factory,
'builddir': './sandbox-windows-compile',
}
###############################################
#### builder for windows64-compile-sandbox ####
###############################################
sb_windows_64_compile_factory = factory.BuildFactory()
sb_windows_64_compile_factory.addStep(sync_clean)
sb_windows_64_compile_factory.addStep(sync_clone_sandbox)
sb_windows_64_compile_factory.addStep(sync_update)
sb_windows_64_compile_factory.addStep(bb_slaveupdate(slave="windows64"))
sb_windows_64_compile_factory.addStep(verify_builtinabc)
sb_windows_64_compile_factory.addStep(compile_generic(name="Release", shellname="avmshell_64", args="--target=x86_64-win", upload="false", features="+AVMSYSTEM_64BIT +AVMSYSTEM_AMD64"))
sb_windows_64_compile_factory.addStep(compile_generic(name="Debug", shellname="avmshell_d_64", args="--enable-debug --target=x86_64-win", upload="false", features="+AVMSYSTEM_64BIT +AVMSYSTEM_AMD64"))
sb_windows_64_compile_factory.addStep(compile_generic(name="ReleaseDebugger", shellname="avmshell_s_64", args="--enable-debugger --target=x86_64-win", upload="false", features="'+AVMSYSTEM_64BIT +AVMSYSTEM_AMD64 +AVMFEATURE_DEBUGGER"))
sb_windows_64_compile_factory.addStep(compile_generic(name="DebugDebugger", shellname="avmshell_sd_64", args="--enable-debug --enable-debugger --target=x86_64-win", upload="false", features="+AVMSYSTEM_64BIT +AVMSYSTEM_AMD64 +AVMFEATURE_DEBUGGER"))
sb_windows_64_compile_factory.addStep(compile_buildcheck)
sb_windows_64_compile_factory.addStep(verify_tracers)
sb_windows_64_compile_factory.addStep(util_upload_asteam)
sb_windows_64_compile_builder = {
'name': "windows64-compile-sandbox",
'slavename': "windows64",
'factory': sb_windows_64_compile_factory,
'builddir': './sandbox-windows64-compile',
}
####################################################
#### builder for mac-intel-10_5-compile-sandbox ####
####################################################
sb_mac_intel_105_compile_factory = factory.BuildFactory()
sb_mac_intel_105_compile_factory.addStep(sync_clean)
sb_mac_intel_105_compile_factory.addStep(sync_clone_sandbox)
sb_mac_intel_105_compile_factory.addStep(sync_update)
sb_mac_intel_105_compile_factory.addStep(bb_slaveupdate(slave="mac-intel-10_5"))
sb_mac_intel_105_compile_factory.addStep(verify_builtinabc)
sb_mac_intel_105_compile_factory.addStep(compile_generic(name="Release", shellname="avmshell", args="--mac-sdk=105 --target=i686-darwin", upload="false", features="+AVMSYSTEM_32BIT +AVMSYSTEM_IA32"))
sb_mac_intel_105_compile_factory.addStep(compile_generic(name="Debug", shellname="avmshell_d", args="--enable-debug --mac-sdk=105 --target=i686-darwin", upload="false", features="+AVMSYSTEM_32BIT +AVMSYSTEM_IA32"))
sb_mac_intel_105_compile_factory.addStep(compile_generic(name="ReleaseDebugger", shellname="avmshell_s", args="--enable-debugger --mac-sdk=105 --target=i686-darwin", upload="false", features="+AVMSYSTEM_32BIT +AVMSYSTEM_IA32 +AVMFEATURE_DEBUGGER"))
sb_mac_intel_105_compile_factory.addStep(compile_generic(name="DebugDebugger", shellname="avmshell_sd", args="--enable-debug --enable-debugger --mac-sdk=105 --target=i686-darwin", upload="false", features="+AVMSYSTEM_32BIT +AVMSYSTEM_IA32 +AVMFEATURE_DEBUGGER"))
sb_mac_intel_105_compile_factory.addStep(compile_buildcheck)
sb_mac_intel_105_compile_factory.addStep(verify_tracers)
sb_mac_intel_105_compile_factory.addStep(util_upload_asteam)
sb_mac_intel_105_compile_builder = {
'name': "mac-intel-10.5-compile-sandbox",
'slavename': "mac-intel-10_5",
'factory': sb_mac_intel_105_compile_factory,
'builddir': './sandbox-mac-intel-10_5-compile',
}
##################################################
#### builder for mac-intel-64-compile-sandbox ####
##################################################
sb_mac_intel_64_compile_factory = factory.BuildFactory()
sb_mac_intel_64_compile_factory.addStep(sync_clean)
sb_mac_intel_64_compile_factory.addStep(sync_clone_sandbox)
sb_mac_intel_64_compile_factory.addStep(sync_update)
sb_mac_intel_64_compile_factory.addStep(bb_slaveupdate(slave="mac64-intel"))
sb_mac_intel_64_compile_factory.addStep(verify_builtinabc)
sb_mac_intel_64_compile_factory.addStep(compile_generic(name="Release", shellname="avmshell_64", args="--target=x86_64-darwin --mac-sdk=105", upload="false", features="+AVMSYSTEM_64BIT +AVMSYSTEM_AMD64"))
sb_mac_intel_64_compile_factory.addStep(compile_generic(name="Debug", shellname="avmshell_d_64", args="--enable-debug --target=x86_64-darwin --mac-sdk=105", upload="false", features="+AVMSYSTEM_64BIT +AVMSYSTEM_AMD64"))
sb_mac_intel_64_compile_factory.addStep(compile_generic(name="ReleaseDebugger", shellname="avmshell_s_64", args="--enable-debugger --target=x86_64-darwin --mac-sdk=105", upload="false", features="+AVMSYSTEM_64BIT +AVMSYSTEM_AMD64 +AVMFEATURE_DEBUGGER"))
sb_mac_intel_64_compile_factory.addStep(compile_generic(name="DebugDebugger", shellname="avmshell_sd_64", args="--enable-debug --enable-debugger --target=x86_64-darwin --mac-sdk=105", upload="false", features="+AVMSYSTEM_64BIT +AVMSYSTEM_AMD64 +AVMFEATURE_DEBUGGER"))
sb_mac_intel_64_compile_factory.addStep(compile_buildcheck)
sb_mac_intel_64_compile_factory.addStep(compile_testmedia)
sb_mac_intel_64_compile_factory.addStep(util_upload_asteam_local)
sb_mac_intel_64_compile_factory.addStep(verify_tracers)
sb_mac_intel_64_compile_builder = {
'name': "mac64-intel-compile-sandbox",
'slavename': "mac64-intel",
'factory': sb_mac_intel_64_compile_factory,
'builddir': './sandbox-mac64-intel-compile',
}
###########################################
#### builder for linux-compile-sandbox ####
###########################################
sb_linux_compile_factory = factory.BuildFactory()
sb_linux_compile_factory.addStep(sync_clean)
sb_linux_compile_factory.addStep(sync_clone_sandbox)
sb_linux_compile_factory.addStep(sync_update)
sb_linux_compile_factory.addStep(bb_slaveupdate(slave="linux"))
sb_linux_compile_factory.addStep(verify_builtinabc)
sb_linux_compile_factory.addStep(compile_generic(name="Release", shellname="avmshell", args="--target=i686-linux", upload="false", features="+AVMSYSTEM_32BIT +AVMSYSTEM_IA32"))
sb_linux_compile_factory.addStep(compile_generic(name="Debug", shellname="avmshell_d", args="--enable-debug --target=i686-linux", upload="false", features="+AVMSYSTEM_32BIT +AVMSYSTEM_IA32"))
sb_linux_compile_factory.addStep(compile_generic(name="ReleaseDebugger", shellname="avmshell_s", args="--enable-debugger --target=i686-linux", upload="false", features="+AVMSYSTEM_32BIT +AVMSYSTEM_IA32 +AVMFEATURE_DEBUGGER"))
sb_linux_compile_factory.addStep(compile_generic(name="DebugDebugger", shellname="avmshell_sd", args="--enable-debug --enable-debugger --target=i686-linux", upload="false", features="+AVMSYSTEM_32BIT +AVMSYSTEM_IA32 +AVMFEATURE_DEBUGGER"))
sb_linux_compile_factory.addStep(compile_buildcheck)
sb_linux_compile_factory.addStep(verify_tracers)
sb_linux_compile_factory.addStep(util_upload_asteam)
sb_linux_compile_builder = {
'name': "linux-compile-sandbox",
'slavename': "linux",
'factory': sb_linux_compile_factory,
'builddir': './sandbox-linux-compile',
}
#############################################
#### builder for linux64-compile-sandbox ####
#############################################
sb_linux_64_compile_factory = factory.BuildFactory()
sb_linux_64_compile_factory.addStep(sync_clean)
sb_linux_64_compile_factory.addStep(sync_clone_sandbox)
sb_linux_64_compile_factory.addStep(sync_update)
sb_linux_64_compile_factory.addStep(bb_slaveupdate(slave="linux64"))
sb_linux_64_compile_factory.addStep(verify_builtinabc)
sb_linux_64_compile_factory.addStep(compile_generic(name="Release", shellname="avmshell_64", args=" ", upload="false"))
sb_linux_64_compile_factory.addStep(compile_generic(name="Debug", shellname="avmshell_d_64", args="--enable-debug", upload="false"))
sb_linux_64_compile_factory.addStep(compile_generic(name="ReleaseDebugger", shellname="avmshell_s_64", args="--enable-debugger", upload="false"))
sb_linux_64_compile_factory.addStep(compile_generic(name="DebugDebugger", shellname="avmshell_sd_64", args="--enable-debug --enable-debugger", upload="false"))
sb_linux_64_compile_factory.addStep(verify_tracers)
sb_linux_64_compile_factory.addStep(compile_buildcheck)
sb_linux_64_compile_factory.addStep(util_upload_asteam)
sb_linux_64_compile_builder = {
'name': "linux64-compile-sandbox",
'slavename': "linux64",
'factory': sb_linux_64_compile_factory,
'builddir': './sandbox-linux64-compile',
}
###########################################
#### builder for android on mac ####
###########################################
sb_android_compile_factory = factory.BuildFactory()
sb_android_compile_factory.addStep(sync_clean)
sb_android_compile_factory.addStep(sync_clone_sandbox)
sb_android_compile_factory.addStep(sync_update)
sb_android_compile_factory.addStep(bb_slaveupdate(slave="android"))
sb_android_compile_factory.addStep(verify_builtinabc)
sb_android_compile_factory.addStep(compile_generic(name="Release", shellname="avmshell", args="--arm-arch=armv7-a --target=arm-android", upload="false"))
sb_android_compile_factory.addStep(compile_generic(name="Debug", shellname="avmshell_d", args="--enable-debug --arm-arch=armv7-a --target=arm-android", upload="false"))
sb_android_compile_factory.addStep(verify_tracers)
sb_android_compile_factory.addStep(compile_buildcheck_local)
sb_android_compile_factory.addStep(util_upload_asteam_local)
sb_android_compile_builder = {
'name': "android-compile-sandbox",
'slavename': "android",
'factory': sb_android_compile_factory,
'builddir': './sandbox-android-compile',
}
###############################
#### builder for linux-arm ####
###############################
sb_linux_arm_compile_factory = factory.BuildFactory()
sb_linux_arm_compile_factory.addStep(sync_clean)
sb_linux_arm_compile_factory.addStep(sync_clone_sandbox)
sb_linux_arm_compile_factory.addStep(sync_update)
sb_linux_arm_compile_factory.addStep(bb_slaveupdate(slave="linux-arm"))
sb_linux_arm_compile_factory.addStep(verify_builtinabc)
sb_linux_arm_compile_factory.addStep(compile_generic(name="Release", shellname="avmshell_neon_arm", args="--enable-arm-neon --arm-arch=armv7-a --target=arm-linux --enable-sys-root-dir=/usr/local/arm-linux/debian5", upload="false", features=""))
sb_linux_arm_compile_factory.addStep(compile_generic(name="Debug", shellname="avmshell_neon_arm_d", args="--enable-debug --enable-arm-neon --arm-arch=armv7-a --target=arm-linux --enable-sys-root-dir=/usr/local/arm-linux/debian5", upload="false", features=""))
sb_linux_arm_compile_factory.addStep(verify_tracers)
sb_linux_arm_compile_factory.addStep(compile_buildcheck_local)
sb_linux_arm_compile_factory.addStep(util_upload_asteam_local)
sb_linux_arm_compile_builder = {
'name': "linux-arm-compile-sandbox",
'slavename': "linux-arm",
'factory': sb_linux_arm_compile_factory,
'builddir': './sandbox-linux-arm-compile',
}
################################
#### builder for linux-mips ####
################################
sb_linux_mips_compile_factory = factory.BuildFactory()
sb_linux_mips_compile_factory.addStep(sync_clean)
sb_linux_mips_compile_factory.addStep(sync_clone_sandbox)
sb_linux_mips_compile_factory.addStep(sync_update)
sb_linux_mips_compile_factory.addStep(bb_slaveupdate(slave="linux-mips"))
sb_linux_mips_compile_factory.addStep(verify_builtinabc)
sb_linux_mips_compile_factory.addStep(compile_generic(name="Release", shellname="avmshell_mips", args="--target=mips-linux", upload="false", features=""))
sb_linux_mips_compile_factory.addStep(compile_generic(name="Debug", shellname="avmshell_mips_d", args="--enable-debug --target=mips-linux", upload="false", features=""))
sb_linux_mips_compile_factory.addStep(verify_tracers)
sb_linux_mips_compile_factory.addStep(compile_buildcheck_local)
sb_linux_mips_compile_factory.addStep(util_upload_asteam_local)
sb_linux_mips_compile_builder = {
'name': "linux-mips-compile-sandbox",
'slavename': "linux-mips",
'factory': sb_linux_mips_compile_factory,
'builddir': './sandbox-linux-mips-compile',
}
################################################################################
################################################################################
#### ####
#### SANDBOX SMOKE BUILDERS ####
#### ####
################################################################################
################################################################################
###########################################
#### builder for windows-smoke-sandbox ####
###########################################
sb_windows_smoke_factory = factory.BuildFactory()
sb_windows_smoke_factory.addStep(download_testmedia)
sb_windows_smoke_factory.addStep(test_smoke)
sb_windows_smoke_factory.addStep(util_process_clean)
sb_windows_smoke_builder = {
'name': "windows-smoke-sandbox",
'slavename': "windows",
'factory': sb_windows_smoke_factory,
'builddir': './sandbox-windows-smoke',
}
#############################################
#### builder for windows64-smoke-sandbox ####
#############################################
sb_windows_64_smoke_factory = factory.BuildFactory()
sb_windows_64_smoke_factory.addStep(download_testmedia)
sb_windows_64_smoke_factory.addStep(test_smoke)
sb_windows_64_smoke_factory.addStep(util_process_clean)
sb_windows_64_smoke_builder = {
'name': "windows64-smoke-sandbox",
'slavename': "windows64",
'factory': sb_windows_64_smoke_factory,
'builddir': './sandbox-windows64-smoke',
}
##################################################
#### builder for mac-intel-10_5-smoke-sandbox ####
##################################################
sb_mac_intel_105_smoke_factory = factory.BuildFactory()
sb_mac_intel_105_smoke_factory.addStep(download_testmedia)
sb_mac_intel_105_smoke_factory.addStep(test_smoke)
sb_mac_intel_105_smoke_factory.addStep(util_process_clean)
sb_mac_intel_105_smoke_builder = {
'name': "mac-intel-10.5-smoke-sandbox",
'slavename': "mac-intel-10_5",
'factory': sb_mac_intel_105_smoke_factory,
'builddir': './sandbox-mac-intel-10_5-smoke',
}
###############################################
#### builder for mac64-intel-smoke-sandbox ####
###############################################
sb_mac_intel_64_smoke_factory = factory.BuildFactory()
sb_mac_intel_64_smoke_factory.addStep(download_testmedia)
sb_mac_intel_64_smoke_factory.addStep(test_smoke)
sb_mac_intel_64_smoke_factory.addStep(util_process_clean)
sb_mac_intel_64_smoke_builder = {
'name': "mac64-intel-smoke-sandbox",
'slavename': "mac64-intel",
'factory': sb_mac_intel_64_smoke_factory,
'builddir': './sandbox-mac64-intel-smoke',
}
#########################################
#### builder for linux-smoke-sandbox ####
#########################################
sb_linux_smoke_factory = factory.BuildFactory()
sb_linux_smoke_factory.addStep(download_testmedia)
sb_linux_smoke_factory.addStep(test_smoke)
sb_linux_smoke_factory.addStep(util_process_clean)
sb_linux_smoke_builder = {
'name': "linux-smoke-sandbox",
'slavename': "linux",
'factory': sb_linux_smoke_factory,
'builddir': './sandbox-linux-smoke',
}
###########################################
#### builder for linux64-smoke-sandbox ####
###########################################
sb_linux_64_smoke_factory = factory.BuildFactory()
sb_linux_64_smoke_factory.addStep(download_testmedia)
sb_linux_64_smoke_factory.addStep(test_smoke)
sb_linux_64_smoke_factory.addStep(util_process_clean)
sb_linux_64_smoke_builder = {
'name': "linux64-smoke-sandbox",
'slavename': "linux64",
'factory': sb_linux_64_smoke_factory,
'builddir': './sandbox-linux64-smoke',
}
#########################################
#### builder for android-smoke ####
#########################################
sb_android_smoke_factory = factory.BuildFactory()
sb_android_smoke_factory.addStep(download_testmedia)
sb_android_smoke_factory.addStep(test_smoke_local)
sb_android_smoke_factory.addStep(util_process_clean)
sb_android_smoke_builder = {
'name': "android-smoke-sandbox",
'slavename': "android",
'factory': sb_android_smoke_factory,
    'builddir': './sandbox-android-smoke',
}
###########################################
#### builder for linux-arm-smoke    ####
###########################################
sb_linux_arm_smoke_factory = factory.BuildFactory()
sb_linux_arm_smoke_factory.addStep(download_testmedia)
sb_linux_arm_smoke_factory.addStep(TestSuiteShellCommand(
command=['../all/run-smoketests-ssh.sh', WithProperties('%s','revision'), './runsmokes-ssh.txt'],
env={'branch': WithProperties('%s','branch'), 'silent':WithProperties('%s','silent')},
description='starting to run smoke tests...',
descriptionDone='finished smoke tests.',
name="SmokeTest",
workdir="../repo/build/buildbot/slaves/scripts")
)
sb_linux_arm_smoke_factory.addStep(util_process_clean)
sb_linux_arm_smoke_builder = {
'name': "linux-arm-smoke-sandbox",
'slavename': "linux-arm",
'factory': sb_linux_arm_smoke_factory,
'builddir': './sandbox-linux-arm-smoke',
}
#########################################
#### builder for linux-mips-smoke ####
#########################################
sb_linux_mips_smoke_factory = factory.BuildFactory()
sb_linux_mips_smoke_factory.addStep(download_testmedia)
sb_linux_mips_smoke_factory.addStep(test_smoke_ssh)
sb_linux_mips_smoke_factory.addStep(util_process_clean_ssh)
sb_linux_mips_smoke_builder = {
'name': "linux-mips-smoke-sandbox",
'slavename': "linux-mips",
'factory': sb_linux_mips_smoke_factory,
'builddir': './sandbox-linux-mips-smoke',
}
################################################################################
################################################################################
#### ####
#### SANDBOX TEST BUILDERS ####
#### ####
################################################################################
################################################################################
##########################################
#### builder for windows-test-sandbox ####
##########################################
sb_windows_test_factory = factory.BuildFactory()
sb_windows_test_factory.addStep(test_commandline)
sb_windows_test_factory.addStep(test_selftest(name="Release", shellname="avmshell"))
sb_windows_test_factory.addStep(test_generic(name="Release", shellname="avmshell", vmargs="", config="", scriptargs=""))
sb_windows_test_factory.addStep(test_generic(name="Release-interp", shellname="avmshell", vmargs="-Dinterp", config="", scriptargs=""))
sb_windows_test_factory.addStep(test_generic(name="Release-jit", shellname="avmshell", vmargs="-Ojit", config="", scriptargs=""))
sb_windows_test_factory.addStep(test_generic(name="ReleaseDebugger", shellname="avmshell_s", vmargs="", config="", scriptargs=""))
sb_windows_test_factory.addStep(test_generic(name="Debug", shellname="avmshell_d", vmargs="", config="", scriptargs=""))
sb_windows_test_factory.addStep(test_generic(name="DebugDebugger", shellname="avmshell_sd", vmargs="", config="", scriptargs=""))
sb_windows_test_factory.addStep(test_differential)
sb_windows_test_factory.addStep(util_process_clean)
sb_windows_test_factory.addStep(util_clean_buildsdir)
sb_windows_test_factory.addStep(sync_clean)
sb_windows_test_builder = {
'name': "windows-test-sandbox",
'slavename': "windows",
'factory': sb_windows_test_factory,
'builddir': './sandbox-windows-test',
}
############################################
#### builder for windows64-test-sandbox ####
############################################
sb_windows_64_test_factory = factory.BuildFactory()
sb_windows_64_test_factory.addStep(test_commandline)
sb_windows_64_test_factory.addStep(test_selftest(name="Release", shellname="avmshell_64"))
sb_windows_64_test_factory.addStep(test_generic(name="Release", shellname="avmshell_64", vmargs="", config="", scriptargs=""))
sb_windows_64_test_factory.addStep(test_generic(name="Release-interp", shellname="avmshell_64", vmargs="-Dinterp", config="", scriptargs=""))
sb_windows_64_test_factory.addStep(test_generic(name="Release-jit", shellname="avmshell_64", vmargs="-Ojit", config="", scriptargs=""))
sb_windows_64_test_factory.addStep(test_generic(name="ReleaseDebugger", shellname="avmshell_s_64", vmargs="", config="", scriptargs=""))
sb_windows_64_test_factory.addStep(test_generic(name="Debug", shellname="avmshell_d_64", vmargs="", config="", scriptargs=""))
sb_windows_64_test_factory.addStep(test_generic(name="DebugDebugger", shellname="avmshell_sd_64", vmargs="", config="", scriptargs=""))
sb_windows_64_test_factory.addStep(util_process_clean)
sb_windows_64_test_factory.addStep(util_clean_buildsdir)
sb_windows_64_test_factory.addStep(sync_clean)
sb_windows_64_test_builder = {
'name': "windows64-test-sandbox",
'slavename': "windows64",
'factory': sb_windows_64_test_factory,
'builddir': './sandbox-windows64-test',
}
#################################################
#### builder for mac-intel-10_5-test-sandbox ####
#################################################
sb_mac_intel_105_test_factory = factory.BuildFactory()
sb_mac_intel_105_test_factory.addStep(test_commandline)
sb_mac_intel_105_test_factory.addStep(test_selftest(name="Release", shellname="avmshell"))
sb_mac_intel_105_test_factory.addStep(test_generic(name="Release", shellname="avmshell", vmargs="", config="", scriptargs=""))
sb_mac_intel_105_test_factory.addStep(test_generic(name="Release-interp", shellname="avmshell", vmargs="-Dinterp", config="", scriptargs=""))
sb_mac_intel_105_test_factory.addStep(test_generic(name="Release-jit", shellname="avmshell", vmargs="-Ojit", config="", scriptargs=""))
sb_mac_intel_105_test_factory.addStep(test_generic(name="ReleaseDebugger", shellname="avmshell_s", vmargs="", config="", scriptargs=""))
sb_mac_intel_105_test_factory.addStep(test_generic(name="Debug", shellname="avmshell_d", vmargs="", config="", scriptargs=""))
sb_mac_intel_105_test_factory.addStep(test_generic(name="DebugDebugger", shellname="avmshell_sd", vmargs="", config="", scriptargs=""))
sb_mac_intel_105_test_factory.addStep(test_differential)
sb_mac_intel_105_test_factory.addStep(util_process_clean)
sb_mac_intel_105_test_factory.addStep(util_clean_buildsdir)
sb_mac_intel_105_test_factory.addStep(sync_clean)
sb_mac_intel_105_test_builder = {
'name': "mac-intel-10.5-test-sandbox",
'slavename': "mac-intel-10_5",
'factory': sb_mac_intel_105_test_factory,
'builddir': './sandbox-mac-intel-10_5-test',
}
##############################################
#### builder for mac64-intel-test-sandbox ####
##############################################
sb_mac_intel_64_test_factory = factory.BuildFactory()
sb_mac_intel_64_test_factory.addStep(test_commandline)
sb_mac_intel_64_test_factory.addStep(test_selftest(name="Release", shellname="avmshell_64"))
sb_mac_intel_64_test_factory.addStep(test_generic(name="Release", shellname="avmshell_64", vmargs="", config="", scriptargs=""))
sb_mac_intel_64_test_factory.addStep(test_generic(name="Release-interp", shellname="avmshell_64", vmargs="-Dinterp", config="", scriptargs=""))
sb_mac_intel_64_test_factory.addStep(test_generic(name="Release-jit", shellname="avmshell_64", vmargs="-Ojit", config="", scriptargs=""))
sb_mac_intel_64_test_factory.addStep(test_generic(name="ReleaseDebugger", shellname="avmshell_s_64", vmargs="", config="", scriptargs=""))
sb_mac_intel_64_test_factory.addStep(test_generic(name="Debug", shellname="avmshell_d_64", vmargs="", config="", scriptargs=""))
sb_mac_intel_64_test_factory.addStep(test_generic(name="DebugDebugger", shellname="avmshell_sd_64", vmargs="", config="", scriptargs=""))
sb_mac_intel_64_test_factory.addStep(util_process_clean)
sb_mac_intel_64_test_factory.addStep(util_clean_buildsdir)
sb_mac_intel_64_test_factory.addStep(sync_clean)
sb_mac_intel_64_test_builder = {
'name': "mac64-intel-test-sandbox",
'slavename': "mac64-intel",
'factory': sb_mac_intel_64_test_factory,
'builddir': './sandbox-mac64-intel-test',
}
########################################
#### builder for linux-test-sandbox ####
########################################
sb_linux_test_factory = factory.BuildFactory()
sb_linux_test_factory.addStep(test_commandline)
sb_linux_test_factory.addStep(test_selftest(name="Release", shellname="avmshell"))
sb_linux_test_factory.addStep(test_generic(name="Release", shellname="avmshell", vmargs="", config="", scriptargs=""))
sb_linux_test_factory.addStep(test_generic(name="Release-interp", shellname="avmshell", vmargs="-Dinterp", config="", scriptargs=""))
sb_linux_test_factory.addStep(test_generic(name="Release-jit", shellname="avmshell", vmargs="-Ojit", config="", scriptargs=""))
sb_linux_test_factory.addStep(test_generic(name="ReleaseDebugger", shellname="avmshell_s", vmargs="", config="", scriptargs=""))
sb_linux_test_factory.addStep(test_generic(name="Debug", shellname="avmshell_d", vmargs="", config="", scriptargs=""))
sb_linux_test_factory.addStep(test_generic(name="DebugDebugger", shellname="avmshell_sd", vmargs="", config="", scriptargs=""))
sb_linux_test_factory.addStep(test_differential)
sb_linux_test_factory.addStep(util_process_clean)
sb_linux_test_factory.addStep(util_clean_buildsdir)
sb_linux_test_factory.addStep(sync_clean)
sb_linux_test_builder = {
'name': "linux-test-sandbox",
'slavename': "linux",
'factory': sb_linux_test_factory,
'builddir': './sandbox-linux-test',
}
##########################################
#### builder for linux64-test-sandbox ####
##########################################
sb_linux_64_test_factory = factory.BuildFactory()
sb_linux_64_test_factory.addStep(test_commandline)
sb_linux_64_test_factory.addStep(test_selftest(name="Release", shellname="avmshell_64"))
sb_linux_64_test_factory.addStep(test_generic(name="Release", shellname="avmshell_64", vmargs="", config="", scriptargs=""))
sb_linux_64_test_factory.addStep(test_generic(name="Release-interp", shellname="avmshell_64", vmargs="-Dinterp", config="", scriptargs=""))
sb_linux_64_test_factory.addStep(test_generic(name="Release-jit", shellname="avmshell_64", vmargs="-Ojit", config="", scriptargs=""))
sb_linux_64_test_factory.addStep(test_generic(name="ReleaseDebugger", shellname="avmshell_s_64", vmargs="", config="", scriptargs=""))
sb_linux_64_test_factory.addStep(test_generic(name="Debug", shellname="avmshell_d_64", vmargs="", config="", scriptargs=""))
sb_linux_64_test_factory.addStep(test_generic(name="DebugDebugger", shellname="avmshell_sd_64", vmargs="", config="", scriptargs=""))
sb_linux_64_test_factory.addStep(util_process_clean)
sb_linux_64_test_factory.addStep(util_clean_buildsdir)
sb_linux_64_test_factory.addStep(sync_clean)
sb_linux_64_test_builder = {
'name': "linux64-test-sandbox",
'slavename': "linux64",
'factory': sb_linux_64_test_factory,
'builddir': './sandbox-linux64-test',
}
########################################
#### builder for android-test ####
########################################
sb_android_test_factory = factory.BuildFactory()
sb_android_test_factory.addStep(test_generic_adb(name="Release", shellname="avmshell", vmargs="", config="", scriptargs=""))
sb_android_test_factory.addStep(test_generic_adb(name="Release-interp", shellname="avmshell", vmargs="-Dinterp", config="", scriptargs=""))
sb_android_test_factory.addStep(test_generic_adb(name="Release-jit", shellname="avmshell", vmargs="-Ojit", config="", scriptargs=""))
sb_android_test_factory.addStep(test_generic_adb(name="Debug", shellname="avmshell_d", vmargs="", config="", scriptargs=""))
sb_android_test_factory.addStep(util_process_clean)
sb_android_test_factory.addStep(util_clean_buildsdir)
sb_android_test_factory.addStep(sync_clean)
sb_android_test_builder = {
'name': "android-test-sandbox",
'slavename': "android",
'factory': sb_android_test_factory,
'builddir': './sandbox-android-test',
}
##########################################
#### builder for linux-arm-test ####
##########################################
sb_linux_arm_test_factory = factory.BuildFactory()
sb_linux_arm_test_factory.addStep(test_selftest_ssh(name="Release", shellname="avmshell_neon_arm"))
sb_linux_arm_test_factory.addStep(test_generic_ssh(name="Release-vfp", shellname="avmshell_neon_arm", vmargs="-Darm_arch 7 -Darm_vfp", config="arm-lnx-tvm-release", scriptargs=""))
sb_linux_arm_test_factory.addStep(test_generic_ssh(name="Release-interp", shellname="avmshell_neon_arm", vmargs="-Dinterp", config="arm-lnx-tvm-release-Dinterp", scriptargs=""))
sb_linux_arm_test_factory.addStep(test_generic_ssh(name="Release-jit-vfp", shellname="avmshell_neon_arm", vmargs="-Darm_arch 7 -Darm_vfp -Ojit", config="arm-lnx-tvm-release-Ojit", scriptargs=""))
sb_linux_arm_test_factory.addStep(util_acceptance_clean_ssh)
sb_linux_arm_test_factory.addStep(util_clean_buildsdir)
sb_linux_arm_test_factory.addStep(sync_clean)
sb_linux_arm_test_builder = {
'name': "linux-arm-test-sandbox",
'slavename': "linux-arm",
'factory': sb_linux_arm_test_factory,
'builddir': './sandbox-linux-arm-test',
}
##########################################
#### builder for linux-mips-test ####
##########################################
sb_linux_mips_test_factory = factory.BuildFactory()
sb_linux_mips_test_factory.addStep(test_generic_ssh(name="Release", shellname="avmshell_mips", vmargs="", config="mips-lnx-tvm-release", scriptargs=""))
sb_linux_mips_test_factory.addStep(test_generic_ssh(name="Debug", shellname="avmshell_mips_d", vmargs="", config="mips-lnx-tvm-debug", scriptargs=""))
sb_linux_mips_test_factory.addStep(util_process_clean_ssh)
sb_linux_mips_test_factory.addStep(util_clean_buildsdir)
sb_linux_mips_test_factory.addStep(sync_clean)
sb_linux_mips_test_builder = {
'name': "linux-mips-test-sandbox",
'slavename': "linux-mips",
'factory': sb_linux_mips_test_factory,
'builddir': './sandbox-linux-mips-test',
}
builders = [
sb_windows_compile_builder,
sb_windows_64_compile_builder,
sb_mac_intel_105_compile_builder,
sb_mac_intel_64_compile_builder,
sb_linux_compile_builder,
sb_linux_64_compile_builder,
sb_android_compile_builder,
sb_linux_arm_compile_builder,
sb_linux_mips_compile_builder,
sb_windows_smoke_builder,
sb_windows_64_smoke_builder,
sb_mac_intel_105_smoke_builder,
sb_mac_intel_64_smoke_builder,
sb_linux_smoke_builder,
sb_linux_64_smoke_builder,
sb_android_smoke_builder,
sb_linux_arm_smoke_builder,
sb_linux_mips_smoke_builder,
sb_windows_test_builder,
sb_windows_64_test_builder,
sb_mac_intel_105_test_builder,
sb_mac_intel_64_test_builder,
sb_linux_test_builder,
sb_linux_64_test_builder,
sb_android_test_builder,
sb_linux_arm_test_builder,
sb_linux_mips_test_builder,
]
|
adobe-flash/avmplus
|
build/buildbot/master/sandbox.py
|
Python
|
mpl-2.0
| 41,692
|
"""Tests around handling repositories which require authentication."""
from cookiecutter.prompt import read_repo_password
def test_click_invocation(mocker):
"""Test click function called correctly by cookiecutter.
Test for password (hidden input) type invocation.
"""
prompt = mocker.patch('click.prompt')
prompt.return_value = 'sekrit'
assert read_repo_password('Password') == 'sekrit'
prompt.assert_called_once_with('Password', hide_input=True)
|
audreyr/cookiecutter
|
tests/test_read_repo_password.py
|
Python
|
bsd-3-clause
| 480
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
from setuptools import setup
DESCRIPTION = 'webui for youtube-dl'
LONG_DESCRIPTION = 'Another webui for youtube-dl, powered by youtube-dl'
setup(
name='youtube_dl_webui',
version='rolling',
packages=['youtube_dl_webui'],
license='GPL-2.0',
author='d0u9, yuanyingfeiyu',
author_email='d0u9.su@outlook.com',
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
include_package_data=True,
zip_safe=False,
install_requires=[
'Flask>=0.2',
'youtube-dl',
],
entry_points={
'console_scripts': [
'youtube-dl-webui = youtube_dl_webui:main'
]
},
)
|
d0u9/youtube-dl-webui
|
setup.py
|
Python
|
gpl-2.0
| 770
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-06-07 07:44
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('veggie', '0011_auto_20170606_1913'),
]
operations = [
migrations.AlterField(
model_name='item',
name='item_status',
field=models.BooleanField(default=True),
),
migrations.AlterField(
model_name='offer',
name='status',
field=models.BooleanField(default=True),
),
]
|
pgastinger/bratshop
|
veggie/migrations/0012_auto_20170607_0744.py
|
Python
|
gpl-3.0
| 609
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2014-2019 khalim19
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This module provides a class to store and increment version numbers.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from future.builtins import *
import re
class Version(object):
def __init__(
self, major=None, minor=None, patch=None, prerelease=None, prerelease_patch=None):
self.major = major
self.minor = minor
self.patch = patch
self.prerelease = prerelease
self.prerelease_patch = prerelease_patch
def __str__(self):
version_str = "{}.{}".format(self.major, self.minor)
if self.patch is not None:
version_str += ".{}".format(self.patch)
if self.prerelease is not None:
version_str += "-{}".format(self.prerelease)
if self.prerelease_patch is not None:
version_str += ".{}".format(self.prerelease_patch)
return version_str
def __lt__(self, other_version):
this_version_main_components = self._get_main_components_tuple(self)
other_version_main_components = self._get_main_components_tuple(other_version)
if this_version_main_components < other_version_main_components:
return True
elif this_version_main_components > other_version_main_components:
return False
else:
if self.prerelease is not None and other_version.prerelease is None:
return True
elif self.prerelease is not None and other_version.prerelease is not None:
if self.prerelease < other_version.prerelease:
return True
elif self.prerelease > other_version.prerelease:
return False
else:
return (
self._get_default_number(self.prerelease_patch)
< self._get_default_number(other_version.prerelease_patch))
else:
return False
def __le__(self, other_version):
return self.__lt__(other_version) or self.__eq__(other_version)
def __eq__(self, other_version):
return (
(self._get_main_components_tuple(self)
== self._get_main_components_tuple(other_version))
and self.prerelease == other_version.prerelease
and (self._get_default_number(self.prerelease_patch)
== self._get_default_number(other_version.prerelease_patch)))
def __ne__(self, other_version):
return not self.__eq__(other_version)
def __gt__(self, other_version):
return not self.__le__(other_version)
def __ge__(self, other_version):
return not self.__lt__(other_version)
def increment(self, component_to_increment, prerelease=None):
"""
Increment the version as per `component_to_increment` and `prerelease`.
`component_to_increment` can be `"major"`, `"minor"` or `"patch"`. Given the
format `X.Y.Z`, `"major"` increments `X`, `"minor"` increments `Y` and
`"patch"` increments `Z`. If `patch` attribute is `None` and `"patch"` is
specified, `1` will be assigned (e.g. `3.3` becomes `3.3.1`).
If the `prerelease` string is not `None` and non-empty, append the
    pre-release to the version. For example, `3.3` with the `"major"` component and
`"alpha"` as the pre-release string becomes `4.0-alpha`.
If the version already has the same pre-release, append a number to the
pre-release (e.g. `4.0-alpha` becomes `4.0-alpha.2`).
If the version already has a different pre-release (lexically earlier than
`prerelease`), replace the existing pre-release with `prerelease` (e.g.
`4.0-alpha` with the `"beta"` pre-release becomes `4.0-beta`).
Raises:
* `ValueError`:
* Invalid value for `component_to_increment`.
* The specified `prerelease` contains non-alphanumeric characters or is
lexically earlier than the existing `prerelease` attribute.
"""
if component_to_increment not in ["major", "minor", "patch"]:
raise ValueError("invalid version component '{}'".format(component_to_increment))
if prerelease:
if not re.search(r"^[a-zA-Z0-9]+$", prerelease):
raise ValueError("invalid pre-release format '{}'".format(prerelease))
      if self.prerelease is not None and prerelease < self.prerelease:
raise ValueError(
"the specified pre-release '{}' is lexically earlier than "
"the existing pre-release '{}'".format(prerelease, self.prerelease))
if not prerelease:
prerelease = None
def increment_major():
self.major += 1
self.minor = 0
self.patch = None
def increment_minor():
self.minor += 1
self.patch = None
def increment_patch():
if self.patch is None:
self.patch = 0
self.patch += 1
def clear_prerelease():
self.prerelease = None
self.prerelease_patch = None
def set_new_prerelease():
self.prerelease = prerelease
self.prerelease_patch = None
def increment_prerelease():
if self.prerelease_patch is None:
self.prerelease_patch = 1
self.prerelease_patch += 1
if component_to_increment == "major":
increment_component_func = increment_major
elif component_to_increment == "minor":
increment_component_func = increment_minor
elif component_to_increment == "patch":
increment_component_func = increment_patch
if prerelease is None:
increment_component_func()
clear_prerelease()
else:
if self.prerelease is None:
increment_component_func()
set_new_prerelease()
else:
if prerelease == self.prerelease:
increment_prerelease()
else:
set_new_prerelease()
@classmethod
def parse(cls, version_str):
"""
Parse the `version_str` string and return a `Version` instance.
Raises:
* `InvalidVersionFormatError` - `version_str` has invalid format.
"""
ver = Version()
cls._fill_version_components(ver, version_str)
return ver
@classmethod
def _fill_version_components(cls, version_obj, version_str):
version_str_components = version_str.split("-")
if len(version_str_components) > 2:
raise InvalidVersionFormatError
cls._set_main_version_components(version_obj, version_str_components[0])
if len(version_str_components) == 2:
cls._set_prerelease_version_components(version_obj, version_str_components[1])
@classmethod
def _set_main_version_components(cls, version_obj, main_str_components):
match = re.search(r'^([0-9]+?)\.([0-9]+?)$', main_str_components)
if match is None:
match = re.search(r'^([0-9]+?)\.([0-9]+?)\.([1-9][0-9]*)$', main_str_components)
if match is None:
raise InvalidVersionFormatError
match_groups = match.groups()
version_obj.major = int(match_groups[0])
version_obj.minor = int(match_groups[1])
if len(match_groups) == 3:
version_obj.patch = int(match_groups[2])
@classmethod
def _set_prerelease_version_components(cls, version_obj, prerelease_str_components):
match = re.search(r'^([a-zA-Z0-9]+?)$', prerelease_str_components)
if match is None:
match = re.search(
r'^([a-zA-Z0-9]+?)\.([2-9]|[1-9][0-9]+)$', prerelease_str_components)
if match is None:
raise InvalidVersionFormatError
match_groups = match.groups()
version_obj.prerelease = match_groups[0]
if len(match_groups) == 2:
version_obj.prerelease_patch = int(match_groups[1])
@staticmethod
def _get_main_components_tuple(ver):
return tuple(
number if number is not None else -1
for number in [ver.major, ver.minor, ver.patch])
@staticmethod
def _get_default_number(component):
return component if component is not None else -1
class InvalidVersionFormatError(Exception):
pass
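# A minimal usage sketch of the class above; the version strings are
# illustrative values only, not anything this module ships with.
if __name__ == "__main__":
    ver = Version.parse("3.3")
    ver.increment("major", prerelease="alpha")
    print(ver)  # 4.0-alpha
    ver.increment("major", prerelease="alpha")
    print(ver)  # 4.0-alpha.2 (same pre-release: its number is bumped)
    ver.increment("major", prerelease="beta")
    print(ver)  # 4.0-beta (a lexically later pre-release replaces the old one)
    ver.increment("patch")
    print(ver)  # 4.0.1 (a plain increment clears the pre-release)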
|
khalim19/gimp-plugin-export-layers
|
export_layers/pygimplib/version.py
|
Python
|
gpl-3.0
| 8,370
|
import sys
from PySide.QtCore import *
from PySide.QtGui import *
class WorkBookWindow(QMainWindow):
def __init__(self, parent=None):
super(WorkBookWindow, self).__init__(parent)
        self.locked = True  # 'locked', not 'lock', so the flag does not shadow the lock() method below
self.ID = -1
def getID(self):
return self.ID
def setID(self, newID):
self.ID = newID
def closeEvent(self, event):
        if self.locked:
self.showMinimized()
event.ignore()
else:
event.accept()
self.showMaximized()
    def unlock(self):
        self.locked = False
    def lock(self):
        self.locked = True
if __name__ == '__main__':
app = QApplication(sys.argv)
frame = WorkBookWindow()
frame.showMaximized()
    sys.exit(app.exec_())
|
s910324/Sloth
|
wbSubWin.py
|
Python
|
lgpl-3.0
| 658
|
#!/usr/bin/env python
from unittest import TestCase
from boutiques.bosh import bosh
from boutiques import __file__ as bfile
import os
import subprocess
class TestInvocation(TestCase):
def test_invocation(self):
descriptor = os.path.join(os.path.split(bfile)[0],
"schema/examples/good.json")
invocation = os.path.join(os.path.split(bfile)[0],
"schema/examples/good_invocation.json")
self.assertFalse(bosh(["invocation", descriptor, "-i",
invocation, "-w"]))
self.assertFalse(bosh(["invocation", descriptor, "-i",
invocation, "-w"]))
def test_invocation_json_obj(self):
descriptor = open(os.path.join(os.path.split(bfile)[0],
"schema/examples/good.json")).read()
invocation = open(os.path.join(os.path.split(bfile)[0],
"schema/examples/"
"good_invocation.json")).read()
self.assertFalse(bosh(["invocation", descriptor, "-i",
invocation, "-w"]))
self.assertFalse(bosh(["invocation", descriptor, "-i",
invocation, "-w"]))
def test_invocation_invalid_cli(self):
descriptor = os.path.join(os.path.split(bfile)[0],
"schema/examples/good.json")
invocation = os.path.join(os.path.split(bfile)[0],
"schema/examples/wrong_invocation.json")
command = ("bosh invocation " + descriptor + "-i " + invocation)
process = subprocess.Popen(command, shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
process.communicate()
self.assertTrue(process.returncode)
|
boutiques/schema
|
tools/python/boutiques/tests/test_invocation.py
|
Python
|
gpl-2.0
| 1,925
|
#!/usr/bin/env python3
#
# Copyright (c) 2017, Neil Booth
#
# All rights reserved.
#
# See the file "LICENCE" for information about the copyright
# and warranty status of this software.
'''Script to compact the history database. This should save space and
will reset the flush counter to a low number, avoiding overflow when
the flush count reaches 65,536.
This needs to lock the database so ElectrumX must not be running -
shut it down cleanly first.
It is recommended you run this script with the same environment as
ElectrumX. However it is intended to be runnable with just
DB_DIRECTORY, DB_ENGINE, COIN and NET set (COIN defaults as for ElectrumX).
If you use daemon tools, you might run this script like so:
export DB_DIRECTORY=/db
export DB_ENGINE=rocksdb
export COIN=groestlcoin
export NET=mainnet
python3 /usr/local/bin/electrumx_compact_history
Depending on your hardware this script may take up to 6 hours to
complete; it logs progress regularly.
Compaction can be interrupted and restarted harmlessly and will pick
up where it left off. However, if you restart ElectrumX without
running the compaction to completion, it will not benefit and
subsequent compactions will restart from the beginning.
'''
import logging
import sys
import traceback
from os import environ
from server.env import Env
from server.db import DB
def compact_history():
if sys.version_info < (3, 6):
raise RuntimeError('Python >= 3.6 is required to run ElectrumX')
environ['DAEMON_URL'] = '' # Avoid Env erroring out
env = Env()
db = DB(env)
assert not db.first_sync
# Continue where we left off, if interrupted
if db.comp_cursor == -1:
db.comp_cursor = 0
db.comp_flush_count = max(db.comp_flush_count, 1)
limit = 8 * 1000 * 1000
while db.comp_cursor != -1:
db._compact_history(limit)
def main():
logging.basicConfig(level=logging.INFO)
logging.info('Starting history compaction...')
try:
compact_history()
except Exception:
traceback.print_exc()
logging.critical('History compaction terminated abnormally')
else:
logging.info('History compaction complete')
if __name__ == '__main__':
main()
|
Groestlcoin/electrumx-grs
|
compact_history.py
|
Python
|
mit
| 2,235
|
import six
from .html_elements import InputCollection
from .input import Input
from ..meta_elements import MetaHTMLElement
@six.add_metaclass(MetaHTMLElement)
class CheckBox(Input):
@property
def is_set(self):
"""
Returns True if the element is checked
:rtype: bool
"""
return self._element_call(lambda: self.el.is_selected())
is_checked = is_set
def set(self, value=True):
"""
Sets checkbox to the given value
:param value: True to check, False to uncheck
:Example:
checkbox = browser.checkbox(id='new_user_interests_cars')
checkbox.is_set #=> false
checkbox.set()
checkbox.is_set #=> true
checkbox.set(False)
        checkbox.is_set #=> false
"""
        if self.is_set == value:
            # Already in the desired state; just check the element is enabled.
            self._assert_enabled()
        else:
            self.click()
check = set
def clear(self):
self.set(value=False)
uncheck = clear
@six.add_metaclass(MetaHTMLElement)
class CheckBoxCollection(InputCollection):
pass
|
lmtierney/watir-snake
|
nerodia/elements/check_box.py
|
Python
|
mit
| 1,069
|
from django.utils.translation import ugettext_noop as _
from transurlvania.defaults import *
urlpatterns = patterns('garfield.views',
url(r'^$', 'landing', name='garfield_landing'),
url(_(r'^the-president/$'), 'the_president', name='garfield_the_president'),
(_(r'^the-cat/$'), 'comic_strip_list', {}, 'garfield_the_cat'),
    url(_(r'^the-cat/(?P<strip_id>\d+)/$'), 'comic_strip_detail',
name='garfield_comic_strip_detail'),
url(r'', include('garfield.extra_urls')),
)
|
trapeze/transurlvania
|
tests/garfield/urls.py
|
Python
|
bsd-3-clause
| 502
|
from xblock.fields import Boolean, Scope
from group_project_v2.stage.utils import StageState
from group_project_v2.utils import gettext as _
class SimpleCompletionStageMixin(object):
"""
    runtime.publish(block, 'progress', {'user_id': user_id}) properly creates completion records, but they are
    unavailable to the API until the current request has ended. They are created in a transaction, and it looks
    like every request in the LMS has a dedicated transaction, but that's speculation. In any case, we can't rely
    on runtime.publish - project_api.get_stage_id to update the stage state and get the new state in a single run.
"""
completed = Boolean(
display_name=_(u"Completed"),
scope=Scope.user_state
)
def get_stage_state(self):
if self.completed:
return StageState.COMPLETED
return StageState.NOT_STARTED
def mark_complete(self, user_id=None):
result = super(SimpleCompletionStageMixin, self).mark_complete(user_id)
self.completed = True
return result
def get_users_completion(self, _target_workgroups, _target_users):
"""
Returns sets of completed user ids and partially completed user ids
:param collections.Iterable[group_project_v2.project_api.dtos.WorkgroupDetails] _target_workgroups:
:param collections.Iterable[group_project_v2.project_api.dtos.ReducedUserDetails] _target_users:
:rtype: (set[int], set[int])
"""
completions = self.project_api.get_completions_by_content_id(self.course_id, self.content_id)
return set(completion.user_id for completion in completions), set()
|
open-craft/xblock-group-project-v2
|
group_project_v2/stage/mixins.py
|
Python
|
agpl-3.0
| 1,632
|
#!/usr/bin/env python
import os
import time
import sys
time.sleep(600)
os.system('sudo sysctl net.mpip.mpip_skype=1')
time.sleep(600)
os.system('sudo sysctl net.mpip.mpip_skype=0')
time.sleep(600)
#os.system('sudo killall emulab-iperf')
exit()
|
gbtian/mptcp
|
sigcomm15/experiment/skype/skype.py
|
Python
|
gpl-2.0
| 248
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-21 12:42
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('database', '0018_auto_20170821_0840'),
]
operations = [
migrations.AlterField(
model_name='profile',
name='rate',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True, verbose_name='United States Dollar - USD'),
),
migrations.AlterField(
model_name='profile',
name='unit',
field=models.DecimalField(blank=True, decimal_places=2, default=1.0, max_digits=12, null=True, verbose_name='Hours'),
),
]
|
ACLARKNET/aclarknet-database
|
aclarknet/database/migrations/0019_auto_20170821_0842.py
|
Python
|
mit
| 769
|
"""
hostdev device support class(es)
http://libvirt.org/formatdomain.html#elementsHostDev
"""
from virttest.libvirt_xml.devices import base
from virttest.libvirt_xml import accessors
class Hostdev(base.TypedDeviceBase):
__slots__ = ('mode', 'hostdev_type', 'source',
'managed', 'boot_order',)
def __init__(self, type_name="hostdev", virsh_instance=base.base.virsh):
accessors.XMLAttribute('hostdev_type', self, parent_xpath='/',
tag_name='hostdev', attribute='type')
accessors.XMLAttribute('mode', self, parent_xpath='/',
tag_name='hostdev', attribute='mode')
accessors.XMLAttribute('managed', self, parent_xpath='/',
tag_name='hostdev', attribute='managed')
accessors.XMLElementNest('source', self, parent_xpath='/',
tag_name='source', subclass=self.Source,
subclass_dargs={
'virsh_instance': virsh_instance})
accessors.XMLAttribute('boot_order', self, parent_xpath='/',
tag_name='boot', attribute='order')
super(self.__class__, self).__init__(device_tag='hostdev',
type_name=type_name,
virsh_instance=virsh_instance)
def new_source(self, **dargs):
new_one = self.Source(virsh_instance=self.virsh)
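        # A PCI hostdev source is identified purely by an <address> element;
        # a USB source may also carry <vendor>/<product> ids before the address.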
if self.hostdev_type == 'pci':
new_address = new_one.new_untyped_address(**dargs)
new_one.untyped_address = new_address
if self.hostdev_type == 'usb':
new_one.vendor_id = dargs.pop("vendor_id", None)
new_one.product_id = dargs.pop("product_id", None)
new_address = new_one.new_untyped_address(**dargs)
new_one.untyped_address = new_address
return new_one
class Source(base.base.LibvirtXMLBase):
__slots__ = ('untyped_address', 'vendor_id', 'product_id')
def __init__(self, virsh_instance=base.base.virsh):
accessors.XMLAttribute('vendor_id', self, parent_xpath='/',
tag_name='vendor', attribute='id')
accessors.XMLAttribute('product_id', self, parent_xpath='/',
tag_name='product', attribute='id')
accessors.XMLElementNest('untyped_address', self, parent_xpath='/',
tag_name='address', subclass=self.UntypedAddress,
subclass_dargs={
'virsh_instance': virsh_instance})
super(self.__class__, self).__init__(virsh_instance=virsh_instance)
self.xml = '<source/>'
def new_untyped_address(self, **dargs):
new_one = self.UntypedAddress(virsh_instance=self.virsh)
for key, value in dargs.items():
setattr(new_one, key, value)
return new_one
class UntypedAddress(base.UntypedDeviceBase):
__slots__ = ('device', 'domain', 'bus', 'slot', 'function',)
def __init__(self, virsh_instance=base.base.virsh):
accessors.XMLAttribute('domain', self, parent_xpath='/',
tag_name='address', attribute='domain')
accessors.XMLAttribute('slot', self, parent_xpath='/',
tag_name='address', attribute='slot')
accessors.XMLAttribute('bus', self, parent_xpath='/',
tag_name='address', attribute='bus')
accessors.XMLAttribute('device', self, parent_xpath='/',
tag_name='address', attribute='device')
accessors.XMLAttribute('function', self, parent_xpath='/',
tag_name='address', attribute='function')
super(self.__class__, self).__init__(
"address", virsh_instance=virsh_instance)
self.xml = "<address/>"
|
vipmike007/avocado-vt
|
virttest/libvirt_xml/devices/hostdev.py
|
Python
|
gpl-2.0
| 4,170
|
plot = coh.plot()
ax = plot.gca()
ax.set_ylabel('Frequency [Hz]')
ax.set_yscale('log')
ax.set_ylim(10, 8000)
ax.set_title('Coherence between SRCL and CARM for L1')
ax.grid(True, 'both', 'both')
plot.add_colorbar(label='Coherence', clim=[0, 1])
plot.show()
|
gwpy/gwpy.github.io
|
docs/v0.4/examples/spectrogram/coherence-4.py
|
Python
|
gpl-3.0
| 255
|
from .models import TaskState, TaskMeta, WorkerState
from rest_framework import serializers
class TaskStateSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = TaskState
class WorkerStateSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = WorkerState
class TaskMetaSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = TaskMeta
|
hollowpoint/hollowpoint
|
hpt/core/serializers.py
|
Python
|
apache-2.0
| 427
|
""" define extension dtypes """
import re
import numpy as np
from pandas import compat
class ExtensionDtype(object):
"""
A np.dtype duck-typed class, suitable for holding a custom dtype.
THIS IS NOT A REAL NUMPY DTYPE
"""
name = None
names = None
type = None
subdtype = None
kind = None
str = None
num = 100
shape = tuple()
itemsize = 8
base = None
isbuiltin = 0
isnative = 0
_metadata = []
def __unicode__(self):
return self.name
def __str__(self):
"""
Return a string representation for a particular Object
Invoked by str(df) in both py2/py3.
Yields Bytestring in Py2, Unicode String in py3.
"""
if compat.PY3:
return self.__unicode__()
return self.__bytes__()
def __bytes__(self):
"""
Return a string representation for a particular object.
Invoked by bytes(obj) in py3 only.
Yields a bytestring in both py2/py3.
"""
from pandas.core.config import get_option
encoding = get_option("display.encoding")
return self.__unicode__().encode(encoding, 'replace')
def __repr__(self):
"""
Return a string representation for a particular object.
Yields Bytestring in Py2, Unicode String in py3.
"""
return str(self)
def __hash__(self):
raise NotImplementedError("sub-classes should implement an __hash__ method")
def __eq__(self, other):
raise NotImplementedError("sub-classes should implement an __eq__ method")
@classmethod
def is_dtype(cls, dtype):
""" Return a boolean if we if the passed type is an actual dtype that we can match (via string or type) """
if hasattr(dtype, 'dtype'):
dtype = dtype.dtype
if isinstance(dtype, cls):
return True
elif isinstance(dtype, np.dtype):
return False
try:
return cls.construct_from_string(dtype) is not None
except:
return False
class CategoricalDtypeType(type):
"""
the type of CategoricalDtype, this metaclass determines subclass ability
"""
pass
class CategoricalDtype(ExtensionDtype):
"""
A np.dtype duck-typed class, suitable for holding a custom categorical dtype.
THIS IS NOT A REAL NUMPY DTYPE, but essentially a sub-class of np.object
"""
name = 'category'
type = CategoricalDtypeType
kind = 'O'
str = '|O08'
base = np.dtype('O')
def __hash__(self):
# make myself hashable
return hash(str(self))
def __eq__(self, other):
if isinstance(other, compat.string_types):
return other == self.name
return isinstance(other, CategoricalDtype)
@classmethod
def construct_from_string(cls, string):
""" attempt to construct this type from a string, raise a TypeError if its not possible """
try:
if string == 'category':
return cls()
except:
pass
raise TypeError("cannot construct a CategoricalDtype")
class DatetimeTZDtypeType(type):
"""
the type of DatetimeTZDtype, this metaclass determines subclass ability
"""
pass
class DatetimeTZDtype(ExtensionDtype):
"""
A np.dtype duck-typed class, suitable for holding a custom datetime with tz dtype.
THIS IS NOT A REAL NUMPY DTYPE, but essentially a sub-class of np.datetime64[ns]
"""
type = DatetimeTZDtypeType
kind = 'M'
str = '|M8[ns]'
num = 101
base = np.dtype('M8[ns]')
_metadata = ['unit','tz']
_match = re.compile("(datetime64|M8)\[(?P<unit>.+), (?P<tz>.+)\]")
def __init__(self, unit, tz=None):
"""
Parameters
----------
unit : string unit that this represents, currently must be 'ns'
tz : string tz that this represents
"""
if isinstance(unit, DatetimeTZDtype):
self.unit, self.tz = unit.unit, unit.tz
return
if tz is None:
# we were passed a string that we can construct
try:
m = self._match.search(unit)
if m is not None:
self.unit = m.groupdict()['unit']
self.tz = m.groupdict()['tz']
return
except:
raise ValueError("could not construct DatetimeTZDtype")
raise ValueError("DatetimeTZDtype constructor must have a tz supplied")
if unit != 'ns':
raise ValueError("DatetimeTZDtype only supports ns units")
self.unit = unit
self.tz = tz
@classmethod
def construct_from_string(cls, string):
""" attempt to construct this type from a string, raise a TypeError if its not possible """
try:
return cls(unit=string)
except ValueError:
raise TypeError("could not construct DatetimeTZDtype")
def __unicode__(self):
# format the tz
return "datetime64[{unit}, {tz}]".format(unit=self.unit, tz=self.tz)
@property
def name(self):
return str(self)
def __hash__(self):
# make myself hashable
return hash(str(self))
def __eq__(self, other):
if isinstance(other, compat.string_types):
return other == self.name
return isinstance(other, DatetimeTZDtype) and self.unit == other.unit and self.tz == other.tz
|
Vvucinic/Wander
|
venv_2_7/lib/python2.7/site-packages/pandas/core/dtypes.py
|
Python
|
artistic-2.0
| 5,492
|
# <pep8 compliant>
import bpy
from . import import_xnalara_model
from . import import_xnalara_pose
class ArmatureBonesHideByName_Op(bpy.types.Operator):
bl_idname = 'xps_tools.bones_hide_by_name'
bl_label = 'Hide bones by name'
bl_description = 'Move bones starting with "unused" to the armature layer 2'
bl_options = {'PRESET'}
@classmethod
def poll(cls, context):
return bool(
next(
(obj for obj in context.selected_objects if obj.type == 'ARMATURE'),
None))
def execute(self, context):
import_xnalara_model.hideBonesByName(self.armature_objs)
return {'FINISHED'}
def invoke(self, context, event):
self.armature_objs = [
obj for obj in context.selected_objects if obj.type == 'ARMATURE']
return self.execute(context)
def check(self, context):
print('CHECK')
return {'RUNNING_MODAL'}
class ArmatureBonesHideByVertexGroup_Op(bpy.types.Operator):
bl_idname = 'xps_tools.bones_hide_by_vertex_group'
bl_label = 'Hide bones by weight'
bl_description = 'Move bones that do not alter any mesh to the armature layer 2'
bl_options = {'PRESET'}
@classmethod
def poll(cls, context):
return bool(
next(
(obj for obj in context.selected_objects if obj.type == 'ARMATURE'),
None))
def execute(self, context):
import_xnalara_model.hideBonesByVertexGroup(self.armature_objs)
return {'FINISHED'}
def invoke(self, context, event):
self.armature_objs = [
obj for obj in context.selected_objects if obj.type == 'ARMATURE']
return self.execute(context)
def check(self, context):
print('CHECK')
return {'RUNNING_MODAL'}
class ArmatureBonesShowAll_Op(bpy.types.Operator):
bl_idname = 'xps_tools.bones_show_all'
bl_label = 'Show all Bones'
bl_description = 'Move all bones to the armature layer 1'
bl_options = {'PRESET'}
@classmethod
def poll(cls, context):
return bool(
next(
(obj for obj in context.selected_objects if obj.type == 'ARMATURE'),
None))
def execute(self, context):
import_xnalara_model.showAllBones(self.armature_objs)
return {'FINISHED'}
def invoke(self, context, event):
self.armature_objs = [
obj for obj in context.selected_objects if obj.type == 'ARMATURE']
return self.execute(context)
def check(self, context):
print('CHECK')
return {'RUNNING_MODAL'}
class ArmatureBonesRenameToBlender_Op(bpy.types.Operator):
bl_idname = 'xps_tools.bones_rename_to_blender'
bl_label = 'Rename Bones'
bl_description = 'Rename bones to Blender bone name convention (left -> .L)'
bl_options = {'PRESET'}
@classmethod
def poll(cls, context):
return bool(
next(
(obj for obj in context.selected_objects if obj.type == 'ARMATURE'),
None))
def execute(self, context):
armatures_obs = filter(
lambda obj: obj.type == 'ARMATURE',
context.selected_objects)
import_xnalara_pose.renameBonesToBlender(armatures_obs)
return {'FINISHED'}
class ArmatureBonesRenameToXps_Op(bpy.types.Operator):
bl_idname = 'xps_tools.bones_rename_to_xps'
bl_label = 'Rename Bones'
bl_description = 'Rename bones back to XPS (.L -> left)'
bl_options = {'PRESET'}
@classmethod
def poll(cls, context):
return bool(
next(
(obj for obj in context.selected_objects if obj.type == 'ARMATURE'),
None))
def execute(self, context):
armatures_obs = filter(
lambda obj: obj.type == 'ARMATURE',
context.selected_objects)
import_xnalara_pose.renameBonesToXps(armatures_obs)
return {'FINISHED'}
class ArmatureBonesConnect_Op(bpy.types.Operator):
bl_idname = 'xps_tools.bones_connect'
bl_label = 'Set Bones Connection'
bl_description = 'Set Bones Connection'
bl_options = {'PRESET'}
connectBones: bpy.props.BoolProperty()
@classmethod
def poll(cls, context):
return bool(
next(
(obj for obj in context.selected_objects if obj.type == 'ARMATURE'),
None))
def execute(self, context):
armatures_obs = filter(
lambda obj: obj.type == 'ARMATURE',
context.selected_objects)
activeObj = bpy.context.active_object
for armature_ob in armatures_obs:
bpy.context.view_layer.objects.active = armature_ob
import_xnalara_model.setBoneConnect(self.connectBones)
bpy.context.view_layer.objects.active = activeObj
return {'FINISHED'}
class NewRestPose_Op(bpy.types.Operator):
bl_idname = 'xps_tools.new_rest_pose'
bl_label = 'New Rest Pose'
bl_description = 'Set Current Pose as The New Rest Pose'
bl_options = {"PRESET"}
@classmethod
def poll(cls, context):
return (context.active_object and context.active_object.type == 'ARMATURE' and
bool(next(
(obj for obj in context.selected_objects if obj.type == 'MESH'),
None)))
def action_common(self, context):
meshes_obs = filter(lambda obj: obj.type == 'MESH', context.selected_objects)
activeArmature = context.active_object
for obj in meshes_obs:
if (obj.find_armature() == activeArmature):
sourceModif = obj.modifiers[-1]
if (sourceModif and sourceModif.type == 'ARMATURE'):
destModif = obj.modifiers.new(sourceModif.name, sourceModif.type)
# collect names of writable properties
properties = [p.identifier for p in destModif.bl_rna.properties
if not p.is_readonly]
# copy those properties
for prop in properties:
setattr(destModif, prop, getattr(sourceModif, prop))
print(destModif.name)
bpy.context.view_layer.objects.active = obj
bpy.ops.object.modifier_apply(modifier=destModif.name)
bpy.context.view_layer.objects.active = activeArmature
bpy.ops.object.mode_set(mode='POSE')
bpy.ops.pose.armature_apply()
bpy.ops.object.mode_set(mode='OBJECT')
def execute(self, context):
self.action_common(context)
return {"FINISHED"}
def invoke(self, context, event):
self.action_common(context)
return {"FINISHED"}
|
feureau/Small-Scripts
|
Blender/Blender config/2.91/scripts/addons/XNALaraMesh/xps_toolshelf.py
|
Python
|
gpl-3.0
| 6,741
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.paging import Paged
class RecoveryPointResourcePaged(Paged):
"""
    A paging container for iterating over a list of :class:`RecoveryPointResource <azure.mgmt.recoveryservicesbackup.models.RecoveryPointResource>` objects
"""
_attribute_map = {
'next_link': {'key': 'nextLink', 'type': 'str'},
'current_page': {'key': 'value', 'type': '[RecoveryPointResource]'}
}
def __init__(self, *args, **kwargs):
super(RecoveryPointResourcePaged, self).__init__(*args, **kwargs)
|
lmazuel/azure-sdk-for-python
|
azure-mgmt-recoveryservicesbackup/azure/mgmt/recoveryservicesbackup/models/recovery_point_resource_paged.py
|
Python
|
mit
| 1,000
|
""" Time transformations
This handles all the wibbly wobbly timey wimey stuff.
Such as easy conversions between different time systems.
Supported systems: GPS, UTC, GMST, LST, JD and MJD.
Formulae from:
IETF list of leap seconds
'Leap seconds announced by the IERS'
https://www.ietf.org/timezones/data/leap-seconds.list
Duffett-Smith1990
'Astronomy with your personal computer'
ISBN 0-521-38995-X
USNO
'Computing Greenwich Apparent Sidereal Time'
http://aa.usno.navy.mil/faq/docs/GAST.php
Adrian Price-Whelan
apwlib.convert
https://github.com/adrn/apwlib
"""
import calendar
import datetime
import math
from time import strptime
from . import angles, base
#: Dates of leap second introductions.
LEAP_SECONDS = (('January 1, 2017', 18),
('July 1, 2015', 17),
('July 1, 2012', 16),
('January 1, 2009', 15),
('January 1, 2006', 14),
('January 1, 1999', 13),
('July 1, 1997', 12),
('January 1, 1996', 11),
('July 1, 1994', 10),
('July 1, 1993', 9),
('July 1, 1992', 8),
('January 1, 1991', 7),
('January 1, 1990', 6),
('January 1, 1988', 5),
('July 1, 1985', 4),
('July 1, 1983', 3),
('July 1, 1982', 2),
('July 1, 1981', 1))
def time_to_decimal(time):
"""Converts a time or datetime object into decimal time
:param time: datetime.time or datetime.datetime object.
:return: decimal number representing the input time.
"""
return (time.hour + time.minute / 60. + time.second / 3600. +
time.microsecond / 3600000000.)
def decimal_to_time(hours):
"""Converts decimal time to a time object
    :param hours: decimal number of hours (a time of day).
    :return: datetime.time object representing the input time.
"""
hours, minutes, seconds = base.decimal_to_sexagesimal(hours)
seconds_frac, seconds = math.modf(seconds)
seconds = int(seconds)
microseconds = int(seconds_frac * 1e6)
return datetime.time(hours, minutes, seconds, microseconds)
def date_to_juliandate(year, month, day):
"""Convert year, month, and day to a Julian Date
Julian Date is the number of days since noon on January 1, 4713 B.C.
So the returned date will end in .5 because the date refers to midnight.
:param year: a Gregorian year (B.C. years are negative).
:param month: a Gregorian month (1-12).
:param day: a Gregorian day (1-31).
:return: the Julian Date for the given year, month, and day.
"""
year1 = year
month1 = month
if year1 < 0:
year1 += 1
if month in [1, 2]:
year1 -= 1
month1 = month + 12
if year1 > 1582 or (year1 == 1582 and month >= 10 and day >= 15):
a = int(year1 / 100)
b = 2 - a + int(a / 4)
else:
b = 0
if year1 < 0:
c = int((365.25 * year1) - 0.75)
else:
c = int(365.25 * year1)
d = int(30.6001 * (month1 + 1))
return b + c + d + day + 1720994.5
def datetime_to_juliandate(dt):
"""Convert a datetime object in UTC to a Julian Date
:param dt: datetime object.
:return: The Julian Date for the given datetime object.
"""
juliandate = date_to_juliandate(dt.year, dt.month, dt.day)
decimal_time = time_to_decimal(dt.time()) / 24.
return juliandate + decimal_time
def juliandate_to_modifiedjd(juliandate):
"""Convert a Julian Date to a Modified Julian Date
:param juliandate: a Julian Date.
:return: the Modified Julian Date.
"""
return juliandate - 2400000.5
def modifiedjd_to_juliandate(modifiedjd):
"""Convert a Modified Julian Date to Julian Date
    :param modifiedjd: a Modified Julian Date.
:return: Julian Date.
"""
return modifiedjd + 2400000.5
def datetime_to_modifiedjd(dt):
"""Convert a datetime object in UTC to a Modified Julian Date
:param dt: datetime object.
:return: the Modified Julian Date.
"""
jd = datetime_to_juliandate(dt)
return juliandate_to_modifiedjd(jd)
def juliandate_to_gmst(juliandate):
"""Convert a Julian Date to Greenwich Mean Sidereal Time
:param juliandate: Julian Date.
:return: decimal hours in GMST.
"""
jd0 = int(juliandate - 0.5) + 0.5 # Julian Date of previous midnight
    h = (juliandate - jd0) * 24.  # Hours since midnight
# Days since J2000 (Julian Date 2451545.)
d0 = jd0 - 2451545.
d = juliandate - 2451545.
t = d / 36525. # Centuries since J2000
gmst = (6.697374558 + 0.06570982441908 * d0 + 1.00273790935 * h +
0.000026 * t * t)
return gmst % 24.
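# Illustrative check (a sketch, not part of the original module): at J2000.0,
# Julian Date 2451545.0 (2000 January 1, 12:00 UT), the formula above gives the
# canonical GMST of about 18.697 hours:
# >>> round(juliandate_to_gmst(2451545.), 3)
# 18.697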
def utc_to_gmst(dt):
"""Convert a datetime object in UTC time to Greenwich Mean Sidereal Time
:param dt: datetime object in UTC time.
:return: decimal hours in GMST.
"""
jd = datetime_to_juliandate(dt)
return juliandate_to_gmst(jd)
def gmst_to_utc(dt):
"""Convert datetime object in Greenwich Mean Sidereal Time to UTC
Note: this requires a datetime object, not just the decimal hours.
:param dt: datetime object in GMST time.
:return: datetime object in UTC.
"""
jd = date_to_juliandate(dt.year, dt.month, dt.day)
d = jd - 2451545.
t = d / 36525.
t0 = 6.697374558 + (2400.051336 * t) + (0.000025862 * t * t)
t0 %= 24
gst = (time_to_decimal(dt.time()) - t0) % 24
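    # 0.9972695663 is the length of a sidereal day expressed in mean solar
    # days, so multiplying converts sidereal hours into solar (UT) hours.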
ut = gst * 0.9972695663
time = decimal_to_time(ut)
    return dt.replace(hour=time.hour, minute=time.minute, second=time.second,
microsecond=time.microsecond)
def juliandate_to_utc(juliandate):
"""Convert Julian Date to datetime object in UTC
:param juliandate: a Julian Date.
:return: datetime object in UTC time.
"""
juliandate += 0.5
jd_frac, jd_int = math.modf(juliandate)
if jd_int > 2299160:
a = int((jd_int - 1867216.25) / 36524.25)
b = jd_int + 1 + a - int(a / 4)
else:
b = jd_int
c = b + 1524
d = int((c - 122.1) / 365.25)
e = int(365.25 * d)
g = int((c - e) / 30.6001)
day = c - e + jd_frac - int(30.6001 * g)
if g < 13.5:
month = g - 1
else:
month = g - 13
month = int(month)
if month > 2.5:
year = d - 4716
else:
year = d - 4715
year = int(year)
day_frac, day = math.modf(day)
day = int(day)
date = datetime.date(year, month, day)
hours = day_frac * 24 # fractional part of day * 24 hours
time = decimal_to_time(hours)
return datetime.datetime.combine(date, time)
def modifiedjd_to_utc(modifiedjd):
"""Convert a Modified Julian Date to datetime object in UTC
    :param modifiedjd: a Modified Julian Date.
:return: datetime object in UTC time.
"""
juliandate = modifiedjd_to_juliandate(modifiedjd)
return juliandate_to_utc(juliandate)
def gmst_to_lst(hours, longitude):
"""Convert Greenwich Mean Sidereal Time to Local Sidereal Time
:param hours: decimal hours in GMST.
:param longitude: location in degrees, east positive.
:return: decimal hours in LST.
"""
longitude_time = angles.degrees_to_hours(longitude)
lst = hours + longitude_time
lst %= 24
return lst
def lst_to_gmst(hours, longitude):
"""Convert Local Sidereal Time to Greenwich Mean Sidereal Time
:param hours: decimal hours in LST.
:param longitude: location in degrees, east positive.
:return: decimal hours in GMST.
"""
longitude_time = angles.degrees_to_hours(longitude)
gmst = hours - longitude_time
gmst %= 24
return gmst
def utc_to_lst(dt, longitude):
"""Convert UTC to Local Sidereal Time
:param dt: datetime object in UTC.
:param longitude: location in degrees, east positive.
:return: decimal hours in LST.
"""
gmst = utc_to_gmst(dt)
return gmst_to_lst(gmst, longitude)
def gps_to_utc(timestamp):
"""Convert GPS time to UTC
:param timestamp: GPS timestamp in seconds.
:return: UTC timestamp in seconds.
"""
offset = next((seconds for date, seconds in LEAP_SECONDS
if timestamp >= utc_from_string(date)), 0)
return timestamp - offset
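# Illustrative note (a sketch, not part of the original module): after the most
# recent leap second on January 1, 2017 the GPS-UTC offset is 18 s, so for any
# GPS timestamp t past that date gps_to_utc(t) == t - 18.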
def utc_to_gps(timestamp):
"""Convert UTC to GPS time
:param timestamp: UTC timestamp in seconds.
:return: GPS timestamp in seconds.
"""
offset = next((seconds for date, seconds in LEAP_SECONDS
if timestamp >= utc_from_string(date)), 0)
return timestamp + offset
def utc_from_string(date):
"""Convert a date string to UTC
:param date: date string.
:return: UTC timestamp in seconds.
"""
t = strptime(date, '%B %d, %Y')
return calendar.timegm(t)
def gps_from_string(date):
"""Convert a date string to GPS time
:param date: date string.
:return: GPS timestamp in seconds.
"""
t = strptime(date, '%B %d, %Y')
return utc_to_gps(calendar.timegm(t))
def gps_to_lst(timestamp, longitude):
"""Convert a GPS timestamp to lst
:param timestamp: GPS timestamp in seconds.
:param longitude: location in degrees, east positive.
:return: decimal hours in LST.
"""
utc_timestamp = gps_to_utc(timestamp)
utc = datetime.datetime.utcfromtimestamp(utc_timestamp)
return utc_to_lst(utc, longitude)
def gps_to_datetime(timestamp):
"""Convert a GPS timestamp to datetime object
:param timestamp: GPS timestamp in seconds.
:return: datetime object.
"""
gps_dt = datetime.datetime.utcfromtimestamp(timestamp)
return gps_dt
def datetime_to_gps(dt):
"""Convert a GPS datetime object to a timestamp
:param dt: GPS datetime object.
:return: GPS timestamp in seconds.
"""
timestamp = calendar.timegm(dt.timetuple())
return timestamp
def process_time(time):
"""Convert timestamp or datetime to timestamp
:param time: GPS datetime object or GPS timestamp.
:return: GPS timestamp.
"""
try:
return int(time)
except (TypeError, ValueError):
try:
return datetime_to_gps(time)
except Exception:
raise RuntimeError('Unable to parse time: ', time)
|
HiSPARC/sapphire
|
sapphire/transformations/clock.py
|
Python
|
gpl-3.0
| 10,275
|
#! /usr/bin/env python
import pytest
from lmcipy.interpret import (
process_labels,
generate_opcodes,
opcode_func_deconstruct,
load_opcodes,
eval_opcode,
interpret,
SyntaxError
)
from lmcipy.machine import MachineState
@pytest.fixture
def empty_machine():
return MachineState()
def test_process_labels(empty_machine):
test_program = [
['INP'],
['LABEL1', 'DAT'],
['SUB', 'LABEL2'],
]
test_machine, res_program = process_labels(machine=empty_machine, program=test_program)
assert test_machine.labels == {'LABEL1': 1}
assert res_program == [
['INP'],
['DAT'],
['SUB', 'LABEL2']
]
def test_generate_opcodes(empty_machine):
test_program = [
['ADD', '10'],
['SUB', 'TESTLABEL'],
['INP'],
['HLT'],
['DAT'],
['DAT', '23'],
]
empty_machine.labels['TESTLABEL'] = 87
opcodes = generate_opcodes(machine=empty_machine, program=test_program)
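    # LMC machine codes are three digits: the hundreds digit selects the
    # operation (1=ADD, 2=SUB, 9xx=I/O, 0=HLT/DAT) and the low two digits hold
    # the operand address, so ADD 10 -> 110 and SUB TESTLABEL (at 87) -> 287.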
assert list(opcodes) == [110, 287, 901, 0, 0, 23]
def test_generate_opcodes_arg_wrong_count(empty_machine):
test_program = [
['ADD', '10', '20']
]
with pytest.raises(SyntaxError):
generate_opcodes(machine=empty_machine, program=test_program)
def test_generate_opcodes_arg_wrong_size(empty_machine):
test_program = [
['ADD', '100']
]
with pytest.raises(SyntaxError):
generate_opcodes(machine=empty_machine, program=test_program)
def test_generate_opcodes_arg_wrong_argument(empty_machine):
test_program = [
['ADD']
]
with pytest.raises(SyntaxError):
generate_opcodes(machine=empty_machine, program=test_program)
def test_opcode_to_func(empty_machine):
opcode = '901'
func = opcode_func_deconstruct(empty_machine, opcode)
assert func.__name__ == "opc_inp"
def test_opcode_to_func_with_arg(empty_machine):
opcode = '111'
func = opcode_func_deconstruct(empty_machine, opcode)
assert func.func.__name__ == "opc_add"
assert func.keywords['value'] == 11
def test_opcode_to_func_with_arg_single_digit(empty_machine):
opcode = '101'
func = opcode_func_deconstruct(empty_machine, opcode)
assert func.func.__name__ == "opc_add"
assert func.keywords['value'] == 1
def test_opcode_to_func_invalid_opcode(empty_machine):
opcode = '400'
with pytest.raises(Exception):
opcode_func_deconstruct(empty_machine, opcode)
def test_load_opcodes(empty_machine):
opcodes = [111, 000, 901, 244]
res_machine = load_opcodes(machine=empty_machine, opcodes=opcodes)
assert res_machine.memory[0:4] == opcodes
def test_eval_opcode_memory(empty_machine):
empty_machine.accumulator = 10
test_opcode = 301
res_machine = eval_opcode(machine=empty_machine, opcode=test_opcode)
assert res_machine.memory[1] == 10
def test_eval_opcode_accumulator(empty_machine):
empty_machine.accumulator = 10
empty_machine.memory[1] = 5
test_opcode = 201
res_machine = eval_opcode(machine=empty_machine, opcode=test_opcode)
assert res_machine.accumulator == 5
def test_minus_flag_set(empty_machine):
empty_machine.memory[1] = 20
empty_machine.accumulator = 15
test_opcode = 201
res_machine = eval_opcode(machine=empty_machine, opcode=test_opcode)
assert res_machine.minus_flag == True
assert res_machine.accumulator == 5
def test_minus_flag_unset(empty_machine):
empty_machine.memory[1] = 10
empty_machine.accumulator = 5
empty_machine.minus_flag = True
test_opcode = 101
res_machine = eval_opcode(machine=empty_machine, opcode=test_opcode)
assert res_machine.minus_flag == False
assert res_machine.accumulator == 5
|
fm4d/lmcipy
|
tests/test_interpret.py
|
Python
|
gpl-3.0
| 3,763
|
#runas BlackScholes(range(1,100), range(1,100), range(1,100), 0.5, 0.76, 12)
#bench BlackScholes(range(1,400001), range(1,400001), range(1,400001), 0.5, 0.76, 400000)
#pythran export BlackScholes(float list, float list, float list, float, float, int)
import math
def BlackScholes(stock_price, option_strike, option_years, Riskfree, Volatility, nb_opt):
RSQRT2PI = 1 / math.sqrt(math.pi * 2)
A1 = 0.31938153
A2 = -0.356563782
A3 = 1.781477937
A4 = -1.821255978
A5 = 1.330274429
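    # A1..A5 together with K below implement the Abramowitz & Stegun polynomial
    # approximation (26.2.17) to the cumulative normal distribution.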
call_result = []
put_result = []
for opt in xrange(0, nb_opt) :
sqrtT = math.sqrt(option_years[opt])
d1 = math.log(stock_price[opt] / option_strike[opt])
d1 += (Riskfree + 0.5 * Volatility * Volatility) * option_years[opt]
d1 /= (Volatility * sqrtT)
d2 = d1 - Volatility * sqrtT
K = 1.0 / (1.0 + 0.2316419 * abs(d1))
CNDD1 = RSQRT2PI * math.exp(-0.5 * d1 * d1) * (K * (A1 + K * (A2 + K * (A3 + K * (A4 + K * A5)))))
K = 1.0 / (1.0 + 0.2316419 * abs(d2))
CNDD2 = RSQRT2PI * math.exp(-0.5 * d2 * d2) * (K * (A1 + K * (A2 + K * (A3 + K * (A4 + K * A5)))))
expRT = math.exp(-Riskfree * option_years[opt])
call_result.append(stock_price[opt] * CNDD1 - option_strike[opt] * expRT * CNDD2)
put_result.append(option_strike[opt] * expRT * (1.0 - CNDD2) - stock_price[opt] * (1.0 - CNDD1))
return call_result, put_result
|
artas360/pythran
|
pythran/tests/cases/blacksholes.py
|
Python
|
bsd-3-clause
| 1,427
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
from glob import glob
import os
import sys
from setuptools import setup, Extension
from Cython.Build import cythonize
if sys.version_info[:2] < (2, 7):
print(
'nxcpy requires Python version 2.7 or later' +
' ({}.{} detected).'.format(*sys.version_info[:2]))
# Because networkx does
sys.exit(-1)
libraries = [
('nxcpy', {'sources': glob('src/*.c') + glob('src/*/*.c'),
'depends': glob('src/*.h') + glob('src/*/*.h'),
'include_dirs': ['src']})]
ext_modules = cythonize([
Extension('*.*', ['*/*.pyx'],
include_dirs=['src'],
libraries=['nxcpy']),
Extension('*.*.*', ['*/*/*.pyx'],
include_dirs=['src'],
libraries=['nxcpy'])]
)
install_requires = ['networkx', 'decorator']
if __name__ == "__main__":
setup(
name = 'nxcpy',
packages = ['nxcpy'],
libraries = libraries,
ext_modules = ext_modules,
install_requires = install_requires,
test_suite = 'nose.collector',
tests_require = ['nose>=0.10.1']
)
|
OrkoHunter/nxcpy
|
setup.py
|
Python
|
bsd-3-clause
| 1,193
|
from behaviours.Behaviour import Behaviour
from src.GameMethods import GameMethods
class Fall(Behaviour):
def update(self, delta_time, keys, config, game_methods: GameMethods):
        self.owner.velocity.y += float(config["Physics"]["gravity"]) * delta_time
|
cthit/CodeIT
|
behaviours/Fall.py
|
Python
|
mit
| 266
|
# Natural Language Toolkit: Tagged Corpus Reader
#
# Copyright (C) 2001-2016 NLTK Project
# Author: Edward Loper <edloper@gmail.com>
# Steven Bird <stevenbird1@gmail.com>
# Jacob Perkins <japerk@gmail.com>
# URL: <http://nltk.org/>
# For license information, see LICENSE.TXT
"""
A reader for corpora whose documents contain part-of-speech-tagged words.
"""
import os
from nltk import compat
from nltk.tag import str2tuple, map_tag
from nltk.tokenize import *
from nltk.corpus.reader.api import *
from nltk.corpus.reader.util import *
from nltk.corpus.reader.timit import read_timit_block
class TaggedCorpusReader(CorpusReader):
"""
Reader for simple part-of-speech tagged corpora. Paragraphs are
assumed to be split using blank lines. Sentences and words can be
tokenized using the default tokenizers, or by custom tokenizers
specified as parameters to the constructor. Words are parsed
using ``nltk.tag.str2tuple``. By default, ``'/'`` is used as the
separator. I.e., words should have the form::
word1/tag1 word2/tag2 word3/tag3 ...
But custom separators may be specified as parameters to the
constructor. Part of speech tags are case-normalized to upper
case.
"""
def __init__(self, root, fileids,
sep='/', word_tokenizer=WhitespaceTokenizer(),
sent_tokenizer=RegexpTokenizer('\n', gaps=True),
para_block_reader=read_blankline_block,
encoding='utf8',
tagset=None):
"""
Construct a new Tagged Corpus reader for a set of documents
located at the given root directory. Example usage:
>>> root = '/...path to corpus.../'
>>> reader = TaggedCorpusReader(root, '.*', '.txt') # doctest: +SKIP
:param root: The root directory for this corpus.
:param fileids: A list or regexp specifying the fileids in this corpus.
"""
CorpusReader.__init__(self, root, fileids, encoding)
self._sep = sep
self._word_tokenizer = word_tokenizer
self._sent_tokenizer = sent_tokenizer
self._para_block_reader = para_block_reader
self._tagset = tagset
def raw(self, fileids=None):
"""
:return: the given file(s) as a single string.
:rtype: str
"""
if fileids is None: fileids = self._fileids
elif isinstance(fileids, compat.string_types): fileids = [fileids]
return concat([self.open(f).read() for f in fileids])
def words(self, fileids=None):
"""
:return: the given file(s) as a list of words
and punctuation symbols.
:rtype: list(str)
"""
return concat([TaggedCorpusView(fileid, enc,
False, False, False,
self._sep, self._word_tokenizer,
self._sent_tokenizer,
self._para_block_reader,
None)
for (fileid, enc) in self.abspaths(fileids, True)])
def sents(self, fileids=None):
"""
:return: the given file(s) as a list of
sentences or utterances, each encoded as a list of word
strings.
:rtype: list(list(str))
"""
return concat([TaggedCorpusView(fileid, enc,
False, True, False,
self._sep, self._word_tokenizer,
self._sent_tokenizer,
self._para_block_reader,
None)
for (fileid, enc) in self.abspaths(fileids, True)])
def paras(self, fileids=None):
"""
:return: the given file(s) as a list of
paragraphs, each encoded as a list of sentences, which are
in turn encoded as lists of word strings.
:rtype: list(list(list(str)))
"""
return concat([TaggedCorpusView(fileid, enc,
False, True, True,
self._sep, self._word_tokenizer,
self._sent_tokenizer,
self._para_block_reader,
None)
for (fileid, enc) in self.abspaths(fileids, True)])
def tagged_words(self, fileids=None, tagset=None):
"""
:return: the given file(s) as a list of tagged
words and punctuation symbols, encoded as tuples
``(word,tag)``.
:rtype: list(tuple(str,str))
"""
if tagset and tagset != self._tagset:
tag_mapping_function = lambda t: map_tag(self._tagset, tagset, t)
else:
tag_mapping_function = None
return concat([TaggedCorpusView(fileid, enc,
True, False, False,
self._sep, self._word_tokenizer,
self._sent_tokenizer,
self._para_block_reader,
tag_mapping_function)
for (fileid, enc) in self.abspaths(fileids, True)])
def tagged_sents(self, fileids=None, tagset=None):
"""
:return: the given file(s) as a list of
sentences, each encoded as a list of ``(word,tag)`` tuples.
:rtype: list(list(tuple(str,str)))
"""
if tagset and tagset != self._tagset:
tag_mapping_function = lambda t: map_tag(self._tagset, tagset, t)
else:
tag_mapping_function = None
return concat([TaggedCorpusView(fileid, enc,
True, True, False,
self._sep, self._word_tokenizer,
self._sent_tokenizer,
self._para_block_reader,
tag_mapping_function)
for (fileid, enc) in self.abspaths(fileids, True)])
def tagged_paras(self, fileids=None, tagset=None):
"""
:return: the given file(s) as a list of
paragraphs, each encoded as a list of sentences, which are
in turn encoded as lists of ``(word,tag)`` tuples.
:rtype: list(list(list(tuple(str,str))))
"""
if tagset and tagset != self._tagset:
tag_mapping_function = lambda t: map_tag(self._tagset, tagset, t)
else:
tag_mapping_function = None
return concat([TaggedCorpusView(fileid, enc,
True, True, True,
self._sep, self._word_tokenizer,
self._sent_tokenizer,
self._para_block_reader,
tag_mapping_function)
for (fileid, enc) in self.abspaths(fileids, True)])
class CategorizedTaggedCorpusReader(CategorizedCorpusReader,
TaggedCorpusReader):
"""
A reader for part-of-speech tagged corpora whose documents are
divided into categories based on their file identifiers.
"""
def __init__(self, *args, **kwargs):
"""
Initialize the corpus reader. Categorization arguments
(``cat_pattern``, ``cat_map``, and ``cat_file``) are passed to
the ``CategorizedCorpusReader`` constructor. The remaining arguments
are passed to the ``TaggedCorpusReader``.
"""
CategorizedCorpusReader.__init__(self, kwargs)
TaggedCorpusReader.__init__(self, *args, **kwargs)
def _resolve(self, fileids, categories):
if fileids is not None and categories is not None:
raise ValueError('Specify fileids or categories, not both')
if categories is not None:
return self.fileids(categories)
else:
return fileids
def raw(self, fileids=None, categories=None):
return TaggedCorpusReader.raw(
self, self._resolve(fileids, categories))
def words(self, fileids=None, categories=None):
return TaggedCorpusReader.words(
self, self._resolve(fileids, categories))
def sents(self, fileids=None, categories=None):
return TaggedCorpusReader.sents(
self, self._resolve(fileids, categories))
def paras(self, fileids=None, categories=None):
return TaggedCorpusReader.paras(
self, self._resolve(fileids, categories))
def tagged_words(self, fileids=None, categories=None, tagset=None):
return TaggedCorpusReader.tagged_words(
self, self._resolve(fileids, categories), tagset)
def tagged_sents(self, fileids=None, categories=None, tagset=None):
return TaggedCorpusReader.tagged_sents(
self, self._resolve(fileids, categories), tagset)
def tagged_paras(self, fileids=None, categories=None, tagset=None):
return TaggedCorpusReader.tagged_paras(
self, self._resolve(fileids, categories), tagset)
class TaggedCorpusView(StreamBackedCorpusView):
"""
A specialized corpus view for tagged documents. It can be
customized via flags to divide the tagged corpus documents up by
sentence or paragraph, and to include or omit part of speech tags.
``TaggedCorpusView`` objects are typically created by
``TaggedCorpusReader`` (not directly by nltk users).
"""
def __init__(self, corpus_file, encoding, tagged, group_by_sent,
group_by_para, sep, word_tokenizer, sent_tokenizer,
para_block_reader, tag_mapping_function=None):
self._tagged = tagged
self._group_by_sent = group_by_sent
self._group_by_para = group_by_para
self._sep = sep
self._word_tokenizer = word_tokenizer
self._sent_tokenizer = sent_tokenizer
self._para_block_reader = para_block_reader
self._tag_mapping_function = tag_mapping_function
StreamBackedCorpusView.__init__(self, corpus_file, encoding=encoding)
def read_block(self, stream):
"""Reads one paragraph at a time."""
block = []
for para_str in self._para_block_reader(stream):
para = []
for sent_str in self._sent_tokenizer.tokenize(para_str):
sent = [str2tuple(s, self._sep) for s in
self._word_tokenizer.tokenize(sent_str)]
if self._tag_mapping_function:
sent = [(w, self._tag_mapping_function(t)) for (w,t) in sent]
if not self._tagged:
sent = [w for (w,t) in sent]
if self._group_by_sent:
para.append(sent)
else:
para.extend(sent)
if self._group_by_para:
block.append(para)
else:
block.extend(para)
return block
# needs to implement simplified tags
class MacMorphoCorpusReader(TaggedCorpusReader):
"""
A corpus reader for the MAC_MORPHO corpus. Each line contains a
single tagged word, using '_' as a separator. Sentence boundaries
are based on the end-sentence tag ('_.'). Paragraph information
is not included in the corpus, so each paragraph returned by
``self.paras()`` and ``self.tagged_paras()`` contains a single
sentence.
"""
def __init__(self, root, fileids, encoding='utf8', tagset=None):
TaggedCorpusReader.__init__(
self, root, fileids, sep='_',
word_tokenizer=LineTokenizer(),
sent_tokenizer=RegexpTokenizer('.*\n'),
para_block_reader=self._read_block,
encoding=encoding,
tagset=tagset)
def _read_block(self, stream):
return read_regexp_block(stream, r'.*', r'.*_\.')
class TimitTaggedCorpusReader(TaggedCorpusReader):
"""
A corpus reader for tagged sentences that are included in the TIMIT corpus.
"""
def __init__(self, *args, **kwargs):
TaggedCorpusReader.__init__(
self, para_block_reader=read_timit_block, *args, **kwargs)
def paras(self):
raise NotImplementedError('use sents() instead')
def tagged_paras(self):
raise NotImplementedError('use tagged_sents() instead')
|
adazey/Muzez
|
libs/nltk/corpus/reader/tagged.py
|
Python
|
gpl-3.0
| 12,886
|
#
# Copyright (C) 2015 The CyanogenMod Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
def FullOTA_InstallEnd(info):
info.script.AppendExtra(
('mount("ext4", "EMMC", "/dev/block/platform/msm_sdcc.1/by-name/system", "/system", "");\n'
'assert(run_program("/tmp/install/bin/removenfc.sh") == 0);\n'
'unmount("/system");'))
|
Shahanmik3/android_device_xiaomi_cancro
|
releasetools/releasetools.py
|
Python
|
gpl-2.0
| 841
|
#!/usr/bin/env python3
import gpio
import rospy
import time
from std_msgs.msg import Bool
from diagnostic_msgs.msg import DiagnosticArray, DiagnosticStatus
CHANNEL1 = 394
CHANNEL2 = 393
gpio.setup(CHANNEL1, gpio.OUT)
gpio.setup(CHANNEL2, gpio.OUT)
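# Duration (seconds) each GPIO channel is held high when a torpedo is fired.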
FIRETIME = 0.04
t1Last = False
t2Last = False
def fireTorp1(data):
global t1Last
if data.data and not t1Last:
gpio.output(CHANNEL1, True)
time.sleep(FIRETIME)
gpio.output(CHANNEL1, False)
t1Last = data.data
def fireTorp2(data):
global t2Last
if data.data and not t2Last:
gpio.output(CHANNEL2, True)
time.sleep(FIRETIME)
gpio.output(CHANNEL2, False)
t2Last = data.data
def start():
rospy.init_node('TorpedoWatcher', anonymous=True)
rospy.Subscriber('Torpedo1', Bool, fireTorp1)
rospy.Subscriber('Torpedo2', Bool, fireTorp2)
rospy.spin()
if __name__ == '__main__':
try:
start()
except rospy.ROSInterruptException:
pass
|
RoboticsClubatUCF/RoboSub
|
ucf_sub_embedded_ros/ucf_sub/src/sub_sensors/src/Torpedo.py
|
Python
|
mit
| 900
|
"""
Copyright 2020 Google LLC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
"""
This module contains functions to detect date from strings using the
parameter day_first.
Day_first denotes whether the day occurs before the month in the dates
in the date column.
Example - '29-02-19', here day_first is true
"""
import datetime
import pandas
def str_to_datetime(date, day_first):
"""
    As pandas to_datetime returns a timestamp - this function converts it to
a datetime object using the strptime method.
Args :
date - string representing date
day_first - Bool/None
Argument required to parse ambiguous dates.
Returns :
date as a datetime object
"""
# converting str date to a timestamp format ex- '2019-08-05 17:51:29'
date_timestamp = str(pandas.to_datetime(date, dayfirst=day_first))
# converting the timestamp to datetime object
date_datetime = datetime.datetime.strptime(date_timestamp,
'%Y-%m-%d %H:%M:%S')
return date_datetime
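# Illustrative sketch (not part of the original module): with day_first=True an
# ambiguous day-month string is parsed day-first, e.g.
# >>> str_to_datetime('05-08-2019', day_first=True)
# datetime.datetime(2019, 8, 5, 0, 0)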
|
googleinterns/debaised-analysis
|
intents/util/date_module.py
|
Python
|
apache-2.0
| 1,544
|
# -*- coding: UTF-8 -*-
## Copyright 2013-2015 Luc Saffre
# License: BSD (see file COPYING for details)
"""
Moved to :ref:`welfare.tested.misc`.
"""
|
khchine5/lino-welfare
|
tests/test_welfare_demo.py
|
Python
|
agpl-3.0
| 152
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from collections import UserDict
from datetime import datetime, timedelta, timezone
from unittest.mock import MagicMock, Mock, patch
import flask
import pytest
import requests
import responses
import main
# Create a fake "app" for generating test request contexts.
@pytest.fixture(scope="module")
def app():
return flask.Flask(__name__)
def test_lazy_globals(app):
with app.test_request_context():
main.lazy_globals(flask.request)
def test_scope_demo(app):
with app.test_request_context():
res = main.scope_demo(flask.request)
assert res == 'Per instance: 362880, per function: 45'
@responses.activate
def test_connection_pooling_200(app):
responses.add(responses.GET, 'http://example.com',
json={'status': 'OK'}, status=200)
with app.test_request_context():
main.connection_pooling(flask.request)
@responses.activate
def test_connection_pooling_404(app):
responses.add(responses.GET, 'http://example.com',
json={'error': 'not found'}, status=404)
with app.test_request_context():
with pytest.raises(requests.exceptions.HTTPError):
main.connection_pooling(flask.request)
def test_avoid_infinite_retries(capsys):
now = datetime.now(timezone.utc)
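    # Pin main.datetime.now() to a fixed instant so the event ages computed by
    # the function under test (15 s and 5 s below) are deterministic.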
with patch('main.datetime', wraps=datetime) as datetime_mock:
datetime_mock.now = Mock(return_value=now)
old_context = UserDict()
old_context.timestamp = (now - timedelta(seconds=15)).isoformat()
old_context.event_id = 'old_event_id'
young_context = UserDict()
young_context.timestamp = (now - timedelta(seconds=5)).isoformat()
young_context.event_id = 'young_event_id'
main.avoid_infinite_retries(None, old_context)
out, _ = capsys.readouterr()
assert f"Dropped {old_context.event_id} (age 15000.0ms)" in out
main.avoid_infinite_retries(None, young_context)
out, _ = capsys.readouterr()
assert f"Processed {young_context.event_id} (age 5000.0ms)" in out
def test_retry_or_not():
with patch('main.error_client') as error_client_mock:
error_client_mock.report_exception = MagicMock()
event = Mock(data={})
main.retry_or_not(event, None)
assert error_client_mock.report_exception.call_count == 1
event.data = {'retry': True}
with pytest.raises(RuntimeError):
main.retry_or_not(event, None)
assert error_client_mock.report_exception.call_count == 2
|
GoogleCloudPlatform/python-docs-samples
|
functions/tips/main_test.py
|
Python
|
apache-2.0
| 3,090
|
# Copyright 2016 Battelle Energy Alliance, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import unicode_literals, absolute_import
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from ci.tests import DBTester
from client.tests import utils
class LiveClientTester(StaticLiveServerTestCase, DBTester.DBCompare):
def setUp(self):
super(LiveClientTester, self).setUp()
self.client_info = utils.default_client_info()
self.client_info["servers"] = [self.live_server_url]
self.client_info["server"] = self.live_server_url
self.client_info["update_step_time"] = 1
self.client_info["server_update_interval"] = 1
|
idaholab/civet
|
client/tests/LiveClientTester.py
|
Python
|
apache-2.0
| 1,201
|
def func() -> int:
pass
|
allotria/intellij-community
|
python/testData/stubs/FunctionAnnotation.py
|
Python
|
apache-2.0
| 27
|
from __future__ import absolute_import
from celery import shared_task
@shared_task(ignore_result=False)
def add(x, y):
return x + y
|
delving/nave
|
nave/common/tasks.py
|
Python
|
gpl-2.0
| 139
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
import random
import time
from user_info import get_user_info
def get_media_id_user_feed(self):
if self.login_status:
if self.is_by_tag != True:
log_string = "======> Get media id by user: %s <======" % (
self.current_user)
if self.is_checked != True:
get_user_info(self, self.current_user)
            if (self.is_fake_account != True and self.is_active_user != False
                    and self.is_selebgram != True) or self.is_by_tag != False:
url = 'https://www.instagram.com/%s/?__a=1' % (self.current_user)
else:
log_string = "======> Get media id by Tag <======"
url = 'https://www.instagram.com/explore/tags/%s/?__a=1' % (
random.choice(self.tag_list))
self.write_log(log_string)
        if (self.login_status == 1 and self.is_fake_account != True
                and self.is_active_user != False
                and self.is_selebgram != True) or self.is_by_tag != False:
try:
r = self.s.get(url)
all_data = json.loads(r.text)
if self.is_by_tag != True:
self.media_by_user = list(all_data['user']['media']['nodes'])
else:
self.media_by_user = list(all_data['tag']['media']['nodes'])
log_string = "Get media by user success!"
self.write_log(log_string)
except:
self.media_by_user = []
self.write_log("XXXXXXX Except on get_media! XXXXXXX")
time.sleep(60)
return 0
else:
log_string = "Reject this account \n=================== \nReason : \n Is Selebgram : %s \n Is Fake Account : %s \n Is Active User : %s \n" % (
self.is_selebgram, self.is_fake_account, self.is_active_user)
self.write_log(log_string)
self.is_rejected = True
self.media_by_user = []
self.media_on_feed = []
return 0
|
iamandresdiaz/botSolidarity
|
src/user_feed.py
|
Python
|
mit
| 2,064
|
from __future__ import absolute_import, division, print_function
import copy
from ._compat import iteritems
from ._make import NOTHING, _obj_setattr, fields
from .exceptions import AttrsAttributeNotFoundError
def asdict(
inst,
recurse=True,
filter=None,
dict_factory=dict,
retain_collection_types=False,
):
"""
Return the ``attrs`` attribute values of *inst* as a dict.
Optionally recurse into other ``attrs``-decorated classes.
:param inst: Instance of an ``attrs``-decorated class.
:param bool recurse: Recurse into classes that are also
``attrs``-decorated.
:param callable filter: A callable whose return code determines whether an
attribute or element is included (``True``) or dropped (``False``). Is
called with the :class:`attr.Attribute` as the first argument and the
value as the second argument.
:param callable dict_factory: A callable to produce dictionaries from. For
example, to produce ordered dictionaries instead of normal Python
dictionaries, pass in ``collections.OrderedDict``.
:param bool retain_collection_types: Do not convert to ``list`` when
encountering an attribute whose type is ``tuple`` or ``set``. Only
meaningful if ``recurse`` is ``True``.
:rtype: return type of *dict_factory*
:raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
class.
.. versionadded:: 16.0.0 *dict_factory*
.. versionadded:: 16.1.0 *retain_collection_types*
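    Example (an illustrative sketch, not from the original docstring; ``C`` is
    a hypothetical ``attrs`` class)::

        >>> import attr
        >>> @attr.s
        ... class C(object):
        ...     x = attr.ib()
        ...     y = attr.ib()
        >>> asdict(C(1, C(2, 3)))
        {'x': 1, 'y': {'x': 2, 'y': 3}}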
"""
attrs = fields(inst.__class__)
rv = dict_factory()
for a in attrs:
v = getattr(inst, a.name)
if filter is not None and not filter(a, v):
continue
if recurse is True:
if has(v.__class__):
rv[a.name] = asdict(
v, True, filter, dict_factory, retain_collection_types
)
elif isinstance(v, (tuple, list, set)):
cf = v.__class__ if retain_collection_types is True else list
rv[a.name] = cf(
[
_asdict_anything(
i, filter, dict_factory, retain_collection_types
)
for i in v
]
)
elif isinstance(v, dict):
df = dict_factory
rv[a.name] = df(
(
_asdict_anything(
kk, filter, df, retain_collection_types
),
_asdict_anything(
vv, filter, df, retain_collection_types
),
)
for kk, vv in iteritems(v)
)
else:
rv[a.name] = v
else:
rv[a.name] = v
return rv
def _asdict_anything(val, filter, dict_factory, retain_collection_types):
"""
``asdict`` only works on attrs instances, this works on anything.
"""
if getattr(val.__class__, "__attrs_attrs__", None) is not None:
# Attrs class.
rv = asdict(val, True, filter, dict_factory, retain_collection_types)
elif isinstance(val, (tuple, list, set)):
cf = val.__class__ if retain_collection_types is True else list
rv = cf(
[
_asdict_anything(
i, filter, dict_factory, retain_collection_types
)
for i in val
]
)
elif isinstance(val, dict):
df = dict_factory
rv = df(
(
_asdict_anything(kk, filter, df, retain_collection_types),
_asdict_anything(vv, filter, df, retain_collection_types),
)
for kk, vv in iteritems(val)
)
else:
rv = val
return rv
def astuple(
inst,
recurse=True,
filter=None,
tuple_factory=tuple,
retain_collection_types=False,
):
"""
Return the ``attrs`` attribute values of *inst* as a tuple.
Optionally recurse into other ``attrs``-decorated classes.
:param inst: Instance of an ``attrs``-decorated class.
:param bool recurse: Recurse into classes that are also
``attrs``-decorated.
:param callable filter: A callable whose return code determines whether an
attribute or element is included (``True``) or dropped (``False``). Is
called with the :class:`attr.Attribute` as the first argument and the
value as the second argument.
:param callable tuple_factory: A callable to produce tuples from. For
example, to produce lists instead of tuples.
:param bool retain_collection_types: Do not convert to ``list``
or ``dict`` when encountering an attribute which type is
``tuple``, ``dict`` or ``set``. Only meaningful if ``recurse`` is
``True``.
:rtype: return type of *tuple_factory*
:raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
class.
.. versionadded:: 16.2.0
"""
attrs = fields(inst.__class__)
rv = []
retain = retain_collection_types # Very long. :/
for a in attrs:
v = getattr(inst, a.name)
if filter is not None and not filter(a, v):
continue
if recurse is True:
if has(v.__class__):
rv.append(
astuple(
v,
recurse=True,
filter=filter,
tuple_factory=tuple_factory,
retain_collection_types=retain,
)
)
elif isinstance(v, (tuple, list, set)):
cf = v.__class__ if retain is True else list
rv.append(
cf(
[
astuple(
j,
recurse=True,
filter=filter,
tuple_factory=tuple_factory,
retain_collection_types=retain,
)
if has(j.__class__)
else j
for j in v
]
)
)
elif isinstance(v, dict):
df = v.__class__ if retain is True else dict
rv.append(
df(
(
astuple(
kk,
tuple_factory=tuple_factory,
retain_collection_types=retain,
)
if has(kk.__class__)
else kk,
astuple(
vv,
tuple_factory=tuple_factory,
retain_collection_types=retain,
)
if has(vv.__class__)
else vv,
)
for kk, vv in iteritems(v)
)
)
else:
rv.append(v)
else:
rv.append(v)
return rv if tuple_factory is list else tuple_factory(rv)
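# Illustrative sketch (not part of the module): ``astuple`` with and without
# ``retain_collection_types``. ``C`` is a hypothetical class.
#
#   import attr
#
#   @attr.s
#   class C(object):
#       xs = attr.ib()
#
#   attr.astuple(C(xs=(1, 2)))
#   # -> ([1, 2],)   tuples are converted to lists by default
#   attr.astuple(C(xs=(1, 2)), retain_collection_types=True)
#   # -> ((1, 2),)   original collection type preserved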
def has(cls):
"""
Check whether *cls* is a class with ``attrs`` attributes.
:param type cls: Class to introspect.
:raise TypeError: If *cls* is not a class.
:rtype: :class:`bool`
"""
return getattr(cls, "__attrs_attrs__", None) is not None
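# Illustrative sketch: ``has`` simply checks for the ``__attrs_attrs__``
# marker that ``attr.s`` sets on decorated classes.
#
#   import attr
#
#   @attr.s
#   class C(object):
#       pass
#
#   attr.has(C)      # -> True
#   attr.has(dict)   # -> False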
def assoc(inst, **changes):
"""
Copy *inst* and apply *changes*.
:param inst: Instance of a class with ``attrs`` attributes.
:param changes: Keyword changes in the new copy.
:return: A copy of inst with *changes* incorporated.
:raise attr.exceptions.AttrsAttributeNotFoundError: If *attr_name* couldn't
be found on *cls*.
:raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
class.
.. deprecated:: 17.1.0
Use :func:`evolve` instead.
"""
import warnings
warnings.warn(
"assoc is deprecated and will be removed after 2018/01.",
DeprecationWarning,
stacklevel=2,
)
new = copy.copy(inst)
attrs = fields(inst.__class__)
for k, v in iteritems(changes):
a = getattr(attrs, k, NOTHING)
if a is NOTHING:
raise AttrsAttributeNotFoundError(
"{k} is not an attrs attribute on {cl}.".format(
k=k, cl=new.__class__
)
)
_obj_setattr(new, k, v)
return new
def evolve(inst, **changes):
"""
Create a new instance, based on *inst* with *changes* applied.
:param inst: Instance of a class with ``attrs`` attributes.
:param changes: Keyword changes in the new copy.
:return: A copy of inst with *changes* incorporated.
:raise TypeError: If *attr_name* couldn't be found in the class
``__init__``.
:raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
class.
.. versionadded:: 17.1.0
"""
cls = inst.__class__
attrs = fields(cls)
for a in attrs:
if not a.init:
continue
attr_name = a.name # To deal with private attributes.
init_name = attr_name if attr_name[0] != "_" else attr_name[1:]
if init_name not in changes:
changes[init_name] = getattr(inst, attr_name)
return cls(**changes)
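# Illustrative sketch (hypothetical class): ``evolve`` goes through
# ``__init__`` -- so validators and converters run -- whereas the deprecated
# ``assoc`` bypasses it with ``copy`` plus ``setattr``.
#
#   import attr
#
#   @attr.s
#   class User(object):
#       name = attr.ib()
#       admin = attr.ib(default=False)
#
#   u = User(name="alice")
#   attr.evolve(u, admin=True)
#   # -> User(name='alice', admin=True); u itself is unchanged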
|
fnaum/rez
|
src/rez/vendor/attr/_funcs.py
|
Python
|
lgpl-3.0
| 9,725
|
#!/usr/bin/env python
"""A server that retrieves Rekall profiles by name."""
import json
import urllib2
import zlib
import logging
from grr.lib import access_control
from grr.lib import aff4
from grr.lib import config_lib
from grr.lib import rdfvalue
from grr.lib import registry
class ProfileServer(object):
__metaclass__ = registry.MetaclassRegistry
def __init__(self):
self.token = access_control.ACLToken(username="RekallProfileServer",
reason="Implied.")
self.token.supervisor = True
def GetProfileByName(self, profile_name):
"""Retrieves a profile by name."""
pass
class CachingProfileServer(ProfileServer):
"""A ProfileServer that caches profiles in the AFF4 space."""
def _GetProfileFromCache(self, profile_name):
cache_urn = rdfvalue.RDFURN(config_lib.CONFIG["Rekall.profile_cache_urn"])
try:
aff4_profile = aff4.FACTORY.Open(
cache_urn.Add(profile_name), aff4_type="AFF4RekallProfile",
token=self.token)
return aff4_profile.Get(aff4_profile.Schema.PROFILE)
except IOError:
pass
def _StoreProfile(self, profile):
cache_urn = rdfvalue.RDFURN(config_lib.CONFIG["Rekall.profile_cache_urn"])
aff4_profile = aff4.FACTORY.Create(
cache_urn.Add(profile.name), "AFF4RekallProfile",
token=self.token)
aff4_profile.Set(aff4_profile.Schema.PROFILE(profile))
aff4_profile.Close()
def GetProfileByName(self, profile_name, ignore_cache=False):
"""Retrieves a profile by name."""
if not profile_name.endswith(".gz"):
profile_name = "%s.gz" % profile_name
if not ignore_cache:
profile = self._GetProfileFromCache(profile_name)
if profile:
return profile
profile = super(CachingProfileServer, self).GetProfileByName(profile_name)
if profile:
self._StoreProfile(profile)
return profile
class RekallRepositoryProfileServer(ProfileServer):
"""This server gets the profiles from the official Rekall repository."""
def GetProfileByName(self, profile_name):
if not profile_name.endswith(".gz"):
profile_name = "%s.gz" % profile_name
try:
url = "%s/%s" % (config_lib.CONFIG["Rekall.profile_repository"],
profile_name)
handle = urllib2.urlopen(url, timeout=10)
except urllib2.HTTPError as e:
if e.code == 404:
logging.info(
"Got a 404 while downloading Rekall profile %s", url)
return None
raise
except urllib2.URLError as e:
logging.info(
"Got an URLError while downloading Rekall profile %s: %s",
url, e.reason)
raise
profile_data = handle.read()
if profile_data[:3] != "\x1F\x8B\x08":
raise ValueError("Downloaded file does not look like gzipped data: %s",
profile_data[:100])
return rdfvalue.RekallProfile(name=profile_name,
data=profile_data)
class GRRRekallProfileServer(CachingProfileServer,
RekallRepositoryProfileServer):
"""A caching Rekall profile server."""
def GetAllProfiles(self):
"""This function will download all profiles and cache them locally."""
inv_profile = self.GetProfileByName("v1.0/inventory", ignore_cache=True)
inventory_json = zlib.decompress(inv_profile.data, 16 + zlib.MAX_WBITS)
inventory = json.loads(inventory_json)
for profile in inventory["$INVENTORY"].keys():
profile = "v1.0/%s" % profile
logging.info("Getting profile: %s", profile)
try:
self.GetProfileByName(profile, ignore_cache=True)
except urllib2.URLError as e:
logging.info("Exception: %s", e)
def GetMissingProfiles(self):
"""This will download all profiles that are not already cached."""
inv_profile = self.GetProfileByName("v1.0/inventory", ignore_cache=True)
inventory_json = zlib.decompress(inv_profile.data, 16 + zlib.MAX_WBITS)
inventory = json.loads(inventory_json)
cache_urn = rdfvalue.RDFURN(config_lib.CONFIG["Rekall.profile_cache_urn"])
profiles = []
for profile in inventory["$INVENTORY"].keys():
profile = "v1.0/%s" % profile
if not profile.endswith(".gz"):
profile = "%s.gz" % profile
profiles.append(profile)
profile_urns = [cache_urn.Add(profile) for profile in profiles]
stats = aff4.FACTORY.Stat(profile_urns)
profile_infos = {}
for metadata in stats:
profile_infos[metadata["urn"]] = metadata["type"][1]
for profile in sorted(profiles):
profile_urn = cache_urn.Add(profile)
if (profile_urn not in profile_infos or
profile_infos[profile_urn] != u"AFF4RekallProfile"):
logging.info("Getting missing profile: %s" % profile)
try:
self.GetProfileByName(profile, ignore_cache=True)
except urllib2.URLError as e:
logging.info("Exception: %s", e)
|
pchaigno/grreat
|
lib/rekall_profile_server.py
|
Python
|
apache-2.0
| 4,932
|
# encoding: utf-8
# module PyQt4.QtGui
# from /usr/lib/python3/dist-packages/PyQt4/QtGui.cpython-34m-x86_64-linux-gnu.so
# by generator 1.135
# no doc
# imports
import PyQt4.QtCore as __PyQt4_QtCore
class QStyleOption(): # skipped bases: <class 'sip.simplewrapper'>
"""
QStyleOption(int version=QStyleOption.Version, int type=QStyleOption.SO_Default)
QStyleOption(QStyleOption)
"""
def init(self, QWidget): # real signature unknown; restored from __doc__
""" QStyleOption.init(QWidget) """
pass
def initFrom(self, QWidget): # real signature unknown; restored from __doc__
""" QStyleOption.initFrom(QWidget) """
pass
def __init__(self, *__args): # real signature unknown; restored from __doc__ with multiple overloads
pass
__weakref__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""list of weak references to the object (if defined)"""
direction = None # (!) real value is ''
fontMetrics = None # (!) real value is ''
OptionType = None # (!) real value is ''
palette = None # (!) real value is ''
rect = None # (!) real value is ''
SO_Button = 2
SO_ComboBox = 983044
SO_Complex = 983040
SO_ComplexCustomBase = 251658240
SO_CustomBase = 3840
SO_Default = 0
SO_DockWidget = 10
SO_FocusRect = 1
SO_Frame = 5
SO_GraphicsItem = 17
SO_GroupBox = 983047
SO_Header = 8
SO_MenuItem = 4
SO_ProgressBar = 6
SO_Q3DockWindow = 9
SO_Q3ListView = 983045
SO_Q3ListViewItem = 11
SO_RubberBand = 15
SO_SizeGrip = 983048
SO_Slider = 983041
SO_SpinBox = 983042
SO_Tab = 3
SO_TabBarBase = 14
SO_TabWidgetFrame = 13
SO_TitleBar = 983046
SO_ToolBar = 16
SO_ToolBox = 7
SO_ToolButton = 983043
SO_ViewItem = 12
state = None # (!) real value is ''
StyleOptionType = None # (!) real value is ''
StyleOptionVersion = None # (!) real value is ''
type = None # (!) real value is ''
Type = 0
Version = 1
version = None # (!) real value is ''
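# Illustrative sketch (not part of the generated stub): the typical use of
# QStyleOption inside a custom widget's paintEvent, so that style sheets
# apply to plain QWidget subclasses. Assumes a running QApplication.
#
#   from PyQt4 import QtGui
#
#   class MyWidget(QtGui.QWidget):
#       def paintEvent(self, event):
#           opt = QtGui.QStyleOption()
#           opt.initFrom(self)  # copy state/rect/palette from the widget
#           painter = QtGui.QPainter(self)
#           self.style().drawPrimitive(
#               QtGui.QStyle.PE_Widget, opt, painter, self)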
|
ProfessorX/Config
|
.PyCharm30/system/python_stubs/-1247971765/PyQt4/QtGui/QStyleOption.py
|
Python
|
gpl-2.0
| 2,102
|
# -*- coding: utf-8 -*-
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
# (c) 2016, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import binascii
import io
import os
import tempfile
from binascii import hexlify
import pytest
from ansible.compat.tests import unittest
from ansible import errors
from ansible.module_utils import six
from ansible.module_utils._text import to_bytes, to_text
from ansible.parsing import vault
from units.mock.loader import DictDataLoader
from units.mock.vault_helper import TextVaultSecret
class TestVaultSecret(unittest.TestCase):
def test(self):
secret = vault.VaultSecret()
secret.load()
self.assertIsNone(secret._bytes)
def test_bytes(self):
some_text = u'私はガラスを食べられます。それは私を傷つけません。'
_bytes = to_bytes(some_text)
secret = vault.VaultSecret(_bytes)
secret.load()
self.assertEqual(secret.bytes, _bytes)
class TestPromptVaultSecret(unittest.TestCase):
def test_empty_prompt_formats(self):
secret = vault.PromptVaultSecret(vault_id='test_id', prompt_formats=[])
secret.load()
self.assertIsNone(secret._bytes)
class TestFileVaultSecret(unittest.TestCase):
def test(self):
secret = vault.FileVaultSecret()
self.assertIsNone(secret._bytes)
self.assertIsNone(secret._text)
def test_repr_empty(self):
secret = vault.FileVaultSecret()
self.assertEqual(repr(secret), "FileVaultSecret()")
def test_repr(self):
tmp_file = tempfile.NamedTemporaryFile(delete=False)
fake_loader = DictDataLoader({tmp_file.name: 'sdfadf'})
secret = vault.FileVaultSecret(loader=fake_loader, filename=tmp_file.name)
filename = tmp_file.name
tmp_file.close()
self.assertEqual(repr(secret), "FileVaultSecret(filename='%s')" % filename)
def test_empty_bytes(self):
secret = vault.FileVaultSecret()
self.assertIsNone(secret.bytes)
def test_file(self):
password = 'some password'
tmp_file = tempfile.NamedTemporaryFile(delete=False)
tmp_file.write(to_bytes(password))
tmp_file.close()
fake_loader = DictDataLoader({tmp_file.name: 'sdfadf'})
secret = vault.FileVaultSecret(loader=fake_loader, filename=tmp_file.name)
secret.load()
os.unlink(tmp_file.name)
self.assertEqual(secret.bytes, to_bytes(password))
def test_file_not_a_directory(self):
filename = '/dev/null/foobar'
fake_loader = DictDataLoader({filename: 'sdfadf'})
secret = vault.FileVaultSecret(loader=fake_loader, filename=filename)
self.assertRaisesRegexp(errors.AnsibleError,
'.*Could not read vault password file.*/dev/null/foobar.*Not a directory',
secret.load)
def test_file_not_found(self):
tmp_file = tempfile.NamedTemporaryFile()
filename = tmp_file.name
tmp_file.close()
fake_loader = DictDataLoader({filename: 'sdfadf'})
secret = vault.FileVaultSecret(loader=fake_loader, filename=filename)
self.assertRaisesRegexp(errors.AnsibleError,
'.*Could not read vault password file.*%s.*' % filename,
secret.load)
class TestScriptVaultSecret(unittest.TestCase):
def test(self):
secret = vault.ScriptVaultSecret()
self.assertIsNone(secret._bytes)
self.assertIsNone(secret._text)
class TestGetFileVaultSecret(unittest.TestCase):
def test_file(self):
password = 'some password'
tmp_file = tempfile.NamedTemporaryFile(delete=False)
tmp_file.write(to_bytes(password))
tmp_file.close()
fake_loader = DictDataLoader({tmp_file.name: 'sdfadf'})
secret = vault.get_file_vault_secret(filename=tmp_file.name, loader=fake_loader)
secret.load()
os.unlink(tmp_file.name)
self.assertEqual(secret.bytes, to_bytes(password))
def test_file_not_a_directory(self):
filename = '/dev/null/foobar'
fake_loader = DictDataLoader({filename: 'sdfadf'})
self.assertRaisesRegexp(errors.AnsibleError,
'.*The vault password file %s was not found.*' % filename,
vault.get_file_vault_secret,
filename=filename,
loader=fake_loader)
def test_file_not_found(self):
tmp_file = tempfile.NamedTemporaryFile()
filename = tmp_file.name
tmp_file.close()
fake_loader = DictDataLoader({filename: 'sdfadf'})
self.assertRaisesRegexp(errors.AnsibleError,
'.*The vault password file %s was not found.*' % filename,
vault.get_file_vault_secret,
filename=filename,
loader=fake_loader)
class TestVaultIsEncrypted(unittest.TestCase):
def test_bytes_not_encrypted(self):
b_data = b"foobar"
self.assertFalse(vault.is_encrypted(b_data))
def test_bytes_encrypted(self):
b_data = b"$ANSIBLE_VAULT;9.9;TEST\n%s" % hexlify(b"ansible")
self.assertTrue(vault.is_encrypted(b_data))
def test_text_not_encrypted(self):
b_data = to_text(b"foobar")
self.assertFalse(vault.is_encrypted(b_data))
def test_text_encrypted(self):
b_data = to_text(b"$ANSIBLE_VAULT;9.9;TEST\n%s" % hexlify(b"ansible"))
self.assertTrue(vault.is_encrypted(b_data))
def test_invalid_text_not_ascii(self):
data = u"$ANSIBLE_VAULT;9.9;TEST\n%s" % u"ァ ア ィ イ ゥ ウ ェ エ ォ オ カ ガ キ ギ ク グ ケ "
self.assertFalse(vault.is_encrypted(data))
def test_invalid_bytes_not_ascii(self):
data = u"$ANSIBLE_VAULT;9.9;TEST\n%s" % u"ァ ア ィ イ ゥ ウ ェ エ ォ オ カ ガ キ ギ ク グ ケ "
b_data = to_bytes(data, encoding='utf-8')
self.assertFalse(vault.is_encrypted(b_data))
class TestVaultIsEncryptedFile(unittest.TestCase):
def test_binary_file_handle_not_encrypted(self):
b_data = b"foobar"
b_data_fo = io.BytesIO(b_data)
self.assertFalse(vault.is_encrypted_file(b_data_fo))
def test_text_file_handle_not_encrypted(self):
data = u"foobar"
data_fo = io.StringIO(data)
self.assertFalse(vault.is_encrypted_file(data_fo))
def test_binary_file_handle_encrypted(self):
b_data = b"$ANSIBLE_VAULT;9.9;TEST\n%s" % hexlify(b"ansible")
b_data_fo = io.BytesIO(b_data)
self.assertTrue(vault.is_encrypted_file(b_data_fo))
def test_text_file_handle_encrypted(self):
data = u"$ANSIBLE_VAULT;9.9;TEST\n%s" % to_text(hexlify(b"ansible"))
data_fo = io.StringIO(data)
self.assertTrue(vault.is_encrypted_file(data_fo))
def test_binary_file_handle_invalid(self):
data = u"$ANSIBLE_VAULT;9.9;TEST\n%s" % u"ァ ア ィ イ ゥ ウ ェ エ ォ オ カ ガ キ ギ ク グ ケ "
b_data = to_bytes(data)
b_data_fo = io.BytesIO(b_data)
self.assertFalse(vault.is_encrypted_file(b_data_fo))
def test_text_file_handle_invalid(self):
data = u"$ANSIBLE_VAULT;9.9;TEST\n%s" % u"ァ ア ィ イ ゥ ウ ェ エ ォ オ カ ガ キ ギ ク グ ケ "
data_fo = io.StringIO(data)
self.assertFalse(vault.is_encrypted_file(data_fo))
def test_file_already_read_from_finds_header(self):
b_data = b"$ANSIBLE_VAULT;9.9;TEST\n%s" % hexlify(b"ansible\ntesting\nfile pos")
b_data_fo = io.BytesIO(b_data)
b_data_fo.read(42) # Arbitrary number
self.assertTrue(vault.is_encrypted_file(b_data_fo))
def test_file_already_read_from_saves_file_pos(self):
b_data = b"$ANSIBLE_VAULT;9.9;TEST\n%s" % hexlify(b"ansible\ntesting\nfile pos")
b_data_fo = io.BytesIO(b_data)
b_data_fo.read(69) # Arbitrary number
vault.is_encrypted_file(b_data_fo)
self.assertEqual(b_data_fo.tell(), 69)
def test_file_with_offset(self):
b_data = b"JUNK$ANSIBLE_VAULT;9.9;TEST\n%s" % hexlify(b"ansible\ntesting\nfile pos")
b_data_fo = io.BytesIO(b_data)
self.assertTrue(vault.is_encrypted_file(b_data_fo, start_pos=4))
def test_file_with_count(self):
b_data = b"$ANSIBLE_VAULT;9.9;TEST\n%s" % hexlify(b"ansible\ntesting\nfile pos")
vault_length = len(b_data)
b_data = b_data + u'ァ ア'.encode('utf-8')
b_data_fo = io.BytesIO(b_data)
self.assertTrue(vault.is_encrypted_file(b_data_fo, count=vault_length))
def test_file_with_offset_and_count(self):
b_data = b"$ANSIBLE_VAULT;9.9;TEST\n%s" % hexlify(b"ansible\ntesting\nfile pos")
vault_length = len(b_data)
b_data = b'JUNK' + b_data + u'ァ ア'.encode('utf-8')
b_data_fo = io.BytesIO(b_data)
self.assertTrue(vault.is_encrypted_file(b_data_fo, start_pos=4, count=vault_length))
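# Note on the test data above (derived from the asserts themselves, not from
# Ansible documentation): a vault envelope is a header line of
# semicolon-separated fields followed by hex-encoded payload lines:
#
#   $ANSIBLE_VAULT;<version>;<cipher>[;<vault_id>]
#   <hex payload ...>
#
# The "9.9"/"TEST" values used throughout are deliberately fake; only the
# header shape matters to is_encrypted()/is_encrypted_file().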
@pytest.mark.skipif(not vault.HAS_CRYPTOGRAPHY,
reason="Skipping cryptography tests because cryptography is not installed")
class TestVaultCipherAes256(unittest.TestCase):
def setUp(self):
self.vault_cipher = vault.VaultAES256()
def test(self):
self.assertIsInstance(self.vault_cipher, vault.VaultAES256)
# TODO: tag these as slow tests
def test_create_key_cryptography(self):
b_password = b'hunter42'
b_salt = os.urandom(32)
b_key_cryptography = self.vault_cipher._create_key_cryptography(b_password, b_salt, key_length=32, iv_length=16)
self.assertIsInstance(b_key_cryptography, six.binary_type)
@pytest.mark.skipif(not vault.HAS_PYCRYPTO, reason='Not testing pycrypto key as pycrypto is not installed')
def test_create_key_pycrypto(self):
b_password = b'hunter42'
b_salt = os.urandom(32)
b_key_pycrypto = self.vault_cipher._create_key_pycrypto(b_password, b_salt, key_length=32, iv_length=16)
self.assertIsInstance(b_key_pycrypto, six.binary_type)
@pytest.mark.skipif(not vault.HAS_PYCRYPTO,
reason='Not comparing cryptography key to pycrypto key as pycrypto is not installed')
def test_compare_new_keys(self):
b_password = b'hunter42'
b_salt = os.urandom(32)
b_key_cryptography = self.vault_cipher._create_key_cryptography(b_password, b_salt, key_length=32, iv_length=16)
b_key_pycrypto = self.vault_cipher._create_key_pycrypto(b_password, b_salt, key_length=32, iv_length=16)
self.assertEqual(b_key_cryptography, b_key_pycrypto)
def test_create_key_known_cryptography(self):
b_password = b'hunter42'
# A fixed salt
b_salt = b'q' * 32 # q is the most random letter.
b_key_1 = self.vault_cipher._create_key_cryptography(b_password, b_salt, key_length=32, iv_length=16)
self.assertIsInstance(b_key_1, six.binary_type)
# verify we get the same answer
# we could potentially run a few iterations of this and time it to see if it's roughly constant time
# and or that it exceeds some minimal time, but that would likely cause unreliable fails, esp in CI
b_key_2 = self.vault_cipher._create_key_cryptography(b_password, b_salt, key_length=32, iv_length=16)
self.assertIsInstance(b_key_2, six.binary_type)
self.assertEqual(b_key_1, b_key_2)
# And again with pycrypto
b_key_3 = self.vault_cipher._create_key_pycrypto(b_password, b_salt, key_length=32, iv_length=16)
self.assertIsInstance(b_key_3, six.binary_type)
# verify we get the same answer
# we could potentially run a few iterations of this and time it to see if it's roughly constant time
# and or that it exceeds some minimal time, but that would likely cause unreliable fails, esp in CI
b_key_4 = self.vault_cipher._create_key_pycrypto(b_password, b_salt, key_length=32, iv_length=16)
self.assertIsInstance(b_key_4, six.binary_type)
self.assertEqual(b_key_3, b_key_4)
self.assertEqual(b_key_1, b_key_4)
def test_create_key_known_pycrypto(self):
b_password = b'hunter42'
# A fixed salt
b_salt = b'q' * 32 # q is the most random letter.
b_key_3 = self.vault_cipher._create_key_pycrypto(b_password, b_salt, key_length=32, iv_length=16)
self.assertIsInstance(b_key_3, six.binary_type)
# verify we get the same answer
# we could potentially run a few iterations of this and time it to see if it's roughly constant time
# and or that it exceeds some minimal time, but that would likely cause unreliable fails, esp in CI
b_key_4 = self.vault_cipher._create_key_pycrypto(b_password, b_salt, key_length=32, iv_length=16)
self.assertIsInstance(b_key_4, six.binary_type)
self.assertEqual(b_key_3, b_key_4)
def test_is_equal_is_equal(self):
self.assertTrue(self.vault_cipher._is_equal(b'abcdefghijklmnopqrstuvwxyz', b'abcdefghijklmnopqrstuvwxyz'))
def test_is_equal_unequal_length(self):
self.assertFalse(self.vault_cipher._is_equal(b'abcdefghijklmnopqrstuvwxyz', b'abcdefghijklmnopqrstuvwx and sometimes y'))
def test_is_equal_not_equal(self):
self.assertFalse(self.vault_cipher._is_equal(b'abcdefghijklmnopqrstuvwxyz', b'AbcdefghijKlmnopQrstuvwxZ'))
def test_is_equal_empty(self):
self.assertTrue(self.vault_cipher._is_equal(b'', b''))
def test_is_equal_non_ascii_equal(self):
utf8_data = to_bytes(u'私はガラスを食べられます。それは私を傷つけません。')
self.assertTrue(self.vault_cipher._is_equal(utf8_data, utf8_data))
def test_is_equal_non_ascii_unequal(self):
utf8_data = to_bytes(u'私はガラスを食べられます。それは私を傷つけません。')
utf8_data2 = to_bytes(u'Pot să mănânc sticlă și ea nu mă rănește.')
# Test for the len optimization path
self.assertFalse(self.vault_cipher._is_equal(utf8_data, utf8_data2))
# Test for the slower, char by char comparison path
self.assertFalse(self.vault_cipher._is_equal(utf8_data, utf8_data[:-1] + b'P'))
def test_is_equal_non_bytes(self):
""" Anything not a byte string should raise a TypeError """
self.assertRaises(TypeError, self.vault_cipher._is_equal, u"One fish", b"two fish")
self.assertRaises(TypeError, self.vault_cipher._is_equal, b"One fish", u"two fish")
self.assertRaises(TypeError, self.vault_cipher._is_equal, 1, b"red fish")
self.assertRaises(TypeError, self.vault_cipher._is_equal, b"blue fish", 2)
@pytest.mark.skipif(not vault.HAS_PYCRYPTO,
reason="Skipping Pycrypto tests because pycrypto is not installed")
class TestVaultCipherAes256PyCrypto(TestVaultCipherAes256):
def setUp(self):
self.has_cryptography = vault.HAS_CRYPTOGRAPHY
vault.HAS_CRYPTOGRAPHY = False
super(TestVaultCipherAes256PyCrypto, self).setUp()
def tearDown(self):
vault.HAS_CRYPTOGRAPHY = self.has_cryptography
super(TestVaultCipherAes256PyCrypto, self).tearDown()
class TestMatchSecrets(unittest.TestCase):
def test_empty_tuple(self):
secrets = [tuple()]
vault_ids = ['vault_id_1']
self.assertRaises(ValueError,
vault.match_secrets,
secrets, vault_ids)
def test_empty_secrets(self):
matches = vault.match_secrets([], ['vault_id_1'])
self.assertEqual(matches, [])
def test_single_match(self):
secret = TextVaultSecret('password')
matches = vault.match_secrets([('default', secret)], ['default'])
self.assertEqual(matches, [('default', secret)])
def test_no_matches(self):
secret = TextVaultSecret('password')
matches = vault.match_secrets([('default', secret)], ['not_default'])
self.assertEqual(matches, [])
def test_multiple_matches(self):
secrets = [('vault_id1', TextVaultSecret('password1')),
('vault_id2', TextVaultSecret('password2')),
('vault_id1', TextVaultSecret('password3')),
('vault_id4', TextVaultSecret('password4'))]
vault_ids = ['vault_id1', 'vault_id4']
matches = vault.match_secrets(secrets, vault_ids)
self.assertEqual(len(matches), 3)
expected = [('vault_id1', TextVaultSecret('password1')),
('vault_id1', TextVaultSecret('password3')),
('vault_id4', TextVaultSecret('password4'))]
self.assertEqual([x for x, y in matches],
[a for a, b in expected])
@pytest.mark.skipif(not vault.HAS_CRYPTOGRAPHY,
reason="Skipping cryptography tests because cryptography is not installed")
class TestVaultLib(unittest.TestCase):
def setUp(self):
self.vault_password = "test-vault-password"
text_secret = TextVaultSecret(self.vault_password)
self.vault_secrets = [('default', text_secret),
('test_id', text_secret)]
self.v = vault.VaultLib(self.vault_secrets)
def _vault_secrets(self, vault_id, secret):
return [(vault_id, secret)]
def _vault_secrets_from_password(self, vault_id, password):
return [(vault_id, TextVaultSecret(password))]
def test_encrypt(self):
plaintext = u'Some text to encrypt in a café'
b_vaulttext = self.v.encrypt(plaintext)
self.assertIsInstance(b_vaulttext, six.binary_type)
b_header = b'$ANSIBLE_VAULT;1.1;AES256\n'
self.assertEqual(b_vaulttext[:len(b_header)], b_header)
def test_encrypt_vault_id(self):
plaintext = u'Some text to encrypt in a café'
b_vaulttext = self.v.encrypt(plaintext, vault_id='test_id')
self.assertIsInstance(b_vaulttext, six.binary_type)
b_header = b'$ANSIBLE_VAULT;1.2;AES256;test_id\n'
self.assertEqual(b_vaulttext[:len(b_header)], b_header)
def test_encrypt_bytes(self):
plaintext = to_bytes(u'Some text to encrypt in a café')
b_vaulttext = self.v.encrypt(plaintext)
self.assertIsInstance(b_vaulttext, six.binary_type)
b_header = b'$ANSIBLE_VAULT;1.1;AES256\n'
self.assertEqual(b_vaulttext[:len(b_header)], b_header)
def test_encrypt_no_secret_empty_secrets(self):
vault_secrets = []
v = vault.VaultLib(vault_secrets)
plaintext = u'Some text to encrypt in a café'
self.assertRaisesRegexp(vault.AnsibleVaultError,
'.*A vault password must be specified to encrypt data.*',
v.encrypt,
plaintext)
def test_is_encrypted(self):
self.assertFalse(self.v.is_encrypted(b"foobar"), msg="encryption check on plaintext yielded false positive")
b_data = b"$ANSIBLE_VAULT;9.9;TEST\n%s" % hexlify(b"ansible")
self.assertTrue(self.v.is_encrypted(b_data), msg="encryption check on headered text failed")
def test_format_vaulttext_envelope(self):
cipher_name = "TEST"
b_ciphertext = b"ansible"
b_vaulttext = vault.format_vaulttext_envelope(b_ciphertext,
cipher_name,
version=self.v.b_version,
vault_id='default')
b_lines = b_vaulttext.split(b'\n')
self.assertGreater(len(b_lines), 1, msg="failed to properly add header")
b_header = b_lines[0]
# self.assertTrue(b_header.endswith(b';TEST'), msg="header does not end with cipher name")
b_header_parts = b_header.split(b';')
self.assertEqual(len(b_header_parts), 4, msg="header has the wrong number of parts")
self.assertEqual(b_header_parts[0], b'$ANSIBLE_VAULT', msg="header does not start with $ANSIBLE_VAULT")
self.assertEqual(b_header_parts[1], self.v.b_version, msg="header version is incorrect")
self.assertEqual(b_header_parts[2], b'TEST', msg="header does not end with cipher name")
# And just to verify, lets parse the results and compare
b_ciphertext2, b_version2, cipher_name2, vault_id2 = \
vault.parse_vaulttext_envelope(b_vaulttext)
self.assertEqual(b_ciphertext, b_ciphertext2)
self.assertEqual(self.v.b_version, b_version2)
self.assertEqual(cipher_name, cipher_name2)
self.assertEqual('default', vault_id2)
def test_parse_vaulttext_envelope(self):
b_vaulttext = b"$ANSIBLE_VAULT;9.9;TEST\nansible"
b_ciphertext, b_version, cipher_name, vault_id = vault.parse_vaulttext_envelope(b_vaulttext)
b_lines = b_ciphertext.split(b'\n')
self.assertEqual(b_lines[0], b"ansible", msg="Payload was not properly split from the header")
self.assertEqual(cipher_name, u'TEST', msg="cipher name was not properly set")
self.assertEqual(b_version, b"9.9", msg="version was not properly set")
def test_parse_vaulttext_envelope_crlf(self):
b_vaulttext = b"$ANSIBLE_VAULT;9.9;TEST\r\nansible"
b_ciphertext, b_version, cipher_name, vault_id = vault.parse_vaulttext_envelope(b_vaulttext)
b_lines = b_ciphertext.split(b'\n')
self.assertEqual(b_lines[0], b"ansible", msg="Payload was not properly split from the header")
self.assertEqual(cipher_name, u'TEST', msg="cipher name was not properly set")
self.assertEqual(b_version, b"9.9", msg="version was not properly set")
def test_encrypt_decrypt_aes(self):
self.v.cipher_name = u'AES'
vault_secrets = self._vault_secrets_from_password('default', 'ansible')
self.v.secrets = vault_secrets
# AES encryption code has been removed, so this is old output for
# AES-encrypted 'foobar' with password 'ansible'.
b_vaulttext = b'''$ANSIBLE_VAULT;1.1;AES
53616c7465645f5fc107ce1ef4d7b455e038a13b053225776458052f8f8f332d554809d3f150bfa3
fe3db930508b65e0ff5947e4386b79af8ab094017629590ef6ba486814cf70f8e4ab0ed0c7d2587e
786a5a15efeb787e1958cbdd480d076c
'''
b_plaintext = self.v.decrypt(b_vaulttext)
self.assertEqual(b_plaintext, b"foobar", msg="decryption failed")
def test_encrypt_decrypt_aes256(self):
self.v.cipher_name = u'AES256'
plaintext = u"foobar"
b_vaulttext = self.v.encrypt(plaintext)
b_plaintext = self.v.decrypt(b_vaulttext)
self.assertNotEqual(b_vaulttext, b"foobar", msg="encryption failed")
self.assertEqual(b_plaintext, b"foobar", msg="decryption failed")
def test_encrypt_decrypt_aes256_none_secrets(self):
vault_secrets = self._vault_secrets_from_password('default', 'ansible')
v = vault.VaultLib(vault_secrets)
plaintext = u"foobar"
b_vaulttext = v.encrypt(plaintext)
# VaultLib will default to empty {} if secrets is None
v_none = vault.VaultLib(None)
# so set secrets None explicitly
v_none.secrets = None
self.assertRaisesRegexp(vault.AnsibleVaultError,
'.*A vault password must be specified to decrypt data.*',
v_none.decrypt,
b_vaulttext)
def test_encrypt_decrypt_aes256_empty_secrets(self):
vault_secrets = self._vault_secrets_from_password('default', 'ansible')
v = vault.VaultLib(vault_secrets)
plaintext = u"foobar"
b_vaulttext = v.encrypt(plaintext)
vault_secrets_empty = []
v_none = vault.VaultLib(vault_secrets_empty)
self.assertRaisesRegexp(vault.AnsibleVaultError,
'.*Attempting to decrypt but no vault secrets found.*',
v_none.decrypt,
b_vaulttext)
def test_encrypt_decrypt_aes256_multiple_secrets_all_wrong(self):
plaintext = u'Some text to encrypt in a café'
b_vaulttext = self.v.encrypt(plaintext)
vault_secrets = [('default', TextVaultSecret('another-wrong-password')),
('wrong-password', TextVaultSecret('wrong-password'))]
v_multi = vault.VaultLib(vault_secrets)
self.assertRaisesRegexp(errors.AnsibleError,
'.*Decryption failed.*',
v_multi.decrypt,
b_vaulttext,
filename='/dev/null/fake/filename')
def test_encrypt_decrypt_aes256_multiple_secrets_one_valid(self):
plaintext = u'Some text to encrypt in a café'
b_vaulttext = self.v.encrypt(plaintext)
correct_secret = TextVaultSecret(self.vault_password)
wrong_secret = TextVaultSecret('wrong-password')
vault_secrets = [('default', wrong_secret),
('correct_secret', correct_secret),
('wrong_secret', wrong_secret)]
v_multi = vault.VaultLib(vault_secrets)
b_plaintext = v_multi.decrypt(b_vaulttext)
self.assertNotEqual(b_vaulttext, to_bytes(plaintext), msg="encryption failed")
self.assertEqual(b_plaintext, to_bytes(plaintext), msg="decryption failed")
def test_encrypt_decrypt_aes256_existing_vault(self):
self.v.cipher_name = u'AES256'
b_orig_plaintext = b"Setec Astronomy"
vaulttext = u'''$ANSIBLE_VAULT;1.1;AES256
33363965326261303234626463623963633531343539616138316433353830356566396130353436
3562643163366231316662386565383735653432386435610a306664636137376132643732393835
63383038383730306639353234326630666539346233376330303938323639306661313032396437
6233623062366136310a633866373936313238333730653739323461656662303864663666653563
3138'''
b_plaintext = self.v.decrypt(vaulttext)
self.assertEqual(b_plaintext, b_orig_plaintext, msg="decryption failed")
b_vaulttext = to_bytes(vaulttext, encoding='ascii', errors='strict')
b_plaintext = self.v.decrypt(b_vaulttext)
self.assertEqual(b_plaintext, b_orig_plaintext, msg="decryption failed")
# FIXME This test isn't working quite yet.
@pytest.mark.skip(reason='This test is not ready yet')
def test_encrypt_decrypt_aes256_bad_hmac(self):
self.v.cipher_name = 'AES256'
# plaintext = "Setec Astronomy"
enc_data = '''$ANSIBLE_VAULT;1.1;AES256
33363965326261303234626463623963633531343539616138316433353830356566396130353436
3562643163366231316662386565383735653432386435610a306664636137376132643732393835
63383038383730306639353234326630666539346233376330303938323639306661313032396437
6233623062366136310a633866373936313238333730653739323461656662303864663666653563
3138'''
b_data = to_bytes(enc_data, errors='strict', encoding='utf-8')
b_data = self.v._split_header(b_data)
foo = binascii.unhexlify(b_data)
lines = foo.splitlines()
# line 0 is salt, line 1 is hmac, line 2+ is ciphertext
b_salt = lines[0]
b_hmac = lines[1]
b_ciphertext_data = b'\n'.join(lines[2:])
b_ciphertext = binascii.unhexlify(b_ciphertext_data)
# b_orig_ciphertext = b_ciphertext[:]
# now muck with the text
# b_munged_ciphertext = b_ciphertext[:10] + b'\x00' + b_ciphertext[11:]
# b_munged_ciphertext = b_ciphertext
# assert b_orig_ciphertext != b_munged_ciphertext
b_ciphertext_data = binascii.hexlify(b_ciphertext)
b_payload = b'\n'.join([b_salt, b_hmac, b_ciphertext_data])
# reformat
b_invalid_ciphertext = self.v._format_output(b_payload)
# assert we throw an error
self.v.decrypt(b_invalid_ciphertext)
def test_decrypt_non_default_1_2(self):
b_expected_plaintext = to_bytes('foo bar\n')
vaulttext = '''$ANSIBLE_VAULT;1.2;AES256;ansible_devel
65616435333934613466373335363332373764363365633035303466643439313864663837393234
3330656363343637313962633731333237313636633534630a386264363438363362326132363239
39363166646664346264383934393935653933316263333838386362633534326664646166663736
6462303664383765650a356637643633366663643566353036303162386237336233393065393164
6264'''
vault_secrets = self._vault_secrets_from_password('default', 'ansible')
v = vault.VaultLib(vault_secrets)
b_vaulttext = to_bytes(vaulttext)
b_plaintext = v.decrypt(b_vaulttext)
self.assertEqual(b_expected_plaintext, b_plaintext)
b_ciphertext, b_version, cipher_name, vault_id = vault.parse_vaulttext_envelope(b_vaulttext)
self.assertEqual('ansible_devel', vault_id)
self.assertEqual(b'1.2', b_version)
def test_encrypt_encrypted(self):
self.v.cipher_name = u'AES'
b_vaulttext = b"$ANSIBLE_VAULT;9.9;TEST\n%s" % hexlify(b"ansible")
vaulttext = to_text(b_vaulttext, errors='strict')
self.assertRaises(errors.AnsibleError, self.v.encrypt, b_vaulttext)
self.assertRaises(errors.AnsibleError, self.v.encrypt, vaulttext)
def test_decrypt_decrypted(self):
plaintext = u"ansible"
self.assertRaises(errors.AnsibleError, self.v.decrypt, plaintext)
b_plaintext = b"ansible"
self.assertRaises(errors.AnsibleError, self.v.decrypt, b_plaintext)
def test_cipher_not_set(self):
plaintext = u"ansible"
self.v.encrypt(plaintext)
self.assertEqual(self.v.cipher_name, "AES256")
@pytest.mark.skipif(not vault.HAS_PYCRYPTO,
reason="Skipping Pycrypto tests because pycrypto is not installed")
class TestVaultLibPyCrypto(TestVaultLib):
def setUp(self):
self.has_cryptography = vault.HAS_CRYPTOGRAPHY
vault.HAS_CRYPTOGRAPHY = False
super(TestVaultLibPyCrypto, self).setUp()
def tearDown(self):
vault.HAS_CRYPTOGRAPHY = self.has_cryptography
super(TestVaultLibPyCrypto, self).tearDown()
|
fernandezcuesta/ansible
|
test/units/parsing/vault/test_vault.py
|
Python
|
gpl-3.0
| 30,935
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2016, 2017, 2018 Guenter Bartsch
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
from sqlalchemy import create_engine
from sqlalchemy import Column, Integer, String, Text, Unicode, UnicodeText, Enum, DateTime, ForeignKey, Index, Float
from sqlalchemy.orm import relationship
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class TrainingData(Base):
__tablename__ = 'training_data'
id = Column(Integer, primary_key=True)
lang = Column(String(2), index=True)
skill = Column(String(255), index=True)
inp = Column(UnicodeText, index=True)
md5s = Column(String(32))
args = Column(String(255))
loc_fn = Column(String(255))
loc_line = Column(Integer)
__table_args__ = (Index('idx_td_inp_lang', "inp", "lang"),
Index('idx_td_mod_lang', "skill", "lang"))
class Code(Base):
__tablename__ = "code"
md5s = Column(String(32), primary_key=True)
skill = Column(String(255), index=True)
code = Column(Text)
fn = Column(String(255))
class TestCase(Base):
__tablename__ = 'test_case'
id = Column(Integer, primary_key=True)
lang = Column(String(2), index=True)
skill = Column(String(255), index=True)
name = Column(String(255), index=True)
prep_code = Column(Text)
prep_fn = Column(String(255))
rounds = Column(Text)
loc_fn = Column(String(255))
loc_line = Column(Integer)
# __table_args__ = (Index('idx_tc_inp_lang', "inp", "lang"), )
class NERData(Base):
__tablename__ = 'ner_data'
id = Column(Integer, primary_key=True)
lang = Column(String(2), index=True)
skill = Column(String(255), index=True)
cls = Column(String(255))
entity = Column(Unicode(255))
label = Column(Unicode(255))
class NamedMacro(Base):
__tablename__ = 'named_macro'
id = Column(Integer, primary_key=True)
lang = Column(String(2), index=True)
skill = Column(String(255), index=True)
name = Column(String(255), index=True)
soln = Column(Text)
class Mem(Base):
__tablename__ = 'mem'
id = Column(Integer, primary_key=True)
realm = Column(String(255), index=True)
k = Column(String(255), index=True)
v = Column(Text)
score = Column(Float)
__table_args__ = (Index('idx_mem_realm_k', "realm", "k"), )
def data_engine_setup(db_url, echo=False):
engine = create_engine(db_url, echo=echo)
Base.metadata.create_all(engine)
return engine
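# Illustrative sketch (not part of the module): wiring the engine to a
# session and inserting a row. The database URL below is a hypothetical
# example.
#
#   from sqlalchemy.orm import sessionmaker
#
#   engine = data_engine_setup('sqlite:///zamiaai.db')
#   Session = sessionmaker(bind=engine)
#   session = Session()
#   session.add(Mem(realm='default', k='greeting', v='hello', score=1.0))
#   session.commit()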
|
gooofy/nlp
|
zamiaai/model.py
|
Python
|
lgpl-3.0
| 3,573
|
#
# ElementTree
# $Id: ElementPath.py 1858 2004-06-17 21:31:41Z Fredrik $
#
# limited xpath support for element trees
#
# history:
# 2003-05-23 fl created
# 2003-05-28 fl added support for // etc
# 2003-08-27 fl fixed parsing of periods in element names
#
# Copyright (c) 2003-2004 by Fredrik Lundh. All rights reserved.
#
# fredrik@pythonware.com
# http://www.pythonware.com
#
# --------------------------------------------------------------------
# The ElementTree toolkit is
#
# Copyright (c) 1999-2004 by Fredrik Lundh
#
# By obtaining, using, and/or copying this software and/or its
# associated documentation, you agree that you have read, understood,
# and will comply with the following terms and conditions:
#
# Permission to use, copy, modify, and distribute this software and
# its associated documentation for any purpose and without fee is
# hereby granted, provided that the above copyright notice appears in
# all copies, and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Secret Labs AB or the author not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
# --------------------------------------------------------------------
##
# Implementation module for XPath support. There's usually no reason
# to import this module directly; the <b>ElementTree</b> does this for
# you, if needed.
##
import re
xpath_tokenizer = re.compile(
r"(::|\.\.|\(\)|[/.*:\[\]\(\)@=])|((?:\{[^}]+\})?[^/:\[\]\(\)@=\s]+)|\s+"
).findall
class xpath_descendant_or_self:
pass
##
# Wrapper for a compiled XPath.
class Path:
##
# Create an Path instance from an XPath expression.
def __init__(self, path):
tokens = xpath_tokenizer(path)
# the current version supports 'path/path'-style expressions only
self.path = []
self.tag = None
if tokens and tokens[0][0] == "/":
raise SyntaxError("cannot use absolute path on element")
while tokens:
op, tag = tokens.pop(0)
if tag or op == "*":
self.path.append(tag or op)
elif op == ".":
pass
elif op == "/":
self.path.append(xpath_descendant_or_self())
continue
else:
raise SyntaxError("unsupported path syntax (%s)" % op)
if tokens:
op, tag = tokens.pop(0)
if op != "/":
raise SyntaxError(
"expected path separator (%s)" % (op or tag)
)
if self.path and isinstance(self.path[-1], xpath_descendant_or_self):
raise SyntaxError("path cannot end with //")
if len(self.path) == 1 and isinstance(self.path[0], type("")):
self.tag = self.path[0]
##
# Find first matching object.
def find(self, element):
tag = self.tag
if tag is None:
nodeset = self.findall(element)
if not nodeset:
return None
return nodeset[0]
for elem in element:
if elem.tag == tag:
return elem
return None
##
# Find text for first matching object.
def findtext(self, element, default=None):
tag = self.tag
if tag is None:
nodeset = self.findall(element)
if not nodeset:
return default
return nodeset[0].text or ""
for elem in element:
if elem.tag == tag:
return elem.text or ""
return default
##
# Find all matching objects.
def findall(self, element):
nodeset = [element]
index = 0
while 1:
try:
path = self.path[index]
index = index + 1
except IndexError:
return nodeset
set = []
if isinstance(path, xpath_descendant_or_self):
try:
tag = self.path[index]
if not isinstance(tag, type("")):
tag = None
else:
index = index + 1
except IndexError:
tag = None # invalid path
for node in nodeset:
new = list(node.getiterator(tag))
if new and new[0] is node:
set.extend(new[1:])
else:
set.extend(new)
else:
for node in nodeset:
for node in node:
if path == "*" or node.tag == path:
set.append(node)
if not set:
return []
nodeset = set
_cache = {}
##
# (Internal) Compile path.
def _compile(path):
p = _cache.get(path)
if p is not None:
return p
p = Path(path)
if len(_cache) >= 100:
_cache.clear()
_cache[path] = p
return p
##
# Find first matching object.
def find(element, path):
return _compile(path).find(element)
##
# Find text for first matching object.
def findtext(element, path, default=None):
return _compile(path).findtext(element, default)
##
# Find all matching objects.
def findall(element, path):
return _compile(path).findall(element)
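# Illustrative sketch (assumes the companion ElementTree module from the
# same toolkit, whose elements provide the getiterator() used above):
#
#   from elementtree import ElementTree
#
#   root = ElementTree.XML(
#       "<doc><a><b>one</b></a><a><b>two</b></a></doc>")
#   findall(root, "a/b")    # -> the two <b> elements
#   findtext(root, "a/b")   # -> "one"
#   find(root, "a")         # -> the first <a> element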
|
remybaranx/qtaste
|
tools/jython/lib/Lib/elementtree/ElementPath.py
|
Python
|
gpl-3.0
| 6,150
|
__author__ = 'teddydestodes'
import sys
import binascii
import socket
import select
import hashlib
class ApiRos:
"""Routeros api"""
def __init__(self, sk):
self.sk = sk
self.currenttag = 0
def login(self, username, pwd):
for repl, attrs in self.talk(["/login"]):
chal = binascii.unhexlify((attrs['=ret']).encode('UTF-8'))
md = hashlib.md5()
md.update(b'\x00')
md.update(pwd.encode('UTF-8'))
md.update(chal)
self.talk(["/login", "=name=" + username,
"=response=00" + binascii.hexlify(md.digest()).decode('UTF-8')])
def talk(self, words):
if self.write_sentence(words) == 0: return
r = []
while 1:
i = self.read_sentence()
if len(i) == 0:
continue
reply = i[0]
attrs = {}
for w in i[1:]:
j = w.find('=', 1)
if j == -1:
attrs[w] = ''
else:
attrs[w[:j]] = w[j+1:]
r.append((reply, attrs))
if reply == '!done': return r
def write_sentence(self, words):
ret = 0
for w in words:
self.write_word(w)
ret += 1
self.write_word('')
return ret
def read_sentence(self):
r = []
while 1:
w = self.read_word()
if w == '':
return r
r.append(w)
def write_word(self, w):
print(("<<< " + w))
self.write_len(len(w))
self.write_str(w)
def read_word(self):
ret = self.read_str(self.read_len())
print((">>> " + ret))
return ret
def write_len(self, l):
if l < 0x80:
self.write_str(chr(l))
elif l < 0x4000:
l |= 0x8000
self.write_str(chr((l >> 8) & 0xFF))
self.write_str(chr(l & 0xFF))
elif l < 0x200000:
l |= 0xC00000
self.write_str(chr((l >> 16) & 0xFF))
self.write_str(chr((l >> 8) & 0xFF))
self.write_str(chr(l & 0xFF))
elif l < 0x10000000:
l |= 0xE0000000
self.write_str(chr((l >> 24) & 0xFF))
self.write_str(chr((l >> 16) & 0xFF))
self.write_str(chr((l >> 8) & 0xFF))
self.write_str(chr(l & 0xFF))
else:
self.write_str(chr(0xF0))
self.write_str(chr((l >> 24) & 0xFF))
self.write_str(chr((l >> 16) & 0xFF))
self.write_str(chr((l >> 8) & 0xFF))
self.write_str(chr(l & 0xFF))
def read_len(self):
c = ord(self.read_str(1))
if (c & 0x80) == 0x00:
pass
elif (c & 0xC0) == 0x80:
c &= ~0xC0
c <<= 8
c += ord(self.read_str(1))
elif (c & 0xE0) == 0xC0:
c &= ~0xE0
c <<= 8
c += ord(self.read_str(1))
c <<= 8
c += ord(self.read_str(1))
elif (c & 0xF0) == 0xE0:
c &= ~0xF0
c <<= 8
c += ord(self.read_str(1))
c <<= 8
c += ord(self.read_str(1))
c <<= 8
c += ord(self.read_str(1))
elif (c & 0xF8) == 0xF0:
c = ord(self.read_str(1))
c <<= 8
c += ord(self.read_str(1))
c <<= 8
c += ord(self.read_str(1))
c <<= 8
c += ord(self.read_str(1))
return c
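# Summary of the length prefix handled by write_len()/read_len() above,
# derived from the code itself (the RouterOS API variable-length scheme):
#
#   len < 0x80        -> 1 byte:  0xxxxxxx
#   len < 0x4000      -> 2 bytes: 10xxxxxx xxxxxxxx      (len | 0x8000)
#   len < 0x200000    -> 3 bytes: 110xxxxx ...           (len | 0xC00000)
#   len < 0x10000000  -> 4 bytes: 1110xxxx ...           (len | 0xE0000000)
#   otherwise         -> 5 bytes: 11110000 + 4 raw length bytes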
def write_str(self, data):
n = 0
while n < len(data):
r = self.sk.send(bytes(data[n:], 'UTF-8'))
if r == 0:
raise RuntimeError("connection closed by remote end")
n += r
def read_str(self, length):
ret = ''
while len(ret) < length:
s = self.sk.recv(length - len(ret))
if s == '': raise RuntimeError("connection closed by remote end")
ret += s.decode('UTF-8', 'replace')
return ret
def main():
s = None
for res in socket.getaddrinfo('192.168.2.1', "8728", socket.AF_UNSPEC, socket.SOCK_STREAM):
af, socktype, proto, canonname, sa = res
try:
s = socket.socket(af, socktype, proto)
except socket.error:
s = None
continue
try:
s.connect(sa)
except socket.error:
s.close()
s = None
continue
break
if s is None:
print('could not open socket')
sys.exit(1)
apiros = ApiRos(s)
apiros.login('api', 'testo')
inputsentence = []
while 1:
r = select.select([s, sys.stdin], [], [], None)
if s in r[0]:
# something to read in socket, read sentence
x = apiros.read_sentence()
if sys.stdin in r[0]:
# read line from input and strip off newline
l = sys.stdin.readline()
l = l[:-1]
# if empty line, send sentence and start with new
# otherwise append to input sentence
if l == '':
apiros.write_sentence(inputsentence)
inputsentence = []
else:
inputsentence.append(l)
if __name__ == '__main__':
main()
|
TeddyDesTodes/pyflipdot
|
pyflipdot/plugins/mikrotik/__init__.py
|
Python
|
bsd-3-clause
| 5,309
|
import collections
import operator
import os
import json
import logging
import mimetypes
import md5
from django.core.urlresolvers import reverse
from django.core.cache import cache
from django.conf import settings
from django.contrib import messages
from django.contrib.auth.models import User
from django.http import HttpResponse, HttpResponseRedirect, Http404
from django.shortcuts import get_object_or_404, render_to_response
from django.template import RequestContext
from django.views.generic import ListView, DetailView
from django.utils.datastructures import SortedDict
from django.views.decorators.cache import cache_page
from taggit.models import Tag
import requests
from .base import ProjectOnboardMixin
from readthedocs.builds.constants import LATEST
from readthedocs.builds.filters import VersionSlugFilter
from readthedocs.builds.models import Version
from readthedocs.projects.models import Project, ImportedFile
from readthedocs.search.indexes import PageIndex
from readthedocs.search.views import LOG_TEMPLATE
log = logging.getLogger(__name__)
search_log = logging.getLogger(__name__ + '.search')
mimetypes.add_type("application/epub+zip", ".epub")
class ProjectIndex(ListView):
model = Project
def get_queryset(self):
queryset = Project.objects.public(self.request.user)
if self.kwargs.get('tag'):
self.tag = get_object_or_404(Tag, slug=self.kwargs.get('tag'))
queryset = queryset.filter(tags__name__in=[self.tag.slug])
else:
self.tag = None
if self.kwargs.get('username'):
self.user = get_object_or_404(User, username=self.kwargs.get('username'))
queryset = queryset.filter(user=self.user)
else:
self.user = None
return queryset
def get_context_data(self, **kwargs):
context = super(ProjectIndex, self).get_context_data(**kwargs)
context['person'] = self.user
context['tag'] = self.tag
return context
project_index = ProjectIndex.as_view()
class ProjectDetailView(ProjectOnboardMixin, DetailView):
'''Display project onboard steps'''
model = Project
slug_url_kwarg = 'project_slug'
def get_queryset(self):
return Project.objects.protected(self.request.user)
def get_context_data(self, **kwargs):
context = super(ProjectDetailView, self).get_context_data(**kwargs)
project = self.get_object()
context['versions'] = Version.objects.public(
user=self.request.user, project=project)
context['filter'] = VersionSlugFilter(self.request.GET,
queryset=context['versions'])
protocol = 'http'
if self.request.is_secure():
protocol = 'https'
context['badge_url'] = "%s://%s%s?version=%s" % (
protocol,
settings.PRODUCTION_DOMAIN,
reverse('project_badge', args=[project.slug]),
project.get_default_version(),
)
context['site_url'] = "%s://%s%s?badge=%s" % (
protocol,
settings.PRODUCTION_DOMAIN,
reverse('projects_detail', args=[project.slug]),
project.get_default_version(),
)
return context
def _badge_return(redirect, url):
if redirect:
return HttpResponseRedirect(url)
else:
response = requests.get(url)
http_response = HttpResponse(response.content,
content_type="image/svg+xml")
http_response['Cache-Control'] = 'no-cache'
http_response['Etag'] = md5.new(url).hexdigest()  # header needs the hex digest, not the md5 object
return http_response
# TODO remove this, it's a temporary fix to heavy database usage
@cache_page(60 * 30)
def project_badge(request, project_slug, redirect=True):
"""
Return a sweet badge for the project
"""
version_slug = request.GET.get('version', LATEST)
style = request.GET.get('style', 'flat')
try:
version = Version.objects.public(request.user).get(
project__slug=project_slug, slug=version_slug)
except Version.DoesNotExist:
url = (
'https://img.shields.io/badge/docs-unknown%20version-yellow.svg?style={style}'
.format(style=style))
return _badge_return(redirect, url)
version_builds = version.builds.filter(type='html', state='finished').order_by('-date')
if not version_builds.exists():
url = (
'https://img.shields.io/badge/docs-no%20builds-yellow.svg?style={style}'
.format(style=style))
return _badge_return(redirect, url)
last_build = version_builds[0]
if last_build.success:
color = 'brightgreen'
else:
color = 'red'
url = 'https://img.shields.io/badge/docs-%s-%s.svg?style=%s' % (
version.slug.replace('-', '--'), color, style)
return _badge_return(redirect, url)
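# Illustrative note (values derived from the code above, not from the
# shields.io docs): the three possible badge targets are
#
#   https://img.shields.io/badge/docs-unknown%20version-yellow.svg?style=flat
#   https://img.shields.io/badge/docs-no%20builds-yellow.svg?style=flat
#   https://img.shields.io/badge/docs-<slug>-brightgreen.svg?style=flat  (or red)
#
# where '-' in the version slug is escaped to '--' per shields.io syntax.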
def project_downloads(request, project_slug):
"""
A detail view listing a project's downloadable media for each version
"""
project = get_object_or_404(Project.objects.protected(request.user), slug=project_slug)
versions = Version.objects.public(user=request.user, project=project)
version_data = SortedDict()
for version in versions:
data = version.get_downloads()
# Don't show ones that have no downloads.
if data:
version_data[version.slug] = data
# in case the MEDIA_URL is a protocol relative URL we just assume
# we want http as the protocol, so that Dash is able to handle the URL
if settings.MEDIA_URL.startswith('//'):
media_url_prefix = u'http:'
# but in case we're in debug mode and the MEDIA_URL is just a path
# we prefix it with a hardcoded host name and protocol
elif settings.MEDIA_URL.startswith('/') and settings.DEBUG:
media_url_prefix = u'http://%s' % request.get_host()
else:
media_url_prefix = ''
return render_to_response(
'projects/project_downloads.html',
{
'project': project,
'version_data': version_data,
'versions': versions,
'media_url_prefix': media_url_prefix,
},
context_instance=RequestContext(request),
)
def project_download_media(request, project_slug, type, version_slug):
"""
Download a specific piece of media.
Perform an auth check if serving in private mode.
"""
# Do private project auth checks
queryset = Project.objects.protected(request.user).filter(slug=project_slug)
if not queryset.exists():
raise Http404
privacy_level = getattr(settings, 'DEFAULT_PRIVACY_LEVEL', 'public')
if privacy_level == 'public' or settings.DEBUG:
path = os.path.join(settings.MEDIA_URL, type, project_slug, version_slug,
'%s.%s' % (project_slug, type.replace('htmlzip', 'zip')))
return HttpResponseRedirect(path)
else:
# Get relative media path
path = queryset[0].get_production_media_path(type=type, version_slug=version_slug).replace(
settings.PRODUCTION_ROOT, '/prod_artifacts'
)
content_type, encoding = mimetypes.guess_type(path)
content_type = content_type or 'application/octet-stream'
response = HttpResponse(content_type=content_type)
if encoding:
response["Content-Encoding"] = encoding
response['X-Accel-Redirect'] = path
# Include version in filename; this fixes a long-standing bug
filename = "%s-%s.%s" % (project_slug, version_slug, path.split('.')[-1])
response['Content-Disposition'] = 'filename=%s' % filename
return response
def search_autocomplete(request):
"""
return a json list of project names
"""
if 'term' in request.GET:
term = request.GET['term']
else:
raise Http404
queryset = (Project.objects.public(request.user).filter(name__icontains=term)[:20])
ret_list = []
for project in queryset:
ret_list.append({
'label': project.name,
'value': project.slug,
})
json_response = json.dumps(ret_list)
return HttpResponse(json_response, content_type='text/javascript')
def version_autocomplete(request, project_slug):
"""
return a json list of version names
"""
queryset = Project.objects.public(request.user)
get_object_or_404(queryset, slug=project_slug)
versions = Version.objects.public(request.user)
if 'term' in request.GET:
term = request.GET['term']
else:
raise Http404
version_queryset = versions.filter(slug__icontains=term)[:20]
names = version_queryset.values_list('slug', flat=True)
json_response = json.dumps(list(names))
return HttpResponse(json_response, content_type='text/javascript')
def version_filter_autocomplete(request, project_slug):
queryset = Project.objects.public(request.user)
project = get_object_or_404(queryset, slug=project_slug)
versions = Version.objects.public(request.user)
filter = VersionSlugFilter(request.GET, queryset=versions)
format = request.GET.get('format', 'json')
if format == 'json':
names = filter.qs.values_list('slug', flat=True)
json_response = json.dumps(list(names))
return HttpResponse(json_response, content_type='text/javascript')
elif format == 'html':
return render_to_response(
'core/version_list.html',
{
'project': project,
'versions': versions,
'filter': filter,
},
context_instance=RequestContext(request),
)
else:
return HttpResponse(status=400)
def file_autocomplete(request, project_slug):
"""
return a json list of version names
"""
if 'term' in request.GET:
term = request.GET['term']
else:
raise Http404
queryset = ImportedFile.objects.filter(project__slug=project_slug, path__icontains=term)[:20]
ret_list = []
for file in queryset:
ret_list.append({
'label': file.path,
'value': file.path,
})
json_response = json.dumps(ret_list)
return HttpResponse(json_response, content_type='text/javascript')
def elastic_project_search(request, project_slug):
"""
Use elastic search to search in a project.
"""
queryset = Project.objects.protected(request.user)
project = get_object_or_404(queryset, slug=project_slug)
version_slug = request.GET.get('version', LATEST)
query = request.GET.get('q', None)
if query:
user = ''
if request.user.is_authenticated():
user = request.user
log.info(LOG_TEMPLATE.format(
user=user,
project=project or '',
type='inproject',
version=version_slug or '',
language='',
msg=query or '',
))
if query:
kwargs = {}
body = {
"query": {
"bool": {
"should": [
{"match": {"title": {"query": query, "boost": 10}}},
{"match": {"headers": {"query": query, "boost": 5}}},
{"match": {"content": {"query": query}}},
]
}
},
"highlight": {
"fields": {
"title": {},
"headers": {},
"content": {},
}
},
"fields": ["title", "project", "version", "path"],
"filter": {
"and": [
{"term": {"project": project_slug}},
{"term": {"version": version_slug}},
]
},
"size": 50 # TODO: Support pagination.
}
# Add routing to optimize search by hitting the right shard.
kwargs['routing'] = project_slug
results = PageIndex().search(body, **kwargs)
else:
results = {}
if results:
# pre and post 1.0 compat
for num, hit in enumerate(results['hits']['hits']):
for key, val in hit['fields'].items():
if isinstance(val, list):
results['hits']['hits'][num]['fields'][key] = val[0]
return render_to_response(
'search/elastic_project_search.html',
{
'project': project,
'query': query,
'results': results,
},
context_instance=RequestContext(request),
)
def project_versions(request, project_slug):
"""
Shows the available versions and lets the user choose which ones he would
like to have built.
"""
project = get_object_or_404(Project.objects.protected(request.user),
slug=project_slug)
versions = Version.objects.public(user=request.user, project=project, only_active=False)
active_versions = versions.filter(active=True)
inactive_versions = versions.filter(active=False)
inactive_filter = VersionSlugFilter(request.GET, queryset=inactive_versions)
active_filter = VersionSlugFilter(request.GET, queryset=active_versions)
# If there's a wiped query string, check the string against the versions
# list and display a success message. Deleting directories doesn't know how
# to fail. :)
wiped = request.GET.get('wipe', '')
wiped_version = versions.filter(slug=wiped)
if wiped and wiped_version.count():
messages.success(request, 'Version wiped: ' + wiped)
return render_to_response(
'projects/project_version_list.html',
{
'inactive_filter': inactive_filter,
'active_filter': active_filter,
'project': project,
},
context_instance=RequestContext(request)
)
def project_analytics(request, project_slug):
"""
Have a analytics API placeholder
"""
project = get_object_or_404(Project.objects.protected(request.user),
slug=project_slug)
analytics_cache = cache.get('analytics:%s' % project_slug)
if analytics_cache:
analytics = json.loads(analytics_cache)
else:
try:
resp = requests.get(
'{host}/api/v1/index/1/heatmap/'.format(host=settings.GROK_API_HOST),
params={'project': project.slug, 'days': 7, 'compare': True}
)
analytics = resp.json()
cache.set('analytics:%s' % project_slug, resp.content, 1800)
        except Exception:
analytics = None
if analytics:
page_list = list(reversed(sorted(analytics['page'].items(),
key=operator.itemgetter(1))))
version_list = list(reversed(sorted(analytics['version'].items(),
key=operator.itemgetter(1))))
else:
page_list = []
version_list = []
full = request.GET.get('full')
if not full:
page_list = page_list[:20]
version_list = version_list[:20]
return render_to_response(
'projects/project_analytics.html',
{
'project': project,
'analytics': analytics,
'page_list': page_list,
'version_list': version_list,
'full': full,
},
context_instance=RequestContext(request)
)
def project_embed(request, project_slug):
"""
Have a content API placeholder
"""
project = get_object_or_404(Project.objects.protected(request.user),
slug=project_slug)
version = project.versions.get(slug=LATEST)
files = version.imported_files.order_by('path')
return render_to_response(
'projects/project_embed.html',
{
'project': project,
'files': files,
'settings': {
'GROK_API_HOST': settings.GROK_API_HOST,
'URI': request.build_absolute_uri(location='/').rstrip('/')
}
},
context_instance=RequestContext(request)
)
|
GovReady/readthedocs.org
|
readthedocs/projects/views/public.py
|
Python
|
mit
| 16,118
|
"""Support for Ambient Weather Station binary sensors."""
import logging
from homeassistant.components.binary_sensor import BinarySensorDevice
from homeassistant.const import ATTR_NAME
from . import (
SENSOR_TYPES, TYPE_BATT1, TYPE_BATT2, TYPE_BATT3, TYPE_BATT4, TYPE_BATT5,
TYPE_BATT6, TYPE_BATT7, TYPE_BATT8, TYPE_BATT9, TYPE_BATT10, TYPE_BATTOUT,
AmbientWeatherEntity)
from .const import ATTR_LAST_DATA, DATA_CLIENT, DOMAIN, TYPE_BINARY_SENSOR
_LOGGER = logging.getLogger(__name__)
async def async_setup_platform(
hass, config, async_add_entities, discovery_info=None):
"""Set up Ambient PWS binary sensors based on the old way."""
pass
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up Ambient PWS binary sensors based on a config entry."""
ambient = hass.data[DOMAIN][DATA_CLIENT][entry.entry_id]
binary_sensor_list = []
for mac_address, station in ambient.stations.items():
for condition in ambient.monitored_conditions:
name, _, kind, device_class = SENSOR_TYPES[condition]
if kind == TYPE_BINARY_SENSOR:
binary_sensor_list.append(
AmbientWeatherBinarySensor(
ambient, mac_address, station[ATTR_NAME], condition,
name, device_class))
async_add_entities(binary_sensor_list, True)
class AmbientWeatherBinarySensor(AmbientWeatherEntity, BinarySensorDevice):
"""Define an Ambient binary sensor."""
def __init__(
self, ambient, mac_address, station_name, sensor_type, sensor_name,
device_class):
"""Initialize the sensor."""
super().__init__(
ambient, mac_address, station_name, sensor_type, sensor_name)
self._device_class = device_class
@property
def device_class(self):
"""Return the device class."""
return self._device_class
@property
def is_on(self):
"""Return the status of the sensor."""
if self._sensor_type in (TYPE_BATT1, TYPE_BATT10, TYPE_BATT2,
TYPE_BATT3, TYPE_BATT4, TYPE_BATT5,
TYPE_BATT6, TYPE_BATT7, TYPE_BATT8,
TYPE_BATT9, TYPE_BATTOUT):
return self._state == 0
return self._state == 1
async def async_update(self):
"""Fetch new state data for the entity."""
self._state = self._ambient.stations[
self._mac_address][ATTR_LAST_DATA].get(self._sensor_type)
|
MartinHjelmare/home-assistant
|
homeassistant/components/ambient_station/binary_sensor.py
|
Python
|
apache-2.0
| 2,546
|
#
# Copyright (c) 2013 Juniper Networks, Inc. All rights reserved.
#
#
# sandesh_stats.py
#
class SandeshStats(object):
class SandeshStatsElem(object):
def __init__(self):
self.tx_count = 0
self.tx_bytes = 0
self.rx_count = 0
self.rx_bytes = 0
#end __init__
#end SandeshStatsElem
def __init__(self):
self._sandesh_sent = 0
self._bytes_sent = 0
self._sandesh_received = 0
self._bytes_received = 0
self._stats_map = {}
#end __init__
def stats_map(self):
return self._stats_map
#end stats_map
def update_stats(self, sandesh_name, bytes, is_tx):
try:
stats_elem = self._stats_map[sandesh_name]
except KeyError:
stats_elem = SandeshStats.SandeshStatsElem()
finally:
if is_tx:
stats_elem.tx_count += 1
stats_elem.tx_bytes += bytes
self._sandesh_sent += 1
self._bytes_sent += bytes
else:
stats_elem.rx_count += 1
stats_elem.rx_bytes += bytes
self._sandesh_received += 1
self._bytes_received += bytes
self._stats_map[sandesh_name] = stats_elem
#end update_stats
#end class SandeshStats
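# A minimal usage sketch (not part of the original module; the sandesh name
# and byte counts below are illustrative): record one sent and one received
# message, then read the per-sandesh counters back from stats_map().
if __name__ == '__main__':
    stats = SandeshStats()
    stats.update_stats('ExampleSandesh', 128, is_tx=True)
    stats.update_stats('ExampleSandesh', 64, is_tx=False)
    elem = stats.stats_map()['ExampleSandesh']
    print('tx=%d msgs/%d bytes, rx=%d msgs/%d bytes' %
          (elem.tx_count, elem.tx_bytes, elem.rx_count, elem.rx_bytes))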
|
safchain/vr_nldump
|
vr_nldump/pysandesh/sandesh_stats.py
|
Python
|
apache-2.0
| 1,340
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('blog', '0003_auto_20150823_2014'),
]
operations = [
migrations.RenameModel(
old_name='Catagory',
new_name='Category',
),
]
|
python-ning/blog
|
blogchen/blog/migrations/0004_auto_20150824_1217.py
|
Python
|
mpl-2.0
| 355
|
import renderdoc as rd
import rdtest
class GL_Resource_Lifetimes(rdtest.TestCase):
demos_test_name = 'GL_Resource_Lifetimes'
def check_capture(self):
self.check_final_backbuffer()
|
TurtleRockStudios/renderdoc_public
|
util/test/tests/GL/GL_Resource_Lifetimes.py
|
Python
|
mit
| 199
|
from ld2.ld2 import *
import csv  # used by get_example_data() and get_results() below
import math
samples = get_samples('./ld3/ld1/examples_boolean.txt')
results = get_results('./ld3/ld1/d_notand.txt')
class MLPerceptron():
def __init__(self, HIDDEN_COUNT, INPUT_SIZE, W, b, g, nn):
self.HIDDEN = Perceptron(
n=HIDDEN_COUNT,
m=INPUT_SIZE,
W=W,
b=b,
g=g,
nn=nn)
self.LAST = Perceptron(
n=1,
m=HIDDEN_COUNT,
W=[[i[0] for i in W]],
b=b,
g=g,
nn=nn
)
self.grad = [0] * 1
def run_mlp_single(self, x):
return self.LAST.run_mlp_single(self.HIDDEN.run_mlp_single(x))
def train_last_single(self, x, d):
e = []
for i in range(self.LAST.n):
e.append(d - self.LAST.y[i])
self.grad[i] = e[i] / self.LAST.g * self.LAST.y[i] * (1 - self.LAST.y[i])
for k in range(self.LAST.m):
self.LAST.w[i][k] += self.LAST.nn * x[k] * self.grad[i]
self.LAST.b[i] += self.LAST.nn * self.grad[i]
return e
def train_hidden_single(self, x, NEXT):
for j in range(self.HIDDEN.n):
error = 0
for k in range(NEXT.n):
error += NEXT.grad[k] * NEXT.w[k][j]
self.HIDDEN.grad[j] = error / self.HIDDEN.g * self.HIDDEN.y[j] * (1 - self.HIDDEN.y[j])
for i in range(self.HIDDEN.m):
self.HIDDEN.w[j][i] += self.HIDDEN.nn * x[i] * self.HIDDEN.grad[j]
            self.HIDDEN.b[j] += self.HIDDEN.nn * self.HIDDEN.grad[j]
def train_mlp(self, x, d):
err = 10000
epoch = 0
maxepoch = 500
ee = 0.01
while epoch < maxepoch and err > ee:
epoch += 1
err = 0
for k in range(len(x)):
self.run_mlp_single(x[k])
e = self.train_last_single(
self.HIDDEN.y, d[k])
self.train_hidden_single(x[k], self.LAST)
for j in range(len(e)):
err += math.pow(e[j], 2)
            err /= (len(x) * len(e))
print(err)
return (self.HIDDEN.w, self.HIDDEN.b), (self.LAST.w, self.LAST.b), epoch
ml_preceptron = MLPerceptron(
HIDDEN_COUNT=4,
INPUT_SIZE=2,
W=[[-0.3, 0.3], [-0.3, 0.3], [-0.3, 0.3], [-0.3, 0.3]],
b=[0, 0, 0, 0],
g=0.2,
nn=0.1
)
print([ml_preceptron.run_mlp_single(samples[i]) for i in range(4)])
print([ml_preceptron.train_last_single(ml_preceptron.HIDDEN.run_mlp_single(samples[i]), results[i]) for i in range(4)])
[ml_preceptron.train_hidden_single(samples[i], ml_preceptron.LAST) for i in range(4)]
print('{}'.format(ml_preceptron.HIDDEN.w))
print('{}'.format(ml_preceptron.HIDDEN.b))
print()
ml_preceptron = MLPerceptron(
HIDDEN_COUNT=4,
INPUT_SIZE=2,
W=[[-0.3, 0.3], [-0.3, 0.3], [-0.3, 0.3], [-0.3, 0.3]],
b=[0, 0, 0, 0],
g=0.2,
nn=0.1
)
print(ml_preceptron.train_mlp(samples, results))
def normalize(n):
if n == 1:
return 0
elif n == 2:
return 0.5
else:
return 1
def get_example_data():
# read example line by line
with open('./ld3/ld1/examples.txt') as f:
# `\d\\t\d\\t ... \d\\n` format
data = [tuple(i for i in map(lambda x: int(x), d))
for d in csv.reader(f, delimiter='\t')]
data = [tuple(normalize(i) for i in col) for col in data]
return data
sample = get_example_data()
def get_results(path='./ld1/d.txt'):
with open(path) as f:
results = [int(a[0])
for a in csv.reader(f, delimiter='\t')]
data = [normalize(i) for i in results]
return data
results = get_results('./ld3/ld1/d.txt')
ml_preceptron = MLPerceptron(
HIDDEN_COUNT=4,
INPUT_SIZE=4,
W=[[-0.3, 0.3, -0.3, 0.3], [-0.3, 0.3, -0.3, 0.3], [-0.3, 0.3, -0.3, 0.3], [-0.3, 0.3, -0.3, 0.3]],
b=[0, 0, 0, 0],
g=0.2,
nn=0.1
)
hidden, last, epochs = ml_preceptron.train_mlp(sample, results)
|
ktaube/neural-network-course
|
neural-netoworks/ld3.py
|
Python
|
mit
| 4,054
|
# -*- coding: utf-8 -*-
#
# Anaconda documentation build configuration file, created by
# sphinx-quickstart on Wed Sep 18 14:37:01 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Anaconda'
copyright = u'2015, Red Hat, Inc.' # pylint: disable=redefined-builtin
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
def read_version():
""" Read version from ../configure.ac"""
import re
version_re = re.compile(r"AC_INIT\(\[(.*)\], \[(.*)\], \[(.*)\]\)")
with open("../configure.ac", "r") as f:
for line in f:
m = version_re.match(line)
if m:
return m.group(2)
# The short X.Y version.
version = read_version()
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build', 'html']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
autoclass_content = 'both'
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Anacondadoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'Anaconda.tex', u'Anaconda Documentation',
u'Anaconda Team', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'Anaconda', u'Anaconda Documentation',
[u'Anaconda Team'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'Anaconda', u'Anaconda Documentation',
u'Anaconda Team', 'Anaconda', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# -- Options for Epub output ---------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = u'Anaconda'
epub_author = u'Anaconda Team'
epub_publisher = u'Anaconda Team'
epub_copyright = u'2015, Anaconda Team'
# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files that should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
#epub_exclude_files = []
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/2': None}
# on_rtd is whether we are on readthedocs.org
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# Group by class
autodoc_member_order = 'source'
# otherwise, readthedocs.org uses their theme by default, so no need to specify it
# This was taken directly from here:
# http://read-the-docs.readthedocs.org/en/latest/faq.html#i-get-import-errors-on-libraries-that-depend-on-c-modules
# I only added the __getitem__ method.
# NOTE: this can be removed whenever we move to sphinx-1.3, at which point we'll
# be able to use autodoc_mock_imports (value is a list of modules to be
# mocked).
class Mock(object):
__all__ = []
def __init__(self, *args, **kwargs):
pass
def __call__(self, *args, **kwargs):
return Mock()
@classmethod
def __getattr__(cls, name):
if name in ('__file__', '__path__'):
return '/dev/null'
elif name[0] == name[0].upper():
mockType = type(name, (), {})
mockType.__module__ = __name__
return mockType
else:
return Mock()
@classmethod
def __getitem__(cls, key):
return cls.__getattr__(key)
MOCK_MODULES = ['_isys']
for mod_name in MOCK_MODULES:
sys.modules[mod_name] = Mock()
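# A quick sanity check of the Mock shim above (an illustrative sketch, meant
# for an interactive session rather than a Sphinx build). With _isys mocked,
# both lookups succeed without the real C module installed:
#
#     import _isys
#     handle = _isys.anything       # lower-cased names return another Mock
#     klass = _isys.SomeClass       # upper-cased names come back as new types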
|
maxamillion/anaconda
|
docs/conf.py
|
Python
|
gpl-2.0
| 11,072
|
#coding: utf-8
# +-------------------------------------------------------------------
# | BaoTa (宝塔) Linux Panel
# +-------------------------------------------------------------------
# | Copyright (c) 2015-2016 BaoTa Software (http://bt.cn) All rights reserved.
# +-------------------------------------------------------------------
# | Author: Huang Wenliang <2879625666@qq.com>
# +-------------------------------------------------------------------
import re,os
class panelMysql:
__DB_PASS = None
__DB_USER = 'root'
__DB_PORT = 3306
__DB_HOST = '127.0.0.1'
__DB_CONN = None
__DB_CUR = None
__DB_ERR = None
__DB_HOST_CONF = 'data/mysqlHost.pl';
    # Connect to the MySQL database
def __Conn(self):
try:
import public
try:
import MySQLdb
except Exception,ex:
self.__DB_ERR = ex
return False;
try:
myconf = public.readFile('/etc/my.cnf');
rep = "port\s*=\s*([0-9]+)"
self.__DB_PORT = int(re.search(rep,myconf).groups()[0]);
except:
self.__DB_PORT = 3306;
self.__DB_PASS = public.M('config').where('id=?',(1,)).getField('mysql_root');
try:
if os.path.exists(self.__DB_HOST_CONF): self.__DB_HOST = public.readFile(self.__DB_HOST_CONF);
self.__DB_CONN = MySQLdb.connect(host = self.__DB_HOST,user = self.__DB_USER,passwd = self.__DB_PASS,port = self.__DB_PORT,charset="utf8",connect_timeout=1)
except MySQLdb.Error,e:
if e[0] != 2003:
self.__DB_ERR = e
return False
if self.__DB_HOST == 'localhost':
self.__DB_HOST = '127.0.0.1';
else:
self.__DB_HOST = 'localhost';
public.writeFile(self.__DB_HOST_CONF,self.__DB_HOST);
self.__DB_CONN = MySQLdb.connect(host = self.__DB_HOST,user = self.__DB_USER,passwd = self.__DB_PASS,port = self.__DB_PORT,charset="utf8",connect_timeout=1)
self.__DB_CUR = self.__DB_CONN.cursor()
return True
except MySQLdb.Error,e:
self.__DB_ERR = e
return False
def execute(self,sql):
        # Execute a SQL statement and return the number of affected rows
if not self.__Conn(): return self.__DB_ERR
try:
result = self.__DB_CUR.execute(sql)
self.__DB_CONN.commit()
self.__Close()
return result
except Exception,ex:
return ex
def query(self,sql):
        # Execute a SQL statement and return the result set
if not self.__Conn(): return self.__DB_ERR
try:
self.__DB_CUR.execute(sql)
result = self.__DB_CUR.fetchall()
            # Convert the result tuples into lists
data = map(list,result)
self.__Close()
return data
except Exception,ex:
return ex
    # Close the connection
def __Close(self):
self.__DB_CUR.close()
self.__DB_CONN.close()
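# A minimal usage sketch (assumes a BT panel host where /etc/my.cnf and the
# panel's config table exist; the database name is a placeholder). query()
# returns a list of rows on success, execute() the affected row count, and
# both return the caught exception object on failure:
#
#     mysql = panelMysql()
#     databases = mysql.query('SHOW DATABASES')
#     affected = mysql.execute('CREATE DATABASE IF NOT EXISTS demo')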
|
WZQ1397/automatic-repo
|
python/FileSystem/BTpanel/btclass/panelMysql.py
|
Python
|
lgpl-3.0
| 3,122
|
"""Support for Reddit."""
from datetime import timedelta
import logging
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (CONF_USERNAME, CONF_PASSWORD, CONF_MAXIMUM)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
CONF_CLIENT_ID = 'client_id'
CONF_CLIENT_SECRET = 'client_secret'
CONF_SORT_BY = 'sort_by'
CONF_SUBREDDITS = 'subreddits'
ATTR_ID = 'id'
ATTR_BODY = 'body'
ATTR_COMMENTS_NUMBER = 'comms_num'
ATTR_CREATED = 'created'
ATTR_POSTS = 'posts'
ATTR_SUBREDDIT = 'subreddit'
ATTR_SCORE = 'score'
ATTR_TITLE = 'title'
ATTR_URL = 'url'
DEFAULT_NAME = 'Reddit'
DOMAIN = 'reddit'
LIST_TYPES = ['top', 'controversial', 'hot', 'new']
SCAN_INTERVAL = timedelta(seconds=300)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_CLIENT_ID): cv.string,
vol.Required(CONF_CLIENT_SECRET): cv.string,
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_SUBREDDITS): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_SORT_BY, default='hot'):
vol.All(cv.string, vol.In(LIST_TYPES)),
vol.Optional(CONF_MAXIMUM, default=10): cv.positive_int
})
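# An illustrative configuration.yaml entry for this platform (a sketch; the
# credential values are placeholders, not working secrets):
#
#   sensor:
#     - platform: reddit
#       client_id: YOUR_CLIENT_ID
#       client_secret: YOUR_CLIENT_SECRET
#       username: YOUR_USERNAME
#       password: YOUR_PASSWORD
#       subreddits:
#         - homeassistant
#       sort_by: hot
#       maximum: 10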
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Reddit sensor platform."""
import praw
subreddits = config[CONF_SUBREDDITS]
user_agent = '{}_home_assistant_sensor'.format(config[CONF_USERNAME])
limit = config[CONF_MAXIMUM]
sort_by = config[CONF_SORT_BY]
try:
reddit = praw.Reddit(
client_id=config[CONF_CLIENT_ID],
client_secret=config[CONF_CLIENT_SECRET],
username=config[CONF_USERNAME],
password=config[CONF_PASSWORD],
user_agent=user_agent)
_LOGGER.debug('Connected to praw')
except praw.exceptions.PRAWException as err:
_LOGGER.error("Reddit error %s", err)
return
sensors = [RedditSensor(reddit, subreddit, limit, sort_by)
for subreddit in subreddits]
add_entities(sensors, True)
class RedditSensor(Entity):
"""Representation of a Reddit sensor."""
def __init__(self, reddit, subreddit: str, limit: int, sort_by: str):
"""Initialize the Reddit sensor."""
self._reddit = reddit
self._subreddit = subreddit
self._limit = limit
self._sort_by = sort_by
self._subreddit_data = []
@property
def name(self):
"""Return the name of the sensor."""
return 'reddit_{}'.format(self._subreddit)
@property
def state(self):
"""Return the state of the sensor."""
return len(self._subreddit_data)
@property
def device_state_attributes(self):
"""Return the state attributes."""
return {
ATTR_SUBREDDIT: self._subreddit,
ATTR_POSTS: self._subreddit_data,
CONF_SORT_BY: self._sort_by
}
@property
def icon(self):
"""Return the icon to use in the frontend."""
return 'mdi:reddit'
def update(self):
"""Update data from Reddit API."""
import praw
self._subreddit_data = []
try:
subreddit = self._reddit.subreddit(self._subreddit)
if hasattr(subreddit, self._sort_by):
method_to_call = getattr(subreddit, self._sort_by)
for submission in method_to_call(limit=self._limit):
self._subreddit_data.append({
ATTR_ID: submission.id,
ATTR_URL: submission.url,
ATTR_TITLE: submission.title,
ATTR_SCORE: submission.score,
ATTR_COMMENTS_NUMBER: submission.num_comments,
ATTR_CREATED: submission.created,
ATTR_BODY: submission.selftext
})
except praw.exceptions.PRAWException as err:
_LOGGER.error("Reddit error %s", err)
|
MartinHjelmare/home-assistant
|
homeassistant/components/reddit/sensor.py
|
Python
|
apache-2.0
| 4,105
|
from test_support import *
prove_all()
gnatprove(opt=["-P", "test.gpr", "--clean"])
prove_all()
|
ptroja/spark2014
|
testsuite/gnatprove/tests/N730-015__clean/test.py
|
Python
|
gpl-3.0
| 96
|
from django.db import connection
from django.utils.log import getLogger
logger = getLogger(__name__)
IDENT = b'<!-- queries -->'
class QueryCountDebugMiddleware(object):
"""
This middleware will log the number of queries run
and the total time taken for each request (with a
status code of 200). It does not currently support
multi-db setups.
"""
def process_response(self, request, response):
if response is None:
return None
if response.status_code == 200:
total_time = 0
totals = {}
for query in connection.queries:
f = query['sql']
if 'FROM' in f:
f = f.split('FROM')
# print(f, len(f))
f = f[1].split('"')[1]
*app, model = f.split('_')
app = '_'.join(app)
totals.setdefault(app, {})
totals[app].setdefault(model, 0)
totals[app][model] += 1
else:
totals.setdefault('unknown_app', {})
totals['unknown_app'].setdefault('unknown_model', 0)
totals['unknown_app']['unknown_model'] += 1
query_time = query.get('time')
if query_time is None:
# django-debug-toolbar monkeypatches the connection
# cursor wrapper and adds extra information in each
# item in connection.queries. The query time is stored
# under the key "duration" rather than "time" and is
# in milliseconds, not seconds.
query_time = query.get('duration', 0) / 1000
total_time += float(query_time)
content = '{} queries taking {:.4f}s'.format(
len(connection.queries),
total_time
)
if totals:
print()
totals = sorted(
totals.items(),
key=lambda x: sum(x[1].values()),
reverse=True
)
for k, v in totals:
print('{};'.format(k))
v = sorted(
v.items(),
key=lambda x: x[1],
reverse=True
)
for sk, sv in v:
print('\t{} -> {}'.format(sk, sv))
print()
if IDENT in response.content:
response.content = response.content.decode().replace(
IDENT.decode(),
content
).encode()
else:
logger.debug(content)
return response
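# A usage sketch (assumptions: old-style MIDDLEWARE_CLASSES settings and the
# dotted path below, which mirrors this file's location). Any 200 response
# whose body contains the IDENT marker gets it replaced with the summary;
# otherwise the summary goes to the debug log:
#
#     # settings.py
#     MIDDLEWARE_CLASSES += (
#         'common.middleware.query_count.QueryCountDebugMiddleware',
#     )
#
#     <!-- base.html, near the footer -->
#     <!-- queries -->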
|
Mause/pyalp
|
pyalp/common/middleware/query_count.py
|
Python
|
mit
| 2,817
|
'''
* Copyright (C) 2015 Francisco Javier <https://mx.linkedin.com/in/fcojavierpena>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
'''
from xml.dom import minidom
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.pool import StaticPool
from DataAccess import DataString
__author__ = 'Estacion1'
import xml.etree.cElementTree as ET
class ExportData():
def exportToXMLFileString(self, window, path, languageTo):
window.lbProcess.setText('Writing file...')
p = str(path).split('strings')
newPath = p[0] + 'values-%s/'%(languageTo) + 'string.xml'
try:
engine = create_engine('sqlite:///data.sqlite',connect_args={'check_same_thread':True}, poolclass=StaticPool)
session = sessionmaker()
session.configure(bind=engine)
s = session()
data = s.query(DataString.name, DataString.value, DataString.translation).all()
if len(data) != 0:
root = ET.Element("resources")
for string in data:
ET.SubElement(root, "string", name=string[0]).text = string[2]
tree = ET.ElementTree(root)
tree.write(path)
xmlstr = minidom.parseString(ET.tostring(tree.getroot(), 'utf-8')).toprettyxml(indent=" ")
with open(path, "w") as f:
f.write(xmlstr)
window.lbProcess.setText('Process finished! Please rename the file.')
window.btExportFile.setEnabled(True)
else:
window.btImportRes.setEnabled(True)
window.lbProcess.setText('Empty Database. Please import an XML file')
        except Exception:
window.lbProcess.setText('Error creating XML file!')
window.btExportFile.setEnabled(False)
|
FranciscoJavierPRamos/AndroidAppTranslate
|
Export.py
|
Python
|
apache-2.0
| 2,381
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Load plugin assets from disk."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os.path
import tensorflow as tf
_PLUGINS_DIR = "plugins"
def _IsDirectory(parent, item):
"""Helper that returns if parent/item is a directory."""
return tf.gfile.IsDirectory(os.path.join(parent, item))
def PluginDirectory(logdir, plugin_name):
"""Returns the plugin directory for plugin_name."""
return os.path.join(logdir, _PLUGINS_DIR, plugin_name)
def ListPlugins(logdir):
"""List all the plugins that have registered assets in logdir.
If the plugins_dir does not exist, it returns an empty list. This maintains
compatibility with old directories that have no plugins written.
Args:
logdir: A directory that was created by a TensorFlow events writer.
Returns:
a list of plugin names, as strings
"""
plugins_dir = os.path.join(logdir, _PLUGINS_DIR)
if not tf.gfile.IsDirectory(plugins_dir):
return []
entries = tf.gfile.ListDirectory(plugins_dir)
return [x for x in entries if _IsDirectory(plugins_dir, x)]
def ListAssets(logdir, plugin_name):
"""List all the assets that are available for given plugin in a logdir.
Args:
logdir: A directory that was created by a TensorFlow summary.FileWriter.
plugin_name: A string name of a plugin to list assets for.
Returns:
A string list of available plugin assets. If the plugin subdirectory does
not exist (either because the logdir doesn't exist, or because the plugin
didn't register) an empty list is returned.
"""
plugin_dir = PluginDirectory(logdir, plugin_name)
if not tf.gfile.IsDirectory(plugin_dir):
return []
entries = tf.gfile.ListDirectory(plugin_dir)
return [x for x in entries if not _IsDirectory(plugin_dir, x)]
def RetrieveAsset(logdir, plugin_name, asset_name):
"""Retrieve a particular plugin asset from a logdir.
Args:
logdir: A directory that was created by a TensorFlow summary.FileWriter.
plugin_name: The plugin we want an asset from.
asset_name: The name of the requested asset.
Returns:
string contents of the plugin asset.
Raises:
KeyError: if the asset does not exist.
"""
asset_path = os.path.join(PluginDirectory(logdir, plugin_name), asset_name)
try:
with tf.gfile.Open(asset_path, "r") as f:
return f.read()
except tf.errors.NotFoundError:
raise KeyError("Asset path %s not found" % asset_path)
except tf.errors.OpError as e:
raise KeyError("Couldn't read asset path: %s, OpError %s" % (asset_path, e))
|
sjperkins/tensorflow
|
tensorflow/tensorboard/backend/event_processing/plugin_asset_util.py
|
Python
|
apache-2.0
| 3,278
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division)
__metaclass__ = type
import os
import pwd
import sys
from string import ascii_letters, digits
from six import string_types
from six.moves import configparser
from ansible.parsing.splitter import unquote
from ansible.errors import AnsibleOptionsError
# copied from utils, avoid circular reference fun :)
def mk_boolean(value):
if value is None:
return False
val = str(value)
if val.lower() in [ "true", "t", "y", "1", "yes" ]:
return True
else:
return False
def get_config(p, section, key, env_var, default, boolean=False, integer=False, floating=False, islist=False):
''' return a configuration variable with casting '''
value = _get_config(p, section, key, env_var, default)
if boolean:
value = mk_boolean(value)
if value:
if integer:
value = int(value)
elif floating:
value = float(value)
elif islist:
if isinstance(value, string_types):
value = [x.strip() for x in value.split(',')]
elif isinstance(value, string_types):
value = unquote(value)
return value
def _get_config(p, section, key, env_var, default):
''' helper function for get_config '''
if env_var is not None:
value = os.environ.get(env_var, None)
if value is not None:
return value
if p is not None:
try:
return p.get(section, key, raw=True)
        except Exception:
return default
return default
def load_config_file():
''' Load Config File order(first found is used): ENV, CWD, HOME, /etc/ansible '''
p = configparser.ConfigParser()
path0 = os.getenv("ANSIBLE_CONFIG", None)
if path0 is not None:
path0 = os.path.expanduser(path0)
if os.path.isdir(path0):
path0 += "/ansible.cfg"
path1 = os.getcwd() + "/ansible.cfg"
path2 = os.path.expanduser("~/.ansible.cfg")
path3 = "/etc/ansible/ansible.cfg"
for path in [path0, path1, path2, path3]:
if path is not None and os.path.exists(path):
try:
p.read(path)
except configparser.Error as e:
raise AnsibleOptionsError("Error reading config file: \n{0}".format(e))
return p, path
return None, ''
def shell_expand_path(path):
''' shell_expand_path is needed as os.path.expanduser does not work
when path is None, which is the default for ANSIBLE_PRIVATE_KEY_FILE '''
if path:
path = os.path.expanduser(os.path.expandvars(path))
return path
p, CONFIG_FILE = load_config_file()
active_user = pwd.getpwuid(os.geteuid())[0]
# check all of these extensions when looking for yaml files for things like
# group variables -- really anything we can load
YAML_FILENAME_EXTENSIONS = [ "", ".yml", ".yaml", ".json" ]
# sections in config file
DEFAULTS='defaults'
# generally configurable things
DEFAULT_DEBUG = get_config(p, DEFAULTS, 'debug', 'ANSIBLE_DEBUG', False, boolean=True)
DEFAULT_HOST_LIST = shell_expand_path(get_config(p, DEFAULTS, 'hostfile', 'ANSIBLE_HOSTS', get_config(p, DEFAULTS,'inventory','ANSIBLE_INVENTORY', '/etc/ansible/hosts')))
DEFAULT_MODULE_PATH = get_config(p, DEFAULTS, 'library', 'ANSIBLE_LIBRARY', None)
DEFAULT_ROLES_PATH = shell_expand_path(get_config(p, DEFAULTS, 'roles_path', 'ANSIBLE_ROLES_PATH', '/etc/ansible/roles'))
DEFAULT_REMOTE_TMP = get_config(p, DEFAULTS, 'remote_tmp', 'ANSIBLE_REMOTE_TEMP', '$HOME/.ansible/tmp')
DEFAULT_MODULE_NAME = get_config(p, DEFAULTS, 'module_name', None, 'command')
DEFAULT_PATTERN = get_config(p, DEFAULTS, 'pattern', None, '*')
DEFAULT_FORKS = get_config(p, DEFAULTS, 'forks', 'ANSIBLE_FORKS', 5, integer=True)
DEFAULT_MODULE_ARGS = get_config(p, DEFAULTS, 'module_args', 'ANSIBLE_MODULE_ARGS', '')
DEFAULT_MODULE_LANG = get_config(p, DEFAULTS, 'module_lang', 'ANSIBLE_MODULE_LANG', 'en_US.UTF-8')
DEFAULT_TIMEOUT = get_config(p, DEFAULTS, 'timeout', 'ANSIBLE_TIMEOUT', 10, integer=True)
DEFAULT_POLL_INTERVAL = get_config(p, DEFAULTS, 'poll_interval', 'ANSIBLE_POLL_INTERVAL', 15, integer=True)
DEFAULT_REMOTE_USER = get_config(p, DEFAULTS, 'remote_user', 'ANSIBLE_REMOTE_USER', active_user)
DEFAULT_ASK_PASS = get_config(p, DEFAULTS, 'ask_pass', 'ANSIBLE_ASK_PASS', False, boolean=True)
DEFAULT_PRIVATE_KEY_FILE = shell_expand_path(get_config(p, DEFAULTS, 'private_key_file', 'ANSIBLE_PRIVATE_KEY_FILE', None))
DEFAULT_REMOTE_PORT = get_config(p, DEFAULTS, 'remote_port', 'ANSIBLE_REMOTE_PORT', None, integer=True)
DEFAULT_ASK_VAULT_PASS = get_config(p, DEFAULTS, 'ask_vault_pass', 'ANSIBLE_ASK_VAULT_PASS', False, boolean=True)
DEFAULT_VAULT_PASSWORD_FILE = shell_expand_path(get_config(p, DEFAULTS, 'vault_password_file', 'ANSIBLE_VAULT_PASSWORD_FILE', None))
DEFAULT_TRANSPORT = get_config(p, DEFAULTS, 'transport', 'ANSIBLE_TRANSPORT', 'smart')
DEFAULT_SCP_IF_SSH = get_config(p, 'ssh_connection', 'scp_if_ssh', 'ANSIBLE_SCP_IF_SSH', False, boolean=True)
DEFAULT_SFTP_BATCH_MODE = get_config(p, 'ssh_connection', 'sftp_batch_mode', 'ANSIBLE_SFTP_BATCH_MODE', True, boolean=True)
DEFAULT_MANAGED_STR = get_config(p, DEFAULTS, 'ansible_managed', None, 'Ansible managed: {file} modified on %Y-%m-%d %H:%M:%S by {uid} on {host}')
DEFAULT_SYSLOG_FACILITY = get_config(p, DEFAULTS, 'syslog_facility', 'ANSIBLE_SYSLOG_FACILITY', 'LOG_USER')
DEFAULT_KEEP_REMOTE_FILES = get_config(p, DEFAULTS, 'keep_remote_files', 'ANSIBLE_KEEP_REMOTE_FILES', False, boolean=True)
DEFAULT_HASH_BEHAVIOUR = get_config(p, DEFAULTS, 'hash_behaviour', 'ANSIBLE_HASH_BEHAVIOUR', 'replace')
DEFAULT_PRIVATE_ROLE_VARS = get_config(p, DEFAULTS, 'private_role_vars', 'ANSIBLE_PRIVATE_ROLE_VARS', False, boolean=True)
DEFAULT_JINJA2_EXTENSIONS = get_config(p, DEFAULTS, 'jinja2_extensions', 'ANSIBLE_JINJA2_EXTENSIONS', None)
DEFAULT_EXECUTABLE = get_config(p, DEFAULTS, 'executable', 'ANSIBLE_EXECUTABLE', '/bin/sh')
DEFAULT_GATHERING = get_config(p, DEFAULTS, 'gathering', 'ANSIBLE_GATHERING', 'implicit').lower()
DEFAULT_LOG_PATH = shell_expand_path(get_config(p, DEFAULTS, 'log_path', 'ANSIBLE_LOG_PATH', ''))
DEFAULT_FORCE_HANDLERS = get_config(p, DEFAULTS, 'force_handlers', 'ANSIBLE_FORCE_HANDLERS', False, boolean=True)
DEFAULT_INVENTORY_IGNORE = get_config(p, DEFAULTS, 'inventory_ignore_extensions', 'ANSIBLE_INVENTORY_IGNORE', ["~", ".orig", ".bak", ".ini", ".cfg", ".retry", ".pyc", ".pyo"], islist=True)
# selinux
DEFAULT_SELINUX_SPECIAL_FS = get_config(p, 'selinux', 'special_context_filesystems', None, 'fuse, nfs, vboxsf, ramfs', islist=True)
### PRIVILEGE ESCALATION ###
# Backwards Compat
DEFAULT_SU = get_config(p, DEFAULTS, 'su', 'ANSIBLE_SU', False, boolean=True)
DEFAULT_SU_USER = get_config(p, DEFAULTS, 'su_user', 'ANSIBLE_SU_USER', 'root')
DEFAULT_SU_EXE = get_config(p, DEFAULTS, 'su_exe', 'ANSIBLE_SU_EXE', 'su')
DEFAULT_SU_FLAGS = get_config(p, DEFAULTS, 'su_flags', 'ANSIBLE_SU_FLAGS', '')
DEFAULT_ASK_SU_PASS = get_config(p, DEFAULTS, 'ask_su_pass', 'ANSIBLE_ASK_SU_PASS', False, boolean=True)
DEFAULT_SUDO = get_config(p, DEFAULTS, 'sudo', 'ANSIBLE_SUDO', False, boolean=True)
DEFAULT_SUDO_USER = get_config(p, DEFAULTS, 'sudo_user', 'ANSIBLE_SUDO_USER', 'root')
DEFAULT_SUDO_EXE = get_config(p, DEFAULTS, 'sudo_exe', 'ANSIBLE_SUDO_EXE', 'sudo')
DEFAULT_SUDO_FLAGS = get_config(p, DEFAULTS, 'sudo_flags', 'ANSIBLE_SUDO_FLAGS', '-H')
DEFAULT_ASK_SUDO_PASS = get_config(p, DEFAULTS, 'ask_sudo_pass', 'ANSIBLE_ASK_SUDO_PASS', False, boolean=True)
# Become
BECOME_ERROR_STRINGS = {'sudo': 'Sorry, try again.', 'su': 'Authentication failure', 'pbrun': '', 'pfexec': '', 'runas': ''} #FIXME: deal with i18n
BECOME_METHODS = ['sudo','su','pbrun','pfexec','runas']
DEFAULT_BECOME_METHOD = get_config(p, 'privilege_escalation', 'become_method', 'ANSIBLE_BECOME_METHOD','sudo' if DEFAULT_SUDO else 'su' if DEFAULT_SU else 'sudo' ).lower()
DEFAULT_BECOME = get_config(p, 'privilege_escalation', 'become', 'ANSIBLE_BECOME',False, boolean=True)
DEFAULT_BECOME_USER = get_config(p, 'privilege_escalation', 'become_user', 'ANSIBLE_BECOME_USER', 'root')
DEFAULT_BECOME_EXE = get_config(p, 'privilege_escalation', 'become_exe', 'ANSIBLE_BECOME_EXE', None)
DEFAULT_BECOME_FLAGS = get_config(p, 'privilege_escalation', 'become_flags', 'ANSIBLE_BECOME_FLAGS', None)
DEFAULT_BECOME_ASK_PASS = get_config(p, 'privilege_escalation', 'become_ask_pass', 'ANSIBLE_BECOME_ASK_PASS', False, boolean=True)
# Plugin paths
DEFAULT_ACTION_PLUGIN_PATH = get_config(p, DEFAULTS, 'action_plugins', 'ANSIBLE_ACTION_PLUGINS', '~/.ansible/plugins/action_plugins:/usr/share/ansible_plugins/action_plugins')
DEFAULT_CACHE_PLUGIN_PATH = get_config(p, DEFAULTS, 'cache_plugins', 'ANSIBLE_CACHE_PLUGINS', '~/.ansible/plugins/cache_plugins:/usr/share/ansible_plugins/cache_plugins')
DEFAULT_CALLBACK_PLUGIN_PATH = get_config(p, DEFAULTS, 'callback_plugins', 'ANSIBLE_CALLBACK_PLUGINS', '~/.ansible/plugins/callback_plugins:/usr/share/ansible_plugins/callback_plugins')
DEFAULT_CONNECTION_PLUGIN_PATH = get_config(p, DEFAULTS, 'connection_plugins', 'ANSIBLE_CONNECTION_PLUGINS', '~/.ansible/plugins/connection_plugins:/usr/share/ansible_plugins/connection_plugins')
DEFAULT_LOOKUP_PLUGIN_PATH = get_config(p, DEFAULTS, 'lookup_plugins', 'ANSIBLE_LOOKUP_PLUGINS', '~/.ansible/plugins/lookup_plugins:/usr/share/ansible_plugins/lookup_plugins')
DEFAULT_VARS_PLUGIN_PATH = get_config(p, DEFAULTS, 'vars_plugins', 'ANSIBLE_VARS_PLUGINS', '~/.ansible/plugins/vars_plugins:/usr/share/ansible_plugins/vars_plugins')
DEFAULT_FILTER_PLUGIN_PATH = get_config(p, DEFAULTS, 'filter_plugins', 'ANSIBLE_FILTER_PLUGINS', '~/.ansible/plugins/filter_plugins:/usr/share/ansible_plugins/filter_plugins')
DEFAULT_TEST_PLUGIN_PATH = get_config(p, DEFAULTS, 'test_plugins', 'ANSIBLE_TEST_PLUGINS', '~/.ansible/plugins/test_plugins:/usr/share/ansible_plugins/test_plugins')
DEFAULT_STDOUT_CALLBACK = get_config(p, DEFAULTS, 'stdout_callback', 'ANSIBLE_STDOUT_CALLBACK', 'default')
CACHE_PLUGIN = get_config(p, DEFAULTS, 'fact_caching', 'ANSIBLE_CACHE_PLUGIN', 'memory')
CACHE_PLUGIN_CONNECTION = get_config(p, DEFAULTS, 'fact_caching_connection', 'ANSIBLE_CACHE_PLUGIN_CONNECTION', None)
CACHE_PLUGIN_PREFIX = get_config(p, DEFAULTS, 'fact_caching_prefix', 'ANSIBLE_CACHE_PLUGIN_PREFIX', 'ansible_facts')
CACHE_PLUGIN_TIMEOUT = get_config(p, DEFAULTS, 'fact_caching_timeout', 'ANSIBLE_CACHE_PLUGIN_TIMEOUT', 24 * 60 * 60, integer=True)
# Display
ANSIBLE_FORCE_COLOR = get_config(p, DEFAULTS, 'force_color', 'ANSIBLE_FORCE_COLOR', None, boolean=True)
ANSIBLE_NOCOLOR = get_config(p, DEFAULTS, 'nocolor', 'ANSIBLE_NOCOLOR', None, boolean=True)
ANSIBLE_NOCOWS = get_config(p, DEFAULTS, 'nocows', 'ANSIBLE_NOCOWS', None, boolean=True)
DISPLAY_SKIPPED_HOSTS = get_config(p, DEFAULTS, 'display_skipped_hosts', 'DISPLAY_SKIPPED_HOSTS', True, boolean=True)
DEFAULT_UNDEFINED_VAR_BEHAVIOR = get_config(p, DEFAULTS, 'error_on_undefined_vars', 'ANSIBLE_ERROR_ON_UNDEFINED_VARS', True, boolean=True)
HOST_KEY_CHECKING = get_config(p, DEFAULTS, 'host_key_checking', 'ANSIBLE_HOST_KEY_CHECKING', True, boolean=True)
SYSTEM_WARNINGS = get_config(p, DEFAULTS, 'system_warnings', 'ANSIBLE_SYSTEM_WARNINGS', True, boolean=True)
DEPRECATION_WARNINGS = get_config(p, DEFAULTS, 'deprecation_warnings', 'ANSIBLE_DEPRECATION_WARNINGS', True, boolean=True)
DEFAULT_CALLABLE_WHITELIST = get_config(p, DEFAULTS, 'callable_whitelist', 'ANSIBLE_CALLABLE_WHITELIST', [], islist=True)
COMMAND_WARNINGS = get_config(p, DEFAULTS, 'command_warnings', 'ANSIBLE_COMMAND_WARNINGS', True, boolean=True)
DEFAULT_LOAD_CALLBACK_PLUGINS = get_config(p, DEFAULTS, 'bin_ansible_callbacks', 'ANSIBLE_LOAD_CALLBACK_PLUGINS', False, boolean=True)
DEFAULT_CALLBACK_WHITELIST = get_config(p, DEFAULTS, 'callback_whitelist', 'ANSIBLE_CALLBACK_WHITELIST', [], islist=True)
RETRY_FILES_ENABLED = get_config(p, DEFAULTS, 'retry_files_enabled', 'ANSIBLE_RETRY_FILES_ENABLED', True, boolean=True)
RETRY_FILES_SAVE_PATH = get_config(p, DEFAULTS, 'retry_files_save_path', 'ANSIBLE_RETRY_FILES_SAVE_PATH', '~/')
# CONNECTION RELATED
ANSIBLE_SSH_ARGS = get_config(p, 'ssh_connection', 'ssh_args', 'ANSIBLE_SSH_ARGS', None)
ANSIBLE_SSH_CONTROL_PATH = get_config(p, 'ssh_connection', 'control_path', 'ANSIBLE_SSH_CONTROL_PATH', "%(directory)s/ansible-ssh-%%h-%%p-%%r")
ANSIBLE_SSH_PIPELINING = get_config(p, 'ssh_connection', 'pipelining', 'ANSIBLE_SSH_PIPELINING', False, boolean=True)
ANSIBLE_SSH_RETRIES = get_config(p, 'ssh_connection', 'retries', 'ANSIBLE_SSH_RETRIES', 0, integer=True)
PARAMIKO_RECORD_HOST_KEYS = get_config(p, 'paramiko_connection', 'record_host_keys', 'ANSIBLE_PARAMIKO_RECORD_HOST_KEYS', True, boolean=True)
# obsolete -- will be formally removed
ZEROMQ_PORT = get_config(p, 'fireball_connection', 'zeromq_port', 'ANSIBLE_ZEROMQ_PORT', 5099, integer=True)
ACCELERATE_PORT = get_config(p, 'accelerate', 'accelerate_port', 'ACCELERATE_PORT', 5099, integer=True)
ACCELERATE_TIMEOUT = get_config(p, 'accelerate', 'accelerate_timeout', 'ACCELERATE_TIMEOUT', 30, integer=True)
ACCELERATE_CONNECT_TIMEOUT = get_config(p, 'accelerate', 'accelerate_connect_timeout', 'ACCELERATE_CONNECT_TIMEOUT', 1.0, floating=True)
ACCELERATE_DAEMON_TIMEOUT = get_config(p, 'accelerate', 'accelerate_daemon_timeout', 'ACCELERATE_DAEMON_TIMEOUT', 30, integer=True)
ACCELERATE_KEYS_DIR = get_config(p, 'accelerate', 'accelerate_keys_dir', 'ACCELERATE_KEYS_DIR', '~/.fireball.keys')
ACCELERATE_KEYS_DIR_PERMS = get_config(p, 'accelerate', 'accelerate_keys_dir_perms', 'ACCELERATE_KEYS_DIR_PERMS', '700')
ACCELERATE_KEYS_FILE_PERMS = get_config(p, 'accelerate', 'accelerate_keys_file_perms', 'ACCELERATE_KEYS_FILE_PERMS', '600')
ACCELERATE_MULTI_KEY = get_config(p, 'accelerate', 'accelerate_multi_key', 'ACCELERATE_MULTI_KEY', False, boolean=True)
PARAMIKO_PTY = get_config(p, 'paramiko_connection', 'pty', 'ANSIBLE_PARAMIKO_PTY', True, boolean=True)
# galaxy related
DEFAULT_GALAXY_URI = get_config(p, 'galaxy', 'server_uri', 'ANSIBLE_GALAXY_SERVER_URI', 'https://galaxy.ansible.com')
# this can be configured to blacklist SCMS but cannot add new ones unless the code is also updated
GALAXY_SCMS = get_config(p, 'galaxy', 'scms', 'ANSIBLE_GALAXY_SCMS', 'git, hg', islist=True)
# characters included in auto-generated passwords
DEFAULT_PASSWORD_CHARS = ascii_letters + digits + ".,:-_"
# non-configurable things
MODULE_REQUIRE_ARGS = ['command', 'shell', 'raw', 'script']
MODULE_NO_JSON = ['command', 'shell', 'raw']
DEFAULT_BECOME_PASS = None
DEFAULT_SUDO_PASS = None
DEFAULT_REMOTE_PASS = None
DEFAULT_SUBSET = None
DEFAULT_SU_PASS = None
VAULT_VERSION_MIN = 1.0
VAULT_VERSION_MAX = 1.0
MAX_FILE_SIZE_FOR_DIFF = 1*1024*1024
TREE_DIR = None
|
Xowap/ansible
|
lib/ansible/constants.py
|
Python
|
gpl-3.0
| 16,541
|
from setuptools import setup
setup(
name='snappass',
version='1.5.0',
description="It's like SnapChat... for Passwords.",
long_description=(open('README.rst').read() + '\n\n' +
open('AUTHORS.rst').read()),
url='http://github.com/Pinterest/snappass/',
install_requires=['Flask', 'redis', 'cryptography'],
license='MIT',
author='Dave Dash',
author_email='dd+github@davedash.com',
packages=['snappass'],
entry_points={
'console_scripts': [
'snappass = snappass.main:main',
],
},
include_package_data=True,
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Topic :: Software Development :: Libraries :: Python Modules',
],
zip_safe=False,
)
|
samteezy/snappass-heroku
|
setup.py
|
Python
|
mit
| 1,409
|
"""Deprecated; use Graph."""
from rdflib.Graph import Graph
from rdflib.store.Memory import Memory
class TripleStore(Graph):
"""
    Deprecated. Use Graph instead.
"""
def __init__(self, location=None, backend=None):
        if backend is None:
backend = Memory()
super(TripleStore, self).__init__(backend=backend)
if location:
self.load(location)
def prefix_mapping(self, prefix, namespace):
self.bind(prefix, namespace)
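# A minimal usage sketch (assumes rdflib 2.x, where this shim lived; the URL
# is a placeholder):
#
#     store = TripleStore()
#     store.load("http://example.org/data.rdf")
#     for s, p, o in store:
#         print s, p, o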
|
rwl/openpowersystem
|
rdflib/TripleStore.py
|
Python
|
agpl-3.0
| 491
|
import django
if django.VERSION < (2, 0):
from django.conf.urls import url, include
else:
from django.urls import re_path as url, include
from django.conf import settings
urlpatterns = []
if 'admin_tools.menu' in settings.INSTALLED_APPS:
urlpatterns.append(url(r'^menu/', include('admin_tools.menu.urls')))
if 'admin_tools.dashboard' in settings.INSTALLED_APPS:
urlpatterns.append(
url(r'^dashboard/', include('admin_tools.dashboard.urls'))
)
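# A usage sketch: include this urlconf from the project's ROOT_URLCONF so the
# menu/dashboard endpoints resolve (the prefix below is an arbitrary choice):
#
#     # project/urls.py
#     urlpatterns = [
#         url(r'^admin_tools/', include('admin_tools.urls')),
#     ]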
|
django-admin-tools/django-admin-tools
|
admin_tools/urls.py
|
Python
|
mit
| 473
|
# -*- coding: utf-8 -*-
#
import uuid
import time
from django.core.cache import cache
from django.conf import settings
from django.utils.translation import ugettext as _
from django.utils.six import text_type
from django.contrib.auth import get_user_model
from rest_framework import HTTP_HEADER_ENCODING
from rest_framework import authentication, exceptions
from common.auth import signature
from rest_framework.authentication import CSRFCheck
from common.utils import get_object_or_none, make_signature, http_to_unixtime
from ..models import AccessKey, PrivateToken
def get_request_date_header(request):
date = request.META.get('HTTP_DATE', b'')
if isinstance(date, text_type):
# Work around django test client oddness
date = date.encode(HTTP_HEADER_ENCODING)
return date
class AccessKeyAuthentication(authentication.BaseAuthentication):
"""App使用Access key进行签名认证, 目前签名算法比较简单,
app注册或者手动建立后,会生成 access_key_id 和 access_key_secret,
然后使用 如下算法生成签名:
Signature = md5(access_key_secret + '\n' + Date)
example: Signature = md5('d32d2b8b-9a10-4b8d-85bb-1a66976f6fdc' + '\n' +
'Thu, 12 Jan 2017 08:19:41 GMT')
请求时设置请求header
header['Authorization'] = 'Sign access_key_id:Signature' 如:
header['Authorization'] =
'Sign d32d2b8b-9a10-4b8d-85bb-1a66976f6fdc:OKOlmdxgYPZ9+SddnUUDbQ=='
验证时根据相同算法进行验证, 取到access_key_id对应的access_key_id, 从request
headers取到Date, 然后进行md5, 判断得到的结果是否相同, 如果是认证通过, 否则 认证
失败
"""
keyword = 'Sign'
def authenticate(self, request):
auth = authentication.get_authorization_header(request).split()
if not auth or auth[0].lower() != self.keyword.lower().encode():
return None
if len(auth) == 1:
msg = _('Invalid signature header. No credentials provided.')
raise exceptions.AuthenticationFailed(msg)
elif len(auth) > 2:
msg = _('Invalid signature header. Signature '
'string should not contain spaces.')
raise exceptions.AuthenticationFailed(msg)
try:
sign = auth[1].decode().split(':')
if len(sign) != 2:
msg = _('Invalid signature header. '
'Format like AccessKeyId:Signature')
raise exceptions.AuthenticationFailed(msg)
except UnicodeError:
msg = _('Invalid signature header. '
'Signature string should not contain invalid characters.')
raise exceptions.AuthenticationFailed(msg)
access_key_id = sign[0]
try:
uuid.UUID(access_key_id)
except ValueError:
raise exceptions.AuthenticationFailed('Access key id invalid')
request_signature = sign[1]
return self.authenticate_credentials(
request, access_key_id, request_signature
)
@staticmethod
def authenticate_credentials(request, access_key_id, request_signature):
access_key = get_object_or_none(AccessKey, id=access_key_id)
request_date = get_request_date_header(request)
if access_key is None or not access_key.user:
raise exceptions.AuthenticationFailed(_('Invalid signature.'))
access_key_secret = access_key.secret
try:
request_unix_time = http_to_unixtime(request_date)
except ValueError:
raise exceptions.AuthenticationFailed(
_('HTTP header: Date not provide '
'or not %a, %d %b %Y %H:%M:%S GMT'))
if int(time.time()) - request_unix_time > 15 * 60:
raise exceptions.AuthenticationFailed(
_('Expired, more than 15 minutes'))
signature = make_signature(access_key_secret, request_date)
if not signature == request_signature:
raise exceptions.AuthenticationFailed(_('Invalid signature.'))
if not access_key.user.is_active:
raise exceptions.AuthenticationFailed(_('User disabled.'))
return access_key.user, None
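# A client-side sketch of the signing scheme documented above (assumptions:
# base64-of-md5 matches make_signature() in common.utils, and key_id, secret
# and url are placeholders):
#
#     import base64, hashlib, requests
#     from email.utils import formatdate
#     date = formatdate(usegmt=True)  # 'Thu, 12 Jan 2017 08:19:41 GMT' style
#     digest = hashlib.md5((secret + '\n' + date).encode()).digest()
#     sig = base64.b64encode(digest).decode()
#     headers = {'Date': date,
#                'Authorization': 'Sign %s:%s' % (key_id, sig)}
#     requests.get(url, headers=headers)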
class AccessTokenAuthentication(authentication.BaseAuthentication):
keyword = 'Bearer'
expiration = settings.TOKEN_EXPIRATION or 3600
model = get_user_model()
def authenticate(self, request):
auth = authentication.get_authorization_header(request).split()
if not auth or auth[0].lower() != self.keyword.lower().encode():
return None
if len(auth) == 1:
msg = _('Invalid token header. No credentials provided.')
raise exceptions.AuthenticationFailed(msg)
elif len(auth) > 2:
msg = _('Invalid token header. Sign string '
'should not contain spaces.')
raise exceptions.AuthenticationFailed(msg)
try:
token = auth[1].decode()
except UnicodeError:
msg = _('Invalid token header. Sign string '
'should not contain invalid characters.')
raise exceptions.AuthenticationFailed(msg)
return self.authenticate_credentials(token)
def authenticate_credentials(self, token):
model = get_user_model()
user_id = cache.get(token)
user = get_object_or_none(model, id=user_id)
if not user:
msg = _('Invalid token or cache refreshed.')
raise exceptions.AuthenticationFailed(msg)
return user, None
class PrivateTokenAuthentication(authentication.TokenAuthentication):
model = PrivateToken
class SessionAuthentication(authentication.SessionAuthentication):
def authenticate(self, request):
"""
Returns a `User` if the request session currently has a logged in user.
Otherwise returns `None`.
"""
# Get the session-based user from the underlying HttpRequest object
user = getattr(request._request, 'user', None)
# Unauthenticated, CSRF validation not required
if not user or not user.is_active:
return None
try:
self.enforce_csrf(request)
except exceptions.AuthenticationFailed:
return None
# CSRF passed with authenticated user
return user, None
class SignatureAuthentication(signature.SignatureAuthentication):
    # fetch_user_data() below maps the consumer key ID passed in the HTTP
    # header to (User instance, user_secret_string), or (None, None) when the
    # key is not found. The algorithm is whatever the client has sent, in
    # case both RSA and HMAC are supported at your site (and also for
    # expansion).
model = get_user_model()
def fetch_user_data(self, key_id, algorithm="hmac-sha256"):
# ...
# example implementation:
try:
key = AccessKey.objects.get(id=key_id)
if not key.is_active:
return None, None
user, secret = key.user, str(key.secret)
return user, secret
except AccessKey.DoesNotExist:
return None, None
| sdgdsffdsfff/jumpserver | apps/authentication/backends/api.py | Python | gpl-2.0 | 7,226 |
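A quick client-side sketch for the AccessKey scheme in the file above: authenticate() expects a Date header plus an Authorization header it can split into a key id and a signature, and authenticate_credentials() recomputes the signature from the key secret and the Date value. Everything below is hypothetical glue; the 'Sign' keyword, the 'id:signature' credential layout, and the HMAC-SHA256/base64 construction are assumptions standing in for jumpserver's actual make_signature(), which is defined elsewhere in the project.

import base64
import hashlib
import hmac
from email.utils import formatdate


def sign_request_headers(access_key_id, access_key_secret):
    # RFC 1123 date, matching the '%a, %d %b %Y %H:%M:%S GMT' check above
    date_header = formatdate(usegmt=True)
    # stand-in for make_signature(); the real construction may differ
    digest = hmac.new(access_key_secret.encode(), date_header.encode(),
                      hashlib.sha256).digest()
    signature = base64.b64encode(digest).decode()
    # the 'Sign' keyword and 'id:signature' format are assumptions
    return {
        'Date': date_header,
        'Authorization': 'Sign {}:{}'.format(access_key_id, signature),
    }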
from urllib.parse import parse_qs
from ws4py.websocket import WebSocket
from channelstream import utils
from channelstream.server_state import get_state


class ChatApplicationSocket(WebSocket):
def __init__(self, *args, **kwargs):
super(ChatApplicationSocket, self).__init__(*args, **kwargs)
self.qs = None
self.conn_id = None

    def opened(self):
server_state = get_state()
self.qs = parse_qs(self.environ["QUERY_STRING"])
self.conn_id = utils.uuid_from_string(self.qs.get("conn_id")[0])
if self.conn_id not in server_state.connections:
# close connection instantly if user played with id
self.close()
else:
# attach a socket to connection
connection = server_state.connections[self.conn_id]
connection.socket = self
connection.deliver_catchup_messages()

    def received_message(self, m):
server_state = get_state()
# this is to allow client heartbeats
if self.conn_id in server_state.connections:
connection = server_state.connections[self.conn_id]
connection.mark_activity()
user = server_state.users.get(connection.username)
if user:
user.mark_activity()

    def closed(self, code, reason=""):
server_state = get_state()
self.environ.pop("ws4py.app")
        # __init__ always sets conn_id, so a plain membership test suffices
        if self.conn_id in server_state.connections:
connection = server_state.connections[self.conn_id]
connection.mark_for_gc()
| AppEnlight/channelstream | channelstream/ws_app.py | Python | bsd-3-clause | 1,621 |
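A minimal, hypothetical client for the ChatApplicationSocket above: opened() closes the connection unless the conn_id query parameter names a connection the server already tracks, so a real client first registers through channelstream's connect API and only then opens the websocket. The host, port, path, and placeholder conn_id below are assumptions.

from ws4py.client.threadedclient import WebSocketClient


class ChatClient(WebSocketClient):
    def received_message(self, message):
        # messages pushed by the server, e.g. via deliver_catchup_messages()
        print('server said:', message)


if __name__ == '__main__':
    conn_id = 'REPLACE-WITH-REGISTERED-CONN-ID'  # from the connect API
    client = ChatClient('ws://127.0.0.1:8000/ws?conn_id={}'.format(conn_id))
    client.connect()
    client.run_forever()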
import re

from django import template

register = template.Library()


@register.filter
def phone(string):
    """Strip a phone number down to '+' and word characters."""
    # \w already matches digits, so an explicit \d would be redundant
    return re.sub(r'[^+\w]', '', string)
| maikelvl/django-boilerplate | src/main/templatetags/phone.py | Python | mit | 149 |
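The phone filter above is plain Python, so its behavior is easy to check outside a template; the import path below assumes src/ is on sys.path, per the file's location.

from main.templatetags.phone import phone

# separators are stripped; '+' and alphanumerics survive
assert phone('+1 (555) 867-5309') == '+15558675309'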
#*******************************************************************************
# Copyright (C) 2013-2014 Kenneth L. Ho
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.
#*******************************************************************************
import sys
sys.path.append('../python/')
import hypoct
import numpy as np
import time


if __name__ == '__main__':
    # Build a quadtree on uniformly spaced points on the unit circle.
# initialize points
n = 2**20
theta = np.linspace(0, 2*np.pi, n+1)[:n]
x = np.array([np.cos(theta), np.sin(theta)], order='F')
# print input summary
print("Number of points: {:8d}".format(n))
print("----------------------------------------------------")
# set print format
fmt = "{:10.4e} (s) / {:6.2f} (MB)"
# build tree
print("Building tree... ", end=" ")
t0 = time.perf_counter()
tree = hypoct.Tree(x, occ=16)
t = time.perf_counter() - t0
mb = 1e-6*(tree.lvlx.nbytes + tree.rootx.nbytes + tree.xi.nbytes
+ tree.xp.nbytes + tree.nodex.nbytes)
print(fmt.format(t, mb))
# generate child data
print("Generating child data... ", end=" ")
t0 = time.perf_counter()
tree.generate_child_data()
t = time.perf_counter() - t0
mb = 1e-6*(tree.chldp.nbytes)
print(fmt.format(t, mb))
# generate geometry data
print("Generating geometry data... ", end=" ")
t0 = time.perf_counter()
tree.generate_geometry_data()
t = time.perf_counter() - t0
mb = 1e-6*(tree.l.nbytes + tree.ctr.nbytes)
print(fmt.format(t, mb))
# find neighbors
print("Finding neighbors... ", end=" ")
t0 = time.perf_counter()
tree.find_neighbors()
t = time.perf_counter() - t0
mb = 1e-6*(tree.nborp.nbytes + tree.nbori.nbytes)
print(fmt.format(t, mb))
# get interaction lists
print("Getting interaction lists...", end=" ")
t0 = time.perf_counter()
tree.get_interaction_lists()
t = time.perf_counter() - t0
mb = 1e-6*(tree.ilstp.nbytes + tree.ilsti.nbytes)
print(fmt.format(t, mb))
# search tree
print("Searching tree... ", end=" ")
    m = 2**16
y = np.random.rand(2, m)
y = 2*y - 1
t0 = time.perf_counter()
trav = tree.search(y)
t = time.perf_counter() - t0
mb = 1e-6*trav.nbytes
print(fmt.format(t, mb))
# print output summary
print("----------------------------------------------------")
print("Tree depth: {:8d}"
.format(tree.lvlx[1, 0]))
print("Number of nodes: {:8d}"
.format(tree.lvlx[0,-1]))
print("Total number of neighbors: {:8d}"
.format(tree.nborp[-1]))
print("Total number of nodes in interaction lists: {:8d}"
.format(tree.ilstp[-1]))
| klho/hypoct | examples/hypoct_driver.py | Python | gpl-3.0 | 3,364 |
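The driver above walks the whole hypoct wrapper API in order; the same workflow reduces to a few lines on arbitrary points. A sketch with hypothetical sizes, assuming the wrapper from ../python/ is importable:

import numpy as np

import hypoct

# d-by-n, Fortran-ordered points, as in the driver above
x = np.asfortranarray(np.random.rand(2, 1000))
tree = hypoct.Tree(x, occ=8)  # at most 8 points per leaf node
tree.generate_child_data()
tree.generate_geometry_data()
tree.find_neighbors()
tree.get_interaction_lists()
print('depth:', tree.lvlx[1, 0], 'nodes:', tree.lvlx[0, -1])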