| repo_name | path | content |
|---|---|---|
rec/BiblioPixel
|
bibliopixel/layout/geometry/segment.py
|
from . import strip
class Segment(strip.Strip):
"""Represents an offset, length segment within a strip."""
def __init__(self, strip, length, offset=0):
if offset < 0 or length < 0:
raise ValueError('Segment indices are non-negative.')
if offset + length > len(strip):
raise ValueError('Segment too long.')
self.strip = strip
self.offset = offset
self.length = length
def __getitem__(self, index):
return self.strip[self._fix_index(index)]
def __setitem__(self, index, value):
self.strip[self._fix_index(index)] = value
def __len__(self):
return self.length
def next(self, length):
"""Return a new segment starting right after self in the same buffer."""
return Segment(self.strip, length, self.offset + self.length)
def _fix_index(self, index):
if isinstance(index, slice):
raise ValueError('Slicing segments not implemented.')
if index < 0:
index += self.length
        if 0 <= index < self.length:
return self.offset + index
raise IndexError('Index out of range')
def make_segments(strip, length):
"""Return a list of Segments that evenly split the strip."""
if len(strip) % length:
raise ValueError('The length of strip must be a multiple of length')
s = []
try:
while True:
s.append(s[-1].next(length) if s else Segment(strip, length))
except ValueError:
return s
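# A minimal usage sketch (illustrative; any list-like buffer works as the
# strip, and the values here are made up):
if __name__ == '__main__':
    buffer = [0] * 9
    segments = make_segments(buffer, 3)
    assert len(segments) == 3
    segments[1][0] = 255      # writes through to buffer[3] via the offset
    assert buffer[3] == 255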
|
LockScreen/Backend
|
venv/lib/python2.7/site-packages/awscli/customizations/codedeploy/systems.py
|
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import ctypes
import os
import subprocess
DEFAULT_CONFIG_FILE = 'codedeploy.onpremises.yml'
class System:
UNSUPPORTED_SYSTEM_MSG = (
'Only Ubuntu Server, Red Hat Enterprise Linux Server and '
'Windows Server operating systems are supported.'
)
def __init__(self, params):
self.session = params.session
self.s3 = self.session.create_client(
's3',
region_name=params.region
)
def validate_administrator(self):
raise NotImplementedError('validate_administrator')
def install(self, params):
raise NotImplementedError('install')
def uninstall(self, params):
raise NotImplementedError('uninstall')
class Windows(System):
CONFIG_DIR = r'C:\ProgramData\Amazon\CodeDeploy'
CONFIG_FILE = 'conf.onpremises.yml'
CONFIG_PATH = r'{0}\{1}'.format(CONFIG_DIR, CONFIG_FILE)
INSTALLER = 'codedeploy-agent.msi'
def validate_administrator(self):
if not ctypes.windll.shell32.IsUserAnAdmin():
raise RuntimeError(
'You must run this command as an Administrator.'
)
def install(self, params):
if 'installer' in params:
self.INSTALLER = params.installer
process = subprocess.Popen(
[
'powershell.exe',
'-Command', 'Stop-Service',
'-Name', 'codedeployagent'
],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE
)
(output, error) = process.communicate()
not_found = (
"Cannot find any service with service name 'codedeployagent'"
)
if process.returncode != 0 and not_found not in error:
raise RuntimeError(
'Failed to stop the AWS CodeDeploy Agent:\n{0}'.format(error)
)
response = self.s3.get_object(Bucket=params.bucket, Key=params.key)
with open(self.INSTALLER, 'wb') as f:
f.write(response['Body'].read())
subprocess.check_call(
[
r'.\{0}'.format(self.INSTALLER),
'/quiet',
'/l', r'.\codedeploy-agent-install-log.txt'
],
shell=True
)
subprocess.check_call([
'powershell.exe',
'-Command', 'Restart-Service',
'-Name', 'codedeployagent'
])
process = subprocess.Popen(
[
'powershell.exe',
'-Command', 'Get-Service',
'-Name', 'codedeployagent'
],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE
)
(output, error) = process.communicate()
if "Running" not in output:
raise RuntimeError(
'The AWS CodeDeploy Agent did not start after installation.'
)
def uninstall(self, params):
process = subprocess.Popen(
[
'powershell.exe',
'-Command', 'Stop-Service',
'-Name', 'codedeployagent'
],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE
)
(output, error) = process.communicate()
not_found = (
"Cannot find any service with service name 'codedeployagent'"
)
if process.returncode == 0:
self._remove_agent()
elif not_found not in error:
raise RuntimeError(
'Failed to stop the AWS CodeDeploy Agent:\n{0}'.format(error)
)
def _remove_agent(self):
process = subprocess.Popen(
[
'wmic',
'product', 'where', 'name="CodeDeploy Host Agent"',
'call', 'uninstall', '/nointeractive'
],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE
)
(output, error) = process.communicate()
if process.returncode != 0:
raise RuntimeError(
'Failed to uninstall the AWS CodeDeploy Agent:\n{0}'.format(
error
)
)
class Linux(System):
CONFIG_DIR = '/etc/codedeploy-agent/conf'
CONFIG_FILE = DEFAULT_CONFIG_FILE
CONFIG_PATH = '{0}/{1}'.format(CONFIG_DIR, CONFIG_FILE)
INSTALLER = 'install'
def validate_administrator(self):
if os.geteuid() != 0:
raise RuntimeError('You must run this command as sudo.')
def install(self, params):
if 'installer' in params:
self.INSTALLER = params.installer
self._update_system(params)
self._stop_agent(params)
response = self.s3.get_object(Bucket=params.bucket, Key=params.key)
with open(self.INSTALLER, 'wb') as f:
f.write(response['Body'].read())
subprocess.check_call(
['chmod', '+x', './{0}'.format(self.INSTALLER)]
)
credentials = self.session.get_credentials()
environment = os.environ.copy()
environment['AWS_REGION'] = params.region
environment['AWS_ACCESS_KEY_ID'] = credentials.access_key
environment['AWS_SECRET_ACCESS_KEY'] = credentials.secret_key
if credentials.token is not None:
environment['AWS_SESSION_TOKEN'] = credentials.token
subprocess.check_call(
['./{0}'.format(self.INSTALLER), 'auto'],
env=environment
)
def uninstall(self, params):
process = self._stop_agent(params)
if process.returncode == 0:
self._remove_agent(params)
def _update_system(self, params):
raise NotImplementedError('preinstall')
def _remove_agent(self, params):
raise NotImplementedError('remove_agent')
def _stop_agent(self, params):
process = subprocess.Popen(
['service', 'codedeploy-agent', 'stop'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE
)
(output, error) = process.communicate()
if process.returncode != 0 and params.not_found_msg not in error:
raise RuntimeError(
'Failed to stop the AWS CodeDeploy Agent:\n{0}'.format(error)
)
return process
class Ubuntu(Linux):
def _update_system(self, params):
subprocess.check_call(['apt-get', '-y', 'update'])
subprocess.check_call(['apt-get', '-y', 'install', 'ruby2.0'])
def _remove_agent(self, params):
subprocess.check_call(['dpkg', '-r', 'codedeploy-agent'])
def _stop_agent(self, params):
params.not_found_msg = 'codedeploy-agent: unrecognized service'
return Linux._stop_agent(self, params)
class RHEL(Linux):
def _update_system(self, params):
subprocess.check_call(['yum', '-y', 'install', 'ruby'])
def _remove_agent(self, params):
subprocess.check_call(['yum', '-y', 'erase', 'codedeploy-agent'])
def _stop_agent(self, params):
params.not_found_msg = 'Redirecting to /bin/systemctl stop codedeploy-agent.service'
return Linux._stop_agent(self, params)
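# A hedged sketch (hypothetical helper, not part of awscli) of how a
# concrete System subclass could be selected for the current platform:
def _system_for_platform(params):
    import platform
    if platform.system() == 'Windows':
        return Windows(params)
    try:
        with open('/etc/os-release') as f:
            release = f.read()
    except IOError:
        raise RuntimeError(System.UNSUPPORTED_SYSTEM_MSG)
    if 'Ubuntu' in release:
        return Ubuntu(params)
    if 'Red Hat' in release:
        return RHEL(params)
    raise RuntimeError(System.UNSUPPORTED_SYSTEM_MSG)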
|
x64dbg/x64dbgpy
|
swig/x64dbgpy/pluginsdk/_scriptapi/debug.py
|
from .. import x64dbg
class HardwareType:
HardwareAccess = x64dbg.HardwareAccess
HardwareWrite = x64dbg.HardwareWrite
HardwareExecute = x64dbg.HardwareExecute
def Wait():
x64dbg.Wait()
def Run():
x64dbg.Run()
def Stop():
x64dbg.Stop()
def StepIn():
x64dbg.StepIn()
def StepOver():
x64dbg.StepOver()
def StepOut():
x64dbg.StepOut()
def SetBreakpoint(address):
return x64dbg.SetBreakpoint(address)
def DeleteBreakpoint(address):
return x64dbg.DeleteBreakpoint(address)
def SetHardwareBreakpoint(address, type=HardwareType.HardwareExecute):
return x64dbg.SetHardwareBreakpoint(address, type)
def DeleteHardwareBreakpoint(address):
return x64dbg.DeleteHardwareBreakpoint(address)
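# A minimal usage sketch (assumes a debuggee is already loaded in x64dbg;
# the address is a placeholder):
#   SetHardwareBreakpoint(0x00401000, HardwareType.HardwareWrite)
#   Run()    # resume execution
#   Wait()   # block until the debugger pauses again
#   DeleteHardwareBreakpoint(0x00401000)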
|
aginzberg/crowdsource-platform
|
crowdsourcing/migrations/0000_get_requester_ratings_fn.py
|
# -*- coding: utf-8 -*-
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('crowdsourcing', '0007_auto_20151208_1957'),
]
operations = [
migrations.RunSQL('''
CREATE OR REPLACE FUNCTION get_requester_ratings(IN worker_profile_id INTEGER)
RETURNS TABLE(requester_id INTEGER, requester_rating DOUBLE PRECISION,
requester_avg_rating DOUBLE PRECISION)
AS $$
SELECT
r.id,
wr_rating.weight,
avg_wr_rating
FROM crowdsourcing_requester r
LEFT OUTER JOIN (
SELECT
wrr.target_id,
wrr.weight AS weight
FROM crowdsourcing_workerrequesterrating wrr
INNER JOIN (
SELECT
target_id,
MAX(last_updated) AS max_date
FROM crowdsourcing_workerrequesterrating
WHERE origin_type = 'worker' AND origin_id = $1
GROUP BY target_id
) most_recent
ON wrr.target_id = most_recent.target_id AND wrr.last_updated = most_recent.max_date AND
wrr.origin_type = 'worker'
AND wrr.origin_id = $1
) wr_rating ON wr_rating.target_id = r.profile_id
LEFT OUTER JOIN (
SELECT
target_id,
AVG(weight) AS avg_wr_rating
FROM (
SELECT
wrr.target_id,
wrr.weight
FROM crowdsourcing_workerrequesterrating wrr
INNER JOIN (
SELECT
origin_id,
target_id,
MAX(last_updated) AS max_date
FROM crowdsourcing_workerrequesterrating
WHERE origin_id<>$1 AND origin_type='worker'
GROUP BY origin_id, target_id
) most_recent
ON most_recent.origin_id = wrr.origin_id AND most_recent.target_id = wrr.target_id AND
wrr.last_updated = most_recent.max_date
AND wrr.origin_id <> $1 AND wrr.origin_type = 'worker'
) recent_wr_rating
GROUP BY target_id
) avg_wr_rating
ON avg_wr_rating.target_id = r.profile_id;
$$
LANGUAGE SQL
STABLE
RETURNS NULL ON NULL INPUT;
''')
]
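    # A hedged usage sketch: once applied, the function can be queried
    # directly (the worker profile id 1 is a placeholder):
    #   SELECT requester_id, requester_rating, requester_avg_rating
    #   FROM get_requester_ratings(1);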
|
opennode/nodeconductor-assembly-waldur
|
src/waldur_geo_ip/views.py
|
from rest_framework import status, views
from rest_framework.response import Response
from waldur_core.core.utils import get_lat_lon_from_address
from . import serializers
class GeocodeViewSet(views.APIView):
def get(self, request):
serializer = serializers.GeoCodeSerializer(data=request.query_params)
serializer.is_valid(raise_exception=True)
address = serializer.validated_data['address']
lat_lon = get_lat_lon_from_address(address)
if lat_lon:
return Response(
{'latitude': lat_lon[0], 'longitude': lat_lon[1]},
status=status.HTTP_200_OK,
)
return Response(None, status=status.HTTP_200_OK)
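# A hedged request sketch (the URL prefix depends on project routing):
#   GET /geocode/?address=10%20Downing%20Street
#   -> {"latitude": ..., "longitude": ...}, or a null body when the
#      address cannot be resolved.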
|
aoakeson/home-assistant
|
homeassistant/components/sensor/systemmonitor.py
|
"""
Support for monitoring the local system.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.systemmonitor/
"""
import logging
import homeassistant.util.dt as dt_util
from homeassistant.const import STATE_OFF, STATE_ON
from homeassistant.helpers.entity import Entity
REQUIREMENTS = ['psutil==4.0.0']
SENSOR_TYPES = {
'disk_use_percent': ['Disk Use', '%', 'mdi:harddisk'],
'disk_use': ['Disk Use', 'GiB', 'mdi:harddisk'],
'disk_free': ['Disk Free', 'GiB', 'mdi:harddisk'],
'memory_use_percent': ['RAM Use', '%', 'mdi:memory'],
'memory_use': ['RAM Use', 'MiB', 'mdi:memory'],
'memory_free': ['RAM Free', 'MiB', 'mdi:memory'],
'processor_use': ['CPU Use', '%', 'mdi:memory'],
'process': ['Process', '', 'mdi:memory'],
'swap_use_percent': ['Swap Use', '%', 'mdi:harddisk'],
'swap_use': ['Swap Use', 'GiB', 'mdi:harddisk'],
'swap_free': ['Swap Free', 'GiB', 'mdi:harddisk'],
'network_out': ['Sent', 'MiB', 'mdi:server-network'],
    'network_in': ['Received', 'MiB', 'mdi:server-network'],
'packets_out': ['Packets sent', '', 'mdi:server-network'],
    'packets_in': ['Packets received', '', 'mdi:server-network'],
'ipv4_address': ['IPv4 address', '', 'mdi:server-network'],
'ipv6_address': ['IPv6 address', '', 'mdi:server-network'],
'last_boot': ['Last Boot', '', 'mdi:clock'],
'since_last_boot': ['Since Last Boot', '', 'mdi:clock']
}
_LOGGER = logging.getLogger(__name__)
# pylint: disable=unused-argument
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Setup the sensors."""
dev = []
for resource in config['resources']:
if 'arg' not in resource:
resource['arg'] = ''
if resource['type'] not in SENSOR_TYPES:
_LOGGER.error('Sensor type: "%s" does not exist', resource['type'])
else:
dev.append(SystemMonitorSensor(resource['type'], resource['arg']))
add_devices(dev)
class SystemMonitorSensor(Entity):
"""Implementation of a system monitor sensor."""
def __init__(self, sensor_type, argument=''):
"""Initialize the sensor."""
self._name = SENSOR_TYPES[sensor_type][0] + ' ' + argument
self.argument = argument
self.type = sensor_type
self._state = None
self._unit_of_measurement = SENSOR_TYPES[sensor_type][1]
self.update()
@property
def name(self):
"""Return the name of the sensor."""
return self._name.rstrip()
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return SENSOR_TYPES[self.type][2]
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._unit_of_measurement
# pylint: disable=too-many-branches
def update(self):
"""Get the latest system information."""
import psutil
if self.type == 'disk_use_percent':
self._state = psutil.disk_usage(self.argument).percent
elif self.type == 'disk_use':
self._state = round(psutil.disk_usage(self.argument).used /
1024**3, 1)
elif self.type == 'disk_free':
self._state = round(psutil.disk_usage(self.argument).free /
1024**3, 1)
elif self.type == 'memory_use_percent':
self._state = psutil.virtual_memory().percent
elif self.type == 'memory_use':
self._state = round((psutil.virtual_memory().total -
psutil.virtual_memory().available) /
1024**2, 1)
elif self.type == 'memory_free':
self._state = round(psutil.virtual_memory().available / 1024**2, 1)
elif self.type == 'swap_use_percent':
self._state = psutil.swap_memory().percent
elif self.type == 'swap_use':
self._state = round(psutil.swap_memory().used / 1024**3, 1)
elif self.type == 'swap_free':
self._state = round(psutil.swap_memory().free / 1024**3, 1)
elif self.type == 'processor_use':
self._state = round(psutil.cpu_percent(interval=None))
elif self.type == 'process':
if any(self.argument in l.name() for l in psutil.process_iter()):
self._state = STATE_ON
else:
self._state = STATE_OFF
elif self.type == 'network_out':
self._state = round(psutil.net_io_counters(pernic=True)
[self.argument][0] / 1024**2, 1)
elif self.type == 'network_in':
self._state = round(psutil.net_io_counters(pernic=True)
[self.argument][1] / 1024**2, 1)
elif self.type == 'packets_out':
self._state = psutil.net_io_counters(pernic=True)[self.argument][2]
elif self.type == 'packets_in':
self._state = psutil.net_io_counters(pernic=True)[self.argument][3]
elif self.type == 'ipv4_address':
self._state = psutil.net_if_addrs()[self.argument][0][1]
elif self.type == 'ipv6_address':
self._state = psutil.net_if_addrs()[self.argument][1][1]
elif self.type == 'last_boot':
self._state = dt_util.datetime_to_date_str(
dt_util.as_local(
dt_util.utc_from_timestamp(psutil.boot_time())))
elif self.type == 'since_last_boot':
self._state = dt_util.utcnow() - dt_util.utc_from_timestamp(
psutil.boot_time())
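# A minimal standalone sketch (hypothetical wiring outside Home Assistant;
# requires psutil to be installed):
if __name__ == '__main__':
    entities = []
    setup_platform(
        None,
        {'resources': [{'type': 'memory_free'},
                       {'type': 'disk_use_percent', 'arg': '/'}]},
        entities.extend)
    for entity in entities:
        print(entity.name, entity.state, entity.unit_of_measurement)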
|
kmike/psd-tools
|
tests/psd_tools/psd/test_effects_layer.py
|
from __future__ import absolute_import, unicode_literals
import pytest
import logging
from psd_tools.psd.effects_layer import (
CommonStateInfo,
ShadowInfo,
InnerGlowInfo,
OuterGlowInfo,
BevelInfo,
SolidFillInfo,
)
from ..utils import check_write_read, check_read_write
logger = logging.getLogger(__name__)
@pytest.mark.parametrize(
'kls', [
CommonStateInfo,
ShadowInfo,
InnerGlowInfo,
OuterGlowInfo,
BevelInfo,
SolidFillInfo,
]
)
def test_effects_layer_empty_wr(kls):
check_write_read(kls())
@pytest.mark.parametrize(
'fixture', [
(
b'\x00\x00\x00\x028BIMnorm\x0b\xf40262SC\x00\x00\xff\x01\x00\x00'
b'\xf0\x89\xa7s\x94\xd1\x00\x00'
),
]
)
def test_solid_fill_info(fixture):
check_read_write(SolidFillInfo, fixture)
|
nlgcoin/guldencoin-official
|
test/functional/feature_maxuploadtarget.py
|
#!/usr/bin/env python3
# Copyright (c) 2015-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test behavior of -maxuploadtarget.
* Verify that getdata requests for old blocks (>1week) are dropped
if uploadtarget has been reached.
* Verify that getdata requests for recent blocks are respected even
if uploadtarget has been reached.
* Verify that the upload counters are reset after 24 hours.
"""
from collections import defaultdict
import time
from test_framework.messages import CInv, msg_getdata
from test_framework.mininode import P2PInterface
from test_framework.test_framework import GuldenTestFramework
from test_framework.util import assert_equal, mine_large_block
class TestP2PConn(P2PInterface):
def __init__(self):
super().__init__()
self.block_receive_map = defaultdict(int)
def on_inv(self, message):
pass
def on_block(self, message):
message.block.calc_sha256()
self.block_receive_map[message.block.sha256] += 1
class MaxUploadTest(GuldenTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
self.extra_args = [["-maxuploadtarget=800"]]
# Cache for utxos, as the listunspent may take a long time later in the test
self.utxo_cache = []
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
# Before we connect anything, we first set the time on the node
# to be in the past, otherwise things break because the CNode
# time counters can't be reset backward after initialization
old_time = int(time.time() - 2*60*60*24*7)
self.nodes[0].setmocktime(old_time)
# Generate some old blocks
self.nodes[0].generate(130)
# p2p_conns[0] will only request old blocks
# p2p_conns[1] will only request new blocks
# p2p_conns[2] will test resetting the counters
p2p_conns = []
for _ in range(3):
p2p_conns.append(self.nodes[0].add_p2p_connection(TestP2PConn()))
# Now mine a big block
mine_large_block(self.nodes[0], self.utxo_cache)
# Store the hash; we'll request this later
big_old_block = self.nodes[0].getbestblockhash()
old_block_size = self.nodes[0].getblock(big_old_block, True)['size']
big_old_block = int(big_old_block, 16)
# Advance to two days ago
self.nodes[0].setmocktime(int(time.time()) - 2*60*60*24)
# Mine one more block, so that the prior block looks old
mine_large_block(self.nodes[0], self.utxo_cache)
# We'll be requesting this new block too
big_new_block = self.nodes[0].getbestblockhash()
big_new_block = int(big_new_block, 16)
        # p2p_conns[0] will test what happens if we just keep requesting
        # the same big old block too many times (expect: disconnect)
getdata_request = msg_getdata()
getdata_request.inv.append(CInv(2, big_old_block))
max_bytes_per_day = 800*1024*1024
daily_buffer = 144 * 4000000
max_bytes_available = max_bytes_per_day - daily_buffer
success_count = max_bytes_available // old_block_size
# 576MB will be reserved for relaying new blocks, so expect this to
# succeed for ~235 tries.
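        # Worked numbers (a sketch; the exact count depends on
        # old_block_size as measured above):
        #   max_bytes_per_day   = 800 * 1024 * 1024 = 838,860,800
        #   daily_buffer        = 144 * 4,000,000   = 576,000,000
        #   max_bytes_available = 262,860,800, so roughly 235 requests
        #   for a ~1 MB old block fit before the limit trips.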
for i in range(success_count):
p2p_conns[0].send_message(getdata_request)
p2p_conns[0].sync_with_ping()
assert_equal(p2p_conns[0].block_receive_map[big_old_block], i+1)
assert_equal(len(self.nodes[0].getpeerinfo()), 3)
# At most a couple more tries should succeed (depending on how long
# the test has been running so far).
for i in range(3):
p2p_conns[0].send_message(getdata_request)
p2p_conns[0].wait_for_disconnect()
assert_equal(len(self.nodes[0].getpeerinfo()), 2)
self.log.info("Peer 0 disconnected after downloading old block too many times")
# Requesting the current block on p2p_conns[1] should succeed indefinitely,
# even when over the max upload target.
# We'll try 800 times
getdata_request.inv = [CInv(2, big_new_block)]
for i in range(800):
p2p_conns[1].send_message(getdata_request)
p2p_conns[1].sync_with_ping()
assert_equal(p2p_conns[1].block_receive_map[big_new_block], i+1)
self.log.info("Peer 1 able to repeatedly download new block")
# But if p2p_conns[1] tries for an old block, it gets disconnected too.
getdata_request.inv = [CInv(2, big_old_block)]
p2p_conns[1].send_message(getdata_request)
p2p_conns[1].wait_for_disconnect()
assert_equal(len(self.nodes[0].getpeerinfo()), 1)
self.log.info("Peer 1 disconnected after trying to download old block")
self.log.info("Advancing system time on node to clear counters...")
# If we advance the time by 24 hours, then the counters should reset,
# and p2p_conns[2] should be able to retrieve the old block.
self.nodes[0].setmocktime(int(time.time()))
p2p_conns[2].sync_with_ping()
p2p_conns[2].send_message(getdata_request)
p2p_conns[2].sync_with_ping()
assert_equal(p2p_conns[2].block_receive_map[big_old_block], 1)
self.log.info("Peer 2 able to download old block")
self.nodes[0].disconnect_p2ps()
        # Stop and start node 0 with 1 MB maxuploadtarget, whitelist 127.0.0.1
self.log.info("Restarting nodes with -whitelist=127.0.0.1")
self.stop_node(0)
self.start_node(0, ["-whitelist=127.0.0.1", "-maxuploadtarget=1"])
# Reconnect to self.nodes[0]
self.nodes[0].add_p2p_connection(TestP2PConn())
        # Retrieve 20 blocks, which should be enough to break the 1 MB limit
getdata_request.inv = [CInv(2, big_new_block)]
for i in range(20):
self.nodes[0].p2p.send_message(getdata_request)
self.nodes[0].p2p.sync_with_ping()
assert_equal(self.nodes[0].p2p.block_receive_map[big_new_block], i+1)
getdata_request.inv = [CInv(2, big_old_block)]
self.nodes[0].p2p.send_and_ping(getdata_request)
        assert_equal(len(self.nodes[0].getpeerinfo()), 1)  # node is still connected because of the whitelist
self.log.info("Peer still connected after trying to download old block (whitelisted)")
if __name__ == '__main__':
MaxUploadTest().main()
|
indera/olass-client
|
olass/run.py
|
#!/usr/bin/env python
"""
Goal: Implement the application entry point.
@authors:
Andrei Sura <sura.andrei@gmail.com>
"""
import argparse
from olass.olass_client import OlassClient
from olass.version import __version__
DEFAULT_SETTINGS_FILE = 'config/settings.py'
def main():
""" Read args """
parser = argparse.ArgumentParser()
parser.add_argument("-v", "--version",
default=False,
action='store_true',
help="Show the version number")
parser.add_argument("-c", "--config",
default=DEFAULT_SETTINGS_FILE,
help="Application config file")
parser.add_argument('--interactive',
default=True,
help="When `true` ask for confirmation")
    parser.add_argument('--rows',
                        default=100,
                        type=int,
                        help="Number of rows/batch sent to the server")
args = parser.parse_args()
if args.version:
import sys
print("olass, version {}".format(__version__))
sys.exit()
app = OlassClient(config_file=args.config,
interactive=args.interactive,
rows_per_batch=args.rows)
app.run()
if __name__ == "__main__":
main()
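# Example invocations (a sketch):
#   python olass/run.py --version
#   python olass/run.py -c config/settings.py --rows 50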
|
romses/LXC-Web-Panel
|
tests/api.py
|
import unittest
import urllib2
import shutil
import json
import os
from flask.ext.testing import LiveServerTestCase
from lwp.app import app
from lwp.utils import connect_db
token = 'myrandomapites0987'
class TestApi(LiveServerTestCase):
db = None
type_json = {'Content-Type': 'application/json'}
def create_app(self):
shutil.copyfile('lwp.db', '/tmp/db.sql')
self.db = connect_db('/tmp/db.sql')
self.db.execute('insert into api_tokens(description, token) values(?, ?)', ['test', token])
self.db.commit()
app.config['DATABASE'] = '/tmp/db.sql'
return app
def test_00_get_containers(self):
shutil.rmtree('/tmp/lxc/', ignore_errors=True)
request = urllib2.Request(self.get_server_url() + '/api/v1/containers/',
headers={'Private-Token': token})
response = urllib2.urlopen(request)
self.assertEqual(response.code, 200)
#assert isinstance(response.read(), list)
def test_01_put_containers(self):
data = {'name': 'test_vm_sshd', 'template': 'sshd'}
request = urllib2.Request(self.get_server_url() + '/api/v1/containers/', json.dumps(data),
headers={'Private-Token': token, 'Content-Type': 'application/json' })
request.get_method = lambda: 'PUT'
response = urllib2.urlopen(request)
self.assertEqual(response.code, 200)
assert data['name'] in os.listdir('/tmp/lxc')
def test_02_post_containers(self):
data = {'action': 'start'}
request = urllib2.Request(self.get_server_url() + '/api/v1/containers/test_vm_sshd', json.dumps(data),
headers={'Private-Token': token, 'Content-Type': 'application/json'})
request.get_method = lambda: 'POST'
response = urllib2.urlopen(request)
self.assertEqual(response.code, 200)
def test_03_delete_containers(self):
request = urllib2.Request(self.get_server_url() + '/api/v1/containers/test_vm_sshd',
headers={'Private-Token': token})
request.get_method = lambda: 'DELETE'
response = urllib2.urlopen(request)
self.assertEqual(response.code, 200)
def test_04_post_token(self):
data = {'token': 'test'}
request = urllib2.Request(self.get_server_url() + '/api/v1/tokens/', json.dumps(data),
headers={'Private-Token': token, 'Content-Type': 'application/json'})
response = urllib2.urlopen(request)
self.assertEqual(response.code, 200)
def test_05_delete_token(self):
request = urllib2.Request(self.get_server_url() + '/api/v1/tokens/test',
headers={'Private-Token': token})
request.get_method = lambda: 'DELETE'
response = urllib2.urlopen(request)
self.assertEqual(response.code, 200)
if __name__ == '__main__':
unittest.main()
|
d53dave/cgopt
|
csaopt/instancemanager/awstools.py
|
import boto3
import logging
import time
from string import Template
from pyhocon import ConfigTree
from botocore.exceptions import ClientError
from typing import List, Any, Tuple, Dict
from . import Instance
from .instancemanager import InstanceManager
from ..utils import random_str, random_int
log = logging.getLogger()
def _interpolate_userscript_template_vals(script: bytes, **kwargs: str) -> bytes:
return Template(script.decode('utf-8')).substitute(kwargs).encode()
def _has_exit_status(instance) -> bool:
instance.reload()
return instance.state['Name'] == 'shutting-down' or instance.state['Name'] == 'terminated'
class AWSTools(InstanceManager):
"""The AWSTools class provides an abstraction over boto3 and EC2 for the use with CSAOpt
This is a context manager and creates required instances on `__enter__()`, disposing of the managed instances in
`__exit__()`. These two methods as well as :meth:`instancemanager.awstools.AWSTools.get_running_instances` are the
only methods called by the Runner (i.e. the only public methods).
This class will use boto3 to (1) create a security group, (2) configure ingress to the broker backend (currently
Redis, as used by Dramatiq). It then (3) creates as many worker instances as requested and runs 'user-data' scripts
    after startup, which is to say, bash scripts that set up the required software (Redis, CSAOpt Worker, etc.).
After the run AWSTools (4) terminates all managed instances and removes the security group.
Note:
If the AWS credentials are not provided in the config file, boto3 will look into
the following environment variables: `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY`
How to create IAM credentials (i.e. AWS keys):
* Create (or reuse) IAM user with programmatic access
* Assign to a (potentially new) group with AmazonEC2FullAccess
* Store the access key and secret key
Args:
config: Configuration for current optimization run
internal_conf: Internal CSAOpt configuration
"""
def __init__(self, config: ConfigTree, internal_conf: ConfigTree) -> None:
self.region = config.get('remote.aws.region', internal_conf['remote.aws.default_region'])
if config.get('remote.aws.secret_key', False) and config.get('remote.aws.access_key', False):
self.ec2_resource: boto3.session.Session.resource = boto3.resource(
'ec2',
aws_access_key_id=config['remote.aws.access_key'],
aws_secret_access_key=config['remote.aws.secret_key'],
region_name=self.region)
else:
# This will look for the env variables
self.ec2_resource: boto3.session.Session.resource = boto3.resource('ec2', region_name=self.region)
self.ec2_client = self.ec2_resource.meta.client
# ec2.Instance is of <class 'boto3.resources.factory.ec2.Instance'> but this cannot be
# used as a type hint here because it is generated by the factory at runtime, I assume.
self.workers: List[Any] = []
self.broker: Any = None
self.security_group_prefix: str = internal_conf.get('remote.aws.security_group_prefix', 'csaopt_')
self.security_group_id: str = ''
self.worker_count: int = config['remote.aws.worker_count']
worker_ami_key = 'remote.aws.worker_ami'
broker_ami_key = 'remote.aws.broker_ami'
self.broker_ami = config.get(broker_ami_key, internal_conf[broker_ami_key])
self.worker_ami = config.get(worker_ami_key, internal_conf[worker_ami_key])
self.timeout_provision = config.get('remote.aws.timeout_provision',
internal_conf['remote.aws.timeout_provision'])
self.timeout_startup = config.get('remote.aws.timeout_startup', internal_conf['remote.aws.timeout_startup'])
self.broker_port = internal_conf.get('broker.defaults.remote_port')
self.broker_password = config.get('remote.aws.instances.broker_password', None)
if self.broker_password is None:
self.broker_password = random_str(32)
self.debug_on_cpu = config.get('debug.gpu_simulator', '')
self.terminate_on_exit = config.get('remote.terminate_on_exit', False)
self.use_existing_instances = False
existing_instances = config.get('remote.aws.instances', None)
if existing_instances is not None:
self.use_existing_instances = True
self.existing_instances = existing_instances
self.provision_args: Dict[str, str] = {
'broker_image':
config.get('remote.aws.broker_ami', internal_conf['remote.aws.broker_ami']),
'worker_image':
config.get('remote.aws.worker_ami', internal_conf['remote.aws.worker_ami']),
'broker_instance_type':
config.get('remote.aws.broker_instance_type', internal_conf['remote.aws.broker_instance_type']),
'worker_instance_type':
config.get('remote.aws.worker_instance_type', internal_conf['remote.aws.worker_instance_type'])
}
data_base = internal_conf['remote.aws.userdata_rel_path']
with open(data_base + '-broker.sh', 'rb') as broker_data, open(data_base + '-worker.sh', 'rb') as worker_data:
self.user_data_scripts: Dict[str, bytes] = {'broker': broker_data.read(), 'worker': worker_data.read()}
def _get_from_ids(self, broker_id: str, worker_ids: List[str]) -> Tuple[Any, Any]:
broker = self.ec2_resource.Instance(broker_id)
workers = map(lambda worker_id: self.ec2_resource.Instance(worker_id), worker_ids)
return broker, list(workers)
def _provision_instances(self, timeout_ms: int, count: int = 2, **kwargs: str) -> Tuple[Any, Any]:
"""Start and configure instances
Args:
timeout_ms: General timeout for the provisioning of requested instances
count: number of worker instances to be created
kwargs: Any other parameters that are required for startup
"""
broker_userdata = _interpolate_userscript_template_vals(
self.user_data_scripts['broker'], external_port=self.broker_port, redis_password=self.broker_password)
broker = self.ec2_resource.create_instances(
ImageId=kwargs['broker_image'],
MinCount=1,
MaxCount=1,
UserData=broker_userdata,
SecurityGroupIds=[self.security_group_id],
InstanceType=kwargs['broker_instance_type'])[0]
worker_userdata = _interpolate_userscript_template_vals(
self.user_data_scripts['worker'],
debug='1' if self.debug_on_cpu else 'off',
redis_host=broker.private_ip_address,
redis_port=self.broker_port,
redis_password=self.broker_password)
workers = self.ec2_resource.create_instances(
ImageId=kwargs['worker_image'],
MinCount=count,
MaxCount=count,
InstanceType=kwargs['worker_instance_type'],
UserData=worker_userdata,
SecurityGroupIds=[self.security_group_id])
return broker, workers
def __map_ec2_instance(self, instance: Any, is_broker: bool = False, **kwargs: Any) -> Instance:
"""Maps a boto/EC2 instance to the internal Instance type
Args:
instance: Instance object returned by boto3 (which has a runtime type and therefore untyped here)
is_broker: Flag indicating whether a given instance is a broker or not
kwargs: Any other parameters that should be available on the produced object
Returns:
An abstract instance object
"""
return Instance(instance.id, instance.public_ip_address, is_broker=is_broker, **kwargs)
def get_running_instances(self) -> Tuple[Instance, List[Instance]]:
"""Update and get currently managed instances
Returns:
A tuple of broker, [worker]
"""
self.broker.reload()
for worker in self.workers:
worker.reload()
broker_instance = self.__map_ec2_instance(
instance=self.broker, is_broker=True, port=self.broker_port, password=self.broker_password)
worker_instances = [self.__map_ec2_instance(w, queue_id=w.id) for w in self.workers]
return broker_instance, worker_instances
def _terminate_instances(self, timeout_ms: int) -> None:
"""Terminate all instances managed by AWSTools
Args:
timeout_ms: Timeout, in milliseconds, for the termination
"""
instance_ids = [self.broker.id] + [instance.id for instance in self.workers]
self.ec2_client.terminate_instances(InstanceIds=instance_ids)
def _wait_for_instances(self) -> None:
"""Block until broker and workers are up"""
self.broker.wait_until_running()
for worker in self.workers:
worker.wait_until_running()
def _run_start_scripts(self, timeout_ms: int) -> None:
"""Run any required setup procedures after the initial startup of managed instances
Args:
timeout_ms: Timeout, in milliseconds, for the termination
"""
raise NotImplementedError
def __enter__(self) -> InstanceManager:
"""On enter, AWSTools prepares the AWS security group and spins up the required intances
"""
if not self.use_existing_instances:
self.security_group_id = self._create_sec_group(self.security_group_prefix + random_str(10))
self.broker, self.workers = self._provision_instances(
count=self.worker_count, timeout_ms=self.timeout_provision, **self.provision_args)
log.debug('Provision Instances returned: {}, {}. Waiting for instances now'.format(
self.broker, self.workers))
else:
self.security_group_id = self.existing_instances['security_group']
self.broker, self.workers = self._get_from_ids(self.existing_instances['broker'],
self.existing_instances['workers'])
self._wait_for_instances()
log.debug('Waiting for instances returned')
return self
def __exit__(self, exc_type, exc_value, traceback):
"""On exit, AWSTools terminates the started instances and removes security groups"""
log.debug('Entered awstools\' __exit__ method with traceback: {}'.format(traceback))
if not self.terminate_on_exit:
return False
self._terminate_instances(self.timeout_provision)
log.debug('Terminate Instances call returned, waiting for termination')
all_instances = [self.broker] + self.workers
        while any(not _has_exit_status(instance) for instance in all_instances):
log.debug('Waiting for instances to enter "shutting-down" or "terminated" state: {}'.format(
[(i.id, i.state) for i in all_instances]))
time.sleep(2.0)
log.debug('Remove Security Group')
self._remove_sec_group(self.security_group_id)
return False
def _remove_sec_group(self, group_id: str) -> None:
"""Removes the security group created by CSAOpt
Args:
group_id: Security group Id of group to be deleted
"""
if group_id is not None:
try:
self.ec2_client.delete_security_group(GroupId=group_id)
log.debug('Security group [{}] deleted'.format(group_id))
except ClientError as e:
log.error('Could not remove security group: {}'.format(e))
else:
log.warning('Cannot remove security group, because none was created. Skipping...')
def _create_sec_group(self, name: str) -> str:
"""Creates an AWS security group and assigns ingress permissions from the current network
Args:
name: Name of the security group
Returns:
AWS Identifier `GroupId` of the created security group
"""
try:
response = self.ec2_client.create_security_group(GroupName=name, Description='Security Group for CSAOpt')
security_group_id = response['GroupId']
log.debug('Created Security Group: ' + security_group_id)
data = self.ec2_client.authorize_security_group_ingress(
GroupId=security_group_id,
IpPermissions=[
{
'IpProtocol': 'tcp',
'FromPort': self.broker_port,
'ToPort': self.broker_port,
'IpRanges': [{
'CidrIp': '0.0.0.0/0'
}]
},
{ # Allow communication within the sec group
'IpProtocol': '-1',
'UserIdGroupPairs': [{
'GroupId': security_group_id
}]
}
])
log.debug('Authorized Security Group Ingress with result: {}'.format(data))
data = self.ec2_client.authorize_security_group_egress(
GroupId=security_group_id,
IpPermissions=[{ # Allow communication within the sec group
'IpProtocol': '-1',
'UserIdGroupPairs': [{
'GroupId': security_group_id
}]
}])
log.debug('Authorized Security Group Egress with result: {}'.format(data))
return security_group_id
        except ClientError:
            log.exception('Could not create Security Group')
raise
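# A hedged usage sketch (config and internal_conf are pyhocon ConfigTrees
# loaded elsewhere; the names here are placeholders):
#   with AWSTools(config, internal_conf) as manager:
#       broker, workers = manager.get_running_instances()
#       ...  # dispatch work via the broker
# On __exit__ the managed instances are terminated and the security group
# removed, provided remote.terminate_on_exit is set.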
|
lmazuel/azure-sdk-for-python
|
azure-mgmt-machinelearningcompute/azure/mgmt/machinelearningcompute/models/__init__.py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .resource import Resource
from .error_detail import ErrorDetail
from .error_response import ErrorResponse
from .error_response_wrapper import ErrorResponseWrapper, ErrorResponseWrapperException
from .storage_account_properties import StorageAccountProperties
from .container_registry_properties import ContainerRegistryProperties
from .service_principal_properties import ServicePrincipalProperties
from .kubernetes_cluster_properties import KubernetesClusterProperties
from .system_service import SystemService
from .acs_cluster_properties import AcsClusterProperties
from .app_insights_properties import AppInsightsProperties
from .ssl_configuration import SslConfiguration
from .service_auth_configuration import ServiceAuthConfiguration
from .auto_scale_configuration import AutoScaleConfiguration
from .global_service_configuration import GlobalServiceConfiguration
from .operationalization_cluster import OperationalizationCluster
from .operationalization_cluster_update_parameters import OperationalizationClusterUpdateParameters
from .storage_account_credentials import StorageAccountCredentials
from .container_registry_credentials import ContainerRegistryCredentials
from .container_service_credentials import ContainerServiceCredentials
from .app_insights_credentials import AppInsightsCredentials
from .operationalization_cluster_credentials import OperationalizationClusterCredentials
from .check_system_services_updates_available_response import CheckSystemServicesUpdatesAvailableResponse
from .update_system_services_response import UpdateSystemServicesResponse
from .resource_operation_display import ResourceOperationDisplay
from .resource_operation import ResourceOperation
from .available_operations import AvailableOperations
from .operationalization_cluster_paged import OperationalizationClusterPaged
from .machine_learning_compute_management_client_enums import (
OperationStatus,
ClusterType,
OrchestratorType,
SystemServiceType,
AgentVMSizeTypes,
Status,
UpdatesAvailable,
)
__all__ = [
'Resource',
'ErrorDetail',
'ErrorResponse',
'ErrorResponseWrapper', 'ErrorResponseWrapperException',
'StorageAccountProperties',
'ContainerRegistryProperties',
'ServicePrincipalProperties',
'KubernetesClusterProperties',
'SystemService',
'AcsClusterProperties',
'AppInsightsProperties',
'SslConfiguration',
'ServiceAuthConfiguration',
'AutoScaleConfiguration',
'GlobalServiceConfiguration',
'OperationalizationCluster',
'OperationalizationClusterUpdateParameters',
'StorageAccountCredentials',
'ContainerRegistryCredentials',
'ContainerServiceCredentials',
'AppInsightsCredentials',
'OperationalizationClusterCredentials',
'CheckSystemServicesUpdatesAvailableResponse',
'UpdateSystemServicesResponse',
'ResourceOperationDisplay',
'ResourceOperation',
'AvailableOperations',
'OperationalizationClusterPaged',
'OperationStatus',
'ClusterType',
'OrchestratorType',
'SystemServiceType',
'AgentVMSizeTypes',
'Status',
'UpdatesAvailable',
]
|
disqus/Diamond
|
src/collectors/snmpraw/snmpraw.py
|
# coding=utf-8
"""
The SNMPRawCollector is designed for collecting data from SNMP-enabled devices,
using a set of specified OIDs
#### Configuration
Below is an example configuration for the SNMPRawCollector. The collector
can collect data from any number of devices by adding configuration sections
under the *devices* header. By default the collector will collect every 60
seconds. This might be a bit excessive and put unnecessary load on the
devices being polled. You may wish to change this to every 300 seconds;
however, you will need to modify your Graphite data retentions to handle
this properly.
```
# Options for SNMPRawCollector
enabled = True
interval = 60
[devices]
# Start the device configuration
# Note: this name will be used in the metric path.
[[my-identification-for-this-host]]
host = localhost
port = 161
community = public
# Start the OID list for this device
# Note: the value part will be used in the metric path.
[[[oids]]]
1.3.6.1.4.1.2021.10.1.3.1 = cpu.load.1min
1.3.6.1.4.1.2021.10.1.3.2 = cpu.load.5min
1.3.6.1.4.1.2021.10.1.3.3 = cpu.load.15min
# If you want another host, you can. But you probably won't need it.
[[another-identification]]
host = router1.example.com
port = 161
community = public
[[[oids]]]
oid = metric.path
oid = metric.path
```
Note: If you modify the SNMPRawCollector configuration, you will need to
restart diamond.
#### Dependencies
* pysnmp (which depends on pyasn1 0.1.7 and pycrypto)
"""
import os
import sys
import time
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)),
'snmp'))
from snmp import SNMPCollector as parent_SNMPCollector
from diamond.metric import Metric
class SNMPRawCollector(parent_SNMPCollector):
def __init__(self, *args, **kwargs):
super(SNMPRawCollector, self).__init__(*args, **kwargs)
# list to save non-existing oid's per device, to avoid repetition of
# errors in logging. restart diamond/collector to flush this
self.skip_list = []
def get_default_config(self):
"""
Override SNMPCollector.get_default_config method to provide
default_config for the SNMPInterfaceCollector
"""
default_config = super(SNMPRawCollector,
self).get_default_config()
default_config.update({
'oids': {},
'path_prefix': 'servers',
'path_suffix': 'snmp',
})
return default_config
def _precision(self, value):
"""
Return the precision of the number
"""
value = str(value)
decimal = value.rfind('.')
if decimal == -1:
return 0
return len(value) - decimal - 1
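        # e.g. (a sketch): _precision('42') == 0, _precision('3.14') == 2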
def _skip(self, device, oid, reason=None):
self.skip_list.append((device, oid))
if reason is not None:
self.log.warn('Muted \'{0}\' on \'{1}\', because: {2}'.format(
oid, device, reason))
def _get_value_walk(self, device, oid, host, port, community):
data = self.walk(oid, host, port, community)
if data is None:
self._skip(device, oid, 'device down (#2)')
return
self.log.debug('Data received from WALK \'{0}\': [{1}]'.format(
device, data))
if len(data) != 1:
self._skip(
device,
oid,
'unexpected response, data has {0} entries'.format(
len(data)))
return
# because we only allow 1-key dicts, we can pick with absolute index
value = data.items()[0][1]
return value
def _get_value(self, device, oid, host, port, community):
data = self.get(oid, host, port, community)
if data is None:
self._skip(device, oid, 'device down (#1)')
return
self.log.debug('Data received from GET \'{0}\': [{1}]'.format(
device, data))
if len(data) == 0:
self._skip(device, oid, 'empty response, device down?')
return
if oid not in data:
# oid is not even in hierarchy, happens when using 9.9.9.9
# but not when using 1.9.9.9
self._skip(device, oid, 'no object at OID (#1)')
return
value = data[oid]
if value == 'No Such Object currently exists at this OID':
self._skip(device, oid, 'no object at OID (#2)')
return
if value == 'No Such Instance currently exists at this OID':
return self._get_value_walk(device, oid, host, port, community)
return value
def collect_snmp(self, device, host, port, community):
"""
Collect SNMP interface data from device
"""
self.log.debug(
'Collecting raw SNMP statistics from device \'{0}\''.format(device))
dev_config = self.config['devices'][device]
if 'oids' in dev_config:
for oid, metricName in dev_config['oids'].items():
if (device, oid) in self.skip_list:
self.log.debug(
'Skipping OID \'{0}\' ({1}) on device \'{2}\''.format(
oid, metricName, device))
continue
timestamp = time.time()
value = self._get_value(device, oid, host, port, community)
if value is None:
continue
self.log.debug(
'\'{0}\' ({1}) on device \'{2}\' - value=[{3}]'.format(
oid, metricName, device, value))
path = '.'.join([self.config['path_prefix'], device,
self.config['path_suffix'], metricName])
metric = Metric(path=path, value=value, timestamp=timestamp,
precision=self._precision(value),
metric_type='GAUGE')
self.publish_metric(metric)
|
timj/scons
|
test/D/HSTeoh/sconstest-singleStringCannotBeMultipleOptions_dmd.py
|
"""
Test compiling and executing using the dmd tool.
"""
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
from Common.singleStringCannotBeMultipleOptions import testForTool
testForTool('dmd')
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
datahuborg/datahub
|
src/core/db/backend/pg.py
|
import re
import os
import errno
import shutil
import hashlib
from collections import namedtuple
from uuid import uuid4
import psycopg2
import core.db.query_rewriter
from psycopg2.extensions import AsIs
from psycopg2.pool import ThreadedConnectionPool
from psycopg2 import errorcodes
from core.db.licensemanager import LicenseManager
from core.db.errors import PermissionDenied
from config import settings
"""
DataHub internal APIs for postgres repo_base
"""
HOST = settings.DATABASES['default']['HOST']
PORT = 5432
if settings.DATABASES['default']['PORT'] != '':
    try:
        PORT = int(settings.DATABASES['default']['PORT'])
    except ValueError:
        pass
# Maintain a separate db connection pool for each (user, password, database)
# tuple.
connection_pools = {}
PoolKey = namedtuple('PoolKey', 'user, password, repo_base')
def _pool_for_credentials(user, password, repo_base, create_if_missing=True):
pool_key = PoolKey(user, password, repo_base)
# Create a new pool if one doesn't exist or if the existing one has been
# closed. Normally a pool should only be closed during testing, to force
# all hanging connections to a database to be closed.
if pool_key not in connection_pools or connection_pools[pool_key].closed:
if create_if_missing is False:
return None
        # Maintains up to 10 connections per pool.
        # Raises "PoolError: connection pool exhausted" if a thread tries
        # holding onto more than 10 connections to a single database.
connection_pools[pool_key] = ThreadedConnectionPool(
0,
10,
user=user,
password=password,
host=HOST,
port=PORT,
database=repo_base)
return connection_pools[pool_key]
def _close_all_connections(repo_base):
for key, pool in connection_pools.iteritems():
if repo_base == key.repo_base and not pool.closed:
pool.closeall()
def _convert_pg_exception(e):
# Convert some psycopg2 errors into exceptions meaningful to
# Django.
if (e.pgcode == errorcodes.INSUFFICIENT_PRIVILEGE):
raise PermissionDenied()
if (e.pgcode == errorcodes.INVALID_PARAMETER_VALUE or
e.pgcode == errorcodes.UNDEFINED_OBJECT):
raise ValueError("Invalid parameter in query.")
if e.pgcode == errorcodes.INVALID_SCHEMA_NAME:
error = ('Repo not found. '
'You must specify a repo in your query. '
'i.e. select * from REPO_NAME.TABLE_NAME. ')
raise LookupError(error)
if e.pgcode == errorcodes.UNDEFINED_TABLE:
raise LookupError("Table or view not found.")
if e.pgcode == errorcodes.DUPLICATE_SCHEMA:
raise ValueError("A repo with that name already exists.")
if e.pgcode == errorcodes.DUPLICATE_TABLE:
raise ValueError("A table with that name already exists.")
raise e
class PGBackend:
def __init__(self, user, password, host=HOST, port=PORT, repo_base=None):
self.user = user
self.password = password
self.host = host
self.port = port
self.repo_base = repo_base
self.connection = None
# row level security is enabled unless the user is a superuser
self.row_level_security = bool(
user != settings.DATABASES['default']['USER'])
# We only need a query rewriter if RLS is enabled
if self.row_level_security:
self.query_rewriter = core.db.query_rewriter.SQLQueryRewriter(
self.repo_base, self.user)
self.__open_connection__()
def __del__(self):
self.close_connection()
def __open_connection__(self):
pool = _pool_for_credentials(self.user, self.password, self.repo_base)
self.connection = pool.getconn()
self.connection.set_isolation_level(
psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
def change_repo_base(self, repo_base):
self.close_connection()
self.repo_base = repo_base
self.__open_connection__()
def close_connection(self):
pool = _pool_for_credentials(self.user, self.password, self.repo_base,
create_if_missing=False)
if self.connection and pool and not pool.closed:
pool.putconn(self.connection, close=True)
self.connection = None
def _check_for_injections(self, noun):
"""
Raises ValueError if the proposed noun is invalid.
Valid nouns contain only alphanumeric characters and underscores, and
must not begin or end with an underscore.
"""
invalid_noun_msg = (
"Usernames and repo names may only contain "
"alphanumeric characters and underscores, must begin with a "
"letter, and must not begin or end with an underscore."
)
regex = r'^(?![\_\d])[\w\_]+(?<![\_])$'
valid_pattern = re.compile(regex)
matches = valid_pattern.match(noun)
if matches is None:
raise ValueError(invalid_noun_msg)
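        # e.g. (a sketch): 'my_repo1' passes; '_repo', 'repo_' and '1repo'
        # all raise ValueError.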
def _validate_table_name(self, noun):
"""
Raises ValueError if the proposed table name is invalid.
Valid table names contain only alphanumeric characters and underscores.
"""
invalid_noun_msg = (
"Table names may only contain "
"alphanumeric characters and underscores, must begin with a "
"letter, and must not begin or end with an underscore."
)
regex = r'^(?![\d])[\w\_]+(?<![\_])$'
valid_pattern = re.compile(regex)
matches = valid_pattern.match(noun)
if matches is None:
raise ValueError(invalid_noun_msg)
def create_repo(self, repo):
"""Creates a postgres schema for the user."""
self._check_for_injections(repo)
query = 'CREATE SCHEMA IF NOT EXISTS %s AUTHORIZATION %s'
params = (AsIs(repo), AsIs(self.user))
res = self.execute_sql(query, params)
return res['status']
def list_repos(self):
query = ('SELECT schema_name AS repo_name '
'FROM information_schema.schemata '
'WHERE schema_owner != %s')
params = (settings.DATABASES['default']['USER'],)
res = self.execute_sql(query, params)
return [t[0] for t in res['tuples']]
def rename_repo(self, repo, new_name):
self._check_for_injections(repo)
self._check_for_injections(new_name)
query = 'ALTER SCHEMA %s RENAME TO %s'
params = (AsIs(repo), AsIs(new_name))
res = self.execute_sql(query, params)
return res['status']
def delete_repo(self, repo, force=False):
"""Deletes a repo and the folder the user's repo files are in."""
self._check_for_injections(repo)
# drop the schema
query = 'DROP SCHEMA %s %s'
params = (AsIs(repo), AsIs('CASCADE') if force is True else AsIs(''))
res = self.execute_sql(query, params)
return res['status']
def add_collaborator(
self, repo, collaborator, db_privileges=[], license_id=None):
# check that all repo names, usernames, and privileges passed aren't
# sql injections
self._check_for_injections(repo)
self._check_for_injections(collaborator)
for privilege in db_privileges:
self._check_for_injections(privilege)
if license_id:
            query = (
                'BEGIN;'
                'GRANT USAGE ON SCHEMA %s TO %s;'
                'COMMIT;')
            # Two placeholders, so pass exactly two parameters; the
            # original eight-element tuple would make psycopg2 raise.
            params = (AsIs(repo), AsIs(collaborator))
res = self.execute_sql(query, params)
return res['status']
else:
query = ('BEGIN;'
'GRANT USAGE ON SCHEMA %s TO %s;'
'GRANT %s ON ALL TABLES IN SCHEMA %s TO %s;'
'ALTER DEFAULT PRIVILEGES IN SCHEMA %s '
'GRANT %s ON TABLES TO %s;'
'COMMIT;'
)
privileges_str = ', '.join(db_privileges)
params = [repo, collaborator, privileges_str, repo,
collaborator, repo, privileges_str, collaborator]
params = tuple(map(lambda x: AsIs(x), params))
res = self.execute_sql(query, params)
return res['status']
def add_collaborator_to_license_view(
self, repo, collaborator, view, db_privileges=[]):
# check that all repo names, usernames, and privileges passed aren't
# sql injections
self._check_for_injections(repo)
self._check_for_injections(collaborator)
for privilege in db_privileges:
self._check_for_injections(privilege)
query = ('BEGIN;'
'GRANT %s ON %s.%s TO %s;'
'ALTER DEFAULT PRIVILEGES IN SCHEMA %s '
'GRANT %s ON %s.%s TO %s;'
'COMMIT;'
)
privileges_str = ', '.join(db_privileges)
params = [
privileges_str, repo, view,
collaborator, repo, privileges_str,
repo, view, collaborator]
params = tuple(map(lambda x: AsIs(x), params))
res = self.execute_sql(query, params)
return res['status']
def delete_collaborator(self, repo, collaborator):
self._check_for_injections(repo)
self._check_for_injections(collaborator)
query = ('BEGIN;'
'REVOKE ALL ON ALL TABLES IN SCHEMA %s FROM %s CASCADE;'
'REVOKE ALL ON SCHEMA %s FROM %s CASCADE;'
'ALTER DEFAULT PRIVILEGES IN SCHEMA %s '
'REVOKE ALL ON TABLES FROM %s;'
'COMMIT;'
)
params = [repo, collaborator, repo, collaborator, repo, collaborator]
params = tuple(map(lambda x: AsIs(x), params))
res = self.execute_sql(query, params)
return res['status']
def create_license_view(self, repo_base, repo,
table, view_sql, license_id):
view_name = table.lower() + "_license_view_"+str(license_id)
res = self.create_view(repo, view_name, view_sql)
return res
def delete_license_view(self, repo_base, repo, license_view):
res = self.delete_view(repo, license_view)
return res
def get_view_sql(self, repo_base, repo, table, view_params, license_id):
# create view based on license
license = LicenseManager.find_license_by_id(license_id)
pii_def = license.pii_def
if license.pii_removed:
# remove columns
            query = ('SELECT column_name FROM information_schema.columns '
                     'WHERE table_schema = %s '
                     'AND table_name = %s'
                     )
params = (repo, table)
res = self.execute_sql(query, params)
columns = [t[0] for t in res['tuples']]
all_columns = set(columns)
removed_columns = set(view_params['removed-columns'])
columns_to_show = list(all_columns - removed_columns)
# if columns_to_show < 1:
# #error
# pass
            query = 'SELECT {} FROM {}.{}'.format(
                ', '.join(columns_to_show), repo, table)
return query
def create_table(self, repo, table, params):
# check for injections
self._check_for_injections(repo)
self._validate_table_name(table)
param_values = []
for obj in params:
param_values += obj.values()
for value in param_values:
self._check_for_injections(value)
query = ('CREATE TABLE %s.%s (%s)')
table_params = ''
for obj in params:
table_params += obj['column_name']
table_params += ' '
table_params += obj['data_type']
table_params += ', '
table_params = table_params[:-2]
params = (AsIs(repo), AsIs(table), AsIs(table_params))
res = self.execute_sql(query, params)
return res['status']
def list_tables(self, repo):
self._check_for_injections(repo)
all_repos = self.list_repos()
if repo not in all_repos:
raise LookupError('Invalid repository name: %s' % (repo))
query = ('SELECT table_name FROM information_schema.tables '
'WHERE table_schema = %s AND table_type = \'BASE TABLE\';'
)
params = (repo,)
res = self.execute_sql(query, params)
return [t[0] for t in res['tuples']]
def describe_table(self, repo, table, detail=False):
query = ("SELECT %s "
"FROM information_schema.columns "
"WHERE table_schema = %s and table_name = %s;")
params = None
if detail:
params = (AsIs('*'), repo, table)
else:
params = (AsIs('column_name, data_type'), repo, table)
res = self.execute_sql(query, params)
return res['tuples']
def list_table_permissions(self, repo, table):
query = ("select privilege_type from "
"information_schema.role_table_grants where table_schema=%s "
"and table_name=%s and grantee=%s")
params = (repo, table, self.user)
res = self.execute_sql(query, params)
return res['tuples']
def create_view(self, repo, view, sql):
self._check_for_injections(repo)
self._validate_table_name(view)
query = ('CREATE VIEW %s.%s AS (%s)')
params = (
AsIs(repo), AsIs(view),
AsIs(sql))
res = self.execute_sql(query, params)
return res['status']
def list_views(self, repo):
self._check_for_injections(repo)
all_repos = self.list_repos()
if repo not in all_repos:
raise LookupError('Invalid repository name: %s' % (repo))
query = ('SELECT table_name FROM information_schema.tables '
'WHERE table_schema = %s '
'AND table_type = \'VIEW\';')
params = (repo,)
res = self.execute_sql(query, params)
return [t[0] for t in res['tuples']]
def delete_view(self, repo, view, force=False):
self._check_for_injections(repo)
self._validate_table_name(view)
force_param = 'RESTRICT'
if force:
force_param = 'CASCADE'
query = ('DROP VIEW %s.%s.%s %s')
params = (AsIs(self.repo_base), AsIs(repo), AsIs(view),
AsIs(force_param))
res = self.execute_sql(query, params)
return res['status']
def describe_view(self, repo, view, detail=False):
query = ("SELECT %s "
"FROM information_schema.columns "
"WHERE table_schema = %s and table_name = %s;")
params = None
if detail:
params = (AsIs('*'), repo, view)
else:
params = (AsIs('column_name, data_type'), repo, view)
res = self.execute_sql(query, params)
return res['tuples']
def delete_table(self, repo, table, force=False):
self._check_for_injections(repo)
self._validate_table_name(table)
force_param = 'RESTRICT'
if force:
force_param = 'CASCADE'
query = ('DROP TABLE %s.%s.%s %s')
params = (AsIs(self.repo_base), AsIs(repo), AsIs(table),
AsIs(force_param))
res = self.execute_sql(query, params)
return res['status']
def clone_table(self, repo, table, new_table):
self._validate_table_name(table)
self._validate_table_name(new_table)
query = 'CREATE TABLE %s.%s AS SELECT * FROM %s.%s'
params = (AsIs(repo), AsIs(new_table), AsIs(repo), AsIs(table))
res = self.execute_sql(query, params)
return res['status']
def get_schema(self, repo, table):
self._check_for_injections(repo)
self._validate_table_name(table)
query = ('SELECT column_name, data_type '
'FROM information_schema.columns '
'WHERE table_name = %s '
'AND table_schema = %s;'
)
params = (table, repo)
res = self.execute_sql(query, params)
if res['row_count'] < 1:
raise NameError("Invalid reference: '%s.%s'.\n" % (repo, table))
# return will look like [('id', 'integer'), ('words', 'text')]
return res['tuples']
def explain_query(self, query):
"""
returns the number of rows, the cost (in time) to execute,
and the width (bytes) of rows outputted
"""
# if it's a select query, return a different set of defaults
        select_query = query.split()[0].lower() == 'select'
if not select_query:
response = {'num_rows': 1, 'time_cost': 0, 'byte_width': 0}
return response
query = 'EXPLAIN %s' % (query)
res = self.execute_sql(query)
        num_rows = re.match(r'.*rows=(\d+).*', res['tuples'][0][0]).group(1)
        byte_width = re.match(r'.*width=(\d+).*', res['tuples'][0][0]).group(1)
        time_cost_re = re.match(
            r'.*cost=(\d+\.\d+)\.\.(\d+\.\d+).*', res['tuples'][0][0])
        time_cost = (float(time_cost_re.group(1)),
                     float(time_cost_re.group(2)))
response = {'num_rows': int(num_rows),
'time_cost': time_cost,
'byte_width': int(byte_width)
}
return response
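    # For reference, the regexes above parse the first line of Postgres
    # EXPLAIN output, which looks like (values illustrative):
    #
    #   Seq Scan on foo  (cost=0.00..35.50 rows=2550 width=4)
    #
    # yielding num_rows=2550, byte_width=4 and time_cost=(0.0, 35.5).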
def limit_and_offset_select_query(self, query, limit, offset):
query = query.strip().rstrip(';')
# is it a select query?
        select_query = query.split()[0].lower() == 'select'
# return select query
if select_query:
query = ('select * from ( %s ) '
'as BXCQWVPEMWVKFBEBNKZSRPYBSB '
'LIMIT %s OFFSET %s;'
% (query, limit, offset))
return {'select_query': select_query, 'query': query}
def select_table_query(self, repo_base, repo, table):
dh_table_name = '%s.%s.%s' % (repo_base, repo, table)
query = 'SELECT * FROM %s;' % (dh_table_name)
return query
def execute_sql(self, query, params=None):
result = {
'status': False,
'row_count': 0,
'tuples': [],
'fields': []
}
query = query.strip()
cur = self.connection.cursor()
try:
sql_query = cur.mogrify(query, params)
if self.row_level_security:
sql_query = self.query_rewriter.apply_row_level_security(
sql_query)
cur.execute(sql_query)
except psycopg2.Error as e:
# Convert some psycopg2 errors into exceptions meaningful to
# Django.
_convert_pg_exception(e)
        # Statements that return no rows (e.g. INSERT or DDL) make
        # fetchall() raise a ProgrammingError; treat that as an empty
        # result set.
        try:
            result['tuples'] = cur.fetchall()
        except psycopg2.ProgrammingError:
            pass
result['status'] = True
result['row_count'] = cur.rowcount
if cur.description:
result['fields'] = [
{'name': col[0], 'type': col[1]} for col in cur.description]
cur.close()
return result
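    # Illustrative call (values assumed): for a query that returns rows,
    #
    #   res = self.execute_sql('SELECT 1')
    #
    # yields {'status': True, 'row_count': 1, 'tuples': [(1,)],
    # 'fields': [{'name': '?column?', 'type': <oid int>}]}.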
def user_exists(self, username):
query = "SELECT 1 FROM pg_roles WHERE rolname=%s"
params = (username,)
result = self.execute_sql(query, params)
return (result['row_count'] > 0)
def database_exists(self, db_name):
query = "SELECT 1 FROM pg_database WHERE datname=%s"
params = (db_name,)
result = self.execute_sql(query, params)
return (result['row_count'] > 0)
def create_user(self, username, password, create_db=True):
self._check_for_injections(username)
query = ('CREATE ROLE %s WITH LOGIN '
'NOCREATEDB NOCREATEROLE NOCREATEUSER PASSWORD %s')
params = (AsIs(username), password)
self.execute_sql(query, params)
# Don't do this in the case of the public user.
if username != settings.PUBLIC_ROLE:
query = ('GRANT %s to %s')
params = (AsIs(settings.PUBLIC_ROLE), AsIs(username))
self.execute_sql(query, params)
if create_db:
return self.create_user_database(username)
def create_user_database(self, username):
        # lines need to be executed separately because
# "CREATE DATABASE cannot be executed from a
# function or multi-command string"
self._check_for_injections(username)
query = 'CREATE DATABASE %s; '
params = (AsIs(username),)
self.execute_sql(query, params)
query = 'ALTER DATABASE %s OWNER TO %s; '
params = (AsIs(username), AsIs(username))
return self.execute_sql(query, params)
def remove_user(self, username):
self._check_for_injections(username)
query = 'DROP ROLE %s;'
params = (AsIs(username),)
return self.execute_sql(query, params)
def drop_owned_by(self, username):
self._check_for_injections(username)
query = 'DROP OWNED BY %s CASCADE;' % (username)
params = (AsIs(username), )
return self.execute_sql(query, params)
def list_all_users(self):
query = 'SELECT usename FROM pg_catalog.pg_user WHERE usename != %s'
params = (self.user,)
res = self.execute_sql(query, params)
        return [user_tuple[0] for user_tuple in res['tuples']]
def list_all_databases(self):
query = ('SELECT datname FROM pg_database where datname NOT IN '
' (%s, \'template1\', \'template0\', '
' \'datahub\', \'test_datahub\', \'postgres\');'
)
params = (self.user, )
res = self.execute_sql(query, params)
        return [db_tuple[0] for db_tuple in res['tuples']]
def remove_database(self, database, revoke_collaborators=True):
self._check_for_injections(database)
# remove collaborator access to the database
if revoke_collaborators:
all_users = self.list_all_users()
for user in all_users:
query = "REVOKE ALL ON DATABASE %s FROM %s;"
params = (AsIs(database), AsIs(user))
self.execute_sql(query, params)
# Make sure to close all extant connections to this database or the
# drop will fail.
_close_all_connections(database)
# drop database
query = 'DROP DATABASE %s;'
params = (AsIs(database),)
try:
return self.execute_sql(query, params)
except psycopg2.ProgrammingError as e:
print(e)
            print('this probably happened because the postgres role '
                  'exists, but a database of the same name does not.')
def change_password(self, username, password):
self._check_for_injections(username)
query = 'ALTER ROLE %s WITH PASSWORD %s;'
params = (AsIs(username), password)
return self.execute_sql(query, params)
def list_collaborators(self, repo):
query = 'SELECT unnest(nspacl) FROM pg_namespace WHERE nspname=%s;'
params = (repo, )
res = self.execute_sql(query, params)
# postgres privileges
# r -- SELECT ("read")
# w -- UPDATE ("write")
# a -- INSERT ("append")
# d -- DELETE
# D -- TRUNCATE
# x -- REFERENCES
# t -- TRIGGER
# X -- EXECUTE
# U -- USAGE
# C -- CREATE
# c -- CONNECT
# T -- TEMPORARY
# arwdDxt -- ALL PRIVILEGES (for tables, varies for other objects)
# * -- grant option for preceding privilege
# /yyyy -- role that granted this privilege
collaborators = []
for row in res['tuples']:
# for reference, rows look like this:
# ('username=UC/repo_base',)
collab_obj = {}
username = row[0].split('=')[0].strip()
permissions = row[0].split('=')[1].split('/')[0]
collab_obj['username'] = username
collab_obj['db_permissions'] = permissions
collaborators.append(collab_obj)
return collaborators
def has_base_privilege(self, login, privilege):
"""
returns True or False for whether the user has privileges for the
repo_base (database)
"""
query = 'SELECT has_database_privilege(%s, %s);'
params = (login, privilege)
res = self.execute_sql(query, params)
return res['tuples'][0][0]
def has_repo_db_privilege(self, login, repo, privilege):
"""
        returns True or False for whether the user has privileges for the
repo (schema)
"""
query = 'SELECT has_schema_privilege(%s, %s, %s);'
params = (login, repo, privilege)
res = self.execute_sql(query, params)
return res['tuples'][0][0]
def has_table_privilege(self, login, table, privilege):
query = 'SELECT has_table_privilege(%s, %s, %s);'
params = (login, table, privilege)
res = self.execute_sql(query, params)
return res['tuples'][0][0]
def has_column_privilege(self, login, table, column, privilege):
query = 'SELECT has_column_privilege(%s, %s, %s, %s);'
params = (login, table, column, privilege)
res = self.execute_sql(query, params)
return res['tuples'][0][0]
def export_table(self, table_name, file_path, file_format='CSV',
delimiter=',', header=True):
words = table_name.split('.')
for word in words[:-1]:
self._check_for_injections(word)
self._validate_table_name(words[-1])
self._check_for_injections(file_format)
query = 'SELECT * FROM %s' % table_name
self.export_query(
query,
file_path,
file_format=file_format,
delimiter=delimiter,
header=header)
def export_view(self, view_name, file_path, file_format='CSV',
delimiter=',', header=True):
words = view_name.split('.')
for word in words[:-1]:
self._check_for_injections(word)
self._validate_table_name(words[-1])
self._check_for_injections(file_format)
query = 'SELECT * FROM %s' % view_name
self.export_query(
query,
file_path,
file_format=file_format,
delimiter=delimiter,
header=header)
def export_query(self, query, file_path, file_format='CSV',
delimiter=',', header=True):
"""
Runs a query as the current user and saves the result to a file.
query can be a sql query or table reference.
"""
header_option = 'HEADER' if header else ''
query = query.split(';')[0].strip()
self._check_for_injections(file_format)
self._check_for_injections(header_option)
meta_query = 'COPY (%s) TO STDOUT WITH %s %s DELIMITER %s;'
params = (AsIs(query), AsIs(file_format),
AsIs(header_option), delimiter)
cur = self.connection.cursor()
query = cur.mogrify(meta_query, params)
        # Store pending exports in a temporary location so they aren't
        # discoverable while being exported.
tmp_path = '/tmp/user_exports/{0}-{1}'.format(
uuid4().hex, hashlib.sha256(query).hexdigest())
try:
os.makedirs('/tmp/user_exports')
except OSError as e:
if e.errno != errno.EEXIST:
raise e
try:
with open(tmp_path, 'w') as f:
cur.copy_expert(query, f)
except psycopg2.Error as e:
# Delete the temporary files of failed exports.
os.remove(tmp_path)
_convert_pg_exception(e)
finally:
cur.close()
# Move successful exports into the user's data folder.
# os.rename() would fail here if /tmp and /user_data are stored on
# different filesystems, so use shutil.move() instead.
shutil.move(tmp_path, file_path)
def import_file(self, table_name, file_path, file_format='CSV',
delimiter=',', header=True, encoding='ISO-8859-1',
quote_character='"'):
header_option = 'HEADER' if header else ''
words = table_name.split('.')
for word in words[:-1]:
self._check_for_injections(word)
self._validate_table_name(words[-1])
self._check_for_injections(file_format)
query = 'COPY %s FROM %s WITH %s %s DELIMITER %s ENCODING %s QUOTE %s;'
params = (AsIs(table_name), file_path, AsIs(file_format),
AsIs(header_option), delimiter, encoding, quote_character)
try:
self.execute_sql(query, params)
except Exception as e:
self.execute_sql('DROP TABLE IF EXISTS %s', (AsIs(table_name),))
raise ImportError(e)
# Try importing using dbtruck. Was never enabled by anant.
# RogerTangos 2015-12-09
# return self.import_file_w_dbtruck(table_name, file_path)
def import_rows(self, repo, table, rows, delimiter=',', header=False):
# if there was a header, remove it
if header:
            rows = rows[1:]
query = 'INSERT INTO %s.%s values '
params = [AsIs(repo), AsIs(table)]
# prepare query
all_row_array = []
for row in rows:
# split the string into an array
row = row.split(delimiter)
# add the objects to params
params += row
# turn every item in the array into a %s
# and make a string out of it
            row_array = '(' + ', '.join(['%s'] * len(row)) + ')'
all_row_array.append(row_array)
all_row_string = ', '.join(all_row_array)
# finalize the query and params
query += all_row_string
params = tuple(params)
res = self.execute_sql(query, params)
return res['status']
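    # Sketch of what import_rows builds (hypothetical values): calling
    #
    #   import_rows('repo', 'tbl', ['1,foo', '2,bar'])
    #
    # mogrifies to INSERT INTO repo.tbl values ('1', 'foo'), ('2', 'bar');
    # note that every cell is passed through as a string parameter.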
def import_file_w_dbtruck(self, table_name, file_path):
# dbtruck is not tested for safety. At all. It's currently disabled
# in the project RogerTangos 2015-12-09
from dbtruck.dbtruck import import_datafiles
# from dbtruck.util import get_logger
from dbtruck.exporters.pg import PGMethods
dbsettings = {
'dbname': self.repo_base,
'hostname': self.host,
'username': self.user,
'password': self.password,
'port': self.port,
}
create_new = True
errfile = None
return import_datafiles([file_path], create_new, table_name, errfile,
PGMethods, **dbsettings)
# Methods for Licenses
def create_license_schema(self):
public_role = settings.PUBLIC_ROLE
schema = settings.LICENSE_SCHEMA
self._check_for_injections(public_role)
self._check_for_injections(schema)
query = 'CREATE SCHEMA IF NOT EXISTS %s AUTHORIZATION %s'
params = (AsIs(schema), AsIs(public_role))
return self.execute_sql(query, params)
def create_license_table(self):
schema = settings.LICENSE_SCHEMA
table = settings.LICENSE_TABLE
public_role = settings.PUBLIC_ROLE
self._check_for_injections(schema)
self._validate_table_name(table)
self._check_for_injections(public_role)
query = ('CREATE TABLE IF NOT EXISTS %s.%s'
'(license_id serial primary key,'
'license_name VARCHAR(40),'
'pii_def VARCHAR(100) NOT NULL,'
'pii_removed boolean NOT NULL,'
'pii_anonymized boolean NOT NULL);')
params = (AsIs(schema), AsIs(table))
self.execute_sql(query, params)
# grant the public role access to the table
query = ('GRANT ALL ON %s.%s to %s;')
params = (AsIs(schema), AsIs(table), AsIs(public_role))
return self.execute_sql(query, params)
def create_license_link_table(self):
schema = settings.LICENSE_LINK_SCHEMA
table = settings.LICENSE_LINK_TABLE
public_role = settings.PUBLIC_ROLE
self._check_for_injections(schema)
self._validate_table_name(table)
self._check_for_injections(public_role)
query = ('CREATE TABLE IF NOT EXISTS %s.%s '
'(license_link_id serial primary key,'
'repo_base VARCHAR(40) NOT NULL,'
'repo VARCHAR(40) NOT NULL,'
'license_id integer NOT NULL);')
params = (AsIs(schema), AsIs(table))
self.execute_sql(query, params)
query = ('GRANT ALL ON %s.%s to %s;')
params = (AsIs(schema), AsIs(table), AsIs(public_role))
return self.execute_sql(query, params)
def create_license(
self, license_name, pii_def, pii_anonymized, pii_removed):
'''
Creates a new license
'''
query = (
'INSERT INTO dh_public.license '
'(license_name, pii_def, pii_anonymized, pii_removed) '
'values (%s, %s, %s, %s)')
params = (license_name, pii_def, pii_anonymized, pii_removed)
res = self.execute_sql(query, params)
return res['status']
def create_license_link(self, repo_base, repo, license_id):
'''
        Creates a new license link
'''
# check if link already exists
query = ('SELECT license_link_id, repo_base, repo, license_id '
'FROM %s.%s where '
'repo_base = %s and repo = %s and license_id = %s;')
params = (
AsIs(settings.LICENSE_SCHEMA),
AsIs(settings.LICENSE_LINK_TABLE),
repo_base, repo, license_id)
res = self.execute_sql(query, params)
if res['tuples']:
return res['status']
query = (
'INSERT INTO dh_public.license_link '
'(repo_base, repo, license_id) '
'values (%s, %s, %s)')
params = (repo_base, repo, license_id)
res = self.execute_sql(query, params)
return res['status']
def find_license_links(self, license_id):
'''
finds all license_links associated with a given license_id
'''
        query = ('SELECT license_link_id, repo_base, repo, license_id '
                 'FROM %s.%s where license_id = %s;')
        params = (
            AsIs(settings.LICENSE_SCHEMA),
            AsIs(settings.LICENSE_LINK_TABLE),
            license_id)
res = self.execute_sql(query, params)
if not res['tuples']:
return []
return res['tuples']
def find_license_links_by_repo(self, repo_base, repo):
query = ('SELECT license_link_id, repo_base, repo, license_id '
'FROM %s.%s where repo_base = %s and repo = %s;')
params = (
AsIs(settings.LICENSE_SCHEMA),
AsIs(settings.LICENSE_LINK_TABLE),
repo_base, repo)
res = self.execute_sql(query, params)
if not res['tuples']:
return []
return res['tuples']
def find_licenses(self):
'''
find all licenses
'''
query = (
'SELECT license_id, license_name, pii_def, '
'pii_anonymized, pii_removed FROM %s.%s;')
params = (AsIs(settings.LICENSE_SCHEMA), AsIs(settings.LICENSE_TABLE))
res = self.execute_sql(query, params)
return res['tuples']
def find_license_by_id(self, license_id):
query = (
'SELECT license_id, license_name, pii_def, '
'pii_anonymized, pii_removed '
'FROM %s.%s where license_id= %s;')
params = (
AsIs(settings.LICENSE_SCHEMA),
AsIs(settings.LICENSE_TABLE),
license_id)
res = self.execute_sql(query, params)
# return None if the list is empty
if not res['tuples']:
return None
# else, return the policy
return res['tuples'][0]
# Below methods can only be called from the RLSSecurityManager #
def create_security_policy_schema(self):
public_role = settings.PUBLIC_ROLE
schema = settings.POLICY_SCHEMA
self._check_for_injections(public_role)
self._check_for_injections(schema)
query = 'CREATE SCHEMA IF NOT EXISTS %s AUTHORIZATION %s'
params = (AsIs(schema), AsIs(public_role))
return self.execute_sql(query, params)
def create_security_policy_table(self):
schema = settings.POLICY_SCHEMA
table = settings.POLICY_TABLE
public_role = settings.PUBLIC_ROLE
self._check_for_injections(schema)
self._validate_table_name(table)
self._check_for_injections(public_role)
query = ('CREATE TABLE IF NOT EXISTS %s.%s'
'('
'policy_id serial primary key,'
'policy VARCHAR(80) NOT NULL,'
'policy_type VARCHAR(80) NOT NULL,'
'grantee VARCHAR(80) NOT NULL,'
'grantor VARCHAR(80) NOT NULL,'
'table_name VARCHAR(80) NOT NULL,'
'repo VARCHAR(80) NOT NULL,'
'repo_base VARCHAR(80) NOT NULL'
');')
params = (AsIs(schema), AsIs(table))
self.execute_sql(query, params)
        # create indexes for faster searching
query = ('create index grantee_index on '
'dh_public.policy using hash(grantee); '
'create index grantor_index on '
'dh_public.policy using hash(grantor); '
'create index table_name_index on '
'dh_public.policy using hash(table_name); '
'create index repo_index on '
'dh_public.policy using hash(repo); '
'create index repo_base_index on '
'dh_public.policy using hash(repo_base);')
# postgres 9.4 doesn't support IF NOT EXISTS when creating indexes
# so it's possible for tests to attempt to create duplicate indexes
# This catches that exception
try:
self.execute_sql(query)
        except Exception:
pass
# grant the public role access to the table
query = ('GRANT ALL ON %s.%s to %s;')
params = (AsIs(schema), AsIs(table), AsIs(public_role))
return self.execute_sql(query, params)
def create_security_policy(self, policy, policy_type, grantee, grantor,
repo_base, repo, table):
'''
Creates a new security policy in the policy table if the policy
does not yet exist.
'''
        # disallow semicolons in policy. This helps prevent the policy creator
        # from shooting themselves in the foot with an attempted sql
        # injection. Note that we don't actually _need_ to do this. The
        # parameters are all escaped in RLS methods executed by the
        # superuser, so there isn't really a risk of a user acquiring root
        # access.
if ';' in policy:
raise ValueError("\';'s are disallowed in the policy field")
query = ('INSERT INTO dh_public.policy (policy, policy_type, grantee, '
'grantor, table_name, repo, repo_base) values '
'(%s, %s, %s, %s, %s, %s, %s)')
params = (policy, policy_type, grantee, grantor, table, repo,
repo_base)
res = self.execute_sql(query, params)
return res['status']
def find_all_security_policies(self, username):
params = (username, username)
query = ('SELECT policy_id, policy, policy_type, grantee, grantor '
'FROM dh_public.policy WHERE grantee = %s or '
'grantor = %s')
res = self.execute_sql(query, params)
return res['tuples']
def find_security_policies(self, repo_base, repo=None, table=None,
policy_id=None, policy=None, policy_type=None,
grantee=None, grantor=None):
'''
        Returns a list of all security policies that match the inputs
        specified by the user.
'''
        query = ('SELECT policy_id, policy, policy_type, grantee, grantor, '
'repo_base, repo, table_name '
'FROM %s.%s WHERE ')
params = [AsIs(settings.POLICY_SCHEMA), AsIs(settings.POLICY_TABLE)]
conditions = []
# append mandatory passed-in conditions
conditions.append('repo_base = %s')
params.append(repo_base)
# append optional conditions
if repo:
conditions.append('repo = %s')
params.append(repo)
if table:
conditions.append('table_name = %s')
params.append(table)
if policy_id:
conditions.append('policy_id = %s')
params.append(policy_id)
if policy:
conditions.append('policy = %s')
params.append(policy)
if policy_type:
conditions.append('policy_type = %s')
params.append(policy_type)
if grantee:
conditions.append('grantee = %s')
params.append(grantee)
if grantor:
conditions.append('grantor = %s')
params.append(grantor)
conditions = " and ".join(conditions)
params = tuple(params)
query += conditions
res = self.execute_sql(query, params)
return res['tuples']
def find_security_policy_by_id(self, policy_id):
'''
Returns the security policy that has a policy_id matching the input
specified by the user.
'''
query = ('SELECT policy_id, policy, policy_type, grantee, grantor, '
'repo_base, repo, table_name '
'FROM dh_public.policy WHERE policy_id = %s')
params = (policy_id,)
res = self.execute_sql(query, params)
# return None if the list is empty
if not res['tuples']:
return None
# else, return the policy
return res['tuples'][0]
def update_security_policy(self, policy_id, new_policy, new_policy_type,
new_grantee):
'''
Updates an existing security policy based on the inputs specified
by the user.
'''
query = ('UPDATE dh_public.policy '
'SET policy = %s, policy_type = %s, '
'grantee = %s '
'WHERE policy_id = %s')
params = (new_policy, new_policy_type, new_grantee, policy_id)
res = self.execute_sql(query, params)
return res['status']
def remove_security_policy(self, policy_id):
'''
Removes the security policy from the policy table with a policy_id
matching the one specified.
'''
query = 'DELETE FROM dh_public.policy WHERE policy_id = %s'
params = (policy_id,)
res = self.execute_sql(query, params)
return res['status']
def can_user_access_rls_table(self,
username,
permissions=['SELECT', 'UPDATE', 'INSERT']):
'''
        Returns True if the user has been granted the specified type(s) of
        access (select/update/insert) to the RLS policy table. Else,
        returns False.
        This must be executed from a connection to the
        settings.POLICY_DB database. Otherwise, it will check the wrong
        database, and (most likely) return False.
'''
query = ("SELECT exists("
"SELECT * FROM %s.%s where grantee=lower(%s) and (")
conditions = ["lower(policy_type)=lower(%s)"] * len(permissions)
conditions = " or ".join(conditions)
query += conditions + "))"
params = (AsIs(settings.POLICY_SCHEMA),
AsIs(settings.POLICY_TABLE),
username) + tuple(permissions)
res = self.execute_sql(query, params)
return res['tuples'][0][0]
|
codypiersall/mlab
|
tests/test_mlab_on_unix.py
|
import sys
sys.path = ['../src/'] + sys.path
import unittest
from mlab.mlabwrap import MatlabReleaseNotFound
class TestMlabUnix(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_version_discovery(self):
import mlab
instances = mlab.releases.MatlabVersions(globals())
assert len(instances.pick_latest_release()) > 0
with self.assertRaises(MatlabReleaseNotFound):
            instances.get_mlab_instance('R2010c')
def test_latest_release(self):
from mlab.releases import latest_release
from matlab import matlabroot
        self.assertTrue(len(matlabroot()) > 0)
if __name__ == '__main__':
unittest.main()
|
arnavd96/Cinemiezer
|
myvenv/lib/python3.4/site-packages/botocore/retryhandler.py
|
# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/
# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import random
import functools
import logging
from binascii import crc32
from botocore.vendored.requests import ConnectionError, Timeout
from botocore.vendored.requests.packages.urllib3.exceptions import ClosedPoolError
from botocore.exceptions import ChecksumError, EndpointConnectionError
logger = logging.getLogger(__name__)
# The only supported error for now is GENERAL_CONNECTION_ERROR
# which maps to requests generic ConnectionError. If we're able
# to get more specific exceptions from requests we can update
# this mapping with more specific exceptions.
EXCEPTION_MAP = {
'GENERAL_CONNECTION_ERROR': [
ConnectionError, ClosedPoolError, Timeout,
EndpointConnectionError
],
}
def delay_exponential(base, growth_factor, attempts):
"""Calculate time to sleep based on exponential function.
The format is::
base * growth_factor ^ (attempts - 1)
If ``base`` is set to 'rand' then a random number between
0 and 1 will be used as the base.
Base must be greater than 0, otherwise a ValueError will be
raised.
"""
if base == 'rand':
base = random.random()
elif base <= 0:
raise ValueError("The 'base' param must be greater than 0, "
"got: %s" % base)
time_to_sleep = base * (growth_factor ** (attempts - 1))
return time_to_sleep
def create_exponential_delay_function(base, growth_factor):
"""Create an exponential delay function based on the attempts.
This is used so that you only have to pass it the attempts
parameter to calculate the delay.
"""
return functools.partial(
delay_exponential, base=base, growth_factor=growth_factor)
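# A quick worked example of the two helpers above:
#
#   delay = create_exponential_delay_function(base=1, growth_factor=2)
#   delay(attempts=3)  # 1 * 2 ** (3 - 1) == 4 seconds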
def create_retry_handler(config, operation_name=None):
checker = create_checker_from_retry_config(
config, operation_name=operation_name)
action = create_retry_action_from_config(
config, operation_name=operation_name)
return RetryHandler(checker=checker, action=action)
def create_retry_action_from_config(config, operation_name=None):
# The spec has the possibility of supporting per policy
# actions, but right now, we assume this comes from the
# default section, which means that delay functions apply
# for every policy in the retry config (per service).
delay_config = config['__default__']['delay']
if delay_config['type'] == 'exponential':
return create_exponential_delay_function(
base=delay_config['base'],
growth_factor=delay_config['growth_factor'])
def create_checker_from_retry_config(config, operation_name=None):
checkers = []
max_attempts = None
retryable_exceptions = []
if '__default__' in config:
policies = config['__default__'].get('policies', [])
max_attempts = config['__default__']['max_attempts']
for key in policies:
current_config = policies[key]
checkers.append(_create_single_checker(current_config))
retry_exception = _extract_retryable_exception(current_config)
if retry_exception is not None:
retryable_exceptions.extend(retry_exception)
if operation_name is not None and config.get(operation_name) is not None:
operation_policies = config[operation_name]['policies']
for key in operation_policies:
checkers.append(_create_single_checker(operation_policies[key]))
retry_exception = _extract_retryable_exception(
operation_policies[key])
if retry_exception is not None:
retryable_exceptions.extend(retry_exception)
if len(checkers) == 1:
# Don't need to use a MultiChecker
return MaxAttemptsDecorator(checkers[0], max_attempts=max_attempts)
else:
multi_checker = MultiChecker(checkers)
return MaxAttemptsDecorator(
multi_checker, max_attempts=max_attempts,
retryable_exceptions=tuple(retryable_exceptions))
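# Illustrative shape of the ``config`` dict these factories consume,
# inferred from the key lookups above (the policy name and its contents
# are assumptions):
#
#   {
#       '__default__': {
#           'max_attempts': 5,
#           'delay': {'type': 'exponential', 'base': 'rand',
#                     'growth_factor': 2},
#           'policies': {
#               'general_socket_errors': {
#                   'applies_when': {
#                       'socket_errors': ['GENERAL_CONNECTION_ERROR']}},
#           },
#       },
#   }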
def _create_single_checker(config):
if 'response' in config['applies_when']:
return _create_single_response_checker(
config['applies_when']['response'])
elif 'socket_errors' in config['applies_when']:
return ExceptionRaiser()
def _create_single_response_checker(response):
if 'service_error_code' in response:
checker = ServiceErrorCodeChecker(
status_code=response['http_status_code'],
error_code=response['service_error_code'])
elif 'http_status_code' in response:
checker = HTTPStatusCodeChecker(
status_code=response['http_status_code'])
elif 'crc32body' in response:
checker = CRC32Checker(header=response['crc32body'])
else:
# TODO: send a signal.
raise ValueError("Unknown retry policy: %s" % config)
return checker
def _extract_retryable_exception(config):
applies_when = config['applies_when']
if 'crc32body' in applies_when.get('response', {}):
return [ChecksumError]
elif 'socket_errors' in applies_when:
exceptions = []
for name in applies_when['socket_errors']:
exceptions.extend(EXCEPTION_MAP[name])
return exceptions
class RetryHandler(object):
"""Retry handler.
The retry handler takes two params, ``checker`` object
and an ``action`` object.
The ``checker`` object must be a callable object and based on a response
and an attempt number, determines whether or not sufficient criteria for
a retry has been met. If this is the case then the ``action`` object
(which also is a callable) determines what needs to happen in the event
of a retry.
"""
def __init__(self, checker, action):
self._checker = checker
self._action = action
def __call__(self, attempts, response, caught_exception, **kwargs):
"""Handler for a retry.
Intended to be hooked up to an event handler (hence the **kwargs),
this will process retries appropriately.
"""
if self._checker(attempts, response, caught_exception):
result = self._action(attempts=attempts)
logger.debug("Retry needed, action of: %s", result)
return result
logger.debug("No retry needed.")
class BaseChecker(object):
"""Base class for retry checkers.
Each class is responsible for checking a single criteria that determines
    whether or not a retry should happen.
"""
def __call__(self, attempt_number, response, caught_exception):
"""Determine if retry criteria matches.
Note that either ``response`` is not None and ``caught_exception`` is
None or ``response`` is None and ``caught_exception`` is not None.
:type attempt_number: int
:param attempt_number: The total number of times we've attempted
to send the request.
:param response: The HTTP response (if one was received).
:type caught_exception: Exception
:param caught_exception: Any exception that was caught while trying to
send the HTTP response.
:return: True, if the retry criteria matches (and therefore a retry
            should occur). False if the criteria does not match.
"""
# The default implementation allows subclasses to not have to check
# whether or not response is None or not.
if response is not None:
return self._check_response(attempt_number, response)
elif caught_exception is not None:
return self._check_caught_exception(
attempt_number, caught_exception)
else:
raise ValueError("Both response and caught_exception are None.")
def _check_response(self, attempt_number, response):
pass
def _check_caught_exception(self, attempt_number, caught_exception):
pass
class MaxAttemptsDecorator(BaseChecker):
"""Allow retries up to a maximum number of attempts.
This will pass through calls to the decorated retry checker, provided
that the number of attempts does not exceed max_attempts. It will
also catch any retryable_exceptions passed in. Once max_attempts has
been exceeded, then False will be returned or the retryable_exceptions
that was previously being caught will be raised.
"""
def __init__(self, checker, max_attempts, retryable_exceptions=None):
self._checker = checker
self._max_attempts = max_attempts
self._retryable_exceptions = retryable_exceptions
def __call__(self, attempt_number, response, caught_exception):
should_retry = self._should_retry(attempt_number, response,
caught_exception)
if should_retry:
if attempt_number >= self._max_attempts:
logger.debug("Reached the maximum number of retry "
"attempts: %s", attempt_number)
return False
else:
return should_retry
else:
return False
def _should_retry(self, attempt_number, response, caught_exception):
if self._retryable_exceptions and \
attempt_number < self._max_attempts:
try:
return self._checker(attempt_number, response, caught_exception)
except self._retryable_exceptions as e:
logger.debug("retry needed, retryable exception caught: %s",
e, exc_info=True)
return True
else:
# If we've exceeded the max attempts we just let the exception
            # propagate if one has occurred.
return self._checker(attempt_number, response, caught_exception)
class HTTPStatusCodeChecker(BaseChecker):
def __init__(self, status_code):
self._status_code = status_code
def _check_response(self, attempt_number, response):
if response[0].status_code == self._status_code:
logger.debug(
"retry needed: retryable HTTP status code received: %s",
self._status_code)
return True
else:
return False
class ServiceErrorCodeChecker(BaseChecker):
def __init__(self, status_code, error_code):
self._status_code = status_code
self._error_code = error_code
def _check_response(self, attempt_number, response):
if response[0].status_code == self._status_code:
actual_error_code = response[1].get('Error', {}).get('Code')
if actual_error_code == self._error_code:
logger.debug(
"retry needed: matching HTTP status and error code seen: "
"%s, %s", self._status_code, self._error_code)
return True
return False
class MultiChecker(BaseChecker):
def __init__(self, checkers):
self._checkers = checkers
def __call__(self, attempt_number, response, caught_exception):
for checker in self._checkers:
checker_response = checker(attempt_number, response,
caught_exception)
if checker_response:
return checker_response
return False
class CRC32Checker(BaseChecker):
def __init__(self, header):
# The header where the expected crc32 is located.
self._header_name = header
def _check_response(self, attempt_number, response):
http_response = response[0]
expected_crc = http_response.headers.get(self._header_name)
if expected_crc is None:
logger.debug("crc32 check skipped, the %s header is not "
"in the http response.", self._header_name)
else:
actual_crc32 = crc32(response[0].content) & 0xffffffff
if not actual_crc32 == int(expected_crc):
logger.debug(
"retry needed: crc32 check failed, expected != actual: "
"%s != %s", int(expected_crc), actual_crc32)
raise ChecksumError(checksum_type='crc32',
expected_checksum=int(expected_crc),
actual_checksum=actual_crc32)
class ExceptionRaiser(BaseChecker):
"""Raise any caught exceptions.
This class will raise any non None ``caught_exception``.
"""
def _check_caught_exception(self, attempt_number, caught_exception):
# This is implementation specific, but this class is useful by
# coordinating with the MaxAttemptsDecorator.
# The MaxAttemptsDecorator has a list of exceptions it should catch
# and retry, but something needs to come along and actually raise the
# caught_exception. That's what this class is being used for. If
# the MaxAttemptsDecorator is not interested in retrying the exception
        # then this exception just propagates out past the retry code.
raise caught_exception
|
kartta-labs/mapwarper
|
lib/tilestache/TileStache-1.51.5/TileStache/Providers.py
|
""" The provider bits of TileStache.
A Provider is the part of TileStache that actually renders imagery. A few default
providers are found here, but it's possible to define your own and pull them into
TileStache dynamically by class name.
Built-in providers:
- mapnik (Mapnik.ImageProvider)
- proxy (Proxy)
- vector (TileStache.Vector.Provider)
- url template (UrlTemplate)
- mbtiles (TileStache.MBTiles.Provider)
- mapnik grid (Mapnik.GridProvider)
Example built-in provider, for JSON configuration file:
"layer-name": {
"provider": {"name": "mapnik", "mapfile": "style.xml"},
...
}
Example external provider, for JSON configuration file:
"layer-name": {
"provider": {"class": "Module:Classname", "kwargs": {"frob": "yes"}},
...
}
- The "class" value is split up into module and classname, and dynamically
included. If this doesn't work for some reason, TileStache will fail loudly
to let you know.
- The "kwargs" value is fed to the class constructor as a dictionary of keyword
args. If your defined class doesn't accept any of these keyword arguments,
TileStache will throw an exception.
A provider must offer one of two methods for rendering map areas.
The renderTile() method draws a single tile at a time, and has these arguments:
- width, height: in pixels
- srs: projection as Proj4 string.
"+proj=longlat +ellps=WGS84 +datum=WGS84" is an example,
see http://spatialreference.org for more.
- coord: Coordinate object representing a single tile.
The renderArea() method draws a variably-sized area, and is used when drawing
metatiles. It has these arguments:
- width, height: in pixels
- srs: projection as Proj4 string.
"+proj=longlat +ellps=WGS84 +datum=WGS84" is an example,
see http://spatialreference.org for more.
- xmin, ymin, xmax, ymax: coordinates of bounding box in projected coordinates.
- zoom: zoom level of final map. Technically this can be derived from the other
arguments, but that's a hassle so we'll pass it in explicitly.
A provider may offer a method for custom response type, getTypeByExtension().
This method accepts a single argument, a filename extension string (e.g. "png",
"json", etc.) and returns a tuple with twon strings: a mime-type and a format.
Note that for image and non-image tiles alike, renderArea() and renderTile()
Note that for image and non-image tiles alike, renderArea() and renderTile()
methods on a provider class must return an object with a save() method that
can accept a file-like object and a format name, e.g. this should work:
provider.renderArea(...).save(fp, "TEXT")
... if "TEXT" is a valid response format according to getTypeByExtension().
Non-image providers and metatiles do not mix.
For an example of a non-image provider, see TileStache.Vector.Provider.
"""
import os
import logging
try:
from io import BytesIO
except ImportError:
# Python 2
from StringIO import StringIO as BytesIO
from string import Template
try:
import urllib.request as urllib2
except ImportError:
# Python 2
import urllib2
import urllib
try:
from PIL import Image
except ImportError:
# On some systems, PIL.Image is known as Image.
import Image
import ModestMaps
from ModestMaps.Core import Point, Coordinate
from . import Geography
# This import should happen inside getProviderByName(), but when testing
# on Mac OS X features are missing from output. Weird-ass C libraries...
try:
from . import Vector
except ImportError:
pass
# Already deprecated; provided for temporary backward-compatibility with
# old location of Mapnik provider. TODO: remove in next major version.
try:
from .Mapnik import ImageProvider as Mapnik
except ImportError:
pass
def getProviderByName(name):
""" Retrieve a provider object by name.
Raise an exception if the name doesn't work out.
"""
if name.lower() == 'mapnik':
from . import Mapnik
return Mapnik.ImageProvider
elif name.lower() == 'proxy':
return Proxy
elif name.lower() == 'url template':
return UrlTemplate
elif name.lower() == 'vector':
from . import Vector
return Vector.Provider
elif name.lower() == 'mbtiles':
from . import MBTiles
return MBTiles.Provider
elif name.lower() == 'mapnik grid':
from . import Mapnik
return Mapnik.GridProvider
elif name.lower() == 'sandwich':
from . import Sandwich
return Sandwich.Provider
raise Exception('Unknown provider name: "%s"' % name)
class Verbatim:
''' Wrapper for PIL.Image that saves raw input bytes if modes and formats match.
'''
def __init__(self, bytes):
self.buffer = BytesIO(bytes)
self.format = None
self._image = None
#
# Guess image format based on magic number, if possible.
# http://www.astro.keele.ac.uk/oldusers/rno/Computing/File_magic.html
#
magic = {
'\x89\x50\x4e\x47': 'PNG',
'\xff\xd8\xff\xe0': 'JPEG',
            '\x47\x49\x46\x38': 'GIF',
'\x4d\x4d\x00\x2a': 'TIFF',
'\x49\x49\x2a\x00': 'TIFF'
}
if bytes[:4] in magic:
self.format = magic[bytes[:4]]
else:
self.format = self.image().format
def image(self):
''' Return a guaranteed instance of PIL.Image.
'''
if self._image is None:
self._image = Image.open(self.buffer)
return self._image
def convert(self, mode):
if mode == self.image().mode:
return self
else:
return self.image().convert(mode)
def crop(self, bbox):
return self.image().crop(bbox)
def save(self, output, format):
if format == self.format:
output.write(self.buffer.getvalue())
else:
self.image().save(output, format)
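# Usage sketch for Verbatim (``png_bytes`` assumed to be the raw bytes of a
# PNG file): Verbatim(png_bytes).save(fp, 'PNG') writes the original bytes
# straight through, while any other format falls back to a PIL re-encode.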
class Proxy:
""" Proxy provider, to pass through and cache tiles from other places.
This provider is identified by the name "proxy" in the TileStache config.
Additional arguments:
- url (optional)
URL template for remote tiles, for example:
"http://tile.openstreetmap.org/{Z}/{X}/{Y}.png"
- provider (optional)
Provider name string from Modest Maps built-ins.
See ModestMaps.builtinProviders.keys() for a list.
Example: "OPENSTREETMAP".
- timeout (optional)
Defines a timeout in seconds for the request.
If not defined, the global default timeout setting will be used.
Either url or provider is required. When both are present, url wins.
Example configuration:
{
"name": "proxy",
"url": "http://tile.openstreetmap.org/{Z}/{X}/{Y}.png"
}
"""
def __init__(self, layer, url=None, provider_name=None, timeout=None):
""" Initialize Proxy provider with layer and url.
"""
if url:
self.provider = ModestMaps.Providers.TemplatedMercatorProvider(url)
elif provider_name:
if provider_name in ModestMaps.builtinProviders:
self.provider = ModestMaps.builtinProviders[provider_name]()
else:
                raise Exception('Unknown Modest Maps provider: "%s"' % provider_name)
else:
raise Exception('Missing required url or provider parameter to Proxy provider')
self.timeout = timeout
@staticmethod
def prepareKeywordArgs(config_dict):
""" Convert configured parameters to keyword args for __init__().
"""
kwargs = dict()
if 'url' in config_dict:
kwargs['url'] = config_dict['url']
if 'provider' in config_dict:
kwargs['provider_name'] = config_dict['provider']
if 'timeout' in config_dict:
kwargs['timeout'] = config_dict['timeout']
return kwargs
def renderTile(self, width, height, srs, coord):
"""
"""
img = None
urls = self.provider.getTileUrls(coord)
        # Tell urllib2 to use proxies if set in the environment variables <protocol>_proxy
# see: https://docs.python.org/2/library/urllib2.html#urllib2.ProxyHandler
proxy_support = urllib2.ProxyHandler()
url_opener = urllib2.build_opener(proxy_support)
for url in urls:
body = url_opener.open(url, timeout=self.timeout).read()
tile = Verbatim(body)
if len(urls) == 1:
#
# if there is only one URL, don't bother
# with PIL's non-Porter-Duff alpha channeling.
#
return tile
elif img is None:
#
# for many URLs, paste them to a new image.
#
img = Image.new('RGBA', (width, height))
img.paste(tile, (0, 0), tile)
return img
class UrlTemplate:
""" Built-in URL Template provider. Proxies map images from WMS servers.
This provider is identified by the name "url template" in the TileStache config.
Additional arguments:
- template (required)
String with substitutions suitable for use in string.Template.
- referer (optional)
String to use in the "Referer" header when making HTTP requests.
- source projection (optional)
Projection to transform coordinates into before making request
- timeout (optional)
Defines a timeout in seconds for the request.
If not defined, the global default timeout setting will be used.
More on string substitutions:
- http://docs.python.org/library/string.html#template-strings
"""
def __init__(self, layer, template, referer=None, source_projection=None,
timeout=None):
""" Initialize a UrlTemplate provider with layer and template string.
http://docs.python.org/library/string.html#template-strings
"""
self.layer = layer
self.template = Template(template)
self.referer = referer
self.source_projection = source_projection
self.timeout = timeout
@staticmethod
def prepareKeywordArgs(config_dict):
""" Convert configured parameters to keyword args for __init__().
"""
kwargs = {'template': config_dict['template']}
if 'referer' in config_dict:
kwargs['referer'] = config_dict['referer']
if 'source projection' in config_dict:
kwargs['source_projection'] = Geography.getProjectionByName(config_dict['source projection'])
if 'timeout' in config_dict:
kwargs['timeout'] = config_dict['timeout']
return kwargs
def renderArea(self, width, height, srs, xmin, ymin, xmax, ymax, zoom):
""" Return an image for an area.
Each argument (width, height, etc.) is substituted into the template.
"""
if self.source_projection is not None:
ne_location = self.layer.projection.projLocation(Point(xmax, ymax))
ne_point = self.source_projection.locationProj(ne_location)
ymax = ne_point.y
xmax = ne_point.x
sw_location = self.layer.projection.projLocation(Point(xmin, ymin))
sw_point = self.source_projection.locationProj(sw_location)
ymin = sw_point.y
xmin = sw_point.x
srs = self.source_projection.srs
mapping = {'width': width, 'height': height, 'srs': srs, 'zoom': zoom,
'xmin': xmin, 'ymin': ymin, 'xmax': xmax, 'ymax': ymax}
href = self.template.safe_substitute(mapping)
req = urllib2.Request(href)
if self.referer:
req.add_header('Referer', self.referer)
body = urllib2.urlopen(req, timeout=self.timeout).read()
tile = Verbatim(body)
return tile
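# Illustrative layer configuration for this provider (the endpoint is
# hypothetical; the $-placeholders match the mapping built in renderArea):
#
#   "layer-name": {
#       "provider": {
#           "name": "url template",
#           "template": "http://example.com/wms?SRS=$srs&BBOX=$xmin,$ymin,$xmax,$ymax&WIDTH=$width&HEIGHT=$height"
#       }
#   }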
|
gina-alaska/emodis_ndvi_python-docker
|
emodis_ndvi_python/pycodes/oneyear_data_layer_subset_good.py
|
#This python script is modified from oneyear_data_layer_subset_good.pro
#This routine open one year files defined in file lists, stack these file, subset, and fill bad data with -2000
#input arguments are flist_ndvi, flist_bq, ul_lon,ul_lat,lr_lon,lr_lat
#;inputs: yyyy_flist_ndvi----file list for one year *ndvi.tif,
#;        yyyy_flist_bq -----file list for one year *ndvi_bq.tif
#; ul-----upper left coordinate in unit of degree in geographic coordinates,WGS84
#;        lr-----lower right coordinate in unit of degree in geographic coordinates,WGS84
#; data_ver_flg------, 0-old version data,1-new version data
import sys
import os
import platform
from read_ndvi import *
import raster_process as rp
print 'Number of arguments:', len(sys.argv), 'arguments.'
print 'Argument List:', str(sys.argv)
if len(sys.argv) != 7:
print "input arguments are: flist_ndvi, flist_bq, ulx,uly,lrx,lry"
sys.exit(1)
flist_ndvi=sys.argv[1]
flist_bq=sys.argv[2]
ulx=float(sys.argv[3])
uly=float(sys.argv[4])
lrx=float(sys.argv[5])
lry=float(sys.argv[6])
#;test
#;ul in deg, minute, secons= 173d 0' 0.00"W, 72d 0' 0.00"N
#;lr in deg, minute, second= 127d59'56.82"W, 54d 0' 0.07"N
#;if you do not want to subset the data, just input 0,0,0,0 for ul_lon,ul_lat,lr_lon,lr_lat, respectively.
#;wrkdir='/home/jiang/nps/cesu/modis_ndvi_250m/wrkdir/'
#;flist_ndvi='/mnt/jzhu_scratch/EMODIS-NDVI-DATA/wrk/ver_new_201107/2008/flist_ndvi'
#;flist_bq = '/mnt/jzhu_scratch/EMODIS-NDVI-DATA/wrk/ver_new_201107/2008/flist_bq'
#;flist_ndvi='/raid/scratch/cesu/eMODIS/ver_old/2008/flist_ndvi'
#;flist_bq='/raid/scratch/cesu/eMODIS/ver_old/2008/flist_bq'
#;flist_ndvi='/home/jiang/nps/cesu/modis_ndvi_250m/wrkdir/2010/2010_flist_ndvi'
#;flist_bq = '/home/jiang/nps/cesu/modis_ndvi_250m/wrkdir/2010/2010_flist_bq'
#;ul=[-173.0d,72.0d]
#;lr=[-127.999116667d,54.000019444d]
#;set path and start envi
#;ENVI, /RESTORE_BASE_SAVE_FILES
#;PREF_SET, 'IDL_PATH', '<IDL_DEFAULT>:+~/nps/cesu/modis_ndvi_250m/bin', /COMMIT
#;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
if platform.system() == 'Windows':
sign='\\'
else:
sign='/'
#---- read these two lists into flist and flist_bq
u1=open(flist_ndvi,'r')
u2=open(flist_bq ,'r')
#---- count the number of lines in the flist files
#total_line_count = sum(1 for line in open("filename.txt"))
#---- get the file names into the list
flist=u1.readlines()
flist=[x.rstrip('\n') for x in flist]
flistbq=u2.readlines()
flistbq=[x.rstrip('\n') for x in flistbq]
num=len(flist)
#---- get workdir and year from mid-year file
#p =strpos(flist(1),sign,/reverse_search)
#len=strlen(flist(1))
wrkdir=os.path.dirname(flist[0])
filen =os.path.basename(flist[0])
#;-----use the file name prefix to determine the data version
if filen.startswith('MT3RG_'):
    data_ver_flg=0
else:
data_ver_flg=1
if data_ver_flg == 0:
    year=filen[6:10] #MT3RG_2008_141-147_250m_composite_ndvi.tif
else:
year=filen[13:17] #AK_eMTH_NDVI.2008.036-042.QKM.VI_NDVI.005.2011202142526.tif
#;---- define a struc to save info of each file
#;p={flists,fn:'abc',sn:0,dims:lonarr(5),bn:0L}
#;x=create_struct(name=flist,fn,'abc',fid,0L,dims,lonarr(5),bn,0L)
#x={flist,fn:'abc',bname:'abc',fid:0L,dims:lonarr(5),pos:0L}
#flista=replicate(x,num) ;save ndvi data files
#flistq=replicate(x,num) ; save ndvi_bq data files
#;---- go through one year ndvi and ndvi_bq data files
First_Flag=True
for j in range(0L, num):
fn_ndvi = flist[j]
#;---- for old data name
if data_ver_flg == 0:
str1='composite_ndvi'
str2='composite_ndvi_bq'
        p1=fn_ndvi.rfind(sign)
tmpbname=fn_ndvi[p1+7:p1+19] # for old data, its name looks like:MT3RG_2008_253-259_250m_composite_ndvi.tif
else:
#;---- for new data name
str1='.VI_NDVI.'
str2='.VI_QUAL.'
p1=fn_ndvi.rfind(sign)
tmpbname=fn_ndvi[p1+14:p1+26] #for new data, its name looks like:eMTH_NDVI.2008.029-035.QKM.VI_NDVI.005.2011202084157.tif
p=fn_ndvi.find(str1)
length=len(fn_ndvi)
file_hdr=fn_ndvi[0:p]
file_end =fn_ndvi[p+len(str1):length]
fn_bq=file_hdr+str2+file_end
    if fn_bq in flistbq:
        #---- read ndvi and bq to cut off invalid points
        print('process the '+ str(j+1) + ' th file: ' +fn_ndvi)
(rt_t, rt_d)=read_ndvi(fn_ndvi,fn_bq,ulx,uly,lrx,lry,tmpbname)
if First_Flag == True:
First_Flag=False
tot_t=wrkdir+'/'+year+'_stack_ndvi.tif'
tot_d=wrkdir+'/'+year+'_stack_bq.tif'
os.system('cp '+ rt_t +' '+ tot_t)
os.system('rm -f '+rt_t)
os.system('cp '+ rt_d +' '+ tot_d)
os.system('rm -f '+rt_d)
else:
tot_t=rp.raster_comb(tot_t,rt_t)
tot_d=rp.raster_comb(tot_d,rt_d)
|
emilgaripov/emilgaripov.github.io
|
code_examples/python_with/with_sqlite3_conn_contextmanager.py
|
import contextlib
import sqlite3
@contextlib.contextmanager
def sqlite3_connection(db_name):
    connection = sqlite3.connect(db_name)
    try:
        yield connection
    finally:
        connection.close()
with sqlite3_connection('dhcp_snooping.db') as conn:
for row in conn.execute('select * from dhcp'):
print(row)
try:
conn.execute('select * from dhcp')
except sqlite3.ProgrammingError as e:
print(e)
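# An equivalent sketch using the stdlib helper contextlib.closing, which
# likewise guarantees close() even if the body raises:
#
#   with contextlib.closing(sqlite3.connect('dhcp_snooping.db')) as conn:
#       for row in conn.execute('select * from dhcp'):
#           print(row)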
|
aa403/betfair.py
|
betfair/meta/datatype.py
|
# -*- coding: utf-8 -*-
class DataType(object):
def __init__(self, type, preprocessor=None):
self.type = type
self.preprocessor = preprocessor
def preprocess(self, value):
return self.preprocessor(value) if self.preprocessor else value
def serialize(self, value):
return value
def unserialize(self, value):
processed = self.preprocess(value)
if isinstance(processed, self.type):
return processed
return self.type(processed)
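# Usage sketch (int and the strip preprocessor are stand-ins, not types
# from this package):
#
#   dt = DataType(int)
#   dt.unserialize('3')   # -> 3, coerced via int('3')
#   dt.unserialize(3)     # -> 3, already an int so returned unchanged
#
#   dt2 = DataType(int, preprocessor=lambda v: v.strip())
#   dt2.unserialize(' 3 ')  # -> 3, stripped first, then coerced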
|
dmeulen/home-assistant
|
homeassistant/config.py
|
"""Module to help with parsing and generating configuration files."""
import asyncio
import logging
import os
import shutil
from types import MappingProxyType
# pylint: disable=unused-import
from typing import Any, Tuple # NOQA
import voluptuous as vol
from homeassistant.const import (
CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME, CONF_UNIT_SYSTEM,
CONF_TIME_ZONE, CONF_CUSTOMIZE, CONF_ELEVATION, CONF_UNIT_SYSTEM_METRIC,
CONF_UNIT_SYSTEM_IMPERIAL, CONF_TEMPERATURE_UNIT, TEMP_CELSIUS,
__version__)
from homeassistant.core import valid_entity_id
from homeassistant.exceptions import HomeAssistantError
from homeassistant.util.yaml import load_yaml
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import set_customize
from homeassistant.util import dt as date_util, location as loc_util
from homeassistant.util.unit_system import IMPERIAL_SYSTEM, METRIC_SYSTEM
_LOGGER = logging.getLogger(__name__)
YAML_CONFIG_FILE = 'configuration.yaml'
VERSION_FILE = '.HA_VERSION'
CONFIG_DIR_NAME = '.homeassistant'
DEFAULT_CORE_CONFIG = (
# Tuples (attribute, default, auto detect property, description)
(CONF_NAME, 'Home', None, 'Name of the location where Home Assistant is '
'running'),
(CONF_LATITUDE, 0, 'latitude', 'Location required to calculate the time'
' the sun rises and sets'),
(CONF_LONGITUDE, 0, 'longitude', None),
(CONF_ELEVATION, 0, None, 'Impacts weather/sunrise data'
' (altitude above sea level in meters)'),
(CONF_UNIT_SYSTEM, CONF_UNIT_SYSTEM_METRIC, None,
'{} for Metric, {} for Imperial'.format(CONF_UNIT_SYSTEM_METRIC,
CONF_UNIT_SYSTEM_IMPERIAL)),
(CONF_TIME_ZONE, 'UTC', 'time_zone', 'Pick yours from here: http://en.wiki'
'pedia.org/wiki/List_of_tz_database_time_zones'),
) # type: Tuple[Tuple[str, Any, Any, str], ...]
DEFAULT_CONFIG = """
# Show links to resources in log and frontend
introduction:
# Enables the frontend
frontend:
http:
# Uncomment this to add a password (recommended!)
# api_password: PASSWORD
# Checks for available updates
updater:
# Discover some devices automatically
discovery:
# Allows you to issue voice commands from the frontend in enabled browsers
conversation:
# Enables support for tracking state changes over time.
history:
# View all events in a logbook
logbook:
# Track the sun
sun:
# Weather Prediction
sensor:
platform: yr
"""
def _valid_customize(value):
"""Config validator for customize."""
if not isinstance(value, dict):
raise vol.Invalid('Expected dictionary')
for key, val in value.items():
if not valid_entity_id(key):
raise vol.Invalid('Invalid entity ID: {}'.format(key))
if not isinstance(val, dict):
raise vol.Invalid('Value of {} is not a dictionary'.format(key))
return value
CORE_CONFIG_SCHEMA = vol.Schema({
CONF_NAME: vol.Coerce(str),
CONF_LATITUDE: cv.latitude,
CONF_LONGITUDE: cv.longitude,
CONF_ELEVATION: vol.Coerce(int),
vol.Optional(CONF_TEMPERATURE_UNIT): cv.temperature_unit,
CONF_UNIT_SYSTEM: cv.unit_system,
CONF_TIME_ZONE: cv.time_zone,
vol.Required(CONF_CUSTOMIZE,
default=MappingProxyType({})): _valid_customize,
})
def get_default_config_dir() -> str:
"""Put together the default configuration directory based on OS."""
data_dir = os.getenv('APPDATA') if os.name == "nt" \
else os.path.expanduser('~')
return os.path.join(data_dir, CONFIG_DIR_NAME)
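# For example, this resolves to ~/.homeassistant on Linux/macOS and to
# %APPDATA%\.homeassistant on Windows.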
def ensure_config_exists(config_dir: str, detect_location: bool=True) -> str:
"""Ensure a config file exists in given configuration directory.
Creating a default one if needed.
Return path to the config file.
"""
config_path = find_config_file(config_dir)
if config_path is None:
print("Unable to find configuration. Creating default one in",
config_dir)
config_path = create_default_config(config_dir, detect_location)
return config_path
def create_default_config(config_dir, detect_location=True):
"""Create a default configuration file in given configuration directory.
Return path to new config file if success, None if failed.
This method needs to run in an executor.
"""
config_path = os.path.join(config_dir, YAML_CONFIG_FILE)
version_path = os.path.join(config_dir, VERSION_FILE)
info = {attr: default for attr, default, _, _ in DEFAULT_CORE_CONFIG}
location_info = detect_location and loc_util.detect_location_info()
if location_info:
if location_info.use_metric:
info[CONF_UNIT_SYSTEM] = CONF_UNIT_SYSTEM_METRIC
else:
info[CONF_UNIT_SYSTEM] = CONF_UNIT_SYSTEM_IMPERIAL
for attr, default, prop, _ in DEFAULT_CORE_CONFIG:
if prop is None:
continue
info[attr] = getattr(location_info, prop) or default
if location_info.latitude and location_info.longitude:
info[CONF_ELEVATION] = loc_util.elevation(location_info.latitude,
location_info.longitude)
    # Writing the file with the YAML library does not produce the most
    # human-readable result, so we hard-code a YAML template instead.
try:
with open(config_path, 'w') as config_file:
config_file.write("homeassistant:\n")
for attr, _, _, description in DEFAULT_CORE_CONFIG:
if info[attr] is None:
continue
elif description:
config_file.write(" # {}\n".format(description))
config_file.write(" {}: {}\n".format(attr, info[attr]))
config_file.write(DEFAULT_CONFIG)
with open(version_path, 'wt') as version_file:
version_file.write(__version__)
return config_path
except IOError:
print('Unable to create default configuration file', config_path)
return None
@asyncio.coroutine
def async_hass_config_yaml(hass):
"""Load YAML from hass config File.
This function allow component inside asyncio loop to reload his config by
self.
This method is a coroutine.
"""
def _load_hass_yaml_config():
path = find_config_file(hass.config.config_dir)
conf = load_yaml_config_file(path)
return conf
conf = yield from hass.loop.run_in_executor(None, _load_hass_yaml_config)
return conf
def find_config_file(config_dir):
"""Look in given directory for supported configuration files.
Async friendly.
"""
config_path = os.path.join(config_dir, YAML_CONFIG_FILE)
return config_path if os.path.isfile(config_path) else None
def load_yaml_config_file(config_path):
"""Parse a YAML configuration file.
This method needs to run in an executor.
"""
conf_dict = load_yaml(config_path)
if not isinstance(conf_dict, dict):
msg = 'The configuration file {} does not contain a dictionary'.format(
os.path.basename(config_path))
_LOGGER.error(msg)
raise HomeAssistantError(msg)
return conf_dict
def process_ha_config_upgrade(hass):
"""Upgrade config if necessary.
This method needs to run in an executor.
"""
version_path = hass.config.path(VERSION_FILE)
try:
with open(version_path, 'rt') as inp:
conf_version = inp.readline().strip()
except FileNotFoundError:
# Last version to not have this file
conf_version = '0.7.7'
if conf_version == __version__:
return
_LOGGER.info('Upgrading config directory from %s to %s', conf_version,
__version__)
lib_path = hass.config.path('deps')
if os.path.isdir(lib_path):
shutil.rmtree(lib_path)
with open(version_path, 'wt') as outp:
outp.write(__version__)
@asyncio.coroutine
def async_process_ha_core_config(hass, config):
"""Process the [homeassistant] section from the config.
This method is a coroutine.
"""
config = CORE_CONFIG_SCHEMA(config)
hac = hass.config
def set_time_zone(time_zone_str):
"""Helper method to set time zone."""
if time_zone_str is None:
return
time_zone = date_util.get_time_zone(time_zone_str)
if time_zone:
hac.time_zone = time_zone
date_util.set_default_time_zone(time_zone)
else:
_LOGGER.error('Received invalid time zone %s', time_zone_str)
for key, attr in ((CONF_LATITUDE, 'latitude'),
(CONF_LONGITUDE, 'longitude'),
(CONF_NAME, 'location_name'),
(CONF_ELEVATION, 'elevation')):
if key in config:
setattr(hac, attr, config[key])
if CONF_TIME_ZONE in config:
set_time_zone(config.get(CONF_TIME_ZONE))
set_customize(config.get(CONF_CUSTOMIZE) or {})
if CONF_UNIT_SYSTEM in config:
if config[CONF_UNIT_SYSTEM] == CONF_UNIT_SYSTEM_IMPERIAL:
hac.units = IMPERIAL_SYSTEM
else:
hac.units = METRIC_SYSTEM
elif CONF_TEMPERATURE_UNIT in config:
unit = config[CONF_TEMPERATURE_UNIT]
if unit == TEMP_CELSIUS:
hac.units = METRIC_SYSTEM
else:
hac.units = IMPERIAL_SYSTEM
_LOGGER.warning("Found deprecated temperature unit in core config, "
"expected unit system. Replace '%s: %s' with "
"'%s: %s'", CONF_TEMPERATURE_UNIT, unit,
CONF_UNIT_SYSTEM, hac.units.name)
# Shortcut if no auto-detection necessary
if None not in (hac.latitude, hac.longitude, hac.units,
hac.time_zone, hac.elevation):
return
discovered = []
# If we miss some of the needed values, auto detect them
if None in (hac.latitude, hac.longitude, hac.units,
hac.time_zone):
info = yield from hass.loop.run_in_executor(
None, loc_util.detect_location_info)
if info is None:
_LOGGER.error('Could not detect location information')
return
if hac.latitude is None and hac.longitude is None:
hac.latitude, hac.longitude = (info.latitude, info.longitude)
discovered.append(('latitude', hac.latitude))
discovered.append(('longitude', hac.longitude))
if hac.units is None:
hac.units = METRIC_SYSTEM if info.use_metric else IMPERIAL_SYSTEM
discovered.append((CONF_UNIT_SYSTEM, hac.units.name))
if hac.location_name is None:
hac.location_name = info.city
discovered.append(('name', info.city))
if hac.time_zone is None:
set_time_zone(info.time_zone)
discovered.append(('time_zone', info.time_zone))
if hac.elevation is None and hac.latitude is not None and \
hac.longitude is not None:
elevation = yield from hass.loop.run_in_executor(
None, loc_util.elevation, hac.latitude, hac.longitude)
hac.elevation = elevation
discovered.append(('elevation', elevation))
if discovered:
_LOGGER.warning(
'Incomplete core config. Auto detected %s',
', '.join('{}: {}'.format(key, val) for key, val in discovered))
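# async_process_ha_core_config above offloads blocking work (location
# detection, the elevation lookup) via loop.run_in_executor. A minimal
# standalone sketch of that pattern in the same pre-async/await coroutine
# style used here (illustration only):
#
#   import asyncio
#
#   @asyncio.coroutine
#   def demo(loop):
#       result = yield from loop.run_in_executor(None, sum, range(10))
#       return result
#
#   loop = asyncio.get_event_loop()
#   print(loop.run_until_complete(demo(loop)))  # -> 45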
|
numa-engineering/python-daemon
|
daemon/runner.py
|
# -*- coding: utf-8 -*-
# daemon/runner.py
# Part of python-daemon, an implementation of PEP 3143.
#
# Copyright © 2009–2010 Ben Finney <ben+python@benfinney.id.au>
# Copyright © 2007–2008 Robert Niederreiter, Jens Klein
# Copyright © 2003 Clark Evans
# Copyright © 2002 Noah Spurrier
# Copyright © 2001 Jürgen Hermann
#
# This is free software: you may copy, modify, and/or distribute this work
# under the terms of the Python Software Foundation License, version 2 or
# later as published by the Python Software Foundation.
# No warranty expressed or implied. See the file LICENSE.PSF-2 for details.
""" Daemon runner library.
"""
import sys
import os
import signal
import errno
from . import pidlockfile
from .daemon import DaemonContext
if sys.version_info >= (3, 0):
unicode = str
basestring = str
class DaemonRunnerError(Exception):
""" Abstract base class for errors from DaemonRunner. """
class DaemonRunnerInvalidActionError(ValueError, DaemonRunnerError):
""" Raised when specified action for DaemonRunner is invalid. """
class DaemonRunnerStartFailureError(RuntimeError, DaemonRunnerError):
""" Raised when failure starting DaemonRunner. """
class DaemonRunnerStopFailureError(RuntimeError, DaemonRunnerError):
""" Raised when failure stopping DaemonRunner. """
class DaemonRunner(object):
""" Controller for a callable running in a separate background process.
The first command-line argument is the action to take:
* 'start': Become a daemon and call `app.run()`.
* 'stop': Exit the daemon process specified in the PID file.
* 'restart': Stop, then start.
"""
start_message = "started with pid %(pid)d"
def __init__(self, app):
""" Set up the parameters of a new runner.
The `app` argument must have the following attributes:
* `stdin_path`, `stdout_path`, `stderr_path`: Filesystem
paths to open and replace the existing `sys.stdin`,
`sys.stdout`, `sys.stderr`.
* `pidfile_path`: Absolute filesystem path to a file that
will be used as the PID file for the daemon. If
``None``, no PID file will be used.
* `pidfile_timeout`: Used as the default acquisition
timeout value supplied to the runner's PID lock file.
* `run`: Callable that will be invoked when the daemon is
started.
"""
self.parse_args()
self.app = app
self.daemon_context = DaemonContext()
self.daemon_context.stdin = open(app.stdin_path, 'r')
self.daemon_context.stdout = open(app.stdout_path, 'wb+', buffering=0)
self.daemon_context.stderr = open(
app.stderr_path, 'wb+', buffering=0)
self.pidfile = None
if app.pidfile_path is not None:
self.pidfile = make_pidlockfile(
app.pidfile_path, app.pidfile_timeout)
self.daemon_context.pidfile = self.pidfile
def _usage_exit(self, argv):
""" Emit a usage message, then exit.
"""
progname = os.path.basename(argv[0])
usage_exit_code = 2
action_usage = "|".join(self.action_funcs.keys())
message = "usage: %(progname)s %(action_usage)s" % vars()
emit_message(message)
sys.exit(usage_exit_code)
def parse_args(self, argv=None):
""" Parse command-line arguments.
"""
if argv is None:
argv = sys.argv
min_args = 2
if len(argv) < min_args:
self._usage_exit(argv)
self.action = unicode(argv[1])
if self.action not in self.action_funcs:
self._usage_exit(argv)
def _start(self):
""" Open the daemon context and run the application.
"""
if is_pidfile_stale(self.pidfile):
self.pidfile.break_lock()
try:
self.daemon_context.open()
except pidlockfile.AlreadyLocked:
pidfile_path = self.pidfile.path
raise DaemonRunnerStartFailureError(
"PID file %(pidfile_path)r already locked" % vars())
pid = os.getpid()
message = self.start_message % vars()
emit_message(message)
self.app.run()
def _terminate_daemon_process(self):
""" Terminate the daemon process specified in the current PID file.
"""
pid = self.pidfile.read_pid()
try:
os.kill(pid, signal.SIGTERM)
except OSError as exc:
raise DaemonRunnerStopFailureError(
"Failed to terminate %(pid)d: %(exc)s" % vars())
def _stop(self):
""" Exit the daemon process specified in the current PID file.
"""
if not self.pidfile.is_locked():
pidfile_path = self.pidfile.path
raise DaemonRunnerStopFailureError(
"PID file %(pidfile_path)r not locked" % vars())
if is_pidfile_stale(self.pidfile):
self.pidfile.break_lock()
else:
self._terminate_daemon_process()
def _restart(self):
""" Stop, then start.
"""
self._stop()
self._start()
action_funcs = {
'start': _start,
'stop': _stop,
'restart': _restart,
}
def _get_action_func(self):
""" Return the function for the specified action.
Raises ``DaemonRunnerInvalidActionError`` if the action is
unknown.
"""
try:
func = self.action_funcs[self.action]
except KeyError:
raise DaemonRunnerInvalidActionError(
"Unknown action: %(action)r" % vars(self))
return func
def do_action(self):
""" Perform the requested action.
"""
func = self._get_action_func()
func(self)
def emit_message(message, stream=None):
""" Emit a message to the specified stream (default `sys.stderr`). """
if stream is None:
stream = sys.stderr
stream.write("%(message)s\n" % vars())
stream.flush()
def make_pidlockfile(path, acquire_timeout):
""" Make a PIDLockFile instance with the given filesystem path. """
if not isinstance(path, basestring):
error = ValueError("Not a filesystem path: %(path)r" % vars())
raise error
if not os.path.isabs(path):
error = ValueError("Not an absolute path: %(path)r" % vars())
raise error
lockfile = pidlockfile.TimeoutPIDLockFile(path, acquire_timeout)
return lockfile
def is_pidfile_stale(pidfile):
""" Determine whether a PID file is stale.
Return ``True`` (“stale”) if the contents of the PID file are
valid but do not match the PID of a currently-running process;
otherwise return ``False``.
"""
result = False
pidfile_pid = pidfile.read_pid()
if pidfile_pid is not None:
try:
os.kill(pidfile_pid, signal.SIG_DFL)
except OSError as exc:
if exc.errno == errno.ESRCH:
# The specified PID does not exist
result = True
return result
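# A DaemonRunner is driven from a script's entry point; the app object only
# needs the attributes documented in DaemonRunner.__init__. A minimal sketch
# (the HelloApp name and the paths are hypothetical placeholders):
#
#   class HelloApp(object):
#       stdin_path = '/dev/null'
#       stdout_path = '/tmp/hello.log'
#       stderr_path = '/tmp/hello.log'
#       pidfile_path = '/tmp/hello.pid'
#       pidfile_timeout = 5
#
#       def run(self):
#           import time
#           while True:
#               time.sleep(60)
#
#   # invoked as: python hello.py start|stop|restart
#   DaemonRunner(HelloApp()).do_action()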
|
kingvuplus/boom
|
lib/python/Screens/Rc.py
|
from Components.Pixmap import MovingPixmap, MultiPixmap
from Tools.Directories import resolveFilename, SCOPE_SKIN
from xml.etree.ElementTree import ElementTree
from Components.config import config, ConfigInteger
from Components.RcModel import rc_model
from boxbranding import getBoxType
config.misc.rcused = ConfigInteger(default=1)
class Rc:
def __init__(self):
self['rc'] = MultiPixmap()
self['arrowdown'] = MovingPixmap()
self['arrowdown2'] = MovingPixmap()
self['arrowup'] = MovingPixmap()
self['arrowup2'] = MovingPixmap()
self.isDefaultRc = rc_model.rcIsDefault()
self.rcheight = 500
self.rcheighthalf = 250
self.selectpics = []
self.selectpics.append((self.rcheighthalf, ['arrowdown', 'arrowdown2'], (-18, -70)))
self.selectpics.append((self.rcheight, ['arrowup', 'arrowup2'], (-18, 0)))
self.readPositions()
self.clearSelectedKeys()
self.onShown.append(self.initRc)
def initRc(self):
if getBoxType() in ('uniboxhd1', 'uniboxhd2', 'uniboxhd3', 'sezam5000hd', 'mbtwin', 'beyonwizt3'):
self['rc'].setPixmapNum(config.misc.rcused.value)
elif self.isDefaultRc:
self['rc'].setPixmapNum(config.misc.rcused.value)
else:
self['rc'].setPixmapNum(0)
def readPositions(self):
if self.isDefaultRc:
target = resolveFilename(SCOPE_SKIN, 'rcpositions.xml')
else:
target = rc_model.getRcLocation() + 'rcpositions.xml'
tree = ElementTree(file=target)
rcs = tree.getroot()
self.rcs = {}
for rc in rcs:
id = int(rc.attrib['id'])
self.rcs[id] = {}
for key in rc:
name = key.attrib['name']
pos = key.attrib['pos'].split(',')
self.rcs[id][name] = (int(pos[0]), int(pos[1]))
def getSelectPic(self, pos):
for selectPic in self.selectpics:
if pos[1] <= selectPic[0]:
return (selectPic[1], selectPic[2])
return None
def hideRc(self):
self['rc'].hide()
self.hideSelectPics()
def showRc(self):
self['rc'].show()
def selectKey(self, key):
if self.isDefaultRc:
rc = self.rcs[config.misc.rcused.value]
else:
try:
rc = self.rcs[2]
            except KeyError:
rc = self.rcs[config.misc.rcused.value]
        if key in rc:
rcpos = self['rc'].getPosition()
pos = rc[key]
selectPics = self.getSelectPic(pos)
selectPic = None
for x in selectPics[0]:
if x not in self.selectedKeys:
selectPic = x
break
if selectPic is not None:
print 'selectPic:', selectPic
self[selectPic].moveTo(rcpos[0] + pos[0] + selectPics[1][0], rcpos[1] + pos[1] + selectPics[1][1], 1)
self[selectPic].startMoving()
self[selectPic].show()
self.selectedKeys.append(selectPic)
return
def clearSelectedKeys(self):
self.showRc()
self.selectedKeys = []
self.hideSelectPics()
def hideSelectPics(self):
for selectPic in self.selectpics:
for pic in selectPic[1]:
self[pic].hide()
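# readPositions() above expects rcpositions.xml shaped like the sample below:
# one <rc> element per remote id and one <key> per button with an "x,y" pixel
# position. A standalone sketch of the same parse (illustrative values only):
#
#   from xml.etree.ElementTree import fromstring
#   sample = '<rcs><rc id="1"><key name="KEY_OK" pos="100,200"/></rc></rcs>'
#   for rc in fromstring(sample):
#       for key in rc:
#           x, y = key.attrib['pos'].split(',')
#           print("%s %s %s,%s" % (rc.attrib['id'], key.attrib['name'], x, y))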
|
Distrotech/bzr
|
tools/rst2html.py
|
#! /usr/bin/env python
# Originally by Dave Goodger, from the docutils distribution.
#
# Modified for Bazaar to accommodate options containing dots
#
# This file is in the public domain.
"""
A minimal front end to the Docutils Publisher, producing HTML.
"""
try:
import locale
locale.setlocale(locale.LC_ALL, '')
except locale.Error:
    pass
import docutils
from docutils.core import publish_cmdline, default_description
if True: # this is still required in the docutils trunk as of June 2008.
from docutils.parsers.rst.states import Body
# we have some option names that contain dot; which is not allowed by
# python-docutils 0.4-4 -- so monkeypatch in a better pattern
#
# This is a bit gross to patch because all this is built up at load time.
Body.pats['optname'] = r'[a-zA-Z0-9][a-zA-Z0-9._-]*'
Body.pats['longopt'] = r'(--|/)%(optname)s([ =]%(optarg)s)?' % Body.pats
Body.pats['option'] = r'(%(shortopt)s|%(longopt)s)' % Body.pats
Body.patterns['option_marker'] = r'%(option)s(, %(option)s)*( +| ?$)' % Body.pats
description = ('Generates (X)HTML documents from standalone reStructuredText '
'sources. ' + default_description)
# workaround for bug with <xxx id="tags" name="tags"> in IE
from docutils.writers import html4css1
class IESafeHtmlTranslator(html4css1.HTMLTranslator):
def starttag(self, node, tagname, suffix='\n', empty=0, **attributes):
x = html4css1.HTMLTranslator.starttag(self, node, tagname, suffix,
empty, **attributes)
y = x.replace('id="tags"', 'id="tags_"')
y = y.replace('name="tags"', 'name="tags_"')
y = y.replace('href="#tags"', 'href="#tags_"')
return y
mywriter = html4css1.Writer()
mywriter.translator_class = IESafeHtmlTranslator
publish_cmdline(writer=mywriter, description=description)
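# Usage matches docutils' stock rst2html front end, e.g.:
#
#   python rst2html.py README.txt README.html
#
# The same IE-safe writer can also be driven programmatically (a sketch using
# docutils.core.publish_string):
#
#   from docutils.core import publish_string
#   html = publish_string('Hello *world*', writer=mywriter)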
|
NoSmartNoMan/algorithm-1
|
lib/queue.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'shenshijun'
import copy
class Queue(object):
"""
使用Python的list快速实现一个队列
"""
def __init__(self, *arg):
super(Queue, self).__init__()
self.__queue = list(copy.copy(arg))
self.__size = len(self.__queue)
def enter(self, value):
self.__size += 1
self.__queue.append(value)
def exit(self):
if self.__size <= 0:
return None
else:
value = self.__queue[0]
self.__size -= 1
del self.__queue[0]
return value
def __len__(self):
return self.__size
def empty(self):
return self.__size <= 0
def __str__(self):
return "".join(["Queue(list=", str(self.__queue), ",size=", str(self.__size)])
|
llange/pynag
|
pynag/Parsers/main.py
|
# -*- coding: utf-8 -*-
"""Module for parsing main configuration file (nagios.cfg)."""
from pynag.Utils import paths
class MainConfig(object):
""" Generic parser for files in the format of key=value.
This is the format used by nagios.cfg and many other unix configuration files.
"""
def __init__(self, filename=None):
if not filename:
filename = paths.find_main_configuration_file()
self.filename = filename
self.data = self.parse()
    def get(self, attribute, default=None):
        """Return the value of the first key matching attribute, or default."""
        for key, value in self.data:
            if key == attribute:
                return value
        return default
def get_list(self, attribute):
"""Get a list of all values that have attribute_name 'key'."""
return [value for key, value in self.data if key == attribute]
@staticmethod
def _parse_string(string):
result = []
for line in string.splitlines():
# Strip out new line characters
line = line.strip()
# Skip blank lines
if not line:
continue
# Skip comments
if line.startswith("#") or line.startswith(';'):
continue
key, value = line.split('=', 1)
key = key.strip()
value = value.strip()
result.append((key, value))
return result
def parse(self):
with open(self.filename) as file_handle:
data = file_handle.read()
return self._parse_string(data)
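if __name__ == '__main__':
    # _parse_string is a staticmethod and needs no nagios.cfg on disk, so it
    # doubles as a quick demo of the key=value format (illustration only).
    sample = (
        "# comments and blank lines are skipped\n"
        "log_file=/var/log/nagios.log\n"
        "cfg_dir=/etc/nagios/conf.d\n"
        "cfg_dir=/etc/nagios/extra\n"
    )
    data = MainConfig._parse_string(sample)
    print(data[0])  # ('log_file', '/var/log/nagios.log')
    print([value for key, value in data if key == 'cfg_dir'])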
|
nimble0/plover
|
plover/oslayer/processlock.py
|
# Copyright (c) 2012 Hesky Fisher
# See LICENSE.txt for details.
#
# processlock.py - Cross platform global lock to ensure plover only runs once.
"""Global lock to ensure plover only runs once."""
import sys
class LockNotAcquiredException(Exception):
pass
if sys.platform.startswith('win32'):
from ctypes import windll
class PloverLock:
# A GUID from http://createguid.com/
guid = 'plover_{F8C06652-2C51-410B-8D15-C94DF96FC1F9}'
def __init__(self):
pass
def acquire(self):
self.mutex = windll.kernel32.CreateMutexA(None, False, self.guid)
if windll.kernel32.GetLastError() == 0xB7: # ERROR_ALREADY_EXISTS
raise LockNotAcquiredException()
def release(self):
if hasattr(self, 'mutex'):
windll.kernel32.CloseHandle(self.mutex)
del self.mutex
def __del__(self):
self.release()
def __enter__(self):
self.acquire()
        def __exit__(self, exc_type, exc_value, traceback):
self.release()
else:
import fcntl
import os
class PloverLock:
def __init__(self):
# Check the environment for items to make the lockfile unique
# fallback if not found
if 'DISPLAY' in os.environ:
display = os.environ['DISPLAY'][-1:]
else:
display = "0"
if hasattr(os, "uname"):
hostname = os.uname()[1]
else:
import socket
hostname = socket.gethostname()
lock_file_name = os.path.expanduser(
'~/.plover-lock-%s-%s' % (hostname, display))
self.fd = open(lock_file_name, 'w')
def acquire(self):
try:
fcntl.flock(self.fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
except IOError as e:
raise LockNotAcquiredException(str(e))
def release(self):
try:
fcntl.flock(self.fd, fcntl.LOCK_UN)
            except (IOError, ValueError):
                pass
def __del__(self):
self.release()
try:
self.fd.close()
except:
pass
def __enter__(self):
self.acquire()
def __exit__(self, exc_type, exc_value, traceback):
self.release()
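if __name__ == '__main__':
    # Illustration only: the lock is normally held for the process lifetime,
    # but both platform variants also work as context managers.
    try:
        with PloverLock():
            print('lock acquired; a second instance would fail here')
    except LockNotAcquiredException as e:
        print('another instance is running: %s' % e)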
|
SoftwareIntrospectionLab/MininGit
|
pycvsanaly2/extensions/FileTypes.py
|
# Copyright (C) 2008 LibreSoft
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# Authors :
# Carlos Garcia Campos <carlosgc@gsyc.escet.urjc.es>
from pycvsanaly2.Database import (SqliteDatabase, MysqlDatabase,
TableAlreadyExists, statement)
from pycvsanaly2.extensions import (Extension, register_extension,
ExtensionRunError)
from pycvsanaly2.extensions.file_types import guess_file_type
from pycvsanaly2.utils import to_utf8, uri_to_filename
class DBFileType(object):
id_counter = 1
__insert__ = """INSERT INTO file_types (id, file_id, type)
values (?, ?, ?)"""
def __init__(self, id, type, file_id):
if id is None:
self.id = DBFileType.id_counter
DBFileType.id_counter += 1
else:
self.id = id
self.type = to_utf8(type)
self.file_id = file_id
class FileTypes(Extension):
def __init__(self):
self.db = None
def __create_table(self, cnn):
cursor = cnn.cursor()
if isinstance(self.db, SqliteDatabase):
import sqlite3.dbapi2
try:
cursor.execute("CREATE TABLE file_types (" +
"id integer primary key," +
"file_id integer," +
"type varchar" +
")")
except sqlite3.dbapi2.OperationalError:
cursor.close()
raise TableAlreadyExists
elif isinstance(self.db, MysqlDatabase):
import MySQLdb
try:
cursor.execute("CREATE TABLE file_types (" +
"id INT primary key," +
"file_id integer REFERENCES files(id)," +
"type mediumtext" +
") CHARACTER SET=utf8")
            except MySQLdb.OperationalError as e:
if e.args[0] == 1050:
cursor.close()
raise TableAlreadyExists
raise
cnn.commit()
cursor.close()
def __create_indices(self, cnn):
cursor = cnn.cursor()
if isinstance(self.db, MysqlDatabase):
import MySQLdb
try:
cursor.execute("create index parent_id on file_links(parent_id)")
            except MySQLdb.OperationalError as e:
if e.args[0] != 1061:
cursor.close()
raise
try:
cursor.execute("create index repository_id on files(repository_id)")
            except MySQLdb.OperationalError as e:
if e.args[0] != 1061:
cursor.close()
raise
cursor.close()
def __get_files_for_repository(self, repo_id, cursor):
query = "SELECT ft.file_id from file_types ft, files f " + \
"WHERE f.id = ft.file_id and f.repository_id = ?"
cursor.execute(statement(query, self.db.place_holder), (repo_id,))
files = [res[0] for res in cursor.fetchall()]
return files
def run(self, repo, uri, db):
self.db = db
path = uri_to_filename(uri)
if path is not None:
repo_uri = repo.get_uri_for_path(path)
else:
repo_uri = uri
cnn = self.db.connect()
cursor = cnn.cursor()
cursor.execute(statement("SELECT id from repositories where uri = ?",
db.place_holder), (repo_uri,))
repo_id = cursor.fetchone()[0]
files = []
try:
self.__create_table(cnn)
except TableAlreadyExists:
cursor.execute(statement("SELECT max(id) from file_types",
db.place_holder))
id = cursor.fetchone()[0]
if id is not None:
DBFileType.id_counter = id + 1
files = self.__get_files_for_repository(repo_id, cursor)
        except Exception as e:
raise ExtensionRunError(str(e))
self.__create_indices(cnn)
query = """select distinct f.id fid, f.file_name fname
from files f
where f.repository_id = ?
and not exists (select id from file_links where parent_id = f.id)"""
cursor.execute(statement(query, db.place_holder), (repo_id,))
write_cursor = cnn.cursor()
rs = cursor.fetchmany()
while rs:
types = []
for file_id, file_name in rs:
if file_id in files:
continue
type = guess_file_type(file_name)
types.append(DBFileType(None, type, file_id))
if types:
file_types = [(type.id, type.file_id, type.type) \
for type in types]
write_cursor.executemany(statement(DBFileType.__insert__,
self.db.place_holder),
file_types)
rs = cursor.fetchmany()
cnn.commit()
write_cursor.close()
cursor.close()
cnn.close()
def backout(self, repo, uri, db):
update_statement = """delete from file_types where
file_id in (select id from files f
where f.repository_id = ?)"""
self._do_backout(repo, uri, db, update_statement)
register_extension("FileTypes", FileTypes)
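# The run() method above pages through results with cursor.fetchmany() so the
# whole file table never has to fit in memory at once. A standalone sketch of
# that batching pattern (illustration only; an in-memory sqlite database, not
# cvsanaly's real schema):
#
#   import sqlite3
#   cnn = sqlite3.connect(':memory:')
#   cursor = cnn.cursor()
#   cursor.execute("CREATE TABLE files (id integer, file_name varchar)")
#   cursor.executemany("INSERT INTO files values (?, ?)",
#                      [(i, "file%d.py" % i) for i in range(10)])
#   cursor.execute("SELECT id, file_name from files")
#   rs = cursor.fetchmany(4)
#   while rs:
#       for file_id, file_name in rs:
#           print("%s %s" % (file_id, file_name))
#       rs = cursor.fetchmany(4)
#   cnn.close()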
|
hwoods723/script.gamescenter
|
resources/lib/eventdetails.py
|
# -*- coding: utf-8 -*-
'''
script.matchcenter - Football information for Kodi
A program addon that can be mapped to a key on your remote to display football information.
Livescores, Event details, Line-ups, League tables, next and previous matches by team. Follow what
others are saying about the match in twitter.
Copyright (C) 2016 enen92
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import xbmcgui
import xbmc
import sys
import thesportsdb
import random
import threading
import pytz
import re
import ast
import ignoreleagues
from resources.lib.utilities import positions
from resources.lib.utilities import ssutils
from resources.lib.utilities.addonfileio import FileIO
from resources.lib.utilities.common_addon import *
api = thesportsdb.Api("7723457519235")
class detailsDialog(xbmcgui.WindowXMLDialog):
def __init__( self, *args, **kwargs ):
self.isRunning = True
self.match = kwargs["item"]
self.controls = []
def onInit(self):
self.setEventDetails()
def setEventDetails(self):
xbmc.executebuiltin("ClearProperty(has_lineups,Home)")
xbmc.executebuiltin("SetProperty(has_details,1,home)")
#livematch
if 'idEvent' not in self.match.__dict__.keys():
header = self.match.League + " - " + translate(32017) + " " + str(self.match.Round)
matchTime = ssutils.translatematch(self.match.Time)
matchHomeGoals = self.match.HomeGoals
matchAwayGoals = self.match.AwayGoals
matchpercent = 0.0
#match time
if "'" in self.match.Time.lower():
try:
matchpercent = float(int((float(self.match.Time.replace("'",""))/90)*100))
except: pass
else:
if self.match.Time.lower() == "halftime":
matchpercent = 50.0
elif self.match.Time.lower() == "postponed" or self.match.Time.lower() == "not started":
matchpercent = 0.0
elif self.match.Time.lower() == "finished":
matchpercent = 100.0
#match status
if self.match.Time.lower() == "finished": status = os.path.join(addon_path,"resources","img","redstatus.png")
elif "'" in self.match.Time.lower(): status = os.path.join(addon_path,"resources","img","greenstatus.png")
else: status = os.path.join(addon_path,"resources","img","yellowstatus.png")
stadium = self.match.Stadium
matchReferee = self.match.Referee
matchSpectators = self.match.Spectators
matchHomeGoalDetails = self.match.HomeGoalDetails
matchHomeTeamRedCardDetails = self.match.HomeTeamRedCardDetails
matchHomeTeamYellowCardDetails = self.match.HomeTeamYellowCardDetails
matchHomeSubDetails = self.match.HomeSubDetails
matchAwayGoalDetails = self.match.AwayGoalDetails
matchAwayTeamRedCardDetails = self.match.AwayTeamRedCardDetails
matchAwayTeamYellowCardDetails = self.match.AwayTeamYellowCardDetails
matchAwaySubDetails = self.match.AwaySubDetails
#past match
else:
header = self.match.strLeague + " - " + translate(32017) + " " + str(self.match.intRound)
matchTime = ssutils.translatematch("Finished")
matchHomeGoals = self.match.intHomeScore
matchAwayGoals = self.match.intAwayScore
status = os.path.join(addon_path,"resources","img","redstatus.png")
matchpercent = 100.0
stadium = self.match.HomeTeamObj.strStadium
matchReferee = ""
matchSpectators = self.match.intSpectators
matchHomeGoalDetails = self.match.strHomeGoalDetails
matchHomeTeamRedCardDetails = self.match.strHomeRedCards
matchHomeTeamYellowCardDetails = self.match.strHomeYellowCards
matchHomeSubDetails = ""
matchAwayGoalDetails = self.match.strAwayGoalDetails
matchAwayTeamRedCardDetails = self.match.strAwayRedCards
matchAwayTeamYellowCardDetails = self.match.strAwayYellowCards
matchAwaySubDetails = ""
self.getControl(32500).setLabel(header)
if self.match.HomeTeamObj:
if self.match.HomeTeamObj.strTeamBadge:
self.getControl(32501).setImage(self.match.HomeTeamObj.strTeamBadge)
else:
self.getControl(32501).setImage(os.path.join(addon_path,"resources","img","nobadge_placeholder.png"))
if self.match.HomeTeamObj.strTeamJersey:
self.getControl(32502).setImage(self.match.HomeTeamObj.strTeamJersey)
else:
self.getControl(32502).setImage(os.path.join(addon_path,"resources","img","nokit_placeholder.png"))
else:
self.getControl(32501).setImage(os.path.join(addon_path,"resources","img","nobadge_placeholder.png"))
self.getControl(32502).setImage(os.path.join(addon_path,"resources","img","nokit_placeholder.png"))
#Default values for team names. It depends if it is a live object or simple a past event
if ("HomeTeam" in self.match.__dict__.keys() and "AwayTeam" in self.match.__dict__.keys()):
self.getControl(32503).setLabel(self.match.HomeTeam)
self.getControl(32506).setLabel(self.match.AwayTeam)
else:
self.getControl(32503).setLabel(self.match.strHomeTeam)
self.getControl(32506).setLabel(self.match.strAwayTeam)
if show_alternative == "true":
if self.match.HomeTeamObj: self.getControl(32503).setLabel(self.match.HomeTeamObj.AlternativeNameFirst)
if self.match.AwayTeamObj: self.getControl(32506).setLabel(self.match.AwayTeamObj.AlternativeNameFirst)
if self.match.AwayTeamObj:
if self.match.AwayTeamObj.strTeamBadge:
self.getControl(32504).setImage(self.match.AwayTeamObj.strTeamBadge)
else:
self.getControl(32504).setImage(os.path.join(addon_path,"resources","img","nobadge_placeholder.png"))
if self.match.AwayTeamObj.strTeamJersey:
self.getControl(32505).setImage(self.match.AwayTeamObj.strTeamJersey)
else:
self.getControl(32505).setImage(os.path.join(addon_path,"resources","img","nokit_placeholder.png"))
else:
self.getControl(32504).setImage(os.path.join(addon_path,"resources","img","nobadge_placeholder.png"))
self.getControl(32505).setImage(os.path.join(addon_path,"resources","img","nokit_placeholder.png"))
if matchHomeGoals and matchAwayGoals:
self.getControl(32507).setLabel(str(matchHomeGoals)+"-"+str(matchAwayGoals))
if matchTime:
self.getControl(32508).setLabel(matchTime)
#Match Status (yellow,green,red)
self.getControl(32509).setImage(status)
#Match progress bar
self.getControl(32510).setPercent(matchpercent)
#Stadium and location
self.getControl(32511).setLabel(stadium)
#Spectators and Referee
if matchReferee:
self.getControl(32512).setLabel("[COLOR selected]" + translate(32023) + ": [/COLOR]" + matchReferee)
if matchSpectators:
self.getControl(32513).setLabel(matchSpectators + " " + translate(32024))
#Home Team Event Details
vars = [("goal",matchHomeGoalDetails),("redcard",matchHomeTeamRedCardDetails),("yellowcard",matchHomeTeamYellowCardDetails),("sub",matchHomeSubDetails)]
hometeamevents = {}
home_subs = {}
for key,var in vars:
if key and var:
if ";" in var:
events = var.split(";")
if events:
for event in events:
                            stringregex = re.findall(r"(\d+)':(.*)", event)
if stringregex:
for time,strevent in stringregex:
if key == "sub":
if time in home_subs.keys():
if strevent.strip().startswith("in"):
home_subs[time]["in"] = strevent
if "out" in home_subs[time].keys():
if not int(time) in hometeamevents.keys():
hometeamevents[int(time)] = [(key,home_subs[time]["out"] + " |" + home_subs[time]["in"])]
else:
hometeamevents[int(time)].append((key,home_subs[time]["out"] + " |" + home_subs[time]["in"]))
#Remove item from dict (we might have more than one sub associated to a given minute)
home_subs.pop(time, None)
elif strevent.strip().startswith("out"):
home_subs[time]["out"] = strevent
if "in" in home_subs[time].keys():
if not int(time) in hometeamevents.keys():
hometeamevents[int(time)] = [(key,home_subs[time]["out"] + " |" + home_subs[time]["in"])]
else:
hometeamevents[int(time)].append((key,home_subs[time]["out"] + " |" + home_subs[time]["in"]))
#Remove item from dict (we might have more than one sub associated to a given minute)
home_subs.pop(time, None)
else:
home_subs[time] = {}
if strevent.strip().startswith("in"):
home_subs[time]["in"] = strevent
elif strevent.strip().startswith("out"):
home_subs[time]["out"] = strevent
                                    else:
                                        if not strevent: strevent = translate(32025)
                                        if not int(time) in hometeamevents.keys():
                                            hometeamevents[int(time)] = [(key,strevent.strip())]
                                        else:
                                            hometeamevents[int(time)].append((key,strevent.strip()))
#Away Team Event Details
vars = [("goal",matchAwayGoalDetails),("redcard",matchAwayTeamRedCardDetails),("yellowcard",matchAwayTeamYellowCardDetails),("sub",matchAwaySubDetails)]
awayteamevents = {}
away_subs = {}
for key,var in vars:
if key and var:
if ";" in var:
events = var.split(";")
if events:
for event in events:
                            stringregex = re.findall(r"(\d+)':(.*)", event)
if stringregex:
for time,strevent in stringregex:
if key == "sub":
if time in away_subs.keys():
if strevent.strip().startswith("in"):
away_subs[time]["in"] = strevent
if "out" in away_subs[time].keys():
if not int(time) in awayteamevents.keys():
awayteamevents[int(time)] = [(key,away_subs[time]["out"] + " |" + away_subs[time]["in"])]
else:
awayteamevents[int(time)].append((key,away_subs[time]["out"] + " |" + away_subs[time]["in"]))
#Remove item from dict (we might have more than one sub associated to a given minute)
away_subs.pop(time, None)
elif strevent.strip().startswith("out"):
away_subs[time]["out"] = strevent
if "in" in away_subs[time].keys():
if not int(time) in awayteamevents.keys():
awayteamevents[int(time)] = [(key,away_subs[time]["out"] + " |" + away_subs[time]["in"])]
else:
awayteamevents[int(time)].append((key,away_subs[time]["out"] + " |" + away_subs[time]["in"]))
#Remove item from dict (we might have more than one sub associated to a given minute)
away_subs.pop(time, None)
else:
away_subs[time] = {}
if strevent.strip().startswith("in"):
away_subs[time]["in"] = strevent
elif strevent.strip().startswith("out"):
away_subs[time]["out"] = strevent
else:
if not strevent: strevent = translate(32025)
if not int(time) in awayteamevents.keys():
awayteamevents[int(time)] = [(key,strevent.strip())]
else:
awayteamevents[int(time)].append((key,strevent.strip()))
#set home and away event details
#set home
self.getControl(32516).reset()
if hometeamevents:
items = []
ordered_times = reversed(sorted(hometeamevents.keys()))
for time in ordered_times:
eventlist = hometeamevents[time]
for eventtype,eventlabel in eventlist:
item = xbmcgui.ListItem(str(eventtype) + str(eventlabel))
item.setProperty("eventlabel",eventlabel)
item.setProperty("eventimg",os.path.join(addon_path,"resources","img",str(eventtype)+".png"))
item.setProperty("eventtime",str(time) + "':")
items.append(item)
if items:
self.getControl(32516).addItems(items)
#set home and away event details
#set away
self.getControl(32517).reset()
if awayteamevents:
items = []
ordered_times = reversed(sorted(awayteamevents.keys()))
for time in ordered_times:
eventlist = awayteamevents[time]
for eventtype,eventlabel in eventlist:
item = xbmcgui.ListItem(str(eventtype) + str(eventlabel))
item.setProperty("eventlabel",eventlabel)
item.setProperty("eventimg",os.path.join(addon_path,"resources","img",str(eventtype)+".png"))
item.setProperty("eventtime",str(time) + "':")
items.append(item)
if items:
self.getControl(32517).addItems(items)
self.setFocusId(32514)
def setLineUps(self,team):
xbmc.executebuiltin("ClearProperty(has_details,Home)")
self.getControl(32519).setImage(os.path.join(addon_path,"resources","img","pitch.png"))
xbmc.executebuiltin("SetProperty(has_lineups,1,home)")
self.current_lineup = team
if team == "home":
if 'idEvent' not in self.match.__dict__.keys():
if self.match.HomeTeamObj: self.LineUpTeamObj = self.match.HomeTeamObj
else: self.LineUpTeamObj = None
self.teamname = self.match.HomeTeam
self.formationlabel = self.match.HomeTeamFormation
self.lineupgoalkeeper = self.match.HomeLineupGoalkeeper
self.lineupdefenders = self.match.HomeLineupDefense
self.lineupmidfielders = self.match.HomeLineupMidfield
self.lineupforwarders = self.match.HomeLineupForward
self.lineupsubs = self.match.HomeLineupSubstitutes
if self.match.HomeLineupCoach:
self.lineupcoach = self.match.HomeLineupCoach.replace(";","")
else: self.lineupcoach = {}
else:
self.teamname = self.match.strHomeTeam
self.LineUpTeamObj = self.match.HomeTeamObj
self.formationlabel = self.match.strHomeFormation
self.lineupgoalkeeper = self.match.strHomeLineupGoalkeeper
self.lineupdefenders = self.match.strHomeLineupDefense
self.lineupmidfielders = self.match.strHomeLineupMidfield
self.lineupforwarders = self.match.strHomeLineupForward
self.lineupsubs = self.match.strHomeLineupSubstitutes
self.lineupcoach = {}
self.getControl(32527).setLabel(translate(32027))
else:
if 'idEvent' not in self.match.__dict__.keys():
if self.match.AwayTeamObj: self.LineUpTeamObj = self.match.AwayTeamObj
else: self.LineUpTeamObj = None
self.teamname = self.match.AwayTeam
self.formationlabel = self.match.AwayTeamFormation
self.lineupgoalkeeper = self.match.AwayLineupGoalkeeper
self.lineupdefenders = self.match.AwayLineupDefense
self.lineupmidfielders = self.match.AwayLineupMidfield
self.lineupforwarders = self.match.AwayLineupForward
self.lineupsubs = self.match.AwayLineupSubstitutes
if self.match.AwayLineupCoach:
self.lineupcoach = self.match.AwayLineupCoach.replace(";","")
else: self.lineupcoach = {}
else:
self.teamname = self.match.strAwayTeam
self.LineUpTeamObj = self.match.AwayTeamObj
self.formationlabel = self.match.strAwayFormation
self.lineupgoalkeeper = self.match.strAwayLineupGoalkeeper
self.lineupdefenders = self.match.strAwayLineupDefense
self.lineupmidfielders = self.match.strAwayLineupMidfield
self.lineupforwarders = self.match.strAwayLineupForward
self.lineupsubs = self.match.strAwayLineupSubstitutes
self.lineupcoach = {}
self.getControl(32527).setLabel(translate(32028))
#Set Labels for the panel
self.getControl(32522).setLabel(translate(32029) + ":")
self.getControl(32523).setLabel(translate(32030) + ":")
#Set team information
#Name
self.getControl(32521).setLabel(self.teamname)
if self.LineUpTeamObj:
if show_alternative == "true":
self.getControl(32521).setLabel(self.LineUpTeamObj.AlternativeNameFirst)
#Set team Badge
if self.LineUpTeamObj.strTeamBadge:
self.getControl(32520).setImage(self.LineUpTeamObj.strTeamBadge)
else:
self.getControl(32520).setImage(os.path.join(addon_path,"resources","img","nobadge_placeholder.png"))
else:
self.getControl(32520).setImage(os.path.join(addon_path,"resources","img","nobadge_placeholder.png"))
#Set team formation label
if self.formationlabel:
self.getControl(32518).setLabel(self.formationlabel)
#Set coach
if self.lineupcoach:
self.getControl(32526).setLabel("[COLOR selected]" + translate(32026) + ":[/COLOR] " + self.lineupcoach)
#Set Lineup
starters = []
if self.lineupgoalkeeper:
self.lineupgoalkeeper = self.lineupgoalkeeper.replace(";","")
starters.append(self.lineupgoalkeeper)
defenders = []
if self.lineupdefenders:
for player in self.lineupdefenders.split(";"):
if player:
defenders.append(player.strip())
starters.append(player.strip())
self.lineupdefenders = defenders
del defenders
midfielders = []
if self.lineupmidfielders:
for player in self.lineupmidfielders.split(";"):
if player:
midfielders.append(player.strip())
starters.append(player.strip())
self.lineupmidfielders = midfielders
del midfielders
forwarders = []
if self.lineupforwarders:
for player in self.lineupforwarders.split(";"):
if player:
forwarders.append(player.strip())
starters.append(player.strip())
self.getControl(32524).reset()
self.getControl(32524).addItems(starters)
self.lineupforwarders = forwarders
#Set Subs
subs = []
if self.lineupsubs:
for player in self.lineupsubs.split(";"):
if player: subs.append(player.strip())
self.getControl(32525).reset()
self.getControl(32525).addItems(subs)
#Players on pitch
pitch = self.getControl(32519)
pitchPosition = pitch.getPosition()
pitchHeight = pitch.getHeight()
pitchWidth = pitch.getWidth()
if self.formationlabel:
            # literal_eval is a safer drop-in for eval on plain literal data
            formationsjson = ast.literal_eval(FileIO.fileread(json_formations))
formation = formationsjson[self.formationlabel]
else:
formation = None
if formation:
#goalkeeper
goalkeeper = formation["goalkeeper"]
image_size = positions.getShirtHeight(pitchHeight,goalkeeper[1])
image_x = int(goalkeeper[0]*float(pitchWidth))+int(0.15*image_size)
image_y = int(goalkeeper[1]*float(pitchHeight))+int(0.15*image_size)
if self.LineUpTeamObj and self.LineUpTeamObj.strTeamJersey:
image = xbmcgui.ControlImage(image_x,image_y,image_size,image_size, self.LineUpTeamObj.strTeamJersey )
self.controls.append(image)
else:
image = xbmcgui.ControlImage(image_x,image_y,image_size,image_size, os.path.join(addon_path,"resources","img","nokit_placeholder.png") )
self.controls.append(image)
label = positions.getLabel(image, "[B]" + self.lineupgoalkeeper + "[/B]")
self.controls.append(label)
#defenders
defenders = formation["defenders"]
if defenders:
i = 0
for defender in defenders:
image_size = positions.getShirtHeight(pitchHeight,defender[1])
image_x = int(defender[0]*float(pitchWidth))+int(0.15*image_size)
image_y = int(defender[1]*float(pitchHeight))+int(0.15*image_size)
if self.LineUpTeamObj and self.LineUpTeamObj.strTeamJersey:
image = xbmcgui.ControlImage(image_x,image_y,image_size,image_size, self.LineUpTeamObj.strTeamJersey)
self.controls.append(image)
else:
image = xbmcgui.ControlImage(image_x,image_y,image_size,image_size, os.path.join(addon_path,"resources","img","nokit_placeholder.png") )
self.controls.append(image)
label = positions.getLabel(image,"[B]" + self.lineupdefenders[i] + "[/B]")
self.controls.append(label)
i += 1
#midfielders
midfielders = formation["midfielders"]
if midfielders:
i = 0
for midfielder in midfielders:
image_size = positions.getShirtHeight(pitchHeight,midfielder[1])
image_x = int(midfielder[0]*float(pitchWidth))+int(0.15*image_size)
image_y = int(midfielder[1]*float(pitchHeight))+int(0.15*image_size)
if self.LineUpTeamObj and self.LineUpTeamObj.strTeamJersey:
image = xbmcgui.ControlImage(image_x,image_y,image_size,image_size, self.LineUpTeamObj.strTeamJersey)
self.controls.append(image)
else:
image = xbmcgui.ControlImage(image_x,image_y,image_size,image_size, os.path.join(addon_path,"resources","img","nokit_placeholder.png") )
self.controls.append(image)
label = positions.getLabel(image,"[B]" + self.lineupmidfielders[i] + "[/B]")
self.controls.append(label)
i += 1
#forwarders
forwarders = formation["forwarders"]
if forwarders:
i = 0
for forwarder in forwarders:
image_size = positions.getShirtHeight(pitchHeight,forwarder[1])
image_x = int(forwarder[0]*float(pitchWidth))+int(0.15*image_size)
image_y = int(forwarder[1]*float(pitchHeight))+int(0.15*image_size)
if self.LineUpTeamObj and self.LineUpTeamObj.strTeamJersey:
image = xbmcgui.ControlImage(image_x,image_y,image_size,image_size, self.LineUpTeamObj.strTeamJersey)
self.controls.append(image)
else:
image = xbmcgui.ControlImage(image_x,image_y,image_size,image_size, os.path.join(addon_path,"resources","img","nokit_placeholder.png") )
self.controls.append(image)
label = positions.getLabel(image,"[B]" + self.lineupforwarders[i] + "[/B]")
self.controls.append(label)
i += 1
self.addControls(self.controls)
self.setFocusId(32527)
def resetControls(self):
self.removeControls(self.controls)
self.controls = []
def stopRunning(self):
self.isRunning = False
xbmc.executebuiltin("ClearProperty(has_lineups,Home)")
xbmc.executebuiltin("ClearProperty(has_details,Home)")
self.close()
def onAction(self,action):
if action.getId() == 92 or action.getId() == 10:
self.stopRunning()
def onClick(self,controlId):
if controlId == 32514:
if self.controls:
self.resetControls()
self.setLineUps("home")
elif controlId == 32515:
if self.controls:
self.resetControls()
self.setLineUps("away")
elif controlId == 32528:
if self.controls:
self.resetControls()
self.setEventDetails()
elif controlId == 32527:
if self.controls:
self.resetControls()
if self.current_lineup == "home":
self.setLineUps("away")
else:
self.setLineUps("home")
def showDetails(match, matchid = None):
if not match and matchid:
match = api.Lookups().Event(eventid=matchid)
if match:
match = match[0]
match.setHomeTeamObj(api.Lookups().Team(teamid=match.idHomeTeam)[0])
match.setAwayTeamObj(api.Lookups().Team(teamid=match.idAwayTeam)[0])
else:
xbmcgui.Dialog().ok(translate(32000), translate(32064))
sys.exit(0)
main = detailsDialog('script-matchcenter-EventDetails.xml', addon_path,getskinfolder(),'', item=match )
main.doModal()
del main
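# The event-detail strings parsed in setEventDetails arrive as ";"-separated
# entries of the form "<minute>':<text>". A standalone sketch of that parsing
# step (illustration only; the sample string is hypothetical feed data):
#
#   import re
#   raw = "23': J. Doe;67': A. N. Other"
#   for event in raw.split(";"):
#       for minute, text in re.findall(r"(\d+)':(.*)", event):
#           print("%s %s" % (minute, text.strip()))
#   # -> 23 J. Doe
#   # -> 67 A. N. Other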
|
michaelborck/earthmine-qgis
|
qgisplugin/earthmine/earthmine_qgis.py
|
# -*- coding: utf-8 -*-
import copy
import json
import math
from functools import partial
from PyQt4.QtCore import QSettings, QTranslator, qVersion, QCoreApplication, pyqtSignal, QObject, pyqtSlot, Qt, QUrl, \
QRectF, SIGNAL, QPointF, QLineF
from PyQt4.QtGui import QAction, QIcon, QPainter, QPen, QBrush, QColor, QPixmap, QCursor, QPolygon
from PyQt4.QtSvg import QSvgRenderer
# Initialize Qt resources from file resources.py
import resources_rc
# Import the code for the dialog
from viewer import Viewer
from settingsdialog import SettingsDialog
import os.path
import contextlib
from qgis.core import QgsMessageLog, QgsCoordinateTransform, QgsCoordinateReferenceSystem, QgsPoint, QgsRectangle, \
QgsMapLayerRegistry, QGis, QgsGeometry, QgsFeatureRequest, QgsFeature, QgsDistanceArea, QgsRenderContext, QgsMapLayer
from qgis.gui import QgsMapCanvasItem, QgsMapToolEmitPoint, QgsMessageBar, QgsAttributeDialog, QgsRubberBand
class EarthmineSettingsError(Exception):
pass
@contextlib.contextmanager
def settinggroup(settings, name):
settings.beginGroup(name)
yield settings
settings.endGroup()
def maplayers():
return QgsMapLayerRegistry.instance().mapLayers().values()
def layer_by_name(name):
return QgsMapLayerRegistry.instance().mapLayersByName(name)[0]
def layer_by_id(layerid):
return QgsMapLayerRegistry.instance().mapLayer(layerid)
def feature_by_id(layer, featureid):
rq = QgsFeatureRequest(int(featureid))
feature = layer.getFeatures(rq).next()
return feature
def get_color(render, feature):
symbol = render.symbolForFeature(feature)
# name() returns the hex value for the colour
if not symbol:
return "0x00ff00"
name = symbol.color().name()
value = int("0x" + name[1:], 16)
return value
def search_area(units, distancearea, point):
distancearea.sourceCrs()
distance = 100
distance = distancearea.convertMeasurement(distance, QGis.Meters, units, False)
QgsMessageLog.logMessage(str(distance), "Earthmine")
QgsMessageLog.logMessage(str(units), "Earthmine")
geom = QgsGeometry.fromPoint(point)
rect = geom.buffer(distance[0], 10).boundingBox()
return rect
class EarthminePoint(object):
def __init__(self, qgispoint, pointdata):
for k, v in pointdata.items():
setattr(self, k, v)
self.qgispoint = QgsGeometry.fromPoint(qgispoint)
def distance(self, point):
return self.qgispoint.distance(point.qgispoint)
def height_diff(p1, p2):
if not p1.alt or not p2.alt:
return 0
try:
y = p1.alt - p2.alt
return y
    except TypeError:
return 0
def safe_disconnect(signal, method):
try:
signal.disconnect(method)
except TypeError:
pass
class EarthmineLine(object):
def __init__(self, points, stats):
self.points = points
self._stats = stats
self.dist = QgsDistanceArea()
self.convert = self.dist.convertMeasurement
@property
def slope(self):
run = self.total_length_unadjusted
QgsMessageLog.logMessage(str(run), "Earthmine")
height = self._stats['height']
QgsMessageLog.logMessage(str(height), "Earthmine")
if not height:
return 0
try:
return height / run * 100
except ZeroDivisionError:
return 0
@property
def total_length(self):
return self._stats['3D-Total']
@property
def total_length_unadjusted(self):
return self._stats['2D-Total']
@property
def total_height(self):
height = self._stats['height']
if not height:
return 0
return abs(height)
@property
def slope_display(self):
return str(self.slope) + "%"
def stats(self, units, mode):
return self.total_length_display(units, mode), \
self.segment_length_display(units, mode), \
self.slope_display
def total_length_display(self, units, mode):
if mode == "3D":
return self.convert_to(self.total_length, units)
elif mode == "Horizontal":
return self.convert_to(self.total_length_unadjusted, units)
elif mode == "Vertical":
return self.convert_to(abs(self.total_height), units)
else:
return ""
def segment_length_display(self, units, mode):
if mode == "3D":
return self.convert_to(self._stats['3D'], units)
if mode == "Horizontal":
return self.convert_to(self._stats['2D'], units)
elif mode == "Vertical":
return self.convert_to(abs(self.total_height), units)
else:
return ""
def segments(self):
it = zip(self.points, self.points[1:])
for start, end in it:
yield self.segment(start, end)
def segment(self, start, end):
startlength = start.distance(end)
height = height_diff(end, start)
length = math.sqrt(startlength ** 2 + height ** 2)
return dict(length=startlength, adjusted=length, height=height)
def convert_to(self, length, units):
length, _ = self.convert(length, 0, units, False)
length = QgsDistanceArea.textUnit(length, 3, units, False, True)
return length
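# The segment() and slope math above is plain Pythagoras: the 3D length of a
# segment is sqrt(horizontal**2 + height**2) and the slope is rise over run
# as a percentage. A standalone sketch (illustration only, hypothetical
# values):
#
#   import math
#   run, rise = 40.0, 3.0                        # metres
#   adjusted = math.sqrt(run ** 2 + rise ** 2)   # 3D length, ~40.112
#   slope_pct = rise / run * 100                 # 7.5 (percent)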
def to_feature_data(layerid, feature, renderer, transform):
"""
Transform the feature into the data for the viewer to use.
:param feature: QgsFeature
:param renderer:
:param transform:
:return:
"""
def polylinenodes(polyline):
nodes = []
for point in polyline:
point = transform.transform(point, QgsCoordinateTransform.ReverseTransform)
location = dict(lat=point.y(), lng=point.x())
nodes.append(location)
return nodes
geom = feature.geometry()
geomtype = geom.type()
featuredata = []
data = dict(id=feature.id(),
layerid=layerid,
color=get_color(renderer, feature),
geomtype=QGis.vectorGeometryType(geomtype))
if geomtype == QGis.Point:
geom = geom.asPoint()
point = transform.transform(geom, QgsCoordinateTransform.ReverseTransform)
try:
z = feature['Z']
if not z:
z = 0
except KeyError:
z = 0
location = dict(lat=point.y(), lng=point.x(), z=z)
data['nodes'] = [location]
featuredata.append(data)
elif geomtype == QGis.Line:
if geom.isMultipart():
# Copy the data for each polyline
for polyline in geom.asMultiPolyline():
newdata = copy.copy(data)
newdata['nodes'] = polylinenodes(polyline)
featuredata.append(newdata)
else:
data['nodes'] = polylinenodes(geom.asPolyline())
featuredata.append(data)
return featuredata
def get_features_in_area(layer, area, transform, mapsettings):
"""
Return all the features for the given layer in the search area
:param layer: Search layer
:param area: Search area
:param transform:
:return: yields a dict for each feature found in the area
"""
renderer = layer.rendererV2()
layerid = layer.id()
context = QgsRenderContext.fromMapSettings(mapsettings)
renderer.startRender(context, layer.pendingFields())
for feature in layer.getFeatures(QgsFeatureRequest(area)):
featuredata = to_feature_data(layerid, feature, renderer, transform)
for data in featuredata:
yield data
renderer.stopRender(context)
def get_feature_form(layer, feature, isadd=False):
dlg = QgsAttributeDialog(layer, feature, False, None)
dlg.setIsAddDialog(isadd)
return dlg
class EarthMineQGIS(QObject):
"""QGIS Plugin Implementation."""
def __init__(self, iface):
"""Constructor.
:param iface: An interface instance that will be passed to this class
which provides the hook by which you can manipulate the QGIS
application at run time.
:type iface: QgsInterface
"""
# Save reference to the QGIS interface
super(EarthMineQGIS, self).__init__()
self.movingfeature = None
self.iface = iface
self.viewer = None
self.canvas = self.iface.mapCanvas()
self.settings = QSettings()
# initialize plugin directory
self.plugin_dir = os.path.dirname(__file__)
# initialize locale
locale = QSettings().value('locale/userLocale')[0:2]
locale_path = os.path.join(
self.plugin_dir,
'i18n',
'EarthMineQGIS_{}.qm'.format(locale))
if os.path.exists(locale_path):
self.translator = QTranslator()
self.translator.load(locale_path)
if qVersion() > '4.3.3':
QCoreApplication.installTranslator(self.translator)
self.pointtool = QgsMapToolEmitPoint(self.canvas)
self.pointtool.canvasClicked.connect(self.set_viewer_location)
self.settingsdialog = SettingsDialog(self.iface.mainWindow())
self.actions = []
self.menu = self.tr(u'&Earthmine')
self.toolbar = self.iface.addToolBar(u'EarthMineQGIS')
self.toolbar.setObjectName(u'EarthMineQGIS')
self.legend = self.iface.legendInterface()
emcolor = QColor(1, 150, 51)
self.tempband = QgsRubberBand(self.canvas, QGis.Line)
self.tempband.setWidth(5)
self.tempband.setColor(emcolor)
self.tempbandpoints = QgsRubberBand(self.canvas, QGis.Point)
self.tempbandpoints.setWidth(7)
self.tempbandpoints.setColor(emcolor)
self.movingband = QgsRubberBand(self.canvas, QGis.Point)
self.movingband.setWidth(5)
self.movingband.setColor(emcolor)
self.layersignals = []
self.marker = None
def initGui(self):
"""Create the menu entries and toolbar icons inside the QGIS GUI."""
icon_path = ':/icons/settings'
self.add_action(
icon_path,
text=self.tr(u'Show Settings'),
callback=self.show_settings,
parent=self.iface.mainWindow())
icon_path = ':/icons/viewer'
self.add_action(
icon_path,
text=self.tr(u'Earthmine Viewer'),
callback=self.open_viewer,
parent=self.iface.mainWindow())
self.marker = PostionMarker(self.canvas)
self.marker.hide()
self.viewer = Viewer(callbackobject=self)
self.viewer.trackingChanged.connect(self.marker.setTracking)
self.viewer.setLocationTriggered.connect(partial(self.canvas.setMapTool, self.pointtool))
self.viewer.updateFeatures.connect(self.update_earthmine_features)
self.viewer.layerChanged.connect(self.iface.setActiveLayer)
self.viewer.clearLine.connect(self.clear_bands)
self.viewer.closed.connect(self.remove_items)
self.iface.currentLayerChanged.connect(self.viewer.update_current_layer)
cursor = QCursor(QPixmap(":/icons/location"))
self.pointtool.setCursor(cursor)
self.pointtool.setAction(self.viewer.setlocationaction)
def remove_items(self):
self.marker.setTracking(False)
self.disconnect_projectsignals()
self.iface.actionPan().trigger()
def unload(self):
"""Removes the plugin menu item and icon from QGIS GUI."""
self.canvas.scene().removeItem(self.marker)
del self.marker
self.disconnect_projectsignals()
for action in self.actions:
self.iface.removePluginMenu(
self.tr(u'&Earthmine'),
action)
self.iface.removeToolBarIcon(action)
del self.toolbar
self.iface.removeDockWidget(self.viewer)
self.viewer.deleteLater()
def disconnect_projectsignals(self):
safe_disconnect(QgsMapLayerRegistry.instance().layerWasAdded, self.connect_layer_signals)
safe_disconnect(QgsMapLayerRegistry.instance().layersRemoved, self.layers_removed)
safe_disconnect(self.canvas.layersChanged, self.layers_changed)
safe_disconnect(self.iface.projectRead, self.connect_signals)
safe_disconnect(self.canvas.selectionChanged, self.selection_changed)
safe_disconnect(self.canvas.selectionChanged, self.viewer.selection_changed)
def clear_bands(self):
self.tempband.reset(QGis.Line)
self.tempbandpoints.reset(QGis.Point)
def visible_layers(self):
"""
Return the visible layers shown in the map canvas
:return:
"""
return (layer for layer, visible in self.layers_with_states() if visible)
def layers_with_states(self):
for layer in maplayers():
if not layer.type() == QgsMapLayer.VectorLayer:
continue
if not layer.geometryType() in [QGis.Point, QGis.Line]:
continue
yield layer, self.legend.isLayerVisible(layer)
def _layer_feature_added(self, featureid):
layer = self.sender()
if not layer:
return
self.layer_feature_added(layer, featureid)
def layer_feature_added(self, layer, featureid):
if not self.viewer:
return
feature = layer.getFeatures(QgsFeatureRequest(featureid)).next()
renderer = layer.rendererV2()
transform = self.coordinatetransform(layer)
featuredata = to_feature_data(layer.id(), feature, renderer, transform)
geomtype = layer.geometryType()
layerdata = dict(id=layer.id(),
geomtype=QGis.vectorGeometryType(geomtype))
self.viewer.load_features(layerdata, featuredata)
def _layer_feature_delete(self, featureid):
layer = self.sender()
if not layer:
return
self.layer_feature_delete(layer, featureid)
def layer_feature_delete(self, layer, featureid):
if not self.viewer:
return
self.viewer.remove_feature(layer.id(), featureid)
def _layer_geometry_changed(self, featureid, geometry):
layer = self.sender()
if not layer:
return
self.layer_geometry_changed(layer, featureid, geometry)
def layer_geometry_changed(self, layer, featureid, geometry):
if not self.viewer:
return
geomtype = layer.geometryType()
if geomtype == QGis.Point:
geom = geometry.asPoint()
transform = self.coordinatetransform(layer)
point = transform.transform(geom, QgsCoordinateTransform.ReverseTransform)
location = dict(lat=point.y(), lng=point.x())
self.viewer.edit_feature(layer.id(), featureid, [location])
elif geomtype == QGis.Line:
self.layer_feature_delete(layer, featureid)
self.layer_feature_added(layer, featureid)
def connect_layer_signals(self, layer):
if not layer.type() == QgsMapLayer.VectorLayer:
return
layer.featureAdded.connect(self._layer_feature_added)
layer.featureDeleted.connect(self._layer_feature_delete)
layer.editingStarted.connect(self.layer_editstate_changed)
layer.editingStopped.connect(self.layer_editstate_changed)
# HACK The new style doesn't work here
# http://hub.qgis.org/issues/6573
signal = SIGNAL("geometryChanged(QgsFeatureId, QgsGeometry&)")
self.connect(layer, signal, self._layer_geometry_changed)
self.load_layer_features(layers=[layer])
def layer_editstate_changed(self):
layer = self.sender()
if layer == self.iface.activeLayer():
self.viewer.layer_changed(layer)
def disconnect_signals(self):
self.disconnect_projectsignals()
for layer in maplayers():
if not layer.type() == QgsMapLayer.VectorLayer:
continue
safe_disconnect(layer.featureAdded, self._layer_feature_added)
safe_disconnect(layer.featureDeleted, self._layer_feature_delete)
safe_disconnect(layer.editingStarted, self.layer_editstate_changed)
safe_disconnect(layer.editingStopped, self.layer_editstate_changed)
# HACK The new style doesn't work here
# http://hub.qgis.org/issues/6573
signal = SIGNAL("geometryChanged(QgsFeatureId, QgsGeometry&)")
self.disconnect(layer, signal, self._layer_geometry_changed)
def connect_signals(self):
for layer in maplayers():
self.connect_layer_signals(layer)
self.center_on_canvas()
def set_viewer_location(self, point, mousebutton):
transform = self.coordinatetransform()
point = transform.transform(point, QgsCoordinateTransform.ReverseTransform)
self.viewer.set_location(point)
def distancearea(self):
area = QgsDistanceArea()
dest = self.canvas.mapRenderer().destinationCrs()
area.setSourceCrs(dest)
return area, dest.mapUnits()
def coordinatetransform(self, layer=None):
"""
Return the transform from WGS84 to the layer CRS, or to the map canvas CRS when no layer is given.
"""
source = QgsCoordinateReferenceSystem()
source.createFromWkt(
'GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]]')
if not layer:
dest = self.canvas.mapRenderer().destinationCrs()
else:
dest = layer.crs()
transform = QgsCoordinateTransform(source, dest)
return transform
def earthmine_settings(self):
settings = {}
with settinggroup(self.settings, "plugins/Earthmine"):
for key in ['serviceUrl', 'baseDataUrl', "apiKey", 'secretKey', 'viewerUrl']:
if not self.settings.contains(key):
raise EarthmineSettingsError("{} not set".format(key))
value = self.settings.value(key, type=str)
if value is None:
raise EarthmineSettingsError("{} not set".format(key))
settings[key] = value
return settings
@pyqtSlot()
def ready(self):
"""
Called when the viewer is ready to be started. At this point the viewer hasn't been loaded
so no other methods apart from startViewer will be handled.
"""
settings = self.earthmine_settings()
self.viewer.startViewer(settings)
@pyqtSlot()
def viewerReady(self):
"""
Called once the viewer is loaded and ready to get location events.
"""
self.disconnect_signals()
self.connect_signals()
self.iface.projectRead.connect(self.connect_signals)
self.canvas.layersChanged.connect(self.layers_changed)
self.canvas.selectionChanged.connect(self.selection_changed)
self.canvas.selectionChanged.connect(self.viewer.selection_changed)
QgsMapLayerRegistry.instance().layersRemoved.connect(self.layers_removed)
QgsMapLayerRegistry.instance().layerWasAdded.connect(self.connect_layer_signals)
self.center_on_canvas()
self.viewer.activelayercombo.setLayer(self.iface.activeLayer())
def center_on_canvas(self):
point = self.canvas.extent().center()
transform = self.coordinatetransform()
point = transform.transform(point, QgsCoordinateTransform.ReverseTransform)
self.viewer.set_location(point)
self.viewer.infoaction.toggle()
def selection_changed(self, layer):
ids = [feature.id() for feature in layer.selectedFeatures()]
if not ids:
self.viewer.clear_selection(layer.id())
else:
self.viewer.set_selection(layer.id(), ids)
def layers_changed(self):
layerstates = self.layers_with_states()
for layer, visible in layerstates:
layerid = layer.id()
viewerloaded = self.viewer.layer_loaded(layerid)
QgsMessageLog.instance().logMessage(layerid, "Earthmine")
QgsMessageLog.instance().logMessage("Viewer State:" + str(viewerloaded), "Earthmine")
QgsMessageLog.instance().logMessage("QGIS State:" + str(visible), "Earthmine")
if (viewerloaded and visible) or (not viewerloaded and not visible):
QgsMessageLog.instance().logMessage("Ignoring as states match", "Earthmine")
continue
if viewerloaded and not visible:
QgsMessageLog.instance().logMessage("Clearing layer because viewer loaded and disabled in QGIS",
"Earthmine")
self.viewer.clear_layer_features(layerid)
continue
if not viewerloaded and visible:
QgsMessageLog.instance().logMessage("Loading layer", "Earthmine")
self.load_layer_features(layers=[layer])
continue
def layers_removed(self, layers):
for layerid in layers:
self.viewer.clear_layer_features(layerid)
@pyqtSlot(str, float, float)
def viewChanged(self, event, yaw, angle):
self.marker.setAngle(angle)
self.marker.setYaw(yaw)
@pyqtSlot(str, str)
def getInfo(self, layerid, featureid):
featureid = int(featureid)
activelayer = self.iface.activeLayer()
if not activelayer:
return
activetool = self.viewer.active_tool()
if not activetool in ["Info", "Select"]:
return
# Only show information for the active layer
if not layerid == activelayer.id():
return
layer = layer_by_id(layerid)
if activetool == "Select":
layer.setSelectedFeatures([featureid])
elif activetool == "Info":
rq = QgsFeatureRequest(featureid)
feature = layer.getFeatures(rq).next()
dlg = get_feature_form(layer, feature)
if dlg.dialog().exec_():
self.canvas.refresh()
@pyqtSlot(str, str, float, float, bool)
def featureMoved(self, layerid, featureid, lat, lng, end):
layer = layer_by_id(layerid)
transform = self.coordinatetransform(layer)
point = transform.transform(lng, lat)
if not end:
self.movingband.show()
self.movingband.setToGeometry(QgsGeometry.fromPoint(point), layer)
self.movingband.updatePosition()
self.movingband.update()
else:
self.movingband.hide()
feature = feature_by_id(layer, featureid)
startpoint = feature.geometry().asPoint()
dx = point.x() - startpoint.x()
dy = point.y() - startpoint.y()
layer.beginEditCommand("Feature Moved")
# Block signals for this move, as the geometry changed signal would otherwise re-add the geometry.
layer.blockSignals(True)
layer.translateFeature(feature.id(), dx, dy)
layer.blockSignals(False)
self.canvas.refresh()
layer.endEditCommand()
@pyqtSlot(str, str)
def onError(self, message, stacktrace=None):
self.iface.messageBar().pushMessage("Earthmine", message, QgsMessageBar.WARNING)
QgsMessageLog.logMessage(stacktrace, "Earthmine")
@pyqtSlot(float, float, float)
def addPoint(self, lat, lng, z):
layer = self.viewer.active_layer
if not layer.isEditable():
self.iface.messageBar().pushMessage("Earthmine",
"Selected layer isn't editable. Please enable edit mode to add features",
duration=3, level=QgsMessageBar.WARNING)
return
transform = self.coordinatetransform(layer)
point = transform.transform(lng, lat)
geom = QgsGeometry.fromPoint(point)
self.add_feature(layer, geom, z)
def add_feature(self, layer, geom, z=None):
feature = QgsFeature(layer.pendingFields())
if z and self.viewer.copyZvalue:
try:
feature['Z'] = z
except KeyError:
QgsMessageLog.logMessage("No Z field found on layer {}".format(layer.name()), "Earthmine")
feature.setGeometry(geom)
dlg = get_feature_form(layer, feature, isadd=True)
if dlg.dialog().exec_():
self.canvas.refresh()
@pyqtSlot(str, bool, str)
def drawLine(self, points, end, stats):
points = json.loads(points)
stats = json.loads(stats)
QgsMessageLog.logMessage(str(stats), "Earthmine")
self.tempband.reset(QGis.Line)
self.tempbandpoints.reset(QGis.Point)
color = QColor(self.viewer.current_action_color)
self.tempband.setColor(color)
self.tempbandpoints.setColor(color)
layer = self.viewer.active_layer
transform = self.coordinatetransform(layer)
earthminepoints = []
for point in points:
newpoint = transform.transform(point['lng'], point['lat'])
self.tempband.addPoint(newpoint)
self.tempbandpoints.addPoint(newpoint)
empoint = EarthminePoint(newpoint, point)
earthminepoints.append(empoint)
if end and not self.viewer.mode == "Vertical":
geom = self.tempband.asGeometry()
self.add_feature(layer, geom)
self.clear_bands()
self.viewer.geom = EarthmineLine(earthminepoints, stats)
self.tempband.show()
self.tempbandpoints.show()
@pyqtSlot(str, str, str, float)
def locationChanged(self, lat, lng, yaw, angle):
transform = self.coordinatetransform()
point = transform.transform(float(lng), float(lat))
self.marker.setCenter(point)
yaw = float(yaw)
self.marker.setAngle(angle)
self.marker.setYaw(yaw)
self.marker.setTracking(self.viewer.tracking)
if self.marker.tracking:
rect = QgsRectangle(point, point)
extentlimit = QgsRectangle(self.canvas.extent())
extentlimit.scale(0.95)
if not extentlimit.contains(point):
self.canvas.setExtent(rect)
self.canvas.refresh()
# Clear old features
self.viewer.clear_features()
self.load_layer_features(point)
def update_earthmine_features(self, viewfeatures):
self.viewer.clear_features()
if viewfeatures:
self.load_layer_features()
def load_layer_features(self, point=None, layers=None):
# TODO Move this logic into the viewer and let it track its own position
if point is None and self.marker.map_pos is None:
return
if point is None:
point = self.marker.map_pos
area, units = self.distancearea()
rect = search_area(units, area, point)
if layers is None:
layers = self.visible_layers()
for layer in layers:
transform = self.coordinatetransform(layer)
# Transform the rect
source = self.canvas.mapRenderer().destinationCrs()
dest = layer.crs()
recttransform = QgsCoordinateTransform(source, dest)
rect = recttransform.transformBoundingBox(rect)
features = list(get_features_in_area(layer, rect, transform, self.canvas.mapSettings()))
geomtype = layer.geometryType()
layerdata = dict(id=layer.id(),
geomtype=QGis.vectorGeometryType(geomtype))
self.viewer.load_features(layerdata, features)
# noinspection PyMethodMayBeStatic
def tr(self, message):
"""Get the translation for a string using Qt translation API.
We implement this ourselves since we do not inherit QObject.
:param message: String for translation.
:type message: str, QString
:returns: Translated version of message.
:rtype: QString
"""
# noinspection PyTypeChecker,PyArgumentList,PyCallByClass
return QCoreApplication.translate('EarthMineQGIS', message)
def add_action(
self,
icon_path,
text,
callback,
enabled_flag=True,
add_to_menu=True,
add_to_toolbar=True,
status_tip=None,
whats_this=None,
parent=None):
"""Add a toolbar icon to the InaSAFE toolbar.
:param icon_path: Path to the icon for this action. Can be a resource
path (e.g. ':/plugins/foo/bar.png') or a normal file system path.
:type icon_path: str
:param text: Text that should be shown in menu items for this action.
:type text: str
:param callback: Function to be called when the action is triggered.
:type callback: function
:param enabled_flag: A flag indicating if the action should be enabled
by default. Defaults to True.
:type enabled_flag: bool
:param add_to_menu: Flag indicating whether the action should also
be added to the menu. Defaults to True.
:type add_to_menu: bool
:param add_to_toolbar: Flag indicating whether the action should also
be added to the toolbar. Defaults to True.
:type add_to_toolbar: bool
:param status_tip: Optional text to show in a popup when the mouse pointer
hovers over the action.
:type status_tip: str
:param parent: Parent widget for the new action. Defaults None.
:type parent: QWidget
:param whats_this: Optional "What's This" help text to show when the
mouse pointer hovers over the action.
:returns: The action that was created. Note that the action is also
added to self.actions list.
:rtype: QAction
"""
icon = QIcon(icon_path)
action = QAction(icon, text, parent)
action.triggered.connect(callback)
action.setEnabled(enabled_flag)
if status_tip is not None:
action.setStatusTip(status_tip)
if whats_this is not None:
action.setWhatsThis(whats_this)
if add_to_toolbar:
self.toolbar.addAction(action)
if add_to_menu:
self.iface.addPluginToMenu(
self.menu,
action)
self.actions.append(action)
return action
def open_viewer(self):
"""Run method that performs all the real work"""
try:
settings = self.earthmine_settings()
except EarthmineSettingsError as ex:
self.onError(ex.message)
self.show_settings()
return
url = settings["viewerUrl"]
if not url.startswith("http"):
url = url.replace("\\\\", "\\")
url = QUrl.fromLocalFile(url)
else:
url = QUrl(url)
if not self.viewer.isVisible():
self.iface.addDockWidget(Qt.RightDockWidgetArea, self.viewer)
self.viewer.loadviewer(url)
def show_settings(self):
self.settingsdialog.show()
class PostionMarker(QgsMapCanvasItem):
"""
Position marker for the current location in the viewer.
"""
def __init__(self, canvas):
self._yaw = 0
self._angle = 0
self.size = 8
self.halfsize = self.size / 2.0
super(PostionMarker, self).__init__(canvas)
self.canvas = canvas
colorvalue = "#019633"
colour = QColor(colorvalue)
colour.setAlpha(50)
self.conebrush = QBrush(colour)
pencolour = QColor(colorvalue)
self.pointpen = QPen(pencolour, 1)
self.solidbrush = QBrush(pencolour)
self.map_pos = QgsPoint()
self.tracking = False
def setAngle(self, angle):
self._angle = angle
self.update()
def setSize(self, size):
self.size = size
self.halfsize = self.size / 2.0
self.update()
def setYaw(self, yaw):
self._yaw = yaw
self.update()
def paint(self, painter, option, widget):
if not self.tracking:
return
halfangle = self._angle / 2
painter.save()
painter.setRenderHint(QPainter.Antialiasing)
painter.setBrush(self.solidbrush)
painter.setPen(self.pointpen)
painter.rotate(-90 + self._yaw)
painter.drawEllipse(QPointF(0, 0), self.size, self.size)
painter.setBrush(self.conebrush)
painter.drawPie(self.boundingRect(), halfangle * 16, -self._angle * 16)
# painter.drawRect(self.boundingRect())
painter.restore()
def distancearea(self):
area = QgsDistanceArea()
dest = self.canvas.mapRenderer().destinationCrs()
area.setSourceCrs(dest)
return area, dest.mapUnits()
def boundingRect(self):
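# Descriptive note: the marker is drawn with a nominal 15 m radius, so we
# convert that distance from metres into map units, project it to canvas
# pixels, and build a square bounding rect around the origin from it.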
distance = 15
area, units = self.distancearea()
distance = area.convertMeasurement(distance, QGis.Meters, units, False)
s = self.toCanvasCoordinates(QgsPoint(0, 0))
e = self.toCanvasCoordinates(QgsPoint(0, distance[0]))
length = s.y() - e.y()
half = length / 2
bounding = QRectF(-half * 2.0, -half * 2.0, 2.0 * length, 2.0 * length)
return bounding
def setCenter(self, map_pos):
self.map_pos = map_pos
self.setPos(self.toCanvasCoordinates(map_pos))
def updatePosition(self):
self.setCenter(self.map_pos)
self.setVisible(self.tracking)
def setTracking(self, tracking):
self.tracking = tracking
self.setVisible(tracking)
|
phracek/devassistant
|
test/fixtures/files/crt/commands/a.py
|
from devassistant.command_runners import CommandRunner
from devassistant.logger import logger
class CR1(CommandRunner):
@classmethod
def matches(cls, c):
return c.comm_type == 'barbarbar'
@classmethod
def run(cls, c):
logger.info('CR1: Doing something ...')
x = c.input_res + 'bar'
return (True, x)
class CR2(CommandRunner):
@classmethod
def matches(cls, c):
return c.comm_type == 'spamspamspam'
@classmethod
def run(cls, c):
logger.info('CR2: Doing something ...')
x = c.input_res + 'spam'
return (True, x)
|
openhdf/enigma2-wetek
|
lib/python/Screens/VideoWizard.py
|
from boxbranding import getBoxType, getMachineName, getMachineBuild, getBrandOEM, getMachineBrand
from Screens.Wizard import WizardSummary
from Screens.WizardLanguage import WizardLanguage
from Screens.Rc import Rc
from Components.AVSwitch import iAVSwitch
from Screens.Screen import Screen
from Components.Pixmap import Pixmap
from Components.config import config, ConfigBoolean, configfile
from Tools.Directories import resolveFilename, SCOPE_SKIN, SCOPE_ACTIVE_SKIN
from Tools.HardwareInfo import HardwareInfo
config.misc.showtestcard = ConfigBoolean(default = False)
boxtype = getBoxType()
has_rca = False
has_dvi = False
if boxtype in ('formuler3', 'enibox', 'mago', 'x2plus', 'sf3038', 'sf108', 'twinboxlcd', 'atemio6000', 'atemio6100', 'atemio6200', 'mbminiplus', 'vp7358ci', 'gbquad', 'gbquadplus', 'et5x00', 'et6000', 'et7000', 'et7500', 'et8500', 'classm', 'axodin', 'axodinc', 'genius', 'evo', 'galaxym6', 'geniuse3hd', 'evoe3hd', 'axase3', 'axase3c', 'starsatlx', 'mixosf7', 'mixoslumi', 'tmnano', 'azboxme', 'azboxminime', 'optimussos1', 'optimussos2', 'gb800seplus', 'gb800ueplus', 'gbultrase', 'gbultraue', 'sezam1000hd', 'ixussone', 'ixusszero', 'enfinity', 'marvel1', 'bre2ze', 'force1', 'force1plus', 'worldvisionf1', 'optimussos1plus', 'optimussos2plus', 'optimussos3plus', 'formuler1', 'tmnano2super', 'vusolose', 'vuzero', 'tyrant') or getMachineBrand() == 'Zgemma':
has_rca = True
if boxtype == 'dm8000' or boxtype == 'dm800':
has_dvi = True
class VideoWizardSummary(WizardSummary):
skin = (
"""<screen name="VideoWizardSummary" position="0,0" size="132,64" id="1">
<widget name="text" position="6,4" size="120,40" font="Regular;12" transparent="1" />
<widget source="parent.list" render="Label" position="6,40" size="120,21" font="Regular;14">
<convert type="StringListSelection" />
</widget>
<!--widget name="pic" pixmap="%s" position="6,22" zPosition="10" size="64,64" transparent="1" alphatest="on"/-->
</screen>""",
"""<screen name="VideoWizardSummary" position="0,0" size="96,64" id="2">
<widget name="text" position="0,4" size="96,40" font="Regular;12" transparent="1" />
<widget source="parent.list" render="Label" position="0,40" size="96,21" font="Regular;14">
<convert type="StringListSelection" />
</widget>
<!--widget name="pic" pixmap="%s" position="0,22" zPosition="10" size="64,64" transparent="1" alphatest="on"/-->
</screen>""")
#% (resolveFilename(SCOPE_PLUGINS, "SystemPlugins/Videomode/lcd_Scart.png"))
def __init__(self, session, parent):
WizardSummary.__init__(self, session, parent)
#self["pic"] = Pixmap()
def setLCDPicCallback(self):
self.parent.setLCDTextCallback(self.setText)
def setLCDPic(self, file):
self["pic"].instance.setPixmapFromFile(file)
class VideoWizard(WizardLanguage, Rc):
skin = """
<screen position="fill" title="Welcome..." flags="wfNoBorder" >
<panel name="WizardMarginsTemplate"/>
<panel name="WizardPictureLangTemplate"/>
<panel name="RemoteControlTemplate"/>
<panel position="left" size="10,*" />
<panel position="right" size="10,*" />
<panel position="fill">
<widget name="text" position="top" size="*,270" font="Regular;23" valign="center" />
<panel position="fill">
<panel position="left" size="150,*">
<widget name="portpic" position="top" zPosition="10" size="150,150" transparent="1" alphatest="on"/>
</panel>
<panel position="fill" layout="stack">
<widget source="list" render="Listbox" position="fill" scrollbarMode="showOnDemand" >
<convert type="StringList" />
</widget>
<!--<widget name="config" position="fill" zPosition="1" scrollbarMode="showOnDemand" />-->
</panel>
</panel>
</panel>
</screen>"""
def __init__(self, session):
# FIXME anyone knows how to use relative paths from the plugin's directory?
self.xmlfile = resolveFilename(SCOPE_SKIN, "videowizard.xml")
self.hw = iAVSwitch
WizardLanguage.__init__(self, session, showSteps = False, showStepSlider = False)
Rc.__init__(self)
self["wizard"] = Pixmap()
self["portpic"] = Pixmap()
Screen.setTitle(self, _("Welcome..."))
self.port = None
self.mode = None
self.rate = None
def createSummary(self):
return VideoWizardSummary
def markDone(self):
self.hw.saveMode(self.port, self.mode, self.rate)
config.misc.videowizardenabled.value = 0
config.misc.videowizardenabled.save()
configfile.save()
def listInputChannels(self):
hw_type = HardwareInfo().get_device_name()
has_hdmi = HardwareInfo().has_hdmi()
list = []
for port in self.hw.getPortList():
if self.hw.isPortUsed(port):
descr = port
if descr == 'HDMI' and has_dvi:
descr = 'DVI'
if descr == 'Scart' and has_rca:
descr = 'RCA'
if port != "DVI-PC":
list.append((descr,port))
list.sort(key = lambda x: x[0])
print "listInputChannels:", list
return list
def inputSelectionMade(self, index):
print "inputSelectionMade:", index
self.port = index
self.inputSelect(index)
def inputSelectionMoved(self):
hw_type = HardwareInfo().get_device_name()
has_hdmi = HardwareInfo().has_hdmi()
print "input selection moved:", self.selection
self.inputSelect(self.selection)
if self["portpic"].instance is not None:
picname = self.selection
if picname == 'HDMI' and has_dvi:
picname = "DVI"
if picname == 'Scart' and has_rca:
picname = "RCA"
self["portpic"].instance.setPixmapFromFile(resolveFilename(SCOPE_ACTIVE_SKIN, "icons/" + picname + ".png"))
def inputSelect(self, port):
print "inputSelect:", port
modeList = self.hw.getModeList(self.selection)
print "modeList:", modeList
self.port = port
if len(modeList) > 0:
ratesList = self.listRates(modeList[0][0])
self.hw.setMode(port = port, mode = modeList[0][0], rate = ratesList[0][0])
def listModes(self):
list = []
print "modes for port", self.port
for mode in self.hw.getModeList(self.port):
#if mode[0] != "PC":
list.append((mode[0], mode[0]))
print "modeslist:", list
list.sort()
return list
def modeSelectionMade(self, index):
print "modeSelectionMade:", index
self.mode = index
self.modeSelect(index)
def modeSelectionMoved(self):
print "mode selection moved:", self.selection
self.modeSelect(self.selection)
def modeSelect(self, mode):
ratesList = self.listRates(mode)
print "ratesList:", ratesList
if self.port == "HDMI" and mode in ("720p", "1080i", "1080p"):
self.rate = "multi"
self.hw.setMode(port = self.port, mode = mode, rate = "multi")
else:
self.hw.setMode(port = self.port, mode = mode, rate = ratesList[0][0])
def listRates(self, querymode = None):
if querymode is None:
querymode = self.mode
list = []
print "modes for port", self.port, "and mode", querymode
for mode in self.hw.getModeList(self.port):
print mode
if mode[0] == querymode:
for rate in mode[1]:
if self.port == "DVI-PC":
print "rate:", rate
if rate == "640x480":
list.insert(0, (rate, rate))
continue
list.append((rate, rate))
return list
def rateSelectionMade(self, index):
print "rateSelectionMade:", index
self.rate = index
self.rateSelect(index)
def rateSelectionMoved(self):
print "rate selection moved:", self.selection
self.rateSelect(self.selection)
def rateSelect(self, rate):
self.hw.setMode(port = self.port, mode = self.mode, rate = rate)
def showTestCard(self, selection = None):
if selection is None:
selection = self.selection
print "set config.misc.showtestcard to", {'yes': True, 'no': False}[selection]
if selection == "yes":
config.misc.showtestcard.value = True
else:
config.misc.showtestcard.value = False
def keyNumberGlobal(self, number):
if number in (1,2,3):
if number == 1:
self.hw.saveMode("HDMI", "720p", "multi")
elif number == 2:
self.hw.saveMode("HDMI", "1080i", "multi")
elif number == 3:
self.hw.saveMode("Scart", "Multi", "multi")
self.hw.setConfiguredMode()
self.close()
WizardLanguage.keyNumberGlobal(self, number)
|
fstltna/PyImp
|
src/empPath.py
|
# Copyright (C) 1998 Ulf Larsson
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import string
import empDb
import empSector
import math
move_directions = "ujnbgy"
move_reverse_directions ="bgyujn"
def norm_coords( coords, world_size ) :
""" normalize coordinates according to world size"""
x , y = coords
size_x, size_y = world_size
return ( ( x + size_x / 2 ) % size_x - size_x / 2,
( y + size_y / 2 ) % size_y - size_y / 2 )
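## Worked example (hypothetical 32x32 world): coordinates wrap so that
## they stay within half the world size of the origin, e.g.
##     norm_coords( ( 31, 0 ), ( 32, 32 ) )  ==  ( -1, 0 )
##     norm_coords( ( -17, 5 ), ( 32, 32 ) ) ==  ( 15, 5 )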
def neighbours( coords ) :
""" neighbours ordered as directions """
world = empDb.megaDB['version']['worldsize']
x , y = coords
return [ norm_coords( ( x + 1, y - 1) , world ) ,
norm_coords( ( x + 2, y ) , world ) ,
norm_coords( ( x + 1, y + 1) , world ) ,
norm_coords( ( x - 1, y + 1) , world ) ,
norm_coords( ( x - 2, y ) , world ) ,
norm_coords( ( x - 1, y - 1) , world ) ]
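## For example, the six neighbours of ( 0, 0 ) in a 32x32 world,
## listed in the same order as move_directions "ujnbgy":
##     [ ( 1, -1 ), ( 2, 0 ), ( 1, 1 ), ( -1, 1 ), ( -2, 0 ), ( -1, -1 ) ]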
def coords_to_str( coords ) :
x , y = coords
return `x` + ',' + `y`
## MobCost is used in the bestpath algorithm.
## Bestpath for navigating, marching and
## exploring each needs its own version of
## this class.
class MobCost:
""" Mobility cost using the move cmd """
def __init__( self ) :
pass
def cost( self, coords ) :
""" cost for moving into sector """
result = 2.0 * empSector.infinite_mob_cost
sect = empDb.megaDB['SECTOR'].get( coords , {} )
if sect and sect.get( 'owner' ) == empDb.CN_OWNED :
result = empSector.mob_cost( sect )
return result
class ExplMobCost:
""" Mobility cost using the expl cmd """
def __init__( self ) :
pass
def cost( self, coords ) :
""" cost for moving into sector """
result = 2.0 * empSector.infinite_mob_cost
sect = empDb.megaDB['SECTOR'].get( coords, {} )
if sect and ( sect.get( 'owner' ) == empDb.CN_OWNED
or empSector.is_explorable_into( sect ) ) :
result = empSector.mob_cost( sect )
return result
## Path is used in bestpath calculation to keep track of
## start point, end point , path string and path cost.
## These are public members right now but should be
## private.
class Path :
""" Empire path between sectors in a hex map """
def __init__( self, sect, mob_cost ) :
self.start = sect
self.end = sect
self.directions = ""
self.cost = mob_cost
def append( self, tail, dir ) :
""" concatinate two paths """
result = Path( self.start, self.cost + tail.cost )
result.directions = self.directions + dir + tail.directions
result.end = tail.end
return result
def post_extend( self, sect , mob_cost , dir ) :
""" add a step at the end of the path """
result = Path( self.start, self.cost + mob_cost )
result.directions = self.directions + dir
result.end = sect
return result
def pre_extend( self, sect , mob_cost , dir ) :
""" add a step at the beginning of the path """
result = Path( sect, self.cost + mob_cost )
result.directions = dir + self.directions
result.end = self.end;
return result
## Paths -- bestpath generator between sets of sectors.
##
##
## Paths has the following data members
## __mob : mobility cost object
## __visited : dictionary of sectors we have calculated a path to
## __heads : list of paths starting at a source sector
## __tails : list of paths ending at a destination sector
## __complete : list of paths starting at a source sector
## and ends at a destination sector.
##
## __heads, __tails and __complete are sorted wrt the path cost
##
## Paths has two main parts. One is to build up paths
## and the second part deals with removing a source or
## destination sector.
##
## Building up paths is done by taking the best head (or tail) path
## and creating new paths to the neighbours of the path's end point.
## If the neighbouring sector is *not* in __visited we add the new
## path, otherwise we try to create a __complete path. This ends
## when the total cost of the best head and tail path is higher
## than the best complete path.
##
## Removing a source or destination sector is done by looping through
## the __visited sectors and removing those originating from the removed sector.
## Same for the lists of paths.
##
##
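## A minimal usage sketch (sector coordinates are hypothetical, and
## empDb.megaDB must already hold sector data for MobCost to price):
##
##     paths = Paths( [ ( 0, 0 ) ], [ ( 4, 2 ) ], MobCost() )
##     if not paths.empty() :
##         best = paths.best()
##         print path_str( best )   # start, directions and cost of the best path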
class Paths :
""" Paths between two sets of sectors """
def __init__( self, from_sect, to_sect, mcost ):
self.__mob = mcost
self.__visited = {}
self.__starting_at = {}
self.__ending_at = {}
self.__heads = []
self.__tails = []
self.__complete = []
for sect in from_sect:
path = Path( sect, 0.0 )
self.__visited[ path.start ] = ( path , 1 )
self.__starting_at[ path.start ] = [ path ]
self.__insert_path( self.__heads , path )
for sect in to_sect :
path = Path( sect, self.__mob.cost( sect ) )
self.__visited[ path.end ] = ( path , 0 )
self.__ending_at[ path.end ] = [ path ]
self.__insert_path( self.__tails , path )
self.__make_paths()
def empty( self ) :
""" no path exits """
return ( len( self.__complete ) == 0
or self.__complete[ 0 ].cost >= empSector.infinite_mob_cost )
def best( self ) :
""" the best path ( lowest cost ) between any two sectors """
return self.__complete[ 0 ]
def __found_best_path( self ) :
""" have we found the best path """
done_search = not self.__heads or not self.__tails
if not done_search :
best_so_far = empSector.infinite_mob_cost
if self.__complete :
best_so_far = self.__complete[ 0 ].cost
best_possible = self.__heads[ 0 ].cost + self.__tails[ 0 ].cost
done_search = best_possible > best_so_far
return done_search
def __insert_path( self, path_list, path ) :
""" insert path in a sorted list """
index = 0
for elem in path_list :
if path.cost <= elem.cost :
break
else :
index = index + 1;
path_list.insert( index, path )
def __make_paths( self ):
""" expand tail and head paths """
expand_heads = not 0
while not self.__found_best_path():
if expand_heads:
self.__expand_heads()
else :
self.__expand_tails()
expand_heads = not expand_heads
def __expand_heads( self ) :
""" expand best head path """
path = self.__heads[ 0 ];
# print "expand head path " + path_str( path )
del self.__heads[ 0 ]
i = 0
for sect in neighbours( path.end ) :
dir = move_directions[ i ]
if not self.__visited.has_key( sect ) :
new_path = path.post_extend( sect ,
self.__mob.cost( sect ),
dir )
self.__insert_path( self.__heads, new_path )
self.__visited[ sect ] = ( new_path, 1 )
self.__starting_at[ path.start ].append( new_path )
else :
tail, is_head_path = self.__visited[ sect ]
if not is_head_path :
self.__insert_path( self.__complete,
path.append( tail, dir ) )
i = i + 1
def __expand_tails( self ) :
""" expand best tail path """
path = self.__tails[ 0 ]
# print "expand tail path " + path_str( path )
del self.__tails[ 0 ]
i = 0
for sect in neighbours( path.start ) :
dir = move_reverse_directions[ i ]
if not self.__visited.has_key( sect ) :
new_path = path.pre_extend( sect,
self.__mob.cost( sect ),
dir )
self.__insert_path( self.__tails, new_path )
self.__visited[ sect ] = ( new_path , 0 )
self.__ending_at[ path.end ].append( new_path )
else :
head, is_head_path = self.__visited[ sect ]
if is_head_path :
self.__insert_path( self.__complete,
head.append( path, dir ) )
i = i + 1
## code below deals with removing sectors
def remove_from( self, coords ) :
""" remove a sector from the set of source sectors """
removed = []
for path in self.__starting_at[ coords ] :
del self.__visited[ path.end ]
removed.append( path.end )
del self.__starting_at[ coords ]
self.__heads = self.__not_starting_at( self.__heads,
coords )
self.__complete = self.__not_starting_at( self.__complete,
coords )
self.__activate_neighbours_of( removed )
self.__make_paths()
def remove_to( self, coords ) :
""" remove a sector from the set of destination sectors """
removed = []
for path in self.__ending_at[ coords ] :
del self.__visited[ path.start ]
removed.append( path.start )
del self.__ending_at[ coords ]
self.__tails = self.__not_ending_at( self.__tails,
coords )
self.__complete = self.__not_ending_at( self.__complete,
coords )
self.__activate_neighbours_of( removed )
self.__make_paths()
def __not_starting_at( self,
path_list,
coords ) :
""" filter out path not starting at coords """
result = []
for path in path_list :
if path.start != coords :
result.append( path )
return result
def __not_ending_at( self,
path_list,
coords ) :
""" filter out path not starting at coords """
result = []
for path in path_list :
if path.end != coords :
result.append( path )
return result
def __activate_neighbours_of( self, removed_list ) :
""" enable neighbouring paths to expand into unvisited sectors """
for removed in removed_list :
## print "activate " + removed.str() + " neighbours"
for sect in neighbours( removed ) :
if self.__visited.has_key( sect) :
self.__activate_path_end( sect )
def __activate_path_end( self, sect ) :
""" insert path to head_paths or tail_paths """
path , is_head_path = self.__visited[ sect ];
if is_head_path :
if path not in self.__heads :
# print "activate head path " + path_str( path )
self.__insert_path( self.__heads, path )
else :
if path not in self.__tails :
# print "activate tail path " + path_str( path )
self.__insert_path( self.__tails, path )
## only used in debug printing
def path_str( path ) :
""" make a string out of a path """
return ( coords_to_str( path.start ) + ' ' + path.directions
+ ' ' + coords_to_str( path.end )
+ ' (' + `path.cost` + ')' )
## MultiMove uses two dictionaries to keep track of mobility
## and the amount of commodities in source and destination sectors.
## Paths is used to calculate the best path. For each of these
## paths we check whether we shall remove a source sector or
## a destination sector in the Paths object, depending on how
## much we can move.
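## A hedged driving loop (the commodity, thresholds and the sector maps
## below are hypothetical; see __create_src_map/__create_dst_map further down):
##
##     mover = MultiMove( 'food', from_secs, 10, 20, to_secs, 100 )
##     while not mover.empty() :
##         print mover.move_cmd_str()
##         mover.next()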
def best_path( src, dst, mob_cost = MobCost() ) :
result = None
paths = Paths( [ src ], [ dst ], mob_cost )
if not paths.empty() :
result = paths.best()
return result
class MoveGenerator :
""" generator of moves """
def __init__( self,
commodity,
src_secs,
dst_secs,
mcost = MobCost() ) :
self.__from_map = src_secs
self.__to_map = dst_secs
self.__move = None
self.__commodity = commodity
## print len( src_secs ) , " source sectors",
## print len( dst_secs ) , " destination sectors",
self.__paths = Paths( self.__from_map.keys(),
self.__to_map.keys(),
mcost )
## print "use ", len( self.__from_map ) , " source sectors",
## print len( self.__to_map ) , " destination sectors",
self.next()
def empty( self ) :
""" no more move commands """
return not self.__move
def next( self ) :
""" proceede to next move command """
self.__move = None
while not ( self.__paths.empty() or self.__move ) :
path = self.__paths.best()
## print "best path = " + path_str( path )
amount, mob , weight = self.__from_map[ path.start ]
if weight * path.cost < mob :
## print amount, mob, weight
move_amount = math.floor( mob / ( weight * path.cost ) )
if move_amount > amount : move_amount = amount
to_amount = self.__to_map[ path.end ]
if move_amount > to_amount : move_amount = to_amount
amount = amount - move_amount;
to_amount = to_amount - move_amount;
mob = math.floor( mob - weight * path.cost * move_amount )
self.__move = ( self.__commodity,
path,
move_amount )
if to_amount > 0 :
self.__to_map[ path.end ] = to_amount
else :
self.__paths.remove_to( path.end )
if amount > 0 and mob > 0 :
self.__from_map[ path.start ] = ( amount ,
mob ,
weight)
else :
self.__paths.remove_from( path.start )
else :
self.__paths.remove_from( path.start )
def move( self ) :
""" current move command """
return self.__move
class MultiMove :
""" generator of move commands """
def __init__( self,
commodity,
from_secs,
from_amount,
mob_limit,
to_secs,
to_amount ) :
## print len( from_secs ) , " source sectors",
## print len( to_secs ) , " destination sectors",
if from_amount < 0 : from_amount = 0
if mob_limit < 0 : mob_limit = 0
if to_amount < 0 : to_amount = 0
src = self.__create_src_map( commodity,
from_secs,
from_amount,
mob_limit )
dst = self.__create_dst_map( commodity,
to_secs,
to_amount )
for coords in dst.keys() :
if src.has_key( coords ) :
del src[ coords ]
self.__mover = MoveGenerator( commodity,
src,
dst,
MobCost() )
def empty( self ) :
""" no more move commands """
return self.__mover.empty()
def next( self ) :
""" proceede to next move command """
self.__mover.next()
def move_cmd_str( self ) :
""" construct a move command string """
result = ""
if not self.__mover.empty() :
commodity, path, amount = self.__mover.move();
result = ( 'move ' + commodity + ' '
+ coords_to_str( path.start ) + ' ' + `amount`
+ ' ' + coords_to_str( path.end ) )
return result
def __create_src_map( self, commodity, from_secs, from_amount, mob_limit ):
""" create source sectors dictionary """
result = {}
for sect in from_secs.values() :
coords = empSector.to_coord( sect )
if empSector.is_movable_from( sect, commodity ) :
mob = empSector.value( sect, 'mob' ) - mob_limit;
amount = empSector.value( sect, commodity ) - from_amount
if mob > 0 and amount > 0 :
weight = empSector.move_weight( sect, commodity )
result[ coords ] = ( amount , mob , weight)
## print "src += " + coords.str() + " " + `amount` + " ",
## print `mob` + " " + `weight`
return result
def __create_dst_map( self, commodity, to_secs, to_amount ):
""" create destination sectors dictionary """
result = {}
for sect in to_secs.values() :
coords = empSector.to_coord( sect )
if empSector.is_movable_into( sect, commodity ) :
amount = to_amount - empSector.value( sect, commodity )
if amount > 0 :
result[ coords ] = amount
## print "dst += " + coords.str() + " " + `amount`
return result
class MultiExplore :
""" generator of explore commands """
def __init__( self,
commodity,
from_secs,
from_amount,
mob_limit,
to_secs ) :
if from_amount < 0 : from_amount = 0
if mob_limit < 0 : mob_limit = 0
src = self.__create_src_map( commodity,
from_secs,
from_amount,
mob_limit )
dst = self.__create_dst_map( to_secs )
self.__explore = MoveGenerator( commodity,
src,
dst,
ExplMobCost() )
def empty( self ) :
""" no more explore commands """
return self.__explore.empty()
def next( self ) :
""" proceede to next explore command """
self.__explore.next()
def explore_cmd_str( self ) :
""" construct a expl command string """
result = ""
if not self.__explore.empty() :
commodity, path, amount = self.__explore.move();
result = ( 'expl ' + commodity + ' '
+ coords_to_str( path.start ) + ' ' + `amount`
+ ' ' + path.directions + 'h' )
return result
def __create_src_map( self, commodity, from_secs, from_amount, mob_limit ):
""" init source sectors dictionary """
result = {};
for sect in from_secs.values() :
coords = empSector.to_coord( sect )
if empSector.is_movable_from( sect, commodity ) :
mob = empSector.value( sect, 'mob' ) - mob_limit;
amount = empSector.value( sect, commodity ) - from_amount
if mob > 0 and amount > 0 :
weight = 1.0
result[ coords ] = ( amount ,
mob ,
weight)
## print "src += " + coords.str() + " " + `amount` + " ",
## print `mob` + " " + `weight`
return result;
def __create_dst_map( self, to_secs ):
""" init destination sectors dictionary """
result = {}
for sect in to_secs.values() :
if empSector.is_explorable_into( sect ) :
coords = empSector.to_coord( sect )
result[ coords ] = 1
return result
|
heplesser/nest-simulator
|
pynest/examples/clopath_synapse_spike_pairing.py
|
# -*- coding: utf-8 -*-
#
# clopath_synapse_spike_pairing.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
"""
Clopath Rule: Spike pairing experiment
--------------------------------------
This script simulates one ``aeif_psc_delta_clopath`` neuron that is connected with
a Clopath connection [1]_. The synapse receives pairs of a pre- and a postsynaptic
spike that are separated by either 10 ms (pre before post) or -10 ms (post
before pre). The change of the synaptic weight is measured after five such
pairs. This experiment is repeated five times with different repetition rates
of the spike-pair sequence: 10 Hz, 20 Hz, 30 Hz, 40 Hz, and 50 Hz.
References
~~~~~~~~~~
.. [1] Clopath C, Büsing L, Vasilaki E, Gerstner W (2010). Connectivity reflects coding:
a model of voltage-based STDP with homeostasis.
Nature Neuroscience 13:3, 344--352
"""
import numpy as np
import matplotlib.pyplot as plt
import nest
##############################################################################
# First we specify the neuron parameters. To enable voltage dependent
# prefactor ``A_LTD(u_bar_bar)`` add ``A_LTD_const: False`` to the dictionary.
nrn_params = {'V_m': -70.6,
'E_L': -70.6,
'C_m': 281.0,
'theta_minus': -70.6,
'theta_plus': -45.3,
'A_LTD': 14.0e-5,
'A_LTP': 8.0e-5,
'tau_u_bar_minus': 10.0,
'tau_u_bar_plus': 7.0,
'delay_u_bars': 4.0,
'a': 4.0,
'b': 0.0805,
'V_reset': -70.6 + 21.0,
'V_clamp': 33.0,
't_clamp': 2.0,
't_ref': 0.0,
}
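# As noted above, a hedged variant with the voltage-dependent A_LTD
# prefactor enabled (not used in this experiment) would simply extend
# the dictionary:
# nrn_params_var_ltd = dict(nrn_params, A_LTD_const=False)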
##############################################################################
# Hardcoded spike times of presynaptic spike generator
spike_times_pre = [
# Presynaptic spike before the postsynaptic
[ 20.0, 120.0, 220.0, 320.0, 420.0], # noqa
[ 20.0, 70.0, 120.0, 170.0, 220.0], # noqa
[ 20.0, 53.3, 86.7, 120.0, 153.3], # noqa
[ 20.0, 45.0, 70.0, 95.0, 120.0], # noqa
[ 20.0, 40.0, 60.0, 80.0, 100.0], # noqa
# Presynaptic spike after the postsynaptic
[120.0, 220.0, 320.0, 420.0, 520.0, 620.0], # noqa
[ 70.0, 120.0, 170.0, 220.0, 270.0, 320.0], # noqa
[ 53.3, 86.6, 120.0, 153.3, 186.6, 220.0], # noqa
[ 45.0, 70.0, 95.0, 120.0, 145.0, 170.0], # noqa
[ 40.0, 60.0, 80.0, 100.0, 120.0, 140.0]] # noqa
##############################################################################
# Hardcoded spike times of postsynaptic spike generator
spike_times_post = [
[ 10.0, 110.0, 210.0, 310.0, 410.0], # noqa
[ 10.0, 60.0, 110.0, 160.0, 210.0], # noqa
[ 10.0, 43.3, 76.7, 110.0, 143.3], # noqa
[ 10.0, 35.0, 60.0, 85.0, 110.0], # noqa
[ 10.0, 30.0, 50.0, 70.0, 90.0], # noqa
[130.0, 230.0, 330.0, 430.0, 530.0, 630.0], # noqa
[ 80.0, 130.0, 180.0, 230.0, 280.0, 330.0], # noqa
[ 63.3, 96.6, 130.0, 163.3, 196.6, 230.0], # noqa
[ 55.0, 80.0, 105.0, 130.0, 155.0, 180.0], # noqa
[ 50.0, 70.0, 90.0, 110.0, 130.0, 150.0]] # noqa
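##############################################################################
# Note that within each list the inter-pair interval equals 1000 ms divided
# by the pairing rate, i.e. 100, 50, 33.3, 25 and 20 ms for the five rates
# 10-50 Hz simulated below.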
init_w = 0.5
syn_weights = []
resolution = 0.1
##############################################################################
# Loop over pairs of spike trains
for s_t_pre, s_t_post in zip(spike_times_pre, spike_times_post):
nest.ResetKernel()
nest.resolution = resolution
# Create one neuron
nrn = nest.Create("aeif_psc_delta_clopath", 1, nrn_params)
# We need a parrot neuron since spike generators can only
# be connected with static connections
prrt_nrn = nest.Create("parrot_neuron", 1)
# Create and connect spike generators
spike_gen_pre = nest.Create("spike_generator", {"spike_times": s_t_pre})
nest.Connect(spike_gen_pre, prrt_nrn,
syn_spec={"delay": resolution})
spike_gen_post = nest.Create("spike_generator", {"spike_times": s_t_post})
nest.Connect(spike_gen_post, nrn, syn_spec={"delay": resolution, "weight": 80.0})
# Create weight recorder
wr = nest.Create('weight_recorder')
# Create Clopath connection with weight recorder
nest.CopyModel("clopath_synapse", "clopath_synapse_rec",
{"weight_recorder": wr})
syn_dict = {"synapse_model": "clopath_synapse_rec",
"weight": init_w, "delay": resolution}
nest.Connect(prrt_nrn, nrn, syn_spec=syn_dict)
# Simulation
simulation_time = (10.0 + max(s_t_pre[-1], s_t_post[-1]))
nest.Simulate(simulation_time)
# Extract and save synaptic weights
weights = wr.get("events", "weights")
syn_weights.append(weights[-1])
syn_weights = np.array(syn_weights)
# scaling of the weights so that they are comparable to [1]
syn_weights = 100.0 * 15.0 * (syn_weights - init_w) / init_w + 100.0
# Plot results
fig, ax = plt.subplots(1, sharex=False)
ax.plot([10., 20., 30., 40., 50.], syn_weights[5:], color='b', lw=2.5, ls='-',
label="pre-post pairing")
ax.plot([10., 20., 30., 40., 50.], syn_weights[:5], color='g', lw=2.5, ls='-',
label="post-pre pairing")
ax.set_ylabel("normalized weight change")
ax.set_xlabel("rho (Hz)")
ax.legend()
ax.set_title("synaptic weight")
plt.show()
|
SickGear/SickGear
|
lib/enzyme/mpeg.py
|
# -*- coding: utf-8 -*-
# enzyme - Video metadata parser
# Copyright 2011-2012 Antoine Bertin <diaoulael@gmail.com>
# Copyright 2003-2006 Thomas Schueppel <stain@acm.org>
# Copyright 2003-2006 Dirk Meyer <dischi@freevo.org>
#
# This file is part of enzyme.
#
# enzyme is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# enzyme is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with enzyme. If not, see <http://www.gnu.org/licenses/>.
__all__ = ['Parser']
import os
import struct
import logging
import stat
from .exceptions import ParseError
from . import core
from six import byte2int, indexbytes
# get logging object
log = logging.getLogger(__name__)
# #------------------------------------------------------------------------
# # START_CODE
# #
# # Start Codes, with 'slice' occupying 0x01..0xAF
# #------------------------------------------------------------------------
START_CODE = {
0x00: 'picture_start_code',
0xB0: 'reserved',
0xB1: 'reserved',
0xB2: 'user_data_start_code',
0xB3: 'sequence_header_code',
0xB4: 'sequence_error_code',
0xB5: 'extension_start_code',
0xB6: 'reserved',
0xB7: 'sequence end',
0xB8: 'group of pictures',
}
for i in range(0x01, 0xAF):
START_CODE[i] = 'slice_start_code'
# #------------------------------------------------------------------------
# # START CODES
# #------------------------------------------------------------------------
PICTURE = 0x00
USERDATA = 0xB2
SEQ_HEAD = 0xB3
SEQ_ERR = 0xB4
EXT_START = 0xB5
SEQ_END = 0xB7
GOP = 0xB8
SEQ_START_CODE = 0xB3
PACK_PKT = 0xBA
SYS_PKT = 0xBB
PADDING_PKT = 0xBE
AUDIO_PKT = 0xC0
VIDEO_PKT = 0xE0
PRIVATE_STREAM1 = 0xBD
PRIVATE_STREAM2 = 0xBf
TS_PACKET_LENGTH = 188
TS_SYNC = 0x47
# #------------------------------------------------------------------------
# # FRAME_RATE
# #
# # A lookup table of all the standard frame rates. Some rates adhere to
# # a particular profile that ensures compatibility with VLSI capabilities
# # of the early to mid 1990s.
# #
# # CPB
# # Constrained Parameters Bitstreams, an MPEG-1 set of sampling and
# # bitstream parameters designed to normalize decoder computational
# # complexity, buffer size, and memory bandwidth while still addressing
# # the widest possible range of applications.
# #
# # Main Level
# # MPEG-2 Video Main Profile and Main Level is analogous to MPEG-1's
# # CPB, with sampling limits at CCIR 601 parameters (720x480x30 Hz or
# # 720x576x24 Hz).
# #
# #------------------------------------------------------------------------
FRAME_RATE = [
0,
24000.0 / 1001, # # 3-2 pulldown NTSC (CPB/Main Level)
24, # # Film (CPB/Main Level)
25, # # PAL/SECAM or 625/50 video
30000.0 / 1001, # # NTSC (CPB/Main Level)
30, # # drop-frame NTSC or component 525/60 (CPB/Main Level)
50, # # double-rate PAL
60000.0 / 1001, # # double-rate NTSC
60, # # double-rate, drop-frame NTSC/component 525/60 video
]
# #------------------------------------------------------------------------
# # ASPECT_RATIO -- INCOMPLETE?
# #
# # This lookup table maps the header aspect ratio index to a float value.
# # These are just the defined ratios for CPB I believe. As I understand
# # it, a stream that doesn't adhere to one of these aspect ratios is
# # technically considered non-compliant.
# #------------------------------------------------------------------------
ASPECT_RATIO = (None, # Forbidden
1.0, # 1/1 (VGA)
4.0 / 3, # 4/3 (TV)
16.0 / 9, # 16/9 (Widescreen)
2.21 # (Cinema)
)
class MPEG(core.AVContainer):
"""
Parser for various MPEG files. This includes MPEG-1 and MPEG-2
program streams, elementary streams and transport streams. The
reported length differs from the length reported by most video
players, but the length provided here is correct. An MPEG file has
no additional metadata like title, etc.; only codecs, length and
resolution are reported back.
"""
def __init__(self, file):
core.AVContainer.__init__(self)
self.sequence_header_offset = 0
self.mpeg_version = 2
self.get_time = None
self.audio = []
self.video = []
self.start = None
self.__seek_size__ = None
self.__sample_size__ = None
self.__search__ = None
self.filename = None
self.length = None
self.audio_ok = None
# detect TS (fast scan)
if not self.isTS(file):
# detect system mpeg (gives the most info)
if not self.isMPEG(file):
# detect PES
if not self.isPES(file):
# Maybe it's MPEG-ES
if self.isES(file):
# If isES() succeeds, we needn't do anything further.
return
if file.name.lower().endswith('mpeg') or \
file.name.lower().endswith('mpg'):
# This has to be an mpeg file. It could be a bad
# recording from an ivtv based hardware encoder with
# some bytes missing at the beginning.
# Do some more digging...
if not self.isMPEG(file, force=True) or \
not self.video or not self.audio:
# does not look like an mpeg at all
raise ParseError()
else:
# no mpeg at all
raise ParseError()
self.mime = 'video/mpeg'
if not self.video:
self.video.append(core.VideoStream())
if self.sequence_header_offset <= 0:
return
self.progressive(file)
for vi in self.video:
vi.width, vi.height = self.dxy(file)
vi.fps, vi.aspect = self.framerate_aspect(file)
vi.bitrate = self.bitrate(file)
if self.length:
vi.length = self.length
if not self.type:
self.type = 'MPEG Video'
# set fourcc codec for video and audio
vc, ac = 'MP2V', 'MP2A'
if self.mpeg_version == 1:
vc, ac = 'MPEG', 0x0050
for v in self.video:
v.codec = vc
for a in self.audio:
if not a.codec:
a.codec = ac
def dxy(self, file):
"""
get width and height of the video
"""
file.seek(self.sequence_header_offset + 4, 0)
v = file.read(4)
x = struct.unpack('>H', v[:2])[0] >> 4
y = struct.unpack('>H', v[1:3])[0] & 0x0FFF
return x, y
def framerate_aspect(self, file):
"""
read framerate and aspect ratio
"""
file.seek(self.sequence_header_offset + 7, 0)
v = struct.unpack('>B', file.read(1))[0]
try:
fps = FRAME_RATE[v & 0xf]
except IndexError:
fps = None
if v >> 4 < len(ASPECT_RATIO):
aspect = ASPECT_RATIO[v >> 4]
else:
aspect = None
return fps, aspect
def progressive(self, file):
"""
Try to find out with brute force if the mpeg is interlaced or not.
Search for the Sequence_Extension in the extension header (01B5)
"""
file.seek(0)
buffer = ''
count = 0
while 1:
if len(buffer) < 1000:
count += 1
if count > 1000:
break
buffer += file.read(1024)
if len(buffer) < 1000:
break
pos = buffer.find('\x00\x00\x01\xb5')
if pos == -1 or len(buffer) - pos < 5:
buffer = buffer[-10:]
continue
ext = (indexbytes(buffer, pos + 4) >> 4)
if ext == 8:
pass
elif ext == 1:
if (indexbytes(buffer, pos + 5) >> 3) & 1:
self._set('progressive', True)
else:
self._set('interlaced', True)
return True
else:
log.debug(u'ext: %r' % ext)
buffer = buffer[pos + 4:]
return False
# #------------------------------------------------------------------------
# # bitrate()
# #
# # From the MPEG-2.2 spec:
# #
# # bit_rate -- This is a 30-bit integer. The lower 18 bits of the
# # integer are in bit_rate_value and the upper 12 bits are in
# # bit_rate_extension. The 30-bit integer specifies the bitrate of the
# # bitstream measured in units of 400 bits/second, rounded upwards.
# # The value zero is forbidden.
# #
# # So ignoring all the variable bitrate stuff for now, this 30 bit integer
# # multiplied times 400 bits/sec should give the rate in bits/sec.
# #
# # TODO: Variable bitrates? I need one that implements this.
# #
# # Continued from the MPEG-2.2 spec:
# #
# # If the bitstream is a constant bitrate stream, the bitrate specified
# # is the actual rate of operation of the VBV specified in annex C. If
# # the bitstream is a variable bitrate stream, the STD specifications in
# # ISO/IEC 13818-1 supersede the VBV, and the bitrate specified here is
# # used to dimension the transport stream STD (2.4.2 in ITU-T Rec. xxx |
# # ISO/IEC 13818-1), or the program stream STD (2.4.5 in ITU-T Rec. xxx |
# # ISO/IEC 13818-1).
# #
# # If the bitstream is not a constant rate bitstream the vbv_delay
# # field shall have the value FFFF in hexadecimal.
# #
# # Given the value encoded in the bitrate field, the bitstream shall be
# # generated so that the video encoding and the worst case multiplex
# # jitter do not cause STD buffer overflow or underflow.
# #
# #
# # ------------------------------------------------------------------------
# # Some parts in the code are based on mpgtx (mpgtx.sf.net)
def bitrate(self, file):
"""
read the bitrate (most of the time broken)
"""
file.seek(self.sequence_header_offset + 8, 0)
t, b = struct.unpack('>HB', file.read(3))
vrate = t << 2 | b >> 6
return vrate * 400
@staticmethod
def ReadSCRMpeg2(buffer):
"""
read SCR (timestamp) for MPEG2 at the buffer beginning (6 Bytes)
"""
if len(buffer) < 6:
return None
highbit = (byte2int(buffer) & 0x20) >> 5
low4Bytes = ((int(byte2int(buffer)) & 0x18) >> 3) << 30
low4Bytes |= (byte2int(buffer) & 0x03) << 28
low4Bytes |= indexbytes(buffer, 1) << 20
low4Bytes |= (indexbytes(buffer, 2) & 0xF8) << 12
low4Bytes |= (indexbytes(buffer, 2) & 0x03) << 13
low4Bytes |= indexbytes(buffer, 3) << 5
low4Bytes |= (indexbytes(buffer, 4)) >> 3
sys_clock_ref = (indexbytes(buffer, 4) & 0x3) << 7
sys_clock_ref |= (indexbytes(buffer, 5) >> 1)
return (int(highbit * (1 << 16) * (1 << 16)) + low4Bytes) / 90000
@staticmethod
def ReadSCRMpeg1(buffer):
"""
read SCR (timestamp) for MPEG1 at the buffer beginning (5 Bytes)
"""
if len(buffer) < 5:
return None
highbit = (byte2int(buffer) >> 3) & 0x01
low4Bytes = ((int(byte2int(buffer)) >> 1) & 0x03) << 30
low4Bytes |= indexbytes(buffer, 1) << 22
low4Bytes |= (indexbytes(buffer, 2) >> 1) << 15
low4Bytes |= indexbytes(buffer, 3) << 7
low4Bytes |= indexbytes(buffer, 4) >> 1
return (int(highbit) * (1 << 16) * (1 << 16) + low4Bytes) / 90000
@staticmethod
def ReadPTS(buffer):
"""
read PTS (PES timestamp) at the buffer beginning (5 Bytes)
"""
high = ((byte2int(buffer) & 0xF) >> 1)
med = (indexbytes(buffer, 1) << 7) + (indexbytes(buffer, 2) >> 1)
low = (indexbytes(buffer, 3) << 7) + (indexbytes(buffer, 4) >> 1)
return ((int(high) << 30) + (med << 15) + low) / 90000
def ReadHeader(self, buffer, offset):
"""
Handle MPEG header in buffer on position offset
Return None on error, new offset or 0 if the new offset can't be scanned
"""
if buffer[offset:offset + 3] != '\x00\x00\x01':
return None
_id = indexbytes(buffer, offset + 3)
if _id == PADDING_PKT:
return offset + (indexbytes(buffer, offset + 4) << 8) + \
indexbytes(buffer, offset + 5) + 6
if _id == PACK_PKT:
if indexbytes(buffer, offset + 4) & 0xF0 == 0x20:
self.type = 'MPEG-1 Video'
self.get_time = self.ReadSCRMpeg1
self.mpeg_version = 1
return offset + 12
elif (indexbytes(buffer, offset + 4) & 0xC0) == 0x40:
self.type = 'MPEG-2 Video'
self.get_time = self.ReadSCRMpeg2
return offset + (indexbytes(buffer, offset + 13) & 0x07) + 14
else:
# I have no idea what just happened, but for some DVB
# recordings done with mencoder this points to a
# PACK_PKT describing something odd. Returning 0 here
# (let's hope there are no extensions in the header)
# fixes it.
return 0
if 0xC0 <= _id <= 0xDF:
# code for audio stream
for a in self.audio:
if a.id == _id:
break
else:
self.audio.append(core.AudioStream())
self.audio[-1]._set('id', _id)
return 0
if 0xE0 <= _id <= 0xEF:
# code for video stream
for v in self.video:
if v.id == _id:
break
else:
self.video.append(core.VideoStream())
self.video[-1]._set('id', _id)
return 0
if _id == SEQ_HEAD:
# sequence header, remember that position for later use
self.sequence_header_offset = offset
return 0
if _id in [PRIVATE_STREAM1, PRIVATE_STREAM2]:
# private stream. we don't know, but maybe we can guess later
add = indexbytes(buffer, offset + 8)
# if (indexbytes(buffer, offset+6) & 4) or 1:
# id = indexbytes(buffer, offset+10+add)
if buffer[offset + 11 + add:offset + 15 + add].find('\x0b\x77') != -1:
# AC3 stream
for a in self.audio:
if a.id == _id:
break
else:
self.audio.append(core.AudioStream())
self.audio[-1]._set('id', _id)
self.audio[-1].codec = 0x2000 # AC3
return 0
if _id == SYS_PKT:
return 0
if _id == EXT_START:
return 0
return 0
# Normal MPEG (VCD, SVCD) ========================================
def isMPEG(self, file, force=False):
"""
This MPEG starts with a sequence of 0x00 followed by a PACK Header
http://dvd.sourceforge.net/dvdinfo/packhdr.html
"""
file.seek(0, 0)
buffer = file.read(10000)
offset = 0
# skip the leading zero bytes
while offset < len(buffer) - 100 and buffer[offset] == '\0':
offset += 1
offset -= 2
# test for mpeg header 0x00 0x00 0x01
header = '\x00\x00\x01%s' % chr(PACK_PKT)
if offset < 0 or not buffer[offset:offset + 4] == header:
if not force:
return 0
# brute force and try to find the pack header in the first
# 10000 bytes somehow
offset = buffer.find(header)
if offset < 0:
return 0
# scan the next 100000 bytes of data
buffer += file.read(100000)
# scan first header, to get basic info about
# how to read a timestamp
self.ReadHeader(buffer, offset)
# store first timestamp
self.start = self.get_time(buffer[offset + 4:])
while len(buffer) > offset + 1000 and \
buffer[offset:offset + 3] == '\x00\x00\x01':
# read the mpeg header
new_offset = self.ReadHeader(buffer, offset)
# header scanning detected error, this is no mpeg
if new_offset is None:
return 0
if new_offset:
# we have a new offset
offset = new_offset
# skip padding 0 before a new header
while len(buffer) > offset + 10 and \
not indexbytes(buffer, offset + 2):
offset += 1
else:
# seek to new header by brute force
offset += buffer[offset + 4:].find('\x00\x00\x01') + 4
# fill in values for support functions:
self.__seek_size__ = 1000000
self.__sample_size__ = 10000
self.__search__ = self._find_timer_
self.filename = file.name
# get length of the file
self.length = self.get_length()
return 1
@staticmethod
def _find_timer_(buffer):
"""
Return position of timer in buffer or None if not found.
This function is valid for 'normal' mpeg files
"""
pos = buffer.find('\x00\x00\x01%s' % chr(PACK_PKT))
if pos == -1:
return None
return pos + 4
# PES ============================================================
def ReadPESHeader(self, offset, buffer, id=0):
"""
Parse a PES header.
Since it starts with 0x00 0x00 0x01 like 'normal' mpegs, this
        function will return (0, None) when it is not a PES header, or
(packet length, timestamp position (maybe None))
http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
"""
if not buffer[0:3] == '\x00\x00\x01':
return 0, None
packet_length = (indexbytes(buffer, 4) << 8) + indexbytes(buffer, 5) + 6
align = indexbytes(buffer, 6) & 4
header_length = indexbytes(buffer, 8)
# PES ID (starting with 001)
if indexbytes(buffer, 3) & 0xE0 == 0xC0:
id = id or indexbytes(buffer, 3) & 0x1F
for a in self.audio:
if a.id == id:
break
else:
self.audio.append(core.AudioStream())
self.audio[-1]._set('id', id)
elif indexbytes(buffer, 3) & 0xF0 == 0xE0:
id = id or indexbytes(buffer, 3) & 0xF
for v in self.video:
if v.id == id:
break
else:
self.video.append(core.VideoStream())
self.video[-1]._set('id', id)
# new mpeg starting
if buffer[header_length + 9:header_length + 13] == \
'\x00\x00\x01\xB3' and not self.sequence_header_offset:
# yes, remember offset for later use
self.sequence_header_offset = offset + header_length + 9
elif indexbytes(buffer, 3) == 189 or indexbytes(buffer, 3) == 191:
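            # 189 == 0xBD (private stream 1), 191 == 0xBF (private stream 2)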
# private stream. we don't know, but maybe we can guess later
id = id or indexbytes(buffer, 3) & 0xF
if align and \
buffer[header_length + 9:header_length + 11] == '\x0b\x77':
# AC3 stream
for a in self.audio:
if a.id == id:
break
else:
self.audio.append(core.AudioStream())
self.audio[-1]._set('id', id)
self.audio[-1].codec = 0x2000 # AC3
else:
# unknown content
pass
ptsdts = indexbytes(buffer, 7) >> 6
        if ptsdts:
            # the PTS/DTS flags from byte 7 must be mirrored in the top
            # nibble of byte 9; a mismatch means a corrupt header
            if indexbytes(buffer, 9) >> 4 != ptsdts:
                log.warning(u'WARNING: bad PTS/DTS, please contact us')
                return packet_length, None
            # timestamp = self.ReadPTS(buffer[9:14])
            # the decode below mirrors ReadPTS but its result is unused;
            # callers re-read the timestamp at the returned offset 9
            high = ((indexbytes(buffer, 9) & 0xF) >> 1)
            med = (indexbytes(buffer, 10) << 7) + (indexbytes(buffer, 11) >> 1)
            low = (indexbytes(buffer, 12) << 7) + (indexbytes(buffer, 13) >> 1)
            return packet_length, 9
return packet_length, None
def isPES(self, file):
log.info(u'trying mpeg-pes scan')
file.seek(0, 0)
buffer = file.read(3)
# header (also valid for all mpegs)
if not buffer == '\x00\x00\x01':
return 0
self.sequence_header_offset = 0
buffer += file.read(10000)
offset = 0
while offset + 1000 < len(buffer):
pos, timestamp = self.ReadPESHeader(offset, buffer[offset:])
if not pos:
return 0
if timestamp is not None and not hasattr(self, 'start'):
self.get_time = self.ReadPTS
bpos = buffer[offset + timestamp:offset + timestamp + 5]
self.start = self.get_time(bpos)
if self.sequence_header_offset and hasattr(self, 'start'):
                # we have all the information we need
break
offset += pos
            if offset + 1000 < len(buffer) < 1000000 or 1:
                # looks like a pes, read more (note: the 'or 1' makes this
                # condition always true, so more data is always read)
buffer += file.read(10000)
if not self.video and not self.audio:
# no video and no audio?
return 0
self.type = 'MPEG-PES'
# fill in values for support functions:
self.__seek_size__ = 10000000 # 10 MB
self.__sample_size__ = 500000 # 500 k scanning
self.__search__ = self._find_timer_PES_
self.filename = file.name
# get length of the file
self.length = self.get_length()
return 1
def _find_timer_PES_(self, buffer):
"""
        Return position of timer in buffer or None if not found.
This function is valid for PES files
"""
pos = buffer.find('\x00\x00\x01')
offset = 0
if pos == -1 or offset + 1000 >= len(buffer):
return None
retpos = -1
ackcount = 0
while offset + 1000 < len(buffer):
pos, timestamp = self.ReadPESHeader(offset, buffer[offset:])
if timestamp is not None and retpos == -1:
retpos = offset + timestamp
if pos == 0:
# Oops, that was a mpeg header, no PES header
offset += buffer[offset:].find('\x00\x00\x01')
retpos = -1
ackcount = 0
else:
offset += pos
if retpos != -1:
ackcount += 1
if ackcount > 10:
# looks ok to me
return retpos
return None
# Elementary Stream ===============================================
def isES(self, file):
file.seek(0, 0)
try:
header = struct.unpack('>LL', file.read(8))
except (struct.error, IOError):
return False
if header[0] != 0x1B3:
return False
# Is an mpeg video elementary stream
self.mime = 'video/mpeg'
video = core.VideoStream()
video.width = header[1] >> 20
video.height = (header[1] >> 8) & 0xfff
if header[1] & 0xf < len(FRAME_RATE):
video.fps = FRAME_RATE[header[1] & 0xf]
if (header[1] >> 4) & 0xf < len(ASPECT_RATIO):
# FIXME: Empirically the aspect looks like PAR rather than DAR
video.aspect = ASPECT_RATIO[(header[1] >> 4) & 0xf]
self.video.append(video)
return True
# Transport Stream ===============================================
def isTS(self, file):
file.seek(0, 0)
buffer = file.read(TS_PACKET_LENGTH * 2)
c = 0
while c + TS_PACKET_LENGTH < len(buffer):
if indexbytes(buffer, c) == indexbytes(buffer, c + TS_PACKET_LENGTH) == TS_SYNC:
break
c += 1
else:
return 0
buffer += file.read(10000)
self.type = 'MPEG-TS'
while c + TS_PACKET_LENGTH < len(buffer):
start = indexbytes(buffer, c + 1) & 0x40
# maybe load more into the buffer
if c + 2 * TS_PACKET_LENGTH > len(buffer) and c < 500000:
buffer += file.read(10000)
# wait until the ts payload contains a payload header
if not start:
c += TS_PACKET_LENGTH
continue
tsid = ((indexbytes(buffer, c + 1) & 0x3F) << 8) + indexbytes(buffer, c + 2)
adapt = (indexbytes(buffer, c + 3) & 0x30) >> 4
offset = 4
if adapt & 0x02:
# meta info present, skip it for now
adapt_len = indexbytes(buffer, c + offset)
offset += adapt_len + 1
if not indexbytes(buffer, c + 1) & 0x40:
                # no new pes or psi starting in this stream payload
                # (note: 'start' above already guarantees this bit is
                # set, so this branch never triggers)
pass
elif adapt & 0x01:
# PES
timestamp = self.ReadPESHeader(c + offset, buffer[c + offset:],
tsid)[1]
if timestamp is not None:
if not hasattr(self, 'start'):
self.get_time = self.ReadPTS
timestamp = c + offset + timestamp
self.start = self.get_time(buffer[timestamp:timestamp + 5])
elif not hasattr(self, 'audio_ok'):
timestamp = c + offset + timestamp
start = self.get_time(buffer[timestamp:timestamp + 5])
if start is not None and self.start is not None and \
abs(start - self.start) < 10:
# looks ok
self.audio_ok = True
else:
# timestamp broken
del self.start
log.warning(u'Timestamp error, correcting')
if hasattr(self, 'start') and self.start and \
self.sequence_header_offset and self.video and self.audio:
break
c += TS_PACKET_LENGTH
if not self.sequence_header_offset:
return 0
# fill in values for support functions:
self.__seek_size__ = 10000000 # 10 MB
self.__sample_size__ = 100000 # 100 k scanning
self.__search__ = self._find_timer_TS_
self.filename = file.name
# get length of the file
self.length = self.get_length()
return 1
def _find_timer_TS_(self, buffer):
c = 0
while c + TS_PACKET_LENGTH < len(buffer):
if indexbytes(buffer, c) == indexbytes(buffer, c + TS_PACKET_LENGTH) == TS_SYNC:
break
c += 1
else:
return None
while c + TS_PACKET_LENGTH < len(buffer):
start = indexbytes(buffer, c + 1) & 0x40
if not start:
c += TS_PACKET_LENGTH
continue
tsid = ((indexbytes(buffer, c + 1) & 0x3F) << 8) + indexbytes(buffer, c + 2)
adapt = (indexbytes(buffer, c + 3) & 0x30) >> 4
offset = 4
if adapt & 0x02:
# meta info present, skip it for now
offset += indexbytes(buffer, c + offset) + 1
if adapt & 0x01:
timestamp = self.ReadPESHeader(c + offset, buffer[c + offset:], tsid)[1]
if timestamp is None:
# this should not happen
log.error(u'bad TS')
return None
return c + offset + timestamp
c += TS_PACKET_LENGTH
return None
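    # --- Editor's note: hedged background (editor's addition) for the two
    # --- scans above. A transport-stream packet is TS_PACKET_LENGTH (188)
    # --- bytes long and begins with the sync byte TS_SYNC (0x47); requiring
    # --- two sync bytes exactly one packet apart avoids false hits on a
    # --- stray 0x47 inside a payload. The numeric values are quoted from the
    # --- MPEG-TS spec, not visible in this excerpt.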
# Support functions ==============================================
def get_endpos(self):
"""
        get the last timestamp of the mpeg, return None if this is not possible
"""
if not hasattr(self, 'filename') or not hasattr(self, 'start'):
return None
length = os.stat(self.filename)[stat.ST_SIZE]
if length < self.__sample_size__:
return
file = open(self.filename)
file.seek(length - self.__sample_size__)
buffer = file.read(self.__sample_size__)
end = None
while 1:
pos = self.__search__(buffer)
if pos is None:
break
end = self.get_time(buffer[pos:]) or end
buffer = buffer[pos + 100:]
file.close()
return end
def get_length(self):
"""
        get the length in seconds, return None if this is not possible
"""
end = self.get_endpos()
if end is None or self.start is None:
return None
if self.start > end:
return int(((int(1) << 33) - 1) / 90000) - self.start + end
return end - self.start
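    # --- Editor's note: hedged worked example (editor's addition) of the
    # --- wrap branch above. The 33-bit 90 kHz clock wraps every
    # --- int((2**33 - 1) / 90000) = 95443 seconds (about 26.5 hours). With
    # --- hypothetical start = 95000 s and end = 400 s the stream wrapped
    # --- once, so the length is 95443 - 95000 + 400 = 843 s, which is what
    # --- the start > end branch computes.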
def seek(self, end_time):
"""
Return the byte position in the file where the time position
        is 'end_time' seconds. Return 0 if this is not possible
"""
if not hasattr(self, 'filename') or not hasattr(self, 'start'):
return 0
file = open(self.filename)
seek_to = 0
while 1:
file.seek(self.__seek_size__, 1)
buffer = file.read(self.__sample_size__)
if len(buffer) < 10000:
break
pos = self.__search__(buffer)
if pos is not None:
# found something
nt = self.get_time(buffer[pos:])
if nt is not None and nt >= end_time:
# too much, break
break
# that wasn't enough
seek_to = file.tell()
file.close()
return seek_to
def __scan__(self):
"""
scan file for timestamps (may take a long time)
"""
if not hasattr(self, 'filename') or not hasattr(self, 'start'):
return 0
file = open(self.filename)
log.debug(u'scanning file...')
while 1:
file.seek(self.__seek_size__ * 10, 1)
buffer = file.read(self.__sample_size__)
if len(buffer) < 10000:
break
pos = self.__search__(buffer)
if pos is None:
continue
log.debug(u'buffer position: %r' % self.get_time(buffer[pos:]))
file.close()
log.debug(u'done scanning file')
Parser = MPEG
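# --- Editor's note: a minimal, self-contained sketch (editor's addition, not
# --- part of the original module) packing and unpacking the 33-bit PTS layout
# --- that ReadPTS decodes. All values are synthetic.
def _pts_roundtrip_example():
    pts = 123456789                            # hypothetical 90 kHz tick count
    b = [0x21 | (((pts >> 30) & 0x7) << 1),    # '0010' prefix, 3 high bits, marker
         (pts >> 22) & 0xFF,                   # top 8 of the 15 middle bits
         (((pts >> 15) & 0x7F) << 1) | 1,      # low 7 of the middle bits, marker
         (pts >> 7) & 0xFF,                    # top 8 of the 15 low bits
         ((pts & 0x7F) << 1) | 1]              # low 7 of the low bits, marker
    # the same shifts ReadPTS applies recover the original tick count
    high = (b[0] & 0xF) >> 1
    med = (b[1] << 7) + (b[2] >> 1)
    low = (b[3] << 7) + (b[4] >> 1)
    assert (high << 30) + (med << 15) + low == pts
    return pts / 90000.0                       # seconds, as ReadPTS reports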
|
nop33/indico
|
indico/modules/rb/models/locations.py
|
# This file is part of Indico.
# Copyright (C) 2002 - 2017 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from collections import defaultdict
from datetime import time
from sqlalchemy import func
from indico.core.db import db
from indico.modules.rb.models.aspects import Aspect
from indico.util.caching import memoize_request
from indico.util.decorators import classproperty
from indico.util.i18n import _
from indico.util.locators import locator_property
from indico.util.string import return_ascii
class Location(db.Model):
__tablename__ = 'locations'
__table_args__ = {'schema': 'roombooking'}
# TODO: Turn this into a proper admin setting
working_time_periods = ((time(8, 30), time(12, 30)), (time(13, 30), time(17, 30)))
@classproperty
@classmethod
def working_time_start(cls):
return cls.working_time_periods[0][0]
@classproperty
@classmethod
def working_time_end(cls):
return cls.working_time_periods[-1][1]
id = db.Column(
db.Integer,
primary_key=True
)
name = db.Column(
db.String,
nullable=False,
unique=True,
index=True
)
is_default = db.Column(
db.Boolean,
nullable=False,
default=False
)
default_aspect_id = db.Column(
db.Integer,
db.ForeignKey(
'roombooking.aspects.id',
use_alter=True,
name='fk_locations_default_aspect_id',
onupdate='CASCADE',
ondelete='SET NULL'
)
)
map_url_template = db.Column(
db.String,
nullable=False,
default=''
)
aspects = db.relationship(
'Aspect',
backref='location',
cascade='all, delete-orphan',
primaryjoin=(id == Aspect.location_id),
lazy='dynamic',
)
default_aspect = db.relationship(
'Aspect',
primaryjoin=default_aspect_id == Aspect.id,
post_update=True,
)
rooms = db.relationship(
'Room',
backref='location',
cascade='all, delete-orphan',
lazy=True
)
attributes = db.relationship(
'RoomAttribute',
backref='location',
cascade='all, delete-orphan',
lazy='dynamic'
)
equipment_types = db.relationship(
'EquipmentType',
backref='location',
lazy='dynamic',
cascade='all, delete-orphan'
)
holidays = db.relationship(
'Holiday',
backref='location',
cascade='all, delete-orphan',
lazy='dynamic'
)
# relationship backrefs:
# - breaks (Break.own_venue)
# - contributions (Contribution.own_venue)
# - events (Event.own_venue)
# - session_blocks (SessionBlock.own_venue)
# - sessions (Session.own_venue)
@return_ascii
def __repr__(self):
return u'<Location({0}, {1}, {2})>'.format(
self.id,
self.default_aspect_id,
self.name
)
@locator_property
def locator(self):
return {'locationId': self.name}
@property
@memoize_request
def is_map_available(self):
return self.aspects.count() > 0
@classproperty
@classmethod
@memoize_request
def default_location(cls):
return cls.query.filter_by(is_default=True).first()
def set_default(self):
if self.is_default:
return
(Location.query
.filter(Location.is_default | (Location.id == self.id))
.update({'is_default': func.not_(Location.is_default)}, synchronize_session='fetch'))
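    # --- Editor's note: hedged sketch (editor's addition) of the single-query
    # --- trick in set_default above. The filter matches both the old default
    # --- row and the target row, and NOT is_default flips each of them in one
    # --- UPDATE, roughly:
    #
    #   UPDATE roombooking.locations
    #   SET is_default = NOT is_default
    #   WHERE is_default OR id = :target_id
    #
    # --- The SQL shown is an approximation, not captured from SQLAlchemy.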
def get_attribute_by_name(self, name):
return self.attributes.filter_by(name=name).first()
def get_equipment_by_name(self, name):
return self.equipment_types.filter_by(name=name).first()
def get_buildings(self):
building_rooms = defaultdict(list)
for room in self.rooms:
building_rooms[room.building].append(room)
buildings = []
for building_name, rooms in building_rooms.iteritems():
room_with_lat_lon = next((r for r in rooms if r.longitude and r.latitude), None)
if not room_with_lat_lon:
continue
buildings.append({'number': building_name,
'title': _(u'Building {}'.format(building_name)),
'longitude': room_with_lat_lon.longitude,
'latitude': room_with_lat_lon.latitude,
'rooms': [r.to_serializable('__public_exhaustive__') for r in rooms]})
return buildings
|
Applied-GeoSolutions/gips
|
gips/__init__.py
|
#!/usr/bin/env python
################################################################################
# GIPS: Geospatial Image Processing System
#
# AUTHOR: Matthew Hanson
# EMAIL: matt.a.hanson@gmail.com
#
# Copyright (C) 2014-2018 Applied Geosolutions
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
################################################################################
import os
from . import version
def detect_version():
"""Emit GIPS' software version. May be overridden for testing purposes.
To override version.py, put a desired version string in the environment
variable GIPS_OVERRIDE_VERSION."""
return os.environ.get('GIPS_OVERRIDE_VERSION', version.__version__)
__version__ = detect_version()
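# --- Editor's note: hedged usage sketch (editor's addition; the helper name
# --- is an invention for illustration). detect_version() consults
# --- GIPS_OVERRIDE_VERSION at call time, while __version__ above is frozen at
# --- import time, so the variable must be set before importing gips if the
# --- module-level value should reflect the override.
def _override_example():
    os.environ['GIPS_OVERRIDE_VERSION'] = '0.0.0-test'
    try:
        assert detect_version() == '0.0.0-test'
    finally:
        del os.environ['GIPS_OVERRIDE_VERSION']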
|
caioau/caioau-personal
|
fluxos/lista 2/grafo-3.py
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Sun Mar 27 01:20:46 2016
@author: caioau
"""
import matplotlib.pyplot as plt
import networkx as nx
from networkx.drawing.nx_agraph import graphviz_layout
def main():
    G = nx.DiGraph() # G is a directed graph
    # build the graph from its edges
G.add_weighted_edges_from([(1,2,2.0),(1,3,1.0),(2,3,3.0),(2,4,3.0),(3,5,1.0),(4,6,2.0),(5,4,2.0),(5,6,5.0)])
for i in G.edges():
# print i[0], i[1]
G[i[0]][i[1]]["color"] = "black"
# G[1][2]["color"] = "red"
maiorCaminho = nx.dag_longest_path(G)
print maiorCaminho
for i in range(1, len(maiorCaminho)):
G[maiorCaminho[i-1]][maiorCaminho[i]]["color"] = "red"
desenhaGrafo(G, "grafo-3.png")
def desenhaGrafo(G,pngfilename): # draw the graph and save it to a png image
    edge_labels=dict([((u,v,),d['weight']) # build the edge labels
                 for u,v,d in G.edges(data=True)])
    colors = [G[u][v]['color'] for u,v in G.edges()]
    pos = graphviz_layout(G,prog='neato') # get the node positions (for drawing) # TODO: disable this?
    nx.draw_networkx_edges(G,pos, edge_color=colors) # draw the edges
    nx.draw_networkx_labels(G,pos) # draw the node labels
    nx.draw_networkx_edge_labels(G,pos,edge_labels=edge_labels) # draw the edge labels
    nx.draw_networkx_nodes(G,pos,node_color='w') # draw the nodes
    plt.axis('off') # turn off the axes
plt.savefig(pngfilename)
plt.close("all")
if __name__ == "__main__":
main()
|
tinutomson/wikicoding
|
wiki/plugins/macros/wiki_plugin.py
|
from __future__ import absolute_import
from __future__ import unicode_literals
# -*- coding: utf-8 -*-
from django.utils.translation import ugettext as _
from wiki.core.plugins import registry
from wiki.core.plugins.base import BasePlugin
from wiki.plugins.macros import settings
from wiki.plugins.macros.mdx.macro import MacroExtension
from wiki.plugins.macros.mdx.toc import WikiTocExtension
# from wiki.plugins.macros.mdx.wikilinks import WikiLinkExtension
class MacroPlugin(BasePlugin):
slug = settings.SLUG
sidebar = {'headline': _('Macros'),
'icon_class': 'fa-play',
'template': 'wiki/plugins/macros/sidebar.html',
'form_class': None,
'get_form_kwargs': (lambda a: {})}
markdown_extensions = [MacroExtension(), WikiTocExtension()]
def __init__(self):
pass
registry.register(MacroPlugin)
|
petertodd/timelock
|
lib/python-bitcoinlib/bitcoin/rpc.py
|
# Copyright 2011 Jeff Garzik
#
# RawProxy has the following improvements over python-jsonrpc's ServiceProxy
# class:
#
# - HTTP connections persist for the life of the RawProxy object (if server
# supports HTTP/1.1)
# - sends protocol 'version', per JSON-RPC 1.1
# - sends proper, incrementing 'id'
# - sends Basic HTTP authentication headers
# - parses all JSON numbers that look like floats as Decimal
# - uses standard Python json lib
#
# Previous copyright, from python-jsonrpc/jsonrpc/proxy.py:
#
# Copyright (c) 2007 Jan-Klaas Kollhof
#
# This file is part of jsonrpc.
#
# jsonrpc is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 2.1 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this software; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""Bitcoin Core RPC support"""
from __future__ import absolute_import, division, print_function, unicode_literals
try:
import http.client as httplib
except ImportError:
import httplib
import base64
import binascii
import decimal
import json
import os
import platform
import sys
try:
import urllib.parse as urlparse
except ImportError:
import urlparse
import bitcoin
from bitcoin.core import COIN, lx, b2lx, CBlock, CTransaction, COutPoint, CTxOut
from bitcoin.core.script import CScript
from bitcoin.wallet import CBitcoinAddress
USER_AGENT = "AuthServiceProxy/0.1"
HTTP_TIMEOUT = 30
# (un)hexlify to/from unicode, needed for Python3
unhexlify = binascii.unhexlify
hexlify = binascii.hexlify
if sys.version > '3':
unhexlify = lambda h: binascii.unhexlify(h.encode('utf8'))
hexlify = lambda b: binascii.hexlify(b).decode('utf8')
class JSONRPCException(Exception):
def __init__(self, rpc_error):
super(JSONRPCException, self).__init__('msg: %r code: %r' %
(rpc_error['message'], rpc_error['code']))
self.error = rpc_error
class RawProxy(object):
# FIXME: need a CChainParams rather than hard-coded service_port
def __init__(self, service_url=None,
service_port=None,
btc_conf_file=None,
timeout=HTTP_TIMEOUT,
_connection=None):
"""Low-level JSON-RPC proxy
        Unlike Proxy, no conversion is done on the raw JSON objects.
"""
if service_url is None:
# Figure out the path to the bitcoin.conf file
if btc_conf_file is None:
if platform.system() == 'Darwin':
btc_conf_file = os.path.expanduser('~/Library/Application Support/Bitcoin/')
elif platform.system() == 'Windows':
btc_conf_file = os.path.join(os.environ['APPDATA'], 'Bitcoin')
else:
btc_conf_file = os.path.expanduser('~/.bitcoin')
btc_conf_file = os.path.join(btc_conf_file, 'bitcoin.conf')
# Extract contents of bitcoin.conf to build service_url
with open(btc_conf_file, 'r') as fd:
conf = {}
for line in fd.readlines():
if '#' in line:
line = line[:line.index('#')]
if '=' not in line:
continue
k, v = line.split('=', 1)
conf[k.strip()] = v.strip()
if service_port is None:
service_port = bitcoin.params.RPC_PORT
conf['rpcport'] = int(conf.get('rpcport', service_port))
conf['rpcssl'] = conf.get('rpcssl', '0')
if conf['rpcssl'].lower() in ('0', 'false'):
conf['rpcssl'] = False
elif conf['rpcssl'].lower() in ('1', 'true'):
conf['rpcssl'] = True
else:
raise ValueError('Unknown rpcssl value %r' % conf['rpcssl'])
service_url = ('%s://%s:%s@localhost:%d' %
('https' if conf['rpcssl'] else 'http',
conf['rpcuser'], conf['rpcpassword'],
conf['rpcport']))
self.__service_url = service_url
self.__url = urlparse.urlparse(service_url)
if self.__url.port is None:
port = 80
else:
port = self.__url.port
self.__id_count = 0
authpair = "%s:%s" % (self.__url.username, self.__url.password)
authpair = authpair.encode('utf8')
self.__auth_header = b"Basic " + base64.b64encode(authpair)
if _connection:
# Callables re-use the connection of the original proxy
self.__conn = _connection
elif self.__url.scheme == 'https':
self.__conn = httplib.HTTPSConnection(self.__url.hostname, port=port,
key_file=None, cert_file=None,
timeout=timeout)
else:
self.__conn = httplib.HTTPConnection(self.__url.hostname, port=port,
timeout=timeout)
def _call(self, service_name, *args):
self.__id_count += 1
postdata = json.dumps({'version': '1.1',
'method': service_name,
'params': args,
'id': self.__id_count})
self.__conn.request('POST', self.__url.path, postdata,
{'Host': self.__url.hostname,
'User-Agent': USER_AGENT,
'Authorization': self.__auth_header,
'Content-type': 'application/json'})
response = self._get_response()
if response['error'] is not None:
raise JSONRPCException(response['error'])
elif 'result' not in response:
raise JSONRPCException({
'code': -343, 'message': 'missing JSON-RPC result'})
else:
return response['result']
def __getattr__(self, name):
if name.startswith('__') and name.endswith('__'):
# Python internal stuff
raise AttributeError
# Create a callable to do the actual call
f = lambda *args: self._call(name, *args)
# Make debuggers show <function bitcoin.rpc.name> rather than <function
# bitcoin.rpc.<lambda>>
f.__name__ = name
return f
def _batch(self, rpc_call_list):
postdata = json.dumps(list(rpc_call_list))
self.__conn.request('POST', self.__url.path, postdata,
{'Host': self.__url.hostname,
'User-Agent': USER_AGENT,
'Authorization': self.__auth_header,
'Content-type': 'application/json'})
return self._get_response()
def _get_response(self):
http_response = self.__conn.getresponse()
if http_response is None:
raise JSONRPCException({
'code': -342, 'message': 'missing HTTP response from server'})
return json.loads(http_response.read().decode('utf8'),
parse_float=decimal.Decimal)
class Proxy(RawProxy):
def __init__(self, service_url=None,
service_port=None,
btc_conf_file=None,
timeout=HTTP_TIMEOUT,
**kwargs):
"""Create a proxy to a bitcoin RPC service
        Unlike RawProxy, data is passed as objects rather than JSON (not yet
        fully implemented). Assumes Bitcoin Core version >= 0.9; older versions
        mostly work, but there are a few incompatibilities.
If service_url is not specified the username and password are read out
of the file btc_conf_file. If btc_conf_file is not specified
~/.bitcoin/bitcoin.conf or equivalent is used by default. The default
port is set according to the chain parameters in use: mainnet, testnet,
or regtest.
Usually no arguments to Proxy() are needed; the local bitcoind will be
used.
timeout - timeout in seconds before the HTTP interface times out
"""
        super(Proxy, self).__init__(service_url=service_url, service_port=service_port, btc_conf_file=btc_conf_file,
                                    timeout=timeout,
                                    **kwargs)
def getaccountaddress(self, account=None):
"""Return the current Bitcoin address for receiving payments to this account."""
r = self._call('getaccountaddress', account)
return CBitcoinAddress(r)
def getbalance(self, account='*', minconf=1):
"""Get the balance
        account - The selected account. Defaults to "*" for entire wallet. Use "" for the default account.
minconf - Only include transactions confirmed at least this many times. (default=1)
"""
r = self._call('getbalance', account, minconf)
return int(r*COIN)
def getblock(self, block_hash):
"""Get block <block_hash>
Raises IndexError if block_hash is not valid.
"""
try:
block_hash = b2lx(block_hash)
except TypeError:
raise TypeError('%s.getblock(): block_hash must be bytes; got %r instance' %
(self.__class__.__name__, block_hash.__class__))
try:
r = self._call('getblock', block_hash, False)
except JSONRPCException as ex:
raise IndexError('%s.getblock(): %s (%d)' %
(self.__class__.__name__, ex.error['message'], ex.error['code']))
return CBlock.deserialize(unhexlify(r))
def getblockhash(self, height):
"""Return hash of block in best-block-chain at height.
Raises IndexError if height is not valid.
"""
try:
return lx(self._call('getblockhash', height))
except JSONRPCException as ex:
raise IndexError('%s.getblockhash(): %s (%d)' %
(self.__class__.__name__, ex.error['message'], ex.error['code']))
def getinfo(self):
"""Return an object containing various state info"""
r = self._call('getinfo')
r['balance'] = int(r['balance'] * COIN)
r['paytxfee'] = int(r['paytxfee'] * COIN)
return r
def getnewaddress(self, account=None):
"""Return a new Bitcoin address for receiving payments.
If account is not None, it is added to the address book so payments
received with the address will be credited to account.
"""
r = None
if account is not None:
r = self._call('getnewaddress', account)
else:
r = self._call('getnewaddress')
return CBitcoinAddress(r)
def getrawchangeaddress(self):
"""Returns a new Bitcoin address, for receiving change.
This is for use with raw transactions, NOT normal use.
"""
r = self._call('getrawchangeaddress')
return CBitcoinAddress(r)
def getrawmempool(self, verbose=False):
"""Return the mempool"""
if verbose:
return self._call('getrawmempool', verbose)
else:
r = self._call('getrawmempool')
r = [lx(txid) for txid in r]
return r
def getrawtransaction(self, txid, verbose=False):
"""Return transaction with hash txid
Raises IndexError if transaction not found.
        verbose - If true a dict is returned instead with additional information
on the transaction.
Note that if all txouts are spent and the transaction index is not
enabled the transaction may not be available.
"""
try:
r = self._call('getrawtransaction', b2lx(txid), 1 if verbose else 0)
except JSONRPCException as ex:
raise IndexError('%s.getrawtransaction(): %s (%d)' %
(self.__class__.__name__, ex.error['message'], ex.error['code']))
if verbose:
r['tx'] = CTransaction.deserialize(unhexlify(r['hex']))
del r['hex']
del r['txid']
del r['version']
del r['locktime']
del r['vin']
del r['vout']
r['blockhash'] = lx(r['blockhash']) if 'blockhash' in r else None
else:
r = CTransaction.deserialize(unhexlify(r))
return r
def gettransaction(self, txid):
"""Get detailed information about in-wallet transaction txid
Raises IndexError if transaction not found in the wallet.
FIXME: Returned data types are not yet converted.
"""
try:
r = self._call('gettransaction', b2lx(txid))
except JSONRPCException as ex:
            raise IndexError('%s.gettransaction(): %s (%d)' %
(self.__class__.__name__, ex.error['message'], ex.error['code']))
return r
def gettxout(self, outpoint, includemempool=True):
"""Return details about an unspent transaction output.
Raises IndexError if outpoint is not found or was spent.
includemempool - Include mempool txouts
"""
r = self._call('gettxout', b2lx(outpoint.hash), outpoint.n, includemempool)
if r is None:
raise IndexError('%s.gettxout(): unspent txout %r not found' % (self.__class__.__name__, outpoint))
r['txout'] = CTxOut(int(r['value'] * COIN),
CScript(unhexlify(r['scriptPubKey']['hex'])))
del r['value']
del r['scriptPubKey']
r['bestblock'] = lx(r['bestblock'])
return r
def listunspent(self, minconf=0, maxconf=9999999, addrs=None):
"""Return unspent transaction outputs in wallet
Outputs will have between minconf and maxconf (inclusive)
confirmations, optionally filtered to only include txouts paid to
addresses in addrs.
"""
r = None
if addrs is None:
r = self._call('listunspent', minconf, maxconf)
else:
addrs = [str(addr) for addr in addrs]
r = self._call('listunspent', minconf, maxconf, addrs)
r2 = []
for unspent in r:
unspent['outpoint'] = COutPoint(lx(unspent['txid']), unspent['vout'])
del unspent['txid']
del unspent['vout']
unspent['address'] = CBitcoinAddress(unspent['address'])
unspent['scriptPubKey'] = CScript(unhexlify(unspent['scriptPubKey']))
unspent['amount'] = int(unspent['amount'] * COIN)
r2.append(unspent)
return r2
def lockunspent(self, unlock, outpoints):
"""Lock or unlock outpoints"""
json_outpoints = [{'txid':b2lx(outpoint.hash),'vout':outpoint.n} for outpoint in outpoints]
return self._call('lockunspent', unlock, json_outpoints)
def sendrawtransaction(self, tx):
"""Submit transaction to local node and network."""
hextx = hexlify(tx.serialize())
r = self._call('sendrawtransaction', hextx)
return lx(r)
def sendtoaddress(self, addr, amount):
"""Sent amount to a given address"""
addr = str(addr)
amount = float(amount)/COIN
r = self._call('sendtoaddress', addr, amount)
return lx(r)
def signrawtransaction(self, tx, *args):
"""Sign inputs for transaction
FIXME: implement options
"""
hextx = hexlify(tx.serialize())
r = self._call('signrawtransaction', hextx, *args)
r['tx'] = CTransaction.deserialize(unhexlify(r['hex']))
del r['hex']
return r
def submitblock(self, block, params=None):
"""Submit a new block to the network.
params is optional and is currently ignored by bitcoind. See
https://en.bitcoin.it/wiki/BIP_0022 for full specification.
"""
hexblock = hexlify(block.serialize())
if params is not None:
return self._call('submitblock', hexblock, params)
else:
return self._call('submitblock', hexblock)
def validateaddress(self, address):
"""Return information about an address"""
r = self._call('validateaddress', str(address))
r['address'] = CBitcoinAddress(r['address'])
if 'pubkey' in r:
r['pubkey'] = unhexlify(r['pubkey'])
return r
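# --- Editor's note: hedged usage sketch (editor's addition). It needs a
# --- running bitcoind whose credentials are readable from bitcoin.conf, so it
# --- is illustrative rather than a test; the 'blocks' key of getinfo() is an
# --- assumption about the node's reply.
def _proxy_usage_example():
    proxy = Proxy()                       # reads ~/.bitcoin/bitcoin.conf
    balance = proxy.getbalance()          # integer satoshis, unlike RawProxy,
                                          # which returns the raw Decimal
    tip_hash = proxy.getblockhash(proxy.getinfo()['blocks'])
    block = proxy.getblock(tip_hash)      # a parsed CBlock instance
    return balance, block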
|
Eigenlabs/EigenD
|
plg_macosx/caprobe.py
|
#
# Copyright 2009 Eigenlabs Ltd. http://www.eigenlabs.com
#
# This file is part of EigenD.
#
# EigenD is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# EigenD is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EigenD. If not, see <http://www.gnu.org/licenses/>.
#
import macosx_native
def main():
macosx_native.probe_coreaudio(True,True)
|
burakbayramli/classnotes
|
sk/2019/07/test_rocket1.py
|
from rocketlander import RocketLander
from constants import LEFT_GROUND_CONTACT, RIGHT_GROUND_CONTACT
import numpy as np
import pyglet
if __name__ == "__main__":
# Settings holds all the settings for the rocket lander environment.
settings = {'Side Engines': True,
'Clouds': True,
'Vectorized Nozzle': True,
'Starting Y-Pos Constant': 1,
'Initial Force': 'random'} # (6000, -10000)}
env = RocketLander(settings)
s = env.reset()
left_or_right_barge_movement = np.random.randint(0, 2)
for i in range(50):
a = [10.0, 1.0, 1.0]
s, r, done, info = env.step(a)
# -------------------------------------
# Optional render
env.render()
# Draw the target
buffer = pyglet.image.get_buffer_manager().get_color_buffer()
image_data = buffer.get_image_data()
if i % 5 == 0:
image_data.save(filename='frames/rocket-%04d.png' % i)
env.draw_marker(env.landing_coordinates[0], env.landing_coordinates[1])
# Refresh render
env.refresh(render=False)
# When should the barge move? Water movement, dynamics etc can be simulated here.
if s[LEFT_GROUND_CONTACT] == 0 and s[RIGHT_GROUND_CONTACT] == 0:
env.move_barge_randomly(0.05, left_or_right_barge_movement)
# Random Force on rocket to simulate wind.
env.apply_random_x_disturbance \
(epsilon=0.005, \
left_or_right=left_or_right_barge_movement)
env.apply_random_y_disturbance(epsilon=0.005)
# Touch down or pass abs(THETA_LIMIT)
if done: break
|
wrouesnel/ansible
|
lib/ansible/modules/cloud/vmware/vmware_vmkernel_facts.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2018, Abhijeet Kasurde <akasurde@redhat.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = r'''
---
module: vmware_vmkernel_facts
short_description: Gathers VMKernel facts about an ESXi host
description:
- This module can be used to gather VMKernel facts about an ESXi host from given ESXi hostname or cluster name.
version_added: '2.5'
author:
- Abhijeet Kasurde (@akasurde)
notes:
- Tested on vSphere 6.5
requirements:
- python >= 2.6
- PyVmomi
options:
cluster_name:
description:
- Name of the cluster.
- VMKernel facts about each ESXi server will be returned for the given cluster.
- If C(esxi_hostname) is not given, this parameter is required.
esxi_hostname:
description:
- ESXi hostname.
- VMKernel facts about this ESXi server will be returned.
- If C(cluster_name) is not given, this parameter is required.
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = r'''
- name: Gather VMKernel facts about all ESXi Host in given Cluster
vmware_vmkernel_facts:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
cluster_name: cluster_name
register: cluster_host_vmks
- name: Gather VMKernel facts about ESXi Host
vmware_vmkernel_facts:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
esxi_hostname: '{{ esxi_hostname }}'
register: host_vmks
'''
RETURN = r'''
host_vmk_facts:
description: metadata about VMKernel present on given host system
returned: success
type: dict
sample:
{
"10.76.33.208": [
{
"device": "vmk0",
"dhcp": true,
"enable_ft": false,
"enable_management": true,
"enable_vmotion": false,
"enable_vsan": false,
"ipv4_address": "10.76.33.28",
"ipv4_subnet_mask": "255.255.255.0",
"key": "key-vim.host.VirtualNic-vmk0",
"mac": "52:54:00:12:50:ce",
"mtu": 1500,
"portgroup": "Management Network",
"stack": "defaultTcpipStack"
},
]
}
'''
try:
from pyVmomi import vim, vmodl
except ImportError:
pass
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.vmware import vmware_argument_spec, PyVmomi
from ansible.module_utils._text import to_native
class VmkernelFactsManager(PyVmomi):
def __init__(self, module):
super(VmkernelFactsManager, self).__init__(module)
cluster_name = self.params.get('cluster_name', None)
esxi_host_name = self.params.get('esxi_hostname', None)
self.hosts = self.get_all_host_objs(cluster_name=cluster_name, esxi_host_name=esxi_host_name)
self.service_type_vmks = dict()
self.get_all_vmks_by_service_type()
def get_all_vmks_by_service_type(self):
"""
Function to return information about service types and VMKernel
"""
for host in self.hosts:
self.service_type_vmks[host.name] = dict(vmotion=[], vsan=[], management=[], faultToleranceLogging=[])
for service_type in self.service_type_vmks[host.name].keys():
vmks_list = self.query_service_type_for_vmks(host, service_type)
self.service_type_vmks[host.name][service_type] = vmks_list
def query_service_type_for_vmks(self, host_system, service_type):
"""
Function to return list of VMKernels
Args:
host_system: Host system managed object
service_type: Name of service type
Returns: List of VMKernel which belongs to that service type
"""
vmks_list = []
query = None
try:
query = host_system.configManager.virtualNicManager.QueryNetConfig(service_type)
except vim.fault.HostConfigFault as config_fault:
self.module.fail_json(msg="Failed to get all VMKs for service type %s due to"
" host config fault : %s" % (service_type, to_native(config_fault.msg)))
except vmodl.fault.InvalidArgument as invalid_argument:
self.module.fail_json(msg="Failed to get all VMKs for service type %s due to"
" invalid arguments : %s" % (service_type, to_native(invalid_argument.msg)))
except Exception as e:
self.module.fail_json(msg="Failed to get all VMKs for service type %s due to"
"%s" % (service_type, to_native(e)))
if not query.selectedVnic:
return vmks_list
selected_vnics = [vnic for vnic in query.selectedVnic]
vnics_with_service_type = [vnic.device for vnic in query.candidateVnic if vnic.key in selected_vnics]
return vnics_with_service_type
def gather_host_vmk_facts(self):
hosts_facts = {}
for host in self.hosts:
host_vmk_facts = []
host_network_system = host.config.network
if host_network_system:
vmks_config = host.config.network.vnic
for vmk in vmks_config:
host_vmk_facts.append(dict(
device=vmk.device,
key=vmk.key,
portgroup=vmk.portgroup,
ipv4_address=vmk.spec.ip.ipAddress,
ipv4_subnet_mask=vmk.spec.ip.subnetMask,
dhcp=vmk.spec.ip.dhcp,
mac=vmk.spec.mac,
mtu=vmk.spec.mtu,
stack=vmk.spec.netStackInstanceKey,
enable_vsan=vmk.device in self.service_type_vmks[host.name]['vsan'],
enable_vmotion=vmk.device in self.service_type_vmks[host.name]['vmotion'],
enable_management=vmk.device in self.service_type_vmks[host.name]['management'],
enable_ft=vmk.device in self.service_type_vmks[host.name]['faultToleranceLogging'],
)
)
hosts_facts[host.name] = host_vmk_facts
return hosts_facts
def main():
argument_spec = vmware_argument_spec()
argument_spec.update(
cluster_name=dict(type='str', required=False),
esxi_hostname=dict(type='str', required=False),
)
module = AnsibleModule(
argument_spec=argument_spec,
required_one_of=[
['cluster_name', 'esxi_hostname'],
]
)
vmware_vmk_config = VmkernelFactsManager(module)
module.exit_json(changed=False, host_vmk_facts=vmware_vmk_config.gather_host_vmk_facts())
if __name__ == "__main__":
main()
|
boutproject/BOUT-2.0
|
examples/non-local_1d/analyse_check_q_boundary_condition.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Runs the conduction example, produces some output
#
nproc = 1 # Number of processors to use
from boututils import shell, launch, plotdata
from boutdata import collect
import numpy as np
from sys import argv
from math import sqrt, log10, log, pi
from matplotlib import pyplot
gamma = 3.
if len(argv)>1:
data_path = str(argv[1])
else:
data_path = "data"
electron_mass = 9.10938291e-31
ion_mass = 3.34358348e-27
# Collect the data
Te = collect("T_electron", path=data_path, info=True, yguards=True)
Ti = collect("T_ion", path=data_path, info=True, yguards=True)
n = collect("n_ion", path=data_path, info=True, yguards=True)
V = collect("Vpar_ion", path=data_path, info=True, yguards=True)
q = collect("heat_flux", path=data_path, info=True, yguards=True)
q_electron_left = []
q_electron_right = []
right_index = len(Te[0,2,:,0])-4
for i in range(len(Te[:,2,0,0])):
Te_left = (Te[i,2,2,0]+Te[i,2,1,0])/2.
Ti_left = (Ti[i,2,2,0]+Ti[i,2,1,0])/2.
n_left = (n[i,2,2,0]+n[i,2,1,0])/2.
Te_right = (Te[i,2,right_index,0]+Te[i,2,right_index+1,0])/2
Ti_right = (Ti[i,2,right_index,0]+Ti[i,2,right_index+1,0])/2
n_right = (n[i,2,right_index,0]+n[i,2,right_index+1,0])/2
sheath_potential = 0.5*Te_left*log(2*pi*electron_mass/ion_mass*(1+gamma*Ti_left/Te_left))
q_electron_left.append((2.0*Te_left-sheath_potential)*n_left*V[i,2,2,0]) # in W/m^2
sheath_potential = 0.5*Te_right*log(2*pi*electron_mass/ion_mass*(1+gamma*Ti_right/Te_right))
q_electron_right.append((2.0*Te_right-sheath_potential)*n_right*V[i,2,right_index+1,0]) # in W/m^2
pyplot.figure(1)
pyplot.plot(q_electron_left,'r',q[:,2,2,0],'b',q_electron_right,'r',q[:,2,right_index+1,0],'b')
pyplot.title("Electron heat flux at the boundaries (blue) and calculated boundary value (red)\n\n")
pyplot.xlabel(u"t/μs")
pyplot.ylabel("Q/eV.m$^{-2}$")
pyplot.figure(2)
pyplot.plot(q[:,2,2,0]-q_electron_left,'b',q[:,2,right_index+1,0]-q_electron_right,'r')
pyplot.title("Difference between heat flux and its calculated boundary value at the left (blue) and right (red) boundaries\n\n")
pyplot.xlabel(u"t/μs")
pyplot.ylabel("dQ/eV.m$^{-2}$")
pyplot.show()
|
DarkFenX/Phobos
|
util/__init__.py
|
#===============================================================================
# Copyright (C) 2014-2019 Anton Vorobyov
#
# This file is part of Phobos.
#
# Phobos is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Phobos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Phobos. If not, see <http://www.gnu.org/licenses/>.
#===============================================================================
from .cached_property import cachedproperty
from .eve_normalize import EveNormalizer
from .resource_browser import ResourceBrowser
from .translator import Translator
|
sergiopasra/megaradrp
|
megaradrp/processing/tests/test_fibermatch.py
|
import pytest
from megaradrp.processing.fibermatch import generate_box_model
from megaradrp.processing.fibermatch import count_peaks
PEAKS = [
3.806000000000000000e+03,
3.812000000000000000e+03,
3.818000000000000000e+03,
3.824000000000000000e+03,
3.830000000000000000e+03,
3.836000000000000000e+03,
3.842000000000000000e+03,
3.848000000000000000e+03,
3.854000000000000000e+03,
3.860000000000000000e+03,
3.867000000000000000e+03,
3.872000000000000000e+03,
3.878000000000000000e+03,
3.884000000000000000e+03,
3.890000000000000000e+03,
3.897000000000000000e+03,
3.903000000000000000e+03,
3.909000000000000000e+03,
3.915000000000000000e+03,
3.921000000000000000e+03
]
def test_generate_model():
expected = [
(1, 0),
(2, 0),
(3, 0),
(4, 0),
(5, 0)
]
model = generate_box_model(5, start=1)
assert len(model) == len(expected)
for m, e in zip(model, expected):
assert m == e
expected = [
(1, 0),
(2, 1),
(3, 0),
(4, 0),
(5, 0)
]
model = generate_box_model(5, missing_relids=[2])
assert len(model) == len(expected)
for m, e in zip(model, expected):
assert m == e
expected = [
(10, 0),
(12, 1),
(13, 0),
(14, 0),
(15, 0)
]
model = generate_box_model(5, start=10, skip_fibids=[11], missing_relids=[2])
assert len(model) == len(expected)
for m, e in zip(model, expected):
assert m == e
def test_count_peaks1():
with pytest.raises(ValueError):
count_peaks([])
def test_count_peaks():
expected = []
idx = 0
for p in PEAKS:
t = (idx + 1, p, 0, idx)
expected.append(t)
idx += 1
result = count_peaks(PEAKS, tol=1.2, distance=6.0)
assert result == expected
|
ezequielpereira/Time-Line
|
timelinelib/wxgui/dialogs/setcategory/controller.py
|
# Copyright (C) 2009, 2010, 2011, 2012, 2013, 2014, 2015 Rickard Lindberg, Roger Lindberg
#
# This file is part of Timeline.
#
# Timeline is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Timeline is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Timeline. If not, see <http://www.gnu.org/licenses/>.
from timelinelib.wxgui.framework import Controller
class SetCategoryDialogController(Controller):
def on_init(self, db, selected_event_ids):
self._db = db
self._selected_event_ids = selected_event_ids
self.view.PopulateCategories()
self._set_title()
def on_ok_clicked(self, event):
category = self.view.GetSelectedCategory()
if not self._category_is_given(category) and self._selected_event_ids == []:
self.view.DisplayErrorMessage(_("You must select a category!"))
else:
self._save_category_in_events(category)
self.view.EndModalOk()
def _set_title(self):
if self._selected_event_ids == []:
self.view.SetTitle(_("Set Category on events without category"))
else:
self.view.SetTitle(_("Set Category on selected events"))
def _category_is_given(self, category):
return category is not None
def _save_category_in_events(self, category):
if self._selected_event_ids == []:
self._save_category_in_events_for_events_without_category(category)
else:
self._save_category_in_events_for_selected_events(category)
def _save_category_in_events_for_selected_events(self, category):
for event_id in self._selected_event_ids:
event = self._db.find_event_with_id(event_id)
event.set_category(category)
def _save_category_in_events_for_events_without_category(self, category):
for event in self._db.get_all_events():
if event.get_category() is None:
event.set_category(category)
def _events_without_category_exists(self):
for event in self._db.get_all_events():
if event.category is None:
return True
return False
|
electrolinux/weblate
|
weblate/trans/aresource.py
|
# -*- coding: utf-8 -*-
#
# Copyright © 2012 - 2015 Michal Čihař <michal@cihar.com>
#
# This file is part of Weblate <http://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Weblate wrapper around translate-toolkit formats to add missing
functionality.
"""
import json
from translate.storage.jsonl10n import JsonFile as JsonFileTT
class JsonFile(JsonFileTT):
"""
    Works around a translate-toolkit bug where newly added units are not
    included in the saved file. This is fixed in translate-toolkit 1.13.0.
"""
def __str__(self):
data = {}
# This is really broken for many reasons, but works for
# simple JSON files.
for unit in self.units:
data[unit.getid().lstrip('.')] = unit.source
return json.dumps(
data, sort_keys=True, indent=4, ensure_ascii=False
).encode('utf-8')
|
NAMD/pypln.backend
|
tests/test_worker_bigrams.py
|
# coding: utf-8
#
# Copyright 2012 NAMD-EMAP-FGV
#
# This file is part of PyPLN. You can get more information at: http://pypln.org/.
#
# PyPLN is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyPLN is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PyPLN. If not, see <http://www.gnu.org/licenses/>.
import nltk
from pypln.backend.workers.bigrams import Bigrams
from utils import TaskTest
bigram_measures = nltk.collocations.BigramAssocMeasures()
class TestBigramWorker(TaskTest):
def test_bigrams_should_return_correct_score(self):
# We need this list comprehension because we need to save the word list
# in mongo (thus, it needs to be json serializable). Also, a list is
# what will be available to the worker in real situations.
tokens = [w for w in
nltk.corpus.genesis.words('english-web.txt')]
doc_id = self.collection.insert({'tokens': tokens}, w=1)
Bigrams().delay(doc_id)
refreshed_document = self.collection.find_one({'_id': doc_id})
bigram_rank = refreshed_document['bigram_rank']
result = bigram_rank[0][1][0]
# This is the value of the chi_sq measure for this bigram in this
        # collocation
expected_chi_sq = 95.59393417173634
self.assertEqual(result, expected_chi_sq)
def test_bigrams_could_contain_dollar_signs_and_dots(self):
tokens = ['$', '.']
doc_id = self.collection.insert({'tokens': tokens}, w=1)
Bigrams().delay(doc_id)
refreshed_document = self.collection.find_one({'_id': doc_id})
bigram_rank = refreshed_document['bigram_rank']
result = bigram_rank[0][1][0]
# 2.0 is the value of the chi_sq measure for this bigram in this
        # collocation
expected_chi_sq = 2.0
self.assertEqual(result, expected_chi_sq)
|
GrotheFAF/client
|
src/model/playerset.py
|
from PyQt5.QtCore import QObject, pyqtSignal
from model.player import Player
class Playerset(QObject):
"""
Wrapper for an id->Player map
Used to lookup players either by id or by login.
"""
playerAdded = pyqtSignal(object)
playerRemoved = pyqtSignal(object)
def __init__(self):
QObject.__init__(self)
# UID -> Player map
self._players = {}
# Login -> Player map
self._logins = {}
def __getitem__(self, item):
if isinstance(item, int):
return self._players[item]
if isinstance(item, str):
return self._logins[item]
raise TypeError
def __len__(self):
return len(self._players)
def __iter__(self):
return iter(self._players)
    # We need to define the methods below ourselves - QObject
    # doesn't allow mixing in Mapping
def keys(self):
return self._players.keys()
def values(self):
return self._players.values()
def items(self):
return self._players.items()
def get(self, item, default=None):
try:
return self[item]
except KeyError:
return default
def __contains__(self, item):
try:
self[item]
return True
except KeyError:
return False
def getID(self, name):
if name in self:
return self[name].id
return -1
def __setitem__(self, key, value):
if not isinstance(key, int) or not isinstance(value, Player):
raise TypeError
if key in self: # disallow overwriting existing players
raise ValueError
if key != value.id:
raise ValueError
self._players[key] = value
self._logins[value.login] = value
self.playerAdded.emit(value)
def __delitem__(self, item):
try:
player = self[item]
except KeyError:
return
del self._players[player.id]
del self._logins[player.login]
self.playerRemoved.emit(player)
def clear(self):
oldplayers = list(self.keys())
for player in oldplayers:
del self[player]
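# --- Editor's note: hedged usage sketch (editor's addition). Player's
# --- constructor lives in model.player, which is not shown here, so the lines
# --- below only illustrate the dual-index contract:
#
#   players = Playerset()
#   players[some_player.id] = some_player                    # key must equal player.id
#   players[some_player.id] is players[some_player.login]    # same object, two indexes
#   players.getID(some_player.login)                         # numeric id, or -1
#   del players[some_player.login]                           # removal works by either key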
|
joelfiddes/toposubv2
|
topoMAPP/getERA/era_prep.py
|
#!/usr/bin/env python
""" This module preprocesses ERA-Interim data, units, accumulated to instantaneous values and timestep interpolation for 6 h to 3 h values.
Example:
as import:
from getERA import era_prep as prep
prep.main(wd, config['main']['startDate'], config['main']['endDate'])
Attributes:
wd = "/home/joel/sim/topomap_test/"
plotshp = TRUE
Todo:
"""
path2script = "./rsrc/toposcale_pre2.R"
# main
def main(wd, startDate, endDate):
"""Main entry point for the script."""
run_rscript_fileout(path2script,[wd, startDate, endDate])
# functions
def run_rscript_stdout(path2script , args):
""" Function to define comands to run an Rscript. Returns an object. """
import subprocess
command = 'Rscript'
cmd = [command, path2script] + args
print("Running:" + str(cmd))
x = subprocess.check_output(cmd, universal_newlines=True)
return(x)
def run_rscript_fileout(path2script , args):
""" Function to define comands to run an Rscript. Outputs a file. """
import subprocess
command = 'Rscript'
cmd = [command, path2script] + args
print("Running:" + str(cmd))
subprocess.check_output(cmd)
# calling main
if __name__ == '__main__':
import sys
wd = sys.argv[1]
startDate = sys.argv[2]
endDate = sys.argv[3]
main(wd, startDate, endDate)
|
LTD-Beget/sprutio
|
app/modules/webdav/actions/files/copy.py
|
from core import FM
from core.FMOperation import FMOperation
class CopyFiles(FM.BaseAction):
def __init__(self, request, paths, session, target, overwrite, **kwargs):
super(CopyFiles, self).__init__(request=request, **kwargs)
self.paths = paths
self.session = session
self.target = target
self.overwrite = overwrite
def run(self):
request = self.get_rpc_request()
operation = FMOperation.create(FM.Action.COPY, FMOperation.STATUS_WAIT)
result = request.request('webdav/copy_files', login=self.request.get_current_user(),
password=self.request.get_current_password(), status_id=operation.id,
source=self.session, target=self.target, paths=self.paths, overwrite=self.overwrite)
answer = self.process_result(result)
answer["data"] = operation.as_dict()
return answer
|
sillvan/hyperspy
|
hyperspy/io_plugins/__init__.py
|
# -*- coding: utf-8 -*-
# Copyright 2007-2011 The HyperSpy developers
#
# This file is part of HyperSpy.
#
# HyperSpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# HyperSpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HyperSpy. If not, see <http://www.gnu.org/licenses/>.
from hyperspy import messages
from hyperspy.io_plugins import (msa, digital_micrograph, fei, mrc,
ripple, tiff)
io_plugins = [msa, digital_micrograph, fei, mrc, ripple, tiff]
try:
from hyperspy.io_plugins import netcdf
io_plugins.append(netcdf)
except ImportError:
pass
# NetCDF is obsolete and is only provided for users who have
# old EELSLab files. Therefore, we print no message if it is not
# available
#~ messages.information('The NetCDF IO features are not available')
try:
from hyperspy.io_plugins import hdf5
io_plugins.append(hdf5)
except ImportError:
messages.warning('The HDF5 IO features are not available. '
                     'It is highly recommended to install h5py')
try:
from hyperspy.io_plugins import image
io_plugins.append(image)
except ImportError:
messages.information('The Image (PIL) IO features are not available')
default_write_ext = set()
for plugin in io_plugins:
if plugin.writes:
default_write_ext.add(
plugin.file_extensions[plugin.default_extension])
|
SickGear/SickGear
|
lib/soupsieve_py3/css_types.py
|
"""CSS selector structure items."""
import copyreg
from collections.abc import Hashable, Mapping
__all__ = (
'Selector',
'SelectorNull',
'SelectorTag',
'SelectorAttribute',
'SelectorContains',
'SelectorNth',
'SelectorLang',
'SelectorList',
'Namespaces',
'CustomSelectors'
)
SEL_EMPTY = 0x1
SEL_ROOT = 0x2
SEL_DEFAULT = 0x4
SEL_INDETERMINATE = 0x8
SEL_SCOPE = 0x10
SEL_DIR_LTR = 0x20
SEL_DIR_RTL = 0x40
SEL_IN_RANGE = 0x80
SEL_OUT_OF_RANGE = 0x100
SEL_DEFINED = 0x200
SEL_PLACEHOLDER_SHOWN = 0x400
class Immutable(object):
"""Immutable."""
__slots__ = ('_hash',)
def __init__(self, **kwargs):
"""Initialize."""
temp = []
for k, v in kwargs.items():
temp.append(type(v))
temp.append(v)
super(Immutable, self).__setattr__(k, v)
super(Immutable, self).__setattr__('_hash', hash(tuple(temp)))
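# Editor's note: the hash above is built from (type, value) pairs rather
# than the values alone, so instances whose attribute values compare equal
# across types (e.g. 1 == True) still hash differently.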
@classmethod
def __base__(cls):
"""Get base class."""
return cls
def __eq__(self, other):
"""Equal."""
return (
isinstance(other, self.__base__()) and
all([getattr(other, key) == getattr(self, key) for key in self.__slots__ if key != '_hash'])
)
def __ne__(self, other):
"""Equal."""
return (
not isinstance(other, self.__base__()) or
any([getattr(other, key) != getattr(self, key) for key in self.__slots__ if key != '_hash'])
)
def __hash__(self):
"""Hash."""
return self._hash
def __setattr__(self, name, value):
"""Prevent mutability."""
raise AttributeError("'{}' is immutable".format(self.__class__.__name__))
def __repr__(self): # pragma: no cover
"""Representation."""
return "{}({})".format(
self.__base__(), ', '.join(["{}={!r}".format(k, getattr(self, k)) for k in self.__slots__[:-1]])
)
__str__ = __repr__
class ImmutableDict(Mapping):
"""Hashable, immutable dictionary."""
def __init__(self, *args, **kwargs):
"""Initialize."""
arg = args[0] if args else kwargs
is_dict = isinstance(arg, dict)
if (
is_dict and not all([isinstance(v, Hashable) for v in arg.values()]) or
not is_dict and not all([isinstance(k, Hashable) and isinstance(v, Hashable) for k, v in arg])
):
raise TypeError('All values must be hashable')
self._d = dict(*args, **kwargs)
self._hash = hash(tuple([(type(x), x, type(y), y) for x, y in sorted(self._d.items())]))
def __iter__(self):
"""Iterator."""
return iter(self._d)
def __len__(self):
"""Length."""
return len(self._d)
def __getitem__(self, key):
"""Get item: `namespace['key']`."""
return self._d[key]
def __hash__(self):
"""Hash."""
return self._hash
def __repr__(self): # pragma: no cover
"""Representation."""
return "{!r}".format(self._d)
__str__ = __repr__
class Namespaces(ImmutableDict):
"""Namespaces."""
def __init__(self, *args, **kwargs):
"""Initialize."""
# If there are arguments, check the first index.
# `super` should fail if the user gave multiple arguments,
# so don't bother checking that.
arg = args[0] if args else kwargs
is_dict = isinstance(arg, dict)
if is_dict and not all([isinstance(k, str) and isinstance(v, str) for k, v in arg.items()]):
raise TypeError('Namespace keys and values must be Unicode strings')
elif not is_dict and not all([isinstance(k, str) and isinstance(v, str) for k, v in arg]):
raise TypeError('Namespace keys and values must be Unicode strings')
super(Namespaces, self).__init__(*args, **kwargs)
class CustomSelectors(ImmutableDict):
"""Custom selectors."""
def __init__(self, *args, **kwargs):
"""Initialize."""
# If there are arguments, check the first index.
# `super` should fail if the user gave multiple arguments,
# so don't bother checking that.
arg = args[0] if args else kwargs
is_dict = isinstance(arg, dict)
if is_dict and not all([isinstance(k, str) and isinstance(v, str) for k, v in arg.items()]):
raise TypeError('CustomSelectors keys and values must be Unicode strings')
elif not is_dict and not all([isinstance(k, str) and isinstance(v, str) for k, v in arg]):
raise TypeError('CustomSelectors keys and values must be Unicode strings')
super(CustomSelectors, self).__init__(*args, **kwargs)
class Selector(Immutable):
"""Selector."""
__slots__ = (
'tag', 'ids', 'classes', 'attributes', 'nth', 'selectors',
'relation', 'rel_type', 'contains', 'lang', 'flags', '_hash'
)
def __init__(
self, tag, ids, classes, attributes, nth, selectors,
relation, rel_type, contains, lang, flags
):
"""Initialize."""
super(Selector, self).__init__(
tag=tag,
ids=ids,
classes=classes,
attributes=attributes,
nth=nth,
selectors=selectors,
relation=relation,
rel_type=rel_type,
contains=contains,
lang=lang,
flags=flags
)
class SelectorNull(Immutable):
"""Null Selector."""
def __init__(self):
"""Initialize."""
super(SelectorNull, self).__init__()
class SelectorTag(Immutable):
"""Selector tag."""
__slots__ = ("name", "prefix", "_hash")
def __init__(self, name, prefix):
"""Initialize."""
super(SelectorTag, self).__init__(
name=name,
prefix=prefix
)
class SelectorAttribute(Immutable):
"""Selector attribute rule."""
__slots__ = ("attribute", "prefix", "pattern", "xml_type_pattern", "_hash")
def __init__(self, attribute, prefix, pattern, xml_type_pattern):
"""Initialize."""
super(SelectorAttribute, self).__init__(
attribute=attribute,
prefix=prefix,
pattern=pattern,
xml_type_pattern=xml_type_pattern
)
class SelectorContains(Immutable):
"""Selector contains rule."""
__slots__ = ("text", "_hash")
def __init__(self, text):
"""Initialize."""
super(SelectorContains, self).__init__(
text=text
)
class SelectorNth(Immutable):
"""Selector nth type."""
__slots__ = ("a", "n", "b", "of_type", "last", "selectors", "_hash")
def __init__(self, a, n, b, of_type, last, selectors):
"""Initialize."""
super(SelectorNth, self).__init__(
a=a,
n=n,
b=b,
of_type=of_type,
last=last,
selectors=selectors
)
class SelectorLang(Immutable):
"""Selector language rules."""
__slots__ = ("languages", "_hash",)
def __init__(self, languages):
"""Initialize."""
super(SelectorLang, self).__init__(
languages=tuple(languages)
)
def __iter__(self):
"""Iterator."""
return iter(self.languages)
def __len__(self): # pragma: no cover
"""Length."""
return len(self.languages)
def __getitem__(self, index): # pragma: no cover
"""Get item."""
return self.languages[index]
class SelectorList(Immutable):
"""Selector list."""
__slots__ = ("selectors", "is_not", "is_html", "_hash")
def __init__(self, selectors=tuple(), is_not=False, is_html=False):
"""Initialize."""
super(SelectorList, self).__init__(
selectors=tuple(selectors),
is_not=is_not,
is_html=is_html
)
def __iter__(self):
"""Iterator."""
return iter(self.selectors)
def __len__(self):
"""Length."""
return len(self.selectors)
def __getitem__(self, index):
"""Get item."""
return self.selectors[index]
def _pickle(p):
return p.__base__(), tuple([getattr(p, s) for s in p.__slots__[:-1]])
def pickle_register(obj):
"""Allow object to be pickled."""
copyreg.pickle(obj, _pickle)
pickle_register(Selector)
pickle_register(SelectorNull)
pickle_register(SelectorTag)
pickle_register(SelectorAttribute)
pickle_register(SelectorContains)
pickle_register(SelectorNth)
pickle_register(SelectorLang)
pickle_register(SelectorList)
|
PEAT-AI/Automato
|
Surveillance/make_ndvi.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# ndvi_test.py
#
# Copyright 2015 rob <rob@Novu>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
'''just an annoying dummy to get rid of Gtk2 and Gtk3 incompatibilities'''
from infrapix import infrapix
import sys
infrapix.ndvi(sys.argv[1], sys.argv[2], show_histogram=True)
|
drayanaindra/inasafe
|
safe/messaging/styles.py
|
"""
InaSAFE Disaster risk assessment tool developed by AusAid **Messaging styles.**
Contact : ole.moller.nielsen@gmail.com
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
Style constants for use with messaging. Example usage::
from messaging.styles import PROGRESS_UPDATE_STYLE
m.ImportantText(myTitle, **PROGRESS_UPDATE_STYLE)
This will result in some standardised styling being applied to the important
text element.
"""
__author__ = 'tim@linfiniti.com'
__revision__ = '$Format:%H$'
__date__ = '06/06/2013'
__copyright__ = ('Copyright 2012, Australia Indonesia Facility for '
'Disaster Reduction')
# These all apply to heading elements
PROGRESS_UPDATE_STYLE = {
'level': 5,
'icon': 'icon-cog icon-white',
'style_class': 'info'}
INFO_STYLE = {
'level': 5,
'icon': 'icon-info-sign icon-white',
'style_class': 'info'}
WARNING_STYLE = {
'level': 5,
'icon': 'icon-warning-sign icon-white',
'style_class': 'warning'}
SUGGESTION_STYLE = {
'level': 5,
'icon': 'icon-comment icon-white',
'style_class': 'suggestion'}
PROBLEM_STYLE = {
'level': 5,
'icon': 'icon-remove-sign icon-white',
'style_class': 'warning'}
DETAILS_STYLE = {
'level': 5,
'icon': 'icon-list icon-white',
'style_class': 'problem'}
SMALL_ICON_STYLE = {
'attributes': 'style="width: 24px; height: 24px;"',
}
TRACEBACK_STYLE = {
'level': 5,
'icon': 'icon-info-sign icon-white',
'style_class': 'inverse',
'attributes': 'onclick="toggleTracebacks();"'}
TRACEBACK_ITEMS_STYLE = {
'style_class': 'traceback-detail',
}
# This is typically a text element or its derivatives
KEYWORD_STYLE = {
# not working unless you turn css on and off again using inspector
#'style_class': 'label label-success'
}
|
jelly/calibre
|
src/calibre/gui2/preferences/device_user_defined.py
|
#!/usr/bin/env python2
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
from __future__ import with_statement
__license__ = 'GPL v3'
__copyright__ = '2009, Kovid Goyal <kovid@kovidgoyal.net>'
__docformat__ = 'restructuredtext en'
from PyQt5.Qt import QDialog, QVBoxLayout, QPlainTextEdit, QTimer, \
QDialogButtonBox, QPushButton, QApplication, QIcon, QMessageBox
def step_dialog(parent, title, msg, det_msg=''):
d = QMessageBox(parent)
d.setWindowTitle(title)
d.setText(msg)
d.setStandardButtons(QMessageBox.Ok | QMessageBox.Cancel)
return d.exec_() & QMessageBox.Cancel
class UserDefinedDevice(QDialog):
def __init__(self, parent=None):
QDialog.__init__(self, parent)
self._layout = QVBoxLayout(self)
self.setLayout(self._layout)
self.log = QPlainTextEdit(self)
self._layout.addWidget(self.log)
self.log.setPlainText(_('Getting device information')+'...')
self.copy = QPushButton(_('Copy to &clipboard'))
self.copy.setDefault(True)
self.setWindowTitle(_('User-defined device information'))
self.setWindowIcon(QIcon(I('debug.png')))
self.copy.clicked.connect(self.copy_to_clipboard)
self.ok = QPushButton('&OK')
self.ok.setAutoDefault(False)
self.ok.clicked.connect(self.accept)
self.bbox = QDialogButtonBox(self)
self.bbox.addButton(self.copy, QDialogButtonBox.ActionRole)
self.bbox.addButton(self.ok, QDialogButtonBox.AcceptRole)
self._layout.addWidget(self.bbox)
self.resize(750, 500)
self.bbox.setEnabled(False)
QTimer.singleShot(1000, self.device_info)
def device_info(self):
try:
from calibre.devices import device_info
r = step_dialog(self.parent(), _('Device Detection'),
_('Ensure your device is disconnected, then press OK'))
if r:
self.close()
return
before = device_info()
r = step_dialog(self.parent(), _('Device Detection'),
_('Ensure your device is connected, then press OK'))
if r:
self.close()
return
after = device_info()
new_devices = after['device_set'] - before['device_set']
res = ''
if len(new_devices) == 1:
def fmtid(x):
if isinstance(x, (int, long)):
x = hex(x)
if not x.startswith('0x'):
x = '0x' + x
return x
for d in new_devices:
res = _('USB Vendor ID (in hex)') + ': ' + \
fmtid(after['device_details'][d][0]) + '\n'
res += _('USB Product ID (in hex)') + ': ' + \
fmtid(after['device_details'][d][1]) + '\n'
res += _('USB Revision ID (in hex)') + ': ' + \
fmtid(after['device_details'][d][2]) + '\n'
trailer = _(
'Copy these values to the clipboard, paste them into an '
'editor, then enter them into the USER_DEVICE by '
'customizing the device plugin in Preferences->Advanced->Plugins. '
'Remember to also enter the folders where you want the books to '
'be put. You must restart calibre for your changes '
'to take effect.\n')
self.log.setPlainText(res + '\n\n' + trailer)
finally:
self.bbox.setEnabled(True)
def copy_to_clipboard(self):
QApplication.clipboard().setText(self.log.toPlainText())
if __name__ == '__main__':
app = QApplication([])
d = UserDefinedDevice()
d.exec_()
|
rghe/ansible
|
lib/ansible/modules/cloud/ovirt/ovirt_quota_facts.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ovirt_quota_facts
short_description: Retrieve facts about one or more oVirt/RHV quotas
version_added: "2.3"
author: "Red Hat"
description:
- "Retrieve facts about one or more oVirt/RHV quotas."
notes:
- "This module creates a new top-level C(ovirt_quotas) fact, which
contains a list of quotas."
options:
data_center:
description:
- "Name of the datacenter where quota resides."
required: true
name:
description:
- "Name of the quota, can be used as glob expression."
extends_documentation_fragment: ovirt_facts
'''
EXAMPLES = '''
# Examples don't contain auth parameter for simplicity,
# look at ovirt_auth module to see how to reuse authentication:
# Gather facts about quota named C(myquota) in Default datacenter:
- ovirt_quota_facts:
data_center: Default
name: myquota
- debug:
var: ovirt_quotas
'''
RETURN = '''
ovirt_quotas:
description: "List of dictionaries describing the quotas. Quota attributes are mapped to dictionary keys,
all quotas attributes can be found at following url: http://ovirt.github.io/ovirt-engine-api-model/master/#types/quota."
returned: On success.
type: list
'''
import fnmatch
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ovirt import (
check_sdk,
create_connection,
get_dict_of_struct,
ovirt_facts_full_argument_spec,
search_by_name,
)
def main():
argument_spec = ovirt_facts_full_argument_spec(
data_center=dict(required=True),
name=dict(default=None),
)
module = AnsibleModule(argument_spec)
if module._name == 'ovirt_quotas_facts':
module.deprecate("The 'ovirt_quotas_facts' module is being renamed 'ovirt_quota_facts'", version=2.8)
check_sdk(module)
try:
auth = module.params.pop('auth')
connection = create_connection(auth)
datacenters_service = connection.system_service().data_centers_service()
dc_name = module.params['data_center']
dc = search_by_name(datacenters_service, dc_name)
if dc is None:
raise Exception("Datacenter '%s' was not found." % dc_name)
quotas_service = datacenters_service.service(dc.id).quotas_service()
if module.params['name']:
quotas = [
e for e in quotas_service.list()
if fnmatch.fnmatch(e.name, module.params['name'])
]
else:
quotas = quotas_service.list()
module.exit_json(
changed=False,
ansible_facts=dict(
ovirt_quotas=[
get_dict_of_struct(
struct=c,
connection=connection,
fetch_nested=module.params.get('fetch_nested'),
attributes=module.params.get('nested_attributes'),
) for c in quotas
],
),
)
except Exception as e:
module.fail_json(msg=str(e), exception=traceback.format_exc())
finally:
connection.close(logout=auth.get('token') is None)
if __name__ == '__main__':
main()
|
superberny70/pelisalacarta
|
python/main-classic/servers/filesmonster.py
|
# -*- coding: utf-8 -*-
# ------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# Conector para filesmonster
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
# ------------------------------------------------------------
import re
from core import logger
from core import scrapertools
def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info("( page_url='%s')")
video_urls = []
itemlist = []
data1 = ''
data2 = ''
url = ''
alerta = '[filesmonster premium]'
enlace = "no"
post2 = "username=" + user + "&password=" + password
login_url = "http://filesmonster.com/api/public/login"
data1 = scrapertools.cache_page(login_url, post=post2)
partes1 = data1.split('"')
estado = partes1[3]
if estado != 'success': alerta = "[error de filesmonster premium]: " + estado
id = page_url
id = id.replace("http://filesmonster.com/download.php", "")
post = id.replace("?", "")
url = 'http://filesmonster.com/api/public/premiumDownload'
data2 = scrapertools.cache_page(url, post=post)
partes = data2.split('"')
url = partes[7]
filename = scrapertools.get_filename_from_url(url)[-4:]
alerta = filename + " " + alerta
if "http" not in url: alerta = "[error de filesmonster premium]: " + url
video_urls.append([alerta, url])
return video_urls
# Find videos from this server in the passed text
def find_videos(data):
encontrados = set()
devuelve = []
# http://uploaz.com/file/
patronvideos = '"filesmonster.com/download(.*?)"'
logger.info("#" + patronvideos + "#")
matches = re.compile(patronvideos, re.DOTALL).findall(data)
for match in matches:
titulo = "[filesmonster]"
url = "http://filesmonster.com/download" + match
if url not in encontrados:
logger.info(" url=" + url)
devuelve.append([titulo, url, 'filemonster'])
encontrados.add(url)
else:
logger.info(" url duplicada=" + url)
return devuelve
|
phillxnet/rockstor-core
|
src/rockstor/scripts/ovpn_util.py
|
"""
Copyright (c) 2012-2020 RockStor, Inc. <http://rockstor.com>
This file is part of RockStor.
RockStor is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published
by the Free Software Foundation; either version 2 of the License,
or (at your option) any later version.
RockStor is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import os
DOCKER = "/usr/bin/docker"
CMD = "%s run --volumes-from ovpn-data --rm" % DOCKER
image = "kylemanna/openvpn"
def initpki():
os.system(
"/usr/bin/docker run --volumes-from ovpn-data --rm "
"-it kylemanna/openvpn ovpn_initpki"
)
def client_gen():
client_name = raw_input("Enter a name for the client(no spaces): ") # noqa F821
os.system(
"%s -it %s easyrsa build-client-full %s nopass" % (CMD, image, client_name)
)
def client_retrieve():
client_name = raw_input(
"Enter the name of the client you like to retrieve: "
) # noqa F821 E501
outfile = "/tmp/%s.ovpn" % client_name
rc = os.system("%s %s ovpn_getclient %s > %s" % (CMD, image, client_name, outfile))
if rc == 0:
print(
"client configuration is saved at %s. It can be used by your "
"vpn client software to connect." % outfile
)
|
KaiRo-at/socorro
|
alembic/versions/32b54dec3fc0_fixes_bug_970406_add_raw_adi_logs_table.py
|
"""Fixes bug 970406 - add raw_adi_logs table
Revision ID: 32b54dec3fc0
Revises: 1ab8d5514ce2
Create Date: 2014-06-12 11:47:19.398882
"""
# revision identifiers, used by Alembic.
revision = '32b54dec3fc0'
down_revision = '1ef041dfc3d5'
from alembic import op
from socorrolib.lib import citexttype, jsontype, buildtype
from socorrolib.lib.migrations import fix_permissions, load_stored_proc
import sqlalchemy as sa
from sqlalchemy import types
from sqlalchemy.dialects import postgresql
from sqlalchemy.sql import table, column
def upgrade():
op.create_table('raw_adi_logs',
sa.Column('report_date', sa.DATE(), nullable=True),
sa.Column('product_name', sa.TEXT(), nullable=True),
sa.Column('product_os_platform', sa.TEXT(), nullable=True),
sa.Column('product_os_version', sa.TEXT(), nullable=True),
sa.Column('product_version', sa.TEXT(), nullable=True),
sa.Column('build', sa.TEXT(), nullable=True),
sa.Column('build_channel', sa.TEXT(), nullable=True),
sa.Column('product_guid', sa.TEXT(), nullable=True),
sa.Column('count', sa.INTEGER(), nullable=True)
)
def downgrade():
op.drop_table('raw_adi_logs')
|
xlqian/navitia
|
source/jormungandr/jormungandr/exceptions.py
|
# Copyright (c) 2001-2014, Canal TP and/or its affiliates. All rights reserved.
#
# This file is part of Navitia,
# the software to build cool stuff with public transport.
#
# Hope you'll enjoy and contribute to this project,
# powered by Canal TP (www.canaltp.fr).
# Help us simplify mobility and open public transport:
# a non ending quest to the responsive locomotion way of traveling!
#
# LICENCE: This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Stay tuned using
# twitter @navitia
# channel `#navitia` on riot https://riot.im/app/#/room/#navitia:matrix.org
# https://groups.google.com/d/forum/navitia
# www.navitia.io
from __future__ import absolute_import, print_function, unicode_literals, division
from flask import request
from werkzeug.exceptions import HTTPException
import logging
from jormungandr.new_relic import record_exception
__all__ = ["RegionNotFound", "DeadSocketException", "ApiNotFound", "InvalidArguments"]
def format_error(code, message):
error = {"error": {"id": code, "message": message}, "message": message}
return error
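# Example (editor's note):
#     format_error("unknown_object", "oops")
#     == {"error": {"id": "unknown_object", "message": "oops"},
#         "message": "oops"}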
class RegionNotFound(HTTPException):
def __init__(self, region=None, lon=None, lat=None, object_id=None, custom_msg=None):
super(RegionNotFound, self).__init__()
self.code = 404
if custom_msg:
self.data = format_error("unknown_object", custom_msg)
return
if object_id:
if object_id.count(";") == 1:
lon, lat = object_id.split(";")
object_id = None
elif object_id[:6] == "coord:":
lon, lat = object_id[6:].split(":")
object_id = None
if not any([region, lon, lat, object_id]):
self.data = format_error("unknown_object", "No region nor " "coordinates given")
elif region and not any([lon, lat, object_id]):
self.data = format_error("unknown_object", "The region {0} doesn't exist".format(region))
elif not any([region, object_id]) and lon and lat:
self.data = format_error(
"unknown_object",
"No region available for the coordinates:" "{lon}, {lat}".format(lon=lon, lat=lat),
)
elif region == lon == lat is None and object_id:
self.data = format_error("unknown_object", "Invalid id : {id}".format(id=object_id))
else:
self.data = format_error("unknown_object", "Unable to parse region")
def __str__(self):
return repr(self.data['message'])
class DeadSocketException(HTTPException):
def __init__(self, region, path):
super(DeadSocketException, self).__init__()
error = 'The region {} is dead'.format(region)
self.data = format_error("dead_socket", error)
self.code = 503
class ApiNotFound(HTTPException):
def __init__(self, api):
super(ApiNotFound, self).__init__()
error = 'The api {} doesn\'t exist'.format(api)
self.data = format_error("unknown_object", error)
self.code = 404
class UnknownObject(HTTPException):
def __init__(self, msg):
super(UnknownObject, self).__init__()
error = 'The object {} doesn\'t exist'.format(msg)
self.data = format_error("unknown_object", error)
self.code = 404
class InvalidArguments(HTTPException):
def __init__(self, arg):
super(InvalidArguments, self).__init__()
self.data = format_error("unknown_object", "Invalid arguments " + arg)
self.code = 400
class UnableToParse(HTTPException):
def __init__(self, msg):
super(UnableToParse, self).__init__()
self.data = format_error("unable_to_parse", msg)
self.code = 400
class TechnicalError(HTTPException):
def __init__(self, msg):
super(TechnicalError, self).__init__()
self.data = format_error("technical_error", msg)
self.code = 500
class ConfigException(Exception):
def __init__(self, arg):
super(ConfigException, self).__init__(arg)
self.data = format_error("config_exception", "Invalid config " + arg)
self.code = 400
def log_exception(sender, exception, **extra):
logger = logging.getLogger(__name__)
message = ""
if hasattr(exception, "data") and "message" in exception.data:
message = exception.data['message']
error = '{} {} {}'.format(exception.__class__.__name__, message, request.url)
if isinstance(exception, (HTTPException, RegionNotFound)):
logger.debug(error)
if exception.code >= 500:
record_exception()
else:
logger.exception(error)
record_exception()
|
petrjasek/superdesk-core
|
apps/archive_broadcast/__init__.py
|
# -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
import logging
from flask_babel import lazy_gettext
import superdesk
from .broadcast import ArchiveBroadcastResource, ArchiveBroadcastService, ARCHIVE_BROADCAST_NAME
logger = logging.getLogger(__name__)
def init_app(app) -> None:
endpoint_name = ARCHIVE_BROADCAST_NAME
service = ArchiveBroadcastService(endpoint_name, backend=superdesk.get_backend())
ArchiveBroadcastResource(endpoint_name, app=app, service=service)
superdesk.privilege(
name=ARCHIVE_BROADCAST_NAME,
label=lazy_gettext("Broadcast"),
description=lazy_gettext("Allows user to create broadcast content."),
)
|
shashi792/courtlistener
|
alert/donate/admin.py
|
from django.contrib import admin
from alert.donate.models import Donation
from alert.userHandling.models import UserProfile
class DonorInline(admin.TabularInline):
model = UserProfile.donation.through
max_num = 1
raw_id_fields = (
'userprofile',
)
class DonationAdmin(admin.ModelAdmin):
readonly_fields = (
'date_modified',
'date_created',
)
list_display = (
'__str__',
'amount',
'payment_provider',
'status',
'date_created',
'referrer',
)
list_filter = (
'payment_provider',
'status',
'referrer',
)
inlines = (
DonorInline,
)
admin.site.register(Donation, DonationAdmin)
|
VirusTotal/misp-modules
|
misp_modules/modules/expansion/threatfox.py
|
# -*- coding: utf-8 -*-
import requests
import json
misperrors = {'error': 'Error'}
mispattributes = {'input': ['md5', 'sha1', 'sha256', 'domain', 'url', 'email-src', 'ip-dst|port', 'ip-src|port'], 'output': ['text']}
moduleinfo = {'version': '0.1', 'author': 'Corsin Camichel', 'description': 'Module to search for an IOC on ThreatFox by abuse.ch.', 'module-type': ['hover', 'expansion']}
moduleconfig = []
API_URL = "https://threatfox-api.abuse.ch/api/v1/"
# copied from
# https://github.com/marjatech/threatfox2misp/blob/main/threatfox2misp.py
def confidence_level_to_tag(level: int) -> str:
confidence_tagging = {
0: 'misp:confidence-level="unconfident"',
10: 'misp:confidence-level="rarely-confident"',
37: 'misp:confidence-level="fairly-confident"',
63: 'misp:confidence-level="usually-confident"',
90: 'misp:confidence-level="completely-confident"',
}
confidence_tag = ""
for tag_minvalue, tag in confidence_tagging.items():
if level >= tag_minvalue:
confidence_tag = tag
return confidence_tag
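# Example (editor's note): the loop keeps the tag of the highest threshold
# that does not exceed `level`, so confidence_level_to_tag(50) returns
# 'misp:confidence-level="fairly-confident"' (37 <= 50 < 63).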
def handler(q=False):
if q is False:
return False
request = json.loads(q)
ret_val = ""
for input_type in mispattributes['input']:
if input_type in request:
to_query = request[input_type]
break
else:
misperrors['error'] = "Unsupported attribute type"
return misperrors
data = {"query": "search_ioc", "search_term": f"{to_query}"}
response = requests.post(API_URL, data=json.dumps(data))
if response.status_code == 200:
result = json.loads(response.text)
if result["query_status"] == "ok":
confidence_tag = confidence_level_to_tag(result["data"][0]["confidence_level"])
ret_val = {'results': [{'types': mispattributes['output'], 'values': [result["data"][0]["threat_type_desc"]], 'tags': [result["data"][0]["malware"], result["data"][0]["malware_printable"], confidence_tag]}]}
return ret_val
def introspection():
return mispattributes
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo
|
policycompass/policycompass-services
|
apps/metricsmanager/signals.py
|
"""
This module registers Django signals that automatically update the Elasticsearch index.
When an item is created, a signal is fired that runs the create/update index API of the Search Manager.
When an item is deleted, a signal is fired that runs the delete index API of the Search Manager.
This way the Policy Compass database and the Elasticsearch index remain in sync.
"""
from django.db.models.signals import post_save, post_delete
from django.dispatch import receiver
from .models import Metric
from apps.searchmanager.signalhandlers import search_index_update, search_index_delete
from apps.datasetmanager import internal_api
@receiver(post_save, sender=Metric)
def update_document_on_search_service(sender, **kwargs):
if not kwargs.get('raw', False):
instance = kwargs['instance']
search_index_update('metric', instance.id)
@receiver(post_delete, sender=Metric)
def delete_document_on_search_service(sender, **kwargs):
instance = kwargs['instance']
search_index_delete('metric', instance.id)
@receiver(post_delete, sender=Metric)
def remove_metric_link_from_datasets(sender, **kwargs):
instance = kwargs['instance']
internal_api.remove_metric_link(instance.id)
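# Editor's note (illustrative): with the receivers above connected,
# Metric.objects.create(...) triggers search_index_update('metric', id),
# while metric.delete() triggers both search_index_delete('metric', id)
# and internal_api.remove_metric_link(id).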
|
brousch/opencraft
|
instance/tests/views/test_index.py
|
# -*- coding: utf-8 -*-
#
# OpenCraft -- tools to aid developing and hosting free software projects
# Copyright (C) 2015 OpenCraft <xavier@opencraft.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Views - Index - Tests
"""
# Imports #####################################################################
from instance.tests.base import WithUserTestCase
# Tests #######################################################################
class IndexViewsTestCase(WithUserTestCase):
"""
Test cases for views
"""
def test_index_unauthenticated(self):
"""
Index view - Unauthenticated users go to login page
"""
response = self.client.get('/')
self.assertRedirects(response, 'http://testserver/admin/login/?next=/')
def test_index_authenticated(self):
"""
Index view - Authenticated
"""
self.client.login(username='user1', password='pass')
response = self.client.get('/')
self.assertContains(response, 'ng-app="InstanceApp"')
|
g-weatherill/hmtk
|
hmtk/seismicity/gcmt_utils.py
|
#!/usr/bin/env/python
'''
Set of moment tensor utility functions
'''
import numpy as np
from math import fabs, log10, sqrt, acos, atan2, pi
def tensor_components_to_use(mrr, mtt, mpp, mrt, mrp, mtp):
'''
Converts components to Up, South, East definition
USE = [[mrr, mrt, mrp],
[mrt, mtt, mtp],
[mrp, mtp, mpp]]
'''
return np.array([[mrr, mrt, mrp], [mrt, mtt, mtp], [mrp, mtp, mpp]])
def tensor_components_to_ned(mrr, mtt, mpp, mrt, mrp, mtp):
'''
Converts components to North, East, Down definition
NED = [[mtt, -mtp, mrt],
[-mtp, mpp, -mrp],
[mrt, -mrp, mrr]]
'''
return np.array([[mtt, -mtp, mrt], [-mtp, mpp, -mrp], [mrt, -mrp, mrr]])
def get_azimuth_plunge(vect, degrees=True):
'''
For a given vector in USE format, retrieve the azimuth and plunge
'''
if vect[0] > 0:
vect = -1. * np.copy(vect)
vect_hor = sqrt(vect[1] ** 2. + vect[2] ** 2.)
plunge = atan2(-vect[0], vect_hor)
azimuth = atan2(vect[2], -vect[1])
if degrees:
icr = 180. / pi
return icr * azimuth % 360., icr * plunge
else:
return azimuth % (2. * pi), plunge
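# Example (editor's note): for a vector pointing straight up in USE
# coordinates, get_azimuth_plunge(np.array([1., 0., 0.])) first flips the
# vector downwards (vect[0] > 0) and then reports a plunge of 90 degrees;
# the azimuth is meaningless for a vertical vector.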
COORD_SYSTEM = {'USE': tensor_components_to_use,
'NED': tensor_components_to_ned}
ROT_NED_USE = np.matrix([[0., 0., -1.],
[-1., 0., 0.],
[0., 1., 0.]])
def use_to_ned(tensor):
'''
Converts a tensor in USE coordinate system to NED
'''
return np.array(ROT_NED_USE.T * np.matrix(tensor) * ROT_NED_USE)
def ned_to_use(tensor):
'''
Converts a tensor in NED coordinate system to USE
'''
return np.array(ROT_NED_USE * np.matrix(tensor) * ROT_NED_USE.T)
def tensor_to_6component(tensor, frame='USE'):
'''
Converts a tensor to a six-component vector [Mrr, Mtt, Mpp, Mrt, Mrp, Mtp]
'''
if 'NED' in frame:
tensor = ned_to_use(tensor)
return [tensor[0, 0], tensor[1, 1], tensor[2, 2], tensor[0, 1],
tensor[0, 2], tensor[1, 2]]
def normalise_tensor(tensor):
'''
Normalise the tensor by dividing it by its norm, defined as
np.sqrt(X:X) (the Frobenius norm)
'''
tensor_norm = np.linalg.norm(tensor)
return tensor / tensor_norm, tensor_norm
def eigendecompose(tensor, normalise=False):
'''
Performs an eigendecomposition of the tensor and orders the
eigenvalues into ascending order
'''
if normalise:
tensor, tensor_norm = normalise_tensor(tensor)
else:
tensor_norm = 1.
eigvals, eigvects = np.linalg.eigh(tensor, UPLO='U')
isrt = np.argsort(eigvals)
eigenvalues = eigvals[isrt] * tensor_norm
eigenvectors = eigvects[:, isrt]
return eigenvalues, eigenvectors
def matrix_to_euler(rotmat):
'''Inverse of euler_to_matrix().'''
if not isinstance(rotmat, np.matrixlib.defmatrix.matrix):
# As this calculation relies on np.matrix algebra - convert array to
# matrix
rotmat = np.matrix(rotmat)
cvec = lambda x, y, z: np.matrix([[x, y, z]]).T
ex = cvec(1., 0., 0.)
ez = cvec(0., 0., 1.)
exs = rotmat.T * ex
ezs = rotmat.T * ez
enodes = np.cross(ez.T, ezs.T).T
if np.linalg.norm(enodes) < 1e-10:
enodes = exs
enodess = rotmat * enodes
cos_alpha = float((ez.T*ezs))
if cos_alpha > 1.:
cos_alpha = 1.
if cos_alpha < -1.:
cos_alpha = -1.
alpha = acos(cos_alpha)
beta = np.mod(atan2(enodes[1, 0], enodes[0, 0]), pi * 2.)
gamma = np.mod(-atan2(enodess[1, 0], enodess[0, 0]), pi * 2.)
return unique_euler(alpha, beta, gamma)
def unique_euler(alpha, beta, gamma):
'''
Uniquify euler angle triplet.
Put euler angles into ranges compatible with (dip,strike,-rake)
in seismology:
alpha (dip) : [0, pi/2]
beta (strike) : [0, 2*pi)
gamma (-rake) : [-pi, pi)
If alpha is near to zero, beta is replaced by beta+gamma and gamma is set
to zero, to prevent that additional ambiguity.
If alpha is near to pi/2, beta is put into the range [0,pi).
'''
alpha = np.mod(alpha, 2.0 * pi)
if 0.5 * pi < alpha and alpha <= pi:
alpha = pi - alpha
beta = beta + pi
gamma = 2.0 * pi - gamma
elif pi < alpha and alpha <= 1.5 * pi:
alpha = alpha - pi
gamma = pi - gamma
elif 1.5 * pi < alpha and alpha <= 2.0 * pi:
alpha = 2.0 * pi - alpha
beta = beta + pi
gamma = pi + gamma
alpha = np.mod(alpha, 2.0 * pi )
beta = np.mod(beta, 2.0 * pi )
gamma = np.mod(gamma + pi, 2.0 * pi )- pi
# If dip is exactly 90 degrees, one is still
# free to choose between looking at the plane from either side.
# Choose to look at such that beta is in the range [0,180)
# This should prevent some problems, when dip is close to 90 degrees:
if fabs(alpha - 0.5 * pi) < 1e-10:
alpha = 0.5 * pi
if fabs(beta - pi) < 1e-10:
beta = pi
if fabs(beta - 2. * pi) < 1e-10:
beta = 0.
if fabs(beta) < 1e-10:
beta = 0.
if alpha == 0.5 * pi and beta >= pi:
gamma = - gamma
beta = np.mod(beta-pi, 2.0 * pi)
gamma = np.mod( gamma + pi, 2.0 * pi) - pi
assert 0. <= beta < pi
assert -pi <= gamma < pi
if alpha < 1e-7:
beta = np.mod(beta + gamma, 2.0 * pi)
gamma = 0.
return (alpha, beta, gamma)
def moment_magnitude_scalar(moment):
'''
Uses Hanks & Kanamori formula for calculating moment magnitude from
a scalar moment (Nm)
'''
if isinstance(moment, np.ndarray):
return (2. / 3.) * (np.log10(moment) - 9.05)
else:
return (2. / 3.) * (log10(moment) - 9.05)
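# Example (editor's note): for a scalar moment of 1.0e19 Nm,
# Mw = (2. / 3.) * (log10(1.0e19) - 9.05) = (2. / 3.) * 9.95 ~ 6.63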
|
open-craft/xblock-poll
|
tests/integration/test_submit_button.py
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015 McKinsey Academy
#
# Authors:
# Jonathan Piacenti <jonathan@opencraft.com>
#
# This software's license gives you freedom; you can copy, convey,
# propagate, redistribute and/or modify this program under the terms of
# the GNU Affero General Public License (AGPL) as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version of the AGPL published by the FSF.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero
# General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program in a file in the toplevel directory called
# "AGPLv3". If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import absolute_import
from tests.integration.base_test import PollBaseTest
from unittest import skip
class TestSubmitButton(PollBaseTest):
@skip("Flaky test")
def test_submit_button(self):
"""
Goal: We have to make sure that submit button gets disabled right
after it is clicked. We cannot test with 100% assurance by adding a
method in other tests such as test_functions.py because in that case
submit button is anyway disabled after the ajax request.
We can utilize infinite submission feature and check if the submit
button was disabled (because of js) and then re-enabled (because of
ajax request).
"""
self.go_to_page('Poll Submit Button')
# Find all the radio choices
answer_elements = self.browser.find_elements_by_css_selector('label.poll-answer-text')
# Select one of the answer choices
answer_elements[1].click()
# When an answer is selected, make sure submit is enabled.
self.wait_until_exists('input[name=poll-submit]:enabled')
submit_button = self.get_submit()
submit_button.click()
# Make sure that submit button is disabled right away
self.assertFalse(submit_button.is_enabled())
self.wait_until_clickable(self.browser.find_element_by_css_selector('.poll-voting-thanks'))
# Wait until the ajax request is finished and submit button is enabled
self.assertTrue(self.get_submit().is_enabled())
|
mikelarre/odoomrp-wip-1
|
product_last_purchase_sale_info/models/sale_order.py
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Avanzosc - Avanced Open Source Consulting
# Copyright (C) 2011 - 2014 Avanzosc <http://www.avanzosc.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#
##############################################################################
from openerp.osv import orm
import time
class SaleOrder(orm.Model):
_inherit = 'sale.order'
def action_wait(self, cr, uid, ids, context=None):
product_obj = self.pool['product.product']
res = super(SaleOrder, self).action_wait(cr, uid, ids, context)
for o in self.browse(cr, uid, ids, context):
for line in o.order_line:
if line.product_id:
vals = {'last_sale_date':
time.strftime('%Y-%m-%d %H:%M:%S'),
'last_customer_id': line.order_id.partner_id.id,
}
product_obj.write(cr, uid, [line.product_id.id], vals,
context)
return res
|
12019/pyscard
|
smartcard/wx/APDUHexValidator.py
|
# -*- coding: iso-8859-15 -*-
"""
A wxValidator that matches APDU in hexadecimal such as:
A4 A0 00 00 02
A4A0000002
__author__ = "http://www.gemalto.com"
Copyright 2001-2010 gemalto
Author: Jean-Daniel Aussel, mailto:jean-daniel.aussel@gemalto.com
This file is part of pyscard.
pyscard is free software; you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version.
pyscard is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with pyscard; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""
import re
import string
import wx
# a regexp to match ATRs and APDUs
hexbyte = "[0-9a-fA-F]{1,2}"
apduregexp = re.compile("((%s)[ ]*)*" % hexbyte)
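# Example (editor's note): apduregexp.match("A4 A0 00 00 02") and
# apduregexp.match("A4A0000002") both succeed, since each byte is one or
# two hex digits optionally followed by spaces.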
class APDUHexValidator(wx.PyValidator):
'''A wxValidator that matches APDU in hexadecimal such as:
A4 A0 00 00 02
A4A0000002'''
def __init__(self):
wx.PyValidator.__init__(self)
self.Bind(wx.EVT_CHAR, self.OnChar)
def Clone(self):
return APDUHexValidator()
def Validate(self, win):
tc = self.GetWindow()
value = tc.GetValue()
if not apduregexp.match(value):
return False
return True
def OnChar(self, event):
key = event.GetKeyCode()
if wx.WXK_SPACE == key or chr(key) in string.hexdigits:
value = event.GetEventObject().GetValue() + chr(key)
if apduregexp.match(value):
event.Skip()
return
if key < wx.WXK_SPACE or key == wx.WXK_DELETE or key > 255:
event.Skip()
return
if not wx.Validator_IsSilent():
wx.Bell()
return
|
barthess/mavlink
|
pymavlink/tools/magfit_motors.py
|
#!/usr/bin/env python
'''
fit best estimate of magnetometer offsets, trying to take into account motor interference
'''
import sys, time, os, math
from argparse import ArgumentParser
parser = ArgumentParser(description=__doc__)
parser.add_argument("--no-timestamps",dest="notimestamps", action='store_true', help="Log doesn't have timestamps")
parser.add_argument("--condition",dest="condition", default=None, help="select packets by condition")
parser.add_argument("--noise", type=float, default=0, help="noise to add")
parser.add_argument("logs", metavar="LOG", nargs="+")
args = parser.parse_args()
from pymavlink import mavutil
from pymavlink.rotmat import Vector3
def noise():
'''a noise vector'''
from random import gauss
v = Vector3(gauss(0, 1), gauss(0, 1), gauss(0, 1))
v.normalize()
return v * args.noise
def select_data(data):
ret = []
counts = {}
for d in data:
(mag,motor) = d
key = "%u:%u:%u" % (mag.x/20,mag.y/20,mag.z/20)
if key in counts:
counts[key] += 1
else:
counts[key] = 1
if counts[key] < 3:
ret.append(d)
print(len(data), len(ret))
return ret
def radius(d, offsets, motor_ofs):
'''return the radius for a given data point and offsets'''
(mag, motor) = d
return (mag + offsets + motor*motor_ofs).length()
def radius_cmp(a, b, offsets, motor_ofs):
'''cmp-style comparison of two data points by their radius'''
diff = radius(a, offsets, motor_ofs) - radius(b, offsets, motor_ofs)
if diff > 0:
return 1
if diff < 0:
return -1
return 0
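# Editor's note: radius_cmp is a Python 2 cmp-style comparator; it is used
# below as data.sort(lambda a, b: radius_cmp(a, b, offsets, motor_ofs)) to
# order the data points by their fitted radius before trimming outliers.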
def sphere_error(p, data):
from scipy import sqrt
x,y,z,mx,my,mz,r = p
ofs = Vector3(x,y,z)
motor_ofs = Vector3(mx,my,mz)
ret = []
for d in data:
(mag,motor) = d
err = r - radius((mag,motor), ofs, motor_ofs)
ret.append(err)
return ret
def fit_data(data):
import numpy, scipy
from scipy import optimize
p0 = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
p1, ier = optimize.leastsq(sphere_error, p0[:], args=(data,))
if not ier in [1, 2, 3, 4]:
raise RuntimeError("Unable to find solution")
return (Vector3(p1[0], p1[1], p1[2]), Vector3(p1[3], p1[4], p1[5]), p1[6])
def magfit(filename):
'''find best magnetometer offset fit to a log file'''
print("Processing log %s" % filename)
mlog = mavutil.mavlink_connection(filename, notimestamps=args.notimestamps)
data = []
last_t = 0
offsets = Vector3(0,0,0)
motor_ofs = Vector3(0,0,0)
motor = 0.0
# now gather all the data
while True:
m = mlog.recv_match(condition=args.condition)
if m is None:
break
if m.get_type() == "PARAM_VALUE" and m.param_id == "RC3_MIN":
rc3_min = float(m.param_value)
if m.get_type() == "SENSOR_OFFSETS":
# update current offsets
offsets = Vector3(m.mag_ofs_x, m.mag_ofs_y, m.mag_ofs_z)
if m.get_type() == "SERVO_OUTPUT_RAW":
motor_pwm = m.servo1_raw + m.servo2_raw + m.servo3_raw + m.servo4_raw
motor_pwm *= 0.25
rc3_min = mlog.param('RC3_MIN', 1100)
rc3_max = mlog.param('RC3_MAX', 1900)
motor = (motor_pwm - rc3_min) / (rc3_max - rc3_min)
if motor > 1.0:
motor = 1.0
if motor < 0.0:
motor = 0.0
if m.get_type() == "RAW_IMU":
mag = Vector3(m.xmag, m.ymag, m.zmag)
# add data point after subtracting the current offsets
data.append((mag - offsets + noise(), motor))
print("Extracted %u data points" % len(data))
print("Current offsets: %s" % offsets)
data = select_data(data)
# do an initial fit with all data
(offsets, motor_ofs, field_strength) = fit_data(data)
for count in range(3):
# sort the data by the radius
data.sort(lambda a,b : radius_cmp(a,b,offsets,motor_ofs))
print("Fit %u : %s %s field_strength=%6.1f to %6.1f" % (
count, offsets, motor_ofs,
radius(data[0], offsets, motor_ofs), radius(data[-1], offsets, motor_ofs)))
# discard outliers, keep the middle 3/4
data = data[len(data)/8:-len(data)/8]
# fit again
(offsets, motor_ofs, field_strength) = fit_data(data)
print("Final : %s %s field_strength=%6.1f to %6.1f" % (
offsets, motor_ofs,
radius(data[0], offsets, motor_ofs), radius(data[-1], offsets, motor_ofs)))
print "mavgraph.py '%s' 'mag_field(RAW_IMU)' 'mag_field_motors(RAW_IMU,SENSOR_OFFSETS,(%f,%f,%f),SERVO_OUTPUT_RAW,(%f,%f,%f))'" % (
filename,
offsets.x,offsets.y,offsets.z,
motor_ofs.x, motor_ofs.y, motor_ofs.z)
total = 0.0
for filename in args.logs:
magfit(filename)
|
ajdawson/cartopy
|
lib/cartopy/mpl/feature_artist.py
|
# (C) British Crown Copyright 2011 - 2015, Met Office
#
# This file is part of cartopy.
#
# cartopy is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# cartopy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with cartopy. If not, see <http://www.gnu.org/licenses/>.
"""
This module defines the :class:`FeatureArtist` class, for drawing
:class:`Feature` instances with matplotlib.
"""
from __future__ import (absolute_import, division, print_function)
import warnings
import weakref
import matplotlib.artist
import matplotlib.collections
import cartopy.mpl.patch as cpatch
class FeatureArtist(matplotlib.artist.Artist):
"""
A subclass of :class:`~matplotlib.artist.Artist` capable of
drawing a :class:`cartopy.feature.Feature`.
"""
_geometry_to_path_cache = weakref.WeakKeyDictionary()
"""
A nested mapping from geometry and target projection to the
resulting transformed matplotlib paths::
{geom: {target_projection: list_of_paths}}
This provides a significant boost when producing multiple maps of the
same projection.
"""
def __init__(self, feature, **kwargs):
"""
Args:
* feature:
an instance of :class:`cartopy.feature.Feature` to draw.
* kwargs:
keyword arguments to be used when drawing the feature. These
will override those shared with the feature.
"""
super(FeatureArtist, self).__init__()
if kwargs is None:
kwargs = {}
self._kwargs = dict(kwargs)
# Set default zorder so that features are drawn before
# lines e.g. contours but after images.
# Note that the zorder of Patch, PatchCollection and PathCollection
# are all 1 by default. Assuming equal zorder drawing takes place in
# the following order: collections, patches, lines (default zorder=2),
# text (default zorder=3), then other artists e.g. FeatureArtist.
if self._kwargs.get('zorder') is not None:
self.set_zorder(self._kwargs['zorder'])
elif feature.kwargs.get('zorder') is not None:
self.set_zorder(feature.kwargs['zorder'])
else:
# The class attribute matplotlib.collections.PathCollection.zorder
# was removed after mpl v1.2.0, so the hard-coded value of 1 is
# used instead.
self.set_zorder(1)
self._feature = feature
@matplotlib.artist.allow_rasterization
def draw(self, renderer, *args, **kwargs):
"""
Draws the geometries of the feature that intersect with the extent of
the :class:`cartopy.mpl.GeoAxes` instance to which this
object has been added.
"""
if not self.get_visible():
return
ax = self.get_axes()
feature_crs = self._feature.crs
# Get geometries that we need to draw.
extent = None
try:
extent = ax.get_extent(feature_crs)
except ValueError:
warnings.warn('Unable to determine extent. Defaulting to global.')
geoms = self._feature.intersecting_geometries(extent)
# Project (if necessary) and convert geometries to matplotlib paths.
paths = []
key = ax.projection
for geom in geoms:
mapping = FeatureArtist._geometry_to_path_cache.setdefault(geom, {})
geom_paths = mapping.get(key)
if geom_paths is None:
if ax.projection != feature_crs:
projected_geom = ax.projection.project_geometry(
geom, feature_crs)
else:
projected_geom = geom
geom_paths = cpatch.geos_to_path(projected_geom)
mapping[key] = geom_paths
paths.extend(geom_paths)
# Build path collection and draw it.
transform = ax.projection._as_mpl_transform(ax)
# Combine all the keyword args in priority order
final_kwargs = dict(self._feature.kwargs)
final_kwargs.update(self._kwargs)
final_kwargs.update(kwargs)
c = matplotlib.collections.PathCollection(paths,
transform=transform,
**final_kwargs)
c.set_clip_path(ax.patch)
c.set_figure(ax.figure)
return c.draw(renderer)
|
siongui/userpages
|
content/code/python-jinja2-vs-go-html-template/jinja2-example-3.py
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
from jinja2 import Template
import sys
tmpl = """
{% for link in links %}
{{ loop.index0 }}: <a href="{{link.href}}">{{link.name}}</a>
{{ loop.index }}: <a href="{{link.href}}">{{link.name}}</a>
{% endfor %}
"""
if __name__ == '__main__':
links = [
{'name': 'Google', 'href': 'https://www.google.com'},
{'name': 'Facebook', 'href': 'https://www.facebook.com'}
]
t = Template(tmpl)
sys.stdout.write(t.render(links=links))
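# Expected output (editor's note, whitespace from the {% for %} tags
# omitted): loop.index0 is zero-based and loop.index is one-based, so the
# rendered text is roughly:
#     0: <a href="https://www.google.com">Google</a>
#     1: <a href="https://www.google.com">Google</a>
#     1: <a href="https://www.facebook.com">Facebook</a>
#     2: <a href="https://www.facebook.com">Facebook</a>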
|
mrknow/filmkodi
|
plugin.video.wizjatv/resources/lib/lib/client.py
|
# -*- coding: utf-8 -*-
'''
Specto Add-on
Copyright (C) 2015 lambda
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re,sys,cookielib,urllib,urllib2,urlparse,gzip,StringIO,HTMLParser,time,random,base64
from resources.lib.libraries import cache
from resources.lib.libraries import control
from resources.lib.libraries import workers
def shrink_host(url):
u = urlparse.urlparse(url)[1].split('.')
u = u[-2] + '.' + u[-1]
return u.encode('utf-8')
IE_USER_AGENT = 'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; AS; rv:11.0) like Gecko'
FF_USER_AGENT = 'Mozilla/5.0 (Windows NT 6.3; rv:36.0) Gecko/20100101 Firefox/36.0'
OPERA_USER_AGENT = 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.111 Safari/537.36 OPR/34.0.2036.50'
IOS_USER_AGENT = 'Mozilla/5.0 (iPhone; CPU iPhone OS 6_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/6.0 Mobile/10A5376e Safari/8536.25'
ANDROID_USER_AGENT = 'Mozilla/5.0 (Linux; Android 4.4.2; Nexus 4 Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.114 Mobile Safari/537.36'
#SMU_USER_AGENT = 'URLResolver for Kodi/%s' % (addon_version)
def request(url, close=True, redirect=True, error=False, proxy=None, post=None, headers=None, mobile=False, limit=None, referer=None, cookie=None, compression=True, output='', timeout='30', XHR=False):
try:
#control.log('@@@@@@@@@@@@@@ - URL:%s POST:%s' % (url, post))
handlers = []
if not proxy == None:
handlers += [urllib2.ProxyHandler({'http':'%s' % (proxy)}), urllib2.HTTPHandler]
opener = urllib2.build_opener(*handlers)
opener = urllib2.install_opener(opener)
if output == 'cookie' or output == 'extended' or not close == True:
cookies = cookielib.LWPCookieJar()
handlers += [urllib2.HTTPHandler(), urllib2.HTTPSHandler(), urllib2.HTTPCookieProcessor(cookies)]
opener = urllib2.build_opener(*handlers)
opener = urllib2.install_opener(opener)
if (2, 7, 9) <= sys.version_info < (2, 7, 11):
try:
import ssl; ssl_context = ssl.create_default_context()
ssl_context.check_hostname = False
ssl_context.verify_mode = ssl.CERT_NONE
handlers += [urllib2.HTTPSHandler(context=ssl_context)]
opener = urllib2.build_opener(*handlers)
opener = urllib2.install_opener(opener)
except:
pass
if url.startswith('//'): url = 'http:' + url
try: headers.update(headers)
except: headers = {}
if 'User-Agent' in headers:
pass
elif not mobile == True:
#headers['User-Agent'] = agent()
headers['User-Agent'] = cache.get(randomagent, 1)
else:
headers['User-Agent'] = 'Apple-iPhone/701.341'
headers['User-Agent'] = 'Mozilla/5.0 (Linux; U; Android 4.0.3; ko-kr; LG-L160L Build/IML74K) AppleWebkit/534.30 (KHTML, like Gecko) Version/4.0 Mobile Safari/534.30'
if 'Referer' in headers:
pass
elif referer == None:
headers['Referer'] = '%s://%s/' % (urlparse.urlparse(url).scheme, urlparse.urlparse(url).netloc)
else:
headers['Referer'] = referer
if not 'Accept-Language' in headers:
headers['Accept-Language'] = 'en-US'
if 'X-Requested-With' in headers:
pass
elif XHR == True:
headers['X-Requested-With'] = 'XMLHttpRequest'
if 'Cookie' in headers:
pass
elif not cookie == None:
headers['Cookie'] = cookie
if 'Accept-Encoding' in headers:
pass
elif compression and limit is None:
headers['Accept-Encoding'] = 'gzip'
if redirect == False:
class NoRedirection(urllib2.HTTPErrorProcessor):
def http_response(self, request, response): return response
opener = urllib2.build_opener(NoRedirection)
opener = urllib2.install_opener(opener)
try: del headers['Referer']
except: pass
if isinstance(post, dict):
post = urllib.urlencode(post)
request = urllib2.Request(url, data=post, headers=headers)
try:
response = urllib2.urlopen(request, timeout=int(timeout))
except urllib2.HTTPError as response:
if response.code == 503:
cf_result = response.read(5242880)
try: encoding = response.info().getheader('Content-Encoding')
except: encoding = None
if encoding == 'gzip':
cf_result = gzip.GzipFile(fileobj=StringIO.StringIO(cf_result)).read()
if 'cf-browser-verification' in cf_result:
netloc = '%s://%s' % (urlparse.urlparse(url).scheme, urlparse.urlparse(url).netloc)
ua = headers['User-Agent']
cf = cache.get(cfcookie().get, 168, netloc, ua, timeout)
headers['Cookie'] = cf
request = urllib2.Request(url, data=post, headers=headers)
response = urllib2.urlopen(request, timeout=int(timeout))
elif error == False:
return
elif error == False:
return
if output == 'cookie':
try: result = '; '.join(['%s=%s' % (i.name, i.value) for i in cookies])
except: pass
try: result = cf
except: pass
if close == True: response.close()
return result
elif output == 'geturl':
result = response.geturl()
if close == True: response.close()
return result
elif output == 'headers':
result = response.headers
if close == True: response.close()
return result
elif output == 'chunk':
try: content = int(response.headers['Content-Length'])
except: content = (2049 * 1024)
if content < (2048 * 1024): return
result = response.read(16 * 1024)
if close == True: response.close()
return result
if limit == '0':
result = response.read(224 * 1024)
elif not limit == None:
result = response.read(int(limit) * 1024)
else:
result = response.read(5242880)
try: encoding = response.info().getheader('Content-Encoding')
except: encoding = None
if encoding == 'gzip':
result = gzip.GzipFile(fileobj=StringIO.StringIO(result)).read()
if 'sucuri_cloudproxy_js' in result:
su = sucuri().get(result)
headers['Cookie'] = su
request = urllib2.Request(url, data=post, headers=headers)
response = urllib2.urlopen(request, timeout=int(timeout))
if limit == '0':
result = response.read(224 * 1024)
elif not limit == None:
result = response.read(int(limit) * 1024)
else:
result = response.read(5242880)
try: encoding = response.info().getheader('Content-Encoding')
except: encoding = None
if encoding == 'gzip':
result = gzip.GzipFile(fileobj=StringIO.StringIO(result)).read()
if 'Blazingfast.io' in result and 'xhr.open' in result:
netloc = '%s://%s' % (urlparse.urlparse(url).scheme, urlparse.urlparse(url).netloc)
ua = headers['User-Agent']
headers['Cookie'] = cache.get(bfcookie().get, 168, netloc, ua, timeout)
result = _basic_request(url, headers=headers, timeout=timeout, limit=limit)
if output == 'extended':
response_headers = response.headers
response_code = str(response.code)
try: cookie = '; '.join(['%s=%s' % (i.name, i.value) for i in cookies])
except: pass
try: cookie = cf
except: pass
if close == True: response.close()
return (result, response_code, response_headers, headers, cookie)
else:
if close == True: response.close()
return result
except Exception as e:
control.log('Client connect url:%s Error %s' % (url,e))
return
def source(url, close=True, error=False, proxy=None, post=None, headers=None, mobile=False, safe=False, referer=None, cookie=None, output='', timeout='30'):
return request(url, close, error, proxy, post, headers, mobile, safe, referer, cookie, output, timeout)
def parseDOM(html, name=u"", attrs={}, ret=False):
# Copyright (C) 2010-2011 Tobias Ussing And Henrik Mosgaard Jensen
if attrs is None: attrs = {}
if isinstance(html, str):
try:
html = [html.decode("utf-8")] # Replace with chardet thingy
except:
try:
html = [html.decode("utf-8", "replace")]
except:
html = [html]
elif isinstance(html, unicode):
html = [html]
elif not isinstance(html, list):
return ''
if not name.strip():
return ''
if not isinstance(attrs, dict):
return ''
ret_lst = []
for item in html:
for match in re.findall('(<[^>]*\n[^>]*>)', item):
item = item.replace(match, match.replace('\n', ' ').replace('\r', ' '))
if not attrs:
pattern = '(<%s(?: [^>]*>|/?>))' % (name)
this_list = re.findall(pattern, item, re.M | re.S | re.I)
else:
last_list = None
for key in attrs:
pattern = '''(<%s [^>]*%s=['"]%s['"][^>]*>)''' % (name, key, attrs[key])
                this_list = re.findall(pattern, item, re.M | re.S | re.I)
if not this_list and ' ' not in attrs[key]:
pattern = '''(<%s [^>]*%s=%s[^>]*>)''' % (name, key, attrs[key])
                    this_list = re.findall(pattern, item, re.M | re.S | re.I)
if last_list is None:
last_list = this_list
else:
last_list = [item for item in this_list if item in last_list]
this_list = last_list
lst = this_list
if isinstance(ret, str):
lst2 = []
for match in lst:
pattern = '''<%s[^>]* %s\s*=\s*(?:(['"])(.*?)\\1|([^'"].*?)(?:>|\s))''' % (name, ret)
results = re.findall(pattern, match, re.I | re.M | re.S)
lst2 += [result[1] if result[1] else result[2] for result in results]
lst = lst2
else:
lst2 = []
for match in lst:
end_str = "</%s" % (name)
start_str = '<%s' % (name)
start = item.find(match)
end = item.find(end_str, start)
pos = item.find(start_str, start + 1)
while pos < end and pos != -1: # Ignore too early </endstr> return
tend = item.find(end_str, end + len(end_str))
if tend != -1:
end = tend
pos = item.find(start_str, pos + 1)
if start == -1 and end == -1:
result = ''
elif start > -1 and end > -1:
result = item[start + len(match):end]
elif end > -1:
result = item[:end]
elif start > -1:
result = item[start + len(match):]
else:
result = ''
if ret:
endstr = item[end:item.find(">", item.find(end_str)) + 1]
result = match + result + endstr
result = result.strip()
item = item[item.find(result, item.find(match)):]
lst2.append(result)
lst = lst2
ret_lst += lst
return ret_lst
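# A minimal parseDOM usage sketch (the HTML snippet and values below are made
# up for illustration; results follow from the matching logic above):
#
#   html = '<div class="item"><a href="/x">One</a></div>'
#   parseDOM(html, 'a')                             # -> [u'One']
#   parseDOM(html, 'a', ret='href')                 # -> [u'/x']
#   parseDOM(html, 'div', attrs={'class': 'item'})  # -> [u'<a href="/x">One</a>']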
def replaceHTMLCodes(txt):
txt = re.sub("(&#[0-9]+)([^;^0-9]+)", "\\1;\\2", txt)
txt = HTMLParser.HTMLParser().unescape(txt)
    txt = txt.replace("&quot;", "\"")
    txt = txt.replace("&amp;", "&")
txt = txt.strip()
return txt
def cleanHTMLCodes(txt):
txt = txt.replace("'", "")
txt = re.sub("(&#[0-9]+)([^;^0-9]+)", "\\1;\\2", txt)
txt = HTMLParser.HTMLParser().unescape(txt)
    txt = txt.replace("&quot;", "\"")
    txt = txt.replace("&amp;", "&")
return txt
def agent():
return cache.get(randomagent, 24)
def randomagent():
BR_VERS = [
['%s.0' % i for i in xrange(18, 50)],
['37.0.2062.103', '37.0.2062.120', '37.0.2062.124', '38.0.2125.101', '38.0.2125.104', '38.0.2125.111', '39.0.2171.71', '39.0.2171.95', '39.0.2171.99',
'40.0.2214.93', '40.0.2214.111',
'40.0.2214.115', '42.0.2311.90', '42.0.2311.135', '42.0.2311.152', '43.0.2357.81', '43.0.2357.124', '44.0.2403.155', '44.0.2403.157', '45.0.2454.101',
'45.0.2454.85', '46.0.2490.71',
'46.0.2490.80', '46.0.2490.86', '47.0.2526.73', '47.0.2526.80', '48.0.2564.116', '49.0.2623.112', '50.0.2661.86', '51.0.2704.103', '52.0.2743.116',
'53.0.2785.143', '54.0.2840.71'],
['11.0'],
['8.0', '9.0', '10.0', '10.6']]
WIN_VERS = ['Windows NT 10.0', 'Windows NT 7.0', 'Windows NT 6.3', 'Windows NT 6.2', 'Windows NT 6.1', 'Windows NT 6.0', 'Windows NT 5.1', 'Windows NT 5.0']
FEATURES = ['; WOW64', '; Win64; IA64', '; Win64; x64', '']
RAND_UAS = ['Mozilla/5.0 ({win_ver}{feature}; rv:{br_ver}) Gecko/20100101 Firefox/{br_ver}',
'Mozilla/5.0 ({win_ver}{feature}) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/{br_ver} Safari/537.36',
'Mozilla/5.0 ({win_ver}{feature}; Trident/7.0; rv:{br_ver}) like Gecko',
'Mozilla/5.0 (compatible; MSIE {br_ver}; {win_ver}{feature}; Trident/6.0)']
index = random.randrange(len(RAND_UAS))
return RAND_UAS[index].format(win_ver=random.choice(WIN_VERS), feature=random.choice(FEATURES), br_ver=random.choice(BR_VERS[index]))
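# One possible output of randomagent() (template index, Windows version,
# feature string and browser version are all picked at random per call):
#   'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/49.0.2623.112 Safari/537.36'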
def googletag(url):
quality = re.compile('itag=(\d*)').findall(url)
quality += re.compile('=m(\d*)$').findall(url)
try: quality = quality[0]
except: return []
#control.log('<><><><><><><><><><><><> %s <><><><><><><><><>' % quality)
if quality in ['37', '137', '299', '96', '248', '303', '46']:
return [{'quality': u'1080p', 'url': url}]
elif quality in ['22', '84', '136', '298', '120', '95', '247', '302', '45', '102']:
return [{'quality': u'HD', 'url': url}]
elif quality in ['35', '44', '135', '244', '94', '59']:
return [{'quality': u'SD', 'url': url}]
elif quality in ['18', '34', '43', '82', '100', '101', '134', '243', '93']:
return [{'quality': u'SD', 'url': url}]
elif quality in ['5', '6', '36', '83', '133', '242', '92', '132']:
return [{'quality': u'SD', 'url': url}]
else:
return [{'quality': u'HD', 'url': url}]
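# Worked example for googletag(): itag 22 falls in the HD bucket above, so a
# placeholder URL such as
#   googletag('http://example.invalid/videoplayback?itag=22')
# returns [{'quality': u'HD', 'url': 'http://example.invalid/videoplayback?itag=22'}]
# (the URL is illustrative; only the itag value matters here).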
def file_quality_openload(url):
try:
if '1080' in url:
return {'quality': '1080p'}
elif '720' in url:
return {'quality': 'HD'}
else:
return {'quality': 'SD'}
except:
return {'quality': 'SD', 'url': url}
class cfcookie:
def __init__(self):
self.cookie = None
def get(self, netloc, ua, timeout):
threads = []
for i in range(0, 15): threads.append(workers.Thread(self.get_cookie, netloc, ua, timeout))
[i.start() for i in threads]
for i in range(0, 30):
if not self.cookie == None: return self.cookie
time.sleep(1)
def get_cookie(self, netloc, ua, timeout):
try:
headers = {'User-Agent': ua}
request = urllib2.Request(netloc, headers=headers)
try:
response = urllib2.urlopen(request, timeout=int(timeout))
except urllib2.HTTPError as response:
result = response.read(5242880)
try: encoding = response.info().getheader('Content-Encoding')
except: encoding = None
if encoding == 'gzip':
result = gzip.GzipFile(fileobj=StringIO.StringIO(result)).read()
jschl = re.findall('name="jschl_vc" value="(.+?)"/>', result)[0]
init = re.findall('setTimeout\(function\(\){\s*.*?.*:(.*?)};', result)[-1]
builder = re.findall(r"challenge-form\'\);\s*(.*)a.v", result)[0]
decryptVal = self.parseJSString(init)
lines = builder.split(';')
for line in lines:
if len(line) > 0 and '=' in line:
sections=line.split('=')
line_val = self.parseJSString(sections[1])
decryptVal = int(eval(str(decryptVal)+sections[0][-1]+str(line_val)))
answer = decryptVal + len(urlparse.urlparse(netloc).netloc)
query = '%s/cdn-cgi/l/chk_jschl?jschl_vc=%s&jschl_answer=%s' % (netloc, jschl, answer)
if 'type="hidden" name="pass"' in result:
passval = re.findall('name="pass" value="(.*?)"', result)[0]
query = '%s/cdn-cgi/l/chk_jschl?pass=%s&jschl_vc=%s&jschl_answer=%s' % (netloc, urllib.quote_plus(passval), jschl, answer)
time.sleep(6)
cookies = cookielib.LWPCookieJar()
handlers = [urllib2.HTTPHandler(), urllib2.HTTPSHandler(), urllib2.HTTPCookieProcessor(cookies)]
opener = urllib2.build_opener(*handlers)
opener = urllib2.install_opener(opener)
try:
request = urllib2.Request(query, headers=headers)
response = urllib2.urlopen(request, timeout=int(timeout))
except:
pass
cookie = '; '.join(['%s=%s' % (i.name, i.value) for i in cookies])
if 'cf_clearance' in cookie: self.cookie = cookie
except:
pass
def parseJSString(self, s):
try:
offset=1 if s[0]=='+' else 0
val = int(eval(s.replace('!+[]','1').replace('!![]','1').replace('[]','0').replace('(','str(')[offset:]))
return val
except:
pass
class bfcookie:
def __init__(self):
self.COOKIE_NAME = 'BLAZINGFAST-WEB-PROTECT'
def get(self, netloc, ua, timeout):
try:
headers = {'User-Agent': ua, 'Referer': netloc}
result = _basic_request(netloc, headers=headers, timeout=timeout)
match = re.findall('xhr\.open\("GET","([^,]+),', result)
if not match:
return False
url_Parts = match[0].split('"')
url_Parts[1] = '1680'
url = urlparse.urljoin(netloc, ''.join(url_Parts))
match = re.findall('rid=([0-9a-zA-Z]+)', url_Parts[0])
if not match:
return False
headers['Cookie'] = 'rcksid=%s' % match[0]
result = _basic_request(url, headers=headers, timeout=timeout)
return self.getCookieString(result, headers['Cookie'])
except:
return
    # not very robust, but laziness...
def getCookieString(self, content, rcksid):
vars = re.findall('toNumbers\("([^"]+)"', content)
value = self._decrypt(vars[2], vars[0], vars[1])
cookie = "%s=%s;%s" % (self.COOKIE_NAME, value, rcksid)
return cookie
def _decrypt(self, msg, key, iv):
from binascii import unhexlify, hexlify
import pyaes
msg = unhexlify(msg)
key = unhexlify(key)
iv = unhexlify(iv)
if len(iv) != 16: return False
decrypter = pyaes.Decrypter(pyaes.AESModeOfOperationCBC(key, iv))
plain_text = decrypter.feed(msg)
plain_text += decrypter.feed()
f = hexlify(plain_text)
return f
class sucuri:
def __init__(self):
self.cookie = None
def get(self, result):
try:
s = re.compile("S\s*=\s*'([^']+)").findall(result)[0]
s = base64.b64decode(s)
s = s.replace(' ', '')
s = re.sub('String\.fromCharCode\(([^)]+)\)', r'chr(\1)', s)
s = re.sub('\.slice\((\d+),(\d+)\)', r'[\1:\2]', s)
s = re.sub('\.charAt\(([^)]+)\)', r'[\1]', s)
s = re.sub('\.substr\((\d+),(\d+)\)', r'[\1:\1+\2]', s)
s = re.sub(';location.reload\(\);', '', s)
s = re.sub(r'\n', '', s)
s = re.sub(r'document\.cookie', 'cookie', s)
cookie = '' ; exec(s)
self.cookie = re.compile('([^=]+)=(.*)').findall(cookie)[0]
self.cookie = '%s=%s' % (self.cookie[0], self.cookie[1])
return self.cookie
except:
pass
def parseJSString(s):
try:
offset=1 if s[0]=='+' else 0
val = int(eval(s.replace('!+[]','1').replace('!![]','1').replace('[]','0').replace('(','str(')[offset:]))
return val
except:
pass
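# Worked example for parseJSString(): the Cloudflare-style expression
#   '+((!+[]+!![]+!![])+(!+[]+!![]))'
# rewrites to "str(str(1+1+1)+str(1+1))" after the replacements above, which
# evals to '32', so parseJSString returns 32.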
def googlepass(url):
try:
try:
headers = dict(urlparse.parse_qsl(url.rsplit('|', 1)[1]))
except:
headers = None
url = url.split('|')[0].replace('\\', '')
url = request(url, headers=headers, output='geturl')
if 'requiressl=yes' in url:
url = url.replace('http://', 'https://')
else:
url = url.replace('https://', 'http://')
if headers: url += '|%s' % urllib.urlencode(headers)
return url
except:
return
|
dmlc/mxnet
|
ci/build_windows.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""User friendly / multi platform builder script"""
import argparse
import datetime
import glob
import logging
import os
import platform
import shutil
import sys
import time
from distutils.dir_util import copy_tree
from enum import Enum
from subprocess import check_call
from util import *
KNOWN_VCVARS = {
'VS 2015': r'C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\bin\x86_amd64\vcvarsx86_amd64.bat',
'VS 2017': r'C:\Program Files (x86)\Microsoft Visual Studio\2017\Community\VC\Auxiliary\Build\vcvarsx86_amd64.bat'
}
class BuildFlavour(Enum):
WIN_CPU = 'WIN_CPU'
WIN_CPU_MKLDNN = 'WIN_CPU_MKLDNN'
WIN_GPU = 'WIN_GPU'
WIN_GPU_MKLDNN = 'WIN_GPU_MKLDNN'
CMAKE_FLAGS = {
'WIN_CPU': ('-DUSE_CUDA=0 '
'-DUSE_CUDNN=0 '
'-DUSE_NVRTC=0 '
'-DUSE_OPENCV=1 '
'-DUSE_OPENMP=1 '
'-DUSE_PROFILER=1 '
'-DUSE_BLAS=open '
'-DUSE_LAPACK=1 '
'-DUSE_DIST_KVSTORE=0 '
'-DBUILD_CPP_EXAMPLES=1 '
'-DUSE_MKL_IF_AVAILABLE=0 '
'-DCMAKE_BUILD_TYPE=Release')
, 'WIN_CPU_MKLDNN': ('-DUSE_CUDA=0 '
'-DUSE_CUDNN=0 '
'-DUSE_NVRTC=0 '
'-DUSE_OPENCV=1 '
'-DUSE_OPENMP=1 '
'-DUSE_PROFILER=1 '
'-DUSE_BLAS=open '
'-DUSE_LAPACK=1 '
'-DUSE_DIST_KVSTORE=0 '
'-DUSE_MKL_IF_AVAILABLE=1 '
'-DCMAKE_BUILD_TYPE=Release')
, 'WIN_GPU': ('-DUSE_CUDA=1 '
'-DUSE_CUDNN=1 '
'-DUSE_NVRTC=1 '
'-DUSE_OPENCV=1 '
'-DUSE_OPENMP=1 '
'-DUSE_PROFILER=1 '
'-DUSE_BLAS=open '
'-DUSE_LAPACK=1 '
'-DUSE_DIST_KVSTORE=0 '
'-DCUDA_ARCH_NAME=Manual '
'-DCUDA_ARCH_BIN=52 '
'-DCUDA_ARCH_PTX=52 '
'-DCMAKE_CXX_FLAGS="/FS /MD /O2 /Ob2" '
'-DUSE_MKL_IF_AVAILABLE=0 '
'-DCMAKE_BUILD_TYPE=Release')
, 'WIN_GPU_MKLDNN': ('-DUSE_CUDA=1 '
'-DUSE_CUDNN=1 '
'-DUSE_NVRTC=1 '
'-DUSE_OPENCV=1 '
'-DUSE_OPENMP=1 '
'-DUSE_PROFILER=1 '
'-DUSE_BLAS=open '
'-DUSE_LAPACK=1 '
'-DUSE_DIST_KVSTORE=0 '
'-DCUDA_ARCH_NAME=Manual '
'-DCUDA_ARCH_BIN=52 '
'-DCUDA_ARCH_PTX=52 '
'-DUSE_MKLDNN=1 '
'-DCMAKE_CXX_FLAGS="/FS /MD /O2 /Ob2" '
'-DCMAKE_BUILD_TYPE=Release')
}
def windows_build(args):
logging.info("Using vcvars environment:\n{}".format(args.vcvars))
path = args.output
os.makedirs(path, exist_ok=True)
mxnet_root = get_mxnet_root()
logging.info("Found MXNet root: {}".format(mxnet_root))
with remember_cwd():
os.chdir(path)
cmd = "\"{}\" && cmake -G \"NMake Makefiles JOM\" {} {}".format(args.vcvars,
CMAKE_FLAGS[args.flavour],
mxnet_root)
logging.info("Generating project with CMake:\n{}".format(cmd))
check_call(cmd, shell=True)
cmd = "\"{}\" && jom".format(args.vcvars)
logging.info("Building with jom:\n{}".format(cmd))
t0 = int(time.time())
check_call(cmd, shell=True)
logging.info("Build flavour: {} complete in directory: \"{}\"".format(args.flavour, os.path.abspath(path)))
logging.info("Build took {}".format(datetime.timedelta(seconds=int(time.time() - t0))))
windows_package(args)
def windows_package(args):
pkgfile = 'windows_package.7z'
pkgdir = os.path.abspath('windows_package')
logging.info("Packaging libraries and headers in package: %s", pkgfile)
j = os.path.join
pkgdir_lib = os.path.abspath(j(pkgdir, 'lib'))
with remember_cwd():
os.chdir(args.output)
        logging.info("Looking for static libraries and dlls in: \"%s\"", os.getcwd())
libs = list(glob.iglob('**/*.lib', recursive=True))
dlls = list(glob.iglob('**/*.dll', recursive=True))
os.makedirs(pkgdir_lib, exist_ok=True)
for lib in libs:
logging.info("packing lib: %s", lib)
shutil.copy(lib, pkgdir_lib)
for dll in dlls:
logging.info("packing dll: %s", dll)
shutil.copy(dll, pkgdir_lib)
os.chdir(get_mxnet_root())
logging.info('packing python bindings')
copy_tree('python', j(pkgdir, 'python'))
logging.info('packing headers')
copy_tree('include', j(pkgdir, 'include'))
logging.info("Compressing package: %s", pkgfile)
check_call(['7z', 'a', pkgfile, pkgdir])
def nix_build(args):
path = args.output
os.makedirs(path, exist_ok=True)
with remember_cwd():
os.chdir(path)
logging.info("Generating project with CMake")
check_call("cmake \
-DUSE_CUDA=OFF \
-DUSE_OPENCV=OFF \
-DUSE_OPENMP=OFF \
-DCMAKE_BUILD_TYPE=Debug \
-GNinja ..", shell=True)
check_call("ninja", shell=True)
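# Example invocations (paths and flavours are illustrative; --vcvars defaults
# to the VS 2015 entry of KNOWN_VCVARS above):
#   python ci/build_windows.py -f WIN_CPU
#   python ci/build_windows.py -f WIN_GPU -o build_gpu --vcvars "C:\Program Files (x86)\Microsoft Visual Studio\2017\Community\VC\Auxiliary\Build\vcvarsx86_amd64.bat"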
def main():
logging.getLogger().setLevel(logging.INFO)
logging.basicConfig(format='%(asctime)-15s %(message)s')
logging.info("MXNet Windows build helper")
parser = argparse.ArgumentParser()
parser.add_argument("-o", "--output",
help="output directory",
default='build',
type=str)
parser.add_argument("--vcvars",
help="vcvars batch file location, typically inside vs studio install dir",
default=KNOWN_VCVARS['VS 2015'],
type=str)
parser.add_argument("--arch",
help="architecture",
default='x64',
type=str)
parser.add_argument("-f", "--flavour",
help="build flavour",
default='WIN_CPU',
choices=[x.name for x in BuildFlavour],
type=str)
args = parser.parse_args()
logging.info("Build flavour: %s", args.flavour)
system = platform.system()
if system == 'Windows':
logging.info("Detected Windows platform")
if 'OpenBLAS_HOME' not in os.environ:
os.environ["OpenBLAS_HOME"] = "C:\\Program Files\\OpenBLAS-v0.2.19"
if 'OpenCV_DIR' not in os.environ:
os.environ["OpenCV_DIR"] = "C:\\Program Files\\OpenCV-v3.4.1\\build"
if 'CUDA_PATH' not in os.environ:
os.environ["CUDA_PATH"] = "C:\\Program Files\\NVIDIA GPU Computing Toolkit\\CUDA\\v9.2"
windows_build(args)
elif system == 'Linux' or system == 'Darwin':
nix_build(args)
else:
logging.error("Don't know how to build for {} yet".format(platform.system()))
return 0
if __name__ == '__main__':
sys.exit(main())
|
dmpetrov/dataversioncontrol
|
dvc/ui/pager.py
|
"""Draws DAG in ASCII."""
import logging
import os
import pydoc
import sys
from rich.pager import Pager
from dvc.env import DVC_PAGER
from dvc.utils import format_link
logger = logging.getLogger(__name__)
DEFAULT_PAGER = "less"
LESS = "LESS"
PAGER_ENV = "PAGER"
def prepare_default_pager(
clear_screen: bool = False,
quit_if_one_screen: bool = True,
ansi_escapes: bool = True,
chop_long_lines: bool = True,
no_init: bool = True,
no_tilde: bool = False,
) -> str:
args = [DEFAULT_PAGER]
if clear_screen:
args.append("--clear-screen") # -c
if quit_if_one_screen:
args.append("--quit-if-one-screen") # -F
if ansi_escapes:
args.append("--RAW-CONTROL-CHARS") # -R
if chop_long_lines:
args.append("--chop-long-lines") # -S
if no_init:
args.append("--no-init") # -X
if no_tilde:
args.append("--tilde") # -~
return " ".join(args)
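# With the defaults above, prepare_default_pager() assembles:
#   "less --quit-if-one-screen --RAW-CONTROL-CHARS --chop-long-lines --no-init"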
def make_pager(cmd=None):
def _pager(text):
return pydoc.tempfilepager(pydoc.plain(text), cmd)
return _pager if cmd else pydoc.plainpager
def find_pager():
if not sys.stdout.isatty():
return None
# pylint: disable=redefined-outer-name
pager = os.getenv(DVC_PAGER)
if not pager:
pager = os.getenv(PAGER_ENV)
if not pager:
if os.system(f"({DEFAULT_PAGER}) 2>{os.devnull}") != 0:
logger.warning(
"Unable to find `less` in the PATH. Check out "
"{} for more info.".format(
format_link("https://man.dvc.org/pipeline/show")
)
)
else:
pager = DEFAULT_PAGER
if pager == DEFAULT_PAGER:
# if pager is less (i.e. default), regardless of `$LESS`, apply `-RS`.
# `-R` is required to render ansi escape sequences for exp show
# and, `-S` is required for horizontal scrolling.
less_env = bool(os.getenv(LESS))
pager = prepare_default_pager(
ansi_escapes=True,
chop_long_lines=True,
quit_if_one_screen=not less_env,
no_init=not less_env,
)
return pager
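# Resolution order sketch: DVC_PAGER wins over PAGER, which wins over the
# `less` default; e.g. running a paging command as `DVC_PAGER=more dvc dag`
# would page through `more` regardless of $PAGER (example command only).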
def pager(text: str) -> None:
_pager = find_pager()
logger.trace(f"Using pager: '{_pager}'") # type: ignore[attr-defined]
make_pager(_pager)(text)
class DvcPager(Pager):
def show(self, content: str) -> None:
pager(content)
|
infochimps-forks/ezbake-platform-services
|
efe/frontend_app/modules/ezRPConfig.py
|
# Copyright (C) 2013-2015 Computer Sciences Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''
Module to share configuration information across modules.
This global object is used throughout to store and retrieve configuration.
This avoids passing gConfig around as a variable.
All the configurations needed are added in ezReverseProxy.
'''
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
import sys
import os
import time
from gevent.queue import JoinableQueue
from ezbake.reverseproxy.thriftapi.ttypes import AuthorizationOperation
'''
We want addGreenlets() and kill() to access global members without an instance,
so the simplest approach is to make them plain module-level functions rather
than class methods (a @staticmethod decorator was tried first).
#class EzRPConfig(object):
'''
appName = 'EzBakeFrontend'
watches = {}
containerDir = os.path.abspath(os.path.join(os.path.abspath(__file__),os.pardir,os.pardir,os.pardir,os.pardir))
configurationChangeQueue = JoinableQueue()
run = True
clientService = None
zkMonitor = None
cfgGreenlet = None
wGreenlet = None
current_milli_time = lambda: int(round(time.time() * 1000))
if getattr(sys, 'frozen', False):
containerDir = os.path.abspath(os.path.join(os.path.dirname(sys.executable),os.pardir,os.pardir))
templateDir = os.path.join(containerDir,'app','templates')
nginx = os.path.join(containerDir,'app','nginx')
eznginxlibpath = os.path.join(containerDir,'libs')
workingDirectory = os.path.join(containerDir,'wd')
logDirectory = os.path.join(containerDir,'logs')
eznginxmoduleLogProp = os.path.join(logDirectory,'log4j.properties')
configDirectory = os.path.join(workingDirectory,'conf')
mainConfig = os.path.join(configDirectory,'nginx.conf')
confdDirectory = os.path.join(configDirectory,'conf.d')
manualDirectory = os.path.join(containerDir,'manual')
ezconfig_dir = os.path.join(containerDir, 'config')
htmlRootDir = os.path.join(containerDir, 'static_content')
favicon_file = os.path.join(htmlRootDir, 'ezbstatic', 'images', 'favicon.ico')
# external facing ssl files for nginx
ssl_cadir = os.path.join(ezconfig_dir,'ssl/user_ca_files')
ssl_keyfile = os.path.join(ezconfig_dir,'ssl/server/server.key')
ssl_certfile = os.path.join(ezconfig_dir,'ssl/server/server.crt')
ssl_server_certs = os.path.join(workingDirectory, 'ssl')
ssl_server_certs_dirs = [os.path.join(workingDirectory, 'ssl_a'), os.path.join(workingDirectory, 'ssl_b')]
ssl_cafile = os.path.join(containerDir,'wd','CAchain.pem')
# internal ssl files for thrift service w/in EzBake
ezEtc = os.path.join(containerDir,'etc')
ezcertdir = os.path.join(containerDir,'etc/ezbake/pki/cert/config/ssl')
ez_keyfile = os.path.join(ezcertdir,'application.priv')
ez_cafile = os.path.join(ezcertdir,'ezbakeca.crt')
ez_certfile = os.path.join(ezcertdir,'application.crt')
# Static content directory to serve per site static content
static_contents = os.path.join(containerDir,'ezbappstatic')
static_contents_dirs = [os.path.join(containerDir, 'sc_a'), os.path.join(containerDir, 'sc_b')]
mainConfigTemplate = os.path.join(templateDir,'nginx.conf')
mimeTemplate = os.path.join(templateDir,'mime.types')
mimeConfig = os.path.join(configDirectory,'mime.types')
nginxPidFile = os.path.join(workingDirectory,'nginx_%d.pid' % os.getpid())
shutdownFile = os.path.join(workingDirectory,'delete_this_file_to_shutdown_efe')
ezproxyciphers = "HIGH:!DSS:!aNULL@STRENGTH"
defaultEznginxOps = AuthorizationOperation.USER_INFO
# Restrict access to EzFrontend Thrift services to the following CN
ez_frontend_access = r'_Ez_Deployer|_Ez_EFEUI'
def addGreenlets(thriftService, kzMonitor, cfgChange, shutdown):
global clientService
global zkMonitor
global cfgGreenlet
global wGreenlet
clientService = thriftService
zkMonitor = kzMonitor
cfgGreenlet = cfgChange
wGreenlet = shutdown
def kill():
if clientService:
clientService.kill()
if zkMonitor:
zkMonitor.kill()
if cfgGreenlet:
cfgGreenlet.kill()
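# Hypothetical wiring sketch (the greenlets are created in ezReverseProxy;
# the names below are made up for illustration):
#   import ezRPConfig as gConfig
#   gConfig.addGreenlets(thrift_svc, zk_watch, cfg_worker, shutdown_watch)
#   ...
#   gConfig.kill()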
|
stdweird/aquilon
|
tests/broker/test_del_10gig_hardware.py
|
#!/usr/bin/env python2.6
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2009,2010,2011,2012,2013 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for testing commands that remove virtual hardware."""
import unittest
if __name__ == "__main__":
import utils
utils.import_depends()
from brokertest import TestBrokerCommand
class TestDel10GigHardware(TestBrokerCommand):
def test_200_del_hosts(self):
for i in range(0, 8) + range(9, 17):
hostname = "ivirt%d.aqd-unittest.ms.com" % (1 + i)
command = "del_host --hostname %s" % hostname
if i < 9:
net_index = (i % 4) + 2
usable_index = i / 4
else:
net_index = ((i - 9) % 4) + 6
usable_index = (i - 9) / 4
ip = self.net.unknown[net_index].usable[usable_index]
self.dsdb_expect_delete(ip)
(out, err) = self.successtest(command.split(" "))
self.assertEmptyOut(out, command)
self.dsdb_verify()
def test_300_delaux(self):
for i in range(1, 25):
hostname = "evh%d-e1.aqd-unittest.ms.com" % (i + 50)
self.dsdb_expect_delete(self.net.vm_storage_net[0].usable[i - 1])
command = ["del", "auxiliary", "--auxiliary", hostname]
(out, err) = self.successtest(command)
self.assertEmptyOut(out, command)
self.dsdb_verify()
def test_700_delmachines(self):
for i in range(0, 8) + range(9, 17):
machine = "evm%d" % (10 + i)
self.noouttest(["del", "machine", "--machine", machine])
def test_800_verifydelmachines(self):
for i in range(0, 18):
machine = "evm%d" % (10 + i)
command = "show machine --machine %s" % machine
self.notfoundtest(command.split(" "))
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(TestDel10GigHardware)
unittest.TextTestRunner(verbosity=2).run(suite)
|
apache/phoenix
|
bin/pherf-standalone.py
|
#!/usr/bin/env python
############################################################################
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
############################################################################
from __future__ import print_function
from phoenix_utils import tryDecode
import os
import subprocess
import sys
import phoenix_utils
phoenix_utils.setPath()
args = phoenix_utils.shell_quote(sys.argv[1:])
# HBase configuration folder path (where hbase-site.xml reside) for
# HBase/Phoenix client side property override
hbase_config_path = phoenix_utils.hbase_conf_dir
java_home = os.getenv('JAVA_HOME')
# load hbase-env.??? to extract JAVA_HOME, HBASE_PID_DIR, HBASE_LOG_DIR
hbase_env_path = None
hbase_env_cmd = None
if os.name == 'posix':
hbase_env_path = os.path.join(hbase_config_path, 'hbase-env.sh')
hbase_env_cmd = ['bash', '-c', 'source %s && env' % hbase_env_path]
elif os.name == 'nt':
hbase_env_path = os.path.join(hbase_config_path, 'hbase-env.cmd')
hbase_env_cmd = ['cmd.exe', '/c', 'call %s & set' % hbase_env_path]
if not hbase_env_path or not hbase_env_cmd:
sys.stderr.write("hbase-env file unknown on platform {}{}".format(os.name, os.linesep))
sys.exit(-1)
hbase_env = {}
if os.path.isfile(hbase_env_path):
p = subprocess.Popen(hbase_env_cmd, stdout = subprocess.PIPE)
for x in p.stdout:
(k, _, v) = tryDecode(x).partition('=')
hbase_env[k.strip()] = v.strip()
if 'JAVA_HOME' in hbase_env:
java_home = hbase_env['JAVA_HOME']
if java_home:
java = os.path.join(java_home, 'bin', 'java')
else:
java = 'java'
java_cmd = java +' -Xms512m -Xmx3072m -cp "' + \
phoenix_utils.pherf_conf_path + os.pathsep + \
phoenix_utils.hbase_conf_dir + os.pathsep + \
phoenix_utils.slf4j_backend_jar + os.pathsep + \
phoenix_utils.logging_jar + os.pathsep + \
phoenix_utils.phoenix_client_embedded_jar + os.pathsep +\
phoenix_utils.phoenix_pherf_jar + \
'" -Dlog4j.configuration=file:' + \
os.path.join(phoenix_utils.current_dir, "log4j.properties") + \
" org.apache.phoenix.pherf.Pherf " + args
os.execl("/bin/sh", "/bin/sh", "-c", java_cmd)
|
linkedin/naarad
|
src/naarad/__init__.py
|
# coding=utf-8
"""
Copyright 2013 LinkedIn Corp. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from collections import defaultdict
import ConfigParser
import errno
import logging
import os
import threading
import naarad.utils
import naarad.naarad_constants as CONSTANTS
from naarad_imports import metric_classes
from naarad_imports import aggregate_metric_classes
from naarad_imports import graphing_modules
from naarad_imports import reporting_modules
from naarad.reporting.diff import Diff
from naarad.reporting.diff import NaaradReport
logger = logging.getLogger('naarad')
class _Analysis(object):
"""
Class that saves state for analysis to be conducted
"""
def __init__(self, ts_start, config, test_id=None):
self.ts_start = ts_start
self.ts_end = None
self.test_id = test_id
self.config = config
self.description = ''
self.input_directory = None
self.output_directory = None
self.resource_path = 'resources'
self.status = CONSTANTS.OK
self.sla_data = {}
self.stats_data = {}
self.variables = None
class Naarad(object):
"""
  Naarad base class that lets the caller run multiple naarad analyses
"""
def __init__(self):
self._default_test_id = -1
self._analyses = {}
self._resource_path = 'resources'
self._input_directory = None
self._output_directory = None
self.return_exit_code = False
self.skip_plots = False
self.available_graphing_modules = graphing_modules
logger.info('Available graphing modules: %s ', ','.join(self.available_graphing_modules.keys()))
naarad.metrics.metric.Metric.graphing_modules = self.available_graphing_modules
naarad.reporting.diff.Diff.graphing_modules = self.available_graphing_modules
naarad.metrics.metric.Metric.device_types = CONSTANTS.device_type_metrics
def create_analysis(self, config):
"""
Create Analysis and save in Naarad from config
:param config:
:return:
"""
self._default_test_id += 1
self._analyses[self._default_test_id] = _Analysis(ts_start=None, config=config, test_id=self._default_test_id)
def signal_start(self, config, test_id=None, **kwargs):
"""
Initialize an analysis object and set ts_start for the analysis represented by test_id
:param test_id: integer that represents the analysis
:param config: config can be a ConfigParser.ConfigParser object or a string specifying local or http(s) location
for config
:return: test_id
"""
if not test_id:
self._default_test_id += 1
test_id = self._default_test_id
self._analyses[test_id] = _Analysis(naarad.utils.get_standardized_timestamp('now', None), config,
test_id=test_id)
if kwargs:
if 'description' in kwargs.keys():
self._analyses[test_id].description = kwargs['description']
if 'input_directory' in kwargs.keys():
self._analyses[test_id].input_directory = kwargs['input_directory']
if 'output_directory' in kwargs.keys():
self._analyses[test_id].output_directory = kwargs['output_directory']
return test_id
def signal_stop(self, test_id=None):
"""
Set ts_end for the analysis represented by test_id
:param test_id: integer that represents the analysis
:return: test_id
"""
if test_id is None:
test_id = self._default_test_id
if self._analyses[test_id].ts_end:
return CONSTANTS.OK
self._analyses[test_id].ts_end = naarad.utils.get_standardized_timestamp('now', None)
return CONSTANTS.OK
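  # Hypothetical end-to-end sketch of this API (paths are made up; the methods
  # are the ones defined on this class):
  #   naarad_obj = Naarad()
  #   test_id = naarad_obj.signal_start('/configs/naarad.cfg', description='run 1')
  #   ... run the workload under test ...
  #   naarad_obj.signal_stop(test_id)
  #   status = naarad_obj.analyze('/logs/run1', '/reports/run1')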
def get_failed_analyses(self):
"""
Returns a list of test_id for which naarad analysis failed
:return: list of test_ids
"""
failed_analyses = []
for test_id in self._analyses.keys():
if self._analyses[test_id].status != CONSTANTS.OK:
failed_analyses.append(test_id)
return failed_analyses
def get_sla_data(self, test_id):
"""
Returns sla data for all the metrics associated with a test_id
:return: dict of form { metric.label:metric.sla_map}
"""
return self._analyses[test_id].sla_data
def _set_sla_data(self, test_id, metrics):
"""
Get sla data from each metric and set it in the _Analysis object specified by test_id to make it available
for retrieval
    :return: currently always returns CONSTANTS.OK. May be enhanced in the future to return additional status
"""
for metric in metrics:
self._analyses[test_id].sla_data[metric.label] = metric.sla_map
return CONSTANTS.OK
def get_stats_data(self, test_id):
"""
Returns summary stats data for all the metrics associated with a test_id
:return: dict of form { metric.label:metric.summary_stats}
"""
return self._analyses[test_id].stats_data
def _set_stats_data(self, test_id, metrics):
"""
Get summary stats data from each metric and set it in the _Analysis object specified by test_id to make it available
for retrieval
    :return: currently always returns CONSTANTS.OK. May be enhanced in the future to return additional status
"""
for metric in metrics:
self._analyses[test_id].stats_data[metric.label] = metric.summary_stats
return CONSTANTS.OK
def _create_output_directories(self, analysis):
"""
Create the necessary output and resource directories for the specified analysis
:param: analysis: analysis associated with a given test_id
"""
try:
os.makedirs(analysis.output_directory)
except OSError as exception:
if exception.errno != errno.EEXIST:
raise
try:
resource_directory = os.path.join(analysis.output_directory, analysis.resource_path)
os.makedirs(resource_directory)
except OSError as exception:
if exception.errno != errno.EEXIST:
raise
def _run_pre(self, analysis, run_steps):
"""
If Naarad is run in CLI mode, execute any pre run steps specified in the config. ts_start/ts_end are set based on
workload run steps if any.
:param: analysis: The analysis object being processed
    :param: run_steps: list of pre run steps
"""
workload_run_steps = []
for run_step in sorted(run_steps, key=lambda step: step.run_rank):
run_step.run()
if run_step.run_type == CONSTANTS.RUN_TYPE_WORKLOAD:
workload_run_steps.append(run_step)
# Get analysis time period from workload run steps
if len(workload_run_steps) > 0:
analysis.ts_start, analysis.ts_end = naarad.utils.get_run_time_period(workload_run_steps)
return CONSTANTS.OK
def _run_post(self, run_steps):
"""
If Naarad is run in CLI mode, execute any post run steps specified in the config
:param: run_steps: list of post run steps
"""
for run_step in sorted(run_steps, key=lambda step: step.run_rank):
run_step.run()
return CONSTANTS.OK
def _process_args(self, analysis, args):
"""
When Naarad is run in CLI mode, get the CL arguments and update the analysis
:param: analysis: The analysis being processed
:param: args: Command Line Arguments received by naarad
"""
if args.exit_code:
self.return_exit_code = args.exit_code
if args.no_plots:
self.skip_plots = args.no_plots
if args.start:
analysis.ts_start = naarad.utils.get_standardized_timestamp(args.start, None)
if args.end:
analysis.ts_end = naarad.utils.get_standardized_timestamp(args.end, None)
if args.variables:
analysis.variables = naarad.utils.get_variables(args)
return CONSTANTS.OK
def analyze(self, input_directory, output_directory, **kwargs):
"""
    Run all the analyses saved in self._analyses, sorted by test_id.
This is useful when Naarad() is used by other programs and multiple analyses are run
In naarad CLI mode, len(_analyses) == 1
:param: input_directory: location of log files
:param: output_directory: root directory for analysis output
:param: **kwargs: Optional keyword args
:return: int: status code.
"""
is_api_call = True
if len(self._analyses) == 0:
if 'config' not in kwargs.keys():
return CONSTANTS.ERROR
self.create_analysis(kwargs['config'])
if 'args' in kwargs:
self._process_args(self._analyses[0], kwargs['args'])
is_api_call = False
error_count = 0
self._input_directory = input_directory
self._output_directory = output_directory
for test_id in sorted(self._analyses.keys()):
# Setup
if not self._analyses[test_id].input_directory:
self._analyses[test_id].input_directory = input_directory
if not self._analyses[test_id].output_directory:
if len(self._analyses) > 1:
self._analyses[test_id].output_directory = os.path.join(output_directory, str(test_id))
else:
self._analyses[test_id].output_directory = output_directory
if('config' in kwargs.keys()) and (not self._analyses[test_id].config):
self._analyses[test_id].config = kwargs['config']
self._create_output_directories(self._analyses[test_id])
# Actually run analysis
self._analyses[test_id].status = self.run(self._analyses[test_id], is_api_call, **kwargs)
if self._analyses[test_id].status != CONSTANTS.OK:
error_count += 1
if len(self._analyses) == 1:
return self._analyses[0].status
elif error_count > 0:
return CONSTANTS.ERROR
else:
return CONSTANTS.OK
def run(self, analysis, is_api_call, **kwargs):
"""
    Run naarad analysis for the specified analysis object
    :param analysis: the analysis object to process
:param **kwargs: Additional keyword args can be passed in here for future enhancements
:return:
"""
threads = []
crossplots = []
report_args = {}
metrics = defaultdict()
run_steps = defaultdict(list)
discovery_mode = False
graph_timezone = None
graphing_library = None
if isinstance(analysis.config, str):
if not naarad.utils.is_valid_file(analysis.config):
return CONSTANTS.INVALID_CONFIG
config_object = ConfigParser.ConfigParser(analysis.variables)
config_object.optionxform = str
config_object.read(analysis.config)
elif isinstance(analysis.config, ConfigParser.ConfigParser):
config_object = analysis.config
else:
if is_api_call:
return CONSTANTS.INVALID_CONFIG
else:
metrics['metrics'] = naarad.utils.discover_by_name(analysis.input_directory, analysis.output_directory)
if len(metrics['metrics']) == 0:
logger.warning('Unable to auto detect metrics in the specified input directory: %s', analysis.input_directory)
return CONSTANTS.ERROR
else:
discovery_mode = True
metrics['aggregate_metrics'] = []
if not discovery_mode:
metrics, run_steps, crossplots, report_args, graph_timezone, graphing_library = self._process_naarad_config(config_object, analysis)
if graphing_library is None:
graphing_library = CONSTANTS.DEFAULT_GRAPHING_LIBRARY
# If graphing libraries are not installed, skip static images
if graphing_library not in self.available_graphing_modules.keys():
logger.error("Naarad cannot import graphing library %s on your system. Will not generate static charts", graphing_library)
self.skip_plots = True
if not is_api_call:
self._run_pre(analysis, run_steps['pre'])
for metric in metrics['metrics']:
if analysis.ts_start:
metric.ts_start = analysis.ts_start
if analysis.ts_end:
metric.ts_end = analysis.ts_end
thread = threading.Thread(target=naarad.utils.parse_and_plot_single_metrics,
args=(metric, graph_timezone, analysis.output_directory, analysis.input_directory, graphing_library, self.skip_plots))
thread.start()
threads.append(thread)
for t in threads:
t.join()
for metric in metrics['aggregate_metrics']:
thread = threading.Thread(target=naarad.utils.parse_and_plot_single_metrics,
args=(metric, graph_timezone, analysis.output_directory, analysis.input_directory, graphing_library, self.skip_plots))
thread.start()
threads.append(thread)
for t in threads:
t.join()
self._set_sla_data(analysis.test_id, metrics['metrics'] + metrics['aggregate_metrics'])
self._set_stats_data(analysis.test_id, metrics['metrics'] + metrics['aggregate_metrics'])
if len(crossplots) > 0 and not self.skip_plots:
correlated_plots = naarad.utils.nway_plotting(crossplots, metrics['metrics'] + metrics['aggregate_metrics'],
os.path.join(analysis.output_directory, analysis.resource_path),
analysis.resource_path, graphing_library)
else:
correlated_plots = []
rpt = reporting_modules['report'](None, analysis.output_directory, os.path.join(analysis.output_directory, analysis.resource_path), analysis.resource_path,
metrics['metrics'] + metrics['aggregate_metrics'], correlated_plots=correlated_plots, **report_args)
rpt.generate()
if not is_api_call:
self._run_post(run_steps['post'])
if self.return_exit_code:
for metric in metrics['metrics'] + metrics['aggregate_metrics']:
if metric.status == CONSTANTS.SLA_FAILED:
return CONSTANTS.SLA_FAILURE
return CONSTANTS.OK
def diff(self, test_id_1, test_id_2, config=None, **kwargs):
"""
Create a diff report using test_id_1 as a baseline
:param: test_id_1: test id to be used as baseline
:param: test_id_2: test id to compare against baseline
:param: config file for diff (optional)
:param: **kwargs: keyword arguments
"""
output_directory = os.path.join(self._output_directory, 'diff_' + str(test_id_1) + '_' + str(test_id_2))
if kwargs:
if 'output_directory' in kwargs.keys():
output_directory = kwargs['output_directory']
diff_report = Diff([NaaradReport(self._analyses[test_id_1].output_directory, None),
NaaradReport(self._analyses[test_id_2].output_directory, None)],
'diff', output_directory, os.path.join(output_directory, self._resource_path),
self._resource_path)
if config:
naarad.utils.extract_diff_sla_from_config_file(diff_report, config)
diff_report.generate()
if diff_report.sla_failures > 0:
return CONSTANTS.SLA_FAILURE
if diff_report.status != 'OK':
return CONSTANTS.ERROR
return CONSTANTS.OK
def diff_reports_by_location(self, report1_location, report2_location, output_directory, config=None, **kwargs):
"""
Create a diff report using report1 as a baseline
:param: report1_location: report to be used as baseline
:param: report2_location: report to compare against baseline
:param: config file for diff (optional)
:param: **kwargs: keyword arguments
"""
if kwargs:
if 'output_directory' in kwargs.keys():
output_directory = kwargs['output_directory']
diff_report = Diff([NaaradReport(report1_location, None), NaaradReport(report2_location, None)], 'diff',
output_directory, os.path.join(output_directory, self._resource_path), self._resource_path)
if config:
naarad.utils.extract_diff_sla_from_config_file(diff_report, config)
diff_report.generate()
if diff_report.sla_failures > 0:
return CONSTANTS.SLA_FAILURE
if diff_report.status != 'OK':
return CONSTANTS.ERROR
return CONSTANTS.OK
def _process_naarad_config(self, config, analysis):
"""
Process the config file associated with a particular analysis and return metrics, run_steps and crossplots.
    Also sets output directory and resource_path for an analysis
"""
graph_timezone = None
output_directory = analysis.output_directory
resource_path = analysis.resource_path
run_steps = defaultdict(list)
metrics = defaultdict(list)
indir_default = ''
crossplots = []
report_args = {}
graphing_library = None
ts_start, ts_end = None, None
if config.has_section('GLOBAL'):
ts_start, ts_end = naarad.utils.parse_global_section(config, 'GLOBAL')
if config.has_option('GLOBAL', 'user_defined_metrics'):
naarad.utils.parse_user_defined_metric_classes(config, metric_classes)
config.remove_section('GLOBAL')
if config.has_section('REPORT'):
report_args = naarad.utils.parse_report_section(config, 'REPORT')
config.remove_section('REPORT')
for section in config.sections():
# GRAPH section is optional
if section == 'GRAPH':
graphing_library, crossplots, outdir_default, indir_default, graph_timezone = \
naarad.utils.parse_graph_section(config, section, output_directory, indir_default)
elif section.startswith('RUN-STEP'):
run_step = naarad.utils.parse_run_step_section(config, section)
if not run_step:
logger.error('Ignoring section %s, could not parse it correctly', section)
continue
if run_step.run_order == CONSTANTS.PRE_ANALYSIS_RUN:
run_steps['pre'].append(run_step)
# DURING_ANALYSIS_RUN not supported yet
elif run_step.run_order == CONSTANTS.DURING_ANALYSIS_RUN:
run_steps['in'].append(run_step)
elif run_step.run_order == CONSTANTS.POST_ANALYSIS_RUN:
run_steps['post'].append(run_step)
else:
logger.error('Unknown RUN-STEP run_order specified')
else:
# section name is used to create sub-directories, so enforce it.
if not naarad.utils.is_valid_metric_name(section):
logger.critical('Section name %s is invalid! Only letters, digits, dot(.), dash(-), underscore(_) are allowed'
% section)
return CONSTANTS.CRITICAL_FAILURE
if section == 'SAR-*':
hostname, infile, label, ts_start, ts_end, precision, kwargs, rule_strings = \
naarad.utils.parse_basic_metric_options(config, section)
sar_metrics = naarad.utils.get_all_sar_objects(metrics, infile, hostname, output_directory, label, ts_start,
ts_end, None)
for sar_metric in sar_metrics:
if sar_metric.ts_start is None and (sar_metric.ts_end is None or sar_metric.ts_end > ts_start):
sar_metric.ts_start = ts_start
if sar_metric.ts_end is None and (sar_metric.ts_start is None or ts_end > sar_metric.ts_start):
sar_metric.ts_end = ts_end
metrics['metrics'].extend(sar_metrics)
else:
new_metric = naarad.utils.parse_metric_section(config, section, metric_classes, metrics['metrics'],
aggregate_metric_classes, output_directory, resource_path)
if new_metric.ts_start is None and (new_metric.ts_end is None or new_metric.ts_end > ts_start):
new_metric.ts_start = ts_start
if new_metric.ts_end is None and (new_metric.ts_start is None or ts_end > new_metric.ts_start):
new_metric.ts_end = ts_end
metric_type = section.split('-')[0]
if metric_type in aggregate_metric_classes:
metrics['aggregate_metrics'].append(new_metric)
else:
metrics['metrics'].append(new_metric)
return metrics, run_steps, crossplots, report_args, graph_timezone, graphing_library
|
noplay/aiohttp
|
tests/test_parsers.py
|
"""Tests for parsers.py"""
import asyncio
import unittest
import unittest.mock
from aiohttp import errors
from aiohttp import parsers
class StreamParserTests(unittest.TestCase):
DATA = b'line1\nline2\nline3\n'
def setUp(self):
self.lines_parser = parsers.LinesParser()
self.loop = asyncio.new_event_loop()
asyncio.set_event_loop(None)
def tearDown(self):
self.loop.close()
def test_at_eof(self):
proto = parsers.StreamParser()
self.assertFalse(proto.at_eof())
proto.feed_eof()
self.assertTrue(proto.at_eof())
def test_resume_stream(self):
transp = unittest.mock.Mock()
proto = parsers.StreamParser()
proto.set_transport(transp)
proto._paused = True
proto._stream_paused = True
proto.resume_stream()
transp.resume_reading.assert_called_with()
self.assertFalse(proto._paused)
self.assertFalse(proto._stream_paused)
def test_exception(self):
stream = parsers.StreamParser()
self.assertIsNone(stream.exception())
exc = ValueError()
stream.set_exception(exc)
self.assertIs(stream.exception(), exc)
def test_exception_connection_error(self):
stream = parsers.StreamParser()
self.assertIsNone(stream.exception())
exc = ConnectionError()
stream.set_exception(exc)
self.assertIsNot(stream.exception(), exc)
self.assertIsInstance(stream.exception(), RuntimeError)
self.assertIs(stream.exception().__cause__, exc)
self.assertIs(stream.exception().__context__, exc)
def test_exception_waiter(self):
stream = parsers.StreamParser()
stream._parser = self.lines_parser
buf = stream._output = parsers.FlowControlDataQueue(
stream, loop=self.loop)
exc = ValueError()
stream.set_exception(exc)
self.assertIs(buf.exception(), exc)
def test_feed_data(self):
stream = parsers.StreamParser()
stream.feed_data(self.DATA)
self.assertEqual(self.DATA, bytes(stream._buffer))
def test_feed_none_data(self):
stream = parsers.StreamParser()
stream.feed_data(None)
self.assertEqual(b'', bytes(stream._buffer))
def test_feed_data_pause_reading(self):
transp = unittest.mock.Mock()
proto = parsers.StreamParser()
proto.set_transport(transp)
proto.feed_data(b'1' * (2 ** 16 * 3))
transp.pause_reading.assert_called_with()
self.assertTrue(proto._paused)
def test_feed_data_pause_reading_not_supported(self):
transp = unittest.mock.Mock()
proto = parsers.StreamParser()
proto.set_transport(transp)
transp.pause_reading.side_effect = NotImplementedError()
proto.feed_data(b'1' * (2 ** 16 * 3))
self.assertIsNone(proto._transport)
def test_set_parser_unset_prev(self):
stream = parsers.StreamParser()
stream.set_parser(self.lines_parser)
unset = stream.unset_parser = unittest.mock.Mock()
stream.set_parser(self.lines_parser)
self.assertTrue(unset.called)
def test_set_parser_exception(self):
stream = parsers.StreamParser()
exc = ValueError()
stream.set_exception(exc)
s = stream.set_parser(self.lines_parser)
self.assertIs(s.exception(), exc)
def test_set_parser_feed_existing(self):
stream = parsers.StreamParser()
stream.feed_data(b'line1')
stream.feed_data(b'\r\nline2\r\ndata')
s = stream.set_parser(self.lines_parser)
self.assertEqual([bytearray(b'line1\r\n'), bytearray(b'line2\r\n')],
list(s._buffer))
self.assertEqual(b'data', bytes(stream._buffer))
self.assertIsNotNone(stream._parser)
stream.unset_parser()
self.assertIsNone(stream._parser)
self.assertEqual(b'data', bytes(stream._buffer))
self.assertTrue(s._eof)
def test_set_parser_feed_existing_exc(self):
def p(out, buf):
yield from buf.read(1)
raise ValueError()
stream = parsers.StreamParser()
stream.feed_data(b'line1')
s = stream.set_parser(p)
self.assertIsInstance(s.exception(), ValueError)
def test_set_parser_feed_existing_eof(self):
stream = parsers.StreamParser()
stream.feed_data(b'line1')
stream.feed_data(b'\r\nline2\r\ndata')
stream.feed_eof()
s = stream.set_parser(self.lines_parser)
self.assertEqual([bytearray(b'line1\r\n'), bytearray(b'line2\r\n')],
list(s._buffer))
self.assertEqual(b'data', bytes(stream._buffer))
self.assertIsNone(stream._parser)
def test_set_parser_feed_existing_eof_exc(self):
def p(out, buf):
try:
while True:
yield # read chunk
except parsers.EofStream:
raise ValueError()
stream = parsers.StreamParser()
stream.feed_data(b'line1')
stream.feed_eof()
s = stream.set_parser(p)
self.assertIsInstance(s.exception(), ValueError)
def test_set_parser_feed_existing_eof_unhandled_eof(self):
def p(out, buf):
while True:
yield # read chunk
stream = parsers.StreamParser()
stream.feed_data(b'line1')
stream.feed_eof()
s = stream.set_parser(p)
self.assertFalse(s.is_eof())
self.assertIsInstance(s.exception(), RuntimeError)
def test_set_parser_unset(self):
stream = parsers.StreamParser(paused=False)
s = stream.set_parser(self.lines_parser)
stream.feed_data(b'line1\r\nline2\r\n')
self.assertEqual(
[bytearray(b'line1\r\n'), bytearray(b'line2\r\n')],
list(s._buffer))
self.assertEqual(b'', bytes(stream._buffer))
stream.unset_parser()
self.assertTrue(s._eof)
self.assertEqual(b'', bytes(stream._buffer))
def test_set_parser_feed_existing_stop(self):
def LinesParser(out, buf):
try:
out.feed_data((yield from buf.readuntil(b'\n')))
out.feed_data((yield from buf.readuntil(b'\n')))
finally:
out.feed_eof()
stream = parsers.StreamParser()
stream.feed_data(b'line1')
stream.feed_data(b'\r\nline2\r\ndata')
s = stream.set_parser(LinesParser)
self.assertEqual(b'line1\r\nline2\r\n', b''.join(s._buffer))
self.assertEqual(b'data', bytes(stream._buffer))
self.assertIsNone(stream._parser)
self.assertTrue(s._eof)
def test_feed_parser(self):
stream = parsers.StreamParser(paused=False)
s = stream.set_parser(self.lines_parser)
stream.feed_data(b'line1')
stream.feed_data(b'\r\nline2\r\ndata')
self.assertEqual(b'data', bytes(stream._buffer))
stream.feed_eof()
self.assertEqual([bytearray(b'line1\r\n'), bytearray(b'line2\r\n')],
list(s._buffer))
self.assertEqual(b'data', bytes(stream._buffer))
self.assertTrue(s.is_eof())
def test_feed_parser_exc(self):
def p(out, buf):
yield # read chunk
raise ValueError()
stream = parsers.StreamParser(paused=False)
s = stream.set_parser(p)
stream.feed_data(b'line1')
self.assertIsInstance(s.exception(), ValueError)
self.assertEqual(b'', bytes(stream._buffer))
def test_feed_parser_stop(self):
def p(out, buf):
yield # chunk
stream = parsers.StreamParser(paused=False)
stream.set_parser(p)
stream.feed_data(b'line1')
self.assertIsNone(stream._parser)
self.assertEqual(b'', bytes(stream._buffer))
def test_feed_eof_exc(self):
def p(out, buf):
try:
while True:
yield # read chunk
except parsers.EofStream:
raise ValueError()
stream = parsers.StreamParser()
s = stream.set_parser(p)
stream.feed_data(b'line1')
self.assertIsNone(s.exception())
stream.feed_eof()
self.assertIsInstance(s.exception(), ValueError)
def test_feed_eof_stop(self):
def p(out, buf):
try:
while True:
yield # read chunk
except parsers.EofStream:
out.feed_eof()
stream = parsers.StreamParser()
s = stream.set_parser(p)
stream.feed_data(b'line1')
stream.feed_eof()
self.assertTrue(s._eof)
def test_feed_eof_unhandled_eof(self):
def p(out, buf):
while True:
yield # read chunk
stream = parsers.StreamParser()
s = stream.set_parser(p)
stream.feed_data(b'line1')
stream.feed_eof()
self.assertFalse(s.is_eof())
self.assertIsInstance(s.exception(), RuntimeError)
def test_feed_parser2(self):
stream = parsers.StreamParser()
s = stream.set_parser(self.lines_parser)
stream.feed_data(b'line1\r\nline2\r\n')
stream.feed_eof()
self.assertEqual(
[bytearray(b'line1\r\n'), bytearray(b'line2\r\n')],
list(s._buffer))
self.assertEqual(b'', bytes(stream._buffer))
self.assertTrue(s._eof)
def test_unset_parser_eof_exc(self):
def p(out, buf):
try:
while True:
yield # read chunk
except parsers.EofStream:
raise ValueError()
stream = parsers.StreamParser()
s = stream.set_parser(p)
stream.feed_data(b'line1')
stream.unset_parser()
self.assertIsInstance(s.exception(), ValueError)
self.assertIsNone(stream._parser)
def test_unset_parser_eof_unhandled_eof(self):
def p(out, buf):
while True:
yield # read chunk
stream = parsers.StreamParser()
s = stream.set_parser(p)
stream.feed_data(b'line1')
stream.unset_parser()
self.assertIsInstance(s.exception(), RuntimeError)
self.assertFalse(s.is_eof())
def test_unset_parser_stop(self):
def p(out, buf):
try:
while True:
yield # read chunk
except parsers.EofStream:
out.feed_eof()
stream = parsers.StreamParser()
s = stream.set_parser(p)
stream.feed_data(b'line1')
stream.unset_parser()
self.assertTrue(s._eof)
def test_eof_exc(self):
def p(out, buf):
while True:
yield # read chunk
class CustomEofErr(Exception):
pass
stream = parsers.StreamParser(eof_exc_class=CustomEofErr)
s = stream.set_parser(p)
stream.feed_eof()
self.assertIsInstance(s.exception(), CustomEofErr)
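# The parser protocol exercised above: a parser is a generator taking
# (out, buf); data arrives via .send() and EOF via .throw(parsers.EofStream).
# A minimal custom parser in that style would look like (sketch only):
#
#   def single_line_parser(out, buf):
#       out.feed_data((yield from buf.readuntil(b'\n')))
#       out.feed_eof()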
class StreamProtocolTests(unittest.TestCase):
def test_connection_made(self):
tr = unittest.mock.Mock()
proto = parsers.StreamProtocol()
self.assertIsNone(proto.transport)
proto.connection_made(tr)
self.assertIs(proto.transport, tr)
def test_connection_lost(self):
proto = parsers.StreamProtocol()
proto.connection_made(unittest.mock.Mock())
proto.connection_lost(None)
self.assertIsNone(proto.transport)
self.assertIsNone(proto.writer)
self.assertTrue(proto.reader._eof)
def test_connection_lost_exc(self):
proto = parsers.StreamProtocol()
proto.connection_made(unittest.mock.Mock())
exc = ValueError()
proto.connection_lost(exc)
self.assertIs(proto.reader.exception(), exc)
def test_data_received(self):
proto = parsers.StreamProtocol()
proto.connection_made(unittest.mock.Mock())
proto.reader = unittest.mock.Mock()
proto.data_received(b'data')
proto.reader.feed_data.assert_called_with(b'data')
def test_drain_waiter(self):
proto = parsers.StreamProtocol(loop=unittest.mock.Mock())
proto._paused = False
self.assertEqual(proto._make_drain_waiter(), ())
proto._paused = True
fut = proto._make_drain_waiter()
self.assertIsInstance(fut, asyncio.Future)
fut2 = proto._make_drain_waiter()
self.assertIs(fut, fut2)
class ParserBufferTests(unittest.TestCase):
def setUp(self):
self.stream = unittest.mock.Mock()
self.loop = asyncio.new_event_loop()
asyncio.set_event_loop(None)
def tearDown(self):
self.loop.close()
def _make_one(self):
return parsers.ParserBuffer()
def test_feed_data(self):
buf = self._make_one()
buf.feed_data(b'')
self.assertEqual(len(buf), 0)
buf.feed_data(b'data')
self.assertEqual(len(buf), 4)
self.assertEqual(bytes(buf), b'data')
def test_read_exc(self):
buf = self._make_one()
exc = ValueError()
buf.set_exception(exc)
self.assertIs(buf.exception(), exc)
p = buf.read(3)
next(p)
self.assertRaises(ValueError, p.send, b'1')
def test_read(self):
buf = self._make_one()
p = buf.read(3)
next(p)
p.send(b'1')
try:
p.send(b'234')
except StopIteration as exc:
res = exc.value
self.assertEqual(res, b'123')
self.assertEqual(b'4', bytes(buf))
def test_readsome(self):
buf = self._make_one()
p = buf.readsome(3)
next(p)
try:
p.send(b'1')
except StopIteration as exc:
res = exc.value
self.assertEqual(res, b'1')
p = buf.readsome(2)
next(p)
try:
p.send(b'234')
except StopIteration as exc:
res = exc.value
self.assertEqual(res, b'23')
self.assertEqual(b'4', bytes(buf))
def test_wait(self):
buf = self._make_one()
p = buf.wait(3)
next(p)
p.send(b'1')
try:
p.send(b'234')
except StopIteration as exc:
res = exc.value
self.assertEqual(res, b'123')
self.assertEqual(b'1234', bytes(buf))
def test_skip(self):
buf = self._make_one()
p = buf.skip(3)
next(p)
p.send(b'1')
try:
p.send(b'234')
except StopIteration as exc:
res = exc.value
self.assertIsNone(res)
self.assertEqual(b'4', bytes(buf))
def test_readuntil_limit(self):
buf = self._make_one()
p = buf.readuntil(b'\n', 4)
next(p)
p.send(b'1')
p.send(b'234')
self.assertRaises(errors.LineLimitExceededParserError, p.send, b'5')
buf = parsers.ParserBuffer()
p = buf.readuntil(b'\n', 4)
next(p)
self.assertRaises(
errors.LineLimitExceededParserError, p.send, b'12345\n6')
buf = parsers.ParserBuffer()
p = buf.readuntil(b'\n', 4)
next(p)
self.assertRaises(
errors.LineLimitExceededParserError, p.send, b'12345\n6')
def test_readuntil(self):
buf = self._make_one()
p = buf.readuntil(b'\n', 4)
next(p)
p.send(b'123')
try:
p.send(b'\n456')
except StopIteration as exc:
res = exc.value
self.assertEqual(res, b'123\n')
self.assertEqual(b'456', bytes(buf))
def test_waituntil_limit(self):
buf = self._make_one()
p = buf.waituntil(b'\n', 4)
next(p)
p.send(b'1')
p.send(b'234')
self.assertRaises(errors.LineLimitExceededParserError, p.send, b'5')
buf = parsers.ParserBuffer()
p = buf.waituntil(b'\n', 4)
next(p)
self.assertRaises(
errors.LineLimitExceededParserError, p.send, b'12345\n6')
buf = parsers.ParserBuffer()
p = buf.waituntil(b'\n', 4)
next(p)
self.assertRaises(
errors.LineLimitExceededParserError, p.send, b'12345\n6')
def test_waituntil(self):
buf = self._make_one()
p = buf.waituntil(b'\n', 4)
next(p)
p.send(b'123')
try:
p.send(b'\n456')
except StopIteration as exc:
res = exc.value
self.assertEqual(res, b'123\n')
self.assertEqual(b'123\n456', bytes(buf))
def test_skipuntil(self):
buf = self._make_one()
p = buf.skipuntil(b'\n')
next(p)
p.send(b'123')
try:
p.send(b'\n456\n')
except StopIteration:
pass
self.assertEqual(b'456\n', bytes(buf))
p = buf.skipuntil(b'\n')
try:
next(p)
except StopIteration:
pass
self.assertEqual(b'', bytes(buf))
def test_lines_parser(self):
out = parsers.FlowControlDataQueue(self.stream, loop=self.loop)
buf = self._make_one()
p = parsers.LinesParser()(out, buf)
next(p)
for d in (b'line1', b'\r\n', b'lin', b'e2\r', b'\ndata'):
p.send(d)
self.assertEqual(
[bytearray(b'line1\r\n'), bytearray(b'line2\r\n')],
list(out._buffer))
try:
p.throw(parsers.EofStream())
except StopIteration:
pass
self.assertEqual(bytes(buf), b'data')
def test_chunks_parser(self):
out = parsers.FlowControlDataQueue(self.stream, loop=self.loop)
buf = self._make_one()
p = parsers.ChunksParser(5)(out, buf)
next(p)
for d in (b'line1', b'lin', b'e2d', b'ata'):
p.send(d)
self.assertEqual(
[bytearray(b'line1'), bytearray(b'line2')], list(out._buffer))
try:
p.throw(parsers.EofStream())
except StopIteration:
pass
self.assertEqual(bytes(buf), b'data')
|
DirectXMan12/nova-hacking
|
nova/virt/powervm/common.py
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import ftplib
import os
import uuid
import paramiko
from nova.openstack.common import log as logging
from nova.openstack.common import processutils
from nova.virt.powervm import constants
from nova.virt.powervm import exception
LOG = logging.getLogger(__name__)
class Connection(object):
def __init__(self, host, username, password, port=22, keyfile=None):
self.host = host
self.username = username
self.password = password
self.port = port
self.keyfile = keyfile
def ssh_connect(connection):
"""Method to connect to remote system using ssh protocol.
:param connection: a Connection object.
:returns: paramiko.SSHClient -- an active ssh connection.
:raises: PowerVMConnectionFailed
"""
try:
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(connection.host,
username=connection.username,
password=connection.password,
port=connection.port,
key_filename=connection.keyfile,
timeout=constants.POWERVM_CONNECTION_TIMEOUT)
LOG.debug("SSH connection with %s established successfully." %
connection.host)
# send TCP keepalive packets every 20 seconds
ssh.get_transport().set_keepalive(20)
return ssh
except Exception:
        LOG.exception(_('Connection error connecting to PowerVM manager'))
raise exception.PowerVMConnectionFailed()
def check_connection(ssh, connection):
"""
Checks the SSH connection to see if the transport is valid.
If the connection is dead, a new connection is created and returned.
:param ssh: an existing paramiko.SSHClient connection.
:param connection: a Connection object.
:returns: paramiko.SSHClient -- an active ssh connection.
:raises: PowerVMConnectionFailed -- if the ssh connection fails.
"""
# if the ssh client is not set or the transport is dead, re-connect
if (ssh is None or
ssh.get_transport() is None or
not ssh.get_transport().is_active()):
LOG.debug("Connection to host %s will be established." %
connection.host)
ssh = ssh_connect(connection)
return ssh
def ssh_command_as_root(ssh_connection, cmd, check_exit_code=True):
"""Method to execute remote command as root.
:param connection: an active paramiko.SSHClient connection.
:param command: string containing the command to run.
:returns: Tuple -- a tuple of (stdout, stderr)
:raises: processutils.ProcessExecutionError
"""
LOG.debug(_('Running cmd (SSH-as-root): %s') % cmd)
chan = ssh_connection._transport.open_session()
# This command is required to be executed
# in order to become root.
chan.exec_command('ioscli oem_setup_env')
bufsize = -1
stdin = chan.makefile('wb', bufsize)
stdout = chan.makefile('rb', bufsize)
stderr = chan.makefile_stderr('rb', bufsize)
# We run the command and then call 'exit' to exit from
# super user environment.
stdin.write('%s\n%s\n' % (cmd, 'exit'))
stdin.flush()
exit_status = chan.recv_exit_status()
    # Let's handle the error just like processutils.ssh_execute does.
if exit_status != -1:
LOG.debug(_('Result was %s') % exit_status)
if check_exit_code and exit_status != 0:
# TODO(mikal): I know this is weird, but it needs to be consistent
# with processutils.execute. I will move this method to oslo in
# a later commit.
raise processutils.ProcessExecutionError(exit_code=exit_status,
stdout=stdout,
stderr=stderr,
cmd=''.join(cmd))
return (stdout, stderr)
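# A hedged usage sketch (host, credentials and command are assumptions, not
# part of this module): connect once, then run a VIOS command as root and
# read its output.
#
#   ssh = ssh_connect(Connection('vios1.example.com', 'padmin', 'secret'))
#   stdout, stderr = ssh_command_as_root(ssh, 'lsmap -all')
#   print stdout.read()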
def ftp_put_command(connection, local_path, remote_dir):
"""Method to transfer a file via ftp.
:param connection: a Connection object.
:param local_path: path to the local file
:param remote_dir: path to remote destination
    :raises: PowerVMFTPTransferFailed
"""
try:
ftp = ftplib.FTP(host=connection.host,
user=connection.username,
passwd=connection.password)
ftp.cwd(remote_dir)
name = os.path.split(local_path)[1]
f = open(local_path, "rb")
ftp.storbinary("STOR " + name, f)
f.close()
ftp.close()
except Exception:
LOG.error(_('File transfer to PowerVM manager failed'))
raise exception.PowerVMFTPTransferFailed(ftp_cmd='PUT',
source_path=local_path, dest_path=remote_dir)
def ftp_get_command(connection, remote_path, local_path):
"""Retrieve a file via FTP
:param connection: a Connection object.
:param remote_path: path to the remote file
:param local_path: path to local destination
    :raises: PowerVMFTPTransferFailed
"""
try:
ftp = ftplib.FTP(host=connection.host,
user=connection.username,
passwd=connection.password)
ftp.cwd(os.path.dirname(remote_path))
name = os.path.basename(remote_path)
LOG.debug(_("ftp GET %(remote_path)s to: %(local_path)s") % locals())
        with open(local_path, 'wb') as ftpfile:
ftpcmd = 'RETR %s' % name
ftp.retrbinary(ftpcmd, ftpfile.write)
ftp.close()
except Exception:
LOG.error(_("File transfer from PowerVM manager failed"))
raise exception.PowerVMFTPTransferFailed(ftp_cmd='GET',
source_path=remote_path, dest_path=local_path)
def aix_path_join(path_one, path_two):
"""Ensures file path is built correctly for remote UNIX system
:param path_one: string of the first file path
:param path_two: string of the second file path
:returns: a uniform path constructed from both strings
"""
if path_one.endswith('/'):
path_one = path_one.rstrip('/')
if path_two.startswith('/'):
path_two = path_two.lstrip('/')
final_path = path_one + '/' + path_two
return final_path
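# Illustrative behaviour (path values are assumptions): the separator between
# the two fragments is normalized regardless of trailing/leading slashes.
#
#   aix_path_join('/home/padmin/', '/images/disk.img')
#   # -> '/home/padmin/images/disk.img'
#   aix_path_join('/home/padmin', 'images/disk.img')
#   # -> '/home/padmin/images/disk.img'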
@contextlib.contextmanager
def vios_to_vios_auth(source, dest, conn_info):
"""Context allowing for SSH between VIOS partitions
This context will build an SSH key on the source host, put the key
into the authorized_keys on the destination host, and make the
private key file name available within the context.
The key files and key inserted into authorized_keys will be
removed when the context exits.
:param source: source IP or DNS name
:param dest: destination IP or DNS name
:param conn_info: dictionary object with SSH connection
information for both hosts
"""
KEY_BASE_NAME = "os-%s" % uuid.uuid4().hex
keypair_uuid = uuid.uuid4()
src_conn_obj = ssh_connect(conn_info)
dest_conn_info = Connection(dest, conn_info.username,
conn_info.password)
dest_conn_obj = ssh_connect(dest_conn_info)
def run_command(conn_obj, cmd):
stdout, stderr = processutils.ssh_execute(conn_obj, cmd)
return stdout.strip().splitlines()
def build_keypair_on_source():
mkkey = ('ssh-keygen -f %s -N "" -C %s' %
(KEY_BASE_NAME, keypair_uuid.hex))
ssh_command_as_root(src_conn_obj, mkkey)
chown_key = ('chown %s %s*' % (conn_info.username, KEY_BASE_NAME))
ssh_command_as_root(src_conn_obj, chown_key)
cat_key = ('cat %s.pub' % KEY_BASE_NAME)
pubkey = run_command(src_conn_obj, cat_key)
return pubkey[0]
def cleanup_key_on_source():
rmkey = 'rm %s*' % KEY_BASE_NAME
run_command(src_conn_obj, rmkey)
def insert_into_authorized_keys(public_key):
echo_key = 'echo "%s" >> .ssh/authorized_keys' % public_key
ssh_command_as_root(dest_conn_obj, echo_key)
def remove_from_authorized_keys():
        # redirecting sed's output onto its own input would truncate the
        # file before it is read, so write to a temp file and move it back
        rmkey = ('sed /%s/d .ssh/authorized_keys > .ssh/authorized_keys.tmp'
                 ' && mv .ssh/authorized_keys.tmp .ssh/authorized_keys' %
                 keypair_uuid.hex)
ssh_command_as_root(dest_conn_obj, rmkey)
public_key = build_keypair_on_source()
insert_into_authorized_keys(public_key)
try:
yield KEY_BASE_NAME
finally:
remove_from_authorized_keys()
cleanup_key_on_source()
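# A hypothetical usage sketch (host names and credentials are assumptions):
# the context yields the private key base name so the caller can ssh/scp
# between the two VIOS partitions; the key pair and the authorized_keys
# entry are removed again when the block exits.
#
#   conn_info = Connection('vios1.example.com', 'padmin', 'secret')
#   with vios_to_vios_auth('vios1.example.com', 'vios2.example.com',
#                          conn_info) as key_name:
#       pass  # e.g. run 'scp -i <key_name> <file> vios2:/tmp/' on the source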
|
queria/my-tempest
|
tempest/api/object_storage/test_account_services.py
|
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import random
from six import moves
import testtools
from tempest.api.object_storage import base
from tempest import clients
from tempest.common import custom_matchers
from tempest.common.utils import data_utils
from tempest import config
from tempest import test
CONF = config.CONF
class AccountTest(base.BaseObjectTest):
containers = []
@classmethod
def resource_setup(cls):
super(AccountTest, cls).resource_setup()
for i in moves.xrange(ord('a'), ord('f') + 1):
name = data_utils.rand_name(name='%s-' % chr(i))
cls.container_client.create_container(name)
cls.containers.append(name)
cls.containers_count = len(cls.containers)
@classmethod
def resource_cleanup(cls):
cls.delete_containers(cls.containers)
super(AccountTest, cls).resource_cleanup()
@test.attr(type='smoke')
def test_list_containers(self):
# list of all containers should not be empty
resp, container_list = self.account_client.list_account_containers()
self.assertHeaders(resp, 'Account', 'GET')
self.assertIsNotNone(container_list)
for container_name in self.containers:
self.assertIn(container_name, container_list)
@test.attr(type='smoke')
def test_list_no_containers(self):
# List request to empty account
# To test listing no containers, create new user other than
# the base user of this instance.
self.data.setup_test_user()
os_test_user = clients.Manager(self.data.test_credentials)
resp, container_list = \
os_test_user.account_client.list_account_containers()
self.assertIn(int(resp['status']), test.HTTP_SUCCESS)
# When sending a request to an account which has not received a PUT
# container request, the response does not contain 'accept-ranges'
# header. This is a special case, therefore the existence of response
# headers is checked without custom matcher.
self.assertIn('content-length', resp)
self.assertIn('x-timestamp', resp)
self.assertIn('x-account-bytes-used', resp)
self.assertIn('x-account-container-count', resp)
self.assertIn('x-account-object-count', resp)
self.assertIn('content-type', resp)
self.assertIn('x-trans-id', resp)
self.assertIn('date', resp)
# Check only the format of common headers with custom matcher
self.assertThat(resp, custom_matchers.AreAllWellFormatted())
self.assertEqual(len(container_list), 0)
@test.attr(type='smoke')
def test_list_containers_with_format_json(self):
# list containers setting format parameter to 'json'
params = {'format': 'json'}
resp, container_list = self.account_client.list_account_containers(
params=params)
self.assertIn(int(resp['status']), test.HTTP_SUCCESS)
self.assertHeaders(resp, 'Account', 'GET')
self.assertIsNotNone(container_list)
self.assertTrue([c['name'] for c in container_list])
self.assertTrue([c['count'] for c in container_list])
self.assertTrue([c['bytes'] for c in container_list])
@test.attr(type='smoke')
def test_list_containers_with_format_xml(self):
# list containers setting format parameter to 'xml'
params = {'format': 'xml'}
resp, container_list = self.account_client.list_account_containers(
params=params)
self.assertIn(int(resp['status']), test.HTTP_SUCCESS)
self.assertHeaders(resp, 'Account', 'GET')
self.assertIsNotNone(container_list)
self.assertEqual(container_list.tag, 'account')
self.assertTrue('name' in container_list.keys())
self.assertEqual(container_list.find(".//container").tag, 'container')
self.assertEqual(container_list.find(".//name").tag, 'name')
self.assertEqual(container_list.find(".//count").tag, 'count')
self.assertEqual(container_list.find(".//bytes").tag, 'bytes')
@test.attr(type='smoke')
@testtools.skipIf(
not CONF.object_storage_feature_enabled.discoverability,
'Discoverability function is disabled')
def test_list_extensions(self):
resp, extensions = self.account_client.list_extensions()
self.assertIn(int(resp['status']), test.HTTP_SUCCESS)
self.assertThat(resp, custom_matchers.AreAllWellFormatted())
@test.attr(type='smoke')
def test_list_containers_with_limit(self):
# list containers one of them, half of them then all of them
for limit in (1, self.containers_count / 2, self.containers_count):
params = {'limit': limit}
resp, container_list = \
self.account_client.list_account_containers(params=params)
self.assertHeaders(resp, 'Account', 'GET')
self.assertEqual(len(container_list), limit)
@test.attr(type='smoke')
def test_list_containers_with_marker(self):
# list containers using marker param
# first expect to get 0 container as we specified last
# the container as marker
# second expect to get the bottom half of the containers
params = {'marker': self.containers[-1]}
resp, container_list = \
self.account_client.list_account_containers(params=params)
self.assertHeaders(resp, 'Account', 'GET')
self.assertEqual(len(container_list), 0)
params = {'marker': self.containers[self.containers_count / 2]}
resp, container_list = \
self.account_client.list_account_containers(params=params)
self.assertHeaders(resp, 'Account', 'GET')
self.assertEqual(len(container_list), self.containers_count / 2 - 1)
@test.attr(type='smoke')
def test_list_containers_with_end_marker(self):
# list containers using end_marker param
# first expect to get 0 container as we specified first container as
# end_marker
# second expect to get the top half of the containers
params = {'end_marker': self.containers[0]}
resp, container_list = \
self.account_client.list_account_containers(params=params)
self.assertHeaders(resp, 'Account', 'GET')
self.assertEqual(len(container_list), 0)
params = {'end_marker': self.containers[self.containers_count / 2]}
resp, container_list = \
self.account_client.list_account_containers(params=params)
self.assertHeaders(resp, 'Account', 'GET')
self.assertEqual(len(container_list), self.containers_count / 2)
@test.attr(type='smoke')
def test_list_containers_with_marker_and_end_marker(self):
# list containers combining marker and end_marker param
params = {'marker': self.containers[0],
'end_marker': self.containers[self.containers_count - 1]}
resp, container_list = self.account_client.list_account_containers(
params=params)
self.assertIn(int(resp['status']), test.HTTP_SUCCESS)
self.assertHeaders(resp, 'Account', 'GET')
self.assertEqual(len(container_list), self.containers_count - 2)
@test.attr(type='smoke')
def test_list_containers_with_limit_and_marker(self):
# list containers combining marker and limit param
        # results are always limited by the limit whatever the marker
        # random.choice returns a single name, so iterating it would walk
        # its characters; iterate the container names themselves instead
        for marker in self.containers:
limit = random.randint(0, self.containers_count - 1)
params = {'marker': marker,
'limit': limit}
resp, container_list = \
self.account_client.list_account_containers(params=params)
self.assertHeaders(resp, 'Account', 'GET')
self.assertTrue(len(container_list) <= limit, str(container_list))
@test.attr(type='smoke')
def test_list_containers_with_limit_and_end_marker(self):
# list containers combining limit and end_marker param
limit = random.randint(1, self.containers_count)
params = {'limit': limit,
'end_marker': self.containers[self.containers_count / 2]}
resp, container_list = self.account_client.list_account_containers(
params=params)
self.assertIn(int(resp['status']), test.HTTP_SUCCESS)
self.assertHeaders(resp, 'Account', 'GET')
self.assertEqual(len(container_list),
min(limit, self.containers_count / 2))
@test.attr(type='smoke')
def test_list_containers_with_limit_and_marker_and_end_marker(self):
# list containers combining limit, marker and end_marker param
limit = random.randint(1, self.containers_count)
params = {'limit': limit,
'marker': self.containers[0],
'end_marker': self.containers[self.containers_count - 1]}
resp, container_list = self.account_client.list_account_containers(
params=params)
self.assertIn(int(resp['status']), test.HTTP_SUCCESS)
self.assertHeaders(resp, 'Account', 'GET')
self.assertEqual(len(container_list),
min(limit, self.containers_count - 2))
@test.attr(type='smoke')
def test_list_account_metadata(self):
# list all account metadata
# set metadata to account
metadata = {'test-account-meta1': 'Meta1',
'test-account-meta2': 'Meta2'}
resp, _ = self.account_client.create_account_metadata(metadata)
resp, _ = self.account_client.list_account_metadata()
self.assertIn(int(resp['status']), test.HTTP_SUCCESS)
self.assertHeaders(resp, 'Account', 'HEAD')
self.assertIn('x-account-meta-test-account-meta1', resp)
self.assertIn('x-account-meta-test-account-meta2', resp)
self.account_client.delete_account_metadata(metadata)
@test.attr(type='smoke')
def test_list_no_account_metadata(self):
# list no account metadata
resp, _ = self.account_client.list_account_metadata()
self.assertIn(int(resp['status']), test.HTTP_SUCCESS)
self.assertHeaders(resp, 'Account', 'HEAD')
self.assertNotIn('x-account-meta-', str(resp))
@test.attr(type='smoke')
def test_update_account_metadata_with_create_metadata(self):
# add metadata to account
metadata = {'test-account-meta1': 'Meta1'}
resp, _ = self.account_client.create_account_metadata(metadata)
self.assertIn(int(resp['status']), test.HTTP_SUCCESS)
self.assertHeaders(resp, 'Account', 'POST')
resp, body = self.account_client.list_account_metadata()
self.assertIn('x-account-meta-test-account-meta1', resp)
self.assertEqual(resp['x-account-meta-test-account-meta1'],
metadata['test-account-meta1'])
self.account_client.delete_account_metadata(metadata)
@test.attr(type='smoke')
    def test_update_account_metadata_with_delete_metadata(self):
# delete metadata from account
metadata = {'test-account-meta1': 'Meta1'}
self.account_client.create_account_metadata(metadata)
resp, _ = self.account_client.delete_account_metadata(metadata)
self.assertIn(int(resp['status']), test.HTTP_SUCCESS)
self.assertHeaders(resp, 'Account', 'POST')
resp, _ = self.account_client.list_account_metadata()
self.assertNotIn('x-account-meta-test-account-meta1', resp)
@test.attr(type='smoke')
    def test_update_account_metadata_with_create_metadata_key(self):
# if the value of metadata is not set, the metadata is not
# registered at a server
metadata = {'test-account-meta1': ''}
resp, _ = self.account_client.create_account_metadata(metadata)
self.assertIn(int(resp['status']), test.HTTP_SUCCESS)
self.assertHeaders(resp, 'Account', 'POST')
resp, _ = self.account_client.list_account_metadata()
self.assertNotIn('x-account-meta-test-account-meta1', resp)
@test.attr(type='smoke')
    def test_update_account_metadata_with_delete_metadata_key(self):
# Although the value of metadata is not set, the feature of
# deleting metadata is valid
metadata_1 = {'test-account-meta1': 'Meta1'}
self.account_client.create_account_metadata(metadata_1)
metadata_2 = {'test-account-meta1': ''}
resp, _ = self.account_client.delete_account_metadata(metadata_2)
self.assertIn(int(resp['status']), test.HTTP_SUCCESS)
self.assertHeaders(resp, 'Account', 'POST')
resp, _ = self.account_client.list_account_metadata()
self.assertNotIn('x-account-meta-test-account-meta1', resp)
@test.attr(type='smoke')
def test_update_account_metadata_with_create_and_delete_metadata(self):
# Send a request adding and deleting metadata requests simultaneously
metadata_1 = {'test-account-meta1': 'Meta1'}
self.account_client.create_account_metadata(metadata_1)
metadata_2 = {'test-account-meta2': 'Meta2'}
resp, body = self.account_client.create_and_delete_account_metadata(
metadata_2,
metadata_1)
self.assertIn(int(resp['status']), test.HTTP_SUCCESS)
self.assertHeaders(resp, 'Account', 'POST')
resp, _ = self.account_client.list_account_metadata()
self.assertNotIn('x-account-meta-test-account-meta1', resp)
self.assertIn('x-account-meta-test-account-meta2', resp)
self.assertEqual(resp['x-account-meta-test-account-meta2'],
metadata_2['test-account-meta2'])
self.account_client.delete_account_metadata(metadata_2)
|
USGSDenverPychron/pychron
|
pychron/hardware/newport/newport_group.py
|
# ===============================================================================
# Copyright 2011 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
from traits.api import Float, Tuple
# ============= standard library imports ========================
# ============= local library imports ==========================
from pychron.core.helpers.strtools import csv_to_ints
from pychron.hardware.axis import Axis
MAPPING = dict(
acceleration="HA",
deceleration="HD",
# emergency_deceleration = 'HE',
jerk="HJ",
velocity="HV",
axes="HN",
)
class NewportGroup(Axis):
# acceleration = Float
# deceleration = Float
emergency_deceleration = None
jerk = Float
# velocity = Float
name = "GroupedAxes"
machine_velocity = Float
machine_acceleration = Float
machine_deceleration = Float
axes = Tuple
# calculate_parameters = Bool(True)
id = None
def _set_acceleration(self, v):
self._acceleration = v
def _set_deceleration(self, v):
self._deceleration = v
def _set_velocity(self, v):
self._velocity = v
def load(self, path):
config = self.get_configuration(path)
for attr in [
"acceleration",
"deceleration",
# 'emergency_deceleration',
"jerk",
"velocity",
]:
self.set_attribute(config, attr, "General", attr, cast="float")
self.set_attribute(config, "id", "General", "id", cast="int")
axes = self.config_get(config, "General", "axes")
self.axes = tuple(csv_to_ints(axes))
self.nominal_velocity = self.velocity
self.nominal_acceleration = self.acceleration
self.nominal_deceleration = self.deceleration
def build_command(self, new_group):
cmds = []
for key, value in MAPPING.items():
if key is not "axes":
cmds.append("{}{}{:0.5f}".format(self.id, value, getattr(self, key)))
if new_group:
gid = "{:n}HN{}".format(self.id, ",".join(map(str, self.axes)))
cmds = [gid] + cmds
return ";".join(cmds)
# ============= EOF ==============================================
|
0--key/lib
|
portfolio/Python/scrapy/merckgroup/simplysupplements.py
|
import re
import os
from scrapy.spider import BaseSpider
from scrapy.selector import HtmlXPathSelector
from scrapy.http import Request, HtmlResponse
from scrapy.utils.response import get_base_url
from scrapy.utils.url import urljoin_rfc
from urllib import urlencode
import hashlib
import csv
from product_spiders.items import Product, ProductLoaderWithNameStrip\
as ProductLoader
from scrapy import log
HERE = os.path.abspath(os.path.dirname(__file__))
class SimplySupplementsSpider(BaseSpider):
name = 'simplysupplements.net-merckgroup'
allowed_domains = ['www.simplysupplements.net', 'simplysupplements.net']
start_urls = ('http://www.simplysupplements.net/product-a-to-z/',)
def parse(self, response):
if not isinstance(response, HtmlResponse):
return
hxs = HtmlXPathSelector(response)
# getting product links from A-Z product list
links = hxs.select('//ul[@id="product-a-to-z"]/li/a/@href').extract()
for prod_url in links:
url = urljoin_rfc(get_base_url(response), prod_url)
yield Request(url)
# products
for product in self.parse_product(response):
yield product
def parse_product(self, response):
if not isinstance(response, HtmlResponse):
return
hxs = HtmlXPathSelector(response)
name = hxs.select('//div[@class="innercol"]/h1/text()').extract()
if name:
url = response.url
url = urljoin_rfc(get_base_url(response), url)
skus = hxs.select('//td[@class="size"]/strong/text()').extract()
prices = hxs.select('//td[@class="price"]/text()').extract()
skus_prices = zip(skus, prices)
for sku, price in skus_prices:
loader = ProductLoader(item=Product(), selector=hxs)
loader.add_value('url', url)
loader.add_value('name', name[0].strip() + ' ' + sku.strip(':'))
#loader.add_value('sku', sku)
loader.add_value('price', price)
yield loader.load_item()
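# Illustrative item (field values are assumptions): each size/price row on a
# product page yields one Product, e.g.
#   {'url': 'http://www.simplysupplements.net/some-product',
#    'name': 'Some Product 60 Capsules',
#    'price': '9.99'}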
|
cwolferh/heat-scratch
|
heat/engine/resources/openstack/neutron/lbaas/listener.py
|
#
# Copyright 2015 IBM Corp.
#
# All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from heat.common import exception
from heat.common.i18n import _
from heat.engine import attributes
from heat.engine import constraints
from heat.engine import properties
from heat.engine.resources.openstack.neutron import neutron
from heat.engine import support
from heat.engine import translation
class Listener(neutron.NeutronResource):
"""A resource for managing LBaaS v2 Listeners.
This resource creates and manages Neutron LBaaS v2 Listeners,
    which represent a listening endpoint for the VIP.
"""
support_status = support.SupportStatus(version='6.0.0')
required_service_extension = 'lbaasv2'
PROPERTIES = (
PROTOCOL_PORT, PROTOCOL, LOADBALANCER, NAME,
ADMIN_STATE_UP, DESCRIPTION, DEFAULT_TLS_CONTAINER_REF,
SNI_CONTAINER_REFS, CONNECTION_LIMIT, TENANT_ID
) = (
'protocol_port', 'protocol', 'loadbalancer', 'name',
'admin_state_up', 'description', 'default_tls_container_ref',
'sni_container_refs', 'connection_limit', 'tenant_id'
)
PROTOCOLS = (
TCP, HTTP, HTTPS, TERMINATED_HTTPS,
) = (
'TCP', 'HTTP', 'HTTPS', 'TERMINATED_HTTPS',
)
ATTRIBUTES = (
LOADBALANCERS_ATTR, DEFAULT_POOL_ID_ATTR
) = (
'loadbalancers', 'default_pool_id'
)
properties_schema = {
PROTOCOL_PORT: properties.Schema(
properties.Schema.INTEGER,
_('TCP or UDP port on which to listen for client traffic.'),
required=True,
constraints=[
constraints.Range(1, 65535),
]
),
PROTOCOL: properties.Schema(
properties.Schema.STRING,
_('Protocol on which to listen for the client traffic.'),
required=True,
constraints=[
constraints.AllowedValues(PROTOCOLS),
]
),
LOADBALANCER: properties.Schema(
properties.Schema.STRING,
_('ID or name of the load balancer with which listener '
'is associated.'),
required=True,
constraints=[
constraints.CustomConstraint('neutron.lbaas.loadbalancer')
]
),
NAME: properties.Schema(
properties.Schema.STRING,
_('Name of this listener.'),
update_allowed=True
),
ADMIN_STATE_UP: properties.Schema(
properties.Schema.BOOLEAN,
_('The administrative state of this listener.'),
update_allowed=True,
default=True
),
DESCRIPTION: properties.Schema(
properties.Schema.STRING,
_('Description of this listener.'),
update_allowed=True,
default=''
),
DEFAULT_TLS_CONTAINER_REF: properties.Schema(
properties.Schema.STRING,
_('Default TLS container reference to retrieve TLS '
'information.'),
update_allowed=True
),
SNI_CONTAINER_REFS: properties.Schema(
properties.Schema.LIST,
_('List of TLS container references for SNI.'),
update_allowed=True
),
CONNECTION_LIMIT: properties.Schema(
properties.Schema.INTEGER,
_('The maximum number of connections permitted for this '
'load balancer. Defaults to -1, which is infinite.'),
update_allowed=True,
default=-1,
constraints=[
constraints.Range(min=-1),
]
),
TENANT_ID: properties.Schema(
properties.Schema.STRING,
_('The ID of the tenant who owns the listener.')
),
}
attributes_schema = {
LOADBALANCERS_ATTR: attributes.Schema(
_('ID of the load balancer this listener is associated to.'),
type=attributes.Schema.LIST
),
DEFAULT_POOL_ID_ATTR: attributes.Schema(
_('ID of the default pool this listener is associated to.'),
type=attributes.Schema.STRING
)
}
def translation_rules(self, props):
return [
translation.TranslationRule(
props,
translation.TranslationRule.RESOLVE,
[self.LOADBALANCER],
client_plugin=self.client_plugin(),
finder='find_resourceid_by_name_or_id',
entity='loadbalancer'
),
]
def validate(self):
res = super(Listener, self).validate()
if res:
return res
if self.properties[self.PROTOCOL] == self.TERMINATED_HTTPS:
if self.properties[self.DEFAULT_TLS_CONTAINER_REF] is None:
msg = (_('Property %(ref)s required when protocol is '
'%(term)s.') % {'ref': self.DEFAULT_TLS_CONTAINER_REF,
'term': self.TERMINATED_HTTPS})
raise exception.StackValidationFailed(message=msg)
def _check_lb_status(self):
lb_id = self.properties[self.LOADBALANCER]
return self.client_plugin().check_lb_status(lb_id)
def handle_create(self):
properties = self.prepare_properties(
self.properties,
self.physical_resource_name())
properties['loadbalancer_id'] = properties.pop(self.LOADBALANCER)
return properties
def check_create_complete(self, properties):
if self.resource_id is None:
try:
listener = self.client().create_listener(
{'listener': properties})['listener']
self.resource_id_set(listener['id'])
except Exception as ex:
if self.client_plugin().is_invalid(ex):
return False
raise
return self._check_lb_status()
def _show_resource(self):
return self.client().show_listener(
self.resource_id)['listener']
def handle_update(self, json_snippet, tmpl_diff, prop_diff):
self._update_called = False
return prop_diff
def check_update_complete(self, prop_diff):
if not prop_diff:
return True
if not self._update_called:
try:
self.client().update_listener(self.resource_id,
{'listener': prop_diff})
self._update_called = True
except Exception as ex:
if self.client_plugin().is_invalid(ex):
return False
raise
return self._check_lb_status()
def handle_delete(self):
self._delete_called = False
def check_delete_complete(self, data):
if self.resource_id is None:
return True
if not self._delete_called:
try:
self.client().delete_listener(self.resource_id)
self._delete_called = True
except Exception as ex:
if self.client_plugin().is_invalid(ex):
return False
elif self.client_plugin().is_not_found(ex):
return True
raise
return self._check_lb_status()
def resource_mapping():
return {
'OS::Neutron::LBaaS::Listener': Listener,
}
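# A minimal HOT template sketch using this resource (all property values are
# assumptions; 'my_lb' must name an existing LBaaS v2 load balancer, and
# default_tls_container_ref would also be required for TERMINATED_HTTPS):
#
#   resources:
#     listener:
#       type: OS::Neutron::LBaaS::Listener
#       properties:
#         loadbalancer: my_lb
#         protocol: HTTPS
#         protocol_port: 443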
|
DLR-SC/tigl
|
thirdparty/nsiqcppstyle/nsiqunittest/nsiqcppstyle_unittestbase.py
|
# Copyright (c) 2009 NHN Inc. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of NHN Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import nsiqcppstyle_checker
import unittest
import nsiqcppstyle_rulemanager
import nsiqcppstyle_reporter
import nsiqcppstyle_state
errors = []
def AddError(err):
errors.append(err)
def CheckErrorContent(msg):
    for err in errors:
        if err[1] == msg:
return True
return False
def MockError(token, category, message):
AddError((token, category, message))
print token, category, message
class nct(unittest.TestCase):
def setUp(self):
nsiqcppstyle_rulemanager.ruleManager.ResetRules()
nsiqcppstyle_rulemanager.ruleManager.ResetRegisteredRules()
nsiqcppstyle_state._nsiqcppstyle_state.verbose = True
nsiqcppstyle_reporter.Error = MockError
self.setUpRule()
global errors
errors = []
def Analyze(self, filename, data):
nsiqcppstyle_checker.ProcessFile(nsiqcppstyle_rulemanager.ruleManager, filename, data)
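# A typical concrete test sketch (the rule module name is an assumption):
# subclasses import a rule in setUpRule(), feed source text through
# Analyze(), then assert on the collected errors.
#
#   class testMyRule(nct):
#       def setUpRule(self):
#           import rules.some_rule  # hypothetical rule module
#       def testViolation(self):
#           self.Analyze("test.cpp", "void f() {\n}")
#           # CheckErrorContent('<expected message>') can then be asserted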
|
Triv90/SwiftUml
|
swift/common/middleware/bulk.py
|
# Copyright (c) 2013 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import tarfile
from urllib import quote, unquote
from xml.sax import saxutils
from swift.common.swob import Request, HTTPBadGateway, \
HTTPCreated, HTTPBadRequest, HTTPNotFound, HTTPUnauthorized, HTTPOk, \
HTTPPreconditionFailed, HTTPRequestEntityTooLarge, HTTPNotAcceptable, \
wsgify
from swift.common.utils import json, TRUE_VALUES
from swift.common.constraints import check_utf8, MAX_FILE_SIZE
from swift.common.http import HTTP_BAD_REQUEST, HTTP_UNAUTHORIZED, \
HTTP_NOT_FOUND
from swift.common.constraints import MAX_OBJECT_NAME_LENGTH, \
MAX_CONTAINER_NAME_LENGTH
MAX_PATH_LENGTH = MAX_OBJECT_NAME_LENGTH + MAX_CONTAINER_NAME_LENGTH + 2
class CreateContainerError(Exception):
def __init__(self, msg, status_int, status):
self.status_int = status_int
self.status = status
Exception.__init__(self, msg)
ACCEPTABLE_FORMATS = ['text/plain', 'application/json', 'application/xml',
'text/xml']
def get_response_body(data_format, data_dict, error_list):
"""
Returns a properly formatted response body according to format.
:params data_format: resulting format
:params data_dict: generated data about results.
:params error_list: list of quoted filenames that failed
"""
if data_format == 'text/plain':
output = ''
for key in sorted(data_dict.keys()):
output += '%s: %s\n' % (key, data_dict[key])
output += 'Errors:\n'
output += '\n'.join(
['%s, %s' % (name, status)
for name, status in error_list])
return output
if data_format == 'application/json':
data_dict['Errors'] = error_list
return json.dumps(data_dict)
if data_format.endswith('/xml'):
output = '<?xml version="1.0" encoding="UTF-8"?>\n<delete>\n'
for key in sorted(data_dict.keys()):
xml_key = key.replace(' ', '_').lower()
output += '<%s>%s</%s>\n' % (xml_key, data_dict[key], xml_key)
output += '<errors>\n'
output += '\n'.join(
['<object>'
'<name>%s</name><status>%s</status>'
'</object>' % (saxutils.escape(name), status) for
name, status in error_list])
output += '</errors>\n</delete>\n'
return output
raise HTTPNotAcceptable('Invalid output type')
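# Illustrative output (data is made up): for
#   get_response_body('text/plain', {'Number Deleted': 2},
#                     [('/cont/obj', '412 Precondition Failed')])
# the returned body would be:
#   Number Deleted: 2
#   Errors:
#   /cont/obj, 412 Precondition Failed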
class Bulk(object):
"""
Middleware that will do many operations on a single request.
Extract Archive:
Expand tar files into a swift account. Request must be a PUT with the
query parameter ?extract-archive=format specifying the format of archive
file. Accepted formats are tar, tar.gz, and tar.bz2.
For a PUT to the following url:
/v1/AUTH_Account/$UPLOAD_PATH?extract-archive=tar.gz
UPLOAD_PATH is where the files will be expanded to. UPLOAD_PATH can be a
container, a pseudo-directory within a container, or an empty string. The
destination of a file in the archive will be built as follows:
/v1/AUTH_Account/$UPLOAD_PATH/$FILE_PATH
Where FILE_PATH is the file name from the listing in the tar file.
If the UPLOAD_PATH is an empty string, containers will be auto created
accordingly and files in the tar that would not map to any container (files
in the base directory) will be ignored.
Only regular files will be uploaded. Empty directories, symlinks, etc will
not be uploaded.
If all valid files were uploaded successfully will return an HTTPCreated
response. If any files failed to be created will return an HTTPBadGateway
response. In both cases the response body will specify the number of files
successfully uploaded and a list of the files that failed. The return body
will be formatted in the way specified in the request's Accept header.
Acceptable formats are text/plain, application/json, application/xml, and
text/xml.
There are proxy logs created for each file (which becomes a subrequest) in
    the tar. The subrequest's proxy log will have a swift.source set to "EA"
    and its content length will reflect the unzipped size of the file. If
double proxy-logging is used the leftmost logger will not have a
swift.source set and the content length will reflect the size of the
payload sent to the proxy (the unexpanded size of the tar.gz).
Bulk Delete:
Will delete multiple objects or containers from their account with a
single request. Responds to DELETE requests with query parameter
?bulk-delete set. The Content-Type should be set to text/plain.
The body of the DELETE request will be a newline separated list of url
encoded objects to delete. You can only delete 1000 (configurable) objects
per request. The objects specified in the DELETE request body must be URL
encoded and in the form:
/container_name/obj_name
or for a container (which must be empty at time of delete)
/container_name
If all items were successfully deleted (or did not exist), will return an
HTTPOk. If any failed to delete, will return an HTTPBadGateway. In
both cases the response body will specify the number of items
successfully deleted, not found, and a list of those that failed.
The return body will be formatted in the way specified in the request's
Accept header. Acceptable formats are text/plain, application/json,
application/xml, and text/xml.
There are proxy logs created for each object or container (which becomes a
subrequest) that is deleted. The subrequest's proxy log will have a
swift.source set to "BD" the log's content length of 0. If double
proxy-logging is used the leftmost logger will not have a
swift.source set and the content length will reflect the size of the
payload sent to the proxy (the list of objects/containers to be deleted).
"""
def __init__(self, app, conf):
self.app = app
self.max_containers = int(
conf.get('max_containers_per_extraction', 10000))
self.max_failed_extractions = int(
conf.get('max_failed_extractions', 1000))
self.max_deletes_per_request = int(
conf.get('max_deletes_per_request', 1000))
def create_container(self, req, container_path):
"""
Makes a subrequest to create a new container.
:params container_path: an unquoted path to a container to be created
:returns: None on success
:raises: CreateContainerError on creation error
"""
new_env = req.environ.copy()
new_env['PATH_INFO'] = container_path
new_env['swift.source'] = 'EA'
create_cont_req = Request.blank(container_path, environ=new_env)
resp = create_cont_req.get_response(self.app)
if resp.status_int // 100 != 2:
raise CreateContainerError(
"Create Container Failed: " + container_path,
resp.status_int, resp.status)
def get_objs_to_delete(self, req):
"""
Will populate objs_to_delete with data from request input.
:params req: a Swob request
:returns: a list of the contents of req.body when separated by newline.
:raises: HTTPException on failures
"""
line = ''
data_remaining = True
objs_to_delete = []
if req.content_length is None and \
req.headers.get('transfer-encoding', '').lower() != 'chunked':
raise HTTPBadRequest('Invalid request: no content sent.')
while data_remaining:
if len(objs_to_delete) > self.max_deletes_per_request:
raise HTTPRequestEntityTooLarge(
'Maximum Bulk Deletes: %d per request' %
self.max_deletes_per_request)
if '\n' in line:
obj_to_delete, line = line.split('\n', 1)
objs_to_delete.append(unquote(obj_to_delete))
else:
data = req.body_file.read(MAX_PATH_LENGTH)
if data:
line += data
else:
data_remaining = False
if line.strip():
objs_to_delete.append(unquote(line))
if len(line) > MAX_PATH_LENGTH * 2:
raise HTTPBadRequest('Invalid File Name')
return objs_to_delete
def handle_delete(self, req, objs_to_delete=None, user_agent='BulkDelete',
swift_source='BD'):
"""
:params req: a swob Request
:raises HTTPException: on unhandled errors
:returns: a swob Response
"""
try:
vrs, account, _junk = req.split_path(2, 3, True)
except ValueError:
return HTTPNotFound(request=req)
incoming_format = req.headers.get('Content-Type')
if incoming_format and not incoming_format.startswith('text/plain'):
# For now only accept newline separated object names
return HTTPNotAcceptable(request=req)
out_content_type = req.accept.best_match(ACCEPTABLE_FORMATS)
if not out_content_type:
return HTTPNotAcceptable(request=req)
if objs_to_delete is None:
objs_to_delete = self.get_objs_to_delete(req)
failed_files = []
success_count = not_found_count = 0
failed_file_response_type = HTTPBadRequest
for obj_to_delete in objs_to_delete:
obj_to_delete = obj_to_delete.strip().lstrip('/')
if not obj_to_delete:
continue
delete_path = '/'.join(['', vrs, account, obj_to_delete])
if not check_utf8(delete_path):
failed_files.append([quote(delete_path),
HTTPPreconditionFailed().status])
continue
new_env = req.environ.copy()
new_env['PATH_INFO'] = delete_path
del(new_env['wsgi.input'])
new_env['CONTENT_LENGTH'] = 0
new_env['HTTP_USER_AGENT'] = \
'%s %s' % (req.environ.get('HTTP_USER_AGENT'), user_agent)
new_env['swift.source'] = swift_source
delete_obj_req = Request.blank(delete_path, new_env)
resp = delete_obj_req.get_response(self.app)
if resp.status_int // 100 == 2:
success_count += 1
elif resp.status_int == HTTP_NOT_FOUND:
not_found_count += 1
elif resp.status_int == HTTP_UNAUTHORIZED:
return HTTPUnauthorized(request=req)
else:
if resp.status_int // 100 == 5:
failed_file_response_type = HTTPBadGateway
failed_files.append([quote(delete_path), resp.status])
resp_body = get_response_body(
out_content_type,
{'Number Deleted': success_count,
'Number Not Found': not_found_count},
failed_files)
if (success_count or not_found_count) and not failed_files:
return HTTPOk(resp_body, content_type=out_content_type)
if failed_files:
return failed_file_response_type(
resp_body, content_type=out_content_type)
return HTTPBadRequest('Invalid bulk delete.')
def handle_extract(self, req, compress_type):
"""
:params req: a swob Request
:params compress_type: specifying the compression type of the tar.
                               Accepts '', 'gz', or 'bz2'
:raises HTTPException: on unhandled errors
:returns: a swob response to request
"""
success_count = 0
failed_files = []
existing_containers = set()
out_content_type = req.accept.best_match(ACCEPTABLE_FORMATS)
if not out_content_type:
return HTTPNotAcceptable(request=req)
if req.content_length is None and \
req.headers.get('transfer-encoding', '').lower() != 'chunked':
return HTTPBadRequest('Invalid request: no content sent.')
try:
vrs, account, extract_base = req.split_path(2, 3, True)
except ValueError:
return HTTPNotFound(request=req)
extract_base = extract_base or ''
extract_base = extract_base.rstrip('/')
try:
tar = tarfile.open(mode='r|' + compress_type,
fileobj=req.body_file)
while True:
tar_info = tar.next()
if tar_info is None or \
len(failed_files) >= self.max_failed_extractions:
break
if tar_info.isfile():
obj_path = tar_info.name
if obj_path.startswith('./'):
obj_path = obj_path[2:]
obj_path = obj_path.lstrip('/')
if extract_base:
obj_path = extract_base + '/' + obj_path
if '/' not in obj_path:
continue # ignore base level file
destination = '/'.join(
['', vrs, account, obj_path])
container = obj_path.split('/', 1)[0]
if not check_utf8(destination):
failed_files.append(
[quote(destination[:MAX_PATH_LENGTH]),
HTTPPreconditionFailed().status])
continue
if tar_info.size > MAX_FILE_SIZE:
failed_files.append([
quote(destination[:MAX_PATH_LENGTH]),
HTTPRequestEntityTooLarge().status])
continue
if container not in existing_containers:
try:
self.create_container(
req, '/'.join(['', vrs, account, container]))
existing_containers.add(container)
except CreateContainerError, err:
if err.status_int == HTTP_UNAUTHORIZED:
return HTTPUnauthorized(request=req)
failed_files.append([
quote(destination[:MAX_PATH_LENGTH]),
err.status])
continue
except ValueError:
failed_files.append([
quote(destination[:MAX_PATH_LENGTH]),
HTTP_BAD_REQUEST])
continue
if len(existing_containers) > self.max_containers:
return HTTPBadRequest(
'More than %d base level containers in tar.' %
self.max_containers)
tar_file = tar.extractfile(tar_info)
new_env = req.environ.copy()
new_env['wsgi.input'] = tar_file
new_env['PATH_INFO'] = destination
new_env['CONTENT_LENGTH'] = tar_info.size
new_env['swift.source'] = 'EA'
new_env['HTTP_USER_AGENT'] = \
'%s BulkExpand' % req.environ.get('HTTP_USER_AGENT')
create_obj_req = Request.blank(destination, new_env)
resp = create_obj_req.get_response(self.app)
if resp.status_int // 100 == 2:
success_count += 1
else:
if resp.status_int == HTTP_UNAUTHORIZED:
return HTTPUnauthorized(request=req)
failed_files.append([
quote(destination[:MAX_PATH_LENGTH]), resp.status])
resp_body = get_response_body(
out_content_type,
{'Number Files Created': success_count},
failed_files)
if success_count and not failed_files:
return HTTPCreated(resp_body, content_type=out_content_type)
if failed_files:
return HTTPBadGateway(resp_body, content_type=out_content_type)
return HTTPBadRequest('Invalid Tar File: No Valid Files')
except tarfile.TarError, tar_error:
return HTTPBadRequest('Invalid Tar File: %s' % tar_error)
@wsgify
def __call__(self, req):
extract_type = req.params.get('extract-archive')
if extract_type is not None and req.method == 'PUT':
archive_type = {
'tar': '', 'tar.gz': 'gz',
'tar.bz2': 'bz2'}.get(extract_type.lower().strip('.'))
if archive_type is not None:
return self.handle_extract(req, archive_type)
else:
return HTTPBadRequest("Unsupported archive format")
if 'bulk-delete' in req.params and req.method == 'DELETE':
return self.handle_delete(req)
return self.app
def filter_factory(global_conf, **local_conf):
conf = global_conf.copy()
conf.update(local_conf)
def bulk_filter(app):
return Bulk(app, conf)
return bulk_filter
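# Hedged client-side sketch (endpoint, token and paths are assumptions, not
# part of this module):
#
#   # expand an uploaded archive into a container:
#   curl -X PUT -T files.tar.gz -H 'X-Auth-Token: <token>' \
#       'http://proxy/v1/AUTH_acct/container?extract-archive=tar.gz'
#   # bulk-delete two objects:
#   curl -X DELETE --data-binary $'/cont/obj1\n/cont/obj2' \
#       -H 'Content-Type: text/plain' -H 'X-Auth-Token: <token>' \
#       'http://proxy/v1/AUTH_acct?bulk-delete'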
|
ikargis/horizon_fod
|
horizon/messages.py
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Drop-in replacement for django.contrib.messages which handles Horizon's
messaging needs (e.g. AJAX communication, etc.).
"""
from django.contrib import messages as _messages
from django.contrib.messages import constants
from django.utils.encoding import force_unicode # noqa
from django.utils.safestring import SafeData # noqa
def add_message(request, level, message, extra_tags='', fail_silently=False):
"""Attempts to add a message to the request using the 'messages' app."""
if request.is_ajax():
tag = constants.DEFAULT_TAGS[level]
# if message is marked as safe, pass "safe" tag as extra_tags so that
# client can skip HTML escape for the message when rendering
if isinstance(message, SafeData):
extra_tags = extra_tags + ' safe'
request.horizon['async_messages'].append([tag,
force_unicode(message),
extra_tags])
else:
return _messages.add_message(request, level, message,
extra_tags, fail_silently)
def debug(request, message, extra_tags='', fail_silently=False):
"""Adds a message with the ``DEBUG`` level."""
add_message(request, constants.DEBUG, message, extra_tags=extra_tags,
fail_silently=fail_silently)
def info(request, message, extra_tags='', fail_silently=False):
"""Adds a message with the ``INFO`` level."""
add_message(request, constants.INFO, message, extra_tags=extra_tags,
fail_silently=fail_silently)
def success(request, message, extra_tags='', fail_silently=False):
"""Adds a message with the ``SUCCESS`` level."""
add_message(request, constants.SUCCESS, message, extra_tags=extra_tags,
fail_silently=fail_silently)
def warning(request, message, extra_tags='', fail_silently=False):
"""Adds a message with the ``WARNING`` level."""
add_message(request, constants.WARNING, message, extra_tags=extra_tags,
fail_silently=fail_silently)
def error(request, message, extra_tags='', fail_silently=False):
"""Adds a message with the ``ERROR`` level."""
add_message(request, constants.ERROR, message, extra_tags=extra_tags,
fail_silently=fail_silently)
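# Typical usage from a Django view (the view code itself is an assumption):
#
#   from horizon import messages
#
#   def my_view(request):
#       messages.success(request, 'Instance created.')
#       messages.error(request, 'Unable to create instance.')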
|
djkonro/client-python
|
kubernetes/test/test_v1_git_repo_volume_source.py
|
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.7.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import kubernetes.client
from kubernetes.client.rest import ApiException
from kubernetes.client.models.v1_git_repo_volume_source import V1GitRepoVolumeSource
class TestV1GitRepoVolumeSource(unittest.TestCase):
""" V1GitRepoVolumeSource unit test stubs """
def setUp(self):
pass
def tearDown(self):
pass
def testV1GitRepoVolumeSource(self):
"""
Test V1GitRepoVolumeSource
"""
model = kubernetes.client.models.v1_git_repo_volume_source.V1GitRepoVolumeSource()
if __name__ == '__main__':
unittest.main()
|
meidli/yabgp
|
yabgp/handler/default_handler.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# import json
# replace with simplejson
import simplejson as json
import os
import time
import logging
import traceback
import sys
from oslo_config import cfg
from yabgp.common import constants as bgp_cons
from yabgp.handler import BaseHandler
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
MSG_PROCESS_OPTS = [
cfg.BoolOpt('write_disk',
default=True,
help='Whether the BGP message is written to disk'),
cfg.StrOpt('write_dir',
default=os.path.join(os.environ.get('HOME') or '.', 'data/bgp/'),
help='The BGP messages storage path'),
cfg.IntOpt('write_msg_max_size',
default=500,
help='The Max size of one BGP message file, the unit is MB'),
cfg.BoolOpt('write_keepalive',
default=False,
help='Whether write keepalive message to disk')
]
CONF.register_opts(MSG_PROCESS_OPTS, group='message')
class DefaultHandler(BaseHandler):
def __init__(self):
super(DefaultHandler, self).__init__()
        # {<peer>: (<path>, <current file>)}
        self.peer_files = {}
        # {<peer>: <seq number>}
        self.msg_sequence = {}
def init(self):
if CONF.message.write_disk:
self.init_msg_file(CONF.bgp.running_config['remote_addr'].lower())
def init_msg_file(self, peer_addr):
msg_file_path_for_peer = os.path.join(
CONF.message.write_dir,
peer_addr
)
if not os.path.exists(msg_file_path_for_peer):
os.makedirs(msg_file_path_for_peer)
LOG.info('Create dir %s for peer %s', msg_file_path_for_peer, peer_addr)
LOG.info('BGP message file path is %s', msg_file_path_for_peer)
if msg_file_path_for_peer and peer_addr not in self.peer_files:
msg_path = msg_file_path_for_peer + '/msg/'
if not os.path.exists(msg_path):
os.makedirs(msg_path)
            # try to get the latest file and msg sequence, if any
last_msg_seq, msg_file_name = DefaultHandler.get_last_seq_and_file(msg_path)
if not msg_file_name:
msg_file_name = "%s.msg" % time.time()
# store the message sequence
self.msg_sequence[peer_addr] = last_msg_seq + 1
msg_file = open(os.path.join(msg_path, msg_file_name), 'a')
msg_file.flush()
self.peer_files[peer_addr] = (msg_path, msg_file)
LOG.info('BGP message file %s', msg_file_name)
LOG.info('The last bgp message seq number is %s', last_msg_seq)
@staticmethod
def get_last_seq_and_file(msg_path):
"""
Get the last sequence number in the latest log file.
"""
LOG.info('get the last bgp message seq for this peer')
last_seq = 0
# first get the last file
file_list = os.listdir(msg_path)
if not file_list:
return last_seq, None
file_list.sort()
msg_file_name = file_list[-1]
try:
with open(msg_path + msg_file_name, 'r') as fh:
line = None
for line in fh:
pass
last = line
if line:
if last.startswith('['):
last_seq = eval(last)[1]
elif last.startswith('{'):
last_seq = json.loads(last)['seq']
        except (OSError, IOError):
LOG.error('Error when reading bgp message files')
except Exception as e:
LOG.debug(traceback.format_exc())
LOG.error(e)
sys.exit()
return last_seq, msg_file_name
def write_msg(self, peer, timestamp, msg_type, msg):
"""
write bgp message into local disk file
:param peer: peer address
:param timestamp: timestamp
:param msg_type: message type (0,1,2,3,4,5,6)
:param msg: message dict
:param msg_path: path to store messages on disk
:return:
"""
msg_path, msg_file = self.peer_files.get(peer.lower(), (None, None))
if msg_path:
msg_seq = self.msg_sequence[peer.lower()]
msg_record = {
't': timestamp,
'seq': msg_seq,
'type': msg_type
}
msg_record.update(msg)
try:
json.dump(msg_record, msg_file)
except Exception as e:
LOG.error(e)
LOG.info('raw message %s', msg)
msg_file.write('\n')
self.msg_sequence[peer.lower()] += 1
msg_file.flush()
os.fsync(msg_file.fileno())
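    # Each record is written as one JSON object per line, for example
    # (hypothetical values):
    #   {"t": 1528703615.4, "seq": 42, "type": 2, "msg": {...}}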
def check_file_size(self, peer):
"""if the size of the msg file is bigger than 'max_msg_file_size',
then save as and re-open a new file.
"""
msg_path, cur_file = self.peer_files.get(peer.lower(), (None, None))
if msg_path:
if os.path.getsize(cur_file.name) >= CONF.message.write_msg_max_size:
cur_file.close()
msg_file_name = "%s.msg" % time.time()
LOG.info('Open a new message file %s', msg_file_name)
msg_file = open(os.path.join(msg_path + msg_file_name), 'a')
self.peer_files[peer.lower()] = (msg_path, msg_file)
return True
return False
def on_update_error(self, peer, timestamp, msg):
self.write_msg(
peer=peer.factory.peer_addr,
timestamp=timestamp,
msg_type=6,
msg={'msg': msg}
)
def update_received(self, peer, timestamp, msg):
# write message to disk
self.write_msg(
peer=peer.factory.peer_addr,
timestamp=timestamp,
msg_type=bgp_cons.MSG_UPDATE,
msg={"msg": msg}
)
self.check_file_size(peer.factory.peer_addr)
def keepalive_received(self, peer, timestamp):
"""
keepalive message default handler
:param peer:
:param timestamp:
:return:
"""
if peer.msg_recv_stat['Keepalives'] == 1:
# do something with the connection establish event
pass
if CONF.message.write_keepalive:
# write bgp message
self.write_msg(
peer=peer.factory.peer_addr,
timestamp=timestamp,
msg_type=4,
msg={"msg": None}
)
def open_received(self, peer, timestamp, result):
# write bgp message
self.write_msg(
peer=peer.factory.peer_addr,
timestamp=timestamp,
msg_type=1,
msg={"msg": result}
)
def route_refresh_received(self, peer, msg, msg_type):
self.write_msg(
peer=peer.factory.peer_addr,
timestamp=time.time(),
msg_type=msg_type,
msg={"msg": msg}
)
def notification_received(self, peer, msg):
self.write_msg(
peer=peer.factory.peer_addr,
timestamp=time.time(),
msg_type=3,
msg={"msg": msg}
)
def on_connection_lost(self, peer):
self.write_msg(
peer=peer.factory.peer_addr,
timestamp=time.time(),
msg_type=bgp_cons.MSG_BGP_CLOSED,
msg={"msg": None}
)
def on_connection_failed(self, peer, msg):
self.write_msg(
peer=peer,
timestamp=time.time(),
msg_type=0,
msg={"msg": msg}
)
def on_established(self, peer, msg):
pass
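# A minimal sketch (assumed wiring; the peer address is hypothetical and the
# [message] options above must be registered) of driving the handler without
# a live BGP session:
#
#   handler = DefaultHandler()
#   handler.init_msg_file('10.0.0.1')
#   handler.write_msg('10.0.0.1', time.time(), 2, {'msg': {'nlri': []}})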
|
luci/luci-py
|
appengine/components/components/prpc/discovery/service_prpc_pb2.py
|
# Generated by the pRPC protocol buffer compiler plugin. DO NOT EDIT!
# source: service.proto
import base64
import zlib
from google.protobuf import descriptor_pb2
# Includes description of the service.proto and all of its transitive
# dependencies. Includes source code info.
FILE_DESCRIPTOR_SET = descriptor_pb2.FileDescriptorSet()
FILE_DESCRIPTOR_SET.ParseFromString(zlib.decompress(base64.b64decode(
'eJzlvX10ZFd1J0pVqaTSbbV0VN2222o3fV3+aKktVbvbxoY2xqOW5LZMd6unpIaYGSyuqq6kcp'
'fqirpVLcuBlWQyhI98vEcAY+eBSfgyMQFCgCQDi5dhArMWvIQk7zFkzcKTYQWcQAjmw7OAISHv'
'7d8++5x7bpXaNoTO/PG8Elq177n77LPPPvvsvc8++3r/UPB2x2HrQr0aljdbUTsqDtbqcTW6EL'
'a2x/y1KFprhEf4wUpn9UgtjKut+mY7aunGpX6v7yVRvVZqe2qWn62ElTDejJpxWJz1dpn29ai5'
'L+NnxncdK5U1zrLBWb6z3ghnLd7FsF1xXyuOeQWhL96X9XPjgxX7+9hJb3DWEFs87hUMCcWRsh'
'1EGfSN7XcA3YSWnnP3N/u9gupTz1H7Vcb7WKYwxD+Kxx7L+DPR5narvrbe9o/dePQWf2k99E+d'
'm5n3pzvt9agVl/3pRsPnBrHfCkFaWCt7/rk49KNVv71ej/046rSqoV+NaqFPP9dARjOs+Z1mLW'
'xRk9Cf3gyqQEyjIoIm/ZeErZgG7x8r3+hRg6DtV4OmvxL6qxG95Neb/Nap+Zm5M4tz/ipxsOx5'
'hUJW9YNs+jNXeI4q0N+HvalCP/29i/4eUZmxg75lmF8TPhCBwlBCMoTmhQy9UFCjXsS/soRgWG'
'XVTWPLvmEejbXdaTVjP/Ab9biNsRosfkAkBr4jKz1TbAYFQmKMZcMPGg3qXpkOiYRhGs2wA8kS'
'RKm9DiRHkIPqiHeo0EckFjHGsf0+Jhx8Jo6FG5vtbX8jjONgLeTR9THqIo1uyLuZfmXUZXrax6'
'73uwXjYhyit/i9gtrntfgXOLSPOHTz2Io/mwgvDbDZDurEJUeiY/CKRmtxToIBdRKE7U3DvEaD'
'ONQKmnG9Xb8AOjZDkpVmtc4UKNNnoZ97vcaBZAhyLbEkgeQIckzd5J0QSEaN0TsHx475i3bCWq'
'Ghxm8GxC6fVueFeo1kdGWbppKluuX0nCn0MRYXkifILpKZBIKeimrMgeQIckA9d6WfV/9N3lt/'
'K+c9o5IpjnSpjNJpb7RHpIrP9/qwEEjP5EjPXPsMeuYswBV+o/TVPm/PDk+LRa8P3GDNNVjhv4'
'v7vAFaq+dJnEgbAWx+Fp/reXaetvflWFU5kOIN3uhmZ6VRry47zTxqlq8o/WA2aXzIG9kKg/Nu'
'013cdBhgp+GMNyTivQz52dfHo/d7Rt898l3y1hK9VJz2BsNmZ0NjyF+Ef3PUohtLAa8JigGRoH'
'39jOBQDwIRt24c5j0aymB4f5sUIHaLAUZy3Q6zGDZq3SiS94q3eAORXmb7CrzhXLWjICzoNhXT'
'uDjvKa2ml6Gml+vN1WjfICM42DsQbjhD7eapWWU4Tv0uXu71x9u07u/fN8QSIr9K/6HfG3k2In'
'abl1/FKHm7e9Y80O+kmdj/EzJx2tvVDON2WNMSkXuWMuXpl3pFqu8nEqmf8UYsScukDNeMbB55'
'JkrKc+a9Cl6rDIep32SZeFEzjFZpeVUbJCc7c2kBTXq4FGlotVF8QSJqAxeRlNN6kfVI2zlv2N'
'gKMrJBJqL8jCOryGt6YLtb7s/iNZ4FLLNYeayFhgzwDMHGHvCG0+wp7vXycTtotVkK8xX9o6i8'
'HCkZ1nL5Cv4s/qtkwDke8PW9M5rC3D3usVu93akBPNuuS6/yLtsRNQnJ3k6z3myHrU2ySGjcuq'
't9fztwEZk757bWWCp7Or3Aw4OFrw2on6f/sqVP9Xt7d1ozOy5fWv4kwSthi5mUr8gvWhH5RrAS'
'Nmg1ZMaHj93wrFZl+RReqeg3iy/y+kRFA8PhZ4cBa6nC7xX3e4P4V8tGP9NcAAByAUObl0ktNF'
'ub/Q3BqoWrQafRXr4QNDohCzwJlgBfAljxoLdLr6o6vXM/a898RS+0eUDQ/X0xrWURTe4CAO7+'
'1m7FfWDn4fWsJdoqtTUhUx809o0SgkJlWIMXBFr6/azXx4plxNu1dM/ZueXZhXMnTs2pTHHY8x'
'hw56mF6SWVtb/nzyzdcrPK2RfOaUCf2+CmYypPAjukEcz/zNwstehPQ6jNQHG3N8iQEwsLp1TB'
'4lxcqsyfOakGLc6TlYVzZ5VnMZyeW1ycPjmndtkWJ+5ZmltUQymyqIvdtou5M+dOq+HiqLdbd2'
'GIGOkCEaUqIURjGU0BqEWxNOPlWQxJ3IdPTZ+YO7W8cHZpfuHM9CninYVV5v71ufnK3Czxz4Gd'
'nZteIliuVPX27qRQd1xCjixkLyILjKtbFkpfyXp7dthUduzkDi+vZVlvsxM77k4s2T1bLb/nmh'
'q5i5gaQNEjsC/vUf56f7zl2eyPDPvxNoH8DpvAbd5oD6JnrYx/MePtuxhznkElZlMq8bZuDl59'
'8UnometHM97lO5uUO9LwIq9/IySX3ZhVvXvXaX7cPdnylrvb5y5mF2pqeih9fda7bEfkOxJ6wP'
'Pqzc1OW5tOWhMPMoSVF7Rsp22f5/i5p0Hc4PkJoX1M6HMvMtIewbzRU9VGPWy2l+N2Kww26s01'
'3moKx/OrQSMOKyP68aJ5ije0b+i80Z96Qz+2b5TeOOjtcgzw4tXe0H3BhWDZOFWaE7sAOyuO1Y'
'3eXm5CY6SOqo0gjplpBW5axLMFPJoxT4rP8/bwGxu0N9U3G+Ey3LyYtxxL2ShanJYGoCgms/AA'
'v7YWNsNW0A6Xw1d2qO0yueTL60G8vm8vEJzI7stUrkTDk9JujptNN2t3UaPice9yxkIcoQEvV9'
'fD6vnlTnv1+fv2u/0zhYvcZgZNzlGL4qI3hMnYqD9ANEct3kOHd1BNDgfLC/LCafI/jucXz87N'
'zVZ2GSx3Ri0I1FpkGbxLC9RaZNhLzKpW9ZjJN7WxNpViVrV6UjcwMQNaD5clzHJfHO0ZZfer1O'
'Pmdu+LxVSPm9vdr93q7d1c3+x977D7XpGadL94HXvmrbBKc1Xbd4Xb3HlQLJP4V5fDZrBCEhO0'
'6I9430Fu3NdudciLqFbn+OE0Pyse9kajlfuqWiKXCc1q/f591zJ7R/CA5fEsg4sThDteD1qbrJ'
'Jjmoxw33W6qYafMWCsiHirvto2GA/pFcEwwTbuKXAi1fE4NxsmuNsvbQZomXQ6oQ03AiY93uxd'
'jkak6IJa0A6c1pPcGmw/LQ9TdLY6K9tWsKY0nYAZ0bpkxnnpuDfkyn1x0NOSTwYJGUEzC7MwX1'
'42R7YImVGn5pfmlivnzizNn55TOcewv7uvcL06VPqLrDec9tSKL/SuMGGVOGwvb9VbvCA3Ar05'
'WvnZK60Ww/ZLqc2d3KR4yjvYjEgBkOIIWrXlJKC1HFRJIONIb4QWy1XNaFEaJzvEtDTtEt/cxc'
'SXrOuNYJPkt93aZvu8UCkQYA6//0XcJOJmQQ3S/w4qr/TXOW/Itdfh/lR5x8qwTrvmaa378gy2'
'suP92jiu6DdhRkDYQm2MFCryq3jS678vZtz9jHun2J+D++5FRj549+LymYXK6elTFXm9eKXX1w'
'ge2E5vegx6tpNAGBCgS281DLqEi+GIl2d+FT1POKaeUyx4fTMLFSwIWgEaunx2fm6G1kTpeV6/'
'ZgIWi2UDvaR/Co6MeXru9Im5isqmp7pP5UsxrULHDv+Xccb/Y8bb5djVMIiCRiPaWg4a9SAW0f'
'AYNA3Is526f6Elklf9pUcynuo2bLvIzPyvJLP0tow3nLZmu8i7+n8peV/OertTNuyzpe6V3mi9'
'Fm5sRm0Ez5cb4YWwsa/ESqM3qJjqoTyfvHcKrx3fMz87d/rswtLcmZl7ls+defGZhZeeqah6V7'
'NLuOzPeqqbqOIV3k5k0cre442cWaA9kTbGuTvvnJtZWtRxD9t6KbXAS7+R8/bsQAmpce2xaCdq'
'6tlQX4bNcJZcSXFwyBYiLjXb9dU62fPaB9duzEgC1yGlSa+4GelDsGXCK43h1vRVlHky32zb1s'
'1wLehqDWWeqyjzxLYm+6UWdWDr6XbYOzKVXRpmm4gVn0S9hsgUY5hucsgbCdbWWkBuEGm/ZNiC'
'ueHY3V7B8AFbNThBphM721kEwprmIXVaj5eTIH6Wnhcqu+qxDYCWHiWDJX0IQb5LoRGRkOuTds'
'zN+DOcW5RPSfuKfXPs8xmvYMC03fZtBu11Rpc/kVWZCv8GnCzAJouAwPEb89oIgxo7PdHGBs1k'
'bOZV4DMCxllYuxXUG6m2fdxWmQe28XHvSoO3RlYoOVS15KV+Dm5cIQ1m5bl5t/QXGW/UuGk1y6'
'zTnhc0m1HbZVevKPe8V562L1UcBGMbnpc8uSjbaJ+SEyY+ptSOvadB8OcQflkJ1+pNiRvrHyb8'
'0mfDLyf+twx5bNFGN70nVFd0Ib4r87IXrdXb652VMrU/shY1guZacs7Kf1SnyJ+aWoucU9fbkj'
'9/kMm8M5s7efbEY9mxk7q7s4Y9lXC1EVYx5Lv//ImsN6gOqeeoXxlQGe+xkcIQ/yoe+9SQzy9U'
'o4Z/orO6iuP+KV+jOhT78Ch8VhjVdYSgfG1je6msixufLy/4881q2b9IwsV6u70ZHz9C4yA9GG'
'1SR4ZBGPumEDG1ook44nl+JazVsZRXOnxWj4P3ThwiuUISNgBZqTeD1jbTFU/6W8RNP2rxv1GH'
'6NyIaqSu9IKZ5HN06nmj3iapSc7QOdcBCRurEawSklMkBtTqOhuAXiI8Yfs4kYT/DncRxvkCbg'
'rJRiduI/0ikDSQYCW6gEfCMc8nUaSNe1Inn5jsDLfHZq2LHCTHNIL6Bg75L0IEdebwwhBBY6x1'
'iDBLh5cQ8s+iwzNJLrWo2sFaDswkHSH+R/Sk5ZOkkJ9Pm3zCap4geuj5LvV2UGfCOr8JxFC4IM'
'iVrWaUPGO+19uxx1kcjCoi6d0ItpGIQ5JCxEc+LUiChhAKImKD9mJf86SNpI8WbTg1f5UeeCYV'
'aLW9BTERCfLjzbAKCaK36hCsFmSnqaUojpl2z1+6a37RX1y4c+ml05U5n/4+W1l4Ce3cs/6Je+'
'jhnD+zcPaeyvzJu5b8uxZOzc5VFv3pM7MEPUOW/IlzSwuVRc8vTS/SqyV+Mn3mHn/uZ85W5hYX'
'/YWKP3/67Kl5wkboK9PkJs8tTvrzZ2ZOnZslP2DSJwz+mYUlzz81f5pc6Vl/aWGSu+19z1+40z'
'89V5m5i35On5gnz/se7vDO+aUz6OzOhYrnT/tnpytL8zPnTk1X/LPnKmcXFud8jGx2fnHm1DR5'
'6bNl6p/69OdeQpaJv3jX9KlT6YF6Ptk1cxVQ7w7TPzFHVE6fODWHrnics/MVMncwoOSvGWIeEX'
'hq0vMXz87NzNNfxA+yhIiieyYF6eLcvz5HreihPzt9evokjW78mbhCEzNzrjJ3GlQTKxbPnVhc'
'ml86tzTnn1xYmGVmL85VXjI/M7d4m39qYZEZdm5xjgiZnV6a5q4JB7GLntPfJ84tzjPj5s8szV'
'Uq5/jcZYJm+aXEGaJymt6dZQ4vnMFoIStzC5V7gBZ84BmY9F961xzBK2Aqc2sabICbN7PkNqMO'
'iYk0pGSc/pm5k6fmT5I1OYfHC0Dz0vnFuQmasPlFNJjnjkkGqNNzPGpMFNHl6b8d0Z3k+fTn7/'
'SnZ18yD8qlNUnA4ryIC7Nt5i7hedk79qWsJNsd98+TIoia/ypR7P74ixnkvyRo1YIJWucnAqxM'
'gkSkhEhhNXo3INrf6mukzrap+WLQvI9W9Mn1cCPYCtqT/t3h6qo/GwZQ56SfWNPEvAhJL0i0J9'
'bKidYztnKbJqb1Vbhab4qCs9l6epPm1oSLFgBZivWaCzYJfpz11YDVQRqM3JZ2YxtqJvB3SFDy'
'rBYJmtuiE5G4gi0UynI8LK+VbZuWNpGg0vw6eQ2tdjwhWYMTtFfvo78K6gb66xyAhV3yN6CT9N'
'ckQzPyN6BT9NdRhpq/8VeZ/rqVodfJ34Aeob+uZui18jegN9JfBxl6UP4G9Gb66yrv5zP096D+'
'MdZ2swk103gDIoaZSHcN+xLUKZLWyN3DVt6yJsrUCsTC84PGGslFe32DpCBqHmr7W1HrvF/rwK'
'D3V6KoTZtGsLlJv4g1DU4rfD5RcFxlxu5lCTCGBG0KG5s0JS2eOH380jNLi2HbpAACP8RDU+9p'
'UaC58MkHiG1eIfL3nq8K6kpvmH8hr/AFKqvGOYdOJz32McRzIP0E2aWe60AyBDkoeYEmVfIF6n'
'p1yDvKOY+305heRmO6xuY8IpETQ2qQ3+bKpZPweDsRtt97oU14fBGRsb80qcUXO+YkMbzBThZE'
'tkXcdEyXdisM3dTFPn7fheQJ4iYQYhgvUkV1eSqZ8UXqSjXmTdlkxjsIy3NLB3yW9dJqFBFF+K'
'e8ErToz7Bd7cpbvKMnb/GOnrzFO6jbK1N5i3eoq9QB71aBZNUJwnL12CH/jLEUZEJ5Yem8Sasg'
'HAKyRMCJFAFZIuBEigCM6gQRcJUDyRHkoPK9JYHk1CzEYmzW59QKTQLkS2cSJnQIWWJL2bxBbY'
'6xxeZQlyPqZlPU5Yi6WaJOOZAMQUZVyYGAmutIuF4tkD51krBcP7bRTR0Coc+ONtKSd9J61sbZ'
'FLsI0OUb9bWWVm5Rs7Fd9mcjWJmwxpxh9AkBLiRPEHcYWAUnaRi+A8kR5Bp1nfcCgeTV3YRlcm'
'yCXY52tDnFAaOUinc3AoeEPJFwd4qEPC3Tu4mEMQeSIch+4lsCyRHksLqBlz8g/erFhGXKtugn'
'vC9O4e3nNmb5a0iGIAdFaWhIjiA3kN42eAfUKcJSti0GCO+pFN4BwnuK8F7tQDIEKakJB5IjyC'
'TRZ/AW1OkU3gLhPZ3CWyC8pwnvQQeSIYjv4C0Q3tMpvINqIZXoPEh4F1J4BwnvAuG93IFkCHKF'
'w5lBwrugribZ/X5GQJ46R2iOjP1NhtQ7z2PYqCV52yalKLWv0rLpaIfO2hiOf0aSe0/UYT8hDl'
'ZD2r9b4QY8NJYTxLfI/NfdmK15PWjhcNxvdejpBjkvq51mVXdcb2+bxZNsgeRDTzHIpYqQN2nz'
'Y+OHOoV1I74w3CcS36jhJo97xMFzKQ56xMFzxMErHUiGIGPqsAPJEWSKZvgVAtml7oEGHjvL26'
'NOdbXKyNlG9OPOpqx9DpXpzPMSNztWYntL/7ip5FC6iyi9J0XpLlqa96R05i6i9B7SmfscSI4g'
'+0lpTxAkq15OW96racvbn9ryxKDj3HvZ6qB+X05b3RUsfFne6u7FVseos7J53WsJysrmda8lKC'
'ub171288rK5nUvb14Gb0a9grActi2wO70ihTdDU/IKu1iysju9ghbLdQ4kR5BxWj4Gb1YFdhFm'
'ZdMJUnihWIIUXlAT2EWYlU0nsItQ/64SlhtsC2wX1RTeHOGtWiWXle2iStNwvQMBngkat8Hbp2'
'pWyWVFf9dSePsIb80quazo75pVclnR3zVWchqCKzCrhOW8yiUQemuVNoF9nm8hmN811acOlIYQ'
'BGh04jpvinvdFkQR2gx1QfME3a2KXdAMQfdQH2lojqD7aVN3e86odcK6n3qeu3/nniET6z09w2'
'pZ7+k5w/j2kMyloTmCQu6KDjSr7iOsR1ItMRP39fQFWbmP+ip1QTMEvYZmMQ3NERTqwcxtXjVS'
'soiNsZGaW2yMDat2NCRDkDFHFrExNlKy2K+a0Nu2BTbGZgpvP7dxZRwbY5Nk/LADyRHEpXdAbc'
'J0sS2wMW6m8GJj3EzRi41xk+i92oHkCHItrdIPZoQ9GdUhNPer3Nj/kfE53w5a0gQw/Xaw5uus'
'uLjsV3aA8oYCW8cEn6DgeRuJEXyy5wU+pxRasypGlMtcgEoQSyPCyeFCHxexGsFm2bNLJcMUe6'
'QGfQvBUrnwNEvFWPYXUkJkrPsLKYE1Fv6F1FIxVv6F1FLRlv7W0ywVY9xv9fSMpbLV03OG8blL'
'xRj6WykVXVDbMAHtxMKe2U6JA+yZbRKHfQ4kQ5Arxa7TkBxBYNe9JiOgQfUqQnPdWCeZE20TcO'
'hx0t9ar1fXd5hzM+U7TS8CC2vEl6aObeJlbQqEiZxFzWpYTqiHHfWq1HgGiV+vSm1lsKNeRVvZ'
'QQeSI0hJXevtZrX7c7S3/lJGZXj/hKj/HO2fB7wK/4LQ/EKG+jg+Nq1daHi85KqTIRMaC4td4V'
'oUxvDJW2E1WmuST++vhy2S2sUwtK7KqMFJhDNWF9QPEHaJBJQB6KC6yQHlALpFvcD7twzKq9cA'
'z5Vjp/0ZTm+M2aVnI58sug7RuZFQ2UxWWmwveNr15VI6orHT6An/bpKPUQMgohg06oCyAF1GrZ'
'7H29/rM8TSJ4mlY9el7JXEbiRKAudmIyYCeyK9WKCVU+SfCF+8AYP77QxtgqMGRs0IOqB2eT9j'
'QZilX83Q6tk7NuPfCGPSigziKmGrhcuunr/Qwp1VerwV1lv6GXGAprJeJTO4FQYxDss97zIXM3'
'XIuEe6wFmAi2qPt8cBZ9Svoe2eVNuMAQ93gbMAj9LqfpUDzqo3MoqxNf8MrZ6X1ddeRoqUfEsy'
'z2tl3z8jZ8FWt7aD86F/9EZaX+2Q9C/f3nVy4P36KqlJ85Jjujbq58nA7xouhvDGXlqFLNDqDj'
'enfp3ZnmqLNfTrvRwj+QUYHHOH26fe8lMc7k3HfrzhQure0jtc2Nxv6R1uXj2Itpel2mLDZ7Dq'
'AmcB3kPMcVH0q7f2osDe/tZeFP2E4q29KAbUQ2hbTLXFNs7g3V3gLMCKlquLoqAe7p03qP6He+'
'cNMdaH9bz9bcaBD6pH9JL7C/I+g7WpWtiob9ThHtnkAVpyJ1tRZ5M9FKy7JJuF/SXsDolXRUpB'
'+09l/65oi7y/1qQOf9/kIezSCO1JWuzH5HqSLonb9UYDWwQyWxG3Y/XMm8sad7zF/iq7megRF5'
'ND+lM/lN0oiP1O83wz2moKpEtGsIU80isjg8SXRyAje7xjDthT70Dby0sH/FNhc629vjNjUqjg'
'qr6jd/496uEdmP/LvHEHvEu9UzN+Dy2OLbDtgr1xn8YLx/KdvZTvIrzv1JS7QjGk3tUrmkOE4l'
'29QjFEKN4FoUiL5m717l71t5tQvLtXNHcTindDNNNrbFi9B22vSLUdJhQMHu0CZwHeS5aQi2JE'
'vbcXxQiheG8vihFC8V6NYtIBK/Uo86J0BfRLnFJLOr7uIlGE+9FeJinC/ahmkot7VL3vx8A9Sr'
'jf14t7lHC/T+M222VGvR/b5e+622VGQwfIrjxiQdguP8AcGhu76HaZUGGM3Q+ktU5GdsEPYOtP'
'JkCbux9MT4CxWj/YiwK74Ad7UWTVh3pRAPOHelFIa6AYYSAG+HsZDnuMGgBtNL+XmFx9Ytb/Xo'
'atxQSUAQiRjwSUAwh2tUGeUR8BpqtsGxjvH0kjh+X+kQxHbRMQvzhKg0pAOYDGiFCDPKs+mqYc'
'W+RH08ixjXw0jRxUfRTIL3dAOYBA+TsyAsupP9T24q9m/PlV316ohBDEYVvSHRCKM6Y7QWmTpb'
'YrEWcn1CX1wbzpSQEG86492mqyj2iv9E367oVAeH3JhcFyMjSEZf4wPVrEZf4Qox1xQBmAUFAj'
'AfHYriAr9ONZgfWpPwIqf+x9WQ7Im4AZBoCb2rpqhRBej1OZFfibA2s8ePPE8/leWaxzNQL/UP'
'nQJKx/BF87jcb2FG7TIAcQpssCDjW36qiJMnPDDVMwQPy4GuGAzvNbnYYYJiYbg0z2mu3WH6+X'
'qe/VeivW0Vp9PV3qbIgNDbq9ZFQ8D0ELOWRclqOZtGPHhbbPSRwuY0OOGIhjLs+3VyYmnInoM9'
'xzQXmA3AUDrfNHWDD7HVAOoOeSy/ULRuzy6o8zXLdjk+ch8UKenvckR4gsy01esHQerPagr6LG'
'hbCW9iSDZpPMe9rOrXA640Hs5o/T48lrutzxwJj74wwX/khAOYAOkGf2USNY/epzQHXt2Du1YJ'
'Es4dKOkScbgU+F2ds0Ds46ImVrUoJksGymyqnRShQ1wgCsKeHmTglLpcTZwCVpoVM4u/uR68y6'
'Gzxhb3Acy5h8sGBTcwuH7VvB9oTpDEZ0F6IZ216TpZPWuKX/otv9o8eez6ImjRAcX5hdGNfZDR'
'PHdRLDFPkd2oa/I+E3wlyfS09BP03B59JTAGP4cxl22RNQDqCSusZ7rRGpAfX5DB9jXsD6ZP2D'
'8EEsRwu1kAP5gc9XuI2cuOfZNFWHYj8ppeDpozvfPUqp6zA7HA89VWQ78huOYCHI9vn0qAZoVJ'
'9P62fY55+Hfr7KAeUAwsnoj8yoCuoLelTfyPh3Ly6ccZaEIarMcQieGtHaiKz0HO2XRW150C/k'
'5wa6aeCX7DX2kkQGoBMS/JMyTH5EHOKekK2hLW1oq7Kr2upt5EKGyKTT2WI6LQ6o6AHTT/QR58'
'lKbbPqq+NAx6/Sk8ZMELuLFBGqL6R5WSBefiEtIXBUvpBWOghSfQFKx7cb6aD6Iq9R2wbhoi+m'
'kePc7YtAfoUDygC0zxE/RIy+qMXv9wcF5qlvZjgG9u5BZjSt1ESXBeLI+CVzBFcq+y+FQrZPrH'
'yZFmAk102qnkdKIHREFaGvtaBVo51Cjunxkhz3aIRd1QNYkya06BfMlrwSNiJIfmSXBZI5kE4E'
'7RT5UaNmyKtKMIlFwVLDyPnaEhuKaZmEXpbFFvvanQrAihKO1dbDdr1a0s8nJXeyhz4k95Be54'
'xSXnLjYVBdNyTZIeqX1kKkAtH/oyPbhe5houwvGogQFdMugqwBe2BvjiM5yqhJqunzeHODlnXl'
'9Nn5nZBZKwcRJXh2q21sPbRESuRgNmSknHrlGh7av5zsmTUTPKzCO9UdxyHtZxiRTi6exERhDp'
'pRc4o2kZCd6DRe6p9Uu8yRnTXrTWPdscOM9NQa8n5FW9qukKFeb0DStmi4JteU9cdWC9mhiTxj'
'VyJm8dFoPTYJYNSUzCAWjhbSx6Zop+KEJCd9SB+pMltIKcQ0KZw3sUXTDgXeM6buyaVRxtGkHy'
'JUjBPotXUxwDB3rfCVHXL54ehHPXxYkuUZct4qTda2pHPQ6G3aVTsJ8tKgDmFNhjSt9VUbwKy7'
'Z680CpRqIwNgkmQXlLBoyEyY+fS6BwHB4NjGsS65jo0UQlnrNdII1iZd8rYJO/Lhtu00egkStk'
'tfkS508QpHw+Lk/JtpJeiRhv1m2r5GQOKbsK8POqAcQIibX8uW4HcQ5f0fiPLuTUV5zfgQ1IU5'
'9R0d1B3hn/DMvpv4N3nxzL6bUJQXz+y7ic7Pi2f23cQzy4tn9t3EM8uzD/RUovPz4pk9lUaOY+'
'mnEp2fF8/sqUTn58Uze0rrfBzB96vvY7hvzNJwr3SH20wcCRkz7JfvZzjdbIR/Ysw/SMbcL2P+'
'QUJWv4z5B8mY+2XMP0jG3C9j/kEy5n4e8w8zfEpp2mDMP0wjx5h/COTXOCB+8Vo14YByAOGk0i'
'DPqn/McO6KaQOn6h/TyGEO/2OGs1cSUAagK+Q0o1+8UQIhf+UjfCbOZ9z/PkuoXptVubFHsjsc'
'LBq7WodxnSNAievudKyIO8T1rjNEzNGOB4hd54cIQYfmyoNWFkjFJ8Xb1qbN01a00l3Su9jJ8b'
'Y5ZfTjTnXdPGJdGWySLtxs1Wnxco6NOAVMsWTZ1Jvtm455pA42yGItG3brNABim0fidLUFQcZe'
'k93hdPMytwnNHTca6gLnAd4tUZUEnAEYB5xpcA5gnHC63WfUL2XliPNi3UMuf6m3e0RKfqm3+4'
'xGiVPONDgHMBbABzIiWjn1KxCjG3c+nL6oDKUfdMuSB6uV17Y875YnH05DoBP6LipbniNcZiUg'
'xMEEu6B+gHaJd9AvIQ4CHVA3OCAeaVkd8SIB9an/PcuO6L0JBQnRFz2PbYXajN/xyNXb6czV0N'
'BnunRBeYBczYWwAIGMD9cvYQECQaHeRD8G1JuzpFAfzvYk8mrKzSmh1q+iWuFE0WsFsfIHWOzf'
'krWqdUCE/C0JgQMi4G9JCBwQ4X5L1qrWARHst2Stah1goX4wawN9AyLCD6aRQ3wfzFpfb0BEl0'
'Am0DcgYksgE+gbYNX6UJazo00bzOZDaeRQrQ8B+X4HlAHoKtHJA6JaCYQM6esLqGPwNrD2t8Da'
'y1OsNbWSNTfhRr0N3Hwu01Rgbr494WZBuPn2hKaCcPPtCTcLws23J9wsCDffnnCzwHQ/kuV8BN'
'MG3HwkjRwb1SNA7jsgfvFqGl0CygGEnASDPKt+M8u5L6YNuPmbaeTg5m9mOfslAWUAQvpLAsoB'
'hPwXnOsMqneDm78Hbpa60u9wxVpHGdKchQ/57iynEYzwT3D2PQlnB4Wz70noGxTOvifh7KBw9j'
'0JZweFs+/RnP2NjMAy6rezHDZ4XYbUL2fz4/qHTuznSBprAg5u0MLm+M2OMTSyWslrI6dlp2J8'
'NqxG5minbW7RrcJNiHrTEQ29mOXfTg8Ua+a30wPN6DEUZbENyiwT6KCs9kGe5ceAqWTbYJYfSy'
'NHtPGxNHIw6DEgP+CAcgD5NPEGeU79DjBdZ9tAUf9OGjkU9e9kOWMmAWUAulJEdlAUNYGuIUP0'
'LgH1qQ8B061jt/rz5p54DK9Ce9q+LkEFx1IXdTJwc2knIaHP4HJBeYCMBT8oGphAyqEKGvhDmq'
'oEVADoOnWLAxoA6Eb1PEt7Xn14Z9p1faxe2gXeSzuCsB9O057X6F3a4TV8OE07grAfTtOeJ9o/'
'nKY9T7R/WNP+YSRBeeq/YO3+eU5ljp3xb//n/+f5UlHBO/bnu/05RCdsfnOSVq/vtGI7XQ8uWK'
'c5LvlBW9/wdteh59/HDqO9gx0727X2+fjiLC0x2pZNbKLmxw1cUyXjsVbHxTCaFuzciDUE2ybO'
'6dMW2o7ZHdc4YIGSr1/f7DTY+7dRQzNVsIFNIhEQ7ZxIFMQ9iUShf1hY4+CSUTptjQ2yHbIdIm'
'EifZGIwwR1aJc46nLD2ZLinKskXG4ssg0cCzTjNnnEOsLBmVlclp3TjMRpb6aITE5NqkGjweXs'
'e8tclN1QMGs7nk+rLOX4q9qK4phjP70s8F8a6pMY7btbbEhViPQs6CCtw6MtPrkJScfWG+la9J'
'NglZaOzYjo5U55DmNN2koYNj3NN51hQZxEGwc7VD+zHVNtqzLY7BqWlfRsy6Xh6noUhxzT01ee'
'4+Oef5jDOKahpowDxSZ1j70ym/GFKSb1QM5PQ66dk8DienPUWiOpfEBuohNKPoe+f5Psc076aJ'
'g+JpmRkmwoJD/vRvoPWNrrLQ4HvQD/mcMiOe3ajjr6xnPcaclCAKhmr+DoafBlZhk1U2GVmlBQ'
'fppRr8gVpnhd2M/D17ea+FaDxYaecEihrw212edr12Grh1MbiMGZQgdTa41oJWhM2RmcaoVruB'
'2+7Vwk5cFHxmZ30mltUu4iEmi2zVVzDJ2vfN8XkmPLxj9ftwSehRUAySuYmvE3G521enOCh5J6'
'ZStciettHFKS8XOB6EX+zYRc7GjhXKUZARkfJIPt9282WB9FW8x2rLUmPxHO0yI+F3c4JIfnLD'
'78ctRkXnUPqczXVHSqI7ywsGeepNwBEOEo2iJgsvT04tik025LeFLURdxZmUqlRPKRmF4RZnnH'
'+l4maT4tdnzRPxahDZCqhMXh/7hlHYBEQ6+JjJqS02dE3vmLFXx5h7cCrT+gEzAd9hMIRhARP+'
'xsimQEHSKfVleVORzEfIVYDn+MjGjTFZG//wLTdY++ZuSx7fol7NnXjD2RIY60ddD4bpp3X8rn'
'QWW1YLIQZtK9ySYlx0FEvcR+hTrSUY2gyhvhiW1zcjnpXLsxiLF+Yxxnr3SSkEy02oaaqzedKI'
'oNkKbet9HUJlLAMHY+vteysUJttoJWLTZBFjGStW3iiY3+pcRc8cRG/1JiXXpio38J1uVzHVAO'
'IAS7/lNWYBn1FaC6Yex3s/bEErqZzyWFc/b4zL1czRUXcLFXTnBFjXo+1xTV79raEY5Db44yWd'
'1iS4x0Dgcb80c9WvKv7NRpK9UXrPRZNanV0tQULZhlro/JX5gQQdFqQILW4zzjtPkZSpOjB30D'
'PNY+gWh8plFP4rZcL9sKuFuPdUO9WocpQ1tMJCF4PSoMY8KZEPgSX0lPCHyJr6QnJKN5XRRv0B'
'Nf4itZvrjz3pzAsuobQHXb2JtzMiH26CxsJjLOlpOINFdPoTlLjojotc2A/yyDY/putNYEsA6T'
'e5eiUyblrIA3Z1my2siyNop7y5rnsKO/shKHNvsDtgaJ9WEsdEkMqZN2q2neOXKhkzs4936nMr'
'SE3qZadr2pezEJlp6zqHF2K/alPunlajhrYTvxF8cnjHccINmaUOAosLntOQxxbJIuBsh1VDN3'
'cJ6+kZ50+CTfSJwGT3y8b8BpuMYB5QC6XmIsGlQAaEIdd0ADAN2sXsBZeh6/9iT6mxvbr1eQaE'
'qc0kGbrOssPYMAbuKTafqQmP9kmj64iU+CvikHxB3dqI45oAJAN6lZzoYTkG53s5rxvm60SZ/6'
'Hrp80dhfZp9GeI89vfRap8Bj22lLn2fcXw31eGGI8sEhAtbtNr5HBPW7EWh3RLaRgI8kzy3dOf'
'V8j7NEiJhXdvi8WKsAzlaRc2LPl1qlTnkDTVQtsoY1tbJSGTh3FsTu0zKJYqSb7TjpPN13bE4m'
'Ze6Q/9AkQ4R22ipyOLSVbc1T7rsBk0ausTrz22eY7YLyALnzC4f7e5jf6x1QDqAJCeF64nATaF'
'Ld7oAGALpVvdA7yyBcfPgh+vt/cS7yQt+W8rLbhJy27lQ9wrh6McHtGPS1CcI5QNv6vAVhZ/8H'
'BOSHSzfbXpJCCowHmMU5m4TCqgcNY6fr6L5FRT0wssEucBbgIbXbO+WAM+pHaFsc69dFDkpHOI'
'8/Kcu1sMk7iM37FgWotYrJ0LbYDL7dXeAswEh+d/vOqn/Kcv7tre6ga3BWIKgioKfq7dAWeeqS'
'DLcTjIXxqS4wd4Pc/RGZ6Lz6+Rz2GzvziIYwyHNA/QCZw0lPoiE/j+9gXeuAcgAdchQboiEEch'
'UboiEEgmJ7wqiNfvVadFga+3+yiSl3Muoy5GjJcg2oH8eQIyUU6dphkz1IxSR1S0qla2extTzF'
'iU2odeLmAVpLkE/qfVSjm0TsyVTNKutXnYQk9y0y99paflKbK28xgoYP3nbG0k1L6mVz/92qJ0'
'dlINPttemZRabba3MpOwUnxQQyMU8NygF0UF3t/WWfwAbUgyw3Y5/p8xf1vQcpI558tS4VGsI9'
'LxgfqJjXIS7c4fslKStesq/ovF3ONDBVvKCum9tY7uQpwLfwK2dn/Hib7IsNHbLa5peSnrhQCP'
'J1YAykt5q4hwwyGk3Fn5quokZ+vXYxUczHP9k9qK1Q4j3sxZ1HemzypT7TE5R7coeED4CDFjWX'
'OxEcnuoujujt0JnmxpbOSdFJPc5tFRrLanAh4ixArSaEcPkYYHpLdTmq/eaLs5T2waiF9B6t4G'
'wWJ4kZcj6RRsw1HWqhRBp00ACZi71ioK/FpGxqnUdC3i0nxHTcjQ2Ziw+mpRSZiw/mUhsbDt0I'
'5BpWyFwkkGtYDZD+eTCtfwa05EL/GBVYUG9Ff8nmh4S/t6ZJQMLfW9Mk4KTqrSDhOgeUA2icDP'
'oExOhvoI00AQ0AdAspXUPCoHoorYWRFvhQmgRcI30oTQKOdB5KcwFpgQ+luTBIJDyU5sIg7mul'
'ueCph9FfQiaSch5Ok4CknIfTJMA1fxgkXOuAcgAdkiQODSoAdNgZoUckEOh5RNX3jE+/S70DHR'
'4b+2rGn49T9cCM0N/h+fpbfRD3SKtP8p7J0IfSb+NWlaQZVvlrlfU22if3u2xA29P+EL7bybaz'
'a+dztK/etnuEsT6Qm7VGXntYu800x/ue3wiDuO2mWvLtLmOUcE9mCNrsbKRcetTjeEea1SjI8Y'
'40q3Fx6h1g9ZgDygF0QI60NagAkK+OOqABgG5QN3r/zrB6SL0rx8cnr/T1Jxhik1bHZ4f8PQbJ'
'pzThix1LtZVT7ryntSI1XuuxD4Fs5oYbnGEP0bDflR72EA37Xelh82WvnD140aAcQNc4S2+Ihv'
'0uyP0tDmgAoKPqed5bzLB3q/ehw4mxX3SiRpGJLvpVcTP1FyBEt9X5NwdF2c9EfMR5xdtprF2W'
'SVkrVZ0QaNSpw4ndxIn3pTmxmzjxvvTWjDtr78PWfI0DygGE5X5KQMPq/cA0Pnabbz8twczvIf'
'M2Q0lsQi1ioTiUDRNl709TNkyUvT9NGa7CvR+UlRxQDiAUsXqzMfRG1IdznBjy77JOiM1fxIc4'
'3E2a1x1fqe0NvsH6nTFZ2pzURHND5h7W1aHyIe048XeA4yqOZkwRWp0/FZmpNbc54iPx9sZK1E'
'C8TTv8khTdTvy0WIuANu0mdbYjk2jPTyTxXR/4eE/Xje0l4ecIzh/TLB7B+WOaxbgq+OFcKqA3'
'gvPHnLqa5OEXjIAr9TE9+5uJfG+ubz5buUbTHjnxdpDnWZk7ZK7ik8DOeBSN52Pp8Sgaz8fS48'
'H1xI+lRUbReD6mReZTZjyj6uM5Tmn/UIadMWdaOOajixybezpa2EmB7TgOS7WXkN0z2fZR2mZn'
'i9u9boAlBZlpo66sLUZoiXD4Qf/qQbigPEAuP3Cl8uM5m52kQTmAkN36p4YfRfVHQFUe+8Q/gx'
'/muzSWMV7vfD4jY5JYsMsbzzLnWfGmiAtdad4UcaErzZsiLnSBN+MOKAfQDWrK+4zhzR71aa1e'
'PvJMvDGzipS8DvkLP7moSFb0TyQs3HWvyt1DPPl0mid7iCefTvNkD/Hk02l9sId48mmtD35OQH'
'vVZ3NcKKT5ExUK8exhU7potDEMSgsmi8CcPrlVRZgAGsxn04PZSw7/Z3O2qogGZQAyVUU0KAcQ'
'qoq8Xk9wXv1pjq+JPvDPLivyk49Lm8uoQULEmBokntQgYdCoA8oChBok+hRrUP0ZRjAsWAYJy5'
'+BEbvllUHG0gPKGhDSR3ap/zunnqN+vU9lGCusQoIU1OXel/L8GxG0J3Ich/1sHrsAu1jOuWZy'
'p+aoCSyhlVuWYDV1IdKELZ1tkFvYw6opWMjEwpU6l9ezwcsu7J6gJ/8RJ6VSLyE5665rKzGpva'
'urnxzHqfmh2MeNIw/RUvIi+doo4qer4RYOxcOg3WnhqBr7PWYaez/b7XwZodZVb9jelTFR/vD+'
'gCsDpzIJfNv8zijyf1bXPJe1f5GPWfm3M7dv020dEbwZE7AR3M9PXp1O6g6dxA94KDpvAmww5O'
'lrGLc5DI0lLZabulPl8c3ItPCzv88pd2bcCBew62+s+tt0vVc2bkxyy4okfMf6gChmv6j7ng93'
'ecLeBBEPyJzkasWok4/aW5wO0G7Vq7ZQP89+iKKLVYmU2M0ldXFQqw8WbtIoTyQaRYPyABmvYZ'
'dEfp+A13DIAeUAOiyRbw0qAGQi3xo0ABAi309mBJZRf4cO7xx7POPP1uPEXXLCPRKNM58o80s1'
'5+Cp5JvPlJm8Z2Ixl8FfJflsm4vb+ijBYDLZPObIlPWXPqAkQSLj2JZuFQNNljStmXrYus1vhl'
'sS+dHrLLgQ1Y0kyQmcQ2TJYTFONP8uzWKcaP5dmsUZzReljjigHEDHRI9rUAGgm9WcAxoA6A41'
'633HsDirvo0Oj47998T1N4viknn/zsr7MV1+8fi9Z+3yO4vFsAHnZd9Ocxnx+G+nuQzp+3bi9W'
'tQDqADsoFqUAGgg+TiJ6ABgA7T9LyrX2A59do+hdqib+yHNWNv0xlO65WbTsxwzMZgky+ybGtV'
'IhxEoHXTXLk0VwstRKtRH5AXvjjcxifSJn3+fg/+fBHgy1rsb/eP3uYlVkrNvQ7ZiKLzMRdLMu'
'iE4NPBJmcF8zf5jIZ2tbT5fl9aLyctgoYvZPnnw20hoqeJJVg8vdv9Y9Ls1fofqxTTBHWNzvPn'
'u0oGcV6kTjCAInQCJ3peDPm38xZuZXUFX0qBvg1oIXMYAnNTTy0IG+EmpOvY8fGMxqhLZcjFE8'
'1w59hz+uw8G098OainwBEfcpr8KS70jrzs+qpvb1frtbDzXVC+y7SwNHfcFJmWMLA1p7vK+tNG'
'xvkbxmxhqdL1cD3jbOuL54JAFJ3J86tvpALO+lxAHBezsUhiorvB4Oicl4kLygPkrkscnRNISd'
'6+BvH6Qlnz6xk0qF4PPLtLV3CiAw7slu1xJW0i2pzcxYbg64F+SHBpQ7AHlDWgqwX9GzT6IqNv'
'Bs1oOYiX0U2COYNGLprMTqCsAVVkLH3q1/p+mhUGGafB6oL6AdrlqDKcWRPooLOH4MyaQKbCIG'
'Kjb+y7VBUGd7F1T/iNdb9LrHsGjTqgLECw7mGXD6k395Fd/i1jlyNsSZCC2uu9N8u/YZc/3MfO'
'+5uzzFX+CGci/eaEk5P7brihO01CDPggSa72LlK5Q9J5yd/FFNk0sy2sDrImjO9jToQ8yypcxt'
'8y9kbayUVytNUHSVpMRLux1OoGNdgWQ6RckqnT2iZjJzyEUF6LY8J89bAW8s1oToLuwGY3h2JX'
'a6kYEhPv4URQNKgfIHOjaUhMPAKZ6kJDYuIR6ErZLIfExCPQfklxGRITj0DXq0kuU8WfiVC/gf'
'5+q0/KVJlPRxAUZaqutSBM4tv6UPlqbMQGTTa4SDkftdtWuMeEdt3gLMC7yQfc44Az6pE+mylg'
'gQZc6AJnAcZidVFk1W/22UJpFoibRH02DyABc2vkAfw3I5sZ9Sg4sH/s/8rKiueSCiIEktyhP+'
'WsnT+r4zdbqCSHTUhsS05pZv2GCzhwxKzP1iOwWorgQJX9SiAGCXVmsMPFwSecTMwE9XtNqack'
'oy3UYqmdDbmqGrRatLlygXgu28hblU3+a3SXwVtpRCtlf94Ur5jUu4g5s8QG0tbfeuH8QD4G1c'
'aiNqvl/FUzzamaZmQOJvWjaZGGSf1osqkMyZQ/ik3lcgeUAwgi/fa8wLLqo0B109gv53mu9Md2'
'bUaYhJnCJCd2kQ0pzTQbn5MLC5FUZJGaGe5+Cn/ffnyN+YH3brnZX+El3A7J/2jwdKzW7zd1oD'
'x/nB7dcvOk35F/Y/mXGzFA/ppAGR+nAqsZiP1irqfrwInI8By649FZXMal4okgKzHSdUlge9c5'
'Q0snk0GE15GwKmlWAVlIZCglRWukGowEw/3VRqRNd32rIekW0SPWnNt4aj/naz0KGQQ7xCk3kT'
'A7xMsFGs+aN86zks5/LpliX5yDEpKrX49azuUeVj56rjzffnWYb3mnLDf7KZC2DoOklLutFhDJ'
'ZLuZEDq3U+d2ExjHHzonH9kLZXIS1svzkANrQA2JY/PRtKxzcbg+G8AcEt1EoKLsskPi2Hy0j6'
'9xJ6ACQAckHXFIHBsCjaujVn1n1MfQ3//pqu+Mhg6QVp20IKjvP+jjJK8rJEbspDfoO4KXua0J'
'yR+kVbMpMfgHfZzEVXbAGfVxjftKxt0jqXEX9ox5Y3cXOAtwN/as+uRFsCfZ8i4a0PPJXuyCCN'
'i/OyQszanH+zgN/MtDJpvHuRe0Yl2yRvBAvbF9h++fCh7Ytjnc5sxXTKop8NFUUtc3XhC3kLo9'
'WyYVVKf1Op4nX1VjQ0X3Nqn1Up0LqEm7Q3FSRYy1r9wdF/qQyy2OgbZVdd6R3gB09FFWUwqrKP'
'JqW1/2SPAxsVJJj4ve6TCOCSdK5j+GH/C1Noy2y2dCjGu1FYb6BII9PVvHhg065AatoZRXC+zd'
'trfO0lWW2ra8ljFgzXUwm5bq2U0qdfABxzHurK2FsSmdlIqwBfwhOFh+9VBXKgvYtwSeFD2pel'
'xcrzpqSZjXURgr5KmfD0NdThBlBtYxFyQREk2QD6qksijrPWrJJDX7gU6NZYrlO0y4Ebcqx1oI'
'kTpnNzTLt3l8jimJ3lxGigPD+GRM4FwqJr7d2WlhGmCgQNRQvWYK35ex34fxnM7qLj3O16g0wb'
'dxhlbbZIKazoCNVTzGrmN09kaQ7pDFudpp6auSvJM1dDmlNEIIfb2Jkmd8pYrrCSEpWkp5aLEk'
'Lrpx5W7vvTfWUF0Pq+dteSJjvumbcR5vkDT/qbtHNEvAjOp8NCSIxXw8r9ctcn7HJ4xFl1rdHv'
'fdCnFbSAskF0qSuEF6KSKmzx/q5CWQosw5hmiGGDC+nWqu2EQmS3IHnOz4yD3JSCeI6vVdb+pi'
'WbL58XdY+aIHGDOJjSEMrOm52WltRjo/BozxzMqAEdPs3nElysvsjp+W356NydvKU235IFK97X'
'LcHI04eXvO3BhtmSaDUcsnWg/r6zCHhYw6f+03RQoHQQ9zev1h7+mapXWT0We6+lYqoLgOoxsX'
'QIXjjpWAMMvjaSsBYZbH0xYxwiyP99miqkMSZiHQFY7hgBsKj8NIPuyABgBCKaFvZATWp/66j4'
'PM/9XNL4M6u2QhZhP3j3+yALOvq6k+q5wyqRJuGNBnhuuC8gC5/IXt9Nd9Nrw8JNGWv+6z4WUN'
'KgBkwssaNAAQwssnBJRXT6C/ybGjP/6X5gxapJ8/kaY6rxG7VCP9/Im0VCD9/AlIxZUOqADQmB'
'z4aNAAQNeRoFQE1K++/lONdDFOGsXX06Po1x3tcriKVOuvJ5EuDcoBZCJdSIf7+0sW6RriSNff'
'J5GuIYl0/X0S6RqSSNff60jXcxk0qJ7UgccR+RJbzb+PjT5Bi3jmk0mIcUjimT2grAEhgLZbfQ'
'cBtO+bABqy3b6jA2gV/gnb/amf6lTtlljTU8lU7ZZY01PJVO0WP+CpZKp2S6zpqWSqkK/3vUs2'
'Vbt5qr6XTNVumarvJVO1W6bqe0lQclj9T/D0V/LCU+Tp/c8+vkhb5Z/g6Y9AtT9W0akC6ZwZkz'
'kQ4EgcAfhN/VFxCUDqzynxBzvbyVUpz5QOZrKGhck/Spg8LGe2P0pW9bAw+UdY1fsdUA4g1G3+'
'WkZgGfWLedbiX0y0uNS+uoTnhPqG5KXV4XzI7rANQSMeqwvKA+SyLaM5YlT4sASNCGRU+LCcwx'
'LIqPBhOYclEFT41QwaVK/JP+2BwjAv7dfk7ToelqXdA8oaUEU6y6rX5X+aa3dYAg2vS/MH+vx1'
'ebt2h8UPfl3ert1hCTQQyKxdZLT+cv5Srd1hXruE36zdYVm7DBp1QFmAzNodUb+Wp7X7G2btIg'
'GUIAV6/MMM/8bifZNeCl/rWgraVbzkC0L3c6nPzp2CdzL5I6JT3pRM/ojolDcli2NEdMqbksUx'
'IjrlTcniGJFDgjcli2NEDgnepBdHRUAZ9eBPVYRHZIk/mB4Fyo09mIjwiCzxBxMRHpEl/mAiws'
'gYfviSifAIi/DDiQiPiAg/nIjwiIjww4kIK/U2iPAHjQgj5/dtec5VezzHvyHCj+b5zoeT+JHc'
'Qb+E8iudXGrhNdegyt4xWqTwzo/7upKZrRVz1DdFYm46ZqqgJV8+0Wb0odi3hnTl7AyyDlZbtN'
'fiEJ6cxJeigE3UiNYgbfwJsYgcNPFcY+c7VRF55iS2jQthLGkEPsr68K02U8FXB3/4fhbX5l7h'
'22BoVgurdQnfmLO+sxJIAqITumaIiLeSRfpoIt5KFumjySJVskgfTRapkkX6aN7ebFGySAlkbr'
'YoWaQEws2WioAy6rGf6iJVskgfS48Ci/SxZJEqWaSPJYtUySJ9LFmkSIP/wCVbpIoX6QeSRapk'
'kX4gWaRKFukHkkU6qn4Xi/RPzSJFIvrvYpFe5v3XHP/GIv2EXqSPu9lZHGK7xMlZ6OPS52bJ/e'
'3/v63QUVmhn0hke1RW6CeSFToqK/QTyQodlRX6iWSFjsoK/USyQkdlhX5Cr9D/kWEYDtv/Izr8'
'dF7l0ul+ErOthVO6JMIUB87HUUcAkVSa47uWls5iTTeCZjWc0IJRCzc2I0TNJrnUXFOHu+7QbX'
'Fbusb3W7sjY0k09OTcEgRnRVcsoJ48IxI6nfjsOed50p0NzpoTh66DubMLi0uW0TqdgMY9oK7g'
'c3sNwtL6VF71qav4jMYCqS2Dr+gCZwFGwdYJB5xR/wlt95X26pQnXNGzVHopDBnTeE8XOAvw5d'
'TfCx1wVv0xty0dcrms62yaQoJc4EVPV5zuC4Tx+8NdYEaLzxgWRUgy6jMQiP+clzoVo6JzP5OW'
'S+jcz+Rt4dlRGQ+BrpLsjlHRuQQyBUxGxfch5P1yX3BUfJ//DCqu5Z1jlMn67E915xgVD+Wz6V'
'HAQ/lssnOMCqc+m+wco+KhfDbZOXBh6E8u2c4xyjvHnyQ7x6jsHH+S7ByjsnP8id45fhVbQ1F9'
'EVvH39HWMfaPWX/ahn3tkT3UVGDjCQlX7QGPZaJcJNVJ7TimD/RtfTMkqSioP15gLvnbNL7jx8'
'9KXUbc3+H7TLZMbBQ1TF3ZWJQtn+txKUMQOOt8d4Pvdcbl1CX4LhLqzdSXOvQbuladnHFo+hK0'
'x48LivEJraMIk/40TVezmWhzeykan5iQw00udMPL7JxbCtLWizTFJnWZNFyT+mKei/z/WZZ/o5'
'r94xCb/w5d+3Gd2eMWj0hVmEyOFLmIqNTIsXOpizWvSeEKnA/VovaUKTVVM7nq9Xg5KY5T119+'
'8eurq87bLsqmU2bSH6+FJBSm/I3+OhgmLCUJSFuLu5NFUathjmZg8mf9f1NajaLSpM7Refkk/V'
'4JWuWV4AGCgRgGvbJzv23iv9qhyPPxenlc3pkoo6Ws6KJUuieWevKFyKKtdP/foOpKrOoskFY/'
'g4e6wHmAd4sSTsAZgPeqA13gHMAoCux2mFF/BczXpNpCaf5Vb4cIGv2VXsppMCPBFbI0OAcw6s'
'SNMBij+zKk6IBwQY/sy4leK0rU9MvQa3scUAagvaJVijIaAiHFg7//V+ShfBWoDuH7f0vpc+6d'
'JXQSs7+1TlKH9cHZNmxsRudDqJKWh+1KlwTm8qtB7Nc6LZ2gJUd2c3LfR74IqNWC5A/LBwaToY'
'GtX02PFiz9at4mtBSFnV/N2xutRWElga6jfc2wMqueAKYJ2wZbxBNp5HyUkkYOLj0B5Nc6oBxA'
'KLhjkOfU3wDTuG2DQ7a/SSPHIdvf5G0mpQZlABqVi+UaxLhQvt0g71N/C0xJmz4D8hxQHiCXcp'
'ww/W3eXlTUoBxAroTl1dfytog2Awj519LI87qVSzkOgr4Gyg84oBxApoh2kTfcrwPT9bYNn8+k'
'kaMUztfTlPP5DCi/2gHlAEId9schvnvUd7ADfrefdsD7/LlmNdiMpYxxvalvhMntwY6kupsP7+'
'mcWanMh8wASWJDmfNG2FXl3N8KnMJH5Ki84qdZODqhhnPSQLjeUHDH9Ds6XvOaUf7N1fj7Faq6'
'P6Vowz8VScncelKDO/A366HOz0ijTQoz8qh5wLgE1SLVuxk1a1Ke0TnfTgpY20tQDlfrsdRslU'
'8rJd96oh/zs3P8DcGafHgvxBFs+rZnUnFASh/WN+rUK3BFDfuZLCmeOkmeAT4IJXfz9BDsjZSL'
'3hMEb6XWHD0y10te7fmnQr7SGEXnUT+Zy20nqdvJuBn706G6V+6q3Huv/Qf/d++9eBjIw5Uq/0'
'O88Fd9f2297sEftYWjbckrokfPp762E2+ShelzeSs//Z+7X/r+vwkm6xP0j3/zpH/jpH+M/td/'
'ObeDOt9ajxq9AyvLiytdL076N+NdvNgIVsIGuX8y+gn9SnWy1vPK88wr+iulmk3SPpxc7Wl/1L'
'TXZYaJn9J4bXK9p/FNtrGu0Dt+dMJ8lQdsmqJlYNgmeS726wM2R1qSptrk16/KV1QlJ4QLUPqu'
'0OvvREp96np7wrn/1zFJaboCIl9roWUm6c+xLt3u+4gy6HSrsFltRHG6RqtcCtS2GPKgXCHnbN'
'B2vZUUOObU6Op5f3wziuP6SsMWcufQiUlnSmw4p+i8NmO56LC+0CppQZZdWyjfreWLuWaPEUuJ'
'+1KyXOSQik0W5ppdTc2tMqbhtKHFCnHipdoLlejLMFRnA8cmHdhUsLX8cz+mw1+13Gyxm4+OdX'
'11O3wuOCefsvA3opijNtHKhXrUiQ1zzQdl9dhqJeFrsIZUMVOl2hQ2d2tyu9OQ/uQPPtyL6q1S'
'89+p+r3DqNOieijWy9sktumrVlxqWqQKmT/aBJe2WlY0RSIuznhCFOVdTdfGdBlofDqdgAgsKy'
'FthSxGYut1c0Zf5Y7Xg5Z2lbqqxptENV3tmt/hQd6t86l0Xliw04jdYcbRhmSPdbcEZuuoIoXV'
'N1/LYhRwAonaYIdF5JfWWlFnsyTuOStJLnMcaA2FkTkfAbArM/X1pqTIbiLRQJRsmHX94cy2UX'
'w6Ex9IpUZkvcUeMhm5NmHWfvqJGDWTFHDTnyfjSzpibTvLSOxi2rtXghWdIEuDr681OdDIZeM5'
'DktdRqbUjhMo0dV4cDF6EqY43wjRieVIpbO9VHWZOF9/6amKxLyk+i5fFRKTfI84IGyIuKB+gI'
'wDskccEALtlYTyPeKAEAgfcCkyCD7yI8D0nX7JHt8jbh5B4ea9bsDCYP78fj/5V5Nj3+13vzsi'
'3z9AGW+R5YtZceZGv3zNzbMM4E8fOJdZJHLtxKHlu45a40npQ05ZDLQKhRmp78/jE9o7UmDqPE'
'FdSV1NCVkmdhavcWeF4xYIoWeLgHfGm3hnvJW3US9R3Me1fm6EZVMbAZM8fhNtqEeO8HvmZm2Z'
'RzV+64S1J6gBUNoG2JfHk8fc4GiS/mkW9w5DTHWuK0S4LLyZqbT7cjd/Ui/f7t+MdOJmTzNNfy'
'/yY2nkO32vyDfXgY8J6h0/asRmSQ/6ozsag9xWrhcnGkNXXGCxSL5AbffdKPlwqERt5ldh20o2'
'lxaiRoDcUS2MPZOPmbei0bV5pw2+ZAcYtzVWreb3zHLRtp5RZnx/yZyfbtSrUSNqTsj1hj1OcI'
'XX4lAXOA+w+XLfHie4QuA9ElHf4wRXCIyIehpcAPgqdYN3RRpM/js9OKAOe3+ZdZ5k1Ce1WvhU'
'1txaXucP0ugoA5K9Q/0JkU7LmmvHpfB/gxTCpPxNQ+1sNCfxxcgaP0js30knxzmI4w4KOPDujq'
'9oW0QTk/yqxmO/doMzK7mcRgreKYKvZ4kv0skdveo2iUOSRQ6cWlI1SntQBZQPhK1oSh+xwICx'
'Wf6op8+7jRTUxw0BD6dokbZV5LyqVo9JE23XzfefO/oitjsTCL18sneWEX75ZO8sZ/REdM8ywj'
'Cf7J1lHAt8sneWMzzLn9Sz/NiQ8ySrngQpE2NvHrJfwFhkFxc76Ty5puloqa1a7awCUzU/4Np0'
'2/B/N8R4ZwOpnhznsZXA33DWOjpZSRIr4BdMSrv5jpNZRK4tgaIwXChb9lw+MujtFxc9aScOqx'
'2+zolmsa4pjLKFLH2ejrJ1vaX3fdteH9O2JVTPH9y2rgd3CoIaZEsTcBnfdwbdy5YgRoBLJNhQ'
'g7VWsLnOZNsGLJiaAM8waxynUjDUaARNfUejHU3oQwJ9v8Ksu7LeZi1uvjhjgtj4viaqfXUPJk'
'o4lmzQ2jZJ3BNb5WCBb0StJ6/IpSW3gtJt9uFG0DqPFaWPEI4cmdB+XMzfqQ7Z4RALU9vFhg+T'
'hoeQh7YUe2OhwReRSG7q8Xkv+faKQderhdmH5KrIEIwouYWrIxJku5EiORNuMU9YcuUqd3INnL'
'/qp7/eZL41k9qsuI6QDVzwfj4jw3cFF6F3z7/oYwTld0BLYL2j7rQ/rgQP0MObbntatA+YXqeb'
'4gqAEz1tngbHKzv3C45nwmRaOh9a76zQ2iC4NjkEwawsDCsn+pNprTXnq+EQeCMEyJloBXW+lW'
'NERFDpXn3zvvvh7pZWRSuNoHleC71ZDXLdWVuVjAYuTPmZyUuWln+svOOc6Ga3+8/Ts3LYP+EK'
'tuUWm4OH9bc9eNj+KRmrEe9YmhghFwOm7B8+8rSYxW2hN4lO1GKVF7oESz8kQm+xsyIZJ36ta/'
'hx18aFePmTvRsXIt1P9tuznwScAXivurYLnAMYIf49DjinvgXMh1NtEer/Vm+HCPd/q7dDhPy/'
'hQ6v6wIz7nE1keqwT30bmI+l2vYZ8FAXOA9wd4c4Bvg2OpzqAucAvlEd9b6NgPpe9f1+ZKwPqA'
'xyQ5JPm2pN29C+4np9k2a7vYWLRen7fzpogOpo6UC7+XLNdPLlCrtnu9WP4jiq1gN7BGk/1WV7'
'8dzIfZILYT4+w5Ywf+sDYpskystLqfo2OsiO2oc05oK60vtZ/gkn84f9XPT0Ptwsm7aZUWZvi3'
'XQgqMZsJ/D+3kz6uIEBxfNZuWZiI4pWUaLyCFGPOu9Ylf/MPGs94pn/UN41pc5oAxAl0tV5r1i'
'SxMIn8MsMgie9T8A0y8PiGe9Vzzrf4Bnvdd7c8bCMOh/0ib0z7mONWcap/fb7iMUdxjOxxE541'
'4fFrAxYGJhQbfNVjaHGGUdJmP5tFQRN/4pEfIEnAfY2J8JOAOwsT8TcA5gY38m4ALAxv50wPhO'
'h7Y/l5wHGfULA0TJtWN3dHOI5Ynr92t/zHw5a0dOdY0QFjbjHeoC5wE2yzgBMxV71cEucA5gfE'
'j7VQ44q14DzAfG1ropZodFmx6rCMTR3OJTOTa8mZZl+aABr//krolzyKuzMLpGBr31mt6RQQW/'
'ZqBn7sBfAptPyyfgHMA4sP6yK6459Qag3j/2uUyPvEoK5LMZma/vWz/NyBiLLnMVNs1v56ga2m'
'kziNuO047MvwvwvPjrQ+Py5VJd8sQ43Kw0bmeUU9oSnOhiHzaUN/SyDxvKG3rZhw3lDWDf5V1g'
'ZtSVauz/A3VwEtY=')))
_INDEX = {
f.name: {
'descriptor': f,
'services': {s.name: s for s in f.service},
}
for f in FILE_DESCRIPTOR_SET.file
}
DiscoveryServiceDescription = {
'file_descriptor_set': FILE_DESCRIPTOR_SET,
'file_descriptor': _INDEX[u'service.proto']['descriptor'],
'service_descriptor': _INDEX[u'service.proto']['services'][u'Discovery'],
}
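# DiscoveryServiceDescription bundles the full FileDescriptorSet with the
# descriptors parsed from service.proto, so a pRPC server can serve
# reflection/discovery requests for the Discovery service.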
|
rmoorman/appier
|
src/appier/export.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Hive Appier Framework
# Copyright (c) 2008-2015 Hive Solutions Lda.
#
# This file is part of Hive Appier Framework.
#
# Hive Appier Framework is free software: you can redistribute it and/or modify
# it under the terms of the Apache License as published by the Apache
# Foundation, either version 2.0 of the License, or (at your option) any
# later version.
#
# Hive Appier Framework is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Apache License for more details.
#
# You should have received a copy of the Apache License along with
# Hive Appier Framework. If not, see <http://www.apache.org/licenses/>.
__author__ = "João Magalhães <joamag@hive.pt>"
""" The author(s) of the module """
__version__ = "1.0.0"
""" The version of the module """
__revision__ = "$LastChangedRevision$"
""" The revision number of the module """
__date__ = "$LastChangedDate$"
""" The last change date of the module """
__copyright__ = "Copyright (c) 2008-2015 Hive Solutions Lda."
""" The copyright for the module """
__license__ = "Apache License, Version 2.0"
""" The license for the module """
import os
import json
import zipfile
import tempfile
from . import legacy
try: import bson
except ImportError: bson = None
IGNORE = 1
""" Ignore strategy for conflict solving in the import operation;
this strategy skips importing a document that has the same
key value as one that already exists in the collection """
OVERWRITE = 2
""" Strategy for conflict solving that (completely) overwrites a
previously existing document in the data source if it has the same
key value as the one being imported; this should be used carefully
as it may cause data loss """
DUPLICATE = 3
""" Conflict solving strategy that simply duplicates the entries
in the data source even if they have the same key value; this may
create a somewhat inconsistent state and so must be used carefully """
JOIN = 4
""" Join strategy for conflict solving in document collision that
adds new fields or updates existing fields in a previously
existing document; this strategy does not remove extra fields existing
in the previous document """
class ExportManager(object):
db = None
single = None
multiple = None
def __init__(self, db, single = (), multiple = ()):
self.db = db
self.single = single
self.multiple = multiple
def import_data(self, file_path, policy = IGNORE):
temporary_path = tempfile.mkdtemp()
base_path = temporary_path
single_path = os.path.join(base_path, "settings")
self._deploy_zip(file_path, temporary_path)
for name, key in self.single:
collection = self.db[name]
source_path = os.path.join(single_path, "%s.json" % name)
file = open(source_path, "rb")
try: data = file.read()
finally: file.close()
self._import_single(
collection,
data,
key = key,
policy = policy
)
for name, key in self.multiple:
source_directory = os.path.join(base_path, name)
if not os.path.exists(source_directory): continue
collection = self.db[name]
items = os.listdir(source_directory)
data = []
for item in items:
value, _extension = os.path.splitext(item)
source_path = os.path.join(source_directory, item)
file = open(source_path, "rb")
try: _data = file.read()
finally: file.close()
data.append((value, _data))
self._import_multiple(
collection,
data,
key = key,
policy = policy
)
def export_data(self, file_path):
temporary_path = tempfile.mkdtemp()
base_path = temporary_path
single_path = os.path.join(base_path, "settings")
if not os.path.exists(single_path): os.makedirs(single_path)
for name, key in self.single:
collection = self.db[name]
data = self._export_single(collection, key)
target_path = os.path.join(single_path, "%s.json" % name)
file = open(target_path, "wb")
try: file.write(data)
finally: file.close()
for name, key in self.multiple:
collection = self.db[name]
data = self._export_multiple(collection, key)
target_directory = os.path.join(base_path, name)
if not os.path.exists(target_directory): os.makedirs(target_directory)
for value, _data in data:
target_path = os.path.join(target_directory, "%s.json" % value)
file = open(target_path, "wb")
try: file.write(_data)
finally: file.close()
self._create_zip(file_path, temporary_path)
def _import_single(self, collection, data, key, policy = IGNORE):
# loads the provided json data as a sequence of key value items
# and then starts loading all the values into the data source
data = data.decode("utf-8")
data_s = json.loads(data)
for _key, entity in data_s.items():
# verifies if the "native" object id value for the mongo
# database exists and if that's the case tries to convert
# the value from the "underlying" string value to object
# identifier, defaulting to a string value if it fails
if "_id" in entity:
                try: entity["_id"] = bson.ObjectId(entity["_id"])
                except Exception: pass
# retrieves the key value for the current entity to
# be inserted and then tries to retrieve an existing
# entity for the same key, to avoid duplicated entry
value = entity.get(key, None)
if value: entity_e = collection.find_one({key : value})
else: entity_e = None
# in case there's no existing entity for the same key
# (normal situation) only need to insert the new entity
# otherwise must apply the selected conflict policy for
# the resolution of the data source conflict
if not entity_e: collection.insert(entity)
elif policy == IGNORE: continue
elif policy == OVERWRITE:
collection.remove({key : value})
collection.insert(entity)
elif policy == DUPLICATE:
collection.insert(entity)
elif policy == JOIN:
if "_id" in entity: del entity["_id"]
collection.update({
"_id" : entity_e["_id"]
}, {
"$set" : entity
})
def _import_multiple(self, collection, data, key, policy = IGNORE):
# iterates over the complete set of data element to load
# the json contents and then load the corresponding entity
# value into the data source
for _value, _data in data:
# loads the current data in iteration from the file
# as the entity to be loaded into the data source
_data = _data.decode("utf-8")
entity = json.loads(_data)
# verifies if the "native" object id value for the mongo
# database exists and if that's the case tries to convert
# the value from the "underlying" string value to object
# identifier, defaulting to a string value if it fails
if "_id" in entity:
                try: entity["_id"] = bson.ObjectId(entity["_id"])
                except Exception: pass
# retrieves the key value for the current entity to
# be inserted and then tries to retrieve an existing
# entity for the same key, to avoid duplicated entry
value = entity.get(key, None)
if value: entity_e = collection.find_one({key : value})
else: entity_e = None
# in case there's no existing entity for the same key
# (normal situation) only need to insert the new entity
# otherwise must apply the selected conflict policy for
# the resolution of the data source conflict
if not entity_e: collection.insert(entity)
elif policy == IGNORE: continue
elif policy == OVERWRITE:
collection.remove({key : value})
collection.insert(entity)
elif policy == DUPLICATE:
collection.insert(entity)
elif policy == JOIN:
if "_id" in entity: del entity["_id"]
collection.update({
"_id" : entity_e["_id"]
}, {
"$set" : entity
})
def _export_single(self, collection, key = "_id"):
entities = collection.find()
_entities = {}
for entity in entities:
value = entity[key]
value_s = self._to_key(value)
_entities[value_s] = entity
data = json.dumps(_entities, cls = MongoEncoder)
data = legacy.bytes(data)
return data
def _export_multiple(self, collection, key = "_id"):
entities = collection.find()
for entity in entities:
value = entity[key]
value_s = self._to_key(value)
value_s = self._escape_key(value_s)
_data = json.dumps(entity, cls = MongoEncoder)
_data = legacy.bytes(_data)
yield (value_s, _data)
def _to_key(self, key):
key_t = type(key)
if key_t in legacy.STRINGS: return key
key = legacy.UNICODE(key)
return key
def _escape_key(self, key):
return key.replace(":", "_")
def _deploy_zip(self, zip_path, path):
zip_file = zipfile.ZipFile(
zip_path,
mode = "r",
compression = zipfile.ZIP_DEFLATED
)
try: zip_file.extractall(path)
finally: zip_file.close()
def _create_zip(self, zip_path, path):
zip_file = zipfile.ZipFile(
zip_path,
mode = "w",
compression = zipfile.ZIP_DEFLATED
)
        try:
            names = os.listdir(path)
            for name in names:
                _path = os.path.join(path, name)
                is_file = os.path.isfile(_path)
                # store top level files under their base name instead of the
                # temporary directory's absolute path
                if is_file: zip_file.write(_path, name)
                else: self.__add_to_zip(zip_file, _path, base = path)
        finally:
            zip_file.close()
def __add_to_zip(self, zip_file, path, base = ""):
        names = os.listdir(path)
        for name in names:
_path = os.path.join(path, name)
_path_out = _path[len(base):]
_path_out = _path_out.replace("\\", "/")
_path_out = _path_out.strip("/")
if os.path.isfile(_path):
zip_file.write(_path, _path_out)
elif os.path.isdir(_path):
self.__add_to_zip(zip_file, _path, base = base)
class MongoEncoder(json.JSONEncoder):
    def default(self, obj, **kwargs):
        if bson and isinstance(obj, bson.objectid.ObjectId): return str(obj)
        else: return json.JSONEncoder.default(self, obj, **kwargs)
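# A minimal sketch (assumes a pymongo-style database object; the collection
# and key names are hypothetical) of round-tripping data with this manager:
#
#   manager = ExportManager(
#       db,
#       single = (("config", "name"),),
#       multiple = (("accounts", "username"),)
#   )
#   manager.export_data("backup.zip")
#   manager.import_data("backup.zip", policy = JOIN)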
|
Bloomie/murano-repository
|
muranorepository/api/v1.py
|
# Copyright (c) 2013 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from flask import Blueprint, make_response, send_file
from flask import jsonify, request, abort
from werkzeug import secure_filename
from muranorepository.utils.parser import ManifestParser
from muranorepository.utils.archiver import Archiver
from muranorepository.consts import DATA_TYPES, MANIFEST
from oslo.config import cfg
CONF = cfg.CONF
v1_api = Blueprint('v1', __name__)
@v1_api.route('/client/ui')
def get_ui_data():
parser = ManifestParser(CONF.manifests)
manifests = parser.parse()
archive_name = Archiver().create(manifests, "ui")
return send_file(archive_name)
@v1_api.route('/client/conductor')
def get_conductor_data():
parser = ManifestParser(CONF.manifests)
manifests = parser.parse()
archive_name = Archiver().create(manifests,
"heat",
"agent",
"scripts")
return send_file(archive_name)
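# Illustrative requests against these endpoints, assuming the blueprint is
# mounted under /v1 (the data type names are examples only):
#
#   GET  /v1/client/ui         -> archive with UI definitions
#   GET  /v1/client/conductor  -> archive with heat/agent/scripts data
#   GET  /v1/admin/manifests   -> JSON listing of manifest files
#   POST /v1/admin/scripts     -> upload via the multipart 'files' field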
@v1_api.route('/admin/<data_type>', methods=['GET', 'POST'])
def get_data_type_locations(data_type):
####### validation ########
if data_type not in DATA_TYPES:
abort(404)
result_path = os.path.join(CONF.manifests, getattr(CONF, data_type))
####### end validation ########
if request.method == 'GET':
locations = []
if data_type == MANIFEST:
for item in os.listdir(result_path):
if '-manifest' in item:
locations.append(item)
else:
for path, subdirs, files in os.walk(result_path):
for name in files:
locations.append(name)
result = {data_type: locations}
return jsonify(result)
    if request.method == 'POST':
        file_to_upload = request.files.get('files')
        if not file_to_upload:
            abort(403)
        try:
            filename = secure_filename(file_to_upload.filename)
            file_to_upload.save(os.path.join(result_path, filename))
        except Exception:
            abort(403)
        return jsonify(result="success")
@v1_api.route('/admin/<data_type>/<path:path>', methods=['GET', 'POST'])
def get_data_type_locations_by_path_or_get_file(data_type, path):
if data_type not in DATA_TYPES:
abort(404)
    result_path = os.path.join(CONF.manifests,
                               getattr(CONF, data_type),
                               path)
if not os.path.exists(result_path):
abort(404)
if request.method == 'GET':
locations = []
if os.path.isfile(result_path):
return send_file(result_path)
else:
for file in os.listdir(result_path):
locations.append(file)
result = {data_type: locations}
return jsonify(result)
if request.method == 'POST':
file_to_upload = request.files.get('files')
if file_to_upload:
filename = secure_filename(file_to_upload.filename)
file_to_upload.save(os.path.join(result_path, filename))
return jsonify(result="success")
else:
abort(403)
@v1_api.route('/admin/<data_type>/<path:path>', methods=['PUT', 'DELETE'])
def create_dirs(data_type, path):
if data_type not in DATA_TYPES:
abort(404)
result_path = os.path.join(CONF.manifests, getattr(CONF, data_type), path)
if request.method == 'PUT':
resp = make_response()
if os.path.exists(result_path):
return resp
if data_type == MANIFEST:
abort(403)
try:
os.makedirs(result_path)
        except Exception:
abort(403)
return resp
if request.method == 'DELETE':
if not os.path.exists(result_path):
abort(404)
if os.path.isfile(result_path):
try:
os.remove(result_path)
            except Exception:
abort(404)
else:
try:
os.rmdir(result_path)
            except Exception:
abort(403)
resp = make_response()
return resp
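# A minimal sketch (assumed wiring; configuration options are registered
# elsewhere in the project) of mounting this blueprint on a Flask app:
#
#   from flask import Flask
#   app = Flask(__name__)
#   app.register_blueprint(v1_api, url_prefix='/v1')
#   app.run()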
|
cloud-fan/spark
|
python/docs/source/conf.py
|
# -*- coding: utf-8 -*-
#
# pyspark documentation build configuration file, created by
# sphinx-quickstart on Thu Aug 28 15:17:47 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shutil
import errno
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('.'))
# Remove previously generated rst files. Ignore errors just in case it stops
# generating whole docs.
shutil.rmtree(
"%s/reference/api" % os.path.dirname(os.path.abspath(__file__)), ignore_errors=True)
shutil.rmtree(
"%s/reference/pyspark.pandas/api" % os.path.dirname(os.path.abspath(__file__)),
ignore_errors=True)
try:
os.mkdir("%s/reference/api" % os.path.dirname(os.path.abspath(__file__)))
except OSError as e:
if e.errno != errno.EEXIST:
raise
try:
os.mkdir("%s/reference/pyspark.pandas/api" % os.path.dirname(
os.path.abspath(__file__)))
except OSError as e:
if e.errno != errno.EEXIST:
raise
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = '1.2'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.viewcode',
'sphinx.ext.mathjax',
'sphinx.ext.autosummary',
'nbsphinx', # Converts Jupyter Notebook to reStructuredText files for Sphinx.
# For ipython directive in reStructuredText files. It is generated by the notebook.
'IPython.sphinxext.ipython_console_highlighting',
'numpydoc', # handle NumPy documentation formatted docstrings.
    'sphinx_plotly_directive',  # For visualizing plot results
]
# plotly plot directive
plotly_include_source = True
plotly_html_show_formats = False
plotly_html_show_source_link = False
plotly_pre_code = """import numpy as np
import pandas as pd
import pyspark.pandas as ps"""
numpydoc_show_class_members = False
# Links used globally in the RST files.
# These are defined here to allow link substitutions dynamically.
rst_epilog = """
.. |binder| replace:: Live Notebook
.. _binder: https://mybinder.org/v2/gh/apache/spark/{0}?filepath=python%2Fdocs%2Fsource%2Fgetting_started%2Fquickstart.ipynb
.. |examples| replace:: Examples
.. _examples: https://github.com/apache/spark/tree/{0}/examples/src/main/python
.. |downloading| replace:: Downloading
.. _downloading: https://spark.apache.org/docs/{1}/#downloading
.. |building_spark| replace:: Building Spark
.. _building_spark: https://spark.apache.org/docs/{1}/building-spark.html
""".format(
os.environ.get("GIT_HASH", "master"),
os.environ.get("RELEASE_VERSION", "latest"),
)
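# For example (hypothetical values), building with GIT_HASH=v3.2.0 and
# RELEASE_VERSION=3.2.0 resolves the |downloading| substitution above to
# https://spark.apache.org/docs/3.2.0/#downloading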
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'PySpark'
copyright = ''
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = 'master'
# The full version, including alpha/beta/rc tags.
release = os.environ.get('RELEASE_VERSION', version)
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build', '.DS_Store', '**.ipynb_checkpoints']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for autodoc --------------------------------------------------
# Look at the first line of the docstring for function and method signatures.
autodoc_docstring_signature = True
autosummary_generate = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'pydata_sphinx_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = "../../../docs/img/spark-logo-reverse.png"
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
html_css_files = [
'css/pyspark.css',
]
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
html_domain_indices = False
# If false, no index is generated.
html_use_index = False
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'pysparkdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'pyspark.tex', 'pyspark Documentation',
'Author', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'pyspark', 'pyspark Documentation',
['Author'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'pyspark', 'pyspark Documentation',
'Author', 'pyspark', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = 'pyspark'
epub_author = 'Author'
epub_publisher = 'Author'
epub_copyright = '2014, Author'
# The basename for the epub file. It defaults to the project name.
#epub_basename = 'pyspark'
# The HTML theme for the epub output. Since the default themes are not optimized
# for small screen space, using the same theme for HTML and epub output is
# usually not wise. This defaults to 'epub', a theme designed to save visual
# space.
#epub_theme = 'epub'
# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# A sequence of (type, uri, title) tuples for the guide element of content.opf.
#epub_guide = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files that should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
# Choose between 'default' and 'includehidden'.
#epub_tocscope = 'default'
# Fix unsupported image types using the PIL.
#epub_fix_images = False
# Scale large images.
#epub_max_image_width = 0
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#epub_show_urls = 'inline'
# If false, no index is generated.
#epub_use_index = True
def setup(app):
# The app.add_javascript() is deprecated.
getattr(app, "add_js_file", getattr(app, "add_javascript"))('copybutton.js')
# Skip sample endpoint link (not expected to resolve)
linkcheck_ignore = [r'https://kinesis.us-east-1.amazonaws.com']
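# To build these docs locally (assumed invocation; requires the Python doc
# dependencies from the Spark checkout to be installed):
#
#   cd python/docs && make html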
|
amwelch/a10sdk-python
|
a10sdk/core/cgnv6/cgnv6_ddos_protection_stats.py
|
from a10sdk.common.A10BaseClass import A10BaseClass
class Stats(A10BaseClass):
"""This class does not support CRUD Operations please use parent.
:param ip_other_block_alloc: {"optional": true, "size": "8", "type": "number", "oid": "17", "format": "counter"}
:param entry_match_drop: {"optional": true, "size": "8", "type": "number", "oid": "6", "format": "counter"}
:param ip_port_block_free: {"optional": true, "size": "8", "type": "number", "oid": "15", "format": "counter"}
:param ip_node_alloc_failure: {"optional": true, "size": "8", "type": "number", "oid": "13", "format": "counter"}
:param entry_list_alloc_failure: {"optional": true, "size": "8", "type": "number", "oid": "10", "format": "counter"}
:param ip_node_alloc: {"optional": true, "size": "8", "type": "number", "oid": "11", "format": "counter"}
:param entry_added_shadow: {"optional": true, "size": "8", "type": "number", "oid": "20", "format": "counter"}
:param ip_port_block_alloc_failure: {"optional": true, "size": "8", "type": "number", "oid": "16", "format": "counter"}
:param ip_other_block_alloc_failure: {"optional": true, "size": "8", "type": "number", "oid": "19", "format": "counter"}
:param entry_removed_from_hw: {"optional": true, "size": "8", "type": "number", "oid": "4", "format": "counter"}
:param entry_deleted: {"optional": true, "size": "8", "type": "number", "oid": "2", "format": "counter"}
:param entry_list_alloc: {"optional": true, "size": "8", "type": "number", "oid": "8", "format": "counter"}
:param entry_list_free: {"optional": true, "size": "8", "type": "number", "oid": "9", "format": "counter"}
:param entry_added_to_hw: {"optional": true, "size": "8", "type": "number", "oid": "3", "format": "counter"}
:param ip_node_free: {"optional": true, "size": "8", "type": "number", "oid": "12", "format": "counter"}
:param entry_added: {"optional": true, "size": "8", "type": "number", "oid": "1", "format": "counter"}
:param ip_other_block_free: {"optional": true, "size": "8", "type": "number", "oid": "18", "format": "counter"}
:param entry_invalidated: {"optional": true, "size": "8", "type": "number", "oid": "21", "format": "counter"}
:param ip_port_block_alloc: {"optional": true, "size": "8", "type": "number", "oid": "14", "format": "counter"}
:param entry_match_drop_hw: {"optional": true, "size": "8", "type": "number", "oid": "7", "format": "counter"}
:param hw_out_of_entries: {"optional": true, "size": "8", "type": "number", "oid": "5", "format": "counter"}
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
"""
def __init__(self, **kwargs):
self.ERROR_MSG = ""
self.b_key = "stats"
self.DeviceProxy = ""
self.ip_other_block_alloc = ""
self.entry_match_drop = ""
self.ip_port_block_free = ""
self.ip_node_alloc_failure = ""
self.entry_list_alloc_failure = ""
self.ip_node_alloc = ""
self.entry_added_shadow = ""
self.ip_port_block_alloc_failure = ""
self.ip_other_block_alloc_failure = ""
self.entry_removed_from_hw = ""
self.entry_deleted = ""
self.entry_list_alloc = ""
self.entry_list_free = ""
self.entry_added_to_hw = ""
self.ip_node_free = ""
self.entry_added = ""
self.ip_other_block_free = ""
self.entry_invalidated = ""
self.ip_port_block_alloc = ""
self.entry_match_drop_hw = ""
self.hw_out_of_entries = ""
        for key, value in kwargs.items():
            setattr(self, key, value)
class DdosProtection(A10BaseClass):
"""Class Description::
Statistics for the object ddos-protection.
Class ddos-protection supports CRUD Operations and inherits from `common/A10BaseClass`.
    This class is the `"PARENT"` class for this module.
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
URL for this object::
    `https://<Hostname|Ip address>/axapi/v3/cgnv6/ddos-protection/stats`.
"""
def __init__(self, **kwargs):
self.ERROR_MSG = ""
self.required=[]
self.b_key = "ddos-protection"
self.a10_url="/axapi/v3/cgnv6/ddos-protection/stats"
self.DeviceProxy = ""
self.stats = {}
        for key, value in kwargs.items():
            setattr(self, key, value)
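# Hedged usage sketch (illustration only, not part of the generated module):
# both classes bind arbitrary keyword arguments as attributes, so a decoded
# API payload can hydrate a stats snapshot directly. The values below are
# fabricated for illustration.
if __name__ == "__main__":
    snapshot = Stats(entry_added=10, entry_deleted=2, hw_out_of_entries=0)
    print(snapshot.b_key, snapshot.entry_added, snapshot.hw_out_of_entries)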
|
mlperf/training_results_v0.7
|
Google/benchmarks/bert/implementations/bert-cloud-TF2.0-tpu-v3-32/modeling/networks/__init__.py
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Networks package definition."""
from REDACTED.tf2_bert.modeling.networks.albert_transformer_encoder import AlbertTransformerEncoder
from REDACTED.tf2_bert.modeling.networks.classification import Classification
from REDACTED.tf2_bert.modeling.networks.encoder_scaffold import EncoderScaffold
from REDACTED.tf2_bert.modeling.networks.masked_lm import MaskedLM
from REDACTED.tf2_bert.modeling.networks.span_labeling import SpanLabeling
from REDACTED.tf2_bert.modeling.networks.transformer_encoder import TransformerEncoder
|
aronsky/home-assistant
|
homeassistant/components/forked_daapd/media_player.py
|
"""This library brings support for forked_daapd to Home Assistant."""
import asyncio
from collections import defaultdict
import logging
from pyforked_daapd import ForkedDaapdAPI
from pylibrespot_java import LibrespotJavaAPI
from homeassistant.components.media_player import MediaPlayerEntity
from homeassistant.components.media_player.const import MEDIA_TYPE_MUSIC
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
CONF_PORT,
STATE_IDLE,
STATE_OFF,
STATE_ON,
STATE_PAUSED,
STATE_PLAYING,
)
from homeassistant.core import callback
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
from homeassistant.util.dt import utcnow
from .const import (
CALLBACK_TIMEOUT,
CONF_LIBRESPOT_JAVA_PORT,
CONF_MAX_PLAYLISTS,
CONF_TTS_PAUSE_TIME,
CONF_TTS_VOLUME,
DEFAULT_TTS_PAUSE_TIME,
DEFAULT_TTS_VOLUME,
DEFAULT_UNMUTE_VOLUME,
DOMAIN,
FD_NAME,
HASS_DATA_REMOVE_LISTENERS_KEY,
HASS_DATA_UPDATER_KEY,
KNOWN_PIPES,
PIPE_FUNCTION_MAP,
SIGNAL_ADD_ZONES,
SIGNAL_CONFIG_OPTIONS_UPDATE,
SIGNAL_UPDATE_DATABASE,
SIGNAL_UPDATE_MASTER,
SIGNAL_UPDATE_OUTPUTS,
SIGNAL_UPDATE_PLAYER,
SIGNAL_UPDATE_QUEUE,
SOURCE_NAME_CLEAR,
SOURCE_NAME_DEFAULT,
STARTUP_DATA,
SUPPORTED_FEATURES,
SUPPORTED_FEATURES_ZONE,
TTS_TIMEOUT,
)
_LOGGER = logging.getLogger(__name__)
WS_NOTIFY_EVENT_TYPES = ["player", "outputs", "volume", "options", "queue", "database"]
WEBSOCKET_RECONNECT_TIME = 30 # seconds
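# These notify types map onto the handlers in ForkedDaapdUpdater._update below;
# note that a "volume" event triggers both the outputs and the player refresh.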
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up forked-daapd from a config entry."""
host = config_entry.data[CONF_HOST]
port = config_entry.data[CONF_PORT]
password = config_entry.data[CONF_PASSWORD]
forked_daapd_api = ForkedDaapdAPI(
async_get_clientsession(hass), host, port, password
)
forked_daapd_master = ForkedDaapdMaster(
clientsession=async_get_clientsession(hass),
api=forked_daapd_api,
ip_address=host,
api_port=port,
api_password=password,
config_entry=config_entry,
)
@callback
def async_add_zones(api, outputs):
zone_entities = []
for output in outputs:
zone_entities.append(ForkedDaapdZone(api, output, config_entry.entry_id))
async_add_entities(zone_entities, False)
remove_add_zones_listener = async_dispatcher_connect(
hass, SIGNAL_ADD_ZONES.format(config_entry.entry_id), async_add_zones
)
remove_entry_listener = config_entry.add_update_listener(update_listener)
if not hass.data.get(DOMAIN):
hass.data[DOMAIN] = {config_entry.entry_id: {}}
hass.data[DOMAIN][config_entry.entry_id] = {
HASS_DATA_REMOVE_LISTENERS_KEY: [
remove_add_zones_listener,
remove_entry_listener,
]
}
async_add_entities([forked_daapd_master], False)
forked_daapd_updater = ForkedDaapdUpdater(
hass, forked_daapd_api, config_entry.entry_id
)
await forked_daapd_updater.async_init()
hass.data[DOMAIN][config_entry.entry_id][
HASS_DATA_UPDATER_KEY
] = forked_daapd_updater
async def update_listener(hass, entry):
"""Handle options update."""
async_dispatcher_send(
hass, SIGNAL_CONFIG_OPTIONS_UPDATE.format(entry.entry_id), entry.options
)
class ForkedDaapdZone(MediaPlayerEntity):
"""Representation of a forked-daapd output."""
def __init__(self, api, output, entry_id):
"""Initialize the ForkedDaapd Zone."""
self._api = api
self._output = output
self._output_id = output["id"]
self._last_volume = DEFAULT_UNMUTE_VOLUME # used for mute/unmute
self._available = True
self._entry_id = entry_id
async def async_added_to_hass(self):
"""Use lifecycle hooks."""
self.async_on_remove(
async_dispatcher_connect(
self.hass,
SIGNAL_UPDATE_OUTPUTS.format(self._entry_id),
self._async_update_output_callback,
)
)
@callback
def _async_update_output_callback(self, outputs, _event=None):
new_output = next(
(output for output in outputs if output["id"] == self._output_id), None
)
self._available = bool(new_output)
if self._available:
self._output = new_output
self.async_write_ha_state()
@property
def unique_id(self):
"""Return unique ID."""
return f"{self._entry_id}-{self._output_id}"
@property
def should_poll(self) -> bool:
"""Entity pushes its state to HA."""
return False
async def async_toggle(self):
"""Toggle the power on the zone."""
if self.state == STATE_OFF:
await self.async_turn_on()
else:
await self.async_turn_off()
@property
def available(self) -> bool:
"""Return whether the zone is available."""
return self._available
async def async_turn_on(self):
"""Enable the output."""
await self._api.change_output(self._output_id, selected=True)
async def async_turn_off(self):
"""Disable the output."""
await self._api.change_output(self._output_id, selected=False)
@property
def name(self):
"""Return the name of the zone."""
return f"{FD_NAME} output ({self._output['name']})"
@property
def state(self):
"""State of the zone."""
if self._output["selected"]:
return STATE_ON
return STATE_OFF
@property
def volume_level(self):
"""Volume level of the media player (0..1)."""
return self._output["volume"] / 100
@property
def is_volume_muted(self):
"""Boolean if volume is currently muted."""
return self._output["volume"] == 0
async def async_mute_volume(self, mute):
"""Mute the volume."""
if mute:
if self.volume_level == 0:
return
self._last_volume = self.volume_level # store volume level to restore later
target_volume = 0
else:
target_volume = self._last_volume # restore volume level
await self.async_set_volume_level(volume=target_volume)
async def async_set_volume_level(self, volume):
"""Set volume - input range [0,1]."""
await self._api.set_volume(volume=volume * 100, output_id=self._output_id)
@property
def supported_features(self):
"""Flag media player features that are supported."""
return SUPPORTED_FEATURES_ZONE
class ForkedDaapdMaster(MediaPlayerEntity):
"""Representation of the main forked-daapd device."""
def __init__(
self, clientsession, api, ip_address, api_port, api_password, config_entry
):
"""Initialize the ForkedDaapd Master Device."""
self._api = api
self._player = STARTUP_DATA[
"player"
] # _player, _outputs, and _queue are loaded straight from api
self._outputs = STARTUP_DATA["outputs"]
self._queue = STARTUP_DATA["queue"]
        self._track_info = defaultdict(
            str
        )  # _track_info is found by matching _player data with _queue data
self._last_outputs = [] # used for device on/off
self._last_volume = DEFAULT_UNMUTE_VOLUME
self._player_last_updated = None
self._pipe_control_api = {}
self._ip_address = (
ip_address # need to save this because pipe control is on same ip
)
self._tts_pause_time = DEFAULT_TTS_PAUSE_TIME
self._tts_volume = DEFAULT_TTS_VOLUME
self._tts_requested = False
self._tts_queued = False
self._tts_playing_event = asyncio.Event()
self._on_remove = None
self._available = False
self._clientsession = clientsession
self._config_entry = config_entry
self.update_options(config_entry.options)
self._paused_event = asyncio.Event()
self._pause_requested = False
self._sources_uris = {}
self._source = SOURCE_NAME_DEFAULT
self._max_playlists = None
async def async_added_to_hass(self):
"""Use lifecycle hooks."""
self.async_on_remove(
async_dispatcher_connect(
self.hass,
SIGNAL_UPDATE_PLAYER.format(self._config_entry.entry_id),
self._update_player,
)
)
self.async_on_remove(
async_dispatcher_connect(
self.hass,
SIGNAL_UPDATE_QUEUE.format(self._config_entry.entry_id),
self._update_queue,
)
)
self.async_on_remove(
async_dispatcher_connect(
self.hass,
SIGNAL_UPDATE_OUTPUTS.format(self._config_entry.entry_id),
self._update_outputs,
)
)
self.async_on_remove(
async_dispatcher_connect(
self.hass,
SIGNAL_UPDATE_MASTER.format(self._config_entry.entry_id),
self._update_callback,
)
)
self.async_on_remove(
async_dispatcher_connect(
self.hass,
SIGNAL_CONFIG_OPTIONS_UPDATE.format(self._config_entry.entry_id),
self.update_options,
)
)
self.async_on_remove(
async_dispatcher_connect(
self.hass,
SIGNAL_UPDATE_DATABASE.format(self._config_entry.entry_id),
self._update_database,
)
)
@callback
def _update_callback(self, available):
"""Call update method."""
self._available = available
self.async_write_ha_state()
@callback
def update_options(self, options):
"""Update forked-daapd server options."""
if CONF_LIBRESPOT_JAVA_PORT in options:
self._pipe_control_api["librespot-java"] = LibrespotJavaAPI(
self._clientsession, self._ip_address, options[CONF_LIBRESPOT_JAVA_PORT]
)
if CONF_TTS_PAUSE_TIME in options:
self._tts_pause_time = options[CONF_TTS_PAUSE_TIME]
if CONF_TTS_VOLUME in options:
self._tts_volume = options[CONF_TTS_VOLUME]
if CONF_MAX_PLAYLISTS in options:
# sources not updated until next _update_database call
self._max_playlists = options[CONF_MAX_PLAYLISTS]
@callback
def _update_player(self, player, event):
self._player = player
self._player_last_updated = utcnow()
self._update_track_info()
if self._tts_queued:
self._tts_playing_event.set()
self._tts_queued = False
if self._pause_requested:
self._paused_event.set()
self._pause_requested = False
event.set()
@callback
def _update_queue(self, queue, event):
self._queue = queue
if (
self._tts_requested
and self._queue["count"] == 1
and self._queue["items"][0]["uri"].find("tts_proxy") != -1
):
self._tts_requested = False
self._tts_queued = True
if (
self._queue["count"] >= 1
and self._queue["items"][0]["data_kind"] == "pipe"
and self._queue["items"][0]["title"] in KNOWN_PIPES
): # if we're playing a pipe, set the source automatically so we can forward controls
self._source = f"{self._queue['items'][0]['title']} (pipe)"
self._update_track_info()
event.set()
@callback
def _update_outputs(self, outputs, event=None):
if event: # Calling without event is meant for zone, so ignore
self._outputs = outputs
event.set()
@callback
def _update_database(self, pipes, playlists, event):
self._sources_uris = {SOURCE_NAME_CLEAR: None, SOURCE_NAME_DEFAULT: None}
if pipes:
self._sources_uris.update(
{
f"{pipe['title']} (pipe)": pipe["uri"]
for pipe in pipes
if pipe["title"] in KNOWN_PIPES
}
)
if playlists:
self._sources_uris.update(
{
f"{playlist['name']} (playlist)": playlist["uri"]
for playlist in playlists[: self._max_playlists]
}
)
event.set()
def _update_track_info(self): # run during every player or queue update
try:
self._track_info = next(
track
for track in self._queue["items"]
if track["id"] == self._player["item_id"]
)
except (StopIteration, TypeError, KeyError):
_LOGGER.debug("Could not get track info")
self._track_info = defaultdict(str)
@property
def unique_id(self):
"""Return unique ID."""
return self._config_entry.entry_id
@property
def should_poll(self) -> bool:
"""Entity pushes its state to HA."""
return False
@property
def available(self) -> bool:
"""Return whether the master is available."""
return self._available
async def async_turn_on(self):
"""Restore the last on outputs state."""
# restore state
await self._api.set_volume(volume=self._last_volume * 100)
if self._last_outputs:
futures = []
for output in self._last_outputs:
futures.append(
self._api.change_output(
output["id"],
selected=output["selected"],
volume=output["volume"],
)
)
await asyncio.wait(futures)
else: # enable all outputs
await self._api.set_enabled_outputs(
[output["id"] for output in self._outputs]
)
async def async_turn_off(self):
"""Pause player and store outputs state."""
await self.async_media_pause()
self._last_outputs = self._outputs
if any(output["selected"] for output in self._outputs):
await self._api.set_enabled_outputs([])
async def async_toggle(self):
"""Toggle the power on the device.
Default media player component method counts idle as off.
We consider idle to be on but just not playing.
"""
if self.state == STATE_OFF:
await self.async_turn_on()
else:
await self.async_turn_off()
@property
def name(self):
"""Return the name of the device."""
return f"{FD_NAME} server"
@property
def state(self):
"""State of the player."""
if self._player["state"] == "play":
return STATE_PLAYING
if self._player["state"] == "pause":
return STATE_PAUSED
if not any(output["selected"] for output in self._outputs):
return STATE_OFF
if self._player["state"] == "stop": # this should catch all remaining cases
return STATE_IDLE
@property
def volume_level(self):
"""Volume level of the media player (0..1)."""
return self._player["volume"] / 100
@property
def is_volume_muted(self):
"""Boolean if volume is currently muted."""
return self._player["volume"] == 0
@property
def media_content_id(self):
"""Content ID of current playing media."""
return self._player["item_id"]
@property
def media_content_type(self):
"""Content type of current playing media."""
return self._track_info["media_kind"]
@property
def media_duration(self):
"""Duration of current playing media in seconds."""
return self._player["item_length_ms"] / 1000
@property
def media_position(self):
"""Position of current playing media in seconds."""
return self._player["item_progress_ms"] / 1000
@property
def media_position_updated_at(self):
"""When was the position of the current playing media valid."""
return self._player_last_updated
@property
def media_title(self):
"""Title of current playing media."""
# Use album field when data_kind is url
# https://github.com/ejurgensen/forked-daapd/issues/351
if self._track_info["data_kind"] == "url":
return self._track_info["album"]
return self._track_info["title"]
@property
def media_artist(self):
"""Artist of current playing media, music track only."""
return self._track_info["artist"]
@property
def media_album_name(self):
"""Album name of current playing media, music track only."""
# Use title field when data_kind is url
# https://github.com/ejurgensen/forked-daapd/issues/351
if self._track_info["data_kind"] == "url":
return self._track_info["title"]
return self._track_info["album"]
@property
def media_album_artist(self):
"""Album artist of current playing media, music track only."""
return self._track_info["album_artist"]
@property
def media_track(self):
"""Track number of current playing media, music track only."""
return self._track_info["track_number"]
@property
def shuffle(self):
"""Boolean if shuffle is enabled."""
return self._player["shuffle"]
@property
def supported_features(self):
"""Flag media player features that are supported."""
return SUPPORTED_FEATURES
@property
def source(self):
"""Name of the current input source."""
return self._source
@property
def source_list(self):
"""List of available input sources."""
return [*self._sources_uris]
async def async_mute_volume(self, mute):
"""Mute the volume."""
if mute:
if self.volume_level == 0:
return
self._last_volume = self.volume_level # store volume level to restore later
target_volume = 0
else:
target_volume = self._last_volume # restore volume level
await self._api.set_volume(volume=target_volume * 100)
async def async_set_volume_level(self, volume):
"""Set volume - input range [0,1]."""
await self._api.set_volume(volume=volume * 100)
async def async_media_play(self):
"""Start playback."""
if self._use_pipe_control():
await self._pipe_call(self._use_pipe_control(), "async_media_play")
else:
await self._api.start_playback()
async def async_media_pause(self):
"""Pause playback."""
if self._use_pipe_control():
await self._pipe_call(self._use_pipe_control(), "async_media_pause")
else:
await self._api.pause_playback()
async def async_media_stop(self):
"""Stop playback."""
if self._use_pipe_control():
await self._pipe_call(self._use_pipe_control(), "async_media_stop")
else:
await self._api.stop_playback()
async def async_media_previous_track(self):
"""Skip to previous track."""
if self._use_pipe_control():
await self._pipe_call(
self._use_pipe_control(), "async_media_previous_track"
)
else:
await self._api.previous_track()
async def async_media_next_track(self):
"""Skip to next track."""
if self._use_pipe_control():
await self._pipe_call(self._use_pipe_control(), "async_media_next_track")
else:
await self._api.next_track()
async def async_media_seek(self, position):
"""Seek to position."""
await self._api.seek(position_ms=position * 1000)
async def async_clear_playlist(self):
"""Clear playlist."""
await self._api.clear_queue()
async def async_set_shuffle(self, shuffle):
"""Enable/disable shuffle mode."""
await self._api.shuffle(shuffle)
@property
def media_image_url(self):
"""Image url of current playing media."""
if url := self._track_info.get("artwork_url"):
url = self._api.full_url(url)
return url
async def _save_and_set_tts_volumes(self):
if self.volume_level: # save master volume
self._last_volume = self.volume_level
self._last_outputs = self._outputs
if self._outputs:
await self._api.set_volume(volume=self._tts_volume * 100)
futures = []
for output in self._outputs:
futures.append(
self._api.change_output(
output["id"], selected=True, volume=self._tts_volume * 100
)
)
await asyncio.wait(futures)
async def _pause_and_wait_for_callback(self):
"""Send pause and wait for the pause callback to be received."""
self._pause_requested = True
await self.async_media_pause()
try:
await asyncio.wait_for(
self._paused_event.wait(), timeout=CALLBACK_TIMEOUT
) # wait for paused
except asyncio.TimeoutError:
self._pause_requested = False
self._paused_event.clear()
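    # Overview of the TTS flow in async_play_media below: pause playback and
    # save volume/queue/position state, enqueue the TTS uri, wait for it to
    # start and finish playing, then restore outputs, queue, and play state.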
async def async_play_media(self, media_type, media_id, **kwargs):
"""Play a URI."""
if media_type == MEDIA_TYPE_MUSIC:
saved_state = self.state # save play state
saved_mute = self.is_volume_muted
sleep_future = asyncio.create_task(
asyncio.sleep(self._tts_pause_time)
) # start timing now, but not exact because of fd buffer + tts latency
await self._pause_and_wait_for_callback()
await self._save_and_set_tts_volumes()
# save position
saved_song_position = self._player["item_progress_ms"]
saved_queue = (
self._queue if self._queue["count"] > 0 else None
) # stash queue
if saved_queue:
saved_queue_position = next(
i
for i, item in enumerate(saved_queue["items"])
if item["id"] == self._player["item_id"]
)
self._tts_requested = True
await sleep_future
await self._api.add_to_queue(uris=media_id, playback="start", clear=True)
try:
await asyncio.wait_for(
self._tts_playing_event.wait(), timeout=TTS_TIMEOUT
)
# we have started TTS, now wait for completion
await asyncio.sleep(
self._queue["items"][0]["length_ms"]
/ 1000 # player may not have updated yet so grab length from queue
+ self._tts_pause_time
)
except asyncio.TimeoutError:
self._tts_requested = False
_LOGGER.warning("TTS request timed out")
self._tts_playing_event.clear()
# TTS done, return to normal
await self.async_turn_on() # restore outputs and volumes
if saved_mute: # mute if we were muted
await self.async_mute_volume(True)
if self._use_pipe_control(): # resume pipe
await self._api.add_to_queue(
uris=self._sources_uris[self._source], clear=True
)
if saved_state == STATE_PLAYING:
await self.async_media_play()
else: # restore stashed queue
if saved_queue:
uris = ""
for item in saved_queue["items"]:
uris += item["uri"] + ","
await self._api.add_to_queue(
uris=uris,
playback="start",
playback_from_position=saved_queue_position,
clear=True,
)
await self._api.seek(position_ms=saved_song_position)
if saved_state == STATE_PAUSED:
await self.async_media_pause()
elif saved_state != STATE_PLAYING:
await self.async_media_stop()
else:
_LOGGER.debug("Media type '%s' not supported", media_type)
async def async_select_source(self, source):
"""Change source.
Source name reflects whether in default mode or pipe mode.
        Selecting a playlist (or clear) loads (or clears) the queue but ends up in default mode.
"""
if source == self._source:
return
if self._use_pipe_control(): # if pipe was playing, we need to stop it first
await self._pause_and_wait_for_callback()
self._source = source
if not self._use_pipe_control(): # playlist or clear ends up at default
self._source = SOURCE_NAME_DEFAULT
if self._sources_uris.get(source): # load uris for pipes or playlists
await self._api.add_to_queue(uris=self._sources_uris[source], clear=True)
elif source == SOURCE_NAME_CLEAR: # clear playlist
await self._api.clear_queue()
self.async_write_ha_state()
    def _use_pipe_control(self):
        """Return which pipe control from KNOWN_PIPES to use."""
        suffix = " (pipe)"
        if self._source.endswith(suffix):
            return self._source[: -len(suffix)]
        return ""
async def _pipe_call(self, pipe_name, base_function_name):
if self._pipe_control_api.get(pipe_name):
return await getattr(
self._pipe_control_api[pipe_name],
PIPE_FUNCTION_MAP[pipe_name][base_function_name],
)()
_LOGGER.warning("No pipe control available for %s", pipe_name)
class ForkedDaapdUpdater:
"""Manage updates for the forked-daapd device."""
def __init__(self, hass, api, entry_id):
"""Initialize."""
self.hass = hass
self._api = api
self.websocket_handler = None
self._all_output_ids = set()
self._entry_id = entry_id
async def async_init(self):
"""Perform async portion of class initialization."""
server_config = await self._api.get_request("config")
if websocket_port := server_config.get("websocket_port"):
self.websocket_handler = asyncio.create_task(
self._api.start_websocket_handler(
websocket_port,
WS_NOTIFY_EVENT_TYPES,
self._update,
WEBSOCKET_RECONNECT_TIME,
self._disconnected_callback,
)
)
else:
_LOGGER.error("Invalid websocket port")
def _disconnected_callback(self):
async_dispatcher_send(
self.hass, SIGNAL_UPDATE_MASTER.format(self._entry_id), False
)
async_dispatcher_send(
self.hass, SIGNAL_UPDATE_OUTPUTS.format(self._entry_id), []
)
async def _update(self, update_types):
"""Private update method."""
update_types = set(update_types)
update_events = {}
_LOGGER.debug("Updating %s", update_types)
if (
"queue" in update_types
): # update queue, queue before player for async_play_media
queue = await self._api.get_request("queue")
if queue:
update_events["queue"] = asyncio.Event()
async_dispatcher_send(
self.hass,
SIGNAL_UPDATE_QUEUE.format(self._entry_id),
queue,
update_events["queue"],
)
        # the order of the remaining updates below doesn't matter
if not {"outputs", "volume"}.isdisjoint(update_types): # update outputs
outputs = await self._api.get_request("outputs")
if outputs:
outputs = outputs["outputs"]
update_events[
"outputs"
] = asyncio.Event() # only for master, zones should ignore
async_dispatcher_send(
self.hass,
SIGNAL_UPDATE_OUTPUTS.format(self._entry_id),
outputs,
update_events["outputs"],
)
self._add_zones(outputs)
if not {"database"}.isdisjoint(update_types):
pipes, playlists = await asyncio.gather(
self._api.get_pipes(), self._api.get_playlists()
)
update_events["database"] = asyncio.Event()
async_dispatcher_send(
self.hass,
SIGNAL_UPDATE_DATABASE.format(self._entry_id),
pipes,
playlists,
update_events["database"],
)
if not {"update", "config"}.isdisjoint(update_types): # not supported
_LOGGER.debug("update/config notifications neither requested nor supported")
if not {"player", "options", "volume"}.isdisjoint(
update_types
): # update player
player = await self._api.get_request("player")
if player:
update_events["player"] = asyncio.Event()
if update_events.get("queue"):
await update_events[
"queue"
].wait() # make sure queue done before player for async_play_media
async_dispatcher_send(
self.hass,
SIGNAL_UPDATE_PLAYER.format(self._entry_id),
player,
update_events["player"],
)
if update_events:
await asyncio.wait(
[asyncio.create_task(event.wait()) for event in update_events.values()]
) # make sure callbacks done before update
async_dispatcher_send(
self.hass, SIGNAL_UPDATE_MASTER.format(self._entry_id), True
)
def _add_zones(self, outputs):
outputs_to_add = []
for output in outputs:
if output["id"] not in self._all_output_ids:
self._all_output_ids.add(output["id"])
outputs_to_add.append(output)
if outputs_to_add:
async_dispatcher_send(
self.hass,
SIGNAL_ADD_ZONES.format(self._entry_id),
self._api,
outputs_to_add,
)