code
stringlengths 3
1.05M
| repo_name
stringlengths 5
104
| path
stringlengths 4
251
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 3
1.05M
|
|---|---|---|---|---|---|
# -*- coding: utf-8 -*-
from scrapy.spider import BaseSpider
from scrapy.selector import Selector
from luooSpiders.items import luooSpidersItem
from scrapy.http import Request
class luooSpider(BaseSpider):
    """Crawl luoo.net tag listing pages and emit one item per track,
    pointing at a guessed third-party MP3 mirror URL."""
    name = "luoo"
    allowed_domains = ["luoo.net"]
    start_urls = ['http://www.luoo.net/tag/?p=1']

    def __init__(self, page=1, *args, **kwargs):
        # `page` (spider argument) is the number of listing pages to crawl.
        super(luooSpider, self).__init__(*args, **kwargs)
        if page:
            self.page = int(page)

    def parse(self, response):
        """Parse one listing page: follow every periodical cover link,
        then schedule the remaining listing pages."""
        print("current url:%s"%response.url)
        periodicals = response.xpath('//a[@class="cover-wrapper"]/@href').extract()
        # Highest page number shown in the pager; caps how far we crawl.
        # NOTE(review): max() over *strings* compares lexicographically
        # ('9' > '10'), so this only works while the pager shows < 10
        # pages -- confirm.
        last_page = int(max(response.xpath('//a[@class="page"]/text()').extract()))
        for periodical in periodicals:
            print("start periodical:%s"%periodical)
            yield Request(periodical, callback=self.parse_item)
        if self.page > last_page:
            self.page = last_page
        current = 1
        # NOTE(review): `current` is incremented before the request is
        # built, so this schedules pages 2..self.page+1 -- the final
        # request looks off by one; confirm intended.
        while current <= self.page:
            current += 1
            yield Request('http://www.luoo.net/tag/?p=%s' % current, callback=self.parse)

    def parse_item(self, response):
        """Parse one periodical page into items, one per track."""
        # Periodical number, taken from the trailing URL segment.
        special = response.url.split("/")[-1]
        container = Selector(response).xpath('/html/body/div[@class="container ct-sm"]')
        items = []
        titles = container.xpath('h1[@class="vol-name"]/\
span[@class="vol-title"]/text()').extract()
        tracks = container.xpath('div[@class="vol-tracklist"]/ul/li[@class="track-item rounded"]/\
div[@class="track-wrapper clearfix"]/a[@class="trackname btn-play"]/text()').extract()
        for index,track in enumerate(tracks):
            item = luooSpidersItem()
            # Mirror URL guessed from the periodical number and the
            # 1-based, zero-padded track position.
            mp3_url = "http://luoo.waasaa.com/low/luoo/radio%s/%s.mp3"%(special,str(index+1).zfill(2))
            # track[3:] presumably strips a "NN. "-style numeric prefix
            # from the track name -- confirm against the page markup.
            path = titles[0]+'/'+track[3:]+'.mp3'
            item['title'] = titles[0]
            item['url'] = mp3_url
            item['path'] = path
            items.append(item)
        return items
|
c4x/LuooSpiders
|
luooSpiders/spiders/luooSpinder.py
|
Python
|
mit
| 2,002
|
import _plotly_utils.basevalidators
class ValueminusValidator(_plotly_utils.basevalidators.NumberValidator):
    """Validator for the ``scatter.error_x.valueminus`` property."""

    def __init__(
        self, plotly_name="valueminus", parent_name="scatter.error_x", **kwargs
    ):
        # Resolve defaults up front so explicit caller overrides win.
        edit_type = kwargs.pop("edit_type", "calc")
        minimum = kwargs.pop("min", 0)
        role = kwargs.pop("role", "info")
        super(ValueminusValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=edit_type,
            min=minimum,
            role=role,
            **kwargs
        )
|
plotly/python-api
|
packages/python/plotly/plotly/validators/scatter/error_x/_valueminus.py
|
Python
|
mit
| 510
|
"""
Functions to generate Theano update dictionaries for training.
Copied from Lasagne. See documentation at:
http://lasagne.readthedocs.org/en/latest/modules/updates.html
"""
from collections import OrderedDict
import numpy as np
import theano
import theano.tensor as T
__all__ = [
"sgd",
"apply_momentum",
"momentum",
"apply_nesterov_momentum",
"nesterov_momentum",
"adagrad",
"rmsprop",
"adadelta",
"adam",
"adamax",
"norm_constraint",
"total_norm_constraint"
]
def get_or_compute_grads(loss_or_grads, params):
    """Return gradient expressions for *params*.

    If *loss_or_grads* is already a list of gradient expressions it is
    returned as-is (after a length check); otherwise it is treated as a
    scalar loss and differentiated with ``theano.grad``.
    """
    # Update rules below attach state (velocities, accumulators) to the
    # parameters, so every parameter must be a shared variable.
    if any(not isinstance(p, theano.compile.SharedVariable) for p in params):
        raise ValueError("params must contain shared variables only. If it "
                         "contains arbitrary parameter expressions, then "
                         "lasagne.utils.collect_shared_vars() may help you.")
    if isinstance(loss_or_grads, list):
        if not len(loss_or_grads) == len(params):
            raise ValueError("Got %d gradient expressions for %d parameters" %
                             (len(loss_or_grads), len(params)))
        return loss_or_grads
    else:
        return theano.grad(loss_or_grads, params)
def sgd(loss_or_grads, params, learning_rate):
    """Stochastic Gradient Descent (SGD) updates
    * ``param := param - learning_rate * gradient``
    """
    gradients = get_or_compute_grads(loss_or_grads, params)
    # One plain gradient step per parameter, in parameter order.
    return OrderedDict(
        (param, param - learning_rate * grad)
        for param, grad in zip(params, gradients)
    )
def apply_momentum(updates, params=None, momentum=0.9):
    """Returns a modified update dictionary including momentum
    * ``velocity := momentum * velocity + updates[param] - param``
    * ``param := param + velocity``
    """
    # By default, apply momentum to every variable in the update dict.
    if params is None:
        params = updates.keys()
    updates = OrderedDict(updates)  # copy so the caller's dict is untouched
    for param in params:
        value = param.get_value(borrow=True)
        # Per-parameter velocity state, matching the parameter's shape/dtype.
        velocity = theano.shared(np.zeros(value.shape, dtype=value.dtype),
                                 broadcastable=param.broadcastable)
        # x is the new parameter value after adding the momentum term.
        x = momentum * velocity + updates[param]
        updates[velocity] = x - param
        updates[param] = x
    return updates
def momentum(loss_or_grads, params, learning_rate, momentum=0.9):
    """Stochastic Gradient Descent (SGD) updates with momentum
    * ``velocity := momentum * velocity - learning_rate * gradient``
    * ``param := param + velocity``
    """
    # Compose plain SGD steps with a classical momentum term.
    sgd_updates = sgd(loss_or_grads, params, learning_rate)
    return apply_momentum(sgd_updates, momentum=momentum)
def apply_nesterov_momentum(updates, params=None, momentum=0.9):
    """Returns a modified update dictionary including Nesterov momentum
    * ``velocity := momentum * velocity + updates[param] - param``
    * ``param := param + momentum * velocity + updates[param] - param``
    """
    # By default, apply momentum to every variable in the update dict.
    if params is None:
        params = updates.keys()
    updates = OrderedDict(updates)  # copy so the caller's dict is untouched
    for param in params:
        value = param.get_value(borrow=True)
        # Per-parameter velocity state, matching the parameter's shape/dtype.
        velocity = theano.shared(np.zeros(value.shape, dtype=value.dtype),
                                 broadcastable=param.broadcastable)
        # x is the new velocity; the parameter update "looks ahead" by
        # applying the momentum-scaled velocity once more.
        x = momentum * velocity + updates[param] - param
        updates[velocity] = x
        updates[param] = momentum * x + updates[param]
    return updates
def nesterov_momentum(loss_or_grads, params, learning_rate, momentum=0.9):
    """Stochastic Gradient Descent (SGD) updates with Nesterov momentum
    * ``velocity := momentum * velocity - learning_rate * gradient``
    * ``param := param + momentum * velocity - learning_rate * gradient``
    """
    # Build the plain SGD step, then layer Nesterov momentum on top.
    base_updates = sgd(loss_or_grads, params, learning_rate)
    return apply_nesterov_momentum(base_updates, momentum=momentum)
def adagrad(loss_or_grads, params, learning_rate=1.0, epsilon=1e-6):
    """Adagrad updates
    Scale learning rates by dividing with the square root of accumulated
    squared gradients. See [1]_ for further description.
    """
    grads = get_or_compute_grads(loss_or_grads, params)
    updates = OrderedDict()
    for param, grad in zip(params, grads):
        value = param.get_value(borrow=True)
        # Running sum of squared gradients, one accumulator per parameter.
        accu = theano.shared(np.zeros(value.shape, dtype=value.dtype),
                             broadcastable=param.broadcastable)
        accu_new = accu + grad ** 2
        updates[accu] = accu_new
        # epsilon guards against division by zero early in training.
        updates[param] = param - (learning_rate * grad /
                                  T.sqrt(accu_new + epsilon))
    return updates
def rmsprop(loss_or_grads, params, learning_rate=1.0, rho=0.9, epsilon=1e-6):
    """RMSProp updates
    Scale learning rates by dividing with the moving average of the root mean
    squared (RMS) gradients. See [1]_ for further description.
    """
    grads = get_or_compute_grads(loss_or_grads, params)
    updates = OrderedDict()
    for param, grad in zip(params, grads):
        value = param.get_value(borrow=True)
        # Exponential moving average of squared gradients (decay rate rho).
        accu = theano.shared(np.zeros(value.shape, dtype=value.dtype),
                             broadcastable=param.broadcastable)
        accu_new = rho * accu + (1 - rho) * grad ** 2
        updates[accu] = accu_new
        # epsilon keeps the denominator away from zero.
        updates[param] = param - (learning_rate * grad /
                                  T.sqrt(accu_new + epsilon))
    return updates
def adadelta(loss_or_grads, params, learning_rate=1.0, rho=0.95, epsilon=1e-6):
    """ Adadelta updates
    Scale learning rates by the ratio of accumulated gradients to accumulated
    step sizes, see [1]_ and notes for further description.
    """
    grads = get_or_compute_grads(loss_or_grads, params)
    updates = OrderedDict()
    for param, grad in zip(params, grads):
        value = param.get_value(borrow=True)
        # accu: accumulate gradient magnitudes
        accu = theano.shared(np.zeros(value.shape, dtype=value.dtype),
                             broadcastable=param.broadcastable)
        # delta_accu: accumulate update magnitudes (recursively!)
        delta_accu = theano.shared(np.zeros(value.shape, dtype=value.dtype),
                                   broadcastable=param.broadcastable)
        # update accu (as in rmsprop)
        accu_new = rho * accu + (1 - rho) * grad ** 2
        updates[accu] = accu_new
        # compute parameter update, using the 'old' delta_accu
        update = (grad * T.sqrt(delta_accu + epsilon) /
                  T.sqrt(accu_new + epsilon))
        updates[param] = param - learning_rate * update
        # update delta_accu (as accu, but accumulating updates)
        delta_accu_new = rho * delta_accu + (1 - rho) * update ** 2
        updates[delta_accu] = delta_accu_new
    return updates
def adam(loss_or_grads, params, learning_rate=0.001, beta1=0.9,
         beta2=0.999, epsilon=1e-8):
    """Adam updates: per-parameter adaptive learning rates from exponential
    moving averages of the gradient (decay beta1) and the squared gradient
    (decay beta2), with bias correction folded into the step size a_t.
    """
    all_grads = get_or_compute_grads(loss_or_grads, params)
    # Shared scalar time step, incremented once per update application.
    t_prev = theano.shared(np.asarray(0, dtype=theano.config.floatX))
    updates = OrderedDict()
    t = t_prev + 1
    # Bias-corrected step size: folds both the 1/(1-beta1^t) and the
    # sqrt(1-beta2^t) corrections into a single scalar factor.
    a_t = learning_rate*T.sqrt(1-beta2**t)/(1-beta1**t)
    for param, g_t in zip(params, all_grads):
        value = param.get_value(borrow=True)
        # First (m) and second (v) moment estimates, one pair per parameter.
        m_prev = theano.shared(np.zeros(value.shape, dtype=value.dtype),
                               broadcastable=param.broadcastable)
        v_prev = theano.shared(np.zeros(value.shape, dtype=value.dtype),
                               broadcastable=param.broadcastable)
        m_t = beta1*m_prev + (1-beta1)*g_t
        v_t = beta2*v_prev + (1-beta2)*g_t**2
        # epsilon stabilises the division for tiny second moments.
        step = a_t*m_t/(T.sqrt(v_t) + epsilon)
        updates[m_prev] = m_t
        updates[v_prev] = v_t
        updates[param] = param - step
    updates[t_prev] = t
    return updates
def adamax(loss_or_grads, params, learning_rate=0.002, beta1=0.9,
           beta2=0.999, epsilon=1e-8):
    """
    A variant of the Adam algorithm based on the infinity norm.
    """
    all_grads = get_or_compute_grads(loss_or_grads, params)
    # Shared scalar time step, incremented once per update application.
    t_prev = theano.shared(np.asarray(0., dtype=theano.config.floatX))
    updates = OrderedDict()
    t = t_prev + 1
    # Only the first-moment bias correction is needed; the infinity-norm
    # accumulator u_t requires none.
    a_t = learning_rate/(1-beta1**t)
    for param, g_t in zip(params, all_grads):
        value = param.get_value(borrow=True)
        # First-moment estimate and exponentially weighted infinity norm.
        m_prev = theano.shared(np.zeros(value.shape, dtype=value.dtype),
                               broadcastable=param.broadcastable)
        u_prev = theano.shared(np.zeros(value.shape, dtype=value.dtype),
                               broadcastable=param.broadcastable)
        m_t = beta1*m_prev + (1-beta1)*g_t
        u_t = T.maximum(beta2*u_prev, abs(g_t))
        step = a_t*m_t/(u_t + epsilon)
        updates[m_prev] = m_t
        updates[u_prev] = u_t
        updates[param] = param - step
    updates[t_prev] = t
    return updates
def norm_constraint(tensor_var, max_norm, norm_axes=None, epsilon=1e-7):
    """Max weight norm constraints and gradient clipping
    This takes a TensorVariable and rescales it so that incoming weight
    norms are below a specified constraint value. Vectors violating the
    constraint are rescaled so that they are within the allowed range.
    """
    ndim = tensor_var.ndim
    # Choose which axes form each per-unit norm: explicit axes win,
    # otherwise infer from dimensionality (dense vs. convolutional layouts).
    if norm_axes is not None:
        sum_over = tuple(norm_axes)
    elif ndim == 2:  # DenseLayer
        sum_over = (0,)
    elif ndim in [3, 4, 5]:  # Conv{1,2,3}DLayer
        sum_over = tuple(range(1, ndim))
    else:
        raise ValueError(
            "Unsupported tensor dimensionality {}."
            "Must specify `norm_axes`".format(ndim)
        )
    dtype = np.dtype(theano.config.floatX).type
    norms = T.sqrt(T.sum(T.sqr(tensor_var), axis=sum_over, keepdims=True))
    # Norms above max_norm get clipped targets; the ratio rescales
    # offending vectors while leaving compliant ones (ratio ~ 1) nearly
    # unchanged. epsilon avoids division by zero for all-zero vectors.
    target_norms = T.clip(norms, 0, dtype(max_norm))
    constrained_output = \
        (tensor_var * (target_norms / (dtype(epsilon) + norms)))
    return constrained_output
def total_norm_constraint(tensor_vars, max_norm, epsilon=1e-7,
                          return_norm=False):
    """Rescales a list of tensors based on their combined norm
    If the combined norm of the input tensors exceeds the threshold then all
    tensors are rescaled such that the combined norm is equal to the threshold.
    """
    # Global L2 norm across every tensor in the list.
    norm = T.sqrt(sum(T.sum(tensor**2) for tensor in tensor_vars))
    dtype = np.dtype(theano.config.floatX).type
    target_norm = T.clip(norm, 0, dtype(max_norm))
    # multiplier ~= 1 below the threshold, < 1 once clipping kicks in;
    # epsilon avoids division by zero for an all-zero gradient.
    multiplier = target_norm / (dtype(epsilon) + norm)
    tensor_vars_scaled = [step*multiplier for step in tensor_vars]
    if return_norm:
        return tensor_vars_scaled, norm
    else:
        return tensor_vars_scaled
|
rakeshvar/rnn_ctc
|
nnet/updates.py
|
Python
|
apache-2.0
| 10,487
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '/home/gugu/w/calibre/src/calibre/gui2/store/stores/mobileread/store_dialog.ui'
#
# Created: Thu Jul 19 23:32:29 2012
# by: PyQt4 UI code generator 4.9.1
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
_fromUtf8 = lambda s: s
class Ui_Dialog(object):
    """Auto-generated PyQt4 UI for the MobileRead store search dialog.

    Generated from store_dialog.ui -- regenerate instead of editing by
    hand (manual changes are overwritten, per the file header warning).
    """

    def setupUi(self, Dialog):
        """Build the widget tree and wire basic signals on *Dialog*."""
        Dialog.setObjectName(_fromUtf8("Dialog"))
        Dialog.resize(691, 614)
        self.verticalLayout = QtGui.QVBoxLayout(Dialog)
        self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
        # Top row: query label, advanced-search button, query box, search button.
        self.horizontalLayout_2 = QtGui.QHBoxLayout()
        self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
        self.label = QtGui.QLabel(Dialog)
        self.label.setObjectName(_fromUtf8("label"))
        self.horizontalLayout_2.addWidget(self.label)
        self.adv_search_button = QtGui.QToolButton(Dialog)
        self.adv_search_button.setObjectName(_fromUtf8("adv_search_button"))
        self.horizontalLayout_2.addWidget(self.adv_search_button)
        # HistoryLineEdit is a calibre widget imported at module bottom,
        # matching the generated-code convention.
        self.search_query = HistoryLineEdit(Dialog)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.search_query.sizePolicy().hasHeightForWidth())
        self.search_query.setSizePolicy(sizePolicy)
        self.search_query.setObjectName(_fromUtf8("search_query"))
        self.horizontalLayout_2.addWidget(self.search_query)
        self.search_button = QtGui.QPushButton(Dialog)
        self.search_button.setObjectName(_fromUtf8("search_button"))
        self.horizontalLayout_2.addWidget(self.search_button)
        self.verticalLayout.addLayout(self.horizontalLayout_2)
        # Middle: flat, sortable tree view listing the search results.
        self.results_view = QtGui.QTreeView(Dialog)
        self.results_view.setAlternatingRowColors(True)
        self.results_view.setRootIsDecorated(False)
        self.results_view.setItemsExpandable(False)
        self.results_view.setSortingEnabled(True)
        self.results_view.setExpandsOnDoubleClick(False)
        self.results_view.setObjectName(_fromUtf8("results_view"))
        self.results_view.header().setCascadingSectionResizes(False)
        self.verticalLayout.addWidget(self.results_view)
        # Bottom row: result count and Close button.
        self.horizontalLayout = QtGui.QHBoxLayout()
        self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
        self.label_2 = QtGui.QLabel(Dialog)
        self.label_2.setObjectName(_fromUtf8("label_2"))
        self.horizontalLayout.addWidget(self.label_2)
        self.total = QtGui.QLabel(Dialog)
        self.total.setObjectName(_fromUtf8("total"))
        self.horizontalLayout.addWidget(self.total)
        spacerItem = QtGui.QSpacerItem(308, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.horizontalLayout.addItem(spacerItem)
        self.close_button = QtGui.QPushButton(Dialog)
        self.close_button.setObjectName(_fromUtf8("close_button"))
        self.horizontalLayout.addWidget(self.close_button)
        self.verticalLayout.addLayout(self.horizontalLayout)
        self.label.setBuddy(self.search_query)
        self.retranslateUi(Dialog)
        QtCore.QObject.connect(self.close_button, QtCore.SIGNAL(_fromUtf8("clicked()")), Dialog.accept)
        QtCore.QMetaObject.connectSlotsByName(Dialog)

    def retranslateUi(self, Dialog):
        """Set translatable texts (``_`` is calibre's translation function)."""
        Dialog.setWindowTitle(_("Dialog"))
        self.label.setText(_("&Query:"))
        self.adv_search_button.setText(_("..."))
        self.search_button.setText(_("Search"))
        self.label_2.setText(_("Books:"))
        self.total.setText(_("0"))
        self.close_button.setText(_("Close"))
from calibre.gui2.widgets import HistoryLineEdit
|
Eksmo/calibre
|
src/calibre/gui2/store/stores/mobileread/store_dialog_ui.py
|
Python
|
gpl-3.0
| 3,890
|
from itertools import chain, repeat
import xml.etree.cElementTree as ET
import mock
import pytest
import spreads.vendor.confit as confit
from spreads.vendor.pathlib import Path
@pytest.fixture
def pluginclass(mock_findinpath):
    # Import deferred into the fixture -- presumably so the
    # mock_findinpath fixture is active before the plugin module resolves
    # the scantailor binary; confirm.
    import spreadsplug.scantailor as scantailor
    return scantailor.ScanTailorPlugin
@pytest.fixture
def config(pluginclass):
    # Configuration pre-filled with every template default; selectable
    # options take the first available choice.
    config = confit.Configuration('test_scantailor')
    tmpl = pluginclass.configuration_template()
    for key, option in tmpl.items():
        if option.selectable:
            config['scantailor'][key] = option.value[0]
        else:
            config['scantailor'][key] = option.value
    return config
@pytest.fixture
def plugin(pluginclass, config):
    # Fake `scantailor-cli` help output (7 blank lines, then the usage
    # string) while the plugin is constructed; this usage form appears to
    # mark the "enhanced" CLI variant -- compare
    # test_generate_configuration_noenhanced below.
    with mock.patch('subprocess.check_output') as mock_co:
        mock_co.return_value = "".join(chain(
            repeat("\n", 7),
            ("scantailor-cli [options] <images|directory|-> <output>",))
        )
        return pluginclass(config)
@mock.patch('spreadsplug.scantailor.psutil.Process')
@mock.patch('spreadsplug.scantailor.subprocess.Popen')
def test_generate_configuration(popen, proc, plugin):
    # Pretend the spawned scantailor process has already exited.
    proc.return_value.is_running.return_value = False
    # TODO: Setup up some config variables
    # Wrap a Path so iterdir() can be stubbed while path math still works.
    imgdir = mock.MagicMock(wraps=Path('/tmp/raw'))
    imgs = [imgdir/"foo.jpg", imgdir/"bar.jpg"]
    imgdir.iterdir.return_value = imgs
    plugin._generate_configuration(Path('/tmp/foo.st'),
                                   imgdir,
                                   Path('/tmp/out'))
    # TODO: Check the sp.call for the correct parameters
@mock.patch('spreadsplug.scantailor.psutil.Process')
@mock.patch('spreadsplug.scantailor.subprocess.Popen')
def test_generate_configuration_noenhanced(popen, proc, config, pluginclass):
    proc.return_value.is_running.return_value = False
    # TODO: Setup up some config variables
    # Construct the plugin against the plain (non-enhanced) usage string,
    # unlike the shared `plugin` fixture.
    with mock.patch('subprocess.check_output') as mock_co:
        mock_co.return_value = "".join(chain(
            repeat("\n", 7),
            ("scantailor-cli [options] <image, image, ...>"
             " <output_directory>",))
        )
        plugin = pluginclass(config)
    imgdir = mock.MagicMock(wraps=Path('/tmp/raw'))
    imgs = [imgdir/"foo.jpg", imgdir/"bar.jpg"]
    imgdir.iterdir.return_value = imgs
    plugin._generate_configuration(Path('/tmp/foo.st'), imgdir,
                                   Path('/tmp/out'))
    # In plain mode the image paths are passed directly on the command
    # line. (`unicode` -- this test suite is Python 2.)
    assert (unicode(imgs[0]) in popen.call_args[0][0])
def test_split_configuration(plugin, tmpdir):
    # With 4 CPUs the project file should be split into 4 partial files.
    with mock.patch('spreadsplug.scantailor.multiprocessing.cpu_count') as cnt:
        cnt.return_value = 4
        splitfiles = plugin._split_configuration(
            Path('./tests/data/test.scanTailor'), Path(unicode(tmpdir)))
    assert len(splitfiles) == 4
    # Each listed section of the first split file keeps 7 child elements.
    tree = ET.parse(unicode(splitfiles[0]))
    for elem in ('files', 'images', 'pages', 'file-name-disambiguation'):
        assert len(tree.find('./{0}'.format(elem))) == 7
@mock.patch('spreadsplug.scantailor.subprocess.Popen')
def test_generate_output(popen, plugin):
    # Only checks that output generation runs over the stubbed split files.
    plugin._split_configuration = mock.Mock(
        return_value=['foo.st', 'bar.st'])
    plugin._generate_output('/tmp/foo.st', Path('/tmp'), 8)
@mock.patch('spreadsplug.scantailor.subprocess.call')
def test_process(call, plugin):
    plugin._generate_configuration = mock.Mock()
    plugin._generate_output = mock.Mock()
    # With autopilot on, the interactive scantailor GUI must not be spawned.
    plugin.config['autopilot'] = True
    imgdir = mock.MagicMock(wraps=Path('/tmp'))
    imgs = [imgdir/"foo.jpg", imgdir/"bar.jpg"]
    (imgdir/"raw").iterdir.return_value = imgs
    plugin.process(imgdir)
    assert call.call_count == 0
    # With autopilot off, exactly one interactive invocation is expected.
    plugin.config['autopilot'] = False
    plugin.process(imgdir)
    assert call.call_count == 1
|
tfmorris/spreads
|
tests/scantailor_test.py
|
Python
|
agpl-3.0
| 3,718
|
'''
Tile.py
The tiles that get placed on the board and how they are displayed.
'''
from options import Options
class Tile:
    """One board cell: owning team, strength, existence flag and
    adjacency links to neighboring tiles."""

    def __init__(self, row, col, options):
        self.row = row
        self.col = col
        self.team = 0
        self.strength = 0
        self.exists = True
        self.neighbors = []
        self.options = options

    def text(self):
        """Render the tile as a fixed-width token such as ``<A_5>``;
        a removed tile renders as a single space."""
        if not self.exists:
            return " "
        rendered = "<" + self.options.teamChars[self.team]
        if self.strength < 10:
            rendered += "_"
        rendered += "_" if self.strength == 0 else str(self.strength)
        return rendered + ">"

    def showNeighbors(self):
        """Print this tile's coordinates and those of every neighbor."""
        pieces = ["Tile at " + str(self.row) + "," + str(self.col) + " has neighbors at "]
        for neighbor in self.neighbors:
            pieces.append(str(neighbor.row) + "," + str(neighbor.col) + "|")
        print("".join(pieces))
|
bacable/ProximityBasic
|
tile.py
|
Python
|
gpl-2.0
| 801
|
import contextlib
from hacheck.compat import nested
import mock
import json
import os
from unittest import TestCase
import hacheck.haupdown
import hacheck.spool
# can't use an actual mock.sentinel because it doesn't support string ops
sentinel_service_name = 'testing_service_name'
class TestCallable(TestCase):
    """Tests for the hacheck up/down/status command-line entry points."""

    @contextlib.contextmanager
    def setup_wrapper(self, args=frozenset()):
        # Patch the spool backend, the print helper and argv together and
        # hand the spool/print mocks to the test body.
        with nested(
            mock.patch.object(hacheck, 'spool', return_value=(True, {})),
            mock.patch.object(hacheck.haupdown, 'print_s'),
            mock.patch('sys.argv', ['ignored'] + list(args))
        ) as (mock_spool, mock_print, _1):
            yield mock_spool, mock_print

    def test_basic(self):
        with self.setup_wrapper() as (spooler, _):
            spooler.status.return_value = (True, {})
            hacheck.haupdown.main('status_downed')
            # Status queries must open the spool read-only.
            spooler.configure.assert_called_once_with('/var/spool/hacheck', needs_write=False)

    def test_exit_codes(self):
        # UP -> exit status 0, DOWN -> exit status 1.
        with self.setup_wrapper([sentinel_service_name]) as (spooler, mock_print):
            spooler.status.return_value = (True, {})
            self.assertEqual(0, hacheck.haupdown.main('status'))
            mock_print.assert_any_call('UP\t%s', sentinel_service_name)
            spooler.status.return_value = (False, {'reason': 'irrelevant'})
            self.assertEqual(1, hacheck.haupdown.main('status'))
            mock_print.assert_any_call('DOWN\t%f\t%s\t%s', float('Inf'), sentinel_service_name, 'irrelevant')

    def test_up(self):
        with self.setup_wrapper([sentinel_service_name]) as (spooler, mock_print):
            hacheck.haupdown.up()
            spooler.up.assert_called_once_with(sentinel_service_name, port=None)
            self.assertEqual(mock_print.call_count, 0)

    def test_up_with_port(self):
        with self.setup_wrapper(['-P', '1234', sentinel_service_name]) as (spooler, mock_print):
            hacheck.haupdown.up()
            spooler.up.assert_called_once_with(sentinel_service_name, port=1234)
            self.assertEqual(mock_print.call_count, 0)

    def test_down(self):
        # The downing user is resolved from SSH_USER/SUDO_USER.
        # NOTE(review): mutates os.environ without restoring it; later
        # tests (e.g. test_down_with_expiration) silently depend on this
        # having run first -- confirm and consider a tearDown.
        os.environ['SSH_USER'] = 'testyuser'
        os.environ['SUDO_USER'] = 'testyuser'
        with self.setup_wrapper([sentinel_service_name]) as (spooler, mock_print):
            hacheck.haupdown.down()
            spooler.down.assert_called_once_with(sentinel_service_name, 'testyuser', expiration=None, port=None)
            self.assertEqual(mock_print.call_count, 0)

    def test_down_with_reason(self):
        with self.setup_wrapper(['-r', 'something', sentinel_service_name]) as (spooler, mock_print):
            hacheck.haupdown.down()
            spooler.down.assert_called_once_with(sentinel_service_name, 'something', expiration=None, port=None)
            self.assertEqual(mock_print.call_count, 0)

    def test_down_with_expiration(self):
        with self.setup_wrapper(['-e', '9876543210', sentinel_service_name]) as (spooler, mock_print):
            hacheck.haupdown.down()
            spooler.down.assert_called_once_with(sentinel_service_name, 'testyuser', expiration=9876543210, port=None)
            self.assertEqual(mock_print.call_count, 0)

    def test_down_with_port(self):
        with self.setup_wrapper(['-P', '1234', sentinel_service_name]) as (spooler, mock_print):
            hacheck.haupdown.down()
            spooler.down.assert_called_once_with(sentinel_service_name, 'testyuser', expiration=None, port=1234)
            self.assertEqual(mock_print.call_count, 0)

    def test_status(self):
        with self.setup_wrapper([sentinel_service_name]) as (spooler, mock_print):
            spooler.status.return_value = (True, {})
            hacheck.haupdown.status()
            spooler.status.assert_called_once_with(sentinel_service_name, port=None)
            mock_print.assert_called_once_with("UP\t%s", sentinel_service_name)

    def test_status_downed(self):
        # A downed entry without expiration prints with an infinite expiry.
        with self.setup_wrapper() as (spooler, mock_print):
            spooler.status_all_down.return_value = [
                (sentinel_service_name, None, {'service': sentinel_service_name, 'reason': '', 'expiration': None})
            ]
            self.assertEqual(hacheck.haupdown.status_downed(), 0)
            mock_print.assert_called_once_with("DOWN\t%f\t%s\t%s", float('Inf'), sentinel_service_name, mock.ANY)

    def test_status_downed_expiration(self):
        with self.setup_wrapper() as (spooler, mock_print):
            spooler.status_all_down.return_value = [
                (
                    sentinel_service_name,
                    None,
                    {'service': sentinel_service_name, 'reason': '', 'expiration': 9876543210}
                ),
            ]
            self.assertEqual(hacheck.haupdown.status_downed(), 0)
            mock_print.assert_called_once_with("DOWN\t%f\t%s\t%s", 9876543210, sentinel_service_name, mock.ANY)

    def test_list(self):
        # halist() queries the local hacheck HTTP endpoint for recent services.
        with self.setup_wrapper() as (spooler, mock_print):
            with mock.patch.object(hacheck.haupdown, 'urlopen') as mock_urlopen:
                mock_urlopen.return_value.read.return_value = json.dumps({
                    "seen_services": ["foo"],
                    "threshold_seconds": 10,
                })
                self.assertEqual(hacheck.haupdown.halist(), 0)
                mock_urlopen.assert_called_once_with('http://127.0.0.1:3333/recent', timeout=mock.ANY)
                mock_print.assert_called_once_with("foo")

    def test_print_status(self):
        # Covers the three output shapes: up, down, and down-with-port.
        with self.setup_wrapper() as (spooler, mock_print):
            hacheck.haupdown.print_status('foo', None, True, {})
            mock_print.assert_called_once_with('UP\t%s', 'foo')
        with self.setup_wrapper() as (spooler, mock_print):
            hacheck.haupdown.print_status('foo', None, False, {'reason': 'somereason'})
            mock_print.assert_called_once_with('DOWN\t%f\t%s\t%s', float('Inf'), 'foo', 'somereason')
        with self.setup_wrapper() as (spooler, mock_print):
            hacheck.haupdown.print_status('foo', 1234, False, {'reason': 'somereason'})
            mock_print.assert_called_once_with('DOWN\t%f\t%s:%d\t%s', float('Inf'), 'foo', 1234, 'somereason')
|
EvanKrall/hacheck
|
tests/test_callables.py
|
Python
|
mit
| 6,245
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-12 01:39
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration: drops the ``takers`` field from ``match``."""

    dependencies = [
        ('core', '0002_add_lobby_and_matches'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='match',
            name='takers',
        ),
    ]
|
kkmsc17/smes
|
backend/core/migrations/0003_remove_match_takers.py
|
Python
|
agpl-3.0
| 392
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# -- stdlib --
import json
import os
import socket
import subprocess
import sys
import time
# -- third party --
# -- own --
# -- code --
# Timestamp shared by every metric point emitted below.
ts = int(time.time())
# Locate the MegaCLI binary: prefer whatever `which` finds on $PATH,
# otherwise fall back to the conventional install location.
p = subprocess.Popen("which megacli", shell=True, stdout=subprocess.PIPE)
cli = p.stdout.read().strip()
if cli:
    CLI = cli
else:
    CLI = '/opt/MegaRAID/MegaCli/MegaCli64'
if not os.path.exists(CLI):
    # No MegaRAID tooling on this host: report an empty metric list.
    print json.dumps([])
    sys.exit(0)
# Count drives on adapter 0 whose firmware state is Failed or Offline.
p = subprocess.Popen(
    CLI + " -LdPdInfo -a0 | grep -Ec 'Firmware state: (Failed|Offline)'",
    shell=True, stdout=subprocess.PIPE,
)
total_err = int(p.stdout.read())
# Emit a single monitoring metric point as JSON on stdout (Python 2 script).
print json.dumps([{
    "metric": "megaraid.offline",
    "timestamp": ts,
    "step": 600,
    "value": total_err,
}])
|
kadashu/satori
|
satori-rules/plugin/infra/600_megaraid.py
|
Python
|
apache-2.0
| 746
|
from puzzle.heuristics.acrostics import _acrostic_search
class Acrostic(_acrostic_search.AcrosticSearch):
    """Best available Acrostic solver.

    Alias for the current best implementation; swap the base class here to
    upgrade the solver without touching callers.
    """
    pass
|
PhilHarnish/forge
|
src/puzzle/heuristics/acrostic.py
|
Python
|
mit
| 155
|
# Description: Shows how usage of different classes for discretization, including manual discretization
# Category: discretization, categorization, preprocessing
# Classes: EntropyDiscretization, EquiDistDiscretization, BiModalDiscretization, Discretization, IntervalDiscretizer, Discretizer, BiModalDiscretizer
# Uses: iris
# Referenced: discretization.htm
import Orange
# Documented Orange example script (Python 2); each section demonstrates
# one discretization class on the iris data set.
data = Orange.data.Table("iris")

# --- Entropy (Fayyad-Irani) discretization of a single attribute ---
print "\nEntropy discretization, first 10 examples"
sep_w = Orange.feature.discretization.Entropy("sepal width", data)
data2 = data.select([data.domain["sepal width"], sep_w, data.domain.class_var])
for ex in data2[:10]:
    print ex
print "\nDiscretized attribute:", sep_w
print "Continuous attribute:", sep_w.get_value_from.whichVar #FIXME not which_var
print "Cut-off points:", sep_w.get_value_from.transformer.points

# --- Manual IntervalDiscretizer: single attribute ---
print "\nManual construction of Interval discretizer - single attribute"
idisc = Orange.feature.discretization.IntervalDiscretizer(points = [3.0, 5.0])
sep_l = idisc.construct_variable(data.domain["sepal length"])
data2 = data.select([data.domain["sepal length"], sep_l, data.domain.classVar])
for ex in data2[:10]:
    print ex

# --- Manual IntervalDiscretizer: all attributes ---
print "\nManual construction of Interval discretizer - all attributes"
idisc = Orange.feature.discretization.IntervalDiscretizer(points = [3.0, 5.0])
newattrs = [idisc.construct_variable(attr) for attr in data.domain.attributes]
data2 = data.select(newattrs + [data.domain.class_var])
for ex in data2[:10]:
    print ex

# --- Equal-width discretization ---
print "\n\nDiscretization with equal width intervals"
disc = Orange.feature.discretization.EqualWidth(numberOfIntervals = 6)
newattrs = [disc(attr, data) for attr in data.domain.attributes]
data2 = data.select(newattrs + [data.domain.classVar])
for attr in newattrs:
    print "%s: %s" % (attr.name, attr.values)
print
for attr in newattrs:
    print "%15s: first interval at %5.3f, step %5.3f" % (attr.name, attr.get_value_from.transformer.first_cut, attr.get_value_from.transformer.step)
    print " "*17 + "cutoffs at " + ", ".join(["%5.3f" % x for x in attr.get_value_from.transformer.points])
print

# --- Equal-frequency (quartile) discretization ---
print "\n\nQuartile (equal frequency) discretization"
disc = Orange.feature.discretization.EqualFreq(numberOfIntervals = 6)
newattrs = [disc(attr, data) for attr in data.domain.attributes]
data2 = data.select(newattrs + [data.domain.classVar])
for attr in newattrs:
    print "%s: %s" % (attr.name, attr.values)
print
for attr in newattrs:
    print " "*17 + "cutoffs at " + ", ".join(["%5.3f" % x for x in attr.get_value_from.transformer.points])
print

# --- Manual EqualWidth discretizer across all attributes ---
print "\nManual construction of EqualWidth - all attributes"
edisc = Orange.feature.discretization.EqualWidthDiscretizer(first_cut=2.0, step=1.0, n=5)
newattrs = [edisc.constructVariable(attr) for attr in data.domain.attributes]
data2 = data.select(newattrs + [data.domain.classVar])
for ex in data2[:10]:
    print ex

# --- Entropy discretization attribute-by-attribute ---
print "\nFayyad-Irani entropy-based discretization"
entro = Orange.feature.discretization.Entropy()
for attr in data.domain.attributes:
    disc = entro(attr, data)
    print "%s: %s" % (attr.name, disc.get_value_from.transformer.points)
print

# Build a binary ("is versicolor") variant of iris for the examples below.
newclass = Orange.feature.Discrete("is versicolor", values = ["no", "yes"])
newclass.get_value_from = lambda ex, w: ex["iris"]=="Iris-versicolor"
newdomain = Orange.data.Domain(data.domain.attributes, newclass)
data_v = Orange.data.Table(newdomain, data)

# --- Bi-modal discretization (three intervals) ---
print "\nBi-modal discretization on a binary problem"
bimod = Orange.feature.discretization.BiModal(split_in_two = 0)
for attr in data_v.domain.attributes:
    disc = bimod(attr, data_v)
    print "%s: %s" % (attr.name, disc.get_value_from.transformer.points)
print

# --- Bi-modal discretization, default (split-in-two) variant ---
print "\nBi-modal discretization on a binary problem"
bimod = Orange.feature.discretization.BiModal()
for attr in data_v.domain.attributes:
    disc = bimod(attr, data_v)
    print "%s: (%5.3f, %5.3f]" % (attr.name, disc.get_value_from.transformer.low, disc.get_value_from.transformer.high)
print

# --- Entropy discretization on the binary problem ---
print "\nEntropy-based discretization on a binary problem"
for attr in data_v.domain.attributes:
    disc = entro(attr, data_v)
    print "%s: %s" % (attr.name, disc.getValueFrom.transformer.points)
|
yzl0083/orange
|
docs/reference/rst/code/discretization.py
|
Python
|
gpl-3.0
| 4,155
|
# Copyright (C) 2007 Free Software Foundation, Inc.
# This file contains code that is adapted from Ajenti.
# Written by Eugeny Pankov, 2010-2011.
#
# Ajenti is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; only
# version 3 of the License.
#
# Ajenti is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this file; if not, see <http://www.gnu.org/licenses/>
from ConfigParser import ConfigParser
import os
class Config(ConfigParser):
internal = {}
filename = ''
def __init__(self):
ConfigParser.__init__(self)
def load(self, fn):
self.filename = fn
self.read(fn)
def save(self):
with open(self.filename, 'w') as f:
self.write(f)
def get(self, section, val=None, default=None):
if val is None:
return self.internal[section]
else:
try:
return ConfigParser.get(self, section, val)
except:
if default is not None:
return default
raise
def set(self, section, val, value=None):
if value is None:
self.internal[section] = val
else:
if not self.has_section(section):
self.add_section(section)
ConfigParser.set(self, section, val, value)
def has_option(self, section, name):
try:
return ConfigParser.has_option(self, section, name)
except:
return False
def getlist(self, section, option, separator=","):
s = self.get(section, option).strip()
if s:
return [i.strip() for i in s.split(separator)]
else:
return []
|
strassek/storm
|
storm/config.py
|
Python
|
gpl-3.0
| 2,053
|
# coding: utf8
from __future__ import unicode_literals, print_function, division
from collections import OrderedDict
from json import loads
from clldutils.jsonlib import dump
from util import REPO_DIR, ns, read_terms
# CLDF module name -> ordered list of component keys (see COMPONENTS below)
# whose tables make up that module's default metadata.
MODULES = {
    'Generic': [],
    'Wordlist': ['forms'],
    'StructureDataset': ['values'],
    'Dictionary': ['entries', 'senses'],
    'ParallelText': ['forms'],
}
# Component subdirectory -> spec for its default table metadata:
#   'table'   - name of the table class in the terms ontology,
#   'columns' - (property-term, required?) pairs, in output column order.
COMPONENTS = {
    'borrowings': {
        'table': 'BorrowingTable',
        'columns': [
            ('id', True),
            ('targetFormReference', True),
            ('sourceFormReference', False),
            ('comment', False),
            ('source', False),
        ]
    },
    'codes': {
        'table': 'CodeTable',
        'columns': [
            ('id', True),
            ('parameterReference', True),
            ('name', False),
            ('description', False),
        ]
    },
    'cognates': {
        'table': 'CognateTable',
        'columns': [
            ('id', True),
            ('formReference', True),
            ('cognatesetReference', True),
            ('segmentSlice', False),
            ('alignment', False),
            ('source', False),
        ]
    },
    'cognatesets': {
        'table': 'CognatesetTable',
        'columns': [
            ('id', True),
            ('description', False),
            ('source', False),
        ]
    },
    'examples': {
        'table': 'ExampleTable',
        'columns': [
            ('id', True),
            ('languageReference', True),
            ('primaryText', True),
            ('analyzedWord', False),
            ('gloss', False),
            ('translatedText', False),
            ('metaLanguageReference', False),
            ('comment', False),
        ]
    },
    'languages': {
        'table': 'LanguageTable',
        'columns': [
            ('id', True),
            ('name', False),
            ('macroarea', False),
            ('latitude', False),
            ('longitude', False),
            ('glottocode', False),
            ('iso639P3code', False),
        ]
    },
    'parameters': {
        'table': 'ParameterTable',
        'columns': [
            ('id', True),
            ('name', False),
            ('description', False),
        ]
    },
    'entries': {
        'table': 'EntryTable',
        'columns': [
            ('id', True),
            ('languageReference', True),
            ('headword', True),
            ('partOfSpeech', False),
        ]
    },
    'senses': {
        'table': 'SenseTable',
        'columns': [
            ('id', True),
            ('description', True),
            ('entryReference', True),
        ]
    },
    'values': {
        'table': 'ValueTable',
        'columns': [
            ('id', True),
            ('languageReference', True),
            ('parameterReference', True),
            ('value', False),
            ('codeReference', False),
            ('comment', False),
            ('source', False),
        ]
    },
    'forms': {
        'table': 'FormTable',
        'columns': [
            ('id', True),
            ('languageReference', True),
            ('parameterReference', True),
            ('form', True),
            ('segments', False),
            ('comment', False),
            ('source', False),
        ]
    },
    'functionalequivalents': {
        'table': 'FunctionalEquivalentTable',
        'columns': [
            ('id', True),
            ('formReference', True),
            ('functionalEquivalentsetReference', True),
            ('segmentSlice', False),
            ('alignment', False),
            ('source', False),
        ]
    },
    'functionalequivalentsets': {
        'table': 'FunctionalEquivalentsetTable',
        'columns': [
            ('id', True),
            ('description', False),
            ('source', False),
        ]
    },
}
def csvw_prop(e, lname):
    """Return the JSON-decoded text of *e*'s ``csvw:<lname>`` child element.

    Returns None when no such child exists.  (The original looked the
    child up twice; we find it once and reuse it.)
    """
    child = e.find(ns('csvw:{0}'.format(lname)))
    if child is not None:
        return loads(child.text)
def make_table(e):
    """Build the skeleton of a CSVW table description for table element *e*.

    The returned mapping has the table's url, the term it conforms to, and
    an empty column list for callers to fill in.
    """
    table = OrderedDict()
    table['url'] = csvw_prop(e, 'url')
    table['dc:conformsTo'] = e.attrib[ns('rdf:about')]
    table['tableSchema'] = OrderedDict([('columns', [])])
    return table
def make_column(e, required):
    """Build a CSVW column description for RDF property element *e*.

    Falls back to the element's rdfs:label when no explicit csvw:name is
    given, and to 'string' when no csvw:datatype is given; optional csvw
    attributes are copied only when present.
    """
    col = OrderedDict()
    col['name'] = csvw_prop(e, 'name') or e.find(ns('rdfs:label')).text
    col['required'] = required
    col['propertyUrl'] = e.attrib[ns('rdf:about')]
    col['datatype'] = csvw_prop(e, 'datatype') or 'string'
    for key in ('separator', 'null', 'valueUrl'):
        value = csvw_prop(e, key)
        if value:
            col[key] = value
    return col
def make():
    """Regenerate the default metadata JSON for all components and modules.

    Reads the terms ontology, collects table classes and column properties,
    then writes one ``<Table>-metadata.json`` per component and one
    ``<Module>-metadata.json`` per module under REPO_DIR.
    """
    tables = {}
    columns = {}
    # Index the ontology: *Table classes and rdf:Property elements by local name.
    for e in read_terms().iter():
        if ns('rdf:about') in e.attrib:
            lname = e.attrib[ns('rdf:about')].split('#')[-1]
            if e.tag == ns('rdfs:Class') and lname.endswith('Table'):
                tables[lname] = e
            elif e.tag == ns('rdf:Property'):
                columns[lname] = e
    comps = {}
    for subdir, spec in COMPONENTS.items():
        # pop() also asserts each table class is used by at most one component.
        table = make_table(tables.pop(spec['table']))
        for c, req in spec['columns']:
            table['tableSchema']['columns'].append(make_column(columns[c], req))
        comps[subdir] = table
        dump(
            table,
            REPO_DIR.joinpath(
                'components', subdir, '{0}-metadata.json'.format(spec['table'])),
            indent=4)
    # Module metadata embeds the full table descriptions built above.
    for subdir, comprefs in MODULES.items():
        dump(
            OrderedDict([
                ("@context", ["http://www.w3.org/ns/csvw", {"@language": "en"}]),
                ("dc:conformsTo",
                 "http://cldf.clld.org/v1.0/terms.rdf#{0}".format(subdir)),
                ("dialect", {
                    "commentPrefix": None,
                }),
                ("tables", [comps[ref] for ref in comprefs]),
            ]),
            REPO_DIR.joinpath('modules', subdir, '{0}-metadata.json'.format(subdir)),
            indent=4)
# Regenerate all default metadata files when run as a script.
if __name__ == '__main__':
    make()
|
LinguList/cldf
|
scripts/make_defaults.py
|
Python
|
apache-2.0
| 5,999
|
#!/usr/bin/env python3
# CGI script: reads the submitted form value for the thermo function
# parameter and stores it in Redis, then renders a confirmation page.
# (Part of the Python-side HTML management.)
import cgi
import cgitb
# Report errors to the web/HTTP server.
cgitb.enable()
# Project libraries: Json (mjl), Html (mhl), Thermo/Redis helpers (flt).
import mjl, mhl, flt
import redis
# General parameters (page title is user-facing Italian text -- left as is).
TestoPagina="Configurazione/Selezione funzionamento"
ConfigFile="../conf/thermo.json"
#WriteFile="/cgi-bin/writethermofunction.py"
# Redis key this script manages.
RedisKey = "thermo:function"
# Open the Redis database via the project helper library.
MyDB = flt.OpenDBFile(ConfigFile)
# Start the web page, using the HTML header helpers from the project library.
print (mhl.MyHtml())
print (mhl.MyHtmlHead())
# Page title/text.
print ("<h1>","<center>",TestoPagina,"</center>","</h1>")
print ("<hr/>","<br/>")
# Optional help/annotation.
#print ("Ho lasciato la possibilita` di lasciare vuota la password","<hr/>","<br/>")
form=cgi.FieldStorage()
if RedisKey not in form:
    print ("<h3>Manca il valore: </h3>",RedisKey)
else:
    # NOTE(review): cgi.escape was removed in Python 3.8 (html.escape is the
    # replacement) -- confirm which interpreter version deploys this script.
    MyDB.set(RedisKey,cgi.escape(form[RedisKey].value))
    # Echo the stored key/value back in a small table.
    print ("<h2>Dati inseriti/modificati:</h2>")
    print ("<br>")
    print ("<table border=\"1\" cellspacing=\"0\" cellpadding=\"3\">")
    print ("<tr>")
    print ("<td>")
    print (RedisKey)
    print ("</td>")
    print ("<td>")
    print (MyDB.get(RedisKey))
    print ("</td>")
    print ("</tr>")
    print ("</table>")
# End of the web page.
print (mhl.MyHtmlBottom())
|
raspibo/Thermo
|
var/www/cgi-bin/writethermofunction.py
|
Python
|
mit
| 1,482
|
from coalib.bears.GlobalBear import GlobalBear
from coalib.results.Result import Result
class GlobalTestBear(GlobalBear):  # pragma: no cover
    """Trivial global bear used as a test fixture: reports one fixed result."""

    def run(self):
        # Return a single canned result for the first file in the file dict;
        # with an empty file dict nothing is returned (implicit None).
        for path in self.file_dict:
            result = Result.from_values("GlobalTestBear",
                                        "test message",
                                        path)
            return [result]
|
Tanmay28/coala
|
coalib/tests/processes/section_executor_test_files/GlobalTestBear.py
|
Python
|
agpl-3.0
| 365
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._storage_accounts_operations import build_check_name_availability_request, build_create_request_initial, build_delete_request, build_get_properties_request, build_list_account_sas_request, build_list_by_resource_group_request, build_list_keys_request, build_list_request, build_list_service_sas_request, build_regenerate_key_request, build_update_request
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class StorageAccountsOperations:
"""StorageAccountsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.storage.v2017_06_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
    @distributed_trace_async
    async def check_name_availability(
        self,
        account_name: "_models.StorageAccountCheckNameAvailabilityParameters",
        **kwargs: Any
    ) -> "_models.CheckNameAvailabilityResult":
        """Checks that the storage account name is valid and is not already in use.

        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only.
        :type account_name:
         ~azure.mgmt.storage.v2017_06_01.models.StorageAccountCheckNameAvailabilityParameters
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: CheckNameAvailabilityResult, or the result of cls(response)
        :rtype: ~azure.mgmt.storage.v2017_06_01.models.CheckNameAvailabilityResult
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.CheckNameAvailabilityResult"]
        # Map auth/not-found/conflict status codes onto typed azure-core exceptions.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
        _json = self._serialize.body(account_name, 'StorageAccountCheckNameAvailabilityParameters')
        request = build_check_name_availability_request(
            subscription_id=self._config.subscription_id,
            content_type=content_type,
            json=_json,
            template_url=self.check_name_availability.metadata['url'],
        )
        # Adapt the rest.HttpRequest for the pipeline transport and expand the URL template.
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('CheckNameAvailabilityResult', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    check_name_availability.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Storage/checkNameAvailability'} # type: ignore
    async def _create_initial(
        self,
        resource_group_name: str,
        account_name: str,
        parameters: "_models.StorageAccountCreateParameters",
        **kwargs: Any
    ) -> Optional["_models.StorageAccount"]:
        """Issue the initial create request for the begin_create LRO.

        Returns the deserialized StorageAccount on 200, or None on 202
        (creation accepted but still in progress -- the poller follows up).
        """
        cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.StorageAccount"]]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
        _json = self._serialize.body(parameters, 'StorageAccountCreateParameters')
        request = build_create_request_initial(
            resource_group_name=resource_group_name,
            account_name=account_name,
            subscription_id=self._config.subscription_id,
            content_type=content_type,
            json=_json,
            template_url=self._create_initial.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('StorageAccount', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _create_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}'} # type: ignore
    @distributed_trace_async
    async def begin_create(
        self,
        resource_group_name: str,
        account_name: str,
        parameters: "_models.StorageAccountCreateParameters",
        **kwargs: Any
    ) -> AsyncLROPoller["_models.StorageAccount"]:
        """Asynchronously creates a new storage account with the specified parameters. If an account is
        already created and a subsequent create request is issued with different properties, the
        account properties will be updated. If an account is already created and a subsequent create or
        update request is issued with the exact same set of properties, the request will succeed.

        :param resource_group_name: The name of the resource group within the user's subscription. The
         name is case insensitive.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only.
        :type account_name: str
        :param parameters: The parameters to provide for the created account.
        :type parameters: ~azure.mgmt.storage.v2017_06_01.models.StorageAccountCreateParameters
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
         this operation to not poll, or pass in your own initialized polling object for a personal
         polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either StorageAccount or the result of
         cls(response)
        :rtype:
         ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.v2017_06_01.models.StorageAccount]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
        polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod]
        cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageAccount"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
        # Only send the initial request when not resuming from a saved poller state.
        if cont_token is None:
            raw_result = await self._create_initial(
                resource_group_name=resource_group_name,
                account_name=account_name,
                parameters=parameters,
                content_type=content_type,
                cls=lambda x,y,z: x,
                **kwargs
            )
        kwargs.pop('error_map', None)
        # Deserializes the final polled response into a StorageAccount (or cls()).
        def get_long_running_output(pipeline_response):
            response = pipeline_response.http_response
            deserialized = self._deserialize('StorageAccount', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}'} # type: ignore
    @distributed_trace_async
    async def delete(
        self,
        resource_group_name: str,
        account_name: str,
        **kwargs: Any
    ) -> None:
        """Deletes a storage account in Microsoft Azure.

        :param resource_group_name: The name of the resource group within the user's subscription. The
         name is case insensitive.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only.
        :type account_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        request = build_delete_request(
            resource_group_name=resource_group_name,
            account_name=account_name,
            subscription_id=self._config.subscription_id,
            template_url=self.delete.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # 204 means the account did not exist; both outcomes are treated as success.
        if response.status_code not in [200, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        if cls:
            return cls(pipeline_response, None, {})
    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}'} # type: ignore
    @distributed_trace_async
    async def get_properties(
        self,
        resource_group_name: str,
        account_name: str,
        **kwargs: Any
    ) -> "_models.StorageAccount":
        """Returns the properties for the specified storage account including but not limited to name, SKU
        name, location, and account status. The ListKeys operation should be used to retrieve storage
        keys.

        :param resource_group_name: The name of the resource group within the user's subscription. The
         name is case insensitive.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only.
        :type account_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: StorageAccount, or the result of cls(response)
        :rtype: ~azure.mgmt.storage.v2017_06_01.models.StorageAccount
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageAccount"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        request = build_get_properties_request(
            resource_group_name=resource_group_name,
            account_name=account_name,
            subscription_id=self._config.subscription_id,
            template_url=self.get_properties.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('StorageAccount', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get_properties.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}'} # type: ignore
    @distributed_trace_async
    async def update(
        self,
        resource_group_name: str,
        account_name: str,
        parameters: "_models.StorageAccountUpdateParameters",
        **kwargs: Any
    ) -> "_models.StorageAccount":
        """The update operation can be used to update the SKU, encryption, access tier, or tags for a
        storage account. It can also be used to map the account to a custom domain. Only one custom
        domain is supported per storage account; the replacement/change of custom domain is not
        supported. In order to replace an old custom domain, the old value must be cleared/unregistered
        before a new value can be set. The update of multiple properties is supported. This call does
        not change the storage keys for the account. If you want to change the storage account keys,
        use the regenerate keys operation. The location and name of the storage account cannot be
        changed after creation.

        :param resource_group_name: The name of the resource group within the user's subscription. The
         name is case insensitive.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only.
        :type account_name: str
        :param parameters: The parameters to provide for the updated account.
        :type parameters: ~azure.mgmt.storage.v2017_06_01.models.StorageAccountUpdateParameters
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: StorageAccount, or the result of cls(response)
        :rtype: ~azure.mgmt.storage.v2017_06_01.models.StorageAccount
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageAccount"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
        _json = self._serialize.body(parameters, 'StorageAccountUpdateParameters')
        request = build_update_request(
            resource_group_name=resource_group_name,
            account_name=account_name,
            subscription_id=self._config.subscription_id,
            content_type=content_type,
            json=_json,
            template_url=self.update.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('StorageAccount', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}'} # type: ignore
    @distributed_trace
    def list(
        self,
        **kwargs: Any
    ) -> AsyncIterable["_models.StorageAccountListResult"]:
        """Lists all the storage accounts available under the subscription. Note that storage keys are not
        returned; use the ListKeys operation for this.

        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either StorageAccountListResult or the result of
         cls(response)
        :rtype:
         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.v2017_06_01.models.StorageAccountListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageAccountListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        # Build the request for the first page (templated URL) or a follow-up page (next_link).
        def prepare_request(next_link=None):
            if not next_link:
                request = build_list_request(
                    subscription_id=self._config.subscription_id,
                    template_url=self.list.metadata['url'],
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
            else:
                request = build_list_request(
                    subscription_id=self._config.subscription_id,
                    template_url=next_link,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request
        # Pull the page's items out of the deserialized list result.
        async def extract_data(pipeline_response):
            deserialized = self._deserialize("StorageAccountListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return None, AsyncList(list_of_elem)
        # Fetch one page and fail fast on any non-200 response.
        async def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Storage/storageAccounts'} # type: ignore
    @distributed_trace
    def list_by_resource_group(
        self,
        resource_group_name: str,
        **kwargs: Any
    ) -> AsyncIterable["_models.StorageAccountListResult"]:
        """Lists all the storage accounts available under the given resource group. Note that storage keys
        are not returned; use the ListKeys operation for this.

        :param resource_group_name: The name of the resource group within the user's subscription. The
         name is case insensitive.
        :type resource_group_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either StorageAccountListResult or the result of
         cls(response)
        :rtype:
         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.v2017_06_01.models.StorageAccountListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageAccountListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        # Build the request for the first page (templated URL) or a follow-up page (next_link).
        def prepare_request(next_link=None):
            if not next_link:
                request = build_list_by_resource_group_request(
                    resource_group_name=resource_group_name,
                    subscription_id=self._config.subscription_id,
                    template_url=self.list_by_resource_group.metadata['url'],
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
            else:
                request = build_list_by_resource_group_request(
                    resource_group_name=resource_group_name,
                    subscription_id=self._config.subscription_id,
                    template_url=next_link,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request
        # Pull the page's items out of the deserialized list result.
        async def extract_data(pipeline_response):
            deserialized = self._deserialize("StorageAccountListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return None, AsyncList(list_of_elem)
        # Fetch one page and fail fast on any non-200 response.
        async def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return AsyncItemPaged(
            get_next, extract_data
        )
    list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts'} # type: ignore
    @distributed_trace_async
    async def list_keys(
        self,
        resource_group_name: str,
        account_name: str,
        **kwargs: Any
    ) -> "_models.StorageAccountListKeysResult":
        """Lists the access keys for the specified storage account.

        :param resource_group_name: The name of the resource group within the user's subscription. The
         name is case insensitive.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only.
        :type account_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: StorageAccountListKeysResult, or the result of cls(response)
        :rtype: ~azure.mgmt.storage.v2017_06_01.models.StorageAccountListKeysResult
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageAccountListKeysResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        request = build_list_keys_request(
            resource_group_name=resource_group_name,
            account_name=account_name,
            subscription_id=self._config.subscription_id,
            template_url=self.list_keys.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('StorageAccountListKeysResult', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/listKeys'} # type: ignore
    @distributed_trace_async
    async def regenerate_key(
        self,
        resource_group_name: str,
        account_name: str,
        regenerate_key: "_models.StorageAccountRegenerateKeyParameters",
        **kwargs: Any
    ) -> "_models.StorageAccountListKeysResult":
        """Regenerates one of the access keys for the specified storage account.

        :param resource_group_name: The name of the resource group within the user's subscription. The
         name is case insensitive.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only.
        :type account_name: str
        :param regenerate_key: Specifies name of the key which should be regenerated -- key1 or key2.
        :type regenerate_key:
         ~azure.mgmt.storage.v2017_06_01.models.StorageAccountRegenerateKeyParameters
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: StorageAccountListKeysResult, or the result of cls(response)
        :rtype: ~azure.mgmt.storage.v2017_06_01.models.StorageAccountListKeysResult
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageAccountListKeysResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
        _json = self._serialize.body(regenerate_key, 'StorageAccountRegenerateKeyParameters')
        request = build_regenerate_key_request(
            resource_group_name=resource_group_name,
            account_name=account_name,
            subscription_id=self._config.subscription_id,
            content_type=content_type,
            json=_json,
            template_url=self.regenerate_key.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        # The response carries the full refreshed key set, not just the regenerated key.
        deserialized = self._deserialize('StorageAccountListKeysResult', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    regenerate_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/regenerateKey'} # type: ignore
@distributed_trace_async
async def list_account_sas(
    self,
    resource_group_name: str,
    account_name: str,
    parameters: "_models.AccountSasParameters",
    **kwargs: Any
) -> "_models.ListAccountSasResponse":
    """List SAS credentials of a storage account.

    :param resource_group_name: The name of the resource group within the user's subscription. The
     name is case insensitive.
    :type resource_group_name: str
    :param account_name: The name of the storage account within the specified resource group.
     Storage account names must be between 3 and 24 characters in length and use numbers and
     lower-case letters only.
    :type account_name: str
    :param parameters: The parameters to provide to list SAS credentials for the storage account.
    :type parameters: ~azure.mgmt.storage.v2017_06_01.models.AccountSasParameters
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: ListAccountSasResponse, or the result of cls(response)
    :rtype: ~azure.mgmt.storage.v2017_06_01.models.ListAccountSasResponse
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.ListAccountSasResponse"]
    # Map well-known HTTP failures onto azure-core exception types; callers may
    # extend or override this mapping via the 'error_map' keyword argument.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
    # Serialize the model into the JSON request body before building the request.
    _json = self._serialize.body(parameters, 'AccountSasParameters')
    request = build_list_account_sas_request(
        resource_group_name=resource_group_name,
        account_name=account_name,
        subscription_id=self._config.subscription_id,
        content_type=content_type,
        json=_json,
        template_url=self.list_account_sas.metadata['url'],
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)
    # Run the request through the client's policy pipeline and transport.
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response
    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)
    deserialized = self._deserialize('ListAccountSasResponse', pipeline_response)
    if cls:
        # A caller-supplied callback also receives the raw pipeline response.
        return cls(pipeline_response, deserialized, {})
    return deserialized
list_account_sas.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/ListAccountSas'}  # type: ignore
@distributed_trace_async
async def list_service_sas(
    self,
    resource_group_name: str,
    account_name: str,
    parameters: "_models.ServiceSasParameters",
    **kwargs: Any
) -> "_models.ListServiceSasResponse":
    """List service SAS credentials of a specific resource.

    :param resource_group_name: The name of the resource group within the user's subscription. The
     name is case insensitive.
    :type resource_group_name: str
    :param account_name: The name of the storage account within the specified resource group.
     Storage account names must be between 3 and 24 characters in length and use numbers and
     lower-case letters only.
    :type account_name: str
    :param parameters: The parameters to provide to list service SAS credentials.
    :type parameters: ~azure.mgmt.storage.v2017_06_01.models.ServiceSasParameters
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: ListServiceSasResponse, or the result of cls(response)
    :rtype: ~azure.mgmt.storage.v2017_06_01.models.ListServiceSasResponse
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.ListServiceSasResponse"]
    # Map well-known HTTP failures onto azure-core exception types; callers may
    # extend or override this mapping via the 'error_map' keyword argument.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
    # Serialize the model into the JSON request body before building the request.
    _json = self._serialize.body(parameters, 'ServiceSasParameters')
    request = build_list_service_sas_request(
        resource_group_name=resource_group_name,
        account_name=account_name,
        subscription_id=self._config.subscription_id,
        content_type=content_type,
        json=_json,
        template_url=self.list_service_sas.metadata['url'],
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)
    # Run the request through the client's policy pipeline and transport.
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response
    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)
    deserialized = self._deserialize('ListServiceSasResponse', pipeline_response)
    if cls:
        # A caller-supplied callback also receives the raw pipeline response.
        return cls(pipeline_response, deserialized, {})
    return deserialized
list_service_sas.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/ListServiceSas'}  # type: ignore
|
Azure/azure-sdk-for-python
|
sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_06_01/aio/operations/_storage_accounts_operations.py
|
Python
|
mit
| 36,878
|
from __future__ import unicode_literals
import xml.etree.ElementTree
from .subtitles import SubtitlesInfoExtractor
from ..utils import ExtractorError
from ..compat import compat_HTTPError
class BBCCoUkIE(SubtitlesInfoExtractor):
    """Extractor for BBC iPlayer / bbc.co.uk programmes, episodes, playlists
    and music clips.

    Resolution strategy (see _real_extract):
      1. If the page embeds a "vpid", use the episode JSON for metadata.
      2. Otherwise resolve via the programme playlist JSON, falling back to
         the legacy XML playlist on HTTP 404.
    Actual media URLs always come from the mediaselector service
    (_download_media_selector).
    """

    IE_NAME = 'bbc.co.uk'
    IE_DESC = 'BBC iPlayer'
    _VALID_URL = r'https?://(?:www\.)?bbc\.co\.uk/(?:(?:(?:programmes|iplayer/(?:episode|playlist))/)|music/clips[/#])(?P<id>[\da-z]{8})'

    _TESTS = [
        {
            'url': 'http://www.bbc.co.uk/programmes/b039g8p7',
            'info_dict': {
                'id': 'b039d07m',
                'ext': 'flv',
                'title': 'Kaleidoscope, Leonard Cohen',
                'description': 'The Canadian poet and songwriter reflects on his musical career.',
                'duration': 1740,
            },
            'params': {
                # rtmp download
                'skip_download': True,
            }
        },
        {
            'url': 'http://www.bbc.co.uk/iplayer/episode/b00yng5w/The_Man_in_Black_Series_3_The_Printed_Name/',
            'info_dict': {
                'id': 'b00yng1d',
                'ext': 'flv',
                'title': 'The Man in Black: Series 3: The Printed Name',
                'description': "Mark Gatiss introduces Nicholas Pierpan's chilling tale of a writer's devilish pact with a mysterious man. Stars Ewan Bailey.",
                'duration': 1800,
            },
            'params': {
                # rtmp download
                'skip_download': True,
            },
            'skip': 'Episode is no longer available on BBC iPlayer Radio',
        },
        {
            'url': 'http://www.bbc.co.uk/iplayer/episode/b03vhd1f/The_Voice_UK_Series_3_Blind_Auditions_5/',
            'info_dict': {
                'id': 'b00yng1d',
                'ext': 'flv',
                'title': 'The Voice UK: Series 3: Blind Auditions 5',
                'description': "Emma Willis and Marvin Humes present the fifth set of blind auditions in the singing competition, as the coaches continue to build their teams based on voice alone.",
                'duration': 5100,
            },
            'params': {
                # rtmp download
                'skip_download': True,
            },
            'skip': 'Currently BBC iPlayer TV programmes are available to play in the UK only',
        },
        {
            'url': 'http://www.bbc.co.uk/iplayer/episode/p026c7jt/tomorrows-worlds-the-unearthly-history-of-science-fiction-2-invasion',
            'info_dict': {
                'id': 'b03k3pb7',
                'ext': 'flv',
                'title': "Tomorrow's Worlds: The Unearthly History of Science Fiction",
                'description': '2. Invasion',
                'duration': 3600,
            },
            'params': {
                # rtmp download
                'skip_download': True,
            },
            'skip': 'Currently BBC iPlayer TV programmes are available to play in the UK only',
        }, {
            'url': 'http://www.bbc.co.uk/programmes/b04v20dw',
            'info_dict': {
                'id': 'b04v209v',
                'ext': 'flv',
                'title': 'Pete Tong, The Essential New Tune Special',
                'description': "Pete has a very special mix - all of 2014's Essential New Tunes!",
                'duration': 10800,
            },
            'params': {
                # rtmp download
                'skip_download': True,
            }
        }, {
            'url': 'http://www.bbc.co.uk/music/clips/p02frcc3',
            'note': 'Audio',
            'info_dict': {
                'id': 'p02frcch',
                'ext': 'flv',
                'title': 'Pete Tong, Past, Present and Future Special, Madeon - After Hours mix',
                'description': 'French house superstar Madeon takes us out of the club and onto the after party.',
                'duration': 3507,
            },
            'params': {
                # rtmp download
                'skip_download': True,
            }
        }, {
            'url': 'http://www.bbc.co.uk/music/clips/p025c0zz',
            'note': 'Video',
            'info_dict': {
                'id': 'p025c103',
                'ext': 'flv',
                'title': 'Reading and Leeds Festival, 2014, Rae Morris - Closer (Live on BBC Three)',
                'description': 'Rae Morris performs Closer for BBC Three at Reading 2014',
                'duration': 226,
            },
            'params': {
                # rtmp download
                'skip_download': True,
            }
        }, {
            'url': 'http://www.bbc.co.uk/iplayer/playlist/p01dvks4',
            'only_matching': True,
        }, {
            'url': 'http://www.bbc.co.uk/music/clips#p02frcc3',
            'only_matching': True,
        }
    ]

    def _extract_asx_playlist(self, connection, programme_id):
        """Return the list of media hrefs referenced by an ASX playlist."""
        asx = self._download_xml(connection.get('href'), programme_id, 'Downloading ASX playlist')
        return [ref.get('href') for ref in asx.findall('./Entry/ref')]

    def _extract_connection(self, connection, programme_id):
        """Build format dicts for one mediaselector <connection> element.

        Supports plain HTTP links, HTTP ASX playlists and RTMP streams.
        """
        formats = []
        protocol = connection.get('protocol')
        supplier = connection.get('supplier')
        if protocol == 'http':
            href = connection.get('href')
            # ASX playlist
            if supplier == 'asx':
                for i, ref in enumerate(self._extract_asx_playlist(connection, programme_id)):
                    formats.append({
                        'url': ref,
                        'format_id': 'ref%s_%s' % (i, supplier),
                    })
            # Direct link
            else:
                formats.append({
                    'url': href,
                    'format_id': supplier,
                })
        elif protocol == 'rtmp':
            application = connection.get('application', 'ondemand')
            auth_string = connection.get('authString')
            identifier = connection.get('identifier')
            server = connection.get('server')
            formats.append({
                'url': '%s://%s/%s?%s' % (protocol, server, application, auth_string),
                'play_path': identifier,
                'app': '%s?%s' % (application, auth_string),
                'page_url': 'http://www.bbc.co.uk',
                'player_url': 'http://www.bbc.co.uk/emp/releases/iplayer/revisions/617463_618125_4/617463_618125_4_emp.swf',
                'rtmp_live': False,
                'ext': 'flv',
                'format_id': supplier,
            })
        return formats

    def _extract_items(self, playlist):
        """Return the <item> elements of a legacy XML playlist."""
        return playlist.findall('./{http://bbc.co.uk/2008/emp/playlist}item')

    def _extract_medias(self, media_selection):
        """Return <media> elements of a mediaselection document.

        Raises ExtractorError when the document reports an <error> instead.
        """
        error = media_selection.find('./{http://bbc.co.uk/2008/mp/mediaselection}error')
        if error is not None:
            raise ExtractorError(
                '%s returned error: %s' % (self.IE_NAME, error.get('id')), expected=True)
        return media_selection.findall('./{http://bbc.co.uk/2008/mp/mediaselection}media')

    def _extract_connections(self, media):
        """Return the <connection> elements of a <media> element."""
        return media.findall('./{http://bbc.co.uk/2008/mp/mediaselection}connection')

    def _extract_video(self, media, programme_id):
        """Build video format dicts for one <media> element."""
        formats = []
        vbr = int(media.get('bitrate'))
        vcodec = media.get('encoding')
        service = media.get('service')
        width = int(media.get('width'))
        height = int(media.get('height'))
        file_size = int(media.get('media_file_size'))
        for connection in self._extract_connections(media):
            conn_formats = self._extract_connection(connection, programme_id)
            for format in conn_formats:
                format.update({
                    'format_id': '%s_%s' % (service, format['format_id']),
                    'width': width,
                    'height': height,
                    'vbr': vbr,
                    'vcodec': vcodec,
                    'filesize': file_size,
                })
            formats.extend(conn_formats)
        return formats

    def _extract_audio(self, media, programme_id):
        """Build audio format dicts for one <media> element."""
        formats = []
        abr = int(media.get('bitrate'))
        acodec = media.get('encoding')
        service = media.get('service')
        for connection in self._extract_connections(media):
            conn_formats = self._extract_connection(connection, programme_id)
            for format in conn_formats:
                format.update({
                    'format_id': '%s_%s' % (service, format['format_id']),
                    'abr': abr,
                    'acodec': acodec,
                })
            formats.extend(conn_formats)
        return formats

    def _extract_captions(self, media, programme_id):
        """Download TTML captions and convert them to SRT text keyed by language."""
        subtitles = {}
        for connection in self._extract_connections(media):
            captions = self._download_xml(connection.get('href'), programme_id, 'Downloading captions')
            lang = captions.get('{http://www.w3.org/XML/1998/namespace}lang', 'en')
            ps = captions.findall('./{0}body/{0}div/{0}p'.format('{http://www.w3.org/2006/10/ttaf1}'))
            srt = ''
            for pos, p in enumerate(ps):
                srt += '%s\r\n%s --> %s\r\n%s\r\n\r\n' % (str(pos), p.get('begin'), p.get('end'),
                                                          p.text.strip() if p.text is not None else '')
            subtitles[lang] = srt
        return subtitles

    def _download_media_selector(self, programme_id):
        """Query the mediaselector service and return (formats, subtitles)."""
        try:
            media_selection = self._download_xml(
                'http://open.live.bbc.co.uk/mediaselector/5/select/version/2.0/mediaset/pc/vpid/%s' % programme_id,
                programme_id, 'Downloading media selection XML')
        except ExtractorError as ee:
            # A 403 response still carries a mediaselection document describing
            # the error, so parse the body instead of failing outright.
            if isinstance(ee.cause, compat_HTTPError) and ee.cause.code == 403:
                media_selection = xml.etree.ElementTree.fromstring(ee.cause.read().encode('utf-8'))
            else:
                raise
        formats = []
        subtitles = None
        for media in self._extract_medias(media_selection):
            kind = media.get('kind')
            if kind == 'audio':
                formats.extend(self._extract_audio(media, programme_id))
            elif kind == 'video':
                formats.extend(self._extract_video(media, programme_id))
            elif kind == 'captions':
                subtitles = self._extract_captions(media, programme_id)
        return formats, subtitles

    def _download_playlist(self, playlist_id):
        """Resolve a playlist id into programme metadata and formats.

        Tries the JSON playlist API first, falling back to the legacy XML
        playlist when the JSON endpoint is missing (HTTP 404).
        """
        try:
            playlist = self._download_json(
                'http://www.bbc.co.uk/programmes/%s/playlist.json' % playlist_id,
                playlist_id, 'Downloading playlist JSON')
            version = playlist.get('defaultAvailableVersion')
            if version:
                smp_config = version['smpConfig']
                title = smp_config['title']
                description = smp_config['summary']
                for item in smp_config['items']:
                    kind = item['kind']
                    if kind != 'programme' and kind != 'radioProgramme':
                        continue
                    programme_id = item.get('vpid')
                    duration = int(item.get('duration'))
                    formats, subtitles = self._download_media_selector(programme_id)
                    return programme_id, title, description, duration, formats, subtitles
        except ExtractorError as ee:
            # BUGFIX: only an HTTP 404 means "no JSON playlist, use the legacy
            # XML one"; re-raise everything else. The old condition
            # (`not isinstance(...) and code == 404`) was inverted, so real
            # failures fell through silently and non-HTTP causes could crash
            # on the missing `.code` attribute.
            if not (isinstance(ee.cause, compat_HTTPError) and ee.cause.code == 404):
                raise
        # fallback to legacy playlist
        playlist = self._download_xml(
            'http://www.bbc.co.uk/iplayer/playlist/%s' % playlist_id,
            playlist_id, 'Downloading legacy playlist XML')
        no_items = playlist.find('./{http://bbc.co.uk/2008/emp/playlist}noItems')
        if no_items is not None:
            # The playlist explains *why* there is nothing to download.
            reason = no_items.get('reason')
            if reason == 'preAvailability':
                msg = 'Episode %s is not yet available' % playlist_id
            elif reason == 'postAvailability':
                msg = 'Episode %s is no longer available' % playlist_id
            elif reason == 'noMedia':
                msg = 'Episode %s is not currently available' % playlist_id
            else:
                msg = 'Episode %s is not available: %s' % (playlist_id, reason)
            raise ExtractorError(msg, expected=True)
        for item in self._extract_items(playlist):
            kind = item.get('kind')
            if kind != 'programme' and kind != 'radioProgramme':
                continue
            title = playlist.find('./{http://bbc.co.uk/2008/emp/playlist}title').text
            description = playlist.find('./{http://bbc.co.uk/2008/emp/playlist}summary').text
            programme_id = item.get('identifier')
            duration = int(item.get('duration'))
            formats, subtitles = self._download_media_selector(programme_id)
            return programme_id, title, description, duration, formats, subtitles

    def _real_extract(self, url):
        group_id = self._match_id(url)
        webpage = self._download_webpage(url, group_id, 'Downloading video page')
        # Prefer the vpid embedded in the page; it gives direct access to the
        # episode JSON. Otherwise resolve via the playlist APIs.
        programme_id = self._search_regex(
            r'"vpid"\s*:\s*"([\da-z]{8})"', webpage, 'vpid', fatal=False, default=None)
        if programme_id:
            player = self._download_json(
                'http://www.bbc.co.uk/iplayer/episode/%s.json' % group_id,
                group_id)['jsConf']['player']
            title = player['title']
            description = player['subtitle']
            duration = player['duration']
            formats, subtitles = self._download_media_selector(programme_id)
        else:
            programme_id, title, description, duration, formats, subtitles = self._download_playlist(group_id)
        if self._downloader.params.get('listsubtitles', False):
            self._list_available_subtitles(programme_id, subtitles)
            return
        self._sort_formats(formats)
        return {
            'id': programme_id,
            'title': title,
            'description': description,
            'duration': duration,
            'formats': formats,
            'subtitles': subtitles,
        }
|
janusnic/youtube-dl-GUI
|
youtube_dl/extractor/bbccouk.py
|
Python
|
mit
| 14,360
|
from __future__ import absolute_import
from rest_framework.response import Response
from sentry.api.bases.organization import OrganizationEndpoint, OrganizationPermission
from sentry.models import Integration
class GitHubSearchEndpoint(OrganizationEndpoint):
    """Autocomplete search against an organization's GitHub integration.

    Supports two ``field`` values: ``externalIssue`` (issue search within a
    repository) and ``repo`` (repository search within the GitHub account).
    """

    permission_classes = (OrganizationPermission, )

    def get(self, request, organization, integration_id):
        try:
            integration = Integration.objects.get(
                organizations=organization,
                id=integration_id,
                provider='github',
            )
        except Integration.DoesNotExist:
            return Response(status=404)

        field = request.GET.get('field')
        query = request.GET.get('query')
        if field is None:
            return Response({'detail': 'field is a required parameter'}, status=400)
        if not query:
            return Response({'detail': 'query is a required parameter'}, status=400)

        installation = integration.get_installation(organization.id)

        if field == 'externalIssue':
            return self._search_issues(request, installation, query)
        if field == 'repo':
            return self._search_repos(installation, integration, query)
        return Response(status=400)

    def _search_issues(self, request, installation, query):
        # Issue search is scoped to a single repository, supplied by the client.
        repo = request.GET.get('repo')
        if repo is None:
            return Response({'detail': 'repo is a required parameter'}, status=400)
        try:
            resp = installation.search_issues(
                query=(u'repo:%s %s' % (repo, query)).encode('utf-8'),
            )
        except Exception as e:
            return self.handle_api_error(e)
        return Response([
            {
                'label': '#%s %s' % (issue['number'], issue['title']),
                'value': issue['number'],
            }
            for issue in resp.get('items', [])
        ])

    def _search_repos(self, installation, integration, query):
        # Qualify the search by account owner so results stay inside the
        # integrated GitHub user/organization.
        owner_kind = 'user' if integration.metadata['account_type'] == 'User' else 'org'
        full_query = (u'%s:%s %s' % (owner_kind, integration.name, query)).encode('utf-8')
        try:
            resp = installation.get_client().search_repositories(full_query)
        except Exception as e:
            return self.handle_api_error(e)
        return Response([
            {
                'label': repo_item['name'],
                'value': repo_item['full_name'],
            }
            for repo_item in resp.get('items', [])
        ])
|
ifduyue/sentry
|
src/sentry/integrations/github/search.py
|
Python
|
bsd-3-clause
| 2,249
|
from routersploit.modules.exploits.routers.dlink.dir_645_password_disclosure import Exploit
def test_check_success(target):
    """ Test scenario - successful check """
    # Stub the router's /getcfg.php endpoint so the exploit's POST receives a
    # DEVICE.ACCOUNT configuration dump that leaks plaintext credentials —
    # exactly what a vulnerable DIR-645 returns.
    route_mock = target.get_route_mock("/getcfg.php", methods=["POST"])
    route_mock.return_value = (
        """
        <?xml version="1.0" encoding="utf-8"?>
        <postxml>
        <module>
            <service>DEVICE.ACCOUNT</service>
            <device>
                <gw_name>DIR-645</gw_name>
                <account>
                    <seqno>2</seqno>
                    <max>2</max>
                    <count>2</count>
                    <entry>
                        <uid>USR-</uid>
                        <name>admin</name>
                        <usrid></usrid>
                        <password>0920983386</password>
                        <group>0</group>
                        <description></description>
                    </entry>
                    <entry>
                        <uid>USR-1</uid>
                        <name>user</name>
                        <usrid></usrid>
                        <password>3616441</password>
                        <group>101</group>
                        <description></description>
                    </entry>
                </account>
                <group>
                    <seqno></seqno>
                    <max></max>
                    <count>0</count>
                </group>
                <session>
                    <captcha>0</captcha>
                    <dummy></dummy>
                    <timeout>600</timeout>
                    <maxsession>128</maxsession>
                    <maxauthorized>16</maxauthorized>
                </session>
            </device>
        </module>
        </postxml>
        """
    )
    exploit = Exploit()
    # Module defaults before it is pointed at the mocked target.
    assert exploit.target == ""
    assert exploit.port == 8080
    exploit.target = target.host
    exploit.port = target.port
    # check() must flag the mocked device as vulnerable; run() performs the
    # disclosure and, like all routersploit run() methods, returns None.
    assert exploit.check()
    assert exploit.run() is None
|
dasseclab/dasseclab
|
clones/routersploit/tests/exploits/routers/dlink/test_dir_645_password_disclosure.py
|
Python
|
gpl-2.0
| 2,021
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from google.cloud.automl_v1.proto import (
prediction_service_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_prediction__service__pb2,
)
from google.longrunning import (
operations_pb2 as google_dot_longrunning_dot_operations__pb2,
)
class PredictionServiceStub(object):
    """AutoML Prediction API.

    On any input that is documented to expect a string parameter in
    snake_case or kebab-case, either of those cases is accepted.
    """

    def __init__(self, channel):
        """Constructor.

        Args:
          channel: A grpc.Channel.
        """
        # Unary-unary RPC: online prediction; the PredictResponse is returned
        # directly in the call's response.
        self.Predict = channel.unary_unary(
            "/google.cloud.automl.v1.PredictionService/Predict",
            request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_prediction__service__pb2.PredictRequest.SerializeToString,
            response_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_prediction__service__pb2.PredictResponse.FromString,
        )
        # Unary-unary RPC: batch prediction; returns a google.longrunning
        # Operation that the caller polls for the eventual result.
        self.BatchPredict = channel.unary_unary(
            "/google.cloud.automl.v1.PredictionService/BatchPredict",
            request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_prediction__service__pb2.BatchPredictRequest.SerializeToString,
            response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
        )
class PredictionServiceServicer(object):
    """AutoML Prediction API.

    On any input that is documented to expect a string parameter in
    snake_case or kebab-case, either of those cases is accepted.
    """

    # NOTE: generated base class — every handler is an UNIMPLEMENTED stub.
    # A concrete service subclasses this and overrides the methods it serves.

    def Predict(self, request, context):
        """Perform an online prediction. The prediction result will be directly
        returned in the response.
        Available for following ML problems, and their expected request payloads:
        * Image Classification - Image in .JPEG, .GIF or .PNG format, image_bytes
        up to 30MB.
        * Image Object Detection - Image in .JPEG, .GIF or .PNG format, image_bytes
        up to 30MB.
        * Text Classification - TextSnippet, content up to 60,000 characters,
        UTF-8 encoded.
        * Text Extraction - TextSnippet, content up to 30,000 characters,
        UTF-8 NFC encoded.
        * Translation - TextSnippet, content up to 25,000 characters, UTF-8
        encoded.
        * Text Sentiment - TextSnippet, content up 500 characters, UTF-8
        encoded.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details("Method not implemented!")
        raise NotImplementedError("Method not implemented!")

    def BatchPredict(self, request, context):
        """Perform a batch prediction. Unlike the online
        [Predict][google.cloud.automl.v1.PredictionService.Predict], batch
        prediction result won't be immediately available in the response. Instead,
        a long running operation object is returned. User can poll the operation
        result via [GetOperation][google.longrunning.Operations.GetOperation]
        method. Once the operation is done,
        [BatchPredictResult][google.cloud.automl.v1.BatchPredictResult] is returned
        in the [response][google.longrunning.Operation.response] field. Available
        for following ML problems:
        * Image Classification
        * Image Object Detection
        * Text Extraction
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details("Method not implemented!")
        raise NotImplementedError("Method not implemented!")
def add_PredictionServiceServicer_to_server(servicer, server):
    """Register *servicer*'s RPC handlers with the given grpc *server*.

    Wires each method name of the PredictionService service to the servicer's
    implementation together with its protobuf (de)serializers.
    """
    rpc_method_handlers = {
        "Predict": grpc.unary_unary_rpc_method_handler(
            servicer.Predict,
            request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_prediction__service__pb2.PredictRequest.FromString,
            response_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_prediction__service__pb2.PredictResponse.SerializeToString,
        ),
        "BatchPredict": grpc.unary_unary_rpc_method_handler(
            servicer.BatchPredict,
            request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_prediction__service__pb2.BatchPredictRequest.FromString,
            response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
        ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
        "google.cloud.automl.v1.PredictionService", rpc_method_handlers
    )
    server.add_generic_rpc_handlers((generic_handler,))
|
tseaver/google-cloud-python
|
automl/google/cloud/automl_v1/proto/prediction_service_pb2_grpc.py
|
Python
|
apache-2.0
| 4,461
|
# Field-size constants for the Tianchi mobile-recommendation dataset.
# NOTE(review): semantics are not documented in this file — presumably these
# are the cardinalities / maximum values of each raw-data column (number of
# distinct items, users, categories, and time buckets); confirm against the
# code that imports this module before relying on them.
item_id = 4986168
user_id = 20000
item_category = 9656
time = 31  # NOTE(review): generic name; likely count of day buckets — verify
|
CharLLCH/jianchi_alimobileR
|
ftrldata/TCReBuild/codes/mylibs/size.py
|
Python
|
gpl-2.0
| 65
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2012 Dag Wieers <dag@wieers.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['stableinterface'],
'supported_by': 'core'}
DOCUMENTATION = '''
---
module: assert
short_description: Asserts given expressions are true
description:
- This module asserts that given expressions are true with an optional custom message.
- This module is also supported for Windows targets.
version_added: "1.5"
options:
that:
description:
- "A string expression of the same form that can be passed to the 'when' statement"
- "Alternatively, a list of string expressions"
required: true
msg:
description:
- "The customized message used for a failing assertion"
notes:
- This module is also supported for Windows targets.
author:
- "Ansible Core Team"
- "Michael DeHaan"
'''
EXAMPLES = '''
- assert: { that: "ansible_os_family != 'RedHat'" }
- assert:
that:
- "'foo' in some_command_result.stdout"
- "number_of_the_counting == 3"
- assert:
that:
- "my_param <= 100"
- "my_param >= 0"
msg: "'my_param' must be between 0 and 100"
'''
|
sonaht/ansible
|
lib/ansible/modules/utilities/logic/assert.py
|
Python
|
gpl-3.0
| 1,869
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2017 Google
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# ----------------------------------------------------------------------------
#
# *** AUTO GENERATED CODE *** AUTO GENERATED CODE ***
#
# ----------------------------------------------------------------------------
#
# This file is automatically generated by Magic Modules and manual
# changes will be clobbered when the file is regenerated.
#
# Please read more about how to change this file at
# https://www.github.com/GoogleCloudPlatform/magic-modules
#
# ----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function
__metaclass__ = type
################################################################################
# Documentation
################################################################################
ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ["preview"], 'supported_by': 'community'}
DOCUMENTATION = '''
---
module: gcp_compute_target_https_proxy
description:
- Represents a TargetHttpsProxy resource, which is used by one or more global forwarding
rule to route incoming HTTPS requests to a URL map.
short_description: Creates a GCP TargetHttpsProxy
version_added: 2.6
author: Google Inc. (@googlecloudplatform)
requirements:
- python >= 2.6
- requests >= 2.18.4
- google-auth >= 1.3.0
options:
state:
description:
- Whether the given object should exist in GCP
choices:
- present
- absent
default: present
description:
description:
- An optional description of this resource.
required: false
name:
description:
- Name of the resource. Provided by the client when the resource is created. The
name must be 1-63 characters long, and comply with RFC1035. Specifically, the
name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?`
which means the first character must be a lowercase letter, and all following
characters must be a dash, lowercase letter, or digit, except the last character,
which cannot be a dash.
required: true
quic_override:
description:
- Specifies the QUIC override policy for this resource. This determines whether
the load balancer will attempt to negotiate QUIC with clients or not. Can specify
one of NONE, ENABLE, or DISABLE. If NONE is specified, uses the QUIC policy
with no user overrides, which is equivalent to DISABLE. Not specifying this
field is equivalent to specifying NONE.
required: false
version_added: 2.7
choices:
- NONE
- ENABLE
- DISABLE
ssl_certificates:
description:
- A list of SslCertificate resources that are used to authenticate connections
between users and the load balancer. Currently, exactly one SSL certificate
must be specified.
required: true
ssl_policy:
description:
- A reference to the SslPolicy resource that will be associated with the TargetHttpsProxy
resource. If not set, the TargetHttpsProxy resource will not have any SSL policy
configured.
- 'This field represents a link to a SslPolicy resource in GCP. It can be specified
in two ways. First, you can place in the selfLink of the resource here as a
string Alternatively, you can add `register: name-of-resource` to a gcp_compute_ssl_policy
task and then set this ssl_policy field to "{{ name-of-resource }}"'
required: false
version_added: 2.8
url_map:
description:
- A reference to the UrlMap resource that defines the mapping from URL to the
BackendService.
- 'This field represents a link to a UrlMap resource in GCP. It can be specified
in two ways. First, you can place in the selfLink of the resource here as a
string Alternatively, you can add `register: name-of-resource` to a gcp_compute_url_map
task and then set this url_map field to "{{ name-of-resource }}"'
required: true
extends_documentation_fragment: gcp
notes:
- 'API Reference: U(https://cloud.google.com/compute/docs/reference/latest/targetHttpsProxies)'
- 'Official Documentation: U(https://cloud.google.com/compute/docs/load-balancing/http/target-proxies)'
'''
EXAMPLES = '''
- name: create a instance group
gcp_compute_instance_group:
name: "instancegroup-targethttpsproxy"
zone: us-central1-a
project: "{{ gcp_project }}"
auth_kind: "{{ gcp_cred_kind }}"
service_account_file: "{{ gcp_cred_file }}"
state: present
register: instancegroup
- name: create a http health check
gcp_compute_http_health_check:
name: "httphealthcheck-targethttpsproxy"
healthy_threshold: 10
port: 8080
timeout_sec: 2
unhealthy_threshold: 5
project: "{{ gcp_project }}"
auth_kind: "{{ gcp_cred_kind }}"
service_account_file: "{{ gcp_cred_file }}"
state: present
register: healthcheck
- name: create a backend service
gcp_compute_backend_service:
name: "backendservice-targethttpsproxy"
backends:
- group: "{{ instancegroup }}"
health_checks:
- "{{ healthcheck.selfLink }}"
enable_cdn: true
project: "{{ gcp_project }}"
auth_kind: "{{ gcp_cred_kind }}"
service_account_file: "{{ gcp_cred_file }}"
state: present
register: backendservice
- name: create a url map
gcp_compute_url_map:
name: "urlmap-targethttpsproxy"
default_service: "{{ backendservice }}"
project: "{{ gcp_project }}"
auth_kind: "{{ gcp_cred_kind }}"
service_account_file: "{{ gcp_cred_file }}"
state: present
register: urlmap
- name: create a ssl certificate
gcp_compute_ssl_certificate:
name: "sslcert-targethttpsproxy"
description: A certificate for testing. Do not use this certificate in production
certificate: |
-----BEGIN CERTIFICATE-----
MIICqjCCAk+gAwIBAgIJAIuJ+0352Kq4MAoGCCqGSM49BAMCMIGwMQswCQYDVQQG
EwJVUzETMBEGA1UECAwKV2FzaGluZ3RvbjERMA8GA1UEBwwIS2lya2xhbmQxFTAT
BgNVBAoMDEdvb2dsZSwgSW5jLjEeMBwGA1UECwwVR29vZ2xlIENsb3VkIFBsYXRm
b3JtMR8wHQYDVQQDDBZ3d3cubXktc2VjdXJlLXNpdGUuY29tMSEwHwYJKoZIhvcN
AQkBFhJuZWxzb25hQGdvb2dsZS5jb20wHhcNMTcwNjI4MDQ1NjI2WhcNMjcwNjI2
MDQ1NjI2WjCBsDELMAkGA1UEBhMCVVMxEzARBgNVBAgMCldhc2hpbmd0b24xETAP
BgNVBAcMCEtpcmtsYW5kMRUwEwYDVQQKDAxHb29nbGUsIEluYy4xHjAcBgNVBAsM
FUdvb2dsZSBDbG91ZCBQbGF0Zm9ybTEfMB0GA1UEAwwWd3d3Lm15LXNlY3VyZS1z
aXRlLmNvbTEhMB8GCSqGSIb3DQEJARYSbmVsc29uYUBnb29nbGUuY29tMFkwEwYH
KoZIzj0CAQYIKoZIzj0DAQcDQgAEHGzpcRJ4XzfBJCCPMQeXQpTXwlblimODQCuQ
4mzkzTv0dXyB750fOGN02HtkpBOZzzvUARTR10JQoSe2/5PIwaNQME4wHQYDVR0O
BBYEFKIQC3A2SDpxcdfn0YLKineDNq/BMB8GA1UdIwQYMBaAFKIQC3A2SDpxcdfn
0YLKineDNq/BMAwGA1UdEwQFMAMBAf8wCgYIKoZIzj0EAwIDSQAwRgIhALs4vy+O
M3jcqgA4fSW/oKw6UJxp+M6a+nGMX+UJR3YgAiEAvvl39QRVAiv84hdoCuyON0lJ
zqGNhIPGq2ULqXKK8BY=
-----END CERTIFICATE-----
private_key: |
-----BEGIN EC PRIVATE KEY-----
MHcCAQEEIObtRo8tkUqoMjeHhsOh2ouPpXCgBcP+EDxZCB/tws15oAoGCCqGSM49
AwEHoUQDQgAEHGzpcRJ4XzfBJCCPMQeXQpTXwlblimODQCuQ4mzkzTv0dXyB750f
OGN02HtkpBOZzzvUARTR10JQoSe2/5PIwQ==
-----END EC PRIVATE KEY-----
project: "{{ gcp_project }}"
auth_kind: "{{ gcp_cred_kind }}"
service_account_file: "{{ gcp_cred_file }}"
state: present
register: sslcert
- name: create a target https proxy
gcp_compute_target_https_proxy:
name: "test_object"
ssl_certificates:
- "{{ sslcert }}"
url_map: "{{ urlmap }}"
project: "test_project"
auth_kind: "serviceaccount"
service_account_file: "/tmp/auth.pem"
state: present
'''
RETURN = '''
creationTimestamp:
description:
- Creation timestamp in RFC3339 text format.
returned: success
type: str
description:
description:
- An optional description of this resource.
returned: success
type: str
id:
description:
- The unique identifier for the resource.
returned: success
type: int
name:
description:
- Name of the resource. Provided by the client when the resource is created. The
name must be 1-63 characters long, and comply with RFC1035. Specifically, the
name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?`
which means the first character must be a lowercase letter, and all following
characters must be a dash, lowercase letter, or digit, except the last character,
which cannot be a dash.
returned: success
type: str
quicOverride:
description:
- Specifies the QUIC override policy for this resource. This determines whether
the load balancer will attempt to negotiate QUIC with clients or not. Can specify
one of NONE, ENABLE, or DISABLE. If NONE is specified, uses the QUIC policy with
no user overrides, which is equivalent to DISABLE. Not specifying this field is
equivalent to specifying NONE.
returned: success
type: str
sslCertificates:
description:
- A list of SslCertificate resources that are used to authenticate connections between
users and the load balancer. Currently, exactly one SSL certificate must be specified.
returned: success
type: list
sslPolicy:
description:
- A reference to the SslPolicy resource that will be associated with the TargetHttpsProxy
resource. If not set, the TargetHttpsProxy resource will not have any SSL policy
configured.
returned: success
type: str
urlMap:
description:
- A reference to the UrlMap resource that defines the mapping from URL to the BackendService.
returned: success
type: str
'''
################################################################################
# Imports
################################################################################
from ansible.module_utils.gcp_utils import navigate_hash, GcpSession, GcpModule, GcpRequest, replace_resource_dict
import json
import time
################################################################################
# Main
################################################################################
def main():
    """Entry point: reconcile the declared state with the live resource.

    Fetches the current TargetHttpsProxy, then creates, updates, or
    deletes it so the remote state matches ``state`` in the task params.
    Exits the module with the resulting resource plus a ``changed`` flag.
    """
    module = GcpModule(
        argument_spec=dict(
            state=dict(default='present', choices=['present', 'absent'], type='str'),
            description=dict(type='str'),
            name=dict(required=True, type='str'),
            quic_override=dict(type='str', choices=['NONE', 'ENABLE', 'DISABLE']),
            ssl_certificates=dict(required=True, type='list'),
            ssl_policy=dict(),
            url_map=dict(required=True),
        )
    )

    # Default OAuth scope when the playbook did not supply one.
    if not module.params['scopes']:
        module.params['scopes'] = ['https://www.googleapis.com/auth/compute']

    kind = 'compute#targetHttpsProxy'
    wants_present = module.params['state'] == 'present'
    remote = fetch_resource(module, self_link(module), kind)
    changed = False

    if remote and wants_present:
        # Exists and should exist: update only when configuration drifted.
        if is_different(module, remote):
            update(module, self_link(module), kind, remote)
            remote = fetch_resource(module, self_link(module), kind)
            changed = True
    elif remote:
        # Exists but should not: remove it.
        delete(module, self_link(module), kind)
        remote = {}
        changed = True
    elif wants_present:
        # Missing but should exist: create it.
        remote = create(module, collection(module), kind)
        changed = True
    else:
        # Missing and should be missing: nothing to do.
        remote = {}

    remote.update({'changed': changed})
    module.exit_json(**remote)
def create(module, link, kind):
    """POST the proxy described by the module params and wait for the op."""
    session = GcpSession(module, 'compute')
    response = session.post(link, resource_to_request(module))
    return wait_for_operation(module, response)
def update(module, link, kind, fetch):
    """Apply per-field update calls, then re-read the live resource."""
    desired = resource_to_request(module)
    current = response_to_hash(module, fetch)
    update_fields(module, desired, current)
    return fetch_resource(module, self_link(module), kind)
def update_fields(module, request, response):
    """Invoke the field-specific update endpoint for each changed field.

    The API has no single PATCH for these fields; each one has its own
    ``set*`` action, called only when desired and live values differ.
    """
    handlers = (
        ('quicOverride', quic_override_update),
        ('sslCertificates', ssl_certificates_update),
        ('sslPolicy', ssl_policy_update),
        ('urlMap', url_map_update),
    )
    for field, handler in handlers:
        if response.get(field) != request.get(field):
            handler(module, request, response)
def quic_override_update(module, request, response):
    """POST the setQuicOverride action for this proxy."""
    url = (
        "https://www.googleapis.com/compute/v1/"
        "projects/{project}/global/targetHttpsProxies/{name}/setQuicOverride"
    ).format(**module.params)
    body = {u'quicOverride': module.params.get('quic_override')}
    GcpSession(module, 'compute').post(url, body)
def ssl_certificates_update(module, request, response):
    """POST the setSslCertificates action for this proxy."""
    # NOTE: this endpoint has no /global/ path segment, unlike the others.
    url = (
        "https://www.googleapis.com/compute/v1/"
        "projects/{project}/targetHttpsProxies/{name}/setSslCertificates"
    ).format(**module.params)
    body = {u'sslCertificates': replace_resource_dict(module.params.get('ssl_certificates', []), 'selfLink')}
    GcpSession(module, 'compute').post(url, body)
def ssl_policy_update(module, request, response):
    """POST the setSslPolicy action for this proxy."""
    url = (
        "https://www.googleapis.com/compute/v1/"
        "projects/{project}/global/targetHttpsProxies/{name}/setSslPolicy"
    ).format(**module.params)
    body = {u'sslPolicy': replace_resource_dict(module.params.get(u'ssl_policy', {}), 'selfLink')}
    GcpSession(module, 'compute').post(url, body)
def url_map_update(module, request, response):
    """POST the setUrlMap action for this proxy."""
    # NOTE: this endpoint has no /global/ path segment, unlike setSslPolicy.
    url = (
        "https://www.googleapis.com/compute/v1/"
        "projects/{project}/targetHttpsProxies/{name}/setUrlMap"
    ).format(**module.params)
    body = {u'urlMap': replace_resource_dict(module.params.get(u'url_map', {}), 'selfLink')}
    GcpSession(module, 'compute').post(url, body)
def delete(module, link, kind):
    """DELETE the resource at *link* and block until the operation ends."""
    session = GcpSession(module, 'compute')
    return wait_for_operation(module, session.delete(link))
def resource_to_request(module):
    """Build the API request body from module params.

    Values that are falsy are dropped, but an explicit False is kept so
    boolean fields can still be sent.
    """
    request = {
        u'kind': 'compute#targetHttpsProxy',
        u'description': module.params.get('description'),
        u'name': module.params.get('name'),
        u'quicOverride': module.params.get('quic_override'),
        u'sslCertificates': replace_resource_dict(module.params.get('ssl_certificates', []), 'selfLink'),
        u'sslPolicy': replace_resource_dict(module.params.get(u'ssl_policy', {}), 'selfLink'),
        u'urlMap': replace_resource_dict(module.params.get(u'url_map', {}), 'selfLink'),
    }
    return {k: v for k, v in request.items() if v or v is False}
def fetch_resource(module, link, kind, allow_not_found=True):
    """GET *link* and return the parsed resource, or None when absent."""
    session = GcpSession(module, 'compute')
    return return_if_object(module, session.get(link), kind, allow_not_found)
def self_link(module):
    """Canonical URL of this targetHttpsProxy resource."""
    template = ("https://www.googleapis.com/compute/v1/"
                "projects/{project}/global/targetHttpsProxies/{name}")
    return template.format(**module.params)
def collection(module):
    """URL of the project-wide targetHttpsProxies collection."""
    template = ("https://www.googleapis.com/compute/v1/"
                "projects/{project}/global/targetHttpsProxies")
    return template.format(**module.params)
def return_if_object(module, response, kind, allow_not_found=False):
    """Decode *response* into a dict, treating 404/204 as "no resource".

    Fails the module on unparsable JSON or an API-reported error list.
    """
    # A tolerated 404 or an empty 204 body both mean "nothing to return".
    if (allow_not_found and response.status_code == 404) or response.status_code == 204:
        return None
    try:
        module.raise_for_status(response)
        result = response.json()
    except getattr(json.decoder, 'JSONDecodeError', ValueError):
        module.fail_json(msg="Invalid JSON response with error: %s" % response.text)
    if navigate_hash(result, ['error', 'errors']):
        module.fail_json(msg=navigate_hash(result, ['error', 'errors']))
    return result
def is_different(module, response):
    """Return True when the desired request differs from the live resource.

    Only keys present in both dicts are compared, so output-only fields
    (id, creationTimestamp, ...) can never trigger an update.
    """
    request = resource_to_request(module)
    response = response_to_hash(module, response)
    shared_response = {k: v for k, v in response.items() if k in request}
    shared_request = {k: v for k, v in request.items() if k in response}
    return GcpRequest(shared_request) != GcpRequest(shared_response)
# Remove unnecessary properties from the response.
# This is for doing comparisons with Ansible's current parameters.
def response_to_hash(module, response):
    """Normalize an API response down to the fields Ansible manages.

    ``description`` and ``name`` come from the task params (the API
    echoes them back unchanged); the rest come from the live resource.
    """
    hashed = {
        u'description': module.params.get('description'),
        u'name': module.params.get('name'),
    }
    for key in (u'creationTimestamp', u'id', u'quicOverride',
                u'sslCertificates', u'sslPolicy', u'urlMap'):
        hashed[key] = response.get(key)
    return hashed
def async_op_url(module, extra_data=None):
    """URL of the global Operations endpoint for a pending operation.

    *extra_data* supplies template fields (``op_id``) not present in the
    module params; module params win on key collisions.
    """
    params = dict(extra_data or {})
    params.update(module.params)
    return ("https://www.googleapis.com/compute/v1/"
            "projects/{project}/global/operations/{op_id}").format(**params)
def wait_for_operation(module, response):
    """Block until the compute operation completes, then fetch its target."""
    op_result = return_if_object(module, response, 'compute#operation')
    if op_result is None:
        return {}
    status = navigate_hash(op_result, ['status'])
    finished = wait_for_completion(status, op_result, module)
    target = navigate_hash(finished, ['targetLink'])
    return fetch_resource(module, target, 'compute#targetHttpsProxy')
def wait_for_completion(status, op_result, module):
    """Poll the operation URI once a second until its status is DONE.

    Fails the module immediately if the operation reports errors.
    """
    op_uri = async_op_url(module, {'op_id': navigate_hash(op_result, ['name'])})
    while status != 'DONE':
        raise_if_errors(op_result, ['error', 'errors'], module)
        time.sleep(1.0)
        op_result = fetch_resource(module, op_uri, 'compute#operation')
        status = navigate_hash(op_result, ['status'])
    return op_result
def raise_if_errors(response, err_path, module):
    """Fail the module if the operation payload carries an error list."""
    found = navigate_hash(response, err_path)
    if found is not None:
        module.fail_json(msg=found)
# Standard entry guard: run the module only when executed directly.
if __name__ == '__main__':
    main()
|
valentin-krasontovitsch/ansible
|
lib/ansible/modules/cloud/google/gcp_compute_target_https_proxy.py
|
Python
|
gpl-3.0
| 18,284
|
#!/usr/bin/env python
# icon.py
#http://www.saltycrane.com/blog/2007/12/pyqt-43-qtableview-qabstracttable-model/
#http://www.commandprompt.com/community/pyqt/book1
#http://doc.qt.nokia.com/latest/qstandarditemmodel.html
#http://www.interactivestars.com/lost_zodiac/lost_zodiac_sign.html <- interesting
#http://www.ips-planetarium.org/planetarian/articles/realconstellations_zodiac.html <- this too
import os
import sys
import datetime
import argparse
from shlex import split
from subprocess import call
from re import findall, match
from PyQt4 import QtGui, QtCore
import swisseph
from . import geolocationwidget ## from example, but modified a little
from .astro_rewrite import previous_new_moon, updatePandC, get_signs, grab_phase,\
state_to_string, get_sunrise_and_sunset, \
compare_to_the_second, get_planet_day
from .astroclock import AstroClock
from .astrocalendar import AstroCalendar
from .astrowidgets import PlanetaryHoursList, MoonCycleList, SignsForDayList, housesDialog
from .eventplanner import EventsList
from .chronostext import prepare_planetary_info, prepare_moon_cycle, prepare_sign_info, \
prepare_events, prepare_all
from .guiconfig import ChronosLNXConfig
from . import APPNAME, APPVERSION, DESCRIPTION, EMAIL, AUTHOR, YEAR, PAGE
# When True, notifications go through the external `notify-send` command
# instead of Qt's tray balloon (see show_notification).
# NOTE(review): hard-coded True — presumably meant to be detected at
# runtime (availability of notify-send); confirm.
pynf = True
#http://pastebin.com/BvNx9wdk
class ReusableDialog(QtGui.QDialog):
    """A dialog that hides instead of closing while the app sits in the tray.

    Some dialogs are cheaper to construct once and re-show than to rebuild
    from scratch on every use.
    """

    def __init__(self, *args):
        super().__init__(*args)

    def closeEvent(self, event):
        """Intercept close: hide (and keep the dialog alive) when the
        parent window has a visible tray icon."""
        owner = self.parent()
        if hasattr(owner, "trayIcon") and owner.trayIcon.isVisible():
            self.hide()
            event.ignore()
class ChronosLNX(QtGui.QMainWindow):
def __init__(self, parent=None):
    """Build the main window: timers, dialogs, tray icon, and dock widgets.

    Order matters: the settings/save dialogs and tray icon are created
    before add_widgets(), which references them.
    """
    super().__init__(parent)
    self.timer = QtCore.QTimer(self)       # 1 s tick: general refresh via update()
    self.draw_timer = QtCore.QTimer(self)  # 60 s tick: astro clock redraw
    self.now = clnxcfg.observer.obvdate
    self.make_settings_dialog()
    self.make_save_for_date_range()
    self.make_tray_icon()
    self.setWindowTitle(APPNAME)
    # Natal houses/zodiac computed once from the configured birth data.
    self.houses, self.zodiac = get_signs(clnxcfg.baby.obvdate, clnxcfg.baby,
                                         clnxcfg.show_nodes, clnxcfg.show_admi)
    #self.setDocumentMode (True)
    self.add_widgets()
    self.timer.timeout.connect(self.update)
    self.draw_timer.timeout.connect(self.update_astro_clock)
    self.setDockNestingEnabled(True)
    self.timer.start(1000)
    # Only spend cycles redrawing the astro clock when it is shown.
    if self.astroClock is not None:
        self.draw_timer.start(60000)
def add_widgets(self):
    """Create the central astro clock, the dock widgets, and the main
    toolbar, then populate everything for the current moment."""
    ##left pane
    if clnxcfg.show_aclk:
        self.astroClock = AstroClock(self)
        self.setCentralWidget(self.astroClock)
    else:
        self.astroClock = None
    #self.astroClock.hide()
    # "Info for Today" rich-text label in its own dock.
    self.todayOther = QtGui.QLabel()
    self.todayOther.setTextFormat(QtCore.Qt.RichText)
    docktlabel = QtGui.QDockWidget(self)
    docktlabel.setWidget(self.todayOther)
    docktlabel.setWindowTitle("Info for Today")
    self.addDockWidget(QtCore.Qt.RightDockWidgetArea, docktlabel)
    dockcalendar = QtGui.QDockWidget(self)
    self.calendar = AstroCalendar(dockcalendar)
    dockcalendar.setWidget(self.calendar)
    self.addDockWidget(QtCore.Qt.RightDockWidgetArea, dockcalendar)
    dockcalendar.setWindowTitle("Calendar")
    self.make_calendar_menu()
    # Toolbar actions. Dialog constructors are wrapped in lambdas so each
    # invocation sees the *current* config values.
    # NOTE(review): aspectsDialog is not among the visible imports of this
    # chunk — confirm it is imported elsewhere in the file.
    aspectsAction = QtGui.QAction(QtGui.QIcon.fromTheme("view-calendar-list"), 'Aspects for Now', self)
    aspectsAction.triggered.connect(lambda: aspectsDialog(self, self.zodiac, clnxcfg.natal_data[1],
                                                          clnxcfg.main_icons, clnxcfg.sign_icons,
                                                          clnxcfg.pluto_alt, clnxcfg.show_admi,
                                                          clnxcfg.show_nodes, clnxcfg.orbs)
                                    )
    housesAction = QtGui.QAction(QtGui.QIcon.fromTheme("measure"), 'Houses for Now', self)
    housesAction.triggered.connect(lambda: housesDialog(self, self.houses,
                                                        clnxcfg.capricorn_alt, clnxcfg.sign_icons))
    natalAction = QtGui.QAction(QtGui.QIcon.fromTheme("view-calendar-birthday"), '&View Natal Data', self)
    natalAction.triggered.connect(lambda: self.get_info_for_date(clnxcfg.baby.obvdate, birth = True))
    saveRangeAction = QtGui.QAction(QtGui.QIcon.fromTheme("document-save-as"), 'Save data from dates', self)
    saveRangeAction.triggered.connect(self.save_for_range_dialog.open)
    settingsAction = QtGui.QAction(QtGui.QIcon.fromTheme('preferences-other'), 'Settings', self)
    settingsAction.triggered.connect(self.settings_dialog.open)
    helpAction = QtGui.QAction(QtGui.QIcon.fromTheme('help-contents'), 'Help', self)
    helpAction.triggered.connect(self.show_help)
    aboutAction = QtGui.QAction(QtGui.QIcon.fromTheme('help-about'), 'About', self)
    aboutAction.triggered.connect(self.show_about)
    toolbar = self.addToolBar('Main')
    toolbar.addAction(aspectsAction)
    toolbar.addAction(housesAction)
    toolbar.addAction(natalAction)
    toolbar.addAction(saveRangeAction)
    toolbar.addAction(settingsAction)
    toolbar.addAction(helpAction)
    toolbar.addAction(aboutAction)
    ##right pane
    #dayinfo = QtGui.QHBoxLayout()
    #self.todayPicture = QtGui.QLabel()
    #dayinfo.addWidget(self.todayPicture)
    #dayinfo.addWidget(self.todayOther)
    # Tabbed dock stack: hours -> moon -> signs -> events.
    dockhours = QtGui.QDockWidget(self)
    self.hoursToday = PlanetaryHoursList(self)
    dockhours.setWindowTitle("Planetary Hours")
    dockhours.setWidget(self.hoursToday)
    self.addDockWidget(QtCore.Qt.RightDockWidgetArea, dockhours)
    dockmoon = QtGui.QDockWidget(self)
    self.moonToday = MoonCycleList(self)
    dockmoon.setWindowTitle("Moon Phases")
    dockmoon.setWidget(self.moonToday)
    self.tabifyDockWidget(dockhours, dockmoon)
    docksigns = QtGui.QDockWidget(self)
    self.signsToday = SignsForDayList(clnxcfg.main_icons, clnxcfg.sign_icons,
                                      clnxcfg.show_admi, clnxcfg.show_nodes,
                                      clnxcfg.pluto_alt, clnxcfg.capricorn_alt,
                                      table=clnxcfg.natal_data[1],
                                      orbs=clnxcfg.orbs, parent=self)
    docksigns.setWindowTitle("Signs")
    docksigns.setWidget(self.signsToday)
    self.tabifyDockWidget(dockmoon, docksigns)
    dockevents = QtGui.QDockWidget(self)
    self.eventsToday = EventsList(self)
    dockevents.setWindowTitle("Events")
    dockevents.setWidget(self.eventsToday)
    self.tabifyDockWidget(docksigns, dockevents)
    #comment this out later
    # Initial population for "now".
    self.update_widgets_config()
    self.prepare_hours_for_today()
    self.moonToday.get_moon_cycle(self.now)
    self.moonToday.highlight_cycle_phase(self.now)
    self.signsToday.get_constellations(self.now, clnxcfg.observer)
    clnxcfg.todays_schedule.setDate(self.now.date())
    self.eventsToday.tree.setModel(clnxcfg.todays_schedule)
    self.update()
def update_astro_clock(self):
    """Push the current houses/zodiac data to the astro clock, if shown."""
    clock = self.astroClock
    if clock is None:
        return
    clock.signData = [self.houses, self.zodiac]
def update_widgets_config(self):
    """Propagate the current configuration to every widget.

    Creates or tears down the astro clock to match ``show_aclk``, then
    refreshes icon sets, natal data, and display options on the calendar
    and the list widgets.
    """
    app.setStyleSheet(clnxcfg.stylesheet)
    if not clnxcfg.show_aclk:
        # Clock disabled: drop the central widget and stop its redraw timer.
        if self.astroClock is not None:
            self.setCentralWidget(None)
            self.astroClock = None
            self.draw_timer.stop()
    else:
        # Clock enabled: (re)build it and push every config-derived field.
        self.astroClock = AstroClock(self)
        self.setCentralWidget(self.astroClock)
        self.astroClock.icons = clnxcfg.main_icons
        self.astroClock.sign_icons = clnxcfg.sign_icons
        self.astroClock.natData = clnxcfg.natal_data
        self.astroClock.bd = clnxcfg.baby.obvdate
        self.astroClock.signData = [self.houses, self.zodiac]
        self.astroClock.hours = self.hoursToday
        self.astroClock.pluto_alternate = clnxcfg.pluto_alt
        self.astroClock.capricorn_alternate = clnxcfg.capricorn_alt
        self.astroClock.orbs = clnxcfg.orbs
        self.astroClock.observer = clnxcfg.observer
        if not clnxcfg.use_css:
            self.astroClock.init_colors()
        self.draw_timer.start(60000)
    self.calendar.setRefinements(clnxcfg.refinements)
    self.calendar.setIcons(clnxcfg.moon_icons)
    self.calendar.setShowPhase(clnxcfg.show_mcal)
    self.calendar.setSolarReturn(clnxcfg.show_sr)
    self.calendar.setLunarReturn(clnxcfg.show_lr)
    self.calendar.setBirthTime(clnxcfg.baby.obvdate)
    self.calendar.setNatalMoon(clnxcfg.natal_moon)
    self.calendar.setNatalSun(clnxcfg.natal_sun)
    self.calendar.useCSS = clnxcfg.use_css
    self.calendar.observer = clnxcfg.observer
    self.hoursToday.icons = clnxcfg.main_icons
    self.moonToday.icons = clnxcfg.moon_icons
    self.moonToday.refinement = clnxcfg.refinements['Moon Phase']
    self.signsToday.table = clnxcfg.natal_data[1]
    self.signsToday.icons = clnxcfg.main_icons
    self.signsToday.sign_icons = clnxcfg.sign_icons
    self.signsToday.admi = clnxcfg.show_admi
    self.signsToday.nodes = clnxcfg.show_nodes
    self.signsToday.pluto_alternate = clnxcfg.pluto_alt
    self.signsToday.capricorn_alternate = clnxcfg.capricorn_alt
    self.signsToday.orbs = clnxcfg.orbs
##time related
def update_hours(self):
    """Refresh hour/sign/event widgets for the (possibly new) current moment."""
    self.hoursToday.clear()
    self.signsToday.tree.clear()
    self.now = clnxcfg.observer.obvdate  # re-read "now" from the observer
    self.prepare_hours_for_today()
    self.eventsToday.tree.model().setDate(self.now.date())
    self.signsToday.get_constellations(self.now, clnxcfg.observer)
def update_moon_cycle(self):
    """Rebuild the moon-cycle list on the day a new lunation begins."""
    today = self.now.timetuple().tm_yday
    if previous_new_moon(self.now).timetuple().tm_yday != today:
        return
    moon = self.moonToday
    moon.clear()
    moon.get_moon_cycle(self.now)
    moon.highlight_cycle_phase(self.now)
def prepare_hours_for_today(self):
    """Populate the planetary-hours list for the current planetary day.

    If "now" is before today's sunrise, the planetary day is still
    yesterday's, so the hours (and the day ruler) are computed from the
    previous calendar date.
    """
    dayn = self.now.isoweekday() % 7
    self.pday = get_planet_day(dayn)
    self.sunrise, self.sunset, self.next_sunrise = get_sunrise_and_sunset(self.now, clnxcfg.observer)
    if self.astroClock is not None:
        self.astroClock.nexts = self.next_sunrise
    if self.now < self.sunrise:
        # BUG FIX: the file only does `import datetime`, so the original
        # bare `timedelta(days=1)` raised NameError; qualify the name.
        yesterday = self.now - datetime.timedelta(days=1)
        self.sunrise, self.sunset, self.next_sunrise = get_sunrise_and_sunset(yesterday, clnxcfg.observer)
        if self.astroClock is not None:
            self.astroClock.nexts = self.next_sunrise
        self.hoursToday.prepareHours(yesterday, clnxcfg.observer)
        self.pday = get_planet_day(dayn - 1)
    else:
        self.hoursToday.prepareHours(self.now, clnxcfg.observer)
#http://www.riverbankcomputing.co.uk/static/Docs/PyQt4/html/qtreewidgetitem.html#setIcon
#http://www.riverbankcomputing.co.uk/static/Docs/PyQt4/html/qtreewidget.html
def show_notification(self, title, text, ptrigger):
    """Show a desktop notification via notify-send, the tray icon, or a
    message box (in that order of preference).

    *ptrigger* selects the planetary-hour icon instead of the app logo.
    """
    if pynf:
        # External notify-send: resolve the skin-relative icon path first.
        fldr = QtCore.QDir("skin:/")
        if ptrigger:
            path = clnxcfg.grab_icon_path("planets", self.phour.lower())
        else:
            path = clnxcfg.grab_icon_path("misc", "chronoslnx")
        path = fldr.absoluteFilePath(path.replace("skin:", ""))
        call(['notify-send', '-t', '10000', '-a', APPNAME,
              '-i', path, title, text])
    else:
        if self.trayIcon.supportsMessages():
            if ptrigger:
                self.trayIcon.showMessage(title, text, msecs=10000)
                #clnxcfg.main_icons[self.phour],msecs = 10000)
            else:
                self.trayIcon.showMessage(title, text, msecs=10000)
                #clnxcfg.main_icons['logo'],msecs = 10000)
        else:
            #last resort, as popup dialogs are annoying
            # NOTE(review): self.main_pixmaps is not assigned anywhere in
            # this chunk — confirm it exists (possibly clnxcfg's).
            if ptrigger:
                pixmap = self.main_pixmaps[self.phour]
            else:
                pixmap = self.main_pixmaps['logo']
            dialog = QtGui.QMessageBox(self)
            # BUG FIX: QMessageBox has no setTitle(); the original called
            # setTitle() twice, losing the title and never setting the body.
            dialog.setWindowTitle(title)
            dialog.setText(text)
            dialog.setIconPixmap(pixmap)
            dialog.open()
##datepicking related
#http://eli.thegreenplace.net/2011/04/25/passing-extra-arguments-to-pyqt-slot/
def get_info_for_date(self, date, birth = False):
    """Open a dialog with hours/moon/signs/events tabs for *date*.

    With birth=True the configured birth observer and precomputed natal
    data are used instead of recalculating signs for the date.
    """
    info_dialog = QtGui.QDialog(self)
    dateinfo = "Info for %s" %(date.strftime("%m/%d/%Y"))
    if birth:
        ob = clnxcfg.baby
        text = ("\nNote: This is for the birth timezone %s and this time."
                "\nIf you want adjust your birth time, go to Settings.") \
                % clnxcfg.baby.obvdate.tzname()
    else:
        ob = clnxcfg.observer
        text = ""
    # NOTE(review): infotext is computed but never used below — presumably
    # it was meant to be the dialog's window title; confirm.
    infotext = "{dateinfo}{text}".format(**locals())
    info_dialog.setAttribute(QtCore.Qt.WA_DeleteOnClose)
    vbox = QtGui.QVBoxLayout(info_dialog)
    vbox.addWidget(QtGui.QLabel(text))
    hoursToday = PlanetaryHoursList(info_dialog)
    hoursToday.icons = clnxcfg.main_icons
    moonToday = MoonCycleList(info_dialog)
    moonToday.refinement = clnxcfg.refinements['Moon Phase']
    moonToday.icons = clnxcfg.moon_icons
    signsToday = SignsForDayList(clnxcfg.main_icons, clnxcfg.sign_icons,
                                 clnxcfg.show_admi, clnxcfg.show_nodes,
                                 clnxcfg.pluto_alt, clnxcfg.capricorn_alt,
                                 orbs=clnxcfg.orbs, parent=info_dialog)
    if not birth:
        signsToday.table = clnxcfg.natal_data[1]
    # NOTE(review): DayEventsModel is not among this chunk's visible
    # imports — confirm it is imported elsewhere in the file.
    eventsToday = EventsList(info_dialog)
    model = DayEventsModel()
    model.setSourceModel(clnxcfg.schedule)
    model.setDate(date)
    eventsToday.tree.setModel(model)
    eventsToday.setAttribute(QtCore.Qt.WA_DeleteOnClose)
    dayData = QtGui.QTabWidget(info_dialog)
    hoursToday.prepareHours(date, ob)
    moonToday.get_moon_cycle(date)
    moonToday.highlight_cycle_phase(date)
    if birth:
        print("Using already available birth data instead of recalculating it")
        # Natal view: lock the time editor and reuse precomputed zodiac/houses.
        signsToday.time.timeChanged.disconnect()
        signsToday.time.setReadOnly(True)
        signsToday.time.setTime(clnxcfg.baby.obvdate.time())
        signsToday.assembleFromZodiac(clnxcfg.natal_data[1])
        signsToday.h = clnxcfg.natal_data[0]
    else:
        signsToday.get_constellations(date, ob)
    dayData.addTab(hoursToday, "Planetary Hours")
    dayData.addTab(moonToday, "Moon Phases")
    dayData.addTab(signsToday, "Signs")
    dayData.addTab(eventsToday, "Events")
    vbox.addWidget(dayData)
    info_dialog.show()
def make_save_for_date_range(self):
    """Build the (reusable) "Save Data for Dates" dialog.

    The dialog collects a date range, a set of data-type checkboxes, and
    a destination folder; OK triggers mass_print().
    """
    #self.save_for_range_dialog = QtGui.QDialog(self)
    self.save_for_range_dialog = ReusableDialog(self)
    self.save_for_range_dialog.setFixedSize(380, 280)
    self.save_for_range_dialog.setWindowTitle("Save Data for Dates")
    grid = QtGui.QGridLayout(self.save_for_range_dialog)
    self.save_for_range_dialog.date_start = QtGui.QDateTimeEdit(self.save_for_range_dialog)
    self.save_for_range_dialog.date_start.setDisplayFormat("MM/dd/yyyy")
    self.save_for_range_dialog.date_end = QtGui.QDateTimeEdit(self.save_for_range_dialog)
    self.save_for_range_dialog.date_end.setDisplayFormat("MM/dd/yyyy")
    grid.addWidget(QtGui.QLabel("Save from"), 0, 0)
    grid.addWidget(self.save_for_range_dialog.date_start, 0, 1)
    grid.addWidget(QtGui.QLabel("To"), 1, 0)
    grid.addWidget(self.save_for_range_dialog.date_end, 1, 1)
    grid.addWidget(QtGui.QLabel("Data to Save"), 2, 0)
    # Non-exclusive button group: several data types can be checked at once.
    self.save_for_range_dialog.checkboxes = QtGui.QButtonGroup()
    self.save_for_range_dialog.checkboxes.setExclusive(False)
    checkboxes_frame = QtGui.QFrame(self.save_for_range_dialog)
    vbox = QtGui.QVBoxLayout(checkboxes_frame)
    all_check = QtGui.QCheckBox("All", checkboxes_frame)
    ph_check = QtGui.QCheckBox("Planetary Hours", checkboxes_frame)
    s_check = QtGui.QCheckBox("Planetary Signs", checkboxes_frame)
    m_check = QtGui.QCheckBox("Moon Phase", checkboxes_frame)
    e_check = QtGui.QCheckBox("Events", checkboxes_frame)
    self.save_for_range_dialog.checkboxes.addButton(all_check)
    self.save_for_range_dialog.checkboxes.addButton(ph_check)
    self.save_for_range_dialog.checkboxes.addButton(s_check)
    self.save_for_range_dialog.checkboxes.addButton(m_check)
    self.save_for_range_dialog.checkboxes.addButton(e_check)
    vbox.addWidget(all_check)
    vbox.addWidget(ph_check)
    vbox.addWidget(s_check)
    vbox.addWidget(m_check)
    vbox.addWidget(e_check)
    grid.addWidget(checkboxes_frame, 2, 1)
    grid.addWidget(QtGui.QLabel("Folder to save in"), 3, 0)
    hbox = QtGui.QHBoxLayout()
    self.save_for_range_dialog.filename = QtGui.QLineEdit(self.save_for_range_dialog)
    button = QtGui.QPushButton(self.save_for_range_dialog)
    button.setIcon(QtGui.QIcon.fromTheme("document-open"))
    button.clicked.connect(self.get_folder_name)
    hbox.addWidget(self.save_for_range_dialog.filename)
    hbox.addWidget(button)
    grid.addLayout(hbox, 3, 1)
    buttonbox = QtGui.QDialogButtonBox(QtCore.Qt.Horizontal)
    okbutton = buttonbox.addButton(QtGui.QDialogButtonBox.Ok)
    #okbutton.clicked.connect(self.print_to_file("All", date,filename = "",suppress_notification = True))
    okbutton.clicked.connect(self.mass_print)
    cancelbutton = buttonbox.addButton(QtGui.QDialogButtonBox.Cancel)
    cancelbutton.clicked.connect(self.save_for_range_dialog.hide)
    grid.addWidget(buttonbox, 4, 0, 1, 2)
def get_folder_name(self):
    """Ask for a destination directory and show it in the save dialog."""
    chosen = QtGui.QFileDialog.getExistingDirectory(
        caption="Save in folder...",
        options=QtGui.QFileDialog.ShowDirsOnly)
    self.save_for_range_dialog.filename.setText(chosen)
def mass_print(self):
    """Save every checked data type for each date in the selected range.

    One subdirectory per data type is created under the chosen folder;
    each day is written as MM-DD-YYYY.txt inside it.
    """
    dlg = self.save_for_range_dialog
    folder = dlg.filename.text()
    # FIX: the original used `text() > ""` (lexicographic compare); a
    # plain truthiness test says what is meant. No folder -> nothing to do.
    if not folder:
        return
    day_numbers = (dlg.date_end.date().toPyDate() -
                   dlg.date_start.date().toPyDate()).days
    # Hoist the checked buttons; the set cannot change mid-run.
    checked = [b for b in dlg.checkboxes.buttons() if b.isChecked()]
    for btn in checked:
        store_here = os.path.join(folder, btn.text().replace(" ", "_"))
        if not os.path.exists(store_here):
            os.mkdir(store_here)
    start = dlg.date_start.dateTime().toPyDateTime().replace(tzinfo=clnxcfg.observer.timezone)
    for i in range(day_numbers + 1):
        # BUG FIX: bare `timedelta` raised NameError under `import datetime`.
        date = start + datetime.timedelta(days=i)
        for btn in checked:
            filename = os.path.join(folder,
                                    btn.text().replace(" ", "_"),
                                    date.strftime("%m-%d-%Y.txt"))
            self.print_to_file(btn.text(), date, filename=filename,
                               suppress_notification=True)
#'''
def make_calendar_menu(self):
    """Wire a right-click context menu onto the calendar's table."""
    table = self.calendar._table
    table.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
    table.customContextMenuRequested.connect(self.get_cal_menu)
#self.calendar.setContextMenu(self.menu)
#'''
def copy_to_clipboard(self, option, date):
    """Render the requested report for *date* and put it on the clipboard.

    *option* is one of "All", "Moon Phase", "Planetary Signs",
    "Planetary Hours"; anything else is treated as "Events".
    """
    if option == "Planetary Hours":
        text = prepare_planetary_info(date, clnxcfg.observer)
    elif option == "Planetary Signs":
        text = prepare_sign_info(date, clnxcfg.observer,
                                 clnxcfg.show_nodes, clnxcfg.show_admi)
    elif option == "Moon Phase":
        text = prepare_moon_cycle(date)
    elif option == "All":
        text = prepare_all(date, clnxcfg.observer, clnxcfg.schedule,
                           clnxcfg.show_nodes, clnxcfg.show_admi)
    else:  # "Events"
        text = prepare_events(date, clnxcfg.schedule)
    app.clipboard().setText(text)
#KGlobal::locale::Warning your global KLocale is being recreated with a valid main component instead of a fake component, this usually means you tried to call i18n related functions before your main component was created. You should not do that since it most likely will not work
#X Error: RenderBadPicture (invalid Picture parameter) 174
#Extension: 153 (RENDER)
#Minor opcode: 8 (RenderComposite)
#Resource id: 0x3800836
#weird bug related to opening file dialog on linux through this, but it's harmless
def print_to_file(self, option, date, filename=None,
                  suppress_notification=False):
    """Render the requested report for *date* and write it to *filename*.

    When *filename* is None a save dialog is shown; an empty/cancelled
    choice writes nothing. A notification is shown on success unless
    *suppress_notification* is set (used by mass_print).
    """
    if option == "All":
        text = prepare_all(date, clnxcfg.observer, clnxcfg.schedule,
                           clnxcfg.show_nodes, clnxcfg.show_admi)
    elif option == "Moon Phase":
        text = prepare_moon_cycle(date)
    elif option == "Planetary Signs":
        text = prepare_sign_info(date, clnxcfg.observer,
                                 clnxcfg.show_nodes, clnxcfg.show_admi)
    elif option == "Planetary Hours":
        text = prepare_planetary_info(date, clnxcfg.observer)
    else:  # option == "Events"
        text = prepare_events(date, clnxcfg.schedule)
    if filename is None:  # FIX: was `== None`
        filename = QtGui.QFileDialog.getSaveFileName(
            self,
            caption="Saving %s for %s" % (option, date.strftime("%m/%d/%Y")),
            filter="*.txt")
    if filename:
        # FIX: use a with-statement so the handle is closed even if the
        # write (or the notification) raises; the original leaked it.
        with open(filename, "w") as f:
            f.write(text)
        if not suppress_notification:
            self.show_notification("Saved", "%s has the %s you saved."
                                   % (filename, option), False)
def get_cal_menu(self, qpoint):
    """Build and show the calendar cell's context menu at *qpoint*.

    Offers info dialogs (including lunar/solar return entries when they
    fall on the clicked day) plus copy/save actions for each report type.
    """
    table = self.calendar._table
    item = table.itemAt(qpoint)
    day = item.data(QtCore.Qt.UserRole)
    date2 = None  # lunar-return moment on this day, if any
    date3 = None  # solar-return moment on this day, if any
    tzone = clnxcfg.observer.timezone
    # BUG FIX: the file only does `import datetime`, so the original
    # `datetime.fromordinal(...)` raised AttributeError on the module.
    date = datetime.datetime.fromordinal(day.toordinal())
    # Use noon local time so the reports fall inside the clicked day.
    date = date.replace(hour=12, minute=0, second=0, tzinfo=tzone)
    if self.calendar.lunarReturn:
        idx = self.calendar.fetchLunarReturn(day)
        if idx >= 0:
            date2 = self.calendar.lunarReturns[idx]
    if self.calendar.solarReturn and day == self.calendar.solarReturnTime.date():
        date3 = self.calendar.solarReturnTime
    #self.calendar.setGridVisible(True)
    menu = QtGui.QMenu(self.calendar)
    if date2:
        lritem = menu.addAction("Lunar Return for %s" %(date.strftime("%m/%d/%Y")))
        lritem.triggered.connect(lambda: self.get_info_for_date(date2))
        lritem.setIcon(QtGui.QIcon.fromTheme("dialog-information"))
    if date3:
        sritem = menu.addAction("Solar Return for %s" %(date.strftime("%m/%d/%Y")))
        sritem.triggered.connect(lambda: self.get_info_for_date(date3))
        sritem.setIcon(QtGui.QIcon.fromTheme("dialog-information"))
    infoitem = menu.addAction("Info for %s" %(date.strftime("%m/%d/%Y")))
    infoitem.triggered.connect(lambda: self.get_info_for_date(date))
    infoitem.setIcon(QtGui.QIcon.fromTheme("dialog-information"))
    copymenu = menu.addMenu("Copy")
    copymenu.setIcon(QtGui.QIcon.fromTheme("edit-copy"))
    copyall = copymenu.addAction("All")
    copydate = copymenu.addAction("Date")
    copyplanetdata = copymenu.addAction("Planetary Hours")
    copymoonphasedata = copymenu.addAction("Moon Phases")
    copysignsdata = copymenu.addAction("Signs for this date")
    copyeventdata = copymenu.addAction("Events")
    copyall.triggered.connect(lambda: self.copy_to_clipboard("All", date))
    copydate.triggered.connect(lambda: app.clipboard().setText(date.strftime("%m/%d/%Y")))
    copyplanetdata.triggered.connect(lambda: self.copy_to_clipboard("Planetary Hours", date))
    copymoonphasedata.triggered.connect(lambda: self.copy_to_clipboard("Moon Phase", date))
    copysignsdata.triggered.connect(lambda: self.copy_to_clipboard("Planetary Signs", date))
    copyeventdata.triggered.connect(lambda: self.copy_to_clipboard("Events", date))
    savemenu = menu.addMenu("Save to File")
    savemenu.setIcon(QtGui.QIcon.fromTheme("document-save-as"))
    saveall = savemenu.addAction("All")
    saveplanetdata = savemenu.addAction("Planetary Hours")
    savemoonphasedata = savemenu.addAction("Moon Phases")
    savesignsdata = savemenu.addAction("Signs for this date")
    saveeventdata = savemenu.addAction("Events")
    saveall.triggered.connect(lambda: self.print_to_file("All", date))
    saveplanetdata.triggered.connect(lambda: self.print_to_file("Planetary Hours", date))
    savemoonphasedata.triggered.connect(lambda: self.print_to_file("Moon Phase", date))
    savesignsdata.triggered.connect(lambda: self.print_to_file("Planetary Signs", date))
    saveeventdata.triggered.connect(lambda: self.print_to_file("Events", date))
    menu.exec_(self.calendar.mapToGlobal(qpoint))
#http://www.qtcentre.org/archive/index.php/t-42524.html?s = ef30fdd9697c337a1d588ce9d26f47d8
##config related
def settings_reset(self):
    """Restore default settings and refresh every dependent widget."""
    clnxcfg.reset_settings()
    self.update_settings_widgets()
    self.update_widgets_config()
    self.update_hours()
    moon = self.moonToday
    moon.clear()
    moon.get_moon_cycle(self.now)
    moon.highlight_cycle_phase(self.now)
def update_settings_widgets(self):
    """Sync every control in the settings dialog with current config values
    (the inverse of settings_change)."""
    self.settings_dialog.location_widget.setLatitude(clnxcfg.observer.lat)
    self.settings_dialog.location_widget.setLongitude(clnxcfg.observer.lng)
    self.settings_dialog.location_widget.setElevation(clnxcfg.observer.elevation)
    self.settings_dialog.css_check.setChecked(clnxcfg.use_css)
    self.settings_dialog.override_ui_icon.setText(clnxcfg.current_icon_override)
    self.settings_dialog.date.setDateTime(clnxcfg.baby.obvdate)
    self.settings_dialog.birth_widget.setLatitude(clnxcfg.baby.lat)
    self.settings_dialog.birth_widget.setLongitude(clnxcfg.baby.lng)
    self.settings_dialog.birth_widget.setElevation(clnxcfg.baby.elevation)
    # Combo boxes are set by looking up the stored text's index.
    idx = self.settings_dialog.appearance_icons.findText(clnxcfg.current_theme)
    self.settings_dialog.appearance_icons.setCurrentIndex(idx)
    self.settings_dialog.s_check.setChecked(clnxcfg.show_sign)
    self.settings_dialog.m_check.setChecked(clnxcfg.show_moon)
    self.settings_dialog.h_check.setChecked(clnxcfg.show_house_of_moment)
    self.settings_dialog.mp_check.setChecked(clnxcfg.show_mcal)
    self.settings_dialog.sr_check.setChecked(clnxcfg.show_sr)
    self.settings_dialog.lr_check.setChecked(clnxcfg.show_lr)
    self.settings_dialog.a_check.setChecked(clnxcfg.show_admi)
    self.settings_dialog.n_check.setChecked(clnxcfg.show_nodes)
    self.settings_dialog.p_check.setChecked(clnxcfg.pluto_alt)
    self.settings_dialog.ac_check.setChecked(clnxcfg.show_aclk)
    idx2 = self.settings_dialog.c_check.findText(clnxcfg.capricorn_alt)
    self.settings_dialog.c_check.setCurrentIndex(idx2)
    self.settings_dialog.solar.setValue(clnxcfg.refinements['Solar Return'])
    self.settings_dialog.lunar.setValue(clnxcfg.refinements['Lunar Return'])
    self.settings_dialog.phase.setValue(clnxcfg.refinements['Moon Phase'])
    for i in self.settings_dialog.orbs:
        self.settings_dialog.orbs[i].setValue(clnxcfg.orbs[i])
def settings_change(self):
    """Read every control in the settings dialog back into the config and
    refresh the affected widgets (the inverse of update_settings_widgets).
    Does not persist to disk — settings_write does that."""
    lat = float(self.settings_dialog.location_widget.latitude)
    lng = float(self.settings_dialog.location_widget.longitude)
    elv = float(self.settings_dialog.location_widget.elevation)
    blat = float(self.settings_dialog.birth_widget.latitude)
    blng = float(self.settings_dialog.birth_widget.longitude)
    belv = float(self.settings_dialog.birth_widget.elevation)
    thm = self.settings_dialog.appearance_icons.currentText()
    cp = self.settings_dialog.c_check.currentText()
    iothm = self.settings_dialog.override_ui_icon.text()
    clnxcfg.observer.lat = lat
    clnxcfg.observer.lng = lng
    clnxcfg.observer.elevation = elv
    clnxcfg.baby.lat = blat
    clnxcfg.baby.lng = blng
    clnxcfg.baby.elevation = belv
    #how to migrate?
    clnxcfg.baby.obvdate = self.settings_dialog.date.dateTime().toPyDateTime()
    clnxcfg.current_theme = thm
    clnxcfg.current_icon_override = iothm
    clnxcfg.show_sign = self.settings_dialog.s_check.isChecked()
    clnxcfg.show_moon = self.settings_dialog.m_check.isChecked()
    clnxcfg.show_house_of_moment = self.settings_dialog.h_check.isChecked()
    clnxcfg.show_nodes = self.settings_dialog.n_check.isChecked()
    clnxcfg.show_admi = self.settings_dialog.a_check.isChecked()
    clnxcfg.show_mcal = self.settings_dialog.mp_check.isChecked()
    clnxcfg.show_sr = self.settings_dialog.sr_check.isChecked()
    clnxcfg.show_lr = self.settings_dialog.lr_check.isChecked()
    clnxcfg.pluto_alt = self.settings_dialog.p_check.isChecked()
    clnxcfg.capricorn_alt = cp
    clnxcfg.use_css = self.settings_dialog.css_check.isChecked()
    clnxcfg.show_aclk = self.settings_dialog.ac_check.isChecked()
    clnxcfg.refinements['Solar Return'] = self.settings_dialog.solar.value()
    clnxcfg.refinements['Lunar Return'] = self.settings_dialog.lunar.value()
    clnxcfg.refinements['Moon Phase'] = self.settings_dialog.phase.value()
    for i in self.settings_dialog.orbs:
        clnxcfg.orbs[i] = self.settings_dialog.orbs[i].value()
    # Reload theme/natal data, then rebuild all dependent widgets.
    clnxcfg.load_theme()
    clnxcfg.load_natal_data()
    self.update_widgets_config()
    self.update_hours()
    self.moonToday.clear()
    self.moonToday.get_moon_cycle(self.now)
    self.moonToday.highlight_cycle_phase(self.now)
    # Refresh the Capricorn glyph variants shown in the combo box.
    self.settings_dialog.c_check.setItemIcon(0, clnxcfg.sign_icons['Capricorn'])
    self.settings_dialog.c_check.setItemIcon(1, clnxcfg.sign_icons['Capricorn 2'])
    self.settings_dialog.c_check.setItemIcon(2, clnxcfg.sign_icons['Capricorn 3'])
    #eventually load DB of events
#eventually load DB of events
def settings_write(self):
    """OK button handler: apply the dialog values, persist them to disk,
    then hide the dialog."""
    self.settings_change()
    clnxcfg.save_settings()
    #clnxcfg.save_schedule()
    self.settings_dialog.hide()
def make_settings_dialog(self):
    """Construct the (reusable) Settings dialog with its five tabs:
    Your Info, Appearance, Events, Tweaks and Calculations.

    Only builds widgets and wires signals; the current config values are
    loaded into the widgets at the end via ``update_settings_widgets()``.
    """
    self.settings_dialog = ReusableDialog(self)
    self.settings_dialog.setWindowTitle("Settings")
    tabs = QtGui.QTabWidget(self.settings_dialog)
    location_page = QtGui.QFrame()
    appearance_page = QtGui.QFrame()
    events_page = QtGui.QFrame()
    tweaks_page = QtGui.QFrame()
    calculations_page = QtGui.QFrame()
    tabs.addTab(location_page, "Your Info")
    tabs.addTab(appearance_page, "Appearance")
    tabs.addTab(events_page, "Events")
    tabs.addTab(tweaks_page, "Tweaks")
    tabs.addTab(calculations_page, "Calculations")
    # ---- "Your Info" tab: current location + birth data ----
    groupbox = QtGui.QGroupBox("Current Location", location_page)
    groupbox2 = QtGui.QGroupBox("Birth Information", location_page)
    self.settings_dialog.location_widget = geolocationwidget.GeoLocationWidget(groupbox)
    vbox = QtGui.QVBoxLayout(location_page)
    gvbox = QtGui.QVBoxLayout(groupbox)
    gvbox.addWidget(self.settings_dialog.location_widget)
    vbox.addWidget(groupbox)
    vbox.addWidget(groupbox2)
    self.settings_dialog.birth_widget = geolocationwidget.GeoLocationWidget(groupbox2)
    self.settings_dialog.date = QtGui.QDateTimeEdit(groupbox2)
    # NOTE(review): 1902-2037 roughly matches the 32-bit Unix time range
    # -- confirm whether that is the intended limit.
    self.settings_dialog.date.setDateRange(QtCore.QDate(1902, 1, 1), QtCore.QDate(2037, 1, 1))
    self.settings_dialog.date.setDisplayFormat("MM/dd/yyyy - HH:mm:ss")
    tgrid = QtGui.QGridLayout(groupbox2)
    tgrid.addWidget(self.settings_dialog.birth_widget, 0, 0, 3, 2)
    tgrid.addWidget(QtGui.QLabel("Birth time"), 3, 0)
    tgrid.addWidget(self.settings_dialog.date, 3, 1)
    layout = QtGui.QVBoxLayout(self.settings_dialog)
    layout.addWidget(tabs)
    # ---- "Appearance" tab ----
    grid = QtGui.QGridLayout(appearance_page)
    appearance_label = QtGui.QLabel("Icon theme")
    self.settings_dialog.appearance_icons = QtGui.QComboBox()
    self.settings_dialog.appearance_icons.addItem("None")
    self.settings_dialog.css_check = QtGui.QCheckBox("Use the custom UI styling in the theme", appearance_page)
    for theme in clnxcfg.get_available_themes():
        #is it all right to format path here?
        path = "skins:%s/misc/chronoslnx.png" %(theme)
        icon = QtGui.QIcon(path)
        self.settings_dialog.appearance_icons.addItem(icon, theme)
    grid.addWidget(appearance_label, 0, 0)
    grid.addWidget(self.settings_dialog.appearance_icons, 0, 1)
    self.settings_dialog.override_ui_icon = QtGui.QLineEdit(appearance_page)
    # NOTE(review): '...set this if' + 'standard icons...' concatenate
    # without a separating space in the rendered tooltip.
    self.settings_dialog.override_ui_icon.setToolTip(("You should only set this if"
                                                      "standard icons, like Quit, "
                                                      "are not showing.\n"
                                                      "This will take effect after "
                                                      "a restart of ChronosLNX.\n"
                                                      "Currently detected icon theme by system: %s") \
                                                      % clnxcfg.sys_icotheme)
    grid.addWidget(QtGui.QLabel("UI Icon theme: "), 1, 0)
    grid.addWidget(self.settings_dialog.override_ui_icon, 1, 1)
    grid.addWidget(self.settings_dialog.css_check, 2, 0, 1, 2)
    grid.addWidget(QtGui.QLabel("Change the following for signs information"), 3, 0, 1, 2)
    self.settings_dialog.c_check = QtGui.QComboBox(appearance_page)
    self.settings_dialog.c_check.addItem(clnxcfg.sign_icons["Capricorn"], "Capricorn")
    self.settings_dialog.c_check.addItem(clnxcfg.sign_icons["Capricorn 2"], "Capricorn 2")
    self.settings_dialog.c_check.addItem(clnxcfg.sign_icons["Capricorn 3"], "Capricorn 3")
    self.settings_dialog.p_check = QtGui.QCheckBox("Use the P-looking Pluto icon", appearance_page)
    grid.addWidget(self.settings_dialog.p_check, 4, 1)
    grid.addWidget(QtGui.QLabel("Use this Capricorn glyph"), 5, 0)
    grid.addWidget(self.settings_dialog.c_check, 5, 1)
    # Dialog-wide button row: Reset / OK / Apply / Cancel.
    buttonbox = QtGui.QDialogButtonBox(QtCore.Qt.Horizontal)
    resetbutton = buttonbox.addButton(QtGui.QDialogButtonBox.Reset)
    okbutton = buttonbox.addButton(QtGui.QDialogButtonBox.Ok)
    applybutton = buttonbox.addButton(QtGui.QDialogButtonBox.Apply)
    cancelbutton = buttonbox.addButton(QtGui.QDialogButtonBox.Cancel)
    resetbutton.clicked.connect(self.settings_reset)
    okbutton.clicked.connect(self.settings_write)
    applybutton.clicked.connect(self.settings_change)
    cancelbutton.clicked.connect(self.settings_dialog.hide)
    layout.addWidget(buttonbox)
    # ---- "Events" tab: the scheduled-events planner ----
    eventplanner = EventsList(events_page)
    a_vbox = QtGui.QVBoxLayout(events_page)
    a_vbox.addWidget(eventplanner)
    eventplanner.tree.setModel(clnxcfg.schedule)
    # ---- "Tweaks" tab: display toggles ----
    tweak_grid = QtGui.QGridLayout(tweaks_page)
    self.settings_dialog.s_check = QtGui.QCheckBox("Sign of the month", tweaks_page)
    self.settings_dialog.m_check = QtGui.QCheckBox("Current moon phase", tweaks_page)
    self.settings_dialog.h_check = QtGui.QCheckBox("House of moment", tweaks_page)
    self.settings_dialog.n_check = QtGui.QCheckBox("Show Nodes", tweaks_page)
    self.settings_dialog.a_check = QtGui.QCheckBox("Show the ADMI axis", tweaks_page)
    self.settings_dialog.a_check.setToolTip("This will show the Ascendant, Descendant, MC, and IC")
    self.settings_dialog.mp_check = QtGui.QCheckBox("Show moon phases", tweaks_page)
    self.settings_dialog.sr_check = QtGui.QCheckBox("Show solar returns", tweaks_page)
    self.settings_dialog.lr_check = QtGui.QCheckBox("Show lunar returns", tweaks_page)
    self.settings_dialog.ac_check = QtGui.QCheckBox("Show rendered astrological clock?", tweaks_page)
    tweak_grid.addWidget(QtGui.QLabel("Show these main window's textual information"), 0, 0, 1, 2)
    tweak_grid.addWidget(self.settings_dialog.s_check, 1, 1)
    tweak_grid.addWidget(self.settings_dialog.m_check, 2, 1)
    tweak_grid.addWidget(self.settings_dialog.h_check, 3, 1)
    tweak_grid.addWidget(QtGui.QLabel("Change the following for signs information"), 4, 0, 1, 2)
    tweak_grid.addWidget(self.settings_dialog.n_check, 5, 1)
    tweak_grid.addWidget(self.settings_dialog.a_check, 6, 1)
    tweak_grid.addWidget(QtGui.QLabel("Change the following for the calendar"), 7, 0, 1, 2)
    tweak_grid.addWidget(self.settings_dialog.mp_check, 8, 1)
    tweak_grid.addWidget(self.settings_dialog.sr_check, 9, 1)
    tweak_grid.addWidget(self.settings_dialog.lr_check, 10, 1)
    tweak_grid.addWidget(QtGui.QLabel("Graphical Clock"), 11, 0, 1, 2)
    tweak_grid.addWidget(self.settings_dialog.ac_check, 12, 1)
    # ---- "Calculations" tab: refinement counts and orbs ----
    another_vbox = QtGui.QVBoxLayout(calculations_page)
    gbox1 = QtGui.QGroupBox("Refinements")
    another_vbox.addWidget(gbox1)
    ggbox = QtGui.QGridLayout(gbox1)
    gbox2 = QtGui.QGroupBox("Orbs")
    another_vbox.addWidget(gbox2)
    ggbox2 = QtGui.QGridLayout(gbox2)
    self.settings_dialog.solar = QtGui.QSpinBox(calculations_page)
    self.settings_dialog.lunar = QtGui.QSpinBox(calculations_page)
    self.settings_dialog.phase = QtGui.QSpinBox(calculations_page)
    ggbox.addWidget(QtGui.QLabel(("Adjust the number of refinements to perform\n"
                                  "for each of these calculations")),
                    0, 0, 1, 2)
    ggbox.addWidget(QtGui.QLabel("Solar Return"), 1, 0, 1, 2)
    ggbox.addWidget(self.settings_dialog.solar, 1, 2)
    ggbox.addWidget(QtGui.QLabel("Lunar Return"), 2, 0, 1, 2)
    ggbox.addWidget(self.settings_dialog.lunar, 2, 2)
    ggbox.addWidget(QtGui.QLabel("Moon Phase"), 3, 0, 1, 2)
    ggbox.addWidget(self.settings_dialog.phase, 3, 2)
    # One spin box (range 0-10) per configured orb entry.
    self.settings_dialog.orbs = {}
    for x, i in enumerate(list(clnxcfg.orbs.keys())):
        self.settings_dialog.orbs[i] = QtGui.QDoubleSpinBox(calculations_page)
        self.settings_dialog.orbs[i].setRange(0, 10)
        ggbox2.addWidget(QtGui.QLabel(i.title()), x, 0, 1, 5)
        ggbox2.addWidget(self.settings_dialog.orbs[i], x, 5, 1, 1)
    # Populate every widget from the current config values.
    self.update_settings_widgets()
## systray
#http://stackoverflow.com/questions/893984/pyqt-show-menu-in-a-system-tray-application
#http://www.itfingers.com/Question/758256/pyqt4-minimize-to-tray
def make_tray_icon(self):
    """Build the system-tray icon with a Show / Settings / Quit menu.

    Double-clicking the icon toggles the main window; see
    ``__icon_activated``.
    """
    self.trayIcon = QtGui.QSystemTrayIcon(QtGui.QIcon(clnxcfg.main_icons['logo']), app)
    menu = QtGui.QMenu()
    quitAction = QtGui.QAction(self.tr("&Quit"), self)
    quitAction.triggered.connect(QtGui.qApp.quit)
    showaction = menu.addAction("&Show", self.show)
    showaction.setIcon(QtGui.QIcon.fromTheme("show-menu"))
    setaction = menu.addAction("&Settings", self.settings_dialog.open)
    setaction.setIcon(QtGui.QIcon.fromTheme("preferences-other"))
    menu.addAction(quitAction)
    quitAction.setIcon(QtGui.QIcon.fromTheme("application-exit"))
    self.trayIcon.setContextMenu(menu)
    self.trayIcon.activated.connect(self.__icon_activated)
    self.trayIcon.show()
def _ChronosLNX__icon_activated(self, reason):
    """Toggle main-window visibility on a tray-icon double-click."""
    # The name is written pre-mangled so it matches the
    # ``self.__icon_activated`` reference in make_tray_icon (Python
    # mangles ``__icon_activated`` to ``_ChronosLNX__icon_activated``
    # inside the class body).
    if reason == QtGui.QSystemTrayIcon.DoubleClick:
        if(self.isHidden()):
            self.show()
        else:
            self.hide()
def closeEvent(self, event):
    """Qt close handler: while the tray icon is available, hide to the
    tray instead of quitting (the event is ignored so Qt keeps the app
    alive)."""
    if self.trayIcon.isVisible():
        self.hide()
        event.ignore()
##misc
#http://www.saltycrane.com/blog/2008/01/python-variable-scope-notes/
def show_about(self):
    """Show the standard About box with name, version, author and
    homepage (built from the module-level APP* constants)."""
    QtGui.QMessageBox.about(self, "About {}".format(APPNAME),
                            ("<center><big><b>{0} {1}</b></big>"
                             "<br />{2}<br />(C) <a href=\"mailto:{3}\">{4}</a>"
                             " {5}<br /><a href = \"{6}\">{0} Homepage</a>"
                             "</center>").format(APPNAME, APPVERSION, DESCRIPTION,
                                                 EMAIL, AUTHOR, YEAR, PAGE)
                            )
def show_help(self):
    """Placeholder for a future help dialog (not implemented yet)."""
    print("Stubbing out")
def update(self):
    """Per-tick refresh: recompute planetary data for 'now', roll daily
    data over at sunrise, fire due alarms, and redraw the tray
    tooltip/icon plus the textual info label."""
    self.now = clnxcfg.observer.obvdate
    updatePandC(self.now, clnxcfg.observer, self.houses, self.zodiac)
    #self.astroClock.signData = [self.houses,self.zodiac]
    if self.now >= self.next_sunrise:
        # A new day began: recompute planetary hours and the moon cycle.
        self.update_hours()
        self.update_moon_cycle()
    self.phour = self.hoursToday.grab_nearest_hour(self.now)
    self.check_alarm()
    if clnxcfg.show_house_of_moment:
        # NOTE(review): this reads ``.m`` but the message below says
        # "The sun is in ..." -- confirm which body is intended.
        hom = self.zodiac[0].m.house_info.num
        # Ordinal suffix; houses run 1-12, so this mapping is sufficient.
        if hom == 1:
            suffix = "st"
        elif hom == 2:
            suffix = "nd"
        elif hom == 3:
            suffix = "rd"
        else:
            suffix = "th"
        house_of_moment_string = "<br />The sun is in the {}<sup>{}</sup> house".format(hom, suffix)
    else:
        house_of_moment_string = ""
    if clnxcfg.show_sign:
        sign_string = "<br />The sign of the month is {}".format(self.zodiac[0].m.signData['name'])
    else:
        sign_string = ""
    if clnxcfg.show_moon:
        phase = grab_phase(self.now)
        moon_phase = "<br />{}: {} illuminated".format(state_to_string(phase, swisseph.MOON), phase[2])
    else:
        moon_phase = ""
    #probably get boolean of day/night out of model?
    planets_string = "Day of {}, the hour of {}".format(self.pday, self.phour)
    total_string = "{}{}{}{}".format(planets_string, sign_string,
                                     moon_phase, house_of_moment_string)
    if clnxcfg.current_theme == "None":
        sysicon = QtGui.QIcon(clnxcfg.grab_icon_path("misc", "chronoslnx"))
    else:
        sysicon = clnxcfg.main_icons[self.phour]
    # The tooltip is plain text, so strip the HTML markup used above.
    self.trayIcon.setToolTip("{} - {}\n{}".format(self.now.strftime("%Y/%m/%d"),
                                                  self.now.strftime("%H:%M:%S"),
                                                  total_string.replace("<br />", "\n")\
                                                  .replace("<sup>","")\
                                                  .replace("</sup>",""))
                             )
    self.trayIcon.setIcon(sysicon)
    #self.todayPicture.setPixmap(clnxcfg.main_pixmaps[str(self.phour)])
    self.todayOther.setText("%s<br />%s" %(self.now.strftime("%H:%M:%S"), total_string))
def event_trigger(self, event_type, text, planet_trigger):
    """Dispatch one scheduled event.

    :param event_type: "Save to file", "Command" or "Textual reminder".
    :param text: event payload (file text, shell command, or message).
    :param planet_trigger: whether a planetary hour fired the event.
    """
    if event_type == "Save to file":
        print_to_file(self, text, self.now)
    elif event_type == "Command":
        # BUG FIX: split() already returns the argv list; the original
        # wrapped it in another list (call([split(text)])), which is not
        # a valid argument vector for subprocess.call().
        call(split(text))
    else: #event_type == "Textual reminder"
        self.show_notification("Reminder", text, planet_trigger)
def parse_phour_args(self, string):
    """Build the %-format mapping for a planetary-hour event text.

    *string* may contain the placeholders ``%(prev)s`` and/or
    ``%(next)s``.  Returns ``(mapping, count)`` where *mapping* maps the
    placeholders found to the previous/current planetary hour (``None``
    when no placeholder is present) and *count* is how many were found.
    """
    alist = None
    args = len(findall(r"%\(prev\)s|%\(next\)s", string))
    model = self.hoursToday.tree.model().sourceModel()
    if args == 2:
        # The previous hour wraps around the 7-planet cycle.
        if model.last_index > 0:
            idx = model.last_index - 1
        else:
            idx = model.last_index + 6
        prev_hour = model.get_planet(idx)
        alist = {'prev': prev_hour, "next": self.phour}
    elif args == 1:
        # BUG FIX: the original called match() without the string to
        # search, raising TypeError whenever exactly one placeholder was
        # used.  findall() is already in scope and matches mid-string.
        if findall(r"%\(prev\)s", string):
            if model.last_index > 0:
                idx = model.last_index - 1
            else:
                idx = model.last_index + 6
            prev_hour = model.get_planet(idx)
            alist = {"prev": prev_hour}
        else:
            alist = {"next": self.phour}
    return alist, args
def parse_hour_args(self, string):
    """Build the %-format mapping for a normal (wall-clock) hour event.

    Same contract as parse_phour_args(), but 'prev'/'next' are hour
    numbers (0-23) instead of planetary hours.
    """
    alist = None
    args = len(findall(r"%\(prev\)s|%\(next\)s", string))
    if args == 2:
        # The previous hour wraps from 0 back to 23.
        if self.now.hour == 0:
            prev_hour = 23
        else:
            prev_hour = self.now.hour - 1
        alist = {'prev': prev_hour, "next": self.now.hour}
    elif args == 1:
        # BUG FIX: match() was called without the target string
        # (TypeError); findall() is in scope and matches mid-string.
        if findall(r"%\(prev\)s", string):
            if self.now.hour == 0:
                prev_hour = 23
            else:
                prev_hour = self.now.hour - 1
            alist = {"prev": prev_hour}
        else:
            alist = {"next": self.now.hour}
    return alist, args
def check_alarm(self):
    """Walk today's schedule and fire any event whose trigger matches
    the current second.

    Schedule columns: 0 = enabled flag, 2 = hour trigger (a QTime or a
    symbolic string), 3 = event type, 4 = event text.
    """
    for i in range(clnxcfg.todays_schedule.rowCount()):
        hour_trigger = False
        pt = False  # True when a planetary hour fired the event
        real_row = clnxcfg.todays_schedule.mapToSource(clnxcfg.todays_schedule.index(i, 0)).row()
        enabled_item = clnxcfg.schedule.item(real_row, 0)
        if enabled_item.checkState() == QtCore.Qt.Checked:
            hour_item = clnxcfg.schedule.item(real_row, 2).data(QtCore.Qt.UserRole)
            txt = clnxcfg.schedule.item(real_row, 4).data(QtCore.Qt.EditRole)
            args = 0
            if isinstance(hour_item, QtCore.QTime):
                # Fixed wall-clock trigger.
                hour_trigger = compare_to_the_second(self.now, hour_item.hour(),
                                                     hour_item.minute(), 0)
            else:
                if hour_item == "Every planetary hour":
                    phm = self.hoursToday.tree.model().sourceModel()
                    dt = phm.get_date(phm.last_index)
                    hour_trigger = compare_to_the_second(self.now, dt.hour, dt.minute, dt.second+1)
                    if hour_trigger:
                        pt = True
                        alist, args = self.parse_phour_args(txt)
                elif self.phour == hour_item:
                    # Trigger on one specific planetary hour (the current one).
                    phm = self.hoursToday.tree.model().sourceModel()
                    dt = phm.get_date(phm.last_index)
                    hour_trigger = compare_to_the_second(self.now, dt.hour, dt.minute, dt.second+1)
                    pt = True
                elif hour_item == "When the sun rises":
                    # BUG FIX: the original passed self.sunrise itself where
                    # the hour number was expected (the .minute/.second
                    # accesses show sunrise is a datetime-like value).
                    hour_trigger = compare_to_the_second(self.now, self.sunrise.hour,
                                                         self.sunrise.minute, self.sunrise.second)
                elif hour_item == "When the sun sets":
                    # BUG FIX: the original read self.minute.second instead
                    # of the sunset's seconds.
                    hour_trigger = compare_to_the_second(self.now, self.sunset.hour,
                                                         self.sunset.minute, self.sunset.second)
                elif hour_item == "Every normal hour":
                    hour_trigger = compare_to_the_second(self.now, self.now.hour, 0, 0)
                    alist, args = self.parse_hour_args(txt)
            if hour_trigger:
                event_type_item = clnxcfg.schedule.item(real_row, 3).data(QtCore.Qt.EditRole)
                if args > 0:
                    # Fill the %(prev)s / %(next)s placeholders.
                    self.event_trigger(event_type_item, txt % alist, pt)
                else:
                    self.event_trigger(event_type_item, txt, pt)
def main():
    """Application entry point: build the Qt app, load the config,
    probe for notify-send, then show the main window and run the loop."""
    global app, clnxcfg, pynf
    app = QtGui.QApplication(sys.argv)
    app.setApplicationName(APPNAME)
    app.setApplicationVersion(APPVERSION)
    if os.name == 'nt':
        QtGui.QIcon.setThemeSearchPaths(['/icons'])
    # Keep running in the tray when the main window is closed.
    app.setQuitOnLastWindowClosed(False)
    clnxcfg = ChronosLNXConfig()
    app.setWindowIcon(clnxcfg.main_icons['logo'])
    # Probe for notify-send; `which` exits 0 when the binary exists.
    try:
        retcode = call(['which', 'notify-send'])
        pynf = retcode == 0
    except Exception:
        # `which` itself may be missing (e.g. on Windows).
        pynf = False
    if not pynf:
        print("Warning, couldn't find notify-send! On Linux systems, the notifications might look ugly.")
    chronoslnx = ChronosLNX()
    chronoslnx.show()
    sys.exit(app.exec_())

if __name__ == "__main__":
    main()
|
ShadowKyogre/ChronosLNX
|
chronoslnxlib/gui.py
|
Python
|
gpl-3.0
| 43,436
|
from incload.parsers import baseparser
from incload import downloader
class FullAlphabeticalParser(baseparser.BaseParser):
    """Scrape teknoaxe.com for every song, grouped by genre.

    Parsing runs as a three-pass state machine driven by
    ``__DetectionLevel``:
      0 -- collect category links from the home-page footer
      1 -- collect genre links from each category page
      2 -- collect song links from each genre page
    (Python 2 source: uses print statements.)
    """
    # Landing page whose footer links out to the per-category pages.
    Source="http://teknoaxe.com/Home.php"
    def __init__(self):
        baseparser.BaseParser.__init__(self)
        self.__DetectionLevel=0   # current pass; see class docstring
        self.__Category=""        # href of the category link currently open
        self.__Categories={}      # category name -> category page URL
        self.__Genre=""           # href/name of the genre being scraped
        self.__Genres={}          # genre name -> genre page URL
        self.__Song=""            # href of the song link currently open
        self.__Songs={}           # song title -> {link, title, genre}
        self.__FooterItemCount=0  # category links live in the 2nd footeritem div
    def feed(self, data):
        # The top-level call (level 0) parses the home page, then drives
        # the two follow-up passes itself; the recursive self.feed()
        # calls below run at level 1/2 and only perform the base parse.
        baseparser.BaseParser.feed(self, data)
        if self.__DetectionLevel==0:
            self.__DetectionLevel=1
            for category in sorted(self.__Categories.keys()):
                print "Downloading genre list for category '%s'"%category
                dl=downloader.Downloader(self.__Categories[category])
                try:
                    dl.start()
                    dl.wait()
                except KeyboardInterrupt:
                    # Stop the worker before propagating the interrupt.
                    dl.stop()
                    raise KeyboardInterrupt()
                print "Parsing genre list for category '%s'"%category
                self.feed(dl.read())
            self.__DetectionLevel=2
            # NOTE: the loop variable IS the attribute, so handlers see
            # the genre currently being scraped via self.__Genre.
            for self.__Genre in sorted(self.__Genres.keys()):
                print "Downloading song list for genre '%s'"%self.__Genre
                dl=downloader.Downloader(self.__Genres[self.__Genre])
                try:
                    dl.start()
                    dl.wait()
                except KeyboardInterrupt:
                    dl.stop()
                    raise KeyboardInterrupt()
                print "Parsing song list for genre '%s'"%self.__Genre
                self.feed(dl.read())
    def handle_starttag(self, tag, attr):
        if self.__DetectionLevel==0:
            if self.__FooterItemCount==2:
                # Inside the footer block that holds the category links.
                if tag=="a":
                    self.__Category=self.getAttribute(attr, "href")
                return
            if tag=="div":
                sclass=self.getAttribute(attr, "class")
                if sclass=="footeritem":
                    self.__FooterItemCount=self.__FooterItemCount+1
        elif self.__DetectionLevel==1:
            if tag=="a":
                sclass=self.getAttribute(attr, "class")
                if sclass=="musiclink":
                    self.__Genre=self.getAttribute(attr, "href")
        elif self.__DetectionLevel==2:
            if tag=="a":
                sclass=self.getAttribute(attr, "class")
                if sclass=="genre_post":
                    self.__Song=self.getAttribute(attr, "href")
            elif tag=="img":
                sclass=self.getAttribute(attr, "class")
                if sclass=="genre_image":
                    # The alt text looks like "<something>:<title>".
                    salt=self.getAttribute(attr, "alt")
                    salt=salt.split(":")[1]
                    # Keep the first hit unless it lets us replace a
                    # category-named genre with a more specific one.
                    if not salt in self.__Songs or (self.__Songs[salt]["genre"] in self.__Categories and self.__Genre not in self.__Categories):
                        self.__Songs[salt]={"link":self.__Song, "title":salt, "genre":self.__Genre}
                    self.__Song=""
    def handle_data(self, data):
        # Text content names the link we just opened in handle_starttag.
        if self.__DetectionLevel==0 and self.__FooterItemCount==2 and self.__Category:
            if data != "Music" and data != "Commission":
                self.__Categories[data]=self.__Category
            self.__Category=""
        elif self.__DetectionLevel==1 and self.__Genre:
            self.__Genres[data]=self.__Genre
            self.__Genre=""
    @property
    def Result(self):
        # Songs sorted by title, each as {link, title, genre}.
        ordered=sorted(self.__Songs.keys())
        l=[]
        for o in ordered:
            l.append(self.__Songs[o])
        return l
|
Timtam/incompetech-downloader
|
incload/parsers/teknoaxe/fullalphabetical.py
|
Python
|
gpl-3.0
| 3,085
|
import _plotly_utils.basevalidators
class ValueValidator(_plotly_utils.basevalidators.StringValidator):
    """String validator for ``funnel.marker.colorbar.tickformatstop.value``.

    ``edit_type`` and ``role`` have colorbar-specific defaults but may be
    overridden through keyword arguments.
    """

    def __init__(
        self,
        plotly_name="value",
        parent_name="funnel.marker.colorbar.tickformatstop",
        **kwargs
    ):
        # Pull the overridable defaults out of kwargs before forwarding.
        edit_type = kwargs.pop("edit_type", "colorbars")
        role = kwargs.pop("role", "style")
        super(ValueValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=edit_type,
            role=role,
            **kwargs
        )
|
plotly/python-api
|
packages/python/plotly/plotly/validators/funnel/marker/colorbar/tickformatstop/_value.py
|
Python
|
mit
| 509
|
"""SCons.Tool.gfortran
Tool-specific initialization for gfortran, the GNU Fortran 95/Fortran
2003 compiler.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
import SCons.Util
import fortran
def generate(env):
    """Add Builders and construction variables for gfortran to an
    Environment."""
    fortran.generate(env)
    # gfortran is a single front-end for every Fortran dialect SCons knows.
    for dialect in ('F77', 'F90', 'FORTRAN', 'F95', 'F03', 'F08'):
        env[dialect] = 'gfortran'
        env['SH' + dialect] = '$' + dialect
        # Shared-object builds need -fPIC everywhere except cygwin/win32.
        if env['PLATFORM'] in ['cygwin', 'win32']:
            shflags = '$%sFLAGS' % dialect
        else:
            shflags = '$%sFLAGS -fPIC' % dialect
        env['SH%sFLAGS' % dialect] = SCons.Util.CLVar(shflags)
        env['INC%sPREFIX' % dialect] = "-I"
        env['INC%sSUFFIX' % dialect] = ""
def exists(env):
    """Return a truthy value when a gfortran executable can be detected,
    telling SCons this tool is usable."""
    return env.Detect('gfortran')
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
andrewyoung1991/scons
|
src/engine/SCons/Tool/gfortran.py
|
Python
|
mit
| 2,214
|
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.7.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1LocalVolumeSource(object):
    """Kubernetes ``LocalVolumeSource`` model (v1).

    NOTE: originally produced by the swagger code generator; this
    implementation keeps the generated public interface intact.
    """

    def __init__(self, path=None):
        """Build a V1LocalVolumeSource.

        :param path: full path to the volume on the node.
        """
        # Maps driving the generic serialization in to_dict().
        self.swagger_types = {'path': 'str'}
        self.attribute_map = {'path': 'path'}
        self._path = path

    @property
    def path(self):
        """The full path to the volume on the node.  For alpha this must
        be a directory; once block sources are supported it may point to
        a block device.

        :rtype: str
        """
        return self._path

    @path.setter
    def path(self, path):
        """Set the path; ``None`` is rejected because the field is required."""
        if path is None:
            raise ValueError("Invalid value for `path`, must not be `None`")
        self._path = path

    def to_dict(self):
        """Return the model's properties as a plain dict, recursing into
        nested models, lists and dicts."""
        result = {}
        for attr in self.swagger_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    key: val.to_dict() if hasattr(val, "to_dict") else val
                    for key, val in value.items()
                }
            else:
                result[attr] = value
        return result

    def to_str(self):
        """Return the pretty-printed string form of the model."""
        return pformat(self.to_dict())

    def __repr__(self):
        """Used by ``print`` and ``pprint``."""
        return self.to_str()

    def __eq__(self, other):
        """Models compare equal when every attribute matches."""
        if not isinstance(other, V1LocalVolumeSource):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Inverse of ``__eq__``."""
        return not self == other
|
djkonro/client-python
|
kubernetes/client/models/v1_local_volume_source.py
|
Python
|
apache-2.0
| 3,365
|
import pytest
from utils import testgen
from cfme.web_ui import Quadicon, toolbar
from cfme.infrastructure.host import Host
from cfme.web_ui import InfoBlock
from cfme.fixtures import pytest_selenium as sel
from cfme.configure.tasks import is_host_analysis_finished
from utils.wait import wait_for
# Parametrize every test in this module once per openstack-infra provider
# known to the appliance (module-scoped).
pytest_generate_tests = testgen.generate(testgen.provider_by_type,
                                         ['openstack-infra'],
                                         scope='module')


@pytest.mark.usefixtures("setup_provider_modscope")
def test_host_security(provider, soft_assert):
    """Run SmartState analysis on every node of the provider and check
    that the discovered security details (Users/Groups counts) are
    non-zero."""
    provider.load_details()
    # Navigate to the provider's node list.
    sel.click(InfoBlock.element("Relationships", "Nodes"))
    my_quads = list(Quadicon.all())
    assert len(my_quads) > 0
    for quad in my_quads:
        host = Host(name=quad.name)
        host.run_smartstate_analysis()
        # Poll the tasks page (reloading it) until the analysis finishes.
        wait_for(lambda: is_host_analysis_finished(host.name), delay=15,
                 timeout="10m", fail_func=lambda: toolbar.select('Reload'))
        soft_assert(
            int(host.get_detail("Security", "Users")) > 0,
            'Nodes number of Users is 0')
        soft_assert(
            int(host.get_detail("Security", "Groups")) > 0,
            'Nodes number of Groups is 0')
|
kzvyahin/cfme_tests
|
cfme/tests/openstack/test_host_security.py
|
Python
|
gpl-2.0
| 1,236
|
# Generated by Django 2.2.5 on 2019-09-19 22:22
from django.db import migrations
class Migration(migrations.Migration):
    """Merge migration joining the two divergent 0003 branches of the
    sponsors app; it performs no schema operations of its own."""

    dependencies = [
        ('sponsors', '0003_auto_20190914_1504'),
        ('sponsors', '0003_auto_20190914_1745'),
    ]

    operations = [
    ]
|
patrick91/pycon
|
backend/sponsors/migrations/0004_merge_20190919_2222.py
|
Python
|
mit
| 275
|
"""Picture views."""
import os
from pyramid.httpexceptions import HTTPForbidden
from pyramid.httpexceptions import HTTPNotFound
from pyramid.response import FileResponse
from sqlalchemy.orm import join as orm_join
from sqlalchemy.orm.exc import NoResultFound
from lasco.auth import get_user_role
from lasco.auth import get_user_metadata
from lasco.auth import ROLE_GALLERY_ADMIN
from lasco.models import DBSession
from lasco.models import Album
from lasco.models import Gallery
from lasco.models import Picture
from lasco.views.utils import TemplateAPI
def picture_in_album(request):
    """Display one picture within an album.

    Resolves the gallery/album/picture from the URL, checks the caller's
    role, and computes previous/next picture ids (date order) for the
    navigation links.

    :raises HTTPNotFound: unknown gallery, album or picture.
    :raises HTTPForbidden: the user has no role on this album.
    """
    session = DBSession()
    gallery_name = request.matchdict['gallery_name']
    album_name = request.matchdict['album_name']
    try:
        gallery = session.query(Gallery).filter_by(name=gallery_name).one()
        album = session.query(Album).filter_by(gallery_id=gallery.id,
                                               name=album_name).one()
    except NoResultFound:
        raise HTTPNotFound(request.url)
    role = get_user_role(request, session, gallery, album)
    if not role:
        raise HTTPForbidden()
    can_edit = role == ROLE_GALLERY_ADMIN
    pictures = sorted(album.pictures, key=lambda p: p.date)
    picture_index = None
    picture_id = int(request.matchdict['picture_id'])
    previous_id = next_id = None
    # Locate the requested picture, remembering the id of the one before
    # it (idiomatic enumerate() instead of range(len(...))).
    for i, pic in enumerate(pictures):
        if pic.id == picture_id:
            picture_index = i
            break
        previous_id = pic.id
    if picture_index is None:
        raise HTTPNotFound(request.url)
    picture = pictures[picture_index]
    if picture_index != len(pictures) - 1:
        next_id = pictures[picture_index + 1].id
    api = TemplateAPI(request,
                      '%s - %s' % (gallery.title, album.title))
    if previous_id:
        api.previous_url = request.route_url(
            'picture_in_album',
            gallery_name=gallery_name, album_name=album_name,
            picture_id=previous_id)
    if next_id:
        api.next_url = request.route_url(
            'picture_in_album',
            gallery_name=gallery_name, album_name=album_name,
            picture_id=next_id)
    return {'api': api,
            'gallery': gallery,
            'album': album,
            'picture': picture,
            'picture_index': picture_index,
            'previous_id': previous_id,
            'next_id': next_id,
            'can_edit': can_edit}
def picture_as_image(request):
    """Return an image file for the requested picture.

    The SQL checks, in one UNION, that the user either is a viewer of
    the picture's album or administers the gallery that owns it.
    Always raises HTTPForbidden on failure, whether the picture exists
    or not, so the URL space leaks nothing.
    """
    session = DBSession()
    picture_id = request.matchdict['picture_id']
    user_id = get_user_metadata(request).get('id', None)
    if user_id:
        # SECURITY FIX: bind parameters (:picture_id / :user_id) let the
        # driver quote the values instead of %-interpolating the
        # URL-supplied picture_id straight into the SQL, which was
        # injectable.
        query = (
            "SELECT DISTINCT pictures.* "
            "FROM pictures, album_viewers "
            "WHERE pictures.id=:picture_id AND "
            "      pictures.album_id=album_viewers.album_id AND "
            "      album_viewers.user_id=:user_id "
            " UNION "
            " SELECT DISTINCT pictures.* "
            " FROM pictures, albums, gallery_administrators "
            " WHERE pictures.id=:picture_id AND "
            "       pictures.album_id=albums.id AND "
            "       albums.gallery_id=gallery_administrators.gallery_id AND "
            "       gallery_administrators.user_id=:user_id "
        )
        picture = session.execute(
            query, {'picture_id': picture_id,
                    'user_id': user_id}).first()  # may return None
    else:
        picture = None
    if picture is None:
        # We always raise Forbidden, whether the picture exists (and
        # the user is not allowed to view it) or not.
        raise HTTPForbidden()
    base_path = request.registry.settings['lasco.pictures_base_path']
    full_path = os.path.join(base_path, picture.path)
    return FileResponse(full_path, request=request)
def ajax_update(request):
    """AJAX endpoint: update a picture's caption and location.

    Only administrators of the gallery owning the picture's album may
    edit; everyone else gets HTTPForbidden.  Returns the refreshed
    picture info for the client to re-render.
    """
    session = DBSession()
    pic_id = int(request.matchdict['picture_id'])
    picture = session.query(Picture).filter_by(id=pic_id).one()
    # Find the gallery that owns the picture's album, to check the role.
    gallery = session.query(Gallery).select_from(
        orm_join(Gallery, Album)).\
        filter(Album.id==picture.album_id).\
        filter(Gallery.id==Album.gallery_id).one()
    if get_user_role(request, session, gallery) != ROLE_GALLERY_ADMIN:
        raise HTTPForbidden()
    picture.caption = request.POST['caption']
    picture.location = request.POST['location']
    # NOTE(review): get_info is accessed without calling -- presumably a
    # property on Picture; confirm.
    return {'pic_info': picture.get_info}
|
dbaty/Lasco
|
lasco/views/picture.py
|
Python
|
bsd-3-clause
| 4,459
|
# Copyright (c) 2014, Stanford University
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
Very simple Python interface to MongoDB.
@Author: paepcke
'''
from pymongo import MongoClient
class MongoDB(object):
'''
Very simple Python interface to MongoDB. Based on pymongo,
this class provides methods to get and set default databases
and collections, to insert documents, query collections, and
clear all documents in a collection.
The query() method encapsulates MangoDB native methods find()
and find_one(). The query() method makes it very convenient to
to request only particular sets of fields (columns in relational
terms). Example::
myMongoDb.query({'lname' : 'Doe'}, ('fname', 'lname', 'age'))
'''
# Map of query strings to MongoDB cursors. Administered
# by query() method. Used in resultCount():
queryCursors = {}
# ---------------------------- Public Methods -------------------
def __init__(self, host="localhost", ssl_keyfile=None, dbName="test", collection="test_collection", port=27017, user=None, pwd=""):
    '''
    Create a connection to the MongoDB demon on the given host/port and
    select the given default database and collection.

    :param host: host name where MongoDB demon is running. Can be IP address as string.
    :type host: String
    :param port: MongoDB demon's port
    :type port: int
    '''
    self.host = host
    self.port = port
    self.dbName = dbName
    self.collection = collection
    # Build a MongoDB URI, embedding credentials only when a user was
    # given, e.g. mongodb://readonly:xxxxx@host/db
    # (mongodb://[username:password@]host1[:port1][,...][/[database][?options]])
    if user is not None:
        uri = "mongodb://%s:%s@%s/%s" % (user, pwd, host, dbName)
    else:
        uri = "mongodb://%s/%s" % (host, dbName)
    if ssl_keyfile is None:
        self.client = MongoClient(uri, port)
    else:
        self.client = MongoClient(uri, port, ssl_keyfile=ssl_keyfile)
    self.setDB(dbName)
    self.setCollection(collection)
def setDB(self, dbName):
'''
Establish a default database within MongoDB for subsequent calls
to other methods of this class.
:param dbName: MongoDB database name
:type dbName: String
'''
self.dbName = dbName
self.db = self.client[dbName]
def getDBName(self):
'''
Obtain the name of the MongoDB database that is
currently the default for calls to methods of this class.
:rtype: String
'''
return self.dbName
def setCollection(self, collName):
'''
Establish a default MongoDB collection for subsequent calls to
other methods of this class.
:param collName: MongoDB collection name
:type collName: String
'''
self.coll = self.db[collName]
def getCollectionName(self):
'''
Obtain the name of the MongoDB collection that is
currently the default for calls to methods of this class.
:rtype: String
'''
return self.coll.name
def query(self, mongoQuery, colNameTuple=(), limit=0, db=None, collection=None, wantMongoId=True):
'''
Method for querying the database. The mongoQuery parameter is a
dictionary conforming to the MongoDB query conventions. This query
is passed through to the underlying MongoDB.
The colNameTuple contains a list of field (a.k.a. relational column) names.
Result documents will contain only those fields. In contrast to MangoDB
convention, the _id field is not automatically returned in the result dictionaries.
This field is only included if the caller sets wantMongoId to True
If colNameTuple is an empty tuple, the entirety of each document is returned
for each query result. The rule for the Mongo _id field still holds in this case.
The limit parameter determines how many documents will be returned. A value of zero
returns the entire result set. A value of 1 makes the method's behavior analogous to
MangoDB's native find_one() method.
The db and collection keyword arguments allow callers to address a MongoDB database
and/or collection that are not the default. After the method returns, the default
database and collection values will still be untouched.
:param mongoQuery: MangoDB query
:type mongoQuery: Dict<String,<any>>
:param colNameTuple: a possibly empty tuple of field/column names to retrieve for each result document
If empty, all fields are returned.
:type colNameTuple: (String)
:param limit: maximum number of documents to return
:type limit: int
:param db: name of MongoDB database other than the current default
:type db: String
:param collection: name of MongoDB collection other than the current default
:type collection: String
:param wantMongoId: set to True if return results should include the MongoDb _id field.
:type wantMongoId: Boolean
:rtype: {generator<ResultDict>}
'''
with newMongoDB(self, db) as db, newMongoColl(self, collection) as coll:
# Turn the list of column names to return into
# Mongo-speak. First, take care of the Python weirdness
# that turn single-element tuples into the element
# themselve: ("foo", "bar") ---> ("foo", "bar"). BUT
# ("foo") ---> "foo". Whereas
# ("foo,") ---> ("foo",). Whereas
if not isinstance(colNameTuple, tuple):
colNameTuple = (str(colNameTuple),)
# Create dict {"colName1" : 1, "colName2" : 1,...}
colsToReturn = {}
for colName in colNameTuple:
colsToReturn[colName] = 1
# MongoDB insists on returning the '_id' field, even
# if you don't ask for it. Suppress that behavior
# unless wantMongoId is True.
if not wantMongoId:
# Caller did not explicitly ask for the _id field,
# so suppress _id:
colsToReturn['_id'] = 0
if len(colsToReturn) > 0:
cursor = coll.find(mongoQuery, colsToReturn, limit=limit)
else:
cursor = coll.find(mongoQuery, limit=limit)
# Make the cursor findable, so that callers can ask for number of results
MongoDB.queryCursors[str(mongoQuery)] = cursor
while True:
# Termination happens when cursor is exhausted:
try:
yield cursor.next()
except:
# Cursor is exhausted, remove it from our
# records:
del(MongoDB.queryCursors[str(mongoQuery)])
return
def resultCount(self, queryDict):
'''
Return number of results in the given query. Only works
when query has previously been issued via the query()
method AND at least one result has been extracted. That's
because the first call to query() only returns a generator.
This isn't good.
:param queryDict: Same query that was provided to the query() method
:type queryDict: Dict<String,<any>>
:return: number of results, taking into account limit provided to query(). None if no result has been pulled from query()
:rtype: {int | None}
'''
try:
cursor = MongoDB.queryCursors[str(queryDict)]
return cursor.count(with_limit_and_skip=True)
except KeyError:
#raise ValueError("Called resultCount() with a query string that was not used in a prior call to query(), or the query results have all been retrieved.")
return None
def clearCollection(self, db=None, collection=None):
'''
Remove all documents from a collection. The affected database/collection
are the current defaults, if database/collection are None, else the specified
database/collection is affected.
:param db: Name of MongoDB database, or None
:type db: String
:param collection: Name of MongoDB collection, or None
:type collection: String
'''
with newMongoDB(self, db) as db, newMongoColl(self, collection) as coll:
coll.remove()
def dropCollection(self, db=None, collection=None):
'''
Remove a collection from the database. The affected database/collection
are the current defaults, if database/collection are None, else the specified
database/collection is affected.
:param db: Name of MongoDB database, or None
:type db: String
:param collection: Name of MongoDB collection, or None
:type collection: String
'''
with newMongoDB(self, db) as db, newMongoColl(self, collection) as coll:
coll.drop()
def insert(self, doc_or_docs, db=None, collection=None):
'''
Insert the given dictionary into a MongoDB collection.
:param doc_or_docs: Dictionary whose entries are the documents
:type doc_or_docs: Dict<String,<any>>
:param db: Name of MongoDB database, or None
:type db: String
:param collection: Name of MongoDB collection, or None
:type collection: String
'''
with newMongoDB(self, db) as db, newMongoColl(self, collection) as coll:
coll.insert(doc_or_docs)
def close(self):
'''
Release all resources.
'''
for cursor in MongoDB.queryCursors.values():
try:
cursor.close()
except:
pass
self.client.close()
# ---------------------------- Private Methods -------------------
def get_db(self):
'''
Obtain current default MongoDB database object
'''
return self.db
def get_collection(self):
'''
Obtain current default MongoDB database object
'''
return self.coll
# ---------------------------- Private Classes -------------------
class newMongoDB:
    '''
    Context manager that enables the construct
    "with newMongoDB(mongoObj, 'myDB') as db:".
    On entry it makes the requested database the wrapper's default
    (unless the requested name is None) and yields the underlying
    database object; on exit the previously active database is
    restored. See http://effbot.org/zone/python-with-statement.htm
    for background on the with-statement protocol.
    '''

    def __init__(self, mongoObj, newDbName):
        self._mongo = mongoObj
        self._requested_name = newDbName

    def __enter__(self):
        # Remember the current default so __exit__ can restore it:
        self._prior_name = self._mongo.getDBName()
        if self._requested_name is not None:
            self._mongo.setDB(self._requested_name)
        return self._mongo.get_db()

    def __exit__(self, errType, errValue, errTraceback):
        self._mongo.setDB(self._prior_name)
        # Returning False re-raises any exception from the with-body:
        return False
class newMongoColl:
    '''
    Context manager that enables the construct
    "with newMongoColl(mongoObj, 'myColl') as coll:".
    On entry it makes the requested collection the wrapper's default
    (unless the requested name is None) and yields the underlying
    collection object; on exit the previously active collection is
    restored. See http://effbot.org/zone/python-with-statement.htm
    for background on the with-statement protocol.
    '''

    def __init__(self, mongoObj, newCollName):
        self._mongo = mongoObj
        self._requested_name = newCollName

    def __enter__(self):
        # Remember the current default so __exit__ can restore it:
        self._prior_name = self._mongo.getCollectionName()
        if self._requested_name is not None:
            self._mongo.setCollection(self._requested_name)
        return self._mongo.get_collection()

    def __exit__(self, errType, errValue, errTraceback):
        self._mongo.setCollection(self._prior_name)
        # Returning False re-raises any exception from the with-body:
        return False
|
paepcke/json_to_relation
|
json_to_relation/mongodb.py
|
Python
|
bsd-3-clause
| 13,626
|
import LibreNMS
import json
import logging
import os
import pymysql
import subprocess
import threading
import sys
import time
from datetime import timedelta
from datetime import datetime
from logging import debug, info, warning, error, critical, exception
from platform import python_version
from time import sleep
from socket import gethostname
from signal import signal, SIGTERM
from uuid import uuid1
class ServiceConfig:
    def __init__(self):
        """
        Stores all of the configuration variables for the LibreNMS service in a common object.

        Starts with defaults, but can be populated with variables from config.php by calling populate().
        """
        self._uuid = str(uuid1())
        self.set_name(gethostname())

    def set_name(self, name):
        """
        Set the node name (stripped) and derive a cluster-unique name
        from it by appending this process's UUID. Ignores empty names.
        """
        if name:
            self.name = name.strip()
            self.unique_name = "{}-{}".format(self.name, self._uuid)

    class PollerConfig:
        """Settings bundle for one worker type (poller/discovery/...)."""
        def __init__(self, workers, frequency, calculate=None):
            self.enabled = True
            self.workers = workers
            self.frequency = frequency
            self.calculate = calculate

    # config variables with defaults
    BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir))
    node_id = None
    name = None
    unique_name = None
    single_instance = True
    distributed = False
    group = 0
    debug = False
    log_level = 20
    max_db_failures = 5

    alerting = PollerConfig(1, 60)
    poller = PollerConfig(24, 300)
    services = PollerConfig(8, 300)
    discovery = PollerConfig(16, 21600)
    billing = PollerConfig(2, 300, 60)
    ping = PollerConfig(1, 120)
    down_retry = 60
    update_enabled = True
    update_frequency = 86400

    master_resolution = 1
    master_timeout = 10

    redis_host = 'localhost'
    redis_port = 6379
    redis_db = 0
    redis_pass = None
    redis_socket = None
    redis_sentinel = None
    redis_sentinel_service = None
    redis_timeout = 60

    db_host = 'localhost'
    db_port = 0
    db_socket = None
    db_user = 'librenms'
    db_pass = ''
    db_name = 'librenms'

    watchdog_enabled = False
    watchdog_logfile = 'logs/librenms.log'

    def populate(self):
        """
        Overlay the class-level defaults with values from config.php
        (fetched via _get_config_data()) and environment variables.
        Environment variables win over config.php where both exist.
        """
        config = self._get_config_data()

        # populate config variables
        self.node_id = os.getenv('NODE_ID')
        self.set_name(config.get('distributed_poller_name', None))
        self.distributed = config.get('distributed_poller', ServiceConfig.distributed)
        self.group = ServiceConfig.parse_group(config.get('distributed_poller_group', ServiceConfig.group))

        # backward compatible options
        self.poller.workers = config.get('poller_service_workers', ServiceConfig.poller.workers)
        self.poller.frequency = config.get('poller_service_poll_frequency', ServiceConfig.poller.frequency)
        self.discovery.frequency = config.get('poller_service_discover_frequency', ServiceConfig.discovery.frequency)
        self.down_retry = config.get('poller_service_down_retry', ServiceConfig.down_retry)
        self.log_level = config.get('poller_service_loglevel', ServiceConfig.log_level)

        # new options
        self.poller.enabled = config.get('service_poller_enabled', True)  # unused
        self.poller.workers = config.get('service_poller_workers', ServiceConfig.poller.workers)
        self.poller.frequency = config.get('service_poller_frequency', ServiceConfig.poller.frequency)
        self.discovery.enabled = config.get('service_discovery_enabled', True)  # unused
        self.discovery.workers = config.get('service_discovery_workers', ServiceConfig.discovery.workers)
        self.discovery.frequency = config.get('service_discovery_frequency', ServiceConfig.discovery.frequency)
        self.services.enabled = config.get('service_services_enabled', True)
        self.services.workers = config.get('service_services_workers', ServiceConfig.services.workers)
        self.services.frequency = config.get('service_services_frequency', ServiceConfig.services.frequency)
        self.billing.enabled = config.get('service_billing_enabled', True)
        self.billing.frequency = config.get('service_billing_frequency', ServiceConfig.billing.frequency)
        self.billing.calculate = config.get('service_billing_calculate_frequency', ServiceConfig.billing.calculate)
        self.alerting.enabled = config.get('service_alerting_enabled', True)
        self.alerting.frequency = config.get('service_alerting_frequency', ServiceConfig.alerting.frequency)
        self.ping.enabled = config.get('service_ping_enabled', False)
        # BUGFIX: the fallback used to be ServiceConfig.billing.calculate
        # (a copy/paste slip from the billing lines above); the ping step
        # should default to the ping worker's own default frequency.
        self.ping.frequency = config.get('ping_rrd_step', ServiceConfig.ping.frequency)
        self.down_retry = config.get('service_poller_down_retry', ServiceConfig.down_retry)
        self.log_level = config.get('service_loglevel', ServiceConfig.log_level)
        self.update_enabled = config.get('service_update_enabled', ServiceConfig.update_enabled)
        self.update_frequency = config.get('service_update_frequency', ServiceConfig.update_frequency)

        self.redis_host = os.getenv('REDIS_HOST', config.get('redis_host', ServiceConfig.redis_host))
        self.redis_db = os.getenv('REDIS_DB', config.get('redis_db', ServiceConfig.redis_db))
        self.redis_pass = os.getenv('REDIS_PASSWORD', config.get('redis_pass', ServiceConfig.redis_pass))
        self.redis_port = int(os.getenv('REDIS_PORT', config.get('redis_port', ServiceConfig.redis_port)))
        self.redis_socket = os.getenv('REDIS_SOCKET', config.get('redis_socket', ServiceConfig.redis_socket))
        self.redis_sentinel = os.getenv('REDIS_SENTINEL', config.get('redis_sentinel', ServiceConfig.redis_sentinel))
        self.redis_sentinel_service = os.getenv('REDIS_SENTINEL_SERVICE',
                                                config.get('redis_sentinel_service',
                                                           ServiceConfig.redis_sentinel_service))
        self.redis_timeout = os.getenv('REDIS_TIMEOUT', self.alerting.frequency if self.alerting.frequency != 0 else self.redis_timeout)

        self.db_host = os.getenv('DB_HOST', config.get('db_host', ServiceConfig.db_host))
        self.db_name = os.getenv('DB_DATABASE', config.get('db_name', ServiceConfig.db_name))
        self.db_pass = os.getenv('DB_PASSWORD', config.get('db_pass', ServiceConfig.db_pass))
        self.db_port = int(os.getenv('DB_PORT', config.get('db_port', ServiceConfig.db_port)))
        self.db_socket = os.getenv('DB_SOCKET', config.get('db_socket', ServiceConfig.db_socket))
        self.db_user = os.getenv('DB_USERNAME', config.get('db_user', ServiceConfig.db_user))

        self.watchdog_enabled = config.get('service_watchdog_enabled', ServiceConfig.watchdog_enabled)
        self.watchdog_logfile = config.get('log_file', ServiceConfig.watchdog_logfile)

        # set convenient debug variable
        self.debug = logging.getLogger().isEnabledFor(logging.DEBUG)

        if not self.debug and self.log_level:
            try:
                logging.getLogger().setLevel(self.log_level)
            except ValueError:
                error("Unknown log level {}, must be one of 'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'".format(self.log_level))
                logging.getLogger().setLevel(logging.INFO)

    def _get_config_data(self):
        """
        Load .env (for NODE_ID and friends), then shell out to
        config_to_json.php to obtain the merged PHP configuration
        as a dict. Exits the process if .env cannot be loaded.

        NOTE(review): on a config_to_json.php failure this falls
        through and returns None, which makes populate() raise an
        AttributeError on config.get(); callers appear to rely on
        the process dying either way — confirm before changing.
        """
        try:
            import dotenv
            env_path = "{}/.env".format(self.BASE_DIR)
            info("Attempting to load .env from '%s'", env_path)
            dotenv.load_dotenv(dotenv_path=env_path, verbose=True)

            if not os.getenv('NODE_ID'):
                raise ImportError(".env does not contain a valid NODE_ID setting.")

        except ImportError as e:
            exception("Could not import .env - check that the poller user can read the file, and that composer install has been run recently")
            sys.exit(3)

        config_cmd = ['/usr/bin/env', 'php', '{}/config_to_json.php'.format(self.BASE_DIR), '2>&1']
        try:
            return json.loads(subprocess.check_output(config_cmd).decode())
        except subprocess.CalledProcessError as e:
            error("ERROR: Could not load or parse configuration! {}: {}"
                  .format(subprocess.list2cmdline(e.cmd), e.output.decode()))

    @staticmethod
    def parse_group(g):
        """
        Normalize a poller-group setting into a list of ints.
        Accepts None, a single int, or a comma-separated string;
        anything unparsable falls back to [0].
        """
        if g is None:
            return [0]
        elif type(g) is int:
            return [g]
        elif type(g) is str:
            try:
                return [int(x) for x in set(g.split(','))]
            except ValueError:
                pass

        error("Could not parse group string, defaulting to 0")
        return [0]
class Service:
    # NOTE: class-level attributes, shared across instances; the service is
    # designed to run as a single instance per host (check_single_instance()).
    config = ServiceConfig()
    _fp = False          # lock-file handle held open for the single-instance check
    _started = False     # guards against start() being called twice
    queue_managers = {}  # worker-type name -> QueueManager instance
    poller_manager = None
    discovery_manager = None
    last_poll = {}       # device_id -> time of last dispatched poll (debug logging only)
    terminate_flag = False  # set by the SIGTERM handler; checked by the dispatch loop
    db_failures = 0      # consecutive DB errors seen in fetch_immediate_device_list()

    def __init__(self):
        """Populate config, wire signals, and create DB/lock/timer helpers."""
        self.config.populate()
        threading.current_thread().name = self.config.name  # rename main thread
        self.attach_signals()
        self._db = LibreNMS.DB(self.config)
        self._lm = self.create_lock_manager()
        self.daily_timer = LibreNMS.RecurringTimer(self.config.update_frequency, self.run_maintenance, 'maintenance')
        self.stats_timer = LibreNMS.RecurringTimer(self.config.poller.frequency, self.log_performance_stats, 'performance')
        if self.config.watchdog_enabled:
            info("Starting watchdog timer for log file: {}".format(self.config.watchdog_logfile))
            self.watchdog_timer = LibreNMS.RecurringTimer(self.config.poller.frequency, self.logfile_watchdog, 'watchdog')
        else:
            info("Watchdog is disabled.")
        self.is_master = False

    def attach_signals(self):
        """Install the SIGTERM handler for graceful shutdown."""
        info("Attaching signal handlers on thread %s", threading.current_thread().name)
        signal(SIGTERM, self.terminate)  # capture sigterm and exit gracefully

    def start(self):
        """
        Start all queue managers and timers, then run the main dispatch
        loop until terminate_flag is set (SIGTERM) or KeyboardInterrupt.
        Only the node holding the 'dispatch.master' lock dispatches work.
        """
        debug("Performing startup checks...")

        if self.config.single_instance:
            self.check_single_instance()  # don't allow more than one service at a time

        if self._started:
            raise RuntimeWarning("Not allowed to start Poller twice")
        self._started = True

        debug("Starting up queue managers...")

        # initialize and start the worker pools
        self.poller_manager = LibreNMS.PollerQueueManager(self.config, self._lm)
        self.queue_managers['poller'] = self.poller_manager
        self.discovery_manager = LibreNMS.DiscoveryQueueManager(self.config, self._lm)
        self.queue_managers['discovery'] = self.discovery_manager
        if self.config.alerting.enabled:
            self.queue_managers['alerting'] = LibreNMS.AlertQueueManager(self.config, self._lm)
        if self.config.services.enabled:
            self.queue_managers['services'] = LibreNMS.ServicesQueueManager(self.config, self._lm)
        if self.config.billing.enabled:
            self.queue_managers['billing'] = LibreNMS.BillingQueueManager(self.config, self._lm)
        if self.config.ping.enabled:
            self.queue_managers['ping'] = LibreNMS.PingQueueManager(self.config, self._lm)
        if self.config.update_enabled:
            self.daily_timer.start()
        self.stats_timer.start()
        if self.config.watchdog_enabled:
            self.watchdog_timer.start()

        info("LibreNMS Service: {} started!".format(self.config.unique_name))
        info("Poller group {}. Using Python {} and {} locks and queues"
             .format('0 (default)' if self.config.group == [0] else self.config.group, python_version(),
                     'redis' if isinstance(self._lm, LibreNMS.RedisLock) else 'internal'))
        if self.config.update_enabled:
            info("Maintenance tasks will be run every {}".format(timedelta(seconds=self.config.update_frequency)))
        else:
            warning("Maintenance tasks are disabled.")

        # Main dispatcher loop
        try:
            while not self.terminate_flag:
                master_lock = self._acquire_master()
                if master_lock:
                    if not self.is_master:
                        info("{} is now the master dispatcher".format(self.config.name))
                        self.is_master = True
                        self.start_dispatch_timers()

                    devices = self.fetch_immediate_device_list()
                    for device in devices:
                        device_id = device[0]
                        group = device[1]

                        if device[2]:  # polling
                            self.dispatch_immediate_polling(device_id, group)

                        if device[3]:  # discovery
                            self.dispatch_immediate_discovery(device_id, group)
                else:
                    if self.is_master:
                        info("{} is no longer the master dispatcher".format(self.config.name))
                        self.stop_dispatch_timers()
                        self.is_master = False  # no longer master
                sleep(self.config.master_resolution)
        except KeyboardInterrupt:
            pass

        info("Dispatch loop terminated")
        self.shutdown()

    def _acquire_master(self):
        # Non-blocking attempt to take (or renew) the cluster-wide master lock.
        return self._lm.lock('dispatch.master', self.config.unique_name, self.config.master_timeout, True)

    def _release_master(self):
        # Give up the master lock so another node can take over.
        self._lm.unlock('dispatch.master', self.config.unique_name)

    # ------------ Discovery ------------

    def dispatch_immediate_discovery(self, device_id, group):
        # Skip devices that already have discovery in flight (locked).
        if not self.discovery_manager.is_locked(device_id):
            self.discovery_manager.post_work(device_id, group)

    # ------------ Polling ------------

    def dispatch_immediate_polling(self, device_id, group):
        # Skip devices that already have polling in flight (locked).
        if not self.poller_manager.is_locked(device_id):
            self.poller_manager.post_work(device_id, group)

            if self.config.debug:
                cur_time = time.time()
                elapsed = cur_time - self.last_poll.get(device_id, cur_time)
                self.last_poll[device_id] = cur_time
                # arbitrary limit to reduce spam
                if elapsed > (self.config.poller.frequency - self.config.master_resolution):
                    debug("Dispatching polling for device {}, time since last poll {:.2f}s"
                          .format(device_id, elapsed))

    def fetch_immediate_device_list(self):
        """
        Query the devices table for devices whose polling or discovery is
        due. Returns rows of (device_id, poller_group, poll_due, discover_due);
        COALESCE(..., 1) makes never-polled/never-discovered devices due.
        Resets db_failures on success; on repeated DB errors releases the
        master lock so another node can take over, and returns [].
        """
        try:
            poller_find_time = self.config.poller.frequency - 1
            discovery_find_time = self.config.discovery.frequency - 1

            result = self._db.query('''SELECT `device_id`,
                  `poller_group`,
                  COALESCE(`last_polled` <= DATE_ADD(DATE_ADD(NOW(), INTERVAL -%s SECOND), INTERVAL `last_polled_timetaken` SECOND), 1) AS `poll`,
                  IF(snmp_disable=1 OR status=0, 0, COALESCE(`last_discovered` <= DATE_ADD(DATE_ADD(NOW(), INTERVAL -%s SECOND), INTERVAL `last_discovered_timetaken` SECOND), 1)) AS `discover`
                FROM `devices`
                WHERE `disabled` = 0 AND (
                    `last_polled` IS NULL OR
                    `last_discovered` IS NULL OR
                    `last_polled` <= DATE_ADD(DATE_ADD(NOW(), INTERVAL -%s SECOND), INTERVAL `last_polled_timetaken` SECOND) OR
                    `last_discovered` <= DATE_ADD(DATE_ADD(NOW(), INTERVAL -%s SECOND), INTERVAL `last_discovered_timetaken` SECOND)
                )
                ORDER BY `last_polled_timetaken` DESC''', (poller_find_time, discovery_find_time, poller_find_time, discovery_find_time))
            self.db_failures = 0
            return result
        except pymysql.err.Error:
            self.db_failures += 1
            if self.db_failures > self.config.max_db_failures:
                warning("Too many DB failures ({}), attempting to release master".format(self.db_failures))
                self._release_master()
                sleep(self.config.master_resolution)  # sleep to give another node a chance to acquire
            return []

    def run_maintenance(self):
        """
        Runs update and cleanup tasks by calling daily.sh. Reloads the python script after the update.
        Sets a schema-update lock so no distributed pollers will update until the schema has been updated.
        """
        attempt = 0
        wait = 5
        max_runtime = 86100
        max_tries = int(max_runtime / wait)
        info("Waiting for schema lock")
        while not self._lm.lock('schema-update', self.config.unique_name, max_runtime):
            attempt += 1
            if attempt >= max_tries:  # don't get stuck indefinitely
                warning('Reached max wait for other pollers to update, updating now')
                break
            sleep(wait)

        info("Running maintenance tasks")
        output = LibreNMS.call_script('daily.sh')
        info("Maintenance tasks complete\n{}".format(output))

        self.restart()

    def create_lock_manager(self):
        """
        Create a new LockManager. Tries to create a Redis LockManager, but falls
        back to python's internal threading lock implementation.
        Exits if distributed poller is enabled and a Redis LockManager cannot be created.
        :return: Instance of LockManager
        """
        try:
            return LibreNMS.RedisLock(namespace='librenms.lock',
                                      host=self.config.redis_host,
                                      port=self.config.redis_port,
                                      db=self.config.redis_db,
                                      password=self.config.redis_pass,
                                      unix_socket_path=self.config.redis_socket,
                                      sentinel=self.config.redis_sentinel,
                                      sentinel_service=self.config.redis_sentinel_service,
                                      socket_timeout=self.config.redis_timeout)
        except ImportError:
            if self.config.distributed:
                critical("ERROR: Redis connection required for distributed polling")
                critical("Please install redis-py, either through your os software repository or from PyPI")
                sys.exit(2)
        except Exception as e:
            if self.config.distributed:
                critical("ERROR: Redis connection required for distributed polling")
                critical("Could not connect to Redis. {}".format(e))
                sys.exit(2)

        # Non-distributed fallback: in-process lock only.
        return LibreNMS.ThreadingLock()

    def restart(self):
        """
        Stop then recreate this entire process by re-calling the original script.
        Has the effect of reloading the python files from disk.
        """
        if sys.version_info < (3, 4, 0):
            warning("Skipping restart as running under an incompatible interpreter")
            warning("Please restart manually")
            return

        info('Restarting service... ')
        self._stop_managers_and_wait()
        self._release_master()

        # os.execl replaces the current process image in place.
        python = sys.executable
        os.execl(python, python, *sys.argv)

    def terminate(self, _unused=None, _=None):
        """
        Handle a signal: set the terminate flag to begin a clean shutdown
        :param _unused:
        :param _:
        """
        info("Received SIGTERM on thead %s, handling", threading.current_thread().name)
        self.terminate_flag = True

    def shutdown(self, _unused=None, _=None):
        """
        Stop and exit, waiting for all child processes to exit.
        :param _unused:
        :param _:
        """
        info('Shutting down, waiting for running jobs to complete...')

        self.stop_dispatch_timers()
        self._release_master()

        self.daily_timer.stop()
        self.stats_timer.stop()
        if self.config.watchdog_enabled:
            self.watchdog_timer.stop()

        self._stop_managers_and_wait()

        # try to release master lock
        info('Shutdown of %s/%s complete', os.getpid(), threading.current_thread().name)
        sys.exit(0)

    def start_dispatch_timers(self):
        """
        Start all dispatch timers and begin pushing events into queues.
        This should only be started when we are the master dispatcher.
        """
        for manager in self.queue_managers.values():
            try:
                manager.start_dispatch()
            except AttributeError:
                # Manager type has no dispatch timer; nothing to start.
                pass

    def stop_dispatch_timers(self):
        """
        Stop all dispatch timers, this should be called when we are no longer the master dispatcher.
        """
        for manager in self.queue_managers.values():
            try:
                manager.stop_dispatch()
            except AttributeError:
                # Manager type has no dispatch timer; nothing to stop.
                pass

    def _stop_managers_and_wait(self):
        """
        Stop all QueueManagers, and wait for their processing threads to complete.
        We send the stop signal to all QueueManagers first, then wait for them to finish.
        """
        for manager in self.queue_managers.values():
            manager.stop()
        for manager in self.queue_managers.values():
            manager.stop_and_wait()

    def check_single_instance(self):
        """
        Check that there is only one instance of the service running on this computer.
        We do this by creating a file in the base directory (.lock.service) if it doesn't exist and
        obtaining an exclusive lock on that file.
        """
        lock_file = "{}/{}".format(self.config.BASE_DIR, '.lock.service')

        import fcntl
        self._fp = open(lock_file, 'w')  # keep a reference so the file handle isn't garbage collected
        self._fp.flush()
        try:
            fcntl.lockf(self._fp, fcntl.LOCK_EX | fcntl.LOCK_NB)
        except IOError:
            warning("Another instance is already running, quitting.")
            exit(2)

    def log_performance_stats(self):
        """
        Report this node's identity and per-worker queue statistics into the
        poller_cluster / poller_cluster_stats tables (upsert semantics).
        """
        info("Counting up time spent polling")

        try:
            # Report on the poller instance as a whole
            self._db.query('INSERT INTO poller_cluster(node_id, poller_name, poller_version, poller_groups, last_report, master) '
                           'values("{0}", "{1}", "{2}", "{3}", NOW(), {4}) '
                           'ON DUPLICATE KEY UPDATE poller_version="{2}", poller_groups="{3}", last_report=NOW(), master={4}; '
                           .format(self.config.node_id, self.config.name, "librenms-service", ','.join(str(g) for g in self.config.group), 1 if self.is_master else 0))

            # Find our ID
            self._db.query('SELECT id INTO @parent_poller_id FROM poller_cluster WHERE node_id="{0}"; '.format(self.config.node_id))

            for worker_type, manager in self.queue_managers.items():
                worker_seconds, devices = manager.performance.reset()

                # Record the queue state
                self._db.query('INSERT INTO poller_cluster_stats(parent_poller, poller_type, depth, devices, worker_seconds, workers, frequency) '
                               'values(@parent_poller_id, "{0}", {1}, {2}, {3}, {4}, {5}) '
                               'ON DUPLICATE KEY UPDATE depth={1}, devices={2}, worker_seconds={3}, workers={4}, frequency={5}; '
                               .format(worker_type,
                                       sum([manager.get_queue(group).qsize() for group in self.config.group]),
                                       devices,
                                       worker_seconds,
                                       getattr(self.config, worker_type).workers,
                                       getattr(self.config, worker_type).frequency)
                               )
        except pymysql.err.Error:
            exception("Unable to log performance statistics - is the database still online?")

    def logfile_watchdog(self):
        # Restart the service if the log file has not been written to within
        # one poll period (a stuck service stops logging).
        try:
            # check that logfile has been written to within last poll period
            logfile_mdiff = datetime.now().timestamp() - os.path.getmtime(self.config.watchdog_logfile)
        except FileNotFoundError as e:
            error("Log file not found! {}".format(e))
            return

        if logfile_mdiff > self.config.poller.frequency:
            critical("BARK! Log file older than {}s, restarting service!".format(self.config.poller.frequency))
            self.restart()
        else:
            info("Log file updated {}s ago".format(int(logfile_mdiff)))
|
arjitc/librenms
|
LibreNMS/service.py
|
Python
|
gpl-3.0
| 24,575
|
__doc__ = """
Neural turing machine.
Names of arrays
---------------
Along with range of elements [low, high]
Mprev_bnm: previous memory state. [-inf, inf]
X_bk: external inputs. [0,1]
wprev_bHn: previous weights (read & write). [0, 1]. Normalized along axis 2.
rprev_bhm: previous vector read from memory. [-1, 1]
k_bHm: key vectors [-1, 1]
beta_bH: key strength [0, infinity]
g_bH: gating for weight update. [0, 1]
s_bH3: shift weighting. [0, 1]. Normalized along axis 2.
gamma: sharpening [1, 2]
e_bhm: erase [0, 1]
a_bhm: add [-1, 1]
Names of subscripts
-------------------
- b: batch size
- h: number of read heads == number of write heads
- H: number of read + write heads == 2*h
- n: number of memory sites
- m: dimension at each memory site
- k: dimension of input
- p: dimension of output
"""
import cgt, numpy as np, numpy.random as nr
from cgt import core, nn
from collections import namedtuple
from cgt.core import infer_shape
from example_utils import fmt_row
from param_collection import ParamCollection
import time
# Subscript indicate dimensions of array, and what each dimension indexes over
# Subscript indicate dimensions of array, and what each dimension indexes over.
# Hyperparameter bundle threaded through every NTM construction function below.
NTMOpts = namedtuple("NTMOpts",[
    "b", # batch size
    "h", # number of heads
    "n", # number of memory sites
    "m", # dimension at each memory site
    "k", # dimension of input
    "p", # dimension of output
    "ff_hid_sizes", # hidden layer sizes of feedforward controller
])
def make_ff_controller(opt):
    """
    Build the feedforward controller as a cgt nn.Module.

    Inputs: previous read vectors r_bhm and external input X_bk.
    Outputs: all head parameters (key k, strength beta, gate g, shift s,
    sharpening gamma, erase e, add a) plus the external output y_bp.
    Each parameter is squashed into the range listed in the module docstring.
    """
    b, h, m, p, k = opt.b, opt.h, opt.m, opt.p, opt.k
    H = 2*h
    in_size = k + h*m
    # One flat output slot per parameter:
    # k (H*m) + beta (H) + g (H) + s (H*3) + gamma (H) + e (h*m) + a (h*m) + y (p)
    out_size = H*m + H + H + H*3 + H + h*m + h*m + p

    # Previous reads
    r_bhm = cgt.tensor3("r", fixed_shape = (b,h,m))
    # External inputs
    X_bk = cgt.matrix("x", fixed_shape = (b,k))
    # Flatten reads to (b, h*m) so they can be concatenated with X_bk:
    r_b_hm = r_bhm.reshape([r_bhm.shape[0], r_bhm.shape[1]*r_bhm.shape[2]])
    # Input to controller
    inp_bq = cgt.concatenate([X_bk, r_b_hm], axis=1)

    hid_sizes = opt.ff_hid_sizes
    activation = cgt.tanh
    layer_out_sizes = [in_size] + hid_sizes + [out_size]
    last_out = inp_bq
    # feedforward part. we could simplify a bit by using nn.Affine
    for i in xrange(len(layer_out_sizes)-1):
        indim = layer_out_sizes[i]
        outdim = layer_out_sizes[i+1]
        W = cgt.shared(.02*nr.randn(indim, outdim), name="W%i"%i, fixed_shape_mask="all")
        bias = cgt.shared(.02*nr.randn(1, outdim), name="b%i"%i, fixed_shape_mask="all")
        last_out = cgt.broadcast("+",last_out.dot(W),bias,"xx,1x")
        # Don't apply nonlinearity at the last layer
        if i != len(layer_out_sizes)-2: last_out = activation(last_out)

    # Slice the flat output back into the individual head parameters:
    idx = 0
    k_bHm = last_out[:,idx:idx+H*m];      idx += H*m;         k_bHm = k_bHm.reshape([b,H,m])
    beta_bH = last_out[:,idx:idx+H];      idx += H
    g_bH = last_out[:,idx:idx+H];         idx += H
    s_bH3 = last_out[:,idx:idx+3*H];      idx += 3*H;         s_bH3 = s_bH3.reshape([b,H,3])
    gamma_bH = last_out[:,idx:idx+H];     idx += H
    e_bhm = last_out[:,idx:idx+h*m];      idx += h*m;         e_bhm = e_bhm.reshape([b,h,m])
    a_bhm = last_out[:,idx:idx+h*m];      idx += h*m;         a_bhm = a_bhm.reshape([b,h,m])
    y_bp = last_out[:,idx:idx+p];         idx += p

    # Squash each parameter into its valid range (see module docstring):
    k_bHm = cgt.tanh(k_bHm)                  # [-1, 1]
    beta_bH = nn.softplus(beta_bH)           # [0, inf)
    g_bH = cgt.sigmoid(g_bH)                 # [0, 1]
    s_bH3 = sum_normalize2(cgt.exp(s_bH3))   # normalized along axis 2
    gamma_bH = cgt.sigmoid(gamma_bH)+1       # [1, 2]
    e_bhm = cgt.sigmoid(e_bhm)               # [0, 1]
    a_bhm = cgt.tanh(a_bhm)                  # [-1, 1]
    # y_bp = y_bp

    assert infer_shape(k_bHm) == (b,H,m)
    assert infer_shape(beta_bH) == (b,H)
    assert infer_shape(g_bH) == (b,H)
    assert infer_shape(s_bH3) == (b,H,3)
    assert infer_shape(gamma_bH) == (b,H)
    assert infer_shape(e_bhm) == (b,h,m)
    assert infer_shape(a_bhm) == (b,h,m)
    assert infer_shape(y_bp) == (b,p)

    return nn.Module([r_bhm, X_bk], [k_bHm, beta_bH, g_bH, s_bH3, gamma_bH, e_bhm, a_bhm, y_bp])
def make_ntm_initial_states(opt):
    """
    Build the learnable initial state (memory M, weights w, reads r), each
    created with leading dimension 1 and repeated along axis 0 to batch size b.
    Initial weights are exp-then-normalized so they sum to 1 along axis 2.
    """
    n, m, h, b = opt.n, opt.m, opt.h, opt.b
    M_1nm = cgt.shared(.1*nr.randn(1,n,m))
    winit_1Hn = cgt.shared(.1*nr.rand(1,2*h,n))
    winit_1Hn = sum_normalize2(cgt.exp(winit_1Hn))
    rinit_1hm = cgt.shared(np.zeros((1,h,m)))
    return [cgt.repeat(arr, b, axis=0) for arr in (M_1nm, winit_1Hn, rinit_1hm)]
def ntm_address(opt, wprev_bhn, M_bnm, k_bhm, beta_bh, g_bh, s_bh3, gamma_bh):
    """
    Compute the new head weightings over memory sites, following the NTM
    addressing pipeline (content addressing -> interpolation -> shift ->
    sharpening) of http://arxiv.org/pdf/1410.5401v2.pdf.
    Here 'h' subscripts actually span both read and write heads (H = 2*h).
    """
    # Content addressing

    # Cosine similarity
    # take inner product along memory axis k * M
    numer_bhn = cgt.einsum("bhm,bnm->bhn", k_bhm, M_bnm)
    # compute denominator |k| * |m|
    denom_bhn = cgt.broadcast("*",
        cgt.norm(k_bhm, axis=2, keepdims=True), # -> shape bh1
        cgt.norm(M_bnm, axis=2, keepdims=True).transpose([0,2,1]), # -> bn1 -> b1n
        "xx1,x1x"
    )
    csim_bhn = numer_bhn / denom_bhn
    assert infer_shape(csim_bhn) == (opt.b, 2*opt.h, opt.n)
    # scale by beta and softmax over memory sites:
    tmp_bhn = cgt.broadcast("*", beta_bh[:,:,None], csim_bhn, "xx1,xxx")
    wc_bhn = sum_normalize2(cgt.exp( tmp_bhn ))
    # Interpolation between previous weights and content weights, gated by g:
    g_bh1 = g_bh[:,:,None]
    wg_bhn = cgt.broadcast("*", wprev_bhn, (1 - g_bh1), "xxx,xx1") \
            + cgt.broadcast("*", wc_bhn, g_bh1, "xxx,xx1")
    # Shift: circular convolution with the 3-element shift kernel s:
    wtil_bhn = circ_conv_1d(wg_bhn, s_bh3, axis=2)
    # Sharpening: raise to power gamma and renormalize:
    wfin_bhn = sum_normalize2(cgt.broadcast("**", wtil_bhn, gamma_bh.reshape([opt.b,2*opt.h,1]), "xxx,xx1"))

    b,h,n = opt.b, 2*opt.h, opt.n
    assert infer_shape(wtil_bhn) == (b,h,n)
    assert infer_shape(gamma_bh) == (b,h)
    assert infer_shape(gamma_bh[:,:,None]) == (b,h,1)
    return wfin_bhn
def ntm_read(M_bnm, w_bhn):
    """Read from memory: each head's read vector is the weighted sum of
    memory rows under that head's weighting. Returns shape (b, h, m)."""
    return cgt.einsum('bhn,bnm->bhm', w_bhn, M_bnm)
def ntm_write(M_bnm, w_bhn, e_bhm, a_bhm):
    """Erase-then-add memory update (Eqs. 3-4 of the NTM paper).

    w_bhn: write-head weightings, e_bhm: erase vectors, a_bhm: add vectors.
    Returns the updated memory of shape (b, n, m).
    """
    if False: # Here's the version that's faithful to the paper
        # weighted erases bhn1 bh1m
        # ideally we wouldn't create this big 4-tensor but this operation
        # requires a more general kind of contraction than is provided by einsum
        we_bhmn = cgt.broadcast("*", w_bhn[:,:,:,None], e_bhm[:,:,None,:], "xxx1,xx1x")
        # take produce of erasing factors
        mult_bmn = (1 - we_bhmn).prod(axis=1)
        M_bnm = M_bnm * mult_bmn # Equation 3 http://arxiv.org/pdf/1410.5401v2.pdf
    else: # This version just does a regular contraction
        # NOTE: summing erase contributions over heads instead of multiplying
        # them is an intentional approximation of the paper's product form
        erase_bnm = cgt.einsum( "bhn,bhm->bnm", w_bhn, e_bhm)
        M_bnm = M_bnm*(1-erase_bnm)
    # Now do the same thing with adds
    # But now it's just a regular contraction since we are adding rather than taking product
    add_bnm = cgt.einsum( "bhn,bhm->bnm", w_bhn, a_bhm)
    M_bnm = M_bnm + add_bnm
    return M_bnm
def ntm_step(opt, Mprev_bnm, X_bk, wprev_bHn, rprev_bhm, controller):
    """Advance the NTM one timestep.

    The controller maps (previous reads, external input) to head parameters
    and the external output; addressing produces new weightings for all 2*h
    heads (first h read, last h write), then memory is read and written.
    Returns (new memory, new weightings, new read vectors, output).
    """
    h = opt.h
    controller_outputs = controller([rprev_bhm, X_bk])
    k_bHm, beta_bH, g_bH, s_bH3, gamma_bH, e_bhm, a_bhm, y_bp = controller_outputs
    w_bHn = ntm_address(opt, wprev_bHn, Mprev_bnm, k_bHm, beta_bH, g_bH, s_bH3, gamma_bH)
    read_weights = w_bHn[:, :h, :]
    write_weights = w_bHn[:, h:, :]
    r_bhm = ntm_read(Mprev_bnm, read_weights)
    M_bnm = ntm_write(Mprev_bnm, write_weights, e_bhm, a_bhm)
    return M_bnm, w_bHn, r_bhm, y_bp
def sum_normalize2(x):
    """Normalize x to sum to 1 along axis 2, broadcasting the division."""
    totals = x.sum(axis=2, keepdims=True)
    return cgt.broadcast("/", x, totals, "xxx,xx1")
def make_ntm(opt):
    """Build the single-step NTM as an nn.Module.

    Inputs are (external input, previous memory, previous weightings, previous
    reads); outputs are (external output, new memory, new weightings, new
    reads), so the state slots line up between consecutive steps.
    """
    Mprev_bnm = cgt.tensor3("M", fixed_shape=(opt.b, opt.n, opt.m))
    X_bk = cgt.matrix("X", fixed_shape=(opt.b, opt.k))
    wprev_bHn = cgt.tensor3("w", fixed_shape=(opt.b, opt.h*2, opt.n))
    rprev_bhm = cgt.tensor3("r", fixed_shape=(opt.b, opt.h, opt.m))
    controller = make_ff_controller(opt)
    M_bnm, w_bHn, r_bhm, y_bp = ntm_step(opt, Mprev_bnm, X_bk, wprev_bHn, rprev_bhm, controller)
    # in this form it looks like a standard seq-to-seq model
    # external input and output are first elements
    ntm = nn.Module([X_bk, Mprev_bnm, wprev_bHn, rprev_bhm], [y_bp, M_bnm, w_bHn, r_bhm])
    return ntm
def bernoulli_crossentropy(bins, probs):
    "bins = binary values. probs = Pr(b=1)"
    # elementwise -log-likelihood of the binary targets under the Bernoulli model
    positive_term = bins*cgt.log(probs)
    negative_term = (1-bins)*cgt.log(1-probs)
    return -(positive_term + negative_term)
def make_funcs(opt, ntm, total_time, loss_timesteps):
    """Unroll the NTM over `total_time` steps and compile loss/grad functions.

    Returns (f_loss, f_loss_and_grad, params) where f_loss(x, y) -> mean
    cross-entropy in bits and f_loss_and_grad also returns the 0/1 accuracy
    and the flattened gradient. Loss is accumulated only at `loss_timesteps`.
    """
    x_tbk = cgt.tensor3("x", fixed_shape=(total_time, opt.b, opt.k))
    y_tbp = cgt.tensor3("y", fixed_shape=(total_time, opt.b, opt.p))
    loss_timesteps = set(loss_timesteps)
    initial_states = make_ntm_initial_states(opt)
    # the initial states are shared variables, so they are trained too
    params = ntm.get_parameters() + get_parameters(initial_states)
    # params = ntm.get_parameters()
    lossCE = 0
    loss01 = 0
    state_arrs = initial_states
    for t in xrange(total_time):
        # module outputs: [prediction, memory, weightings, reads]
        tmp = ntm([x_tbk[t]] + state_arrs)
        raw_pred = tmp[0]
        state_arrs = tmp[1:4]
        if t in loss_timesteps:
            p_pred = cgt.sigmoid(raw_pred)
            ce = bernoulli_crossentropy(y_tbp[t] , p_pred).sum() # cross-entropy of bernoulli distribution
            lossCE = lossCE + ce
            loss01 = loss01 + cgt.cast(cgt.equal(y_tbp[t], round01(p_pred)),cgt.floatX).sum()
    # normalize to per-bit values; /log(2) converts nats to bits
    lossCE = lossCE / (len(loss_timesteps) * opt.p * opt.b) / np.log(2)
    loss01 = loss01 / (len(loss_timesteps) * opt.p * opt.b)
    gradloss = cgt.grad(lossCE, params)
    flatgrad = flatcat(gradloss)
    f_loss = cgt.function([x_tbk, y_tbp], lossCE)
    f_loss_and_grad = cgt.function([x_tbk, y_tbp], [lossCE, loss01, flatgrad])
    print "number of nodes in computation graph:", core.count_nodes([lossCE, loss01, flatgrad])
    return f_loss, f_loss_and_grad, params
def round01(x):
    """Threshold at 0.5, mapping each element to 0.0 or 1.0."""
    above_half = x > .5
    return cgt.cast(above_half, cgt.floatX)
def flatcat(xs):
    """Flatten each array in xs and concatenate them into one long vector."""
    flattened = [arr.flatten() for arr in xs]
    return cgt.concatenate(flattened)
class CopyTask(object):
    """Copy task: present a random binary message framed by start/end markers,
    then require the network to reproduce it after the end marker.

    Input channel layout: channel 0 = start marker, channel 1 = end marker,
    channels 2.. = message bits.
    """
    def __init__(self, batch_size, seq_length, output_dim):
        self.b = batch_size
        self.t = seq_length
        self.k = output_dim + 2   # message bits plus the two marker channels
        self.p = output_dim
    def gen_batch(self):
        """Build one (input, target) pair, each of shape (time, batch, channels)."""
        assert self.k == self.p + 2
        episode_len = 2*self.t + 2
        bits = (nr.rand(self.t, self.b, self.p) > .5).astype(cgt.floatX)
        x_tbk = np.zeros((episode_len, self.b, self.k), cgt.floatX)
        x_tbk[0, :, 0] = 1                   # start symbol
        x_tbk[1:self.t+1, :, 2:] = bits      # the message itself
        x_tbk[self.t+1, :, 1] = 1            # end symbol
        y_tbk = np.zeros((episode_len, self.b, self.p), cgt.floatX)
        y_tbk[self.t+2:] = bits              # desired reproduction
        return x_tbk, y_tbk
    def loss_timesteps(self):
        """Timesteps at which the output is penalized."""
        return range(self.t+1, 2*self.t+2)
    def total_time(self):
        """Total number of timesteps in one episode."""
        return 2*self.t+2
class ReverseCopyTask(object):
    """Reverse-copy task: like CopyTask, but the target after the end marker
    is the message with its time axis reversed.

    Input channel layout: channel 0 = start marker, channel 1 = end marker,
    channels 2.. = message bits.
    """
    def __init__(self, batch_size, seq_length, output_dim):
        self.b = batch_size
        self.t = seq_length
        self.k = output_dim + 2   # message bits plus the two marker channels
        self.p = output_dim
    def gen_batch(self):
        """Build one (input, target) pair, each of shape (time, batch, channels)."""
        assert self.k == self.p + 2
        episode_len = 2*self.t + 2
        bits = (nr.rand(self.t, self.b, self.p) > .5).astype(cgt.floatX)
        x_tbk = np.zeros((episode_len, self.b, self.k), cgt.floatX)
        x_tbk[0, :, 0] = 1                   # start symbol
        x_tbk[1:self.t+1, :, 2:] = bits      # the message itself
        x_tbk[self.t+1, :, 1] = 1            # end symbol
        y_tbk = np.zeros((episode_len, self.b, self.p), cgt.floatX)
        y_tbk[self.t+2:] = bits[::-1]        # message reversed in time
        return x_tbk, y_tbk
    def loss_timesteps(self):
        """Timesteps at which the output is penalized."""
        return range(self.t+1, 2*self.t+2)
    def total_time(self):
        """Total number of timesteps in one episode."""
        return 2*self.t+2
class RepeatCopyTask(object):
    """Repeat-copy task: present one random binary message, then require the
    network to reproduce it n_copies times after the end marker.

    Input channel layout: channel 0 = start marker, channel 1 = end marker,
    channels 2.. = message bits.
    """
    def __init__(self, batch_size, seq_length, output_dim, n_copies):
        self.b = batch_size
        self.t = seq_length
        self.k = output_dim+2   # message bits plus the two marker channels
        self.p = output_dim
        self.n_copies = n_copies
    def gen_batch(self):
        """Build one (input, target) pair, each of shape (time, batch, channels)."""
        assert self.k == self.p + 2
        total = self.total_time()
        x_tbk = np.zeros((total, self.b, self.k),cgt.floatX)
        x_tbk[0, :, 0] = 1 # start symbol
        message = (nr.rand(self.t, self.b, self.p) > .5).astype(cgt.floatX)
        x_tbk[1:self.t+1,:,2:] = message
        x_tbk[self.t+1, :, 1] = 1 # end symbol
        # BUG FIX: the target used to be allocated with only 2*t+2 timesteps,
        # which is too short to hold n_copies > 1 copies of the message.
        y_tbk = np.zeros((total, self.b, self.p),cgt.floatX)
        for i in xrange(self.n_copies):
            start = self.t+2+i*self.t
            y_tbk[start:start+self.t] = message # desired output
        return x_tbk, y_tbk
    def loss_timesteps(self):
        """Timesteps at which the output is penalized (everything after the message)."""
        # BUG FIX: was hard-coded to the single-copy range (t+1, 2t+2)
        return range(self.t+1, self.total_time())
    def total_time(self):
        """Total episode length: message + two markers + n_copies output copies."""
        # BUG FIX: was 2*t+2 regardless of n_copies, mismatching gen_batch's x
        return (1+self.n_copies)*self.t+2
def circ_conv_1d(wg_bhn, s_bh3, axis=2):
    """Circular convolution along axis 2 with a 3-tap filter s_bh3.

    VERY inefficient special-case implementation: materializes the weighting
    rotated one step backward and one step forward, then forms the weighted
    sum s[0]*back + s[1]*center + s[2]*fwd per batch element and head.
    """
    assert axis == 2
    n = cgt.size(wg_bhn,2)
    # rotate right by one (wraps the last site to the front)
    wback = cgt.concatenate([wg_bhn[:,:,n-1:n], wg_bhn[:,:,:n-1]], axis=2)
    w = wg_bhn
    # rotate left by one (wraps the first site to the back)
    wfwd = cgt.concatenate([wg_bhn[:,:,1:n], wg_bhn[:,:,0:1]], axis=2)
    return cgt.broadcast("*", s_bh3[:,:,0:1] , wback, "xx1,xxx")\
        + cgt.broadcast("*", s_bh3[:,:,1:2] , w, "xx1,xxx")\
        + cgt.broadcast("*", s_bh3[:,:,2:3] , wfwd, "xx1,xxx")
def rmsprop_update(grad, state):
    """One in-place RMSProp step.

    Decays the squared-gradient accumulator, adds grad^2, then moves theta
    against grad scaled by 1/sqrt(accumulator) times the step size. Reuses
    state.scratch as a preallocated buffer; it ends up holding the update.
    """
    state.sqgrad *= state.decay_rate
    state.sqgrad += np.square(grad)                               # accum = decay*accum + g^2
    np.divide(grad, np.sqrt(state.sqgrad, out=state.scratch),
              out=state.scratch)                                  # scratch = g / rms
    state.scratch *= state.step_size
    state.theta -= state.scratch
class Table(dict):
    """A dict whose entries are also reachable as attributes (t.x == t['x']).

    Works by aliasing the instance __dict__ to the dict itself, so attribute
    assignment and item assignment stay in sync.
    """
    def __init__(self, **kwargs):
        super(Table, self).__init__(**kwargs)
        self.__dict__ = self
def make_rmsprop_state(theta, step_size, decay_rate):
    """Bundle RMSProp optimizer state for `rmsprop_update`.

    sqgrad starts at 1e-6 rather than 0 to avoid dividing by zero on the
    very first step; scratch is a reusable work buffer shaped like theta.
    """
    sqgrad = np.zeros_like(theta) + 1e-6
    scratch = np.empty_like(theta)
    return Table(theta=theta, sqgrad=sqgrad, scratch=scratch,
                 step_size=step_size, decay_rate=decay_rate)
def get_parameters(xs):
    """Collect every data (parameter) node reachable in the graphs rooted at xs."""
    # XXX
    return [node for node in core.topsorted(xs) if node.is_data()]
def main():
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("--grad_check",action="store_true")
parser.add_argument("--n_batches",type=int,default=1000000)
parser.add_argument("--profile",action="store_true")
parser.add_argument("--unittest", action="store_true")
args = parser.parse_args()
np.seterr("raise")
cgt.set_precision("quad" if args.grad_check else "double")
np.random.seed(0)
# model parameters
if args.grad_check:
opt = NTMOpts(
b = 1, # batch size
h = 1, # number of heads
n = 2, # number of memory sites
m = 3, # dimension at each memory site
k = 4, # dimension of input
p = 2, # dimension of output
ff_hid_sizes = []
)
seq_length = 2
else:
opt = NTMOpts(
b = 64, # batch size
h = 3, # number of heads
n = 128, # number of memory sites
m = 20, # dimension at each memory site
k = 3, # dimension of input
p = 1, # dimension of output
ff_hid_sizes = [128,128]
)
seq_length = 10
if args.unittest:
seq_length=3
args.n_batches=3
tstart = time.time()
ntm = make_ntm(opt)
task = CopyTask(opt.b, seq_length, opt.p)
f_loss, f_loss_and_grad, params = make_funcs(opt, ntm, task.total_time(), task.loss_timesteps())
print "graph construction and compilation took %g seconds"%(time.time()-tstart)
pc = ParamCollection(params)
pc.set_value_flat(nr.uniform(-.1, .1, size=(pc.get_total_size(),)))
if args.grad_check:
x,y = task.gen_batch()
def f(thnew):
thold = th.copy()
pc.set_value_flat(thnew)
loss = f_loss(x,y)
pc.set_value_flat(thold)
return loss
from cgt.numeric_diff import numeric_grad
g_num = numeric_grad(f, th,eps=1e-8)
_, _, g_anal = f_loss_and_grad(x,y)
assert np.allclose(g_num, g_anal, atol=1e-8)
print "Gradient check succeeded!"
print "%i/%i elts of grad are nonzero"%( (g_anal != 0).sum(), g_anal.size )
return
seq_num = 0
state = make_rmsprop_state(pc.get_value_flat(), .01, .95)
print fmt_row(13, ["seq num", "CE (bits)", "accuracy", "|g|_inf"], header=True)
if args.profile: cgt.profiler.start()
for i in xrange(args.n_batches):
x,y = task.gen_batch()
seq_num += x.shape[1]
l,l01,g = f_loss_and_grad(x,y)
print fmt_row(13, [seq_num, l,l01,np.abs(g).max()])
rmsprop_update(g, state)
pc.set_value_flat(state.theta)
if not np.isfinite(l): break
if args.profile: cgt.profiler.print_stats()
if __name__ == "__main__":
main()
|
nebw/cgt
|
examples/demo_neural_turing_machine.py
|
Python
|
mit
| 16,698
|
"""
TopNav view classes.
"""
from Acquisition import aq_inner
from Products.CMFCore.utils import getToolByName
from Products.TeamSpace.permissions import ManageTeamMembership
from opencore.browser.base import BaseView
from opencore.browser.topnav.interfaces import ITopnavMenuItems
from opencore.i18n import _
from opencore.interfaces.message import ITransientMessage
from opencore.nui.contexthijack import HeaderHijackable
from opencore.content.page import OpenPage
from operator import itemgetter
from zope.component import getMultiAdapter
class TopNavView(HeaderHijackable):
    """
    Provides req'd information for rendering top nav in any context.
    """
    def contextmenu(self):
        """ask a viewlet manager for the context menu
        HeaderHijackable takes care of making sure that the context
        is set correctly if we are getting a request with certain
        headers set to specify the context"""
        manager = getMultiAdapter(
            (self.context, self.request, self),
            ITopnavMenuItems,
            name="opencore.topnavmenu")
        manager.update()
        return manager.render()
    @property
    def usermenu(self):
        # pick the viewlet based on whether the visitor is authenticated
        if self.loggedin:
            viewname = 'topnav-auth-user-menu'
        else:
            viewname = 'topnav-anon-user-menu'
        view = getMultiAdapter((self.context, self.request), name=viewname)
        # acquisition-wrap the view so it behaves inside the Zope context
        return view.__of__(aq_inner(self.context))
class AnonMenuView(BaseView):
    """
    View class for the user menu when user is anonymous.
    """
    @property
    def menudata(self):
        """Menu entries for anonymous visitors: sign-in and account creation."""
        site_url = getToolByName(self.context, 'portal_url')()
        sign_in = {'content': _(u'Sign in'),
                   'href': '%s/login' % site_url,
                   }
        create_account = {'content': _(u'Create account'),
                          'href': '%s/join' % site_url,
                          }
        return (sign_in, create_account)
class AuthMenuView(BaseView):
    """
    View class for the user menu when user is logged in.
    """
    @property
    def user_message_count(self):
        """
        returns the number of transient messages currently stored
        for the logged in member, plus pending project invitations
        that were not initiated by the member themselves
        """
        mem_id = self.loggedinmember.getId()
        tm = ITransientMessage(self.portal)
        t_msgs = tm.get_all_msgs(mem_id)
        # Trackback entries are excluded from the badge count
        msg_count = sum([len(value) for key,value in t_msgs.iteritems() if not key == 'Trackback'])
        query = dict(portal_type='OpenMembership',
                     getId=mem_id,
                     )
        mship_brains = self.catalog(**query)
        # pending memberships last acted on by someone else == invitations awaiting this member
        proj_invites = [brain for brain in mship_brains if brain.review_state == 'pending' and brain.lastWorkflowActor != mem_id]
        return msg_count + len(proj_invites)
    @property
    def menudata(self):
        """Menu entries for authenticated members: just the sign-out link."""
        # removed unused local `mem_data = self.member_info` (dead code)
        site_url = getToolByName(self.context, 'portal_url')()
        menudata = (
            {'content': _(u'Sign out'),
             'href': '%s/logout' % site_url,
            },
            )
        return menudata
|
socialplanning/opencore
|
opencore/browser/topnav/topnav.py
|
Python
|
gpl-3.0
| 3,065
|
# Copyright (C) 2013-2015 Samuel Damashek, Peter Foley, James Forcier, Srijay Kasturi, Reed Koser, Christopher Reffett, and Fox Wilson
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from ..helpers.command import Command
@Command('about', ['config'])
def cmd(send, _, args):
    """Returns info about the bot.

    Syntax: {command}
    """
    # pull the repo slug and command prefix out of the bot configuration
    repo = args['config']['api']['githubrepo']
    cmdchar = args['config']['core']['cmdchar']
    send("See https://github.com/%s Developed by the TJHSST CSL. Type %shelp for a list of commands." % (repo, cmdchar))
|
N6UDP/cslbot
|
cslbot/commands/about.py
|
Python
|
gpl-2.0
| 1,181
|
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from resource_management import *
from resource_management.libraries.functions import stack_select
from falcon import falcon
from ambari_commons import OSConst
from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
from resource_management.libraries.functions.stack_features import check_stack_feature
from resource_management.libraries.functions import StackFeature
from resource_management.core.exceptions import ClientComponentHasNoStatus
class FalconClient(Script):
    """Ambari lifecycle handler for the Falcon client component."""
    def configure(self, env):
        """Write out the Falcon client configuration."""
        import params
        env.set_params(params)
        falcon('client', action='config')
    def status(self, env):
        # Client components run no daemon, so there is no status to report.
        raise ClientComponentHasNoStatus()
@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
class FalconClientLinux(FalconClient):
    """Linux variant: package-based install plus stack-upgrade handling."""
    def install(self, env):
        self.install_packages(env)
        self.configure(env)
    def pre_upgrade_restart(self, env, upgrade_type=None):
        """Select the upgraded stack packages before a rolling-upgrade restart."""
        Logger.info("Executing Stack Upgrade pre-restart")
        import params
        env.set_params(params)
        # this function should not execute if the version can't be determined or
        # the stack does not support rolling upgrade
        if not (params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version)):
            return
        Logger.info("Executing Falcon Client Stack Upgrade pre-restart")
        stack_select.select_packages(params.version)
@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
class FalconClientWindows(FalconClient):
    """Windows variant: skip package installation when Falcon is present."""
    def install(self, env):
        import params
        # presumably falcon_home is None only when Falcon is not yet installed
        # on the host — confirm against the Windows params module
        if params.falcon_home is None:
            self.install_packages(env)
        self.configure(env)
# Script entry point: Script.execute() dispatches install/configure/status
# according to the command Ambari passes on the command line.
if __name__ == "__main__":
    FalconClient().execute()
|
arenadata/ambari
|
ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_client.py
|
Python
|
apache-2.0
| 2,394
|
#!/usr/bin/env python
# This file is Copyright David Francos Cuartero, licensed under the GPL2 license.
from distutils.core import setup
#from setuptools import setup
import os
# Distutils packaging metadata for the airgraph-ng grapher.
setup(name='airgraph-ng',
      version='1.1',
      description='Aircrack-ng network grapher',
      author='TheX1le',
      # NOTE(review): 'console' is not a distutils option (looks py2exe-specific);
      # plain distutils will ignore unknown keywords — confirm intent
      console = [{"script": "airgraph-ng" }],
      url='http://aircrack-ng.org',
      license='GPL2',
      classifiers=[
          'Development Status :: 4 - Beta',
      ],
      packages=['graphviz'],
      scripts=['dump-join', 'airgraph-ng'],
     )
|
entropy1337/infernal-twin
|
Modules/airgraph-ng/setup.py
|
Python
|
gpl-3.0
| 556
|
from allauth.account.signals import user_signed_up
from django.dispatch import receiver
from .models import CodeQuizAttempt
# todo -> this is copy pasted from quiz/signal_receivers.
# Can we extract common code?
@receiver(user_signed_up, dispatch_uid="codequiz:codequizattempt:user_signed_up")
def attribute_session_quizattempts_to_user(request, user, **kwargs):
    """On signup, reassign code-quiz attempts made anonymously in this
    session (keyed by the session_key) to the newly created user."""
    CodeQuizAttempt.objects.attribute_to_user(user=user,
                                              user_key=request.session.session_key)
|
pixyj/feel
|
django-server/feel/codequiz/signal_receivers.py
|
Python
|
mit
| 507
|
import pytest
from umodbus.server.serial import AbstractSerialServer
@pytest.fixture
def abstract_serial_server():
    """Provide a fresh AbstractSerialServer instance for each test."""
    return AbstractSerialServer()
def test_abstract_serial_server_get_meta_data(abstract_serial_server):
    """ Test if meta data is correctly extracted from request. """
    # First byte of the serial ADU is the unit (slave) id.
    assert abstract_serial_server.get_meta_data(b'\x01x\02\x03') ==\
        {'unit_id': 1}
def test_abstract_serial_server_shutdown(abstract_serial_server):
    """shutdown() must flip the internal shutdown-request flag.

    (Fixed typo in the test name: 'abract' -> 'abstract'.)
    """
    assert abstract_serial_server._shutdown_request is False
    abstract_serial_server.shutdown()
    assert abstract_serial_server._shutdown_request is True
|
AdvancedClimateSystems/python-modbus
|
tests/unit/server/serial/test_init.py
|
Python
|
mpl-2.0
| 611
|
#!/usr/bin/python
from sklearn.linear_model import RidgeCV
from sklearn.model_selection import train_test_split
from sklearn.externals import joblib
import numpy as np
import matplotlib.pyplot as plt
import os
# Load training data: first 5 columns are features, 6th column is the target.
data = np.loadtxt(fname = 'data.txt', delimiter = ',')
X, y = data[:,:5], data[:,5]
print(X)
print(y)
m = X.shape[0] #number of samples
#training
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)
clf = RidgeCV(alphas = [0.1, 1.0, 10.0], normalize=True)
clf.fit(X_train, y_train)
#make persistent the model or retrieve if exists
filename = 'model.pkl'
if (os.path.isfile(filename)):
    # BUG FIX: the loaded model was discarded before; assign it to clf so the
    # persisted model is actually used instead of the freshly trained one
    clf = joblib.load(filename)
    print("loaded existing model")
else:
    joblib.dump(clf, filename)
    print("created new persistent model")
#predict on the held-out split
prediction = clf.predict(X_test);
print("X_test is: {}".format(X_test))
print("Expected is: {}".format(y_test[0]))
print("Prediction is: {}".format(prediction[0]))
print("Score: {}".format(clf.score(X, y)))
print("Alpha: {}".format(clf.alpha_))
#plotting all data
plt.figure(1)
real, = plt.plot(np.arange(m), y, 'b-', label='real')
predicted, = plt.plot(np.arange(m), clf.predict(X), 'r-', label='predicted')
plt.ylabel('Stock')
plt.xlabel('Time')
plt.legend([real, predicted], ['Real', 'Predicted'])
plt.show()
#plotting only the test split
mtest = X_test.shape[0]
real, = plt.plot(np.arange(mtest), y_test, 'b-', label='real')
test, = plt.plot(np.arange(mtest), clf.predict(X_test), 'go', label='test')
plt.ylabel('Stock')
plt.xlabel('Time')
plt.legend([real, test], ['Real', 'Test'])
plt.show()
|
samuxiii/prototypes
|
learning/stock/main.py
|
Python
|
mit
| 1,558
|
def to_binary(integer):
    """Return the binary representation (most-significant bit first) of the
    absolute value of `integer`.

    Fixes over the original:
    - use floor division (//=) so the loop terminates under Python 3, where
      `/=` on ints yields floats and the old loop spun on float underflow
    - emit digits MSB-first (the old code returned them reversed)
    - return "0" for zero instead of an empty string
    """
    integer = abs(int(integer))
    if integer == 0:
        return '0'
    digits = []
    while integer:
        digits.append(str(integer % 2))
        integer //= 2
    # digits were collected least-significant first
    return ''.join(reversed(digits))
|
vtemian/university_projects
|
arhitecure/hmw2/utils/to_binary.py
|
Python
|
apache-2.0
| 212
|
class MainPageTexts:
    """Expected UI strings (Polish) for the election system's main page."""
    PAGE_TITLE = "System przeprowadzania wyborów"
    ELECTION_NAME = "COG i CDO 2020"
    ELECTION_INFO_LINK_TEXT = "Informacja wyborcza"
    ELECTION_INFO_LINK_HREF = "https://forum.opencaching.pl/viewtopic.php?t=9085&p=143998#p143998"
class ElectionNotStaredTexts:
    """Strings shown when no election is running.

    NOTE(review): class name has a typo ("Stared" vs "Started") — kept as-is
    to avoid breaking importers; fix together with all call sites.
    """
    ELECTION_MESSAGE = "W tej chwili na opencaching.pl nie trwa żadne głosowanie"
class ElectionWaitingForCandidates:
    """Strings shown while candidate registration is open."""
    ELECTION_MESSAGE = "Aktualnie trwa proces zbierania kandydatów do wyborów. Możesz złożyć swoją kandydaturę na " \
                       "forum. "
    ELECTION_FORUM_LINK = "https://forum.opencaching.pl/viewtopic.php?f=30&t=9086"
class ElectionVoting:
    """Strings for the voting flow: buttons, validation errors, and outcomes."""
    LOG_ME_IN_BUTTON_TEXT = "Zaloguj mnie!"
    VOTE_CAST_BUTTON_TEXT = "Oddaję głos!"
    ERROR_MESSAGE_BOTH_LIST_REQUIRED = "Musisz wybrać przynajmniej jednego kandydata na każdej liście."
    ERROR_MESSAGE_TOO_MANY_CANDIDATES = "Możesz wybrać do dwóch kandydatów z każdej listy."
    TRY_AGAIN_TEXT = "Spróbuj ponownie"
    SUCCESS_MESSAGE_AFTER_VOTING = "Dziękujemy za oddanie głosu!"
    ERROR_MESSAGE_ALREADY_VOTED = "Już głosowałeś w tych wyborach!"
|
mzylowski/oc-pl-elections
|
tests/consts_text.py
|
Python
|
gpl-3.0
| 1,154
|
"""Remove nodes whose output is not used."""
from iotile.core.exceptions import ArgumentError
from iotile.sg.node import TrueTrigger, FalseTrigger, InputTrigger
from iotile.sg import DataStreamSelector
from iotile.sg.parser.stream_allocator import StreamAllocator
class RemoveDeadCodePass:
    """Remove nodes whose operation is discarded and has no side effects.

    An operation has no side effects if it is not a call_rpc or
    trigger_streamer operation (currently) since no one outside the
    sensorgraph engine knows if the operation was run or not. Its output is
    discarded if its stream is autogenerated by the compiler and it is not
    connected to anything.

    Args:
        sensor_graph (SensorGraph): The sensor graph to run
            the optimization pass on
    """
    def __init__(self):
        pass
    def run(self, sensor_graph, model):
        """Run this optimization pass on the sensor graph

        If necessary, information on the device model being targeted
        can be found in the associated model argument.

        Returns True when a node was removed (so the caller should run the
        pass again), False when nothing more can be eliminated.

        Args:
            sensor_graph (SensorGraph): The sensor graph to optimize
            model (DeviceModel): The device model we're using
        """
        # We can only eliminate a node if the following checks are true
        # 1. It has no other nodes connected to it
        # 2. Its stream is not an output of the entire sensor graph
        # 3. Its stream is autogenerated by the compiler
        # 4. Its operation has no side effects
        # 5. Its stream is not buffered so the value will not be accessible
        # 6. Its operation is not trigger_streamer (side-effecting)
        for node, inputs, outputs in sensor_graph.iterate_bfs():
            can_remove = False
            # Check 1
            if len(outputs) != 0:
                continue
            # Check 2
            if sensor_graph.is_output(node.stream):
                continue
            # Check 3
            if node.stream.stream_id < StreamAllocator.StartingID:
                continue
            # Check 4
            if node.func_name == u'call_rpc':
                continue
            # Check 5
            if node.stream.buffered:
                # FIXME: Add a warning here if the stream is buffered since
                # its weird for the user to be saving useless data to flash
                continue
            # Check 6
            if node.func_name == u'trigger_streamer':
                continue
            # If all of the checks above have passed, we have found a useless
            # node, let's remove it and return True so we run the pass again
            # and look for additional nodes that are now made useles because
            # of the removal of this one.
            for input_node in inputs:
                input_node.outputs.remove(node)
            if node in sensor_graph.roots:
                sensor_graph.roots.remove(node)
            sensor_graph.nodes.remove(node)
            # FIXME: Check if we need to destroy any walkers here
            return True
        return False
|
iotile/coretools
|
iotilesensorgraph/iotile/sg/optimizer/passes/dead_code_elimination.py
|
Python
|
gpl-3.0
| 3,003
|
#!/bin/python
# coding: utf-8
import lglass.nic
import datetime  # BUG FIX: datetime.datetime.now() is used below but was never imported

database = lglass.nic.FileDatabase()
# Newer database objects hand out sessions; fall back to using the DB directly.
if hasattr(database, "session"):
    session = database.session()
else:
    session = database
print("# Database export from database {}".format(database.database_name))
print("# Current time: {}".format(datetime.datetime.now()))
print()
# Dump every object in the database as text.
for spec in session.lookup():
    print(session.fetch(spec))
if hasattr(session, "close"):
    session.close()
|
fritz0705/lglass
|
contrib/grs-export.py
|
Python
|
mit
| 441
|
import csv, StringIO
from boto.s3.connection import S3Connection
from settings.common import SECRETS_DICT
from boto.s3.key import Key
CRON_BUCKET = 'citigroup-cron'
ACTIVE_ALERTS_KEY = 'gl_active_alerts.txt'
def get_active_alerts_contents():
    """Download the active-alerts CSV from the cron S3 bucket, print it, and
    return it as a string."""
    cron_bucket = CRON_BUCKET
    cron_key = ACTIVE_ALERTS_KEY
    aws_access_key = SECRETS_DICT['CRONBOX_ACCESS_KEY']
    aws_secrets_key = SECRETS_DICT['CRONBOX_SECRET_KEY']
    conn = S3Connection(aws_access_key, aws_secrets_key)
    bucket = conn.get_bucket(cron_bucket)
    k = Key(bucket)
    k.key = cron_key
    cron_contents = k.get_contents_as_string()
    print cron_contents
    return cron_contents
def set_active_alerts_contents(new_alerts_contents):
    """Overwrite the active-alerts CSV in the cron S3 bucket with the given string."""
    cron_bucket = CRON_BUCKET
    cron_key = ACTIVE_ALERTS_KEY
    aws_access_key = SECRETS_DICT['CRONBOX_ACCESS_KEY']
    aws_secrets_key = SECRETS_DICT['CRONBOX_SECRET_KEY']
    conn = S3Connection(aws_access_key, aws_secrets_key)
    bucket = conn.get_bucket(cron_bucket)
    k = Key(bucket)
    k.key = cron_key
    k.set_contents_from_string(new_alerts_contents)
def get_facebook_active_alerts():
    """Parse the S3 CSV into a set of (fb_id, phone) tuples.

    The first CSV row is the header and is skipped.
    """
    s3_contents = get_active_alerts_contents()
    active_alerts = set()
    csv_reader = csv.reader(StringIO.StringIO(s3_contents))
    csv_reader.next()
    for row in csv_reader:
        fb_id = row[0]
        phone = row[1]
        active_alerts.add((fb_id, phone))
    return active_alerts
def set_facebook_active_alerts(active_alerts):
    """Serialize the set of (fb_id, phone) pairs to CSV and upload it to S3."""
    lines = ['fb_id, phone']
    for fb_id, phone in active_alerts:
        lines.append(fb_id + ',' + phone)
    set_active_alerts_contents('\n'.join(lines) + '\n')
def add_facebook_cron_alert(phone, fb_id):
    """Activate the alert for (fb_id, phone); no-op if it is already active."""
    alerts = get_facebook_active_alerts()
    entry = (fb_id, phone)
    if entry in alerts:
        return
    alerts.add(entry)
    set_facebook_active_alerts(alerts)
def remove_facebook_cron_alert(phone, fb_id):
    """Deactivate the alert for (fb_id, phone) if it is currently active."""
    alerts = get_facebook_active_alerts()
    entry = (fb_id, phone)
    if entry in alerts:
        alerts.discard(entry)
        set_facebook_active_alerts(alerts)
# Manual smoke test: fetch (and print, via the getter) the current alert CSV.
if __name__ == '__main__':
    get_facebook_active_alerts()
|
mhfowler/brocascoconut
|
greenlightning/cronbox_s3.py
|
Python
|
mit
| 2,154
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Point Django at the sample project's settings unless already configured,
    # then delegate to Django's CLI (runserver, migrate, ...).
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "DetentionSampleProject.settings")
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
|
luismasuelli/django-detention
|
manage.py
|
Python
|
unlicense
| 265
|
from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
from djangogroovedown import settings
# Discover ModelAdmin registrations from all installed apps.
admin.autodiscover()
# URL routes for the music-download views plus the Django admin.
urlpatterns = patterns('',
    # Examples:
    url(r'^$', 'servicos.views.index', name='index'),
    url(r'^popular/$', 'servicos.views.get_list_popular_music', name='popular'),
    url(r'^download/$', 'servicos.views.download_musics', name='download'),
    url(r'^buscar/$', 'servicos.views.search_musics', name='busca'),
    # url(r'^djangogroovedown/', include('djangogroovedown.foo.urls')),
    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
    # Uncomment the next line to enable the admin:
    url(r'^admin/', include(admin.site.urls)),
)
# In development only: serve uploaded media and static files directly.
if settings.DEBUG:
    urlpatterns += patterns('',
        url(r'^public/media/(?P<path>.*)$', 'django.views.static.serve', {
            'document_root': settings.MEDIA_ROOT, 'show_indexes': True
        }),
        url(r'^public/static/(?P<path>.*)$', 'django.views.static.serve', {
            'document_root': settings.STATIC_ROOT, 'show_indexes': True
        }),
    )
|
alexandreferreira/groovedowndl
|
djangogroovedown/djangogroovedown/urls.py
|
Python
|
apache-2.0
| 1,215
|
# Copyright 2014: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from rally.plugins.openstack.scenarios.sahara import clusters
from tests.unit import test
SAHARA_CLUSTERS = ("rally.plugins.openstack.scenarios.sahara.clusters"
".SaharaClusters")
SAHARA_UTILS = "rally.plugins.openstack.scenarios.sahara.utils"
class SaharaClustersTestCase(test.ScenarioTestCase):
    """Unit tests for the Sahara cluster scenarios, with the launch/scale/delete
    helpers mocked out so only argument forwarding is verified."""
    @mock.patch(SAHARA_CLUSTERS + "._delete_cluster")
    @mock.patch(SAHARA_CLUSTERS + "._launch_cluster",
                return_value=mock.MagicMock(id=42))
    def test_create_and_delete_cluster(
            self, mock__launch_cluster, mock__delete_cluster):
        clusters_scenario = clusters.SaharaClusters(self.context)
        # the scenario reads the image id from the tenant context
        clusters_scenario.context = {
            "tenant": {
                "sahara_image": "test_image"
            }
        }
        clusters_scenario.create_and_delete_cluster(
            flavor="test_flavor",
            workers_count=5,
            plugin_name="test_plugin",
            hadoop_version="test_version")
        mock__launch_cluster.assert_called_once_with(
            flavor_id="test_flavor",
            image_id="test_image",
            workers_count=5,
            plugin_name="test_plugin",
            hadoop_version="test_version",
            floating_ip_pool=None,
            volumes_per_node=None,
            volumes_size=None,
            auto_security_group=None,
            security_groups=None,
            node_configs=None,
            cluster_configs=None,
            enable_anti_affinity=False,
            enable_proxy=False)
        mock__delete_cluster.assert_called_once_with(
            mock__launch_cluster.return_value)
    @mock.patch(SAHARA_CLUSTERS + "._delete_cluster")
    @mock.patch(SAHARA_CLUSTERS + "._scale_cluster")
    @mock.patch(SAHARA_CLUSTERS + "._launch_cluster",
                return_value=mock.MagicMock(id=42))
    def test_create_scale_delete_cluster(
            self, mock__launch_cluster, mock__scale_cluster,
            mock__delete_cluster):
        # the scenario re-fetches the cluster from the client before scaling
        self.clients("sahara").clusters.get.return_value = mock.MagicMock(
            id=42, status="active"
        )
        clusters_scenario = clusters.SaharaClusters(self.context)
        clusters_scenario.context = {
            "tenant": {
                "sahara_image": "test_image"
            }
        }
        clusters_scenario.create_scale_delete_cluster(
            flavor="test_flavor",
            workers_count=5,
            deltas=[1, -1],
            plugin_name="test_plugin",
            hadoop_version="test_version")
        mock__launch_cluster.assert_called_once_with(
            flavor_id="test_flavor",
            image_id="test_image",
            workers_count=5,
            plugin_name="test_plugin",
            hadoop_version="test_version",
            floating_ip_pool=None,
            volumes_per_node=None,
            volumes_size=None,
            auto_security_group=None,
            security_groups=None,
            node_configs=None,
            cluster_configs=None,
            enable_anti_affinity=False,
            enable_proxy=False)
        # one scale call per delta, in order
        mock__scale_cluster.assert_has_calls([
            mock.call(self.clients("sahara").clusters.get.return_value, 1),
            mock.call(self.clients("sahara").clusters.get.return_value, -1),
        ])
        mock__delete_cluster.assert_called_once_with(
            self.clients("sahara").clusters.get.return_value)
|
vishnu-kumar/PeformanceFramework
|
tests/unit/plugins/openstack/scenarios/sahara/test_clusters.py
|
Python
|
apache-2.0
| 4,048
|
# This file is a part of the "SuMPF" package
# Copyright (C) 2018-2021 Jonas Schulte-Coerne
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
"""Contains interpolation functions."""
import functools
import numpy
from ._enums import Interpolations
__all__ = ("get", "zero", "one", "linear", "logarithmic", "log_x", "log_y", "stairs_lin", "stairs_log")
def get(flag):
    """Returns the interpolation function, that is specified by the given flag
    from the :class:`~sumpf._internal.Interpolations` enumeration.

    :param flag: a flag from the :class:`~sumpf._internal.Interpolations` enumeration
    :returns: a function, that takes three parameters. The first is the x values
              as an array or a scalar number, for which interpolated values shall
              be calculated. The second is an array of x values, where the function
              has supporting points and the third is an equally long array of the
              function results at the supporting points. The function returns
              an array of interpolated values or a single result, depending on
              the first parameter being an array or a scalar.
    """
    # dispatch table instead of an if/elif cascade
    functions = {Interpolations.ZERO: zero,
                 Interpolations.ONE: one,
                 Interpolations.LINEAR: linear,
                 Interpolations.LOGARITHMIC: logarithmic,
                 Interpolations.LOG_X: log_x,
                 Interpolations.LOG_Y: log_y,
                 Interpolations.STAIRS_LIN: stairs_lin,
                 Interpolations.STAIRS_LOG: stairs_log}
    function = functions.get(flag)
    if function is None:
        raise ValueError(f"Unknown interpolation flag: {flag}. See sumpf.Bands.interpolations for available flags.")
    return function
def interpolation(func):
    """A decorator for the interpolation functions, that adds the behavior,
    which is shared by all of them:

    * if the given x values are an empty array, the result is empty as well.
    * at the supporting points, the exact function values are returned instead
      of interpolated ones.
    """
    @functools.wraps(func)
    def wrapper(x, xs, ys):
        if isinstance(x, (float, int)):
            # scalar evaluation: return the exact value at a supporting point
            if x in xs:
                return ys[xs == x][0]
            return func(x, xs, ys, scalar=True)
        if len(x) == 0:     # pylint: disable=len-as-condition; x might be a NumPy array, where __nonzero__ is not equivalent to len(.)
            return numpy.empty(0)
        # array evaluation: interpolate first, then overwrite the results at
        # the supporting points with the exact function values
        interpolated = func(x, xs, ys, scalar=False)
        for index, value in enumerate(x):
            if value in xs:
                interpolated[index] = ys[xs == value][0]
        return interpolated
    return wrapper
@interpolation
def zero(x, xs, ys, scalar):    # pylint: disable=unused-argument; all interpolation functions shall have the same interface
    """An interpolation, that evaluates to zero between and outside of the
    supporting points.

    :param x: an array or a scalar value, where the function shall be evaluated
    :param xs: an array of x values of the supporting points
    :param ys: an array of function values of the supporting points. This array
               must have the same length as the xs array.
    :returns: the interpolated or extrapolated function values as an array or a
              scalar, depending on x being an array or a number
    """
    # :param scalar: True if x is a scalar value, False if x is an array. This
    #                parameter is set by the "interpolation"-decorator and is not
    #                exposed to the user of the interpolation function.
    return 0.0 if scalar else numpy.zeros(len(x), dtype=ys.dtype)
@interpolation
def one(x, xs, ys, scalar):     # pylint: disable=unused-argument; all interpolation functions shall have the same interface
    """An interpolation, that evaluates to one between and outside of the
    supporting points.

    :param x: an array or a scalar value, where the function shall be evaluated
    :param xs: an array of x values of the supporting points
    :param ys: an array of function values of the supporting points. This array
               must have the same length as the xs array.
    :returns: the interpolated or extrapolated function values as an array or a
              scalar, depending on x being an array or a number
    """
    # :param scalar: True if x is a scalar value, False if x is an array. This
    #                parameter is set by the "interpolation"-decorator and is not
    #                exposed to the user of the interpolation function.
    return 1.0 if scalar else numpy.ones(len(x), dtype=ys.dtype)
def _linear(x, xs, ys, scalar):
    """A helper function, that implements the linear interpolation, so it can be
    used by other interpolation functions, too.

    Between the supporting points, the function is interpolated with
    :func:`numpy.interp`. Outside their range, it is extrapolated with the
    slope of the two outermost supporting points. If that slope is infinite
    (two supporting points with the same x value), the extrapolated values
    are NaN.

    :param x: an array or a scalar value, where the function shall be evaluated
    :param xs: an array of x values of the supporting points
    :param ys: an array of function values of the supporting points
    :param scalar: True if x is a scalar value, False if x is an array
    :returns: the interpolated or extrapolated function values
    """
    # pylint: disable=too-many-return-statements; there are many corner cases to be covered here
    if len(xs) == 1:
        # with a single supporting point, the function is assumed to be constant
        if scalar:
            return ys[0]
        else:
            return numpy.full(len(x), fill_value=ys[0], dtype=ys.dtype)
    else:
        if scalar:
            if x < xs[0]:       # extrapolate to the left
                m = (ys[1] - ys[0]) / (xs[1] - xs[0])
                if numpy.isinf(m):
                    return numpy.nan
                n = ys[0] - m * xs[0]
                return m * x + n
            elif x > xs[-1]:    # extrapolate to the right
                m = (ys[-1] - ys[-2]) / (xs[-1] - xs[-2])
                if numpy.isinf(m):
                    return numpy.nan
                n = ys[-1] - m * xs[-1]
                return m * x + n
            else:
                return numpy.interp(x, xp=xs, fp=ys)
        else:
            result = numpy.interp(x, xp=xs, fp=ys)
            if x.min() < xs[0]:
                mask = x < xs[0]
                m = (ys[1] - ys[0]) / (xs[1] - xs[0])
                if numpy.isinf(m):
                    # BUGFIX: return NaN for an infinite slope like the scalar
                    # code path does, instead of computing +/-inf values
                    result[mask] = numpy.nan
                else:
                    n = ys[0] - m * xs[0]
                    result[mask] = m * x[mask] + n
            if x.max() > xs[-1]:
                mask = x > xs[-1]
                m = (ys[-1] - ys[-2]) / (xs[-1] - xs[-2])
                if numpy.isinf(m):
                    # BUGFIX: consistent with the scalar code path (see above)
                    result[mask] = numpy.nan
                else:
                    n = ys[-1] - m * xs[-1]
                    result[mask] = m * x[mask] + n
            return result
@interpolation
def linear(x, xs, ys, scalar):
    """An interpolation, that connects neighboring supporting points with
    straight lines and also extrapolates linearly.

    :param x: an array or a scalar value, where the function shall be evaluated
    :param xs: an array of x values of the supporting points
    :param ys: an array of function values of the supporting points. This array
               must have the same length as the xs array.
    :returns: the interpolated or extrapolated function values as an array or a
              scalar, depending on x being an array or a number
    """
    # :param scalar: True if x is a scalar value, False if x is an array. This
    #                parameter is set by the "interpolation"-decorator and is not
    #                exposed to the user of the interpolation function.
    return _linear(x, xs, ys, scalar)
@interpolation
def logarithmic(x, xs, ys, scalar):
    """An interpolation, that is linear on a plot with logarithmic x- and y-axes.

    :param x: an array or a scalar value, where the function shall be evaluated
    :param xs: an array of x values of the supporting points
    :param ys: an array of function values of the supporting points. This array
               must have the same length as the xs array.
    :returns: the interpolated or extrapolated function values as an array or a
              scalar, depending on x being an array or a number
    """
    # :param scalar: True if x is a scalar value, False if x is an array. This
    #                parameter is set by the "interpolation"-decorator and is not
    #                exposed to the user of the interpolation function.
    # the actual base of the logarithm does not matter, but the log2 function
    # proved to be twice as fast as log or log10, while exp was only slightly
    # faster than exp2
    interpolated = _linear(numpy.log2(x), numpy.log2(xs), numpy.log2(ys), scalar)
    return numpy.exp2(interpolated)
@interpolation
def log_x(x, xs, ys, scalar):
    """An interpolation, that is linear on a plot with a logarithmic x-axis and
    a linear y-axis.

    This might be useful for :class:`~sumpf.Bands` filters, where the filter
    values are in dB, so that they shall not be processed logarithmically, but
    the frequencies are plotted on a logarithmic x-axis. In such a case, this
    interpolation produces straight lines between two supporting points.

    :param x: an array or a scalar value, where the function shall be evaluated
    :param xs: an array of x values of the supporting points
    :param ys: an array of function values of the supporting points. This array
               must have the same length as the xs array.
    :returns: the interpolated or extrapolated function values as an array or a
              scalar, depending on x being an array or a number
    """
    # :param scalar: True if x is a scalar value, False if x is an array. This
    #                parameter is set by the "interpolation"-decorator and is not
    #                exposed to the user of the interpolation function.
    # the actual base of the logarithm does not matter, but the log2 function
    # proved to be twice as fast as log or log10
    return _linear(numpy.log2(x), numpy.log2(xs), ys, scalar)
@interpolation
def log_y(x, xs, ys, scalar):
    """An interpolation, that is linear on a plot with a linear x-axis and a
    logarithmic y-axis.

    :param x: an array or a scalar value, where the function shall be evaluated
    :param xs: an array of x values of the supporting points
    :param ys: an array of function values of the supporting points. This array
               must have the same length as the xs array.
    :returns: the interpolated or extrapolated function values as an array or a
              scalar, depending on x being an array or a number
    """
    # :param scalar: True if x is a scalar value, False if x is an array. This
    #                parameter is set by the "interpolation"-decorator and is not
    #                exposed to the user of the interpolation function.
    # the actual base of the logarithm does not matter, but the log2 function
    # proved to be twice as fast as log or log10, while exp was only slightly
    # faster than exp2
    interpolated = _linear(x, xs, numpy.log2(ys), scalar)
    return numpy.exp2(interpolated)
def _stairs_lin(x, xs, ys):
    """A helper function, that implements the stairs interpolation on a linear
    x-axis, so it can be used by other interpolation functions, too.

    Other than in the other interpolation functions, this helper function
    expects x to be an array every time.
    """
    indices = numpy.searchsorted(xs, x)
    indices[indices >= len(ys)] -= 1                            # clamp the indices, so that xs[indices] is always valid
    distance_left = x - xs[numpy.maximum(indices - 1, 0)]       # distance to the supporting point to the left
    distance_right = x - xs[indices]                            # distance to the supporting point at or to the right
    closer_to_left = numpy.fabs(distance_left) < numpy.fabs(distance_right)
    indices[closer_to_left] -= 1                                # pick the nearer of the two supporting points
    return ys[indices]
@interpolation
def stairs_lin(x, xs, ys, scalar):
    """An interpolation, that keeps the value of the nearest supporting point
    constant, so the resulting function looks like stairs. The x-value, where
    it switches from the function value of one supporting point to the other,
    is the average x-value between the x-values of the surrounding supporting
    points.

    :param x: an array or a scalar value, where the function shall be evaluated
    :param xs: an array of x values of the supporting points
    :param ys: an array of function values of the supporting points. This array
               must have the same length as the xs array.
    :returns: the interpolated or extrapolated function values as an array or a
              scalar, depending on x being an array or a number
    """
    # :param scalar: True if x is a scalar value, False if x is an array. This
    #                parameter is set by the "interpolation"-decorator and is not
    #                exposed to the user of the interpolation function.
    if not scalar:
        return _stairs_lin(x, xs, ys)
    # the helper function expects an array, so the scalar is wrapped and unwrapped
    return _stairs_lin(numpy.array((x,)), xs, ys)[0]
@interpolation
def stairs_log(x, xs, ys, scalar):
    """An interpolation, that keeps the value of the nearest supporting point
    constant, so the resulting function looks like stairs. The x-value, where
    it switches from the function value of one supporting point to the other,
    is the middle value between the surrounding supporting points on a
    logarithmic x-axis.

    :param x: an array or a scalar value, where the function shall be evaluated
    :param xs: an array of x values of the supporting points
    :param ys: an array of function values of the supporting points. This array
               must have the same length as the xs array.
    :returns: the interpolated or extrapolated function values as an array or a
              scalar, depending on x being an array or a number
    """
    # :param scalar: True if x is a scalar value, False if x is an array. This
    #                parameter is set by the "interpolation"-decorator and is not
    #                exposed to the user of the interpolation function.
    # outside the range of the supporting points, the value of the nearest
    # supporting point is extrapolated as a constant
    minimum = xs.min()
    maximum = xs.max()
    if scalar:
        if x < minimum:
            return ys[xs == minimum][0]
        if x > maximum:
            return ys[xs == maximum][0]
        return _stairs_lin(numpy.log2(numpy.array((x,))), numpy.log2(xs), ys)[0]
    # the actual base of the logarithm does not matter, but the log2 function
    # proved to be twice as fast as log or log10
    result = _stairs_lin(numpy.log2(x), numpy.log2(xs), ys)
    result[x < minimum] = ys[xs == minimum][0]
    result[x > maximum] = ys[xs == maximum][0]
    return result
|
JonasSC/SuMPF
|
sumpf/_internal/_interpolation.py
|
Python
|
lgpl-3.0
| 14,343
|
# coding: utf-8
"""
An API to insert and retrieve metadata on cloud artifacts.
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: v1alpha1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from grafeas.api_client import ApiClient
class GrafeasProjectsApi(object):
    """Client for the Grafeas projects endpoints.

    NOTE: This class was originally auto generated by the swagger code
    generator program. It has been refactored by hand: the per-endpoint
    boilerplate is shared in :meth:`_request`, while the public method
    signatures, error messages and request contents are unchanged.
    Ref: https://github.com/swagger-api/swagger-codegen
    """

    # keyword arguments, that every endpoint method accepts in addition to its
    # own parameters
    _CONTROL_PARAMS = ('async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout')

    def __init__(self, api_client=None):
        # fall back to a default client, so the API can be used without setup
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    def _request(self, method, resource_path, http_method, response_type,
                 all_params, required_params=(), path_param_names=(),
                 query_param_names=(), body_param_name=None, named=None,
                 kwargs=None):
        """Validates the arguments of an endpoint call and forwards the call
        to ``self.api_client.call_api``.

        :param str method: name of the public endpoint method (for error messages)
        :param str resource_path: URL template of the endpoint
        :param str http_method: the HTTP verb ('GET', 'POST', ...)
        :param str response_type: response type name used for deserialization
        :param all_params: names of the endpoint-specific parameters
        :param required_params: subset of all_params, that must not be None
        :param path_param_names: parameters, that are substituted into the path
        :param query_param_names: parameters, that are sent in the query string
        :param body_param_name: name of the parameter, that is sent as the body
        :param dict named: the explicitly named arguments of the endpoint method
        :param dict kwargs: the remaining keyword arguments of the endpoint method
        :return: the API client's result (the deserialized response data, or a
            request thread when called with ``async_req=True``)
        :raises TypeError: for unexpected keyword arguments
        :raises ValueError: when a required parameter is missing
        """
        params = dict(named or {})
        allowed = tuple(all_params) + self._CONTROL_PARAMS
        # dict.items() behaves like six.iteritems for this iteration-only use,
        # so the six dependency is no longer needed here
        for key, val in (kwargs or {}).items():
            if key not in allowed:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method %s" % (key, method)
                )
            params[key] = val
        for name in required_params:
            if params.get(name) is None:
                raise ValueError(
                    "Missing the required parameter `%s` when calling `%s`" % (name, method))  # noqa: E501
        path_params = {key: params[key] for key in path_param_names if key in params}
        query_params = [(key, params[key]) for key in query_param_names if key in params]
        header_params = {
            'Accept': self.api_client.select_header_accept(
                ['application/json']),  # noqa: E501
            'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
                ['application/json']),  # noqa: E501
        }
        body = params.get(body_param_name) if body_param_name else None
        # all projects endpoints use empty form/file parameters, no
        # authentication and no collection formats
        return self.api_client.call_api(
            resource_path, http_method,
            path_params,
            query_params,
            header_params,
            body=body,
            post_params=[],
            files={},
            response_type=response_type,  # noqa: E501
            auth_settings=[],
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats={})

    def create_project(self, body, **kwargs):  # noqa: E501
        """Creates a new project.  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.create_project(body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param ApiProject body: The project to create. (required)
        :return: object
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # the same call serves both the sync and the async case; only the
        # returned object differs (response data vs. request thread)
        kwargs['_return_http_data_only'] = True
        return self.create_project_with_http_info(body, **kwargs)  # noqa: E501

    def create_project_with_http_info(self, body, **kwargs):  # noqa: E501
        """Creates a new project.  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.create_project_with_http_info(body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param ApiProject body: The project to create. (required)
        :return: object
                 If the method is called asynchronously,
                 returns the request thread.
        """
        return self._request(
            method='create_project',
            resource_path='/v1alpha1/projects', http_method='POST',
            response_type='object',  # noqa: E501
            all_params=['body'], required_params=('body',),
            body_param_name='body',
            named={'body': body}, kwargs=kwargs)

    def delete_project(self, name, **kwargs):  # noqa: E501
        """Deletes the specified project.  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.delete_project(name, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str name: The name of the project in the form of `projects/{PROJECT_ID}`. (required)
        :return: object
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        return self.delete_project_with_http_info(name, **kwargs)  # noqa: E501

    def delete_project_with_http_info(self, name, **kwargs):  # noqa: E501
        """Deletes the specified project.  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.delete_project_with_http_info(name, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str name: The name of the project in the form of `projects/{PROJECT_ID}`. (required)
        :return: object
                 If the method is called asynchronously,
                 returns the request thread.
        """
        return self._request(
            method='delete_project',
            resource_path='/v1alpha1/{name=projects/*}', http_method='DELETE',
            response_type='object',  # noqa: E501
            all_params=['name'], required_params=('name',),
            path_param_names=('name',),
            named={'name': name}, kwargs=kwargs)

    def get_project(self, name, **kwargs):  # noqa: E501
        """Gets the specified project.  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_project(name, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str name: The name of the project in the form of `projects/{PROJECT_ID}`. (required)
        :return: ApiProject
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        return self.get_project_with_http_info(name, **kwargs)  # noqa: E501

    def get_project_with_http_info(self, name, **kwargs):  # noqa: E501
        """Gets the specified project.  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_project_with_http_info(name, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str name: The name of the project in the form of `projects/{PROJECT_ID}`. (required)
        :return: ApiProject
                 If the method is called asynchronously,
                 returns the request thread.
        """
        return self._request(
            method='get_project',
            resource_path='/v1alpha1/{name=projects/*}', http_method='GET',
            response_type='ApiProject',  # noqa: E501
            all_params=['name'], required_params=('name',),
            path_param_names=('name',),
            named={'name': name}, kwargs=kwargs)

    def list_projects(self, **kwargs):  # noqa: E501
        """Lists projects.  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.list_projects(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str filter: The filter expression.
        :param int page_size: Number of projects to return in the list.
        :param str page_token: Token to provide to skip to a particular spot in the list.
        :return: ApiListProjectsResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        return self.list_projects_with_http_info(**kwargs)  # noqa: E501

    def list_projects_with_http_info(self, **kwargs):  # noqa: E501
        """Lists projects.  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.list_projects_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str filter: The filter expression.
        :param int page_size: Number of projects to return in the list.
        :param str page_token: Token to provide to skip to a particular spot in the list.
        :return: ApiListProjectsResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        return self._request(
            method='list_projects',
            resource_path='/v1alpha1/projects', http_method='GET',
            response_type='ApiListProjectsResponse',  # noqa: E501
            all_params=['filter', 'page_size', 'page_token'],
            query_param_names=('filter', 'page_size', 'page_token'),
            named={}, kwargs=kwargs)
|
grafeas/client-python
|
grafeas/api/grafeas_projects_api.py
|
Python
|
apache-2.0
| 15,932
|
import json
import importlib
import site
from openaps.configurable import Configurable
class ExtraConfig (Configurable):
    """Configurable for a device's supplementary ("extra") ini file, which uses
    the same section name as the device but lives in a separate config file."""
    prefix = 'device'
    pass
class Device (Configurable):
    """A configured device: the pairing of a name with a vendor module.

    The configuration is split between the main ini file (``self.fields``) and
    an optional, separate "extra" ini file (``self.extra``), whose file name is
    stored in the ``extra`` field of the main configuration.
    """
    vendor = None
    required = ['name', 'vendor']
    optional = [ ]
    prefix = 'device'
    # NOTE(review): this is a class attribute, so the list of usages is shared
    # by all Device instances — confirm that register_uses is meant to collect
    # usages globally rather than per device.
    _uses = [ ]
    def __init__ (self, name, vendor):
        self.name = name
        self.vendor = vendor
        self.fields = dict(vendor=vendor.__name__)
        self.extra = ExtraConfig(name)
    def read (self, args=None, config=None):
        """Populates the device from argparse-style args and/or a config object.

        When args provide an ``extra`` template, the vendor-specific settings
        are routed into the separate extra config instead of the main one.
        """
        if args:
            self.name = args.name
            if getattr(args, 'extra', None):
                self.fields['extra'] = args.extra.format(name=self.name, **self.fields)
                self.vendor.set_config(args, self.extra)
            else:
                self.vendor.set_config(args, self)
        if config:
            # self.vendor.read_config(config)
            self.fields.update(dict(config.items(self.section_name( ))))
            # if the main config names an extra ini file, also load the section
            # with the same name from that file
            extra_ini = self.fields.get('extra', None)
            if extra_ini:
                extra = config.Read(name=extra_ini)
                self.extra.fields.update(dict(extra.items(self.section_name( ))))
    def get (self, k, *args):
        """Looks up a field in the main config, falling back to the extra
        config (and then to the optional default passed in *args)."""
        return self.fields.get(k, self.extra.get(k, *args))
    def format_url (self):
        """Formats a display string like ``vendor://name`` followed by the
        vendor's own device description."""
        parts = ['{0:s}://{1:s}'.format(self.vendor.__name__.split('.').pop( ), self.name), ]
        parts.append(self.vendor.display_device(self))
        return ''.join(parts)
    def register_uses (self, uses):
        """Records the given usages, skipping duplicates."""
        for u in uses.usages:
            if u not in self._uses:
                self._uses.append(u)
    def store (self, config):
        """Writes the device to config; an extra config, if present, is stored
        and saved to its own file first."""
        extra_ini = self.fields.get('extra', None)
        if extra_ini:
            extra = config.Read(name=extra_ini)
            self.extra.store(extra)
            extra.save( )
        super(Device, self).store(config)
    @classmethod
    def FromImport (klass, candidate, config=None):
        """Recreates a Device from an exported dict, resolving the dotted
        vendor name through the vendor registry."""
        name = candidate.get('name')
        fields = candidate.get(name)
        # site.addsitedir(fields.get('path'))
        # fields['vendor'] = importlib.import_module(fields['vendor'])
        from openaps import vendors
        vendor = vendors.lookup_dotted(fields['vendor'], config)
        inst = klass(name, vendor)
        inst.fields = fields
        inst.extra.fields = candidate['extra']
        return inst
    @classmethod
    def FromConfig (klass, vendors, config):
        """Scans all config sections with the ``device`` prefix and creates a
        Device for each of them."""
        devices = [ ]
        for candidate in config.sections( ):
            if candidate.startswith(klass.prefix):
                name = json.loads(candidate.split(' ').pop( ))
                vendor = vendors.lookup(config.get(candidate, 'vendor').split('.').pop( ), config)
                device = klass(name, vendor)
                device.read(config=config)
                devices.append(device)
        return devices
|
openaps/openaps
|
openaps/devices/device.py
|
Python
|
mit
| 2,563
|
"""
Public views
"""
from urllib.parse import quote_plus

from django.conf import settings
from django.shortcuts import redirect
from django.utils.http import urlquote_plus
from waffle.decorators import waffle_switch

from common.djangoapps.edxmako.shortcuts import render_to_response

from ..config import waffle
__all__ = ['register_redirect_to_lms', 'login_redirect_to_lms', 'howitworks', 'accessibility']
def register_redirect_to_lms(request):
    """
    This view redirects to the LMS register view. It is used to temporarily keep the old
    Studio signup url alive.
    """
    target = settings.FRONTEND_REGISTER_URL + _build_next_param(request)
    return redirect(target, permanent=True)
def login_redirect_to_lms(request):
    """
    This view redirects to the LMS login view. It is used for Django's LOGIN_URL
    setting, which is where unauthenticated requests to protected endpoints are redirected.
    """
    target = settings.FRONTEND_LOGIN_URL + _build_next_param(request)
    return redirect(target)
def _build_next_param(request):
    """Returns the ``?next=...`` query string for login/register redirects.

    Falls back to ``settings.LOGIN_REDIRECT_URL`` when the request has no
    ``next`` parameter; returns an empty string when neither is set.
    """
    next_url = request.GET.get('next') or settings.LOGIN_REDIRECT_URL
    if not next_url:
        # Warning: do not use `build_absolute_uri` when `next_url` is empty because `build_absolute_uri` would
        # build use the login url for the next url, which would cause a login redirect loop.
        return ''
    absolute_next_url = request.build_absolute_uri(next_url)
    # django.utils.http.urlquote_plus is deprecated since Django 3.0 (removed
    # in 4.0); for a plain string argument, the stdlib quote_plus is its direct
    # replacement.
    return '?next=' + quote_plus(absolute_next_url)
def howitworks(request):
    "Proxy view"
    # authenticated users are sent to their dashboard instead of the teaser page
    if not request.user.is_authenticated:
        return render_to_response('howitworks.html', {})
    return redirect('/home/')
@waffle_switch('{}.{}'.format(waffle.WAFFLE_NAMESPACE, waffle.ENABLE_ACCESSIBILITY_POLICY_PAGE))
def accessibility(request):
    """
    Display the accessibility accommodation form.
    """
    context = {'language_code': request.LANGUAGE_CODE}
    return render_to_response('accessibility.html', context)
|
stvstnfrd/edx-platform
|
cms/djangoapps/contentstore/views/public.py
|
Python
|
agpl-3.0
| 2,210
|
import abc
import json
import six
import unittest
import billboard
@six.add_metaclass(abc.ABCMeta)
class Base:
    """Shared test cases for a single Billboard chart.

    Concrete subclasses must implement :meth:`setUpClass` and set three class
    attributes there: ``chart`` (the downloaded chart data), ``expectedTitle``
    and ``expectedNumEntries``.
    """
    @classmethod
    @abc.abstractmethod
    def setUpClass(cls):
        # subclasses download the chart and set the expectations here
        pass
    def testDate(self):
        self.assertIsNotNone(self.chart.date)
    def testTitle(self):
        self.assertEqual(self.chart.title, self.expectedTitle)
    def testRanks(self):
        # the entries must be ranked 1..N without gaps or duplicates
        ranks = list(entry.rank for entry in self.chart)
        self.assertEqual(ranks, list(range(1, self.expectedNumEntries + 1)))
    def testEntriesValidity(self, skipTitleCheck=False):
        # every entry must carry plausible metadata; skipTitleCheck lets charts
        # without titles (e.g. the Artist 100) reuse this test
        self.assertEqual(len(self.chart), self.expectedNumEntries)
        for entry in self.chart:
            if not skipTitleCheck:
                self.assertGreater(len(entry.title), 0)
            self.assertGreater(len(entry.artist), 0)
            # TODO: Add this check back after we can parse images
            # self.assertGreater(len(entry.image), 0)
            self.assertTrue(1 <= entry.peakPos <= self.expectedNumEntries)
            self.assertTrue(0 <= entry.lastPos <= self.expectedNumEntries)
            self.assertGreaterEqual(entry.weeks, 1)
            self.assertIsInstance(entry.isNew, bool)
    def testEntriesConsistency(self):
        # an entry, that is new on the chart, cannot have a last-week position
        for entry in self.chart:
            if entry.isNew:
                self.assertEqual(0, entry.lastPos)
    def testJson(self):
        # the chart and every entry must serialize to valid JSON
        self.assertTrue(json.loads(self.chart.json()))
        for entry in self.chart:
            self.assertTrue(json.loads(entry.json()))
class TestCurrentHot100(Base, unittest.TestCase):
    """Runs the shared chart tests against the current Hot 100 chart."""
    @classmethod
    def setUpClass(cls):
        cls.chart = billboard.ChartData("hot-100")
        cls.expectedTitle = "The Hot 100"
        cls.expectedNumEntries = 100
class TestCurrentTraditionalJazzAlbums(Base, unittest.TestCase):
    """Runs the shared chart tests against a smaller, 15-entry album chart."""
    @classmethod
    def setUpClass(cls):
        cls.chart = billboard.ChartData("traditional-jazz-albums")
        cls.expectedTitle = "Traditional Jazz Albums"
        cls.expectedNumEntries = 15
class TestCurrentGreatestHot100Singles(Base, unittest.TestCase):
    """The Greatest Hot 100 Singles chart is special in that there are no past
    charts.
    """
    @classmethod
    def setUpClass(cls):
        cls.chart = billboard.ChartData("greatest-hot-100-singles")
        cls.expectedTitle = "Greatest of All Time Hot 100 Songs"
        cls.expectedNumEntries = 100
    def testDate(self):
        # The date is in fact None
        pass
    def testEntriesValidity(self):
        # overrides the base test: an all-time chart has no per-week metadata
        for entry in self.chart:
            self.assertIsNone(entry.peakPos)
            self.assertIsNone(entry.lastPos)
            self.assertIsNone(entry.weeks)
class TestCurrentArtist100(Base, unittest.TestCase):
    """The Artist 100 chart is special in that it does not have titles.
    """
    @classmethod
    def setUpClass(cls):
        cls.chart = billboard.ChartData("artist-100")
        cls.expectedTitle = "Artist 100"
        cls.expectedNumEntries = 100
    def testEntriesValidity(self):
        # run the shared checks without the title check, then make sure the
        # titles really are empty
        super(TestCurrentArtist100, self).testEntriesValidity(skipTitleCheck=True)
        for entry in self.chart:
            self.assertEqual(entry.title, "")  # This chart has no titles
|
guoguo12/billboard-charts
|
tests/test_current_charts.py
|
Python
|
mit
| 3,187
|
class TweetCriteria:
    """Accumulates search criteria for a tweet query using a fluent interface.

    Every setter returns ``self``, so calls can be chained, e.g.::

        criteria = TweetCriteria().setUsername('user').setMaxTweets(10)
    """

    def __init__(self):
        # initialize every criterion, so that reading an attribute, that was
        # never set explicitly, does not raise an AttributeError (the original
        # code only initialized maxTweets)
        self.maxTweets = 0
        self.username = None
        self.since = None
        self.until = None
        self.query = None
        self.lang = None

    def setUsername(self, username):
        """Restricts the search to tweets of the given user."""
        self.username = username
        return self

    def setSince(self, since):
        """Sets the earliest date of the searched tweets."""
        self.since = since
        return self

    def setUntil(self, until):
        """Sets the latest date of the searched tweets."""
        self.until = until
        return self

    def setQuerySearch(self, query):
        """Sets the full text search query."""
        self.query = query
        return self

    def setMaxTweets(self, maxTweets):
        """Sets the maximum number of tweets to fetch (0 means no limit here)."""
        self.maxTweets = maxTweets
        return self

    def setLang(self, Lang):    # parameter name kept for backwards compatibility with keyword callers
        """Restricts the search to tweets in the given language."""
        self.lang = Lang
        return self
batuhaniskr/Social-Network-Tracking-And-Analysis
|
parser/operation/tweet_query.py
|
Python
|
mit
| 576
|
# encoding: utf-8
class Enum(object):
    """A simple enumeration base class.

    Subclasses define their members as class attributes; every attribute whose
    name does not start with an underscore is treated as a member. All
    accessors return lists/tuples instead of the one-shot iterators, that
    ``filter``/``map`` produce under Python 3, so their results can be
    iterated repeatedly and passed to ``len()`` (restoring the Python 2
    semantics of the original code).
    """

    @classmethod
    def get_keys(cls):
        """Returns the member names as a list."""
        return [key for key in cls.__dict__.keys() if not key.startswith('_')]

    @classmethod
    def items(cls):
        """Returns a list of (name, value) pairs."""
        return [(key, getattr(cls, key)) for key in cls.get_keys()]

    @classmethod
    def get_values(cls):
        """Returns the member values as a list."""
        return [getattr(cls, key) for key in cls.get_keys()]

    @classmethod
    def as_choices(cls):
        """Returns Django-style choices: a tuple of (value, name) pairs."""
        return tuple((value, cls.get_key_from_value(value)) for value in cls.get_values())

    @classmethod
    def inverted_choices(cls):
        """Returns a tuple of (name, value) pairs."""
        return tuple((key, getattr(cls, key)) for key in cls.get_keys())

    @classmethod
    def get_key_from_value(cls, value):
        """Returns the name of the first attribute with the given value, or
        None if there is no such member."""
        for key, val in cls.__dict__.items():
            if value == val:
                return key
        return None

    @classmethod
    def get_value(cls, key):
        """Returns the value of the member with the given name."""
        return getattr(cls, key)
|
OriHoch/Open-Knesset
|
knesset/enums.py
|
Python
|
bsd-3-clause
| 1,058
|
from coalib.bears.LocalBear import LocalBear
class SmellCapabilityBear(LocalBear):
    """A do-nothing test bear that advertises the ability to fix
    'Smell' issues in any language.
    """
    LANGUAGES = {'All'}
    CAN_FIX = {'Smell'}
    def run(self, filename, file):
        """No-op; this bear exists only for its capability metadata."""
        return None
|
MalkmusT/coala-quickstart
|
tests/test_bears/SmellCapabilityBear.py
|
Python
|
agpl-3.0
| 182
|
# -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2018 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
"""This module contains tools to help using Elastic Search"""
import logging
import json
import pytz
from datetime import datetime
from superdesk import app
logger = logging.getLogger(__name__)
# Repositories (content collections) searched by default.
REPOS = ["ingest", "archive", "published", "archived"]
# Saved-search filter key -> Elasticsearch field used in the post filter.
POST_FILTER_MAP = {
    "type": "type",
    "desk": "task.desk",
    "genre": "genre.name",
    "category": "anpa_category.name",
    "urgency": "urgency",
    "priority": "priority",
    "source": "source",
    "legal": "flags.marked_for_legal",
    "sms": "flags.marked_for_sms",
    "language": "language",
}
# Default controlled vocabularies used for searching; can be overridden
# with the "search_cvs" application setting (see filter2query).
SEARCH_CVS = [
    {"id": "subject", "name": "Subject", "field": "subject", "list": "subjectcodes"},
    {"id": "companycodes", "name": "Company Codes", "field": "company_codes", "list": "company_codes"},
]
# Format accepted for user-supplied date range boundaries (day/month/year).
DATE_FORMAT = "%d/%m/%Y"
# Named relative date filters, expressed in ES date-math syntax.
# NOTE(review): "last_7_hours" maps to now-8H, which looks inconsistent
# with its name -- confirm whether this is intentional.
DATE_FILTERS = {
    "last_month": {"lte": "now-1M/M", "gte": "now-1M/M"},
    "last_week": {"lte": "now-1w/w", "gte": "now-1w/w"},
    "last_day": {"lte": "now-1d/d", "gte": "now-1d/d"},
    "last_24_hours": {"gte": "now-24H"},
    "last_7_hours": {"gte": "now-8H"},
}
# Document fields that accept the date filters above.
DATE_FIELDS = ("firstcreated", "versioncreated", "firstpublished", "schedule_settings.utc_publish_schedule")
def get_index(repos=None):
    """Return a comma-separated string of ES index names for *repos*.

    Falls back to the default repository list when *repos* is None.
    The application's base elastic index is always included.
    """
    repo_list = REPOS if repos is None else repos
    default_index = app.data.elastic.index
    index_config = app.config["ELASTICSEARCH_INDEXES"]
    indexes = {default_index}
    indexes.update(index_config.get(name, default_index) for name in repo_list)
    return ",".join(indexes)
def get_docs(query_result):
    """Pull the ``_source`` documents out of an ES query result.

    :param dict query_result: ES query result, as returned by service.search
    :return list: found documents (empty when the result lacks a hits section)
    """
    try:
        extracted = [hit["_source"] for hit in query_result["hits"]["hits"]]
    except KeyError:
        # Malformed/unexpected response shape: log it and return nothing.
        logger.warning(u"Can't retrieve doc from ES results: {data}".format(data=query_result))
        extracted = []
    return extracted
def filter2query(filter_, user_id=None):
    """Convert filter syntax to ElasticSearch query

    :param dict filter_: filter syntax, as used in saved_searches endpoint
    :param user_id: when given, drafts belonging to other users are
        excluded from the results
    :return tuple: (repos, query) -- *repos* is a list of repository names
        (or None when the filter did not specify any) and *query* is an
        ElasticSearch query DSL dict usable with service.search method
    """
    # we'll delete keys while we handle them, to check that all have been managed at the end
    search_query = filter_["query"].copy()
    query_must = []
    query_must_not = []
    post_filter = []
    post_filter_must_not = []
    # controlled vocabularies can be overriden in settings
    search_cvs = app.config.get("search_cvs", SEARCH_CVS)
    for cv in search_cvs:
        if cv["id"] in search_query and cv["field"] != cv["id"]:
            terms = json.loads(search_query.pop(cv["id"]))
            query_must.append({"terms": {cv["field"] + ".qcode": terms}})
    try:
        query_string = search_query.pop("q")
    except KeyError:
        pass
    else:
        for cv in search_cvs:
            if cv["field"] != cv["id"]:
                # FIX: str.replace returns a new string (the original code
                # discarded the result), and the replacement text had a
                # typo ("q.code:(" instead of ".qcode:(").
                query_string = query_string.replace(cv["id"] + ".qcode:(", cv["field"] + ".qcode:(")
        query_must.append({"query_string": {"query": query_string, "default_operator": "AND"}})
    to_delete = []
    for key, value in search_query.items():
        if key == "from_desk":
            # value has the form "<desk_id>-authoring" or "<desk_id>-production"
            desk = value.split("-")
            if len(desk) == 2:
                if desk[1] == "authoring":
                    query_must.append({"term": {"task.last_authoring_desk": desk[0]}})
                else:
                    query_must.append({"term": {"task.last_production_desk": desk[0]}})
            else:
                logger.warning('unexpected "from_desk" value: {value}'.format(value=value))
        elif key == "to_desk":
            desk = value.split("-")
            if len(desk) == 2:
                query_must.append({"term": {"task.desk": desk[0]}})
                if "from_desk" not in filter_["query"]:
                    if desk[1] == "authoring":
                        field = "task.last_production_desk"
                    else:
                        field = "task.last_authoring_desk"
                    # FIX: the ES exists query expects {"exists": {"field": <name>}};
                    # the original used the field name itself as the key.
                    query_must.append({"exists": {"field": field}})
            else:
                # FIX: this warning wrongly referred to "from_desk".
                logger.warning('unexpected "to_desk" value: {value}'.format(value=value))
        elif key == "spike":
            if value == "include":
                pass
            elif value == "only":
                query_must.append({"term": {"state": "spiked"}})
            else:
                query_must_not.append({"term": {"state": "spiked"}})
        elif key == "featuremedia" and value:
            query_must.append({"exists": {"field": "associations.featuremedia"}})
        elif key == "subject":
            terms = json.loads(value)
            query_must.append(
                {
                    "bool": {
                        "should": [{"terms": {"subject.qcode": terms}}, {"terms": {"subject.parent": terms}}],
                        "minimum_should_match": 1,
                    }
                }
            )
        elif key == "company_codes":
            terms = json.loads(value)
            query_must.append({"terms": {"company_codes.qcode": terms}})
        elif key == "marked_desks":
            terms = json.loads(value)
            query_must.append({"terms": {"marked_desks.desk_id": terms}})
        elif key == "ignoreKilled":
            query_must_not.append({"terms": {"state": ["killed", "recalled"]}})
        elif key == "onlyLastPublished":
            query_must_not.append({"term": {"last_published_version": "false"}})
        elif key == "ignoreScheduled":
            query_must_not.append({"term": {"state": "scheduled"}})
        else:
            continue
        to_delete.append(key)
    for key in to_delete:
        del search_query[key]
    # simple keyword post filters ("desk", "urgency", ...) and their
    # negated counterparts ("notdesk", "noturgency", ...)
    for key, field in POST_FILTER_MAP.items():
        value = search_query.pop(key, None)
        if value is not None:
            try:
                post_filter.append({"terms": {field: json.loads(value)}})
            except TypeError as e:
                logger.error('Invalid data received for post filter "{key}": {e}\ndata: {value}'.format(
                    key=key, e=e, value=value))
                # the value is probably not JSON encoded as expected, we try directly the value
                post_filter.append({"terms": {field: value}})
        else:
            value = search_query.pop("not" + key, None)
            if value is not None:
                post_filter_must_not.append({"terms": {field: json.loads(value)}})
    # ingest provider
    ingest_provider = search_query.pop("ingest_provider", None)
    if ingest_provider is not None:
        post_filter.append({"term": {"ingest_provider": ingest_provider}})
    # used by AAP multimedia datalayer
    credit_qcode = search_query.pop("creditqcode", None)
    if credit_qcode is not None:
        values = json.loads(credit_qcode)
        post_filter.append({"terms": {"credit": [v["value"] for v in values]}})
    # date filters
    tz = pytz.timezone(app.config["DEFAULT_TIMEZONE"])
    range_ = {}
    to_delete = []
    for field in DATE_FIELDS:
        value = search_query.get(field)
        if value in DATE_FILTERS:
            # named relative filter (e.g. "last_week")
            range_[field] = DATE_FILTERS[value]
            to_delete.append(field)
        else:
            # explicit "<field>to"/"<field>from" boundaries
            field_suff = field + "to"
            value = search_query.get(field_suff)
            if value:
                to_delete.append(field_suff)
                field_range = range_.setdefault(field, {})
                try:
                    date = datetime.strptime(value, DATE_FORMAT)
                except ValueError:
                    # the value doesn't correspond to DATE_FORMAT,
                    # it may be using ES date math syntax
                    field_range["lte"] = value
                else:
                    # FIX: an inclusive upper bound must extend to the end
                    # of the day; the original used datetime.min.time(),
                    # which excluded the whole "to" day.
                    date = tz.localize(datetime.combine(date, datetime.max.time()))
                    field_range["lte"] = date.isoformat()
            field_suff = field + "from"
            value = search_query.get(field_suff)
            if value:
                to_delete.append(field_suff)
                field_range = range_.setdefault(field, {})
                try:
                    date = datetime.strptime(value, DATE_FORMAT)
                except ValueError:
                    # same as above
                    field_range["gte"] = value
                else:
                    # FIX: a lower bound must start at the beginning of the
                    # day; the original used datetime.max.time(), which made
                    # a same-day from/to range empty.
                    date = tz.localize(datetime.combine(date, datetime.min.time()))
                    field_range["gte"] = date.isoformat()
    if range_:
        post_filter.append({"range": range_})
    for key in to_delete:
        del search_query[key]
    # remove other users drafts
    if user_id is not None:
        query_must.append(
            {
                "bool": {
                    "should": [
                        {"bool": {"must": [{"term": {"state": "draft"}}, {"term": {"original_creator": user_id}}]}},
                        {"bool": {"must_not": {"terms": {"state": ["draft"]}}}},
                    ]
                }
            }
        )
    # this is needed for archived collection
    query_must_not.append({"term": {"package_type": "takes"}})
    query = {"query": {"bool": {"must": query_must, "must_not": query_must_not}}}
    if post_filter or post_filter_must_not:
        query["post_filter"] = {"bool": {"must": post_filter, "must_not": post_filter_must_not}}
    repo = search_query.pop("repo", None)
    repos = repo.split(',') if repo is not None else None
    if "params" in search_query and (search_query['params'] is None or not json.loads(search_query['params'])):
        del search_query['params']
    if search_query:
        logger.warning(
            "All query fields have not been used, remaining fields: {search_query}".format(search_query=search_query)
        )
    return repos, query
|
mdhaman/superdesk-core
|
superdesk/es_utils.py
|
Python
|
agpl-3.0
| 10,087
|
# coding=utf-8
# This is the winner of a comparison of multiple
# methods for removing duplicates from a list, shamelessly taken from
#
# http://www.peterbe.com/plog/uniqifiers-benchmark
#
# remember: if you dont need to preserve the order you can simply
# type:
#
# >>>list(set(my_list))
def removeDuplicates(seq, idfun=None):
    """Remove all duplicates from *seq*, preserving the original order.

    :param seq: iterable of items to de-duplicate
    :param idfun: optional key function; two items are duplicates when
        ``idfun`` maps them to equal (hashable) markers
    :return: a new list keeping only the first occurrence of each item

    >>> a=list('ABeeECcc')
    >>> removeDuplicates(a)
    ['A', 'B', 'e', 'E', 'C', 'c']
    >>> removeDuplicates(a, lambda x: x.lower())
    ['A', 'B', 'e', 'C']
    """
    # order preserving
    if idfun is None:
        def idfun(x):
            return x
    # A set is the idiomatic membership structure here (the original used
    # a dict with dummy values); markers must be hashable either way.
    seen = set()
    result = []
    for item in seq:
        marker = idfun(item)
        if marker in seen:
            continue
        seen.add(marker)
        result.append(item)
    return result
if __name__ == '__main__':
    # Running this module directly executes the doctests embedded in the
    # docstrings above.
    import doctest
    doctest.testmod()
|
radjkarl/fancyTools
|
fancytools/fcollections/removeDuplicates.py
|
Python
|
gpl-3.0
| 1,059
|
import re
if __name__ == '__main__':
    # Read n lines and print those that start with "hi " (any case)
    # where the next word does not begin with 'D' or 'd'.
    n = int(input())
    # FIX: inside a character class '|' is a literal, so the original
    # [H|h]/[I|i] wrongly matched a pipe character and [^D|d] wrongly
    # rejected one; [Hh][Ii]\s[^Dd] is the intended pattern.
    pattern = re.compile(r'^[Hh][Ii]\s[^Dd]')
    for _ in range(n):
        s = input()
        if pattern.match(s):
            print(s)
|
MrinmoiHossain/HackerRank
|
Regex/Applications/Saying Hi.py
|
Python
|
mit
| 181
|
"""Blocking and non-blocking HTTP client interfaces.
This module defines a common interface shared by two implementations,
``simple_httpclient`` and ``curl_httpclient``. Applications may either
instantiate their chosen implementation class directly or use the
`AsyncHTTPClient` class from this module, which selects an implementation
that can be overridden with the `AsyncHTTPClient.configure` method.
The default implementation is ``simple_httpclient``, and this is expected
to be suitable for most users' needs. However, some applications may wish
to switch to ``curl_httpclient`` for reasons such as the following:
* ``curl_httpclient`` has some features not found in ``simple_httpclient``,
including support for HTTP proxies and the ability to use a specified
network interface.
* ``curl_httpclient`` is more likely to be compatible with sites that are
not-quite-compliant with the HTTP spec, or sites that use little-exercised
features of HTTP.
* ``curl_httpclient`` is faster.
Note that if you are using ``curl_httpclient``, it is highly
recommended that you use a recent version of ``libcurl`` and
``pycurl``. Currently the minimum supported version of libcurl is
7.22.0, and the minimum version of pycurl is 7.18.2. It is highly
recommended that your ``libcurl`` installation is built with
asynchronous DNS resolver (threaded or c-ares), otherwise you may
encounter various problems with request timeouts (for more
information, see
http://curl.haxx.se/libcurl/c/curl_easy_setopt.html#CURLOPTCONNECTTIMEOUTMS
and comments in curl_httpclient.py).
To select ``curl_httpclient``, call `AsyncHTTPClient.configure` at startup::
AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient")
"""
import datetime
import functools
from io import BytesIO
import ssl
import time
import weakref
from tornado.concurrent import (
Future,
future_set_result_unless_cancelled,
future_set_exception_unless_cancelled,
)
from tornado.escape import utf8, native_str
from tornado import gen, httputil
from tornado.ioloop import IOLoop
from tornado.util import Configurable
from typing import Type, Any, Union, Dict, Callable, Optional, cast
class HTTPClient(object):
    """A blocking HTTP client.
    This interface is provided to make it easier to share code between
    synchronous and asynchronous applications. Applications that are
    running an `.IOLoop` must use `AsyncHTTPClient` instead.
    Typical usage looks like this::
        http_client = httpclient.HTTPClient()
        try:
            response = http_client.fetch("http://www.google.com/")
            print(response.body)
        except httpclient.HTTPError as e:
            # HTTPError is raised for non-200 responses; the response
            # can be found in e.response.
            print("Error: " + str(e))
        except Exception as e:
            # Other errors are possible, such as IOError.
            print("Error: " + str(e))
        http_client.close()
    .. versionchanged:: 5.0
       Due to limitations in `asyncio`, it is no longer possible to
       use the synchronous ``HTTPClient`` while an `.IOLoop` is running.
       Use `AsyncHTTPClient` instead.
    """
    def __init__(
        self,
        async_client_class: "Optional[Type[AsyncHTTPClient]]" = None,
        **kwargs: Any
    ) -> None:
        # Initialize self._closed at the beginning of the constructor
        # so that an exception raised here doesn't lead to confusing
        # failures in __del__.
        self._closed = True
        # A private IOLoop drives the wrapped async client to completion.
        self._io_loop = IOLoop(make_current=False)
        if async_client_class is None:
            async_client_class = AsyncHTTPClient
        # Create the client while our IOLoop is "current", without
        # clobbering the thread's real current IOLoop (if any).
        async def make_client() -> "AsyncHTTPClient":
            await gen.sleep(0)
            assert async_client_class is not None
            return async_client_class(**kwargs)
        self._async_client = self._io_loop.run_sync(make_client)
        self._closed = False
    def __del__(self) -> None:
        # Best-effort cleanup in case the caller forgot to call close().
        self.close()
    def close(self) -> None:
        """Closes the HTTPClient, freeing any resources used."""
        # Guarded so repeated calls (e.g. explicit close then __del__)
        # are harmless no-ops.
        if not self._closed:
            self._async_client.close()
            self._io_loop.close()
            self._closed = True
    def fetch(
        self, request: Union["HTTPRequest", str], **kwargs: Any
    ) -> "HTTPResponse":
        """Executes a request, returning an `HTTPResponse`.
        The request may be either a string URL or an `HTTPRequest` object.
        If it is a string, we construct an `HTTPRequest` using any additional
        kwargs: ``HTTPRequest(request, **kwargs)``
        If an error occurs during the fetch, we raise an `HTTPError` unless
        the ``raise_error`` keyword argument is set to False.
        """
        # Block until the async fetch finishes on our private IOLoop.
        response = self._io_loop.run_sync(
            functools.partial(self._async_client.fetch, request, **kwargs)
        )
        return response
class AsyncHTTPClient(Configurable):
    """An non-blocking HTTP client.
    Example usage::
        async def f():
            http_client = AsyncHTTPClient()
            try:
                response = await http_client.fetch("http://www.google.com")
            except Exception as e:
                print("Error: %s" % e)
            else:
                print(response.body)
    The constructor for this class is magic in several respects: It
    actually creates an instance of an implementation-specific
    subclass, and instances are reused as a kind of pseudo-singleton
    (one per `.IOLoop`). The keyword argument ``force_instance=True``
    can be used to suppress this singleton behavior. Unless
    ``force_instance=True`` is used, no arguments should be passed to
    the `AsyncHTTPClient` constructor. The implementation subclass as
    well as arguments to its constructor can be set with the static
    method `configure()`
    All `AsyncHTTPClient` implementations support a ``defaults``
    keyword argument, which can be used to set default values for
    `HTTPRequest` attributes. For example::
        AsyncHTTPClient.configure(
            None, defaults=dict(user_agent="MyUserAgent"))
        # or with force_instance:
        client = AsyncHTTPClient(force_instance=True,
                                 defaults=dict(user_agent="MyUserAgent"))
    .. versionchanged:: 5.0
       The ``io_loop`` argument (deprecated since version 4.1) has been removed.
    """
    # Per-IOLoop instance cache this client registered itself in, or None
    # when created with force_instance=True (see __new__/close).
    _instance_cache = None  # type: Dict[IOLoop, AsyncHTTPClient]
    @classmethod
    def configurable_base(cls) -> Type[Configurable]:
        return AsyncHTTPClient
    @classmethod
    def configurable_default(cls) -> Type[Configurable]:
        # Imported here rather than at module level to avoid a circular
        # import with tornado.simple_httpclient.
        from tornado.simple_httpclient import SimpleAsyncHTTPClient
        return SimpleAsyncHTTPClient
    @classmethod
    def _async_clients(cls) -> Dict[IOLoop, "AsyncHTTPClient"]:
        # Lazily create one cache dict per concrete subclass, keyed weakly
        # by IOLoop so a closed loop doesn't keep its client alive.
        attr_name = "_async_client_dict_" + cls.__name__
        if not hasattr(cls, attr_name):
            setattr(cls, attr_name, weakref.WeakKeyDictionary())
        return getattr(cls, attr_name)
    def __new__(cls, force_instance: bool = False, **kwargs: Any) -> "AsyncHTTPClient":
        io_loop = IOLoop.current()
        if force_instance:
            instance_cache = None
        else:
            instance_cache = cls._async_clients()
        if instance_cache is not None and io_loop in instance_cache:
            # Reuse the pseudo-singleton already registered for this loop.
            return instance_cache[io_loop]
        instance = super(AsyncHTTPClient, cls).__new__(cls, **kwargs)  # type: ignore
        # Make sure the instance knows which cache to remove itself from.
        # It can't simply call _async_clients() because we may be in
        # __new__(AsyncHTTPClient) but instance.__class__ may be
        # SimpleAsyncHTTPClient.
        instance._instance_cache = instance_cache
        if instance_cache is not None:
            instance_cache[instance.io_loop] = instance
        return instance
    def initialize(self, defaults: Optional[Dict[str, Any]] = None) -> None:
        self.io_loop = IOLoop.current()
        # Start from HTTPRequest's defaults, then layer caller overrides.
        self.defaults = dict(HTTPRequest._DEFAULTS)
        if defaults is not None:
            self.defaults.update(defaults)
        self._closed = False
    def close(self) -> None:
        """Destroys this HTTP client, freeing any file descriptors used.
        This method is **not needed in normal use** due to the way
        that `AsyncHTTPClient` objects are transparently reused.
        ``close()`` is generally only necessary when either the
        `.IOLoop` is also being closed, or the ``force_instance=True``
        argument was used when creating the `AsyncHTTPClient`.
        No other methods may be called on the `AsyncHTTPClient` after
        ``close()``.
        """
        if self._closed:
            return
        self._closed = True
        if self._instance_cache is not None:
            cached_val = self._instance_cache.pop(self.io_loop, None)
            # If there's an object other than self in the instance
            # cache for our IOLoop, something has gotten mixed up. A
            # value of None appears to be possible when this is called
            # from a destructor (HTTPClient.__del__) as the weakref
            # gets cleared before the destructor runs.
            if cached_val is not None and cached_val is not self:
                raise RuntimeError("inconsistent AsyncHTTPClient cache")
    def fetch(
        self,
        request: Union[str, "HTTPRequest"],
        raise_error: bool = True,
        **kwargs: Any
    ) -> "Future[HTTPResponse]":
        """Executes a request, asynchronously returning an `HTTPResponse`.
        The request may be either a string URL or an `HTTPRequest` object.
        If it is a string, we construct an `HTTPRequest` using any additional
        kwargs: ``HTTPRequest(request, **kwargs)``
        This method returns a `.Future` whose result is an
        `HTTPResponse`. By default, the ``Future`` will raise an
        `HTTPError` if the request returned a non-200 response code
        (other errors may also be raised if the server could not be
        contacted). Instead, if ``raise_error`` is set to False, the
        response will always be returned regardless of the response
        code.
        If a ``callback`` is given, it will be invoked with the `HTTPResponse`.
        In the callback interface, `HTTPError` is not automatically raised.
        Instead, you must check the response's ``error`` attribute or
        call its `~HTTPResponse.rethrow` method.
        .. versionchanged:: 6.0
           The ``callback`` argument was removed. Use the returned
           `.Future` instead.
           The ``raise_error=False`` argument only affects the
           `HTTPError` raised when a non-200 response code is used,
           instead of suppressing all errors.
        """
        if self._closed:
            raise RuntimeError("fetch() called on closed AsyncHTTPClient")
        if not isinstance(request, HTTPRequest):
            request = HTTPRequest(url=request, **kwargs)
        else:
            if kwargs:
                raise ValueError(
                    "kwargs can't be used if request is an HTTPRequest object"
                )
        # We may modify this (to add Host, Accept-Encoding, etc),
        # so make sure we don't modify the caller's object. This is also
        # where normal dicts get converted to HTTPHeaders objects.
        request.headers = httputil.HTTPHeaders(request.headers)
        request_proxy = _RequestProxy(request, self.defaults)
        future = Future()  # type: Future[HTTPResponse]
        def handle_response(response: "HTTPResponse") -> None:
            # Resolve the caller's Future; raise_error only applies to
            # errors synthesized from the HTTP status code.
            if response.error:
                if raise_error or not response._error_is_response_code:
                    future_set_exception_unless_cancelled(future, response.error)
                    return
            future_set_result_unless_cancelled(future, response)
        self.fetch_impl(cast(HTTPRequest, request_proxy), handle_response)
        return future
    def fetch_impl(
        self, request: "HTTPRequest", callback: Callable[["HTTPResponse"], None]
    ) -> None:
        # Implemented by subclasses (simple_httpclient / curl_httpclient).
        raise NotImplementedError()
    @classmethod
    def configure(
        cls, impl: "Union[None, str, Type[Configurable]]", **kwargs: Any
    ) -> None:
        """Configures the `AsyncHTTPClient` subclass to use.
        ``AsyncHTTPClient()`` actually creates an instance of a subclass.
        This method may be called with either a class object or the
        fully-qualified name of such a class (or ``None`` to use the default,
        ``SimpleAsyncHTTPClient``)
        If additional keyword arguments are given, they will be passed
        to the constructor of each subclass instance created. The
        keyword argument ``max_clients`` determines the maximum number
        of simultaneous `~AsyncHTTPClient.fetch()` operations that can
        execute in parallel on each `.IOLoop`. Additional arguments
        may be supported depending on the implementation class in use.
        Example::
            AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient")
        """
        super(AsyncHTTPClient, cls).configure(impl, **kwargs)
class HTTPRequest(object):
    """HTTP client request object."""
    # Backing store for the ``headers`` property below.
    _headers = None  # type: Union[Dict[str, str], httputil.HTTPHeaders]
    # Default values for HTTPRequest parameters.
    # Merged with the values on the request object by AsyncHTTPClient
    # implementations.
    _DEFAULTS = dict(
        connect_timeout=20.0,
        request_timeout=20.0,
        follow_redirects=True,
        max_redirects=5,
        decompress_response=True,
        proxy_password="",
        allow_nonstandard_methods=False,
        validate_cert=True,
    )
    def __init__(
        self,
        url: str,
        method: str = "GET",
        headers: Optional[Union[Dict[str, str], httputil.HTTPHeaders]] = None,
        body: Optional[Union[bytes, str]] = None,
        auth_username: Optional[str] = None,
        auth_password: Optional[str] = None,
        auth_mode: Optional[str] = None,
        connect_timeout: Optional[float] = None,
        request_timeout: Optional[float] = None,
        if_modified_since: Optional[Union[float, datetime.datetime]] = None,
        follow_redirects: Optional[bool] = None,
        max_redirects: Optional[int] = None,
        user_agent: Optional[str] = None,
        use_gzip: Optional[bool] = None,
        network_interface: Optional[str] = None,
        streaming_callback: Optional[Callable[[bytes], None]] = None,
        header_callback: Optional[Callable[[str], None]] = None,
        prepare_curl_callback: Optional[Callable[[Any], None]] = None,
        proxy_host: Optional[str] = None,
        proxy_port: Optional[int] = None,
        proxy_username: Optional[str] = None,
        proxy_password: Optional[str] = None,
        proxy_auth_mode: Optional[str] = None,
        allow_nonstandard_methods: Optional[bool] = None,
        validate_cert: Optional[bool] = None,
        ca_certs: Optional[str] = None,
        allow_ipv6: Optional[bool] = None,
        client_key: Optional[str] = None,
        client_cert: Optional[str] = None,
        body_producer: Optional[
            Callable[[Callable[[bytes], None]], "Future[None]"]
        ] = None,
        expect_100_continue: bool = False,
        decompress_response: Optional[bool] = None,
        ssl_options: Optional[Union[Dict[str, Any], ssl.SSLContext]] = None,
    ) -> None:
        r"""All parameters except ``url`` are optional.
        :arg str url: URL to fetch
        :arg str method: HTTP method, e.g. "GET" or "POST"
        :arg headers: Additional HTTP headers to pass on the request
        :type headers: `~tornado.httputil.HTTPHeaders` or `dict`
        :arg body: HTTP request body as a string (byte or unicode; if unicode
           the utf-8 encoding will be used)
        :arg body_producer: Callable used for lazy/asynchronous request bodies.
           It is called with one argument, a ``write`` function, and should
           return a `.Future`. It should call the write function with new
           data as it becomes available. The write function returns a
           `.Future` which can be used for flow control.
           Only one of ``body`` and ``body_producer`` may
           be specified. ``body_producer`` is not supported on
           ``curl_httpclient``. When using ``body_producer`` it is recommended
           to pass a ``Content-Length`` in the headers as otherwise chunked
           encoding will be used, and many servers do not support chunked
           encoding on requests. New in Tornado 4.0
        :arg str auth_username: Username for HTTP authentication
        :arg str auth_password: Password for HTTP authentication
        :arg str auth_mode: Authentication mode; default is "basic".
           Allowed values are implementation-defined; ``curl_httpclient``
           supports "basic" and "digest"; ``simple_httpclient`` only supports
           "basic"
        :arg float connect_timeout: Timeout for initial connection in seconds,
           default 20 seconds
        :arg float request_timeout: Timeout for entire request in seconds,
           default 20 seconds
        :arg if_modified_since: Timestamp for ``If-Modified-Since`` header
        :type if_modified_since: `datetime` or `float`
        :arg bool follow_redirects: Should redirects be followed automatically
           or return the 3xx response? Default True.
        :arg int max_redirects: Limit for ``follow_redirects``, default 5.
        :arg str user_agent: String to send as ``User-Agent`` header
        :arg bool decompress_response: Request a compressed response from
           the server and decompress it after downloading. Default is True.
           New in Tornado 4.0.
        :arg bool use_gzip: Deprecated alias for ``decompress_response``
           since Tornado 4.0.
        :arg str network_interface: Network interface or source IP to use for request.
           See ``curl_httpclient`` note below.
        :arg collections.abc.Callable streaming_callback: If set, ``streaming_callback`` will
           be run with each chunk of data as it is received, and
           ``HTTPResponse.body`` and ``HTTPResponse.buffer`` will be empty in
           the final response.
        :arg collections.abc.Callable header_callback: If set, ``header_callback`` will
           be run with each header line as it is received (including the
           first line, e.g. ``HTTP/1.0 200 OK\r\n``, and a final line
           containing only ``\r\n``. All lines include the trailing newline
           characters). ``HTTPResponse.headers`` will be empty in the final
           response. This is most useful in conjunction with
           ``streaming_callback``, because it's the only way to get access to
           header data while the request is in progress.
        :arg collections.abc.Callable prepare_curl_callback: If set, will be called with
           a ``pycurl.Curl`` object to allow the application to make additional
           ``setopt`` calls.
        :arg str proxy_host: HTTP proxy hostname. To use proxies,
           ``proxy_host`` and ``proxy_port`` must be set; ``proxy_username``,
           ``proxy_pass`` and ``proxy_auth_mode`` are optional. Proxies are
           currently only supported with ``curl_httpclient``.
        :arg int proxy_port: HTTP proxy port
        :arg str proxy_username: HTTP proxy username
        :arg str proxy_password: HTTP proxy password
        :arg str proxy_auth_mode: HTTP proxy Authentication mode;
           default is "basic". supports "basic" and "digest"
        :arg bool allow_nonstandard_methods: Allow unknown values for ``method``
           argument? Default is False.
        :arg bool validate_cert: For HTTPS requests, validate the server's
           certificate? Default is True.
        :arg str ca_certs: filename of CA certificates in PEM format,
           or None to use defaults. See note below when used with
           ``curl_httpclient``.
        :arg str client_key: Filename for client SSL key, if any. See
           note below when used with ``curl_httpclient``.
        :arg str client_cert: Filename for client SSL certificate, if any.
           See note below when used with ``curl_httpclient``.
        :arg ssl.SSLContext ssl_options: `ssl.SSLContext` object for use in
           ``simple_httpclient`` (unsupported by ``curl_httpclient``).
           Overrides ``validate_cert``, ``ca_certs``, ``client_key``,
           and ``client_cert``.
        :arg bool allow_ipv6: Use IPv6 when available? Default is True.
        :arg bool expect_100_continue: If true, send the
           ``Expect: 100-continue`` header and wait for a continue response
           before sending the request body. Only supported with
           ``simple_httpclient``.
        .. note::
            When using ``curl_httpclient`` certain options may be
            inherited by subsequent fetches because ``pycurl`` does
            not allow them to be cleanly reset. This applies to the
            ``ca_certs``, ``client_key``, ``client_cert``, and
            ``network_interface`` arguments. If you use these
            options, you should pass them on every request (you don't
            have to always use the same values, but it's not possible
            to mix requests that specify these options with ones that
            use the defaults).
        .. versionadded:: 3.1
           The ``auth_mode`` argument.
        .. versionadded:: 4.0
           The ``body_producer`` and ``expect_100_continue`` arguments.
        .. versionadded:: 4.2
           The ``ssl_options`` argument.
        .. versionadded:: 4.5
           The ``proxy_auth_mode`` argument.
        """
        # Note that some of these attributes go through property setters
        # defined below.
        self.headers = headers  # type: ignore
        if if_modified_since:
            self.headers["If-Modified-Since"] = httputil.format_timestamp(
                if_modified_since
            )
        self.proxy_host = proxy_host
        self.proxy_port = proxy_port
        self.proxy_username = proxy_username
        self.proxy_password = proxy_password
        self.proxy_auth_mode = proxy_auth_mode
        self.url = url
        self.method = method
        self.body = body  # type: ignore
        self.body_producer = body_producer
        self.auth_username = auth_username
        self.auth_password = auth_password
        self.auth_mode = auth_mode
        self.connect_timeout = connect_timeout
        self.request_timeout = request_timeout
        self.follow_redirects = follow_redirects
        self.max_redirects = max_redirects
        self.user_agent = user_agent
        # decompress_response wins over its deprecated alias use_gzip.
        if decompress_response is not None:
            self.decompress_response = decompress_response  # type: Optional[bool]
        else:
            self.decompress_response = use_gzip
        self.network_interface = network_interface
        self.streaming_callback = streaming_callback
        self.header_callback = header_callback
        self.prepare_curl_callback = prepare_curl_callback
        self.allow_nonstandard_methods = allow_nonstandard_methods
        self.validate_cert = validate_cert
        self.ca_certs = ca_certs
        self.allow_ipv6 = allow_ipv6
        self.client_key = client_key
        self.client_cert = client_cert
        self.ssl_options = ssl_options
        self.expect_100_continue = expect_100_continue
        # Wall-clock creation time; used for request timing.
        self.start_time = time.time()
    @property
    def headers(self) -> httputil.HTTPHeaders:
        # TODO: headers may actually be a plain dict until fairly late in
        # the process (AsyncHTTPClient.fetch), but practically speaking,
        # whenever the property is used they're already HTTPHeaders.
        return self._headers  # type: ignore
    @headers.setter
    def headers(self, value: Union[Dict[str, str], httputil.HTTPHeaders]) -> None:
        # None normalizes to an empty HTTPHeaders object.
        if value is None:
            self._headers = httputil.HTTPHeaders()
        else:
            self._headers = value  # type: ignore
    @property
    def body(self) -> bytes:
        return self._body
    @body.setter
    def body(self, value: Union[bytes, str]) -> None:
        # str bodies are encoded to utf-8 bytes on assignment.
        self._body = utf8(value)
class HTTPResponse(object):
    """HTTP Response object.
    Attributes:
    * ``request``: HTTPRequest object
    * ``code``: numeric HTTP status code, e.g. 200 or 404
    * ``reason``: human-readable reason phrase describing the status code
    * ``headers``: `tornado.httputil.HTTPHeaders` object
    * ``effective_url``: final location of the resource after following any
      redirects
    * ``buffer``: ``cStringIO`` object for response body
    * ``body``: response body as bytes (created on demand from ``self.buffer``)
    * ``error``: Exception object, if any
    * ``request_time``: seconds from request start to finish. Includes all
      network operations from DNS resolution to receiving the last byte of
      data. Does not include time spent in the queue (due to the
      ``max_clients`` option). If redirects were followed, only includes
      the final request.
    * ``start_time``: Time at which the HTTP operation started, based on
      `time.time` (not the monotonic clock used by `.IOLoop.time`). May
      be ``None`` if the request timed out while in the queue.
    * ``time_info``: dictionary of diagnostic timing information from the
      request. Available data are subject to change, but currently uses timings
      available from http://curl.haxx.se/libcurl/c/curl_easy_getinfo.html,
      plus ``queue``, which is the delay (if any) introduced by waiting for
      a slot under `AsyncHTTPClient`'s ``max_clients`` setting.
    .. versionadded:: 5.1
       Added the ``start_time`` attribute.
    .. versionchanged:: 5.1
       The ``request_time`` attribute previously included time spent in the queue
       for ``simple_httpclient``, but not in ``curl_httpclient``. Now queueing time
       is excluded in both implementations. ``request_time`` is now more accurate for
       ``curl_httpclient`` because it uses a monotonic clock when available.
    """
    # I'm not sure why these don't get type-inferred from the references in __init__.
    error = None  # type: Optional[BaseException]
    # True when ``error`` was synthesized from a non-2xx status code
    # (rather than a transport-level failure); see __init__.
    _error_is_response_code = False
    request = None  # type: HTTPRequest
    def __init__(
        self,
        request: HTTPRequest,
        code: int,
        headers: Optional[httputil.HTTPHeaders] = None,
        buffer: Optional[BytesIO] = None,
        effective_url: Optional[str] = None,
        error: Optional[BaseException] = None,
        request_time: Optional[float] = None,
        time_info: Optional[Dict[str, float]] = None,
        reason: Optional[str] = None,
        start_time: Optional[float] = None,
    ) -> None:
        # Unwrap _RequestProxy so ``response.request`` is the user's object.
        if isinstance(request, _RequestProxy):
            self.request = request.request
        else:
            self.request = request
        self.code = code
        self.reason = reason or httputil.responses.get(code, "Unknown")
        if headers is not None:
            self.headers = headers
        else:
            self.headers = httputil.HTTPHeaders()
        self.buffer = buffer
        self._body = None  # type: Optional[bytes]
        if effective_url is None:
            self.effective_url = request.url
        else:
            self.effective_url = effective_url
        self._error_is_response_code = False
        if error is None:
            # No transport error: synthesize an HTTPError for non-2xx codes.
            if self.code < 200 or self.code >= 300:
                self._error_is_response_code = True
                self.error = HTTPError(self.code, message=self.reason, response=self)
            else:
                self.error = None
        else:
            self.error = error
        self.start_time = start_time
        self.request_time = request_time
        self.time_info = time_info or {}
    @property
    def body(self) -> bytes:
        # Lazily read the buffer once and cache the result in _body.
        if self.buffer is None:
            return b""
        elif self._body is None:
            self._body = self.buffer.getvalue()
        return self._body
    def rethrow(self) -> None:
        """If there was an error on the request, raise an `HTTPError`."""
        if self.error:
            raise self.error
    def __repr__(self) -> str:
        args = ",".join("%s=%r" % i for i in sorted(self.__dict__.items()))
        return "%s(%s)" % (self.__class__.__name__, args)
class HTTPClientError(Exception):
    """Exception raised for an unsuccessful HTTP request.

    Attributes:

    * ``code`` - integer HTTP error code, e.g. 404.  Error code 599 is
      used when no HTTP response was received at all, e.g. for a timeout.
    * ``response`` - the `HTTPResponse` object, if one is available.

    Note that when ``follow_redirects`` is False, redirects become
    HTTPErrors; the redirect target can then be read from
    ``error.response.headers['Location']``.

    .. versionchanged:: 5.1

       Renamed from ``HTTPError`` to ``HTTPClientError`` to avoid collisions
       with `tornado.web.HTTPError`.  The name
       ``tornado.httpclient.HTTPError`` remains as an alias.
    """
    def __init__(
        self,
        code: int,
        message: Optional[str] = None,
        response: Optional[HTTPResponse] = None,
    ) -> None:
        self.code = code
        # An empty/None message falls back to the standard reason phrase.
        if message:
            self.message = message
        else:
            self.message = httputil.responses.get(code, "Unknown")
        self.response = response
        # Note: the *raw* message argument (possibly None) is what gets
        # stored in Exception.args, matching historical behavior.
        super(HTTPClientError, self).__init__(code, message, response)
    def __str__(self) -> str:
        return "HTTP %d: %s" % (self.code, self.message)
    # ``self`` and ``self.response`` form a reference cycle, which breaks
    # the default __repr__ (especially on pypy, whose recursion detection
    # differs from cpython's), so reuse __str__ instead.
    __repr__ = __str__
# Backwards-compatible alias: the class was renamed to HTTPClientError in 5.1.
HTTPError = HTTPClientError
class _RequestProxy(object):
    """Combines a request object with a dictionary of defaults.

    Attribute lookups prefer values set on the request itself; ``None``
    values fall through to the defaults dict.  Used internally by
    AsyncHTTPClient implementations.
    """
    def __init__(
        self, request: HTTPRequest, defaults: Optional[Dict[str, Any]]
    ) -> None:
        self.request = request
        self.defaults = defaults
    def __getattr__(self, name: str) -> Any:
        # Only reached for attributes not found normally; ``request`` and
        # ``defaults`` themselves are set in __init__ so no recursion occurs.
        value = getattr(self.request, name)
        if value is not None:
            return value
        if self.defaults is None:
            return None
        return self.defaults.get(name, None)
def main() -> None:
    """Command-line entry point: fetch each URL argument and print the result."""
    from tornado.options import define, options, parse_command_line
    define("print_headers", type=bool, default=False)
    define("print_body", type=bool, default=True)
    define("follow_redirects", type=bool, default=True)
    define("validate_cert", type=bool, default=True)
    define("proxy_host", type=str)
    define("proxy_port", type=int)
    args = parse_command_line()
    client = HTTPClient()
    for arg in args:
        try:
            response = client.fetch(
                arg,
                follow_redirects=options.follow_redirects,
                validate_cert=options.validate_cert,
                proxy_host=options.proxy_host,
                proxy_port=options.proxy_port,
            )
        except HTTPError as e:
            # Non-2xx responses still carry a response object; print it like
            # a success.  Errors with no response at all are re-raised.
            if e.response is not None:
                response = e.response
            else:
                raise
        if options.print_headers:
            print(response.headers)
        if options.print_body:
            print(native_str(response.body))
    client.close()
# Run the demo client when this module is executed as a script.
if __name__ == "__main__":
    main()
|
bdarnell/tornado
|
tornado/httpclient.py
|
Python
|
apache-2.0
| 31,825
|
from .heymac_hsm import HeymacCsmaHsm
from .heymac_frame import HeymacFrame, HeymacFrameError, HeymacFrameFctl, \
HeymacFramePidIdent, HeymacFramePidType, \
HeymacIe, HeymacIeError, HeymacIeSequence, \
HeymacHIeTerm, HeymacHIeSqncNmbr, HeymacHIeCipher, \
HeymacPIeTerm, HeymacPIeFrag0, HeymacPIeFragN, HeymacPIeMic
from .heymac_cmd import HeymacCmd, HeymacCmdError, HeymacCmdUnknown, \
HeymacCmdTxt, HeymacCmdBcn, HeymacCmdNgbrData, \
HeymacCmdJoinRqst, HeymacCmdJoinAcpt, HeymacCmdJoinCnfm, HeymacCmdJoinRjct
|
dwhall/HeyMac
|
heymac/lnk/__init__.py
|
Python
|
mit
| 535
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import pprint
import tempfile
from subprocess import Popen, PIPE
import os
from libfuturize.fixer_util import is_shebang_comment
from lib2to3.fixer_util import FromImport
from lib2to3.pytree import Leaf, Node
from lib2to3.pygram import token
from future.tests.base import (CodeHandler, unittest, skip26, reformat_code,
order_future_lines, expectedFailurePY26)
from future.utils import PY2
class TestLibFuturize(unittest.TestCase):
    def test_is_shebang_comment(self):
        """
        Tests whether the libfuturize.fixer_util.is_shebang_comment() function is working
        """
        node = FromImport(u'math', [Leaf(token.NAME, u'cos', prefix=" ")])
        # A shebang line placed in the node's prefix should be detected.
        node.prefix = u'#!/usr/bin/env python\n'
        self.assertTrue(is_shebang_comment(node))
class TestFuturizeSimple(CodeHandler):
    """
    This class contains snippets of Python 2 code (invalid Python 3) and
    tests for whether they can be passed to ``futurize`` and immediately
    run under both Python 2 again and Python 3.
    """
    # NOTE(review): convert_check()/unchanged() come from
    # future.tests.base.CodeHandler; these tests are not runnable in isolation.
    def setUp(self):
        # Each test writes its temporary scripts into a fresh directory.
        self.tempdir = tempfile.mkdtemp() + os.path.sep
        super(TestFuturizeSimple, self).setUp()
    def test_shebang_blank_with_future_division_import(self):
        """
        Issue #43: Is shebang line preserved as the first
        line by futurize when followed by a blank line?
        """
        before = """
        #!/usr/bin/env python
        import math
        1 / 5
        """
        after = """
        #!/usr/bin/env python
        from __future__ import division
        from past.utils import old_div
        import math
        old_div(1, 5)
        """
        self.convert_check(before, after)
    def test_shebang_blank_with_print_import(self):
        # Same as above, but with a print statement instead of division.
        before = """
        #!/usr/bin/env python
        import math
        print 'Hello'
        """
        after = """
        #!/usr/bin/env python
        from __future__ import print_function
        import math
        print('Hello')
        """
        self.convert_check(before, after)
    def test_shebang_comment(self):
        """
        Issue #43: Is shebang line preserved as the first
        line by futurize when followed by a comment?
        """
        before = """
        #!/usr/bin/env python
        # some comments
        # and more comments
        import math
        print 'Hello!'
        """
        after = """
        #!/usr/bin/env python
        # some comments
        # and more comments
        from __future__ import print_function
        import math
        print('Hello!')
        """
        self.convert_check(before, after)
    def test_shebang_docstring(self):
        """
        Issue #43: Is shebang line preserved as the first
        line by futurize when followed by a docstring?
        """
        before = '''
        #!/usr/bin/env python
        """
        a doc string
        """
        import math
        print 'Hello!'
        '''
        after = '''
        #!/usr/bin/env python
        """
        a doc string
        """
        from __future__ import print_function
        import math
        print('Hello!')
        '''
        self.convert_check(before, after)
    def test_oldstyle_classes(self):
        """
        Stage 2 should convert old-style to new-style classes. This makes
        the new-style class explicit and reduces the gap between the
        behaviour (e.g. method resolution order) on Py2 and Py3. It also
        allows us to provide ``newobject`` (see
        test_oldstyle_classes_iterator).
        """
        before = """
        class Blah:
            pass
        """
        after = """
        from builtins import object
        class Blah(object):
            pass
        """
        self.convert_check(before, after, ignore_imports=False)
    def test_oldstyle_classes_iterator(self):
        """
        An old-style class used as an iterator should be converted
        properly. This requires ``futurize`` to do both steps (adding
        inheritance from object and adding the newobject import) in the
        right order. Any next() method should also be renamed to __next__.
        """
        before = """
        class Upper:
            def __init__(self, iterable):
                self._iter = iter(iterable)
            def next(self):
                return next(self._iter).upper()
            def __iter__(self):
                return self
        assert list(Upper('hello')) == list('HELLO')
        """
        after = """
        from builtins import next
        from builtins import object
        class Upper(object):
            def __init__(self, iterable):
                self._iter = iter(iterable)
            def __next__(self):
                return next(self._iter).upper()
            def __iter__(self):
                return self
        assert list(Upper('hello')) == list('HELLO')
        """
        self.convert_check(before, after, ignore_imports=False)
        # Try it again with this convention: class Upper():
        before2 = """
        class Upper():
            def __init__(self, iterable):
                self._iter = iter(iterable)
            def next(self):
                return next(self._iter).upper()
            def __iter__(self):
                return self
        assert list(Upper('hello')) == list('HELLO')
        """
        self.convert_check(before2, after)
    @unittest.expectedFailure
    def test_problematic_string(self):
        """ This string generates a SyntaxError on Python 3 unless it has
        an r prefix.
        """
        before = r"""
        s = 'The folder is "C:\Users"'.
        """
        after = r"""
        s = r'The folder is "C:\Users"'.
        """
        self.convert_check(before, after)
    @unittest.skip('--tobytes feature removed for now ...')
    def test_tobytes(self):
        """
        The --tobytes option converts all UNADORNED string literals 'abcd' to b'abcd'.
        It does apply to multi-line strings but doesn't apply if it's a raw
        string, because ur'abcd' is a SyntaxError on Python 2 and br'abcd' is a
        SyntaxError on Python 3.
        """
        before = r"""
        s0 = '1234'
        s1 = '''5678
        '''
        s2 = "9abc"
        # Unchanged:
        s3 = r'1234'
        s4 = R"defg"
        s5 = u'hijk'
        s6 = u"lmno"
        s7 = b'lmno'
        s8 = b"pqrs"
        """
        after = r"""
        s0 = b'1234'
        s1 = b'''5678
        '''
        s2 = b"9abc"
        # Unchanged:
        s3 = r'1234'
        s4 = R"defg"
        s5 = u'hijk'
        s6 = u"lmno"
        s7 = b'lmno'
        s8 = b"pqrs"
        """
        self.convert_check(before, after, tobytes=True)
    def test_cmp(self):
        # cmp() was removed in Py3; stage 2 should import it from past.builtins.
        before = """
        assert cmp(1, 2) == -1
        assert cmp(2, 1) == 1
        """
        after = """
        from past.builtins import cmp
        assert cmp(1, 2) == -1
        assert cmp(2, 1) == 1
        """
        self.convert_check(before, after, stages=(1, 2), ignore_imports=False)
    def test_execfile(self):
        # execfile() was removed in Py3; stage 2 should import the shim.
        before = """
        with open('mytempfile.py', 'w') as f:
            f.write('x = 1')
        execfile('mytempfile.py')
        x += 1
        assert x == 2
        """
        after = """
        from past.builtins import execfile
        with open('mytempfile.py', 'w') as f:
            f.write('x = 1')
        execfile('mytempfile.py')
        x += 1
        assert x == 2
        """
        self.convert_check(before, after, stages=(1, 2), ignore_imports=False)
    @unittest.expectedFailure
    def test_izip(self):
        before = """
        from itertools import izip
        for (a, b) in izip([1, 3, 5], [2, 4, 6]):
            pass
        """
        after = """
        from builtins import zip
        for (a, b) in zip([1, 3, 5], [2, 4, 6]):
            pass
        """
        self.convert_check(before, after, stages=(1, 2), ignore_imports=False)
    def test_UserList(self):
        # UserList moved from its own module into collections on Py3.
        before = """
        from UserList import UserList
        a = UserList([1, 3, 5])
        assert len(a) == 3
        """
        after = """
        from collections import UserList
        a = UserList([1, 3, 5])
        assert len(a) == 3
        """
        self.convert_check(before, after, stages=(1, 2), ignore_imports=True)
    @unittest.expectedFailure
    def test_no_unneeded_list_calls(self):
        """
        TODO: get this working
        """
        code = """
        for (a, b) in zip(range(3), range(3, 6)):
            pass
        """
        self.unchanged(code)
    @expectedFailurePY26
    def test_import_builtins(self):
        before = """
        a = raw_input()
        b = open(a, b, c)
        c = filter(a, b)
        d = map(a, b)
        e = isinstance(a, str)
        f = bytes(a, encoding='utf-8')
        for g in xrange(10**10):
            pass
        h = reduce(lambda x, y: x+y, [1, 2, 3, 4, 5])
        super(MyClass, self)
        """
        after = """
        from builtins import bytes
        from builtins import filter
        from builtins import input
        from builtins import map
        from builtins import range
        from functools import reduce
        a = input()
        b = open(a, b, c)
        c = list(filter(a, b))
        d = list(map(a, b))
        e = isinstance(a, str)
        f = bytes(a, encoding='utf-8')
        for g in range(10**10):
            pass
        h = reduce(lambda x, y: x+y, [1, 2, 3, 4, 5])
        super(MyClass, self)
        """
        self.convert_check(before, after, ignore_imports=False, run=False)
    def test_xrange(self):
        """
        The ``from builtins import range`` line was being added to the
        bottom of the file as of v0.11.4, but only using Py2.7's lib2to3.
        (Py3.3's lib2to3 seems to work.)
        """
        before = """
        for i in xrange(10):
            pass
        """
        after = """
        from builtins import range
        for i in range(10):
            pass
        """
        self.convert_check(before, after, ignore_imports=False)
    @expectedFailurePY26
    def test_source_coding_utf8(self):
        """
        Tests to ensure that the source coding line is not corrupted or
        removed. It must be left as the first line in the file (including
        before any __future__ imports). Also tests whether the unicode
        characters in this encoding are parsed correctly and left alone.
        """
        code = """
        # -*- coding: utf-8 -*-
        icons = [u"◐", u"◓", u"◑", u"◒"]
        """
        self.unchanged(code)
    def test_exception_syntax(self):
        """
        Test of whether futurize handles the old-style exception syntax
        """
        before = """
        try:
            pass
        except IOError, e:
            val = e.errno
        """
        after = """
        try:
            pass
        except IOError as e:
            val = e.errno
        """
        self.convert_check(before, after)
    def test_super(self):
        """
        This tests whether futurize keeps the old two-argument super() calls the
        same as before. It should, because this still works in Py3.
        """
        code = '''
        class VerboseList(list):
            def append(self, item):
                print('Adding an item')
                super(VerboseList, self).append(item)
        '''
        self.unchanged(code)
    @unittest.expectedFailure
    def test_file(self):
        """
        file() as a synonym for open() is obsolete and invalid on Python 3.
        """
        before = '''
        f = file(__file__)
        data = f.read()
        f.close()
        '''
        after = '''
        f = open(__file__)
        data = f.read()
        f.close()
        '''
        self.convert_check(before, after)
    def test_apply(self):
        # apply() was removed in Py3; calls should be rewritten to f(*args).
        before = '''
        def addup(*x):
            return sum(x)
        assert apply(addup, (10,20)) == 30
        '''
        after = """
        def addup(*x):
            return sum(x)
        assert addup(*(10,20)) == 30
        """
        self.convert_check(before, after)
    @unittest.skip('not implemented yet')
    def test_download_pypi_package_and_test(self):
        URL = 'http://pypi.python.org/pypi/{0}/json'
        import requests
        package = 'future'
        r = requests.get(URL.format(package))
        pprint.pprint(r.json())
        download_url = r.json()['urls'][0]['url']
        filename = r.json()['urls'][0]['filename']
        # r2 = requests.get(download_url)
        # with open('/tmp/' + filename, 'w') as tarball:
        #     tarball.write(r2.content)
    @expectedFailurePY26
    def test_raw_input(self):
        """
        Passes in a string to the waiting input() after futurize
        conversion.
        The code is the first snippet from these docs:
            http://docs.python.org/2/library/2to3.html
        """
        before = """
        from io import BytesIO
        def greet(name):
            print "Hello, {0}!".format(name)
        print "What's your name?"
        import sys
        oldstdin = sys.stdin
        sys.stdin = BytesIO(b'Ed\\n')
        name = raw_input()
        greet(name.decode())
        sys.stdin = oldstdin
        assert name == b'Ed'
        """
        desired = """
        from io import BytesIO
        def greet(name):
            print("Hello, {0}!".format(name))
        print("What's your name?")
        import sys
        oldstdin = sys.stdin
        sys.stdin = BytesIO(b'Ed\\n')
        name = input()
        greet(name.decode())
        sys.stdin = oldstdin
        assert name == b'Ed'
        """
        self.convert_check(before, desired, run=False)
        # Run the converted script under each interpreter, feeding it input.
        for interpreter in self.interpreters:
            p1 = Popen([interpreter, self.tempdir + 'mytestscript.py'],
                       stdout=PIPE, stdin=PIPE, stderr=PIPE)
            (stdout, stderr) = p1.communicate(b'Ed')
            self.assertEqual(stderr, b'')
            self.assertEqual(stdout, b"What's your name?\nHello, Ed!\n")
    def test_literal_prefixes_are_not_stripped(self):
        """
        Tests to ensure that the u'' and b'' prefixes on unicode strings and
        byte strings are not removed by the futurize script.  Removing the
        prefixes on Py3.3+ is unnecessary and loses some information -- namely,
        that the strings have explicitly been marked as unicode or bytes,
        rather than just e.g. a guess by some automated tool about what they
        are.
        """
        code = '''
        s = u'unicode string'
        b = b'byte string'
        '''
        self.unchanged(code)
    def test_division(self):
        # Old-style division becomes old_div() in stage 2.
        before = """
        x = 1 / 2
        """
        after = """
        from past.utils import old_div
        x = old_div(1, 2)
        """
        self.convert_check(before, after, stages=[1, 2])
    def test_already_future_division(self):
        # Code already under "from __future__ import division" is untouched.
        code = """
        from __future__ import division
        x = 1 / 2
        assert x == 0.5
        y = 3. / 2.
        assert y == 1.5
        """
        self.unchanged(code)
class TestFuturizeRenamedStdlib(CodeHandler):
    # Tests for stdlib modules that were renamed between Py2 and Py3.
    @unittest.skip('Infinite loop?')
    def test_renamed_modules(self):
        before = """
        import ConfigParser
        import copy_reg
        import cPickle
        import cStringIO
        """
        after = """
        import configparser
        import copyreg
        import pickle
        import io
        """
        self.convert_check(before, after)
    @unittest.skip('Not working yet ...')
    def test_urllib_refactor(self):
        # Code like this using urllib is refactored by futurize --stage2 to use
        # the new Py3 module names, but ``future`` doesn't support urllib yet.
        before = """
        import urllib
        URL = 'http://pypi.python.org/pypi/future/json'
        package = 'future'
        r = urllib.urlopen(URL.format(package))
        data = r.read()
        """
        after = """
        from future import standard_library
        standard_library.install_hooks()
        import urllib.request
        URL = 'http://pypi.python.org/pypi/future/json'
        package = 'future'
        r = urllib.request.urlopen(URL.format(package))
        data = r.read()
        """
        self.convert_check(before, after)
    @unittest.skip('Infinite loop?')
    def test_renamed_copy_reg_and_cPickle_modules(self):
        """
        Example from docs.python.org/2/library/copy_reg.html
        """
        before = """
        import copy_reg
        import copy
        import cPickle
        class C(object):
            def __init__(self, a):
                self.a = a
        def pickle_c(c):
            print('pickling a C instance...')
            return C, (c.a,)
        copy_reg.pickle(C, pickle_c)
        c = C(1)
        d = copy.copy(c)
        p = cPickle.dumps(c)
        """
        after = """
        import copyreg
        import copy
        import pickle
        class C(object):
            def __init__(self, a):
                self.a = a
        def pickle_c(c):
            print('pickling a C instance...')
            return C, (c.a,)
        copyreg.pickle(C, pickle_c)
        c = C(1)
        d = copy.copy(c)
        p = pickle.dumps(c)
        """
        self.convert_check(before, after)
    @unittest.expectedFailure
    def test_Py2_StringIO_module(self):
        """
        This requires that the argument to io.StringIO be made a
        unicode string explicitly if we're not using unicode_literals:
        Ideally, there would be a fixer for this. For now:
        TODO: add the Py3 equivalent for this to the docs. Also add back
        a test for the unicode_literals case.
        """
        before = """
        import cStringIO
        import StringIO
        s1 = cStringIO.StringIO('my string')
        s2 = StringIO.StringIO('my other string')
        assert isinstance(s1, cStringIO.InputType)
        """
        # There is no io.InputType in Python 3. futurize should change this to
        # something like this. But note that the input to io.StringIO
        # must be a unicode string on both Py2 and Py3.
        after = """
        import io
        import io
        s1 = io.StringIO(u'my string')
        s2 = io.StringIO(u'my other string')
        assert isinstance(s1, io.StringIO)
        """
        self.convert_check(before, after)
class TestFuturizeStage1(CodeHandler):
    """
    Tests "stage 1": safe optimizations: modernizing Python 2 code so that it
    uses print functions, new-style exception syntax, etc.
    The behaviour should not change and this should introduce no dependency on
    the ``future`` package. It produces more modern Python 2-only code. The
    goal is to reduce the size of the real porting patch-set by performing
    the uncontroversial patches first.
    """
    def test_apply(self):
        """
        apply() should be changed by futurize --stage1
        """
        before = '''
        def f(a, b):
            return a + b
        args = (1, 2)
        assert apply(f, args) == 3
        assert apply(f, ('a', 'b')) == 'ab'
        '''
        after = '''
        def f(a, b):
            return a + b
        args = (1, 2)
        assert f(*args) == 3
        assert f(*('a', 'b')) == 'ab'
        '''
        self.convert_check(before, after, stages=[1])
    def test_next_1(self):
        """
        Custom next methods should not be converted to __next__ in stage1, but
        any obj.next() calls should be converted to next(obj).
        """
        before = """
        class Upper:
            def __init__(self, iterable):
                self._iter = iter(iterable)
            def next(self):                 # note the Py2 interface
                return next(self._iter).upper()
            def __iter__(self):
                return self
        itr = Upper('hello')
        assert itr.next() == 'H'
        assert next(itr) == 'E'
        assert list(itr) == list('LLO')
        """
        after = """
        class Upper:
            def __init__(self, iterable):
                self._iter = iter(iterable)
            def next(self):                 # note the Py2 interface
                return next(self._iter).upper()
            def __iter__(self):
                return self
        itr = Upper('hello')
        assert next(itr) == 'H'
        assert next(itr) == 'E'
        assert list(itr) == list('LLO')
        """
        self.convert_check(before, after, stages=[1], run=PY2)
    @unittest.expectedFailure
    def test_next_2(self):
        """
        This version of the above doesn't currently work: the self._iter.next() call in
        line 5 isn't converted to next(self._iter).
        """
        before = """
        class Upper:
            def __init__(self, iterable):
                self._iter = iter(iterable)
            def next(self):                 # note the Py2 interface
                return self._iter.next().upper()
            def __iter__(self):
                return self
        itr = Upper('hello')
        assert itr.next() == 'H'
        assert next(itr) == 'E'
        assert list(itr) == list('LLO')
        """
        after = """
        class Upper(object):
            def __init__(self, iterable):
                self._iter = iter(iterable)
            def next(self):                 # note the Py2 interface
                return next(self._iter).upper()
            def __iter__(self):
                return self
        itr = Upper('hello')
        assert next(itr) == 'H'
        assert next(itr) == 'E'
        assert list(itr) == list('LLO')
        """
        self.convert_check(before, after, stages=[1], run=PY2)
    def test_xrange(self):
        """
        xrange should not be changed by futurize --stage1
        """
        code = '''
        for i in xrange(10):
            pass
        '''
        self.unchanged(code, stages=[1], run=PY2)
    @unittest.expectedFailure
    def test_absolute_import_changes(self):
        """
        Implicit relative imports should be converted to absolute or explicit
        relative imports correctly.
        Issue #16 (with porting bokeh/bbmodel.py)
        """
        with open(tempdir + 'specialmodels.py', 'w') as f:
            f.write('pass')
        before = """
        import specialmodels.pandasmodel
        specialmodels.pandasmodel.blah()
        """
        after = """
        from __future__ import absolute_import
        from .specialmodels import pandasmodel
        pandasmodel.blah()
        """
        self.convert_check(before, after, stages=[1])
    def test_safe_futurize_imports(self):
        """
        The standard library module names should not be changed until stage 2
        """
        before = """
        import ConfigParser
        import HTMLParser
        from itertools import ifilterfalse
        ConfigParser.ConfigParser
        HTMLParser.HTMLParser
        assert list(ifilterfalse(lambda x: x % 2, [2, 4])) == [2, 4]
        """
        self.unchanged(before, stages=[1], run=PY2)
    def test_print(self):
        # Plain print statements become print() calls in stage 1.
        before = """
        print 'Hello'
        """
        after = """
        print('Hello')
        """
        self.convert_check(before, after, stages=[1])
        # The ">> stream" redirection form becomes a file= keyword argument.
        before = """
        import sys
        print >> sys.stderr, 'Hello', 'world'
        """
        after = """
        import sys
        print('Hello', 'world', file=sys.stderr)
        """
        self.convert_check(before, after, stages=[1])
    def test_print_already_function(self):
        """
        Running futurize --stage1 should not add a second set of parentheses
        """
        before = """
        print('Hello')
        """
        self.unchanged(before, stages=[1])
    @unittest.expectedFailure
    def test_print_already_function_complex(self):
        """
        Running futurize --stage1 does add a second second set of parentheses
        in this case. This is because the underlying lib2to3 has two distinct
        grammars -- with a print statement and with a print function -- and,
        when going forwards (2 to both), futurize assumes print is a statement,
        which raises a ParseError.
        """
        before = """
        import sys
        print('Hello', 'world', file=sys.stderr)
        """
        self.unchanged(before, stages=[1])
    def test_exceptions(self):
        # Old "except E, e" syntax becomes "except E as e" in stage 1.
        before = """
        try:
            raise AttributeError('blah')
        except AttributeError, e:
            pass
        """
        after = """
        try:
            raise AttributeError('blah')
        except AttributeError as e:
            pass
        """
        self.convert_check(before, after, stages=[1])
    @unittest.expectedFailure
    def test_string_exceptions(self):
        """
        2to3 does not convert string exceptions: see
        http://python3porting.com/differences.html.
        """
        before = """
        try:
            raise "old string exception"
        except Exception, e:
            pass
        """
        after = """
        try:
            raise Exception("old string exception")
        except Exception as e:
            pass
        """
        self.convert_check(before, after, stages=[1])
    def test_oldstyle_classes(self):
        """
        We don't convert old-style classes to new-style automatically in
        stage 1 (but we should in stage 2). So Blah should not inherit
        explicitly from object yet.
        """
        before = """
        class Blah:
            pass
        """
        self.unchanged(before, stages=[1])
    def test_stdlib_modules_not_changed(self):
        """
        Standard library module names should not be changed in stage 1
        """
        before = """
        import ConfigParser
        import HTMLParser
        import collections
        print 'Hello'
        try:
            raise AttributeError('blah')
        except AttributeError, e:
            pass
        """
        after = """
        import ConfigParser
        import HTMLParser
        import collections
        print('Hello')
        try:
            raise AttributeError('blah')
        except AttributeError as e:
            pass
        """
        self.convert_check(before, after, stages=[1], run=PY2)
    def test_octal_literals(self):
        # Py2 octal literals (0644) become Py3-style (0o644).
        before = """
        mode = 0644
        """
        after = """
        mode = 0o644
        """
        self.convert_check(before, after)
    def test_long_int_literals(self):
        # The Py2 long-integer L suffix is dropped.
        before = """
        bignumber = 12345678901234567890L
        """
        after = """
        bignumber = 12345678901234567890
        """
        self.convert_check(before, after)
    def test___future___import_position(self):
        """
        Issue #4: __future__ imports inserted too low in file: SyntaxError
        """
        code = """
        # Comments here
        # and here
        __version__=''' $Id$ '''
        __doc__="A Sequencer class counts things. It aids numbering and formatting lists."
        __all__='Sequencer getSequencer setSequencer'.split()
        #
        # another comment
        #
        CONSTANTS = [ 0, 01, 011, 0111, 012, 02, 021, 0211, 02111, 013 ]
        _RN_LETTERS = "IVXLCDM"
        def my_func(value):
            pass
        ''' Docstring-like comment here '''
        """
        self.convert(code)
    def test_issue_45(self):
        """
        Tests whether running futurize -f libfuturize.fixes.fix_future_standard_library_urllib
        on the code below causes a ValueError (issue #45).
        """
        code = r"""
        from __future__ import print_function
        from urllib import urlopen, urlencode
        oeis_url = 'http://oeis.org/'
        def _fetch(url):
            try:
                f = urlopen(url)
                result = f.read()
                f.close()
                return result
            except IOError as msg:
                raise IOError("%s\nError fetching %s." % (msg, url))
        """
        self.convert(code)
    def test_order_future_lines(self):
        """
        Tests the internal order_future_lines() function.
        """
        before = '''
        # comment here
        from __future__ import print_function
        from __future__ import absolute_import
        # blank line or comment here
        from future.utils import with_metaclass
        from builtins import zzz
        from builtins import aaa
        from builtins import blah
        # another comment
        import something_else
        code_here
        more_code_here
        '''
        after = '''
        # comment here
        from __future__ import absolute_import
        from __future__ import print_function
        # blank line or comment here
        from future.utils import with_metaclass
        from builtins import aaa
        from builtins import blah
        from builtins import zzz
        # another comment
        import something_else
        code_here
        more_code_here
        '''
        self.assertEqual(order_future_lines(reformat_code(before)),
                         reformat_code(after))
    @unittest.expectedFailure
    def test_issue_12(self):
        """
        Issue #12: This code shouldn't be upset by additional imports.
        __future__ imports must appear at the top of modules since about Python
        2.5.
        """
        code = """
        from __future__ import with_statement
        f = open('setup.py')
        for i in xrange(100):
            pass
        """
        self.unchanged(code)
    @expectedFailurePY26
    def test_range_necessary_list_calls(self):
        """
        On Py2.6 (only), the xrange_with_import fixer somehow seems to cause
        l = range(10)
        to be converted to:
        l = list(list(range(10)))
        with an extra list(...) call.
        """
        before = """
        l = range(10)
        assert isinstance(l, list)
        for i in range(3):
            print i
        for i in xrange(3):
            print i
        """
        after = """
        from __future__ import print_function
        from builtins import range
        l = list(range(10))
        assert isinstance(l, list)
        for i in range(3):
            print(i)
        for i in range(3):
            print(i)
        """
        self.convert_check(before, after)
class TestConservativeFuturize(CodeHandler):
    # Tests for futurize's conservative mode, which prefers past.builtins
    # shims over changing the semantics of existing names.
    @unittest.expectedFailure
    def test_basestring(self):
        """
        In conservative mode, futurize would not modify "basestring"
        but merely import it, and the following code would still run on
        both Py2 and Py3.
        """
        before = """
        assert isinstance('hello', basestring)
        assert isinstance(u'hello', basestring)
        assert isinstance(b'hello', basestring)
        """
        after = """
        from past.builtins import basestring
        assert isinstance('hello', basestring)
        assert isinstance(u'hello', basestring)
        assert isinstance(b'hello', basestring)
        """
        self.convert_check(before, after, conservative=True)
    @unittest.expectedFailure
    def test_open(self):
        """
        In conservative mode, futurize would not import io.open because
        this changes the default return type from bytes to text.
        """
        before = """
        filename = 'temp_file_open.test'
        contents = 'Temporary file contents. Delete me.'
        with open(filename, 'w') as f:
            f.write(contents)
        with open(filename, 'r') as f:
            data = f.read()
        assert isinstance(data, str)
        assert data == contents
        """
        after = """
        from past.builtins import open, str as oldbytes, unicode
        filename = oldbytes(b'temp_file_open.test')
        contents = oldbytes(b'Temporary file contents. Delete me.')
        with open(filename, oldbytes(b'w')) as f:
            f.write(contents)
        with open(filename, oldbytes(b'r')) as f:
            data = f.read()
        assert isinstance(data, oldbytes)
        assert data == contents
        assert isinstance(oldbytes(b'hello'), basestring)
        assert isinstance(unicode(u'hello'), basestring)
        assert isinstance(oldbytes(b'hello'), basestring)
        """
        self.convert_check(before, after, conservative=True)
    def test_safe_division(self):
        """
        Tests whether Py2 scripts using old-style division still work
        after futurization.
        """
        before = """
        x = 3 / 2
        y = 3. / 2
        assert x == 1 and isinstance(x, int)
        assert y == 1.5 and isinstance(y, float)
        """
        after = """
        from __future__ import division
        from past.utils import old_div
        x = old_div(3, 2)
        y = old_div(3., 2)
        assert x == 1 and isinstance(x, int)
        assert y == 1.5 and isinstance(y, float)
        """
        self.convert_check(before, after)
    def test_safe_division_overloaded(self):
        """
        If division is overloaded, futurize may produce spurious old_div
        calls. This test is for whether the code still works on Py2
        despite these calls.
        """
        before = """
        class Path(str):
            def __div__(self, other):
                return self.__truediv__(other)
            def __truediv__(self, other):
                return Path(str(self) + '/' + str(other))
        path1 = Path('home')
        path2 = Path('user')
        z = path1 / path2
        assert isinstance(z, Path)
        assert str(z) == 'home/user'
        """
        after = """
        from __future__ import division
        from past.utils import old_div
        class Path(str):
            def __div__(self, other):
                return self.__truediv__(other)
            def __truediv__(self, other):
                return Path(str(self) + '/' + str(other))
        path1 = Path('home')
        path2 = Path('user')
        z = old_div(path1, path2)
        assert isinstance(z, Path)
        assert str(z) == 'home/user'
        """
        self.convert_check(before, after)
class TestFuturizeAllImports(CodeHandler):
    """
    Tests "futurize --all-imports".
    """
    @expectedFailurePY26
    def test_all_imports(self):
        # --all-imports should emit the full set of __future__ imports,
        # install the standard-library hooks, and star-import builtins.
        before = """
        import math
        import os
        l = range(10)
        assert isinstance(l, list)
        print 'Hello'
        for i in xrange(100):
            pass
        print('Hello')
        """
        after = """
        from __future__ import unicode_literals
        from __future__ import print_function
        from __future__ import division
        from __future__ import absolute_import
        from future import standard_library
        standard_library.install_hooks()
        from builtins import range
        from builtins import *
        import math
        import os
        l = list(range(10))
        assert isinstance(l, list)
        print('Hello')
        for i in range(100):
            pass
        print('Hello')
        """
        self.convert_check(before, after, all_imports=True)
# Run the whole suite when executed directly.
if __name__ == '__main__':
    unittest.main()
|
krischer/python-future
|
tests/test_future/test_futurize.py
|
Python
|
mit
| 35,721
|
class Flattener:
    """Flattens nested dicts, lists and sets into a single flat dict.

    Keys of the resulting dict are path expressions: mapping entries are
    joined with dots (``a.b``) and sequence items are addressed with
    bracketed indices (``a[0]``), e.g. ``{'a': [{'b': 1}]}`` flattens to
    ``{'a[0].b': 1}``.

    :param prefix: optional path prefix prepended to every generated key.
    """

    def __init__(self, prefix=''):
        self.prefix = prefix

    def __flatten_list(self, structure, prefix):
        # Sequence items are addressed as "<prefix>[<index>]".
        # (Renamed the inner variable: the original shadowed its ``prefix``
        # path argument with the loop variable.)
        result = dict()
        for i, subitem in enumerate(structure):
            subprefix = '{0}[{1}]'.format(prefix, i)
            result.update(self.__flatten(subitem, subprefix))
        return result

    def __flatten_dict(self, structure, prefix):
        # Mapping entries are addressed as "<prefix>.<key>"; no leading dot
        # when the prefix is empty (i.e. at the top level).
        result = dict()
        for subpath, subitem in structure.items():
            path = '{0}.{1}'.format(prefix, subpath) if prefix else subpath
            result.update(self.__flatten(subitem, path))
        return result

    def __flatten_primitive(self, primitive, prefix):
        # A leaf value maps directly to its full path.
        return {prefix: primitive}

    def __flatten(self, structure, prefix):
        # Dispatch on structure kind and return the helper's dict directly:
        # the original rebuilt an identical dict from it, which was redundant.
        # isinstance() (instead of exact ``type(...) is`` checks) also accepts
        # dict/list/set subclasses such as OrderedDict -- a backward-compatible
        # generalization.
        if isinstance(structure, dict):
            return self.__flatten_dict(structure, prefix)
        if isinstance(structure, (list, set)):
            return self.__flatten_list(structure, prefix)
        return self.__flatten_primitive(structure, prefix)

    def flatten(self, structure):
        """Return the flat dict representation of *structure*."""
        return self.__flatten(structure, self.prefix)
|
the-dalee/Pilifana
|
pilifana/conversion/structure.py
|
Python
|
mit
| 1,398
|
# covered in collections
|
ramesharpu/python
|
basic-coding/built-in-functions/frozenset.py
|
Python
|
gpl-2.0
| 24
|
#!/usr/bin/env python
###############################################################################
# $Id: mrsid.py 33793 2016-03-26 13:02:07Z goatbar $
#
# Project: GDAL/OGR Test Suite
# Purpose: Test read/write functionality for MrSID driver.
# Author: Frank Warmerdam <warmerdam@pobox.com>
#
###############################################################################
# Copyright (c) 2005, Frank Warmerdam <warmerdam@pobox.com>
# Copyright (c) 2009-2012, Even Rouault <even dot rouault at mines-paris dot org>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
###############################################################################
import os
import sys
from osgeo import gdal
import shutil
sys.path.append( '../pymod' )
import gdaltest
###############################################################################
# Read a simple byte file, checking projections and geotransform.
def mrsid_1():
    """Read a simple byte file, checking projection and geotransform."""
    gdaltest.mrsid_drv = gdal.GetDriverByName( 'MrSID' )
    if gdaltest.mrsid_drv is None:
        return 'skip'

    tst = gdaltest.GDALTest( 'MrSID', 'mercator.sid', 1, None )

    gt = (-15436.385771224039, 60.0, 0.0, 3321987.8617962394, 0.0, -60.0)
    # Three historical variants of the expected WKT are kept for
    # reference; only the final assignment is compared against.
    #
    # Old, internally generated.
    #
    prj = """PROJCS["MER E000|",
    GEOGCS["NAD27",
    DATUM["North_American_Datum_1927",
        SPHEROID["Clarke 1866",6378206.4,294.9786982138982,
            AUTHORITY["EPSG","7008"]],
        AUTHORITY["EPSG","6267"]],
    PRIMEM["Greenwich",0],
    UNIT["degree",0.0174532925199433],
    AUTHORITY["EPSG","4267"]],
    PROJECTION["Mercator_1SP"],
    PARAMETER["latitude_of_origin",0],
    PARAMETER["central_meridian",0],
    PARAMETER["scale_factor",1],
    PARAMETER["false_easting",0],
    PARAMETER["false_northing",0],
    UNIT["metre",1,
    AUTHORITY["EPSG","9001"]]]"""

    #
    # MrSID SDK getWKT() method.
    #
    prj = """PROJCS["MER E000|",
    GEOGCS["NAD27",
    DATUM["North_American_Datum_1927",
        SPHEROID["Clarke 1866",6378206.4,294.9786982139006,
            AUTHORITY["EPSG","7008"]],
        AUTHORITY["EPSG","6267"]],
    PRIMEM["Greenwich",0],
    UNIT["degree",0.0174532925199433],
    AUTHORITY["EPSG","4267"]],
    PROJECTION["Mercator_1SP"],
    PARAMETER["latitude_of_origin",1],
    PARAMETER["central_meridian",1],
    PARAMETER["scale_factor",1],
    PARAMETER["false_easting",1],
    PARAMETER["false_northing",1],
    UNIT["metre",1,
    AUTHORITY["EPSG","9001"]]]"""

    #
    # MrSID SDK getWKT() method - DSDK 8 and newer?
    #
    prj = """PROJCS["MER E000|",
    GEOGCS["NAD27",
    DATUM["North_American_Datum_1927",
        SPHEROID["Clarke 1866",6378206.4,294.9786982139006,
            AUTHORITY["EPSG","7008"]],
        AUTHORITY["EPSG","6267"]],
    PRIMEM["Greenwich",0],
    UNIT["degree",0.0174532925199433],
    AUTHORITY["EPSG","4267"]],
    PROJECTION["Mercator_1SP"],
    PARAMETER["central_meridian",0],
    PARAMETER["scale_factor",1],
    PARAMETER["false_easting",0],
    PARAMETER["false_northing",0],
    UNIT["metre",1,
    AUTHORITY["EPSG","9001"]]]"""

    ret = tst.testOpen( check_gt = gt, \
        check_stat = (0.0, 255.0, 103.319, 55.153), \
        check_approx_stat = (2.0, 243.0, 103.131, 43.978) )

    if ret != 'success':
        return ret

    ds = gdal.Open( 'data/mercator.sid' )
    got_prj = ds.GetProjectionRef()
    ds = None

    # Bug fix: the check below used to run .find() on prj (the expected
    # constant), comparing the expected string against itself -- a
    # tautology that could never fail.  It must inspect the WKT actually
    # returned by the driver.
    if got_prj.find('North_American_Datum_1927') == -1 or \
       got_prj.find('Mercator_1SP') == -1 :
        gdaltest.post_reason('did not get expected projection')
        print(got_prj)
        return 'fail'

    # An inexact match is only worth a warning: WKT formatting varies
    # between MrSID SDK versions.
    if got_prj != prj:
        print('Warning: did not get exactly expected projection. Got %s' % got_prj)

    return 'success'
###############################################################################
# Do a direct IO to read the image at a resolution for which there is no
# builtin overview. Checks for the bug Steve L found in the optimized
# RasterIO implementation.
def mrsid_2():
    """Direct IO read at a resolution with no builtin overview.

    Checks for the bug Steve L found in the optimized RasterIO
    implementation.
    """
    if gdaltest.mrsid_drv is None:
        return 'skip'

    ds = gdal.Open( 'data/mercator.sid' )

    try:
        data = ds.ReadRaster( 0, 0, 515, 515, buf_xsize = 10, buf_ysize = 10 )
    except:
        # Broad on purpose: any failure of the decimated read should fail
        # the test with a diagnostic rather than crash the harness.
        gdaltest.post_reason( 'Small overview read failed: ' + gdal.GetLastErrorMsg() )
        return 'fail'

    ds = None

    # On Python 3 iterating bytes yields ints directly; on Python 2
    # ReadRaster returns a str whose characters need ord().  Normalize to
    # a sequence of ints, then sanity-check the mean (an exact checksum
    # would be too brittle for a lossy format).
    if isinstance(data, bytes) and not isinstance(data, str):
        values = data
    else:
        values = [ord(c) for c in data]

    # 'total' rather than the previous 'sum', which shadowed the builtin.
    total = sum(values)
    mean = float(total) / len(data)

    if mean < 95 or mean > 105:
        gdaltest.post_reason( 'image mean out of range.' )
        return 'fail'

    return 'success'
###############################################################################
# Test overview reading.
def mrsid_3():
    """Test overview reading on the MrSID sample image."""
    if gdaltest.mrsid_drv is None:
        return 'skip'

    ds = gdal.Open( 'data/mercator.sid' )
    band = ds.GetRasterBand(1)
    if band.GetOverviewCount() != 4:
        gdaltest.post_reason( 'did not get expected overview count' )
        return 'fail'

    # Compare min/max/mean/stddev of the smallest overview against the
    # recorded reference values, within a small tolerance.
    expected = (11.0, 230.0, 103.42607897153351, 39.952592422557757)
    actual = band.GetOverview(3).GetStatistics(0,1)
    tolerance = 0.0001
    for got, want in zip(actual, expected):
        if abs(got - want) > tolerance:
            print('')
            print('old = ', expected)
            print('new = ', actual)
            gdaltest.post_reason( 'Statistics differ.' )
            return 'fail'

    return 'success'
###############################################################################
# Check a new (V3) file which uses a different form for coordinate sys.
def mrsid_4():
    """Check a new (V3) file which uses a different form for coordinate sys."""
    if gdaltest.mrsid_drv is None:
        return 'skip'

    # Drop any stale PAM side-car so the embedded SRS is what gets read.
    try:
        os.remove('data/mercator_new.sid.aux.xml')
    except:
        pass

    tst = gdaltest.GDALTest( 'MrSID', 'mercator_new.sid', 1, None )

    gt = (-15436.385771224039, 60.0, 0.0, 3321987.8617962394, 0.0, -60.0)
    # Expected WKT as carried by the V3 file.
    prj = """PROJCS["MER E000",
    GEOGCS["NAD27",
    DATUM["North_American_Datum_1927",
        SPHEROID["Clarke 1866",6378206.4,294.9786982138982,
            AUTHORITY["EPSG","7008"]],
        AUTHORITY["EPSG","6267"]],
    PRIMEM["Greenwich",0],
    UNIT["degree",0.0174532925199433],
    AUTHORITY["EPSG","4267"]],
    PROJECTION["Mercator_1SP"],
    PARAMETER["latitude_of_origin",33.76446202777777],
    PARAMETER["central_meridian",-117.4745428888889],
    PARAMETER["scale_factor",1],
    PARAMETER["false_easting",0],
    PARAMETER["false_northing",0],
    UNIT["metre",1,
    AUTHORITY["EPSG","9001"]]]"""

    ret = tst.testOpen( check_gt = gt, check_prj = prj, \
        check_stat = (0.0, 255.0, 103.112, 52.477), \
        check_approx_stat = (0.0, 255.0, 102.684, 51.614) )

    # Remove the PAM file created by the statistics computation above.
    try:
        os.remove('data/mercator_new.sid.aux.xml')
    except:
        pass

    return ret
###############################################################################
# Test JP2MrSID driver
def mrsid_5():
    """Register JP2MrSID and disable the competing JPEG2000 drivers."""
    gdaltest.jp2mrsid_drv = gdal.GetDriverByName( 'JP2MrSID' )
    if gdaltest.jp2mrsid_drv is None:
        return 'skip'

    # Make sure JP2MrSID (and not another JPEG2000 driver) handles *.jp2
    # in the following tests.
    gdaltest.deregister_all_jpeg2000_drivers_but('JP2MrSID')

    return 'success'
###############################################################################
# Open byte.jp2
def mrsid_6():
    """Open byte.jp2 and verify its SRS, geotransform and checksum."""
    if gdaltest.jp2mrsid_drv is None:
        return 'skip'

    # Expected coordinate system (NAD27 / UTM zone 11N).
    srs = """PROJCS["NAD27 / UTM zone 11N",
    GEOGCS["NAD27",
    DATUM["North_American_Datum_1927",
        SPHEROID["Clarke 1866",6378206.4,294.9786982138982,
            AUTHORITY["EPSG","7008"]],
        AUTHORITY["EPSG","6267"]],
    PRIMEM["Greenwich",0],
    UNIT["degree",0.0174532925199433],
    AUTHORITY["EPSG","4267"]],
    PROJECTION["Transverse_Mercator"],
    PARAMETER["latitude_of_origin",0],
    PARAMETER["central_meridian",-117],
    PARAMETER["scale_factor",0.9996],
    PARAMETER["false_easting",500000],
    PARAMETER["false_northing",0],
    UNIT["metre",1,
    AUTHORITY["EPSG","9001"]],
    AUTHORITY["EPSG","26711"]]
"""
    gt = (440720.0, 60.0, 0.0, 3751320.0, 0.0, -60.0)

    # 50054 is the expected checksum of band 1.
    tst = gdaltest.GDALTest( 'JP2MrSID', 'byte.jp2', 1, 50054 )
    return tst.testOpen( check_prj = srs, check_gt = gt )
###############################################################################
# Open int16.jp2
def mrsid_7():
    """Open int16.jp2 and compare it against the int16.tif reference."""
    if gdaltest.jp2mrsid_drv is None:
        return 'skip'

    jp2_ds = gdal.Open( 'data/int16.jp2' )
    ref_ds = gdal.Open( 'data/int16.tif' )

    max_difference = gdaltest.compare_ds(jp2_ds, ref_ds)

    # A small tolerance is allowed because JPEG2000 compression is lossy.
    verdict = 'success'
    if max_difference > 5:
        gdaltest.post_reason('Image too different from reference')
        print(jp2_ds.GetRasterBand(1).Checksum())
        print(ref_ds.GetRasterBand(1).Checksum())
        verdict = 'fail'

    jp2_ds = None
    ref_ds = None
    return verdict
###############################################################################
# Test PAM override for nodata, coordsys, and geotransform.
def mrsid_8():
    """Test PAM override for nodata, coordsys, and geotransform."""
    if gdaltest.mrsid_drv is None:
        return 'skip'

    # Override values deliberately different from the file's embedded ones.
    new_gt = (10000,50,0,20000,0,-50)
    new_srs = """PROJCS["OSGB 1936 / British National Grid",GEOGCS["OSGB 1936",DATUM["OSGB_1936",SPHEROID["Airy 1830",6377563.396,299.3249646,AUTHORITY["EPSG","7001"]],AUTHORITY["EPSG","6277"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4277"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",49],PARAMETER["central_meridian",-2],PARAMETER["scale_factor",0.9996012717],PARAMETER["false_easting",400000],PARAMETER["false_northing",-100000],AUTHORITY["EPSG","27700"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]"""

    # Work on a scratch copy so the PAM side-car does not pollute data/.
    # The Delete may legitimately fail if no leftover copy exists, hence
    # the quiet error handler around it.
    gdal.PushErrorHandler('CPLQuietErrorHandler')
    gdal.GetDriverByName('MrSID').Delete( 'tmp/mercator.sid' )
    gdal.PopErrorHandler()

    shutil.copyfile( 'data/mercator.sid', 'tmp/mercator.sid' )

    ds = gdal.Open( 'tmp/mercator.sid' )

    ds.SetGeoTransform( new_gt )
    ds.SetProjection( new_srs )
    ds.GetRasterBand(1).SetNoDataValue( 255 )
    ds = None

    # Reopen and confirm the PAM values override the embedded metadata.
    ds = gdal.Open( 'tmp/mercator.sid' )

    if new_srs != ds.GetProjectionRef():
        print(ds.GetProjectionRef())
        gdaltest.post_reason( 'SRS Override failed.' )
        return 'fail'

    if new_gt != ds.GetGeoTransform():
        gdaltest.post_reason( 'Geotransform Override failed.' )
        return 'fail'

    if ds.GetRasterBand(1).GetNoDataValue() != 255:
        gdaltest.post_reason( 'Nodata override failed.' )
        return 'fail'

    ds = None

    gdal.GetDriverByName('MrSID').Delete( 'tmp/mercator.sid' )

    return 'success'
###############################################################################
# Test VSI*L IO with .sid
def mrsid_9():
    """Test VSI*L IO with a .sid copied into /vsimem."""
    if gdaltest.mrsid_drv is None:
        return 'skip'

    with open('data/mercator.sid', 'rb') as src:
        payload = src.read()

    # Stage the raw bytes in GDAL's in-memory filesystem.
    vsi_handle = gdal.VSIFOpenL('/vsimem/mrsid_9.sid', 'wb')
    gdal.VSIFWriteL(payload, 1, len(payload), vsi_handle)
    gdal.VSIFCloseL(vsi_handle)

    ds = gdal.Open('/vsimem/mrsid_9.sid')
    if ds is None:
        return 'fail'

    ds = None
    gdal.Unlink('/vsimem/mrsid_9.sid')

    return 'success'
###############################################################################
# Test VSI*L IO with .jp2
def mrsid_10():
    """Test VSI*L IO with a .jp2 copied into /vsimem."""
    if gdaltest.jp2mrsid_drv is None:
        return 'skip'

    with open('data/int16.jp2', 'rb') as src:
        payload = src.read()

    # Stage the raw bytes in GDAL's in-memory filesystem.
    vsi_handle = gdal.VSIFOpenL('/vsimem/mrsid_10.jp2', 'wb')
    gdal.VSIFWriteL(payload, 1, len(payload), vsi_handle)
    gdal.VSIFCloseL(vsi_handle)

    ds = gdal.Open('/vsimem/mrsid_10.jp2')
    if ds is None:
        return 'fail'

    ds = None
    gdal.Unlink('/vsimem/mrsid_10.jp2')

    return 'success'
###############################################################################
# Check that we can use .j2w world files (#4651)
def mrsid_11():
    """Check that we can use .j2w world files (#4651)."""
    if gdaltest.jp2mrsid_drv is None:
        return 'skip'

    ds = gdal.Open( 'data/byte_without_geotransform.jp2' )
    geotransform = ds.GetGeoTransform()

    # Offsets get a looser tolerance (0.1) than scale/rotation terms.
    expected = (440720, 60, 0, 3751320, 0, -60)
    tolerances = (0.1, 0.001, 0.001, 0.1, 0.001, 0.001)
    for got, want, tol in zip(geotransform, expected, tolerances):
        if abs(got - want) > tol:
            print(geotransform)
            gdaltest.post_reason( 'geotransform differs from expected' )
            return 'fail'

    ds = None
    return 'success'
###############################################################################
def mrsid_online_1():
    """Download 7sisters200.j2k and verify JP2MrSID can open it."""
    if gdaltest.jp2mrsid_drv is None:
        return 'skip'

    downloaded = gdaltest.download_file('http://download.osgeo.org/gdal/data/jpeg2000/7sisters200.j2k', '7sisters200.j2k')
    if not downloaded:
        return 'skip'

    # Checksum = 29473 on my PC
    test = gdaltest.GDALTest( 'JP2MrSID', 'tmp/cache/7sisters200.j2k', 1, None, filename_absolute = 1 )
    open_result = test.testOpen()
    if open_result != 'success':
        return 'fail'

    dataset = gdal.Open('tmp/cache/7sisters200.j2k')
    dataset.GetRasterBand(1).Checksum()
    dataset = None

    return 'success'
###############################################################################
def mrsid_online_2():
    """Download gcp.jp2 and verify it opens (GCPs are not supported)."""
    if gdaltest.jp2mrsid_drv is None:
        return 'skip'

    if not gdaltest.download_file('http://download.osgeo.org/gdal/data/jpeg2000/gcp.jp2', 'gcp.jp2'):
        return 'skip'

    # Checksum = 209 on my PC
    tst = gdaltest.GDALTest( 'JP2MrSID', 'tmp/cache/gcp.jp2', 1, None, filename_absolute = 1 )

    if tst.testOpen() != 'success':
        return 'fail'

    # The JP2MrSID driver doesn't handle GCPs
    ds = gdal.Open('tmp/cache/gcp.jp2')
    ds.GetRasterBand(1).Checksum()
    # The disabled assertions below are kept for reference, should GCP
    # support ever be added to the driver.
    #if len(ds.GetGCPs()) != 15:
    #    gdaltest.post_reason('bad number of GCP')
    #    return 'fail'
    #
    #expected_wkt = """GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433],AUTHORITY["EPSG","4326"]]"""
    #if ds.GetGCPProjection() != expected_wkt:
    #    gdaltest.post_reason('bad GCP projection')
    #    return 'fail'

    ds = None
    return 'success'
###############################################################################
def mrsid_online_3():
    """Compare downloaded Bretagne1.j2k against its .bmp reference."""
    if gdaltest.jp2mrsid_drv is None:
        return 'skip'

    if not gdaltest.download_file('http://www.openjpeg.org/samples/Bretagne1.j2k', 'Bretagne1.j2k'):
        return 'skip'
    if not gdaltest.download_file('http://www.openjpeg.org/samples/Bretagne1.bmp', 'Bretagne1.bmp'):
        return 'skip'

    # checksum = 14443 on my PC
    tst = gdaltest.GDALTest( 'JP2MrSID', 'tmp/cache/Bretagne1.j2k', 1, None, filename_absolute = 1 )

    if tst.testOpen() != 'success':
        return 'fail'

    ds = gdal.Open('tmp/cache/Bretagne1.j2k')
    ds_ref = gdal.Open('tmp/cache/Bretagne1.bmp')
    maxdiff = gdaltest.compare_ds(ds, ds_ref,verbose=0)

    # Difference between the image before and after compression.
    # Bug fix: the failure diagnostics used to run after ds/ds_ref had
    # been released (set to None), which raised AttributeError instead of
    # printing the checksums and the verbose comparison.
    if maxdiff > 17:
        print(ds.GetRasterBand(1).Checksum())
        print(ds_ref.GetRasterBand(1).Checksum())
        gdaltest.compare_ds(ds, ds_ref,verbose=1)
        gdaltest.post_reason('Image too different from reference')
        ds = None
        ds_ref = None
        return 'fail'

    ds = None
    ds_ref = None
    return 'success'
###############################################################################
def mrsid_online_4():
    """Compare downloaded Bretagne2.j2k against its .bmp reference."""
    if gdaltest.jp2mrsid_drv is None:
        return 'skip'

    if not gdaltest.download_file('http://www.openjpeg.org/samples/Bretagne2.j2k', 'Bretagne2.j2k'):
        return 'skip'
    if not gdaltest.download_file('http://www.openjpeg.org/samples/Bretagne2.bmp', 'Bretagne2.bmp'):
        return 'skip'

    # Checksum = 53186 on my PC
    tst = gdaltest.GDALTest( 'JP2MrSID', 'tmp/cache/Bretagne2.j2k', 1, None, filename_absolute = 1 )

    if tst.testOpen() != 'success':
        return 'fail'

    ds = gdal.Open('tmp/cache/Bretagne2.j2k')
    ds_ref = gdal.Open('tmp/cache/Bretagne2.bmp')
    maxdiff = gdaltest.compare_ds(ds, ds_ref, width = 256, height = 256)

    # Difference between the image before and after compression.
    # Bug fix: the failure diagnostics used to run after ds/ds_ref had
    # been released (set to None), raising AttributeError on the failure
    # path instead of printing the checksums.
    if maxdiff > 1:
        print(ds.GetRasterBand(1).Checksum())
        print(ds_ref.GetRasterBand(1).Checksum())
        gdaltest.post_reason('Image too different from reference')
        ds = None
        ds_ref = None
        return 'fail'

    ds = None
    ds_ref = None
    return 'success'
###############################################################################
# Cleanup.
def mrsid_cleanup():
    """Remove PAM side-car files created by the tests and restore drivers."""
    # Bug fix: both removes used to share a single try block, so when the
    # first file was missing the second was never deleted.  Each removal
    # is now independently best-effort, and only OSError (file missing,
    # permissions) is swallowed rather than every exception.
    for aux_file in ( 'data/mercator.sid.aux.xml',
                      'data/mercator_new.sid.aux.xml' ):
        try:
            os.remove( aux_file )
        except OSError:
            pass

    gdaltest.reregister_all_jpeg2000_drivers()

    return 'success'
# Ordered list of test functions executed by the gdaltest harness; the
# cleanup entry must remain last so drivers and side-car files are restored.
gdaltest_list = [
    mrsid_1,
    mrsid_2,
    mrsid_3,
    mrsid_4,
    mrsid_5,
    mrsid_6,
    mrsid_7,
    mrsid_8,
    mrsid_9,
    mrsid_10,
    mrsid_11,
    mrsid_online_1,
    mrsid_online_2,
    mrsid_online_3,
    mrsid_online_4,
    mrsid_cleanup ]
# Standalone invocation: run the whole mrsid test list via gdaltest.
if __name__ == '__main__':
    gdaltest.setup_run( 'mrsid' )
    gdaltest.run_tests( gdaltest_list )
    gdaltest.summarize()
|
nextgis-extra/tests
|
lib_gdal/gdrivers/mrsid.py
|
Python
|
gpl-2.0
| 18,492
|
# Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
from oslo_serialization import jsonutils as json
from six.moves import urllib
from tempest.lib.api_schema.response.compute.v2_1 import versions as schema
from tempest.lib.common import rest_client
from tempest.lib.services.compute import base_compute_client
class VersionsClient(base_compute_client.BaseComputeClient):
    """Client for the compute version-discovery endpoints."""

    def _get_base_version_url(self):
        # NOTE: The URL which is got from keystone's catalog contains
        # API version and project-id like "/app-name/v2/{project-id}" or
        # "/v2/{project-id}", but we need to access the URL which doesn't
        # contain API version for getting API versions. For that, here
        # should use raw_request() instead of get().
        endpoint = self.base_url
        url = urllib.parse.urlsplit(endpoint)
        # Keep only what precedes the "/vN" or "/vN.M" path component,
        # preserving any application prefix before it.
        new_path = re.split(r'(^|/)+v\d+(\.\d+)?', url.path)[0]
        url = list(url)
        url[2] = new_path + '/'
        return urllib.parse.urlunsplit(url)

    def list_versions(self):
        """List all API version documents exposed at the service root."""
        version_url = self._get_base_version_url()

        resp, body = self.raw_request(version_url, 'GET')
        body = json.loads(body)
        self.validate_response(schema.list_versions, resp, body)
        return rest_client.ResponseBody(resp, body)

    def get_version_by_url(self, version_url):
        """Get the version document by url.

        This gets the version document for a url, useful in testing
        the contents of things like /v2/ or /v2.1/ in Nova. That
        controller needs authenticated access, so we have to get
        ourselves a token before making the request.
        """
        # we need a token for this request
        resp, body = self.raw_request(version_url, 'GET',
                                      {'X-Auth-Token': self.token})
        body = json.loads(body)
        self.validate_response(schema.get_one_version, resp, body)
        return rest_client.ResponseBody(resp, body)
|
HybridF5/tempest_debug
|
tempest/lib/services/compute/versions_client.py
|
Python
|
apache-2.0
| 2,531
|
"""
This module provides tools.
.. moduleauthor:: Felix Mueller
.. moduleauthor:: Pascal Wittmann
:copyright: PenchY Developers 2011-2012, see AUTHORS
:license: MIT License, see LICENSE
"""
import os.path
from penchy.jobs.elements import Tool
from penchy.jobs.hooks import Hook
from penchy.jobs.typecheck import Types
from penchy.maven import MavenDependency
class Tamiflex(Tool):
    """
    This tool implements the play-out agent of tamiflex. The play-out agent has no
    configuration options. For general usage information visit the
    `tamiflex homepage <http://code.google.com/p/tamiflex/>`_.

    Outputs:

    - ``reflection_log``: log file of all uses of the reflection API
    - ``classfolder``: folder of all classes that were used (including generated)
    """
    # Maven coordinates of the play-out agent jar, resolved at run time.
    _POA = MavenDependency(
        'de.tu_darmstadt.penchy',
        'poa',
        '2.0.0.0',
        'http://mvn.0x0b.de',
        checksum='df4418bed92205e4f27135bbf077895bd4c8c652')

    DEPENDENCIES = set((_POA,))

    outputs = Types(('reflection_log', list, str),
                    ('classfolder', list, str))

    def __init__(self, name=None):
        """
        :param name: descriptive name of this tool
        :type name: str
        """
        super(Tamiflex, self).__init__(name)
        # The agent writes refl.log and the class dump under ./out; record
        # their absolute paths once the run has finished (teardown hooks).
        self.hooks.extend([
            Hook(teardown=lambda: self.out['reflection_log']
                 .append(os.path.abspath('out/refl.log'))),
            Hook(teardown=lambda: self.out['classfolder']
                 .append(os.path.abspath('out/')))])

    @property
    def arguments(self):
        # JVM flag that attaches the play-out agent jar.
        return ["-javaagent:%s" % Tamiflex._POA.filename]
class HProf(Tool):
    """
    This tool implements the hprof agent. Valid
    options can be obtained with the command::

        java -agentlib:hprof=help

    For example: The instruction::

        t = tools.HProf('heap=dump')

    extends the commandline of the jvm about::

        -agentlib:hprof=heap=dump

    Outputs:

    - ``hprof``: HProf output, i.e. the path to the java.hprof.txt file
    """
    # hprof ships with the JVM, so no Maven artifacts are required.
    DEPENDENCIES = set()

    outputs = Types(('hprof', list, str))

    def __init__(self, option, name=None):
        """
        :param option: the argument for hprof
        :type option: str
        :param name: descriptive name of this tool
        :type name: str
        """
        super(HProf, self).__init__(name)
        # chooses always the right file because a new directory
        # is generated for each invocation
        self.hooks.append(Hook(teardown=lambda: self.out['hprof']
                               .append(os.path.abspath('java.hprof.txt'))))
        self.option = option

    @property
    def arguments(self):
        # JVM flag enabling the hprof agent with the configured options.
        return ["-agentlib:hprof={0}".format(self.option)]
|
fhirschmann/penchy
|
penchy/jobs/tools.py
|
Python
|
mit
| 2,762
|
#!/usr/bin/python
#
# Copyright (c) 2017 Yawei Wang, <yaweiw@microsoft.com>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_containerregistry
version_added: "2.5"
short_description: Manage an Azure Container Registry.
description:
- Create, update and delete an Azure Container Registry.
options:
resource_group:
description:
- Name of a resource group where the Container Registry exists or will be created.
required: true
name:
description:
- Name of the Container Registry.
required: true
state:
description:
- Assert the state of the container registry. Use C(present) to create or update an container registry and C(absent) to delete it.
default: present
choices:
- absent
- present
location:
description:
- Valid azure location. Defaults to location of the resource group.
admin_user_enabled:
description:
- If enabled, you can use the registry name as username and admin user access key as password to docker login to your container registry.
type: bool
default: no
sku:
description:
- Specifies the SKU to use. Currently can be either Basic, Standard or Premium.
default: Standard
choices:
- Basic
- Standard
- Premium
extends_documentation_fragment:
- azure
- azure_tags
author:
- "Yawei Wang (@yaweiw)"
'''
EXAMPLES = '''
- name: Create an azure container registry
azure_rm_containerregistry:
name: testacr1
location: eastus
resource_group: myResourceGroup
admin_user_enabled: true
sku: Premium
tags:
Release: beta1
Environment: Production
- name: Remove an azure container registry
azure_rm_containerregistry:
name: testacr2
resource_group: myResourceGroup
state: absent
'''
RETURN = '''
id:
description:
- Resource ID
returned: always
type: str
sample: /subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.ContainerRegistry/registries/myRegistry
name:
description:
- Registry name
returned: always
type: str
sample: myregistry
location:
description:
- Resource location
returned: always
type: str
sample: westus
admin_user_enabled:
description:
- Is admin user enabled
returned: always
type: bool
sample: true
sku:
description:
- SKU
returned: always
type: str
sample: Standard
provisioning_state:
description:
- Provisioning state
returned: always
type: str
sample: Succeeded
login_server:
description:
- Registry login server
returned: always
type: str
sample: myregistry.azurecr.io
credentials:
description:
- Passwords defined for the registry
returned: always
type: complex
contains:
password:
description:
- password value
returned: when registry exists and C(admin_user_enabled) is set
type: str
sample: pass1value
password2:
description:
- password2 value
returned: when registry exists and C(admin_user_enabled) is set
type: str
sample: pass2value
tags:
description:
- Tags
returned: always
type: dict
'''
from ansible.module_utils.azure_rm_common import AzureRMModuleBase
try:
from msrestazure.azure_exceptions import CloudError
from azure.mgmt.containerregistry.models import (
Registry,
RegistryUpdateParameters,
StorageAccountProperties,
Sku,
SkuName,
SkuTier,
ProvisioningState,
PasswordName,
WebhookCreateParameters,
WebhookUpdateParameters,
WebhookAction,
WebhookStatus
)
from azure.mgmt.containerregistry import ContainerRegistryManagementClient
except ImportError as exc:
# This is handled in azure_rm_common
pass
def create_containerregistry_dict(registry, credentials):
    '''
    Serialize a ContainerRegistry (plus optional credentials) to a plain dict.

    :param: registry: return container registry object from Azure rest API call
    :param: credentials: return credential objects from Azure rest API call
    :return: dict of return container registry and it's credentials
    '''
    # A single None guard replaces the per-field conditionals: when no
    # registry object is available every field falls back to "".
    if registry is None:
        results = dict(
            id="",
            name="",
            location="",
            admin_user_enabled="",
            sku="",
            provisioning_state="",
            login_server="",
            credentials=dict(),
            tags=""
        )
    else:
        results = dict(
            id=registry.id,
            name=registry.name,
            location=registry.location,
            admin_user_enabled=registry.admin_user_enabled,
            sku=registry.sku.name,
            provisioning_state=registry.provisioning_state,
            login_server=registry.login_server,
            credentials=dict(),
            tags=registry.tags
        )
    if credentials:
        results['credentials'] = dict(
            password=credentials.passwords[0].value,
            password2=credentials.passwords[1].value
        )
    return results
class Actions:
    """Reconcile actions for the registry resource (ordinal constants)."""
    # Explicit values instead of unpacking range(3); same constants.
    NoAction = 0
    Create = 1
    Update = 2
class AzureRMContainerRegistry(AzureRMModuleBase):
    """Configuration class for an Azure RM container registry resource."""

    def __init__(self):
        self.module_arg_spec = dict(
            resource_group=dict(
                type='str',
                required=True
            ),
            name=dict(
                type='str',
                required=True
            ),
            state=dict(
                type='str',
                default='present',
                choices=['present', 'absent']
            ),
            location=dict(
                type='str'
            ),
            admin_user_enabled=dict(
                type='bool',
                default=False
            ),
            sku=dict(
                type='str',
                default='Standard',
                choices=['Basic', 'Standard', 'Premium']
            )
        )

        self.resource_group = None
        self.name = None
        self.location = None
        self.state = None
        self.sku = None
        self.tags = None

        self.results = dict(changed=False, state=dict())

        super(AzureRMContainerRegistry, self).__init__(
            derived_arg_spec=self.module_arg_spec,
            supports_check_mode=True,
            supports_tags=True)

    def exec_module(self, **kwargs):
        """Main module execution method"""
        for key in list(self.module_arg_spec.keys()) + ['tags']:
            setattr(self, key, kwargs[key])

        resource_group = None
        response = None
        to_do = Actions.NoAction

        resource_group = self.get_resource_group(self.resource_group)
        if not self.location:
            # Default the registry location to the resource group's.
            self.location = resource_group.location

        # Check if the container registry instance already present in the RG
        if self.state == 'present':
            response = self.get_containerregistry()
            if not response:
                to_do = Actions.Create
            else:
                self.log('Results : {0}'.format(response))
                self.results.update(response)
                if response['provisioning_state'] == "Succeeded":
                    to_do = Actions.NoAction
                    if (self.location is not None) and self.location != response['location']:
                        to_do = Actions.Update
                    # Bug fix: this branch previously compared self.location
                    # against the existing SKU, so a requested SKU change
                    # was never detected.
                    elif (self.sku is not None) and self.sku != response['sku']:
                        to_do = Actions.Update
                else:
                    to_do = Actions.NoAction

            self.log("Create / Update the container registry instance")
            if self.check_mode:
                return self.results

            self.results.update(self.create_update_containerregistry(to_do))
            if to_do != Actions.NoAction:
                self.results['changed'] = True
            else:
                self.results['changed'] = False

            self.log("Container registry instance created or updated")
        elif self.state == 'absent':
            if self.check_mode:
                return self.results
            self.delete_containerregistry()
            self.log("Container registry instance deleted")

        return self.results

    def create_update_containerregistry(self, to_do):
        '''
        Creates or updates a container registry.

        :param to_do: one of the Actions constants deciding create/update/no-op
        :return: deserialized container registry instance state dictionary
        '''
        self.log("Creating / Updating the container registry instance {0}".format(self.name))
        try:
            if to_do != Actions.NoAction:
                if to_do == Actions.Create:
                    # Registry names are globally scoped; verify first.
                    name_status = self.containerregistry_client.registries.check_name_availability(self.name)
                    if name_status.name_available:
                        poller = self.containerregistry_client.registries.create(
                            resource_group_name=self.resource_group,
                            registry_name=self.name,
                            registry=Registry(
                                location=self.location,
                                sku=Sku(
                                    name=self.sku
                                ),
                                tags=self.tags,
                                admin_user_enabled=self.admin_user_enabled
                            )
                        )
                    else:
                        raise Exception("Invalid registry name. reason: " + name_status.reason + " message: " + name_status.message)
                else:
                    registry = self.containerregistry_client.registries.get(self.resource_group, self.name)
                    if registry is not None:
                        poller = self.containerregistry_client.registries.update(
                            resource_group_name=self.resource_group,
                            registry_name=self.name,
                            registry_update_parameters=RegistryUpdateParameters(
                                sku=Sku(
                                    name=self.sku
                                ),
                                tags=self.tags,
                                admin_user_enabled=self.admin_user_enabled
                            )
                        )
                    else:
                        raise Exception("Update registry failed as registry '" + self.name + "' doesn't exist.")
                response = self.get_poller_result(poller)
                if self.admin_user_enabled:
                    credentials = self.containerregistry_client.registries.list_credentials(self.resource_group, self.name)
                else:
                    self.log('Cannot perform credential operations as admin user is disabled')
                    credentials = None
            else:
                response = None
                credentials = None
        except (CloudError, Exception) as exc:
            self.log('Error attempting to create / update the container registry instance.')
            self.fail("Error creating / updating the container registry instance: {0}".format(str(exc)))
        return create_containerregistry_dict(response, credentials)

    def delete_containerregistry(self):
        '''
        Deletes the specified container registry in the specified subscription and resource group.

        :return: True
        '''
        self.log("Deleting the container registry instance {0}".format(self.name))
        try:
            self.containerregistry_client.registries.delete(self.resource_group, self.name).wait()
        except CloudError as e:
            self.log('Error attempting to delete the container registry instance.')
            self.fail("Error deleting the container registry instance: {0}".format(str(e)))

        return True

    def get_containerregistry(self):
        '''
        Gets the properties of the specified container registry.

        :return: deserialized container registry state dictionary, or None
                 when the registry does not exist
        '''
        self.log("Checking if the container registry instance {0} is present".format(self.name))
        found = False
        try:
            response = self.containerregistry_client.registries.get(self.resource_group, self.name)
            found = True
            self.log("Response : {0}".format(response))
            self.log("Container registry instance : {0} found".format(response.name))
        except CloudError as e:
            # Only "not found" is expected; anything else is a real error.
            if e.error.error == 'ResourceNotFound':
                self.log('Did not find the container registry instance: {0}'.format(str(e)))
            else:
                self.fail('Error while trying to get container registry instance: {0}'.format(str(e)))
            response = None
        if found is True and self.admin_user_enabled is True:
            try:
                credentials = self.containerregistry_client.registries.list_credentials(self.resource_group, self.name)
            except CloudError as e:
                self.fail('List registry credentials failed: {0}'.format(str(e)))
                credentials = None
        elif found is True and self.admin_user_enabled is False:
            credentials = None
        else:
            return None
        return create_containerregistry_dict(response, credentials)
def main():
    """Module entry point: instantiating the class runs the module logic."""
    AzureRMContainerRegistry()
if __name__ == '__main__':
main()
|
dagwieers/ansible
|
lib/ansible/modules/cloud/azure/azure_rm_containerregistry.py
|
Python
|
gpl-3.0
| 14,141
|
# -*- coding: utf-8 -*-
"""
Wikipedia channel for IFTTT
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Copyright 2015 Ori Livneh <ori@wikimedia.org>
Stephen LaPorte <stephen.laporte@gmail.com>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import datetime
import re
import time
import uuid
import socket
def snake_case(s):
    """Convert a CamelCase identifier to snake_case.

    Acronym runs are kept together, e.g. ``getHTTPResponse`` becomes
    ``get_http_response``.
    """
    partially_split = re.sub(r'(.)([A-Z][a-z]+)', r'\1_\2', s)
    fully_split = re.sub(r'([a-z0-9])([A-Z])', r'\1_\2', partially_split)
    return fully_split.lower()
def select(element, selector):
    """Return the first node matching *selector* under *element*.

    Thin wrapper over ``element.cssselect`` that drops the list; raises
    IndexError when nothing matches, same as indexing the result directly.
    """
    return element.cssselect(selector)[0]
def url_to_uuid5(url):
    """Return the RFC 4122 version-5 UUID for *url* as a 36-char string.

    The URL is passed to ``uuid.uuid5`` as text: on Python 3, ``uuid5``
    encodes str names itself, and handing it pre-encoded bytes raises
    ``TypeError`` on Python < 3.12 (the old ``url.encode('utf-8')`` call
    was a Python 2 leftover).
    """
    return str(uuid.uuid5(uuid.NAMESPACE_URL, url))
def utc_to_iso8601(struct_time):
    """Return the ISO 8601 date (``YYYY-MM-DD``) for a UTC ``struct_time``.

    The input is interpreted as UTC via ``calendar.timegm``; the previous
    ``time.mktime`` call wrongly interpreted the (UTC) struct_time in the
    server's local timezone, which could shift dates near midnight.

    Note: despite the function name, only the date portion is returned --
    that is the existing caller-visible contract and is preserved here.
    """
    import calendar
    as_datetime = datetime.datetime.utcfromtimestamp(calendar.timegm(struct_time))
    return as_datetime.date().isoformat()
def utc_to_epoch(struct_time):
    """Return seconds since the Unix epoch for a UTC ``struct_time``.

    Uses ``calendar.timegm``, the documented inverse of ``time.gmtime``;
    the previous ``time.mktime`` call treated the UTC struct_time as local
    time, skewing the result by the server's UTC offset.
    """
    import calendar
    return calendar.timegm(struct_time)
def iso8601_to_epoch(iso_time):
    """Return the Unix timestamp for a UTC stamp like ``2015-01-02T03:04:05Z``.

    Raises ValueError if *iso_time* does not match that exact format.
    """
    parsed = datetime.datetime.strptime(iso_time, '%Y-%m-%dT%H:%M:%SZ')
    unix_epoch = datetime.datetime(1970, 1, 1)
    return int((parsed - unix_epoch).total_seconds())
def is_valid_ip(address):
    """Return True when *address* parses as an IPv4 or IPv6 address.

    IPv4 is checked with ``inet_aton`` (which also accepts short forms
    such as ``'127.1'``), IPv6 with ``inet_pton``.
    """
    parsers = (
        lambda addr: socket.inet_aton(addr),                    # IPv4
        lambda addr: socket.inet_pton(socket.AF_INET6, addr),   # IPv6
    )
    for parse in parsers:
        try:
            parse(address)
        except socket.error:
            continue
        return True
    return False
# From boltons
HASHTAG_RE = re.compile(r"(?:^|\s)[##]{1}(\w+)", re.UNICODE)


def find_hashtags(string):
    """Return every hashtag in *string*, leading hashmark stripped.

    Both ASCII and full-width hashmarks (used in Asian-language text) are
    recognised.  URL anchors such as ``#fragment`` do not match, because a
    hashmark must follow whitespace or start-of-string.

    >>> find_hashtags('#atag http://asite/#ananchor')
    ['atag']
    """
    # the following works, doctest just struggles with it
    # >>> find_hashtags(u"can't get enough of that dignity chicken #肯德基 woo")
    # [u'\u80af\u5fb7\u57fa']
    return HASHTAG_RE.findall(string)
|
wikimedia/ifttt
|
ifttt/utils.py
|
Python
|
apache-2.0
| 2,768
|
import os
import threading
import time
import pytest
import splinter
import waitress
from .demo import app
APP_HTTP_PORT = 65431
APP_URL = f'http://0.0.0.0:{APP_HTTP_PORT}/'
WAIT_DELAY = 0.5
# See .travis.yml to know why we use Chrome by default. You are
# encouraged to also test with `DRIVER_NAME="firefox" pytest` locally.
DRIVER_NAME = os.environ.get('DRIVER_NAME', 'chrome')
class ServerThread(threading.Thread):
    """Serve the demo app with waitress from a background thread."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # A better way would be to let the system choose an available
        # port. Passing `port=0` would work but then we would need the
        # child to communicate the port number to the parent process.
        # Let's do something simple instead.
        self.server = waitress.server.create_server(
            app.make_app({}), port=APP_HTTP_PORT, expose_tracebacks=True
        )

    def run(self):
        # Blocks serving requests for the lifetime of the thread; callers
        # start this thread as a daemon (see the `server` fixture) so it
        # does not keep the process alive.
        self.server.run()
@pytest.fixture(scope='module')
def server():
    """Module-scoped fixture: run the demo app server in a daemon thread."""
    thread = ServerThread(daemon=True)
    thread.start()
    yield thread.server
    # Zero-timeout join: don't wait for the daemon thread at teardown.
    thread.join(0)
@pytest.fixture(scope='module')
def browser(wait_time=2):
    # NOTE(review): `wait_time` is never used in the body (it is not passed
    # to splinter.Browser); consider removing it or wiring it through.
    """Module-scoped fixture: a splinter browser for DRIVER_NAME."""
    with splinter.Browser(DRIVER_NAME) as b:
        yield b
def find_links(parent, text):
    """Return all anchor elements under *parent* whose text equals *text*.

    (`parent.links.find_by_text()` does not return the expected results,
    hence the manual scan over every ``<a>`` tag.)
    """
    matching = []
    for anchor in parent.find_by_tag('a'):
        if anchor.text == text:
            matching.append(anchor)
    return matching
def _check_autocompletion(b, field):
    """Type into *field* and verify the autocomplete suggestions narrow.

    Leaves the field filled with 'Jimi Hendrix' (by clicking the matching
    suggestion), ready for the calling test to submit the form.
    """
    assert not find_links(b, 'Jimi Hendrix')
    field.fill('jim')
    # Suggestions arrive asynchronously; give the widget time to render.
    time.sleep(WAIT_DELAY)
    assert find_links(b, 'Jimi Hendrix')
    assert find_links(b, 'Jimmy Page')
    field.type('i')
    time.sleep(WAIT_DELAY)
    # 'jimi' now matches only one of the two suggestions.
    assert find_links(b, 'Jimi Hendrix')
    assert not find_links(b, 'Jimmy Page')
    link = find_links(b, 'Jimi Hendrix')[0]
    link.click()
    assert field.value == 'Jimi Hendrix'
def test_add_form_single(server, browser):  # pylint: disable=redefined-outer-name
    """Autocomplete and submit the single-person form; the person id is saved."""
    b = browser
    b.visit(APP_URL)
    form = b.find_by_id('add_single').first
    assert not find_links(b, 'Jimi Hendrix')
    field = form.find_by_name('person_autocomplete').first
    _check_autocompletion(b, field)
    save = form.find_by_tag('button').first
    save.click()
    assert '/saved' in b.url
    # NOTE(review): is_text_present's return value is ignored here -- it
    # should probably be asserted.
    b.is_text_present('The following data have been saved')
    saved_data = b.find_by_tag('pre').value
    assert saved_data == "{'person': 'jhendrix'}"
def test_add_form_multiple(server, browser):  # pylint: disable=redefined-outer-name
    """Add two people through the multi-person form; both ids are saved in order."""
    b = browser
    b.visit(APP_URL)
    form = b.find_by_id('add_multiple').first
    field = form.find_by_name('person_autocomplete').first
    _check_autocompletion(b, field)
    # Add a second autocomplete row and fill it with a different person.
    add_person = form.find_by_text('Add Person')
    add_person.click()
    field2 = form.find_by_name('person_autocomplete').last
    assert not find_links(b, 'John Bonham')
    field2.fill('john')
    time.sleep(WAIT_DELAY)
    bonham = find_links(b, 'John Bonham')[0]
    assert bonham
    bonham.click()
    save = form.find_by_tag('button').first
    save.click()
    assert '/saved' in b.url
    # NOTE(review): is_text_present's return value is ignored here -- it
    # should probably be asserted.
    b.is_text_present('The following data have been saved')
    saved_data = b.find_by_tag('pre').value
    assert saved_data == "{'persons': ['jhendrix', 'jbonham']}"
def test_edit_form_multiple(server, browser):  # pylint: disable=redefined-outer-name
    """Edit the first pre-filled person in the multi-person form and re-save."""
    b = browser
    b.visit(APP_URL)
    form = b.find_by_id('edit_multiple').first
    fields = form.find_by_name('person_autocomplete')
    field1, field2 = fields.first, fields.last
    # The edit form starts pre-populated with two people.
    assert field1.value == 'Jimmy Page'
    assert field2.value == 'John Bonham'
    field1.fill('jimi')
    time.sleep(WAIT_DELAY)
    hendrix = find_links(b, 'Jimi Hendrix')[0]
    hendrix.click()
    assert field1.value == 'Jimi Hendrix'
    save = form.find_by_tag('button').first
    save.click()
    assert '/saved' in b.url
    # NOTE(review): is_text_present's return value is ignored here -- it
    # should probably be asserted.
    b.is_text_present('The following data have been saved')
    saved_data = b.find_by_tag('pre').value
    assert saved_data == "{'persons': ['jhendrix', 'jbonham']}"
def test_read_only_form(server, browser):  # pylint: disable=redefined-outer-name
    """The read-only form renders names as plain text, not editable inputs."""
    b = browser
    b.visit(APP_URL)
    form = b.find_by_id('read_only').first
    inputs = list(form.find_by_tag('input'))
    # Only the hidden form marker remains as an <input>.
    assert len(inputs) == 1
    # NOTE(review): reaches into splinter's private `_element` attribute to
    # get the underlying selenium element -- fragile across splinter versions.
    assert inputs[0]._element.get_attribute('name') == 'submitted_form'
    paragraphs = list(form.find_by_tag('p'))
    assert len(paragraphs) == 2
    assert paragraphs[0].value == 'Jimmy Page'
    assert paragraphs[1].value == 'John Bonham'
|
dbaty/deform_ext_autocomplete
|
tests/test_browser.py
|
Python
|
bsd-3-clause
| 4,511
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from enum import Enum
from six import with_metaclass
from azure.core import CaseInsensitiveEnumMeta
class AccessControlEntryAction(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Action object.
"""
PERMIT = "Permit"
DENY = "Deny"
class AppServiceCertificateOrderPatchResourcePropertiesAppServiceCertificateNotRenewableReasonsItem(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """Reason an App Service certificate order is not renewable (patch resource)."""

    REGISTRATION_STATUS_NOT_SUPPORTED_FOR_RENEWAL = "RegistrationStatusNotSupportedForRenewal"
    EXPIRATION_NOT_IN_RENEWAL_TIME_RANGE = "ExpirationNotInRenewalTimeRange"
    SUBSCRIPTION_NOT_ACTIVE = "SubscriptionNotActive"
class AppServiceCertificateOrderPropertiesAppServiceCertificateNotRenewableReasonsItem(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """Reason an App Service certificate order is not renewable."""

    REGISTRATION_STATUS_NOT_SUPPORTED_FOR_RENEWAL = "RegistrationStatusNotSupportedForRenewal"
    EXPIRATION_NOT_IN_RENEWAL_TIME_RANGE = "ExpirationNotInRenewalTimeRange"
    SUBSCRIPTION_NOT_ACTIVE = "SubscriptionNotActive"
class AppServicePlanRestrictions(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""App Service plans this offer is restricted to.
"""
NONE = "None"
FREE = "Free"
SHARED = "Shared"
BASIC = "Basic"
STANDARD = "Standard"
PREMIUM = "Premium"
class AutoHealActionType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Predefined action to be taken.
"""
RECYCLE = "Recycle"
LOG_EVENT = "LogEvent"
CUSTOM_ACTION = "CustomAction"
class AzureResourceType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Type of the Azure resource the hostname is assigned to.
"""
WEBSITE = "Website"
TRAFFIC_MANAGER = "TrafficManager"
class AzureStorageState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""State of the storage account.
"""
OK = "Ok"
INVALID_CREDENTIALS = "InvalidCredentials"
INVALID_SHARE = "InvalidShare"
class AzureStorageType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Type of storage.
"""
AZURE_FILES = "AzureFiles"
AZURE_BLOB = "AzureBlob"
class BackupItemStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Backup status.
"""
IN_PROGRESS = "InProgress"
FAILED = "Failed"
SUCCEEDED = "Succeeded"
TIMED_OUT = "TimedOut"
CREATED = "Created"
SKIPPED = "Skipped"
PARTIALLY_SUCCEEDED = "PartiallySucceeded"
DELETE_IN_PROGRESS = "DeleteInProgress"
DELETE_FAILED = "DeleteFailed"
DELETED = "Deleted"
class BackupRestoreOperationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Operation type.
"""
DEFAULT = "Default"
CLONE = "Clone"
RELOCATION = "Relocation"
SNAPSHOT = "Snapshot"
CLOUD_FS = "CloudFS"
class BuildStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""The status of the static site build.
"""
WAITING_FOR_DEPLOYMENT = "WaitingForDeployment"
UPLOADING = "Uploading"
DEPLOYING = "Deploying"
READY = "Ready"
FAILED = "Failed"
DELETING = "Deleting"
DETACHED = "Detached"
class BuiltInAuthenticationProvider(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""The default authentication provider to use when multiple providers are configured.
This setting is only needed if multiple providers are configured and the unauthenticated client
action is set to "RedirectToLoginPage".
"""
AZURE_ACTIVE_DIRECTORY = "AzureActiveDirectory"
FACEBOOK = "Facebook"
GOOGLE = "Google"
MICROSOFT_ACCOUNT = "MicrosoftAccount"
TWITTER = "Twitter"
class CertificateOrderActionType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Action type.
"""
CERTIFICATE_ISSUED = "CertificateIssued"
CERTIFICATE_ORDER_CANCELED = "CertificateOrderCanceled"
CERTIFICATE_ORDER_CREATED = "CertificateOrderCreated"
CERTIFICATE_REVOKED = "CertificateRevoked"
DOMAIN_VALIDATION_COMPLETE = "DomainValidationComplete"
FRAUD_DETECTED = "FraudDetected"
ORG_NAME_CHANGE = "OrgNameChange"
ORG_VALIDATION_COMPLETE = "OrgValidationComplete"
SAN_DROP = "SanDrop"
FRAUD_CLEARED = "FraudCleared"
CERTIFICATE_EXPIRED = "CertificateExpired"
CERTIFICATE_EXPIRATION_WARNING = "CertificateExpirationWarning"
FRAUD_DOCUMENTATION_REQUIRED = "FraudDocumentationRequired"
UNKNOWN = "Unknown"
class CertificateOrderStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Current order status.
"""
PENDINGISSUANCE = "Pendingissuance"
ISSUED = "Issued"
REVOKED = "Revoked"
CANCELED = "Canceled"
DENIED = "Denied"
PENDINGREVOCATION = "Pendingrevocation"
PENDING_REKEY = "PendingRekey"
UNUSED = "Unused"
EXPIRED = "Expired"
NOT_SUBMITTED = "NotSubmitted"
class CertificateProductType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Certificate product type.
"""
STANDARD_DOMAIN_VALIDATED_SSL = "StandardDomainValidatedSsl"
STANDARD_DOMAIN_VALIDATED_WILD_CARD_SSL = "StandardDomainValidatedWildCardSsl"
class Channels(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""List of channels that this recommendation can apply.
"""
NOTIFICATION = "Notification"
API = "Api"
EMAIL = "Email"
WEBHOOK = "Webhook"
ALL = "All"
class CheckNameResourceTypes(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Resource type used for verification.
"""
SITE = "Site"
SLOT = "Slot"
HOSTING_ENVIRONMENT = "HostingEnvironment"
PUBLISHING_USER = "PublishingUser"
MICROSOFT_WEB_SITES = "Microsoft.Web/sites"
MICROSOFT_WEB_SITES_SLOTS = "Microsoft.Web/sites/slots"
MICROSOFT_WEB_HOSTING_ENVIRONMENTS = "Microsoft.Web/hostingEnvironments"
MICROSOFT_WEB_PUBLISHING_USERS = "Microsoft.Web/publishingUsers"
class CloneAbilityResult(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Name of app.
"""
CLONEABLE = "Cloneable"
PARTIALLY_CLONEABLE = "PartiallyCloneable"
NOT_CLONEABLE = "NotCloneable"
class ComputeModeOptions(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Shared/dedicated workers.
"""
SHARED = "Shared"
DEDICATED = "Dedicated"
DYNAMIC = "Dynamic"
class ConnectionStringType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Type of database.
"""
MY_SQL = "MySql"
SQL_SERVER = "SQLServer"
SQL_AZURE = "SQLAzure"
CUSTOM = "Custom"
NOTIFICATION_HUB = "NotificationHub"
SERVICE_BUS = "ServiceBus"
EVENT_HUB = "EventHub"
API_HUB = "ApiHub"
DOC_DB = "DocDb"
REDIS_CACHE = "RedisCache"
POSTGRE_SQL = "PostgreSQL"
class ContinuousWebJobStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Job status.
"""
INITIALIZING = "Initializing"
STARTING = "Starting"
RUNNING = "Running"
PENDING_RESTART = "PendingRestart"
STOPPED = "Stopped"
class CustomHostNameDnsRecordType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Type of the DNS record.
"""
C_NAME = "CName"
A = "A"
class DatabaseType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Database type (e.g. SqlAzure / MySql).
"""
SQL_AZURE = "SqlAzure"
MY_SQL = "MySql"
LOCAL_MY_SQL = "LocalMySql"
POSTGRE_SQL = "PostgreSql"
class DnsType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Current DNS type
"""
AZURE_DNS = "AzureDns"
DEFAULT_DOMAIN_REGISTRAR_DNS = "DefaultDomainRegistrarDns"
class DnsVerificationTestResult(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""DNS verification test result.
"""
PASSED = "Passed"
FAILED = "Failed"
SKIPPED = "Skipped"
class DomainPatchResourcePropertiesDomainNotRenewableReasonsItem(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
REGISTRATION_STATUS_NOT_SUPPORTED_FOR_RENEWAL = "RegistrationStatusNotSupportedForRenewal"
EXPIRATION_NOT_IN_RENEWAL_TIME_RANGE = "ExpirationNotInRenewalTimeRange"
SUBSCRIPTION_NOT_ACTIVE = "SubscriptionNotActive"
class DomainPropertiesDomainNotRenewableReasonsItem(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
REGISTRATION_STATUS_NOT_SUPPORTED_FOR_RENEWAL = "RegistrationStatusNotSupportedForRenewal"
EXPIRATION_NOT_IN_RENEWAL_TIME_RANGE = "ExpirationNotInRenewalTimeRange"
SUBSCRIPTION_NOT_ACTIVE = "SubscriptionNotActive"
class DomainStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Domain registration status.
"""
ACTIVE = "Active"
AWAITING = "Awaiting"
CANCELLED = "Cancelled"
CONFISCATED = "Confiscated"
DISABLED = "Disabled"
EXCLUDED = "Excluded"
EXPIRED = "Expired"
FAILED = "Failed"
HELD = "Held"
LOCKED = "Locked"
PARKED = "Parked"
PENDING = "Pending"
RESERVED = "Reserved"
REVERTED = "Reverted"
SUSPENDED = "Suspended"
TRANSFERRED = "Transferred"
UNKNOWN = "Unknown"
UNLOCKED = "Unlocked"
UNPARKED = "Unparked"
UPDATED = "Updated"
JSON_CONVERTER_FAILED = "JsonConverterFailed"
class DomainType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Valid values are Regular domain: Azure will charge the full price of domain registration,
SoftDeleted: Purchasing this domain will simply restore it and this operation will not cost
anything.
"""
REGULAR = "Regular"
SOFT_DELETED = "SoftDeleted"
class Enum4(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    # NOTE(review): auto-generated placeholder name from the swagger spec;
    # presumably an "os type" style query parameter -- confirm against the
    # REST API definition before relying on the name.
    """OS / functions-runtime selector (auto-generated name)."""

    WINDOWS = "Windows"
    LINUX = "Linux"
    WINDOWS_FUNCTIONS = "WindowsFunctions"
    LINUX_FUNCTIONS = "LinuxFunctions"
class Enum5(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    # NOTE(review): auto-generated placeholder name; member set is identical
    # to another generated enum in this module -- both presumably map to an
    # "os type" style query parameter in the swagger spec.
    """OS / functions-runtime selector (auto-generated name)."""

    WINDOWS = "Windows"
    LINUX = "Linux"
    WINDOWS_FUNCTIONS = "WindowsFunctions"
    LINUX_FUNCTIONS = "LinuxFunctions"
class FrequencyUnit(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""The unit of time for how often the backup should be executed (e.g. for weekly backup, this
should be set to Day and FrequencyInterval should be set to 7)
"""
DAY = "Day"
HOUR = "Hour"
class FtpsState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""State of FTP / FTPS service
"""
ALL_ALLOWED = "AllAllowed"
FTPS_ONLY = "FtpsOnly"
DISABLED = "Disabled"
class HostingEnvironmentStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Current status of the App Service Environment.
"""
PREPARING = "Preparing"
READY = "Ready"
SCALING = "Scaling"
DELETING = "Deleting"
class HostNameType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Type of the hostname.
"""
VERIFIED = "Verified"
MANAGED = "Managed"
class HostType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Indicates whether the hostname is a standard or repository hostname.
"""
STANDARD = "Standard"
REPOSITORY = "Repository"
class InAvailabilityReasonType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
""":code:`<code>Invalid</code>` indicates the name provided does not match Azure App Service
naming requirements. :code:`<code>AlreadyExists</code>` indicates that the name is already in
use and is therefore unavailable.
"""
INVALID = "Invalid"
ALREADY_EXISTS = "AlreadyExists"
class InternalLoadBalancingMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Specifies which endpoints to serve internally in the Virtual Network for the App Service
Environment.
"""
NONE = "None"
WEB = "Web"
PUBLISHING = "Publishing"
class IpFilterTag(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Defines what this IP filter will be used for. This is to support IP filtering on proxies.
"""
DEFAULT = "Default"
XFF_PROXY = "XffProxy"
class IssueType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Represents the type of the Detector
"""
SERVICE_INCIDENT = "ServiceIncident"
APP_DEPLOYMENT = "AppDeployment"
APP_CRASH = "AppCrash"
RUNTIME_ISSUE_DETECTED = "RuntimeIssueDetected"
ASE_DEPLOYMENT = "AseDeployment"
USER_ISSUE = "UserIssue"
PLATFORM_ISSUE = "PlatformIssue"
OTHER = "Other"
class KeyVaultSecretStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Status of the Key Vault secret.
"""
INITIALIZED = "Initialized"
WAITING_ON_CERTIFICATE_ORDER = "WaitingOnCertificateOrder"
SUCCEEDED = "Succeeded"
CERTIFICATE_ORDER_FAILED = "CertificateOrderFailed"
OPERATION_NOT_PERMITTED_ON_KEY_VAULT = "OperationNotPermittedOnKeyVault"
AZURE_SERVICE_UNAUTHORIZED_TO_ACCESS_KEY_VAULT = "AzureServiceUnauthorizedToAccessKeyVault"
KEY_VAULT_DOES_NOT_EXIST = "KeyVaultDoesNotExist"
KEY_VAULT_SECRET_DOES_NOT_EXIST = "KeyVaultSecretDoesNotExist"
UNKNOWN_ERROR = "UnknownError"
EXTERNAL_PRIVATE_KEY = "ExternalPrivateKey"
UNKNOWN = "Unknown"
class LogLevel(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Log level.
"""
OFF = "Off"
VERBOSE = "Verbose"
INFORMATION = "Information"
WARNING = "Warning"
ERROR = "Error"
class ManagedPipelineMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Managed pipeline mode.
"""
INTEGRATED = "Integrated"
CLASSIC = "Classic"
class ManagedServiceIdentityType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Type of managed service identity.
"""
SYSTEM_ASSIGNED = "SystemAssigned"
USER_ASSIGNED = "UserAssigned"
SYSTEM_ASSIGNED_USER_ASSIGNED = "SystemAssigned, UserAssigned"
NONE = "None"
class MSDeployLogEntryType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Log entry type
"""
MESSAGE = "Message"
WARNING = "Warning"
ERROR = "Error"
class MSDeployProvisioningState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Provisioning state
"""
ACCEPTED = "accepted"
RUNNING = "running"
SUCCEEDED = "succeeded"
FAILED = "failed"
CANCELED = "canceled"
class MySqlMigrationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""The type of migration operation to be done
"""
LOCAL_TO_REMOTE = "LocalToRemote"
REMOTE_TO_LOCAL = "RemoteToLocal"
class NotificationLevel(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Level indicating how critical this recommendation can impact.
"""
CRITICAL = "Critical"
WARNING = "Warning"
INFORMATION = "Information"
NON_URGENT_SUGGESTION = "NonUrgentSuggestion"
class OperationStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""The current status of the operation.
"""
IN_PROGRESS = "InProgress"
FAILED = "Failed"
SUCCEEDED = "Succeeded"
TIMED_OUT = "TimedOut"
CREATED = "Created"
class ProvisioningState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Status of certificate order.
"""
SUCCEEDED = "Succeeded"
FAILED = "Failed"
CANCELED = "Canceled"
IN_PROGRESS = "InProgress"
DELETING = "Deleting"
class PublicCertificateLocation(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Public Certificate Location
"""
CURRENT_USER_MY = "CurrentUserMy"
LOCAL_MACHINE_MY = "LocalMachineMy"
UNKNOWN = "Unknown"
class PublishingProfileFormat(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Name of the format. Valid values are:
FileZilla3
WebDeploy -- default
Ftp
"""
FILE_ZILLA3 = "FileZilla3"
WEB_DEPLOY = "WebDeploy"
FTP = "Ftp"
class RedundancyMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Site redundancy mode
"""
NONE = "None"
MANUAL = "Manual"
FAILOVER = "Failover"
ACTIVE_ACTIVE = "ActiveActive"
GEO_REDUNDANT = "GeoRedundant"
class RenderingType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Rendering Type
"""
NO_GRAPH = "NoGraph"
TABLE = "Table"
TIME_SERIES = "TimeSeries"
TIME_SERIES_PER_INSTANCE = "TimeSeriesPerInstance"
class ResolveStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
INITIALIZED = "Initialized"
RESOLVED = "Resolved"
INVALID_SYNTAX = "InvalidSyntax"
MSI_NOT_ENABLED = "MSINotEnabled"
VAULT_NOT_FOUND = "VaultNotFound"
SECRET_NOT_FOUND = "SecretNotFound"
SECRET_VERSION_NOT_FOUND = "SecretVersionNotFound"
ACCESS_TO_KEY_VAULT_DENIED = "AccessToKeyVaultDenied"
OTHER_REASONS = "OtherReasons"
class ResourceScopeType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Name of a resource type this recommendation applies, e.g. Subscription, ServerFarm, Site.
"""
SERVER_FARM = "ServerFarm"
SUBSCRIPTION = "Subscription"
WEB_SITE = "WebSite"
class RouteType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""The type of route this is:
DEFAULT - By default, every app has routes to the local address ranges specified by RFC1918
INHERITED - Routes inherited from the real Virtual Network routes
STATIC - Static route set on the app only
These values will be used for syncing an app's routes with those from a Virtual Network.
"""
DEFAULT = "DEFAULT"
INHERITED = "INHERITED"
STATIC = "STATIC"
class ScmType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""SCM type.
"""
NONE = "None"
DROPBOX = "Dropbox"
TFS = "Tfs"
LOCAL_GIT = "LocalGit"
GIT_HUB = "GitHub"
CODE_PLEX_GIT = "CodePlexGit"
CODE_PLEX_HG = "CodePlexHg"
BITBUCKET_GIT = "BitbucketGit"
BITBUCKET_HG = "BitbucketHg"
EXTERNAL_GIT = "ExternalGit"
EXTERNAL_HG = "ExternalHg"
ONE_DRIVE = "OneDrive"
VSO = "VSO"
VSTSRM = "VSTSRM"
class SiteAvailabilityState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Management information availability state for the app.
"""
NORMAL = "Normal"
LIMITED = "Limited"
DISASTER_RECOVERY_MODE = "DisasterRecoveryMode"
class SiteExtensionType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Site extension type.
"""
GALLERY = "Gallery"
WEB_ROOT = "WebRoot"
class SiteLoadBalancing(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Site load balancing.
"""
WEIGHTED_ROUND_ROBIN = "WeightedRoundRobin"
LEAST_REQUESTS = "LeastRequests"
LEAST_RESPONSE_TIME = "LeastResponseTime"
WEIGHTED_TOTAL_TRAFFIC = "WeightedTotalTraffic"
REQUEST_HASH = "RequestHash"
class SiteRuntimeState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
READY = "READY"
STOPPED = "STOPPED"
UNKNOWN = "UNKNOWN"
class SkuName(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
FREE = "Free"
SHARED = "Shared"
BASIC = "Basic"
STANDARD = "Standard"
PREMIUM = "Premium"
DYNAMIC = "Dynamic"
ISOLATED = "Isolated"
PREMIUM_V2 = "PremiumV2"
ELASTIC_PREMIUM = "ElasticPremium"
ELASTIC_ISOLATED = "ElasticIsolated"
class SolutionType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Type of Solution
"""
QUICK_SOLUTION = "QuickSolution"
DEEP_INVESTIGATION = "DeepInvestigation"
BEST_PRACTICES = "BestPractices"
class SslState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""SSL type.
"""
DISABLED = "Disabled"
SNI_ENABLED = "SniEnabled"
IP_BASED_ENABLED = "IpBasedEnabled"
class StatusOptions(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""App Service plan status.
"""
READY = "Ready"
PENDING = "Pending"
CREATING = "Creating"
class SupportedTlsVersions(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""MinTlsVersion: configures the minimum version of TLS required for SSL requests
"""
ONE0 = "1.0"
ONE1 = "1.1"
ONE2 = "1.2"
class TriggeredWebJobStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Job status.
"""
SUCCESS = "Success"
FAILED = "Failed"
ERROR = "Error"
class TriggerTypes(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""The trigger type of the function
"""
HTTP_TRIGGER = "HttpTrigger"
UNKNOWN = "Unknown"
class UnauthenticatedClientAction(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""The action to take when an unauthenticated client attempts to access the app.
"""
REDIRECT_TO_LOGIN_PAGE = "RedirectToLoginPage"
ALLOW_ANONYMOUS = "AllowAnonymous"
class UsageState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""State indicating whether the app has exceeded its quota usage. Read-only.
"""
NORMAL = "Normal"
EXCEEDED = "Exceeded"
class ValidateResourceTypes(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Resource type used for verification.
"""
SERVER_FARM = "ServerFarm"
SITE = "Site"
class WebJobType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Job type.
"""
CONTINUOUS = "Continuous"
TRIGGERED = "Triggered"
class WorkerSizeOptions(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""Size of the machines.
"""
SMALL = "Small"
MEDIUM = "Medium"
LARGE = "Large"
D1 = "D1"
D2 = "D2"
D3 = "D3"
NESTED_SMALL = "NestedSmall"
DEFAULT = "Default"
|
Azure/azure-sdk-for-python
|
sdk/appservice/azure-mgmt-web/azure/mgmt/web/v2019_08_01/models/_web_site_management_client_enums.py
|
Python
|
mit
| 21,622
|
from oscar.core.loading import get_model
from accounts import names
Account = get_model('accounts', 'Account')
def redemptions_account():
    """Return the Account named ``names.REDEMPTIONS``.

    Raises ``Account.DoesNotExist`` if that account has not been created yet.
    """
    return Account.objects.get(name=names.REDEMPTIONS)
def lapsed_account():
    """Return the Account named ``names.LAPSED``.

    Raises ``Account.DoesNotExist`` if that account has not been created yet.
    """
    return Account.objects.get(name=names.LAPSED)
|
michaelkuty/django-oscar-accounts
|
accounts/core.py
|
Python
|
bsd-3-clause
| 271
|
##
# Copyright (c) 2010-2014 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from twext.enterprise.dal.syntax import Delete, Insert, Select, Count
from twisted.internet.defer import inlineCallbacks, succeed, returnValue
from twisted.trial.unittest import TestCase
from twistedcaldav.config import config
from txdav.caldav.datastore.sql import CalendarStoreFeatures
from txdav.caldav.datastore.test.util import CommonStoreTests
from txdav.common.datastore.sql_tables import schema
from txdav.common.datastore.test.util import theStoreBuilder, \
StubNotifierFactory
from txdav.common.datastore.upgrade.sql.others import attachment_migration
from txdav.common.datastore.upgrade.sql.upgrade import UpgradeDatabaseOtherStep
import hashlib
import os
"""
Tests for L{txdav.common.datastore.upgrade.sql.upgrade}.
"""
class AttachmentMigrationModeTests(CommonStoreTests):
    """
    Tests for L{attachment_migration.doUpgrade}, covering the combinations
    of the EnableManagedAttachments config flag and the presence/absence of
    existing dropbox attachments.

    Each test builds its own store, so the patch/build/run/verify plumbing
    that was previously duplicated in every test is factored into the
    C{_patchFeatures}, C{_initStore} and C{_runUpgrade} helpers.
    """

    @inlineCallbacks
    def setUp(self):
        # We need to skip the immediate base class since we are creating our own
        # store in each test
        yield TestCase.setUp(self)

    @inlineCallbacks
    def _initStore(self, enableManagedAttachments=True):
        """
        Build a store with certain bits cleaned out.

        The MANAGED-ATTACHMENTS calendarserver value is removed so every
        test starts from a pristine "not yet migrated" state.
        """
        self.patch(config, "EnableManagedAttachments", enableManagedAttachments)
        store = yield theStoreBuilder.buildStore(
            self, {"push": StubNotifierFactory()}
        )
        store.enableManagedAttachments = enableManagedAttachments

        txn = store.newTransaction()
        cs = schema.CALENDARSERVER
        yield Delete(
            From=cs,
            Where=cs.NAME == "MANAGED-ATTACHMENTS"
        ).on(txn)
        yield txn.commit()
        returnValue(store)

    def _patchFeatures(self, hasDropbox):
        """
        Patch L{CalendarStoreFeatures} so that C{hasDropboxAttachments}
        reports C{hasDropbox}, and record whether
        C{upgradeToManagedAttachments} gets invoked.

        @return: a one-element list whose element becomes True once the
            (stubbed) upgrade runs.
        """
        didUpgrade = [False, ]

        def _hasDropboxAttachments(_self, txn):
            return succeed(hasDropbox)
        self.patch(CalendarStoreFeatures, "hasDropboxAttachments", _hasDropboxAttachments)

        def _upgradeToManagedAttachments(_self, batchSize=10):
            didUpgrade[0] = True
            return succeed(None)
        self.patch(CalendarStoreFeatures, "upgradeToManagedAttachments", _upgradeToManagedAttachments)
        return didUpgrade

    @inlineCallbacks
    def _runUpgrade(self, store):
        """
        Run the attachment-migration upgrade step against C{store} and
        return the resulting MANAGED-ATTACHMENTS calendarserver value
        (C{None} when the marker was not set).
        """
        upgrader = UpgradeDatabaseOtherStep(store)
        yield attachment_migration.doUpgrade(upgrader)

        txn = upgrader.sqlStore.newTransaction()
        managed = (yield txn.calendarserverValue("MANAGED-ATTACHMENTS", raiseIfMissing=False))
        yield txn.commit()
        returnValue(managed)

    @inlineCallbacks
    def test_upgradeFromEmptyDropbox(self):
        """
        Test L{attachment_migration.doUpgrade} when managed attachments is enabled and dropbox items do not exist.
        """
        didUpgrade = self._patchFeatures(hasDropbox=False)
        store = (yield self._initStore())
        managed = (yield self._runUpgrade(store))
        # Nothing to convert, but the migration marker is still written.
        self.assertFalse(didUpgrade[0])
        self.assertNotEqual(managed, None)

    @inlineCallbacks
    def test_upgradeFromDropboxOK(self):
        """
        Test L{attachment_migration.doUpgrade} when managed attachments is enabled and dropbox items exist.
        """
        didUpgrade = self._patchFeatures(hasDropbox=True)
        store = (yield self._initStore())
        managed = (yield self._runUpgrade(store))
        # NOTE(review): the original test also expected didUpgrade to stay
        # False here -- doUpgrade apparently only schedules the conversion
        # rather than running it synchronously; confirm before changing.
        self.assertFalse(didUpgrade[0])
        self.assertNotEqual(managed, None)

    @inlineCallbacks
    def test_upgradeAlreadyDone(self):
        """
        Test L{attachment_migration.doUpgrade} when managed attachments is enabled and migration already done.
        """
        didUpgrade = self._patchFeatures(hasDropbox=True)
        store = (yield self._initStore())

        # Pre-mark the store as already migrated.
        txn = store.newTransaction()
        yield txn.setCalendarserverValue("MANAGED-ATTACHMENTS", "1")
        yield txn.commit()

        managed = (yield self._runUpgrade(store))
        self.assertFalse(didUpgrade[0])
        self.assertNotEqual(managed, None)

    @inlineCallbacks
    def test_upgradeNotEnabled(self):
        """
        Test L{attachment_migration.doUpgrade} when managed attachments is disabled.
        """
        didUpgrade = self._patchFeatures(hasDropbox=True)
        store = (yield self._initStore(False))
        managed = (yield self._runUpgrade(store))
        # Feature disabled: neither the conversion nor the marker happens.
        self.assertFalse(didUpgrade[0])
        self.assertEqual(managed, None)
class AttachmentMigrationTests(CommonStoreTests):
    """
    Tests for dropbox-to-managed attachment migration run against a real
    common store (not just patched feature hooks).
    """

    @inlineCallbacks
    def setUp(self):
        self.patch(config, "EnableManagedAttachments", True)
        yield super(AttachmentMigrationTests, self).setUp()
        self._sqlCalendarStore.enableManagedAttachments = True

        # Remove any pre-existing "migration done" marker so each test
        # starts from a clean slate.
        txn = self.transactionUnderTest()
        cs = schema.CALENDARSERVER
        yield Delete(
            From=cs,
            Where=cs.NAME == "MANAGED-ATTACHMENTS"
        ).on(txn)
        yield self.commit()

    @inlineCallbacks
    def test_upgradeOrphanedAttachment(self):
        """
        Test L{attachment_migration.doUpgrade} when an orphaned attachment is present.
        """
        def _hasDropboxAttachments(_self, txn):
            return succeed(True)
        self.patch(CalendarStoreFeatures, "hasDropboxAttachments", _hasDropboxAttachments)

        # Create orphaned attachment: a row plus an on-disk file that is
        # not referenced by any calendar object.
        dropboxID = "ABCD.dropbox"
        attachmentName = "test.txt"
        home = yield self.homeUnderTest(name="user01")
        at = schema.ATTACHMENT
        yield Insert(
            {
                at.CALENDAR_HOME_RESOURCE_ID: home._resourceID,
                at.DROPBOX_ID: dropboxID,
                at.CONTENT_TYPE: "text/plain",
                at.SIZE: 10,
                at.MD5: "abcd",
                at.PATH: attachmentName,
            }
        ).on(self.transactionUnderTest())
        yield self.commit()

        # Attachment files live under a two-level md5(dropboxID) fan-out.
        hasheduid = hashlib.md5(dropboxID).hexdigest()
        fp = self._sqlCalendarStore.attachmentsPath.child(hasheduid[0:2]).child(hasheduid[2:4]).child(hasheduid)
        fp.makedirs()
        fp = fp.child(attachmentName)
        fp.setContent("1234567890")
        self.assertTrue(os.path.exists(fp.path))

        upgrader = UpgradeDatabaseOtherStep(self._sqlCalendarStore)
        yield attachment_migration.doUpgrade(upgrader)

        # The orphaned row and file must survive, and the migration
        # marker must be set.
        txn = upgrader.sqlStore.newTransaction()
        managed = (yield txn.calendarserverValue("MANAGED-ATTACHMENTS", raiseIfMissing=False))
        count = (yield Select(
            [Count(at.DROPBOX_ID), ],
            From=at,
        ).on(txn))[0][0]
        yield txn.commit()
        self.assertEqual(count, 1)
        self.assertNotEqual(managed, None)
        self.assertTrue(os.path.exists(fp.path))
|
trevor/calendarserver
|
txdav/common/datastore/upgrade/sql/others/test/test_attachment_migration.py
|
Python
|
apache-2.0
| 8,918
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-05-11 18:53
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the nullable ``end_date`` field to the ``Atik`` model."""

    dependencies = [
        ('market_blog', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='atik',
            name='end_date',
            # NOTE(review): ``verbose_name`` is normally a human-readable
            # string; a list containing a strftime format looks like a
            # mistake in the original model definition — confirm there.
            field=models.DateField(null=True, verbose_name=['%Y-%m-%d %H:%M:%S']),
        ),
    ]
|
SHARPRISE/AnGroDeto
|
market_blog/migrations/0002_atik_end_date.py
|
Python
|
apache-2.0
| 471
|
#!/usr/bin/env python
"""Packaging script for django_spam."""
try:
    from setuptools import setup, find_packages
    from setuptools.command.test import test
except ImportError:
    # Fall back to bootstrapping setuptools when it is not installed.
    from ez_setup import use_setuptools
    use_setuptools()
    from setuptools import setup, find_packages
    from setuptools.command.test import test

import os

here = os.path.dirname(os.path.abspath(__file__))
# Use a context manager so the README handle is always closed (the
# original opened and closed it manually).
with open(os.path.join(here, 'README.md')) as f:
    long_description = f.read().strip()

setup(
    name='django_spam',
    version='1.0.0',
    author='Nick Kelly',
    author_email='nick.kelly@tivix.com',
    url='http://github.com/Tivix/django-spam',
    description='Redirecting bots to utilize their time better...',
    packages=find_packages(exclude=('tests*',)),
    long_description=long_description,
    long_description_content_type='text/markdown',
    keywords='django spam',
    zip_safe=False,
    include_package_data=True,
    py_modules=['django_spam'],
    test_suite='runtests.runtests',
    install_requires=[
        'Django>=2.0.0',
    ],
    classifiers=[
        'Framework :: Django',
        'Framework :: Django :: 2.0',
        'Framework :: Django :: 2.1',
        'Framework :: Django :: 2.2',
        'Framework :: Django :: 3.0',
        'Framework :: Django :: 3.1',
        'Intended Audience :: Developers',
        'Intended Audience :: System Administrators',
        'Operating System :: OS Independent',
        'Topic :: Software Development'
    ]
)
|
Tivix/django-spam
|
setup.py
|
Python
|
mit
| 1,454
|
import os
from countershape import model
from countershape import state
from countershape import widgets
from . import testpages, tutils
class TestContext(testpages.DummyState):
    """Tests for page path computation relative to the application root."""

    def setUp(self):
        testpages.DummyState.setUp(self)

    def tearDown(self):
        testpages.DummyState.tearDown(self)

    def test_relativePath(self):
        # Two-level tree: foo -> bar (bar is a child of foo).
        self.application = testpages.TestApplication(
            model.BaseRoot(
                [
                    testpages.TPageHTML("foo"), [
                        testpages.TPageHTML("bar")
                    ]
                ]
            )
        )
        p = self.application.getPage(os.path.join("foo", "bar"))
        # From foo/bar, sibling-of-parent paths need one "..".
        assert p.relativePath(["oink"]) == "../oink"
        assert p.relativePath(["oink", "voing"]) == "../oink/voing"
        assert p.relativePath(["foo"]) == "../foo"
        assert p.relativePath([]) == ".."
        assert p.relativePath(["foo", "bar"]) == "bar"
        assert p.relativePath(["foo", "bar", "voing"]) == "bar/voing"
        assert p.absolutePath() == "foo/bar"
        # From top-level foo, paths are emitted as-is.
        p = self.application.getPage("foo")
        assert p.relativePath(["oink"]) == "oink"
        assert p.relativePath([""]) == ""
        assert p.relativePath(["foo", "bar"]) == "foo/bar"
        assert p.absolutePath() == "foo"

    def test_top(self):
        # The default DummyState application has no foo/bar page.
        assert not self.application.getPage(os.path.join("foo", "bar"))
class TestPageInstantiate(testpages.DummyState):
    """Pages must not be instantiated while a page call is in progress."""

    def test_instantiate_err(self):
        self.application.testing = False
        tutils.raises("instantiated during page call", model.BasePage)
class TestHeader(testpages.DummyState):
    """Tests for model.Header CSS/JS resource registration."""

    def test_path(self):
        # path() dispatches on extension: .css and .js are recognised.
        h = model.Header(state.page)
        h.path("foo.css")
        h.path("bar.js")
        assert "foo" in h._cssPath[0]
        assert "bar" in h._jsPath[0]

    def test_path_err(self):
        h = model.Header(state.page)
        tutils.raises("unrecognised resource extension", h.path, "foo.bar")

    def test_cssPath(self):
        h = model.Header(state.page)
        h.cssPath("foo")
        h.cssPath("bar")
        assert "foo" in h._cssPath[0]
        assert "bar" in h._cssPath[1]

    def test_jsPath(self):
        h = model.Header(state.page)
        h.jsPath("foo")
        h.jsPath("bar")
        assert "foo" in h._jsPath[0]
        assert "bar" in h._jsPath[1]

    def test_str(self):
        # Two CSS + two JS entries render as four non-empty lines.
        h = model.Header(state.page)
        h.cssPath("foo")
        h.cssPath("bar")
        h.jsPath("foo")
        h.jsPath("bar")
        s = str(h)
        assert len([i for i in s.splitlines() if i]) == 4
class TestHTMLPage(testpages.RenderTester):
    """Rendering tests for HTML pages with inline and file templates."""

    def setUp(self):
        self.application = testpages.TestApplication(
            model.BaseRoot(
                [
                    testpages.TPageHTMLFileTemplate(),
                    [
                        testpages.TPageHTML("nestedpage")
                    ],
                    testpages.TPageHTMLTemplate(),
                ]
            )
        )
        self.application.testing = 2

    def test_pageTitle(self):
        # pageTitle falls back to the class name, then .title, and can
        # be overridden wholesale by assignment.
        t = testpages.TPageHTMLTemplate()
        assert t.pageTitle() == "TPageHTMLTemplate"
        t.title = "Foo"
        assert t.pageTitle() == "Foo"
        t.pageTitle = "Bar"
        assert t.pageTitle == "Bar"

    def test_template(self):
        d = self.call("TPageHTMLTemplate")
        assert d.find("html") > -1
        assert d.find("TPageHTMLTemplate") > -1

    def test_filetemplate(self):
        d = self.call("TPageHTMLFileTemplate")
        assert d.find("template") > -1
        assert d.find("html") > -1

    def test_repr(self):
        t = testpages.TPageHTMLTemplate()
        assert repr(t)
class TestBaseApplication(testpages.RenderTester):
    """Exceptions raised while rendering a page propagate to the caller."""

    def setUp(self):
        self.r = model.BaseRoot(
            [
                TException("one"),
            ]
        )
        self.application = model.BaseApplication(self.r)

    def test_pageexception(self):
        p = self.application.getPage("one")
        tutils.raises("an exception", self.application, p)
class TestApplication(testpages.DummyState):
    """Tests for page lookup and the LinkTo/UrlTo/ALink widgets."""

    def setUp(self):
        self.application = testpages.TestApplication(
            model.BaseRoot(
                [
                    testpages.TPageHTML("base"),
                    [
                        testpages.TPageNoLink(),
                        testpages.TPageWithTitle()
                    ],
                    testpages.TPage("internal", internal=True)
                ]
            )
        )
        self.pageName = "base"
        testpages.DummyState.setUp(self)

    def test_getPageErr(self):
        assert not self.application.getPage('nonexistent')
        # Only strings and page objects are valid arguments.
        tutils.raises("invalid argument", self.application.getPage, 0)

    def test_getPageIdempotence(self):
        # Passing an already-resolved page returns it unchanged.
        p = self.application.getPage('base')
        assert self.application.getPage(p) == p

    def test_getPageRoot(self):
        assert self.application.getPage("").name == "BaseRoot"

    def test_LinkTo(self):
        assert str(widgets.LinkTo("base"))
        assert widgets.LinkTo("base")()

    def test_linkTo_withTitle(self):
        assert str(widgets.LinkTo("TPageWithTitle"))

    def test_linkTo_nopage(self):
        tutils.raises(
            "unknown page",
            str,
            widgets.LinkTo("Nonexistent")
        )

    def test_linkTo_nolink(self):
        assert str(widgets.LinkTo("TPageNoLink"))

    def test_url(self):
        assert str(widgets.UrlTo("TPageNoLink"))

    def test_url_anchor(self):
        s = str(widgets.UrlTo("TPageNoLink", anchor="foo"))
        assert s == "base/TPageNoLink#foo"

    def test_url_nopage(self):
        tutils.raises("unknown page", str, widgets.UrlTo("Nonexistent"))

    def test_url_internal(self):
        # Internal pages are not addressable from outside.
        tutils.raises("internal page", str, widgets.UrlTo("internal"))

    def test_alink(self):
        s = str(widgets.ALink("TPageNoLink", "text", "foo"))
        assert "TPageNoLink#foo" in s

    def test_linkTo_internal(self):
        tutils.raises(
            model.exceptions.ApplicationError,
            str,
            widgets.LinkTo("internal")
        )
class TestPageModel:
    """
    A suite of tests testing the application page model functionality.
    Tests span the Application and Page classes.
    """

    def setUp(self):
        state.page = None
        # Two identically-named leaves under different branches, and two
        # structurally identical "page/end" subtrees to exercise
        # ambiguity handling.
        self.a, self.b = testpages.TPage("test"), testpages.TPage("test")
        self.s1, self.s2 = testpages.TPage("end", structural=True), testpages.TPage("end", structural=True)
        self.p1, self.p2 = testpages.TPage("sub1", structural=True), testpages.TPage("sub2", structural=True)
        self.r = model.BaseRoot([
            testpages.TPage("base", structural=False, internal=True), [
                self.a,
                testpages.TPage("one", structural=True), [
                    testpages.TPage("X", structural=False), [
                        testpages.TPage("two", structural=True, internal=False), [
                            self.b,
                        ]
                    ]
                ],
                self.p1, [
                    testpages.TPage("page", structural=True), [
                        self.s1
                    ],
                ],
                self.p2, [
                    testpages.TPage("page", structural=True), [
                        self.s2,
                    ]
                ],
            ]
        ])
        self.t = testpages.TestApplication(self.r)
        state.application = self.t

    def tearDown(self):
        state.ctx = None

    def test_getPage(self):
        # "page/end" matches both subtrees, so lookup must fail loudly.
        tutils.raises("ambiguous path", self.t.getPage, os.path.join("page", "end"))
        assert self.t.getPage(os.path.join("sub1", "page", "end"))
        assert self.t.getPage(os.path.join("sub2", "page", "end"))

    def test_getPageChild(self):
        # "./" resolves relative to the current page's children.
        state.page = self.p1
        assert self.t.getPage(os.path.join(".", "page", "end")) is self.s1
        assert not self.t.getPage(os.path.join(".", "page", "foo"))
        assert self.t.getPage(os.path.join(".", "page"))

    def test_getPage_nostate(self):
        # Relative lookup requires a current page in state.
        tutils.raises("relative page link", self.t.getPage, os.path.join(".", "page", "end"))

    def test_getPageParent(self):
        # "^/" resolves upward through the ancestors.
        state.page = self.s1
        assert self.t.getPage("^/page") is self.p1.children[0]
        assert self.t.getPage("^/sub1") is self.p1

    def test_getPageSibling(self):
        # "-/" resolves among siblings only.
        state.page = self.p1
        assert self.t.getPage("-/sub2") is self.p2
        assert not self.t.getPage("-/page")

    def test_getPageLocal(self):
        # "$/" resolves among siblings and ancestors, not descendants.
        state.page = self.p1
        assert self.t.getPage("$/sub2") is self.p2
        assert self.t.getPage("$/base")
        assert not self.t.getPage("$/X")

    def test_match(self):
        # Second argument toggles exact (full-path) matching.
        assert self.b.match([], False)
        assert self.b.match("", False)
        assert self.b.match(["two", "test"], False)
        assert self.b.match(["one", "two", "test"], False)
        assert not self.b.match(["two", "two", "test"], False)
        assert self.s1.match(["sub1", "page", "end"], False)
        assert self.s1.match(["page", "end"], False)
        assert self.s2.match(["page", "end"], False)
        assert self.s1.match(["sub1", "page", "end"], True)
        assert self.s1.match("sub1/page/end", True)
        assert not self.s1.match(["page", "end"], True)
        assert not self.r.match(["page", "end"], False)

    def test_getPath(self):
        # getPath returns the deepest matching page plus leftover
        # path components (arguments).
        page, path = self.t.getPath(["one", "two"])
        assert page.name == "two"
        assert path == []
        page, path = self.t.getPath(["one"])
        assert page.name == "one"
        assert path == []
        page, path = self.t.getPath(["one", "argument"])
        assert page.name == "one"
        assert path == ["argument"]
        page, path = self.t.getPath(["test"])
        assert page.name == "test"
        assert path == []
        assert self.t.getPath([]) == (self.r, [])
        assert self.t.getPath(["piglet"]) == (self.r, ["piglet"])
        assert self.t.getPath(["two", "foo"]) == (self.r, ["two", "foo"])

    def test_url(self):
        state.page = self.t.getPage(os.path.join("one", "two"))
        assert str(widgets.UrlTo("two")) == "two"
        state.page = self.t.getPage("one")
        assert str(widgets.UrlTo("one")) == "one"
class TestPage:
    """Tests for document-tree descendant relationships."""

    def test_isDocDescendantOf(self):
        one = testpages.TPage("one")
        two = testpages.TPage("two")
        r = model.BaseRoot(
            [
                one,
                testpages.TPage("dir", internal=True), [
                    two
                ]
            ]
        )
        t = testpages.TestApplication(r)
        # "two" is not a structural descendant of its sibling "one",
        # but is a document descendant of it (and of the root).
        assert not two.isDescendantOf(one)
        assert two.isDocDescendantOf(one)
        assert two.isDocDescendantOf(r)
        # NOTE(review): asserting the *root* is a descendant of a leaf
        # looks inverted — confirm isDescendantOf's direction in model.
        assert r.isDescendantOf(two)
class TestPageModelErrors:
    """Trees with ambiguous sibling names must be rejected at build time."""

    def test_ambiguouschild(self):
        r = model.BaseRoot([
            testpages.TPage("one", structural=True), [
                testpages.TPage("test"),
                testpages.TPage("test"),
            ]
        ])
        tutils.raises(
            model.exceptions.ApplicationError,
            testpages.TestApplication,
            r
        )

    def test_ambiguouschild2(self):
        # Ambiguity is also detected through a non-structural node.
        r = model.BaseRoot([
            testpages.TPage("one", structural=True), [
                testpages.TPage("test"),
                testpages.TPage("X", structural=False), [
                    testpages.TPage("test"),
                ]
            ]
        ])
        tutils.raises(
            model.exceptions.ApplicationError,
            testpages.TestApplication,
            r
        )

    def test_ambiguoustoplevel(self):
        r = model.BaseRoot([
            testpages.TPage("test", structural=True),
            testpages.TPage("test", structural=False),
        ])
        tutils.raises(
            model.exceptions.ApplicationError,
            testpages.TestApplication,
            r
        )
class TException(testpages.TPage):
    """Test page whose render always raises, for error-path tests."""

    def render(self, *args, **kwargs):
        raise ValueError("An exception")
# Shared application fixture used by the render test classes below.
_TestApp = testpages.TestApplication(
    model.BaseRoot(
        [
            testpages.TPage("one", structural=True),
            [
                testpages.TPage("two"),
                testpages.TPage("three")
            ],
            testpages.TPage("internal", internal=True),
            TException("exception"),
        ]
    )
)
class TestApplicationRenderNoTesting(testpages.RenderTester):
    """Exercise the pre-render hook with testing mode switched off."""

    def setUp(self):
        self.application = _TestApp
        self.application.testing = 1

    def test_prenotesting(self):
        self.application.testing = 0
        p = model.BasePage()
        self.application.pre(p)
class TestApplicationRender(testpages.RenderTester):
    """Basic page-call rendering against the shared fixture app."""

    def setUp(self):
        self.application = _TestApp
        self.application.testing = 2

    def test_call(self):
        assert self.call("one")

    def test_call_nonexistent(self):
        tutils.raises(model.exceptions.ApplicationError, self.call, "nonexistent")
class TestApplicationError:
    """ApplicationError must be convertible to a string."""

    def test_str(self):
        a = model.exceptions.ApplicationError("foo")
        str(a)
|
mhils/countershape
|
test/test_model.py
|
Python
|
mit
| 13,305
|
# generated from catkin/cmake/template/pkg.context.pc.in
# Auto-generated pkg-config context for the robot_pose_ekf devel space;
# regenerate via catkin rather than editing by hand.
CATKIN_PACKAGE_PREFIX = ""
# Semicolon-separated lists are split into Python lists; empty strings
# become empty lists.
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "/home/trevor/ROS/catkin_ws/devel/include".split(';') if "/home/trevor/ROS/catkin_ws/devel/include" != "" else []
PROJECT_CATKIN_DEPENDS = "roscpp".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "robot_pose_ekf"
PROJECT_SPACE_DIR = "/home/trevor/ROS/catkin_ws/devel"
PROJECT_VERSION = "1.13.0"
|
siketh/ASR
|
catkin_ws/build/navigation/robot_pose_ekf/catkin_generated/pkg.develspace.context.pc.py
|
Python
|
mit
| 467
|
# Usage: mitmdump -s "iframe_injector.py url"
# (this script works best with --anticache)
import sys
from bs4 import BeautifulSoup
class Injector:
    """mitmproxy addon that prepends a hidden iframe to HTML responses."""

    def __init__(self, iframe_url):
        # URL that the injected iframe will load.
        self.iframe_url = iframe_url

    def response(self, flow):
        # Substring check: skip responses whose host appears in the
        # iframe URL, so we never inject into the framed page itself.
        if flow.request.host in self.iframe_url:
            return
        html = BeautifulSoup(flow.response.content, "html.parser")
        if html.body:
            # Invisible 0x0 borderless frame inserted first in <body>.
            iframe = html.new_tag(
                "iframe",
                src=self.iframe_url,
                frameborder=0,
                height=0,
                width=0)
            html.body.insert(0, iframe)
            flow.response.content = str(html).encode("utf8")
def start():
    """Build the Injector addon from the single URL on the command line."""
    args = sys.argv
    if len(args) != 2:
        raise ValueError('Usage: -s "iframe_injector.py url"')
    return Injector(args[1])
|
mosajjal/mitmproxy
|
examples/simple/modify_body_inject_iframe.py
|
Python
|
mit
| 829
|
#!/usr/bin/env python
"""Packaging script for zmqrepl."""
try:
    # Install prereqs here and now if we can.
    from setuptools import setup
    kw = {
        'install_requires': ['pyzmq>=2.0.10.0']
    }
except ImportError:
    from distutils.core import setup
    # Parenthesized so this file also parses under Python 3 (identical
    # output under Python 2 for a single argument).
    print('No setuptools. You may have to manually install dependencies.')
    kw = {}

setup(name='zmqrepl',
      license='GPLv3',
      version='0.0.1',
      description='A zmq repl.',
      author='John Krauss',
      author_email='irving.krauss@gmail.com',
      url='http://github.com/talos/zmqrepl',
      scripts=['zmqrepl'],
      **kw
      )
|
talos/zmqrepl
|
setup.py
|
Python
|
gpl-3.0
| 587
|
# -*- coding: utf-8 -*-
#
# This hook generates a system notification for Linux when using MAL
#
# Written by matoro, last updated 2016/09/01
# https://github.com/matoro/
# https://myanimelist.net/profile/Matoro_Mahri
#
# To use, copy this file to ~/.trackma/hooks/
import os
import subprocess

import trackma.utils as utils
def episode_changed(engine, show):
    """Trackma hook: show a desktop notification when an episode updates.

    Uses notify-send via ``subprocess`` with an argument list instead of
    the previous ``os.system`` string, so show titles containing shell
    metacharacters (quotes, ``$(...)``, ``;``) can no longer break the
    command or inject into the shell.
    """
    import subprocess
    # NOTE(review): icon path is hard-coded to a python3.5 site-packages
    # install location — confirm it matches the local installation.
    subprocess.call([
        'notify-send',
        '--icon=/usr/lib/python3.5/site-packages/trackma/data/mal.jpg',
        '--app-name=trackma',
        'Updated ' + show['title'],
        'Progress: ' + str(show['my_progress']) + '/' + str(show['total']),
    ])
|
z411/trackma
|
hooks/notification.py
|
Python
|
gpl-3.0
| 545
|
import _plotly_utils.basevalidators
class UirevisionValidator(_plotly_utils.basevalidators.AnyValidator):
def __init__(self, plotly_name="uirevision", parent_name="scatter", **kwargs):
super(UirevisionValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "none"),
role=kwargs.pop("role", "info"),
**kwargs
)
|
plotly/python-api
|
packages/python/plotly/plotly/validators/scatter/_uirevision.py
|
Python
|
mit
| 447
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2008 Adriano Monteiro Marques
#
# Author: Francesco Piccinno <stack.box@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
BaseContext are defined into BaseContext/ directory
The Context directory has only __init__.py to choose
the correct backend and initialize it.
"""
|
dennisfrancis/PacketManipulator
|
umit/pm/backend/abstract/__init__.py
|
Python
|
gpl-2.0
| 1,421
|
__author__ = 'alexander'

# Adjacency "dict of lists" (original Dutch comment: "dict met lists"):
# each station maps to the stations it is directly connected to.
treinennet = dict()
treinennet['Utrecht Centraal'] = ['\'s-Hertogenbosch','Amersfoort', 'Arnhem', 'Bunnik', 'Driebergen-Zeist', 'Gouda', 'Utrecht Leidsche Rijn', 'Utrecht Lunetten', 'Utrecht Overvecht', 'Utrecht Terwijde', 'Utrecht Zuilen', 'Veenendaal-De Klomp', 'Woerden']
treinennet['\'s-Hertogenbosch'] = ['\'s-Hertogenbosch Oost', 'Eindhoven', 'Nijmegen', 'Tilburg', 'Utrecht Centraal', 'Vught', 'Zaltbommel']
treinennet['Nijmegen'] = ['\'s-Hertogenbosch', 'Arnhem','Nijmegen Goffert', 'Nijmegen Heyendaal', 'Nijmegen Lent', 'Oss']
def check_verbinding1(begin, eind, netwerk=None):
    """Print and return whether *begin* is directly connected to *eind*.

    Args:
        begin: departure station name.
        eind: destination station name.
        netwerk: adjacency dict to search; defaults to the module-level
            ``treinennet`` (keeps the original call signature working).

    Returns:
        True when a direct connection exists, False otherwise.
    """
    if netwerk is None:
        netwerk = treinennet
    # Previously an unknown departure station printed nothing at all;
    # treat it as "not connected" so callers always get feedback.
    if begin in netwerk and eind in netwerk[begin]:
        print("Verbonden!")
        return True
    print("Niet verbonden...")
    return False
check_verbinding1('Utrecht Centraal', 'Arnhem')
|
Alexanderkorn/Automatisation
|
oude scripts/les 9/traject code ns.py
|
Python
|
gpl-3.0
| 813
|
from django.db import models
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
# https://docs.djangoproject.com/en/dev/ref/models/fields/
@python_2_unicode_compatible
class Student(models.Model):
    """A user enrolled as a student."""

    # Reverse accessor on User: user.student_of.
    user = models.OneToOneField(User, related_name='student_of', verbose_name=_('User'))
    enrollment_date = models.DateField(_('Enrollment Date'), auto_now_add=True)
    photo = models.ImageField(_('Photo'), upload_to='students', null=True)

    def __str__(self):
        return self.user.get_full_name()

    class Meta:
        verbose_name = _('Student')
        verbose_name_plural = _('Students')
@python_2_unicode_compatible
class Instructor(models.Model):
    """A user employed as an instructor, with an office and departament."""

    # Reverse accessor on User: user.instructor_of.
    user = models.OneToOneField(User, related_name='instructor_of', verbose_name=_('User'))
    hire_date = models.DateField(_('Hire Date'), auto_now_add=True)
    office = models.ForeignKey('Office', verbose_name=_('Office'))
    departament = models.ForeignKey('Departament', verbose_name=_('Departament'))

    def __str__(self):
        return self.user.get_full_name()

    class Meta:
        verbose_name = _('Instructor')
        verbose_name_plural = _('Instructors')
@python_2_unicode_compatible
class Departament(models.Model):
    """An academic department with a budget."""

    name = models.CharField(_('Name'), max_length=500)
    budget = models.FloatField(_('Budget'))
    start_date = models.DateField(_('Start Date'), auto_now_add=True)

    def __str__(self):
        return self.name

    class Meta:
        verbose_name = _('Departament')
        verbose_name_plural = _('Departaments')
@python_2_unicode_compatible
class Course(models.Model):
    """A course taught by one instructor to many students."""

    title = models.CharField(_('title'), max_length=500)
    credits = models.IntegerField(_('credits'), default=4)
    instructor = models.ForeignKey(Instructor, verbose_name=_('Instructor'))
    students = models.ManyToManyField(Student, verbose_name=_('Students'))

    def __str__(self):
        return self.title

    class Meta:
        verbose_name = _('Course')
        verbose_name_plural = _('Courses')
@python_2_unicode_compatible
class Office(models.Model):
    """An instructor office, identified by a slug location."""

    location = models.SlugField(_('location'))

    def __str__(self):
        return self.location

    class Meta:
        verbose_name = _("Instructor office")
        verbose_name_plural = _("Instructor offices")
|
luisza/university
|
university/models.py
|
Python
|
gpl-2.0
| 2,471
|
from django.apps import AppConfig
class RequestsConfig(AppConfig):
    """App configuration for the ``requests`` Django app."""
    # NOTE(review): the app label shadows the popular ``requests`` HTTP
    # library's name — verify this causes no import confusion elsewhere.
    name = 'requests'
|
elenamvk/tourcouch2.0
|
requests/apps.py
|
Python
|
mit
| 91
|
#!/usr/bin/env python
# Maintained by Marshall Mattingly
import os, subprocess, sys
# global variables
base = os.path.dirname(os.path.realpath(__file__))
env = os.path.join(base, 'env')
def create_env():
    '''Create the project virtualenv and install requirements into it.

    Returns True when the environment exists (or was created and
    provisioned successfully), False on any failure.

    Note: prints are parenthesized so the file also parses under
    Python 3; single-argument output is identical under Python 2.
    '''
    # see if it's already been created
    if os.path.exists(env):
        return True
    # try to create it using the bundled virtualenv
    virtualenv = os.path.join(base, 'virtualenv-1.11.4', 'virtualenv.py')
    if 0 != subprocess.call(['python', virtualenv, 'env']):
        print('Unable to create env. Please delete env folder and try again.')
        return False
    # locate pip inside the new environment (POSIX, then Windows layout)
    pip = os.path.join(env, 'bin', 'pip')
    if not os.path.exists(pip):
        pip = os.path.join(env, 'Scripts', 'pip.exe')
    if not os.path.exists(pip):
        print('Unable to find pip executable. Make sure env was created.')
        return False
    # install our requirements
    if 0 != subprocess.call([pip, 'install', '-r', 'requirements.txt']):
        print('Issues install our requirements. Please check output.')
        return False
    # we did it all!
    return True
# we do all this outside of main, because yolo
# make sure that our environment is created
if not create_env():
    print('Unable to create virtual environment!')
    sys.exit(1)

# determine the virtual python executable
INTERP = os.path.join(env, 'bin', 'python')
if not os.path.exists(INTERP):
    INTERP = os.path.join(env, 'Scripts', 'python.exe')

# ensure we found it
if not os.path.exists(INTERP):
    print('Unable to determine virtual python executable location.')
    sys.exit(1)

# re-exec this script under the virtualenv's python if needed
if sys.executable != INTERP:
    os.execl(INTERP, INTERP, *sys.argv)

# update the sys path
sys.path.insert(0, base)

# run the app under a single __main__ guard (the original had two
# redundant guards); write() keeps the old `print ...,` no-newline
# behaviour and is valid on both Python 2 and 3.
if __name__ == '__main__':
    sys.stdout.write("Running the app")
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "atlascms.settings")
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)

# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
|
UND-CSCI491/atlascms
|
manage.py
|
Python
|
apache-2.0
| 2,123
|
import math
from scipy import constants
from ROOT import TVector3
from heppy.utils.deltar import deltaPhi
from collections import OrderedDict
class Path(object):
    '''Path followed by a particle in 3D space.

    Assumes constant speed magnitude both along the z axis and in the
    transverse plane.  ``p4`` is a ROOT TLorentzVector four-momentum
    and ``origin`` a TVector3 starting point.
    '''

    def __init__(self, p4, origin):
        self.p4 = p4
        # Unit vector along the momentum direction.
        self.udir = p4.Vect().Unit()
        self.origin = origin
        # Speed magnitude in m/s (beta * c).
        self.speed = self.p4.Beta() * constants.c
        # Named points along the path, in insertion order.
        self.points = OrderedDict()
        self.points['vertex'] = origin

    def time_at_z(self, z):
        '''Time at which the particle crosses longitudinal position z.'''
        dest_time = (z - self.origin.Z())/self.vz()
        return dest_time

    def deltat(self, path_length):
        '''Time needed to follow a given path length'''
        return path_length / self.speed

    def point_at_time(self, time):
        '''Returns the 3D point on the path at a given time'''
        return self.origin + self.udir * self.speed * time

    def vz(self):
        '''Speed magnitude along z axis'''
        return self.p4.Beta() * constants.c * self.udir.Z()

    def vperp(self):
        '''Speed magnitude in the transverse plane'''
        return self.p4.Beta() * constants.c * self.udir.Perp()
class StraightLine(Path):
    '''Straight path of a neutral (unbent) particle; inherits Path as-is.'''
    pass
class Helix(Path):
    '''Helical path of a charged particle in a uniform magnetic field
    along z.  ``field`` is in tesla, ``charge`` in units of e; the
    1e9/c factors convert momentum/mass from GeV-based units to SI
    (consistent with the TLorentzVector usage below — confirm).
    '''

    def __init__(self, field, charge, p4, origin):
        super(Helix, self).__init__(p4, origin)
        self.charge = charge
        # Radius of curvature in the transverse plane.
        self.rho = p4.Perp() / (abs(charge)*field) * 1e9/constants.c
        self.v_over_omega = p4.Vect()
        self.v_over_omega *= 1./(charge*field)*1e9/constants.c
        # Angular frequency of the transverse rotation.
        self.omega = charge*field*constants.c**2 / (p4.M()*p4.Gamma()*1e9)
        # Circle center in the transverse (x, y) plane.
        momperp_xy = TVector3(-p4.Y(), p4.X(), 0.).Unit()
        origin_xy = TVector3(origin.X(), origin.Y(), 0.)
        self.center_xy = origin_xy - charge * momperp_xy * self.rho
        self.extreme_point_xy = TVector3(self.rho, 0, 0)
        if self.center_xy.X()!=0 or self.center_xy.Y()!=0:
            self.extreme_point_xy = self.center_xy + self.center_xy.Unit() * self.rho
        # calculate phi range with the origin at the center,
        # for display purposes
        center_to_origin = origin_xy - self.center_xy
        self.phi0 = center_to_origin.Phi()
        self.phi_min = self.phi0 * 180 / math.pi
        self.phi_max = self.phi_min + 360.

    def polar_at_time(self, time):
        '''Return (rho, z, phi) cylindrical coordinates at a given time.'''
        z = self.vz() * time + self.origin.Z()
        rho = self.rho
        phi = - self.omega * time + self.phi0
        return rho, z, phi

    def time_at_phi(self, phi):
        '''Time at which the particle reaches azimuth phi on the circle.'''
        time = deltaPhi(self.phi0, phi) / self.omega
        return time

    def phi(self, x, y):
        '''Azimuth of transverse point (x, y) as seen from the circle center.'''
        xy = TVector3(x,y,0)
        xy -= self.center_xy
        return xy.Phi()

    def point_from_polar(self, polar):
        '''Convert (rho, z, phi) back to a cartesian TVector3.'''
        rho,z,phi = polar
        xy = self.center_xy + self.rho * TVector3(math.cos(phi), math.sin(phi), 0)
        return TVector3(xy.X(), xy.Y(), z)

    def point_at_time(self, time):
        '''Cartesian position at a given time along the helix.'''
        z = self.vz() * time + self.origin.Z()
        x = self.origin.X() + \
            self.v_over_omega.Y() * (1-math.cos(self.omega*time)) \
            + self.v_over_omega.X() * math.sin(self.omega*time)
        y = self.origin.Y() - \
            self.v_over_omega.X() * (1-math.cos(self.omega*time)) \
            + self.v_over_omega.Y() * math.sin(self.omega*time)
        return TVector3(x, y, z)

    def path_length(self, deltat):
        '''ds2 = dx2+dy2+dz2 = [w2rho2 + vz2] dt2'''
        return math.sqrt(self.omega**2 * self.rho**2 + self.vz()**2)*deltat

    # def deltat(self, path_length):
    #     #TODO: shouldn't this just use beta????
    #     d1 = path_length / (self.p4.Beta()*constants.c)
    #     # d2 = path_length / math.sqrt(self.omega**2 * self.rho**2 + self.vz()**2)
    #     return d1
if __name__ == '__main__':
    # Smoke test: electron-like particle (pT = 1 GeV, m = 0.511 MeV)
    # in a 3.8 T field, starting at the origin.
    from ROOT import TLorentzVector, TVector3
    p4 = TLorentzVector()
    p4.SetPtEtaPhiM(1, 0, 0, 5.11e-4)
    helix = Helix(3.8, 1, p4, TVector3(0,0,0))
    length = helix.path_length(1e-9)
    helix.deltat(length)
|
semkiv/heppy_fcc
|
fastsim/path.py
|
Python
|
gpl-3.0
| 4,066
|
# Plot conditioned fitness vs generation for the "mutagenized 2.3" line,
# with error bars and significance stars, then pickle the trajectory.
import matplotlib,numpy,sys,scipy,pickle
import matplotlib.pyplot
sys.path.append('../lib')
import calculateStatistics

### MAIN
matplotlib.rcParams.update({'font.size':36,'font.family':'Times New Roman','xtick.labelsize':28,'ytick.labelsize':28})
thePointSize=12
jarDir='/Users/adriandelomana/scratch/'

# mutagenized 2.3
# Each generation: signal vs no-signal replicate counts ->
# (conditioned-fitness mean, sd, p-value) via calculateStatistics.main.
xSignal=numpy.array([[205,162,175,200,150],[35,33,50,48,45]])
xNoSignal=numpy.array([[190,166,175,145,139],[47,49,33,31,36]])
cf_mu_0, cf_sd_0, pvalue_0 = calculateStatistics.main(xSignal, xNoSignal)

xSignal=numpy.array([[126,116,139,114,112],[55,53,56,54,46]])
xNoSignal=numpy.array([[115,143,135,123,142],[60,70,62,71,65]])
cf_mu_50, cf_sd_50, pvalue_50 = calculateStatistics.main(xSignal, xNoSignal)

# NOTE(review): the second row below has only 4 values while the first
# has 5, so numpy builds a ragged object array (an outright error on
# numpy >= 1.24).  A datum was probably dropped — confirm against the
# raw measurements.
xSignal=numpy.array([[97,126,132,140,168],[124,105,124,114]])
xNoSignal=numpy.array([[139,130,157,132,120],[113,150,116,95,127]])
cf_mu_100, cf_sd_100, pvalue_100 = calculateStatistics.main(xSignal, xNoSignal)

xSignal=numpy.array([[161,149,143,154,140],[155,134,131,167,151]])
xNoSignal=numpy.array([[148,176,172,184,185],[141,172,160,146,140]])
cf_mu_150, cf_sd_150, pvalue_150 = calculateStatistics.main(xSignal, xNoSignal)

xSignal=numpy.array([[198,149,151,203,168],[133,143,147,139,144]])
xNoSignal=numpy.array([[193,187,195,183,171],[147,149,147,145,136]])
cf_mu_200, cf_sd_200, pvalue_200 = calculateStatistics.main(xSignal, xNoSignal)

xSignal=numpy.array([[154,177,177,176,160],[159,176,177,172,179]])
xNoSignal=numpy.array([[171,161,220,194,180],[142,170,143,150,162]])
cf_mu_250, cf_sd_250, pvalue_250 = calculateStatistics.main(xSignal, xNoSignal)

x = [0, 50, 100, 150, 200, 250]
y = [cf_mu_0, cf_mu_50, cf_mu_100, cf_mu_150, cf_mu_200, cf_mu_250]
z = [cf_sd_0, cf_sd_50, cf_sd_100, cf_sd_150, cf_sd_200, cf_sd_250]
w = [pvalue_0, pvalue_50, pvalue_100, pvalue_150, pvalue_200, pvalue_250]

matplotlib.pyplot.errorbar(x,y,yerr=z,fmt=':o',color='red',ecolor='red',markeredgecolor='red',capsize=0,ms=thePointSize,mew=0)

# Significance stars just above (or below) each error bar:
# one star for p < 0.05, two stars for p < 0.01.
for i in range(len(w)):
    if y[i] > 0.:
        sp=y[i]+z[i]+0.02
    else:
        sp=y[i]-z[i]-0.02
    if w[i] < 0.05 and w[i] >= 0.01:
        matplotlib.pyplot.scatter(x[i], sp, s=75, c='black', marker=r"${*}$", edgecolors='none')
    if w[i] < 0.01:
        matplotlib.pyplot.scatter(x[i]-3, sp, s=75, c='black', marker=r"${*}$", edgecolors='none')
        matplotlib.pyplot.scatter(x[i]+3, sp, s=75, c='black', marker=r"${*}$", edgecolors='none')

matplotlib.pyplot.plot([0,300],[0,0],'--',color='black')
matplotlib.pyplot.xlim([-25,325])
matplotlib.pyplot.ylim([-0.4,0.4])
matplotlib.pyplot.yticks([-0.4,-0.2,0,0.2,0.4])
matplotlib.pyplot.xlabel('Generation')
matplotlib.pyplot.ylabel('Conditioned Fitness')
matplotlib.pyplot.tight_layout(pad=0.5)
matplotlib.pyplot.savefig('figure.mutagenized.2.3.pdf')
matplotlib.pyplot.clf()

# save processed data alternative plotting
trajectory=[x,y,z]
jarFile=jarDir+'mutagenized.2.3.pickle'
f=open(jarFile,'wb')
pickle.dump(trajectory,f)
f.close()
|
adelomana/cassandra
|
conditionedFitness/figureMutagenized/script.2.3.py
|
Python
|
gpl-3.0
| 2,965
|
from time import time
class Stream(object):
    """
    Stream abstract class.
    """

    def __init__(self, cond_stream, inputs):
        """
        :param cond_stream: a :class:`.ConditionalStream` which produced this stream
        :param inputs: a list of values for parameters ``cond_stream.inputs`` that are the inputs to this stream
        """
        self.cond_stream = cond_stream
        self.inputs = tuple(inputs)
        # Maps each produced value to bookkeeping indices (see call()).
        self.history = {}
        self.enumerated = False
        # Per-call wall-clock durations and produced value tuples.
        self.call_times = []
        self.call_history = []
        self.reset()
        self.conditions = self.cond_stream.instantiate_conditions(self.inputs)

    def instantiate_conditions(self):
        return self.conditions

    def instantiate_effects(self, outputs):
        return self.cond_stream.instantiate_effects(self.inputs, outputs)

    def get_values(self, **kwargs):
        """
        :returns: a list of :class:`.Constant` and :class:`.Atom` of the values produced by the stream
        """
        raise NotImplementedError()

    @property
    def call_time(self):
        # Total time spent inside call() across all invocations.
        return sum(self.call_times)

    @property
    def calls(self):
        return len(self.call_times)

    def call(self, **kwargs):
        assert not self.enumerated
        # After a reset(), the first call replays all previously
        # produced values instead of invoking get_values again.
        if not self.called and self.calls:
            self.called = True
            self.enumerated = self.true_enumerated
            all_values = []
            for values in self.call_history:
                all_values += list(values)
            return all_values
        t0 = time()
        self.called = True
        new_values = tuple(self.get_values(**kwargs))
        for value in new_values:
            if value not in self.history:
                self.history[value] = []
            # NOTE(review): this records len(self.history) (the number of
            # distinct values so far) rather than a call index; confirm
            # whether self.calls / len(self.call_history) was intended.
            if len(self.history) not in self.history[value]:
                self.history[value].append(len(self.history))
        self.call_history.append(new_values)
        self.call_times.append(time() - t0)
        return new_values

    def reset(self):
        # Arms the replay path in call(); remembers the real
        # enumeration state so it can be restored after replay.
        self.called = False
        self.true_enumerated = self.enumerated
        self.enumerated = False

    def __repr__(self):
        return '%s(%s | %s)' % (self.cond_stream.__class__.__name__, self.cond_stream.outputs, self.inputs)
    __str__ = __repr__
class StrictStream(Stream):
    """
    Stream abstract class which only supports returning tuples of values which satisfy ``self.cond_stream.outputs``.
    """

    def get_next(self, **kwargs):
        """
        :returns: a list of tuples of :class:`.Constant` which satisfy ``self.cond_stream.outputs``
        """
        raise NotImplementedError()

    def get_values(self, **kwargs):
        assert not self.enumerated
        values = []
        for outputs in self.get_next(**kwargs):
            # Each output tuple must match the declared output
            # parameters in arity and in per-position type.
            assert len(self.cond_stream.outputs) == len(outputs) and all(
                p.type == a.type for p, a in zip(self.cond_stream.outputs, outputs))
            values += list(outputs) + \
                self.cond_stream.instantiate_effects(self.inputs, outputs)
        return values
class GeneratorStream(StrictStream):
    """
    :class:`.StrictStream` which can be specified using a function to a generator.
    """
    def __init__(self, cond_stream, inputs):
        # The generator is created before the base constructor runs so the
        # stream is ready to produce values immediately.
        self.generator = self.get_generator(inputs)
        super(GeneratorStream, self).__init__(cond_stream, inputs)
    def get_generator(self, inputs):
        """
        Generator function which must be overridden.

        :param inputs: a list of values for parameters ``cond_stream.inputs`` that are the inputs to the stream
        :returns: a generator of tuples of :class:`.Constant` which satisfy ``self.cond_stream.outputs``
        """
        raise NotImplementedError()
    def get_next(self, **kwargs):
        try:
            produced = next(self.generator)
        except StopIteration:
            # Exhausted: mark the stream so it is never queried again.
            self.enumerated = True
            return []
        # A bare element is treated as a single output tuple.
        return produced if isinstance(produced, list) else [produced]
class ListStream(StrictStream):
    """
    :class:`.StrictStream` which can be specified using a function to a list.
    """
    def get_list(self, inputs):
        """
        List function which must be overridden.

        :param inputs: a list of values for parameters ``cond_stream.inputs`` that are the inputs to the stream
        :returns: a list of tuples of :class:`.Constant` which satisfy ``self.cond_stream.outputs``
        """
        raise NotImplementedError()
    def get_next(self, **kwargs):
        # The whole list is produced in one shot, so the stream is exhausted
        # after a single query.
        self.enumerated = True
        return self.get_list(self.inputs)
class FunctionStream(StrictStream):
    """
    :class:`.StrictStream` which can be specified using a function.
    """
    def function(self, inputs):
        """
        Function which must be overridden.

        :param inputs: a list of values for parameters ``cond_stream.inputs`` that are the inputs to the stream
        :returns: a tuple of :class:`.Constant` which satisfies ``self.cond_stream.outputs``
        """
        raise NotImplementedError()
    def test(self, inputs):
        """
        Test which may optionally be overridden.

        :param inputs: a list of values for parameters ``cond_stream.inputs`` that are the inputs to the stream
        :returns: a boolean value ``{False, True}``
        """
        return True
    def get_next(self, **kwargs):
        # A function produces at most one output tuple, so the stream is
        # exhausted after the first query.
        self.enumerated = True
        return [self.function(self.inputs)] if self.test(self.inputs) else []
class TestStream(StrictStream):
    """
    :class:`.StrictStream` which can be specified using a test.
    """
    def test(self, inputs):
        """
        Test which must be overridden.

        :param inputs: a list of values for parameters ``cond_stream.inputs`` that are the inputs to the stream
        :returns: a boolean value ``{False, True}``
        """
        raise NotImplementedError()
    def get_next(self, **kwargs):
        # A test is queried exactly once; success yields the empty output
        # tuple, failure yields nothing.
        self.enumerated = True
        if self.test(self.inputs):
            return [()]
        return []
|
caelan/stripstream
|
stripstream/pddl/streams.py
|
Python
|
mit
| 6,045
|
import unittest
from .. import Message
class TestStringMethods(unittest.TestCase):
    """Tests for Message equality semantics."""
    def setUp(self):
        # Canonical payload in the shape returned by the Facebook API.
        self.data = {
            "from": {
                "name": "Jim",
            },
            "message": "Hello",
            "created_time": "2015-12-12T22:22:22+0000",
        }
        self.conversation = None
        self.msg = Message(self.data, self.conversation)
    def test_comp(self):
        """Messages from equal data compare equal; differing senders do not."""
        import copy
        msg = Message(self.data, self.conversation)
        # Fix: the original did ``data1 = self.data``, which only aliased the
        # dict -- mutating data1["from"]["name"] also corrupted self.data
        # (and therefore self.msg). Deep-copy before mutating.
        data1 = copy.deepcopy(self.data)
        data1["from"]["name"] = "Parsons"
        msg1 = Message(data1, self.conversation)
        self.assertTrue(msg == self.msg)
        self.assertFalse(msg1 == self.msg)
|
fantastic001/pyfb
|
pyfacebook/tests/test_message.py
|
Python
|
gpl-2.0
| 697
|
#!/usr/local/bin/python
# -*- coding: utf-8 -*-
# Author: illuz <iilluzen[at]gmail.com>
# File: AC_brute_force_nk.py
# Create Date: 2015-08-18 18:49:00
# Usage: AC_brute_force_nk.py
# Descripton:
class Solution:
    # @param {integer[]} nums
    # @param {integer} k
    # @return {integer[]}
    def maxSlidingWindow(self, nums, k):
        """Return the maximum of each length-k sliding window (brute force, O(n*k)).

        Degenerate cases (k == 0 or k > len(nums)) yield an empty list.
        """
        res = []
        nums_len = len(nums)
        if nums_len < k or k == 0:
            return res
        # Fix: range() instead of the Python-2-only xrange() so the module
        # also runs under Python 3 (behavior is identical on Python 2).
        for i in range(0, nums_len - k + 1):
            res.append(max(nums[i:i + k]))
        return res
# test (Python 2 print statement; expected output: [3, 3, 5, 5, 6, 7])
s = Solution()
print s.maxSlidingWindow([1,3,-1,-3,5,3,6,7], 3)
|
bssrdf/leetcode-7
|
solutions/239.Sliding_Window_Maximum/AC_brute_force_nk.py
|
Python
|
gpl-2.0
| 668
|
# -*- coding: utf-8 -*-
from __future__ import with_statement
try:
from PIL import Image, GifImagePlugin, JpegImagePlugin, PngImagePlugin, TiffImagePlugin
except ImportError:
import Image, GifImagePlugin, JpegImagePlugin, PngImagePlugin, TiffImagePlugin
import logging
import os
import subprocess
# import tempfile
from module.plugins.internal.Plugin import Plugin
from module.utils import save_join as fs_join
class OCR(Plugin):
__name__ = "OCR"
__type__ = "ocr"
__version__ = "0.19"
__status__ = "testing"
__description__ = """OCR base plugin"""
__license__ = "GPLv3"
__authors__ = [("pyLoad Team", "admin@pyload.org")]
def __init__(self, plugin):
self._init(plugin.pyload)
self.plugin = plugin
self.init()
def init(self):
"""
Initialize additional data structures
"""
pass
def _log(self, level, plugintype, pluginname, messages):
return self.plugin._log(level,
plugintype,
self.plugin.__name__,
(self.__name__,) + messages)
def load_image(self, image):
self.image = Image.open(image)
self.pixels = self.image.load()
self.result_captcha = ""
def deactivate(self):
"""
Delete all tmp images
"""
pass
def threshold(self, value):
self.image = self.image.point(lambda a: a * value + 10)
def run(self, command):
"""
Run a command
"""
popen = subprocess.Popen(command, bufsize=-1, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
popen.wait()
output = popen.stdout.read() + " | " + popen.stderr.read()
popen.stdout.close()
popen.stderr.close()
self.pyload.log_debug("Tesseract ReturnCode " + popen.returncode, "Output: " + output)
def run_tesser(self, subset=False, digits=True, lowercase=True, uppercase=True, pagesegmode=None):
# tmpTif = tempfile.NamedTemporaryFile(suffix=".tif")
try:
tmpTif = open(fs_join("tmp", "tmpTif_%s.tif" % self.__name__), "wb")
tmpTif.close()
# tmpTxt = tempfile.NamedTemporaryFile(suffix=".txt")
tmpTxt = open(fs_join("tmp", "tmpTxt_%s.txt" % self.__name__), "wb")
tmpTxt.close()
except IOError, e:
self.log_error(e)
return
self.pyload.log_debug("Saving tiff...")
self.image.save(tmpTif.name, 'TIFF')
if os.name is "nt":
tessparams = [os.path.join(pypath, "tesseract", "tesseract.exe")]
else:
tessparams = ["tesseract"]
tessparams.extend([os.path.abspath(tmpTif.name), os.path.abspath(tmpTxt.name).replace(".txt", "")])
if pagesegmode:
tessparams.extend(["-psm", str(pagesegmode)])
if subset and (digits or lowercase or uppercase):
# tmpSub = tempfile.NamedTemporaryFile(suffix=".subset")
with open(fs_join("tmp", "tmpSub_%s.subset" % self.__name__), "wb") as tmpSub:
tmpSub.write("tessedit_char_whitelist ")
if digits:
tmpSub.write("0123456789")
if lowercase:
tmpSub.write("abcdefghijklmnopqrstuvwxyz")
if uppercase:
tmpSub.write("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
tmpSub.write("\n")
tessparams.append("nobatch")
tessparams.append(os.path.abspath(tmpSub.name))
self.pyload.log_debug("Running tesseract...")
self.run(tessparams)
self.pyload.log_debug("Reading txt...")
try:
with open(tmpTxt.name, 'r') as f:
self.result_captcha = f.read().replace("\n", "")
except Exception:
self.result_captcha = ""
self.pyload.log_info(_("OCR result: ") + self.result_captcha)
try:
os.remove(tmpTif.name)
os.remove(tmpTxt.name)
if subset and (digits or lowercase or uppercase):
os.remove(tmpSub.name)
except OSError, e:
self.log_warning(e)
def recognize(self, name):
raise NotImplementedError
def to_greyscale(self):
if self.image.mode != 'L':
self.image = self.image.convert('L')
self.pixels = self.image.load()
def eval_black_white(self, limit):
self.pixels = self.image.load()
w, h = self.image.size
for x in xrange(w):
for y in xrange(h):
if self.pixels[x, y] > limit:
self.pixels[x, y] = 255
else:
self.pixels[x, y] = 0
def clean(self, allowed):
pixels = self.pixels
w, h = self.image.size
for x in xrange(w):
for y in xrange(h):
if pixels[x, y] == 255:
continue
#: No point in processing white pixels since we only want to remove black pixel
count = 0
try:
if pixels[x - 1, y - 1] != 255:
count += 1
if pixels[x - 1, y] != 255:
count += 1
if pixels[x - 1, y + 1] != 255:
count += 1
if pixels[x, y + 1] != 255:
count += 1
if pixels[x + 1, y + 1] != 255:
count += 1
if pixels[x + 1, y] != 255:
count += 1
if pixels[x + 1, y - 1] != 255:
count += 1
if pixels[x, y - 1] != 255:
count += 1
except Exception:
pass
#: Not enough neighbors are dark pixels so mark this pixel
#: To be changed to white
if count < allowed:
pixels[x, y] = 1
#: Second pass: this time set all 1's to 255 (white)
for x in xrange(w):
for y in xrange(h):
if pixels[x, y] == 1:
pixels[x, y] = 255
self.pixels = pixels
def derotate_by_average(self):
"""
Rotate by checking each angle and guess most suitable
"""
w, h = self.image.size
pixels = self.pixels
for x in xrange(w):
for y in xrange(h):
if pixels[x, y] == 0:
pixels[x, y] = 155
highest = {}
counts = {}
for angle in xrange(-45, 45):
tmpimage = self.image.rotate(angle)
pixels = tmpimage.load()
w, h = self.image.size
for x in xrange(w):
for y in xrange(h):
if pixels[x, y] == 0:
pixels[x, y] = 255
count = {}
for x in xrange(w):
count[x] = 0
for y in xrange(h):
if pixels[x, y] == 155:
count[x] += 1
sum = 0
cnt = 0
for x in count.values():
if x != 0:
sum += x
cnt += 1
avg = sum / cnt
counts[angle] = cnt
highest[angle] = 0
for x in count.values():
if x > highest[angle]:
highest[angle] = x
highest[angle] = highest[angle] - avg
hkey = 0
hvalue = 0
for key, value in highest.items():
if value > hvalue:
hkey = key
hvalue = value
self.image = self.image.rotate(hkey)
pixels = self.image.load()
for x in xrange(w):
for y in xrange(h):
if pixels[x, y] == 0:
pixels[x, y] = 255
if pixels[x, y] == 155:
pixels[x, y] = 0
self.pixels = pixels
def split_captcha_letters(self):
captcha = self.image
started = False
letters = []
width, height = captcha.size
bottomY, topY = 0, height
pixels = captcha.load()
for x in xrange(width):
black_pixel_in_col = False
for y in xrange(height):
if pixels[x, y] != 255:
if not started:
started = True
firstX = x
lastX = x
if y > bottomY:
bottomY = y
if y < topY:
topY = y
if x > lastX:
lastX = x
black_pixel_in_col = True
if black_pixel_in_col is False and started is True:
rect = (firstX, topY, lastX, bottomY)
new_captcha = captcha.crop(rect)
w, h = new_captcha.size
if w > 5 and h > 5:
letters.append(new_captcha)
started = False
bottomY, topY = 0, height
return letters
def correct(self, values, var=None):
if var:
result = var
else:
result = self.result_captcha
for key, item in values.items():
if key.__class__ is str:
result = result.replace(key, item)
else:
for expr in key:
result = result.replace(expr, item)
if var:
return result
else:
self.result_captcha = result
|
jansohn/pyload
|
module/plugins/internal/OCR.py
|
Python
|
gpl-3.0
| 9,734
|
# Benchmark the calculation of a distance in a PDB file
# The distance is the closest distance between any atoms of residues 50 and 60
# of chain A in 1AKE
import time
from chemfiles import Trajectory, Selection
def distance(frame):
    """Return the closest distance between any atom of residue 50 and any
    atom of residue 60 (first 1000 atoms, i.e. chain A of 1AKE).

    Returns float('inf') if either selection is empty.
    """
    # FIXME: this should use Selection("resid 50 and [chainname] A") which will
    # be available in chemfiles 0.10 (the next release)
    r50 = Selection("resid 50 and index < 1000").evaluate(frame)
    r60 = Selection("resid 60 and index < 1000").evaluate(frame)
    # Fix: renamed from `min`, which shadowed the builtin of the same name.
    min_dist = float('inf')
    for i in r50:
        for j in r60:
            r = frame.distance(i, j)
            if r < min_dist:
                min_dist = r
    return min_dist
# Load the single frame from the PDB file and time one distance computation.
pdb_filepath = "data/1AKE.pdb"
frame = Trajectory(pdb_filepath).read()
start = time.time()
distance(frame)
end = time.time()
print(end - start)  # elapsed seconds for the benchmark
|
jgreener64/pdb-benchmarks
|
chemfiles/distance.py
|
Python
|
mit
| 817
|
from setuptools import setup
# Package metadata and install configuration for the Blended static site
# generator; the `blended` console script maps to blended/__main__.py:cli.
setup(name='blended',
      version='5.0.2',
      description='The Most Versatile Static HTML Site Generator',
      url='http://jmroper.com/blended/',
      author='John Roper',
      author_email='johnroper100@gmail.com',
      license='GPL3.0',
      packages=['blended'],
      # Runtime dependencies: CLI (click/colorama), file watching (watchdog),
      # markup converters (Markdown/textile/docutils/mammoth/pyjade), and
      # CSS/JS pre- and post-processors.
      install_requires=[
          'click',
          'colorama',
          'watchdog',
          'Markdown',
          'textile',
          'docutils',
          'mammoth',
          'libsass',
          'pyjade',
          'lesscpy',
          'stylus',
          'coffeescript',
          'jsmin',
          'cssmin',
          'untangle',
      ],
      entry_points={
          'console_scripts': [
              'blended=blended.__main__:cli',
          ],
      },
      zip_safe=False)
|
BlendedSiteGenerator/Blended
|
setup.py
|
Python
|
gpl-3.0
| 801
|
#!/usr/bin/env python
# Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Manage TCP ports for unit tests; started by run_tests.py"""
import argparse
import BaseHTTPServer
import hashlib
import os
import socket
import sys
import time
argp = argparse.ArgumentParser(description='Server for httpcli_test')
argp.add_argument('-p', '--port', default=12345, type=int)
args = argp.parse_args()
print 'port server running on port %d' % args.port
pool = []    # available port numbers, refilled lazily by refill_pool()
in_use = {}  # port -> allocation timestamp; reservations expire after 600s
# Fingerprint of this file, served at /version so run_tests.py can detect
# a stale server process.
with open(__file__) as f:
  _MY_VERSION = hashlib.sha1(f.read()).hexdigest()
def refill_pool():
  """Scan for ports not marked for being in use"""
  # Probe candidate ports by binding to them; stop once the pool holds a
  # comfortable surplus (>100 entries).
  for i in range(10000, 65000):
    if len(pool) > 100: break
    if i in in_use:
      age = time.time() - in_use[i]
      if age < 600:
        continue  # reserved less than ten minutes ago -- still considered taken
      del in_use[i]
    # NOTE(review): the probe socket is closed before the port is handed
    # out, so another process could grab it in between -- confirm callers
    # tolerate that race.
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
      s.bind(('localhost', i))
      pool.append(i)
    except:
      pass # we really don't care about failures
    finally:
      s.close()
def allocate_port():
  """Pop a free port from the pool (refilling it if empty), mark it in use
  with the current timestamp, and return it."""
  global pool
  global in_use
  if not pool:
    refill_pool()
  port = pool[0]
  pool = pool[1:]  # rebind rather than pop, so other aliases are untouched
  in_use[port] = time.time()
  return port
keep_running = True
class Handler(BaseHTTPServer.BaseHTTPRequestHandler):
  # HTTP interface: /get allocates a port, /version reports this file's
  # sha1 fingerprint, /quit stops the serve loop.

  def do_GET(self):
    global keep_running
    if self.path == '/get':
      # allocate a new port, it will stay bound for ten minutes and until
      # it's unused
      self.send_response(200)
      self.send_header('Content-Type', 'text/plain')
      self.end_headers()
      p = allocate_port()
      self.log_message('allocated port %d' % p)
      self.wfile.write('%d' % p)
    elif self.path == '/version':
      # fetch a version string and the current process pid
      self.send_response(200)
      self.send_header('Content-Type', 'text/plain')
      self.end_headers()
      self.wfile.write(_MY_VERSION)
    elif self.path == '/quit':
      self.send_response(200)
      self.end_headers()
      keep_running = False
httpd = BaseHTTPServer.HTTPServer(('', args.port), Handler)
# Single-threaded serve loop; a /quit request flips keep_running to exit.
while keep_running:
  httpd.handle_request()
print 'done'
|
fichter/grpc
|
tools/run_tests/port_server.py
|
Python
|
bsd-3-clause
| 3,575
|
"""
SleekXMPP: The Sleek XMPP Library
Copyright (C) 2010 Nathanael C. Fritz
This file is part of SleekXMPP.
See the file license.txt for copying permission.
"""
from . import base
from xml.etree import cElementTree
class StanzaPath(base.MatcherBase):
    """Matcher that accepts stanzas matching a stanza-path criteria string.

    All matching logic is delegated to the stanza's own ``match`` method,
    which understands the path syntax stored in ``self._criteria``.
    """
    def match(self, stanza):
        # True if the stanza satisfies the configured stanza path.
        return stanza.match(self._criteria)
|
sezuan/SleekXMPP
|
sleekxmpp/xmlstream/matcher/stanzapath.py
|
Python
|
mit
| 335
|
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import sys
import paddle
import paddle.fluid as fluid
import math
import sys
# need to fix random seed and training data to compare the loss
# value accurately calculated by the default and the memory optimization
# version.
fluid.default_startup_program().random_seed = 111
def resnet_cifar10(input, depth=32):
    """Build a CIFAR-10 ResNet; `depth` must satisfy depth = 6*n + 2.

    Returns the final average-pooled feature tensor.
    """
    def conv_bn_layer(input, ch_out, filter_size, stride, padding, act='relu'):
        # Bias-free convolution followed by batch-norm, which also applies
        # the activation.
        tmp = fluid.layers.conv2d(
            input=input,
            filter_size=filter_size,
            num_filters=ch_out,
            stride=stride,
            padding=padding,
            act=None,
            bias_attr=False)
        return fluid.layers.batch_norm(input=tmp, act=act)

    def shortcut(input, ch_in, ch_out, stride):
        # 1x1 projection when the channel count changes; identity otherwise.
        if ch_in != ch_out:
            return conv_bn_layer(input, ch_out, 1, stride, 0, None)
        else:
            return input

    def basicblock(input, ch_in, ch_out, stride):
        # Standard two-convolution residual block.
        tmp = conv_bn_layer(input, ch_out, 3, stride, 1)
        tmp = conv_bn_layer(tmp, ch_out, 3, 1, 1, act=None)
        short = shortcut(input, ch_in, ch_out, stride)
        return fluid.layers.elementwise_add(x=tmp, y=short, act='relu')

    def layer_warp(block_func, input, ch_in, ch_out, count, stride):
        # One stage: the first block may downsample, the rest keep stride 1.
        tmp = block_func(input, ch_in, ch_out, stride)
        for i in range(1, count):
            tmp = block_func(tmp, ch_out, ch_out, 1)
        return tmp

    assert (depth - 2) % 6 == 0
    # Fix: floor division keeps `n` an int under Python 3; plain `/` would
    # yield a float and break range(1, count) in layer_warp. On Python 2
    # the result is identical.
    n = (depth - 2) // 6
    conv1 = conv_bn_layer(
        input=input, ch_out=16, filter_size=3, stride=1, padding=1)
    res1 = layer_warp(basicblock, conv1, 16, 16, n, 1)
    res2 = layer_warp(basicblock, res1, 16, 32, n, 2)
    res3 = layer_warp(basicblock, res2, 32, 64, n, 2)
    pool = fluid.layers.pool2d(
        input=res3, pool_size=8, pool_type='avg', pool_stride=1)
    return pool
def vgg16_bn_drop(input):
    """VGG-16 with batch-norm and dropout; returns the last 4096-wide FC layer."""
    def conv_block(input, num_filter, groups, dropouts):
        # `groups` conv+BN+ReLU layers (with per-layer dropout rates)
        # followed by 2x2 max pooling.
        return fluid.nets.img_conv_group(
            input=input,
            pool_size=2,
            pool_stride=2,
            conv_num_filter=[num_filter] * groups,
            conv_filter_size=3,
            conv_act='relu',
            conv_with_batchnorm=True,
            conv_batchnorm_drop_rate=dropouts,
            pool_type='max')
    conv1 = conv_block(input, 64, 2, [0.3, 0])
    conv2 = conv_block(conv1, 128, 2, [0.4, 0])
    conv3 = conv_block(conv2, 256, 3, [0.4, 0.4, 0])
    conv4 = conv_block(conv3, 512, 3, [0.4, 0.4, 0])
    conv5 = conv_block(conv4, 512, 3, [0.4, 0.4, 0])
    # Classifier head: dropout -> FC -> BN+ReLU -> dropout -> FC.
    drop = fluid.layers.dropout(x=conv5, dropout_prob=0.5)
    fc1 = fluid.layers.fc(input=drop, size=4096, act=None)
    bn = fluid.layers.batch_norm(input=fc1, act='relu')
    drop2 = fluid.layers.dropout(x=bn, dropout_prob=0.5)
    fc2 = fluid.layers.fc(input=drop2, size=4096, act=None)
    return fc2
# ---- network construction ------------------------------------------------
classdim = 10
data_shape = [3, 32, 32]
images = fluid.layers.data(name='pixel', shape=data_shape, dtype='float32')
label = fluid.layers.data(name='label', shape=[1], dtype='int64')
# First CLI argument selects the network: "vgg" (default) or "resnet".
net_type = "vgg"
if len(sys.argv) >= 2:
    net_type = sys.argv[1]
if net_type == "vgg":
    print("train vgg net")
    net = vgg16_bn_drop(images)
elif net_type == "resnet":
    print("train resnet")
    net = resnet_cifar10(images, 32)
else:
    raise ValueError("%s network is not supported" % net_type)
predict = fluid.layers.fc(input=net, size=classdim, act='softmax')
cost = fluid.layers.cross_entropy(input=predict, label=label)
avg_cost = fluid.layers.mean(cost)
optimizer = fluid.optimizer.Adam(learning_rate=0.001)
opts = optimizer.minimize(avg_cost)
batch_size = fluid.layers.create_tensor(dtype='int64')
batch_acc = fluid.layers.accuracy(input=predict, label=label, total=batch_size)
# Memory-optimization transform under test (release rather than reuse).
# fluid.memory_optimize(fluid.default_main_program(), level=0)
fluid.release_memory(fluid.default_main_program())
BATCH_SIZE = 16
PASS_NUM = 1
# fix the order of training data
train_reader = paddle.batch(
    paddle.dataset.cifar.train10(), batch_size=BATCH_SIZE)
# train_reader = paddle.batch(
#     paddle.reader.shuffle(
#         paddle.dataset.cifar.train10(), buf_size=128 * 10),
#     batch_size=BATCH_SIZE)
# ---- training loop -------------------------------------------------------
place = fluid.CPUPlace()
exe = fluid.Executor(place)
feeder = fluid.DataFeeder(place=place, feed_list=[images, label])
exe.run(fluid.default_startup_program())
i = 0
accuracy = fluid.average.WeightedAverage()
for pass_id in range(PASS_NUM):
    accuracy.reset()
    for data in train_reader():
        loss, acc, weight = exe.run(
            fluid.default_main_program(),
            feed=feeder.feed(data),
            fetch_list=[avg_cost, batch_acc, batch_size])
        accuracy.add(value=acc, weight=weight)
        pass_acc = accuracy.eval()
        print("loss:" + str(loss) + " acc:" + str(acc) + " pass_acc:" + str(
            pass_acc))
        # this model is slow, so if we can train two mini batch, we think it works properly.
        if i > 0:
            exit(0)
        if math.isnan(float(loss)):
            sys.exit("got NaN loss, training failed.")
        i += 1
# Reaching here means not even two mini-batches completed: report failure.
exit(1)
|
Canpio/Paddle
|
python/paddle/fluid/tests/book_memory_optimization/test_memopt_image_classification_train.py
|
Python
|
apache-2.0
| 5,674
|
#
# InsCfgTab.py -- Plugin to display/edit the instrument configuration in a table GUI
#
import datetime
from qtpy import QtCore
from qtpy import QtWidgets as QtGui
from qplan import entity
from qplan.plugins import QueueFileTab
class InsCfgTab(QueueFileTab.QueueCfgFileTab):
    # GUI tab for displaying/editing the instrument configuration table.

    def build_table(self):
        # Build the standard config table, then give the model the
        # back-references it needs to propagate edits (proposal ID and this
        # tab, for enabling the File->Save menu item).
        super(InsCfgTab, self).build_table('InsCfgTab', 'TableModel')
        self.table_model.proposal = self.proposal
        self.table_model.insCfgTab = self
class TableModel(QueueFileTab.TableModel):
    # Editable Qt table model backing the instrument configuration tab.

    def __init__(self, inputData, columns, data, qmodel, logger):
        super(TableModel, self).__init__(inputData, columns, data, qmodel,
                                         logger)
        # When True, edited values are re-parsed by the QueueModel.
        self.parse_flag = True
        # Proposal ID; assigned by InsCfgTab.build_table after construction.
        self.proposal = None

    def setData(self, index, value, role = QtCore.Qt.EditRole):
        # We implement the setData method so that the ObsList table
        # can be editable. If we are called with
        # role=QtCore.Qt.EditRole, that means the user has changed a
        # value in the table. Check to make sure the new value is
        # acceptable. If not, reset the cell to the original value.
        if role == QtCore.Qt.EditRole:
            row, col = index.row(), index.column()
            colHeader = self.columns[col]
            # Update the value in the table
            self.logger.debug("Setting model_data row %d col %d to %s" % (
                row, col, value))
            self.model_data[row][col] = value
            # Update the programs data structure in the QueueModel.
            self.qmodel.update_inscfg(self.proposal, row, colHeader, value,
                                      self.parse_flag)
            # inscfg data has changed, so enable the File->Save menu
            # item
            self.insCfgTab.enable_save_item()
            # Emit the dataChanged signal, as required by PyQt4 for
            # implementations of the setData method.
            self.dataChanged.emit(index, index)
            return True
        else:
            return False
|
naojsoft/qplan
|
qplan/plugins/InsCfgTab.py
|
Python
|
bsd-3-clause
| 2,049
|
class PjaxrMixin(object):
    """
    View mixin that provides pjaxr functionality.

    Compares the view's namespace against the namespace sent by the
    previous pjaxr request (X-PJAX-Namespace header) to decide how much
    of the page must be re-rendered.
    """
    namespace = ""
    parent_namespace = ""
    matching_count = 0

    def get_matching_count(self, request):
        """
        Return how many leading dot-separated namespace components of the
        previous pjaxr request agree with this view's namespace.
        """
        if not self.is_pjaxr_request(request):
            return 0
        own_parts = self.namespace.split(".")
        prev_parts = self.get_previous_namespace(request).split(".")
        matched = 0
        # Walk the two component lists in lockstep, counting the common prefix.
        for own, prev in zip(own_parts, prev_parts):
            if own != prev:
                break
            matched += 1
        return matched

    def get_previous_namespace(self, request):
        # Namespace the client sent with the pjaxr request, or "" if absent.
        header = request.META.get('HTTP_X_PJAX_NAMESPACE', False)
        if self.is_pjaxr_request(request) and header:
            return request.META['HTTP_X_PJAX_NAMESPACE']
        return ""

    def is_pjaxr_request(self, request):
        # A request is pjaxr iff it carries a non-empty namespace header.
        return bool(request.META.get('HTTP_X_PJAX_NAMESPACE', False))

    def get_context_data(self, **kwargs):
        context = super(PjaxrMixin, self).get_context_data(**kwargs)
        context.update({'pjaxr_namespace_current': self.namespace})
        context.update({'pjaxr_namespace_parent': self.parent_namespace})
        return context
class IekadouPjaxrMixin(PjaxrMixin):
    """Pjaxr mixin with four fixed nesting levels (site/page/content/inner)."""
    pjaxr_site = True
    pjaxr_page = True
    pjaxr_content = True
    pjaxr_inner_content = True

    def dispatch(self, request, *args, **kwargs):
        # Decide which fragments need re-rendering before normal dispatch:
        # a fragment at depth d is re-rendered when fewer than d+1 leading
        # namespace components matched the previous request.
        matched = self.get_matching_count(request)
        self.matching_count = matched
        self.pjaxr_site = matched <= 0
        self.pjaxr_page = matched <= 1
        self.pjaxr_content = matched <= 2
        self.pjaxr_inner_content = matched <= 3
        return super(IekadouPjaxrMixin, self).dispatch(request, *args, **kwargs)
|
iekadou/django-pjaxr
|
django_pjaxr/mixins.py
|
Python
|
mit
| 2,048
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.